Greedy merge and better size reduction #43

Open: wants to merge 12 commits into base branch `main`.
29 changes: 28 additions & 1 deletion examples/rule_discovery.jl
@@ -82,4 +82,31 @@ branching_region = SimpleGraph(Graphs.SimpleEdge.(edges))
# Generate the tree-like N3 neighborhood of R
graph = tree_like_N3_neighborhood(branching_region)

solve_opt_rule(branching_region, graph, vs)


# ## Generating rules for large scale problems
# For large-scale problems, we can use the greedy merge strategy to generate branching rules, which avoids enumerating all candidate clauses up front.
function solve_greedy_rule(branching_region, graph, vs)
## Use default solver and measure
m = D3Measure()
table_solver = TensorNetworkSolver(; prune_by_env=true)

## Pruning irrelevant entries
ovs = OptimalBranchingMIS.open_vertices(graph, vs)
subg, vmap = induced_subgraph(graph, vs)
@info "solving the branching table..."
tbl = OptimalBranchingMIS.reduced_alpha_configs(table_solver, subg, Int[findfirst(==(v), vs) for v in ovs])
@info "the length of the truth_table after pruning irrelevant entries: $(length(tbl.table))"

@info "generating the optimal branching rule via greedy merge..."
candidates = OptimalBranchingCore.bit_clauses(tbl)
result = OptimalBranchingMIS.OptimalBranchingCore.greedymerge(candidates, MISProblem(graph), vs, m)
@info "the greedily minimized gamma: $(result.γ)"

@info "the branching rule on R:"
viz_dnf(result.optimal_rule, vs)
return result
end

result = solve_greedy_rule(branching_region, graph, vs)
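
As exercised by the tests later in this diff, the greedy merge can also be plugged into the high-level interface as a set-cover solver. A minimal sketch (assuming a small random regular graph; not part of the example file):

```julia
using OptimalBranchingMIS, OptimalBranchingCore
using OptimalBranchingMIS.EliminateGraphs.Graphs

# Swap the exact IP set-cover solver for the greedy merge introduced in this PR.
g = random_regular_graph(20, 3)
bs = BranchingStrategy(table_solver = TensorNetworkSolver(),
                       selector = MinBoundaryHighDegreeSelector(2, 6, 0),
                       measure = D3Measure(),
                       set_cover_solver = OptimalBranchingCore.GreedyMerge())
mis, nbranch = mis_branch_count(g; branching_strategy = bs)
```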
2 changes: 1 addition & 1 deletion lib/OptimalBranchingCore/src/OptimalBranchingCore.jl
@@ -30,5 +30,5 @@ include("interfaces.jl")
include("branching_table.jl")
include("setcovering.jl")
include("branch.jl")

include("greedymerge.jl")
end
29 changes: 17 additions & 12 deletions lib/OptimalBranchingCore/src/branch.jl
@@ -15,10 +15,15 @@ A [`OptimalBranchingResult`](@ref) object representing the optimal branching rule
"""
function optimal_branching_rule(table::BranchingTable, variables::Vector, problem::AbstractProblem, m::AbstractMeasure, solver::AbstractSetCoverSolver)
candidates = candidate_clauses(table)
size_reductions = [measure(problem, m) - measure(first(apply_branch(problem, candidate, variables)), m) for candidate in candidates]
return minimize_γ(table, candidates, size_reductions, solver; γ0=2.0)
size_reductions = [size_reduction(problem, m, candidate, variables) for candidate in candidates]
return minimize_γ(table, candidates, size_reductions, solver; γ0 = 2.0)
end

function size_reduction(p::AbstractProblem, m::AbstractMeasure, cl::Clause{INT}, variables::Vector) where {INT}
return measure(p, m) - measure(first(apply_branch(p, cl, variables)), m)
end


"""
BranchingStrategy
BranchingStrategy(; kwargs...)
@@ -31,20 +36,20 @@ A struct representing the configuration for a solver, including the reducer and
- `selector::AbstractSelector`: The selector to select the next branching variable or decision.
- `m::AbstractMeasure`: The measure to evaluate the performance of the branching strategy.
"""
@kwdef struct BranchingStrategy{TS<:AbstractTableSolver, SCS<:AbstractSetCoverSolver, SL<:AbstractSelector, M<:AbstractMeasure}
@kwdef struct BranchingStrategy{TS <: AbstractTableSolver, SCS <: AbstractSetCoverSolver, SL <: AbstractSelector, M <: AbstractMeasure}
set_cover_solver::SCS = IPSolver()
table_solver::TS
selector::SL
measure::M
end
Base.show(io::IO, config::BranchingStrategy) = print(io,
"""
BranchingStrategy
├── table_solver - $(config.table_solver)
├── set_cover_solver - $(config.set_cover_solver)
├── selector - $(config.selector)
└── measure - $(config.measure)
""")
Base.show(io::IO, config::BranchingStrategy) = print(io,
"""
BranchingStrategy
├── table_solver - $(config.table_solver)
├── set_cover_solver - $(config.set_cover_solver)
├── selector - $(config.selector)
└── measure - $(config.measure)
""")

"""
branch_and_reduce(problem::AbstractProblem, config::BranchingStrategy; reducer::AbstractReducer=NoReducer(), result_type=Int)
@@ -73,7 +78,7 @@ function branch_and_reduce(problem::AbstractProblem, config::BranchingStrategy,
variables = select_variables(rp, config.measure, config.selector) # select a subset of variables
tbl = branching_table(rp, config.table_solver, variables) # compute the BranchingTable
result = optimal_branching_rule(tbl, variables, rp, config.measure, config.set_cover_solver) # compute the optimal branching rule
return sum(result.optimal_rule.clauses) do branch # branch and recurse
return sum(get_clauses(result)) do branch # branch and recurse
subproblem, localvalue = apply_branch(rp, branch, variables)
branch_and_reduce(subproblem, config, reducer, result_type) * result_type(localvalue) * reducedvalue
end
43 changes: 43 additions & 0 deletions lib/OptimalBranchingCore/src/greedymerge.jl
@@ -0,0 +1,43 @@
struct GreedyMerge <: AbstractSetCoverSolver end
function optimal_branching_rule(table::BranchingTable, variables::Vector, problem::AbstractProblem, m::AbstractMeasure, solver::GreedyMerge)
candidates = bit_clauses(table)
return greedymerge(candidates, problem, variables, m)
end

function bit_clauses(tbl::BranchingTable{INT}) where {INT}
n, bss = tbl.bit_length, tbl.table
temp_clauses = [[Clause(bmask(INT, 1:n), bs) for bs in bss1] for bss1 in bss]
return temp_clauses
end

function greedymerge(cls::Vector{Vector{Clause{INT}}}, problem::AbstractProblem, variables::Vector, m::AbstractMeasure) where {INT}
cls = copy(cls)
size_reductions = [size_reduction(problem, m, first(candidate), variables) for candidate in cls]
local γ
while true
γ = complexity_bv(size_reductions)
minval = zero(γ)
minidx = (-1, -1, -1, -1)
local minclause
local minred
for i ∈ 1:length(cls), j ∈ i+1:length(cls)
for ii in 1:length(cls[i]), jj in 1:length(cls[j])
cl12 = gather2(length(variables), cls[i][ii], cls[j][jj])
if cl12.mask == 0
continue
end
reduction = size_reduction(problem, m, cl12, variables)
val = γ^(-reduction) - γ^(-size_reductions[i]) - γ^(-size_reductions[j])
if val < minval
minval, minidx, minclause, minred = val, (i, j, ii, jj), cl12, reduction
end
end
end
minidx == (-1, -1, -1, -1) && break # no more merging
deleteat!(cls, minidx[1:2])
deleteat!(size_reductions, minidx[1:2])
push!(cls, [minclause])
push!(size_reductions, minred)
end
return OptimalBranchingResult(DNF([cl[1] for cl in cls]), size_reductions, γ)
end
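
A brief restatement of the merge criterion in the loop above (for review only, not part of the diff): with γ the complexity of the current branching vector, merging clauses i and j is worthwhile exactly when the merged branch weighs less than the two branches it replaces; the pair with the most negative gain is merged, and the loop stops once no pair improves.

```julia
# Sketch of the quantity minimized above (assumed names, not part of the PR):
# it is negative exactly when replacing branches i and j by the merged branch
# lowers the total weight sum_k γ^(-Δρ_k) at the current γ.
merge_gain(γ, Δρ_merged, Δρ_i, Δρ_j) = γ^(-Δρ_merged) - γ^(-Δρ_i) - γ^(-Δρ_j)
```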
10 changes: 5 additions & 5 deletions lib/OptimalBranchingCore/src/setcovering.jl
@@ -101,18 +101,18 @@ end
The result type for the optimal branching rule.

### Fields
- `selected_ids::Vector{Int}`: The indices of the selected rows in the branching table.
- `optimal_rule::DNF{INT}`: The optimal branching rule.
- `branching_vector::Vector{T<:Real}`: The branching vector that records the size reduction in each subproblem.
- `γ::Float64`: The optimal γ value (the complexity of the branching rule).
"""
struct OptimalBranchingResult{INT <: Integer, T <: Real}
selected_ids::Vector{Int}
optimal_rule::DNF{INT}
branching_vector::Vector{T}
γ::Float64
end
Base.show(io::IO, results::OptimalBranchingResult{INT, T}) where {INT, T} = print(io, "OptimalBranchingResult{$INT, $T}:\n selected_ids: $(results.selected_ids)\n optimal_rule: $(results.optimal_rule)\n branching_vector: $(results.branching_vector)\n γ: $(results.γ)")
Base.show(io::IO, results::OptimalBranchingResult{INT, T}) where {INT, T} = print(io, "OptimalBranchingResult{$INT, $T}:\n optimal_rule: $(results.optimal_rule)\n branching_vector: $(results.branching_vector)\n γ: $(results.γ)")
get_clauses(results::OptimalBranchingResult) = results.optimal_rule.clauses
get_clauses(res::AbstractArray) = res

"""
minimize_γ(table::BranchingTable, candidates::Vector{Clause}, Δρ::Vector, solver)
@@ -140,7 +140,7 @@ function minimize_γ(table::BranchingTable, candidates::Vector{Clause{INT}}, Δρ

# Note: the following instance is captured for time saving, and also for it may cause IP solver to fail
for (k, subset) in enumerate(subsets)
(length(subset) == num_items) && return OptimalBranchingResult([k], DNF([candidates[k]]), [Δρ[k]], 1.0)
(length(subset) == num_items) && return OptimalBranchingResult(DNF([candidates[k]]), [Δρ[k]], 1.0)
end

cx_old = cx = γ0
@@ -153,7 +153,7 @@ function minimize_γ(table::BranchingTable, candidates::Vector{Clause{INT}}, Δρ
cx ≈ cx_old && break # convergence
cx_old = cx
end
return OptimalBranchingResult(picked_scs, DNF([candidates[i] for i in picked_scs]), Δρ[picked_scs], cx)
return OptimalBranchingResult(DNF([candidates[i] for i in picked_scs]), Δρ[picked_scs], cx)
end

# TODO: we need to extend this function to trim the candidate clauses
2 changes: 1 addition & 1 deletion lib/OptimalBranchingCore/test/branching_table.jl
@@ -1,5 +1,5 @@
using OptimalBranchingCore, GenericTensorNetworks
using BitBasis
using OptimalBranchingCore.BitBasis
using Test

@testset "branching table" begin
15 changes: 15 additions & 0 deletions lib/OptimalBranchingCore/test/greedymerge.jl
@@ -0,0 +1,15 @@
using Test
using OptimalBranchingCore
using OptimalBranchingCore: bit_clauses
using OptimalBranchingCore.BitBasis
using GenericTensorNetworks

@testset "bit_clauses" begin
tbl = BranchingTable(5, [
[StaticElementVector(2, [0, 0, 1, 0, 0]), StaticElementVector(2, [0, 1, 0, 0, 0])],
[StaticElementVector(2, [1, 0, 0, 1, 0])],
[StaticElementVector(2, [0, 0, 1, 0, 1])],
])

bc = bit_clauses(tbl)
end
3 changes: 0 additions & 3 deletions lib/OptimalBranchingCore/test/setcovering.jl
@@ -16,7 +16,6 @@ end
Δρ = [count_ones(c.mask) for c in clauses]
result_ip = OptimalBranchingCore.minimize_γ(tbl, clauses, Δρ, IPSolver(; max_itr = 10, verbose = false))
result_lp = OptimalBranchingCore.minimize_γ(tbl, clauses, Δρ, LPSolver(; max_itr = 10, verbose = false))
@test result_ip.selected_ids == result_lp.selected_ids
@test result_ip.branching_vector ≈ result_lp.branching_vector
@test result_ip.γ ≈ result_lp.γ ≈ 1.0

@@ -29,7 +28,6 @@ end
Δρ = [count_ones(c.mask) for c in clauses]
result_ip = OptimalBranchingCore.minimize_γ(tbl, clauses, Δρ, IPSolver(; max_itr = 10, verbose = false))
result_lp = OptimalBranchingCore.minimize_γ(tbl, clauses, Δρ, LPSolver(; max_itr = 10, verbose = false))
@test result_ip.selected_ids == result_lp.selected_ids
@test result_ip.branching_vector ≈ result_lp.branching_vector
@test result_ip.γ ≈ result_lp.γ ≈ 1.1673039782614185

@@ -47,7 +45,6 @@ end
Δρ = [count_ones(c.mask) for c in clauses]
result_ip = OptimalBranchingCore.minimize_γ(tbl, clauses, Δρ, IPSolver(max_itr = 10, verbose = false))
result_lp = OptimalBranchingCore.minimize_γ(tbl, clauses, Δρ, LPSolver(max_itr = 10, verbose = false))
@test result_ip.selected_ids == result_lp.selected_ids
@test result_ip.branching_vector ≈ result_lp.branching_vector
@test OptimalBranchingCore.covered_by(tbl, result_ip.optimal_rule)
@test OptimalBranchingCore.covered_by(tbl, result_lp.optimal_rule)
16 changes: 8 additions & 8 deletions lib/OptimalBranchingMIS/src/interfaces.jl
@@ -1,37 +1,37 @@
"""
mis_size(g::AbstractGraph; bs::BranchingStrategy = BranchingStrategy(table_solver = TensorNetworkSolver(), selector = MinBoundaryHighDegreeSelector(2, 6, 0), measure=D3Measure()), reducer::AbstractReducer = MISReducer())
mis_size(g::AbstractGraph; branching_strategy::BranchingStrategy = BranchingStrategy(table_solver = TensorNetworkSolver(), selector = MinBoundaryHighDegreeSelector(2, 6, 0), measure=D3Measure()), reducer::AbstractReducer = MISReducer())

Calculate the size of the Maximum Independent Set (MIS) for a given graph.

### Arguments
- `g::AbstractGraph`: The graph for which the MIS size is to be calculated.
- `bs::BranchingStrategy`: (optional) The branching strategy to be used. Defaults to a strategy using `table_solver=TensorNetworkSolver`, `selector=MinBoundaryHighDegreeSelector(2, 6, 0)`, and `measure=D3Measure`.
- `branching_strategy::BranchingStrategy`: (optional) The branching strategy to be used. Defaults to a strategy using `table_solver=TensorNetworkSolver`, `selector=MinBoundaryHighDegreeSelector(2, 6, 0)`, and `measure=D3Measure`.
- `reducer::AbstractReducer`: (optional) The reducer to be applied. Defaults to `MISReducer`.

### Returns
- An integer representing the size of the Maximum Independent Set for the given graph.
"""
function mis_size(g::AbstractGraph; bs::BranchingStrategy = BranchingStrategy(table_solver = TensorNetworkSolver(), selector = MinBoundaryHighDegreeSelector(2, 6, 0), measure=D3Measure()), reducer=MISReducer())
function mis_size(g::AbstractGraph; branching_strategy::BranchingStrategy = BranchingStrategy(table_solver = TensorNetworkSolver(), selector = MinBoundaryHighDegreeSelector(2, 6, 0), measure = D3Measure()), reducer = MISReducer())
p = MISProblem(g)
res = branch_and_reduce(p, bs, reducer, MaxSize)
res = branch_and_reduce(p, branching_strategy, reducer, MaxSize)
return res.size
end

"""
mis_branch_count(g::AbstractGraph; bs::BranchingStrategy = BranchingStrategy(table_solver = TensorNetworkSolver(), selector = MinBoundaryHighDegreeSelector(2, 6, 0), measure=D3Measure()), reducer=MISReducer())
mis_branch_count(g::AbstractGraph; branching_strategy::BranchingStrategy = BranchingStrategy(table_solver = TensorNetworkSolver(), selector = MinBoundaryHighDegreeSelector(2, 6, 0), measure=D3Measure()), reducer=MISReducer())

Calculate the size and the number of branches of the Maximum Independent Set (MIS) for a given graph.

### Arguments
- `g::AbstractGraph`: The graph for which the MIS size and the number of branches are to be calculated.
- `bs::BranchingStrategy`: (optional) The branching strategy to be used. Defaults to a strategy using `table_solver=TensorNetworkSolver`, `selector=MinBoundaryHighDegreeSelector(2, 6, 0)`, and `measure=D3Measure`.
- `branching_strategy::BranchingStrategy`: (optional) The branching strategy to be used. Defaults to a strategy using `table_solver=TensorNetworkSolver`, `selector=MinBoundaryHighDegreeSelector(2, 6, 0)`, and `measure=D3Measure`.
- `reducer::AbstractReducer`: (optional) The reducer to be applied. Defaults to `MISReducer`.

### Returns
- A tuple `(size, count)` where `size` is the size of the Maximum Independent Set and `count` is the number of branches.
"""
function mis_branch_count(g::AbstractGraph; branching_strategy::BranchingStrategy = BranchingStrategy(table_solver = TensorNetworkSolver(), selector = MinBoundaryHighDegreeSelector(2, 6, 0), measure=D3Measure()), reducer=MISReducer())
function mis_branch_count(g::AbstractGraph; branching_strategy::BranchingStrategy = BranchingStrategy(table_solver = TensorNetworkSolver(), selector = MinBoundaryHighDegreeSelector(2, 6, 0), measure = D3Measure()), reducer = MISReducer())
p = MISProblem(g)
res = branch_and_reduce(p, branching_strategy, reducer, MaxSizeBranchCount)
return (res.size, res.count)
end
end
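
For reference, a minimal usage sketch of the renamed `branching_strategy` keyword (assuming a small random regular graph; not part of the diff):

```julia
using OptimalBranchingMIS
using OptimalBranchingMIS.EliminateGraphs.Graphs

g = random_regular_graph(20, 3)
mis_size(g)  # default strategy and reducer

bs = BranchingStrategy(table_solver = TensorNetworkSolver(),
                       selector = MinBoundaryHighDegreeSelector(2, 6, 0),
                       measure = D3Measure())
mis, nbranch = mis_branch_count(g; branching_strategy = bs, reducer = MISReducer())
```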
18 changes: 16 additions & 2 deletions lib/OptimalBranchingMIS/src/types.jl
@@ -12,7 +12,7 @@ Represents a Maximum Independent Set (MIS) problem.

"""
mutable struct MISProblem <: AbstractProblem
g::SimpleGraph
g::SimpleGraph{Int}
end
Base.copy(p::MISProblem) = MISProblem(copy(p.g))
Base.show(io::IO, p::MISProblem) = print(io, "MISProblem($(nv(p.g)))")
@@ -83,4 +83,18 @@ function OptimalBranchingCore.measure(p::MISProblem, ::D3Measure)
dg = degree(g)
return Int(sum(max(d - 2, 0) for d in dg))
end
end

Review comment (Collaborator):

In this case, the other measures will fail to work. I think we should keep the previous one as a fallback if no special `size_reduction` is defined.

Oh, I noticed that this function is defined in another file; please move it here.

function OptimalBranchingCore.size_reduction(p::MISProblem, m::D3Measure, cl::Clause{INT}, variables::Vector) where {INT}
vertices_removed = removed_vertices(variables, p.g, cl)
isempty(vertices_removed) && return 0
sum = 0
for v in vertices_removed
sum += max(degree(p.g, v) - 2, 0)
end
vertices_removed_neighbors = setdiff(mapreduce(v -> neighbors(p.g, v), ∪, vertices_removed), vertices_removed)
for v in vertices_removed_neighbors
sum += max(degree(p.g, v) - 2,0) - max(degree(p.g, v) - 2 - count(vx -> vx ∈ vertices_removed, neighbors(p.g, v)), 0)
end
return sum
end
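
For reference (a restatement of the specialized method above, not part of the diff): with $S$ the set of removed vertices, $N(S)$ their neighborhood, and $d(v)$ the degree in the current graph, the method computes the drop in the D3 measure as

$$\Delta\rho = \sum_{v \in S} \max(d(v)-2,\,0) \;+\; \sum_{u \in N(S)\setminus S}\Big[\max(d(u)-2,\,0) - \max\big(d(u)-2-|N(u)\cap S|,\,0\big)\Big],$$

avoiding the construction of the reduced subproblem that the generic fallback in `branch.jl` would require.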
43 changes: 43 additions & 0 deletions lib/OptimalBranchingMIS/test/greedymerge.jl
@@ -0,0 +1,43 @@
using OptimalBranchingMIS
using OptimalBranchingMIS.EliminateGraphs.Graphs
using Test
using Random
using OptimalBranchingCore
using OptimalBranchingCore.BitBasis
using GenericTensorNetworks
using OptimalBranchingCore: bit_clauses
Random.seed!(1234)

# Example from arXiv:2412.07685 Fig. 1
@testset "GreedyMerge" begin
edges = [(1, 4), (1, 5), (3, 4), (2, 5), (4, 5), (1, 6), (2, 7), (3, 8)]
example_g = SimpleGraph(Graphs.SimpleEdge.(edges))
p = MISProblem(example_g)
tbl = BranchingTable(5, [
[StaticElementVector(2, [0, 0, 0, 0, 1]), StaticElementVector(2, [0, 0, 0, 1, 0])],
[StaticElementVector(2, [0, 0, 1, 0, 1])],
[StaticElementVector(2, [0, 1, 0, 1, 0])],
[StaticElementVector(2, [1, 1, 1, 0, 0])],
])
cls = bit_clauses(tbl)
res = OptimalBranchingCore.greedymerge(cls, p, [1, 2, 3, 4, 5], NumOfVertices())
clsf = res.optimal_rule.clauses
@test clsf[1].mask == cls[3][1].mask
@test clsf[1].val == cls[3][1].val
@test clsf[2].mask == cls[4][1].mask
@test clsf[2].val == cls[4][1].val
@test clsf[3].mask == 27
@test clsf[3].val == 16
end

@testset "GreedyMerge" begin
g = random_regular_graph(20, 3)
mis_num, count2 = mis_branch_count(g)
for reducer in [NoReducer(), MISReducer()]
for measure in [D3Measure(), NumOfVertices()]
bs = BranchingStrategy(table_solver = TensorNetworkSolver(), selector = MinBoundaryHighDegreeSelector(2, 6, 0), measure = measure, set_cover_solver = OptimalBranchingCore.GreedyMerge())
mis1, count1 = mis_branch_count(g; branching_strategy = bs, reducer)
@test mis1 == mis_num
end
end
end