Continuing the discussion from Seeking help on Branch and Price:
Here I furnish a toy example to illustrate that doing simplicial decomposition (i.e., column generation) in the primal space is equivalent to adding cutting planes in the dual space.
import LinearAlgebra: dot # only `dot` is used below
import JuMP, Gurobi
# The toy problem investigated:
# In Euclidean 3-space, the full polytope is the convex hull of 4 vertices
# p1 = [1, 0, 0]
# p2 = [0, 1, 0]
# (let us call it p3 =) [1, 1, 1]
# (let us call it p4 =) [0, 0, 0]
# It is known that this polytope admits the constraint representation encoded in `pricing_subproblem` below
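# Optionally, as a quick sanity check, one may verify that each of the 4 vertices listed above
# satisfies the inequalities that will be used inside `pricing_subproblem` below:
for q in ([1, 0, 0], [0, 1, 0], [1, 1, 1], [0, 0, 0])
    x, y, z = q
    @assert z >= 0 && z <= x && z <= y && z >= x + y - 1
end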
# Suppose initially we have the 1st and 2nd vertices:
V = [ # initial vertex set (each column is a member vertex)
1 0
0 1
0 0.
]; # size(V, 1) is fixed as the dimension of the euclidean space
# but size(V, 2) may be increased by recruiting new columns (attempting to reach a better ObjVal)
d = [-1, 0, -1]; # the linear obj coefficient
begin
m = JuMP.Model(Gurobi.Optimizer); JuMP.set_silent(m); # the master problem, which is V-dependent
JuMP.@variable(m, c[1:size(V, 2)] >= 0) # convex-combination coefficient
JuMP.@variable(m, p[1:size(V, 1)]) # point p in the (restricted) polyhedron
JuMP.@constraint(m, θ, sum(c) == 1)
JuMP.@constraint(m, π[i = 1:size(V, 1)], dot(view(V, i, :), c) == p[i])
# Note: if JuMP were smart enough, we could have equivalently
# written the above line as ✅ JuMP.@constraint(m, π, V * c == p)
JuMP.@objective(m, Min, dot(d, p))
JuMP.optimize!(m); JuMP.assert_is_solved_and_feasible(m; allow_local = false, dual = true)
@info "At num_of_cols = $(size(V, 2)), ObjVal is $(JuMP.objective_value(m))"
end
θ2 = JuMP.dual(θ) # the "2" in "θ2" indicates that this trial (dual) solution is derived when size(V, 2) == 2
π2 = JuMP.dual.(π)
let # [Remark] the above 2 dual values returned by Gurobi constitute an optimal solution of the following 📘 dual program
m = JuMP.Model(Gurobi.Optimizer); JuMP.set_silent(m);
JuMP.@variable(m, θ)
JuMP.@variable(m, π[i = 1:size(V, 1)])
JuMP.@constraint(m, π .+ d .== 0) # [⇐ p free]
JuMP.@constraint(m, ones(size(V, 2)) * θ + V' * π .<= 0) # [⇐ c .>= 0]
# the above constraint can also be written as ✅ (θ .+ V' * π .<= 0)
JuMP.@objective(m, Max, θ)
JuMP.optimize!(m); JuMP.assert_is_solved_and_feasible(m; allow_local = false)
θ2 = JuMP.value(θ) # coincides with the θ2 obtained above (this assignment is local to the `let` block)
π2 = JuMP.value.(π) # likewise coincides with π2
end # Then, the aim 🟣 is to recruit a new vertex `p3` as the 3rd column of `V` such that (θ2 + p3' * π2 > 0)
# To find such a vertex, we employ the following well-known pricing subprocedure
function pricing_subproblem(neg_trial_π = -π2)
m = JuMP.Model(Gurobi.Optimizer); JuMP.set_silent(m);
JuMP.@variable(m, x)
JuMP.@variable(m, y)
JuMP.@variable(m, z >= 0)
JuMP.@constraint(m, z <= x)
JuMP.@constraint(m, z <= y)
JuMP.@constraint(m, z >= x + y - 1)
JuMP.@expression(m, p, [x, y, z])
JuMP.@objective(m, Min, dot(neg_trial_π, p))
JuMP.optimize!(m); JuMP.assert_is_solved_and_feasible(m; allow_local = false)
o3, p3 = JuMP.objective_value(m), JuMP.value.(p)
return o3, p3
end # if o3 < θ2, the aim 🟣 can be achieved and we can make progress by recruiting p3
o3, p3 = pricing_subproblem(-π2)
if o3 < θ2 - 1e-6
@info "We can find an advantageous vertex p3, thus recruit it"
V = [V p3]
else
@info "No more new vertex is advantageous"
end
# Finally, we double check that the master problem is indeed improved
# by re-executing the begin-end block above and observing
# The initial [ Info: At num_of_cols = 2, ObjVal is -1.0
# The improved [ Info: At num_of_cols = 3, ObjVal is -2.0
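# To see the primal-column ⟺ dual-cut equivalence explicitly, one may re-solve the 📘 dual program
# with V's new 3rd column, which amounts to adding the cutting plane θ + p3' * π <= 0 to it
# (a minimal sketch, assuming p3 = [1, 1, 1] was indeed recruited above):
let
m = JuMP.Model(Gurobi.Optimizer); JuMP.set_silent(m);
JuMP.@variable(m, θ)
JuMP.@variable(m, π[i = 1:size(V, 1)])
JuMP.@constraint(m, π .+ d .== 0)
JuMP.@constraint(m, θ .+ V' * π .<= 0) # V's 3rd column encodes the new cut θ + p3' * π <= 0
JuMP.@objective(m, Max, θ)
JuMP.optimize!(m); JuMP.assert_is_solved_and_feasible(m; allow_local = false)
@info "With the new cut, the dual ObjVal is $(JuMP.objective_value(m))" # -2.0, matching the improved master
end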
Here is a 2-block example, but it readily extends to situations with more than 2 blocks. Besides Dantzig-Wolfe relaxation, the same dual bound can also be derived by solving the Lagrangian dual problem. The merit of the latter lies in its constraint-generation nature, which facilitates coding and fits the modern MIP solver (branch-and-cut) framework.
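To give a flavor of that constraint-generation approach, here is a minimal sketch on a hypothetical 2-block toy (not the polytope above); the function name `lagrangian_dual_by_cuts`, the artificial bound `UB`, and the toy data are all illustrative assumptions. The linking constraint is dualized with a multiplier λ ≥ 0, and the Lagrangian dual function is outer-approximated by Kelley cuts collected from the (trivial) block subproblems.
import JuMP, Gurobi
# Hypothetical 2-block toy:  min  -x1 - x2
#                            s.t. x1 + x2 <= 1   (linking constraint, multiplier λ >= 0)
#                                 x1 ∈ [0, 1]    (block 1),  x2 ∈ [0, 1]  (block 2)
# Lagrangian dual:  max_{λ >= 0} L(λ),  where  L(λ) = -λ + Σ_i min_{xi ∈ [0, 1]} (λ - 1) * xi
function lagrangian_dual_by_cuts()
    UB = 10.0 # artificial bound so that the first master iterations stay bounded
    master = JuMP.Model(Gurobi.Optimizer); JuMP.set_silent(master)
    JuMP.@variable(master, 0 <= λ <= UB)
    JuMP.@variable(master, η <= UB) # η outer-approximates L(λ) from above
    JuMP.@objective(master, Max, η)
    for _ in 1:20
        JuMP.optimize!(master); JuMP.assert_is_solved_and_feasible(master)
        λ̂, η̂ = JuMP.value(λ), JuMP.value(η)
        x̂ = [λ̂ < 1 ? 1.0 : 0.0 for _ in 1:2] # each block subproblem: argmin of (λ̂ - 1) * xi over [0, 1]
        Lval = -λ̂ + sum((λ̂ - 1) * x̂[i] for i in 1:2) # true value of L at λ̂
        Lval >= η̂ - 1e-6 && return (λ̂, Lval) # the outer approximation is tight at λ̂: done
        JuMP.@constraint(master, η <= -λ + sum((λ - 1) * x̂[i] for i in 1:2)) # add a Kelley cut
    end
end
@info "Lagrangian dual bound (λ, L) = $(lagrangian_dual_by_cuts())" # converges to (1.0, -1.0), the LP bound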