This document is somewhat lengthy because it uses complex data structures (perhaps for visualization purposes).
It also relies on an economic interpretation, which I find hard to follow.
Therefore, I explicitly wrote out the Lagrangian dual of the primal MIP, which makes the column generation subprocedure more intelligible.
Comments are welcome.
import JuMP, Gurobi; GRB_ENV = Gurobi.Env();
import LinearAlgebra.I as Identity
# 📘 Data for the Cutting Stock Problem
w = [75.0, 53.8, 53.0, 51.0, 50.2, 32.2, 30.8, 29.8, 20.1, 16.2, 14.5, 11.0, 8.6, 8.2, 6.6, 5.1]; # subroll lengths (in meters)
d = [189, 33, 36, 41, 35, 37, 44, 49, 37, 36, 42, 33, 47, 35, 49, 42]; # demanded quantity of each subroll length
W = 100; # width of a raw roll
I = length(d); @assert length(w) == I # I = number of distinct subroll lengths
# 🟣 Part I
# The following is a primary large-scale deterministic MIP formulation of the Cutting Stock Problem.
# The chief advantage ✅ of this problem formulation is that it can deliver a globally optimal solution,
# provided you allow enough computing time.
function deterministic_mip(J) # J is a prescribed (large) upper bound on the number of raw rolls;
    # if the resulting termination status is INFEASIBLE, enlarge J.
    # Returns the globally optimal number of raw rolls.
    det = JuMP.Model(() -> Gurobi.Optimizer(GRB_ENV))
    JuMP.@variable(det, b[1:J], Bin) # b[j] == 1 ⟺ the j-th roll is used
    JuMP.@variable(det, n[1:I, 1:J] ≥ 0, Int) # n[i, j] is the number of (length == w[i])-subrolls cut from the j-th roll
    JuMP.@constraint(det, sum(n; dims = 2) .≥ d) # meet the demand
    JuMP.@constraint(det, w'n .≤ b'W) # width limit of each raw roll (a roll supplies width W only if b[j] == 1)
    JuMP.@objective(det, Min, sum(b)) # number of used raw rolls
    JuMP.optimize!(det); JuMP.assert_is_solved_and_feasible(det; allow_local = false)
    num = JuMP.objective_value(det)
    println("The global optimal value is $(num)")
    return num # return the optimum instead of discarding it (`println` returns `nothing`)
end
function deterministic_mip(b0, n0) # variant of the formulation above that warm-starts the MIP solver
    # with a feasible solution: b0 = per-roll usage indicators, n0 = per-roll cutting counts.
    # Returns the globally optimal number of raw rolls.
    J = length(b0) # 🟠 the number of rolls is taken from the starting solution
    det = JuMP.Model(() -> Gurobi.Optimizer(GRB_ENV))
    JuMP.@variable(det, b[1:J], Bin) # b[j] == 1 ⟺ the j-th roll is used
    JuMP.@variable(det, n[1:I, 1:J] ≥ 0, Int) # n[i, j] is the number of (length == w[i])-subrolls cut from the j-th roll
    JuMP.@constraint(det, sum(n; dims = 2) .≥ d) # meet the demand
    JuMP.@constraint(det, w'n .≤ b'W) # width limit of each raw roll
    JuMP.@objective(det, Min, sum(b)) # number of used raw rolls
    JuMP.set_start_value.(b, b0) # 🟠 hand the prepared feasible solution to the solver
    JuMP.set_start_value.(n, n0) # 🟠
    JuMP.optimize!(det); JuMP.assert_is_solved_and_feasible(det; allow_local = false)
    num = JuMP.objective_value(det)
    println("The global optimal value is $(num)")
    return num # return the optimum instead of discarding it (`println` returns `nothing`)
end
# Do a blind (the number 500 is a trial upper bound on the roll count) and brute-force (entirely by the MIP solver) solve
deterministic_mip(500); # 🖥️: The global optimal value is 334.0
# 🟣 Part II
# We introduce a pattern perspective of the Cutting Stock Problem and write an iterative algorithm---Column Generation.
# Each pattern is encoded into a column of the matrix `P` below.
# We justify the action "recruiting a new column" through the lens of Dual Cutting Plane method.
P = Identity(I); # ⚠️ These initial columns (one pure pattern per subroll length) should make the initial `outer_mip` feasible
COT = 1e-6; # cut-off tolerance for the pricing violation test --- a hyperparameter
function outer_mip(P) # restricted master problem over the currently recruited columns of `P`;
    # employed to find feasible/suboptimal solutions.
    # Returns (objective value, per-pattern usage counts).
    J = size(P, 2)
    outer = JuMP.Model(() -> Gurobi.Optimizer(GRB_ENV)); JuMP.set_silent(outer)
    JuMP.@variable(outer, n[1:J] ≥ 0, Int) # n[j] = number of raw rolls cut according to pattern j
    JuMP.@constraint(outer, [i = 1:I], view(P, i, :)'n ≥ d[i]) # [π ≥ 0] 🟡 meet demand of subroll i
    JuMP.@objective(outer, Min, sum(n)) # total number of raw rolls used
    JuMP.optimize!(outer); JuMP.assert_is_solved_and_feasible(outer; allow_local = false)
    # return a plain tuple — the original `return ub, n = a, b` assigned dead locals as a side effect
    return JuMP.objective_value(outer), JuMP.value.(n)
end;
# establish in advance (part of) the pricing subproblem:
# its objective (set inside the main loop) will seek the pattern with the largest dual value
subproblem = JuMP.Model(() -> Gurobi.Optimizer(GRB_ENV)); JuMP.set_silent(subproblem);
JuMP.@variable(subproblem, p[1:I] ≥ 0, Int); JuMP.@constraint(subproblem, w'p ≤ W); # a feasible pattern: subroll counts that fit on one raw roll
# establish in advance (part of) the Lagrangian dual problem of the primal `outer_mip` above
outerDual = JuMP.Model(() -> Gurobi.Optimizer(GRB_ENV)); JuMP.set_silent(outerDual);
JuMP.@variable(outerDual, π[1:I] ≥ 0); JuMP.@objective(outerDual, Max, d'π); # dual prices π, one per demand constraint
J = size(P, 2); # current number of recruited columns (patterns)
JuMP.@constraint(outerDual, [j = 1:J], 1 ≥ view(P, :, j)'π); # [⇐ n ≥ 0] 🟡 one dual constraint per column
while true # the aim of this main loop is to recruit as many advantageous columns as possible,
# in order to lower the primal objective value of the `outer_mip`.
# This goal is in turn pursued by attempting to lower the Lagrangian dual bound of `outer_mip`.
# Hence, a Dual Cutting Plane method (i.e. the `@constraint` below) is employed
Π, lb = let # solve the current dual relaxation: Π is a dual point, lb its (possibly invalid) bound
JuMP.optimize!(outerDual); JuMP.assert_is_solved_and_feasible(outerDual; allow_local = false)
JuMP.value.(π), JuMP.objective_value(outerDual)
end
p_new, vio_level = let # pricing: find the feasible pattern maximizing Π'p;
# vio_level = Π'p_new - 1 measures how much p_new violates the dual constraint 1 ≥ p'π
JuMP.@objective(subproblem, Max, Π'p)
JuMP.optimize!(subproblem); JuMP.assert_is_solved_and_feasible(subproblem; allow_local = false)
JuMP.value.(p), JuMP.objective_value(subproblem) - 1
end
if vio_level > COT # ✅ Note that this `if` is a hallmark of the Cutting Plane method
@info "At J = $J, (invalid) lb = $lb"
JuMP.@constraint(outerDual, 1 ≥ p_new'π) # add a new cut to lower the lb of the outer_mip
global P = [P p_new] # recruit a new advantageous column
global J = size(P, 2) # update J
else
@info "At J = $J, (invalid) lb = $lb\nno more advantageous columns can be found, thus break"
break
end
end # during the execution of this loop, we observe that `lb` is indeed iteratively lowered.
# Note that `size(P, 2)` is enlarged compared to the initial Identity state.
# Next we generate a feasible solution (in the pattern formulation) to the primal problem.
# This feasible solution cannot assure global optimality because we have no idea
# whether there is still an advantageous column unrecruited. And I'm unaware of
# an efficient algorithm to identify it without branching.
ub, n = outer_mip(P) # ub = number of rolls used, n = usage count of each pattern (column of P)
# 🟣 Part III
# The feasible solution found in Part II is suboptimal in general. But we can employ it as a
# starting solution delivered to the MIP solver, so that we end up with a global solution.
# To this end, we convert the pattern solution (n, P) into the per-roll form (b0, n0).
j_vec = findall(>(0.5), n)  # indices of the patterns that are actually used
n = round.(Int, n[j_vec])   # integer usage count of each kept pattern
P = round.(Int, P[:, j_vec]) # the kept patterns, as an integer matrix
# replicate pattern j into n[j] identical roll-columns, then glue all columns side by side
n0 = reduce(hcat, [repeat(P[:, j], 1, n[j]) for j in eachindex(n)])
b0 = trues(size(n0, 2))     # every one of these rolls is marked as used
@assert all(sum(n0; dims = 2) .≥ d) # do a demand feasibility check
# you can compare this "prepared" solve with that blind one in Part I
deterministic_mip(b0, n0) # 🖥️: The global optimal value is 334.0