Using the economic interpretation
To be honest, I didn’t understand its reasoning. But I can give a proof of its logic from a mathematical perspective, in which the Lagrangian dual problem is involved.
This problem comes in a different flavor from the one I studied (this problem appears to have no large-scale deterministic MIP formulation). No wonder I was a bit confused at an earlier time.
But things are clear to me now. My code for that cutting roll problem is simply
import JuMP, Gurobi; GRB_ENV = Gurobi.Env();
l = [2, 3]; # length (in meters) of each piece type
d = [4, 5]; # demand (in quantities) of each piece type
W = 7; # length of the raw roll
P = [3 0; 0 2]; # initial pattern matrix: P[i, j] = copies of piece i cut by pattern j
I, J = size(P) # J indicates the number of existing columns
@assert length(l) == length(d) == I
COT = 1e-6 # cut-off tolerance
# Global lower bound of the Min problem. A not-yet-proved lower bound must start
# at -Inf (the original `Inf` would falsely certify optimality if ever read
# before being assigned in the main loop).
glb = -Inf
# Solve the restricted master problem over the current pattern matrix `P` as an
# integer program, to obtain a feasible (possibly suboptimal) solution of the
# original cutting-roll problem.
# Reads the globals `I` (number of piece types), `d` (demand vector) and
# `GRB_ENV` (shared Gurobi environment).
# Returns `(ub, n)`: the objective value (number of raw rolls used — an upper
# bound for the original problem) and the per-column usage counts.
function outer_mip(P) # employed to find feasible/suboptimal solutions
    J = size(P, 2)
    outer = JuMP.Model(() -> Gurobi.Optimizer(GRB_ENV)); JuMP.set_silent(outer)
    JuMP.@variable(outer, n[1:J] ≥ 0, Int)
    JuMP.@constraint(outer, [i = 1:I], view(P, i, :)'n ≥ d[i]) # [π ≥ 0]
    JuMP.@objective(outer, Min, sum(n))
    JuMP.optimize!(outer); JuMP.assert_is_solved_and_feasible(outer; allow_local = false)
    # Plain tuple return; the original `return ub, n = ..., ...` performed a
    # pointless destructuring assignment whose only effect was returning the RHS.
    return JuMP.objective_value(outer), JuMP.value.(n)
end;
# Pricing subproblem: search for a cutting pattern `p` (integer piece counts)
# that fits on one raw roll (l'p ≤ W). Its objective is (re)set inside the main
# loop to the current dual prices.
subproblem = JuMP.Model(() -> Gurobi.Optimizer(GRB_ENV)); JuMP.set_silent(subproblem)
JuMP.@variable(subproblem, p[1:I] ≥ 0, Int); JuMP.@constraint(subproblem, l'p ≤ W) # a feasible pattern
# LP dual of the (relaxed) restricted master problem: maximize d'π subject to
# one constraint per recruited column. New columns are added as cuts in the loop.
outerDual = JuMP.Model(() -> Gurobi.Optimizer(GRB_ENV)); JuMP.set_silent(outerDual)
JuMP.@variable(outerDual, π[1:I] ≥ 0); JuMP.@objective(outerDual, Max, d'π)
J = size(P, 2)
JuMP.@constraint(outerDual, [j = 1:J], 1 ≥ view(P, :, j)'π) # [⇐ n ≥ 0]
while true # the aim of this main loop is to recruit as many advantageous columns as possible,
    # in order to lower the feasible objective value of the `outer_mip`.
    # This is in turn achieved by attempting to lower the DualBound of `outer_mip`.
    # Hence, a dual cutting plane method is employed
    Π, lb = let # current dual prices and dual objective (a candidate lower bound)
        JuMP.optimize!(outerDual); JuMP.assert_is_solved_and_feasible(outerDual; allow_local = false)
        JuMP.value.(π), JuMP.objective_value(outerDual)
    end;
    p_new, vio_level = let # price out the most violated pattern under Π
        JuMP.@objective(subproblem, Max, Π'p)
        JuMP.optimize!(subproblem); JuMP.assert_is_solved_and_feasible(subproblem; allow_local = false)
        JuMP.value.(p), JuMP.objective_value(subproblem) - 1 # reduced-cost violation of the column
    end;
    if vio_level > COT
        @info "At J = $J, (invalid) lb = $lb"
        JuMP.@constraint(outerDual, 1 ≥ p_new'π) # add a new cut to lower the lb of the outer_mip
        # `global` is required here: under Julia's soft-scope rule, a bare
        # `P = ...` inside a top-level loop in a non-interactive script creates
        # a NEW LOCAL (after a warning), so the recruited columns would never
        # reach the global `P` that `outer_mip(P)` consumes after the loop.
        global P = [P p_new] # recruit a new advantageous column
        global J = size(P, 2) # update J
    else
        global glb = lb
        @info "At J = $J, lb = $lb; no more advantageous columns can be found, thus break"
        break
    end
end
# After the main loop, we generate a feasible solution to the original problem
# by solving the restricted master problem as an integer program over the
# recruited columns. This feasible solution cannot assure global optimality:
# `glb` bounds only the LP relaxation over *all* columns, and we have no idea
# whether there is still an advantageous column (for the integer problem)
# unrecruited. And I'm unaware of an efficient algorithm to identify it.
ub, n = outer_mip(P)
@info "We find a feasible solution attaining the ObjVal = $ub, Lagrangian ObjBound = $glb, the solution is" n