My code (full and runnable) is:
import LinearAlgebra.dot as dot
import LinearAlgebra.norm as norm
import JuMP, Gurobi, Random
function my_callback_function(cb_data, cb_where::Cint)
    if cb_where == Gurobi.GRB_CB_MIPSOL # || cb_where == Gurobi.GRB_CB_MIPNODE
        Gurobi.load_callback_variable_primal(cb_data, cb_where) # required before callback_value
        # submit at most one violated linearization cut per callback invocation, then return
        for i in Random.shuffle(1:N), j in Random.shuffle(1:N)
            vx = JuMP.callback_value(cb_data, x[i])
            vy = JuMP.callback_value(cb_data, y[j])
            vz = JuMP.callback_value(cb_data, z[i, j])
            if vz > vx + COT
                JuMP.MOI.submit(m, JuMP.MOI.LazyConstraint(cb_data),
                    JuMP.@build_constraint(z[i, j] <= x[i])
                ); return
            elseif vz > vy + COT
                JuMP.MOI.submit(m, JuMP.MOI.LazyConstraint(cb_data),
                    JuMP.@build_constraint(z[i, j] <= y[j])
                ); return
            elseif vz < vx + vy - 1 - COT
                JuMP.MOI.submit(m, JuMP.MOI.LazyConstraint(cb_data),
                    JuMP.@build_constraint(z[i, j] >= x[i] + y[j] - 1)
                ); return
            end
        end
    end
end
const COT = 1e-6;
N = 10;
Random.seed!(1)
Q = rand(-1:.0017:1, N, N);
m = JuMP.Model(Gurobi.Optimizer);
JuMP.@variable(m, 0 <= x[1:N] <= 1, Bin);
JuMP.@variable(m, 0 <= y[1:N] <= 1, Bin);
JuMP.@variable(m, 0 <= z[1:N, 1:N] <= 1);
JuMP.@objective(m, Min, dot(Q, z));
JuMP.set_attribute(m, "LazyConstraints", 1);
JuMP.MOI.set(m, Gurobi.CallbackFunction(), my_callback_function);
JuMP.optimize!(m)
JuMP.assert_is_solved_and_feasible(m; allow_local = false)
norm(JuMP.value.(x) * JuMP.value.(y)' - JuMP.value.(z)) == 0 && @info "test success!"
The situation is: if you run this code, you will see “test success!”. However, if you remove that # on the if cb_where == Gurobi.GRB_CB_MIPSOL line (uncommenting || cb_where == Gurobi.GRB_CB_MIPNODE, so the callback also fires at MIPNODE), you will see ERROR: Gurobi Error 10005: after calling JuMP.optimize!(m).
FYI, here are Gurobi's docs: Error Codes and GRBcblazy.
I can’t fathom what is happening.
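For what it's worth, the solver-specific callback example in the JuMP/Gurobi.jl documentation (as I read it) adds a guard before loading primal values in the MIPNODE case; my code does not have it, and I have not verified whether its absence is what triggers Error 10005. A minimal sketch of that variant (the name my_callback_function_guarded is just for illustration):

function my_callback_function_guarded(cb_data, cb_where::Cint)
    # React only to MIPSOL (integer solutions) and MIPNODE (node relaxations).
    if cb_where != Gurobi.GRB_CB_MIPSOL && cb_where != Gurobi.GRB_CB_MIPNODE
        return
    end
    if cb_where == Gurobi.GRB_CB_MIPNODE
        # Only query the node relaxation if it has been solved to optimality.
        resultP = Ref{Cint}()
        Gurobi.GRBcbget(cb_data, cb_where, Gurobi.GRB_CB_MIPNODE_STATUS, resultP)
        resultP[] != Gurobi.GRB_OPTIMAL && return
    end
    Gurobi.load_callback_variable_primal(cb_data, cb_where)
    # ... same cut separation as in my_callback_function above ...
end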
Further Reading: Background of this case
This problem is a BQP (binary quadratic program), which is NP-hard in general.
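For reference, the model enforces z[i, j] = x[i] * y[j] through the standard McCormick linearization, which is exact when x[i] and y[j] are binary; these are precisely the three cuts separated in both code snippets:

z[i, j] <= x[i],    z[i, j] <= y[j],    z[i, j] >= x[i] + y[j] - 1,    with 0 <= z[i, j] <= 1.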
I wrote a macro that adds lazy cuts without Gurobi's callback.
import LinearAlgebra.dot as dot
import LinearAlgebra.norm as norm
import JuMP, Gurobi, Random
import JuMP.value as v
macro my_adding_lazy_cut()
    # Repeatedly: scan for one violated linearization cut, add it, re-solve, until none is violated.
    return esc(:(
        let t0 = time();
            while true
                exists_violation = false
                for i in Random.shuffle(1:N), j in Random.shuffle(1:N)
                    vx, vy, vz = v(x[i]), v(y[j]), v(z[i, j])
                    if vz > vx + COT
                        JuMP.@constraint(m, z[i, j] <= x[i])
                        exists_violation = true; break
                    elseif vz > vy + COT
                        JuMP.@constraint(m, z[i, j] <= y[j])
                        exists_violation = true; break
                    elseif vz < vx + vy - 1 - COT
                        JuMP.@constraint(m, z[i, j] >= x[i] + y[j] - 1)
                        exists_violation = true; break
                    end
                end
                if exists_violation
                    JuMP.optimize!(m); JuMP.assert_is_solved_and_feasible(m; allow_local = false)
                    @info "lb = $(JuMP.objective_bound(m))"
                else
                    @info "▶▶▶ (time=$(time() - t0) sec) The current trial solution is already feasible"
                    break
                end
            end
        end
    ))
end
const COT = 1e-6;
N = 9;  # at this scale, no useful lazy cut arises in the MIP phase (the problem is easy)
N = 10; # at this scale, more lazy cuts are needed in the MIP phase
# The BQP gets harder as you enlarge N
Random.seed!(1);
Q = rand(-1:.0017:1, N, N);
m = JuMP.Model(Gurobi.Optimizer);
JuMP.@variable(m, 0 <= x[1:N] <= 1);
JuMP.@variable(m, 0 <= y[1:N] <= 1);
JuMP.@variable(m, 0 <= z[1:N, 1:N] <= 1);
JuMP.@objective(m, Min, dot(Q, z));
JuMP.set_silent(m)
JuMP.optimize!(m); JuMP.assert_is_solved_and_feasible(m; allow_local = false)
@my_adding_lazy_cut() # LP phase: fast
let
    JuMP.set_integer.(x);
    JuMP.set_integer.(y);
    JuMP.optimize!(m); JuMP.assert_is_solved_and_feasible(m; allow_local = false)
    @info "lb = $(JuMP.objective_bound(m))"
end
@my_adding_lazy_cut() # MIP phase: slower
@info "see" JuMP.value.(x) JuMP.value.(y) JuMP.value.(z) norm(JuMP.value.(x) * JuMP.value.(y)' - JuMP.value.(z))