For future reference, here I devise a "tough" disjoint bilinear problem parameterized by N::Int
(so that we don't need to download hard cases from MIPLIB). The problem has no constraints other than simple bounds on the decision variables. Its structure is very simple, so modeling it in JuMP is effortless.
# Disjoint bilinear test instance: Min x' Q y with only bound constraints on x and y.
import JuMP, Ipopt, Gurobi
import Random, LinearAlgebra, SparseArrays
# Choose one problem size; later assignments override earlier ones (timings observed with Gurobi):
N = 40 # 18 seconds
N = 50 # 754 seconds
N = 80 # > 8500 seconds, already very tough
Random.seed!(1) # fixed seed so the random instance is reproducible
Q = 1. * SparseArrays.dropzeros( # `1. *` promotes the Int entries to Float64; dropzeros removes explicitly stored zeros drawn by rand
SparseArrays.spdiagm(
[i => rand(-9:9, N - i) for i in 0:div(N, 2)]... # splat Pairs (offset => diagonal); the instance gets easier (sparser Q) if the `2` is increased
)
) # generate a random sparse quad-obj-coeff matrix
m = JuMP.Model();
# NOTE(review): the bound expressions appear intended to give each element its own random
# bounds; this assumes JuMP evaluates them once per index in the container — TODO confirm.
JuMP.@variable(m, rand(-7:-3) <= y[1:N] <= rand(3:7));
JuMP.@variable(m, rand(-9:-5) <= x[1:N] <= rand(5:9));
JuMP.@objective(m, Min, LinearAlgebra.dot(x, Q, y)); # bilinear (nonconvex) objective x' Q y
JuMP.set_optimizer(m, Gurobi.Optimizer) # Gurobi will use a MIP method
JuMP.optimize!(m); JuMP.solution_summary(m)
JuMP.set_optimizer(m, Ipopt.Optimizer) # Ipopt uses NLP method (local solve, no global guarantee)
JuMP.optimize!(m); JuMP.solution_summary(m)
Another, easier, dense case that is directly derived from a simple BQP (boolean quadratic programming) problem:
# Dense BQP instance: Min sum_{i,j} Q[i,j]*x[i]*y[j] over binary x, y,
# linearized exactly via the product variables z[i,j] = x[i]*y[j].
import LinearAlgebra.dot as dot
import JuMP, Gurobi
import Random
# Choose one problem size; the later assignment overrides the earlier one:
N = 20 # easier
N = 80 # hard
Random.seed!(1) # fixed seed for a reproducible instance
Q = rand(-1:.0017:1, N, N) # dense coefficient matrix with entries on a fine grid in [-1, 1]
m = JuMP.Model(Gurobi.Optimizer)
JuMP.@variable(m, x[1:N], Bin)
JuMP.@variable(m, y[1:N], Bin)
# The following 4 lines are the classic (McCormick-style) exact linearization of
# z[i,j] = x[i]*y[j] for binary x, y: z >= 0, z <= x, z <= y, z >= x + y - 1.
JuMP.@variable(m, z[1:N, 1:N] >= 0)
JuMP.@constraint(m, [j = 1:N], view(z, :, j) .<= x)
JuMP.@constraint(m, [i = 1:N], view(z, i, :) .<= y)
JuMP.@constraint(m, [i = 1:N, j = 1:N], z[i, j] >= x[i] + y[j] - 1)
JuMP.@objective(m, Min, dot(Q, z)) # linear objective in z, equal to x' Q y at binary points
JuMP.optimize!(m)