Hi all

I have an optimisation problem of the following form.

```
# Problem size: an assignment of up to `rows` items into `cols` buckets.
rows = 6000
cols = 10

# Create the model with GLPK as the solver.
# NOTE(review): `with_optimizer` is the JuMP 0.19/0.20 API; on JuMP >= 0.21
# this would be `Model(GLPK.Optimizer)` — kept as-is to match the original.
model = Model(with_optimizer(GLPK.Optimizer))

# Binary decision variables: X[i, j] == 1 means entry (i, j) is selected.
@variable(model, X[1:rows, 1:cols], Bin)

# Objective — maximise the total value V of the selected entries.
V = rand(rows, cols)
@objective(model, Max, sum(X .* V));

# Constraint — each row may contribute at most one selected entry.
for i in 1:rows
    @constraint(model, sum(X[i, :]) <= 1)
end

# Constraint — each column's total selection count must lie between a
# per-column minimum and maximum volume.
# BUG FIX: both bound vectors are indexed by column (`minvols[j]` with
# j in 1:cols), so they must have length `cols`; the original sized
# `minvols` by `rows`, which only avoided a BoundsError because rows > cols.
minvols = rand(200:400, cols)
maxvols = rand(600:800, cols)
for j in 1:cols
    @constraint(model, minvols[j] <= sum(X[:, j]) <= maxvols[j])
end

# Solve and report wall time (first call also includes compilation time;
# use BenchmarkTools.@btime for a cleaner measurement).
@time optimize!(model)
```

My problem is that when the data set gets much larger — e.g. if `rows` is around 500,000 — the solver takes far too long.

Does anyone have any advice on how to approach a problem like this?