Error solving multiple JuMP models in parallel

Hi everyone,

I’m struggling with an error that I can’t really understand. I am trying to solve several JuMP.Models in parallel as a step of a decomposition algorithm that can exploit a specific structure of my model.

I prepared an example that is roughly the same in nature as what I am trying to do and returns the same error. I am using CPLEX to solve these very simple location-distribution problems with stochastic demands. I’m setting CPX_PARAM_THREADS=1 because I want to make sure each worker can work on its own problems independently.

Say I have this file ParallelExample.jl. Don’t get too attached to the model and instances. The important bit is the function generatemodels(), which generates an array of JuMP.Models to be solved independently.

using JuMP, CPLEX

type Data
   D::Array{Float64, 2}
   C::Array{Float64, 1}
   T::Array{Float64, 2}
   S::Array{Float64, 1}
   P::Array{Float64, 1}
end

function generateinstance(totalSuppliers::Int64, totalClients::Int64, totalScenarios::Int64)
   #generate a random instance of the problem
   D = rand(totalClients,totalScenarios)
   C = 100*rand(totalSuppliers)
   T = 10*rand(totalSuppliers, totalClients)
   S = ones(totalSuppliers)*100.0
   P = ones(totalScenarios).*1/totalScenarios
   return Data(D, C, T, S, P)
end

function generatemodel(ins::Data)
   #generate JuMP model
   Suppliers = 1:size(ins.C,1) #captures the total of suppliers
   Clients = 1:size(ins.D,1) #captures the total of clients
   Scenarios = 1:size(ins.P,1) #captures the total of scenarios

   m = Model(solver = CplexSolver(CPX_PARAM_SCRIND=0, CPX_PARAM_THREADS=1))
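   # CPX_PARAM_SCRIND=0 turns off CPLEX’s on-screen log; CPX_PARAM_THREADS=1 limits each solve to a single thread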

   @variables(m, begin
      y[Suppliers], Bin
      x[Suppliers, Clients, Scenarios] >= 0
   end)

   @constraints(m, begin
      demand[j in Clients, s in Scenarios], sum(x[i,j,s] for i in Suppliers) >= ins.D[j,s]
      supply[i in Suppliers, s in Scenarios], sum(x[i,j,s] for j in Clients) <= ins.S[i]*y[i]
   end)

   @objective(m, Min,
      sum(ins.C[i]*y[i] for i in Suppliers) +
      sum(ins.P[s]*(sum(ins.T[i,j]*x[i,j,s] for i in Suppliers, j in Clients)) for s in Scenarios));

   return m
end

function generatemodels(totalSuppliers::Int64, totalClients::Int64, totalScenarios::Int64, totalModels::Int64)
   # generate an array of models of same size
   modelArray = Array(JuMP.Model,totalModels)

   for i = 1:totalModels
      modelArray[i] =
         generatemodel(generateinstance(totalSuppliers, totalClients, totalScenarios))
   end

   return modelArray
end

function solvemodel(m::JuMP.Model)
   # solve and print obj. function value for each problem
   solve(m)
   println(getobjectivevalue(m))
end

What I am trying to accomplish is this:

addprocs(3) #say I have 4 processors available
@everywhere include("ParallelExample.jl")

# generate a JuMP.Model array with 50 elements of single-scenario problems
mA = generatemodels(5,10,1,50)

# serial version
for i = 1:length(mA)
   solvemodel(mA[i])
end

# parallel version
pmap(solvemodel, mA)

However, when I run the pmap call I get the following error:

On worker 2:
AssertionError: env.ptr != C_NULL
in get_error_msg at /Users/Fabricio/.julia/v0.5/CPLEX/src/cpx_env.jl:65
in Type at /Users/Fabricio/.julia/v0.5/CPLEX/src/cpx_env.jl:89 [inlined]
in set_varLB! at /Users/Fabricio/.julia/v0.5/CPLEX/src/cpx_vars.jl:115
in setvarLB! at /Users/Fabricio/.julia/v0.5/CPLEX/src/CplexSolverInterface.jl:116
in build#114 at /Users/Fabricio/.julia/v0.5/JuMP/src/solvers.jl:387
in build at ./:0
in #solve#109 at /Users/Fabricio/.julia/v0.5/JuMP/src/solvers.jl:166
in solvemodel at /Users/Fabricio/Julia-Codes/Testing and Learning/Stochastic Prog/Julia-examples/Problems/LocationRouting/ParallelExample.jl:58
in #645 at ./multi.jl:1421
in run_work_thunk at ./multi.jl:1001
in macro expansion at ./multi.jl:1421 [inlined]
in #644 at ./event.jl:68
in #remotecall_fetch#626(::Array{Any,1}, ::Function, ::Function, ::Base.Worker, ::JuMP.Model, ::Vararg{JuMP.Model,N}) at ./multi.jl:1070
in remotecall_fetch(::Function, ::Base.Worker, ::JuMP.Model, ::Vararg{JuMP.Model,N}) at ./multi.jl:1062
in #remotecall_fetch#629(::Array{Any,1}, ::Function, ::Function, ::Int64, ::JuMP.Model, ::Vararg{JuMP.Model,N}) at ./multi.jl:1080
in remotecall_fetch(::Function, ::Int64, ::JuMP.Model, ::Vararg{JuMP.Model,N}) at ./multi.jl:1080
in #remotecall_pool#709(::Array{Any,1}, ::Function, ::Function, ::Function, ::WorkerPool, ::JuMP.Model, ::Vararg{JuMP.Model,N}) at ./workerpool.jl:93
in remotecall_pool(::Function, ::Function, ::WorkerPool, ::JuMP.Model, ::Vararg{JuMP.Model,N}) at ./workerpool.jl:91
in #remotecall_fetch#712(::Array{Any,1}, ::Function, ::Function, ::WorkerPool, ::JuMP.Model, ::Vararg{JuMP.Model,N}) at ./workerpool.jl:124
in remotecall_fetch(::Function, ::WorkerPool, ::JuMP.Model, ::Vararg{JuMP.Model,N}) at ./workerpool.jl:124
in (::Base.###717#718#720{WorkerPool,#solvemodel})(::Array{Any,1}, ::Function, ::JuMP.Model, ::Vararg{JuMP.Model,N}) at ./workerpool.jl:151
in (::Base.##717#719)(::JuMP.Model, ::Vararg{JuMP.Model,N}) at ./workerpool.jl:151
in macro expansion at ./asyncmap.jl:63 [inlined]
in (::Base.##775#777{Base.AsyncCollector,Base.AsyncCollectorState})() at ./task.jl:360

…and 2 other exceptions.

in sync_end() at task.jl:311
in done(::Base.AsyncCollector, ::Base.AsyncCollectorState) at asyncmap.jl:124
in pump_source(::Base.AsyncGenerator, ::Base.AsyncGeneratorState) at asyncmap.jl:185
in next(::Base.AsyncGenerator, ::Base.AsyncGeneratorState) at asyncmap.jl:201
in _collect(::UnitRange{Int64}, ::Base.AsyncGenerator, ::Base.HasEltype, ::Base.SizeUnknown) at array.jl:282
in pmap#734(::Bool, ::Int64, ::Void, ::Int64, ::Float64, ::Function, ::Void, ::Void, ::Void, ::Base.pmap, ::WorkerPool, ::#solvemodel, ::Array{JuMP.Model,1}) at pmap.jl:121
in pmap(::WorkerPool, ::Function, ::Array{JuMP.Model,1}) at pmap.jl:80
in pmap#746(::Array{Any,1}, ::Function, ::Function, ::Array{JuMP.Model,1}) at pmap.jl:146
in pmap(::Function, ::Array{JuMP.Model,1}) at pmap.jl:146
in include_string(::String, ::String) at loading.jl:441
in include_string(::String, ::String, ::Int64) at eval.jl:28
in include_string(::Module, ::String, ::String, ::Int64, ::Vararg{Int64,N}) at eval.jl:32
in (::Atom.##53#56{String,Int64,String})() at eval.jl:40
in withpath(::Atom.##53#56{String,Int64,String}, ::String) at utils.jl:30
in withpath(::Function, ::String) at eval.jl:46
in macro expansion at eval.jl:57 [inlined]
in (::Atom.##52#55{Dict{String,Any}})() at task.jl:60

So, any ideas about what could be happening? I have tried Gurobi and I get the exact same error. Thanks in advance, everyone.

Since you created all the models in the main process, the CPLEX (and Gurobi) environments were only created locally (in the main process); hence, when you send the JuMP model to another process, the CPLEX structures are missing over there.

Something that works is: send the relevant data to each process, create the JuMP.Models on each process, solve them there, and then retrieve the results back to the main process.
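For example (a minimal sketch; build_and_solve is just a placeholder name, and it assumes the Data type and generatemodel from your file have already been loaded on every worker with @everywhere include):

@everywhere function build_and_solve(d::Data)
   m = generatemodel(d)          # the JuMP model and the solver environment are created on this worker
   solve(m)
   return getobjectivevalue(m)   # only a plain number travels back to the main process
end

instances = [generateinstance(5, 10, 1) for i in 1:50]   # plain Data objects are cheap to serialize
objectives = pmap(build_and_solve, instances)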


The scope rules for pmap are rather complex. I prefer to do my model construction with functions, which I place in a module.

Here is a simple example I put together for some labmates:

https://github.com/adowling2/DegeneracyHunter.jl/blob/master/Examples/DummyModule.jl

https://github.com/adowling2/DegeneracyHunter.jl/blob/master/Examples/basic_JuMP_ex3.jl
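
If it helps, here is a rough skeleton of that setup (MyModels, MyModels.jl, and buildandsolve are placeholder names for illustration, not the actual contents of DummyModule.jl; the links above have the real example):

module MyModels

using JuMP, Gurobi
export buildandsolve

function buildandsolve(n::Int)
   # the model and the Gurobi environment are created on whichever worker runs this
   m = Model(solver = GurobiSolver(OutputFlag = 0))
   @variable(m, x[1:n] >= 0)
   @constraint(m, sum(x[i] for i in 1:n) >= 1)
   @objective(m, Min, sum(i*x[i] for i in 1:n))
   solve(m)
   return getobjectivevalue(m)
end

end # module

and then, with the file saved as MyModels.jl in the working directory:

addprocs(3)
push!(LOAD_PATH, pwd())   # so Julia can find MyModels.jl
using MyModels            # on 0.5 this also loads the module on the workers; on newer versions use @everywhere using MyModels
pmap(buildandsolve, [10 for i in 1:20])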


Hi @joaquimg. Thanks for the fast response. I thought I was doing that by nesting my function calls, but I was obviously wrong. Doesn’t @everywhere include("ParallelExample.jl") accomplish that (i.e., load the CPLEX (Gurobi) environment on the workers)?

Would you be able to provide a simple example that would work in that case?

Thanks @adowling2. Can I ask you a few questions about your code?

  1. Does this
push!(LOAD_PATH,pwd())
using DummyModule

accomplish what I was trying to do with @everywhere include in my code?

  2. Do I need to use a module instead in this case?

  3. Would you recommend calling a new function that builds and solves my models (kind of like your solveOptProb2) instead of simply calling the solvemodel function in my pmap call?

I modified your example to work in parallel, but I don’t have CPLEX right now, so I just switched to Gurobi.

using JuMP, Gurobi

type Data
   D::Array{Float64, 2}
   C::Array{Float64, 1}
   T::Array{Float64, 2}
   S::Array{Float64, 1}
   P::Array{Float64, 1}
end

function generateinstance(totalSuppliers::Int64, totalClients::Int64, totalScenarios::Int64)
   #generate a random instance of the problem
   D = rand(totalClients,totalScenarios)
   C = 100*rand(totalSuppliers)
   T = 10*rand(totalSuppliers, totalClients)
   S = ones(totalSuppliers)*100.0
   P = ones(totalScenarios).*1/totalScenarios
   return Data(D, C, T, S, P)
end

function generatemodel(ins::Data)
   #generate JuMP model
   Suppliers = 1:size(ins.C,1) #captures the total of suppliers
   Clients = 1:size(ins.D,1) #captures the total of clients
   Scenarios = 1:size(ins.P,1) #captures the total of scenarios

   m = Model(solver = GurobiSolver(OutputFlag = 0))
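   # OutputFlag=0 turns off Gurobi’s log output (the analogue of CPX_PARAM_SCRIND=0)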

   @variables(m, begin
      y[Suppliers], Bin
      x[Suppliers, Clients, Scenarios] >= 0
   end)

   @constraints(m, begin
      demand[j in Clients, s in Scenarios], sum(x[i,j,s] for i in Suppliers) >= ins.D[j,s]
      supply[i in Suppliers, s in Scenarios], sum(x[i,j,s] for j in Clients) <= ins.S[i]*y[i]
   end)

   @objective(m, Min,
      sum(ins.C[i]*y[i] for i in Suppliers) +
      sum(ins.P[s]*(sum(ins.T[i,j]*x[i,j,s] for i in Suppliers, j in Clients)) for s in Scenarios));

   return m
end

function generatemodels(totalSuppliers::Int64, totalClients::Int64, totalScenarios::Int64, totalModels::Int64)
   # generate an array of models of same size
   modelArray = Array(JuMP.Model,totalModels)

   for i = 1:totalModels
      modelArray[i] =
         generatemodel(generateinstance(totalSuppliers, totalClients, totalScenarios))
   end

   return modelArray
end

function solvemodel(m::JuMP.Model)
   # solve and print obj. function value for each problem
   solve(m)
   println(getobjectivevalue(m))
end

function generate_then_solvemodel(totalSuppliers::Int64, totalClients::Int64, totalScenarios::Int64)
   m = generatemodel(generateinstance(totalSuppliers, totalClients, totalScenarios))

   # solve and print obj. function value for each problem
   solve(m)
   println(getobjectivevalue(m))

   return "solved by worker $(myid())"
end

and

addprocs(3) #say I have 4 processors available
@everywhere include("ParallelExample.jl")

nmodels = 10

# inputs for 10 single-scenario problems (5 suppliers, 10 clients, 1 scenario each)
input1 = [5 for i in 1:nmodels]
input2 = [10 for i in 1:nmodels]
input3 = [1 for i in 1:nmodels]

# parallel version
pmap(generate_then_solvemodel, input1, input2, input3)

the output is:

WARNING: replacing module JuMP.
WARNING: replacing module JuMP.
WARNING: replacing module JuMP.
WARNING: replacing module Gurobi.
WARNING: replacing module Gurobi.
WARNING: replacing module Gurobi.
        From worker 4:  46.286221290378
        From worker 3:  41.86076330510095
        From worker 2:  65.32815164708654
        From worker 2:  26.216639310283675
        From worker 4:  49.79294335654677
        From worker 3:  18.167539537416573
        From worker 2:  43.64356148426247
        From worker 3:  31.302449242301762
        From worker 4:  54.00330691408722
        From worker 2:  74.27794195183323
10-element Array{Any,1}:
 "solved by worker 2"
 "solved by worker 3"
 "solved by worker 4"
 "solved by worker 4"
 "solved by worker 3"
 "solved by worker 2"
 "solved by worker 2"
 "solved by worker 4"
 "solved by worker 3"
 "solved by worker 2"

Thank you so much @joaquimg. Now it is clear what you both meant. As @adowling2 said, the scope rules for pmap are not as straightforward as I originally thought. Hopefully this will be helpful to other people too.

Cheers.