Type Stability with Named Constraints in JuMP

I’ve got a Nash equilibrium calculation running through JuMP that names a constraint on the row player so that the column player can be extracted (see Efficient Nash Equilibria using JuMP - #6 by Ian_Slagle) .

function strat_vec(l::Int64, i::Int64)
    to_return = zeros(l)
    @inbounds to_return[i] = 1.0
    return to_return
end

minmax(R::Matrix{Float64}, m::Int64) = @inbounds mapreduce(x -> maximum(R[:, x]), min, 1:m)
maxmin(R::Matrix{Float64}, n::Int64) = @inbounds mapreduce(x -> minimum(R[x, :]), max, 1:n)
findminmax(R::Matrix{Float64}, n::Int64) = @inbounds strat_vec(n, argmax(map(x -> minimum(R[x, :]), 1:n)))
findmaxmin(R::Matrix{Float64}, m::Int64) = @inbounds strat_vec(m, argmin(map(x -> maximum(R[:, x]), 1:m)))

struct NashResult
    payoff::Float64
    row_strategy::Vector{Float64}
    column_strategy::Vector{Float64}
end

function nash(R::Matrix{Float64})
    n, m = size(R)

    # Check if we have to do linear programming
    n == 1 && return NashResult(minimum(R), vec([1.0]), strat_vec(m, argmin(vec(R))))
    m == 1 && return NashResult(maximum(R), strat_vec(n, argmax(vec(R))), vec([1.0]))
    minmax(R, m) == maxmin(R, n) && return NashResult(minmax(R, m), findminmax(R, n), findmaxmin(R, m))

    # Set up model and payoff
    model = direct_model(GLPK.Optimizer())
    @variable(model, z)
    @objective(model, Max, 1.0 * z)

    # Solve for row player
    @variable(model, x[1:n], lower_bound = 0.0)
    @constraint(model, c1, x' * R .>= z)
    @constraint(model, sum(x) == 1.0)

    optimize!(model)

    return NashResult(JuMP.value(z), JuMP.value.(x), vec(shadow_price.(c1)))
end

However, looking at @code_warntype shows that the named constraint c1 is Any

Variables
  #self#::Core.Const(RandomBattles.nash)
  R::Matrix{Float64}
  #80::RandomBattles.var"#80#81"{JuMP.Model}
  @_4::Int64
  ##327::JuMP.AffExpr
  c1::Any
  ##320::Any
  ##321::Matrix{JuMP.AffExpr}
  x::Vector{JuMP.VariableRef}
  ##318::Vector{JuMP.VariableRef}
  ##315::JuMP.AffExpr
  z::JuMP.VariableRef
  ##314::JuMP.VariableRef
  model::JuMP.Model
  m::Int64
  n::Int64
  ##316::JuMP.AffExpr
  ##317::MutableArithmetics.Zero
  ##322::Matrix{JuMP.AffExpr}
  ##323::LinearAlgebra.Adjoint{JuMP.AffExpr, Vector{JuMP.AffExpr}}
  ##324::MutableArithmetics.Zero
  ##328::JuMP.AffExpr
  ##329::JuMP.AffExpr
  ##330::MutableArithmetics.Zero

Is there any way to make this more type stable?

The type of c1 ends up being Matrix{JuMP.ConstraintRef{JuMP.Model, MathOptInterface.ConstraintIndex{MathOptInterface.ScalarAffineFunction{Float64}, MathOptInterface.GreaterThan{Float64}}, JuMP.ScalarShape}}, but annotating c1 by this where it first appears does not seem to work. Any help on this would be much appreciated.

The underlying issue is that constraints don’t infer

julia> @code_warntype foo()
Variables
  #self#::Core.Const(foo)
  ##483::AffExpr
  x::VariableRef
  ##480::VariableRef
  model::Model
  ##484::AffExpr
  ##485::VariableRef
  ##486::MutableArithmetics.Zero

Body::ConstraintRef{Model, _A, ScalarShape} where _A
1 ─       (model = Main.Model())
β”‚         JuMP._valid_model(model, :model)
β”‚         JuMP._error_if_cannot_register(model, :x)
β”‚   %4  = model::Model
β”‚   %5  = JuMP.VariableInfo(false, NaN, false, NaN, false, NaN, false, NaN, false, false)::Core.Const(VariableInfo{Float64, Float64, Float64, Float64}(false, NaN, false, NaN, false, NaN, false, NaN, false, false))
β”‚   %6  = JuMP.build_variable(JuMP.var"#_error#90"{LineNumberNode, Tuple{Symbol, Symbol}}(:(#= REPL[63]:3 =#), (:model, :x)), %5)::Core.Const(ScalarVariable{Float64, Float64, Float64, Float64}(VariableInfo{Float64, Float64, Float64, Float64}(false, NaN, false, NaN, false, NaN, false, NaN, false, false)))
β”‚         (##480 = JuMP.add_variable(%4, %6, "x"))
β”‚         Base.setindex!(model, ##480, :x)
β”‚         (x = ##480)
β”‚         JuMP._valid_model(model, :model)
β”‚   %11 = MutableArithmetics.Zero::Core.Const(MutableArithmetics.Zero)
β”‚         (##486 = (%11)())
β”‚   %13 = MutableArithmetics.operate!::Core.Const(MutableArithmetics.operate!)
β”‚   %14 = MutableArithmetics.add_mul::Core.Const(MutableArithmetics.add_mul)
β”‚   %15 = ##486::Core.Const(MutableArithmetics.Zero())
β”‚         (##485 = (%13)(%14, %15, x))
β”‚   %17 = MutableArithmetics.operate!::Core.Const(MutableArithmetics.operate!)
β”‚   %18 = MutableArithmetics.sub_mul::Core.Const(MutableArithmetics.sub_mul)
β”‚   %19 = ##485::VariableRef
β”‚         (##484 = (%17)(%18, %19, 0))
β”‚         (##483 = ##484)
β”‚   %22 = model::Model
β”‚   %23 = JuMP._functionize(##483)::AffExpr
β”‚   %24 = JuMP.build_constraint(JuMP.var"#_error#77"{Symbol, LineNumberNode}(Core.Box(Any[:model, :(x >= 0)]), :constraint, :(#= REPL[63]:4 =#)), %23, MathOptInterface.GreaterThan{Float64}(0.0))::ScalarConstraint{AffExpr, MathOptInterface.GreaterThan{Float64}}
β”‚   %25 = JuMP.add_constraint(%22, %24, "")::ConstraintRef{Model, _A, ScalarShape} where _A
└──       return %25

Have you checked this is a performance problem? If so, you could use a function barrier: Performance Tips Β· The Julia Language

1 Like

Rewrite it in scalar form; it will be faster anyway:

@constraint(model, c[j=1:m], sum(R[i,j]*x[i] for i in 1:n) >= z)
1 Like