# Minimal reproduction: building a vector that mixes a GenericNonlinearExpr
# (from `sin(x1)`) with an affine expression. Base.vect promotes all elements
# to a common type via `convert`, which is where the failure occurs.
using JuMP
model = Model()
x1 = @variable(model, base_name = "x1")
x2 = x1 + 1
# NOTE: the `::AffExpr` annotation converts x1 (a VariableRef) into an
# AffExpr with a single term whose coefficient is 1 and constant 0.
x3::AffExpr = x1
# Works: x1 is of type VariableRef
v1 = [sin(x1), x1]
# Works: x2 is of type AffExpr
v2 = [sin(x1), x2]
# Does not work: x3 is also of type AffExpr
v3 = [sin(x1), x3]
The last line hits an error inside `Base.vect`, so I am not sure whether this is a JuMP issue or a Base issue.
That certainly looks like a bug. I believe the bug is in `nlp_expr.jl` in JuMP:
"""
    Base.convert(::Type{<:GenericNonlinearExpr}, x::GenericAffExpr{C,V})

Convert the affine expression `x` into a `GenericNonlinearExpr{V}` by
collecting its terms as `:+` arguments. Terms with a unit coefficient are
pushed as bare variables, other nonzero terms as `:*` sub-expressions, and
the constant is appended when it is nonzero (or when there are no terms at
all, so the argument list is never empty).
"""
function Base.convert(
    ::Type{<:GenericNonlinearExpr},
    x::GenericAffExpr{C,V},
) where {C,V}
    args = Any[]
    for (variable, coef) in x.terms
        if isone(coef)
            push!(args, variable)
        elseif !iszero(coef)
            push!(args, GenericNonlinearExpr{V}(:*, coef, variable))
        end
    end
    if !iszero(x.constant) || isempty(args)
        push!(args, x.constant)
    end
    # BUG FIX: the old `length(args) == 1 && return args[1]` shortcut could
    # return a bare variable or number, violating the `Base.convert` contract
    # that the result is of the requested type. That broke promotion in
    # `Base.vect` for single-term affine expressions such as `x3` above.
    # Always go through the constructor so the return type is guaranteed.
    return GenericNonlinearExpr{V}(:+, args)
end
When `args` has length 1, the return value does not go through the `GenericNonlinearExpr` constructor, so it can be of the wrong type — `Base.convert` is required to return a value of the requested type.
Removing the `if length(args) == 1` branch makes the example above run without error. That may also be the correct fix, since the branch appears to exist only as an optimization.