JuMP Space Shuttle example failing

Hello all,

First, let me say that I'm just starting out with Julia and I'm already very impressed with what can be achieved; it really is a well-designed language as far as I can tell.
I still mostly use Python for my PhD project, but I'd love to become proficient in Julia as well.

Specifically, I'm interested in nonlinear optimization, which is why I checked out the JuMP examples. The most recent addition is the Space Shuttle Reentry Trajectory example, which unfortunately throws a TypeError in the optimize!(model) call that I don't know how to fix (yet):

ERROR: LoadError: TypeError: in typeassert, expected Float64, got a value of type Int64

Stacktrace:

  [1] eval_univariate_2nd_deriv
    @ ~/.julia/packages/JuMP/y5vgk/src/_Derivatives/forward.jl:494 [inlined]
  [2] forward_eval_ϵ(storage::Vector{Float64}, storage_ϵ::JuMP._VectorView{ForwardDiff.Partials{5, Float64}}, partials_storage::Vector{Float64}, partials_storage_ϵ::JuMP._VectorView{ForwardDiff.Partials{5, Float64}}, nd::Vector{JuMP._Derivatives.NodeData}, adj::SparseArrays.SparseMatrixCSC{Bool, Int64}, x_values_ϵ::JuMP._VectorView{ForwardDiff.Partials{5, Float64}}, subexpression_values_ϵ::JuMP._VectorView{ForwardDiff.Partials{5, Float64}}, user_operators::JuMP._Derivatives.UserOperatorRegistry)
    @ JuMP._Derivatives ~/.julia/packages/JuMP/y5vgk/src/_Derivatives/forward.jl:415
  [3] _hessian_slice_inner(d::NLPEvaluator, ex::JuMP._FunctionStorage, input_ϵ::JuMP._VectorView{ForwardDiff.Partials{5, Float64}}, output_ϵ::JuMP._VectorView{ForwardDiff.Partials{5, Float64}}, #unused#::Type{Val{5}})
    @ JuMP ~/.julia/packages/JuMP/y5vgk/src/nlp.jl:1148
  [4] _hessian_slice(d::NLPEvaluator, ex::JuMP._FunctionStorage, x::Vector{Float64}, H::SubArray{Float64, 1, Vector{Float64}, Tuple{UnitRange{Int64}}, true}, scale::Float64, nzcount::Int64, recovery_tmp_storage::Vector{Float64}, #unused#::Type{Val{5}})
    @ JuMP ~/.julia/packages/JuMP/y5vgk/src/nlp.jl:1264
  [5] macro expansion
    @ ~/.julia/packages/JuMP/y5vgk/src/nlp.jl:1086 [inlined]
  [6] macro expansion
    @ ./timing.jl:287 [inlined]
  [7] eval_hessian_lagrangian(d::NLPEvaluator, H::SubArray{Float64, 1, Vector{Float64}, Tuple{UnitRange{Int64}}, true}, x::Vector{Float64}, obj_factor::Float64, lambda::SubArray{Float64, 1, Vector{Float64}, Tuple{UnitRange{Int64}}, true})
    @ JuMP ~/.julia/packages/JuMP/y5vgk/src/nlp.jl:1033
  [8] eval_hessian_lagrangian(model::Ipopt.Optimizer, values::Vector{Float64}, x::Vector{Float64}, obj_factor::Float64, lambda::Vector{Float64})
    @ Ipopt ~/.julia/packages/Ipopt/P1XLY/src/MOI_wrapper.jl:1219
  [9] (::Ipopt.var"#eval_h_cb#50"{Ipopt.Optimizer, Vector{Tuple{Int64, Int64}}})(x::Vector{Float64}, mode::Symbol, rows::Vector{Int32}, cols::Vector{Int32}, obj_factor::Float64, lambda::Vector{Float64}, values::Vector{Float64})
    @ Ipopt ~/.julia/packages/Ipopt/P1XLY/src/MOI_wrapper.jl:1329
 [10] eval_h_wrapper(n::Int32, x_ptr::Ptr{Float64}, new_x::Int32, obj_factor::Float64, m::Int32, lambda_ptr::Ptr{Float64}, new_lambda::Int32, nele_hess::Int32, iRow::Ptr{Int32}, jCol::Ptr{Int32}, values_ptr::Ptr{Float64}, user_data::Ptr{Nothing})
    @ Ipopt ~/.julia/packages/Ipopt/P1XLY/src/Ipopt.jl:267
 [11] solveProblem(prob::IpoptProblem)
    @ Ipopt ~/.julia/packages/Ipopt/P1XLY/src/Ipopt.jl:513
 [12] optimize!(model::Ipopt.Optimizer)
    @ Ipopt ~/.julia/packages/Ipopt/P1XLY/src/MOI_wrapper.jl:1441
 [13] optimize!(b::MathOptInterface.Bridges.LazyBridgeOptimizer{Ipopt.Optimizer})
    @ MathOptInterface.Bridges ~/.julia/packages/MathOptInterface/5WwpK/src/Bridges/bridge_optimizer.jl:293
 [14] optimize!(m::MathOptInterface.Utilities.CachingOptimizer{MathOptInterface.AbstractOptimizer, MathOptInterface.Utilities.UniversalFallback{MathOptInterface.Utilities.Model{Float64}}})
    @ MathOptInterface.Utilities ~/.julia/packages/MathOptInterface/5WwpK/src/Utilities/cachingoptimizer.jl:237
 [15] optimize!(model::Model, optimizer_factory::Nothing; bridge_constraints::Bool, ignore_optimize_hook::Bool, kwargs::Base.Iterators.Pairs{Union{}, Union{}, Tuple{}, NamedTuple{(), Tuple{}}})
    @ JuMP ~/.julia/packages/JuMP/y5vgk/src/optimizer_interface.jl:139
 [16] optimize! (repeats 2 times)
    @ ~/.julia/packages/JuMP/y5vgk/src/optimizer_interface.jl:115 [inlined]
 [17] top-level scope
    @ ~/code/julia/ss.jl:141
 [18] include(fname::String)
    @ Base.MainInclude ./client.jl:444
 [19] top-level scope
    @ none:1

The error is thrown right after the solver (Ipopt) is invoked.

Thanks for your help!

This was fixed in https://github.com/jump-dev/JuMP.jl/pull/2462, which is not yet part of the latest release. You can use the development version of JuMP to get the fix.
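
For example, here is a minimal sketch of installing the development version (this assumes the fix is already on JuMP's default master branch):

    # Install JuMP from the master branch instead of the registered release
    using Pkg
    Pkg.add(PackageSpec(name = "JuMP", rev = "master"))

Equivalently, you can run add JuMP#master in the package REPL. Once a release containing the fix is published, free JuMP (or Pkg.free("JuMP")) switches you back to the registered version.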
