-
Does using cubic spline instead of linear interpolation solve the problem? Much smoother, though it still does a lot of interpolation, which is what I was originally worried about in terms of performance.
Also, later when I generalize the problem to where f_{max} is no longer a constant but depends on t (or maybe even some integral of v(t)), then I will still need to define a function representing the constraints? I’m just not 100% sure if the way I write the constraints is correct. cons(res, u, p) = (res .= u)
is from the documentation tutorial, and I’m not sure how to extend it to include time in the inequality constraints.
-
I tried using Optimization.AutoReverseDiff()
as the auto-diff option in OptimizationFunction
, and it gives StackOverflowError:
which I have no idea about (see below for the cmplete error message). I have solved some PDE-constrained optimal control problem before using the adjoint method where I derived the adjoint equation myself and have the cost function output the gradient (PS: your adjoint method lecture notes were very helpful). For the problem considered here which looks a lot simpler than my previous problem (but with the addition of inequality constraints), I presume if I set everything correctly, I can just declare the use of reverse-mode AD and have Optimization.jl does all the work for me?
StackOverflowError:
Stacktrace:
[1] anyeltypedual(::Type{T}, ::Type{Val{counter}}) where {T<:Union{Set, AbstractArray}, counter}
@ DiffEqBase C:\Users\dchan\.julia\packages\DiffEqBase\R2Vjs\src\forwarddiff.jl:258
--- the last 1 lines are repeated 1 more time ---
[3] (::Base.MappingRF{typeof(DiffEqBase.anyeltypedual), Base.BottomRF{typeof(DiffEqBase.promote_dual)}})(acc::Type, x::Type)
@ Base .\reduce.jl:100
[4] _foldl_impl(op::Base.MappingRF{typeof(DiffEqBase.anyeltypedual), Base.BottomRF{typeof(DiffEqBase.promote_dual)}}, init::Type, itr::Core.SimpleVector)
@ Base .\reduce.jl:62
[5] foldl_impl
@ .\reduce.jl:48 [inlined]
[6] mapfoldl_impl
@ .\reduce.jl:44 [inlined]
[7] mapfoldl
@ .\reduce.jl:175 [inlined]
[8] mapreduce
@ .\reduce.jl:307 [inlined]
[9] __anyeltypedual(::Type{ReverseDiff.TrackedReal{Float64, Float64, ReverseDiff.TrackedArray{Float64, Float64, 1, Vector{Float64}, Vector{Float64}}}})
@ DiffEqBase C:\Users\dchan\.julia\packages\DiffEqBase\R2Vjs\src\forwarddiff.jl:243
[10] anyeltypedual(::Type{ReverseDiff.TrackedReal{Float64, Float64, ReverseDiff.TrackedArray{Float64, Float64, 1, Vector{Float64}, Vector{Float64}}}}, ::Type{Val{0}})
@ DiffEqBase C:\Users\dchan\.julia\packages\DiffEqBase\R2Vjs\src\forwarddiff.jl:249
--- the last 1 lines are repeated 1 more time ---
--- the last 11 lines are repeated 5006 more times ---
[55078] anyeltypedual(::Type{ReverseDiff.TrackedArray{Float64, Float64, 1, Vector{Float64}, Vector{Float64}}}, ::Type{Val{1}})
@ DiffEqBase C:\Users\dchan\.julia\packages\DiffEqBase\R2Vjs\src\forwarddiff.jl:258
[55079] (::DiffEqBase.var"#80#81"{Int64})(x::Type)
@ DiffEqBase C:\Users\dchan\.julia\packages\DiffEqBase\R2Vjs\src\forwarddiff.jl:102
[55080] MappingRF
@ .\reduce.jl:100 [inlined]
[55081] _foldl_impl(op::Base.MappingRF{DiffEqBase.var"#80#81"{Int64}, Base.BottomRF{typeof(DiffEqBase.promote_dual)}}, init::Type, itr::Core.SimpleVector)
@ Base .\reduce.jl:58
[55082] foldl_impl
@ .\reduce.jl:48 [inlined]
[55083] mapfoldl_impl
@ .\reduce.jl:44 [inlined]
[55084] mapfoldl
@ .\reduce.jl:175 [inlined]
[55085] mapreduce
@ .\reduce.jl:307 [inlined]
[55086] diffeqmapreduce(f::DiffEqBase.var"#80#81"{Int64}, op::typeof(DiffEqBase.promote_dual), x::Core.SimpleVector)
@ DiffEqBase C:\Users\dchan\.julia\packages\DiffEqBase\R2Vjs\src\forwarddiff.jl:55
[55087] #s90#79
@ C:\Users\dchan\.julia\packages\DiffEqBase\R2Vjs\src\forwarddiff.jl:102 [inlined]
[55088] var"#s90#79"(counter::Any, ::Any, x::Any, ::Any)
@ DiffEqBase .\none:0
[55089] (::Core.GeneratedFunctionStub)(::UInt64, ::LineNumberNode, ::Any, ::Vararg{Any})
@ Core .\boot.jl:602
[55090] anyeltypedual
@ C:\Users\dchan\.julia\packages\DiffEqBase\R2Vjs\src\forwarddiff.jl:95 [inlined]
[55091] promote_u0
@ C:\Users\dchan\.julia\packages\DiffEqBase\R2Vjs\src\forwarddiff.jl:387 [inlined]
[55092] get_concrete_problem(prob::ODEProblem{Vector{Float64}, Tuple{Float64, Float64}, true, RunParams{ReverseDiff.TrackedArray{Float64, Float64, 1, Vector{Float64}, Vector{Float64}}, StepRangeLen{Float64, Base.TwicePrecision{Float64}, Base.TwicePrecision{Float64}, Int64}}, ODEFunction{true, SciMLBase.AutoSpecialize, typeof(f_ode!), LinearAlgebra.UniformScaling{Bool}, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED), Nothing, Nothing, Nothing, Nothing}, @Kwargs{}, SciMLBase.StandardODEProblem}, isadapt::Bool; kwargs::@Kwargs{u0::Vector{Float64}, p::RunParams{ReverseDiff.TrackedArray{Float64, Float64, 1, Vector{Float64}, Vector{Float64}}, StepRangeLen{Float64, Base.TwicePrecision{Float64}, Base.TwicePrecision{Float64}, Int64}}})
@ DiffEqBase C:\Users\dchan\.julia\packages\DiffEqBase\R2Vjs\src\solve.jl:1213
[55093] get_concrete_problem
@ C:\Users\dchan\.julia\packages\DiffEqBase\R2Vjs\src\solve.jl:1209 [inlined]
[55094] solve_up(::ODEProblem{Vector{Float64}, Tuple{Float64, Float64}, true, RunParams{ReverseDiff.TrackedArray{Float64, Float64, 1, Vector{Float64}, Vector{Float64}}, StepRangeLen{Float64, Base.TwicePrecision{Float64}, Base.TwicePrecision{Float64}, Int64}}, ODEFunction{true, SciMLBase.AutoSpecialize, typeof(f_ode!), LinearAlgebra.UniformScaling{Bool}, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED), Nothing, Nothing, Nothing, Nothing}, @Kwargs{}, SciMLBase.StandardODEProblem}, ::Nothing, ::Vector{Float64}, ::RunParams{ReverseDiff.TrackedArray{Float64, Float64, 1, Vector{Float64}, Vector{Float64}}, StepRangeLen{Float64, Base.TwicePrecision{Float64}, Base.TwicePrecision{Float64}, Int64}}; kwargs::@Kwargs{})
@ DiffEqBase C:\Users\dchan\.julia\packages\DiffEqBase\R2Vjs\src\solve.jl:1105
[55095] solve_up
@ C:\Users\dchan\.julia\packages\DiffEqBase\R2Vjs\src\solve.jl:1101 [inlined]
[55096] solve(::ODEProblem{Vector{Float64}, Tuple{Float64, Float64}, true, RunParams{ReverseDiff.TrackedArray{Float64, Float64, 1, Vector{Float64}, Vector{Float64}}, StepRangeLen{Float64, Base.TwicePrecision{Float64}, Base.TwicePrecision{Float64}, Int64}}, ODEFunction{true, SciMLBase.AutoSpecialize, typeof(f_ode!), LinearAlgebra.UniformScaling{Bool}, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED), Nothing, Nothing, Nothing, Nothing}, @Kwargs{}, SciMLBase.StandardODEProblem}; sensealg::Nothing, u0::Nothing, p::Nothing, wrap::Val{true}, kwargs::@Kwargs{})
@ DiffEqBase C:\Users\dchan\.julia\packages\DiffEqBase\R2Vjs\src\solve.jl:1038
[55097] solve(::ODEProblem{Vector{Float64}, Tuple{Float64, Float64}, true, RunParams{ReverseDiff.TrackedArray{Float64, Float64, 1, Vector{Float64}, Vector{Float64}}, StepRangeLen{Float64, Base.TwicePrecision{Float64}, Base.TwicePrecision{Float64}, Int64}}, ODEFunction{true, SciMLBase.AutoSpecialize, typeof(f_ode!), LinearAlgebra.UniformScaling{Bool}, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED), Nothing, Nothing, Nothing, Nothing}, @Kwargs{}, SciMLBase.StandardODEProblem})
@ DiffEqBase C:\Users\dchan\.julia\packages\DiffEqBase\R2Vjs\src\solve.jl:1028
[55098] cost_fun(f::ReverseDiff.TrackedArray{Float64, Float64, 1, Vector{Float64}, Vector{Float64}}, params::RunParams{Vector{Float64}, StepRangeLen{Float64, Base.TwicePrecision{Float64}, Base.TwicePrecision{Float64}, Int64}})
@ Main .\In[17]:17
[55099] FixTail
@ C:\Users\dchan\.julia\packages\DifferentiationInterface\UWUBZ\src\utils\context.jl:7 [inlined]
[55100] ReverseDiff.GradientTape(f::DifferentiationInterface.FixTail{typeof(cost_fun), Tuple{RunParams{Vector{Float64}, StepRangeLen{Float64, Base.TwicePrecision{Float64}, Base.TwicePrecision{Float64}, Int64}}}}, input::Vector{Float64}, cfg::ReverseDiff.GradientConfig{ReverseDiff.TrackedArray{Float64, Float64, 1, Vector{Float64}, Vector{Float64}}})
@ ReverseDiff C:\Users\dchan\.julia\packages\ReverseDiff\p1MzG\src\api\tape.jl:199
[55101] gradient!(result::Vector{Float64}, f::DifferentiationInterface.FixTail{typeof(cost_fun), Tuple{RunParams{Vector{Float64}, StepRangeLen{Float64, Base.TwicePrecision{Float64}, Base.TwicePrecision{Float64}, Int64}}}}, input::Vector{Float64}, cfg::ReverseDiff.GradientConfig{ReverseDiff.TrackedArray{Float64, Float64, 1, Vector{Float64}, Vector{Float64}}})
@ ReverseDiff C:\Users\dchan\.julia\packages\ReverseDiff\p1MzG\src\api\gradients.jl:41
[55102] gradient!(f::Function, grad::Vector{Float64}, prep::DifferentiationInterfaceReverseDiffExt.ReverseDiffGradientPrep{ReverseDiff.GradientConfig{ReverseDiff.TrackedArray{Float64, Float64, 1, Vector{Float64}, Vector{Float64}}}, Nothing}, ::AutoReverseDiff{false}, x::Vector{Float64}, contexts::DifferentiationInterface.Constant{RunParams{Vector{Float64}, StepRangeLen{Float64, Base.TwicePrecision{Float64}, Base.TwicePrecision{Float64}, Int64}}})
@ DifferentiationInterfaceReverseDiffExt C:\Users\dchan\.julia\packages\DifferentiationInterface\UWUBZ\ext\DifferentiationInterfaceReverseDiffExt\onearg.jl:175
[55103] (::OptimizationBase.var"#grad#16"{RunParams{Vector{Float64}, StepRangeLen{Float64, Base.TwicePrecision{Float64}, Base.TwicePrecision{Float64}, Int64}}, OptimizationFunction{true, AutoReverseDiff{false}, typeof(cost_fun), Nothing, Nothing, Nothing, Nothing, Nothing, typeof(cons), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED_NO_TIME), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, AutoReverseDiff{false}})(res::Vector{Float64}, θ::Vector{Float64})
@ OptimizationBase C:\Users\dchan\.julia\packages\OptimizationBase\gvXsf\src\OptimizationDIExt.jl:28
[55104] (::OptimizationOptimJL.var"#26#33"{OptimizationCache{OptimizationFunction{true, AutoReverseDiff{false}, typeof(cost_fun), OptimizationBase.var"#grad#16"{RunParams{Vector{Float64}, StepRangeLen{Float64, Base.TwicePrecision{Float64}, Base.TwicePrecision{Float64}, Int64}}, OptimizationFunction{true, AutoReverseDiff{false}, typeof(cost_fun), Nothing, Nothing, Nothing, Nothing, Nothing, typeof(cons), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED_NO_TIME), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, AutoReverseDiff{false}}, Nothing, OptimizationBase.var"#hess#20"{RunParams{Vector{Float64}, StepRangeLen{Float64, Base.TwicePrecision{Float64}, Base.TwicePrecision{Float64}, Int64}}, OptimizationFunction{true, AutoReverseDiff{false}, typeof(cost_fun), Nothing, Nothing, Nothing, Nothing, Nothing, typeof(cons), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED_NO_TIME), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, DifferentiationInterface.SecondOrder{AutoReverseDiff{false}, AutoReverseDiff{false}}}, Nothing, OptimizationBase.var"#hv!#24"{RunParams{Vector{Float64}, StepRangeLen{Float64, Base.TwicePrecision{Float64}, Base.TwicePrecision{Float64}, Int64}}, DifferentiationInterface.ReverseOverReverseHVPPrep{DifferentiationInterface.NoPullbackPrep}, OptimizationFunction{true, AutoReverseDiff{false}, typeof(cost_fun), Nothing, Nothing, Nothing, Nothing, Nothing, typeof(cons), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED_NO_TIME), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, DifferentiationInterface.SecondOrder{AutoReverseDiff{false}, AutoReverseDiff{false}}}, OptimizationBase.var"#9#26"{OptimizationFunction{true, AutoReverseDiff{false}, typeof(cost_fun), Nothing, Nothing, Nothing, Nothing, 
Nothing, typeof(cons), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED_NO_TIME), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, RunParams{Vector{Float64}, StepRangeLen{Float64, Base.TwicePrecision{Float64}, Base.TwicePrecision{Float64}, Int64}}}, OptimizationBase.var"#cons_j!#29"{AutoReverseDiff{false}, DifferentiationInterfaceReverseDiffExt.ReverseDiffOneArgJacobianPrep{ReverseDiff.JacobianConfig{ReverseDiff.TrackedArray{Float64, Float64, 1, Vector{Float64}, Vector{Float64}}, Nothing}, Nothing}}, Nothing, Nothing, OptimizationBase.var"#cons_h!#36"{Int64, Vector{DifferentiationInterface.HVPGradientHessianPrep{DifferentiationInterface.BatchSizeSettings{1, false, true}, Vector{Tuple{Vector{Float64}}}, Vector{Tuple{Vector{Float64}}}, DifferentiationInterface.ReverseOverReverseHVPPrep{DifferentiationInterface.NoPullbackPrep}, DifferentiationInterfaceReverseDiffExt.ReverseDiffGradientPrep{ReverseDiff.GradientConfig{ReverseDiff.TrackedArray{Float64, Float64, 1, Vector{Float64}, Vector{Float64}}}, Nothing}}}, DifferentiationInterface.SecondOrder{AutoReverseDiff{false}, AutoReverseDiff{false}}}, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED_NO_TIME), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, OptimizationBase.ReInitCache{Vector{Float64}, RunParams{Vector{Float64}, StepRangeLen{Float64, Base.TwicePrecision{Float64}, Base.TwicePrecision{Float64}, Int64}}}, Nothing, Nothing, Vector{Float64}, Vector{Float64}, Nothing, IPNewton{typeof(Optim.backtrack_constrained_grad), Symbol}, Bool, OptimizationOptimJL.var"#4#6", Nothing}, OptimizationOptimJL.var"#25#32"{OptimizationCache{OptimizationFunction{true, AutoReverseDiff{false}, typeof(cost_fun), OptimizationBase.var"#grad#16"{RunParams{Vector{Float64}, StepRangeLen{Float64, Base.TwicePrecision{Float64}, Base.TwicePrecision{Float64}, Int64}}, OptimizationFunction{true, 
AutoReverseDiff{false}, typeof(cost_fun), Nothing, Nothing, Nothing, Nothing, Nothing, typeof(cons), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED_NO_TIME), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, AutoReverseDiff{false}}, Nothing, OptimizationBase.var"#hess#20"{RunParams{Vector{Float64}, StepRangeLen{Float64, Base.TwicePrecision{Float64}, Base.TwicePrecision{Float64}, Int64}}, OptimizationFunction{true, AutoReverseDiff{false}, typeof(cost_fun), Nothing, Nothing, Nothing, Nothing, Nothing, typeof(cons), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED_NO_TIME), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, DifferentiationInterface.SecondOrder{AutoReverseDiff{false}, AutoReverseDiff{false}}}, Nothing, OptimizationBase.var"#hv!#24"{RunParams{Vector{Float64}, StepRangeLen{Float64, Base.TwicePrecision{Float64}, Base.TwicePrecision{Float64}, Int64}}, DifferentiationInterface.ReverseOverReverseHVPPrep{DifferentiationInterface.NoPullbackPrep}, OptimizationFunction{true, AutoReverseDiff{false}, typeof(cost_fun), Nothing, Nothing, Nothing, Nothing, Nothing, typeof(cons), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED_NO_TIME), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, DifferentiationInterface.SecondOrder{AutoReverseDiff{false}, AutoReverseDiff{false}}}, OptimizationBase.var"#9#26"{OptimizationFunction{true, AutoReverseDiff{false}, typeof(cost_fun), Nothing, Nothing, Nothing, Nothing, Nothing, typeof(cons), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED_NO_TIME), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, RunParams{Vector{Float64}, StepRangeLen{Float64, Base.TwicePrecision{Float64}, 
Base.TwicePrecision{Float64}, Int64}}}, OptimizationBase.var"#cons_j!#29"{AutoReverseDiff{false}, DifferentiationInterfaceReverseDiffExt.ReverseDiffOneArgJacobianPrep{ReverseDiff.JacobianConfig{ReverseDiff.TrackedArray{Float64, Float64, 1, Vector{Float64}, Vector{Float64}}, Nothing}, Nothing}}, Nothing, Nothing, OptimizationBase.var"#cons_h!#36"{Int64, Vector{DifferentiationInterface.HVPGradientHessianPrep{DifferentiationInterface.BatchSizeSettings{1, false, true}, Vector{Tuple{Vector{Float64}}}, Vector{Tuple{Vector{Float64}}}, DifferentiationInterface.ReverseOverReverseHVPPrep{DifferentiationInterface.NoPullbackPrep}, DifferentiationInterfaceReverseDiffExt.ReverseDiffGradientPrep{ReverseDiff.GradientConfig{ReverseDiff.TrackedArray{Float64, Float64, 1, Vector{Float64}, Vector{Float64}}}, Nothing}}}, DifferentiationInterface.SecondOrder{AutoReverseDiff{false}, AutoReverseDiff{false}}}, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED_NO_TIME), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, OptimizationBase.ReInitCache{Vector{Float64}, RunParams{Vector{Float64}, StepRangeLen{Float64, Base.TwicePrecision{Float64}, Base.TwicePrecision{Float64}, Int64}}}, Nothing, Nothing, Vector{Float64}, Vector{Float64}, Nothing, IPNewton{typeof(Optim.backtrack_constrained_grad), Symbol}, Bool, OptimizationOptimJL.var"#4#6", Nothing}}})(G::Vector{Float64}, θ::Vector{Float64})
@ OptimizationOptimJL C:\Users\dchan\.julia\packages\OptimizationOptimJL\e3bUa\src\OptimizationOptimJL.jl:371
[55105] value_gradient!!(obj::TwiceDifferentiable{Float64, Vector{Float64}, Matrix{Float64}, Vector{Float64}}, x::Vector{Float64})
@ NLSolversBase C:\Users\dchan\.julia\packages\NLSolversBase\kavn7\src\interface.jl:82
[55106] value_gradient!(obj::TwiceDifferentiable{Float64, Vector{Float64}, Matrix{Float64}, Vector{Float64}}, x::Vector{Float64})
@ NLSolversBase C:\Users\dchan\.julia\packages\NLSolversBase\kavn7\src\interface.jl:69
[55107] initial_state(method::IPNewton{typeof(Optim.backtrack_constrained_grad), Symbol}, options::Optim.Options{Float64, OptimizationOptimJL.var"#_cb#31"{OptimizationCache{OptimizationFunction{true, AutoReverseDiff{false}, typeof(cost_fun), OptimizationBase.var"#grad#16"{RunParams{Vector{Float64}, StepRangeLen{Float64, Base.TwicePrecision{Float64}, Base.TwicePrecision{Float64}, Int64}}, OptimizationFunction{true, AutoReverseDiff{false}, typeof(cost_fun), Nothing, Nothing, Nothing, Nothing, Nothing, typeof(cons), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED_NO_TIME), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, AutoReverseDiff{false}}, Nothing, OptimizationBase.var"#hess#20"{RunParams{Vector{Float64}, StepRangeLen{Float64, Base.TwicePrecision{Float64}, Base.TwicePrecision{Float64}, Int64}}, OptimizationFunction{true, AutoReverseDiff{false}, typeof(cost_fun), Nothing, Nothing, Nothing, Nothing, Nothing, typeof(cons), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED_NO_TIME), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, DifferentiationInterface.SecondOrder{AutoReverseDiff{false}, AutoReverseDiff{false}}}, Nothing, OptimizationBase.var"#hv!#24"{RunParams{Vector{Float64}, StepRangeLen{Float64, Base.TwicePrecision{Float64}, Base.TwicePrecision{Float64}, Int64}}, DifferentiationInterface.ReverseOverReverseHVPPrep{DifferentiationInterface.NoPullbackPrep}, OptimizationFunction{true, AutoReverseDiff{false}, typeof(cost_fun), Nothing, Nothing, Nothing, Nothing, Nothing, typeof(cons), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED_NO_TIME), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, DifferentiationInterface.SecondOrder{AutoReverseDiff{false}, AutoReverseDiff{false}}}, 
OptimizationBase.var"#9#26"{OptimizationFunction{true, AutoReverseDiff{false}, typeof(cost_fun), Nothing, Nothing, Nothing, Nothing, Nothing, typeof(cons), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED_NO_TIME), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, RunParams{Vector{Float64}, StepRangeLen{Float64, Base.TwicePrecision{Float64}, Base.TwicePrecision{Float64}, Int64}}}, OptimizationBase.var"#cons_j!#29"{AutoReverseDiff{false}, DifferentiationInterfaceReverseDiffExt.ReverseDiffOneArgJacobianPrep{ReverseDiff.JacobianConfig{ReverseDiff.TrackedArray{Float64, Float64, 1, Vector{Float64}, Vector{Float64}}, Nothing}, Nothing}}, Nothing, Nothing, OptimizationBase.var"#cons_h!#36"{Int64, Vector{DifferentiationInterface.HVPGradientHessianPrep{DifferentiationInterface.BatchSizeSettings{1, false, true}, Vector{Tuple{Vector{Float64}}}, Vector{Tuple{Vector{Float64}}}, DifferentiationInterface.ReverseOverReverseHVPPrep{DifferentiationInterface.NoPullbackPrep}, DifferentiationInterfaceReverseDiffExt.ReverseDiffGradientPrep{ReverseDiff.GradientConfig{ReverseDiff.TrackedArray{Float64, Float64, 1, Vector{Float64}, Vector{Float64}}}, Nothing}}}, DifferentiationInterface.SecondOrder{AutoReverseDiff{false}, AutoReverseDiff{false}}}, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED_NO_TIME), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, OptimizationBase.ReInitCache{Vector{Float64}, RunParams{Vector{Float64}, StepRangeLen{Float64, Base.TwicePrecision{Float64}, Base.TwicePrecision{Float64}, Int64}}}, Nothing, Nothing, Vector{Float64}, Vector{Float64}, Nothing, IPNewton{typeof(Optim.backtrack_constrained_grad), Symbol}, Bool, OptimizationOptimJL.var"#4#6", Nothing}}}, d::TwiceDifferentiable{Float64, Vector{Float64}, Matrix{Float64}, Vector{Float64}}, 
constraints::TwiceDifferentiableConstraints{OptimizationBase.var"#9#26"{OptimizationFunction{true, AutoReverseDiff{false}, typeof(cost_fun), Nothing, Nothing, Nothing, Nothing, Nothing, typeof(cons), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED_NO_TIME), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, RunParams{Vector{Float64}, StepRangeLen{Float64, Base.TwicePrecision{Float64}, Base.TwicePrecision{Float64}, Int64}}}, OptimizationBase.var"#cons_j!#29"{AutoReverseDiff{false}, DifferentiationInterfaceReverseDiffExt.ReverseDiffOneArgJacobianPrep{ReverseDiff.JacobianConfig{ReverseDiff.TrackedArray{Float64, Float64, 1, Vector{Float64}, Vector{Float64}}, Nothing}, Nothing}}, OptimizationOptimJL.var"#29#36"{OptimizationCache{OptimizationFunction{true, AutoReverseDiff{false}, typeof(cost_fun), OptimizationBase.var"#grad#16"{RunParams{Vector{Float64}, StepRangeLen{Float64, Base.TwicePrecision{Float64}, Base.TwicePrecision{Float64}, Int64}}, OptimizationFunction{true, AutoReverseDiff{false}, typeof(cost_fun), Nothing, Nothing, Nothing, Nothing, Nothing, typeof(cons), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED_NO_TIME), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, AutoReverseDiff{false}}, Nothing, OptimizationBase.var"#hess#20"{RunParams{Vector{Float64}, StepRangeLen{Float64, Base.TwicePrecision{Float64}, Base.TwicePrecision{Float64}, Int64}}, OptimizationFunction{true, AutoReverseDiff{false}, typeof(cost_fun), Nothing, Nothing, Nothing, Nothing, Nothing, typeof(cons), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED_NO_TIME), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, DifferentiationInterface.SecondOrder{AutoReverseDiff{false}, AutoReverseDiff{false}}}, Nothing, 
OptimizationBase.var"#hv!#24"{RunParams{Vector{Float64}, StepRangeLen{Float64, Base.TwicePrecision{Float64}, Base.TwicePrecision{Float64}, Int64}}, DifferentiationInterface.ReverseOverReverseHVPPrep{DifferentiationInterface.NoPullbackPrep}, OptimizationFunction{true, AutoReverseDiff{false}, typeof(cost_fun), Nothing, Nothing, Nothing, Nothing, Nothing, typeof(cons), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED_NO_TIME), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, DifferentiationInterface.SecondOrder{AutoReverseDiff{false}, AutoReverseDiff{false}}}, OptimizationBase.var"#9#26"{OptimizationFunction{true, AutoReverseDiff{false}, typeof(cost_fun), Nothing, Nothing, Nothing, Nothing, Nothing, typeof(cons), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED_NO_TIME), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, RunParams{Vector{Float64}, StepRangeLen{Float64, Base.TwicePrecision{Float64}, Base.TwicePrecision{Float64}, Int64}}}, OptimizationBase.var"#cons_j!#29"{AutoReverseDiff{false}, DifferentiationInterfaceReverseDiffExt.ReverseDiffOneArgJacobianPrep{ReverseDiff.JacobianConfig{ReverseDiff.TrackedArray{Float64, Float64, 1, Vector{Float64}, Vector{Float64}}, Nothing}, Nothing}}, Nothing, Nothing, OptimizationBase.var"#cons_h!#36"{Int64, Vector{DifferentiationInterface.HVPGradientHessianPrep{DifferentiationInterface.BatchSizeSettings{1, false, true}, Vector{Tuple{Vector{Float64}}}, Vector{Tuple{Vector{Float64}}}, DifferentiationInterface.ReverseOverReverseHVPPrep{DifferentiationInterface.NoPullbackPrep}, DifferentiationInterfaceReverseDiffExt.ReverseDiffGradientPrep{ReverseDiff.GradientConfig{ReverseDiff.TrackedArray{Float64, Float64, 1, Vector{Float64}, Vector{Float64}}}, Nothing}}}, DifferentiationInterface.SecondOrder{AutoReverseDiff{false}, AutoReverseDiff{false}}}, Nothing, 
Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED_NO_TIME), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, OptimizationBase.ReInitCache{Vector{Float64}, RunParams{Vector{Float64}, StepRangeLen{Float64, Base.TwicePrecision{Float64}, Base.TwicePrecision{Float64}, Int64}}}, Nothing, Nothing, Vector{Float64}, Vector{Float64}, Nothing, IPNewton{typeof(Optim.backtrack_constrained_grad), Symbol}, Bool, OptimizationOptimJL.var"#4#6", Nothing}}, Float64}, initial_x::Vector{Float64})
@ Optim C:\Users\dchan\.julia\packages\Optim\HvjCd\src\multivariate\solvers\constrained\ipnewton\ipnewton.jl:125
[55108] optimize(d::TwiceDifferentiable{Float64, Vector{Float64}, Matrix{Float64}, Vector{Float64}}, constraints::TwiceDifferentiableConstraints{OptimizationBase.var"#9#26"{OptimizationFunction{true, AutoReverseDiff{false}, typeof(cost_fun), Nothing, Nothing, Nothing, Nothing, Nothing, typeof(cons), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED_NO_TIME), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, RunParams{Vector{Float64}, StepRangeLen{Float64, Base.TwicePrecision{Float64}, Base.TwicePrecision{Float64}, Int64}}}, OptimizationBase.var"#cons_j!#29"{AutoReverseDiff{false}, DifferentiationInterfaceReverseDiffExt.ReverseDiffOneArgJacobianPrep{ReverseDiff.JacobianConfig{ReverseDiff.TrackedArray{Float64, Float64, 1, Vector{Float64}, Vector{Float64}}, Nothing}, Nothing}}, OptimizationOptimJL.var"#29#36"{OptimizationCache{OptimizationFunction{true, AutoReverseDiff{false}, typeof(cost_fun), OptimizationBase.var"#grad#16"{RunParams{Vector{Float64}, StepRangeLen{Float64, Base.TwicePrecision{Float64}, Base.TwicePrecision{Float64}, Int64}}, OptimizationFunction{true, AutoReverseDiff{false}, typeof(cost_fun), Nothing, Nothing, Nothing, Nothing, Nothing, typeof(cons), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED_NO_TIME), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, AutoReverseDiff{false}}, Nothing, OptimizationBase.var"#hess#20"{RunParams{Vector{Float64}, StepRangeLen{Float64, Base.TwicePrecision{Float64}, Base.TwicePrecision{Float64}, Int64}}, OptimizationFunction{true, AutoReverseDiff{false}, typeof(cost_fun), Nothing, Nothing, Nothing, Nothing, Nothing, typeof(cons), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED_NO_TIME), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, 
DifferentiationInterface.SecondOrder{AutoReverseDiff{false}, AutoReverseDiff{false}}}, Nothing, OptimizationBase.var"#hv!#24"{RunParams{Vector{Float64}, StepRangeLen{Float64, Base.TwicePrecision{Float64}, Base.TwicePrecision{Float64}, Int64}}, DifferentiationInterface.ReverseOverReverseHVPPrep{DifferentiationInterface.NoPullbackPrep}, OptimizationFunction{true, AutoReverseDiff{false}, typeof(cost_fun), Nothing, Nothing, Nothing, Nothing, Nothing, typeof(cons), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED_NO_TIME), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, DifferentiationInterface.SecondOrder{AutoReverseDiff{false}, AutoReverseDiff{false}}}, OptimizationBase.var"#9#26"{OptimizationFunction{true, AutoReverseDiff{false}, typeof(cost_fun), Nothing, Nothing, Nothing, Nothing, Nothing, typeof(cons), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED_NO_TIME), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, RunParams{Vector{Float64}, StepRangeLen{Float64, Base.TwicePrecision{Float64}, Base.TwicePrecision{Float64}, Int64}}}, OptimizationBase.var"#cons_j!#29"{AutoReverseDiff{false}, DifferentiationInterfaceReverseDiffExt.ReverseDiffOneArgJacobianPrep{ReverseDiff.JacobianConfig{ReverseDiff.TrackedArray{Float64, Float64, 1, Vector{Float64}, Vector{Float64}}, Nothing}, Nothing}}, Nothing, Nothing, OptimizationBase.var"#cons_h!#36"{Int64, Vector{DifferentiationInterface.HVPGradientHessianPrep{DifferentiationInterface.BatchSizeSettings{1, false, true}, Vector{Tuple{Vector{Float64}}}, Vector{Tuple{Vector{Float64}}}, DifferentiationInterface.ReverseOverReverseHVPPrep{DifferentiationInterface.NoPullbackPrep}, DifferentiationInterfaceReverseDiffExt.ReverseDiffGradientPrep{ReverseDiff.GradientConfig{ReverseDiff.TrackedArray{Float64, Float64, 1, Vector{Float64}, Vector{Float64}}}, Nothing}}}, 
DifferentiationInterface.SecondOrder{AutoReverseDiff{false}, AutoReverseDiff{false}}}, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED_NO_TIME), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, OptimizationBase.ReInitCache{Vector{Float64}, RunParams{Vector{Float64}, StepRangeLen{Float64, Base.TwicePrecision{Float64}, Base.TwicePrecision{Float64}, Int64}}}, Nothing, Nothing, Vector{Float64}, Vector{Float64}, Nothing, IPNewton{typeof(Optim.backtrack_constrained_grad), Symbol}, Bool, OptimizationOptimJL.var"#4#6", Nothing}}, Float64}, initial_x::Vector{Float64}, method::IPNewton{typeof(Optim.backtrack_constrained_grad), Symbol}, options::Optim.Options{Float64, OptimizationOptimJL.var"#_cb#31"{OptimizationCache{OptimizationFunction{true, AutoReverseDiff{false}, typeof(cost_fun), OptimizationBase.var"#grad#16"{RunParams{Vector{Float64}, StepRangeLen{Float64, Base.TwicePrecision{Float64}, Base.TwicePrecision{Float64}, Int64}}, OptimizationFunction{true, AutoReverseDiff{false}, typeof(cost_fun), Nothing, Nothing, Nothing, Nothing, Nothing, typeof(cons), Nothing, ...
(exceeds word limit)