Please consider the following code snippet using Optim.jl:
using Optim
dof = 7
fun(x) = 0.0; x0 = fill(0.1, dof)  # trivial objective and starting point for the MWE
df = TwiceDifferentiable(fun, x0)
lx = fill(-1.2, dof); ux = fill(+1.2, dof)
dfc = TwiceDifferentiableConstraints(lx, ux)
# res = optimize(df, dfc, x0, IPNewton())  # this call works
res = optimize(df, dfc, x0, IPNewton(); autodiff=:forward)  # this call errors
If I call optimize() without the autodiff=:forward keyword, the snippet above works. However, if I set autodiff=:forward, I get the following error:
No default objective type for IPNewton{typeof(Optim.backtrack_constrained_grad),Symbol}(Optim.backtrack_constrained_grad, :auto, false) and (TwiceDifferentiable{Float64,Array{Float64,1},Array{Float64,2},Array{Float64,1}}(fun, getfield(NLSolversBase, Symbol("#g!#44")){typeof(fun),DiffEqDiffTools.GradientCache{Nothing,Nothing,Nothing,Val{:central},Float64,Val{true}}}(fun, DiffEqDiffTools.GradientCache{Nothing,Nothing,Nothing,Val{:central},Float64,Val{true}}(nothing, nothing, nothing)), getfield(NLSolversBase, Symbol("#fg!#45")){typeof(fun)}(fun, Core.Box(getfield(NLSolversBase, Symbol("#g!#44")){typeof(fun),DiffEqDiffTools.GradientCache{Nothing,Nothing,Nothing,Val{:central},Float64,Val{true}}}(fun, DiffEqDiffTools.GradientCache{Nothing,Nothing,Nothing,Val{:central},Float64,Val{true}}(nothing, nothing, nothing)))), getfield(NLSolversBase, Symbol("#h!#46")){typeof(fun)}(fun), 0.0, [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0 0.0 0.0 0.0 0.0 0.0 0.0; 0.0 0.0 0.0 0.0 0.0 0.0 0.0; 0.0 0.0 0.0 0.0 0.0 0.0 0.0; 0.0 0.0 0.0 0.0 0.0 0.0 0.0; 0.0 0.0 0.0 0.0 0.0 0.0 0.0; 0.0 0.0 0.0 0.0 0.0 0.0 0.0; 0.0 0.0 0.0 0.0 0.0 0.0 0.0], [0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1], [0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1], [0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1], [1], [1], [1]), TwiceDifferentiableConstraints{getfield(NLSolversBase, Symbol("##88#91")),getfield(NLSolversBase, Symbol("##89#92")),getfield(NLSolversBase, Symbol("##90#93")),Float64}(getfield(NLSolversBase, Symbol("##88#91"))(), getfield(NLSolversBase, Symbol("##89#92"))(), getfield(NLSolversBase, Symbol("##90#93"))(), ConstraintBounds:
Variables:
x[1]≥-1.2, x[1]≤1.2, x[2]≥-1.2, x[2]≤1.2, x[3]≥-1.2, x[3]≤1.2, x[4]≥-1.2, x[4]≤1.2, x[5]≥-1.2, x[5]≤1.2, x[6]≥-1.2, x[6]≤1.2, x[7]≥-1.2, x[7]≤1.2
Linear/nonlinear constraints:)).
Stacktrace:
[1] error(::String) at ./error.jl:33
[2] promote_objtype(::IPNewton{typeof(Optim.backtrack_constrained_grad),Symbol}, ::Array{Float64,1}, ::Symbol, ::Bool, ::TwiceDifferentiable{Float64,Array{Float64,1},Array{Float64,2},Array{Float64,1}}, ::TwiceDifferentiableConstraints{getfield(NLSolversBase, Symbol("##88#91")),getfield(NLSolversBase, Symbol("##89#92")),getfield(NLSolversBase, Symbol("##90#93")),Float64}) at /home/henrique/.julia/packages/Optim/Agd3B/src/multivariate/optimize/interface.jl:37
[3] #optimize#88(::Bool, ::Symbol, ::Function, ::TwiceDifferentiable{Float64,Array{Float64,1},Array{Float64,2},Array{Float64,1}}, ::TwiceDifferentiableConstraints{getfield(NLSolversBase, Symbol("##88#91")),getfield(NLSolversBase, Symbol("##89#92")),getfield(NLSolversBase, Symbol("##90#93")),Float64}, ::Array{Float64,1}, ::IPNewton{typeof(Optim.backtrack_constrained_grad),Symbol}, ::Optim.Options{Float64,Nothing}) at /home/henrique/.julia/packages/Optim/Agd3B/src/multivariate/optimize/interface.jl:121
[4] (::getfield(Optim, Symbol("#kw##optimize")))(::NamedTuple{(:autodiff,),Tuple{Symbol}}, ::typeof(optimize), ::TwiceDifferentiable{Float64,Array{Float64,1},Array{Float64,2},Array{Float64,1}}, ::TwiceDifferentiableConstraints{getfield(NLSolversBase, Symbol("##88#91")),getfield(NLSolversBase, Symbol("##89#92")),getfield(NLSolversBase, Symbol("##90#93")),Float64}, ::Array{Float64,1}, ::IPNewton{typeof(Optim.backtrack_constrained_grad),Symbol}, ::Optim.Options{Float64,Nothing}) at ./none:0 (repeats 2 times)
[5] top-level scope at In[51]:1
Can somebody help me understand why? Is it perhaps because AD is not supported for IPNewton with constraints?
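One thing I'm wondering: should the autodiff choice instead be passed when constructing the TwiceDifferentiable, rather than to optimize() once the objective is already wrapped? A minimal sketch of what I mean (assuming the constructor's autodiff keyword is the right hook for this):

using Optim

dof = 7
fun(x) = 0.0
x0 = fill(0.1, dof)

# Assumption: requesting forward-mode AD here makes the wrapped
# objective carry ForwardDiff-based gradients and Hessians.
df = TwiceDifferentiable(fun, x0; autodiff = :forward)

lx = fill(-1.2, dof); ux = fill(+1.2, dof)
dfc = TwiceDifferentiableConstraints(lx, ux)

# No autodiff keyword on optimize() once df supplies its own derivatives.
res = optimize(df, dfc, x0, IPNewton())

Is that the intended way to combine forward-mode AD with IPNewton, or should the keyword on optimize() also work? Thanks!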