Declaring ForwardDiff tag directly with a DifferentialEquations integrator / nested function

This is a continuation of an earlier question of mine (Updating integrator parameter/initial condition with ForwardDiff.Dual types), now focused on the actual problem of defining an array/scalar for use with automatic differentiation when it needs to be used in a nested function. The problem I'm interested in is below, and it illustrates the main difficulties.

using Random, DifferentialEquations, LinearAlgebra, Optimization, OptimizationNLopt, OptimizationOptimJL
Random.seed!(2992999)

# Synthetic data: y(t) = y₀exp(λt) observed with Gaussian noise
λ, y₀, σ = -0.5, 15.0, 0.1
T, n = 5.0, 200
Δt = T / n
t = [j * Δt for j in 0:n]
y = y₀ * exp.(λ * t)
yᵒ = y .+ [0.0, σ * randn(n)...]
ode_fnc(u, p, t) = p * u

# Gaussian log-likelihood, reusing a single pre-constructed integrator
function loglik(θ, data, integrator)
    yᵒ, n, ε = data
    λ, σ, u0 = θ
    integrator.p = λ # fails under AD: λ is a Dual but integrator.p is a Float64
    reinit!(integrator, u0)
    solve!(integrator)
    ε = yᵒ .- integrator.sol.u
    ℓ = -0.5n * log(2π * σ^2) - 0.5 / σ^2 * sum(ε.^2)
end

θ₀ = [-1.0, 0.5, 19.73]
integrator = DifferentialEquations.init(ODEProblem(ode_fnc, y₀, (0.0, T), 1.0), Tsit5(); saveat = t)
negloglik = (θ, p) -> -loglik(θ, p, integrator)
fnc = OptimizationFunction(negloglik, Optimization.AutoForwardDiff())
ε = zeros(n)
prob = OptimizationProblem(fnc, θ₀, (yᵒ, n, ε), lb=[-10.0, 1e-6, 0.5], ub=[10.0, 10.0, 25.0])
solve(prob, LBFGS())
Stacktrace

julia> solve(prob, LBFGS())
ERROR: MethodError: no method matching Float64(::ForwardDiff.Dual{ForwardDiff.Tag{Optimization.var"#69#84"{Tuple{}, Optimization.var"#67#82"{OptimizationFunction{true, Optimization.AutoForwardDiff{nothing}, var"#7#8", Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, Tuple{Vector{Float64}, Int64, Vector{Float64}}}}, Float64}, Float64, 3})
Closest candidates are:
  (::Type{T})(::Real, ::RoundingMode) where T<:AbstractFloat at rounding.jl:200
  (::Type{T})(::T) where T<:Number at boot.jl:772
  (::Type{T})(::AbstractChar) where T<:Union{AbstractChar, Number} at char.jl:50
  ...
Stacktrace:
  [1] convert(#unused#::Type{Float64}, x::ForwardDiff.Dual{ForwardDiff.Tag{Optimization.var"#69#84"{Tuple{}, Optimization.var"#67#82"{OptimizationFunction{true, Optimization.AutoForwardDiff{nothing}, var"#7#8", Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, Tuple{Vector{Float64}, Int64, Vector{Float64}}}}, Float64}, Float64, 3})
    @ Base .\number.jl:7
  [2] setproperty!(x::OrdinaryDiffEq.ODEIntegrator{Tsit5{typeof(OrdinaryDiffEq.trivial_limiter!), typeof(OrdinaryDiffEq.trivial_limiter!), Static.False}, false, Float64, Nothing, Float64, Float64, Float64, Float64, Float64, Float64, Vector{Float64}, ODESolution{Float64, 1, Vector{Float64}, Nothing, Nothing, Vector{Float64}, Vector{Vector{Float64}}, ODEProblem{Float64, Tuple{Float64, Float64}, false, Float64, ODEFunction{false, typeof(ode_fnc), UniformScaling{Bool}, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED), Nothing}, Base.Pairs{Symbol, Union{}, Tuple{}, NamedTuple{(), Tuple{}}}, SciMLBase.StandardODEProblem}, Tsit5{typeof(OrdinaryDiffEq.trivial_limiter!), 
typeof(OrdinaryDiffEq.trivial_limiter!), Static.False}, OrdinaryDiffEq.InterpolationData{ODEFunction{false, typeof(ode_fnc), UniformScaling{Bool}, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, 
typeof(SciMLBase.DEFAULT_OBSERVED), Nothing}, Vector{Float64}, Vector{Float64}, Vector{Vector{Float64}}, OrdinaryDiffEq.Tsit5ConstantCache{Float64, Float64}}, DiffEqBase.DEStats}, ODEFunction{false, typeof(ode_fnc), UniformScaling{Bool}, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED), Nothing}, OrdinaryDiffEq.Tsit5ConstantCache{Float64, Float64}, OrdinaryDiffEq.DEOptions{Float64, Float64, Float64, Float64, PIController{Rational{Int64}}, typeof(DiffEqBase.ODE_DEFAULT_NORM), typeof(opnorm), Nothing, CallbackSet{Tuple{}, Tuple{}}, typeof(DiffEqBase.ODE_DEFAULT_ISOUTOFDOMAIN), typeof(DiffEqBase.ODE_DEFAULT_PROG_MESSAGE), typeof(DiffEqBase.ODE_DEFAULT_UNSTABLE_CHECK), DataStructures.BinaryHeap{Float64, DataStructures.FasterForward}, DataStructures.BinaryHeap{Float64, DataStructures.FasterForward}, Nothing, Nothing, Int64, Tuple{}, Vector{Float64}, Tuple{}}, Float64, Float64, Nothing, OrdinaryDiffEq.DefaultInit}, f::Symbol, v::ForwardDiff.Dual{ForwardDiff.Tag{Optimization.var"#69#84"{Tuple{}, Optimization.var"#67#82"{OptimizationFunction{true, Optimization.AutoForwardDiff{nothing}, var"#7#8", Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, Tuple{Vector{Float64}, Int64, Vector{Float64}}}}, Float64}, Float64, 3})
    @ Base .\Base.jl:39
  [3] loglik(θ::Vector{ForwardDiff.Dual{ForwardDiff.Tag{Optimization.var"#69#84"{Tuple{}, Optimization.var"#67#82"{OptimizationFunction{true, Optimization.AutoForwardDiff{nothing}, var"#7#8", Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, 
Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, Tuple{Vector{Float64}, Int64, Vector{Float64}}}}, Float64}, Float64, 3}}, data::Tuple{Vector{Float64}, Int64, Vector{Float64}}, integrator::OrdinaryDiffEq.ODEIntegrator{Tsit5{typeof(OrdinaryDiffEq.trivial_limiter!), typeof(OrdinaryDiffEq.trivial_limiter!), Static.False}, false, Float64, Nothing, Float64, Float64, Float64, Float64, Float64, Float64, Vector{Float64}, ODESolution{Float64, 1, Vector{Float64}, Nothing, Nothing, Vector{Float64}, Vector{Vector{Float64}}, ODEProblem{Float64, Tuple{Float64, Float64}, false, Float64, ODEFunction{false, typeof(ode_fnc), UniformScaling{Bool}, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED), Nothing}, Base.Pairs{Symbol, Union{}, Tuple{}, NamedTuple{(), Tuple{}}}, SciMLBase.StandardODEProblem}, Tsit5{typeof(OrdinaryDiffEq.trivial_limiter!), typeof(OrdinaryDiffEq.trivial_limiter!), Static.False}, OrdinaryDiffEq.InterpolationData{ODEFunction{false, typeof(ode_fnc), UniformScaling{Bool}, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED), Nothing}, Vector{Float64}, Vector{Float64}, Vector{Vector{Float64}}, OrdinaryDiffEq.Tsit5ConstantCache{Float64, Float64}}, DiffEqBase.DEStats}, ODEFunction{false, typeof(ode_fnc), UniformScaling{Bool}, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, 
Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED), Nothing}, OrdinaryDiffEq.Tsit5ConstantCache{Float64, Float64}, OrdinaryDiffEq.DEOptions{Float64, Float64, Float64, Float64, PIController{Rational{Int64}}, typeof(DiffEqBase.ODE_DEFAULT_NORM), 
typeof(opnorm), Nothing, CallbackSet{Tuple{}, Tuple{}}, typeof(DiffEqBase.ODE_DEFAULT_ISOUTOFDOMAIN), typeof(DiffEqBase.ODE_DEFAULT_PROG_MESSAGE), typeof(DiffEqBase.ODE_DEFAULT_UNSTABLE_CHECK), DataStructures.BinaryHeap{Float64, DataStructures.FasterForward}, DataStructures.BinaryHeap{Float64, DataStructures.FasterForward}, Nothing, Nothing, Int64, Tuple{}, Vector{Float64}, Tuple{}}, Float64, Float64, Nothing, OrdinaryDiffEq.DefaultInit})
    @ Main c:\Users\licer\.julia\dev\ProfileLikelihood\dev\autodiff_tests.jl:107
  [4] (::var"#7#8")(θ::Vector{ForwardDiff.Dual{ForwardDiff.Tag{Optimization.var"#69#84"{Tuple{}, Optimization.var"#67#82"{OptimizationFunction{true, Optimization.AutoForwardDiff{nothing}, var"#7#8", Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, Tuple{Vector{Float64}, Int64, Vector{Float64}}}}, Float64}, Float64, 3}}, p::Tuple{Vector{Float64}, Int64, Vector{Float64}})
    @ Main c:\Users\licer\.julia\dev\ProfileLikelihood\dev\autodiff_tests.jl:115
  [5] (::Optimization.var"#67#82"{OptimizationFunction{true, Optimization.AutoForwardDiff{nothing}, var"#7#8", Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, Tuple{Vector{Float64}, Int64, Vector{Float64}}})(::Vector{ForwardDiff.Dual{ForwardDiff.Tag{Optimization.var"#69#84"{Tuple{}, Optimization.var"#67#82"{OptimizationFunction{true, Optimization.AutoForwardDiff{nothing}, var"#7#8", Nothing, Nothing, Nothing, Nothing, 
Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, Tuple{Vector{Float64}, Int64, Vector{Float64}}}}, Float64}, Float64, 3}})
    @ Optimization C:\Users\licer\.julia\packages\Optimization\i9NGR\src\function\forwarddiff.jl:46
  [6] #71
    @ C:\Users\licer\.julia\packages\Optimization\i9NGR\src\function\forwarddiff.jl:50 [inlined]
  [7] vector_mode_dual_eval!
    @ C:\Users\licer\.julia\packages\ForwardDiff\wAaVJ\src\apiutils.jl:37 [inlined]
  [8] vector_mode_gradient!(result::Vector{Float64}, f::Optimization.var"#71#86"{Tuple{}, Optimization.var"#67#82"{OptimizationFunction{true, Optimization.AutoForwardDiff{nothing}, var"#7#8", Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, 
Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, Tuple{Vector{Float64}, Int64, Vector{Float64}}}}, x::Vector{Float64}, cfg::ForwardDiff.GradientConfig{ForwardDiff.Tag{Optimization.var"#69#84"{Tuple{}, Optimization.var"#67#82"{OptimizationFunction{true, Optimization.AutoForwardDiff{nothing}, var"#7#8", Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, Tuple{Vector{Float64}, Int64, Vector{Float64}}}}, Float64}, Float64, 3, Vector{ForwardDiff.Dual{ForwardDiff.Tag{Optimization.var"#69#84"{Tuple{}, Optimization.var"#67#82"{OptimizationFunction{true, Optimization.AutoForwardDiff{nothing}, var"#7#8", Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, 
Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, Tuple{Vector{Float64}, Int64, Vector{Float64}}}}, Float64}, Float64, 3}}})
    @ ForwardDiff C:\Users\licer\.julia\packages\ForwardDiff\wAaVJ\src\gradient.jl:113
  [9] gradient!(result::Vector{Float64}, f::Optimization.var"#71#86"{Tuple{}, Optimization.var"#67#82"{OptimizationFunction{true, Optimization.AutoForwardDiff{nothing}, var"#7#8", Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, Tuple{Vector{Float64}, Int64, Vector{Float64}}}}, x::Vector{Float64}, cfg::ForwardDiff.GradientConfig{ForwardDiff.Tag{Optimization.var"#69#84"{Tuple{}, Optimization.var"#67#82"{OptimizationFunction{true, Optimization.AutoForwardDiff{nothing}, var"#7#8", Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, Tuple{Vector{Float64}, Int64, Vector{Float64}}}}, Float64}, Float64, 3, Vector{ForwardDiff.Dual{ForwardDiff.Tag{Optimization.var"#69#84"{Tuple{}, Optimization.var"#67#82"{OptimizationFunction{true, Optimization.AutoForwardDiff{nothing}, var"#7#8", Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, Tuple{Vector{Float64}, Int64, Vector{Float64}}}}, Float64}, Float64, 3}}}, ::Val{false})
    @ ForwardDiff C:\Users\licer\.julia\packages\ForwardDiff\wAaVJ\src\gradient.jl:37
 [10] (::Optimization.var"#70#85"{Optimization.var"#68#83"{Vector{Float64}, Optimization.var"#67#82"{OptimizationFunction{true, Optimization.AutoForwardDiff{nothing}, var"#7#8", Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, Tuple{Vector{Float64}, Int64, Vector{Float64}}}, Int64}, Optimization.var"#67#82"{OptimizationFunction{true, Optimization.AutoForwardDiff{nothing}, var"#7#8", Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, Tuple{Vector{Float64}, Int64, Vector{Float64}}}})(::Vector{Float64}, ::Vector{Float64})
    @ Optimization C:\Users\licer\.julia\packages\Optimization\i9NGR\src\function\forwarddiff.jl:50
 [11] (::OptimizationOptimJL.var"#21#27"{OptimizationProblem{true, OptimizationFunction{true, Optimization.AutoForwardDiff{nothing}, var"#7#8", Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, Vector{Float64}, Tuple{Vector{Float64}, Int64, Vector{Float64}}, Vector{Float64}, Nothing, Nothing, Nothing, Base.Pairs{Symbol, Union{}, Tuple{}, NamedTuple{(), Tuple{}}}}, OptimizationOptimJL.var"#20#26"{OptimizationProblem{true, OptimizationFunction{true, Optimization.AutoForwardDiff{nothing}, var"#7#8", Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, Vector{Float64}, Tuple{Vector{Float64}, Int64, Vector{Float64}}, Vector{Float64}, Nothing, Nothing, Nothing, Base.Pairs{Symbol, Union{}, Tuple{}, NamedTuple{(), Tuple{}}}}, OptimizationFunction{true, Optimization.AutoForwardDiff{nothing}, var"#7#8", Optimization.var"#70#85"{Optimization.var"#68#83"{Vector{Float64}, Optimization.var"#67#82"{OptimizationFunction{true, Optimization.AutoForwardDiff{nothing}, var"#7#8", Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, Tuple{Vector{Float64}, Int64, Vector{Float64}}}, Int64}, Optimization.var"#67#82"{OptimizationFunction{true, Optimization.AutoForwardDiff{nothing}, var"#7#8", Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, Tuple{Vector{Float64}, Int64, Vector{Float64}}}}, Optimization.var"#74#89"{Optimization.var"#72#87"{Vector{Float64}, Optimization.var"#67#82"{OptimizationFunction{true, Optimization.AutoForwardDiff{nothing}, var"#7#8", Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, Tuple{Vector{Float64}, Int64, Vector{Float64}}}, Int64}, Optimization.var"#67#82"{OptimizationFunction{true, Optimization.AutoForwardDiff{nothing}, var"#7#8", Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, Tuple{Vector{Float64}, Int64, Vector{Float64}}}}, Optimization.var"#76#91", Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}}, OptimizationFunction{true, Optimization.AutoForwardDiff{nothing}, var"#7#8", Optimization.var"#70#85"{Optimization.var"#68#83"{Vector{Float64}, Optimization.var"#67#82"{OptimizationFunction{true, Optimization.AutoForwardDiff{nothing}, var"#7#8", Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, Tuple{Vector{Float64}, Int64, Vector{Float64}}}, Int64}, Optimization.var"#67#82"{OptimizationFunction{true, Optimization.AutoForwardDiff{nothing}, var"#7#8", Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, Tuple{Vector{Float64}, Int64, Vector{Float64}}}}, Optimization.var"#74#89"{Optimization.var"#72#87"{Vector{Float64}, Optimization.var"#67#82"{OptimizationFunction{true, Optimization.AutoForwardDiff{nothing}, var"#7#8", Nothing, 
Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, Tuple{Vector{Float64}, Int64, Vector{Float64}}}, Int64}, Optimization.var"#67#82"{OptimizationFunction{true, Optimization.AutoForwardDiff{nothing}, var"#7#8", Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, Tuple{Vector{Float64}, Int64, Vector{Float64}}}}, Optimization.var"#76#91", Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}})(G::Vector{Float64}, θ::Vector{Float64})
    @ OptimizationOptimJL C:\Users\licer\.julia\packages\OptimizationOptimJL\fdrJg\src\OptimizationOptimJL.jl:196
 [12] value_gradient!!(obj::OnceDifferentiable{Float64, Vector{Float64}, Vector{Float64}}, x::Vector{Float64})
    @ NLSolversBase C:\Users\licer\.julia\packages\NLSolversBase\cfJrN\src\interface.jl:82
 [13] value_gradient!!(bw::Optim.BarrierWrapper{OnceDifferentiable{Float64, Vector{Float64}, Vector{Float64}}, Optim.BoxBarrier{Vector{Float64}, Vector{Float64}}, Float64, Float64, Vector{Float64}}, x::Vector{Float64})
    @ Optim C:\Users\licer\.julia\packages\Optim\6Lpjy\src\multivariate\solvers\constrained\fminbox.jl:81
 [14] initial_state(method::LBFGS{Optim.InverseDiagonal, LineSearches.InitialStatic{Float64}, LineSearches.HagerZhang{Float64, Base.RefValue{Bool}}, Optim.var"#66#67"{Vector{Float64}, Vector{Float64}, Fminbox{LBFGS{Nothing, LineSearches.InitialStatic{Float64}, LineSearches.HagerZhang{Float64, Base.RefValue{Bool}}, Optim.var"#19#21"}, Float64, Optim.var"#49#51"}, Optim.BarrierWrapper{OnceDifferentiable{Float64, Vector{Float64}, Vector{Float64}}, Optim.BoxBarrier{Vector{Float64}, Vector{Float64}}, Float64, 
Float64, Vector{Float64}}}}, options::Optim.Options{Float64, OptimizationOptimJL.var"#_cb#25"{OptimizationOptimJL.var"#23#29", Fminbox{LBFGS{Nothing, LineSearches.InitialStatic{Float64}, LineSearches.HagerZhang{Float64, Base.RefValue{Bool}}, Optim.var"#19#21"}, Float64, Optim.var"#49#51"}, Base.Iterators.Cycle{Tuple{Optimization.NullData}}}}, d::Optim.BarrierWrapper{OnceDifferentiable{Float64, Vector{Float64}, Vector{Float64}}, Optim.BoxBarrier{Vector{Float64}, Vector{Float64}}, Float64, Float64, Vector{Float64}}, initial_x::Vector{Float64})
    @ Optim C:\Users\licer\.julia\packages\Optim\6Lpjy\src\multivariate\solvers\first_order\l_bfgs.jl:164
 [15] optimize(df::OnceDifferentiable{Float64, Vector{Float64}, Vector{Float64}}, l::Vector{Float64}, u::Vector{Float64}, initial_x::Vector{Float64}, F::Fminbox{LBFGS{Nothing, LineSearches.InitialStatic{Float64}, LineSearches.HagerZhang{Float64, Base.RefValue{Bool}}, Optim.var"#19#21"}, Float64, Optim.var"#49#51"}, options::Optim.Options{Float64, OptimizationOptimJL.var"#_cb#25"{OptimizationOptimJL.var"#23#29", Fminbox{LBFGS{Nothing, LineSearches.InitialStatic{Float64}, LineSearches.HagerZhang{Float64, Base.RefValue{Bool}}, Optim.var"#19#21"}, Float64, Optim.var"#49#51"}, Base.Iterators.Cycle{Tuple{Optimization.NullData}}}})
    @ Optim C:\Users\licer\.julia\packages\Optim\6Lpjy\src\multivariate\solvers\constrained\fminbox.jl:322
 [16] ___solve(prob::OptimizationProblem{true, OptimizationFunction{true, Optimization.AutoForwardDiff{nothing}, var"#7#8", Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, Vector{Float64}, Tuple{Vector{Float64}, Int64, Vector{Float64}}, Vector{Float64}, Nothing, Nothing, Nothing, Base.Pairs{Symbol, Union{}, Tuple{}, NamedTuple{(), Tuple{}}}}, opt::Fminbox{LBFGS{Nothing, LineSearches.InitialStatic{Float64}, LineSearches.HagerZhang{Float64, Base.RefValue{Bool}}, Optim.var"#19#21"}, Float64, Optim.var"#49#51"}, data::Base.Iterators.Cycle{Tuple{Optimization.NullData}}; callback::Function, maxiters::Nothing, maxtime::Nothing, abstol::Nothing, reltol::Nothing, progress::Bool, kwargs::Base.Pairs{Symbol, Union{}, Tuple{}, NamedTuple{(), Tuple{}}})
    @ OptimizationOptimJL C:\Users\licer\.julia\packages\OptimizationOptimJL\fdrJg\src\OptimizationOptimJL.jl:215
 [17] ___solve
    @ C:\Users\licer\.julia\packages\OptimizationOptimJL\fdrJg\src\OptimizationOptimJL.jl:150 [inlined]
 [18] #__solve#2
    @ C:\Users\licer\.julia\packages\OptimizationOptimJL\fdrJg\src\OptimizationOptimJL.jl:56 [inlined]
 [19] __solve (repeats 2 times)
    @ C:\Users\licer\.julia\packages\OptimizationOptimJL\fdrJg\src\OptimizationOptimJL.jl:40 [inlined]
 [20] #solve#494
    @ C:\Users\licer\.julia\packages\SciMLBase\IJbT7\src\solve.jl:71 [inlined]
 [21] solve(::OptimizationProblem{true, OptimizationFunction{true, Optimization.AutoForwardDiff{nothing}, var"#7#8", Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, Vector{Float64}, Tuple{Vector{Float64}, Int64, Vector{Float64}}, Vector{Float64}, Nothing, Nothing, Nothing, Base.Pairs{Symbol, Union{}, Tuple{}, NamedTuple{(), Tuple{}}}}, ::LBFGS{Nothing, LineSearches.InitialStatic{Float64}, LineSearches.HagerZhang{Float64, Base.RefValue{Bool}}, Optim.var"#19#21"})
    @ SciMLBase C:\Users\licer\.julia\packages\SciMLBase\IJbT7\src\solve.jl:70
 [22] top-level scope
    @ c:\Users\licer\.julia\dev\ProfileLikelihood\dev\autodiff_tests.jl:119

This code errors because it tries to assign a dual number to the integrator, whose parameter is a Float64. I want this to work with dual numbers, and so, following the advice of https://discourse.julialang.org/t/updating-integrator-parameter-initial-condition-with-forwarddiff-dual-types/82862/2?u=legola18, I wanted to define the dual tag directly when constructing my integrator. But my `loglik` function requires this integrator to be defined before it is used, so I can't get the tag for `loglik`. I also initially tried PreallocationTools but ran into essentially the same difficulty.

Is there a way around this to implement the above advice, so that my integrator can be dual-valued and the above code actually works with Optimization.AutoForwardDiff? I'd prefer to keep the third argument "integrator" rather than putting it into the data tuple (the second argument of loglik, i.e. the p in the OptimizationProblem), but maybe that's needed.
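To make the circular dependency concrete, here is a sketch of what "declaring the tag directly" would look like with a hand-constructed tag and dual (illustrative only, since it cannot actually be written in this order):

using ForwardDiff
# The tag is derived from the function being differentiated and the input
# element type, but negloglik itself closes over the integrator, hence the circularity.
tag = ForwardDiff.Tag(negloglik, Float64)                # needs negloglik, which needs the integrator
λd  = ForwardDiff.Dual{typeof(tag)}(1.0, 1.0, 0.0, 0.0)  # value plus three partials
integrator = DifferentialEquations.init(ODEProblem(ode_fnc, y₀, (0.0, T), λd), Tsit5(); saveat = t)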

I would just do something to generalize the PreallocationTools.LazyBufferCache, like:

using Random, DifferentialEquations, LinearAlgebra, Optimization, OptimizationNLopt, OptimizationOptimJL

struct GeneralLazyBufferCache{F <: Function}
    bufs::Dict # a dictionary mapping types to buffers
    f::F
    GeneralLazyBufferCache(f::F = identity) where {F <: Function} = new{F}(Dict(), f) # start with an empty dict
end

# Return the buffer cached for the type of u, building it with b.f(u) on the
# first access for each new type.
function Base.getindex(b::GeneralLazyBufferCache, u::T) where T
    get!(b.bufs, T) do
        b.f(u)
    end
end

Random.seed!(2992999)
λ, y₀, σ = -0.5, 15.0, 0.1
T, n = 5.0, 200
Δt = T / n
t = [j * Δt for j in 0:n]
y = y₀ * exp.(λ * t)
yᵒ = y .+ [0.0, σ * randn(n)...]
ode_fnc(u, p, t) = p * u

# One integrator per parameter type: indexing with a Float64 or a Dual each
# constructs (once) an integrator compatible with that number type.
lbc = GeneralLazyBufferCache(function (p)
    DifferentialEquations.init(ODEProblem(ode_fnc, y₀, (0.0, T), p), Tsit5(); saveat = t)
end)
function loglik(θ, data, integrator)
    yᵒ, n, ε = data
    λ, σ, u0 = θ
    integrator.p = λ
    reinit!(integrator, u0)
    solve!(integrator)
    ε = yᵒ .- integrator.sol.u
    ℓ = -0.5n * log(2π * σ^2) - 0.5 / σ^2 * sum(ε.^2)
end
θ₀ = [-1.0, 0.5, 19.73]
negloglik = (θ, p) -> -loglik(θ, p, lbc[θ[1]]) # the function barrier: fetch the integrator built for typeof(θ[1])
fnc = OptimizationFunction(negloglik, Optimization.AutoForwardDiff())
ε = zeros(n)
prob = OptimizationProblem(fnc, θ₀, (yᵒ, n, ε), lb=[-10.0, 1e-6, 0.5], ub=[10.0, 10.0, 25.0])
solve(prob, LBFGS())

# u: 3-element Vector{Float64}:
# -0.49995439135388436
#  0.09595962728808986
# 14.997578175676887

This is a nice, simple, and general solution that just uses a small function barrier.
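To see why the barrier works, here is a toy illustration of the caching behaviour (my own sketch with a stand-in builder function, not part of the solution above): getindex keys the cache on the type of its argument, so plain Float64 evaluations and ForwardDiff.Dual evaluations each construct, and thereafter reuse, their own buffer.

using ForwardDiff
cache = GeneralLazyBufferCache(p -> fill(p, 3)) # toy builder standing in for the integrator constructor
a = cache[1.0]                                  # first Float64 key: builds and stores a Vector{Float64}
d = ForwardDiff.Dual{Nothing}(1.0, 1.0)         # a dual number with a dummy tag
b = cache[d]                                    # first Dual key: builds a separate dual-valued buffer
a === cache[2.0]                                # true: any later Float64 key reuses the first buffer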


And with the new PR, this example looks like:

using Random, DifferentialEquations, LinearAlgebra, Optimization, OptimizationNLopt, OptimizationOptimJL, PreallocationTools

Random.seed!(2992999)
λ, y₀, σ = -0.5, 15.0, 0.1
T, n = 5.0, 200
Δt = T / n
t = [j * Δt for j in 0:n]
y = y₀ * exp.(λ * t)
yᵒ = y .+ [0.0, σ * randn(n)...]
ode_fnc(u, p, t) = p * u

# GeneralLazyBufferCache is now provided by PreallocationTools
lbc = GeneralLazyBufferCache(function (p)
    DifferentialEquations.init(ODEProblem(ode_fnc, y₀, (0.0, T), p), Tsit5(); saveat = t)
end)
function loglik(θ, data, integrator)
    yᵒ, n, ε = data
    λ, σ, u0 = θ
    integrator.p = λ
    reinit!(integrator, u0)
    solve!(integrator)
    ε = yᵒ .- integrator.sol.u
    ℓ = -0.5n * log(2π * σ^2) - 0.5 / σ^2 * sum(ε.^2)
end
θ₀ = [-1.0, 0.5, 19.73]
negloglik = (θ, p) -> -loglik(θ, p, lbc[θ[1]])
fnc = OptimizationFunction(negloglik, Optimization.AutoForwardDiff())
ε = zeros(n)
prob = OptimizationProblem(fnc, θ₀, (yᵒ, n, ε), lb=[-10.0, 1e-6, 0.5], ub=[10.0, 10.0, 25.0])
solve(prob, LBFGS())

Amazing!! Many thanks - this is extremely useful.