ReverseDiff + SciMLSensitivity method ambiguity

Hi, I’m getting a method ambiguity when trying to use ReverseDiff + SciMLSensitivity.

ERROR: MethodError: kwcall(
  ::NamedTuple{(:save_everystep,), Tuple{Bool}},
  ::typeof(DiffEqBase.solve_up),
  ::ODEProblem{SVector{4, ReverseDiff.TrackedReal{Float64, Float64, Nothing}}, Tuple{Float64, Float64}, false, ReverseDiff.TrackedArray{Float64, Float64, 1, Vector{Float64}, Vector{Float64}}, ...<I removed some guff here>...}, Base.Pairs{Symbol, Union{}, Tuple{}, NamedTuple{(), Tuple{}}}, SciMLBase.StandardODEProblem},
  ::ForwardSensitivity{0, true, Val{:central}}, 
  ::SVector{4, ReverseDiff.TrackedReal{Float64, Float64, Nothing}}, 
  ::ReverseDiff.TrackedArray{Float64, Float64, 1, Vector{Float64}, Vector{Float64}}) 
is ambiguous.

Candidates:
  kwcall(::Any, ::typeof(DiffEqBase.solve_up), prob::SciMLBase.AbstractDEProblem, sensealg::Union{Nothing, SciMLBase.AbstractOverloadingSensitivityAlgorithm}, u0::AbstractArray{<:ReverseDiff.TrackedReal}, p::AbstractArray{<:ReverseDiff.TrackedReal}, args...)
    @ DiffEqBaseReverseDiffExt C:\Users\dan_l\.julia\packages\DiffEqBase\jvL5B\ext\DiffEqBaseReverseDiffExt.jl:98
  kwcall(::Any, ::typeof(DiffEqBase.solve_up), prob::SciMLBase.AbstractDEProblem, sensealg::Union{Nothing, SciMLBase.AbstractOverloadingSensitivityAlgorithm}, u0::AbstractArray{<:ReverseDiff.TrackedReal}, p, args...)
    @ DiffEqBaseReverseDiffExt C:\Users\dan_l\.julia\packages\DiffEqBase\jvL5B\ext\DiffEqBaseReverseDiffExt.jl:118
  kwcall(::Any, ::typeof(DiffEqBase.solve_up), prob::SciMLBase.AbstractDEProblem, sensealg::Union{Nothing, SciMLBase.AbstractOverloadingSensitivityAlgorithm}, u0, p::ReverseDiff.TrackedArray, args...)
    @ DiffEqBaseReverseDiffExt C:\Users\dan_l\.julia\packages\DiffEqBase\jvL5B\ext\DiffEqBaseReverseDiffExt.jl:81
  kwcall(::Any, ::typeof(DiffEqBase.solve_up), prob::SciMLBase.AbstractDEProblem, sensealg::Union{Nothing, SciMLBase.AbstractOverloadingSensitivityAlgorithm}, u0, p::AbstractArray{<:ReverseDiff.TrackedReal}, args...)
    @ DiffEqBaseReverseDiffExt C:\Users\dan_l\.julia\packages\DiffEqBase\jvL5B\ext\DiffEqBaseReverseDiffExt.jl:109

Possible fix, define
  kwcall(::Any, ::typeof(DiffEqBase.solve_up), ::SciMLBase.AbstractDEProblem, ::Union{Nothing, SciMLBase.AbstractOverloadingSensitivityAlgorithm}, ::AbstractArray{<:ReverseDiff.TrackedReal}, ::ReverseDiff.TrackedArray{V, D} where {V<:Real, D<:Real}, ::Vararg{Any})

As far as I can tell, there are several solve_up methods (internal to DiffEqBase/SciMLSensitivity?) for different combinations of types for u0 and p.

When I try to solve my ODE problem, solve_up gets called with

u0::SVector{4, ReverseDiff.TrackedReal{Float64, Float64, Nothing}}
p::ReverseDiff.TrackedArray{Float64, Float64, 1, Vector{Float64}, Vector{Float64}}

but there is an ambiguity in which method to use.

Is this a bug, or am I doing something wrong here?

Can you open an issue with an MWE?

Hi Chris, thanks for taking a look.

I’ve been trying to cook up a minimal working example. Here is what I have so far.

using DifferentialEquations
using DiffResults
using ReverseDiff
using SciMLSensitivity
using StaticArrays

function get_ode_function()
    ODEFunction{false}() do x, p, t
        return x .+ p[1]
    end
end

function get_ode_problem(u0, tspan, p)
    f = get_ode_function()
    ODEProblem{false}(f, u0, tspan, p)
end

function get_loss_function(u0, tspan, solver_options)
    function loss(p)
        prob = get_ode_problem(u0, tspan, p)
        sol = solve(prob, Tsit5(); solver_options...)
        sol[end][1]
    end
end

function get_reversediff_closure(f, p)
    diffcfg = ReverseDiff.GradientConfig(p)
    diffresult = DiffResults.GradientResult(p)
    function autodiff_closure(p)
      ReverseDiff.gradient!(diffresult, f, p, diffcfg)
      cost, gradient = DiffResults.value(diffresult), DiffResults.gradient(diffresult)
      cost, gradient
    end
end

function optimize(u0, tspan, p, solver_options)
    f = get_loss_function(u0, tspan, solver_options)
    df = get_reversediff_closure(f, p)
    df(p)
end


u0 = SVector(1.0, 2.0, 3.0, 4.0)
tspan = 5.0
p = [1.0]

solver_options = (sensealg=ForwardDiffSensitivity(), )
optimize(u0, tspan, p, solver_options) # Works

solver_options = (sensealg=BacksolveAdjoint(), )
optimize(u0, tspan, p, solver_options) # Doesn't work

This is a toy example. The first case (ForwardDiffSensitivity) works, but in the second case (BacksolveAdjoint) I get a warning that the automatic AD choice failed, followed by an inexplicable error deep in SciMLSensitivity.

I’ll post the stacktrace in a separate comment…

I would like to get adjoint sensitivity calculations working. I don’t really care which AD package I use outside the ODE, as long as it works. Until now I have only been using ForwardDiff, and therefore haven’t needed SciMLSensitivity. If the problem is with ReverseDiff and something else is more suitable, I’m happy to jump ship, but I need to be able to use an adjoint method for the ODE, which uses ForwardDiff internally.

Stacktrace part 1

┌ Warning: Automatic AD choice of autojacvec failed in ODE adjoint, failing back to ODE adjoint + numerical vjp
└ @ SciMLSensitivity C:\Users\dan_l\.julia\packages\SciMLSensitivity\NhfkF\src\sensitivity_interface.jl:381
ERROR: type ODEBacksolveSensitivityFunction has no field sol
Stacktrace:
  [1] getproperty
    @ .\Base.jl:37 [inlined]
  [2] SciMLSensitivity.ReverseLossCallback(sensefun::SciMLSensitivity.ODEBacksolveSensitivityFunction{SciMLSensitivity.AdjointDiffCache{SciMLBase.UDerivativeWrapper{ODEFunction{false, SciMLBase.FullSpecialize, 
var"#7#8", LinearAlgebra.UniformScaling{Bool}, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED), Nothing, Nothing}, Float64, Vector{Float64}}, SciMLSensitivity.ParamGradientWrapper{ODEFunction{false, SciMLBase.FullSpecialize, var"#7#8", LinearAlgebra.UniformScaling{Bool}, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED), Nothing, Nothing}, Float64, SVector{4, Float64}}, Nothing, Matrix{Float64}, Matrix{Float64}, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, SOneTo{4}, UnitRange{Int64}, LinearAlgebra.UniformScaling{Bool}}, BacksolveAdjoint{0, true, Val{:central}, Bool}, SVector{4, Float64}, ODEProblem{SVector{4, Float64}, Tuple{Float64, Float64}, false, Vector{Float64}, ODEFunction{false, SciMLBase.FullSpecialize, var"#7#8", LinearAlgebra.UniformScaling{Bool}, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, 
typeof(SciMLBase.DEFAULT_OBSERVED), Nothing, Nothing}, Base.Pairs{Symbol, Union{}, Tuple{}, NamedTuple{(), Tuple{}}}, SciMLBase.StandardODEProblem}, ODEFunction{false, SciMLBase.FullSpecialize, var"#7#8", LinearAlgebra.UniformScaling{Bool}, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED), Nothing, Nothing}}, λ::Vector{Float64}, t::Vector{Float64}, dgdu::Function, dgdp::Nothing, cur_time::Base.RefValue{Int64})    @ SciMLSensitivity C:\Users\dan_l\.julia\packages\SciMLSensitivity\NhfkF\src\adjoint_common.jl:491   
  [3] generate_callbacks(sensefun::SciMLSensitivity.ODEBacksolveSensitivityFunction{SciMLSensitivity.AdjointDiffCache{SciMLBase.UDerivativeWrapper{ODEFunction{false, SciMLBase.FullSpecialize, var"#7#8", LinearAlgebra.UniformScaling{Bool}, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED), Nothing, Nothing}, 
Float64, Vector{Float64}}, SciMLSensitivity.ParamGradientWrapper{ODEFunction{false, SciMLBase.FullSpecialize, var"#7#8", LinearAlgebra.UniformScaling{Bool}, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED), Nothing, Nothing}, Float64, SVector{4, Float64}}, Nothing, Matrix{Float64}, Matrix{Float64}, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, SOneTo{4}, UnitRange{Int64}, LinearAlgebra.UniformScaling{Bool}}, BacksolveAdjoint{0, true, Val{:central}, Bool}, SVector{4, Float64}, ODEProblem{SVector{4, Float64}, Tuple{Float64, Float64}, false, Vector{Float64}, ODEFunction{false, SciMLBase.FullSpecialize, var"#7#8", LinearAlgebra.UniformScaling{Bool}, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED), Nothing, Nothing}, Base.Pairs{Symbol, Union{}, Tuple{}, NamedTuple{(), Tuple{}}}, SciMLBase.StandardODEProblem}, ODEFunction{false, SciMLBase.FullSpecialize, var"#7#8", LinearAlgebra.UniformScaling{Bool}, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED), Nothing, Nothing}}, dgdu::Function, dgdp::Nothing, λ::Vector{Float64}, t::Vector{Float64}, t0::Float64, callback::Nothing, init_cb::Bool, terminated::Bool)
    @ SciMLSensitivity C:\Users\dan_l\.julia\packages\SciMLSensitivity\NhfkF\src\adjoint_common.jl:582   
  [4] ODEAdjointProblem(sol::ODESolution{Float64, 2, Vector{SVector{4, Float64}}, Nothing, Nothing, Vector{Float64}, Vector{Vector{SVector{4, Float64}}}, ODEProblem{SVector{4, Float64}, Tuple{Float64, Float64}, false, Vector{Float64}, ODEFunction{false, SciMLBase.FullSpecialize, var"#7#8", LinearAlgebra.UniformScaling{Bool}, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED), Nothing, Nothing}, Base.Pairs{Symbol, Union{}, Tuple{}, NamedTuple{(), Tuple{}}}, SciMLBase.StandardODEProblem}, Tsit5{typeof(OrdinaryDiffEq.trivial_limiter!), typeof(OrdinaryDiffEq.trivial_limiter!), Static.False}, OrdinaryDiffEq.InterpolationData{ODEFunction{false, SciMLBase.FullSpecialize, var"#7#8", LinearAlgebra.UniformScaling{Bool}, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED), Nothing, Nothing}, Vector{SVector{4, Float64}}, Vector{Float64}, Vector{Vector{SVector{4, Float64}}}, OrdinaryDiffEq.Tsit5ConstantCache}, DiffEqBase.Stats, Nothing}, sensealg::BacksolveAdjoint{0, true, Val{:central}, Bool}, alg::Tsit5{typeof(OrdinaryDiffEq.trivial_limiter!), typeof(OrdinaryDiffEq.trivial_limiter!), Static.False}, t::Vector{Float64}, dgdu_discrete::SciMLSensitivity.var"#df_oop#285"{SciMLSensitivity.var"#df_oop#276#286"{Matrix{Float64}, Colon}}, dgdp_discrete::Nothing, dgdu_continuous::Nothing, dgdp_continuous::Nothing, g::Nothing, ::Val{true}; checkpoints::Vector{Float64}, callback::Nothing, z0::Nothing, M::Nothing, nilss::Nothing, tspan::Tuple{Float64, Float64}, 
kwargs::Base.Pairs{Symbol, Real, Tuple{Symbol, Symbol, Symbol}, NamedTuple{(:abstol, :reltol, :verbose), 
Tuple{Float64, Float64, Bool}}})
    @ SciMLSensitivity C:\Users\dan_l\.julia\packages\SciMLSensitivity\NhfkF\src\backsolve_adjoint.jl:186
  [5] _adjoint_sensitivities(sol::ODESolution{Float64, 2, Vector{SVector{4, Float64}}, Nothing, Nothing, 
Vector{Float64}, Vector{Vector{SVector{4, Float64}}}, ODEProblem{SVector{4, Float64}, Tuple{Float64, Float64}, false, Vector{Float64}, ODEFunction{false, SciMLBase.FullSpecialize, var"#7#8", LinearAlgebra.UniformScaling{Bool}, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED), Nothing, Nothing}, Base.Pairs{Symbol, Union{}, Tuple{}, NamedTuple{(), Tuple{}}}, SciMLBase.StandardODEProblem}, Tsit5{typeof(OrdinaryDiffEq.trivial_limiter!), typeof(OrdinaryDiffEq.trivial_limiter!), Static.False}, OrdinaryDiffEq.InterpolationData{ODEFunction{false, SciMLBase.FullSpecialize, var"#7#8", LinearAlgebra.UniformScaling{Bool}, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED), Nothing, Nothing}, Vector{SVector{4, Float64}}, Vector{Float64}, Vector{Vector{SVector{4, Float64}}}, OrdinaryDiffEq.Tsit5ConstantCache}, DiffEqBase.Stats, Nothing}, sensealg::BacksolveAdjoint{0, true, Val{:central}, Bool}, alg::Tsit5{typeof(OrdinaryDiffEq.trivial_limiter!), typeof(OrdinaryDiffEq.trivial_limiter!), Static.False}; t::Vector{Float64}, dgdu_discrete::Function, dgdp_discrete::Nothing, dgdu_continuous::Nothing, dgdp_continuous::Nothing, g::Nothing, abstol::Float64, reltol::Float64, checkpoints::Vector{Float64}, corfunc_analytical::Nothing, callback::Nothing, kwargs::Base.Pairs{Symbol, Bool, Tuple{Symbol}, NamedTuple{(:verbose,), Tuple{Bool}}})
    @ SciMLSensitivity C:\Users\dan_l\.julia\packages\SciMLSensitivity\NhfkF\src\sensitivity_interface.jl:407
  [6] _adjoint_sensitivities
    @ C:\Users\dan_l\.julia\packages\SciMLSensitivity\NhfkF\src\sensitivity_interface.jl:390 [inlined]   
  [7] adjoint_sensitivities(sol::ODESolution{Float64, 2, Vector{SVector{4, Float64}}, Nothing, Nothing, Vector{Float64}, Vector{Vector{SVector{4, Float64}}}, ODEProblem{SVector{4, Float64}, Tuple{Float64, Float64}, false, Vector{Float64}, ODEFunction{false, SciMLBase.FullSpecialize, var"#7#8", LinearAlgebra.UniformScaling{Bool}, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED), Nothing, Nothing}, Base.Pairs{Symbol, Union{}, Tuple{}, NamedTuple{(), Tuple{}}}, SciMLBase.StandardODEProblem}, Tsit5{typeof(OrdinaryDiffEq.trivial_limiter!), typeof(OrdinaryDiffEq.trivial_limiter!), Static.False}, OrdinaryDiffEq.InterpolationData{ODEFunction{false, SciMLBase.FullSpecialize, var"#7#8", LinearAlgebra.UniformScaling{Bool}, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED), Nothing, Nothing}, Vector{SVector{4, Float64}}, Vector{Float64}, Vector{Vector{SVector{4, Float64}}}, OrdinaryDiffEq.Tsit5ConstantCache}, DiffEqBase.Stats, Nothing}, args::Tsit5{typeof(OrdinaryDiffEq.trivial_limiter!), typeof(OrdinaryDiffEq.trivial_limiter!), Static.False}; sensealg::BacksolveAdjoint{0, true, Val{:central}, Nothing}, verbose::Bool, kwargs::Base.Pairs{Symbol, Any, Tuple{Symbol, Symbol, Symbol}, NamedTuple{(:t, :dgdu_discrete, :callback), Tuple{Vector{Float64}, SciMLSensitivity.var"#df_oop#285"{SciMLSensitivity.var"#df_oop#276#286"{Matrix{Float64}, Colon}}, 
Nothing}}})
    @ SciMLSensitivity C:\Users\dan_l\.julia\packages\SciMLSensitivity\NhfkF\src\sensitivity_interface.jl:382
  [8] (::SciMLSensitivity.var"#adjoint_sensitivity_backpass#283"{Base.Pairs{Symbol, Union{}, Tuple{}, NamedTuple{(), Tuple{}}}, Tsit5{typeof(OrdinaryDiffEq.trivial_limiter!), typeof(OrdinaryDiffEq.trivial_limiter!), Static.False}, BacksolveAdjoint{0, true, Val{:central}, Nothing}, SVector{4, Float64}, Vector{Float64}, SciMLBase.ReverseDiffOriginator, Tuple{}, Colon, NamedTuple{(), Tuple{}}})(Δ::Matrix{Float64})      
    @ SciMLSensitivity C:\Users\dan_l\.julia\packages\SciMLSensitivity\NhfkF\src\concrete_solve.jl:529   
  [9] actual_adjoint
    @ C:\Users\dan_l\.julia\packages\DiffEqBase\jvL5B\ext\DiffEqBaseReverseDiffExt.jl:134 [inlined]      
 [10] special_reverse_exec!(instruction::ReverseDiff.SpecialInstruction{typeof(DiffEqBase.solve_up), Tuple{ODEProblem{SVector{4, Float64}, Tuple{Float64, Float64}, false, ReverseDiff.TrackedArray{Float64, Float64, 1, Vector{Float64}, Vector{Float64}}, ODEFunction{false, SciMLBase.FullSpecialize, var"#7#8", LinearAlgebra.UniformScaling{Bool}, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED), Nothing, Nothing}, 
Base.Pairs{Symbol, Union{}, Tuple{}, NamedTuple{(), Tuple{}}}, SciMLBase.StandardODEProblem}, BacksolveAdjoint{0, true, Val{:central}, Nothing}, SVector{4, Float64}, ReverseDiff.TrackedArray{Float64, Float64, 1, Vector{Float64}, Vector{Float64}}, Tsit5{typeof(OrdinaryDiffEq.trivial_limiter!), typeof(OrdinaryDiffEq.trivial_limiter!), Static.False}}, ReverseDiff.TrackedArray{Float64, Float64, 2, Matrix{Float64}, Matrix{Float64}}, Tuple{DiffEqBaseReverseDiffExt.var"#actual_adjoint#23"{Tuple{Tsit5{typeof(OrdinaryDiffEq.trivial_limiter!), typeof(OrdinaryDiffEq.trivial_limiter!), Static.False}}, Tuple{ODESolution{Float64, 2, Vector{SVector{4, Float64}}, Nothing, Nothing, Vector{Float64}, Nothing, ODEProblem{SVector{4, Float64}, Tuple{Float64, Float64}, false, Vector{Float64}, ODEFunction{false, SciMLBase.FullSpecialize, var"#7#8", LinearAlgebra.UniformScaling{Bool}, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, 
Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED), Nothing, Nothing}, Base.Pairs{Symbol, Union{}, Tuple{}, NamedTuple{(), Tuple{}}}, SciMLBase.StandardODEProblem}, Tsit5{typeof(OrdinaryDiffEq.trivial_limiter!), typeof(OrdinaryDiffEq.trivial_limiter!), Static.False}, SciMLBase.SensitivityInterpolation{Vector{Float64}, Vector{SVector{4, Float64}}}, DiffEqBase.Stats, Nothing}, SciMLSensitivity.var"#adjoint_sensitivity_backpass#283"{Base.Pairs{Symbol, Union{}, Tuple{}, NamedTuple{(), Tuple{}}}, Tsit5{typeof(OrdinaryDiffEq.trivial_limiter!), typeof(OrdinaryDiffEq.trivial_limiter!), Static.False}, BacksolveAdjoint{0, true, Val{:central}, Nothing}, SVector{4, Float64}, Vector{Float64}, SciMLBase.ReverseDiffOriginator, Tuple{}, Colon, NamedTuple{(), Tuple{}}}}}, DiffEqBaseReverseDiffExt.var"##solve_up#292#21"{DiffEqBaseReverseDiffExt.var"##solve_up#292#20#22"}, NamedTuple{(), Tuple{}}}})
    @ DiffEqBaseReverseDiffExt C:\Users\dan_l\.julia\packages\ReverseDiff\UJhiD\src\macros.jl:216        
 [11] reverse_exec!(instruction::ReverseDiff.SpecialInstruction{typeof(DiffEqBase.solve_up), Tuple{ODEProblem{SVector{4, Float64}, Tuple{Float64, Float64}, false, ReverseDiff.TrackedArray{Float64, Float64, 1, Vector{Float64}, Vector{Float64}}, ODEFunction{false, SciMLBase.FullSpecialize, var"#7#8", LinearAlgebra.UniformScaling{Bool}, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED), Nothing, Nothing}, Base.Pairs{Symbol, Union{}, Tuple{}, NamedTuple{(), Tuple{}}}, SciMLBase.StandardODEProblem}, BacksolveAdjoint{0, true, Val{:central}, Nothing}, SVector{4, Float64}, ReverseDiff.TrackedArray{Float64, Float64, 1, Vector{Float64}, Vector{Float64}}, Tsit5{typeof(OrdinaryDiffEq.trivial_limiter!), typeof(OrdinaryDiffEq.trivial_limiter!), Static.False}}, ReverseDiff.TrackedArray{Float64, Float64, 2, Matrix{Float64}, Matrix{Float64}}, Tuple{DiffEqBaseReverseDiffExt.var"#actual_adjoint#23"{Tuple{Tsit5{typeof(OrdinaryDiffEq.trivial_limiter!), typeof(OrdinaryDiffEq.trivial_limiter!), Static.False}}, Tuple{ODESolution{Float64, 2, Vector{SVector{4, Float64}}, Nothing, Nothing, Vector{Float64}, Nothing, ODEProblem{SVector{4, Float64}, Tuple{Float64, Float64}, false, Vector{Float64}, ODEFunction{false, SciMLBase.FullSpecialize, var"#7#8", LinearAlgebra.UniformScaling{Bool}, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED), Nothing, Nothing}, Base.Pairs{Symbol, Union{}, Tuple{}, NamedTuple{(), Tuple{}}}, SciMLBase.StandardODEProblem}, Tsit5{typeof(OrdinaryDiffEq.trivial_limiter!), typeof(OrdinaryDiffEq.trivial_limiter!), Static.False}, SciMLBase.SensitivityInterpolation{Vector{Float64}, Vector{SVector{4, Float64}}}, DiffEqBase.Stats, Nothing}, SciMLSensitivity.var"#adjoint_sensitivity_backpass#283"{Base.Pairs{Symbol, Union{}, Tuple{}, NamedTuple{(), Tuple{}}}, Tsit5{typeof(OrdinaryDiffEq.trivial_limiter!), typeof(OrdinaryDiffEq.trivial_limiter!), Static.False}, 
BacksolveAdjoint{0, true, Val{:central}, Nothing}, SVector{4, Float64}, Vector{Float64}, SciMLBase.ReverseDiffOriginator, Tuple{}, Colon, NamedTuple{(), Tuple{}}}}}, DiffEqBaseReverseDiffExt.var"##solve_up#292#21"{DiffEqBaseReverseDiffExt.var"##solve_up#292#20#22"}, NamedTuple{(), Tuple{}}}})
    @ ReverseDiff C:\Users\dan_l\.julia\packages\ReverseDiff\UJhiD\src\tape.jl:93
 [12] reverse_pass!(tape::Vector{ReverseDiff.AbstractInstruction})
    @ ReverseDiff C:\Users\dan_l\.julia\packages\ReverseDiff\UJhiD\src\tape.jl:87
 [13] reverse_pass!
    @ C:\Users\dan_l\.julia\packages\ReverseDiff\UJhiD\src\api\tape.jl:36 [inlined]
 [14] seeded_reverse_pass!(result::DiffResults.MutableDiffResult{1, Float64, Tuple{Vector{Float64}}}, output::ReverseDiff.TrackedReal{Float64, Float64, ReverseDiff.TrackedArray{Float64, Float64, 2, Matrix{Float64}, Matrix{Float64}}}, input::ReverseDiff.TrackedArray{Float64, Float64, 1, Vector{Float64}, Vector{Float64}}, tape::ReverseDiff.GradientTape{var"#loss#9"{SVector{4, Float64}, Float64, NamedTuple{(:sensealg,), Tuple{BacksolveAdjoint{0, true, Val{:central}, Nothing}}}}, ReverseDiff.TrackedArray{Float64, Float64, 1, Vector{Float64}, Vector{Float64}}, ReverseDiff.TrackedReal{Float64, Float64, ReverseDiff.TrackedArray{Float64, Float64, 2, Matrix{Float64}, Matrix{Float64}}}})
    @ ReverseDiff C:\Users\dan_l\.julia\packages\ReverseDiff\UJhiD\src\api\utils.jl:31
 [15] seeded_reverse_pass!(result::DiffResults.MutableDiffResult{1, Float64, Tuple{Vector{Float64}}}, t::ReverseDiff.GradientTape{var"#loss#9"{SVector{4, Float64}, Float64, NamedTuple{(:sensealg,), Tuple{BacksolveAdjoint{0, true, Val{:central}, Nothing}}}}, ReverseDiff.TrackedArray{Float64, Float64, 1, Vector{Float64}, Vector{Float64}}, ReverseDiff.TrackedReal{Float64, Float64, ReverseDiff.TrackedArray{Float64, Float64, 2, Matrix{Float64}, Matrix{Float64}}}})
    @ ReverseDiff C:\Users\dan_l\.julia\packages\ReverseDiff\UJhiD\src\api\tape.jl:47
 [16] gradient!(result::DiffResults.MutableDiffResult{1, Float64, Tuple{Vector{Float64}}}, f::Function, input::Vector{Float64}, cfg::ReverseDiff.GradientConfig{ReverseDiff.TrackedArray{Float64, Float64, 1, Vector{Float64}, Vector{Float64}}})
    @ ReverseDiff C:\Users\dan_l\.julia\packages\ReverseDiff\UJhiD\src\api\gradients.jl:42
 [17] autodiff_closure
    @ c:\Users\dan_l\Dropbox (Cambridge University)\shared_Daniel\code\PBCTuning.jl\examples\reversediff_mwe.jl:30 [inlined]
 [18] optimize(u0::SVector{4, Float64}, tspan::Float64, p::Vector{Float64}, solver_options::NamedTuple{(:sensealg,), Tuple{BacksolveAdjoint{0, true, Val{:central}, Nothing}}})
    @ Main c:\Users\dan_l\Dropbox (Cambridge University)\shared_Daniel\code\PBCTuning.jl\examples\reversediff_mwe.jl:39
 [19] top-level scope
    @ c:\Users\dan_l\Dropbox (Cambridge University)\shared_Daniel\code\PBCTuning.jl\examples\reversediff_mwe.jl:51

Stacktrace part 2

caused by: type ODEBacksolveSensitivityFunction has no field sol
Stacktrace:
  [1] getproperty
    @ .\Base.jl:37 [inlined]
  [2] SciMLSensitivity.ReverseLossCallback(sensefun::SciMLSensitivity.ODEBacksolveSensitivityFunction{SciMLSensitivity.AdjointDiffCache{Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, SOneTo{4}, UnitRange{Int64}, LinearAlgebra.UniformScaling{Bool}}, BacksolveAdjoint{0, true, Val{:central}, ZygoteVJP}, SVector{4, Float64}, ODEProblem{SVector{4, Float64}, Tuple{Float64, Float64}, false, Vector{Float64}, ODEFunction{false, SciMLBase.FullSpecialize, var"#7#8", LinearAlgebra.UniformScaling{Bool}, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED), Nothing, Nothing}, Base.Pairs{Symbol, Union{}, Tuple{}, NamedTuple{(), Tuple{}}}, SciMLBase.StandardODEProblem}, ODEFunction{false, SciMLBase.FullSpecialize, var"#7#8", LinearAlgebra.UniformScaling{Bool}, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, 
Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED), Nothing, Nothing}}, λ::Vector{Float64}, t::Vector{Float64}, dgdu::Function, dgdp::Nothing, cur_time::Base.RefValue{Int64})
    @ SciMLSensitivity C:\Users\dan_l\.julia\packages\SciMLSensitivity\NhfkF\src\adjoint_common.jl:491
  [3] generate_callbacks(sensefun::SciMLSensitivity.ODEBacksolveSensitivityFunction{SciMLSensitivity.AdjointDiffCache{Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, SOneTo{4}, UnitRange{Int64}, LinearAlgebra.UniformScaling{Bool}}, BacksolveAdjoint{0, true, Val{:central}, ZygoteVJP}, SVector{4, Float64}, ODEProblem{SVector{4, Float64}, Tuple{Float64, Float64}, false, Vector{Float64}, ODEFunction{false, SciMLBase.FullSpecialize, var"#7#8", LinearAlgebra.UniformScaling{Bool}, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED), Nothing, Nothing}, Base.Pairs{Symbol, Union{}, Tuple{}, NamedTuple{(), Tuple{}}}, SciMLBase.StandardODEProblem}, ODEFunction{false, SciMLBase.FullSpecialize, var"#7#8", LinearAlgebra.UniformScaling{Bool}, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, 
typeof(SciMLBase.DEFAULT_OBSERVED), Nothing, Nothing}}, dgdu::Function, dgdp::Nothing, λ::Vector{Float64}, t::Vector{Float64}, t0::Float64, callback::Nothing, init_cb::Bool, terminated::Bool)
    @ SciMLSensitivity C:\Users\dan_l\.julia\packages\SciMLSensitivity\NhfkF\src\adjoint_common.jl:582   
  [4] ODEAdjointProblem(sol::ODESolution{Float64, 2, Vector{SVector{4, Float64}}, Nothing, Nothing, Vector{Float64}, Vector{Vector{SVector{4, Float64}}}, ODEProblem{SVector{4, Float64}, Tuple{Float64, Float64}, false, Vector{Float64}, ODEFunction{false, SciMLBase.FullSpecialize, var"#7#8", LinearAlgebra.UniformScaling{Bool}, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED), Nothing, Nothing}, Base.Pairs{Symbol, Union{}, Tuple{}, NamedTuple{(), Tuple{}}}, SciMLBase.StandardODEProblem}, Tsit5{typeof(OrdinaryDiffEq.trivial_limiter!), typeof(OrdinaryDiffEq.trivial_limiter!), Static.False}, OrdinaryDiffEq.InterpolationData{ODEFunction{false, SciMLBase.FullSpecialize, var"#7#8", LinearAlgebra.UniformScaling{Bool}, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED), Nothing, Nothing}, Vector{SVector{4, Float64}}, Vector{Float64}, Vector{Vector{SVector{4, Float64}}}, OrdinaryDiffEq.Tsit5ConstantCache}, DiffEqBase.Stats, Nothing}, sensealg::BacksolveAdjoint{0, true, Val{:central}, ZygoteVJP}, alg::Tsit5{typeof(OrdinaryDiffEq.trivial_limiter!), typeof(OrdinaryDiffEq.trivial_limiter!), Static.False}, t::Vector{Float64}, dgdu_discrete::SciMLSensitivity.var"#df_oop#285"{SciMLSensitivity.var"#df_oop#276#286"{Matrix{Float64}, Colon}}, dgdp_discrete::Nothing, dgdu_continuous::Nothing, dgdp_continuous::Nothing, g::Nothing, ::Val{true}; checkpoints::Vector{Float64}, callback::Nothing, z0::Nothing, M::Nothing, nilss::Nothing, tspan::Tuple{Float64, Float64}, kwargs::Base.Pairs{Symbol, Real, Tuple{Symbol, Symbol, Symbol}, NamedTuple{(:abstol, :reltol, :verbose), Tuple{Float64, Float64, Bool}}})
    @ SciMLSensitivity C:\Users\dan_l\.julia\packages\SciMLSensitivity\NhfkF\src\backsolve_adjoint.jl:186
  [5] _adjoint_sensitivities(sol::ODESolution{Float64, 2, Vector{SVector{4, Float64}}, Nothing, Nothing, 
Vector{Float64}, Vector{Vector{SVector{4, Float64}}}, ODEProblem{SVector{4, Float64}, Tuple{Float64, Float64}, false, Vector{Float64}, ODEFunction{false, SciMLBase.FullSpecialize, var"#7#8", LinearAlgebra.UniformScaling{Bool}, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED), Nothing, Nothing}, Base.Pairs{Symbol, Union{}, Tuple{}, NamedTuple{(), Tuple{}}}, SciMLBase.StandardODEProblem}, Tsit5{typeof(OrdinaryDiffEq.trivial_limiter!), typeof(OrdinaryDiffEq.trivial_limiter!), Static.False}, OrdinaryDiffEq.InterpolationData{ODEFunction{false, SciMLBase.FullSpecialize, var"#7#8", LinearAlgebra.UniformScaling{Bool}, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED), Nothing, Nothing}, Vector{SVector{4, Float64}}, Vector{Float64}, Vector{Vector{SVector{4, Float64}}}, OrdinaryDiffEq.Tsit5ConstantCache}, DiffEqBase.Stats, Nothing}, sensealg::BacksolveAdjoint{0, true, Val{:central}, ZygoteVJP}, alg::Tsit5{typeof(OrdinaryDiffEq.trivial_limiter!), typeof(OrdinaryDiffEq.trivial_limiter!), Static.False}; t::Vector{Float64}, dgdu_discrete::Function, dgdp_discrete::Nothing, dgdu_continuous::Nothing, dgdp_continuous::Nothing, g::Nothing, abstol::Float64, reltol::Float64, checkpoints::Vector{Float64}, corfunc_analytical::Nothing, callback::Nothing, kwargs::Base.Pairs{Symbol, Bool, Tuple{Symbol}, NamedTuple{(:verbose,), Tuple{Bool}}})
    @ SciMLSensitivity C:\Users\dan_l\.julia\packages\SciMLSensitivity\NhfkF\src\sensitivity_interface.jl:407
  [6] _adjoint_sensitivities
    @ C:\Users\dan_l\.julia\packages\SciMLSensitivity\NhfkF\src\sensitivity_interface.jl:390 [inlined]   
  [7] adjoint_sensitivities(sol::ODESolution{Float64, 2, Vector{SVector{4, Float64}}, Nothing, Nothing, Vector{Float64}, Vector{Vector{SVector{4, Float64}}}, ODEProblem{SVector{4, Float64}, Tuple{Float64, Float64}, false, Vector{Float64}, ODEFunction{false, SciMLBase.FullSpecialize, var"#7#8", LinearAlgebra.UniformScaling{Bool}, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED), Nothing, Nothing}, Base.Pairs{Symbol, Union{}, Tuple{}, NamedTuple{(), Tuple{}}}, SciMLBase.StandardODEProblem}, Tsit5{typeof(OrdinaryDiffEq.trivial_limiter!), typeof(OrdinaryDiffEq.trivial_limiter!), Static.False}, OrdinaryDiffEq.InterpolationData{ODEFunction{false, SciMLBase.FullSpecialize, var"#7#8", LinearAlgebra.UniformScaling{Bool}, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED), Nothing, Nothing}, Vector{SVector{4, Float64}}, Vector{Float64}, Vector{Vector{SVector{4, Float64}}}, OrdinaryDiffEq.Tsit5ConstantCache}, DiffEqBase.Stats, Nothing}, args::Tsit5{typeof(OrdinaryDiffEq.trivial_limiter!), typeof(OrdinaryDiffEq.trivial_limiter!), Static.False}; sensealg::BacksolveAdjoint{0, true, Val{:central}, Nothing}, verbose::Bool, kwargs::Base.Pairs{Symbol, Any, Tuple{Symbol, Symbol, Symbol}, NamedTuple{(:t, :dgdu_discrete, :callback), Tuple{Vector{Float64}, SciMLSensitivity.var"#df_oop#285"{SciMLSensitivity.var"#df_oop#276#286"{Matrix{Float64}, Colon}}, 
Nothing}}})
    @ SciMLSensitivity C:\Users\dan_l\.julia\packages\SciMLSensitivity\NhfkF\src\sensitivity_interface.jl:378
  [8] (::SciMLSensitivity.var"#adjoint_sensitivity_backpass#283"{Base.Pairs{Symbol, Union{}, Tuple{}, NamedTuple{(), Tuple{}}}, Tsit5{typeof(OrdinaryDiffEq.trivial_limiter!), typeof(OrdinaryDiffEq.trivial_limiter!), Static.False}, BacksolveAdjoint{0, true, Val{:central}, Nothing}, SVector{4, Float64}, Vector{Float64}, SciMLBase.ReverseDiffOriginator, Tuple{}, Colon, NamedTuple{(), Tuple{}}})(Δ::Matrix{Float64})      
    @ SciMLSensitivity C:\Users\dan_l\.julia\packages\SciMLSensitivity\NhfkF\src\concrete_solve.jl:529   
  [9] actual_adjoint
    @ C:\Users\dan_l\.julia\packages\DiffEqBase\jvL5B\ext\DiffEqBaseReverseDiffExt.jl:134 [inlined]      
 [10] special_reverse_exec!(instruction::ReverseDiff.SpecialInstruction{typeof(DiffEqBase.solve_up), Tuple{ODEProblem{SVector{4, Float64}, Tuple{Float64, Float64}, false, ReverseDiff.TrackedArray{Float64, Float64, 1, Vector{Float64}, Vector{Float64}}, ODEFunction{false, SciMLBase.FullSpecialize, var"#7#8", LinearAlgebra.UniformScaling{Bool}, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED), Nothing, Nothing}, 
Base.Pairs{Symbol, Union{}, Tuple{}, NamedTuple{(), Tuple{}}}, SciMLBase.StandardODEProblem}, BacksolveAdjoint{0, true, Val{:central}, Nothing}, SVector{4, Float64}, ReverseDiff.TrackedArray{Float64, Float64, 1, Vector{Float64}, Vector{Float64}}, Tsit5{typeof(OrdinaryDiffEq.trivial_limiter!), typeof(OrdinaryDiffEq.trivial_limiter!), Static.False}}, ReverseDiff.TrackedArray{Float64, Float64, 2, Matrix{Float64}, Matrix{Float64}}, Tuple{DiffEqBaseReverseDiffExt.var"#actual_adjoint#23"{Tuple{Tsit5{typeof(OrdinaryDiffEq.trivial_limiter!), typeof(OrdinaryDiffEq.trivial_limiter!), Static.False}}, Tuple{ODESolution{Float64, 2, Vector{SVector{4, Float64}}, Nothing, Nothing, Vector{Float64}, Nothing, ODEProblem{SVector{4, Float64}, Tuple{Float64, Float64}, false, Vector{Float64}, ODEFunction{false, SciMLBase.FullSpecialize, var"#7#8", LinearAlgebra.UniformScaling{Bool}, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, 
Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED), Nothing, Nothing}, Base.Pairs{Symbol, Union{}, Tuple{}, NamedTuple{(), Tuple{}}}, SciMLBase.StandardODEProblem}, Tsit5{typeof(OrdinaryDiffEq.trivial_limiter!), typeof(OrdinaryDiffEq.trivial_limiter!), Static.False}, SciMLBase.SensitivityInterpolation{Vector{Float64}, Vector{SVector{4, Float64}}}, DiffEqBase.Stats, Nothing}, SciMLSensitivity.var"#adjoint_sensitivity_backpass#283"{Base.Pairs{Symbol, Union{}, Tuple{}, NamedTuple{(), Tuple{}}}, Tsit5{typeof(OrdinaryDiffEq.trivial_limiter!), typeof(OrdinaryDiffEq.trivial_limiter!), Static.False}, BacksolveAdjoint{0, true, Val{:central}, Nothing}, SVector{4, Float64}, Vector{Float64}, SciMLBase.ReverseDiffOriginator, Tuple{}, Colon, NamedTuple{(), Tuple{}}}}}, DiffEqBaseReverseDiffExt.var"##solve_up#292#21"{DiffEqBaseReverseDiffExt.var"##solve_up#292#20#22"}, NamedTuple{(), Tuple{}}}})
    @ DiffEqBaseReverseDiffExt C:\Users\dan_l\.julia\packages\ReverseDiff\UJhiD\src\macros.jl:216        
 [11] reverse_exec!(instruction::ReverseDiff.SpecialInstruction{typeof(DiffEqBase.solve_up), Tuple{ODEProblem{SVector{4, Float64}, Tuple{Float64, Float64}, false, ReverseDiff.TrackedArray{Float64, Float64, 1, Vector{Float64}, Vector{Float64}}, ODEFunction{false, SciMLBase.FullSpecialize, var"#7#8", LinearAlgebra.UniformScaling{Bool}, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED), Nothing, Nothing}, Base.Pairs{Symbol, Union{}, Tuple{}, NamedTuple{(), Tuple{}}}, SciMLBase.StandardODEProblem}, BacksolveAdjoint{0, true, Val{:central}, Nothing}, SVector{4, Float64}, ReverseDiff.TrackedArray{Float64, Float64, 1, Vector{Float64}, Vector{Float64}}, Tsit5{typeof(OrdinaryDiffEq.trivial_limiter!), typeof(OrdinaryDiffEq.trivial_limiter!), Static.False}}, ReverseDiff.TrackedArray{Float64, Float64, 2, Matrix{Float64}, Matrix{Float64}}, Tuple{DiffEqBaseReverseDiffExt.var"#actual_adjoint#23"{Tuple{Tsit5{typeof(OrdinaryDiffEq.trivial_limiter!), typeof(OrdinaryDiffEq.trivial_limiter!), Static.False}}, Tuple{ODESolution{Float64, 2, Vector{SVector{4, Float64}}, Nothing, Nothing, Vector{Float64}, Nothing, ODEProblem{SVector{4, Float64}, Tuple{Float64, Float64}, false, Vector{Float64}, ODEFunction{false, SciMLBase.FullSpecialize, var"#7#8", LinearAlgebra.UniformScaling{Bool}, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED), Nothing, Nothing}, Base.Pairs{Symbol, Union{}, Tuple{}, NamedTuple{(), Tuple{}}}, SciMLBase.StandardODEProblem}, Tsit5{typeof(OrdinaryDiffEq.trivial_limiter!), typeof(OrdinaryDiffEq.trivial_limiter!), Static.False}, SciMLBase.SensitivityInterpolation{Vector{Float64}, Vector{SVector{4, Float64}}}, DiffEqBase.Stats, Nothing}, SciMLSensitivity.var"#adjoint_sensitivity_backpass#283"{Base.Pairs{Symbol, Union{}, Tuple{}, NamedTuple{(), Tuple{}}}, Tsit5{typeof(OrdinaryDiffEq.trivial_limiter!), typeof(OrdinaryDiffEq.trivial_limiter!), Static.False}, 
BacksolveAdjoint{0, true, Val{:central}, Nothing}, SVector{4, Float64}, Vector{Float64}, SciMLBase.ReverseDiffOriginator, Tuple{}, Colon, NamedTuple{(), Tuple{}}}}}, DiffEqBaseReverseDiffExt.var"##solve_up#292#21"{DiffEqBaseReverseDiffExt.var"##solve_up#292#20#22"}, NamedTuple{(), Tuple{}}}})
    @ ReverseDiff C:\Users\dan_l\.julia\packages\ReverseDiff\UJhiD\src\tape.jl:93
 [12] reverse_pass!(tape::Vector{ReverseDiff.AbstractInstruction})
    @ ReverseDiff C:\Users\dan_l\.julia\packages\ReverseDiff\UJhiD\src\tape.jl:87
 [13] reverse_pass!
    @ C:\Users\dan_l\.julia\packages\ReverseDiff\UJhiD\src\api\tape.jl:36 [inlined]
 [14] seeded_reverse_pass!(result::DiffResults.MutableDiffResult{1, Float64, Tuple{Vector{Float64}}}, output::ReverseDiff.TrackedReal{Float64, Float64, ReverseDiff.TrackedArray{Float64, Float64, 2, Matrix{Float64}, Matrix{Float64}}}, input::ReverseDiff.TrackedArray{Float64, Float64, 1, Vector{Float64}, Vector{Float64}}, tape::ReverseDiff.GradientTape{var"#loss#9"{SVector{4, Float64}, Float64, NamedTuple{(:sensealg,), Tuple{BacksolveAdjoint{0, true, Val{:central}, Nothing}}}}, ReverseDiff.TrackedArray{Float64, Float64, 1, Vector{Float64}, Vector{Float64}}, ReverseDiff.TrackedReal{Float64, Float64, ReverseDiff.TrackedArray{Float64, Float64, 2, Matrix{Float64}, Matrix{Float64}}}})
    @ ReverseDiff C:\Users\dan_l\.julia\packages\ReverseDiff\UJhiD\src\api\utils.jl:31
 [15] seeded_reverse_pass!(result::DiffResults.MutableDiffResult{1, Float64, Tuple{Vector{Float64}}}, t::ReverseDiff.GradientTape{var"#loss#9"{SVector{4, Float64}, Float64, NamedTuple{(:sensealg,), Tuple{BacksolveAdjoint{0, true, Val{:central}, Nothing}}}}, ReverseDiff.TrackedArray{Float64, Float64, 1, Vector{Float64}, Vector{Float64}}, ReverseDiff.TrackedReal{Float64, Float64, ReverseDiff.TrackedArray{Float64, Float64, 2, Matrix{Float64}, Matrix{Float64}}}})
    @ ReverseDiff C:\Users\dan_l\.julia\packages\ReverseDiff\UJhiD\src\api\tape.jl:47
 [16] gradient!(result::DiffResults.MutableDiffResult{1, Float64, Tuple{Vector{Float64}}}, f::Function, input::Vector{Float64}, cfg::ReverseDiff.GradientConfig{ReverseDiff.TrackedArray{Float64, Float64, 1, Vector{Float64}, Vector{Float64}}})
    @ ReverseDiff C:\Users\dan_l\.julia\packages\ReverseDiff\UJhiD\src\api\gradients.jl:42
 [17] autodiff_closure
    @ c:\Users\dan_l\Dropbox (Cambridge University)\shared_Daniel\code\PBCTuning.jl\examples\reversediff_mwe.jl:30 [inlined]
 [18] optimize(u0::SVector{4, Float64}, tspan::Float64, p::Vector{Float64}, solver_options::NamedTuple{(:sensealg,), Tuple{BacksolveAdjoint{0, true, Val{:central}, Nothing}}})
    @ Main c:\Users\dan_l\Dropbox (Cambridge University)\shared_Daniel\code\PBCTuning.jl\examples\reversediff_mwe.jl:39
 [19] top-level scope
    @ c:\Users\dan_l\Dropbox (Cambridge University)\shared_Daniel\code\PBCTuning.jl\examples\reversediff_mwe.jl:51

Sorry, finally got the time to look at this. Reverse mode doesn’t make any sense here since you’re using SVector on small problems. If you need to do that, the system automatically defaults to QuadratureAdjoint, which would be the preferred method to keep the immutability.

using DifferentialEquations
using DiffResults
using ReverseDiff
using SciMLSensitivity
using StaticArrays

function get_ode_function()
    ODEFunction{false}() do x, p, t
        return x .+ p[1]
    end
end

function get_ode_problem(u0, tspan, p)
    f = get_ode_function()
    ODEProblem{false}(f, u0, tspan, p)
end

function get_loss_function(u0, tspan, solver_options)
    function loss(p)
        prob = get_ode_problem(u0, tspan, p)
        sol = solve(prob, Tsit5(); solver_options...)
        sol[end][1]
    end
end

function get_reversediff_closure(f, p)
    diffcfg = ReverseDiff.GradientConfig(p)
    diffresult = DiffResults.GradientResult(p)
    function autodiff_closure(p)
      ReverseDiff.gradient!(diffresult, f, p, diffcfg)
      cost, gradient = DiffResults.value(diffresult), DiffResults.gradient(diffresult)
      cost, gradient
    end
end

function optimize(u0, tspan, p, solver_options)
    f = get_loss_function(u0, tspan, solver_options)
    df = get_reversediff_closure(f, p)
    df(p)
end


u0 = SVector(1.0, 2.0, 3.0, 4.0)
tspan = 5.0
p = [1.0]

solver_options = (sensealg=nothing, )
optimize(u0, tspan, p, solver_options) # not passing a sensealg works fine

solver_options = (sensealg=QuadratureAdjoint(), )
optimize(u0, tspan, p, solver_options) # choosing QuadratureAdjoint is fine

I’m not entirely sure why you set it up to choose BacksolveAdjoint. That is only for cases where you’re hitting a memory limit, and if you’re using StaticArrays for the state then the problem is necessarily small given the compiler limits, so hitting a memory limit is effectively impossible. It’s just a confusing combination that isn’t recommended. I would highly recommend just using what’s chosen as the default here.

But even then, the problem is small enough that forward mode is simply faster, which will be true for almost any problem using StaticArrays.
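
For example, something like the following would compute the gradient in forward mode. This is only a sketch that reuses the loss from your MWE with no sensealg set, not code from the thread:

using ForwardDiff

# Sketch: differentiate the same scalar loss with forward mode.
# No sensealg is needed; ForwardDiff Dual numbers propagate straight through the solver.
f = get_loss_function(u0, tspan, NamedTuple())  # empty solver options
grad = ForwardDiff.gradient(f, p)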

Dear Chris,

Thanks for your help on this issue. With what I’ve learnt from trying things out and making this MWE, I’ve been able to progress a bit.

There is a pattern I would like to ask you about. My problem is something like this:

function odefunc(dx, x, p, t)
    controller = build_controller(p) # This function is expensive
    dx .= dynamics(x, controller, t)
end

My problem is this: to get things working well with autodiff, I had to move build_controller inside the ODE function; otherwise I kept having problems with types when using ReverseDiff.

This is terrible, as build_controller is expensive: it constructs a rigid body mechanism. However, since the parameters don’t change during an ODE solve, I shouldn’t have to call it every time the ODE function is evaluated.

Is there a nice pattern you would recommend to avoid this problem? If you want an MWE, just say MWE and I shall comply!

Just do it inside the optimization loop but not inside the ODE solve: make build_controller output something that can be differentiated, and use that output as the parameters in the ODE.
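
Roughly, the pattern would look like this. It’s only a sketch, using the build_controller and dynamics names from your snippet, and assuming build_controller returns an array of numbers and that u0 and tspan are already in scope:

function odefunc(dx, x, cp, t)
    dx .= dynamics(x, cp, t)   # cp is the precomputed controller output, not the raw p
end

function loss(p)
    cp = build_controller(p)                   # expensive, but now called once per loss evaluation
    prob = ODEProblem(odefunc, u0, tspan, cp)  # the solver differentiates with respect to cp
    sol = solve(prob, Tsit5())
    sol[end][1]
end

ReverseDiff.gradient(loss, p)  # the outer AD chains d(cp)/dp with the solver adjoint d(sol)/d(cp)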