Here is my Lotka-Volterra tutorial code using reversediff:
using Turing
using DifferentialEquations
using StatsPlots
using LinearAlgebra
using Random
using Zygote
using ReverseDiff
using SciMLSensitivity
# Turing.setadbackend(:forwarddiff)
# Turing.setadbackend(:zygote)
Turing.setadbackend(:reversediff)
Random.seed!(14);
# Define Lotka-Volterra model.
function lotka_volterra(du, u, p, t)
# Model parameters.
Ξ±, Ξ², Ξ³, Ξ΄ = p
# Current state.
x, y = u
# Evaluate differential equations.
du[1] = (Ξ± - Ξ² * y) * x # prey
du[2] = (Ξ΄ * x - Ξ³) * y # predator
return nothing
end
# Define initial-value problem.
u0 = [1.0, 1.0]
p = [1.5, 1.0, 3.0, 1.0]
tspan = (0.0, 10.0)
prob = ODEProblem(lotka_volterra, u0, tspan, p)
# Plot simulation.
plot(solve(prob, Tsit5()))
sol = solve(prob, Tsit5(); saveat=0.1)
odedata = Array(sol) + 0.8 * randn(size(Array(sol)))
# Plot simulation and noisy observations.
plot(sol; alpha=0.3)
scatter!(sol.t, odedata'; color=[1 2], label="")
@model function fitlv(data, prob)
# Prior distributions.
Ο ~ InverseGamma(2, 3)
Ξ± ~ truncated(Normal(1.5, 0.5), 0.5, 2.5)
Ξ² ~ truncated(Normal(1.2, 0.5), 0, 2)
Ξ³ ~ truncated(Normal(3.0, 0.5), 1, 4)
Ξ΄ ~ truncated(Normal(1.0, 0.5), 0, 2)
# Simulate Lotka-Volterra model.
p = [Ξ±, Ξ², Ξ³, Ξ΄]
predicted = solve(prob, Tsit5(); p=p, saveat=0.1)
# Observations.
for i in 1:length(predicted)
data[:, i] ~ MvNormal(predicted[i], Ο^2)
end
return nothing
end
model = fitlv(odedata, prob)
# Sample 3 independent chains with forward-mode automatic differentiation (the default).
chain = sample(model, NUTS(0.65), MCMCSerial(), 1000, 3; progress=false)
And the error that I get:
ERROR: MethodError: promote_rule(::Type{ForwardDiff.Dual{ForwardDiff.Tag{DiffEqBase.OrdinaryDiffEqTag, ReverseDiff.TrackedReal{Float64, Float64, Nothing}}, ReverseDiff.TrackedReal{Float64, Float64, Nothing}, 1}}, ::Type{ReverseDiff.TrackedReal{Float64, Float64, Nothing}}) is ambiguous. Candidates:
promote_rule(::Type{R}, ::Type{ReverseDiff.TrackedReal{V, D, O}}) where {R<:Real, V, D, O} in ReverseDiff at C:\Users\Brendan\.julia\packages\ReverseDiff\5MMPp\src\tracked.jl:276
promote_rule(::Type{ForwardDiff.Dual{T, V, N}}, ::Type{R}) where {T, V, N, R<:Real} in ForwardDiff at C:\Users\Brendan\.julia\packages\ForwardDiff\pDtsf\src\dual.jl:426
Possible fix, define
promote_rule(::Type{ForwardDiff.Dual{T, V, N}}, ::Type{ReverseDiff.TrackedReal{V, D, O}}) where {T, V, N, V, D, O}
Stacktrace:
[1] promote_type(#unused#::Type{ReverseDiff.TrackedReal{Float64, Float64, Nothing}}, #unused#::Type{ForwardDiff.Dual{ForwardDiff.Tag{DiffEqBase.OrdinaryDiffEqTag, ReverseDiff.TrackedReal{Float64, Float64, Nothing}}, ReverseDiff.TrackedReal{Float64, Float64, Nothing}, 1}})
@ Base .\promotion.jl:298
[2] promote_eltype(#unused#::Type{Vector{ReverseDiff.TrackedReal{Float64, Float64, Nothing}}}, #unused#::Type{ForwardDiff.Dual{ForwardDiff.Tag{DiffEqBase.OrdinaryDiffEqTag, ReverseDiff.TrackedReal{Float64, Float64, Nothing}}, ReverseDiff.TrackedReal{Float64, Float64, Nothing}, 1}})
@ ArrayInterfaceCore C:\Users\Brendan\.julia\packages\ArrayInterfaceCore\j22dF\src\ArrayInterfaceCore.jl:143
[3] wrapfun_iip(ff::Function, inputs::Tuple{Vector{ReverseDiff.TrackedReal{Float64, Float64, Nothing}}, Vector{ReverseDiff.TrackedReal{Float64, Float64, Nothing}}, Vector{ReverseDiff.TrackedReal{Float64, Float64, Nothing}}, Float64})
@ DiffEqBase C:\Users\Brendan\.julia\packages\DiffEqBase\5rKYk\src\norecompile.jl:58
[4] promote_f(f::ODEFunction{true, SciMLBase.AutoSpecialize, typeof(lotka_volterra), UniformScaling{Bool}, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED), Nothing, Nothing}, #unused#::Val{SciMLBase.AutoSpecialize}, u0::Vector{ReverseDiff.TrackedReal{Float64, Float64, Nothing}}, p::Vector{ReverseDiff.TrackedReal{Float64, Float64, Nothing}}, t::Float64)
@ DiffEqBase C:\Users\Brendan\.julia\packages\DiffEqBase\5rKYk\src\solve.jl:992
[5] get_concrete_problem(prob::ODEProblem{Vector{Float64}, Tuple{Float64, Float64}, true, Vector{Float64}, ODEFunction{true, SciMLBase.AutoSpecialize, typeof(lotka_volterra), UniformScaling{Bool}, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED), Nothing, Nothing}, Base.Pairs{Symbol, Union{}, Tuple{}, NamedTuple{(), Tuple{}}}, SciMLBase.StandardODEProblem}, isadapt::Bool; kwargs::Base.Pairs{Symbol, Any, Tuple{Symbol, Symbol, Symbol}, NamedTuple{(:u0, :p, :saveat), Tuple{Vector{Float64}, Vector{ReverseDiff.TrackedReal{Float64, Float64, Nothing}}, Float64}}})
@ DiffEqBase C:\Users\Brendan\.julia\packages\DiffEqBase\5rKYk\src\solve.jl:916
[6] solve_up(prob::ODEProblem{Vector{Float64}, Tuple{Float64, Float64}, true, Vector{Float64}, ODEFunction{true, SciMLBase.AutoSpecialize, typeof(lotka_volterra), UniformScaling{Bool}, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED), Nothing, Nothing}, Base.Pairs{Symbol, Union{}, Tuple{}, NamedTuple{(), Tuple{}}}, SciMLBase.StandardODEProblem}, sensealg::Nothing, u0::Vector{Float64}, p::Vector{ReverseDiff.TrackedReal{Float64, Float64, Nothing}}, args::Tsit5{typeof(OrdinaryDiffEq.trivial_limiter!), typeof(OrdinaryDiffEq.trivial_limiter!), Static.False}; kwargs::Base.Pairs{Symbol, Float64, Tuple{Symbol}, NamedTuple{(:saveat,), Tuple{Float64}}})
@ DiffEqBase C:\Users\Brendan\.julia\packages\DiffEqBase\5rKYk\src\solve.jl:831
[7] solve(prob::ODEProblem{Vector{Float64}, Tuple{Float64, Float64}, true, Vector{Float64}, ODEFunction{true, SciMLBase.AutoSpecialize, typeof(lotka_volterra), UniformScaling{Bool}, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED), Nothing, Nothing}, Base.Pairs{Symbol, Union{}, Tuple{}, NamedTuple{(), Tuple{}}}, SciMLBase.StandardODEProblem}, args::Tsit5{typeof(OrdinaryDiffEq.trivial_limiter!), typeof(OrdinaryDiffEq.trivial_limiter!), Static.False}; sensealg::Nothing, u0::Nothing, p::Vector{ReverseDiff.TrackedReal{Float64, Float64, Nothing}}, wrap::Val{true}, kwargs::Base.Pairs{Symbol, Float64, Tuple{Symbol}, NamedTuple{(:saveat,), Tuple{Float64}}})
@ DiffEqBase C:\Users\Brendan\.julia\packages\DiffEqBase\5rKYk\src\solve.jl:801
[8] fitlv(__model__::DynamicPPL.Model{typeof(fitlv), (:data, :prob), (), (), Tuple{Matrix{Float64}, ODEProblem{Vector{Float64}, Tuple{Float64, Float64}, true, Vector{Float64}, ODEFunction{true, SciMLBase.AutoSpecialize, typeof(lotka_volterra), UniformScaling{Bool}, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED), Nothing, Nothing}, Base.Pairs{Symbol, Union{}, Tuple{}, NamedTuple{(), Tuple{}}}, SciMLBase.StandardODEProblem}}, Tuple{}, DynamicPPL.DefaultContext}, __varinfo__::DynamicPPL.TypedVarInfo{NamedTuple{(:Ο, :Ξ±, :Ξ², :Ξ³, :Ξ΄), Tuple{DynamicPPL.Metadata{Dict{AbstractPPL.VarName{:Ο, Setfield.IdentityLens}, Int64}, Vector{InverseGamma{Float64}}, Vector{AbstractPPL.VarName{:Ο, Setfield.IdentityLens}}, ReverseDiff.TrackedArray{Float64, Float64, 1, Vector{Float64}, Vector{Float64}}, Vector{Set{DynamicPPL.Selector}}}, DynamicPPL.Metadata{Dict{AbstractPPL.VarName{:Ξ±, Setfield.IdentityLens}, Int64}, Vector{Truncated{Normal{Float64}, Continuous, Float64}}, Vector{AbstractPPL.VarName{:Ξ±, Setfield.IdentityLens}}, ReverseDiff.TrackedArray{Float64, Float64, 1, Vector{Float64}, Vector{Float64}}, Vector{Set{DynamicPPL.Selector}}}, DynamicPPL.Metadata{Dict{AbstractPPL.VarName{:Ξ², Setfield.IdentityLens}, Int64}, Vector{Truncated{Normal{Float64}, Continuous, Float64}}, Vector{AbstractPPL.VarName{:Ξ², Setfield.IdentityLens}}, ReverseDiff.TrackedArray{Float64, Float64, 1, Vector{Float64}, Vector{Float64}}, Vector{Set{DynamicPPL.Selector}}}, DynamicPPL.Metadata{Dict{AbstractPPL.VarName{:Ξ³, Setfield.IdentityLens}, Int64}, Vector{Truncated{Normal{Float64}, Continuous, Float64}}, Vector{AbstractPPL.VarName{:Ξ³, Setfield.IdentityLens}}, ReverseDiff.TrackedArray{Float64, Float64, 1, Vector{Float64}, Vector{Float64}}, Vector{Set{DynamicPPL.Selector}}}, DynamicPPL.Metadata{Dict{AbstractPPL.VarName{:Ξ΄, Setfield.IdentityLens}, Int64}, Vector{Truncated{Normal{Float64}, Continuous, Float64}}, Vector{AbstractPPL.VarName{:Ξ΄, Setfield.IdentityLens}}, ReverseDiff.TrackedArray{Float64, Float64, 1, Vector{Float64}, Vector{Float64}}, Vector{Set{DynamicPPL.Selector}}}}}, ReverseDiff.TrackedReal{Float64, Float64, ReverseDiff.TrackedArray{Float64, Float64, 1, Vector{Float64}, Vector{Float64}}}}, __context__::DynamicPPL.SamplingContext{DynamicPPL.Sampler{NUTS{Turing.Essential.ReverseDiffAD{false}, (), AdvancedHMC.DiagEuclideanMetric}}, DynamicPPL.DefaultContext, Random._GLOBAL_RNG}, data::Matrix{Float64}, prob::ODEProblem{Vector{Float64}, Tuple{Float64, Float64}, true, Vector{Float64}, ODEFunction{true, SciMLBase.AutoSpecialize, typeof(lotka_volterra), UniformScaling{Bool}, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED), Nothing, Nothing}, Base.Pairs{Symbol, Union{}, Tuple{}, NamedTuple{(), Tuple{}}}, SciMLBase.StandardODEProblem})
@ Main .\REPL[26]:11
[9] macro expansion
@ C:\Users\Brendan\.julia\packages\DynamicPPL\zPOYL\src\model.jl:593 [inlined]
[10] _evaluate!!
@ C:\Users\Brendan\.julia\packages\DynamicPPL\zPOYL\src\model.jl:576 [inlined]
[11] evaluate_threadunsafe!!
@ C:\Users\Brendan\.julia\packages\DynamicPPL\zPOYL\src\model.jl:551 [inlined]
[12] evaluate!!
@ C:\Users\Brendan\.julia\packages\DynamicPPL\zPOYL\src\model.jl:504 [inlined]
[13] evaluate!!
@ C:\Users\Brendan\.julia\packages\DynamicPPL\zPOYL\src\model.jl:515 [inlined]
[14] evaluate!!
@ C:\Users\Brendan\.julia\packages\DynamicPPL\zPOYL\src\model.jl:523 [inlined]
[15] LogDensityFunction
@ C:\Users\Brendan\.julia\packages\Turing\szPqN\src\Turing.jl:38 [inlined]
[16] logdensity
@ C:\Users\Brendan\.julia\packages\Turing\szPqN\src\Turing.jl:42 [inlined]
[17] Fix1
@ .\operators.jl:1096 [inlined]
[18] ReverseDiff.GradientTape(f::Base.Fix1{typeof(LogDensityProblems.logdensity), Turing.LogDensityFunction{DynamicPPL.TypedVarInfo{NamedTuple{(:Ο, :Ξ±, :Ξ², :Ξ³, :Ξ΄), Tuple{DynamicPPL.Metadata{Dict{AbstractPPL.VarName{:Ο, Setfield.IdentityLens}, Int64}, Vector{InverseGamma{Float64}}, Vector{AbstractPPL.VarName{:Ο, Setfield.IdentityLens}}, Vector{Float64}, Vector{Set{DynamicPPL.Selector}}}, DynamicPPL.Metadata{Dict{AbstractPPL.VarName{:Ξ±, Setfield.IdentityLens}, Int64}, Vector{Truncated{Normal{Float64}, Continuous, Float64}}, Vector{AbstractPPL.VarName{:Ξ±, Setfield.IdentityLens}}, Vector{Float64}, Vector{Set{DynamicPPL.Selector}}}, DynamicPPL.Metadata{Dict{AbstractPPL.VarName{:Ξ², Setfield.IdentityLens}, Int64}, Vector{Truncated{Normal{Float64}, Continuous, Float64}}, Vector{AbstractPPL.VarName{:Ξ², Setfield.IdentityLens}}, Vector{Float64}, Vector{Set{DynamicPPL.Selector}}}, DynamicPPL.Metadata{Dict{AbstractPPL.VarName{:Ξ³, Setfield.IdentityLens}, Int64}, Vector{Truncated{Normal{Float64}, Continuous, Float64}}, Vector{AbstractPPL.VarName{:Ξ³, Setfield.IdentityLens}}, Vector{Float64}, Vector{Set{DynamicPPL.Selector}}}, DynamicPPL.Metadata{Dict{AbstractPPL.VarName{:Ξ΄, Setfield.IdentityLens}, Int64}, Vector{Truncated{Normal{Float64}, Continuous, Float64}}, Vector{AbstractPPL.VarName{:Ξ΄, Setfield.IdentityLens}}, Vector{Float64}, Vector{Set{DynamicPPL.Selector}}}}}, Float64}, DynamicPPL.Model{typeof(fitlv), (:data, :prob), (), (), Tuple{Matrix{Float64}, ODEProblem{Vector{Float64}, Tuple{Float64, Float64}, true, Vector{Float64}, ODEFunction{true, SciMLBase.AutoSpecialize, typeof(lotka_volterra), UniformScaling{Bool}, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED), Nothing, Nothing}, Base.Pairs{Symbol, Union{}, Tuple{}, NamedTuple{(), Tuple{}}}, SciMLBase.StandardODEProblem}}, Tuple{}, DynamicPPL.DefaultContext}, DynamicPPL.Sampler{NUTS{Turing.Essential.ReverseDiffAD{false}, (), AdvancedHMC.DiagEuclideanMetric}}, DynamicPPL.DefaultContext}}, input::Vector{Float64}, cfg::ReverseDiff.GradientConfig{ReverseDiff.TrackedArray{Float64, Float64, 1, Vector{Float64}, Vector{Float64}}})
@ ReverseDiff C:\Users\Brendan\.julia\packages\ReverseDiff\5MMPp\src\api\tape.jl:199
[19] gradient!(result::DiffResults.MutableDiffResult{1, Float64, Tuple{Vector{Float64}}}, f::Function, input::Vector{Float64}, cfg::ReverseDiff.GradientConfig{ReverseDiff.TrackedArray{Float64, Float64, 1, Vector{Float64}, Vector{Float64}}}) (repeats 2 times)
@ ReverseDiff C:\Users\Brendan\.julia\packages\ReverseDiff\5MMPp\src\api\gradients.jl:41
[20] logdensity_and_gradient
@ C:\Users\Brendan\.julia\packages\LogDensityProblems\oAYeE\src\AD_ReverseDiff.jl:55 [inlined]
[21] βlogΟβΞΈ
@ C:\Users\Brendan\.julia\packages\Turing\szPqN\src\inference\hmc.jl:166 [inlined]
[22] βHβΞΈ
@ C:\Users\Brendan\.julia\packages\AdvancedHMC\iWHPQ\src\hamiltonian.jl:31 [inlined]
[23] phasepoint(h::AdvancedHMC.Hamiltonian{AdvancedHMC.DiagEuclideanMetric{Float64, Vector{Float64}}, Base.Fix1{typeof(LogDensityProblems.logdensity), LogDensityProblems.ReverseDiffLogDensity{Turing.LogDensityFunction{DynamicPPL.TypedVarInfo{NamedTuple{(:Ο, :Ξ±, :Ξ², :Ξ³, :Ξ΄), Tuple{DynamicPPL.Metadata{Dict{AbstractPPL.VarName{:Ο, Setfield.IdentityLens}, Int64}, Vector{InverseGamma{Float64}}, Vector{AbstractPPL.VarName{:Ο, Setfield.IdentityLens}}, Vector{Float64}, Vector{Set{DynamicPPL.Selector}}}, DynamicPPL.Metadata{Dict{AbstractPPL.VarName{:Ξ±, Setfield.IdentityLens}, Int64}, Vector{Truncated{Normal{Float64}, Continuous, Float64}}, Vector{AbstractPPL.VarName{:Ξ±, Setfield.IdentityLens}}, Vector{Float64}, Vector{Set{DynamicPPL.Selector}}}, DynamicPPL.Metadata{Dict{AbstractPPL.VarName{:Ξ², Setfield.IdentityLens}, Int64}, Vector{Truncated{Normal{Float64}, Continuous, Float64}}, Vector{AbstractPPL.VarName{:Ξ², Setfield.IdentityLens}}, Vector{Float64}, Vector{Set{DynamicPPL.Selector}}}, DynamicPPL.Metadata{Dict{AbstractPPL.VarName{:Ξ³, Setfield.IdentityLens}, Int64}, Vector{Truncated{Normal{Float64}, Continuous, Float64}}, Vector{AbstractPPL.VarName{:Ξ³, Setfield.IdentityLens}}, Vector{Float64}, Vector{Set{DynamicPPL.Selector}}}, DynamicPPL.Metadata{Dict{AbstractPPL.VarName{:Ξ΄, Setfield.IdentityLens}, Int64}, Vector{Truncated{Normal{Float64}, Continuous, Float64}}, Vector{AbstractPPL.VarName{:Ξ΄, Setfield.IdentityLens}}, Vector{Float64}, Vector{Set{DynamicPPL.Selector}}}}}, Float64}, DynamicPPL.Model{typeof(fitlv), (:data, :prob), (), (), Tuple{Matrix{Float64}, ODEProblem{Vector{Float64}, Tuple{Float64, Float64}, true, Vector{Float64}, ODEFunction{true, SciMLBase.AutoSpecialize, typeof(lotka_volterra), UniformScaling{Bool}, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED), Nothing, Nothing}, Base.Pairs{Symbol, Union{}, Tuple{}, NamedTuple{(), Tuple{}}}, SciMLBase.StandardODEProblem}}, Tuple{}, DynamicPPL.DefaultContext}, DynamicPPL.Sampler{NUTS{Turing.Essential.ReverseDiffAD{false}, (), AdvancedHMC.DiagEuclideanMetric}}, DynamicPPL.DefaultContext}, Nothing}}, Turing.Inference.var"#βlogΟβΞΈ#44"{LogDensityProblems.ReverseDiffLogDensity{Turing.LogDensityFunction{DynamicPPL.TypedVarInfo{NamedTuple{(:Ο, :Ξ±, :Ξ², :Ξ³, :Ξ΄), Tuple{DynamicPPL.Metadata{Dict{AbstractPPL.VarName{:Ο, Setfield.IdentityLens}, Int64}, Vector{InverseGamma{Float64}}, Vector{AbstractPPL.VarName{:Ο, Setfield.IdentityLens}}, Vector{Float64}, Vector{Set{DynamicPPL.Selector}}}, DynamicPPL.Metadata{Dict{AbstractPPL.VarName{:Ξ±, Setfield.IdentityLens}, Int64}, Vector{Truncated{Normal{Float64}, Continuous, Float64}}, Vector{AbstractPPL.VarName{:Ξ±, Setfield.IdentityLens}}, Vector{Float64}, Vector{Set{DynamicPPL.Selector}}}, DynamicPPL.Metadata{Dict{AbstractPPL.VarName{:Ξ², Setfield.IdentityLens}, Int64}, Vector{Truncated{Normal{Float64}, Continuous, Float64}}, Vector{AbstractPPL.VarName{:Ξ², Setfield.IdentityLens}}, Vector{Float64}, Vector{Set{DynamicPPL.Selector}}}, DynamicPPL.Metadata{Dict{AbstractPPL.VarName{:Ξ³, Setfield.IdentityLens}, Int64}, Vector{Truncated{Normal{Float64}, Continuous, Float64}}, Vector{AbstractPPL.VarName{:Ξ³, Setfield.IdentityLens}}, Vector{Float64}, Vector{Set{DynamicPPL.Selector}}}, DynamicPPL.Metadata{Dict{AbstractPPL.VarName{:Ξ΄, Setfield.IdentityLens}, Int64}, Vector{Truncated{Normal{Float64}, Continuous, Float64}}, Vector{AbstractPPL.VarName{:Ξ΄, Setfield.IdentityLens}}, Vector{Float64}, Vector{Set{DynamicPPL.Selector}}}}}, Float64}, DynamicPPL.Model{typeof(fitlv), (:data, :prob), (), (), Tuple{Matrix{Float64}, ODEProblem{Vector{Float64}, Tuple{Float64, Float64}, true, Vector{Float64}, ODEFunction{true, SciMLBase.AutoSpecialize, typeof(lotka_volterra), UniformScaling{Bool}, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED), Nothing, Nothing}, Base.Pairs{Symbol, Union{}, Tuple{}, NamedTuple{(), Tuple{}}}, SciMLBase.StandardODEProblem}}, Tuple{}, DynamicPPL.DefaultContext}, DynamicPPL.Sampler{NUTS{Turing.Essential.ReverseDiffAD{false}, (), AdvancedHMC.DiagEuclideanMetric}}, DynamicPPL.DefaultContext}, Nothing}}}, ΞΈ::Vector{Float64}, r::Vector{Float64})
@ AdvancedHMC C:\Users\Brendan\.julia\packages\AdvancedHMC\iWHPQ\src\hamiltonian.jl:76
[24] phasepoint
@ C:\Users\Brendan\.julia\packages\AdvancedHMC\iWHPQ\src\hamiltonian.jl:153 [inlined]
[25] initialstep(rng::Random._GLOBAL_RNG, model::DynamicPPL.Model{typeof(fitlv), (:data, :prob), (), (), Tuple{Matrix{Float64}, ODEProblem{Vector{Float64}, Tuple{Float64, Float64}, true, Vector{Float64}, ODEFunction{true, SciMLBase.AutoSpecialize, typeof(lotka_volterra), UniformScaling{Bool}, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED), Nothing, Nothing}, Base.Pairs{Symbol, Union{}, Tuple{}, NamedTuple{(), Tuple{}}}, SciMLBase.StandardODEProblem}}, Tuple{}, DynamicPPL.DefaultContext}, spl::DynamicPPL.Sampler{NUTS{Turing.Essential.ReverseDiffAD{false}, (), AdvancedHMC.DiagEuclideanMetric}}, vi::DynamicPPL.TypedVarInfo{NamedTuple{(:Ο, :Ξ±, :Ξ², :Ξ³, :Ξ΄), Tuple{DynamicPPL.Metadata{Dict{AbstractPPL.VarName{:Ο, Setfield.IdentityLens}, Int64}, Vector{InverseGamma{Float64}}, Vector{AbstractPPL.VarName{:Ο, Setfield.IdentityLens}}, Vector{Float64}, Vector{Set{DynamicPPL.Selector}}}, DynamicPPL.Metadata{Dict{AbstractPPL.VarName{:Ξ±, Setfield.IdentityLens}, Int64}, Vector{Truncated{Normal{Float64}, Continuous, Float64}}, Vector{AbstractPPL.VarName{:Ξ±, Setfield.IdentityLens}}, Vector{Float64}, Vector{Set{DynamicPPL.Selector}}}, DynamicPPL.Metadata{Dict{AbstractPPL.VarName{:Ξ², Setfield.IdentityLens}, Int64}, Vector{Truncated{Normal{Float64}, Continuous, Float64}}, Vector{AbstractPPL.VarName{:Ξ², Setfield.IdentityLens}}, Vector{Float64}, Vector{Set{DynamicPPL.Selector}}}, DynamicPPL.Metadata{Dict{AbstractPPL.VarName{:Ξ³, Setfield.IdentityLens}, Int64}, Vector{Truncated{Normal{Float64}, Continuous, Float64}}, Vector{AbstractPPL.VarName{:Ξ³, Setfield.IdentityLens}}, Vector{Float64}, Vector{Set{DynamicPPL.Selector}}}, DynamicPPL.Metadata{Dict{AbstractPPL.VarName{:Ξ΄, Setfield.IdentityLens}, Int64}, Vector{Truncated{Normal{Float64}, Continuous, Float64}}, Vector{AbstractPPL.VarName{:Ξ΄, Setfield.IdentityLens}}, Vector{Float64}, Vector{Set{DynamicPPL.Selector}}}}}, Float64}; init_params::Nothing, nadapts::Int64, kwargs::Base.Pairs{Symbol, Union{}, Tuple{}, NamedTuple{(), Tuple{}}})
@ Turing.Inference C:\Users\Brendan\.julia\packages\Turing\szPqN\src\inference\hmc.jl:170
[26] step(rng::Random._GLOBAL_RNG, model::DynamicPPL.Model{typeof(fitlv), (:data, :prob), (), (), Tuple{Matrix{Float64}, ODEProblem{Vector{Float64}, Tuple{Float64, Float64}, true, Vector{Float64}, ODEFunction{true, SciMLBase.AutoSpecialize, typeof(lotka_volterra), UniformScaling{Bool}, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED), Nothing, Nothing}, Base.Pairs{Symbol, Union{}, Tuple{}, NamedTuple{(), Tuple{}}}, SciMLBase.StandardODEProblem}}, Tuple{}, DynamicPPL.DefaultContext}, spl::DynamicPPL.Sampler{NUTS{Turing.Essential.ReverseDiffAD{false}, (), AdvancedHMC.DiagEuclideanMetric}}; resume_from::Nothing, init_params::Nothing, kwargs::Base.Pairs{Symbol, Int64, Tuple{Symbol}, NamedTuple{(:nadapts,), Tuple{Int64}}})
@ DynamicPPL C:\Users\Brendan\.julia\packages\DynamicPPL\zPOYL\src\sampler.jl:104
[27] macro expansion
@ C:\Users\Brendan\.julia\packages\AbstractMCMC\fnRmh\src\sample.jl:120 [inlined]
[28] macro expansion
@ C:\Users\Brendan\.julia\packages\AbstractMCMC\fnRmh\src\logging.jl:16 [inlined]
[29] mcmcsample(rng::Random._GLOBAL_RNG, model::DynamicPPL.Model{typeof(fitlv), (:data, :prob), (), (), Tuple{Matrix{Float64}, ODEProblem{Vector{Float64}, Tuple{Float64, Float64}, true, Vector{Float64}, ODEFunction{true, SciMLBase.AutoSpecialize, typeof(lotka_volterra), UniformScaling{Bool}, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED), Nothing, Nothing}, Base.Pairs{Symbol, Union{}, Tuple{}, NamedTuple{(), Tuple{}}}, SciMLBase.StandardODEProblem}}, Tuple{}, DynamicPPL.DefaultContext}, sampler::DynamicPPL.Sampler{NUTS{Turing.Essential.ReverseDiffAD{false}, (), AdvancedHMC.DiagEuclideanMetric}}, N::Int64; progress::Bool, progressname::String, callback::Nothing, discard_initial::Int64, thinning::Int64, chain_type::Type, kwargs::Base.Pairs{Symbol, Union{Nothing, Int64}, Tuple{Symbol, Symbol}, NamedTuple{(:nadapts, :init_params), Tuple{Int64, Nothing}}})
@ AbstractMCMC C:\Users\Brendan\.julia\packages\AbstractMCMC\fnRmh\src\sample.jl:111
[30] #sample#42
@ C:\Users\Brendan\.julia\packages\Turing\szPqN\src\inference\hmc.jl:133 [inlined]
[31] (::AbstractMCMC.var"#sample_chain#78"{String, Base.Pairs{Symbol, Any, Tuple{Symbol, Symbol}, NamedTuple{(:chain_type, :progress), Tuple{UnionAll, Bool}}}, Random._GLOBAL_RNG, DynamicPPL.Model{typeof(fitlv), (:data, :prob), (), (), Tuple{Matrix{Float64}, ODEProblem{Vector{Float64}, Tuple{Float64, Float64}, true, Vector{Float64}, ODEFunction{true, SciMLBase.AutoSpecialize, typeof(lotka_volterra), UniformScaling{Bool}, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED), Nothing, Nothing}, Base.Pairs{Symbol, Union{}, Tuple{}, NamedTuple{(), Tuple{}}}, SciMLBase.StandardODEProblem}}, Tuple{}, DynamicPPL.DefaultContext}, DynamicPPL.Sampler{NUTS{Turing.Essential.ReverseDiffAD{false}, (), AdvancedHMC.DiagEuclideanMetric}}, Int64, Int64})(i::Int64, seed::UInt64, init_params::Nothing)
@ AbstractMCMC C:\Users\Brendan\.julia\packages\AbstractMCMC\fnRmh\src\sample.jl:506
[32] sample_chain
@ C:\Users\Brendan\.julia\packages\AbstractMCMC\fnRmh\src\sample.jl:503 [inlined]
[33] #4
@ .\generator.jl:36 [inlined]
[34] iterate
@ .\generator.jl:47 [inlined]
[35] collect(itr::Base.Generator{Base.Iterators.Zip{Tuple{UnitRange{Int64}, Vector{UInt64}}}, Base.var"#4#5"{AbstractMCMC.var"#sample_chain#78"{String, Base.Pairs{Symbol, Any, Tuple{Symbol, Symbol}, NamedTuple{(:chain_type, :progress), Tuple{UnionAll, Bool}}}, Random._GLOBAL_RNG, DynamicPPL.Model{typeof(fitlv), (:data, :prob), (), (), Tuple{Matrix{Float64}, ODEProblem{Vector{Float64}, Tuple{Float64, Float64}, true, Vector{Float64}, ODEFunction{true, SciMLBase.AutoSpecialize, typeof(lotka_volterra), UniformScaling{Bool}, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED), Nothing, Nothing}, Base.Pairs{Symbol, Union{}, Tuple{}, NamedTuple{(), Tuple{}}}, SciMLBase.StandardODEProblem}}, Tuple{}, DynamicPPL.DefaultContext}, DynamicPPL.Sampler{NUTS{Turing.Essential.ReverseDiffAD{false}, (), AdvancedHMC.DiagEuclideanMetric}}, Int64, Int64}}})
@ Base .\array.jl:787
[36] map
@ .\abstractarray.jl:3055 [inlined]
[37] mcmcsample(rng::Random._GLOBAL_RNG, model::DynamicPPL.Model{typeof(fitlv), (:data, :prob), (), (), Tuple{Matrix{Float64}, ODEProblem{Vector{Float64}, Tuple{Float64, Float64}, true, Vector{Float64}, ODEFunction{true, SciMLBase.AutoSpecialize, typeof(lotka_volterra), UniformScaling{Bool}, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED), Nothing, Nothing}, Base.Pairs{Symbol, Union{}, Tuple{}, NamedTuple{(), Tuple{}}}, SciMLBase.StandardODEProblem}}, Tuple{}, DynamicPPL.DefaultContext}, sampler::DynamicPPL.Sampler{NUTS{Turing.Essential.ReverseDiffAD{false}, (), AdvancedHMC.DiagEuclideanMetric}}, ::MCMCSerial, N::Int64, nchains::Int64; progressname::String, init_params::Nothing, kwargs::Base.Pairs{Symbol, Any, Tuple{Symbol, Symbol}, NamedTuple{(:chain_type, :progress), Tuple{UnionAll, Bool}}})
@ AbstractMCMC C:\Users\Brendan\.julia\packages\AbstractMCMC\fnRmh\src\sample.jl:518
[38] sample(rng::Random._GLOBAL_RNG, model::DynamicPPL.Model{typeof(fitlv), (:data, :prob), (), (), Tuple{Matrix{Float64}, ODEProblem{Vector{Float64}, Tuple{Float64, Float64}, true, Vector{Float64}, ODEFunction{true, SciMLBase.AutoSpecialize, typeof(lotka_volterra), UniformScaling{Bool}, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED), Nothing, Nothing}, Base.Pairs{Symbol, Union{}, Tuple{}, NamedTuple{(), Tuple{}}}, SciMLBase.StandardODEProblem}}, Tuple{}, DynamicPPL.DefaultContext}, sampler::DynamicPPL.Sampler{NUTS{Turing.Essential.ReverseDiffAD{false}, (), AdvancedHMC.DiagEuclideanMetric}}, ensemble::MCMCSerial, N::Int64, n_chains::Int64; chain_type::Type, progress::Bool, kwargs::Base.Pairs{Symbol, Union{}, Tuple{}, NamedTuple{(), Tuple{}}})
@ Turing.Inference C:\Users\Brendan\.julia\packages\Turing\szPqN\src\inference\Inference.jl:220
[39] #sample#6
@ C:\Users\Brendan\.julia\packages\Turing\szPqN\src\inference\Inference.jl:205 [inlined]
[40] #sample#5
@ C:\Users\Brendan\.julia\packages\Turing\szPqN\src\inference\Inference.jl:192 [inlined]
[41] top-level scope
@ REPL[29]:1
The tutorial does work with the Zygote/SciMLSensitivity backend:
using Turing
using DifferentialEquations
using StatsPlots
using LinearAlgebra
using Random
using Zygote
using ReverseDiff
using SciMLSensitivity
# Turing.setadbackend(:forwarddiff)
Turing.setadbackend(:zygote)
# Turing.setadbackend(:reversediff)
Random.seed!(14);
# Define Lotka-Volterra model.
function lotka_volterra(du, u, p, t)
# Model parameters.
Ξ±, Ξ², Ξ³, Ξ΄ = p
# Current state.
x, y = u
# Evaluate differential equations.
du[1] = (Ξ± - Ξ² * y) * x # prey
du[2] = (Ξ΄ * x - Ξ³) * y # predator
return nothing
end
# Define initial-value problem.
u0 = [1.0, 1.0]
p = [1.5, 1.0, 3.0, 1.0]
tspan = (0.0, 10.0)
prob = ODEProblem(lotka_volterra, u0, tspan, p)
# Plot simulation.
plot(solve(prob, Tsit5()))
sol = solve(prob, Tsit5(); saveat=0.1)
odedata = Array(sol) + 0.8 * randn(size(Array(sol)))
# Plot simulation and noisy observations.
plot(sol; alpha=0.3)
scatter!(sol.t, odedata'; color=[1 2], label="")
@model function fitlv(data, prob)
# Prior distributions.
Ο ~ InverseGamma(2, 3)
Ξ± ~ truncated(Normal(1.5, 0.5), 0.5, 2.5)
Ξ² ~ truncated(Normal(1.2, 0.5), 0, 2)
Ξ³ ~ truncated(Normal(3.0, 0.5), 1, 4)
Ξ΄ ~ truncated(Normal(1.0, 0.5), 0, 2)
# Simulate Lotka-Volterra model.
p = [Ξ±, Ξ², Ξ³, Ξ΄]
predicted = solve(prob, Tsit5(); p=p, saveat=0.1)
# Observations.
for i in 1:length(predicted)
data[:, i] ~ MvNormal(predicted[i], Ο^2)
end
return nothing
end
model = fitlv(odedata, prob)
# Sample 3 independent chains with forward-mode automatic differentiation (the default).
chain = sample(model, NUTS(0.65), MCMCSerial(), 1000, 3; progress=false)
(The sampler on this one looks fine β Iβm not running it to completion but I have before).
For my code, I shared the error I got with reversediff before. Here is my code using Zygote/SciMLSensitivity:
using Turing
using DifferentialEquations
using Plots
using Random
using BenchmarkTools
using Zygote
using SciMLSensitivity
Turing.setadbackend(:zygote)
Random.seed!(10);
const Nr = 50
const Nc = 100
true_R = rand(Beta(2,3), (Nr, Nc))
true_K = true_R .* 30.0
true_d = 5.0
initial_pop = zeros(Nr, Nc)
initial_pop[1:20,end-30:end] = true_K[1:20,end-30:end]
tspan=(0.0, 25.0)
p = (true_d, true_R, true_K)
function fast_react_diffuse!(du,u,p,t)
D, R, K = p
@inbounds for j in 2:Nc-1, i in 2:Nr-1
du[i,j] = D*(u[i-1,j] + u[i+1,j] + u[i,j+1] + u[i,j-1] - 4u[i,j]) +
R[i,j]*u[i,j]*(1 - u[i,j]/K[i,j])
end
@inbounds for j in 2:Nc-1
i = 1
du[1,j] = D*(2u[i+1,j] + u[i,j+1] + u[i,j-1] - 4u[i,j]) +
R[i,j]*u[i,j]*(1 - u[i,j]/K[i,j])
end
@inbounds for j in 2:Nc-1
i = Nr
du[end,j] = D*(2u[i-1,j] + u[i,j+1] + u[i,j-1] - 4u[i,j]) +
R[i,j]*u[i,j]*(1 - u[i,j]/K[i,j])
end
@inbounds for i in 2:Nr-1
j = 1
du[i,1] = D*(u[i-1,j] + u[i+1,j] + 2u[i,j+1] - 4u[i,j]) +
R[i,j]*u[i,j]*(1 - u[i,j]/K[i,j])
end
@inbounds for i in 2:Nr-1
j = Nc
du[i,end] = D*(u[i-1,j] + u[i+1,j] + 2u[i,j-1] - 4u[i,j]) +
R[i,j]*u[i,j]*(1 - u[i,j]/K[i,j])
end
@inbounds begin
i = 1; j = 1
du[1,1] = D*(2u[i+1,j] + 2u[i,j+1] - 4u[i,j]) +
R[i,j]*u[i,j]*(1 - u[i,j]/K[i,j])
i = 1; j = Nc
du[1,Nc] = D*(2u[i+1,j] + 2u[i,j-1] - 4u[i,j]) +
R[i,j]*u[i,j]*(1 - u[i,j]/K[i,j])
i = Nr; j = 1
du[Nr,1] = D*(2u[i-1,j] + 2u[i,j+1] - 4u[i,j]) +
R[i,j]*u[i,j]*(1 - u[i,j]/K[i,j])
i = Nr; j = Nc
du[end,end] = D*(2u[i-1,j] + 2u[i,j-1] - 4u[i,j]) +
R[i,j]*u[i,j]*(1 - u[i,j]/K[i,j])
end
end
prob = ODEProblem(fast_react_diffuse!,initial_pop,tspan,p)
# @benchmark solve(prob,Tsit5())
sol = solve(prob,Tsit5(), saveat=1.0)
heatmap(sol[end])
@model function extra_simple_troubleshootfitlv(covariates, response, sample_idx, prob)
# Prior distributions.
d ~ Gamma(4, 1)
sigma ~ InverseGamma(2, 3)
println("d is ", d)
println("sigma is ", sigma)
p = [d, covariates[1], covariates[2]]
predicted = vec(Array(solve(prob, Tsit5(); p=p, saveat=1.0))[sample_idx])
# Observations.
response ~ MvNormal(predicted, sigma^2)
return nothing
end
## 5x10x26 ##
sample_idx = CartesianIndices((5:10:size(sol)[1], 5:10:size(sol)[2], 1:size(sol)[3]))
ts_extra_simple_model_1 = extra_simple_troubleshootfitlv((p[2], p[3]), vec(Array(sol)[sample_idx]), sample_idx, prob)
chain_1 = sample(ts_extra_simple_model_1, NUTS(0.65), MCMCSerial(), 500, 1; progress=true)
Then my VSCode terminal crashes so now Iβm not sure of the error anymore. I can come back to this.
For printing predicted and response, I tried outputting to a txt file but it is too long to share. Let me look at a good way of formatting/sharing and get back to you.