I am now getting the error below, and it looks like it comes from my local Julia installation, because the tutorial usually works fine:
ERROR: MethodError: no method matching ADgradient(::Val{:ForwardDiff}, ::LogDensityFunction{DynamicPPL.TypedVarInfo{NamedTuple{(:σ, :α, :β, :γ, :δ), Tuple{DynamicPPL.Metadata{Dict{AbstractPPL.VarName{:σ, Setfield.IdentityLens}, Int64}, Vector{InverseGamma{Float64}}, Vector{AbstractPPL.VarName{:σ, Setfield.IdentityLens}}, Vector{Float64}, Vector{Set{DynamicPPL.Selector}}}, DynamicPPL.Metadata{Dict{AbstractPPL.VarName{:α, Setfield.IdentityLens}, Int64}, Vector{Truncated{Normal{Float64}, Continuous, Float64}}, Vector{AbstractPPL.VarName{:α, Setfield.IdentityLens}}, Vector{Float64}, Vector{Set{DynamicPPL.Selector}}}, DynamicPPL.Metadata{Dict{AbstractPPL.VarName{:β, Setfield.IdentityLens}, Int64}, Vector{Truncated{Normal{Float64}, Continuous, Float64}}, Vector{AbstractPPL.VarName{:β, Setfield.IdentityLens}}, Vector{Float64}, Vector{Set{DynamicPPL.Selector}}}, DynamicPPL.Metadata{Dict{AbstractPPL.VarName{:γ, Setfield.IdentityLens}, Int64}, Vector{Truncated{Normal{Float64}, Continuous, Float64}}, Vector{AbstractPPL.VarName{:γ, Setfield.IdentityLens}}, Vector{Float64}, Vector{Set{DynamicPPL.Selector}}}, DynamicPPL.Metadata{Dict{AbstractPPL.VarName{:δ, Setfield.IdentityLens}, Int64}, Vector{Truncated{Normal{Float64}, Continuous, Float64}}, Vector{AbstractPPL.VarName{:δ, Setfield.IdentityLens}}, Vector{Float64}, Vector{Set{DynamicPPL.Selector}}}}}, Float64}, DynamicPPL.Model{typeof(fitlv), (:data, :prob), (), (), Tuple{Matrix{Float64}, ODEProblem{Vector{Float64}, Tuple{Float64, Float64}, true, Vector{Float64}, ODEFunction{true, SciMLBase.AutoSpecialize, typeof(lotka_volterra), UniformScaling{Bool}, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED), Nothing, Nothing}, Base.Pairs{Symbol, Union{}, Tuple{}, NamedTuple{(), Tuple{}}}, SciMLBase.StandardODEProblem}}, Tuple{}, DynamicPPL.DefaultContext}, 
DynamicPPL.SamplingContext{DynamicPPL.Sampler{NUTS{Turing.Essential.ForwardDiffAD{0}, (), AdvancedHMC.DiagEuclideanMetric}}, DynamicPPL.DefaultContext, Random._GLOBAL_RNG}}; gradientconfig=ForwardDiff.GradientConfig{ForwardDiff.Tag{Turing.TuringTag, Float64}, Float64, 5, Vector{ForwardDiff.Dual{ForwardDiff.Tag{Turing.TuringTag, Float64}, Float64, 5}}}((Partials(1.0, 0.0, 0.0, 0.0, 0.0), Partials(0.0, 1.0, 0.0, 0.0, 0.0), Partials(0.0, 0.0, 1.0, 0.0, 0.0), Partials(0.0, 0.0, 0.0, 1.0, 0.0), Partials(0.0, 0.0, 0.0, 0.0, 1.0)), ForwardDiff.Dual{ForwardDiff.Tag{Turing.TuringTag, Float64}, Float64, 5}[Dual{ForwardDiff.Tag{Turing.TuringTag, Float64}}(2.3546804574e-314,2.3546663153e-314,0.0,2.3546663627e-314,2.96e-322,2.3546804574e-314), Dual{ForwardDiff.Tag{Turing.TuringTag, Float64}}(2.3546663153e-314,0.0,2.3546804574e-314,2.3546663153e-314,0.0,2.3546804574e-314), Dual{ForwardDiff.Tag{Turing.TuringTag, Float64}}(2.3546663153e-314,0.0,5.685154233e-314,5.685154233e-314,0.0,2.3546804574e-314), Dual{ForwardDiff.Tag{Turing.TuringTag, Float64}}(2.3546663153e-314,0.0,2.3546804574e-314,2.3546663153e-314,0.0,2.3546804574e-314), Dual{ForwardDiff.Tag{Turing.TuringTag, Float64}}(2.3546663153e-314,0.0,2.3546804574e-314,2.3546663153e-314,0.0,2.3546804574e-314)]))
Closest candidates are:
ADgradient(::Val{:ForwardDiff}, ::Any; chunk, tag, x) at ~/.julia/packages/LogDensityProblemsAD/pwc6T/ext/LogDensityProblemsADForwardDiffExt.jl:98 got unsupported keyword argument "gradientconfig"
ADgradient(::Val{kind}, ::Any; kwargs...) where kind at ~/.julia/packages/LogDensityProblemsAD/pwc6T/src/LogDensityProblemsAD.jl:68
ADgradient(::ADTypes.AutoForwardDiff{C}, ::Any) where C at ~/.julia/packages/LogDensityProblemsAD/pwc6T/ext/LogDensityProblemsADADTypesExt.jl:31 got unsupported keyword argument "gradientconfig"