Turing.jl - Error running tutorial code

I was trying the Turing.jl tutorial code for Bayesian Neural Networks.

When I try to run the variational inference code (code below),

using Bijectors
using Turing: Variational
using AdvancedVI

m = bayes_nn(hcat(xs...), ts);

q = Variational.meanfield(m)

μ = randn(length(q))
ω = -1 .* ones(length(q))

q = AdvancedVI.update(q, μ, exp.(ω));

advi = ADVI(10, 5_000)
q_hat = vi(m, advi, q);

I get the following error from the last line.

┌ Info: [ADVI] Should only be seen once: optimizer created for θ
└   objectid(θ) = 0x71ef269d3ba1f604
ERROR: LoadError: BoundsError: attempt to access 0-element Vector{ReverseDiff.TrackedArray{Float64, Float64, 1, Vector{Float64}, Vector{Float64}}} at index [1]
Stacktrace:
  [1] getindex(A::Vector{ReverseDiff.TrackedArray{Float64, Float64, 1, Vector{Float64}, Vector{Float64}}}, i1::Int64)
    @ Base ./array.jl:801
  [2] mapvcat(::Function, ::Base.Iterators.Drop{Tuple{Identity{1}}}, ::Vararg{Any, N} where N)
    @ Bijectors ~/.julia/packages/Bijectors/LmARY/src/Bijectors.jl:95
  [3] forward(sb::Stacked{Tuple{Identity{1}}, Vector{UnitRange{Int64}}}, x::ReverseDiff.TrackedArray{Float64, Float64, 1, Vector{Float64}, Vector{Float64}})
    @ Bijectors ~/.julia/packages/Bijectors/LmARY/src/bijectors/stacked.jl:167
  [4] _forward(td::MultivariateTransformed{DistributionsAD.TuringDiagMvNormal{ReverseDiff.TrackedArray{Float64, Float64, 1, Vector{Float64}, Vector{Float64}}, ReverseDiff.TrackedArray{Float64, Float64, 1, Vector{Float64}, Vector{Float64}}}, Stacked{Tuple{Identity{1}}, Vector{UnitRange{Int64}}}}, x::ReverseDiff.TrackedArray{Float64, Float64, 1, Vector{Float64}, Vector{Float64}})
    @ Bijectors ~/.julia/packages/Bijectors/LmARY/src/transformed_distribution.jl:235
  [5] forward(rng::Random._GLOBAL_RNG, td::MultivariateTransformed{DistributionsAD.TuringDiagMvNormal{ReverseDiff.TrackedArray{Float64, Float64, 1, Vector{Float64}, Vector{Float64}}, ReverseDiff.TrackedArray{Float64, Float64, 1, Vector{Float64}, Vector{Float64}}}, Stacked{Tuple{Identity{1}}, Vector{UnitRange{Int64}}}})
    @ Bijectors ~/.julia/packages/Bijectors/LmARY/src/transformed_distribution.jl:244
  [6] (::ELBO)(rng::Random._GLOBAL_RNG, alg::ADVI{ReverseDiffAD{false}}, q::MultivariateTransformed{DistributionsAD.TuringDiagMvNormal{ReverseDiff.TrackedArray{Float64, Float64, 1, Vector{Float64}, Vector{Float64}}, ReverseDiff.TrackedArray{Float64, Float64, 1, Vector{Float64}, Vector{Float64}}}, Stacked{Tuple{Identity{1}}, Vector{UnitRange{Int64}}}}, logπ::Turing.Variational.var"#logπ#2"{DynamicPPL.Model{var"#17#18", (:xs, :ts), (), (), Tuple{Matrix{Float64}, Vector{Float64}}, Tuple{}}, DynamicPPL.TypedVarInfo{NamedTuple{(:nn_params,), Tuple{DynamicPPL.Metadata{Dict{AbstractPPL.VarName{:nn_params, Tuple{}}, Int64}, Vector{DiagNormal}, Vector{AbstractPPL.VarName{:nn_params, Tuple{}}}, Vector{Float64}, Vector{Set{DynamicPPL.Selector}}}}}, Float64}}, num_samples::Int64)
    @ AdvancedVI ~/.julia/packages/AdvancedVI/yCVq7/src/advi.jl:85
  [7] #_#21
    @ ~/.julia/packages/AdvancedVI/yCVq7/src/objectives.jl:6 [inlined]
  [8] ELBO
    @ ~/.julia/packages/AdvancedVI/yCVq7/src/objectives.jl:6 [inlined]
  [9] (::AdvancedVI.var"#f#27"{ELBO, ADVI{ReverseDiffAD{false}}, MultivariateTransformed{DistributionsAD.TuringDiagMvNormal{Vector{Float64}, Vector{Float64}}, Stacked{Tuple{Identity{1}}, Vector{UnitRange{Int64}}}}, Turing.Variational.var"#logπ#2"{DynamicPPL.Model{var"#17#18", (:xs, :ts), (), (), Tuple{Matrix{Float64}, Vector{Float64}}, Tuple{}}, DynamicPPL.TypedVarInfo{NamedTuple{(:nn_params,), Tuple{DynamicPPL.Metadata{Dict{AbstractPPL.VarName{:nn_params, Tuple{}}, Int64}, Vector{DiagNormal}, Vector{AbstractPPL.VarName{:nn_params, Tuple{}}}, Vector{Float64}, Vector{Set{DynamicPPL.Selector}}}}}, Float64}}, Tuple{Int64}})(θ::ReverseDiff.TrackedArray{Float64, Float64, 1, Vector{Float64}, Vector{Float64}})
    @ AdvancedVI ~/.julia/packages/AdvancedVI/yCVq7/src/AdvancedVI.jl:69
 [10] ReverseDiff.GradientTape(f::AdvancedVI.var"#f#27"{ELBO, ADVI{ReverseDiffAD{false}}, MultivariateTransformed{DistributionsAD.TuringDiagMvNormal{Vector{Float64}, Vector{Float64}}, Stacked{Tuple{Identity{1}}, Vector{UnitRange{Int64}}}}, Turing.Variational.var"#logπ#2"{DynamicPPL.Model{var"#17#18", (:xs, :ts), (), (), Tuple{Matrix{Float64}, Vector{Float64}}, Tuple{}}, DynamicPPL.TypedVarInfo{NamedTuple{(:nn_params,), Tuple{DynamicPPL.Metadata{Dict{AbstractPPL.VarName{:nn_params, Tuple{}}, Int64}, Vector{DiagNormal}, Vector{AbstractPPL.VarName{:nn_params, Tuple{}}}, Vector{Float64}, Vector{Set{DynamicPPL.Selector}}}}}, Float64}}, Tuple{Int64}}, input::Vector{Float64}, cfg::ReverseDiff.GradientConfig{ReverseDiff.TrackedArray{Float64, Float64, 1, Vector{Float64}, Vector{Float64}}})
    @ ReverseDiff ~/.julia/packages/ReverseDiff/E4Tzn/src/api/tape.jl:199
 [11] ReverseDiff.GradientTape(f::Function, input::Vector{Float64})
    @ ReverseDiff ~/.julia/packages/ReverseDiff/E4Tzn/src/api/tape.jl:198
 [12] tape
    @ ~/.julia/packages/AdvancedVI/yCVq7/src/compat/reversediff.jl:13 [inlined]
 [13] grad!(vo::ELBO, alg::ADVI{ReverseDiffAD{false}}, q::MultivariateTransformed{DistributionsAD.TuringDiagMvNormal{Vector{Float64}, Vector{Float64}}, Stacked{Tuple{Identity{1}}, Vector{UnitRange{Int64}}}}, model::Function, θ::Vector{Float64}, out::DiffResults.MutableDiffResult{1, Float64, Tuple{Vector{Float64}}}, args::Int64)
    @ AdvancedVI ~/.julia/packages/AdvancedVI/yCVq7/src/AdvancedVI.jl:73
 [14] macro expansion
    @ ~/.julia/packages/AdvancedVI/yCVq7/src/AdvancedVI.jl:213 [inlined]
 [15] macro expansion
    @ ./timing.jl:287 [inlined]
 [16] optimize!(vo::ELBO, alg::ADVI{ReverseDiffAD{false}}, q::MultivariateTransformed{DistributionsAD.TuringDiagMvNormal{Vector{Float64}, Vector{Float64}}, Stacked{Tuple{Identity{1}}, Vector{UnitRange{Int64}}}}, model::Function, θ::Vector{Float64}; optimizer::TruncatedADAGrad)
    @ AdvancedVI ~/.julia/packages/AdvancedVI/yCVq7/src/AdvancedVI.jl:212
 [17] vi(model::DynamicPPL.Model{var"#17#18", (:xs, :ts), (), (), Tuple{Matrix{Float64}, Vector{Float64}}, Tuple{}}, alg::ADVI{ReverseDiffAD{false}}, q::MultivariateTransformed{DistributionsAD.TuringDiagMvNormal{Vector{Float64}, Vector{Float64}}, Stacked{Tuple{Identity{1}}, Vector{UnitRange{Int64}}}}; optimizer::TruncatedADAGrad)
    @ Turing.Variational ~/.julia/packages/Turing/YGtAo/src/variational/advi.jl:126
 [18] vi(model::DynamicPPL.Model{var"#17#18", (:xs, :ts), (), (), Tuple{Matrix{Float64}, Vector{Float64}}, Tuple{}}, alg::ADVI{ReverseDiffAD{false}}, q::MultivariateTransformed{DistributionsAD.TuringDiagMvNormal{Vector{Float64}, Vector{Float64}}, Stacked{Tuple{Identity{1}}, Vector{UnitRange{Int64}}}})
    @ Turing.Variational ~/.julia/packages/Turing/YGtAo/src/variational/advi.jl:122

I am new to Turing and am unable to debug this myself.

Any suggestions on how to resolve this error?

1 Like

This is a known issue see BNN Variational Inference Error · Issue #59 · TuringLang/TuringTutorials · GitHub

Thanks @Storopoli.
It seems there is no fix as of now, and the tutorial site has since been updated to remove the ADVI section.