Issue with for-loop free model

Hi All,

I’m trying to get a simple MLP trained. Pretty much all turing tutorials still use for i in 1:N loops for the likelihood on the data.

I’m trying the following:

using Flux
using Turing,Zygote
using MCMCChains

nn = Chain(Dense(72,4,gelu),Dense(4,1, σ))
parameters_initial,reconstruct = Flux.destructure(nn)

@model function bnn(x::Matrix{Float32},y::Vector{Float32},nparameters,reconstruct)
    #parameters ~ MvNormal(zeros(nparameters), ones(nparameters))
    parameters ~ filldist(Normal(),nparameters)

    mlp = reconstruct(parameters)
    preds = mlp(x)    
    y  ~ arraydist(map(i -> Bernoulli(i),preds))
end 

N = 1000
ch = sample(
    bnn(x, y, length(parameters_initial), reconstruct), HMC(.04,6), N
);

And I get the following error. I’m assuming it something stupid. x is a 72×983 Matrix{Float32} and y is a 983-element Vector{Float32}

{
	"name": "LoadError",
	"message": "DimensionMismatch(\"number of dimensions of `x` (1) must be greater than number of dimensions of `d` (2)\")",
	"stack": "DimensionMismatch(\"number of dimensions of `x` (1) must be greater than number of dimensions of `d` (2)\")\n\nStacktrace:\n  [1] loglikelihood\n    @ ~/.julia/packages/Distributions/O5xl5/src/common.jl:433 [inlined]\n  [2] observe(right::DistributionsAD.MatrixOfUnivariate{Discrete, Bernoulli{Float32}, Matrix{Bernoulli{Float32}}}, left::Vector{Float32}, vi::DynamicPPL.ThreadSafeVarInfo{DynamicPPL.UntypedVarInfo{DynamicPPL.Metadata{Dict{AbstractPPL.VarName, Int64}, Vector{Distribution}, Vector{AbstractPPL.VarName}, Vector{Real}, Vector{Set{DynamicPPL.Selector}}}, Float64}, Vector{Base.RefValue{Float64}}})\n    @ DynamicPPL ~/.julia/packages/DynamicPPL/R7VK9/src/context_implementations.jl:233\n  [3] observe(sampler::DynamicPPL.SampleFromUniform, right::DistributionsAD.MatrixOfUnivariate{Discrete, Bernoulli{Float32}, Matrix{Bernoulli{Float32}}}, left::Vector{Float32}, vi::DynamicPPL.ThreadSafeVarInfo{DynamicPPL.UntypedVarInfo{DynamicPPL.Metadata{Dict{AbstractPPL.VarName, Int64}, Vector{Distribution}, Vector{AbstractPPL.VarName}, Vector{Real}, Vector{Set{DynamicPPL.Selector}}}, Float64}, Vector{Base.RefValue{Float64}}})\n    @ DynamicPPL ~/.julia/packages/DynamicPPL/R7VK9/src/context_implementations.jl:230\n  [4] tilde_observe(::DynamicPPL.IsLeaf, ::DynamicPPL.DefaultContext, ::DynamicPPL.SampleFromUniform, ::DistributionsAD.MatrixOfUnivariate{Discrete, Bernoulli{Float32}, Matrix{Bernoulli{Float32}}}, ::Vector{Float32}, ::DynamicPPL.ThreadSafeVarInfo{DynamicPPL.UntypedVarInfo{DynamicPPL.Metadata{Dict{AbstractPPL.VarName, Int64}, Vector{Distribution}, Vector{AbstractPPL.VarName}, Vector{Real}, Vector{Set{DynamicPPL.Selector}}}, Float64}, Vector{Base.RefValue{Float64}}})\n    @ DynamicPPL ~/.julia/packages/DynamicPPL/R7VK9/src/context_implementations.jl:137\n  [5] tilde_observe(::DynamicPPL.DefaultContext, ::DynamicPPL.SampleFromUniform, ::DistributionsAD.MatrixOfUnivariate{Discrete, Bernoulli{Float32}, Matrix{Bernoulli{Float32}}}, ::Vector{Float32}, ::DynamicPPL.ThreadSafeVarInfo{DynamicPPL.UntypedVarInfo{DynamicPPL.Metadata{Dict{AbstractPPL.VarName, Int64}, Vector{Distribution}, Vector{AbstractPPL.VarName}, Vector{Real}, Vector{Set{DynamicPPL.Selector}}}, Float64}, Vector{Base.RefValue{Float64}}})\n    @ DynamicPPL ~/.julia/packages/DynamicPPL/R7VK9/src/context_implementations.jl:135\n  [6] tilde_observe(context::DynamicPPL.SamplingContext{DynamicPPL.SampleFromUniform, DynamicPPL.DefaultContext, Random._GLOBAL_RNG}, right::DistributionsAD.MatrixOfUnivariate{Discrete, Bernoulli{Float32}, Matrix{Bernoulli{Float32}}}, left::Vector{Float32}, vi::DynamicPPL.ThreadSafeVarInfo{DynamicPPL.UntypedVarInfo{DynamicPPL.Metadata{Dict{AbstractPPL.VarName, Int64}, Vector{Distribution}, Vector{AbstractPPL.VarName}, Vector{Real}, Vector{Set{DynamicPPL.Selector}}}, Float64}, Vector{Base.RefValue{Float64}}})\n    @ DynamicPPL ~/.julia/packages/DynamicPPL/R7VK9/src/context_implementations.jl:130\n  [7] tilde_observe!!(context::DynamicPPL.SamplingContext{DynamicPPL.SampleFromUniform, DynamicPPL.DefaultContext, Random._GLOBAL_RNG}, right::DistributionsAD.MatrixOfUnivariate{Discrete, Bernoulli{Float32}, Matrix{Bernoulli{Float32}}}, left::Vector{Float32}, vi::DynamicPPL.ThreadSafeVarInfo{DynamicPPL.UntypedVarInfo{DynamicPPL.Metadata{Dict{AbstractPPL.VarName, Int64}, Vector{Distribution}, Vector{AbstractPPL.VarName}, Vector{Real}, Vector{Set{DynamicPPL.Selector}}}, Float64}, Vector{Base.RefValue{Float64}}})\n    @ DynamicPPL ~/.julia/packages/DynamicPPL/R7VK9/src/context_implementations.jl:183\n  [8] tilde_observe!!(context::DynamicPPL.SamplingContext{DynamicPPL.SampleFromUniform, DynamicPPL.DefaultContext, Random._GLOBAL_RNG}, right::DistributionsAD.MatrixOfUnivariate{Discrete, Bernoulli{Float32}, Matrix{Bernoulli{Float32}}}, left::Vector{Float32}, vname::AbstractPPL.VarName{:y, Setfield.IdentityLens}, vi::DynamicPPL.ThreadSafeVarInfo{DynamicPPL.UntypedVarInfo{DynamicPPL.Metadata{Dict{AbstractPPL.VarName, Int64}, Vector{Distribution}, Vector{AbstractPPL.VarName}, Vector{Real}, Vector{Set{DynamicPPL.Selector}}}, Float64}, Vector{Base.RefValue{Float64}}})\n    @ DynamicPPL ~/.julia/packages/DynamicPPL/R7VK9/src/context_implementations.jl:170\n  [9] macro expansion\n    @ ~/.julia/packages/DynamicPPL/R7VK9/src/compiler.jl:539 [inlined]\n [10] bnn(__model__::DynamicPPL.Model{typeof(bnn), (:x, :y, :nparameters, :reconstruct), (), (), Tuple{Matrix{Float32}, Vector{Float32}, Int64, Optimisers.Restructure{Chain{Tuple{Dense{typeof(gelu), Matrix{Float32}, Vector{Float32}}, Dense{typeof(σ), Matrix{Float32}, Vector{Float32}}}}, NamedTuple{(:layers,), Tuple{Tuple{NamedTuple{(:weight, :bias, :σ), Tuple{Int64, Int64, Tuple{}}}, NamedTuple{(:weight, :bias, :σ), Tuple{Int64, Int64, Tuple{}}}}}}}}, Tuple{}, DynamicPPL.DefaultContext}, __varinfo__::DynamicPPL.ThreadSafeVarInfo{DynamicPPL.UntypedVarInfo{DynamicPPL.Metadata{Dict{AbstractPPL.VarName, Int64}, Vector{Distribution}, Vector{AbstractPPL.VarName}, Vector{Real}, Vector{Set{DynamicPPL.Selector}}}, Float64}, Vector{Base.RefValue{Float64}}}, __context__::DynamicPPL.SamplingContext{DynamicPPL.SampleFromUniform, DynamicPPL.DefaultContext, Random._GLOBAL_RNG}, x::Matrix{Float32}, y::Vector{Float32}, nparameters::Int64, reconstruct::Optimisers.Restructure{Chain{Tuple{Dense{typeof(gelu), Matrix{Float32}, Vector{Float32}}, Dense{typeof(σ), Matrix{Float32}, Vector{Float32}}}}, NamedTuple{(:layers,), Tuple{Tuple{NamedTuple{(:weight, :bias, :σ), Tuple{Int64, Int64, Tuple{}}}, NamedTuple{(:weight, :bias, :σ), Tuple{Int64, Int64, Tuple{}}}}}}})\n    @ Main ./In[16]:10\n [11] macro expansion\n    @ ~/.julia/packages/DynamicPPL/R7VK9/src/model.jl:493 [inlined]\n [12] _evaluate!!\n    @ ~/.julia/packages/DynamicPPL/R7VK9/src/model.jl:476 [inlined]\n [13] evaluate_threadsafe!!\n    @ ~/.julia/packages/DynamicPPL/R7VK9/src/model.jl:467 [inlined]\n [14] evaluate!!\n    @ ~/.julia/packages/DynamicPPL/R7VK9/src/model.jl:402 [inlined]\n [15] evaluate!!(model::DynamicPPL.Model{typeof(bnn), (:x, :y, :nparameters, :reconstruct), (), (), Tuple{Matrix{Float32}, Vector{Float32}, Int64, Optimisers.Restructure{Chain{Tuple{Dense{typeof(gelu), Matrix{Float32}, Vector{Float32}}, Dense{typeof(σ), Matrix{Float32}, Vector{Float32}}}}, NamedTuple{(:layers,), Tuple{Tuple{NamedTuple{(:weight, :bias, :σ), Tuple{Int64, Int64, Tuple{}}}, NamedTuple{(:weight, :bias, :σ), Tuple{Int64, Int64, Tuple{}}}}}}}}, Tuple{}, DynamicPPL.DefaultContext}, rng::Random._GLOBAL_RNG, varinfo::DynamicPPL.UntypedVarInfo{DynamicPPL.Metadata{Dict{AbstractPPL.VarName, Int64}, Vector{Distribution}, Vector{AbstractPPL.VarName}, Vector{Real}, Vector{Set{DynamicPPL.Selector}}}, Float64}, sampler::DynamicPPL.SampleFromUniform, context::DynamicPPL.DefaultContext)\n    @ DynamicPPL ~/.julia/packages/DynamicPPL/R7VK9/src/model.jl:415\n [16] (::DynamicPPL.Model{typeof(bnn), (:x, :y, :nparameters, :reconstruct), (), (), Tuple{Matrix{Float32}, Vector{Float32}, Int64, Optimisers.Restructure{Chain{Tuple{Dense{typeof(gelu), Matrix{Float32}, Vector{Float32}}, Dense{typeof(σ), Matrix{Float32}, Vector{Float32}}}}, NamedTuple{(:layers,), Tuple{Tuple{NamedTuple{(:weight, :bias, :σ), Tuple{Int64, Int64, Tuple{}}}, NamedTuple{(:weight, :bias, :σ), Tuple{Int64, Int64, Tuple{}}}}}}}}, Tuple{}, DynamicPPL.DefaultContext})(::Random._GLOBAL_RNG, ::Vararg{Any})\n    @ DynamicPPL ~/.julia/packages/DynamicPPL/R7VK9/src/model.jl:375\n [17] VarInfo\n    @ ~/.julia/packages/DynamicPPL/R7VK9/src/varinfo.jl:127 [inlined]\n [18] VarInfo\n    @ ~/.julia/packages/DynamicPPL/R7VK9/src/varinfo.jl:126 [inlined]\n [19] step(rng::Random._GLOBAL_RNG, model::DynamicPPL.Model{typeof(bnn), (:x, :y, :nparameters, :reconstruct), (), (), Tuple{Matrix{Float32}, Vector{Float32}, Int64, Optimisers.Restructure{Chain{Tuple{Dense{typeof(gelu), Matrix{Float32}, Vector{Float32}}, Dense{typeof(σ), Matrix{Float32}, Vector{Float32}}}}, NamedTuple{(:layers,), Tuple{Tuple{NamedTuple{(:weight, :bias, :σ), Tuple{Int64, Int64, Tuple{}}}, NamedTuple{(:weight, :bias, :σ), Tuple{Int64, Int64, Tuple{}}}}}}}}, Tuple{}, DynamicPPL.DefaultContext}, spl::DynamicPPL.Sampler{HMC{Turing.Essential.ReverseDiffAD{true}, (), AdvancedHMC.UnitEuclideanMetric}}; resume_from::Nothing, init_params::Nothing, kwargs::Base.Pairs{Symbol, Union{}, Tuple{}, NamedTuple{(), Tuple{}}})\n    @ DynamicPPL ~/.julia/packages/DynamicPPL/R7VK9/src/sampler.jl:86\n [20] step\n    @ ~/.julia/packages/DynamicPPL/R7VK9/src/sampler.jl:79 [inlined]\n [21] macro expansion\n    @ ~/.julia/packages/AbstractMCMC/fnRmh/src/sample.jl:120 [inlined]\n [22] macro expansion\n    @ ~/.julia/packages/ProgressLogging/6KXlp/src/ProgressLogging.jl:328 [inlined]\n [23] (::AbstractMCMC.var\"#21#22\"{Bool, String, Nothing, Int64, Int64, Base.Pairs{Symbol, Union{}, Tuple{}, NamedTuple{(), Tuple{}}}, Random._GLOBAL_RNG, DynamicPPL.Model{typeof(bnn), (:x, :y, :nparameters, :reconstruct), (), (), Tuple{Matrix{Float32}, Vector{Float32}, Int64, Optimisers.Restructure{Chain{Tuple{Dense{typeof(gelu), Matrix{Float32}, Vector{Float32}}, Dense{typeof(σ), Matrix{Float32}, Vector{Float32}}}}, NamedTuple{(:layers,), Tuple{Tuple{NamedTuple{(:weight, :bias, :σ), Tuple{Int64, Int64, Tuple{}}}, NamedTuple{(:weight, :bias, :σ), Tuple{Int64, Int64, Tuple{}}}}}}}}, Tuple{}, DynamicPPL.DefaultContext}, DynamicPPL.Sampler{HMC{Turing.Essential.ReverseDiffAD{true}, (), AdvancedHMC.UnitEuclideanMetric}}, Int64, Int64})()\n    @ AbstractMCMC ~/.julia/packages/AbstractMCMC/fnRmh/src/logging.jl:12\n [24] with_logstate(f::Function, logstate::Any)\n    @ Base.CoreLogging ./logging.jl:511\n [25] with_logger(f::Function, logger::LoggingExtras.TeeLogger{Tuple{LoggingExtras.EarlyFilteredLogger{ConsoleProgressMonitor.ProgressLogger, AbstractMCMC.var\"#1#3\"{Module}}, LoggingExtras.EarlyFilteredLogger{Base.CoreLogging.SimpleLogger, AbstractMCMC.var\"#2#4\"{Module}}}})\n    @ Base.CoreLogging ./logging.jl:623\n [26] with_progresslogger(f::Function, _module::Module, logger::Base.CoreLogging.SimpleLogger)\n    @ AbstractMCMC ~/.julia/packages/AbstractMCMC/fnRmh/src/logging.jl:36\n [27] macro expansion\n    @ ~/.julia/packages/AbstractMCMC/fnRmh/src/logging.jl:11 [inlined]\n [28] mcmcsample(rng::Random._GLOBAL_RNG, model::DynamicPPL.Model{typeof(bnn), (:x, :y, :nparameters, :reconstruct), (), (), Tuple{Matrix{Float32}, Vector{Float32}, Int64, Optimisers.Restructure{Chain{Tuple{Dense{typeof(gelu), Matrix{Float32}, Vector{Float32}}, Dense{typeof(σ), Matrix{Float32}, Vector{Float32}}}}, NamedTuple{(:layers,), Tuple{Tuple{NamedTuple{(:weight, :bias, :σ), Tuple{Int64, Int64, Tuple{}}}, NamedTuple{(:weight, :bias, :σ), Tuple{Int64, Int64, Tuple{}}}}}}}}, Tuple{}, DynamicPPL.DefaultContext}, sampler::DynamicPPL.Sampler{HMC{Turing.Essential.ReverseDiffAD{true}, (), AdvancedHMC.UnitEuclideanMetric}}, N::Int64; progress::Bool, progressname::String, callback::Nothing, discard_initial::Int64, thinning::Int64, chain_type::Type, kwargs::Base.Pairs{Symbol, Union{}, Tuple{}, NamedTuple{(), Tuple{}}})\n    @ AbstractMCMC ~/.julia/packages/AbstractMCMC/fnRmh/src/sample.jl:111\n [29] sample(rng::Random._GLOBAL_RNG, model::DynamicPPL.Model{typeof(bnn), (:x, :y, :nparameters, :reconstruct), (), (), Tuple{Matrix{Float32}, Vector{Float32}, Int64, Optimisers.Restructure{Chain{Tuple{Dense{typeof(gelu), Matrix{Float32}, Vector{Float32}}, Dense{typeof(σ), Matrix{Float32}, Vector{Float32}}}}, NamedTuple{(:layers,), Tuple{Tuple{NamedTuple{(:weight, :bias, :σ), Tuple{Int64, Int64, Tuple{}}}, NamedTuple{(:weight, :bias, :σ), Tuple{Int64, Int64, Tuple{}}}}}}}}, Tuple{}, DynamicPPL.DefaultContext}, sampler::DynamicPPL.Sampler{HMC{Turing.Essential.ReverseDiffAD{true}, (), AdvancedHMC.UnitEuclideanMetric}}, N::Int64; chain_type::Type, resume_from::Nothing, progress::Bool, kwargs::Base.Pairs{Symbol, Union{}, Tuple{}, NamedTuple{(), Tuple{}}})\n    @ Turing.Inference ~/.julia/packages/Turing/Oczpc/src/inference/Inference.jl:159\n [30] sample\n    @ ~/.julia/packages/Turing/Oczpc/src/inference/Inference.jl:158 [inlined]\n [31] #sample#2\n    @ ~/.julia/packages/Turing/Oczpc/src/inference/Inference.jl:145 [inlined]\n [32] sample\n    @ ~/.julia/packages/Turing/Oczpc/src/inference/Inference.jl:145 [inlined]\n [33] #sample#1\n    @ ~/.julia/packages/Turing/Oczpc/src/inference/Inference.jl:135 [inlined]\n [34] sample(model::DynamicPPL.Model{typeof(bnn), (:x, :y, :nparameters, :reconstruct), (), (), Tuple{Matrix{Float32}, Vector{Float32}, Int64, Optimisers.Restructure{Chain{Tuple{Dense{typeof(gelu), Matrix{Float32}, Vector{Float32}}, Dense{typeof(σ), Matrix{Float32}, Vector{Float32}}}}, NamedTuple{(:layers,), Tuple{Tuple{NamedTuple{(:weight, :bias, :σ), Tuple{Int64, Int64, Tuple{}}}, NamedTuple{(:weight, :bias, :σ), Tuple{Int64, Int64, Tuple{}}}}}}}}, Tuple{}, DynamicPPL.DefaultContext}, alg::HMC{Turing.Essential.ReverseDiffAD{true}, (), AdvancedHMC.UnitEuclideanMetric}, N::Int64)\n    @ Turing.Inference ~/.julia/packages/Turing/Oczpc/src/inference/Inference.jl:135\n [35] top-level scope\n    @ In[17]:2\n [36] eval\n    @ ./boot.jl:373 [inlined]\n [37] include_string(mapexpr::typeof(REPL.softscope), mod::Module, code::String, filename::String)\n    @ Base ./loading.jl:1196"

preds will be of size (1, size(x, 2)) and y is (I assume) of shape size(x, 2), hence LHS of the last ~ will be of size size(x,2) while RHS will be of size (1, size(x, 2)) which causes the error.