This MWE is eq. 6 from Kyung et al., which features group-wise scaling of the coefficients to facilitate shrinkage, analogous to the grouped lasso. It seems that Turing does not like how I have defined the MvNormal for each observation of y. I'm pretty sure I just messed up the model specification somehow, but I have also filed an issue in case that makes it easier for the devs.
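For reference, my reading of the hierarchy in eq. 6 (with K groups and m_k coefficients in group k; the Gamma is parameterized as in the paper) is:

$$
\begin{aligned}
y \mid X, \beta, \sigma^2 &\sim \mathrm{N}\!\left(X\beta,\ \sigma^2 I_n\right),\\
\beta_k \mid \tau_k^2, \sigma^2 &\sim \mathrm{N}\!\left(0,\ \sigma^2 \tau_k^2 I_{m_k}\right), \quad k = 1, \dots, K,\\
\tau_k^2 &\sim \mathrm{Gamma}\!\left(\tfrac{m_k + 1}{2},\ \tfrac{\lambda^2}{2}\right).
\end{aligned}
$$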
I have the following model:
# Import Turing and Distributions.
using Turing, Distributions
# Import MCMCChains, Plots, and StatsPlots for visualizations and diagnostics.
using MCMCChains, Plots, StatsPlots
# Functionality for splitting and normalizing the data.
using MLDataUtils: shuffleobs, splitobs, rescale!
# Functionality for evaluating the model predictions.
using Distances
# Set a seed for reproducibility.
using Random
using LinearAlgebra
Random.seed!(0)
# Hide the progress prompt while sampling.
Turing.turnprogress(false);
@model function grouped_lasso(y, x1, x2, x0, σ², λ²)
    # for the pairwise model, only two groups for the lasso: home (self) and remote (other)
    ngroups = 2
    # number of coefficients per group (each group's own covariates plus the shared x0)
    mk = size(x1)[2] + size(x0)[2]
    # number of observations
    nobs = size(x1)[1]
    # set the variance prior (shrinkage of the group-wise linear coefficients)
    τ² = Vector{Real}(undef, ngroups)
    for i in 1:ngroups
        τ²[i] ~ Gamma((mk + 1) / 2, λ² / 2)
    end
    # set the coefficient prior
    β = Vector{Vector}(undef, ngroups)
    for i in 1:ngroups
        β[i] ~ MvNormal(mk, σ² * τ²[i])
    end
    # set the target distribution
    ntarg = size(y)[2]
    for i in 1:nobs
        mu = Vector{Real}(undef, ntarg)
        for j in 1:ntarg
            mu[j] = dot(vec([x1[i, :]' x0[i, :]']), vec(β[1])) +
                    dot(vec([x2[i, :]' x0[i, :]']), vec(β[2]))
        end
        y[i, :] ~ MvNormal(mu, σ²)
    end
end
yTrain = rand(MvNormal(fill(0, 3), 1), 1000)'
x1Train = rand(MvNormal(fill(0, 2), 1), 1000)'
x2Train = rand(MvNormal(fill(0, 2), 1), 1000)'
x0Train = rand(MvNormal(fill(0, 2), 1), 1000)'
σ² = 1
λ² = 2
model = grouped_lasso(yTrain, x1Train, x2Train, x0Train, σ², λ²)
chain = sample(model, NUTS(0.65), 300);
Sampling immediately fails with the following error output:
ERROR: StackOverflowError:
Stacktrace:
[1] MvNormal(::Array{Real,1}, ::PDMats.ScalMat{Float64}) at /home/me/.julia/packages/Distributions/HjzA0/src/multivariate/mvnormal.jl:200
[2] MvNormal(::Array{Real,1}, ::PDMats.ScalMat{Float64}) at /home/me/.julia/packages/Distributions/HjzA0/src/multivariate/mvnormal.jl:202 (repeats 65353 times)
[3] MvNormal(::Array{Real,1}, ::Int64) at /home/me/.julia/packages/Distributions/HjzA0/src/multivariate/mvnormal.jl:220
[4] #29 at /home/me/code/scratch/local/turing/test_lasso.jl:50 [inlined]
[5] (::var"#29#30")(::Random._GLOBAL_RNG, ::DynamicPPL.Model{var"#29#30",(:y, :x1, :x2, :x0, :σ², :λ²),(),(),Tuple{Adjoint{Float64,Array{Float64,2}},Adjoint{Float64,Array{Float64,2}},Adjoint{Float64,Array{Float64,2}},Adjoint{Float64,Array{Float64,2}},Int64,Int64},Tuple{}}, ::DynamicPPL.VarInfo{DynamicPPL.Metadata{Dict{DynamicPPL.VarName,Int64},Array{Distribution,1},Array{DynamicPPL.VarName,1},Array{Real,1},Array{Set{DynamicPPL.Selector},1}},Float64}, ::DynamicPPL.SampleFromPrior, ::DynamicPPL.DefaultContext, ::Adjoint{Float64,Array{Float64,2}}, ::Adjoint{Float64,Array{Float64,2}}, ::Adjoint{Float64,Array{Float64,2}}, ::Adjoint{Float64,Array{Float64,2}}, ::Int64, ::Int64) at ./none:0
[6] macro expansion at /home/me/.julia/packages/DynamicPPL/YDJiG/src/model.jl:0 [inlined]
[7] _evaluate at /home/me/.julia/packages/DynamicPPL/YDJiG/src/model.jl:160 [inlined]
[8] evaluate_threadunsafe at /home/me/.julia/packages/DynamicPPL/YDJiG/src/model.jl:130 [inlined]
[9] Model at /home/me/.julia/packages/DynamicPPL/YDJiG/src/model.jl:92 [inlined]
[10] Model at /home/me/.julia/packages/DynamicPPL/YDJiG/src/model.jl:98 [inlined]
[11] VarInfo at /home/me/.julia/packages/DynamicPPL/YDJiG/src/varinfo.jl:110 [inlined]
[12] VarInfo at /home/me/.julia/packages/DynamicPPL/YDJiG/src/varinfo.jl:109 [inlined]
[13] DynamicPPL.Sampler(::NUTS{Turing.Core.ForwardDiffAD{40},(),AdvancedHMC.DiagEuclideanMetric}, ::DynamicPPL.Model{var"#29#30",(:y, :x1, :x2, :x0, :σ², :λ²),(),(),Tuple{Adjoint{Float64,Array{Float64,2}},Adjoint{Float64,Array{Float64,2}},Adjoint{Float64,Array{Float64,2}},Adjoint{Float64,Array{Float64,2}},Int64,Int64},Tuple{}}, ::DynamicPPL.Selector) at /home/me/.julia/packages/Turing/UsQlw/src/inference/hmc.jl:378
[14] Sampler at /home/me/.julia/packages/Turing/UsQlw/src/inference/hmc.jl:376 [inlined]
[15] sample(::Random._GLOBAL_RNG, ::DynamicPPL.Model{var"#29#30",(:y, :x1, :x2, :x0, :σ², :λ²),(),(),Tuple{Adjoint{Float64,Array{Float64,2}},Adjoint{Float64,Array{Float64,2}},Adjoint{Float64,Array{Float64,2}},Adjoint{Float64,Array{Float64,2}},Int64,Int64},Tuple{}}, ::NUTS{Turing.Core.ForwardDiffAD{40},(),AdvancedHMC.DiagEuclideanMetric}, ::Int64; kwargs::Base.Iterators.Pairs{Union{},Union{},Tuple{},NamedTuple{(),Tuple{}}}) at /home/me/.julia/packages/Turing/UsQlw/src/inference/Inference.jl:164
[16] sample at /home/me/.julia/packages/Turing/UsQlw/src/inference/Inference.jl:164 [inlined]
[17] #sample#1 at /home/me/.julia/packages/Turing/UsQlw/src/inference/Inference.jl:154 [inlined]
[18] sample(::DynamicPPL.Model{var"#29#30",(:y, :x1, :x2, :x0, :σ², :λ²),(),(),Tuple{Adjoint{Float64,Array{Float64,2}},Adjoint{Float64,Array{Float64,2}},Adjoint{Float64,Array{Float64,2}},Adjoint{Float64,Array{Float64,2}},Int64,Int64},Tuple{}}, ::NUTS{Turing.Core.ForwardDiffAD{40},(),AdvancedHMC.DiagEuclideanMetric}, ::Int64) at /home/me/.julia/packages/Turing/UsQlw/src/inference/Inference.jl:154
[19] top-level scope at none:1
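Update: reading the trace, the recursion is between the two inner MvNormal constructors, and it seems to be triggered by the mean vector having the abstract element type Real (mu is a Vector{Real}, and the same goes for τ²). The performance tips in the Turing docs suggest passing the element type into the model as a parameter instead of hard-coding Real, so that Turing can substitute its dual-number type during AD. A minimal sketch of that change, assuming the abstract eltype is indeed the culprit (the ::Type{T}=Float64 argument and the where {T<:Real} clause are the pattern from the docs, not anything specific to this model):

@model function grouped_lasso(y, x1, x2, x0, σ², λ², ::Type{T}=Float64) where {T<:Real}
    ngroups = 2
    mk = size(x1)[2] + size(x0)[2]
    nobs = size(x1)[1]
    # concrete element type T instead of the abstract Real
    τ² = Vector{T}(undef, ngroups)
    for i in 1:ngroups
        τ²[i] ~ Gamma((mk + 1) / 2, λ² / 2)
    end
    β = Vector{Vector{T}}(undef, ngroups)
    for i in 1:ngroups
        β[i] ~ MvNormal(mk, σ² * τ²[i])
    end
    ntarg = size(y)[2]
    for i in 1:nobs
        # concrete mean vector, so MvNormal(mu, σ²) no longer hits the promotion loop
        mu = Vector{T}(undef, ntarg)
        for j in 1:ntarg
            mu[j] = dot(vec([x1[i, :]' x0[i, :]']), vec(β[1])) +
                    dot(vec([x2[i, :]' x0[i, :]']), vec(β[2]))
        end
        y[i, :] ~ MvNormal(mu, σ²)
    end
end

The call site is unchanged, i.e. grouped_lasso(yTrain, x1Train, x2Train, x0Train, σ², λ²), since T defaults to Float64. One separate thing I noticed while reading the docstrings: MvNormal(d, σ) and MvNormal(μ, σ) take a standard deviation, not a variance, so if eq. 6 really puts σ²τ²ₖ on the coefficients, sqrt(σ² * τ²[i]) (and sqrt(σ²) for the observation noise) may be what I actually want; that's a modeling question, though, not the cause of the crash.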