Here is the entire code:
using Turing, Random, Distributions
Random.seed!(746);
μ, σ, n₀ = 0, 1, 750 # Parameters of the generative process
y₀ = [zeros(n₀); rand(LogNormal(μ, σ), 1000 - n₀)] # The synthetic data
# Turing
@model function outcome(Y₀)
    # Assumptions
    μ ~ Uniform(0, 1)
    σ ~ Uniform(0, 1)
    N₀ ~ DiscreteUniform(1, 1000)
    # Observations
    Y₀ ~ arraydist([n < N₀ ? Dirac(0.0) : Normal(μ, σ) for n in 1:length(Y₀)])
    # Y₀ .~ filldist(Normal(μ, σ), N₀)
    return Y₀
end
# New idea: drop the leading structural zeros and condition the model only on the nonzero observations
Nzeros = findfirst(x -> x != 0.0, y₀) - 1
mymodel = outcome(@views y₀[Nzeros+1:end])
# MCMC sampling
chain_outcome = sample(mymodel, NUTS(0.65), 1000);
# MCMC Results
summarystats(chain_outcome)
Here’s the error message:
MethodError: no method matching Product(::Vector{UnivariateDistribution})
Closest candidates are:
Product(::V) where {S<:ValueSupport, T<:UnivariateDistribution{S}, V<:AbstractVector{T}} at ~/.julia/packages/Distributions/tFdHM/src/multivariate/product.jl:25
Stacktrace:
[1] arraydist(dists::Vector{UnivariateDistribution})
@ DistributionsAD ~/.julia/packages/DistributionsAD/bxCGB/src/arraydist.jl:6
[2] outcome(__model__::DynamicPPL.Model{typeof(outcome), (:Y₀,), (), (), Tuple{SubArray{Float64, 1, Vector{Float64}, Tuple{UnitRange{Int64}}, true}}, Tuple{}, DynamicPPL.DefaultContext}, __varinfo__::DynamicPPL.TypedVarInfo{NamedTuple{(:μ, :σ, :N₀), Tuple{DynamicPPL.Metadata{Dict{AbstractPPL.VarName{:μ, Setfield.IdentityLens}, Int64}, Vector{Uniform{Float64}}, Vector{AbstractPPL.VarName{:μ, Setfield.IdentityLens}}, Vector{Float64}, Vector{Set{DynamicPPL.Selector}}}, DynamicPPL.Metadata{Dict{AbstractPPL.VarName{:σ, Setfield.IdentityLens}, Int64}, Vector{Uniform{Float64}}, Vector{AbstractPPL.VarName{:σ, Setfield.IdentityLens}}, Vector{Float64}, Vector{Set{DynamicPPL.Selector}}}, DynamicPPL.Metadata{Dict{AbstractPPL.VarName{:N₀, Setfield.IdentityLens}, Int64}, Vector{DiscreteUniform}, Vector{AbstractPPL.VarName{:N₀, Setfield.IdentityLens}}, Vector{Int64}, Vector{Set{DynamicPPL.Selector}}}}}, Float64}, __context__::DynamicPPL.SamplingContext{DynamicPPL.SampleFromUniform, DynamicPPL.DefaultContext, Random._GLOBAL_RNG}, Y₀::SubArray{Float64, 1, Vector{Float64}, Tuple{UnitRange{Int64}}, true})
@ Main ~/Dropbox/University Life/Academic/Research/Working Papers/egalitarian equivalent treatment effects/Julia_prototypes/test_of_Turing.jl:16
[3] macro expansion
@ ~/.julia/packages/DynamicPPL/R7VK9/src/model.jl:493 [inlined]
[4] _evaluate!!
@ ~/.julia/packages/DynamicPPL/R7VK9/src/model.jl:476 [inlined]
[5] evaluate_threadunsafe!!
@ ~/.julia/packages/DynamicPPL/R7VK9/src/model.jl:451 [inlined]
[6] evaluate!!
@ ~/.julia/packages/DynamicPPL/R7VK9/src/model.jl:404 [inlined]
[7] evaluate!! (repeats 2 times)
@ ~/.julia/packages/DynamicPPL/R7VK9/src/model.jl:415 [inlined]
[8] initialstep(rng::Random._GLOBAL_RNG, model::DynamicPPL.Model{typeof(outcome), (:Y₀,), (), (), Tuple{SubArray{Float64, 1, Vector{Float64}, Tuple{UnitRange{Int64}}, true}}, Tuple{}, DynamicPPL.DefaultContext}, spl::DynamicPPL.Sampler{NUTS{Turing.Essential.ForwardDiffAD{0}, (), AdvancedHMC.DiagEuclideanMetric}}, vi::DynamicPPL.TypedVarInfo{NamedTuple{(:μ, :σ, :N₀), Tuple{DynamicPPL.Metadata{Dict{AbstractPPL.VarName{:μ, Setfield.IdentityLens}, Int64}, Vector{Uniform{Float64}}, Vector{AbstractPPL.VarName{:μ, Setfield.IdentityLens}}, Vector{Float64}, Vector{Set{DynamicPPL.Selector}}}, DynamicPPL.Metadata{Dict{AbstractPPL.VarName{:σ, Setfield.IdentityLens}, Int64}, Vector{Uniform{Float64}}, Vector{AbstractPPL.VarName{:σ, Setfield.IdentityLens}}, Vector{Float64}, Vector{Set{DynamicPPL.Selector}}}, DynamicPPL.Metadata{Dict{AbstractPPL.VarName{:N₀, Setfield.IdentityLens}, Int64}, Vector{DiscreteUniform}, Vector{AbstractPPL.VarName{:N₀, Setfield.IdentityLens}}, Vector{Int64}, Vector{Set{DynamicPPL.Selector}}}}}, Float64}; init_params::Nothing, nadapts::Int64, kwargs::Base.Pairs{Symbol, Union{}, Tuple{}, NamedTuple{(), Tuple{}}})
@ Turing.Inference ~/.julia/packages/Turing/S4Y4B/src/inference/hmc.jl:173
[9] step(rng::Random._GLOBAL_RNG, model::DynamicPPL.Model{typeof(outcome), (:Y₀,), (), (), Tuple{SubArray{Float64, 1, Vector{Float64}, Tuple{UnitRange{Int64}}, true}}, Tuple{}, DynamicPPL.DefaultContext}, spl::DynamicPPL.Sampler{NUTS{Turing.Essential.ForwardDiffAD{0}, (), AdvancedHMC.DiagEuclideanMetric}}; resume_from::Nothing, init_params::Nothing, kwargs::Base.Pairs{Symbol, Int64, Tuple{Symbol}, NamedTuple{(:nadapts,), Tuple{Int64}}})
@ DynamicPPL ~/.julia/packages/DynamicPPL/R7VK9/src/sampler.jl:104
[10] macro expansion
@ ~/.julia/packages/AbstractMCMC/fnRmh/src/sample.jl:120 [inlined]
[11] macro expansion
@ ~/.julia/packages/ProgressLogging/6KXlp/src/ProgressLogging.jl:328 [inlined]
[12] macro expansion
@ ~/.julia/packages/AbstractMCMC/fnRmh/src/logging.jl:9 [inlined]
[13] mcmcsample(rng::Random._GLOBAL_RNG, model::DynamicPPL.Model{typeof(outcome), (:Y₀,), (), (), Tuple{SubArray{Float64, 1, Vector{Float64}, Tuple{UnitRange{Int64}}, true}}, Tuple{}, DynamicPPL.DefaultContext}, sampler::DynamicPPL.Sampler{NUTS{Turing.Essential.ForwardDiffAD{0}, (), AdvancedHMC.DiagEuclideanMetric}}, N::Int64; progress::Bool, progressname::String, callback::Nothing, discard_initial::Int64, thinning::Int64, chain_type::Type, kwargs::Base.Pairs{Symbol, Int64, Tuple{Symbol}, NamedTuple{(:nadapts,), Tuple{Int64}}})
@ AbstractMCMC ~/.julia/packages/AbstractMCMC/fnRmh/src/sample.jl:111
[14] sample(rng::Random._GLOBAL_RNG, model::DynamicPPL.Model{typeof(outcome), (:Y₀,), (), (), Tuple{SubArray{Float64, 1, Vector{Float64}, Tuple{UnitRange{Int64}}, true}}, Tuple{}, DynamicPPL.DefaultContext}, sampler::DynamicPPL.Sampler{NUTS{Turing.Essential.ForwardDiffAD{0}, (), AdvancedHMC.DiagEuclideanMetric}}, N::Int64; chain_type::Type, resume_from::Nothing, progress::Bool, nadapts::Int64, discard_adapt::Bool, discard_initial::Int64, kwargs::Base.Pairs{Symbol, Union{}, Tuple{}, NamedTuple{(), Tuple{}}})
@ Turing.Inference ~/.julia/packages/Turing/S4Y4B/src/inference/hmc.jl:133
[15] sample
@ ~/.julia/packages/Turing/S4Y4B/src/inference/hmc.jl:103 [inlined]
[16] #sample#2
@ ~/.julia/packages/Turing/S4Y4B/src/inference/Inference.jl:145 [inlined]
[17] sample
@ ~/.julia/packages/Turing/S4Y4B/src/inference/Inference.jl:138 [inlined]
[18] #sample#1
@ ~/.julia/packages/Turing/S4Y4B/src/inference/Inference.jl:135 [inlined]
[19] sample(model::DynamicPPL.Model{typeof(outcome), (:Y₀,), (), (), Tuple{SubArray{Float64, 1, Vector{Float64}, Tuple{UnitRange{Int64}}, true}}, Tuple{}, DynamicPPL.DefaultContext}, alg::NUTS{Turing.Essential.ForwardDiffAD{0}, (), AdvancedHMC.DiagEuclideanMetric}, N::Int64)
@ Turing.Inference ~/.julia/packages/Turing/S4Y4B/src/inference/Inference.jl:129
[20] top-level scope
@ ~/Dropbox/University Life/Academic/Research/Working Papers/egalitarian equivalent treatment effects/Julia_prototypes/test_of_Turing.jl:28
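My current guess at the cause (not certain): the comprehension on the observation line mixes a discrete distribution (Dirac) with a continuous one (Normal), so the resulting vector's element type falls back to the abstract UnivariateDistribution, and the Product constructor that arraydist dispatches to only accepts vectors whose elements share a single concrete ValueSupport. A minimal check outside Turing (the cutoff 3 and length 5 below are just illustrative values, not from my model) reproduces the offending type:

using Distributions

# Mixing Dirac (a DiscreteUnivariateDistribution) with Normal (a
# ContinuousUnivariateDistribution) pushes the comprehension's eltype up to
# the abstract UnivariateDistribution, which Product has no method for.
dists = [n < 3 ? Dirac(0.0) : Normal(0.0, 1.0) for n in 1:5]
typeof(dists)          # Vector{UnivariateDistribution}, as in the stack trace
# Product(dists)       # would throw the same MethodError

# By contrast, a comprehension over a single distribution type keeps a
# concrete eltype, and Product/arraydist accepts it:
homogeneous = [Normal(0.0, 1.0) for n in 1:5]
typeof(homogeneous)    # Vector{Normal{Float64}}
Product(homogeneous)   # constructs fine

If that diagnosis is right, a type annotation on the comprehension won't help, since there is no common ValueSupport to annotate with; the zero observations would have to be handled some other way (for example, outside the arraydist call, or by marginalizing N₀ out of the likelihood). Is that the recommended approach, or is there a way to express this mixed discrete/continuous observation vector directly in Turing?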