I am trying to modify the example Poisson regression to suit my own needs, but I am running into an issue with the for loop and the likelihood expression in the model definition. Here is my script:
using DataFrames, Turing, Random, Distributions, StatsPlots
using MLDataUtils: rescale!
Random.seed!(4444)
can_cov_dist = truncated(Normal(70, 20), 0, 96)  # 0 is full closure, 96 is no canopy
sim_can_cov = rand(can_cov_dist, 100)
μ_can_cov, σ_can_cov = rescale!(sim_can_cov)
moss_div_dist = Poisson(12)
sim_moss_div = rand(moss_div_dist, 100)
@model function poisson_regression(x, y)
    n = size(x, 1)
    b0 = Normal(0, sqrt(3))
    b1 = Normal(0, sqrt(3))
    for i in 1:n  # changed from for i = 1:n, old syntax??
        theta = b0 + b1 * x[i, 1]
        y[i] ~ Poisson(exp(theta))
    end
end
my_mod = poisson_regression(sim_can_cov, sim_moss_div)
chain = sample(my_mod, NUTS(), 1000)
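As a sanity check on the data going in: rescale! standardizes sim_can_cov in place (returning the original mean and standard deviation), so the covariate handed to the model should have roughly mean 0 and sd 1. A quick check using the Statistics standard library:

using Statistics
mean(sim_can_cov), std(sim_can_cov)  # should be approximately (0.0, 1.0)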
When I run the script, I get the following error:
julia> chain = sample(my_mod, NUTS(), 1000)
ERROR: LoadError: MethodError: no method matching +(::Normal{Float64}, ::Normal{Float64})
Closest candidates are:
+(::Any, ::Any, ::Any, ::Any...) at ~/packages/julias/julia-1.7/share/julia/base/operators.jl:655
+(::Union{InitialValues.NonspecificInitialValue, InitialValues.SpecificInitialValue{typeof(+)}}, ::Any) at ~/.julia/packages/InitialValues/OWP8V/src/InitialValues.jl:154
+(::ChainRulesCore.Tangent{P}, ::P) where P at ~/.julia/packages/ChainRulesCore/uxrij/src/tangent_arithmetic.jl:146
...
Stacktrace:
[1] macro expansion
@ ~/Coding/Stan_coding/frstproject/test_data.jl:20 [inlined]
[2] macro expansion
@ ~/.julia/packages/DynamicPPL/c8MjC/src/compiler.jl:529 [inlined]
[3] poisson_regression(__model__::DynamicPPL.Model{typeof(poisson_regression), (:x, :y), (), (), Tuple{Vector{Float64}, Vector{Int64}}, Tuple{}, DynamicPPL.DefaultContext}, __varinfo__::DynamicPPL.UntypedVarInfo{DynamicPPL.Metadata{Dict{AbstractPPL.VarName, Int64}, Vector{Distribution}, Vector{AbstractPPL.VarName}, Vector{Real}, Vector{Set{DynamicPPL.Selector}}}, Float64}, __context__::DynamicPPL.SamplingContext{DynamicPPL.SampleFromUniform, DynamicPPL.DefaultContext, Random._GLOBAL_RNG}, x::Vector{Float64}, y::Vector{Int64})
@ Main ~/Coding/Stan_coding/frstproject/test_data.jl:19
[4] macro expansion
@ ~/.julia/packages/DynamicPPL/c8MjC/src/model.jl:489 [inlined]
[5] _evaluate!!
@ ~/.julia/packages/DynamicPPL/c8MjC/src/model.jl:472 [inlined]
[6] evaluate_threadunsafe!!
@ ~/.julia/packages/DynamicPPL/c8MjC/src/model.jl:447 [inlined]
[7] evaluate!!
@ ~/.julia/packages/DynamicPPL/c8MjC/src/model.jl:404 [inlined]
[8] evaluate!!
@ ~/.julia/packages/DynamicPPL/c8MjC/src/model.jl:415 [inlined]
[9] Model
@ ~/.julia/packages/DynamicPPL/c8MjC/src/model.jl:377 [inlined]
[10] VarInfo
@ ~/.julia/packages/DynamicPPL/c8MjC/src/varinfo.jl:127 [inlined]
[11] VarInfo
@ ~/.julia/packages/DynamicPPL/c8MjC/src/varinfo.jl:126 [inlined]
[12] step(rng::Random._GLOBAL_RNG, model::DynamicPPL.Model{typeof(poisson_regression), (:x, :y), (), (), Tuple{Vector{Float64}, Vector{Int64}}, Tuple{}, DynamicPPL.DefaultContext}, spl::DynamicPPL.Sampler{NUTS{Turing.Essential.ForwardDiffAD{40}, (), AdvancedHMC.DiagEuclideanMetric}}; resume_from::Nothing, kwargs::Base.Pairs{Symbol, Int64, Tuple{Symbol}, NamedTuple{(:nadapts,), Tuple{Int64}}})
@ DynamicPPL ~/.julia/packages/DynamicPPL/c8MjC/src/sampler.jl:81
[13] macro expansion
@ ~/.julia/packages/AbstractMCMC/BPJCW/src/sample.jl:123 [inlined]
[14] macro expansion
@ ~/.julia/packages/ProgressLogging/6KXlp/src/ProgressLogging.jl:328 [inlined]
[15] macro expansion
@ ~/.julia/packages/AbstractMCMC/BPJCW/src/logging.jl:8 [inlined]
[16] mcmcsample(rng::Random._GLOBAL_RNG, model::DynamicPPL.Model{typeof(poisson_regression), (:x, :y), (), (), Tuple{Vector{Float64}, Vector{Int64}}, Tuple{}, DynamicPPL.DefaultContext}, sampler::DynamicPPL.Sampler{NUTS{Turing.Essential.ForwardDiffAD{40}, (), AdvancedHMC.DiagEuclideanMetric}}, N::Int64; progress::Bool, progressname::String, callback::Nothing, discard_initial::Int64, thinning::Int64, chain_type::Type, kwargs::Base.Pairs{Symbol, Int64, Tuple{Symbol}, NamedTuple{(:nadapts,), Tuple{Int64}}})
@ AbstractMCMC ~/.julia/packages/AbstractMCMC/BPJCW/src/sample.jl:114
[17] sample(rng::Random._GLOBAL_RNG, model::DynamicPPL.Model{typeof(poisson_regression), (:x, :y), (), (), Tuple{Vector{Float64}, Vector{Int64}}, Tuple{}, DynamicPPL.DefaultContext}, sampler::DynamicPPL.Sampler{NUTS{Turing.Essential.ForwardDiffAD{40}, (), AdvancedHMC.DiagEuclideanMetric}}, N::Int64; chain_type::Type, resume_from::Nothing, progress::Bool, nadapts::Int64, discard_adapt::Bool, discard_initial::Int64, kwargs::Base.Pairs{Symbol, Union{}, Tuple{}, NamedTuple{(), Tuple{}}})
@ Turing.Inference ~/.julia/packages/Turing/Ja9Eg/src/inference/hmc.jl:133
[18] sample
@ ~/.julia/packages/Turing/Ja9Eg/src/inference/hmc.jl:116 [inlined]
[19] #sample#2
@ ~/.julia/packages/Turing/Ja9Eg/src/inference/Inference.jl:145 [inlined]
[20] sample
@ ~/.julia/packages/Turing/Ja9Eg/src/inference/Inference.jl:145 [inlined]
[21] #sample#1
@ ~/.julia/packages/Turing/Ja9Eg/src/inference/Inference.jl:135 [inlined]
[22] sample(model::DynamicPPL.Model{typeof(poisson_regression), (:x, :y), (), (), Tuple{Vector{Float64}, Vector{Int64}}, Tuple{}, DynamicPPL.DefaultContext}, alg::NUTS{Turing.Essential.ForwardDiffAD{40}, (), AdvancedHMC.DiagEuclideanMetric}, N::Int64)
@ Turing.Inference ~/.julia/packages/Turing/Ja9Eg/src/inference/Inference.jl:135
[23] top-level scope
@ ~/Coding/Stan_coding/frstproject/test_data.jl:26
in expression starting at /home/isaac/Coding/Stan_coding/frstproject/test_data.jl:26
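In case it helps narrow things down, the MethodError itself seems reproducible in the REPL without Turing at all. This is a minimal sketch based on the error message, where the literal 2.0 just stands in for x[i, 1]:

julia> using Distributions

julia> Normal(0, sqrt(3)) + Normal(0, sqrt(3)) * 2.0
ERROR: MethodError: no method matching +(::Normal{Float64}, ::Normal{Float64})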
I am fairly new to both Julia and Turing, so I'm not really sure what is happening here. Thanks for your help!