Turns out I was clumsy — fixing a mistake in the covariance matrix solves the positive-definiteness problem.
using Turing, StatsPlots #, LinearAlgebra
# Bayesian Pearson correlation for a two-column data matrix `x` (one row per
# observation). Samples means μ, scales σ₁/σ₂, and the correlation r of a
# bivariate normal. `T` sets the element type of μ so AD backends that trace
# with dual numbers still work.
@model function pearson_correlation(x, ::Type{T}=Float64) where T
    rows = size(x, 1)
    # Priors: vague normals on the means, uniform priors on the scales, and a
    # correlation kept strictly inside (-1, 1). With σ₁, σ₂ > 0 and |r| < 1
    # the covariance matrix below is always positive definite, so no explicit
    # isposdef guard is needed.
    μ = Vector{T}(undef, 2)
    μ[1] ~ Normal(0, 100)
    μ[2] ~ Normal(0, 100)
    σ₁ ~ Uniform(0, 100)
    σ₂ ~ Uniform(0, 100)
    r ~ Uniform(-0.99, 0.99)
    # 2×2 covariance matrix assembled from the scales and the correlation.
    Σ = [σ₁*σ₁    r*σ₁*σ₂;
         r*σ₁*σ₂  σ₂*σ₂]
    # Likelihood: each row of x is one bivariate-normal observation.
    for i = 1:rows
        x[i, :] ~ MvNormal(μ, Σ)
    end
end
# Response time (sec) vs. IQ: one row per subject.
x = [0.8 102; 1 98; 0.5 100; 0.9 105; 0.7 103; 0.4 110; 1.2 99; 1.4 87; 0.6 113; 1.1 89; 1.3 93]
# NUTS adapts its own step size during warmup, avoiding the warmup issues
# observed with a fixed-step HMC(0.05, 10) on this model.
chain = sample(pearson_correlation(x), NUTS(), 10000)
# Left panel: the raw data; right panel: the posterior of the correlation r.
plot(layout=(1, 2))
scatter!(x[:, 1], x[:, 2], legend=false, subplot=1, xlabel="Response Time (sec)", ylabel="IQ", title="Data space")
histogram!(chain[:r], subplot=2, bins=[-1:0.02:1;], normalize=:pdf, alpha=0.5, xlabel="Correlation", ylabel="Posterior density", title="Parameter space", legend=false)
Overall, it looks good.
EDIT: There were some warmup issues with HMC, but it works great with NUTS.