Odd, this runs fine for me:
using Turing, Random, Distributions, FFTW, Zygote
Turing.setadbackend(:zygote)
N = 32;
γ = 0.01;
x = LinRange(0,1,N+1)[2:end-1];
uᵗ(x) = x*(1-x);
Random.seed!(500); # set a seed for reproducibility
y_data = @. uᵗ(x) + γ * randn()
"""
`build_field` - Build a mean-zero Gaussian random field in one dimension with the (-Δ)^{-α} covariance operator
### Arguments
`ξ` - Vector of N(0,1) values
`α=1` - Smoothness parameter
"""
function build_field(ξ; α=one(eltype(ξ)))
    N = length(ξ)
    # construct the eigenvalues
    πk = π * (1:N)
    # NOTE: the factor 2N cancels the 1/(2N) normalization in ifft, and √2 is the sine-basis normalization
    c = 2N * sqrt(2)
    umid = @. c * ξ / πk^α
    # zero-pad to length 2N; the imaginary part of the ifft then gives the sine series on the grid
    uhat = [0; umid; zeros(N - 1)]
    # invert and keep the relevant imaginary part
    u = @views imag.(ifft(uhat)[N+2:end])
    return u
end
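If it helps to see what the FFT shortcut is computing, here is a sketch (not part of the original code) that builds the same field as an explicit sine series, u(x) = √2 Σₖ ξₖ sin(πkx)/(πk)^α, and checks it against build_field:
# --- optional sanity check (not part of the original code) ---
# Direct sine-series version of the same field.
# Taking the second half of the ifft output flips the sign of the even modes,
# which is statistically harmless since the ξₖ are symmetric; the (-1)^k below
# includes that sign so both versions agree to floating-point precision.
function build_field_direct(ξ; α=one(eltype(ξ)))
    N = length(ξ)
    x = (1:N-1) ./ N   # interior grid points j/N, matching x above
    u = zeros(eltype(ξ), N - 1)
    for k in 1:N
        u .+= (-1)^k * sqrt(2) * ξ[k] .* sin.(π * k .* x) ./ (π * k)^α
    end
    return u
end
ξ_test = randn(Xoshiro(1234), N)   # separate RNG so the seed above is untouched
@assert build_field(ξ_test) ≈ build_field_direct(ξ_test)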
@model function mean_recovery(y_data)
    ξ ~ filldist(Normal(), N)
    u = build_field(ξ)
    y_data .~ Normal.(u, γ)
end
model = mean_recovery(y_data)
chain = sample(model, NUTS(), MCMCThreads(), 10^3, 4)
Note that I switched to NUTS and reduced the number of requested samples. This model samples so well with NUTS that, out of the 4000 total draws, most parameters have effective sample sizes of over 10,000.
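If you want to verify that and look at the recovered field, something along these lines should work (a sketch; `summarystats` and `namesingroup` are MCMCChains utilities, and since build_field is linear in ξ, plugging in the posterior mean of ξ gives the posterior mean of the field):
using MCMCChains, Statistics   # for summarystats, namesingroup, mean

# ESS and R̂ for every parameter:
summarystats(chain)

# Posterior-mean field vs. the true mean uᵗ on the grid:
ξ_post = [mean(chain[p]) for p in namesingroup(chain, :ξ)]   # posterior means of ξ[1], …, ξ[N]
u_post = build_field(ξ_post)
maximum(abs.(u_post .- uᵗ.(x)))   # worst-case recovery error on the grid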
I’m not familiar with the interpolation packages. I’d say just try and see.