To follow up, the solution was quite simple once I figured out the failure point: I needed to define a mixed multivariate distribution type and implement DynamicPPL.vectorize(d::MixedMultivariateDistribution, r::NamedTuple) = [r...]. As far as I can tell, vectorize is how DynamicPPL flattens a sampled value into a vector for internal storage, so splatting the NamedTuple's fields is all that is needed. A simple working example can be found below for anyone who is interested.
using Distributions
using Turing
using Random
import Distributions: logpdf
import Distributions: loglikelihood
import Distributions: rand
import DynamicPPL: vectorize
import Base: length
# Value support for distributions that mix discrete and continuous components
abstract type Mixed <: ValueSupport end
const MixedMultivariateDistribution = Distribution{Multivariate, Mixed}
abstract type SSM1D <: ContinuousUnivariateDistribution end
abstract type SSM2D <: MixedMultivariateDistribution end
struct MyType{T<:Real} <: SSM2D
    μ::T
    σ::T
end
# Treat MyType as a scalar when broadcasting logpdf over vectors of observations
Base.broadcastable(x::MyType) = Ref(x)
# Tell DynamicPPL how to flatten a NamedTuple observation into a vector
vectorize(d::MixedMultivariateDistribution, r::NamedTuple) = [r...]
Base.length(d::MixedMultivariateDistribution) = 2
rand(d::MixedMultivariateDistribution) = rand(Random.default_rng(), d)
rand(d::MixedMultivariateDistribution, n::Int) = rand(Random.default_rng(), d, n)
function rand(rng::AbstractRNG, d::MyType)
    choice = rand(rng, 1:2)
    rt = rand(rng, LogNormal(d.μ, d.σ))
    return (;choice, rt)
end
function rand(rng::AbstractRNG, d::MyType, N::Int)
    choice = fill(0, N)
    rt = fill(0.0, N)
    for i in 1:N
        choice[i], rt[i] = rand(rng, d)
    end
    return (choice=choice, rt=rt)
end
function logpdf(d::MyType, choice::Int, rt::Float64)
    return logpdf(LogNormal(d.μ, d.σ), rt)
end
logpdf(d::MyType, data::NamedTuple) = logpdf(d, data.choice, data.rt)
loglikelihood(d::MyType, data::NamedTuple) = sum(logpdf.(d, data...))
@model function my_model(data)
    μ ~ Normal(0, 1)
    σ ~ truncated(Normal(0, 1), 0, Inf)
    data ~ MyType(μ, σ)
    return (;data, μ, σ)
end
# Simulate 10 observations, sample the posterior, and generate posterior predictions
data = rand(MyType(0, 1), 10)
chain = sample(my_model(data), NUTS(), 1_000)
predictions = predict(my_model(missing), chain)
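For anyone adapting this to their own model, the simulated data comes back as a NamedTuple of vectors, so a quick sanity check might look like the sketch below (describe comes from MCMCChains, which Turing re-exports, if I remember correctly):
data.choice        # 10 simulated choices (1 or 2)
data.rt            # 10 simulated reaction times drawn from the LogNormal
describe(chain)    # posterior summaries for μ and σ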
@DominiqueMakowski, can you please mark this as the solution? Thanks!