Custom Likelihood Distribution/increment likelihood

Hi, I’m fairly new to Julia, so my apologies if this is an obvious question. I’m trying to create a model to sample from the log-likelihood function below. The matrix C is a constant and Δ follows a Dirichlet distribution. For reasons that don’t particularly matter here, j is always either 3 or 9, and i can be any integer greater than 1.
$$\log \mathcal{L}(\Delta) = \sum_{i} \log\left(\sum_{j} C_{ij}\, \Delta_j\right)$$
I built this model using Stan by simply incrementing the log density directly.

data {
  int<lower=0> i;
  int<lower=0> j;
  vector[j] alpha;
  matrix[i, j] C;
}
parameters {
  simplex[j] Delta;  // Delta must be declared as a parameter; simplex, since it gets a Dirichlet prior
}
model {
  Delta ~ dirichlet(alpha);
  target += log(C * Delta);
}
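(As I understand Stan’s semantics, adding a vector to target sums its elements, so that last line adds $\sum_i \log\big((C\Delta)_i\big)$ to the log density.)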

Now I’m trying to convert this model to use Turing.jl, and it looks like the best way to do that is to create a custom likelihood distribution. I’m trying to follow along with the guide here: https://turing.ml/stable/docs/using-turing/advanced and this previous response: Custom likelihood with Turing.jl - #3 by Christopher_Fisher, but I seem to be going wrong somewhere.

using Turing, Distributions, Parameters  # Parameters.jl provides @unpack
import Base: rand
import Distributions: logpdf

C = [0.089398 0.0267295 0.007992; 0.0 0.000164962 4.31057e-5; 0.0170703 0.00223028 0.000291394; 0.0 0.0 0.00431887]

struct Δ_dist{c, δ} <: ContinuousMultivariateDistribution
  C::c
  Δ::δ
end

Broadcast.broadcastable(x::Δ_dist) = Ref(x)

Δ_dist(;C, Δ) = Δ_dist(C, Δ)

function rand(dist::Δ_dist)
    @unpack C, Δ = dist
    # draw a placeholder sample: a flat Dirichlet over the columns of C
    return rand(Dirichlet(size(C, 2), 1 / size(C, 2)))
end

rand(dist::Δ_dist, N::Int) = [rand(dist) for i in 1:N]

function logpdf(d::Δ_dist)
    @unpack C, Δ = d
    return sum(log.(C * Δ))
end

logpdf(d::Δ_dist, data::Tuple) = logpdf(d, data...)

@model dyadRel(C) = begin
  nCrosses = size(C)[2]
  d ~ Dirichlet(nCrosses, 1 / nCrosses)
  Δ ~ Δ_dist(C, d)
end

sample(dyadRel(C), NUTS(0.5), 100)
julia> sample(dyadRel(C), NUTS(0.5), 100)
ERROR: MethodError: no method matching length(::Δ_dist{Array{Float64,2},Array{Float64,1}})
Closest candidates are:
  length(::Sampleable{Multivariate,S} where S<:ValueSupport) at C:\Users\jdsel\.julia\packages\Distributions\RAeyY\src\common.jl:39
  length(::Sampleable) at C:\Users\jdsel\.julia\packages\Distributions\RAeyY\src\common.jl:37
  length(::Core.SimpleVector) at essentials.jl:596
  ...
Stacktrace:
 [1] length(::Δ_dist{Array{Float64,2},Array{Float64,1}}) at C:\Users\jdsel\.julia\packages\Distributions\RAeyY\src\common.jl:39
 [2] rand(::Random._GLOBAL_RNG, ::Δ_dist{Array{Float64,2},Array{Float64,1}}) at C:\Users\jdsel\.julia\packages\Distributions\RAeyY\src\multivariates.jl:76
 [3] init(::Random._GLOBAL_RNG, ::Δ_dist{Array{Float64,2},Array{Float64,1}}, ::DynamicPPL.SampleFromPrior) at C:\Users\jdsel\.julia\packages\DynamicPPL\MRwtL\src\sampler.jl:10
 [4] assume(::Random._GLOBAL_RNG, ::DynamicPPL.SampleFromPrior, ::Δ_dist{Array{Float64,2},Array{Float64,1}}, ::DynamicPPL.VarName{:Δ,Tuple{}}, ::DynamicPPL.ThreadSafeVarInfo{DynamicPPL.VarInfo{DynamicPPL.Metadata{Dict{DynamicPPL.VarName,Int64},Array{Distribution,1},Array{DynamicPPL.VarName,1},Array{Real,1},Array{Set{DynamicPPL.Selector},1}},Float64},Array{Base.RefValue{Float64},1}}) at C:\Users\jdsel\.julia\packages\DynamicPPL\MRwtL\src\context_implementations.jl:138
 [5] _tilde at C:\Users\jdsel\.julia\packages\DynamicPPL\MRwtL\src\context_implementations.jl:59 [inlined]
 [6] tilde at C:\Users\jdsel\.julia\packages\DynamicPPL\MRwtL\src\context_implementations.jl:23 [inlined]
 [7] tilde_assume(::Random._GLOBAL_RNG, ::DynamicPPL.DefaultContext, ::DynamicPPL.SampleFromPrior, ::Δ_dist{Array{Float64,2},Array{Float64,1}}, ::DynamicPPL.VarName{:Δ,Tuple{}}, ::Tuple{}, ::DynamicPPL.ThreadSafeVarInfo{DynamicPPL.VarInfo{DynamicPPL.Metadata{Dict{DynamicPPL.VarName,Int64},Array{Distribution,1},Array{DynamicPPL.VarName,1},Array{Real,1},Array{Set{DynamicPPL.Selector},1}},Float64},Array{Base.RefValue{Float64},1}}) at C:\Users\jdsel\.julia\packages\DynamicPPL\MRwtL\src\context_implementations.jl:52
 [8] macro expansion at .\REPL[375]:4 [inlined]
 [9] ##evaluator#498(::Random._GLOBAL_RNG, ::DynamicPPL.Model{var"###evaluator#498",(:C,),Tuple{Array{Float64,2}},(),DynamicPPL.ModelGen{var"###generator#499",(:C,),(),Tuple{}}}, ::DynamicPPL.ThreadSafeVarInfo{DynamicPPL.VarInfo{DynamicPPL.Metadata{Dict{DynamicPPL.VarName,Int64},Array{Distribution,1},Array{DynamicPPL.VarName,1},Array{Real,1},Array{Set{DynamicPPL.Selector},1}},Float64},Array{Base.RefValue{Float64},1}}, ::DynamicPPL.SampleFromPrior, ::DynamicPPL.DefaultContext) at C:\Users\jdsel\.julia\packages\DynamicPPL\MRwtL\src\compiler.jl:356
 [10] evaluate_threadsafe(::Random._GLOBAL_RNG, ::DynamicPPL.Model{var"###evaluator#498",(:C,),Tuple{Array{Float64,2}},(),DynamicPPL.ModelGen{var"###generator#499",(:C,),(),Tuple{}}}, ::DynamicPPL.VarInfo{DynamicPPL.Metadata{Dict{DynamicPPL.VarName,Int64},Array{Distribution,1},Array{DynamicPPL.VarName,1},Array{Real,1},Array{Set{DynamicPPL.Selector},1}},Float64}, ::DynamicPPL.SampleFromPrior, ::DynamicPPL.DefaultContext) at C:\Users\jdsel\.julia\packages\DynamicPPL\MRwtL\src\model.jl:177
 [11] Model at C:\Users\jdsel\.julia\packages\DynamicPPL\MRwtL\src\model.jl:138 [inlined]
 [12] Model at C:\Users\jdsel\.julia\packages\DynamicPPL\MRwtL\src\model.jl:126 [inlined]
 [13] VarInfo at C:\Users\jdsel\.julia\packages\DynamicPPL\MRwtL\src\varinfo.jl:110 [inlined]
 [14] VarInfo at C:\Users\jdsel\.julia\packages\DynamicPPL\MRwtL\src\varinfo.jl:109 [inlined]
 [15] DynamicPPL.Sampler(::NUTS{Turing.Core.ForwardDiffAD{40},(),AdvancedHMC.DiagEuclideanMetric}, ::DynamicPPL.Model{var"###evaluator#498",(:C,),Tuple{Array{Float64,2}},(),DynamicPPL.ModelGen{var"###generator#499",(:C,),(),Tuple{}}}, ::DynamicPPL.Selector) at C:\Users\jdsel\.julia\packages\Turing\GMBTf\src\inference\hmc.jl:378
 [16] Sampler at C:\Users\jdsel\.julia\packages\Turing\GMBTf\src\inference\hmc.jl:376 [inlined]
 [17] sample(::Random._GLOBAL_RNG, ::DynamicPPL.Model{var"###evaluator#498",(:C,),Tuple{Array{Float64,2}},(),DynamicPPL.ModelGen{var"###generator#499",(:C,),(),Tuple{}}}, ::NUTS{Turing.Core.ForwardDiffAD{40},(),AdvancedHMC.DiagEuclideanMetric}, ::Int64; kwargs::Base.Iterators.Pairs{Union{},Union{},Tuple{},NamedTuple{(),Tuple{}}}) at C:\Users\jdsel\.julia\packages\Turing\GMBTf\src\inference\Inference.jl:163
 [18] sample at C:\Users\jdsel\.julia\packages\Turing\GMBTf\src\inference\Inference.jl:163 [inlined]
 [19] #sample#1 at C:\Users\jdsel\.julia\packages\Turing\GMBTf\src\inference\Inference.jl:153 [inlined]
 [20] sample(::DynamicPPL.Model{var"###evaluator#498",(:C,),Tuple{Array{Float64,2}},(),DynamicPPL.ModelGen{var"###generator#499",(:C,),(),Tuple{}}}, ::NUTS{Turing.Core.ForwardDiffAD{40},(),AdvancedHMC.DiagEuclideanMetric}, ::Int64) at C:\Users\jdsel\.julia\packages\Turing\GMBTf\src\inference\Inference.jl:153
 [21] top-level scope at REPL[376]:100

Am I going about implementing this likelihood function the right way? Any help would be hugely appreciated!
Thank you!

As in Stan, you can simply increment the log joint in Turing.

See the example listed here: Advanced Usage

(Under Model Internals)
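If you do still want the custom-distribution route from your first attempt: as far as I can tell, the MethodError comes from Turing calling Distributions.jl’s generic rand(rng, d), which dispatches through the multivariate interface (Base.length, Distributions._rand!, Distributions._logpdf) rather than through your own rand/logpdf methods. A rough, untested sketch of that interface (note that NUTS would additionally need a bijector for the simplex-valued support):

using Distributions, Random

# Sketch only: the distribution carries C; the variate is Δ itself, not a field.
# Named ΔDist here to avoid clashing with the Δ_dist above.
struct ΔDist{M<:AbstractMatrix} <: ContinuousMultivariateDistribution
    C::M
end

# Dimension of the variate — the method the MethodError was asking for.
Base.length(d::ΔDist) = size(d.C, 2)

# In-place sampler used by the generic rand(rng, d).
function Distributions._rand!(rng::Random.AbstractRNG, d::ΔDist, x::AbstractVector{<:Real})
    n = length(d)
    x .= rand(rng, Dirichlet(n, 1 / n))  # flat Dirichlet draw as a placeholder
    return x
end

# Unnormalized log density from the model: Σᵢ log((CΔ)ᵢ).
Distributions._logpdf(d::ΔDist, x::AbstractVector{<:Real}) = sum(log.(d.C * x))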

Awesome, thank you!

Am I able to increment the log joint using the @model macro form?

When I try to run the code block below, I get a MethodError from DynamicPPL.

using Turing

# Initialize a NamedTuple containing our data variables.
data = (x = [1.5, 2.0],)

# Create the model function.
mf(vi, sampler, ctx, model) = begin
    # Set the accumulated logp to zero.
    resetlogp!(vi)
    x = model.args.x

    # Assume s has an InverseGamma distribution.
    s, lp = Turing.Inference.tilde(
        ctx,
        sampler,
        InverseGamma(2, 3),
        Turing.@varname(s),
        (),
        vi,
    )

    # Add the lp to the accumulated logp.
    acclogp!(vi, lp)

    # Assume m has a Normal distribution.
    m, lp = Turing.Inference.tilde(
        ctx,
        sampler,
        Normal(0, sqrt(s)),
        Turing.@varname(m),
        (),
        vi,
    )

    # Add the lp to the accumulated logp.
    acclogp!(vi, lp)

    # Observe each value of x[i], according to a
    # Normal distribution.
    lp = Turing.Inference.dot_tilde(ctx, sampler, Normal(m, sqrt(s)), x, vi)
    acclogp!(vi, lp)
end

# Instantiate a Model object.
model = DynamicPPL.Model(mf, data, DynamicPPL.ModelGen{()}(nothing, nothing))

# Sample the model.
chain = sample(model, HMC(0.1, 5), 1000)
julia> model = DynamicPPL.Model(mf, data, DynamicPPL.ModelGen{()}(nothing, nothing))
ERROR: MethodError: no method matching DynamicPPL.ModelGen{(),argnames,defaultnames,Tdefaults} where Tdefaults where defaultnames where argnames(::Nothing, ::Nothing)
Closest candidates are:
  DynamicPPL.ModelGen{(),argnames,defaultnames,Tdefaults} where Tdefaults where defaultnames where argnames(::G, ::NamedTuple{defaultnames,Tdefaults}) where {G, argnames, defaultnames, Tdefaults} at C:\Users\jdsel\.julia\packages\DynamicPPL\MRwtL\src\model.jl:18
Stacktrace:
 [1] top-level scope at REPL[87]:100 
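(Judging by the closest candidate in that MethodError, the constructor’s second argument needs to be a NamedTuple rather than nothing, so I’d guess something like the following would at least construct, though I haven’t verified it:)

model = DynamicPPL.Model(mf, data, DynamicPPL.ModelGen{()}(nothing, NamedTuple()))  # hypothetical fix inferred from the error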

I’m also unable to find anything with ?acclogp, which I assume is how I increment the log joint, so I’m not sure how to use it inside the macro form of the model code, if that’s even possible.

Thanks for any advice you can offer. I’m using Turing.jl v0.13.0.

Yes, you can increment the log joint in the @model block. I’m not at my computer at the moment, but I can post an example once I’m back.


Ok, here is an example implementation of the model in question.

@model function dyadRel(C)
    nCrosses = size(C)[2]
    d ~ Dirichlet(nCrosses, 1 / nCrosses)
    target = sum(log.(C * d))
    Turing.acclogp!(target)
end

Thank you! When I try running that code, I get an error about no method matching acclogp!(::Float64).


C = [0.089398 0.0267295 0.007992; 0.0 0.000164962 4.31057e-5; 0.0170703 0.00223028 0.000291394; 0.0 0.0 0.00431887]
@model function dyadRel(C)
    nCrosses = size(C)[2]
    d ~ Dirichlet(nCrosses, 1 / nCrosses)
    target = sum(log.(C * d))
    Turing.acclogp!(target)
end
sample(dyadRel(C), NUTS(0.5), MCMCThreads(), 100, 4)

julia> sample(dyadRel(C), NUTS(0.5), MCMCThreads(), 100, 4)
ERROR: MethodError: no method matching acclogp!(::Float64)
Closest candidates are:
  acclogp!(::DynamicPPL.VarInfo, ::Any) at C:\Users\jdsel\.julia\packages\DynamicPPL\MRwtL\src\varinfo.jl:617
  acclogp!(::DynamicPPL.ThreadSafeVarInfo, ::Any) at C:\Users\jdsel\.julia\packages\DynamicPPL\MRwtL\src\threadsafe.jl:19

When I modify the code based on what I had been guessing, passing _varinfo as described in the [ANN] Turing.jl 0.12.0 release post, the model at least runs. But I get a warning about using an internal variable: Warning: you are using the internal variable _varinfo. Is this warning something I should be concerned about?

C = [0.089398 0.0267295 0.007992; 0.0 0.000164962 4.31057e-5; 0.0170703 0.00223028 0.000291394; 0.0 0.0 0.00431887]
@model dyadRel(C) = begin
  nCrosses = size(C)[2]
  Δ ~ Dirichlet(nCrosses, 1 / nCrosses)

  lp = sum(log.(C * Δ))
  Turing.acclogp!(_varinfo, lp)
end
sample(dyadRel(C), NUTS(0.5), MCMCThreads(), 100, 4)

Iterations        = 1:50
Thinning interval = 1
Chains            = 1, 2, 3, 4
Samples per chain = 50
internals         = acceptance_rate, hamiltonian_energy, hamiltonian_energy_error, is_accept, log_density, lp, max_hamiltonian_energy_error, n_steps, nom_step_size, numerical_error, step_size, tree_depth
parameters        = Δ[1], Δ[2], Δ[3]

2-element Array{ChainDataFrame,1}

Summary Statistics
  parameters    mean     std  naive_se    mcse       ess   r_hat
  ──────────  ──────  ──────  ────────  ──────  ────────  ──────
        Δ[1]  0.3466  0.2479    0.0175  0.0018  111.7912  1.0274
        Δ[2]  0.2571  0.2373    0.0168  0.0310  125.2170  1.0299
        Δ[3]  0.3963  0.2427    0.0172  0.0293  132.4176  1.0059

Quantiles
  parameters    2.5%   25.0%   50.0%   75.0%   97.5%
  ──────────  ──────  ──────  ──────  ──────  ──────
        Δ[1]  0.0003  0.1239  0.3212  0.5270  0.8542
        Δ[2]  0.0001  0.0385  0.1770  0.4398  0.7939
        Δ[3]  0.0569  0.2049  0.3708  0.5697  0.8781

Thank you for all your help on this!


Yes, sorry, I forgot to pass the _varinfo. We should have a function that doesn’t require the user to know about varinfo in the first place.
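(Edit for future readers: later Turing/DynamicPPL releases added exactly such a function, the @addlogprob! macro, so on a recent version the model should be expressible as something like:)

# Assumes a newer Turing release than the 0.13.0 used above, which exports @addlogprob!
@model function dyadRel(C)
    nCrosses = size(C, 2)
    Δ ~ Dirichlet(nCrosses, 1 / nCrosses)
    Turing.@addlogprob! sum(log.(C * Δ))  # increment the log joint directly
end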

Thanks again!