Hi all-

I have two related questions. First, how do I implement a custom likelihood in turing.jl? Unfortunately, I was unable to implement a simple example based on the documentation, which is probably impeding my ability to identify problems in the second question: How to I implement a custom likelihood that has two potential complications? In my particular use case, my model has vectors of related parameters. This is convenient because the vector may have different sizes in different applications and it allows vectorization of certain operations. In some versions of the model, I would like to fix some elements in the vector, while allowing others to update with the data (see model definition below). The other complicating factor in my use case is that the likelihood requires computationally intensive operations, which cause it to scale linearly with the number of data. Thus, its dramatically faster to perform those operations once and then loop over the data to evaluate the loglikelihood. Otherwise, the model will run 2-3 orders of magnitude slower in my cases.

Below, I posted a toy example with essential features that are used in a more realistic application. Any help would be greatly appreciated.

```
using Turing,Distributions
import Distributions: pdf, logpdf
#In most applications, i need the parameters to be vectors
mutable struct mydistribution{T} <: ContinuousUnivariateDistribution
ν::T
α::T
end
function pdf(d::mydistribution,T::AbstractArray)
#do some expensive computations
likelihoods = fill(0.0,length(T))
for (i,t) in enumerate(T)
likelihoods[i] = sum(@. .5*pdf(Normal.(d.ν,d.α),t))
end
#return array of likelihoods
return likelihoods
end
#Returns sum of loglikelihood for all data
logpdf(d::mydistribution,T::AbstractArray) = sum(log.(pdf(d,T)))
@model myModel(data) = begin
α = fill(.85,3) #fixed parameter
ν1 ~ Normal(15,5)
ν2 ~ Normal(15,5)
N = length(data)
#fix the third element to zero
data ~ mydistribution([ν1,ν2,0.0],α)
end
# Settings of the Hamiltonian Monte Carlo (HMC) sampler.
iterations = 1000
ϵ = 0.05
τ = 10
data = rand(Normal(0,1),50)
chain = sample(myModel(data), HMC(iterations, ϵ, τ))
```

I receive the following error:

```
MethodError: no method matching mydistribution(::Array{Flux.Tracker.TrackedReal{Float64},1}, ::Array{Float64,1})
Closest candidates are:
mydistribution(::T, !Matched::T) where T at /home/dfish/Projects/ACT-R-Likelihood/FFT Models/LogNormalACTR/temp.jl:6
macro expansion at compiler.jl:56 [inlined]
#myModel_model#18(::Array{Float64,1}, ::Function, ::Turing.VarReplay.VarInfo, ::Turing.Sampler{HMCDA{Any}}) at utils.jl:297
myModel_model(::Turing.VarReplay.VarInfo, ::Turing.Sampler{HMCDA{Any}}) at utils.jl:297
#invokelatest#1 at essentials.jl:697 [inlined]
invokelatest at essentials.jl:696 [inlined]
runmodel!(::Function, ::Turing.VarReplay.VarInfo, ::Turing.Sampler{HMCDA{Any}}) at hmc_core.jl:65
(::getfield(Turing, Symbol("#f#115")){Turing.VarReplay.VarInfo,typeof(myModel_model),Turing.Sampler{HMCDA{Any}}})(::TrackedArray{…,Array{Float64,1}}) at ad.jl:92
#8 at back.jl:139 [inlined]
forward at back.jl:126 [inlined]
forward(::Function, ::Array{Real,1}) at back.jl:139
gradient_reverse(::Array{Real,1}, ::Turing.VarReplay.VarInfo, ::Function, ::Turing.Sampler{HMCDA{Any}}) at ad.jl:96
gradient at ad.jl:23 [inlined]
gradient at ad.jl:19 [inlined]
#67 at hmc_core.jl:10 [inlined]
#_leapfrog#75(::getfield(Turing, Symbol("##71#72")){Turing.VarReplay.VarInfo,Turing.Sampler{HMCDA{Any}}}, ::getfield(Turing, Symbol("##73#74")){Turing.Sampler{HMCDA{Any}}}, ::Function, ::Array{Real,1}, ::Array{Float64,1}, ::Int64, ::Float64, ::getfield(Turing, Symbol("##67#68")){Turing.VarReplay.VarInfo,Turing.Sampler{HMCDA{Any}},typeof(myModel_model)}) at hmc_core.jl:93
(::getfield(Turing, Symbol("#kw##_leapfrog")))(::NamedTuple{(:rev_func, :log_func),Tuple{getfield(Turing, Symbol("##71#72")){Turing.VarReplay.VarInfo,Turing.Sampler{HMCDA{Any}}},getfield(Turing, Symbol("##73#74")){Turing.Sampler{HMCDA{Any}}}}}, ::typeof(Turing._leapfrog), ::Array{Real,1}, ::Array{Float64,1}, ::Int64, ::Float64, ::Function) at none:0
macro expansion at logging.jl:312 [inlined]
#_hmc_step#83(::Function, ::Function, ::Function, ::Array{Real,1}, ::Float64, ::getfield(Turing, Symbol("##69#70")){Turing.VarReplay.VarInfo,Turing.Sampler{HMCDA{Any}},typeof(myModel_model)}, ::Function, ::Float64, ::Float64, ::Array{Float64,1}) at hmcda.jl:150
(::getfield(Turing, Symbol("#kw##_hmc_step")))(::NamedTuple{(:rev_func, :log_func),Tuple{getfield(Turing, Symbol("##71#72")){Turing.VarReplay.VarInfo,Turing.Sampler{HMCDA{Any}}},getfield(Turing, Symbol("##73#74")){Turing.Sampler{HMCDA{Any}}}}}, ::typeof(Turing._hmc_step), ::Array{Real,1}, ::Float64, ::Function, ::Function, ::Float64, ::Float64, ::Array{Float64,1}) at none:0
macro expansion at logging.jl:319 [inlined]
step(::Function, ::Turing.Sampler{HMCDA{Any}}, ::Turing.VarReplay.VarInfo, ::Bool) at hmcda.jl:82
macro expansion at logging.jl:309 [inlined]
#sample#94(::Int64, ::Bool, ::Nothing, ::Int64, ::Nothing, ::Function, ::Function, ::HMC{Any}) at hmc.jl:148
sample(::Function, ::HMC{Any}) at hmc.jl:104
top-level scope at none:0
include_string(::Module, ::String, ::String) at loading.jl:1005
include_string(::Module, ::String, ::String, ::Int64) at eval.jl:30
(::getfield(Atom, Symbol("##117#123")){String,Int64,String})() at eval.jl:94
withpath(::getfield(Atom, Symbol("##117#123")){String,Int64,String}, ::String) at utils.jl:30
withpath at eval.jl:46 [inlined]
#116 at eval.jl:93 [inlined]
with_logstate(::getfield(Atom, Symbol("##116#122")){String,Int64,String}, ::Base.CoreLogging.LogState) at logging.jl:397
with_logger at logging.jl:493 [inlined]
#115 at eval.jl:92 [inlined]
hideprompt(::getfield(Atom, Symbol("##115#121")){String,Int64,String}) at repl.jl:85
macro expansion at eval.jl:91 [inlined]
(::getfield(Atom, Symbol("##114#120")){Dict{String,Any}})() at task.jl:85
```

Wrapping the data in an array, but that caused other problems.