This runs. I just used plain arrays; it's a bit ugly, but I think it's simpler. I also passed a few variables into your function as arguments to avoid globals, and added a minus sign to the error function (if you want to maximize the likelihood, you have to minimize the negative log-likelihood).
using Distributions, Optim, Statistics  # Statistics provides cor
nObs = 1000
const X = [ones(nObs) rand(0:1, nObs, 3)]               # design matrix: intercept + 3 dummies
nVars = size(X, 2)
const Y = rand(truncated(Normal(0.5), 0, 1), nObs, 2)   # simulated shares in (0, 1)
nShares = size(Y, 2)
function censQML(p, nObs, nVars, nShares)
    # unpack the stacked parameter vector: coefficients first, then scales
    δ = reshape(p[1:nVars*nShares], nVars, nShares)
    σ = p[(nVars*nShares+1):end]
    e = Y .- X * δ                      # residuals are the same for every pair, so compute them once
    l = zeros(nObs, nShares, nShares)   # pairwise log-likelihood contributions
    for i in 1:nShares
        for j in 1:nShares
            if j != i
                ρ = tanh(cor(e[:,i], e[:,j]))   # squashes the correlation into (-1, 1)
                w = e[:,i].^2 + e[:,j].^2 - 2ρ .* e[:,i] .* e[:,j]
                # standard bivariate normals with correlation ±ρ, built once per pair
                dpos = MvNormal(zeros(2), [1.0 ρ; ρ 1.0])
                dneg = MvNormal(zeros(2), [1.0 -ρ; -ρ 1.0])
                for t in 1:nObs
                    e_i = e[t,i] / σ[i]
                    e_j = e[t,j] / σ[j]
                    if (Y[t,i] == 0) && (Y[t,j] == 0)
                        l[t,i,j] = logpdf(dpos, [e_i, e_j])
                    elseif (Y[t,i] == 1) && (Y[t,j] == 0)
                        l[t,i,j] = logpdf(dneg, [-e_i, e_j])
                    elseif (Y[t,i] == 0) && (Y[t,j] == 1)
                        l[t,i,j] = logpdf(dneg, [e_i, -e_j])
                    elseif (0 < Y[t,i] < 1) && (0 < Y[t,j] < 1)
                        l[t,i,j] = -log(2*pi*sqrt(1 - ρ^2)) - σ[i] - σ[j] - w[t] / (2*(1 - ρ^2))
                    elseif (0 < Y[t,i] < 1) && (Y[t,j] == 0)
                        l[t,i,j] = -σ[i] + logpdf(Normal(0, σ[i]), e_i)   # same as log(1/exp(σ)*pdf(...)), but stabler
                    elseif (Y[t,i] == 0) && (0 < Y[t,j] < 1)
                        l[t,i,j] = -σ[j] + logpdf(Normal(0, σ[j]), e_j)
                    end
                end
            end
        end
    end
    return sum(l)
end
δ0 = ones(nVars, nShares)   # starting values for the coefficients
σ0 = ones(nShares)          # starting values for the scales
pinit = [vec(δ0); σ0]
errorfun = p -> -censQML(p, nObs, nVars, nShares)   # flip the sign: maximize the log-likelihood by minimizing
@assert errorfun(pinit) isa Real   # sanity check: the objective returns a scalar
@time errorfun(pinit)
res = optimize(errorfun, pinit, NelderMead())
pmin = Optim.minimizer(res)
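If it helps, you can unpack the minimizer the same way censQML unpacks p, and check that Nelder-Mead actually reported convergence (the δhat/σhat names are just mine):
Optim.converged(res)                                    # did the optimizer terminate successfully?
δhat = reshape(pmin[1:nVars*nShares], nVars, nShares)   # fitted coefficients, one column per share
σhat = pmin[(nVars*nShares+1):end]                      # fitted scale parameters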