The issue is that automatic differentiation evaluates your model with ForwardDiff.Dual numbers instead of Float64, so the hard-coded element type in b = Array{Float64,2}(undef, k, m) throws an error the moment a Dual value is written into the array.
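You can reproduce the problem outside Turing. Here's a minimal sketch (the array A and the closure are just for illustration) of ForwardDiff trying to write a Dual into a Float64 container:

using ForwardDiff

A = Array{Float64,2}(undef, 1, 1)

# setindex! attempts convert(Float64, ::ForwardDiff.Dual), which fails with
# a MethodError like "no method matching Float64(::ForwardDiff.Dual{...})"
ForwardDiff.derivative(t -> (A[1, 1] = t; A[1, 1]^2), 1.0)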
Fortunately, there’s a quick fix for this. Turing supports type-parameterized models: declare the container types as model arguments with Float64 defaults, and Turing substitutes Dual-compatible types whenever the sampler differentiates the model. So you can just change your model to
@model function grm(y, n, m, k, ::Type{TM}=Array{Float64,2}, ::Type{TV}=Vector{Float64}) where {TM, TV}
    # Item thresholds; TM becomes a Dual-valued matrix type under AD
    b = TM(undef, k, m)
    for i in 1:m
        b[1, i] ~ Normal(0, 1)
        for j in 2:k
            # Keep thresholds ordered within each item
            b[j, i] ~ truncated(Normal(0, 1), b[j-1, i], Inf)
        end
    end
    # probs must share the parameters' element type, otherwise the same
    # conversion error reappears when the Dual-valued Q is stored below
    probs = zeros(eltype(TV), k, m, n)
    theta = TV(undef, n)
    for p in 1:n
        theta[p] ~ Normal(0, 1)
        for i in 1:m
            probs[1, i, p] = 1.0
            for j in 1:(k-1)
                Q = invlogit(theta[p] - b[j, i])
                probs[j, i, p] -= Q
                probs[j+1, i, p] = Q
            end
            y[i, p] ~ Categorical(probs[:, i, p])
        end
    end
    return theta, b
end;
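Once that runs, sampling works as usual. A quick usage sketch, assuming your data y and the dimensions n, m, k are already defined:

using Turing

chain = sample(grm(y, n, m, k), NUTS(), 1_000)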
Give that a shot and let me know if it works for you.