Hi, thanks for using my package!
I think you’re right — this is related to creating arrays with the proper element type for automatic differentiation. These changes might be enough to fix the initial error:
function with_endstates(v, size)
    # Return a vector of length `size` whose leading entries are the elements
    # of `v` and whose remaining entries are zero. Allocating with `eltype(v)`
    # keeps the buffer compatible with AD tracer element types.
    # (NB: the parameter `size` shadows `Base.size` inside this function.)
    a = zeros(eltype(v), size)
    i = 0
    for x in v
        i += 1
        a[i] = x
    end
    return a
end
function with_endstates(M::AbstractMatrix, size; fld=size)
    # Start from a dense identity matrix of the requested size, allocated with
    # M's element type so AD tracer types propagate through the result.
    A = Matrix(Diagonal(ones(eltype(M), size)))
    # Walk M in its natural (column-major) iteration order and scatter each
    # entry into A at (fld1(k, fld), mod1(k, fld)).
    # NOTE(review): with the default `fld = size` this lays M's columns out as
    # rows of A (a transpose-like placement) — confirm that is the intent.
    for (k, entry) in enumerate(M)
        row, col = fldmod1(k, fld)
        A[row, col] = entry
    end
    return A
end
However, I now get a new error, and I’m not sure where it comes from:
julia> chn = sample(model, NUTS(), 500)
┌ Warning: failed to find valid initial parameters in 10 tries; consider providing explicit initial parameters using the `initial_params` keyword
└ @ Turing.Inference ~/.julia/packages/Turing/oFGEb/src/mcmc/hmc.jl:188
ERROR: failed to find valid initial parameters in 1000 tries. This may indicate an error with the model or AD backend; please open an issue at https://github.com/TuringLang/Turing.jl/issues
Stacktrace:
[1] error(s::String)
@ Base ./error.jl:35
[2] initialstep(rng::Random.TaskLocalRNG, model::DynamicPPL.Model{…}, spl::DynamicPPL.Sampler{…}, vi_original::DynamicPPL.TypedVarInfo{…}; initial_params::Nothing, nadapts::Int64, kwargs::@Kwargs{})
@ Turing.Inference ~/.julia/packages/Turing/oFGEb/src/mcmc/hmc.jl:191
[3] step(rng::Random.TaskLocalRNG, model::DynamicPPL.Model{…}, spl::DynamicPPL.Sampler{…}; initial_params::Nothing, kwargs::@Kwargs{…})
@ DynamicPPL ~/.julia/packages/DynamicPPL/senfM/src/sampler.jl:130
[4] step
@ ~/.julia/packages/DynamicPPL/senfM/src/sampler.jl:113 [inlined]
[5] macro expansion
@ ~/.julia/packages/AbstractMCMC/FSyVk/src/sample.jl:159 [inlined]
[6] macro expansion
@ ~/.julia/packages/ProgressLogging/6KXlp/src/ProgressLogging.jl:328 [inlined]
[7] macro expansion
@ ~/.julia/packages/AbstractMCMC/FSyVk/src/logging.jl:9 [inlined]
[8] mcmcsample(rng::Random.TaskLocalRNG, model::DynamicPPL.Model{…}, sampler::DynamicPPL.Sampler{…}, N::Int64; progress::Bool, progressname::String, callback::Nothing, num_warmup::Int64, discard_initial::Int64, thinning::Int64, chain_type::Type, initial_state::Nothing, kwargs::@Kwargs{…})
@ AbstractMCMC ~/.julia/packages/AbstractMCMC/FSyVk/src/sample.jl:142
[9] sample(rng::Random.TaskLocalRNG, model::DynamicPPL.Model{…}, sampler::DynamicPPL.Sampler{…}, N::Int64; chain_type::Type, resume_from::Nothing, initial_state::Nothing, progress::Bool, nadapts::Int64, discard_adapt::Bool, discard_initial::Int64, kwargs::@Kwargs{})
@ Turing.Inference ~/.julia/packages/Turing/oFGEb/src/mcmc/hmc.jl:119
[10] sample
@ ~/.julia/packages/Turing/oFGEb/src/mcmc/hmc.jl:88 [inlined]
[11] #sample#6
@ ~/.julia/packages/Turing/oFGEb/src/mcmc/Inference.jl:321 [inlined]
[12] sample
@ ~/.julia/packages/Turing/oFGEb/src/mcmc/Inference.jl:312 [inlined]
[13] #sample#5
@ ~/.julia/packages/Turing/oFGEb/src/mcmc/Inference.jl:309 [inlined]
[14] sample(model::DynamicPPL.Model{…}, alg::NUTS{…}, N::Int64)
@ Turing.Inference ~/.julia/packages/Turing/oFGEb/src/mcmc/Inference.jl:306
[15] top-level scope
@ ~/Documents/GitHub/Julia/Scratchpad/mwe.jl:77
Some type information was truncated. Use `show(err)` to see complete types.
As a side note, why do you need to use Turing.jl? To insert the HMM into some bigger model?