You can get around this by replacing
```julia
Σ_L = Diagonal(sigma .+ 1e-3) * F.L
Sigma = PDMat(Cholesky(Σ_L))
```
with
```julia
Σ_L = Diagonal(sigma) * F.L
Sigma = PDMat(Cholesky(Σ_L + eps() * I))
```
in the model in Singular Exception with LKJCholesky - #11 by sethaxen.
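For context, here's roughly where those lines sit in a `correlation_chol`-style model. The priors and likelihood below are my own placeholder assumptions, not necessarily the exact model from that thread; only the construction of `Sigma` is the point:

```julia
using Turing, Distributions, PDMats, LinearAlgebra

@model function correlation_chol(J, N, y)
    sigma ~ filldist(truncated(Normal(0, 1); lower=0), J)  # assumed scale prior
    F ~ LKJCholesky(J, 1.0)                                # Cholesky factor of a correlation matrix
    Σ_L = Diagonal(sigma) * F.L
    Sigma = PDMat(Cholesky(Σ_L + eps() * I))               # nudge the factor off exact singularity
    for i in 1:N
        y[:, i] ~ MvNormal(zeros(J), Sigma)
    end
end
```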
Then you get
```julia
julia> chain2 = sample(correlation_chol(J, N, y), NUTS(), 1000);
┌ Info: Found initial step size
└ ϵ = 0.0125
Sampling 100%|█████████████████████████████████████████████████████████████████████████| Time: 0:00:00
ERROR: MethodError: no method matching length(::Cholesky{Float64, Matrix{Float64}})
Closest candidates are:
length(::RegexMatch)
@ Base regex.jl:285
length(::ExponentialBackOff)
@ Base error.jl:267
length(::Combinatorics.FixedPartitions)
@ Combinatorics ~/.julia/packages/Combinatorics/Udg6X/src/partitions.jl:96
...
Stacktrace:
[1] _length(a::Cholesky{Float64, Matrix{Float64}})
@ Turing.Utilities ~/.julia/packages/Turing/aBDzv/src/utilities/helper.jl:9
[2] length(iter::Turing.Utilities.FlattenIterator{Cholesky{Float64, Matrix{Float64}}})
@ Turing.Utilities ~/.julia/packages/Turing/aBDzv/src/utilities/helper.jl:8
[3] _similar_shape(itr::Turing.Utilities.FlattenIterator{Cholesky{Float64, Matrix{Float64}}}, ::Base.HasLength)
@ Base ./array.jl:708
[4] _collect(cont::UnitRange{…}, itr::Turing.Utilities.FlattenIterator{…}, ::Base.HasEltype, isz::Base.HasLength)
@ Base ./array.jl:763
[5] collect(itr::Turing.Utilities.FlattenIterator{Cholesky{Float64, Matrix{Float64}}})
@ Base ./array.jl:757
[6] (::Turing.Inference.var"#17#21")(::Tuple{Cholesky{Float64, Matrix{Float64}}, String})
@ Turing.Inference ~/.julia/packages/Turing/aBDzv/src/inference/Inference.jl:326
[7] MappingRF
@ Base ./reduce.jl:100 [inlined]
[8] _foldl_impl
@ Base ./reduce.jl:58 [inlined]
[9] foldl_impl(op::Base.MappingRF{…}, nt::Base._InitialValue, itr::Base.Iterators.Zip{…})
@ Base ./reduce.jl:48
[10] mapfoldl_impl(f::Turing.Inference.var"#17#21", op::typeof(vcat), nt::Base._InitialValue, itr::Base.Iterators.Zip{…})
@ Base ./reduce.jl:44
[11] mapfoldl(f::Function, op::Function, itr::Base.Iterators.Zip{Tuple{Vector{…}, Vector{…}}}; init::Base._InitialValue)
@ Base ./reduce.jl:175
[12] mapfoldl
@ Base ./reduce.jl:175 [inlined]
[13] mapreduce(f::Function, op::Function, itr::Base.Iterators.Zip{Tuple{Vector{Cholesky{…}}, Vector{String}}})
@ Base ./reduce.jl:307
[14] #16
@ Base ~/.julia/packages/Turing/aBDzv/src/inference/Inference.jl:325 [inlined]
[15] MappingRF
@ Base ./reduce.jl:100 [inlined]
[16] afoldl(::Base.MappingRF{Turing.Inference.var"#16#20"{…}, Base.BottomRF{…}}, ::Base._InitialValue, ::Symbol, ::Symbol)
@ Base ./operators.jl:543
[17] _foldl_impl
@ Base ./reduce.jl:68 [inlined]
[18] foldl_impl(op::Base.MappingRF{…}, nt::Base._InitialValue, itr::Tuple{…})
@ Base ./reduce.jl:48
[19] mapfoldl_impl(f::Turing.Inference.var"#16#20"{…}, op::typeof(vcat), nt::Base._InitialValue, itr::Tuple{…})
@ Base ./reduce.jl:44
[20] mapfoldl(f::Function, op::Function, itr::Tuple{Symbol, Symbol}; init::Base._InitialValue)
@ Base ./reduce.jl:175
[21] mapfoldl
@ Base ./reduce.jl:175 [inlined]
[22] mapreduce(f::Function, op::Function, itr::Tuple{Symbol, Symbol})
@ Base ./reduce.jl:307
[23] flatten_namedtuple(nt::@NamedTuple{sigma::Tuple{Vector{…}, Vector{…}}, F::Tuple{Vector{…}, Vector{…}}})
@ Turing.Inference ~/.julia/packages/Turing/aBDzv/src/inference/Inference.jl:320
[24] (::Turing.Inference.var"#10#13"{…})(t::Turing.Inference.Transition{…})
@ Turing.Inference ~/.julia/packages/Turing/aBDzv/src/inference/Inference.jl:305
[25] iterate(g::Base.Generator, s::Vararg{Any})
@ Base ./generator.jl:47 [inlined]
[26] _collect(c::Vector{…}, itr::Base.Generator{…}, ::Base.EltypeUnknown, isz::Base.HasShape{…})
@ Base ./array.jl:852
[27] collect_similar
@ Turing.Inference ./array.jl:761 [inlined]
[28] map
@ Turing.Inference ./abstractarray.jl:3273 [inlined]
[29] _params_to_array(ts::Vector{Turing.Inference.Transition{@NamedTuple{…}, Float64, @NamedTuple{…}}})
@ Turing.Inference ~/.julia/packages/Turing/aBDzv/src/inference/Inference.jl:304
[30] bundle_samples(ts::Vector{…}, model::DynamicPPL.Model{…}, spl::DynamicPPL.Sampler{…}, state::Turing.Inference.HMCState{…}, chain_type::Type{…}; save_state::Bool, stats::AbstractMCMC.SamplingStats, sort_chain::Bool, discard_initial::Int64, thinning::Int64, kwargs::@Kwargs{…})
```
I’m guessing this is an issue with flattening a Cholesky object for storage in a Chains object, since if we use a different chain_type, all is well:
```julia
julia> chain3 = sample(correlation_chol(J, N, y), NUTS(), 1000; chain_type=VarInfo);
┌ Info: Found initial step size
└ ϵ = 0.00625
Sampling 100%|█████████████████████████████████████████████████████████████████████████| Time: 0:00:00
julia> map(t -> t.θ.F[1][1], chain3)[1]
Cholesky{Float64, Matrix{Float64}}
L factor:
3×3 LowerTriangular{Float64, Matrix{Float64}}:
1.0 ⋅ ⋅
0.539406 0.842046 ⋅
0.295462 -0.139833 0.945066
```
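The raw transitions are usable directly for post-processing. A small sketch (reusing the `t.θ.F[1][1]` access pattern above) to turn each sampled Cholesky factor back into a correlation matrix:

```julia
# Rebuild the correlation matrix C = L * L' from each sampled Cholesky factor,
# using chain3 from the chain_type=VarInfo run above.
corrs = map(chain3) do t
    F = t.θ.F[1][1]        # Cholesky{Float64, Matrix{Float64}}
    Matrix(F.L * F.L')     # dense correlation matrix
end
corrs[1]                   # 3×3 correlation matrix for the first draw
```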