Turing sampling from MatrixNormal returns `MethodError: no method matching bijector(::MatrixNormal)`

Hi! I’m trying to sample from a MatrixNormal in Turing.jl with the model below (I’m just playing around with matrix distributions; there is no deeper reason for this). However, I keep running into the following error (which might be related to this Github issue):

```
ERROR: MethodError: no method matching bijector(::MatrixNormal{Float64, Matrix{Float64}, PDMats.PDMat{Float64, Matrix{Float64}}, PDMats.PDMat{Float64, Matrix{Float64}}})
Closest candidates are:
  bijector(::Union{Kolmogorov, BetaPrime, Chi, Chisq, Erlang, Exponential, FDist, Frechet, Gamma, InverseGamma, InverseGaussian, LogNormal, NoncentralChisq, NoncentralF, Rayleigh, Weibull}) at ~/.julia/packages/Bijectors/vUc4m/src/transformed_distribution.jl:58
  bijector(::Union{Arcsine, Beta, Biweight, Cosine, Epanechnikov, NoncentralBeta}) at ~/.julia/packages/Bijectors/vUc4m/src/transformed_distribution.jl:69
  bijector(::Union{Levy, Pareto}) at ~/.julia/packages/Bijectors/vUc4m/src/transformed_distribution.jl:72
  ...
Stacktrace:
  [1] macro expansion
    @ ~/.julia/packages/DynamicPPL/xKo8W/src/varinfo.jl:784 [inlined]
  [2] _link!(metadata::NamedTuple{(:mu,), Tuple{DynamicPPL.Metadata{Dict{AbstractPPL.VarName{:mu, Setfield.IdentityLens}, Int64}, Vector{MatrixNormal{Float64, Matrix{Float64}, PDMats.PDMat{Float64, Matrix{Float64}}, PDMats.PDMat{Float64, Matrix{Float64}}}}, Vector{AbstractPPL.VarName{:mu, Setfield.IdentityLens}}, Vector{Float64}, Vector{Set{DynamicPPL.Selector}}}}}, vi::DynamicPPL.TypedVarInfo{NamedTuple{(:mu,), Tuple{DynamicPPL.Metadata{Dict{AbstractPPL.VarName{:mu, Setfield.IdentityLens}, Int64}, Vector{MatrixNormal{Float64, Matrix{Float64}, PDMats.PDMat{Float64, Matrix{Float64}}, PDMats.PDMat{Float64, Matrix{Float64}}}}, Vector{AbstractPPL.VarName{:mu, Setfield.IdentityLens}}, Vector{Float64}, Vector{Set{DynamicPPL.Selector}}}}}, Float64}, vns::NamedTuple{(:mu,), Tuple{Vector{AbstractPPL.VarName{:mu, Setfield.IdentityLens}}}}, #unused#::Val{()})
    @ DynamicPPL ~/.julia/packages/DynamicPPL/xKo8W/src/varinfo.jl:768
  [3] _link!
    @ ~/.julia/packages/DynamicPPL/xKo8W/src/varinfo.jl:766 [inlined]
  [4] _link!
    @ ~/.julia/packages/DynamicPPL/xKo8W/src/varinfo.jl:762 [inlined]
  [5] link!!
    @ ~/.julia/packages/DynamicPPL/xKo8W/src/varinfo.jl:717 [inlined]
  [6] link!!
    @ ~/.julia/packages/DynamicPPL/xKo8W/src/abstract_varinfo.jl:383 [inlined]
  [7] initialstep(rng::Random._GLOBAL_RNG, model::DynamicPPL.Model{typeof(estimateA), (:A, :U, :V, :Uₐ, :Vₐ), (), (), NTuple{5, Matrix{Float64}}, Tuple{}, DynamicPPL.DefaultContext}, spl::DynamicPPL.Sampler{NUTS{Turing.Essential.ForwardDiffAD{0}, (), AdvancedHMC.DiagEuclideanMetric}}, vi::DynamicPPL.TypedVarInfo{NamedTuple{(:mu,), Tuple{DynamicPPL.Metadata{Dict{AbstractPPL.VarName{:mu, Setfield.IdentityLens}, Int64}, Vector{MatrixNormal{Float64, Matrix{Float64}, PDMats.PDMat{Float64, Matrix{Float64}}, PDMats.PDMat{Float64, Matrix{Float64}}}}, Vector{AbstractPPL.VarName{:mu, Setfield.IdentityLens}}, Vector{Float64}, Vector{Set{DynamicPPL.Selector}}}}}, Float64}; init_params::Nothing, nadapts::Int64, kwargs::Base.Pairs{Symbol, Union{}, Tuple{}, NamedTuple{(), Tuple{}}})
    @ Turing.Inference ~/.julia/packages/Turing/KOb5J/src/inference/hmc.jl:153
  [8] step(rng::Random._GLOBAL_RNG, model::DynamicPPL.Model{typeof(estimateA), (:A, :U, :V, :Uₐ, :Vₐ), (), (), NTuple{5, Matrix{Float64}}, Tuple{}, DynamicPPL.DefaultContext}, spl::DynamicPPL.Sampler{NUTS{Turing.Essential.ForwardDiffAD{0}, (), AdvancedHMC.DiagEuclideanMetric}}; resume_from::Nothing, init_params::Nothing, kwargs::Base.Pairs{Symbol, Int64, Tuple{Symbol}, NamedTuple{(:nadapts,), Tuple{Int64}}})
    @ DynamicPPL ~/.julia/packages/DynamicPPL/xKo8W/src/sampler.jl:111
  [9] macro expansion
    @ ~/.julia/packages/AbstractMCMC/fnRmh/src/sample.jl:120 [inlined]
 [10] macro expansion
    @ ~/.julia/packages/ProgressLogging/6KXlp/src/ProgressLogging.jl:328 [inlined]
 [11] macro expansion
    @ ~/.julia/packages/AbstractMCMC/fnRmh/src/logging.jl:9 [inlined]
 [12] mcmcsample(rng::Random._GLOBAL_RNG, model::DynamicPPL.Model{typeof(estimateA), (:A, :U, :V, :Uₐ, :Vₐ), (), (), NTuple{5, Matrix{Float64}}, Tuple{}, DynamicPPL.DefaultContext}, sampler::DynamicPPL.Sampler{NUTS{Turing.Essential.ForwardDiffAD{0}, (), AdvancedHMC.DiagEuclideanMetric}}, N::Int64; progress::Bool, progressname::String, callback::Nothing, discard_initial::Int64, thinning::Int64, chain_type::Type, kwargs::Base.Pairs{Symbol, Int64, Tuple{Symbol}, NamedTuple{(:nadapts,), Tuple{Int64}}})
    @ AbstractMCMC ~/.julia/packages/AbstractMCMC/fnRmh/src/sample.jl:111
 [13] sample(rng::Random._GLOBAL_RNG, model::DynamicPPL.Model{typeof(estimateA), (:A, :U, :V, :Uₐ, :Vₐ), (), (), NTuple{5, Matrix{Float64}}, Tuple{}, DynamicPPL.DefaultContext}, sampler::DynamicPPL.Sampler{NUTS{Turing.Essential.ForwardDiffAD{0}, (), AdvancedHMC.DiagEuclideanMetric}}, N::Int64; chain_type::Type, resume_from::Nothing, progress::Bool, nadapts::Int64, discard_adapt::Bool, discard_initial::Int64, kwargs::Base.Pairs{Symbol, Union{}, Tuple{}, NamedTuple{(), Tuple{}}})
    @ Turing.Inference ~/.julia/packages/Turing/KOb5J/src/inference/hmc.jl:133
 [14] sample
    @ ~/.julia/packages/Turing/KOb5J/src/inference/hmc.jl:103 [inlined]
 [15] #sample#2
    @ ~/.julia/packages/Turing/KOb5J/src/inference/Inference.jl:145 [inlined]
 [16] sample
    @ ~/.julia/packages/Turing/KOb5J/src/inference/Inference.jl:138 [inlined]
 [17] #sample#1
    @ ~/.julia/packages/Turing/KOb5J/src/inference/Inference.jl:135 [inlined]
 [18] sample(model::DynamicPPL.Model{typeof(estimateA), (:A, :U, :V, :Uₐ, :Vₐ), (), (), NTuple{5, Matrix{Float64}}, Tuple{}, DynamicPPL.DefaultContext}, alg::NUTS{Turing.Essential.ForwardDiffAD{0}, (), AdvancedHMC.DiagEuclideanMetric}, N::Int64)
    @ Turing.Inference ~/.julia/packages/Turing/KOb5J/src/inference/Inference.jl:129
 [19] top-level scope
    @ ~/RandomStuff/scratch.jl:133
```

This is the model I’m using:

```julia
using Random, Turing, Bijectors
Random.seed!(123)

# Simulate a draw from a MatrixNormal to use as data
U  = rand(LKJ(2, 0.5))
V  = rand(LKJ(2, 0.5))
Uₐ = rand(LKJ(2, 0.5))
Vₐ = rand(LKJ(2, 0.5))
Asample = rand(MatrixNormal(zeros(Float64, 2, 2), U, V))

# Create the model
@model function estimateA(A, U, V, Uₐ, Vₐ)
    mu ~ MatrixNormal(zeros(Float64, size(A, 1), size(A, 2)), Uₐ, Vₐ)
    A  ~ MatrixNormal(mu, U, V)
end

# Estimate!
model  = estimateA(Asample, U, V, Uₐ, Vₐ);
#Bijectors.bijector(::MatrixNormal) = PDBijector()   # suggested workaround (uncomment to try)
chains = sample(model, NUTS(), 100);
```

I’ve also tried the suggestion from the related Github issue to add the line `Bijectors.bijector(::MatrixNormal) = PDBijector()` (by uncommenting the line above); however, that also results in an error:

```
ERROR: UndefVarError: PDBijector not defined
Stacktrace:
  [1] bijector(#unused#::MatrixNormal{Float64, Matrix{Float64}, PDMats.PDMat{Float64, Matrix{Float64}}, PDMats.PDMat{Float64, Matrix{Float64}}})
    @ Main ~/RandomStuff/scratch.jl:133
  (… frames [2]–[20] are identical to the stack trace above …)
```
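
Presumably the `UndefVarError` just means that `PDBijector` isn’t exported by the Bijectors version I have, so qualifying the name should at least get past that particular error, as in the line below. Even so, my understanding is that `PDBijector` is meant for positive-definite matrix distributions (like the Wishart), and MatrixNormal samples aren’t positive definite in general, so I suspect it’s the wrong transform here anyway:

```julia
# Avoids the UndefVarError by qualifying the name, but PDBijector maps
# positive-definite matrices, which MatrixNormal samples need not be,
# so this is likely not a correct fix.
Bijectors.bijector(::MatrixNormal) = Bijectors.PDBijector()
```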

The solution might be obvious, but I’m relatively new to Turing. Can anyone help me? Thanks!

Update
The cause might be that Bijectors.jl simply has not defined a `bijector` method for `MatrixNormal`, since the following minimal example fails with the same error:

```julia
using Bijectors, Distributions

dist = MatrixNormal(zeros(2,2), rand(LKJ(2, 0.5)), rand(LKJ(2, 0.5)))

x = rand(dist)
b = bijector(dist)   # same MethodError as above
```

However, I’m unfamiliar with how to define an ad-hoc bijector for a distribution.
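
Since a `MatrixNormal` sample is just an unconstrained real matrix, my guess is that the right bijector is simply the identity. A minimal sketch, assuming a Bijectors version that still provides the `Identity{N}` type (where `N` is the dimensionality of the input; newer releases may have replaced it):

```julia
using Bijectors, Distributions

# Sketch: MatrixNormal has support on all of R^(n×m), so no constraining
# transform should be needed. Identity{2} is the identity bijector for
# matrix-variate inputs (2 = number of array dimensions).
Bijectors.bijector(::MatrixNormal) = Bijectors.Identity{2}()
```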

PS
Yes, I know I can vectorize the matrix and build the model from an MvNormal using the property in the Wikipedia definition (roughly sketched below). However, I’m trying to sample directly from the MatrixNormal.
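
For reference, the workaround I mean looks roughly like this (a sketch; the model name `estimateA_vec` and its argument list are my own). It uses the identity that if `X ~ MatrixNormal(M, U, V)`, then `vec(X) ~ MvNormal(vec(M), kron(V, U))`:

```julia
using LinearAlgebra, Random, Turing

# Vectorized equivalent of the model above: work with vec(A) and
# Kronecker-product covariances instead of MatrixNormal directly.
@model function estimateA_vec(Avec, n, m, U, V, Uₐ, Vₐ)
    mu ~ MvNormal(zeros(n * m), Symmetric(kron(Vₐ, Uₐ)))
    Avec ~ MvNormal(mu, Symmetric(kron(V, U)))
end

model_vec = estimateA_vec(vec(Asample), 2, 2, U, V, Uₐ, Vₐ)
chains = sample(model_vec, NUTS(), 100)
```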