Hierarchical proposal in AdvancedMH.jl

I am trying to use AdvancedMH.jl within Turing.jl to perform static Metropolis-Hastings (MH) sampling for a hierarchical model:

@model function model_toyProblem_original( y, σ², β, ϑ )
    θ ~ Gamma( β, ϑ ) # hyper-prior model 
    x ~ Normal( 0, sqrt(θ) ) # conditional prior 
    y ~ Normal( x, sqrt(σ²) ) # likelihood part 
end

To this end, I would like to generate proposals from the prior for (θ, x). As far as I understand, this requires defining a StaticProposal that draws samples (θ, x) with θ ~ Gamma( β, ϑ ) and x ~ Normal( 0, sqrt(θ) ). I got this to work for independent proposal distributions, e.g., θ ~ Gamma( β, ϑ ) and x ~ Normal( 0, 1 ), by defining the proposal as

proposal = StaticProposal([ 
    Gamma(β, ϑ), 
    Normal(0, 1)
])

However, I could not make this work for the hierarchical case where x ~ Normal( 0, sqrt(θ) ). I tried modifying the proposal into something like

proposal = StaticProposal([ 
    Gamma(β, ϑ), 
    (state) -> Normal(0, sqrt(state.θ)) 
])

Does anyone know how to properly set up a proposal for such a hierarchical prior?

Any help would be appreciated. Please find a minimal working example below.

# Import packages. 
using AdvancedMH # for MH sampling
using Turing # for setting up the model and sampling 

# Define the model.  
@model function model_toyProblem( y, σ², β, ϑ )
    θ ~ Gamma( β, ϑ ) # hyper-prior model 
    x ~ Normal( 0, sqrt(θ) ) # conditional prior 
    y ~ Normal( x, sqrt(σ²) ) # likelihood part 
end

# Condition the model on the given data and parameters. 
y = .2; σ² = 10^(-2.8); β = 1.501; ϑ = 5*10^(-2)
model_func = model_toyProblem( y, σ², β, ϑ ) 

# Define the proposal distribution for θ and x
proposal = StaticProposal([ 
    Gamma(β, ϑ ), 
    (state) -> Normal(0, sqrt(state.θ)) # state-dependent proposal for x; this is what triggers the error below 
    #Normal(0, 1) # an independent proposal for x works fine 
])

# Use external static MH sampler
spl = MetropolisHastings(proposal) 
# Wrap it so it can be used as an external sampler within the Turing framework 
espl = externalsampler(spl)

# Generate a chain containing the samples of θ and x 
chn_original = sample( 
    model_func, 
    espl,  
    10^3
)

Output:

MethodError: no method matching Random.Sampler(::Type{Random.TaskLocalRNG}, ::Random.SamplerTrivial{var"#1#2", Any}, ::Val{1})

Closest candidates are:
  Random.Sampler(::Type{<:Random.AbstractRNG}, ::Random.Sampler, ::Union{Val{1}, Val{Inf}})
   @ Random /Applications/Julia-1.10.app/Contents/Resources/julia/share/julia/stdlib/v1.10/Random/src/Random.jl:147
  Random.Sampler(::Type{<:Random.AbstractRNG}, ::Any, ::Union{Val{1}, Val{Inf}})
   @ Random /Applications/Julia-1.10.app/Contents/Resources/julia/share/julia/stdlib/v1.10/Random/src/Random.jl:183
  Random.Sampler(::Type{<:Random.AbstractRNG}, ::BitSet, ::Union{Val{1}, Val{Inf}})
   @ Random /Applications/Julia-1.10.app/Contents/Resources/julia/share/julia/stdlib/v1.10/Random/src/generation.jl:450
  ...


Stacktrace:
  [1] Random.Sampler(T::Type{Random.TaskLocalRNG}, sp::Random.SamplerTrivial{var"#1#2", Any}, r::Val{1})
    @ Random /Applications/Julia-1.10.app/Contents/Resources/julia/share/julia/stdlib/v1.10/Random/src/Random.jl:147
  [2] Random.Sampler(rng::Random.TaskLocalRNG, x::Random.SamplerTrivial{var"#1#2", Any}, r::Val{1})
    @ Random /Applications/Julia-1.10.app/Contents/Resources/julia/share/julia/stdlib/v1.10/Random/src/Random.jl:139
  [3] rand(rng::Random.TaskLocalRNG, X::Random.SamplerTrivial{var"#1#2", Any})
    @ Random /Applications/Julia-1.10.app/Contents/Resources/julia/share/julia/stdlib/v1.10/Random/src/Random.jl:255
  [4] rand(rng::Random.TaskLocalRNG, X::Function)
    @ Random /Applications/Julia-1.10.app/Contents/Resources/julia/share/julia/stdlib/v1.10/Random/src/Random.jl:255
  [5] (::AdvancedMH.var"#11#12"{Random.TaskLocalRNG})(x::Function)
    @ AdvancedMH ~/.julia/packages/AdvancedMH/yFKzJ/src/proposal.jl:27
  [6] iterate(g::Base.Generator, s::Vararg{Any})
    @ Base ./generator.jl:47 [inlined]
  [7] collect_to!(dest::Vector{Float64}, itr::Base.Generator{Vector{Any}, AdvancedMH.var"#11#12"{Random.TaskLocalRNG}}, offs::Int64, st::Int64)
    @ Base ./array.jl:892
  [8] collect_to_with_first!(dest::Vector{Float64}, v1::Float64, itr::Base.Generator{Vector{Any}, AdvancedMH.var"#11#12"{Random.TaskLocalRNG}}, st::Int64)
    @ Base ./array.jl:870
  [9] _collect(c::Vector{Any}, itr::Base.Generator{Vector{Any}, AdvancedMH.var"#11#12"{Random.TaskLocalRNG}}, ::Base.EltypeUnknown, isz::Base.HasShape{1})
    @ Base ./array.jl:864
 [10] collect_similar
    @ ./array.jl:763 [inlined]
 [11] map
    @ ./abstractarray.jl:3282 [inlined]
 [12] rand
    @ ~/.julia/packages/AdvancedMH/yFKzJ/src/proposal.jl:27 [inlined]
 [13] propose (repeats 2 times)
    @ ~/.julia/packages/AdvancedMH/yFKzJ/src/proposal.jl:76 [inlined]
 [14] propose
    @ ~/.julia/packages/AdvancedMH/yFKzJ/src/mh-core.jl:54 [inlined]
 [15] step(rng::Random.TaskLocalRNG, model::AbstractMCMC.LogDensityModel{LogDensityProblemsADForwardDiffExt.ForwardDiffLogDensity{LogDensityFunction{DynamicPPL.TypedVarInfo{@NamedTuple{θ::DynamicPPL.Metadata{Dict{AbstractPPL.VarName{:θ, typeof(identity)}, Int64}, Vector{Gamma{Float64}}, Vector{AbstractPPL.VarName{:θ, typeof(identity)}}, Vector{Float64}, Vector{Set{DynamicPPL.Selector}}}, x::DynamicPPL.Metadata{Dict{AbstractPPL.VarName{:x, typeof(identity)}, Int64}, Vector{Normal{Float64}}, Vector{AbstractPPL.VarName{:x, typeof(identity)}}, Vector{Float64}, Vector{Set{DynamicPPL.Selector}}}}, Float64}, DynamicPPL.Model{typeof(model_toyProblem), (:y, :σ², :β, :ϑ), (), (), NTuple{4, Float64}, Tuple{}, DynamicPPL.DefaultContext}, Nothing}, ForwardDiff.Chunk{2}, ForwardDiff.Tag{DynamicPPL.DynamicPPLTag, Float64}, ForwardDiff.GradientConfig{ForwardDiff.Tag{DynamicPPL.DynamicPPLTag, Float64}, Float64, 2, Vector{ForwardDiff.Dual{ForwardDiff.Tag{DynamicPPL.DynamicPPLTag, Float64}, Float64, 2}}}}}, sampler::MetropolisHastings{StaticProposal{false, Vector{Any}}}; initial_params::Nothing, kwargs::@Kwargs{})
    @ AdvancedMH ~/.julia/packages/AdvancedMH/yFKzJ/src/mh-core.jl:83
 [16] step(rng::Random.TaskLocalRNG, model::DynamicPPL.Model{typeof(model_toyProblem), (:y, :σ², :β, :ϑ), (), (), NTuple{4, Float64}, Tuple{}, DynamicPPL.DefaultContext}, sampler_wrapper::DynamicPPL.Sampler{Turing.Inference.ExternalSampler{MetropolisHastings{StaticProposal{false, Vector{Any}}}, AutoForwardDiff{nothing, Nothing}, true}}; initial_state::Nothing, initial_params::Nothing, kwargs::@Kwargs{})
    @ Turing.Inference ~/.julia/packages/Turing/duwEY/src/mcmc/abstractmcmc.jl:67
 [17] step
    @ ~/.julia/packages/Turing/duwEY/src/mcmc/abstractmcmc.jl:36 [inlined]
 [18] macro expansion
    @ ~/.julia/packages/AbstractMCMC/YrmkI/src/sample.jl:130 [inlined]
 [19] macro expansion
    @ ~/.julia/packages/ProgressLogging/6KXlp/src/ProgressLogging.jl:328 [inlined]
 [20] (::AbstractMCMC.var"#22#23"{Bool, String, Nothing, Int64, Int64, Nothing, @Kwargs{}, Random.TaskLocalRNG, DynamicPPL.Model{typeof(model_toyProblem), (:y, :σ², :β, :ϑ), (), (), NTuple{4, Float64}, Tuple{}, DynamicPPL.DefaultContext}, DynamicPPL.Sampler{Turing.Inference.ExternalSampler{MetropolisHastings{StaticProposal{false, Vector{Any}}}, AutoForwardDiff{nothing, Nothing}, true}}, Int64, Int64})()
    @ AbstractMCMC ~/.julia/packages/AbstractMCMC/YrmkI/src/logging.jl:12
 [21] with_logstate(f::Function, logstate::Any)
    @ Base.CoreLogging ./logging.jl:515
 [22] with_logger
    @ Base.CoreLogging ./logging.jl:627 [inlined]
 [23] with_progresslogger(f::Function, _module::Module, logger::Logging.ConsoleLogger)
    @ AbstractMCMC ~/.julia/packages/AbstractMCMC/YrmkI/src/logging.jl:36
 [24] macro expansion
    @ ~/.julia/packages/AbstractMCMC/YrmkI/src/logging.jl:11 [inlined]
 [25] mcmcsample(rng::Random.TaskLocalRNG, model::DynamicPPL.Model{typeof(model_toyProblem), (:y, :σ², :β, :ϑ), (), (), NTuple{4, Float64}, Tuple{}, DynamicPPL.DefaultContext}, sampler::DynamicPPL.Sampler{Turing.Inference.ExternalSampler{MetropolisHastings{StaticProposal{false, Vector{Any}}}, AutoForwardDiff{nothing, Nothing}, true}}, N::Int64; progress::Bool, progressname::String, callback::Nothing, discard_initial::Int64, thinning::Int64, chain_type::Type, initial_state::Nothing, kwargs::@Kwargs{})
    @ AbstractMCMC ~/.julia/packages/AbstractMCMC/YrmkI/src/sample.jl:120
 [26] sample(rng::Random.TaskLocalRNG, model::DynamicPPL.Model{typeof(model_toyProblem), (:y, :σ², :β, :ϑ), (), (), NTuple{4, Float64}, Tuple{}, DynamicPPL.DefaultContext}, sampler::DynamicPPL.Sampler{Turing.Inference.ExternalSampler{MetropolisHastings{StaticProposal{false, Vector{Any}}}, AutoForwardDiff{nothing, Nothing}, true}}, N::Int64; chain_type::Type, resume_from::Nothing, initial_state::Nothing, kwargs::@Kwargs{})
    @ DynamicPPL ~/.julia/packages/DynamicPPL/ACaKr/src/sampler.jl:93
 [27] sample
    @ DynamicPPL ~/.julia/packages/DynamicPPL/ACaKr/src/sampler.jl:83 [inlined]
 [28] #sample#4
    @ Turing.Inference ~/.julia/packages/Turing/duwEY/src/mcmc/Inference.jl:272 [inlined]
 [29] sample
    @ Turing.Inference ~/.julia/packages/Turing/duwEY/src/mcmc/Inference.jl:263 [inlined]
 [30] #sample#3
    @ Turing.Inference ~/.julia/packages/Turing/duwEY/src/mcmc/Inference.jl:260 [inlined]
 [31] sample(model::DynamicPPL.Model{typeof(model_toyProblem), (:y, :σ², :β, :ϑ), (), (), NTuple{4, Float64}, Tuple{}, DynamicPPL.DefaultContext}, alg::Turing.Inference.ExternalSampler{MetropolisHastings{StaticProposal{false, Vector{Any}}}, AutoForwardDiff{nothing, Nothing}, true}, N::Int64)
    @ Turing.Inference ~/.julia/packages/Turing/duwEY/src/mcmc/Inference.jl:254
 [32] top-level scope
    @ In[12]:14

Hi, is there a reason why you need to use Metropolis-Hastings?
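
If not: for a model like this, a gradient-based sampler should work out of the box, e.g. something along the lines of

chn = sample(model_func, NUTS(), 10^3)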

For your original question: what you're trying to do is technically Gibbs sampling rather than vanilla Metropolis-Hastings, so you will need to use the Gibbs sampling interface for that.
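
Here is a minimal sketch of what that could look like with Turing's built-in Gibbs and MH samplers (assuming a recent Turing release with the pair-based Gibbs syntax; I believe MH() without arguments proposes from the prior, so x would be proposed from Normal(0, sqrt(θ)) given the current value of θ):

chn = sample( 
    model_func, 
    Gibbs(:θ => MH(), :x => MH()), 
    10^3 
)

On older Turing versions the equivalent should be, I think, Gibbs(MH(:θ), MH(:x)).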