GraphPPL RxInfer Model

I have tried to extend the example from here to include an unknown parameter theta:

@model function rotate_ssm(y, x0, Q, P)
    # Prior on the initial state, taken from the supplied distribution `x0`.
    x_prior ~ MvNormalMeanCovariance(mean(x0), cov(x0))
    x_prev = x_prior

    # Inside `@model`, `theta` is a random variable — a graph node label, not a
    # number — so the plain Julia call `cos(theta)` on the next line fails with
    # the `MethodError: no method matching cos(::GraphPPL.NodeLabel)` shown below.
    theta ~ Normal(1,1)
    A = [cos(theta) -sin(theta); sin(theta) cos(theta)]
    # Identity observation matrix (2x2).
    B = diageye(2)
    
    for i in 1:length(y)
        # State transition: rotate the previous state, process noise covariance Q.
        x[i] ~ MvNormalMeanCovariance(A * x_prev, Q)
        # Observation model with noise covariance P.
        y[i] ~ MvNormalMeanCovariance(B * x[i], P)
        x_prev = x[i]
    end
end

however I get:

ERROR: MethodError: no method matching cos(::GraphPPL.NodeLabel)

Closest candidates are:
  cos(::Irrational{:π})
   @ Base mathconstants.jl:127
  cos(::IrrationalConstants.Halfπ)
   @ IrrationalConstants C:\Users\KaisermayerV\.julia\packages\IrrationalConstants\vp5v4\src\trigonometric.jl:16
  cos(::BigFloat)
   @ Base mpfr.jl:801
  ...

Stacktrace:
  [1] macro expansion
    @ dev\dec_rxinfer.jl:51 [inlined]
  [2] add_terminated_submodel!(__model__::GraphPPL.Model{…}, __context__::GraphPPL.Context, __options__::GraphPPL.NodeCreationOptions{…}, ::typeof(rotate_ssm), __interfaces__::@NamedTuple{…}, ::Static.StaticInt{…})      
    @ Main .julia\packages\GraphPPL\Z49xA\src\model_macro.jl:737
  [3] add_terminated_submodel!
    @ .julia\packages\GraphPPL\Z49xA\src\graph_engine.jl:1892 [inlined]
  [4] add_terminated_submodel!
    @ .julia\packages\GraphPPL\Z49xA\src\graph_engine.jl:1888 [inlined]
  [5] add_toplevel_model!
    @ .julia\packages\GraphPPL\Z49xA\src\graph_engine.jl:1908 [inlined]
  [6] create_model(callback::RxInfer.var"#24#26"{…}, generator::GraphPPL.ModelGenerator{…})
    @ GraphPPL .julia\packages\GraphPPL\Z49xA\src\model_generator.jl:97
  [7] __infer_create_factor_graph_model
    @ .julia\packages\RxInfer\wbFg1\src\model\model.jl:122 [inlined]
  [8] create_model(generator::RxInfer.ConditionedModelGenerator{GraphPPL.ModelGenerator{…}, @NamedTuple{…}})
    @ RxInfer .julia\packages\RxInfer\wbFg1\src\model\model.jl:110
  [9] batch_inference(; model::GraphPPL.ModelGenerator{…}, data::@NamedTuple{…}, initialization::Nothing, constraints::Nothing, meta::GraphPPL.MetaSpecification, options::@NamedTuple{…}, returnvars::Nothing, predictvars::Nothing, iterations::Nothing, free_energy::Bool, free_energy_diagnostics::Tuple{…}, showprogress::Bool, callbacks::Nothing, addons::Nothing, postprocess::DefaultPostprocess, warn::Bool, catch_exception::Bool)
    @ RxInfer .julia\packages\RxInfer\wbFg1\src\inference\batch.jl:199
 [10] batch_inference
    @ .julia\packages\RxInfer\wbFg1\src\inference\batch.jl:94 [inlined]
 [11] #infer#242
    @ .julia\packages\RxInfer\wbFg1\src\inference\inference.jl:306 [inlined]
 [12] top-level scope
    @ dev\dec_rxinfer.jl:71
Some type information was truncated. Use `show(err)` to see complete types.

My current model is:

@model function rotate_ssm(y, x0, Q, P)
    # Prior on the initial state, taken from the supplied distribution `x0`.
    x_prior ~ MvNormalMeanCovariance(mean(x0), cov(x0))
    x_prev = x_prior

    # Plain Julia function mapping an angle to a 2x2 rotation matrix; it is not
    # called directly on the random variable, but wrapped in a delta node below.
    Afunc = (theta::Real) -> [cos(theta) -sin(theta); sin(theta) cos(theta)]

    theta ~ Normal(mean=0.0,variance=1.0)
    # `:=` creates a deterministic (delta) factor node; the DeltaMeta with the
    # Unscented method tells RxInfer how to pass messages through the nonlinear
    # `Afunc`, avoiding the earlier `cos(::GraphPPL.NodeLabel)` MethodError.
    A := Afunc(theta) where { meta=DeltaMeta(method=Unscented()) }
    # Identity observation matrix (2x2).
    B = diageye(2)

    for i in 1:length(y)
        # State transition: rotate the previous state, process noise covariance Q.
        x[i] ~ MvNormalMeanCovariance(A * x_prev, Q)
        # Observation model with noise covariance P.
        y[i] ~ MvNormalMeanCovariance(B * x[i], P)
        x_prev = x[i]
    end
end

# Broad (weakly informative) prior on the 2-D initial state.
x0 = MvNormalMeanCovariance(zeros(2), 100.0 * diageye(2))

# Initial marginal for `theta` only. NOTE(review): the error reported below
# ("Variables [ A, x_prior, theta, x ] have not been updated") suggests that
# additional marginals/messages may also need to be initialized — confirm
# against the RxInfer initialization documentation.
init = @initialization begin
    μ(theta) = Normal(0, 1)
end

# `Q`, `P`, and the observations `y` are assumed to be defined earlier in the
# poster's script (not shown in this excerpt).
result = infer(
    model = rotate_ssm(x0=x0, Q=Q, P=P), 
    data = (y = y,),
    free_energy = true,
    options = (limit_stack_depth = 100, ),
    initialization = init,
    iterations = 20,
)
ERROR: Variables [ A, x_prior, theta, x ] have not been updated after an update event. 
Therefore, make sure to initialize all required marginals and messages. See `initialization` keyword argument for the inference function.
See the official documentation for detailed information regarding the initialization.

Hi @ohmsweetohm1! I believe your question was resolved in this discussion:
State observer · ReactiveBayes · Discussion #309 · GitHub — is that right?

Cheers,

1 Like

Yes, certainly. Thanks for the help and patience.