ModelingToolkit and State Variables with Optimization

I have the simple RC circuit system from the documentation.

module Grid1

export get_model

using ModelingToolkit
using ModelingToolkitStandardLibrary.Electrical
using ModelingToolkitStandardLibrary.Blocks: Constant

function get_model(; resistance::Float64=0.4, capacitance::Float64=0.6, voltage::Float64=1.0)
    @variables t
    @named R = Resistor(R=resistance)
    @named C = Capacitor(C=capacitance)
    @named V = Voltage()
    @named ground = Ground()
    @named source = Constant(k=voltage)

    rc_eqs = [
        connect(V.p, R.p)
        connect(R.n, C.p)
        connect(C.n, V.n, ground.g)
        connect(V.V, source.output)
    ]

    @named _rc_model = ODESystem(rc_eqs, t)

    @named rc_model = compose(_rc_model, [R, C, V, source, ground])
    sys = structural_simplify(rc_model)
    return sys, parameters(sys), states(sys), C 
end

end
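
(Aside: the reference data file used below, working_example/res1.csv, is not shown in the thread. Purely as an assumption about its layout — a "time" column plus a "C₊v(t)" column — it could have been generated along these lines; the time grid, save step, and path are guesses:)

# Hypothetical generation of the reference data (not part of the original post).
using ModelingToolkit, DifferentialEquations, CSV, DataFrames

include("./grid1.jl")
using .Grid1

sys, params, state, C = get_model()              # defaults: R = 0.4, C = 0.6, V = 1.0
u0 = [s => 0.0 for s in state]
ref_sol = solve(ODAEProblem(sys, u0, (0.0, 10.0)), Tsit5(), saveat=0.1)

# Save the capacitor voltage under the flattened name the optimization script expects.
df = DataFrame("time" => ref_sol.t, "C₊v(t)" => ref_sol[C.v])
CSV.write("working_example/res1.csv", df)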

I want to run an optimization of that system with respect to the parameters [R and C]. The following code works well.

using Revise

includet("./grid1.jl")
using .Grid1

using ModelingToolkit, DifferentialEquations
using Optimization, OptimizationOptimisers, DiffEqSensitivity, ForwardDiff, Zygote, OptimizationOptimJL
using CSV, DataFrames, Statistics

function get_obs(fname::String, obs_vars::Vector{String})
    df = DataFrame(CSV.File(fname))
    return df[:, "time"], df[:, obs_vars]
end

function loss(p::Vector{Float64})
    pred_sol = solve(prob, Tsit5(), p=p, saveat=t_save)
    pred = Array(pred_sol')
    # pred = Array(pred_sol[model_state[1]])  # what I would actually like to write
    mse = mean((pred - obs_vals) .^ 2)
    return mse
end

function callback(p, l)
    println(l, " ", p)
    return false
end

obs_var = ["C₊v(t)"]
obs_data_file = "working_example/res1.csv"

t_save, obs = get_obs(obs_data_file, obs_var)

model, param_vars, model_state, target_var = get_model()


u0 = [s => 0.0 for s in model_state]
tspan = (t_save[1], last(t_save))

lb = zeros(length(param_vars))
ub = zeros(length(param_vars)) .+ 10

prob = ODAEProblem(model, u0, tspan)
obs_vals = Array(obs)

p_init = rand(length(param_vars))

adtype = Optimization.AutoZygote()
optf = Optimization.OptimizationFunction((x, p) -> loss(x), adtype)
optprob = Optimization.OptimizationProblem(optf, p_init, lb=lb, ub=ub)
result = Optimization.solve(optprob, LBFGS(), callback=callback, maxiters=300)
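
(If it helps: result.u holds the optimized parameter vector in the same order as param_vars, so it can be mapped back to the symbolic parameters — a small sketch, assuming solve does not reorder them:)

# Map the optimized vector back to the symbolic parameters returned by parameters(sys).
fitted_params = Dict(param_vars .=> result.u)
println(fitted_params)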

Sometimes I need to refer to the output variable by name explicitly. I mean that in the cost function I want to do something like this:

function loss(p::Vector{Float64})
    pred_sol = solve(prob, Tsit5(), p=p, saveat=t_save)
    pred = Array(pred_sol[model_state[1]])  # this doesn't work
    # pred = Array(pred_sol')  # this works
    mse = mean((pred - obs_vals) .^ 2)
    return mse
end

The model_state is:

julia> model_state
1-element Vector{Term{Real, Base.ImmutableDict{DataType, Any}}}:
 C₊v(t)

I also have the component C, but if I do pred = Array(pred_sol[C.v]) I get the same error.

The error:

ERROR: ArgumentError: invalid index: C₊v(t) of type Term{Real, Base.ImmutableDict{DataType, Any}}

It’s important to me to have the option to refer to a variable by name explicitly, because sometimes it can be a variable that is not part of the state. For example, it could be R.v, etc.

If I solve this outside of the loss function

sol = solve(prob, Tsit5(), p=p, saveat=t_save)

I can get sol[C.v] without any errors.

Any idea how I can use the names of variables in the loss function?

Thanks!

Your working and desired loss functions look the same to me, but perhaps you want something like this:

function loss(p::Vector{Float64})
    pred_sol = solve(prob, Tsit5(), p=p, saveat=t_save)
    @variables t C₊v(t)
    pred = Array(pred_sol[C₊v])
    # pred = Array(pred_sol[model_state[1]])
    # pred = Array(pred_sol')
    mse = mean((pred - obs_vals) .^ 2)
    return mse
end

Thank you, @contradict, but it doesn’t work.

I have target_var:

julia> target_var
Model C with 4 (6) equations
States (6):
  v(t) [defaults to 0.0]
  i(t) [defaults to 0.0]
  p₊v(t) [defaults to 1.0]
  p₊i(t) [defaults to 1.0]
⋮
Parameters (1):
  C [defaults to 0.6]
julia> target_var.v
C₊v(t)

I can do pred_sol[target_var.v] without problems (anywhere outside the loss function). But inside the loss function, I get the error above.

Hmm, perhaps your ModelingToolkit is out of date? The loss function I posted works as far as I can tell.
Note that I redefined the variables in the loss function.

I updated all my packages, but the error remains the same.
It seems to be something related to the autograd backend, AutoZygote…

[[deps.ModelingToolkit]]
deps = ["AbstractTrees", "ArrayInterfaceCore", "Combinatorics", "ConstructionBase", "DataStructures", "DiffEqBase", "DiffEqCallbacks", "DiffRules", "Distributed", "Distributions", "DocStringExtensions", "DomainSets", "ForwardDiff", "Graphs", "IfElse", "InteractiveUtils", "JuliaFormatter", "JumpProcesses", "LabelledArrays", "Latexify", "Libdl", "LinearAlgebra", "MacroTools", "NaNMath", "NonlinearSolve", "RecursiveArrayTools", "Reexport", "RuntimeGeneratedFunctions", "SciMLBase", "Serialization", "Setfield", "SparseArrays", "SpecialFunctions", "StaticArrays", "SymbolicUtils", "Symbolics", "UnPack", "Unitful"]
git-tree-sha1 = "8766e01941f35f1f50c4ddf815d5eccab8d4c794"
uuid = "961ee093-0014-501f-94e3-6117800e7a78"
version = "8.16.0"

[[deps.ModelingToolkitStandardLibrary]]
deps = ["IfElse", "ModelingToolkit", "OffsetArrays", "OrdinaryDiffEq", "Symbolics"]
git-tree-sha1 = "7ea10c396479260034d0ab2ffe11e3d597a97d32"
uuid = "16a59e39-deab-5bd0-87e4-056b12336739"
version = "1.4.0"
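
(If the failure really is tied to the Zygote reverse pass rather than to ModelingToolkit itself — an assumption, not something confirmed in this thread — one workaround is to resolve the target variable to an integer state index once, outside the loss, and index the solution numerically inside it; forward-mode AD is another option, since it avoids the Zygote pullback entirely.)

# Sketch of a possible workaround, assuming the error comes from symbolic
# indexing of the solution inside Zygote's pullback.
state_idx = findfirst(isequal(target_var.v), states(model))   # resolve C₊v(t) to its integer position

function loss(p)   # no type annotation, so ForwardDiff duals are also accepted
    pred_sol = solve(prob, Tsit5(), p=p, saveat=t_save)
    pred = Array(pred_sol)[state_idx, :]          # plain integer indexing instead of the symbolic lookup
    mse = mean((pred .- vec(obs_vals)) .^ 2)      # vec(...) so the shapes match
    return mse
end

# Alternative: try forward-mode AD instead of Zygote.
# adtype = Optimization.AutoForwardDiff()

(Note that integer indexing only covers state variables; observed quantities such as R.v would still need the symbolic lookup, so this is at best a partial workaround.)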

I see a similar issue here:

@lamorton, did you find a solution?

I’ve created the issue here

I created a minimal working example to reproduce it:
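
(As a rough sketch — assembled only from the code already shown in this thread, and not necessarily identical to the example posted in the issue — such a reproducer might look like this:)

# Hypothetical condensed reproducer: differentiate a loss that indexes the
# solution symbolically. Dummy observations are used just to exercise the pullback.
using ModelingToolkit, DifferentialEquations, DiffEqSensitivity, Zygote, Statistics

include("./grid1.jl")
using .Grid1

sys, param_vars, model_state, C = get_model()
u0 = [s => 0.0 for s in model_state]
prob = ODAEProblem(sys, u0, (0.0, 10.0))
t_save = 0.0:0.1:10.0
obs_vals = zeros(length(t_save))

loss(p) = mean((Array(solve(prob, Tsit5(), p=p, saveat=t_save)[model_state[1]]) .- obs_vals) .^ 2)

loss(rand(length(param_vars)))                    # fine outside the AD pass
Zygote.gradient(loss, rand(length(param_vars)))   # should hit the ArgumentError above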