UODE error

Following this link (signal (11.1): Segmentation fault in uODE · Issue #851 · SciML/DiffEqFlux.jl · GitHub), I tried the code from the solution proposed by @ChrisRackauckas, but I get the error below. Can anyone help me?

ERROR: ArgumentError: broadcasting over dictionaries and `NamedTuple`s is reserved
Stacktrace:
 [1] broadcastable(#unused#::NamedTuple{(:layer_1, :layer_2, :layer_3, :layer_4), NTuple{4, NamedTuple{(:weight, :bias), Tuple{Base.ReshapedArray{Float64, 2, SubArray{Float64, 1, Vector{Float64}, Tuple{UnitRange{Int64}}, true}, Tuple{}}, Base.ReshapedArray{Float64, 2, SubArray{Float64, 1, Vector{Float64}, Tuple{UnitRange{Int64}}, true}, Tuple{}}}}}})
   @ Base.Broadcast .\broadcast.jl:718
 [2] broadcasted(::Function, ::NamedTuple{(:layer_1, :layer_2, :layer_3, :layer_4), NTuple{4, NamedTuple{(:weight, :bias), Tuple{Base.ReshapedArray{Float64, 2, SubArray{Float64, 1, Vector{Float64}, Tuple{UnitRange{Int64}}, true}, Tuple{}}, Base.ReshapedArray{Float64, 2, SubArray{Float64, 1, Vector{Float64}, Tuple{UnitRange{Int64}}, true}, Tuple{}}}}}})
   @ Base.Broadcast .\broadcast.jl:1309
 [3] (::Optimization.var"#275#284"{Optimization.var"#274#283"{OptimizationFunction{true, Optimization.AutoZygote, var"#15#16", Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED_NO_TIME), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, SciMLBase.NullParameters}})(res::ComponentVector{Float64, Vector{Float64}, Tuple{Axis{(layer_1 = ViewAxis(1:175, Axis(weight = ViewAxis(1:150, ShapedAxis((25, 6), NamedTuple())), bias = ViewAxis(151:175, ShapedAxis((25, 1), NamedTuple())))), layer_2 = ViewAxis(176:825, Axis(weight = ViewAxis(1:625, ShapedAxis((25, 25), NamedTuple())), bias = ViewAxis(626:650, ShapedAxis((25, 1), NamedTuple())))), layer_3 = ViewAxis(826:1475, Axis(weight = ViewAxis(1:625, ShapedAxis((25, 25), NamedTuple())), bias = ViewAxis(626:650, ShapedAxis((25, 1), NamedTuple())))), layer_4 = ViewAxis(1476:1631, Axis(weight = ViewAxis(1:150, ShapedAxis((6, 25), NamedTuple())), bias = ViewAxis(151:156, ShapedAxis((6, 1), NamedTuple())))))}}}, θ::ComponentVector{Float64, Vector{Float64}, Tuple{Axis{(layer_1 = ViewAxis(1:175, Axis(weight = ViewAxis(1:150, ShapedAxis((25, 6), NamedTuple())), bias = ViewAxis(151:175, ShapedAxis((25, 1), NamedTuple())))), layer_2 = ViewAxis(176:825, Axis(weight = ViewAxis(1:625, ShapedAxis((25, 25), NamedTuple())), bias = ViewAxis(626:650, ShapedAxis((25, 1), NamedTuple())))), layer_3 = ViewAxis(826:1475, Axis(weight = ViewAxis(1:625, ShapedAxis((25, 25), NamedTuple())), bias = ViewAxis(626:650, ShapedAxis((25, 1), NamedTuple())))), layer_4 = ViewAxis(1476:1631, Axis(weight = ViewAxis(1:150, ShapedAxis((6, 25), NamedTuple())), bias = ViewAxis(151:156, ShapedAxis((6, 1), NamedTuple())))))}}}, args::Matrix{Float64})
   @ Optimization C:\Users\marco\.julia\packages\Optimization\vFala\src\function\zygote.jl:33
 [4] macro expansion
   @ C:\Users\marco\.julia\packages\OptimizationOptimisers\FWIuf\src\OptimizationOptimisers.jl:31 [inlined]
 [5] macro expansion
   @ C:\Users\marco\.julia\packages\Optimization\vFala\src\utils.jl:37 [inlined]
 [6] __solve(prob::OptimizationProblem{true, OptimizationFunction{true, Optimization.AutoZygote, var"#15#16", Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED_NO_TIME), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, ComponentVector{Float64, SubArray{Float64, 1, Vector{Float64}, Tuple{UnitRange{Int64}}, true}, Tuple{Axis{(layer_1 = ViewAxis(1:175, Axis(weight = ViewAxis(1:150, ShapedAxis((25, 6), NamedTuple())), bias = ViewAxis(151:175, ShapedAxis((25, 1), NamedTuple())))), layer_2 = ViewAxis(176:825, Axis(weight = ViewAxis(1:625, ShapedAxis((25, 25), NamedTuple())), bias = ViewAxis(626:650, ShapedAxis((25, 1), NamedTuple())))), layer_3 = ViewAxis(826:1475, Axis(weight = ViewAxis(1:625, ShapedAxis((25, 25), NamedTuple())), bias = ViewAxis(626:650, ShapedAxis((25, 1), NamedTuple())))), layer_4 = ViewAxis(1476:1631, Axis(weight = ViewAxis(1:150, ShapedAxis((6, 25), NamedTuple())), bias = ViewAxis(151:156, ShapedAxis((6, 1), NamedTuple())))))}}}, SciMLBase.NullParameters, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Base.Pairs{Symbol, Union{}, Tuple{}, NamedTuple{(), Tuple{}}}}, opt::Adam{Float64}, data::IterTools.NCycle{MLUtils.DataLoader{Vector{Matrix{Float64}}, Random._GLOBAL_RNG, Val{nothing}}}; maxiters::Int64, callback::Function, progress::Bool, save_best::Bool, kwargs::Base.Pairs{Symbol, Union{}, Tuple{}, NamedTuple{(), Tuple{}}})
   @ OptimizationOptimisers C:\Users\marco\.julia\packages\OptimizationOptimisers\FWIuf\src\OptimizationOptimisers.jl:30
 [7] __solve
   @ C:\Users\marco\.julia\packages\OptimizationOptimisers\FWIuf\src\OptimizationOptimisers.jl:7 [inlined]
 [8] #solve#540
   @ C:\Users\marco\.julia\packages\SciMLBase\QqtZA\src\solve.jl:84 [inlined]
 [9] top-level scope
   @ Untitled-1:98

Not with this level of detail, I'm afraid.

The error says you are trying to broadcast something over a dictionary or `NamedTuple`. How and why that happens we can't tell without you posting some code. Ideally that code is a bit simplified and can actually be run, because that helps with debugging.
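For reference, this is the error Julia raises for any broadcast over a `NamedTuple`; a minimal, package-free example:

nt = (a = 1.0, b = 2.0)
nt .+ 1   # ERROR: ArgumentError: broadcasting over dictionaries and `NamedTuple`s is reserved

So somewhere in the setup, the parameters are presumably still a plain `NamedTuple` rather than a flat array.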

using Optimization, OptimizationOptimisers, OptimizationOptimJL

using LinearAlgebra, Statistics

using ComponentArrays, Lux, Zygote, Plots, LaTeXStrings, StableRNGs, IterTools
import Flux

rng = StableRNG(1111)

function lotka!(du, u, p, t)
    α, β, γ, δ = [1.3, 0.9, 0.8, 1.8]
    du[1] = α * u[1] - β * u[2] * u[1]
    du[2] = γ * u[1] * u[2] - δ * u[2]
end

# Simulate ground-truth Lotka-Volterra data and pack states + times into a DataLoader
function get_train_data()
    t_true = LinRange(0.0,5.0,300)
    tspan = (0.0, 5.0)
    u0 = 5.0f0 * rand(rng, 2)
    prob = ODEProblem(lotka!, u0, tspan, zeros(4))
    solution = solve(prob, Vern7(), abstol = 1e-12, reltol = 1e-12, saveat = t_true);

    Xₙ = Array(solution);
    t = solution.t;

    data = Flux.DataLoader([vcat(Xₙ, t')], batchsize = 1)
    return data
end

train_loader = get_train_data();

# Gaussian RBF activation
rbf(x) = exp.(-(x .^ 2))

# Multilayer FeedForward
U = Lux.Chain(  Lux.Dense(6, 25, rbf), 
                Lux.Dense(25, 25, rbf), 
                Lux.Dense(25, 25, rbf),
                Lux.Dense(25, 6))

ps, st = Lux.setup(rng, U)

p_ = zeros(4)

p = ComponentArray{Float64}(p=ps, p_true=p_) 

# UDE right-hand side: known linear terms plus the neural network correction û
nn_dynamics! = let
       function nn_dynamics!(du, u, p, t)
              û = U(u, p.p, st)[1] 

              du[1] = p.p_true[1] * u[1] + û[1]
              du[2] = -p.p_true[4] * u[2] + û[2]

              du[3] = p.p_true[1] * u[1] + û[3]
              du[4] = -p.p_true[4] * u[2] + û[4]

              du[5] = p.p_true[1] * u[1] + û[5]
              du[6] = -p.p_true[4] * u[2] + û[6]
       end
end

# Define the problem
prob_nn = ODEProblem(nn_dynamics!, repeat(first(train_loader)[1][1:2,1],3), (0.0, 5.0), p)

# Solve the UDE over the batch's time grid; rows 1-2 of `batch` are the states, row 3 the times
function predict(θ, batch)
    T = batch[3, :];  

    IC = batch[1:2, 1];
    
    _prob = remake(prob_nn, u0 = repeat(IC,3), tspan = (T[1], T[end]), p=ComponentArray(p=θ, p_true=[1.3, 0.9, 0.8, 1.8]))
   
    Array(solve(_prob, AutoTsit5(Rodas5()), saveat = T,
                abstol = 1e-6, reltol = 1e-6,  verbose=false))
end

# Compare only the two observed states against the data
function loss(θ, batch)
    X̂ = predict(θ, batch);

    X_true = @view batch[1:2, :]; 

    mean(abs2, X_true .- X̂[1:2,:])
end

losses = Float64[]

callback = function (p, l)
    push!(losses, l)
    if length(losses) % 10 == 0
        println("Current loss after $(length(losses)) iterations: $(losses[end])")
    end
    return false
end

adtype = Optimization.AutoZygote();
optfun = Optimization.OptimizationFunction((θ, p, batch) -> loss(θ, batch), adtype); 
optprob = Optimization.OptimizationProblem(optfun, p.p);

res = Optimization.solve(  optprob,
                            ADAM(0.001), 
                            IterTools.ncycle(train_loader, 100),
                            callback = callback)

OK, I am not too familiar with most of the libraries you use, so I can only guess at what is happening.
I suspect the line

optprob = Optimization.OptimizationProblem(optfun, p.p);

and more specifically `p.p`, is responsible for the error. Can you try passing `ps` directly and see if that fixes it?

optprob = Optimization.OptimizationProblem(optfun, ps);
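For context, a minimal sketch (only `ComponentArrays` is assumed here, and the layer-like `NamedTuple` is hypothetical) of why the container type matters: the optimizers update parameters with array operations such as broadcasting, which plain `NamedTuple`s do not support, while a `ComponentArray` is a flat vector that still allows access by name:

using ComponentArrays
nt = (weight = rand(2, 2), bias = rand(2))   # hypothetical layer parameters
ca = ComponentArray(nt)   # one flat Float64 vector
ca .* 2.0                 # broadcasting works on a ComponentArray
ca.weight                 # the named 2×2 block is still accessible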

It changes the error:

ERROR: MethodError: no method matching copy(::NamedTuple{(:layer_1, :layer_2, :layer_3, :layer_4), NTuple{4, NamedTuple{(:weight, :bias), Tuple{Matrix{Float32}, Matrix{Float32}}}}})

Closest candidates are:
  copy(::Union{DiffEqNoiseProcess.BoxWedgeTail, DiffEqNoiseProcess.NoiseApproximation, DiffEqNoiseProcess.NoiseGrid, DiffEqNoiseProcess.NoiseWrapper, DiffEqNoiseProcess.VirtualBrownianTree})
   @ DiffEqNoiseProcess C:\Users\marco\.julia\packages\DiffEqNoiseProcess\clsTF\src\copy_noise_types.jl:55
  copy(::Union{TransparentColor{C, T}, C} where {T, C<:Union{AbstractRGB{T}, AbstractGray{T}}})
   @ ColorVectorSpace C:\Users\marco\.julia\packages\ColorVectorSpace\QI5vM\src\ColorVectorSpace.jl:250
  copy(::T) where T<:RandomNumbers.Xorshifts.AbstractXoshiro128
   @ RandomNumbers C:\Users\marco\.julia\packages\RandomNumbers\3pD1N\src\Xorshifts\xoshiro128.jl:61
  ...

Stacktrace:
 [1] __solve(prob::OptimizationProblem{true, OptimizationFunction{true, Optimization.AutoZygote, var"#25#26", Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED_NO_TIME), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, NamedTuple{(:layer_1, :layer_2, :layer_3, :layer_4), NTuple{4, NamedTuple{(:weight, :bias), Tuple{Matrix{Float32}, Matrix{Float32}}}}}, SciMLBase.NullParameters, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Base.Pairs{Symbol, Union{}, Tuple{}, NamedTuple{(), Tuple{}}}}, opt::Adam{Float64}, data::IterTools.NCycle{MLUtils.DataLoader{Vector{Matrix{Float64}}, Random._GLOBAL_RNG, Val{nothing}}}; maxiters::Int64, callback::Function, progress::Bool, save_best::Bool, kwargs::Base.Pairs{Symbol, Union{}, Tuple{}, NamedTuple{(), Tuple{}}})
   @ OptimizationOptimisers C:\Users\marco\.julia\packages\OptimizationOptimisers\FWIuf\src\OptimizationOptimisers.jl:18
 [2] solve(prob::OptimizationProblem{true, OptimizationFunction{true, Optimization.AutoZygote, var"#25#26", Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED_NO_TIME), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, NamedTuple{(:layer_1, :layer_2, :layer_3, :layer_4), NTuple{4, NamedTuple{(:weight, :bias), Tuple{Matrix{Float32}, Matrix{Float32}}}}}, SciMLBase.NullParameters, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Base.Pairs{Symbol, Union{}, Tuple{}, NamedTuple{(), Tuple{}}}}, alg::Adam{Float64}, args::IterTools.NCycle{MLUtils.DataLoader{Vector{Matrix{Float64}}, Random._GLOBAL_RNG, Val{nothing}}}; kwargs::Base.Pairs{Symbol, var"#23#24", Tuple{Symbol}, NamedTuple{(:callback,), Tuple{var"#23#24"}}})
   @ SciMLBase C:\Users\marco\.julia\packages\SciMLBase\QqtZA\src\solve.jl:84
 [3] top-level scope
   @ Untitled-1:98
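This `MethodError` is generic: as the message says, Base has no `copy` method for `NamedTuple`s (they are immutable), and the optimizer presumably copies the initial parameters before updating them. A one-line illustration:

copy((a = 1.0, b = 2.0))   # ERROR: MethodError: no method matching copy(::NamedTuple{...})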
If I instead first convert `ps` to a `ComponentArray`:

ps = ComponentArray{Float64}(ps)

and then:

optprob = Optimization.OptimizationProblem(optfun, p.p);

the error is still there.
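As an aside, here is what that conversion does, illustrated on a hypothetical small network: it flattens Lux's nested `NamedTuple` of weights and biases into a single `Float64` vector while preserving access by layer name:

using ComponentArrays, Lux, StableRNGs
rng = StableRNG(1111)
model = Lux.Chain(Lux.Dense(2, 3), Lux.Dense(3, 1))
ps_nt, st = Lux.setup(rng, model)          # nested NamedTuple of weights and biases
ps_flat = ComponentArray{Float64}(ps_nt)   # one flat Float64 vector
ps_flat.layer_1.weight                     # the named views are preserved
length(ps_flat)                            # 13 parameters in total here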

It looks like it worked fine on the first try? For reference, the only change relative to your script is the added `using OrdinaryDiffEq, SciMLSensitivity` line:

using Optimization, OptimizationOptimisers, OptimizationOptimJL

using LinearAlgebra, Statistics

using ComponentArrays, Lux, Zygote, Plots, LaTeXStrings, StableRNGs, IterTools
import Flux

using OrdinaryDiffEq, SciMLSensitivity  # ODEProblem/solve, plus adjoint rules so Zygote can differentiate through the solver

rng = StableRNG(1111)

function lotka!(du, u, p, t)
    α, β, γ, δ = [1.3, 0.9, 0.8, 1.8]
    du[1] = α * u[1] - β * u[2] * u[1]
    du[2] = γ * u[1] * u[2] - δ * u[2]
end

function get_train_data()
    t_true = LinRange(0.0,5.0,300)
    tspan = (0.0, 5.0)
    u0 = 5.0f0 * rand(rng, 2)
    prob = ODEProblem(lotka!, u0, tspan, zeros(4))
    solution = solve(prob, Vern7(), abstol = 1e-12, reltol = 1e-12, saveat = t_true);

    Xₙ = Array(solution);
    t = solution.t;

    data = Flux.DataLoader([vcat(Xₙ, t')], batchsize = 1)
    return data
end

train_loader = get_train_data();

rbf(x) = exp.(-(x .^ 2))

# Multilayer FeedForward
U = Lux.Chain(  Lux.Dense(6, 25, rbf), 
                Lux.Dense(25, 25, rbf), 
                Lux.Dense(25, 25, rbf),
                Lux.Dense(25, 6))

ps, st = Lux.setup(rng, U)

p_ = zeros(4)

p = ComponentArray{Float64}(p=ps, p_true=p_) 

nn_dynamics! = let
       function nn_dynamics!(du, u, p, t)
              û = U(u, p.p, st)[1] 

              du[1] = p.p_true[1] * u[1] + û[1]
              du[2] = -p.p_true[4] * u[2] + û[2]

              du[3] = p.p_true[1] * u[1] + û[3]
              du[4] = -p.p_true[4] * u[2] + û[4]

              du[5] = p.p_true[1] * u[1] + û[5]
              du[6] = -p.p_true[4] * u[2] + û[6]
       end
end

# Define the problem
prob_nn = ODEProblem(nn_dynamics!, repeat(first(train_loader)[1][1:2,1],3), (0.0, 5.0), p)

function predict(θ, batch)
    T = batch[3, :];  

    IC = batch[1:2, 1];
    
    _prob = remake(prob_nn, u0 = repeat(IC,3), tspan = (T[1], T[end]), p=ComponentArray(p=θ, p_true=[1.3, 0.9, 0.8, 1.8]))
   
    Array(solve(_prob, AutoTsit5(Rodas5()), saveat = T,
                abstol = 1e-6, reltol = 1e-6,  verbose=false))
end

function loss(θ, batch)
    X̂ = predict(θ, batch);

    X_true = @view batch[1:2, :]; 

    mean(abs2, X_true .- X̂[1:2,:])
end

losses = Float64[]

callback = function (p, l)
    push!(losses, l)
    if length(losses) % 10 == 0
        println("Current loss after $(length(losses)) iterations: $(losses[end])")
    end
    return false
end

adtype = Optimization.AutoZygote();
optfun = Optimization.OptimizationFunction((θ, p, batch) -> loss(θ, batch), adtype); 
optprob = Optimization.OptimizationProblem(optfun, p.p);

res = Optimization.solve(  optprob,
                            ADAM(0.001), 
                            IterTools.ncycle(train_loader, 100),
                            callback = callback)