I ran into a problem while training a convolutional neural network with the Flux package.

using Flux, DifferentialEquations, DiffEqSensitivity
using LinearAlgebra  # diagm below comes from here

N = 39  # output channels; u0 below then has length N + 1 = 40, matching the rand(40, 21) target

model = Chain(Conv((3,), 2 => N, relu, pad = SamePad()))
p, re = Flux.destructure(model)  # flatten the weights into the vector p; re rebuilds the model
ps = Flux.params(p)

function ODE(du, u, p, t)
    # Rebuild the CNN from the flat parameter vector and apply it to a random input
    Mat = re(p)(rand(N, 2, 1)) |> Flux.squeezebatch       # (N, N) after dropping the batch dimension
    Vec = rand(1, N) * Mat                                # (1, N)
    G = diagm(0 => [-Vec[:]; 0.0]) + diagm(-1 => Vec[:])  # (N+1) x (N+1) generator matrix
    du .= G * u  # mutate du in place; plain "du = G * u" only rebinds the local variable
end

u0 = [1.0; zeros(N)]  # initial condition, length N + 1
prob = ODEProblem(ODE, u0, (0.0, 2.0), ps[1])

function loss()
    sol = Array(solve(prob, Tsit5(), u0 = u0, p = ps[1], saveat = 0.0:0.1:2.0))
    println("GO")
    return Flux.mse(sol, rand(40, 21))  # dummy target: 40 states x 21 saved time points
end

Flux.train!(loss, ps, Iterators.repeated((), 1), ADAM())  # train! expects the Params collection, not the raw vector

The above is my code. I want to use a CNN inside the right-hand side of an ordinary differential equation, but I get the following error during backpropagation:

UndefRefError: access to undefined reference
getindex at array.jl:802 [inlined]
conv_direct!(y::Array{ReverseDiff.TrackedReal{Float64, Float64, ReverseDiff.TrackedArray{Float32, Float64, 3, Array{Float32, 3}, Array{Float64, 3}}}, 5}, x::Array{Float64, 5}, w::ReverseDiff.TrackedArray{Float32, Float64, 5, Array{Float32, 5}, Array{Float64, 5}}, cdims::DenseConvDims{3, (3, 1, 1), 2, 39, 1, (1, 1, 1), (1, 1, 0, 0, 0, 0), (1, 1, 1), false}; alpha::ReverseDiff.TrackedReal{Float64, Float64, ReverseDiff.TrackedArray{Float32, Float64, 3, Array{Float32, 3}, Array{Float64, 3}}}, beta::Bool) at conv_direct.jl:98
conv_direct! at conv_direct.jl:51 [inlined]
conv!(y::Array{ReverseDiff.TrackedReal{Float64, Float64, ReverseDiff.TrackedArray{Float32, Float64, 3, Array{Float32, 3}, Array{Float64, 3}}}, 5}, in1::Array{Float64, 5}, in2::ReverseDiff.TrackedArray{Float32, Float64, 5, Array{Float32, 5}, Array{Float64, 5}}, cdims::DenseConvDims{3, (3, 1, 1), 2, 39, 1, (1, 1, 1), (1, 1, 0, 0, 0, 0), (1, 1, 1), false}; kwargs::Base.Iterators.Pairs{Union{}, Union{}, Tuple{}, NamedTuple{(), Tuple{}}}) at conv.jl:293
conv!(y::Array{ReverseDiff.TrackedReal{Float64, Float64, ReverseDiff.TrackedArray{Float32, Float64, 3, Array{Float32, 3}, Array{Float64, 3}}}, 5}, in1::Array{Float64, 5}, in2::ReverseDiff.TrackedArray{Float32, Float64, 5, Array{Float32, 5}, Array{Float64, 5}}, cdims::DenseConvDims{3, (3, 1, 1), 2, 39, 1, (1, 1, 1), (1, 1, 0, 0, 0, 0), (1, 1, 1), false}) at conv.jl:291
conv!(y::Array{ReverseDiff.TrackedReal{Float64, Float64, ReverseDiff.TrackedArray{Float32, Float64, 3, Array{Float32, 3}, Array{Float64, 3}}}, 3}, x::Array{Float64, 3}, w::ReverseDiff.TrackedArray{Float32, Float64, 3, Array{Float32, 3}, Array{Float64, 3}}, cdims::DenseConvDims{1, (3,), 2, 39, 1, (1,), (1, 1), (1,), false}; kwargs::Base.Iterators.Pairs{Union{}, Union{}, Tuple{}, NamedTuple{(), Tuple{}}}) at conv.jl:151
conv! at conv.jl:151 [inlined]
#conv#89 at conv.jl:91 [inlined]
conv(x::Array{Float64, 3}, w::ReverseDiff.TrackedArray{Float32, Float64, 3, Array{Float32, 3}, Array{Float64, 3}}, cdims::DenseConvDims{1, (3,), 2, 39, 1, (1,), (1, 1), (1,), false}) at conv.jl:89
(::Conv{1, 2, typeof(relu), ReverseDiff.TrackedArray{Float32, Float64, 3, Array{Float32, 3}, Array{Float64, 3}}, ReverseDiff.TrackedArray{Float32, Float64, 1, Vector{Float32}, Vector{Float64}}})(x::Array{Float64, 3}) at conv.jl:165
applychain at basic.jl:37 [inlined]
(::Chain{Tuple{Conv{1, 2, typeof(relu), ReverseDiff.TrackedArray{Float32, Float64, 3, Array{Float32, 3}, Array{Float64, 3}}, ReverseDiff.TrackedArray{Float32, Float64, 1, Vector{Float32}, Vector{Float64}}}}})(x::Array{Float64, 3}) at basic.jl:39
ODE(du::Vector{ReverseDiff.TrackedReal{Float32, Float64, ReverseDiff.TrackedArray{Float32, Float64, 1, Vector{Float32}, Vector{Float64}}}}, u::ReverseDiff.TrackedArray{Float64, Float64, 1, Vector{Float64}, Vector{Float64}}, p::ReverseDiff.TrackedArray{Float32, Float64, 1, Vector{Float32}, Vector{Float64}}, t::ReverseDiff.TrackedReal{Float64, Float64, ReverseDiff.TrackedArray{Float64, Float64, 1, Vector{Float64}, Vector{Float64}}}) at test.jl:8
ODEFunction at scimlfunctions.jl:334 [inlined]
(::DiffEqSensitivity.var"#109#124"{ODEFunction{true, typeof(ODE), UniformScaling{Bool}, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED), Nothing}})(u::ReverseDiff.TrackedArray{Float64, Float64, 1, Vector{Float64}, Vector{Float64}}, p::ReverseDiff.TrackedArray{Float32, Float64, 1, Vector{Float32}, Vector{Float64}}, t::ReverseDiff.TrackedArray{Float64, Float64, 1, Vector{Float64}, Vector{Float64}}) at adjoint_common.jl:146
ReverseDiff.GradientTape(f::Function, input::Tuple{Vector{Float64}, Vector{Float32}, Vector{Float64}}, cfg::ReverseDiff.GradientConfig{Tuple{ReverseDiff.TrackedArray{Float64, Float64, 1, Vector{Float64}, Vector{Float64}}, ReverseDiff.TrackedArray{Float32, Float64, 1, Vector{Float32}, Vector{Float64}}, ReverseDiff.TrackedArray{Float64, Float64, 1, Vector{Float64}, Vector{Float64}}}}) at tape.jl:207
ReverseDiff.GradientTape(f::Function, input::Tuple{Vector{Float64}, Vector{Float32}, Vector{Float64}}) at tape.jl:204
adjointdiffcache(g::DiffEqSensitivity.var"#df#217"{Matrix{Float64}, Colon}, sensealg::InterpolatingAdjoint{0, true, Val{:central}, ReverseDiffVJP{false}, Bool}, discrete::Bool, sol::ODESolution{Float64, 2, Vector{Vector{Float64}}, Nothing, Nothing, Vector{Float64}, Vector{Vector{Vector{Float64}}}, ODEProblem{Vector{Float64}, Tuple{Float64, Float64}, true, Vector{Float32}, ODEFunction{true, typeof(ODE), UniformScaling{Bool}, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED), Nothing}, Base.Iterators.Pairs{Union{}, Union{}, Tuple{}, NamedTuple{(), Tuple{}}}, SciMLBase.StandardODEProblem}, Tsit5, OrdinaryDiffEq.InterpolationData{ODEFunction{true, typeof(ODE), UniformScaling{Bool}, Nothing, Noth...

Reading the trace, the failure happens inside conv_direct! while ReverseDiff builds a tape through the Conv layer, and the types show Float32 weights mixed with Float64 states. I don't know whether this means a CNN simply can't be used inside an ODE right-hand side, or whether I have set up the backpropagation incorrectly.
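
Is the right fix something along these lines? This is only a guess on my part: rewrite the right-hand side out of place (since Zygote does not support mutation), promote the Flux parameters to Float64 to match u0, and explicitly request a Zygote-based vector-Jacobian product instead of the ReverseDiff tape that shows up in the trace. InterpolatingAdjoint and ZygoteVJP are existing DiffEqSensitivity names; ODE_oop, p64, and prob_oop are just names I made up for the sketch.

# Untested guess: out-of-place RHS plus a Zygote-based VJP, so the
# adjoint no longer has to push a ReverseDiff tape through conv!
function ODE_oop(u, p, t)
    Mat = re(p)(rand(N, 2, 1)) |> Flux.squeezebatch
    Vec = rand(1, N) * Mat
    G = diagm(0 => [-Vec[:]; 0.0]) + diagm(-1 => Vec[:])
    return G * u  # return du instead of mutating it
end

p64 = Float64.(p)  # match the Float64 element type of u0
prob_oop = ODEProblem(ODE_oop, u0, (0.0, 2.0), p64)
sol = solve(prob_oop, Tsit5(), saveat = 0.0:0.1:2.0,
            sensealg = InterpolatingAdjoint(autojacvec = ZygoteVJP()))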