I am trying to train a custom DiffEqFlux neural ODE model and want to use parts of the solution in a regularization term. I wrote the following brief function:
function gen_penalty(arr, similarity)
    [similarity(arr[i, :], arr[j, :]) for i in 1:size(arr, 1) for j in 1:size(arr, 1)]
end
where arr is a slice from the output of a solve call on a DifferentialEquations.ODEProblem, and similarity is a similarity function that takes two vectors and returns a number. For example:
function euclid_sim(x1, x2)
    return sum((x1 .- x2).^2)^0.5
end
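For reference, here is a stripped-down version of the failing call, with a random matrix standing in for the actual solution slice (the sizes are arbitrary, and the sum is just there to get a scalar for the gradient); I call Zygote directly since that is the AD backend sciml_train uses under the hood:

using Zygote

arr0 = rand(5, 10)                         # stand-in for the solution slice
loss(a) = sum(gen_penalty(a, euclid_sim))  # penalty folded into a scalar loss
Zygote.gradient(loss, arr0)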
When calculating the gradients I get the error:
ERROR: LoadError: Mutating arrays is not supported -- called push!(::Vector{Float64}, _...)
I am puzzled as to what I am mutating, since the functions themselves do not overwrite anything at all. I am running Julia v1.7.1; any suggestions are very welcome!
Full stacktrace:
ERROR: LoadError: Mutating arrays is not supported -- called push!(::Vector{Float64}, _...)
Stacktrace:
[1] error(s::String)
@ Base ./error.jl:33
[2] (::Zygote.var"#450#451"{Vector{Float64}})(#unused#::Nothing)
@ Zygote ~/.julia/packages/Zygote/FPUm3/src/lib/array.jl:78
[3] (::Zygote.var"#2359#back#452"{Zygote.var"#450#451"{Vector{Float64}}})(Δ::Nothing)
@ Zygote ~/.julia/packages/ZygoteRules/AIbCs/src/adjoint.jl:67
[4] Pullback
@ ./array.jl:823 [inlined]
[5] (::typeof(∂(grow_to!)))(Δ::FillArrays.Fill{Float64, 1, Tuple{Base.OneTo{Int64}}})
@ Zygote ~/.julia/packages/Zygote/FPUm3/src/compiler/interface2.jl:0
[6] Pullback
@ ./array.jl:801 [inlined]
[7] (::typeof(∂(grow_to!)))(Δ::FillArrays.Fill{Float64, 1, Tuple{Base.OneTo{Int64}}})
@ Zygote ~/.julia/packages/Zygote/FPUm3/src/compiler/interface2.jl:0
[8] Pullback
@ ./array.jl:701 [inlined]
[9] (::typeof(∂(_collect)))(Δ::FillArrays.Fill{Float64, 1, Tuple{Base.OneTo{Int64}}})
@ Zygote ~/.julia/packages/Zygote/FPUm3/src/compiler/interface2.jl:0
[10] Pullback
@ ./array.jl:649 [inlined]
[11] (::typeof(∂(collect)))(Δ::FillArrays.Fill{Float64, 1, Tuple{Base.OneTo{Int64}}})
@ Zygote ~/.julia/packages/Zygote/FPUm3/src/compiler/interface2.jl:0
[12] Pullback
@ /disk/scratch/s1788788/NODE_work/CaMKII_system_v4/codebase.jl:118 [inlined]
[13] (::typeof(∂(gen_pen_matrix)))(Δ::FillArrays.Fill{Float64, 1, Tuple{Base.OneTo{Int64}}})
@ Zygote ~/.julia/packages/Zygote/FPUm3/src/compiler/interface2.jl:0
[14] Pullback
@ /disk/scratch/s1788788/NODE_work/CaMKII_system_v4/sciml_model_training.jl:131 [inlined]
[15] (::typeof(∂(loss_n_ode)))(Δ::Tuple{Float64, Nothing, Nothing, Nothing})
@ Zygote ~/.julia/packages/Zygote/FPUm3/src/compiler/interface2.jl:0
[16] Pullback
@ /disk/scratch/s1788788/NODE_work/CaMKII_system_v4/sciml_model_training.jl:206 [inlined]
[17] (::typeof(∂(training_loss)))(Δ::Tuple{Float64, Nothing, Nothing, Nothing})
@ Zygote ~/.julia/packages/Zygote/FPUm3/src/compiler/interface2.jl:0
[18] Pullback
@ ~/.julia/packages/DiffEqFlux/w4Zm0/src/train.jl:86 [inlined]
[19] #213
@ ~/.julia/packages/Zygote/FPUm3/src/lib/lib.jl:203 [inlined]
[20] #1752#back
@ ~/.julia/packages/ZygoteRules/AIbCs/src/adjoint.jl:67 [inlined]
[21] Pullback
@ ~/.julia/packages/SciMLBase/BoNUy/src/problems/basic_problems.jl:109 [inlined]
[22] #213
@ ~/.julia/packages/Zygote/FPUm3/src/lib/lib.jl:203 [inlined]
[23] #1752#back
@ ~/.julia/packages/ZygoteRules/AIbCs/src/adjoint.jl:67 [inlined]
[24] Pullback
@ ~/.julia/packages/GalacticOptim/1ht5p/src/function/zygote.jl:6 [inlined]
[25] (::typeof(∂(#267)))(Δ::Float64)
@ Zygote ~/.julia/packages/Zygote/FPUm3/src/compiler/interface2.jl:0
[26] #213
@ ~/.julia/packages/Zygote/FPUm3/src/lib/lib.jl:203 [inlined]
[27] #1752#back
@ ~/.julia/packages/ZygoteRules/AIbCs/src/adjoint.jl:67 [inlined]
[28] Pullback
@ ~/.julia/packages/GalacticOptim/1ht5p/src/function/zygote.jl:8 [inlined]
[29] (::typeof(∂(#270)))(Δ::Float64)
@ Zygote ~/.julia/packages/Zygote/FPUm3/src/compiler/interface2.jl:0
[30] (::Zygote.var"#57#58"{typeof(∂(#270))})(Δ::Float64)
@ Zygote ~/.julia/packages/Zygote/FPUm3/src/compiler/interface.jl:41
[31] gradient(f::Function, args::Vector{Float64})
@ Zygote ~/.julia/packages/Zygote/FPUm3/src/compiler/interface.jl:76
[32] (::GalacticOptim.var"#268#278"{GalacticOptim.var"#267#277"{SciMLBase.OptimizationFunction{true, GalacticOptim.AutoZygote, DiffEqFlux.var"#84#89"{typeof(training_loss)}, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, Nothing}})(::Vector{Float64}, ::Vector{Float64})
@ GalacticOptim ~/.julia/packages/GalacticOptim/1ht5p/src/function/zygote.jl:8
[33] macro expansion
@ ~/.julia/packages/GalacticOptim/1ht5p/src/solve/flux.jl:27 [inlined]
[34] macro expansion
@ ~/.julia/packages/GalacticOptim/1ht5p/src/utils.jl:35 [inlined]
[35] __solve(prob::SciMLBase.OptimizationProblem{false, SciMLBase.OptimizationFunction{false, GalacticOptim.AutoZygote, SciMLBase.OptimizationFunction{true, GalacticOptim.AutoZygote, DiffEqFlux.var"#84#89"{typeof(training_loss)}, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, GalacticOptim.var"#268#278"{GalacticOptim.var"#267#277"{SciMLBase.OptimizationFunction{true, GalacticOptim.AutoZygote, DiffEqFlux.var"#84#89"{typeof(training_loss)}, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, Nothing}}, GalacticOptim.var"#271#281"{GalacticOptim.var"#267#277"{SciMLBase.OptimizationFunction{true, GalacticOptim.AutoZygote, DiffEqFlux.var"#84#89"{typeof(training_loss)}, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, Nothing}}, GalacticOptim.var"#276#286", Nothing, Nothing, Nothing}, Vector{Float64}, SciMLBase.NullParameters, Vector{Float64}, Nothing, Nothing, Nothing, Base.Pairs{Symbol, typeof(callback), Tuple{Symbol}, NamedTuple{(:cb,), Tuple{typeof(callback)}}}}, opt::Flux.Optimise.ADAM, data::Base.Iterators.Cycle{Tuple{GalacticOptim.NullData}}; maxiters::Int64, cb::Function, progress::Bool, save_best::Bool, kwargs::Base.Pairs{Symbol, Union{}, Tuple{}, NamedTuple{(), Tuple{}}})
@ GalacticOptim ~/.julia/packages/GalacticOptim/1ht5p/src/solve/flux.jl:25
[36] #solve#480
@ ~/.julia/packages/SciMLBase/BoNUy/src/solve.jl:3 [inlined]
[37] sciml_train(::typeof(training_loss), ::Vector{Float64}, ::Flux.Optimise.ADAM, ::Nothing; lower_bounds::Vector{Float64}, upper_bounds::Vector{Float64}, maxiters::Int64, kwargs::Base.Pairs{Symbol, typeof(callback), Tuple{Symbol}, NamedTuple{(:cb,), Tuple{typeof(callback)}}})
@ DiffEqFlux ~/.julia/packages/DiffEqFlux/w4Zm0/src/train.jl:91
[38] top-level scope
@ /disk/scratch/s1788788/NODE_work/CaMKII_system_v4/sciml_model_training.jl:220
in expression starting at /disk/scratch/s1788788/NODE_work/CaMKII_system_v4/sciml_model_training.jl:220
(codebase.jl:118 is just the gen_penalty function shown above; it is named gen_pen_matrix in the actual code.)
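Reading the trace, frames [2] through [11] point at push!, grow_to!, and collect: a comprehension with two for clauses is materialized by push!-ing into a growing vector, and I suspect that internal push! is the mutation Zygote objects to. One rewrite I am considering (just a sketch, untested in my full training loop) precomputes the (i, j) index pairs outside the differentiated path with Zygote.ignore and then uses map, which Zygote does have an adjoint for:

# sketch of a mutation-free rewrite, assuming Zygote is the AD backend
using Zygote

function gen_penalty(arr, similarity)
    n = size(arr, 1)
    # build the (i, j) pairs outside the differentiated path;
    # only integer indices are produced here, no tracked values
    idx = Zygote.ignore() do
        vec(collect(Iterators.product(1:n, 1:n)))
    end
    # map avoids the push!-based collect of a double-for comprehension
    map(t -> similarity(arr[t[1], :], arr[t[2], :]), idx)
end

(The pairs come out in a different order than with the double comprehension, which should not matter if the penalty is summed.) Is that the right way to think about the error, or is there a more idiomatic fix?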