Hello,
I am trying to train a simple neural network with an external input that depends on time. The input is interpolated using:
broadband = LinearInterpolation(time, U)
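For context, LinearInterpolation here comes from Interpolations.jl, and time and U are vectors holding the measured time points and input values. A minimal sketch of the setup (the placeholder values below only stand in for my actual data):

using Interpolations

# Placeholder stand-ins for the measured time grid and the external input signal
time = collect(0.0:0.01:10.0)
U = sin.(2π .* time)

broadband = LinearInterpolation(time, U)
broadband(0.123)  # the interpolant can be evaluated at any t inside the grid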
The network runs without any problem for the first iteration of the optimization process using ADAM. After calculating the loss, the program stops working and I get the following error message:
ERROR: LoadError: ArgumentError: unable to check bounds for indices of type Interpolations.WeightedAdjIndex{2,Float64}
Does the backpropagation algorithm fail when interpolation is involved?
Please find an excerpt of my code below:
using DiffEqFlux, DifferentialEquations, Interpolations

# Define the neural network and the ODE right-hand side
nn_model = FastChain(FastDense(4, 8, tanh), FastDense(8, 4))
p_model = initial_params(nn_model)
function dudt(u, p, t)
    # The second NN input is the interpolated external forcing at time t
    nn_model(vcat(u[1], broadband(t), u[3], u[4]), p)
end
prob = ODEProblem(dudt, u0, (time_training[1], time_training[end]), p_model)
function predict_neuralode(p)
    _prob = remake(prob, p = p)
    sol = Array(solve(_prob, Tsit5(), saveat = t_step, abstol = 1e-8, reltol = 1e-6))
    return sol[3, :]
end
# Loss function: sum of squared differences between predicted and measured data
function loss(p)
    sol = predict_neuralode(p)
    l = sum(abs2, sol - Q_training)
    println("Loss: $l")
    return l
end
# start optimization
res0 = DiffEqFlux.sciml_train(loss, p_model, ADAM(0.01), maxiters = 100)
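Would the following be a reasonable way to check whether the interpolant alone is the problem? This is only a minimal sketch, assuming Zygote is the AD backend that sciml_train uses here; time_training[2] is just an arbitrary point inside the training window.

using Zygote

# Differentiate through the interpolant alone; if this already errors,
# the problem is the interpolation and not the neural ODE itself.
Zygote.gradient(t -> broadband(t), time_training[2])

# Gradient of the full loss w.r.t. the parameters, outside of sciml_train
Zygote.gradient(loss, p_model)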