# No method matching loss_multiple_shooting

using DataDrivenDiffEq
using DiffEqFlux, Flux
using Plots
using Random
# Directory containing this script. NOTE: `@DIR` is not a Julia macro —
# the built-in is `@__DIR__` (the original paste also used curly “smart
# quotes”, which are invalid string delimiters in Julia).
path = @__DIR__
include("multipleshooting.jl")

## Multiple shooting function

#include(“Multiple_shooting1.jl”)

## generating possible basis

# Candidate library: all monomials of degree ≤ 2 in the state variables,
# with the constant term dropped via [2:end]. `ns` is presumably the number
# of state species, defined earlier in the full script — TODO confirm.
@variables x[1:ns] # variables used to create polynomial_basis
polys = polynomial_basis(x,2)[2:end]
# Compile the symbolic basis to a callable; `build_function` returns an
# (out-of-place, in-place) pair — take the out-of-place version.
f_polys = build_function(polys, x, expression = Val{false})[1]
# First "layer" of the network: polynomial features of the state (p unused).
λ(x,p) = f_polys(x)

# Linear combination of the basis features; sparse_init zeroes 80% of the
# initial weights, encouraging a SINDy-like sparse right-hand side.
NN = FastChain(λ, FastDense(length(polys), ns;
initW = Flux.sparse_init(sparsity=0.8)))

solver = Tsit5();
# `tspan`, `tsteps`, and `u0_list` are assumed to be defined earlier in the
# full script — TODO confirm.
prob_neuralode = NeuralODE(NN, tspan, solver, saveat = tsteps)
# Plain ODEProblem wrapper around the network, used for multiple shooting.
neural_ode_f(u,p,t) = NN(u,p)
prob_nn = ODEProblem(neural_ode_f, u0_list[1,:], tspan,
initial_params(NN))

## training

"""
    plot_function_for_multiple_shoot(plt, pred, grp_size)

Overlay the per-group predictions from a multiple-shooting fit on `plt`.
`pred` is a vector of per-group trajectories; only the first state row of
each group is plotted. Relies on the globals `datasize` and `tsteps`
defined elsewhere in the script — TODO confirm.
"""
function plot_function_for_multiple_shoot(plt, pred, grp_size)
    # Adjacent groups share one endpoint, so group starts are grp_size - 1
    # indices apart (a group of size 1 simply steps by 1).
    stride = grp_size == 1 ? 1 : grp_size - 1
    if grp_size == datasize
        # Only one group — plot it against the full time grid.
        # (Curly quotes in the original paste were a syntax error; string
        # literals must use ASCII double quotes.)
        Plots.scatter!(plt, tsteps, pred[1][1, :], label = "pred")
    else
        for i in 1:stride:datasize-grp_size
            # Group index runs 1, 2, …, N where
            # N = trunc(Integer, (datasize-1)/(grp_size-1)).
            grp = trunc(Integer, (i - 1) / stride + 1)
            Plots.scatter!(plt, tsteps[i:i+stride], pred[grp][1, :],
                           label = "grp" * string(grp))
        end
    end
end

# Human-readable species names for axis labels (`rn` is presumably the
# reaction network defined earlier — TODO confirm).
statesvar = string.(species(rn))

# Training callback: prints the current loss and (optionally) plots the
# data against the model's single-shooting prediction, one panel per
# species. Returning `false` tells sciml_train to keep optimizing.
# NOTE: the original paste contained stray `````` markdown-fence residue
# inside this function body, which is a syntax error and has been removed.
callback = function (p, l, pred; doplot = true)
    display(l)

    if doplot
        list_plt = []
        for spec in 1:ns
            # Data points; `pred` may cover fewer time steps than the data,
            # so truncate the time axis to match.
            plt = Plots.scatter(tsteps[1:size(pred, 2)],
                                ode_data[spec, 1:size(pred, 2)],
                                markercolor = :transparent, label = "Data",
                                framestyle = :box)
            ## plot the different predictions for individual shoot
            # plot_function_for_multiple_shoot(plt, predictions, grp_size_param)
            # Single-shooting performance of the (multiple-shooting-trained)
            # model: what the solver predicts after training.
            plot!(plt, tsteps[1:size(pred, 2)], pred[spec, :], lw = 3,
                  label = "ODENet prediction")
            plot!(xlabel = "Time(sec)",
                  ylabel = "Concentration of " * statesvar[spec])
            # Show the legend only on the first panel to reduce clutter.
            if spec == 1
                plot!(plt, legend = true, framealpha = 0)
            else
                plot!(plt, legend = false)
            end
            push!(list_plt, plt)
        end

        plt_all = plot(list_plt...)
        display(plt_all)

        # png(plt_all, string(path, "/figs/i_exp_", i))
    end
    return false
end

# Define parameters for Multiple Shooting

# Number of time points per multiple-shooting group.
grp_size_param = 3;
# Weight on the continuity penalty between adjacent groups.
loss_multiplier_param = 100

# Sum-of-squared-errors between the data and the prediction, converted to
# Float32 by the declared return type. (An L1 penalty on the parameters was
# considered but is left disabled.)
function loss_function(ode_data, pred)::Float32
    residual = ode_data - pred
    return sum(abs2, residual) #+ μ*sum(abs, ps)
end

# Multiple-shooting loss for the current experiment's data.
# `multiple_shoot1` is defined in multipleshooting.jl (not shown here);
# NOTE(review): the keyword name `loss_multiplier_param` must match that
# function's signature — a mismatched keyword name raises a MethodError.
# The original passed a hard-coded 100 here, silently ignoring the
# `loss_multiplier_param` variable configured above; use the variable.
function loss_multiple_shooting(p)
    return multiple_shoot1(p, ode_data, tsteps, prob_nn,
                           loss_function, solver, grp_size_param;
                           loss_multiplier_param = loss_multiplier_param)
end

# Initial network parameters; warm-started across experiments below.
ps = prob_neuralode.p

# μ = [0.1, 1, 5, 10,100]

# Train on each experiment in random order, carrying the optimized
# parameters forward. `randperm` requires `using Random`.
# NOTE: the original paste contained stray `````` markdown-fence residue
# inside this loop, which is a syntax error and has been removed.
for i in randperm(n_exp_train)
    # `global` is required here: assignments inside a top-level `for` in a
    # script are in soft scope, so without it `ode_data` and `ps` would
    # become new loop-locals — the loss would keep seeing stale data and
    # the trained parameters would be discarded every iteration.
    global ode_data = ode_data_list_noise[i, :, :]

    result_neuralode = DiffEqFlux.sciml_train(loss_multiple_shooting, ps,
                                              ADAM(0.01), maxiters = 300)
    global ps = result_neuralode.minimizer
end

Welcome to the Julia discourse.

I am not familiar with machine learning libraries, but a quick check of the docs suggests that the loss function should take 2 arguments.

A general note: It is easier for others to help you if you follow some suggestions. In particular, a MWE with a complete error message would be useful.

2 Likes