DiffEqFlux.sciml_train doesn't accept an `adtype` keyword argument

This code works OK

using DifferentialEquations, DiffEqFlux, GalacticOptim

"""
    lotka_volterra!(du, u, p, t)

In-place Lotka–Volterra predator–prey derivatives: writes du[1] (prey rate)
and du[2] (predator rate) for state `u = [prey, predator]` and parameters
`p = [α, β, δ, γ]`. The time `t` is unused (autonomous system).
"""
function lotka_volterra!(du, u, p, t)
  prey = u[1]
  predator = u[2]
  growth, predation, mortality, reproduction = p[1], p[2], p[3], p[4]
  # Prey: exponential growth minus losses to predation.
  du[1] = growth*prey - predation*prey*predator
  # Predator: natural mortality plus gains from consumed prey.
  du[2] = -mortality*predator + reproduction*prey*predator
end

# Initial populations: [prey, predator].
u0 = [1.0, 1.0]
# Integrate over t ∈ [0, 10].
tspan = (0.0, 10.0)
# Save points for the loss below — every 0.1 time units.
tsteps = 0.0:0.1:10.0
# Initial parameter guess [α, β, δ, γ] for lotka_volterra!.
p = [1.5, 1.0, 3.0, 1.0]

# ODE problem to be re-solved inside the loss with candidate parameters.
prob = ODEProblem(lotka_volterra!, u0, tspan, p)

"""
    loss1(p)

Objective for `sciml_train`: solve the Lotka–Volterra ODE with parameters `p`
and return the sum of squared deviations of every saved state component
from the target value 1.
"""
function loss1(p)
  # Re-solve the ODE with the candidate parameters, saving on the fixed grid.
  solution = solve(prob, Tsit5(), p=p, saveat = tsteps)
  # L2 distance of the whole saved trajectory from 1 (no shadowing of the
  # function's own name, unlike a local also called `loss1`).
  return sum(abs2, solution .- 1)
end

# Fit the parameters; with no optimizer/adtype given, sciml_train picks defaults.
result = DiffEqFlux.sciml_train(loss1, p)

But when I pass the adtype explicitly as a keyword argument, I get an error:

result = DiffEqFlux.sciml_train(loss1, p, adtype=GalacticOptim.AutoZygote())

ERROR: MethodError: no method matching Optim.Options(; extended_trace=true, adtype=GalacticOptim.AutoZygote(), callback=GalacticOptimJL.var"#_cb#11"{DiffEqFlux.var"#86#93", BFGS{LineSearches.InitialStatic{Float64}, LineSearches.HagerZhang{Float64, Base.RefValue{Bool}}, Nothing, Float64, Flat}, Base.Iterators.Cycle{Tuple{GalacticOptim.NullData}}}(DiffEqFlux.var"#86#93"(), BFGS{LineSearches.InitialStatic{Float64}, LineSearches.HagerZhang{Float64, Base.RefValue{Bool}}, Nothing, Float64, Flat}(LineSearches.InitialStatic{Float64}
alpha: Float64 1.0
scaled: Bool false
, LineSearches.HagerZhang{Float64, Base.RefValue{Bool}}
delta: Float64 0.1
sigma: Float64 0.9
alphamax: Float64 Inf
rho: Float64 5.0
epsilon: Float64 1.0e-6
gamma: Float64 0.66
linesearchmax: Int64 50
psi3: Float64 0.1
display: Int64 0
mayterminate: Base.RefValue{Bool}
, nothing, 0.01, Flat()), Base.Iterators.Cycle{Tuple{GalacticOptim.NullData}}((GalacticOptim.NullData(),)), Core.Box(#undef), Core.Box(GalacticOptim.NullData()), Core.Box(2)))
Closest candidates are:
Optim.Options(; x_tol, f_tol, g_tol, x_abstol, x_reltol, f_abstol, f_reltol, g_abstol, g_reltol, outer_x_tol, outer_f_tol, outer_g_tol, outer_x_abstol, outer_x_reltol, outer_f_abstol, outer_f_reltol, outer_g_abstol, outer_g_reltol, f_calls_limit, g_calls_limit, h_calls_limit, allow_f_increases, allow_outer_f_increases, successive_f_tol, iterations, outer_iterations, store_trace, trace_simplex, show_trace, extended_trace, show_every, callback, time_limit) at C:\Users\mzhen.julia\packages\Optim\6Lpjy\src\types.jl:73 got unsupported keyword argument “adtype”
Optim.Options(::T, ::T, ::T, ::T, ::T, ::T, ::T, ::T, ::T, ::T, ::T, ::T, ::Int64, ::Int64, ::Int64, ::Bool, ::Bool, ::Int64, ::Int64, ::Int64, ::Bool, ::Bool, ::Bool, ::Bool, ::Int64, ::TCallback, ::Float64) where {T, TCallback} at C:\Users\mzhen.julia\packages\Optim\6Lpjy\src\types.jl:44 got unsupported keyword arguments “extended_trace”, “adtype”, “callback”
Stacktrace:
[1] kwerr(kw::NamedTuple{(:extended_trace, :adtype, :callback), Tuple{Bool, GalacticOptim.AutoZygote, GalacticOptimJL.var"#_cb#11"{DiffEqFlux.var"#86#93", BFGS{LineSearches.InitialStatic{Float64}, LineSearches.HagerZhang{Float64, Base.RefValue{Bool}}, Nothing, Float64, Flat}, Base.Iterators.Cycle{Tuple{GalacticOptim.NullData}}}}}, args::Type)
@ Base .\error.jl:163

Any idea how to fix it?

Thanks!

https://diffeqflux.sciml.ai/dev/sciml_train/

It’s documented as a positional argument, not a keyword argument — pass the adtype after the optimizer, and the keyword won’t be forwarded into `Optim.Options` (which is what triggered the `MethodError`).

# Optimizer and adtype are the 3rd and 4th *positional* arguments — not keywords.
result = DiffEqFlux.sciml_train(loss1, p, ADAM(0.1), GalacticOptim.AutoZygote())

Thank you @ChrisRackauckas. It helped!