Hi, I’m new to Julia, and I’m trying to use a neural network together with differential equations to fit simulated test data, but I’m getting an error that there is no method matching Float64. I checked the DifferentialEquations.jl documentation and tried the suggestions in the Frequently Asked Questions, but they did not work. I would greatly appreciate any assistance.
using Random, Plots, Statistics
using SeasonalTrendLoess
using Lux
using DifferentialEquations
using ComponentArrays
using Optimization, Optimisers, Optim, OptimizationOptimisers, Zygote
using SciMLSensitivity
using ForwardDiff, PreallocationTools, OrdinaryDiffEq
rng = Random.default_rng()
Random.seed!(714)
xs = collect(1.0:1.0:120.0)
data = @. xs + sin(xs)
NN_sea = Lux.Chain(
    Lux.Dense(1, 16, sin),
    Lux.Dense(16, 1)
)
p_se, st_se = Lux.setup(rng, NN_sea)
function step1(du, u, p, t)
    du[1] = 1.0
end
function step2(du, u, p, t)
    du[1] = NN_sea([t], p, st_se)[1][1]
end
function calculation(θ)
    delta_t = 0.1
    u0 = [data[1]]
    result = []
    for k in 0:1:119
        tspan = (k * delta_t, (k + 1.0) * delta_t)
        prob1 = ODEProblem(step1, u0, tspan)
        sol1 = solve(prob1, saveat = 0.01)
        u1 = [sol1[1, :][end]]
        prob2 = ODEProblem(step2, u1, tspan, ComponentArray(θ))
        sol2 = solve(prob2, saveat = 0.01)
        u0 = [sol2[1, :][end]]
        push!(result, u0[1])
    end
    return result
end
function loss(θ)
    pred = calculation(θ)
    return (1/120) * sum(abs2, pred .- data)
end
const losses = []
function cb(θ, l)
    push!(losses, l)
    if length(losses) % 50 == 0
        println(losses[end])
    end
    return false
end
adtype = Optimization.AutoZygote()
optf = Optimization.OptimizationFunction((x, p) -> loss(x), adtype)
optprob = Optimization.OptimizationProblem(optf, ComponentArray(p_se))
@time result1 = Optimization.solve(optprob,
    OptimizationOptimisers.ADAM(0.01),
    callback = cb,
    maxiters = 2000)
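For reference, a single forward evaluation of the loss (just to check that the setup runs at all before optimizing) can be done like this; p0 and l0 are just local names for this check, everything else is defined above:

p0 = ComponentArray(p_se)  # initial network parameters as a flat parameter vector
l0 = loss(p0)              # one forward evaluation of the loss, no gradients involved
println(l0)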
The error is the following (note that the stack trace also goes through a helper mystl and SeasonalTrendLoess.stl, which come from my larger script and do not appear in the code above):
ERROR: MethodError: no method matching Float64(::ForwardDiff.Dual{Nothing, Float64, 1})
Closest candidates are:
(::Type{T})(::Real, ::RoundingMode) where T<:AbstractFloat
@ Base rounding.jl:207
(::Type{T})(::T) where T<:Number
@ Core boot.jl:792
Float64(::IrrationalConstants.Inv4π)
@ IrrationalConstants C:\Users\49084\.julia\packages\IrrationalConstants\vp5v4\src\macro.jl:112
...
Stacktrace:
[1] convert(::Type{Float64}, x::ForwardDiff.Dual{Nothing, Float64, 1})
@ Base .\number.jl:7
[2] setindex!(A::Vector{Float64}, x::ForwardDiff.Dual{Nothing, Float64, 1}, i1::Int64)
@ Base .\array.jl:1021
[3] adjoint
@ C:\Users\49084\.julia\packages\Zygote\nyzjS\src\lib\array.jl:81 [inlined]
[4] _pullback
@ C:\Users\49084\.julia\packages\ZygoteRules\M4xmc\src\adjoint.jl:67 [inlined]
[5] #loess#2
@ C:\Users\49084\.julia\packages\SeasonalTrendLoess\GAV3b\src\loess.jl:82 [inlined]
[6] _pullback(::Zygote.Context{…}, ::SeasonalTrendLoess.var"##loess#2", ::Int64, ::Int64, ::Vector{…}, ::StepRangeLen{…}, ::typeof(SeasonalTrendLoess.loess), ::StepRange{…}, ::Vector{…})
@ Zygote C:\Users\49084\.julia\packages\Zygote\nyzjS\src\compiler\interface2.jl:0
[7] loess
@ C:\Users\49084\.julia\packages\SeasonalTrendLoess\GAV3b\src\loess.jl:60 [inlined]
[8] _pullback(::Zygote.Context{…}, ::typeof(Core.kwcall), ::@NamedTuple{…}, ::typeof(SeasonalTrendLoess.loess), ::StepRange{…}, ::Vector{…})
@ Zygote C:\Users\49084\.julia\packages\Zygote\nyzjS\src\compiler\interface2.jl:0
[9] #stl#8
@ C:\Users\49084\.julia\packages\SeasonalTrendLoess\GAV3b\src\stl.jl:151 [inlined]
[10] _pullback(::Zygote.Context{…}, ::SeasonalTrendLoess.var"##stl#8", ::Bool, ::Int64, ::Int64, ::Int64, ::Int64, ::Int64, ::Bool, ::Int64, ::Bool, ::Float64, ::typeof(stl), ::Vector{…}, ::Int64)
@ Zygote C:\Users\49084\.julia\packages\Zygote\nyzjS\src\compiler\interface2.jl:0
[11] stl
@ C:\Users\49084\.julia\packages\SeasonalTrendLoess\GAV3b\src\stl.jl:101 [inlined]
[12] _pullback(::Zygote.Context{…}, ::typeof(Core.kwcall), ::@NamedTuple{…}, ::typeof(stl), ::Vector{…}, ::Int64)
@ Zygote C:\Users\49084\.julia\packages\Zygote\nyzjS\src\compiler\interface2.jl:0
[13] mystl
@ .\Untitled-6:22 [inlined]
[14] _pullback(::Zygote.Context{false}, ::typeof(mystl), ::Vector{Float64}, ::Int64)
@ Zygote C:\Users\49084\.julia\packages\Zygote\nyzjS\src\compiler\interface2.jl:0
[15] loss
@ .\Untitled-6:54 [inlined]
[16] _pullback(ctx::Zygote.Context{false}, f::typeof(loss), args::ComponentVector{Float32, Vector{Float32}, Tuple{Axis{…}}})
@ Zygote C:\Users\49084\.julia\packages\Zygote\nyzjS\src\compiler\interface2.jl:0
[17] #61
@ .\Untitled-7:83 [inlined]
[18] _pullback(::Zygote.Context{…}, ::var"#61#62", ::ComponentVector{…}, ::SciMLBase.NullParameters)
@ Zygote C:\Users\49084\.julia\packages\Zygote\nyzjS\src\compiler\interface2.jl:0
[19] pullback(::Function, ::Zygote.Context{false}, ::ComponentVector{Float32, Vector{…}, Tuple{…}}, ::Vararg{Any})
@ Zygote C:\Users\49084\.julia\packages\Zygote\nyzjS\src\compiler\interface.jl:90
[20] pullback(::Function, ::ComponentVector{Float32, Vector{Float32}, Tuple{Axis{…}}}, ::SciMLBase.NullParameters)
@ Zygote C:\Users\49084\.julia\packages\Zygote\nyzjS\src\compiler\interface.jl:88
[21] withgradient(::Function, ::ComponentVector{Float32, Vector{Float32}, Tuple{Axis{…}}}, ::Vararg{Any})
@ Zygote C:\Users\49084\.julia\packages\Zygote\nyzjS\src\compiler\interface.jl:205
[22] value_and_gradient
@ C:\Users\49084\.julia\packages\DifferentiationInterface\6QHLL\ext\DifferentiationInterfaceZygoteExt\DifferentiationInterfaceZygoteExt.jl:97 [inlined]
[23] value_and_gradient!(f::Function, grad::ComponentVector{…}, prep::DifferentiationInterface.NoGradientPrep, backend::AutoZygote, x::ComponentVector{…}, contexts::DifferentiationInterface.Constant{…})
@ DifferentiationInterfaceZygoteExt C:\Users\49084\.julia\packages\DifferentiationInterface\6QHLL\ext\DifferentiationInterfaceZygoteExt\DifferentiationInterfaceZygoteExt.jl:119
[24] (::OptimizationZygoteExt.var"#fg!#16"{…})(res::ComponentVector{…}, θ::ComponentVector{…})
@ OptimizationZygoteExt C:\Users\49084\.julia\packages\OptimizationBase\gvXsf\ext\OptimizationZygoteExt.jl:53
[25] macro expansion
@ C:\Users\49084\.julia\packages\OptimizationOptimisers\i6VZS\src\OptimizationOptimisers.jl:101 [inlined]
[26] macro expansion
@ C:\Users\49084\.julia\packages\Optimization\cfp9i\src\utils.jl:32 [inlined]
[27] __solve(cache::OptimizationCache{…})
@ OptimizationOptimisers C:\Users\49084\.julia\packages\OptimizationOptimisers\i6VZS\src\OptimizationOptimisers.jl:83
[28] solve!(cache::OptimizationCache{…})
@ SciMLBase C:\Users\49084\.julia\packages\SciMLBase\XzPx0\src\solve.jl:186
[29] solve(::OptimizationProblem{…}, ::Optimisers.Adam; kwargs::@Kwargs{…})
@ SciMLBase C:\Users\49084\.julia\packages\SciMLBase\XzPx0\src\solve.jl:94
[30] macro expansion
@ .\timing.jl:279 [inlined]
[31] top-level scope
@ .\Untitled-7:269
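If I read the stack trace correctly, the MethodError happens while Zygote is computing the gradient (the frames go through Zygote.pullback and DifferentiationInterface's value_and_gradient!), not during the forward solve itself. A minimal way to isolate that step, assuming the script above has been run, would presumably be:

p0 = ComponentArray(p_se)  # same initial parameters as passed to OptimizationProblem
Zygote.gradient(loss, p0)  # the stack trace points at this gradient computation through loss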