Hello,
I have been trying to fit one parameter of a an ODE using the experimental data. I used the following example to write the code
The problem is that the MSE loss is decreasing really really slowly even though the initial guess I have given is pretty good. The starting loss was 156.88371364301955
and even after 500 iterations the ending loss was 156.49626077372287
.
And after 500 iteration the code ended with the following error which I couldn’t understand.
┌ Warning: At t=5840.613462152258, dt was forced below floating point epsilon 9.094947017729282e-13, and step error estimate = NaN. Aborting. There is either an error in your model specification or the true solution is unstable (or the true solution can not be represented in the precision of ForwardDiff.Dual{ForwardDiff.Tag{DifferentiationInterface.FixTail{var"#15#16", Tuple{SciMLBase.NullParameters}}, Float64}, Float64, 1}).
└ @ SciMLBase C:\Users\Kalath_A\.julia\packages\SciMLBase\YTOjh\src\integrator_interface.jl:623
ERROR: AssertionError: isfinite(phi_d) && isfinite(gphi)
Stacktrace:
[1] bisect!(ϕdϕ::LineSearches.var"#ϕdϕ#6"{…}, alphas::Vector{…}, values::Vector{…}, slopes::Vector{…}, ia::Int64, ib::Int64, phi_lim::Float64, display::Int64)
@ LineSearches C:\Users\Kalath_A\.julia\packages\LineSearches\jgnxK\src\hagerzhang.jl:510
[2] (::LineSearches.HagerZhang{…})(ϕ::Function, ϕdϕ::LineSearches.var"#ϕdϕ#6"{…}, c::Float64, phi_0::Float64, dphi_0::Float64)
@ LineSearches C:\Users\Kalath_A\.julia\packages\LineSearches\jgnxK\src\hagerzhang.jl:208
[3] HagerZhang
@ C:\Users\Kalath_A\.julia\packages\LineSearches\jgnxK\src\hagerzhang.jl:102 [inlined]
[4] perform_linesearch!(state::Optim.BFGSState{…}, method::Optim.BFGS{…}, d::Optim.ManifoldObjective{…})
@ Optim C:\Users\Kalath_A\.julia\packages\Optim\HvjCd\src\utilities\perform_linesearch.jl:58
[5] update_state!(d::NLSolversBase.TwiceDifferentiable{…}, state::Optim.BFGSState{…}, method::Optim.BFGS{…})
@ Optim C:\Users\Kalath_A\.julia\packages\Optim\HvjCd\src\multivariate\solvers\first_order\bfgs.jl:139
[6] optimize(d::NLSolversBase.TwiceDifferentiable{…}, initial_x::Vector{…}, method::Optim.BFGS{…}, options::Optim.Options{…}, state::Optim.BFGSState{…})
@ Optim C:\Users\Kalath_A\.julia\packages\Optim\HvjCd\src\multivariate\optimize\optimize.jl:54
[7] optimize(d::NLSolversBase.TwiceDifferentiable{…}, initial_x::Vector{…}, method::Optim.BFGS{…}, options::Optim.Options{…})
@ Optim C:\Users\Kalath_A\.julia\packages\Optim\HvjCd\src\multivariate\optimize\optimize.jl:36
[8] __solve(cache::OptimizationCache{…})
@ OptimizationOptimJL C:\Users\Kalath_A\.julia\packages\OptimizationOptimJL\e3bUa\src\OptimizationOptimJL.jl:218
[9] solve!(cache::OptimizationCache{…})
@ SciMLBase C:\Users\Kalath_A\.julia\packages\SciMLBase\YTOjh\src\solve.jl:187
[10] solve(::OptimizationProblem{…}, ::Optim.BFGS{…}; kwargs::@Kwargs{…})
[10] solve(::OptimizationProblem{…}, ::Optim.BFGS{…}; kwargs::@Kwargs{…})
[10] solve(::OptimizationProblem{…}, ::Optim.BFGS{…}; kwargs::@Kwargs{…})
@ SciMLBase C:\Users\Kalath_A\.julia\packages\SciMLBase\YTOjh\src\solve.jl:95
[10] solve(::OptimizationProblem{…}, ::Optim.BFGS{…}; kwargs::@Kwargs{…})
[10] solve(::OptimizationProblem{…}, ::Optim.BFGS{…}; kwargs::@Kwargs{…})
[10] solve(::OptimizationProblem{…}, ::Optim.BFGS{…}; kwargs::@Kwargs{…})
@ SciMLBase C:\Users\Kalath_A\.julia\packages\SciMLBase\YTOjh\src\solve.jl:95
[11] __solve(::OptimizationProblem{…}, ::PolyOpt; maxiters::Int64, kwargs::@Kwargs{…})
@ OptimizationPolyalgorithms C:\Users\Kalath_A\.julia\packages\OptimizationPolyalgorithms\5lhpw\src\OptimizationPolyalgorithms.jl:34
[12] __solve
@ C:\Users\Kalath_A\.julia\packages\OptimizationPolyalgorithms\5lhpw\src\OptimizationPolyalgorithms.jl:11 [inlined]
[13] solve(::OptimizationProblem{…}, ::PolyOpt; kwargs::@Kwargs{…})
@ SciMLBase C:\Users\Kalath_A\.julia\packages\SciMLBase\YTOjh\src\solve.jl:98
[14] top-level scope
@ e:\PhD Ashima\Neural ODE\Julia\IQgen_Tmixed\Curve fitting\ode_fit_julia.jl:103
Some type information was truncated. Use `show(err)` to see complete types.
The following is my code. extract_index
is a function I defined to extract data from my raw experimental data. I didn’t want to make the code lengthy here so didn’t add it in here.
using DifferentialEquations, Optimization, OptimizationPolyalgorithms, SciMLSensitivity
using ForwardDiff, Plots
using JLD2
using Statistics
#fitting conditions
Crate,Temp = "2C",0
# Loading data
data_file = load("unique_Datasets.jld2")["Datasets"]
data = data_file["$(Crate)_T$(Temp)"]
n1,n2 = extract_index(data["current"])
t,T,T∞ = data["time"][n1:n2],data["temperature"][n1:n2],data["temperature"][1]
# Plot
plot(t,T,label = "Temperature")
# Define the ODE
function ode_model!(du,u,p,t,T∞)
T = u[1]
C₁ = p[1]
du[1] = -(T-T∞)/C₁
end
# Initial condition
u0 = [T[1]]
# Simulation interval
tspan = (t[1],t[end])
# Parameters guess
pguess = [1000]
# Set up ODE problem with guessed parameters
ode_model1!(du,u,p,t) = ode_model!(du,u,p,t,T∞)
prob = ODEProblem(ode_model1!,u0,tspan,pguess)
# Solve ode problem with guessed parameters
initial_sol = solve(prob,saveat = t)
# view guessed solution
plt = plot(initial_sol,label = ["T pred"])
scatter!(t,T,label = ["T"])
# Define cost function
function loss(newp)
newprob = remake(prob,p = newp)
sol = solve(newprob,saveat = t)
loss = mean(abs2,sol.-T)
return loss
end
# Callback function to monitor the optimization
function callback(state,l)
display(l)
newprob = remake(prob,p = state.u)
sol = solve(newprob,saveat = t)
plt = plot(sol,ylim = (240,360),label = ["T current pred"])
scatter!(plt,t,T,label=["T actual"])
display(plt)
return false
end
# Optimize the parameters
adtype = AutoForwardDiff()
pguess = [1000.0]
optf = OptimizationFunction((x,_) -> loss(x),adtype)
optprob = OptimizationProblem(optf,pguess)
pfinal = solve(optprob,PolyOpt(),callback = callback,maxiters = 500)
It requires data set to run. Can anyone please help me with this? This should be a simple problem cooling problem to solve. But somehow takes a lot of time