Dear All,
I am having a hard time figuring out how to declare dependent_lags that depend on one of the state variables in my DDE model (using DelayDiffEq.jl). It seems like it should be straightforward, since that state variable tracks the length of the delay as it changes with time, so I would expect something like:
d_lags = ((u,p,t) -> u[8],)
prob = DDEProblem(DynDDE,u0,h,tspan; constant_lags = tauM, dependent_lags = d_lags)
However, when I do this the simulation becomes very slow (it usually warns that maxiters should be increased), even though I thought declaring the lags was supposed to improve performance. If I don't declare the dependent lags, the solution is fast, looks reasonable, and is essentially identical to the one I get when they are declared.
I'm fairly new to Julia, so hopefully I've just made a rookie mistake that someone can spot. It's also possible that I've misunderstood exactly what is happening when lags are declared.
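For reference, here is a tiny toy problem (not part of my model) showing how I currently understand dependent_lags is meant to be declared: each entry is a function of (u,p,t) that returns the current lag, so the solver can track discontinuities at t minus that lag. If my understanding here is already wrong, that may well be the whole problem:
using DelayDiffEq
#Toy DDE whose delay length depends on the state u[1]
function toy_dde(du, u, h, p, t)
    lag = 1.0 + 0.5 * sin(u[1])   #state-dependent delay length
    du[1] = -h(p, t - lag)[1]     #decay driven by the delayed value of u[1]
end
h_toy(p, t) = [1.0]               #constant history for t <= 0
prob_toy = DDEProblem(toy_dde, [1.0], h_toy, (0.0, 10.0);
                      dependent_lags = ((u, p, t) -> 1.0 + 0.5 * sin(u[1]),))
sol_toy = solve(prob_toy, MethodOfSteps(Tsit5()))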
I've provided below the simplest working version of the problem that I could put together without spending a lot of time modifying the original. Sorry it is so long. Briefly, it models a host-parasite system in which the length of the developmental delay for one of the larval stages depends on environmental temperature, which in turn varies continuously with time (e.g., shorter development time in the summer, longer in the winter). In the original, many of the other parameters are functions of time as well, and the issue above becomes even more pronounced.
I'd greatly appreciate any help!
using QuadGK
using Roots
using DelayDiffEq
using Plots;
#Parameters
const Temp0K = 15+273.15; const k = 8.62 * 10^(-5); const H = 0.075;const S = 4000000.0
const b_H = 1/5292.5; const b_S = 0.0037; const lambda = 488.0;const mu_W = 0.0186
const Phi_a = 0.024; const Phi_E = 0.46; const mu_P = 1.5e-7; const mu_M = 0.0056
const tauM = 166.82; const mu_U = 0.0001; const mu_I = mu_U; const tauU_a = 25.1;
const tauU_E = 0.87; const tauU_EL = 5.0*tauU_E; const tauU_TL = 282.0218;
const Alpha = 1.4752e-6;const phi_H = 2.7728e-7;const c_K = 263.0; const d_K = 22.6;
const t0 = 105.6;
#A couple of functions that describe how parameters are sensitive to temperature
#Temperature as a function of time
Temp(t) = c_K + d_K * sin((t-t0)*2*pi/365);
const TempKstop = 273.15-0.0; #Temperature (K) below which the rates are held at their freezing-point values
#Development rate, as a function of Temperature
function g_U(T)
if T > TempKstop
return 1/(tauU_a * exp((tauU_E/k)*((1/T)-(1/Temp0K))) * (1 + exp((tauU_EL/k)*((1/T)-(1/tauU_TL))))); #Development rate
else
return 1/(tauU_a * exp((tauU_E/k)*((1/TempKstop)-(1/Temp0K))) * (1 + exp((tauU_EL/k)*((1/TempKstop)-(1/tauU_TL))))); #Development rate if it's below freezing
end
end
gt(t) = g_U(Temp(t))
#Infection rate, as a function of temperature
function Phi(T)
if T > TempKstop
return Phi_a * exp((-Phi_E/k)*((1/T)-(1/Temp0K))); #Infection rate
else
return Phi_a * exp((-Phi_E/k)*((1/TempKstop)-(1/Temp0K))); #Infection rate if it's too cold
end
end
function DynDDE(du,u,h,p,t)
#State variables
#W, U, I, M, and P track different stages of a parasite life-cycle
#DU1 gives the proportion of U that survives the variable developmental delay period
#DM1 gives the proportion of M that survives the constant delay period
#tauU tracks the length of the developmental delay, which changes continuously with time
W = u[1]; U = u[2]; I = u[3]; M = u[4]; P = u[5]; DU1 = u[6]; DM1 = u[7]; tauU = u[8];
#History calculations
W_past = h(p, t-tauU)[1]; #W at time t-tauU
U_past = h(p, t-tauU)[2]; #U at time t-tauU
I_past = h(p, t-tauM)[3]; #I at time t-tauM
P_past = h(p, t-tauM)[5]; #P at time t-tauM
#"Constant" Survival functions
DM2 = exp(-mu_M*tauM)*exp(-b_H*tauM);
DU2 = exp(-b_S*tauU)*exp(-phi_H*H*tauU);
#Model
du[1] = lambda*P - mu_W*W - Phi(Temp(t))*S*W; #W)
du[2] = Phi(Temp(t))*S*W - U*(b_S + phi_H*H) - mu_U*U - Phi(Temp(t-tauU))*S*W_past*DU1*DU2*(g_U(Temp(t))/g_U(Temp(t-tauU))); #U)
du[3] = Phi(Temp(t-tauU))*S*W_past*DU1*DU2*(g_U(Temp(t))/g_U(Temp(t-tauU))) - I*(mu_I+b_S) - phi_H*H*I; #I)
du[4] = phi_H*H*I - M*(mu_M+b_H) - Alpha*P*M/H - phi_H*H*I_past*DM1*DM2; #M)
du[5] = phi_H*H*I_past*DM1*DM2 - P*(mu_P+b_H) - Alpha*P; #P)
du[6] = DU1*(mu_U*U_past/S*(g_U(Temp(t))/g_U(Temp(t-tauU))) - mu_U*U/S); #DU1)
du[7] = DM1*(Alpha*P_past/H - Alpha*P/H); #DM1)
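#tauU is defined implicitly by the maturation threshold: the integral of g_U(Temp(s)) over s from t-tauU to t equals 1
#(the same condition used to find initTauU below); differentiating that condition with respect to t gives the ODE for dtauU/dt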
du[8] = 1-(g_U(Temp(t))/g_U(Temp(t-tauU))); #tauU) Change in delay duration
end
#Set up simulation
function DySimulate(tstart,tfin)
birthday = tstart;
gint(u) = quadgk(gt, birthday-u, birthday)[1] #Integral of the development rate gt over [birthday-u, birthday]
initTauU = fzero(u -> gint(u) - 1.0, [0.001, 5000]); #Initial lag duration: the delay u for which the integrated development rate reaches 1
#Initial survival probabilities
mUint = exp(-mu_U*initTauU);
bSint = exp(-b_S*initTauU);
pHint = exp(-phi_H*H*initTauU);
DUinit = mUint*bSint*pHint;
DUhist = DUinit;
DM2 = exp(-mu_M*tauM)*exp(-b_H*tauM);
#Initial values
u0 = [0.0,0.0,0.0,0.0,1.0,DUinit,DM2,initTauU];
h(p,t) = [0.0,0.0,0.0,0.0,0.0,DUhist,DM2,initTauU]; #History for t<0: parasite stages empty, survival and delay terms at their initial values
tspan = (birthday,tfin);
#Declare dependent lags. What should they be? It works well when d_lags is not declared.
d_lags = ((u,p,t) -> u[8],)
prob = DDEProblem(DynDDE,u0,h,tspan; constant_lags = tauM, dependent_lags = d_lags)
solDynDDE = solve(prob,alg; isoutofdomain=(u,p,t) -> any(x -> x < 0.0, u),abstol = 1e-6,reltol = 1e-4)
end
alg = MethodOfSteps(Rosenbrock23());
out = DySimulate(1.0,365.0*10)
plot(out, vars = (0,[1,2,3,4,5]))
plot(out, vars = (0,[8])) #Visualize how the delay duration tauU changes over time
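In case it helps, this is roughly how I have been comparing the two set-ups: I time the same call and look at the number of accepted steps, editing DySimulate between runs so that the dependent_lags keyword is either included or left out (the _lags/_nolags names below are only for this comparison):
@time out_lags = DySimulate(1.0, 365.0*10);   #with dependent_lags declared: slow, maxiters warnings
println("accepted steps with lags declared: ", length(out_lags.t))
#Then comment out the dependent_lags keyword inside DySimulate and rerun:
#@time out_nolags = DySimulate(1.0, 365.0*10);
#println("accepted steps without lags declared: ", length(out_nolags.t))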