Comparing equivalence of ForwardDiff.jacobian results

Hello,

I am working on a neural network framework that identifies reaction pathways autonomously, and I am currently trying to add an analysis tool called degree of rate control. I am comparing two approaches: defining the kinetic ODEs of the reactions explicitly (ODE.jl) versus defining them as a neural network via matrix multiplications (CRNN.jl). Ideally both scripts should produce the same Jacobian matrix, but for some reason CRNN.jl does not give the same result as ODE.jl. Can anyone help me diagnose why?

Since I am a new user and can't upload attachments here, I have pasted the scripts below:

ODE.jl

using DifferentialEquations
using Plots
using DiffEqSensitivity, ForwardDiff, DelimitedFiles

ns = 2
nr = 2
k = [1e-5, 1]
alg = RK4()
b0 = 0
lb = 1e-5
ub = 1e1
nt = 15

function trueODEfunc(dydt, y, k, t)
    aA = 1
    aB = 1
    kr = 0
    dydt[1] = -k[1] * aA * y[1] + kr * y[2] + k[2] * aB * y[2];
    dydt[2] = -dydt[1]
    #dydt[2] = k[1] * aA * y[1] - k[2] * y[2] - k3 * aB * y[2];
end

#u0 = zeros(Float64, ns);
free_ic = 1 - 1 / (1 + 1.0e5);
adsorbed_ic = (1 / (1 + 1.0e5));
u0 = [free_ic, adsorbed_ic]
tspan = (0., 4.)
tsteps = LinRange(0, 4, nt)

prob = ODEProblem(trueODEfunc, u0, tspan, k);
ode_sol = Array(solve(prob, alg, saveat=tsteps))
data_matrix = hcat(tsteps, ode_sol[1, :], ode_sol[2, :])

# Add column headers
headers = ["Time_Steps" "Free_Sites" "Adsorbed_Sites"]

# Combine headers and data
output_data = vcat(headers, data_matrix)

# Save to CSV file
writedlm("ODE_ipynb.csv", output_data, ',')

function target_rate(y, k)
    aB = 1
    rate = y[2] * k[2] * aB
    return rate
end

function rate_wrapper(lnk)
    k = exp.(lnk)
    _prob = remake(prob, p=k)
    sol = Array(solve(_prob, alg, saveat=tsteps, sensealg=ForwardDiffSensitivity()))
    println(size(sol))
    k_matrix = reshape(k, 1, size(k, 1))
    k_repeat = repeat(k_matrix, nt, 1)
    rate = Array{Real, 2}(undef, nt, 1)
    for i in 1:nt
        rate[i, 1] = target_rate(sol[:, i], k_repeat[i, :])
    end
    println("Rate")
    println(rate)
    return log.(rate)
end

drc = ForwardDiff.jacobian(rate_wrapper, log.(k))

plt = plot()
plot!(plt, tsteps, drc[:, 1],
      linewidth=3, xlabel="Time (s)", ylabel="Degree of Rate Control",
      label="DRC-1")
plot!(plt, tsteps, drc[:, 2], linewidth=3, label="DRC-2")
png(plt, string("DRC_ODE"))

CRNN.jl

using DifferentialEquations
using Plots
using DiffEqSensitivity, ForwardDiff, DelimitedFiles

ns = 2
nr = 2
k = [1e-5, 1]
alg = RK4()
b0 = 0
lb = 1e-5
ub = 1e1
nt = 15

#u0 = zeros(Float64, ns);
free_ic = 1 - 1 / (1 + 1.0e5);
adsorbed_ic = (1 / (1 + 1.0e5));
u0 = [free_ic, adsorbed_ic]
tspan = (0., 4.)
tsteps = LinRange(0, 4, nt)

function p2vec(p)
    w_b = p[1:nr] .+ b0;
    # More robust reshaping that works with dual numbers
    remaining_params = p[nr + 1:end]
    w_out = reshape(remaining_params, ns, nr);
    # w_out = clamp.(w_out, -2.5, 2.5);
    w_in = clamp.(-w_out, 0, 2.5);
    return w_in, w_b, w_out
end

function display_p(p)
    w_in, w_b, w_out = p2vec(p);
    println("species (column) reaction (row)")
    println("w_in")
    show(stdout, "text/plain", round.(w_in', digits=3))

    println("\nw_b")
    show(stdout, "text/plain", round.(exp.(w_b'), digits=6))

    println("\nw_out")
    show(stdout, "text/plain", round.(w_out', digits=3))
    println("\n\n")
end

function crnn(du, u, p, t)
    w_in, w_b, w_out = p2vec(p);
    w_in_x = w_in' * @. log(clamp(u, lb, ub));
    du .= w_out * @. exp(w_in_x + w_b);
end

p = [log(1e-5), log(1), -1, 1, 1, -1]   # first two entries: w_b = log.(k); last four: w_out (column-major)
display_p(p)

prob = ODEProblem(crnn, u0, tspan, p)

function predict_neuralode(prob, u0)
    sol = Array(solve(prob, alg, u0=u0, saveat=tsteps))
    return sol
end

sol = predict_neuralode(prob, u0)
data_matrix = hcat(tsteps, sol[1, :], sol[2, :])

# Add column headers
headers = ["Time_Steps" "Free_Sites" "Adsorbed_Sites"]

# Combine headers and data
output_data = vcat(headers, data_matrix)

# Save to CSV file
writedlm("CRNN_ipynb.csv", output_data, ',')

function target_rate(u, p)
    w_in, w_b, w_out = p2vec(p);
    w_in_x = w_in' * @. log(clamp(u, lb, ub));
    rate_all_reaction = @. exp(w_in_x + w_b);
    println(size(rate_all_reaction))
    target_rate = rate_all_reaction[2]
    return target_rate
end

function rate_wrapper(p_new)
    _prob = remake(prob, p=p_new)
    sol = predict_neuralode(_prob, u0)
    rate = Vector{eltype(p_new)}(undef, nt) # Use eltype to handle dual numbers
    for i in 1:nt
        rate[i] = target_rate(sol[:, i], p_new)
    end
    println("Rate")
    println(rate)
    return log.(rate)
end

# Compute Jacobian with respect to all parameters

drc = ForwardDiff.jacobian(rate_wrapper, p)

# Plot only the first two columns (corresponding to the rate constants)

plt = plot()
plot!(plt, tsteps, drc[:, 1],
      linewidth=3, xlabel="Time (s)", ylabel="Degree of Rate Control",
      label="DRC-1 (rate constant 1)")
plot!(plt, tsteps, drc[:, 2], linewidth=3,
      label="DRC-2 (rate constant 2)")
png(plt, string("DRC_CRNN"))
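
For reference, here is the check I expect to pass but currently does not (a sketch, assuming both scripts have been run in the same session with their Jacobians kept under separate names, say drc_ode from ODE.jl and drc_crnn from CRNN.jl, since both scripts call the result drc):

# drc_ode is nt x 2 (derivatives of log.(rate) w.r.t. log.(k));
# drc_crnn is nt x 6 (derivatives w.r.t. all of p). Since p[1:2] = w_b = log.(k),
# columns 1:2 of drc_crnn should match drc_ode.
isapprox(drc_ode, drc_crnn[:, 1:2]; rtol=1e-6)   # tolerance chosen arbitrarily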

From what you showed… you just built the neural network but never trained the coefficients?

Correct, I have not trained the parameters, because I am using pre-trained parameters set via the following line:

p = [log(1e-5), log(1), -1, 1, 1, -1]

Basically, I am trying to calculate a property called the "Degree of Rate Control" (DRC). The DRC is the partial derivative of the log of the reaction rate with respect to the log of a rate constant. This is the paper I am referring to: Evaluation of the degree of rate control via automatic differentiation.
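
In symbols, for a target rate r and rate constant k_i, the quantity is

$$X_{\mathrm{RC},i} \;=\; \frac{\partial \ln r}{\partial \ln k_i} \;=\; \frac{k_i}{r}\,\frac{\partial r}{\partial k_i},$$

which is why ODE.jl differentiates log.(rate) with respect to log.(k), and why in CRNN.jl only the first two Jacobian columns (the derivatives with respect to w_b = log.(k)) are plotted.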

How big is the difference?

The pre-defined parameters, when plugged into the crnn function, reproduce the ODEs defined in trueODEfunc in ODE.jl, so in principle there should not be any difference.
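
To spell out why, working through p2vec and crnn with these values (and assuming the solution stays strictly inside the clamp bounds lb and ub):

$$w_b = \begin{bmatrix} \ln(10^{-5}) \\ \ln(1) \end{bmatrix} = \begin{bmatrix} \ln k_1 \\ \ln k_2 \end{bmatrix}, \qquad W_\mathrm{out} = \begin{bmatrix} -1 & 1 \\ 1 & -1 \end{bmatrix}, \qquad W_\mathrm{in} = \mathrm{clamp}(-W_\mathrm{out}, 0, 2.5) = \begin{bmatrix} 1 & 0 \\ 0 & 1 \end{bmatrix},$$

so the CRNN right-hand side reduces to

$$\frac{du}{dt} = W_\mathrm{out}\,\exp\!\left(W_\mathrm{in}^{\top} \ln u + w_b\right) = \begin{bmatrix} -k_1 u_1 + k_2 u_2 \\ \phantom{-}k_1 u_1 - k_2 u_2 \end{bmatrix},$$

which is exactly trueODEfunc with aA = aB = 1 and kr = 0.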

However, I found the fix. The issue was the clamping of the ODE solution in CRNN.jl and an incorrect definition in the rate_wrapper function.
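
For anyone who runs into the same thing, here is a rough sketch of what the corrected pieces might look like, assuming the fix amounts to (a) evaluating the target rate without clamping the solution and (b) differentiating the log-rate only with respect to w_b = log.(k), mirroring ODE.jl. The *_fixed names are illustrative; everything else (p2vec, predict_neuralode, prob, u0, p, nr, nt) is as defined in CRNN.jl above:

function target_rate_fixed(u, p)
    w_in, w_b, w_out = p2vec(p)
    w_in_x = w_in' * log.(u)                 # no clamp on the ODE solution here
    rate_all_reaction = exp.(w_in_x .+ w_b)
    return rate_all_reaction[2]              # same target as ODE.jl: k[2] * aB * y[2]
end

function rate_wrapper_fixed(w_b_new)
    p_new = vcat(w_b_new, p[nr + 1:end])     # only the biases (log k) are perturbed
    _prob = remake(prob, p=p_new)
    sol = predict_neuralode(_prob, u0)
    rate = [target_rate_fixed(sol[:, i], p_new) for i in 1:nt]
    return log.(rate)
end

drc_fixed = ForwardDiff.jacobian(rate_wrapper_fixed, p[1:nr])   # nt x 2, directly comparable to ODE.jl's drc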

Thanks for your help!!