Could you please expand on that? I am not sure I understand it correctly; in particular, what do you mean by "previous versions"?
To my understanding, SciMLBenchmarks currently does not specialize on sparsity at all, since `jac_prototype` is not provided, i.e. everything is done with dense matrices. Moreover, adding sparsity via `jac_prototype` yields better results, which confirms that the code without `jac_prototype` did not specialize.
Here is my implementation of adding sparsity to the SciMLBenchmarks code:
```julia
using ADTypes, SparseConnectivityTracer  # TracerSparsityDetector lives here
using OrdinaryDiffEq, SciMLBase, ReverseDiff

# Detect the Jacobian sparsity pattern of an in-place rhs f!(du, u, p, t)
function jac_sparsity_adtypes(u0, func, p)
    du0 = similar(u0)
    return ADTypes.jacobian_sparsity((du, u) -> func(du, u, p, 0.0), du0, u0,
                                     TracerSparsityDetector())
end

function auto_sen_l2(f, u0, tspan, p, t, alg = Tsit5(); diffalg = ReverseDiff.gradient, kwargs...)
    jac_sparsity = jac_sparsity_adtypes(u0, f, p)
    # Attach the detected pattern so the solver can specialize on sparsity
    f_ode = ODEFunction(f, jac_prototype = jac_sparsity)
    test_f(p) = begin
        prob = ODEProblem{true, SciMLBase.FullSpecialize}(f_ode, convert.(eltype(p), u0), tspan, p)
        sol = solve(prob, alg, saveat = t; kwargs...)
        # L2 loss over the saved trajectory
        sum(sol.u) do x
            sum(z -> (1 - z)^2 / 2, x)
        end
    end
    diffalg(test_f, p)
end
```
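In case it helps, here is roughly how I exercise `auto_sen_l2` in isolation. The Lotka-Volterra system below is just an illustrative stand-in of my own, not the benchmark's Brusselator:

```julia
# Illustrative stand-in problem (not part of the benchmark)
function lotka!(du, u, p, t)
    du[1] =  p[1] * u[1] - p[2] * u[1] * u[2]
    du[2] = -p[3] * u[2] + p[4] * u[1] * u[2]
end

u0 = [1.0, 1.0]
p  = [1.5, 1.0, 3.0, 1.0]
# Gradient of the L2 loss w.r.t. p, with the sparsity pattern attached
grad = auto_sen_l2(lotka!, u0, (0.0, 10.0), p, 0.0:0.5:10.0; abstol = 1e-8, reltol = 1e-8)
```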
```julia
csa = map(csan) do n
    bfun, b_u0, b_p, brusselator_jac, brusselator_comp = makebrusselator!(PROBS, n)
    @time ts = map(ADJOINT_METHODS[1:2end÷3]) do alg
        @info "Running $alg ($(SciMLSensitivity.alg_autodiff(alg) ? "AD" : "user")-jac) for N = $n"
        jac_sparsity = jac_sparsity_adtypes(b_u0, bfun, b_p)
        # AD-based methods get only the sparsity pattern; the others also keep
        # the hand-written Jacobian
        f = SciMLSensitivity.alg_autodiff(alg) ?
            ODEFunction(bfun, jac_prototype = jac_sparsity) :
            ODEFunction(bfun, jac = brusselator_jac, jac_prototype = jac_sparsity)
        solver = Rodas5(autodiff = false)
        # First call includes compilation; time the second one
        @time diffeq_sen_l2(f, b_u0, tspan, b_p, bt, solver; sensalg = alg, tols...)
        t = @elapsed diffeq_sen_l2(f, b_u0, tspan, b_p, bt, solver; sensalg = alg, tols...)
        return t
    end
    @show n, ts
    ts
end
```
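As a quick sanity check that the detector actually returns a sparse pattern for the Brusselator, one can inspect it directly (a small sketch reusing the variables from the loop above):

```julia
using SparseArrays  # nnz

jac_sparsity = jac_sparsity_adtypes(b_u0, bfun, b_p)
# Fraction of structurally nonzero entries; should be small for large N
density = nnz(jac_sparsity) / length(jac_sparsity)
@show size(jac_sparsity) nnz(jac_sparsity) density
```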
And here are the results with sparsity and the bugfix implemented:
It seems that ForwardDiff gained the most from the change, but, unlike in my benchmark, it is still not even close to being faster than the adjoint methods for N <= 9.
