A quick question on the side, which may not be related: when trying relatively naive code like
using LinearAlgebra, DifferentiationInterface, StaticArrays, LinearSolve
using RecursiveFactorization
import ForwardDiff
function want_to_diff!(c::MVector{n,T}, b::StaticVector{n,T}, A::MMatrix{n,n,T}, linsolve) where {n, T<:Real}
    fill!(A, zero(T))
    for i in 1:n
        A[i, i] = sin(b[i])
    end
    linsolve.A = SMatrix(A)
    linsolve.b = SVector(b)
    c .= solve!(linsolve).u
    return c
end
n = 4
A = one(MMatrix{n,n,Float32})
b = rand(MVector{n, Float32})
c = MVector{n, Float32}(undef)
prob = LinearProblem(SMatrix(A), SVector(c))
linsolve = LinearSolve.init(prob, RFLUFactorization())
want_to_diff!(c, b, A, linsolve)
(I switched to using static arrays for the LinearProblem because I otherwise got errors of the form
TypeError: in setfield!, expected Tuple{StaticArrays.LU{LowerTriangular{Float32, SMatrix{4, 4, Float32, 16}}, UpperTriangular{Float32, SMatrix{4, 4, Float32, 16}}, SVector{4, Int64}}, Vector{Int64}}, got a value of type Tuple{LU{Float32, MMatrix{4, 4, Float32, 16}, Vector{Int64}}, Vector{Int64}}
)
Running the above still yields
ERROR: TypeError: in setfield!, expected Tuple{StaticArrays.LU{LowerTriangular{Float32, SMatrix{4, 4, Float32, 16}}, UpperTriangular{Float32, SMatrix{4, 4, Float32, 16}}, SVector{4, Int64}}, Vector{Int64}}, got a value of type Tuple{LU{Float32, SMatrix{4, 4, Float32, 16}, Vector{Int64}}, Vector{Int64}}
Am I using this interface wrong, or is this something I should open an issue about over at the SciML/LinearSolve.jl GitHub repository?
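For what it's worth, my guess (which may be off) is that the two LU types in the error come from different places: StaticArrays ships its own LU factorization type for static matrices, while RecursiveFactorization produces a LinearAlgebra.LU. A minimal sketch of that difference, independent of LinearSolve:
using LinearAlgebra, StaticArrays
import RecursiveFactorization

As = @SMatrix Float32[2 0; 0 3]
Am = Matrix(As)

typeof(lu(As))                               # StaticArrays.LU{...}
typeof(RecursiveFactorization.lu!(copy(Am))) # LinearAlgebra.LU{Float32, Matrix{Float32}, Vector{Int64}}
So my suspicion is that the cache was initialized expecting one of these and the solve later tries to store the other, but I may be misreading the internals.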
Trying this with the reduced MWE from yolhan_mannes, I have
using DifferentiationInterface, LinearSolve, PreallocationTools, LinearAlgebra
import ForwardDiff
function f!(c, b, linsolve_cache::GeneralLazyBufferCache)
    linsolve = linsolve_cache[b]
    linsolve.A = LinearAlgebra.diagm(sin.(b))
    linsolve.b = b
    c .= solve!(linsolve).u
    return c
end
n = 5
b = rand(n)
c = rand(n)
function get_cache(p)
    A = diagm(p)
    prob = LinearProblem(A, p)
    return LinearSolve.init(prob)
end
jacobian(f!, c, AutoForwardDiff(), b, ConstantOrCache(GeneralLazyBufferCache(get_cache)))
gives an error
ERROR: MethodError: no method matching has_sys(::Nothing)
The function `has_sys` exists, but no method is defined for this combination of argument types.
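For context, my mental model of GeneralLazyBufferCache here, which may be inaccurate: it calls the supplied function once per input type and memoizes the result, so the Dual-typed b that ForwardDiff passes in should lazily build a second, Dual-typed LinearSolve cache via get_cache. A tiny sketch of that behaviour with a plain buffer instead of a LinearSolve cache:
using PreallocationTools
import ForwardDiff

lbc = GeneralLazyBufferCache(p -> similar(p))  # toy builder; get_cache above plays this role
x  = rand(3)
xd = ForwardDiff.Dual.(x, 1.0)

eltype(lbc[x])   # Float64 buffer
eltype(lbc[xd])  # Dual-typed buffer, built lazily on first access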
I think the code in LinearSolveForwardDiffExt.jl assumes that a SymbolicLinearInterface is set? A bit of type piracy, defining SciMLBase.has_sys(::Nothing) = false (as shown below), seems to fix the issue, but am I breaking any assumptions by doing that, or is this just a small oversight?
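Concretely, the stopgap I'm currently running with looks like this (type piracy on a method I don't own, so I'd rather not keep it long-term):
import SciMLBase
# Stopgap only: pirated fallback so the ForwardDiff extension doesn't hit the MethodError
SciMLBase.has_sys(::Nothing) = false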