Take a look at the type passed to f_inputs by ForwardDiff.jacobian. You have compiled the tape for Float64 arguments, but ForwardDiff.jacobian calls it with a vector of ForwardDiff.Dual numbers.
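To see this for yourself, a quick (untested) probe along these lines should print the Dual element type right before the error shows up; `f_inputs` and `inputs` are the names from your post, and `probe` is just a throwaway wrapper:

```julia
# Hypothetical probe: wrap the function you hand to ForwardDiff.jacobian
# and print the element type it is actually called with.
probe(x) = (println(eltype(x)); f_inputs(x))

ForwardDiff.jacobian(probe, inputs)  # prints a ForwardDiff.Dual{…} type, not Float64
```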
Untested, but I guess you could do something like this:
```julia
using ForwardDiff, ReverseDiff

# One compiled tape (plus a matching result buffer) per element type, so the
# Dual-valued calls coming from ForwardDiff.jacobian get their own tape.
const CACHE = Dict{DataType,Any}()

function inner(x::Vector{T}) where {T<:Real}
    if !haskey(CACHE, T)
        tape = ReverseDiff.compile(ReverseDiff.GradientTape(f, x))
        CACHE[T] = (tape, zeros(T, length(x)))
    end
    tape, y = CACHE[T]
    return ReverseDiff.gradient!(y, tape, x)
end

ForwardDiff.jacobian(inner, inputs)
```
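Keying the cache on the element type means the Float64 tape and the Dual tape each get compiled once and then reused. If you want to sanity-check the pattern before wiring it into your code, something like this (with a stand-in `f`, since I don't have yours) should reproduce the Hessian of a simple quadratic:

```julia
# Stand-in definitions just for this check; swap in your real f and inputs.
f(x) = sum(abs2, x)   # Hessian of this f is 2I
inputs = rand(3)

H = ForwardDiff.jacobian(inner, inputs)
H ≈ [2.0 0.0 0.0; 0.0 2.0 0.0; 0.0 0.0 2.0]   # should be true
```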