I am trying to use ForwardDiff.gradient to find the gradient of the following function:
function loss_test(x::Array{T,2}, SecreteFuncs, pInf::Array{T,2}, cmat::Array{T,2}) where {T <: Real}
    cmat_sol = Findcss(x, SecreteFuncs, pInf, cmat)
    loss = (sum(cmat_sol) - 5)^2
    return loss
end
where
function Findcss(muvec::Array{T,2}, SecreteFuncs, pInf::Array{T,2}, cmat::Array{T,2}) where {T <: Real}
    lb = zeros(eltype(cmat), length(cmat), 1)
    ub = ones(eltype(cmat), length(cmat), 1) .* Inf
    inner_optimizer = GradientDescent()
    res = optimize(c -> f_cssObjloss(c, muvec, SecreteFuncs, pInf), lb, ub,
                   0.1 .* ones(eltype(cmat), length(cmat), 1), Fminbox(inner_optimizer),
                   autodiff = :forward)
    css_sol = reshape(Optim.minimizer(res), size(cmat))
    return css_sol
end
I have tested that the function loss_test works, i.e.
using ForwardDiff
using Optim
f_test = x -> loss_test(x, SecreteFuncs, pInf_test, cmat_test)
f_test(mumaxvec)
gives the correct output.
But when I try to take the gradient:
g_test = x -> ForwardDiff.gradient(f_test, x)
g_test(mumaxvec)
I get the following error:
MethodError: no method matching Float64(::ForwardDiff.Dual{ForwardDiff.Tag{var"#133#134",Real},Real,2}).
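My guess is that the problem is lb, ub, and the initial guess inside Findcss: they are allocated with eltype(cmat), which is Float64 in my case, and a Float64 array cannot hold the Dual numbers that ForwardDiff pushes through. A minimal snippet that seems to reproduce the same kind of MethodError, assuming that is indeed the mechanism:

using ForwardDiff
# storing a Dual in a Float64 array forces a Float64(::Dual) conversion, which does not exist
v = zeros(Float64, 3)
v[1] = ForwardDiff.Dual(1.0, 1.0)   # MethodError: no method matching Float64(::ForwardDiff.Dual{...})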
Is that the right diagnosis, i.e. does this happen because Findcss only allocates and returns Float64 arrays? If so, how should I resolve it?
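In case it helps to show what I mean, this is roughly the change I was considering: allocating lb, ub, and the initial guess with an element type wide enough for the Duals carried in through muvec, and dropping the Array{T,2} annotations on pInf and cmat so the method still matches when only muvec holds Duals. I have not verified that Optim/Fminbox actually accepts Dual-valued bounds and starting points, so this may only be part of the answer:

function Findcss(muvec::Array{T,2}, SecreteFuncs, pInf, cmat) where {T <: Real}
    # promote the element type so the working arrays can store Dual numbers when muvec is Dual-valued
    S = promote_type(T, eltype(cmat))
    lb = zeros(S, length(cmat), 1)
    ub = ones(S, length(cmat), 1) .* Inf
    x0 = 0.1 .* ones(S, length(cmat), 1)
    inner_optimizer = GradientDescent()
    res = optimize(c -> f_cssObjloss(c, muvec, SecreteFuncs, pInf), lb, ub, x0,
                   Fminbox(inner_optimizer), autodiff = :forward)
    return reshape(Optim.minimizer(res), size(cmat))
end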
Any help would be greatly appreciated. Thanks!