Need some help debugging an error from ForwardDiff. Let me know what additional information would help!
ERROR:
Stacktrace:
[1] ≺(::Nothing, ::Type) at C:\Users\bdeon\.julia\packages\ForwardDiff\DVizx\src\dual.jl:49
[2] +(::ForwardDiff.Dual{ForwardDiff.Tag{typeof(objective_fxn),Float64},Float64,12}, ::ForwardDiff.Dual{nothing,Float64,12}) at C:\Users\bdeon\.julia\packages\ForwardDiff\DVizx\src\dual.jl:135
[3] objective_fxn(::Array{ForwardDiff.Dual{ForwardDiff.Tag{typeof(objective_fxn),Float64},Float64,12},1}) at .\REPL[50]:16
[4] vector_mode_gradient!(::DiffResults.MutableDiffResult{1,Float64,Tuple{Array{Float64,1}}}, ::typeof(objective_fxn), ::Array{Float64,1}, ::ForwardDiff.GradientConfig{ForwardDiff.Tag{typeof(objective_fxn),Float64},Float64,12,Array{ForwardDiff.Dual{ForwardDiff.Tag{typeof(objective_fxn),Float64},Float64,12},1}}) at C:\Users\bdeon\.julia\packages\ForwardDiff\DVizx\src\apiutils.jl:37
[5] gradient! at C:\Users\bdeon\.julia\packages\ForwardDiff\DVizx\src\gradient.jl:35 [inlined]
[6] gradient! at C:\Users\bdeon\.julia\packages\ForwardDiff\DVizx\src\gradient.jl:33 [inlined]
[7] (::getfield(NLSolversBase, Symbol("##14#18")){Float64,typeof(objective_fxn),ForwardDiff.GradientConfig{ForwardDiff.Tag{typeof(objective_fxn),Float64},Float64,12,Array{ForwardDiff.Dual{ForwardDiff.Tag{typeof(objective_fxn),Float64},Float64,12},1}}})(::Array{Float64,1}, ::Array{Float64,1}) at C:\Users\bdeon\.julia\packages\NLSolversBase\NsXIC\src\objective_types\oncedifferentiable.jl:69
[8] value_gradient!!(::OnceDifferentiable{Float64,Array{Float64,1},Array{Float64,1}}, ::Array{Float64,1}) at C:\Users\bdeon\.julia\packages\NLSolversBase\NsXIC\src\interface.jl:82
[9] initial_state(::LBFGS{Nothing,InitialStatic{Float64},HagerZhang{Float64,Base.RefValue{Bool}},getfield(Optim, Symbol("##19#21"))}, ::Optim.Options{Float64,Nothing}, ::OnceDifferentiable{Float64,Array{Float64,1},Array{Float64,1}}, ::Array{Float64,1}) at C:\Users\bdeon\.julia\packages\Optim\DKSJU\src\multivariate\solvers\first_order\l_bfgs.jl:158
[10] optimize(::OnceDifferentiable{Float64,Array{Float64,1},Array{Float64,1}}, ::Array{Float64,1}, ::LBFGS{Nothing,InitialStatic{Float64},HagerZhang{Float64,Base.RefValue{Bool}},getfield(Optim, Symbol("##19#21"))}, ::Optim.Options{Float64,Nothing}) at C:\Users\bdeon\.julia\packages\Optim\DKSJU\src\multivariate\optimize\optimize.jl:33
[11] #optimize#87(::Bool, ::Symbol, ::Base.Iterators.Pairs{Union{},Union{},Tuple{},NamedTuple{(),Tuple{}}}, ::Function, ::OnceDifferentiable{Float64,Array{Float64,1},Array{Float64,1}}, ::Array{Float64,1}) at C:\Users\bdeon\.julia\packages\Optim\DKSJU\src\multivariate\optimize\interface.jl:64
[12] optimize(::OnceDifferentiable{Float64,Array{Float64,1},Array{Float64,1}}, ::Array{Float64,1}) at C:\Users\bdeon\.julia\packages\Optim\DKSJU\src\multivariate\optimize\interface.jl:58
[13] top-level scope at none:0
SYSTEM: show(lasterr) caused an error
ArgumentError("`nothing` should not be printed; use `show`, `repr`, or custom output instead.")
Stacktrace:
[1] through [13]: identical to the frames in the first trace above
[14] eval(::Module, ::Any) at .\boot.jl:328
[15] eval_user_input(::Any, ::REPL.REPLBackend) at C:\cygwin\home\Administrator\buildbot\worker\package_win64\build\usr\share\julia\stdlib\v1.1\REPL\src\REPL.jl:85
[16] macro expansion at C:\cygwin\home\Administrator\buildbot\worker\package_win64\build\usr\share\julia\stdlib\v1.1\REPL\src\REPL.jl:117 [inlined]
[17] (::getfield(REPL, Symbol("##26#27")){REPL.REPLBackend})() at .\task.jl:259
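The trailing "SYSTEM: show(lasterr) caused an error" part looks secondary: the REPL apparently crashed while printing the original error message, presumably because doing so involves printing the value nothing that appears as the second Dual's tag, and Base refuses to print nothing:

print(nothing)
# ERROR: ArgumentError: `nothing` should not be printed; use `show`, `repr`, or custom output instead.

That would explain why the ERROR: line at the top is empty. The real problem seems to be the tag mismatch in the first trace.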
This is in the context of using Optim with autodiff. The error occurs inside my objective function, at the line where log_px_typed[k] is accumulated (marked with a comment below). I am feeding the following objective function to Optim:
n, m = size(X)                      # X is the n-by-m data matrix from the enclosing scope
nodes, weights = gausshermite(q)    # Gauss-Hermite quadrature nodes and weights
z_cache1 = DiffCache(T, (m,q), Val{ForwardDiff.pickchunksize(2*m+2)})
z_cache2 = DiffCache(T, (q,), Val{ForwardDiff.pickchunksize(2*m+2)})

function objective_fxn(beta)
    a = @view beta[1:m]
    b = @view beta[(m+1):2m]
    tau = @view beta[(2m+1):end]
    P = get_cache(z_cache1, eltype(beta))
    P_typed = reinterpret(eltype(beta[1]), P)
    log_px = get_cache(z_cache2, eltype(beta))
    log_px_typed = reinterpret(eltype(beta[1]), log_px)
    fill_probs!(P_typed, sqrt(2.0 * tau[2]) .* nodes .+ tau[1], a, b)
    result = zero(eltype(beta))
    for p in 1:n
        log_px_typed .= zero(eltype(beta))
        for k in 1:q
            for i in 1:m
                # the error is thrown on this line
                log_px_typed[k] += X[p,i] * log(P[i,k]) + (1 - X[p,i]) * log(1 - P[i,k])
            end
        end
        result += logsumexp(log_px .+ log.(weights))
    end
    return -result + n*log(sqrt(pi))
end
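For completeness, the call into Optim looks essentially like this. I did not paste my exact call, so this is reconstructed from the stack trace; beta0 stands for whatever starting vector I pass in:

using Optim, NLSolversBase

# OnceDifferentiable with forward-mode AD, matching the
# GradientConfig{Tag{typeof(objective_fxn),Float64},...} in the trace
od = OnceDifferentiable(objective_fxn, beta0; autodiff = :forward)
res = optimize(od, beta0)   # ends up in LBFGS, per frames [9]-[12]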
This seems to be a problem with the ForwardDiff tag. If I try this:

TT = ForwardDiff.Dual{ForwardDiff.Tag{typeof(objective_fxn),Float64},Float64,12}
beta = [ones(TT,m); zeros(TT,m); 0.0; 1.0]
objective_fxn(beta)

I get the error, but if I try

TT = ForwardDiff.Dual{nothing,Float64,12}
beta = [ones(TT,m); zeros(TT,m); 0.0; 1.0]
objective_fxn(beta)

there is no error.
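Distilling just the + frame from the trace, the mismatch also reproduces without Optim at all. A minimal sketch, with the two Dual types copied verbatim from frame [2]:

using ForwardDiff

MyTag = ForwardDiff.Tag{typeof(objective_fxn),Float64}
x = one(ForwardDiff.Dual{MyTag,Float64,12})     # tagged dual, as gradient! feeds to objective_fxn
y = one(ForwardDiff.Dual{nothing,Float64,12})   # dual whose tag parameter is the value `nothing`
x + y   # hits ≺(::Nothing, ::Type) at dual.jl:49, exactly as in the stack trace

So somewhere inside the function a Dual{nothing,...} is being produced and added to the properly tagged elements of log_px_typed.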