Using pmap with ForwardDiff

I’m trying to run the following code:

println(workers())

@everywhere using ForwardDiff

@everywhere function V(r)
    return 0.5 * ((r[1]^2-1)^2)
end

@everywhere gradV = r-> ForwardDiff.gradient(V, r);

xx = 0:0.1:1;

Vvals = pmap(V,xx);
println(Vvals);

gradVvals = pmap(gradV,xx);
println(gradVvals);

rmprocs(workers());

and I’m receiving the error:

ERROR: LoadError: On worker 4:
MethodError: no method matching gradient(::#V, ::Float64)
Closest candidates are:
  gradient(::Any, ::StaticArrays.SArray) at /Users/gideonsimpson/.julia/v0.6/ForwardDiff/src/gradient.jl:42
  gradient(::Any, ::StaticArrays.SArray, ::ForwardDiff.GradientConfig) at /Users/gideonsimpson/.julia/v0.6/ForwardDiff/src/gradient.jl:43
  gradient(::Any, ::AbstractArray) at /Users/gideonsimpson/.julia/v0.6/ForwardDiff/src/gradient.jl:15
  ...
#13 at /Users/gideonsimpson/Desktop/pmap1.jl:10
#106 at ./distributed/process_messages.jl:268 [inlined]
run_work_thunk at ./distributed/process_messages.jl:56
macro expansion at ./distributed/process_messages.jl:268 [inlined]
#105 at ./event.jl:73
Stacktrace:
 [1] #571 at ./asyncmap.jl:178 [inlined]
 [2] foreach(::Base.##571#573, ::Array{Any,1}) at ./abstractarray.jl:1733
 [3] maptwice(::Function, ::Channel{Any}, ::Array{Any,1}, ::StepRangeLen{Float64,Base.TwicePrecision{Float64},Base.TwicePrecision{Float64}}, ::Vararg{StepRangeLen{Float64,Base.TwicePrecision{Float64},Base.TwicePrecision{Float64}},N} where N) at ./asyncmap.jl:178
 [4] wrap_n_exec_twice(::Channel{Any}, ::Array{Any,1}, ::Base.Distributed.##204#207{WorkerPool}, ::Function, ::StepRangeLen{Float64,Base.TwicePrecision{Float64},Base.TwicePrecision{Float64}}, ::Vararg{StepRangeLen{Float64,Base.TwicePrecision{Float64},Base.TwicePrecision{Float64}},N} where N) at ./asyncmap.jl:154
 [5] #async_usemap#556(::Function, ::Void, ::Function, ::Base.Distributed.##188#190, ::StepRangeLen{Float64,Base.TwicePrecision{Float64},Base.TwicePrecision{Float64}}, ::Vararg{StepRangeLen{Float64,Base.TwicePrecision{Float64},Base.TwicePrecision{Float64}},N} where N) at ./asyncmap.jl:103
 [6] (::Base.#kw##async_usemap)(::Array{Any,1}, ::Base.#async_usemap, ::Function, ::StepRangeLen{Float64,Base.TwicePrecision{Float64},Base.TwicePrecision{Float64}}, ::Vararg{StepRangeLen{Float64,Base.TwicePrecision{Float64},Base.TwicePrecision{Float64}},N} where N) at ./<missing>:0
 [7] (::Base.#kw##asyncmap)(::Array{Any,1}, ::Base.#asyncmap, ::Function, ::StepRangeLen{Float64,Base.TwicePrecision{Float64},Base.TwicePrecision{Float64}}) at ./<missing>:0
 [8] #pmap#203(::Bool, ::Int64, ::Void, ::Array{Any,1}, ::Void, ::Function, ::WorkerPool, ::Function, ::StepRangeLen{Float64,Base.TwicePrecision{Float64},Base.TwicePrecision{Float64}}) at ./distributed/pmap.jl:126
 [9] pmap(::WorkerPool, ::Function, ::StepRangeLen{Float64,Base.TwicePrecision{Float64},Base.TwicePrecision{Float64}}) at ./distributed/pmap.jl:101
 [10] #pmap#213(::Array{Any,1}, ::Function, ::Function, ::StepRangeLen{Float64,Base.TwicePrecision{Float64},Base.TwicePrecision{Float64}}) at ./distributed/pmap.jl:156
 [11] pmap(::Function, ::StepRangeLen{Float64,Base.TwicePrecision{Float64},Base.TwicePrecision{Float64}}) at ./distributed/pmap.jl:156
 [12] include_from_node1(::String) at ./loading.jl:576
 [13] include(::String) at ./sysimg.jl:14
while loading /Users/gideonsimpson/Desktop/pmap1.jl, in expression starting on line 17

More generally, I’m also curious about where @everywhere is actually required when performing this kind of computation.

pmap calls gradV once per element of xx, and each element of the range 0:0.1:1 is a Float64. ForwardDiff.gradient simply has no method accepting a Float64 as its second argument; as the "closest candidates" in the error show, the second argument must be an AbstractArray. (Note that pmap(V, xx) succeeds only because indexing a scalar, r[1], returns the scalar itself, so V happily accepts a Float64.)
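
Two possible fixes, sketched under the assumption that you want the derivative of V at each scalar point in xx (dV below is just an illustrative name): either wrap each element in a one-element vector so gradient receives an AbstractArray, or use ForwardDiff.derivative, which is the intended entry point for scalar arguments.

# Option 1: wrap each scalar in a 1-element vector so gradient sees an AbstractArray
gradVvals = pmap(gradV, [[x] for x in xx]);   # each result is a 1-element Array
println(gradVvals);

# Option 2: the input really is a scalar, so use ForwardDiff.derivative instead
@everywhere dV = x -> ForwardDiff.derivative(V, x);   # V(x) still works, since x[1] == x for a scalar
dVvals = pmap(dV, xx);
println(dVvals);

As for the @everywhere question: it is needed for anything the workers themselves must execute, so using ForwardDiff and the definitions of V, gradV, and dV above all require it, while the top-level pmap and println calls run only on the master process and do not.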