Error in diffeq_fd command of DiffEqFlux

I’m trying to run the neural partial differential equation code from the web site (Neural Jump SDEs (Jump Diffusions) and Neural PDEs - Stochastic Lifestyle). Thanks, Chris, for publishing the interesting code.

When I run the code, I get the following error. Could you tell me how to fix it?

julia> @time diffeq_fd(p1,Array,length(u0)*length(0.0f0:5.0f0:100.0f0),prob,ROCK2(),progress=true,
                       saveat=0.0f0:5.0f0:100.0f0)
ERROR: cannot assign variables in other modules
Stacktrace:
 [1] #9 at .\reflection.jl:684 [inlined]
 [2] ntuple(::getfield(Base, Symbol("##9#10")){DataType}, ::Int64) at .\tuple.jl:136
 [3] fieldtypes(::Type) at .\reflection.jl:684
 [4] explain_nonisbits(::Any, ::Int64) at C:\Users\fuk\.julia\packages\CUDAnative\gJDZI\src\compiler\validation.jl:44
 [5] explain_nonisbits(::Any, ::Int64) at C:\Users\fuk\.julia\packages\CUDAnative\gJDZI\src\compiler\validation.jl:47 (repeats 5 times)
 [6] explain_nonisbits at C:\Users\fuk\.julia\packages\CUDAnative\gJDZI\src\compiler\validation.jl:42 [inlined]
 [7] check_invocation(::CUDAnative.CompilerJob, ::LLVM.Function) at C:\Users\fuk\.julia\packages\CUDAnative\gJDZI\src\compiler\validation.jl:68
 [8] macro expansion at C:\Users\fuk\.julia\packages\CUDAnative\gJDZI\src\compiler\driver.jl:187 [inlined]
 [9] macro expansion at C:\Users\fuk\.julia\packages\TimerOutputs\7zSea\src\TimerOutput.jl:216 [inlined]
 [10] #codegen#121(::Bool, ::Bool, ::Bool, ::Bool, ::Bool, ::Function, ::Symbol, ::CUDAnative.CompilerJob) at C:\Users\fuk\.julia\packages\CUDAnative\gJDZI\src\compiler\driver.jl:186
 [11] #codegen at C:\Users\fuk\.julia\packages\CUDAnative\gJDZI\src\compiler\driver.jl:0 [inlined]
 [12] #compile#120(::Bool, ::Bool, ::Bool, ::Bool, ::Bool, ::Function, ::Symbol, ::CUDAnative.CompilerJob) at C:\Users\fuk\.julia\packages\CUDAnative\gJDZI\src\compiler\driver.jl:47
 [13] #compile#119 at .\none:0 [inlined]
 [14] #compile at .\none:0 [inlined] (repeats 2 times)
 [15] macro expansion at C:\Users\fuk\.julia\packages\CUDAnative\gJDZI\src\execution.jl:388 [inlined]
 [16] #cufunction#161(::Nothing, ::Base.Iterators.Pairs{Union{},Union{},Tuple{},NamedTuple{,Tuple{}}}, ::typeof(CUDAnative.cufunction), ::getfield(GPUArrays, Symbol("##23#24")), ::Type{Tuple{CuArrays.CuKernelState,CUDAnative.CuDeviceArray{ForwardDiff.Dual{ForwardDiff.Tag{getfield(DiffEqFlux, Symbol("##11#16")){CuArray{Float32,1},Base.Iterators.Pairs{Symbol,Any,Tuple{Symbol,Symbol},NamedTuple{(:progress, :saveat),Tuple{Bool,StepRangeLen{Float32,Float64,Float64}}}},UnionAll,ODEProblem{CuArray{Float32,1},Tuple{Float32,Float32},false,CuArray{Float32,1},ODEFunction{false,typeof(dudt_),UniformScaling{Bool},Nothing,Nothing,Nothing,Nothing,Nothing,Nothing,Nothing,Nothing,Nothing},Nothing,DiffEqBase.StandardODEProblem},Tuple{ROCK2}},Float32},Float32,12},1,CUDAnative.AS.Global},Base.Broadcast.Broadcasted{Nothing,Tuple{Base.OneTo{Int64}},typeof(convert),Tuple{CUDAnative.CuRefValue{DataType},Base.Broadcast.Extruded{CUDAnative.CuDeviceArray{Float32,1,CUDAnative.AS.Global},Tuple{Bool},Tuple{Int64}}}}}}) at C:\Users\fuk\.julia\packages\CUDAnative\gJDZI\src\execution.jl:356
 [17] cufunction(::Function, ::Type) at C:\Users\fuk\.julia\packages\CUDAnative\gJDZI\src\execution.jl:356
 [18] macro expansion at C:\Users\fuk\.julia\packages\CUDAnative\gJDZI\src\execution.jl:174 [inlined]
 [19] macro expansion at .\gcutils.jl:87 [inlined]
 [20] macro expansion at C:\Users\fuk\.julia\packages\CUDAnative\gJDZI\src\execution.jl:171 [inlined]
 [21] _gpu_call(::CuArrays.CuArrayBackend, ::Function, ::CuArray{ForwardDiff.Dual{ForwardDiff.Tag{getfield(DiffEqFlux, Symbol("##11#16")){CuArray{Float32,1},Base.Iterators.Pairs{Symbol,Any,Tuple{Symbol,Symbol},NamedTuple{(:progress, :saveat),Tuple{Bool,StepRangeLen{Float32,Float64,Float64}}}},UnionAll,ODEProblem{CuArray{Float32,1},Tuple{Float32,Float32},false,CuArray{Float32,1},ODEFunction{false,typeof(dudt_),UniformScaling{Bool},Nothing,Nothing,Nothing,Nothing,Nothing,Nothing,Nothing,Nothing,Nothing},Nothing,DiffEqBase.StandardODEProblem},Tuple{ROCK2}},Float32},Float32,12},1}, ::Tuple{CuArray{ForwardDiff.Dual{ForwardDiff.Tag{getfield(DiffEqFlux, Symbol("##11#16")){CuArray{Float32,1},Base.Iterators.Pairs{Symbol,Any,Tuple{Symbol,Symbol},NamedTuple{(:progress, :saveat),Tuple{Bool,StepRangeLen{Float32,Float64,Float64}}}},UnionAll,ODEProblem{CuArray{Float32,1},Tuple{Float32,Float32},false,CuArray{Float32,1},ODEFunction{false,typeof(dudt_),UniformScaling{Bool},Nothing,Nothing,Nothing,Nothing,Nothing,Nothing,Nothing,Nothing,Nothing},Nothing,DiffEqBase.StandardODEProblem},Tuple{ROCK2}},Float32},Float32,12},1},Base.Broadcast.Broadcasted{Nothing,Tuple{Base.OneTo{Int64}},typeof(convert),Tuple{Base.RefValue{Type{ForwardDiff.Dual{ForwardDiff.Tag{getfield(DiffEqFlux, Symbol("##11#16")){CuArray{Float32,1},Base.Iterators.Pairs{Symbol,Any,Tuple{Symbol,Symbol},NamedTuple{(:progress, :saveat),Tuple{Bool,StepRangeLen{Float32,Float64,Float64}}}},UnionAll,ODEProblem{CuArray{Float32,1},Tuple{Float32,Float32},false,CuArray{Float32,1},ODEFunction{false,typeof(dudt_),UniformScaling{Bool},Nothing,Nothing,Nothing,Nothing,Nothing,Nothing,Nothing,Nothing,Nothing},Nothing,DiffEqBase.StandardODEProblem},Tuple{ROCK2}},Float32},Float32,12}}},Base.Broadcast.Extruded{CuArray{Float32,1},Tuple{Bool},Tuple{Int64}}}}}, ::Tuple{Tuple{Int64},Tuple{Int64}}) at C:\Users\fuk\.julia\packages\CuArrays\PwSdF\src\gpuarray_interface.jl:59
 [22] gpu_call at C:\Users\fuk\.julia\packages\GPUArrays\pJw1Y\src\abstract_gpu_interface.jl:151 [inlined]
 [23] gpu_call at C:\Users\fuk\.julia\packages\GPUArrays\pJw1Y\src\abstract_gpu_interface.jl:128 [inlined]
 [24] copyto! at C:\Users\fuk\.julia\packages\GPUArrays\pJw1Y\src\broadcast.jl:48 [inlined]
 [25] copyto! at .\broadcast.jl:797 [inlined]
 [26] copy at .\broadcast.jl:773 [inlined]
 [27] materialize at .\broadcast.jl:753 [inlined]
 [28] (::getfield(DiffEqFlux, Symbol("##11#16")){CuArray{Float32,1},Base.Iterators.Pairs{Symbol,Any,Tuple{Symbol,Symbol},NamedTuple{(:progress, :saveat),Tuple{Bool,StepRangeLen{Float32,Float64,Float64}}}},UnionAll,ODEProblem{CuArray{Float32,1},Tuple{Float32,Float32},false,CuArray{Float32,1},ODEFunction{false,typeof(dudt_),UniformScaling{Bool},Nothing,Nothing,Nothing,Nothing,Nothing,Nothing,Nothing,Nothing,Nothing},Nothing,DiffEqBase.StandardODEProblem},Tuple{ROCK2}})(::CuArray{ForwardDiff.Dual{ForwardDiff.Tag{getfield(DiffEqFlux, Symbol("##11#16")){CuArray{Float32,1},Base.Iterators.Pairs{Symbol,Any,Tuple{Symbol,Symbol},NamedTuple{(:progress, :saveat),Tuple{Bool,StepRangeLen{Float32,Float64,Float64}}}},UnionAll,ODEProblem{CuArray{Float32,1},Tuple{Float32,Float32},false,CuArray{Float32,1},ODEFunction{false,typeof(dudt_),UniformScaling{Bool},Nothing,Nothing,Nothing,Nothing,Nothing,Nothing,Nothing,Nothing,Nothing},Nothing,DiffEqBase.StandardODEProblem},Tuple{ROCK2}},Float32},Float32,12},1}) at C:\Users\fuk\.julia\packages\DiffEqFlux\B7Omg\src\Flux\layers.jl:32
 [29] chunk_mode_jacobian!(::DiffResults.MutableDiffResult{1,Array{Float64,1},Tuple{Array{Float64,2}}}, ::getfield(DiffEqFlux, Symbol("##11#16")){CuArray{Float32,1},Base.Iterators.Pairs{Symbol,Any,Tuple{Symbol,Symbol},NamedTuple{(:progress, :saveat),Tuple{Bool,StepRangeLen{Float32,Float64,Float64}}}},UnionAll,ODEProblem{CuArray{Float32,1},Tuple{Float32,Float32},false,CuArray{Float32,1},ODEFunction{false,typeof(dudt_),UniformScaling{Bool},Nothing,Nothing,Nothing,Nothing,Nothing,Nothing,Nothing,Nothing,Nothing},Nothing,DiffEqBase.StandardODEProblem},Tuple{ROCK2}}, ::CuArray{Float32,1}, ::ForwardDiff.JacobianConfig{ForwardDiff.Tag{getfield(DiffEqFlux, Symbol("##11#16")){CuArray{Float32,1},Base.Iterators.Pairs{Symbol,Any,Tuple{Symbol,Symbol},NamedTuple{(:progress, :saveat),Tuple{Bool,StepRangeLen{Float32,Float64,Float64}}}},UnionAll,ODEProblem{CuArray{Float32,1},Tuple{Float32,Float32},false,CuArray{Float32,1},ODEFunction{false,typeof(dudt_),UniformScaling{Bool},Nothing,Nothing,Nothing,Nothing,Nothing,Nothing,Nothing,Nothing,Nothing},Nothing,DiffEqBase.StandardODEProblem},Tuple{ROCK2}},Float32},Float32,12,CuArray{ForwardDiff.Dual{ForwardDiff.Tag{getfield(DiffEqFlux, Symbol("##11#16")){CuArray{Float32,1},Base.Iterators.Pairs{Symbol,Any,Tuple{Symbol,Symbol},NamedTuple{(:progress, :saveat),Tuple{Bool,StepRangeLen{Float32,Float64,Float64}}}},UnionAll,ODEProblem{CuArray{Float32,1},Tuple{Float32,Float32},false,CuArray{Float32,1},ODEFunction{false,typeof(dudt_),UniformScaling{Bool},Nothing,Nothing,Nothing,Nothing,Nothing,Nothing,Nothing,Nothing,Nothing},Nothing,DiffEqBase.StandardODEProblem},Tuple{ROCK2}},Float32},Float32,12},1}}) at C:\Users\fuk\.julia\packages\ForwardDiff\N0wMF\src\jacobian.jl:213
 [30] jacobian!(::DiffResults.MutableDiffResult{1,Array{Float64,1},Tuple{Array{Float64,2}}}, ::Function, ::CuArray{Float32,1}, ::ForwardDiff.JacobianConfig{ForwardDiff.Tag{getfield(DiffEqFlux, Symbol("##11#16")){CuArray{Float32,1},Base.Iterators.Pairs{Symbol,Any,Tuple{Symbol,Symbol},NamedTuple{(:progress, :saveat),Tuple{Bool,StepRangeLen{Float32,Float64,Float64}}}},UnionAll,ODEProblem{CuArray{Float32,1},Tuple{Float32,Float32},false,CuArray{Float32,1},ODEFunction{false,typeof(dudt_),UniformScaling{Bool},Nothing,Nothing,Nothing,Nothing,Nothing,Nothing,Nothing,Nothing,Nothing},Nothing,DiffEqBase.StandardODEProblem},Tuple{ROCK2}},Float32},Float32,12,CuArray{ForwardDiff.Dual{ForwardDiff.Tag{getfield(DiffEqFlux, Symbol("##11#16")){CuArray{Float32,1},Base.Iterators.Pairs{Symbol,Any,Tuple{Symbol,Symbol},NamedTuple{(:progress, :saveat),Tuple{Bool,StepRangeLen{Float32,Float64,Float64}}}},UnionAll,ODEProblem{CuArray{Float32,1},Tuple{Float32,Float32},false,CuArray{Float32,1},ODEFunction{false,typeof(dudt_),UniformScaling{Bool},Nothing,Nothing,Nothing,Nothing,Nothing,Nothing,Nothing,Nothing,Nothing},Nothing,DiffEqBase.StandardODEProblem},Tuple{ROCK2}},Float32},Float32,12},1}}, ::Val{true}) at C:\Users\fuk\.julia\packages\ForwardDiff\N0wMF\src\jacobian.jl:56
 [31] jacobian!(::DiffResults.MutableDiffResult{1,Array{Float64,1},Tuple{Array{Float64,2}}}, ::Function, ::CuArray{Float32,1}, ::ForwardDiff.JacobianConfig{ForwardDiff.Tag{getfield(DiffEqFlux, Symbol("##11#16")){CuArray{Float32,1},Base.Iterators.Pairs{Symbol,Any,Tuple{Symbol,Symbol},NamedTuple{(:progress, :saveat),Tuple{Bool,StepRangeLen{Float32,Float64,Float64}}}},UnionAll,ODEProblem{CuArray{Float32,1},Tuple{Float32,Float32},false,CuArray{Float32,1},ODEFunction{false,typeof(dudt_),UniformScaling{Bool},Nothing,Nothing,Nothing,Nothing,Nothing,Nothing,Nothing,Nothing,Nothing},Nothing,DiffEqBase.StandardODEProblem},Tuple{ROCK2}},Float32},Float32,12,CuArray{ForwardDiff.Dual{ForwardDiff.Tag{getfield(DiffEqFlux, Symbol("##11#16")){CuArray{Float32,1},Base.Iterators.Pairs{Symbol,Any,Tuple{Symbol,Symbol},NamedTuple{(:progress, :saveat),Tuple{Bool,StepRangeLen{Float32,Float64,Float64}}}},UnionAll,ODEProblem{CuArray{Float32,1},Tuple{Float32,Float32},false,CuArray{Float32,1},ODEFunction{false,typeof(dudt_),UniformScaling{Bool},Nothing,Nothing,Nothing,Nothing,Nothing,Nothing,Nothing,Nothing,Nothing},Nothing,DiffEqBase.StandardODEProblem},Tuple{ROCK2}},Float32},Float32,12},1}}) at C:\Users\fuk\.julia\packages\ForwardDiff\N0wMF\src\jacobian.jl:52 (repeats 2 times)
 [32] #_forward#10(::CuArray{Float32,1}, ::Base.Iterators.Pairs{Symbol,Any,Tuple{Symbol,Symbol},NamedTuple{(:progress, :saveat),Tuple{Bool,StepRangeLen{Float32,Float64,Float64}}}}, ::Function, ::typeof(diffeq_fd), ::TrackedArray{…,CuArray{Float32,1}}, ::Type, ::Int64, ::ODEProblem{CuArray{Float32,1},Tuple{Float32,Float32},false,CuArray{Float32,1},ODEFunction{false,typeof(dudt_),UniformScaling{Bool},Nothing,Nothing,Nothing,Nothing,Nothing,Nothing,Nothing,Nothing,Nothing},Nothing,DiffEqBase.StandardODEProblem}, ::ROCK2) at C:\Users\fuk\.julia\packages\DiffEqFlux\B7Omg\src\Flux\layers.jl:43
 [33] (::getfield(Tracker, Symbol("#kw##_forward")))(::NamedTuple{(:progress, :saveat),Tuple{Bool,StepRangeLen{Float32,Float64,Float64}}}, ::typeof(Tracker._forward), ::typeof(diffeq_fd), ::TrackedArray{…,CuArray{Float32,1}}, ::Type, ::Int64, ::ODEProblem{CuArray{Float32,1},Tuple{Float32,Float32},false,CuArray{Float32,1},ODEFunction{false,typeof(dudt_),UniformScaling{Bool},Nothing,Nothing,Nothing,Nothing,Nothing,Nothing,Nothing,Nothing,Nothing},Nothing,DiffEqBase.StandardODEProblem}, ::ROCK2) at .\none:0
 [34] #track#1(::Base.Iterators.Pairs{Symbol,Any,Tuple{Symbol,Symbol},NamedTuple{(:progress, :saveat),Tuple{Bool,StepRangeLen{Float32,Float64,Float64}}}}, ::Function, ::typeof(diffeq_fd), ::TrackedArray{…,CuArray{Float32,1}}, ::Vararg{Any,N} where N) at C:\Users\fuk\.julia\packages\Tracker\RRYy6\src\Tracker.jl:51
 [35] #track at .\none:0 [inlined]
 [36] #diffeq_fd#9 at C:\Users\fuk\.julia\packages\DiffEqFlux\B7Omg\src\Flux\layers.jl:29 [inlined]
 [37] (::getfield(DiffEqFlux, Symbol("#kw##diffeq_fd")))(::NamedTuple{(:progress, :saveat),Tuple{Bool,StepRangeLen{Float32,Float64,Float64}}}, ::typeof(diffeq_fd), ::TrackedArray{…,CuArray{Float32,1}}, ::Type, ::Int64, ::ODEProblem{CuArray{Float32,1},Tuple{Float32,Float32},false,CuArray{Float32,1},ODEFunction{false,typeof(dudt_),UniformScaling{Bool},Nothing,Nothing,Nothing,Nothing,Nothing,Nothing,Nothing,Nothing,Nothing},Nothing,DiffEqBase.StandardODEProblem}, ::ROCK2) at .\none:0
 [38] top-level scope at util.jl:156

julia>

Hey sorry, haven’t been able to respond to the email yet. I realized I had a local patch on the ROCK methods when I made the blog post, related to https://github.com/JuliaDiffEq/OrdinaryDiffEq.jl/pull/765. I’ll get that cleaned up and merged.

I actually don’t remember seeing this error before, peculiar. I have an MWE for the GPU developers though:

using ForwardDiff
a = CuArray(ForwardDiff.Dual{Nothing}.([1.0,2.0,3.0]))
b = [1.0,2.0,3.0]
convert.(eltype(a),b)
convert.(eltype(a),CuArray(b))

and will upstream this.

Dear Chris,

Thank you for your prompt reply.

I get the same error if I change the ODE solver scheme from ROCK2 to BS3.

I ran the code in your message and got the following error.

julia> using CuArrays

julia> using ForwardDiff

julia> a = CuArray(ForwardDiff.Dual{Nothing}.([1.0,2.0,3.0]))
3-element CuArray{ForwardDiff.Dual{Nothing,Float64,0},1}:
 Dual{Nothing}(1.0)
 Dual{Nothing}(2.0)
 Dual{Nothing}(3.0)

julia> b = [1.0,2.0,3.0]
3-element Array{Float64,1}:
 1.0
 2.0
 3.0

julia> convert.(eltype(a),b)
3-element Array{ForwardDiff.Dual{Nothing,Float64,0},1}:
 Dual{Nothing}(1.0)
 Dual{Nothing}(2.0)
 Dual{Nothing}(3.0)

julia> convert.(eltype(a),CuArray(b))
ERROR: cannot assign variables in other modules
Stacktrace:
 [1] #9 at .\reflection.jl:684 [inlined]
 [2] ntuple(::getfield(Base, Symbol("##9#10")){DataType}, ::Int64) at .\tuple.jl:136
 [3] fieldtypes(::Type) at .\reflection.jl:684
 [4] explain_nonisbits(::Any, ::Int64) at C:\Users\fuk\.julia\packages\CUDAnative\gJDZI\src\compiler\validation.jl:44
 [5] explain_nonisbits(::Any, ::Int64) at C:\Users\fuk\.julia\packages\CUDAnative\gJDZI\src\compiler\validation.jl:47 (repeats 5 times)
 [6] explain_nonisbits at C:\Users\fuk\.julia\packages\CUDAnative\gJDZI\src\compiler\validation.jl:42 [inlined]
 [7] check_invocation(::CUDAnative.CompilerJob, ::LLVM.Function) at C:\Users\fuk\.julia\packages\CUDAnative\gJDZI\src\compiler\validation.jl:68
 [8] macro expansion at C:\Users\fuk\.julia\packages\CUDAnative\gJDZI\src\compiler\driver.jl:187 [inlined]
 [9] macro expansion at C:\Users\fuk\.julia\packages\TimerOutputs\7zSea\src\TimerOutput.jl:216 [inlined]
 [10] #codegen#121(::Bool, ::Bool, ::Bool, ::Bool, ::Bool, ::Function, ::Symbol, ::CUDAnative.CompilerJob) at C:\Users\fuk\.julia\packages\CUDAnative\gJDZI\src\compiler\driver.jl:186
 [11] #codegen at C:\Users\fuk\.julia\packages\CUDAnative\gJDZI\src\compiler\driver.jl:0 [inlined]
 [12] #compile#120(::Bool, ::Bool, ::Bool, ::Bool, ::Bool, ::Function, ::Symbol, ::CUDAnative.CompilerJob) at C:\Users\fuk\.julia\packages\CUDAnative\gJDZI\src\compiler\driver.jl:47
 [13] #compile at .\none:0 [inlined]
 [14] #compile#119 at C:\Users\fuk\.julia\packages\CUDAnative\gJDZI\src\compiler\driver.jl:28 [inlined]
 [15] #compile at .\none:0 [inlined] (repeats 2 times)
 [16] macro expansion at C:\Users\fuk\.julia\packages\CUDAnative\gJDZI\src\execution.jl:388 [inlined]
 [17] #cufunction#161(::Nothing, ::Base.Iterators.Pairs{Union{},Union{},Tuple{},NamedTuple{(),Tuple{}}}, ::typeof(CUDAnative.cufunction), ::getfield(GPUArrays, Symbol("##23#24")), ::Type{Tuple{CuArrays.CuKernelState,CUDAnative.CuDeviceArray{ForwardDiff.Dual{Nothing,Float64,0},1,CUDAnative.AS.Global},Base.Broadcast.Broadcasted{Nothing,Tuple{Base.OneTo{Int64}},typeof(convert),Tuple{CUDAnative.CuRefValue{DataType},Base.Broadcast.Extruded{CUDAnative.CuDeviceArray{Float64,1,CUDAnative.AS.Global},Tuple{Bool},Tuple{Int64}}}}}}) at C:\Users\fuk\.julia\packages\CUDAnative\gJDZI\src\execution.jl:356
 [18] cufunction(::Function, ::Type) at C:\Users\fuk\.julia\packages\CUDAnative\gJDZI\src\execution.jl:356
 [19] macro expansion at C:\Users\fuk\.julia\packages\CUDAnative\gJDZI\src\execution.jl:174 [inlined]
 [20] macro expansion at .\gcutils.jl:87 [inlined]
 [21] macro expansion at C:\Users\fuk\.julia\packages\CUDAnative\gJDZI\src\execution.jl:171 [inlined]
 [22] _gpu_call(::CuArrays.CuArrayBackend, ::Function, ::CuArray{ForwardDiff.Dual{Nothing,Float64,0},1}, ::Tuple{CuArray{ForwardDiff.Dual{Nothing,Float64,0},1},Base.Broadcast.Broadcasted{Nothing,Tuple{Base.OneTo{Int64}},typeof(convert),Tuple{Base.RefValue{Type{ForwardDiff.Dual{Nothing,Float64,0}}},Base.Broadcast.Extruded{CuArray{Float64,1},Tuple{Bool},Tuple{Int64}}}}}, ::Tuple{Tuple{Int64},Tuple{Int64}}) at C:\Users\fuk\.julia\packages\CuArrays\PwSdF\src\gpuarray_interface.jl:59
 [23] gpu_call at C:\Users\fuk\.julia\packages\GPUArrays\pJw1Y\src\abstract_gpu_interface.jl:151 [inlined]
 [24] gpu_call at C:\Users\fuk\.julia\packages\GPUArrays\pJw1Y\src\abstract_gpu_interface.jl:128 [inlined]
 [25] copyto! at C:\Users\fuk\.julia\packages\GPUArrays\pJw1Y\src\broadcast.jl:48 [inlined]
 [26] copyto! at .\broadcast.jl:797 [inlined]
 [27] copy(::Base.Broadcast.Broadcasted{Base.Broadcast.ArrayStyle{CuArray},Tuple{Base.OneTo{Int64}},typeof(convert),Tuple{Base.RefValue{Type{ForwardDiff.Dual{Nothing,Float64,0}}},CuArray{Float64,1}}}) at .\broadcast.jl:773
 [28] materialize(::Base.Broadcast.Broadcasted{Base.Broadcast.ArrayStyle{CuArray},Nothing,typeof(convert),Tuple{Base.RefValue{Type{ForwardDiff.Dual{Nothing,Float64,0}}},CuArray{Float64,1}}}) at .\broadcast.jl:753
 [29] top-level scope at none:0

julia>

Yes exactly, that MWE is pre-solve.