My model can't be transferred to the GPU. Adapt.jl problem?

Here's some more of the stack trace:

 [33] fmap(f::Flux.var"#182#183", x::Flux.Chain{Tuple{Flux.Parallel{var"#240#253", Tuple{Flux.Chain{Tuple{var"#tr_nil_f#246"}}, Flux.Chain{Tuple{var"#coce#247"{Vector{Bool}}, Flux.Conv{2, 2, var"#σ#243", Array{Float32, 4}, Vector{Float32}}, Flux.Conv{2, 2, typeof(identity), Array{Float32, 4}, Bool}, Flux.BatchNorm{var"#σ#243", Vector{Float32}, Float32, Vector{Float32}}, Flux.Dropout{Float64, Colon, Random.TaskLocalRNG}, Flux.Conv{2, 2, var"#σ#243", Array{Float32, 4}, Vector{Float32}}, Flux.Conv{2, 2, typeof(identity), Array{Float32, 4}, Bool}, Flux.BatchNorm{var"#σ#243", Vector{Float32}, Float32, Vector{Float32}}, Flux.Dropout{Float64, Colon, Random.TaskLocalRNG}, Flux.Conv{2, 2, var"#σ#243", Array{Float32, 4}, Vector{Float32}}, Flux.Conv{2, 2, typeof(identity), Array{Float32, 4}, Bool}, Flux.BatchNorm{var"#σ#243", Vector{Float32}, Float32, Vector{Float32}}, Flux.Dropout{Float64, Colon, Random.TaskLocalRNG}, Flux.Conv{2, 2, var"#σ#243", Array{Float32, 4}, Vector{Float32}}, Flux.Conv{2, 2, typeof(identity), Array{Float32, 4}, Bool}, Flux.BatchNorm{var"#σ#243", Vector{Float32}, Float32, Vector{Float32}}, Flux.Dropout{Float64, Colon, Random.TaskLocalRNG}, Flux.Conv{2, 2, var"#σ#243", Array{Float32, 4}, Vector{Float32}}, Flux.Conv{2, 2, typeof(identity), Array{Float32, 4}, Bool}, Flux.BatchNorm{var"#σ#243", Vector{Float32}, Float32, Vector{Float32}}, Flux.Dropout{Float64, Colon, Random.TaskLocalRNG}, Flux.Conv{2, 4, var"#σ#243", Array{Float32, 4}, Vector{Float32}}, Flux.Conv{2, 4, typeof(identity), Array{Float32, 4}, Bool}, Flux.BatchNorm{var"#σ#243", Vector{Float32}, Float32, Vector{Float32}}, Flux.Dropout{Float64, Colon, Random.TaskLocalRNG}, Flux.Conv{2, 4, var"#σ#243", Array{Float32, 4}, Vector{Float32}}, var"#236#248"}}, Flux.Chain{Tuple{var"#sace#250"{Vector{Bool}}, Flux.Conv{2, 2, var"#σ#243", Array{Float32, 4}, Vector{Float32}}, Flux.Conv{2, 2, typeof(identity), Array{Float32, 4}, Bool}, Flux.BatchNorm{var"#σ#243", Vector{Float32}, Float32, Vector{Float32}}, Flux.Dropout{Float64, Colon, Random.TaskLocalRNG}, Flux.Conv{2, 2, var"#σ#243", Array{Float32, 4}, Vector{Float32}}, Flux.Conv{2, 2, typeof(identity), Array{Float32, 4}, Bool}, Flux.BatchNorm{var"#σ#243", Vector{Float32}, Float32, Vector{Float32}}, Flux.Dropout{Float64, Colon, Random.TaskLocalRNG}, Flux.Conv{2, 2, var"#σ#243", Array{Float32, 4}, Vector{Float32}}, Flux.Conv{2, 2, typeof(identity), Array{Float32, 4}, Bool}, Flux.BatchNorm{var"#σ#243", Vector{Float32}, Float32, Vector{Float32}}, Flux.Dropout{Float64, Colon, Random.TaskLocalRNG}, Flux.Conv{2, 2, var"#σ#243", Array{Float32, 4}, Vector{Float32}}, Flux.Conv{2, 2, typeof(identity), Array{Float32, 4}, Bool}, Flux.BatchNorm{var"#σ#243", Vector{Float32}, Float32, Vector{Float32}}, Flux.Dropout{Float64, Colon, Random.TaskLocalRNG}, Flux.Conv{2, 2, var"#σ#243", Array{Float32, 4}, Vector{Float32}}, Flux.Conv{2, 2, typeof(identity), Array{Float32, 4}, Bool}, Flux.BatchNorm{var"#σ#243", Vector{Float32}, Float32, Vector{Float32}}, Flux.Dropout{Float64, Colon, Random.TaskLocalRNG}, Flux.Conv{2, 4, var"#σ#243", Array{Float32, 4}, Vector{Float32}}, Flux.Conv{2, 4, typeof(identity), Array{Float32, 4}, Bool}, Flux.BatchNorm{var"#σ#243", Vector{Float32}, Float32, Vector{Float32}}, Flux.Dropout{Float64, Colon, Random.TaskLocalRNG}, Flux.Conv{2, 4, var"#σ#243", Array{Float32, 4}, Vector{Float32}}, var"#238#251"}}}}, Flux.Dense{var"#σ#243", Matrix{Float32}, Vector{Float32}}, Flux.Dense{typeof(identity), Matrix{Float32}, Vector{Float32}}}}; exclude::typeof(Flux._isleaf), 
walk::typeof(Functors._default_walk), cache::IdDict{Any, Any}, prune::Functors.NoKeyword)
    @ Functors ~/.julia/packages/Functors/qBIlC/src/functor.jl:50
 [34] gpu
    @ ~/.julia/packages/Flux/js6mP/src/functor.jl:182 [inlined]
 [35] |>(x::Flux.Chain{Tuple{Flux.Parallel{var"#240#253", Tuple{Flux.Chain{Tuple{var"#tr_nil_f#246"}}, Flux.Chain{Tuple{var"#coce#247"{Vector{Bool}}, Flux.Conv{2, 2, var"#σ#243", Array{Float32, 4}, Vector{Float32}}, Flux.Conv{2, 2, typeof(identity), Array{Float32, 4}, Bool}, Flux.BatchNorm{var"#σ#243", Vector{Float32}, Float32, Vector{Float32}}, Flux.Dropout{Float64, Colon, Random.TaskLocalRNG}, Flux.Conv{2, 2, var"#σ#243", Array{Float32, 4}, Vector{Float32}}, Flux.Conv{2, 2, typeof(identity), Array{Float32, 4}, Bool}, Flux.BatchNorm{var"#σ#243", Vector{Float32}, Float32, Vector{Float32}}, Flux.Dropout{Float64, Colon, Random.TaskLocalRNG}, Flux.Conv{2, 2, var"#σ#243", Array{Float32, 4}, Vector{Float32}}, Flux.Conv{2, 2, typeof(identity), Array{Float32, 4}, Bool}, Flux.BatchNorm{var"#σ#243", Vector{Float32}, Float32, Vector{Float32}}, Flux.Dropout{Float64, Colon, Random.TaskLocalRNG}, Flux.Conv{2, 2, var"#σ#243", Array{Float32, 4}, Vector{Float32}}, Flux.Conv{2, 2, typeof(identity), Array{Float32, 4}, Bool}, Flux.BatchNorm{var"#σ#243", Vector{Float32}, Float32, Vector{Float32}}, Flux.Dropout{Float64, Colon, Random.TaskLocalRNG}, Flux.Conv{2, 2, var"#σ#243", Array{Float32, 4}, Vector{Float32}}, Flux.Conv{2, 2, typeof(identity), Array{Float32, 4}, Bool}, Flux.BatchNorm{var"#σ#243", Vector{Float32}, Float32, Vector{Float32}}, Flux.Dropout{Float64, Colon, Random.TaskLocalRNG}, Flux.Conv{2, 4, var"#σ#243", Array{Float32, 4}, Vector{Float32}}, Flux.Conv{2, 4, typeof(identity), Array{Float32, 4}, Bool}, Flux.BatchNorm{var"#σ#243", Vector{Float32}, Float32, Vector{Float32}}, Flux.Dropout{Float64, Colon, Random.TaskLocalRNG}, Flux.Conv{2, 4, var"#σ#243", Array{Float32, 4}, Vector{Float32}}, var"#236#248"}}, Flux.Chain{Tuple{var"#sace#250"{Vector{Bool}}, Flux.Conv{2, 2, var"#σ#243", Array{Float32, 4}, Vector{Float32}}, Flux.Conv{2, 2, typeof(identity), Array{Float32, 4}, Bool}, Flux.BatchNorm{var"#σ#243", Vector{Float32}, Float32, Vector{Float32}}, Flux.Dropout{Float64, Colon, Random.TaskLocalRNG}, Flux.Conv{2, 2, var"#σ#243", Array{Float32, 4}, Vector{Float32}}, Flux.Conv{2, 2, typeof(identity), Array{Float32, 4}, Bool}, Flux.BatchNorm{var"#σ#243", Vector{Float32}, Float32, Vector{Float32}}, Flux.Dropout{Float64, Colon, Random.TaskLocalRNG}, Flux.Conv{2, 2, var"#σ#243", Array{Float32, 4}, Vector{Float32}}, Flux.Conv{2, 2, typeof(identity), Array{Float32, 4}, Bool}, Flux.BatchNorm{var"#σ#243", Vector{Float32}, Float32, Vector{Float32}}, Flux.Dropout{Float64, Colon, Random.TaskLocalRNG}, Flux.Conv{2, 2, var"#σ#243", Array{Float32, 4}, Vector{Float32}}, Flux.Conv{2, 2, typeof(identity), Array{Float32, 4}, Bool}, Flux.BatchNorm{var"#σ#243", Vector{Float32}, Float32, Vector{Float32}}, Flux.Dropout{Float64, Colon, Random.TaskLocalRNG}, Flux.Conv{2, 2, var"#σ#243", Array{Float32, 4}, Vector{Float32}}, Flux.Conv{2, 2, typeof(identity), Array{Float32, 4}, Bool}, Flux.BatchNorm{var"#σ#243", Vector{Float32}, Float32, Vector{Float32}}, Flux.Dropout{Float64, Colon, Random.TaskLocalRNG}, Flux.Conv{2, 4, var"#σ#243", Array{Float32, 4}, Vector{Float32}}, Flux.Conv{2, 4, typeof(identity), Array{Float32, 4}, Bool}, Flux.BatchNorm{var"#σ#243", Vector{Float32}, Float32, Vector{Float32}}, Flux.Dropout{Float64, Colon, Random.TaskLocalRNG}, Flux.Conv{2, 4, var"#σ#243", Array{Float32, 4}, Vector{Float32}}, var"#238#251"}}}}, Flux.Dense{var"#σ#243", Matrix{Float32}, Vector{Float32}}, Flux.Dense{typeof(identity), Matrix{Float32}, Vector{Float32}}}}, f::typeof(Flux.gpu))
    @ Base ./operators.jl:911                                                                                                               
 [36] train_and_evaluate(mfn::String, arguments::Vector{Int64}, modifications::Tuple{}, hyperparameters::Dict{Any, Any}; identifier::String)
    @ Main ~/3Dto2D/v2/proj_net_v2_0.jl:286
 [37] runfromfile(fn::String)
    @ Main ~/3Dto2D/v2/proj_net_v2_0.jl:448
 [38] top-level scope
    @ ~/3Dto2D/v2/proj_net_v2_0.jl:474
Exit Code 1                                                   

I assume the LoadError is secondary to the ArgumentError (well, I'm not certain of that, it's just my reading of the output).
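My reasoning, as a minimal illustrative sketch (the file name is the one from my trace; the try/catch is not in my actual code): when a file is run via include or as a script, whatever is thrown inside it gets wrapped in a LoadError, so the ArgumentError should be the real failure underneath.

```julia
# LoadError is just the wrapper added around the exception thrown inside the
# included file; the ArgumentError should be the underlying error.
try
    include("proj_net_v2_0.jl")        # my script, as in the trace
catch e
    e isa LoadError && @show e.error   # unwraps to the real exception
end
```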

I suspect that a closure is not being handled correctly during the recursive walk (fmap) through the Flux model.
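To make the suspicion concrete, here is a minimal sketch of the kind of construction I mean. The names mask and MaskLayer are my own inventions, and the closure is only a guess at what the var"#coce#247"{Vector{Bool}} in the trace corresponds to; this is not my actual model.

```julia
using Flux

# Hypothetical reconstruction: an anonymous function closing over a
# Vector{Bool}, used as a layer inside a Chain. My hypothesis is that the
# recursive walk has trouble with the array captured by the closure.
mask = rand(Bool, 4)
coce = x -> x .* mask            # closure over `mask`, like var"#coce#247"{Vector{Bool}}

m = Chain(coce, Dense(4, 2))
# m |> gpu                       # roughly where my call fails (if the guess is right)

# One possible workaround, assuming that diagnosis: replace the closure with
# a small callable struct and register it with @functor, so the walk can see
# (and move) the captured array explicitly.
struct MaskLayer{M}
    mask::M
end
(l::MaskLayer)(x) = x .* l.mask
Flux.@functor MaskLayer

m2 = Chain(MaskLayer(rand(Bool, 4)), Dense(4, 2))
# m2 |> gpu                      # the mask is now part of the functor tree
```

Again, this is only a sketch of the pattern I think is involved, not a claim that it reproduces the exact error.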

Does anything jump out on visual inspection?