Hi, I’m new to using Flux and I keep getting a data type error when running the following code:
using LinearAlgebra, Flux, Base.Iterators, Statistics
using CuArrays
using Flux: onehotbatch, onecold, crossentropy, throttle, Tracker
trainX = Tracker.data(rand(19,348)) |> gpu
trainY = Tracker.data(rand(19,348)) |> gpu
m = Chain(
    Dense(19, 32),
    # LSTM(32, 32),
    Dense(32, 19),
    softmax) |> gpu
loss(x, y) = Flux.mse(m(x), y)
accuracy(x, y) = mean(onecold(m(x)) .== onecold(y))
dataset = zip(trainX, trainY)
evalcb = () -> @show(loss(trainX, trainY))
opt = ADAM()
Flux.train!(loss, params(m), dataset, opt)
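For reference, trainX and trainY both come out as 19×348 matrices (19 features × 348 samples), and judging from the Zip{Tuple{Array{Float64,2},Array{Float64,2}}} in the trace below, the |> gpu call seems to be a no-op on my machine:

size(trainX)    # (19, 348)
eltype(trainX)  # Float64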
Running train! results in:
MethodError: no method matching (::Dense{typeof(identity),TrackedArray{…,Array{Float32,2}},TrackedArray{…,Array{Float32,1}}})(::Float64)
Closest candidates are:
Dense(!Matched::AbstractArray{T<:Union{Float32, Float64},N} where N) where {T<:Union{Float32, Float64}, W<:(AbstractArray{T,N} where N)} at /home/m_tucci/.julia/packages/Flux/dkJUV/src/layers/basic.jl:110
Dense(!Matched::AbstractArray{#s107,N} where N where #s107<:AbstractFloat) where {T<:Union{Float32, Float64}, W<:(AbstractArray{T,N} where N)} at /home/m_tucci/.julia/packages/Flux/dkJUV/src/layers/basic.jl:113
Dense(!Matched::AbstractArray) at /home/m_tucci/.julia/packages/Flux/dkJUV/src/layers/basic.jl:98
Stacktrace:
applychain(::Tuple{Dense{typeof(identity),TrackedArray{…,Array{Float32,2}},TrackedArray{…,Array{Float32,1}}},Dense{typeof(identity),TrackedArray{…,Array{Float32,2}},TrackedArray{…,Array{Float32,1}}},typeof(softmax)}, ::Float64) at basic.jl:31
(::Chain{Tuple{Dense{typeof(identity),TrackedArray{…,Array{Float32,2}},TrackedArray{…,Array{Float32,1}}},Dense{typeof(identity),TrackedArray{…,Array{Float32,2}},TrackedArray{…,Array{Float32,1}}},typeof(softmax)}})(::Float64) at basic.jl:33
loss(::Float64, ::Float64) at Player_Flux.jl:45
#14 at train.jl:72 [inlined]
gradient_(::getfield(Flux.Optimise, Symbol("##14#20")){typeof(loss),Tuple{Float64,Float64}}, ::Tracker.Params) at back.jl:97
#gradient#24(::Bool, ::typeof(Tracker.gradient), ::Function, ::Tracker.Params) at back.jl:164
gradient at back.jl:164 [inlined]
macro expansion at train.jl:71 [inlined]
macro expansion at progress.jl:119 [inlined]
#train!#12(::getfield(Flux.Optimise, Symbol("##16#22")), ::typeof(Flux.Optimise.train!), ::Function, ::Tracker.Params, ::Base.Iterators.Zip{Tuple{Array{Float64,2},Array{Float64,2}}}, ::ADAM) at train.jl:69
train!(::Function, ::Tracker.Params, ::Base.Iterators.Zip{Tuple{Array{Float64,2},Array{Float64,2}}}, ::ADAM) at train.jl:67
top-level scope at Player_Flux.jl:53
include_string(::Module, ::String, ::String, ::Int64) at eval.jl:30
(::getfield(Atom, Symbol("##124#129")){String,Int64,String})() at eval.jl:94
withpath(::getfield(Atom, Symbol("##124#129")){String,Int64,String}, ::String) at utils.jl:30
withpath at eval.jl:46 [inlined]
#123 at eval.jl:93 [inlined]
with_logstate(::getfield(Atom, Symbol("##123#128")){String,Int64,String}, ::Base.CoreLogging.LogState) at logging.jl:395
with_logger at logging.jl:491 [inlined]
#122 at eval.jl:92 [inlined]
hideprompt(::getfield(Atom, Symbol("##122#127")){String,Int64,String}) at repl.jl:77
macro expansion at eval.jl:91 [inlined]
macro expansion at dynamic.jl:24 [inlined]
(::getfield(Atom, Symbol("##121#126")))(::Dict{String,Any}) at eval.jl:86
handlemsg(::Dict{String,Any}, ::Dict{String,Any}) at comm.jl:164
(::getfield(Atom, Symbol("##19#21")){Array{Any,1}})() at task.jl:268
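If I'm reading the trace right, the model is being called with a single Float64, and the loss(::Float64, ::Float64) frame suggests train! is handing loss one pair of scalars at a time rather than the matrices. I can reproduce that directly:

first(zip(trainX, trainY))  # gives (trainX[1], trainY[1]), a pair of scalars, since zip iterates the matrices element-by-element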
I feel like I'm missing something obvious here, but I'm too new to Flux to spot it. I found another thread that looked similar and suggested wrapping the training data in Tracker.data(), but as the code above shows, that didn't help.
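Is the real problem how I'm building dataset? From the docs, train! seems to expect an iterable of (input, target) tuples, so I'm guessing it should be something like this (the 200 passes below are just a placeholder):

dataset = Iterators.repeated((trainX, trainY), 200)  # treat the whole matrix pair as one batch, repeated for 200 passes
# or, for a single pass:
# dataset = [(trainX, trainY)]
# (I suspect I may also need Float32 data to match the Dense weights, e.g. rand(Float32, 19, 348))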
Any suggestions?