Is this a Flux bug, or have I done something wrong?
using Flux

# A single-unit recurrent layer with an identity activation.
simple_rnn = Flux.RNN(1, 1, x -> x)
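For context, `Flux.RNN` returns a stateful `Recur` wrapper: each call consumes one timestep and advances the hidden state, and `Flux.reset!` restores the initial state. A quick single-step check (the input value is arbitrary, not from the original code):

simple_rnn(Float32[0.5])   # one timestep in, a length-1 Float32 output out
Flux.reset!(simple_rnn)    # restore the initial hidden state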
###
function eval_model(x)
    # Turn each sequence into a vector of 1-element Float32 inputs, one per timestep.
    ax = [Vector{Float32}[[j] for j in i] for i in x]
    out = map(ax) do v
        Flux.reset!(simple_rnn)                  # fresh hidden state for each sequence
        a = [simple_rnn(xi) for xi in v][end]    # run the sequence, keep only the final output
        reduce(vcat, a)
    end
    out
end
eval_model(data)   # sanity check on the training data

loss(x, y) = abs(sum(eval_model(x) .- y))

loss(data, labels)    # one scalar over the whole dataset
loss.(data, labels)   # broadcast: one scalar per (sequence, label) pair
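To make the shapes concrete, here is a hypothetical toy input (names and values are mine, not from the original code) matching what `eval_model` expects, integer sequences with one integer label each:

toy_data   = [[1, 2, 3], [4, 5, 6]]
toy_labels = [6, 15]

eval_model(toy_data)          # Vector{Float32}: one prediction per sequence
loss(toy_data, toy_labels)    # a single scalar for the whole set
loss.(toy_data, toy_labels)   # per pair; each Int is then treated as its own length-1 sequence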
### Reference: [A Basic RNN](https://learningjulia.com/2019/10/11/a-basic-rnn.html)
ps = Flux.params(simple_rnn)
opt = Flux.ADAM()

println("Training loss before = ", sum(loss.(data, labels)))
#println("Test loss before = ", sum(loss.(test_data, test_labels)))

# Callback to report the training loss during training.
evalcb() = @show(sum(loss.(data, labels)))

using Flux: @epochs
@epochs num_epochs Flux.train!(loss, ps, [(data, labels)], opt, cb = Flux.throttle(evalcb, 1))
[ Info: Epoch 1
ERROR: MethodError: no method matching getindex(::Base.Iterators.Zip{Tuple{Vector{Vector{Int64}}, Vector{Int64}}}, ::UnitRange{Int64})
Stacktrace:
[1] getobs(data::Base.Iterators.Zip{Tuple{Vector{Vector{Int64}}, Vector{Int64}}}, idx::UnitRange{Int64})
@ MLUtils C:\Users\Hermesr\.julia\packages\MLUtils\6niiD\src\observation.jl:49
[2] (::MLUtils.var"#7#8"{UnitRange{Int64}})(x::Base.Iterators.Zip{Tuple{Vector{Vector{Int64}}, Vector{Int64}}})
@ MLUtils C:\Users\Hermesr\.julia\packages\MLUtils\6niiD\src\observation.jl:136
[3] map
@ .\tuple.jl:222 [inlined]
[4] getobs(tup::Tuple{Base.Iterators.Zip{Tuple{Vector{Vector{Int64}}, Vector{Int64}}}, Vector{Int64}}, indices::UnitRange{Int64})
@ MLUtils C:\Users\Hermesr\.julia\packages\MLUtils\6niiD\src\observation.jl:136
[5] getobs(subset::MLUtils.ObsView{Tuple{Base.Iterators.Zip{Tuple{Vector{Vector{Int64}}, Vector{Int64}}}, Vector{Int64}}, UnitRange{Int64}}, idx::UnitRange{Int64})
@ MLUtils C:\Users\Hermesr\.julia\packages\MLUtils\6niiD\src\obsview.jl:187
[6] _getbatch(A::MLUtils.BatchView{Union{}, MLUtils.ObsView{Tuple{Base.Iterators.Zip{Tuple{Vector{Vector{Int64}}, Vector{Int64}}}, Vector{Int64}}, UnitRange{Int64}}, Val{nothing}}, obsindices::UnitRange{Int64})
@ MLUtils C:\Users\Hermesr\.julia\packages\MLUtils\6niiD\src\batchview.jl:147
[7] getindex(A::MLUtils.BatchView{Union{}, MLUtils.ObsView{Tuple{Base.Iterators.Zip{Tuple{Vector{Vector{Int64}}, Vector{Int64}}}, Vector{Int64}}, UnitRange{Int64}}, Val{nothing}}, i::Int64)
@ MLUtils C:\Users\Hermesr\.julia\packages\MLUtils\6niiD\src\batchview.jl:132
[8] getobs(data::MLUtils.BatchView{Union{}, MLUtils.ObsView{Tuple{Base.Iterators.Zip{Tuple{Vector{Vector{Int64}}, Vector{Int64}}}, Vector{Int64}}, UnitRange{Int64}}, Val{nothing}}, idx::Int64)
@ MLUtils C:\Users\Hermesr\.julia\packages\MLUtils\6niiD\src\observation.jl:49
[9] (::MLUtils.var"#38#40")(i::Int64)
@ MLUtils .\none:0
[10] iterate(::Base.Generator{UnitRange{Int64}, MLUtils.var"#38#40"})
@ Base .\generator.jl:47
[11] iterate(e::MLUtils.DataLoader{Tuple{Base.Iterators.Zip{Tuple{Vector{Vector{Int64}}, Vector{Int64}}}, Vector{Int64}}, Random._GLOBAL_RNG, Val{nothing}})
@ MLUtils C:\Users\Hermesr\.julia\packages\MLUtils\6niiD\src\eachobs.jl:173
[12] iterate
@ .\generator.jl:44 [inlined]
[13] collect(itr::Base.Generator{MLUtils.DataLoader{Tuple{Base.Iterators.Zip{Tuple{Vector{Vector{Int64}}, Vector{Int64}}}, Vector{Int64}}, Random._GLOBAL_RNG, Val{nothing}}, Zygote.var"#543#547"{Zygote.Context, var"#33#36"}})
@ Base .\array.jl:724
[14] map
@ .\abstractarray.jl:2896 [inlined]
[15] ∇map(cx::Zygote.Context, f::var"#33#36", args::MLUtils.DataLoader{Tuple{Base.Iterators.Zip{Tuple{Vector{Vector{Int64}}, Vector{Int64}}}, Vector{Int64}}, Random._GLOBAL_RNG, Val{nothing}})
@ Zygote C:\Users\Hermesr\.julia\packages\Zygote\DkIUK\src\lib\array.jl:184
[16] _pullback(cx::Zygote.Context, #unused#::typeof(collect), g::Base.Generator{MLUtils.DataLoader{Tuple{Base.Iterators.Zip{Tuple{Vector{Vector{Int64}}, Vector{Int64}}}, Vector{Int64}}, Random._GLOBAL_RNG, Val{nothing}}, var"#33#36"})
@ Zygote C:\Users\Hermesr\.julia\packages\Zygote\DkIUK\src\lib\array.jl:231
[17] _pullback
@ c:\projects\Julia Flux\Evaluate simple RNN in Julia Flux.jl:56 [inlined]
[18] _pullback(ctx::Zygote.Context, f::typeof(eval_model), args::MLUtils.DataLoader{Tuple{Base.Iterators.Zip{Tuple{Vector{Vector{Int64}}, Vector{Int64}}}, Vector{Int64}}, Random._GLOBAL_RNG, Val{nothing}})
@ Zygote C:\Users\Hermesr\.julia\packages\Zygote\DkIUK\src\compiler\interface2.jl:0
[19] _pullback
@ c:\projects\Julia Flux\Evaluate simple RNN in Julia Flux.jl:65 [inlined]
[20] _pullback(::Zygote.Context, ::typeof(loss), ::MLUtils.DataLoader{Tuple{Base.Iterators.Zip{Tuple{Vector{Vector{Int64}}, Vector{Int64}}}, Vector{Int64}}, Random._GLOBAL_RNG, Val{nothing}}, ::Vector{Int64})
@ Zygote C:\Users\Hermesr\.julia\packages\Zygote\DkIUK\src\compiler\interface2.jl:0
[21] _apply(::Function, ::Vararg{Any})
@ Core .\boot.jl:814
[22] adjoint
@ C:\Users\Hermesr\.julia\packages\Zygote\DkIUK\src\lib\lib.jl:204 [inlined]
[23] _pullback
@ C:\Users\Hermesr\.julia\packages\ZygoteRules\AIbCs\src\adjoint.jl:65 [inlined]
[24] _pullback
@ C:\Users\Hermesr\.julia\packages\Flux\js6mP\src\optimise\train.jl:120 [inlined]
[25] _pullback(::Zygote.Context, ::Flux.Optimise.var"#37#40"{typeof(loss), Tuple{MLUtils.DataLoader{Tuple{Base.Iterators.Zip{Tuple{Vector{Vector{Int64}}, Vector{Int64}}}, Vector{Int64}}, Random._GLOBAL_RNG, Val{nothing}}, Vector{Int64}}})
@ Zygote C:\Users\Hermesr\.julia\packages\Zygote\DkIUK\src\compiler\interface2.jl:0
[26] pullback(f::Function, ps::Zygote.Params{Zygote.Buffer{Any, Vector{Any}}})
@ Zygote C:\Users\Hermesr\.julia\packages\Zygote\DkIUK\src\compiler\interface.jl:352
[27] gradient(f::Function, args::Zygote.Params{Zygote.Buffer{Any, Vector{Any}}})
@ Zygote C:\Users\Hermesr\.julia\packages\Zygote\DkIUK\src\compiler\interface.jl:75
[28] macro expansion
@ C:\Users\Hermesr\.julia\packages\Flux\js6mP\src\optimise\train.jl:119 [inlined]
[29] macro expansion
@ C:\Users\Hermesr\.julia\packages\ProgressLogging\6KXlp\src\ProgressLogging.jl:328 [inlined]
[30] train!(loss::Function, ps::Zygote.Params{Zygote.Buffer{Any, Vector{Any}}}, data::Vector{Tuple{MLUtils.DataLoader{Tuple{Base.Iterators.Zip{Tuple{Vector{Vector{Int64}}, Vector{Int64}}}, Vector{Int64}}, Random._GLOBAL_RNG, Val{nothing}}, Vector{Int64}}}, opt::ADAM; cb::Flux.var"#throttled#122"{Flux.var"#throttled#118#123"{Bool, Bool, typeof(evalcb), Int64}})
@ Flux.Optimise C:\Users\Hermesr\.julia\packages\Flux\js6mP\src\optimise\train.jl:117
[31] macro expansion
@ C:\Users\Hermesr\.julia\packages\Flux\js6mP\src\optimise\train.jl:155 [inlined]
[32] top-level scope
@ C:\Users\Hermesr\.julia\packages\ProgressLogging\6KXlp\src\ProgressLogging.jl:470
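Reading the innermost frame, `getobs` falls back to `getindex` on a `Base.Iterators.Zip`, and a lazy `zip` iterator has no `getindex` method, so whatever MLUtils needs to slice (in particular, the data inside a `DataLoader`) must be a random-access container. A minimal reproduction of just that step, independent of Flux:

z = zip([[1, 2], [3, 4]], [1, 2])
z[1:2]       # ERROR: MethodError: no method matching getindex(::Base.Iterators.Zip{...}, ::UnitRange{Int64})
collect(z)   # an indexable Vector of (sequence, label) tuples, which getobs can slice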
###
data2 = zip(data, labels)                                        # a lazy iterator: not indexable
data1 = Flux.DataLoader((data, labels), batchsize = num_epochs)  # tuple of parallel arrays
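For what it's worth, a sketch of the container forms that do support `getobs`, assuming `data::Vector{Vector{Int64}}` and `labels::Vector{Int64}` as in the trace: either collect the zip into an indexable vector of pairs, or hand `DataLoader` the tuple of parallel arrays and pass the loader itself as the data argument of `train!`, which splats each batch into `loss(x, y)`:

obs_pairs = collect(zip(data, labels))   # indexable Vector of (sequence, label) tuples
loader    = Flux.DataLoader((data, labels), batchsize = 2, shuffle = true)
@epochs num_epochs Flux.train!(loss, ps, loader, opt, cb = Flux.throttle(evalcb, 1))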