Evaluate variable-length input array RNN in Julia Flux

How do I create a vector of variable-length sequences and evaluate an untrained RNN on it? Following the idea developed in these threads [link1, link2], the evaluation works for a single sequence;

using Flux

# RNN with 1 input feature, 1 output unit, and identity activation
simple_rnn = Flux.RNN(1, 1, (x -> x))

# simple_rnn.([1, 2, 3])  # fails: each timestep must be a vector, not a scalar

# a sequence is a vector of timesteps, each a 1-element Float32 vector
v = Vector{Vector{Float32}}([[1], [2], [3]])

simple_rnn.(v)

v = [2, 4, 5, 6]

vv = [Vector{Float32}([i]) for i in v]

simple_rnn.(vv)

seq = [Vector{Float32}([i]) for i in rand(10)]

simple_rnn.(seq)
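Note that the layer returned here is a Flux.Recur, which keeps its hidden state between calls, so two passes over the same sequence only agree if the state is reset in between; a minimal check with Flux.reset! (which reappears below):

Flux.reset!(simple_rnn)
y1 = [simple_rnn(x) for x in seq]   # each step updates the hidden state
Flux.reset!(simple_rnn)
y2 = [simple_rnn(x) for x in seq]
y1 == y2                            # true only because of the reset in between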

but for a collection of sequences of variable length, I get the following error:

julia> b=[[1, 2, 3],[2,4,5,6],[6.0, 7.0, 7.0, 10.0, 7.0, 8.0]]
3-element Vector{Vector{Float64}}:
[1.0, 2.0, 3.0]
[2.0, 4.0, 5.0, 6.0]
[6.0, 7.0, 7.0, 10.0, 7.0, 8.0]

julia> c=[[Vector{Float32}([j]) for j in i] for i in b]
3-element Vector{Vector{Vector{Float32}}}:
[[1.0], [2.0], [3.0]]
[[2.0], [4.0], [5.0], [6.0]]
[[6.0], [7.0], [7.0], [10.0], [7.0], [8.0]]

simple_rnn.(c)
ERROR: MethodError: no method matching (::Flux.RNNCell{var"#5#6", Matrix{Float32}, Vector{Float32}, Matrix{Float32}})(::Matrix{Float32}, ::Vector{Vector{Float32}})
Closest candidates are:
 (::Flux.RNNCell{F, A, V, <:AbstractMatrix{T}})(::Any, ::Union{AbstractMatrix{T}, AbstractVector{T}, Flux.OneHotArray}) where {F, A, V, T} at C:\Users\Hermesr\.julia\packages\Flux\js6mP\src\layers\recurrent.jl:138
Stacktrace:
[1] (::Flux.Recur{Flux.RNNCell{var"#5#6", Matrix{Float32}, Vector{Float32}, Matrix{Float32}}, Matrix{Float32}})(x::Vector{Vector{Float32}})
. . .

[7] top-level scope
 @ REPL[24]:1
[8] top-level scope
 @ C:\Users\Hermesr\.julia\packages\CUDA\qAl31\src\initialization.jl:52

Is a solution possible by correctly interpreting the error shown by Julia? I would be very grateful if you could enlighten me!
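Broadcasting simple_rnn.(c) passes each whole sequence (a Vector{Vector{Float32}}) to the RNN in a single call, but the "closest candidates" line in the error shows that the RNNCell only accepts one timestep at a time, as an AbstractVector or AbstractMatrix. Map over the sequences instead and feed the timesteps one by one: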

julia> map(c) do v
         reset!(simple_rnn)
         simple_rnn.(v)
       end
┌ Warning: Broadcasting is not safe to use with RNNs, as it does not guarantee an iteration order.
│ Re-writing this as a comprehension would be better.
│   caller = (::var"#21#22")(v::Vector{Vector{Float32}}) at REPL[46]:3
└ @ Main ./REPL[46]:3
3-element Vector{Vector{Vector{Float32}}}:
 [[-0.32932267], [-1.0383314], [-2.1850915]]
 [[-0.65864533], [-2.0766628], [-4.04086], [-6.6347656]]
 [[-1.9759359], [-4.583375], [-7.58957], [-12.04347], [-16.190538], [-21.30114]]

I suppose you want the reset! here so that state does not carry over between sequences.

Note the warning though: the broadcast simple_rnn.(v) should really be written as [simple_rnn(x) for x in v] to guarantee the iteration order.

using Flux
simple_rnn = Flux.RNN(1, 1, (x -> x))

b = [[1, 2, 3], [2, 4, 5, 6], [6.0, 7.0, 7.0, 10.0, 7.0, 8.0]]

c = [[Vector{Float32}([j]) for j in i] for i in b]

# broadcast version (triggers the iteration-order warning)
map(c) do v
    Flux.reset!(simple_rnn)
    simple_rnn.(v)
end

# comprehension version, with a guaranteed order
map(c) do v
    Flux.reset!(simple_rnn)
    [simple_rnn(x) for x in v]
end
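If only one prediction per sequence is wanted (as in the eval_model further down), keep just the last step's output; a small sketch of the same map:

map(c) do v
    Flux.reset!(simple_rnn)
    last([simple_rnn(x) for x in v])   # final output after the whole sequence
end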

Why do I have to write Flux.reset! in the code, and not just reset!, when Flux has already been loaded at the beginning? The same happens to me with Flux.params, etc.

Because Flux doesn’t export these symbols. If you want them in the importing namespace, do

using Flux: reset!, params, [...]
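After that, the unqualified names work directly; for example:

using Flux: reset!, params

reset!(simple_rnn)        # no Flux. prefix needed anymore
ps = params(simple_rnn)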

Is this a Flux bug?
Or have I not done something right?

using Flux
simple_rnn = Flux.RNN(1, 1, (x -> x))
###
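The data and labels used below are never defined in the post; hypothetical toy values (plus the num_epochs needed by @epochs further down) make the snippet runnable and are assumptions for illustration only:

data = [[1, 2, 3], [2, 4, 5, 6], [6.0, 7.0, 7.0, 10.0, 7.0, 8.0]]  # variable-length sequences
labels = [3, 6, 8]   # one arbitrary target per sequence
num_epochs = 10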

function eval_model(x)
    # wrap every scalar timestep of every sequence as a 1-element Float32 vector
    ax = [Vector{Float32}[[j] for j in i] for i in x]
    out = map(ax) do v
        Flux.reset!(simple_rnn)
        # run the RNN over the sequence, keeping only the final output
        a = [simple_rnn(xi) for xi in v][end]
        reduce(vcat, a)
    end
    out
end
eval_model(data)
loss(x, y) = abs(sum(eval_model(x) .- y))
loss(data, labels)
loss.(data, labels)

### [A Basic RNN](https://learningjulia.com/2019/10/11/a-basic-rnn.html)
ps = Flux.params(simple_rnn)
opt = Flux.ADAM()

println("Training loss before = ", sum(loss.(data, labels)))
#println("Test loss before = ", sum(loss.(test_data, test_labels)))

# callback function during training
evalcb() = @show(sum(loss.(data, labels)))
using Flux: @epochs
@epochs num_epochs Flux.train!(loss, ps, [(data, labels)], opt, cb = Flux.throttle(evalcb, 1))

[ Info: Epoch 1
ERROR: MethodError: no method matching getindex(::Base.Iterators.Zip{Tuple{Vector{Vector{Int64}}, Vector{Int64}}}, ::UnitRange{Int64})
Stacktrace:
  [1] getobs(data::Base.Iterators.Zip{Tuple{Vector{Vector{Int64}}, Vector{Int64}}}, idx::UnitRange{Int64})
    @ MLUtils C:\Users\Hermesr\.julia\packages\MLUtils\6niiD\src\observation.jl:49
  [2] (::MLUtils.var"#7#8"{UnitRange{Int64}})(x::Base.Iterators.Zip{Tuple{Vector{Vector{Int64}}, Vector{Int64}}})
    @ MLUtils C:\Users\Hermesr\.julia\packages\MLUtils\6niiD\src\observation.jl:136
  [3] map
    @ .\tuple.jl:222 [inlined]
  [4] getobs(tup::Tuple{Base.Iterators.Zip{Tuple{Vector{Vector{Int64}}, Vector{Int64}}}, Vector{Int64}}, indices::UnitRange{Int64})
    @ MLUtils C:\Users\Hermesr\.julia\packages\MLUtils\6niiD\src\observation.jl:136
  [5] getobs(subset::MLUtils.ObsView{Tuple{Base.Iterators.Zip{Tuple{Vector{Vector{Int64}}, Vector{Int64}}}, Vector{Int64}}, UnitRange{Int64}}, idx::UnitRange{Int64})
    @ MLUtils C:\Users\Hermesr\.julia\packages\MLUtils\6niiD\src\obsview.jl:187
  [6] _getbatch(A::MLUtils.BatchView{Union{}, MLUtils.ObsView{Tuple{Base.Iterators.Zip{Tuple{Vector{Vector{Int64}}, Vector{Int64}}}, Vector{Int64}}, UnitRange{Int64}}, Val{nothing}}, obsindices::UnitRange{Int64})
    @ MLUtils C:\Users\Hermesr\.julia\packages\MLUtils\6niiD\src\batchview.jl:147
  [7] getindex(A::MLUtils.BatchView{Union{}, MLUtils.ObsView{Tuple{Base.Iterators.Zip{Tuple{Vector{Vector{Int64}}, Vector{Int64}}}, Vector{Int64}}, UnitRange{Int64}}, Val{nothing}}, i::Int64)
    @ MLUtils C:\Users\Hermesr\.julia\packages\MLUtils\6niiD\src\batchview.jl:132
  [8] getobs(data::MLUtils.BatchView{Union{}, MLUtils.ObsView{Tuple{Base.Iterators.Zip{Tuple{Vector{Vector{Int64}}, Vector{Int64}}}, Vector{Int64}}, UnitRange{Int64}}, Val{nothing}}, idx::Int64)
    @ MLUtils C:\Users\Hermesr\.julia\packages\MLUtils\6niiD\src\observation.jl:49
  [9] (::MLUtils.var"#38#40")(i::Int64)
    @ MLUtils .\none:0
 [10] iterate(::Base.Generator{UnitRange{Int64}, MLUtils.var"#38#40"})
    @ Base .\generator.jl:47
 [11] iterate(e::MLUtils.DataLoader{Tuple{Base.Iterators.Zip{Tuple{Vector{Vector{Int64}}, Vector{Int64}}}, Vector{Int64}}, Random._GLOBAL_RNG, Val{nothing}})
    @ MLUtils C:\Users\Hermesr\.julia\packages\MLUtils\6niiD\src\eachobs.jl:173
 [12] iterate
    @ .\generator.jl:44 [inlined]
 [13] collect(itr::Base.Generator{MLUtils.DataLoader{Tuple{Base.Iterators.Zip{Tuple{Vector{Vector{Int64}}, Vector{Int64}}}, Vector{Int64}}, Random._GLOBAL_RNG, Val{nothing}}, Zygote.var"#543#547"{Zygote.Context, var"#33#36"}})
    @ Base .\array.jl:724
 [14] map
    @ .\abstractarray.jl:2896 [inlined]
 [15] ∇map(cx::Zygote.Context, f::var"#33#36", args::MLUtils.DataLoader{Tuple{Base.Iterators.Zip{Tuple{Vector{Vector{Int64}}, Vector{Int64}}}, Vector{Int64}}, Random._GLOBAL_RNG, Val{nothing}})
    @ Zygote C:\Users\Hermesr\.julia\packages\Zygote\DkIUK\src\lib\array.jl:184
 [16] _pullback(cx::Zygote.Context, #unused#::typeof(collect), g::Base.Generator{MLUtils.DataLoader{Tuple{Base.Iterators.Zip{Tuple{Vector{Vector{Int64}}, Vector{Int64}}}, Vector{Int64}}, Random._GLOBAL_RNG, Val{nothing}}, var"#33#36"})
    @ Zygote C:\Users\Hermesr\.julia\packages\Zygote\DkIUK\src\lib\array.jl:231
 [17] _pullback
    @ c:\projects\Julia Flux\Evaluate simple RNN in Julia Flux.jl:56 [inlined]
 [18] _pullback(ctx::Zygote.Context, f::typeof(eval_model), args::MLUtils.DataLoader{Tuple{Base.Iterators.Zip{Tuple{Vector{Vector{Int64}}, Vector{Int64}}}, Vector{Int64}}, Random._GLOBAL_RNG, Val{nothing}})
    @ Zygote C:\Users\Hermesr\.julia\packages\Zygote\DkIUK\src\compiler\interface2.jl:0
 [19] _pullback
    @ c:\projects\Julia Flux\Evaluate simple RNN in Julia Flux.jl:65 [inlined]
 [20] _pullback(::Zygote.Context, ::typeof(loss), ::MLUtils.DataLoader{Tuple{Base.Iterators.Zip{Tuple{Vector{Vector{Int64}}, Vector{Int64}}}, Vector{Int64}}, Random._GLOBAL_RNG, Val{nothing}}, ::Vector{Int64})
    @ Zygote C:\Users\Hermesr\.julia\packages\Zygote\DkIUK\src\compiler\interface2.jl:0
 [21] _apply(::Function, ::Vararg{Any})
    @ Core .\boot.jl:814
 [22] adjoint
    @ C:\Users\Hermesr\.julia\packages\Zygote\DkIUK\src\lib\lib.jl:204 [inlined]
 [23] _pullback
    @ C:\Users\Hermesr\.julia\packages\ZygoteRules\AIbCs\src\adjoint.jl:65 [inlined]
 [24] _pullback
    @ C:\Users\Hermesr\.julia\packages\Flux\js6mP\src\optimise\train.jl:120 [inlined]
 [25] _pullback(::Zygote.Context, ::Flux.Optimise.var"#37#40"{typeof(loss), Tuple{MLUtils.DataLoader{Tuple{Base.Iterators.Zip{Tuple{Vector{Vector{Int64}}, Vector{Int64}}}, Vector{Int64}}, Random._GLOBAL_RNG, Val{nothing}}, Vector{Int64}}})
    @ Zygote C:\Users\Hermesr\.julia\packages\Zygote\DkIUK\src\compiler\interface2.jl:0
 [26] pullback(f::Function, ps::Zygote.Params{Zygote.Buffer{Any, Vector{Any}}})
    @ Zygote C:\Users\Hermesr\.julia\packages\Zygote\DkIUK\src\compiler\interface.jl:352
 [27] gradient(f::Function, args::Zygote.Params{Zygote.Buffer{Any, Vector{Any}}})
    @ Zygote C:\Users\Hermesr\.julia\packages\Zygote\DkIUK\src\compiler\interface.jl:75
 [28] macro expansion
    @ C:\Users\Hermesr\.julia\packages\Flux\js6mP\src\optimise\train.jl:119 [inlined]
 [29] macro expansion
    @ C:\Users\Hermesr\.julia\packages\ProgressLogging\6KXlp\src\ProgressLogging.jl:328 [inlined]
 [30] train!(loss::Function, ps::Zygote.Params{Zygote.Buffer{Any, Vector{Any}}}, data::Vector{Tuple{MLUtils.DataLoader{Tuple{Base.Iterators.Zip{Tuple{Vector{Vector{Int64}}, Vector{Int64}}}, Vector{Int64}}, Random._GLOBAL_RNG, Val{nothing}}, Vector{Int64}}}, opt::ADAM; cb::Flux.var"#throttled#122"{Flux.var"#throttled#118#123"{Bool, Bool, typeof(evalcb), Int64}})
    @ Flux.Optimise C:\Users\Hermesr\.julia\packages\Flux\js6mP\src\optimise\train.jl:117
 [31] macro expansion
    @ C:\Users\Hermesr\.julia\packages\Flux\js6mP\src\optimise\train.jl:155 [inlined]
 [32] top-level scope
    @ C:\Users\Hermesr\.julia\packages\ProgressLogging\6KXlp\src\ProgressLogging.jl:470

###
data2 = zip(data, labels)                                       # a lazy iterator, not indexable
data1 = Flux.DataLoader((data, labels), batchsize=num_epochs)   # note: batchsize is observations per batch, not the epoch count
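The MethodError above comes from the zip: Base.Iterators.Zip is a lazy iterator with no getindex method, so MLUtils' getobs cannot index into it when the DataLoader draws a batch (the trace shows the DataLoader was built on top of the zip). Since loss(x, y) expects the whole collection of sequences plus their labels, passing the plain vectors in a one-element list, as in the train! call earlier, is enough; a minimal sketch, assuming data and labels as above:

# works: loss receives the raw vectors, which is what eval_model expects
Flux.train!(loss, ps, [(data, labels)], opt)

# if mini-batch iteration is wanted, materialize the pairs so they are indexable
train_pairs = collect(zip(data, labels))   # Vector of (sequence, label) tuples

Note that iterating train_pairs hands the loss one sequence at a time, so it would need a per-sequence variant of loss rather than the collection-level one above.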