LoadError: MethodError while training LSTM model in Julia

I’m trying to train an LSTM model to predict the number of real roots of polynomials. x_train and x_test are arrays of arrays such as [[-204, 20, 13, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]], where each inner array holds the coefficients of one polynomial. y_train and y_test hold the number of real roots of each polynomial, such as 1, 2, 5… I’m stuck on the error below. Here are my code and the error message; please help me. Thanks in advance!
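
For reference, a single (x, y) training pair looks like this when written out (assuming 101 coefficients per polynomial, as in the example above; the root count shown is just illustrative):

coeffs = Float32[-204, 20, 13, 1, zeros(Float32, 97)...]  # x sample: 101 polynomial coefficients
n_roots = 1.0f0                                           # y sample: number of real roots (illustrative)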

Error Message:

ERROR: LoadError: MethodError: no method matching (::Flux.LSTMCell{Matrix{Float32}, Vector{Float32}, Tuple{Matrix{Float32}, Matrix{Float32}}})(::Tuple{Matrix{Float32}, Matrix{Float32}}, ::Float32)
Closest candidates are:
  (::Flux.LSTMCell{A, V, <:Tuple{AbstractMatrix{T}, AbstractMatrix{T}}})(::Any, ::Union{AbstractVector{T}, AbstractMatrix{T}, Flux.OneHotArray}) where {A, V, T} at ~/.julia/packages/Flux/BPPNj/src/layers/recurrent.jl:157
Stacktrace:
  [1] macro expansion
    @ ~/.julia/packages/Zygote/umM0L/src/compiler/interface2.jl:0 [inlined]
  [2] _pullback(::Zygote.Context, ::Flux.LSTMCell{Matrix{Float32}, Vector{Float32}, Tuple{Matrix{Float32}, Matrix{Float32}}}, ::Tuple{Matrix{Float32}, Matrix{Float32}}, ::Float32)
    @ Zygote ~/.julia/packages/Zygote/umM0L/src/compiler/interface2.jl:9
  [3] _pullback
    @ ~/.julia/packages/Flux/BPPNj/src/layers/recurrent.jl:47 [inlined]
  [4] _pullback(ctx::Zygote.Context, f::Flux.Recur{Flux.LSTMCell{Matrix{Float32}, Vector{Float32}, Tuple{Matrix{Float32}, Matrix{Float32}}}, Tuple{Matrix{Float32}, Matrix{Float32}}}, args::Float32)
    @ Zygote ~/.julia/packages/Zygote/umM0L/src/compiler/interface2.jl:0
  [5] _pullback
    @ ~/.julia/packages/Flux/BPPNj/src/layers/basic.jl:47 [inlined]
  [6] _pullback(::Zygote.Context, ::typeof(Flux.applychain), ::Tuple{Flux.Recur{Flux.LSTMCell{Matrix{Float32}, Vector{Float32}, Tuple{Matrix{Float32}, Matrix{Float32}}}, Tuple{Matrix{Float32}, Matrix{Float32}}}, Dropout{Float64, Colon}, Flux.Recur{Flux.LSTMCell{Matrix{Float32}, Vector{Float32}, Tuple{Matrix{Float32}, Matrix{Float32}}}, Tuple{Matrix{Float32}, Matrix{Float32}}}, Dropout{Float64, Colon}, Dense{typeof(identity), Matrix{Float32}, Vector{Float32}}, Dropout{Float64, Colon}}, ::Float32)
    @ Zygote ~/.julia/packages/Zygote/umM0L/src/compiler/interface2.jl:0
  [7] _pullback
    @ ~/.julia/packages/Flux/BPPNj/src/layers/basic.jl:49 [inlined]
  [8] _pullback(ctx::Zygote.Context, f::Chain{Tuple{Flux.Recur{Flux.LSTMCell{Matrix{Float32}, Vector{Float32}, Tuple{Matrix{Float32}, Matrix{Float32}}}, Tuple{Matrix{Float32}, Matrix{Float32}}}, Dropout{Float64, Colon}, Flux.Recur{Flux.LSTMCell{Matrix{Float32}, Vector{Float32}, Tuple{Matrix{Float32}, Matrix{Float32}}}, Tuple{Matrix{Float32}, Matrix{Float32}}}, Dropout{Float64, Colon}, Dense{typeof(identity), Matrix{Float32}, Vector{Float32}}, Dropout{Float64, Colon}}}, args::Float32)
    @ Zygote ~/.julia/packages/Zygote/umM0L/src/compiler/interface2.jl:0
  [9] #1099
    @ ~/.julia/packages/Zygote/umM0L/src/lib/broadcast.jl:186 [inlined]
 [10] _broadcast_getindex_evalf
    @ ./broadcast.jl:670 [inlined]
 [11] _broadcast_getindex
    @ ./broadcast.jl:643 [inlined]
 [12] getindex
    @ ./broadcast.jl:597 [inlined]
 [13] copy
    @ ./broadcast.jl:899 [inlined]
 [14] materialize
    @ ./broadcast.jl:860 [inlined]
 [15] _broadcast(f::Zygote.var"#1099#1103"{Zygote.Context, Chain{Tuple{Flux.Recur{Flux.LSTMCell{Matrix{Float32}, Vector{Float32}, Tuple{Matrix{Float32}, Matrix{Float32}}}, Tuple{Matrix{Float32}, Matrix{Float32}}}, Dropout{Float64, Colon}, Flux.Recur{Flux.LSTMCell{Matrix{Float32}, Vector{Float32}, Tuple{Matrix{Float32}, Matrix{Float32}}}, Tuple{Matrix{Float32}, Matrix{Float32}}}, Dropout{Float64, Colon}, Dense{typeof(identity), Matrix{Float32}, Vector{Float32}}, Dropout{Float64, Colon}}}}, x::Vector{Float32})
    @ Zygote ~/.julia/packages/Zygote/umM0L/src/lib/broadcast.jl:163
 [16] adjoint
    @ ~/.julia/packages/Zygote/umM0L/src/lib/broadcast.jl:186 [inlined]
 [17] _pullback(__context__::Zygote.Context, 680::typeof(Base.Broadcast.broadcasted), 681::Base.Broadcast.DefaultArrayStyle{1}, f::Chain{Tuple{Flux.Recur{Flux.LSTMCell{Matrix{Float32}, Vector{Float32}, Tuple{Matrix{Float32}, Matrix{Float32}}}, Tuple{Matrix{Float32}, Matrix{Float32}}}, Dropout{Float64, Colon}, Flux.Recur{Flux.LSTMCell{Matrix{Float32}, Vector{Float32}, Tuple{Matrix{Float32}, Matrix{Float32}}}, Tuple{Matrix{Float32}, Matrix{Float32}}}, Dropout{Float64, Colon}, Dense{typeof(identity), Matrix{Float32}, Vector{Float32}}, Dropout{Float64, Colon}}}, args::Vector{Float32})
    @ Zygote ~/.julia/packages/ZygoteRules/AIbCs/src/adjoint.jl:65
 [18] _apply(::Function, ::Vararg{Any})
    @ Core ./boot.jl:814
 [19] adjoint
    @ ~/.julia/packages/Zygote/umM0L/src/lib/lib.jl:200 [inlined]
 [20] _pullback
    @ ~/.julia/packages/ZygoteRules/AIbCs/src/adjoint.jl:65 [inlined]
 [21] _pullback
    @ ./broadcast.jl:1297 [inlined]
 [22] _pullback
    @ ~/Desktop/lstm_model.jl:82 [inlined]
 [23] _pullback(::Zygote.Context, ::typeof(model), ::Vector{Float32}, ::Chain{Tuple{Flux.Recur{Flux.LSTMCell{Matrix{Float32}, Vector{Float32}, Tuple{Matrix{Float32}, Matrix{Float32}}}, Tuple{Matrix{Float32}, Matrix{Float32}}}, Dropout{Float64, Colon}, Flux.Recur{Flux.LSTMCell{Matrix{Float32}, Vector{Float32}, Tuple{Matrix{Float32}, Matrix{Float32}}}, Tuple{Matrix{Float32}, Matrix{Float32}}}, Dropout{Float64, Colon}, Dense{typeof(identity), Matrix{Float32}, Vector{Float32}}, Dropout{Float64, Colon}}}, ::Dense{typeof(identity), Matrix{Float32}, Vector{Float32}})
    @ Zygote ~/.julia/packages/Zygote/umM0L/src/compiler/interface2.jl:0
 [24] _pullback
    @ ~/Desktop/lstm_model.jl:108 [inlined]
 [25] _pullback(::Zygote.Context, ::var"#loss#2"{Dense{typeof(identity), Matrix{Float32}, Vector{Float32}}, Chain{Tuple{Flux.Recur{Flux.LSTMCell{Matrix{Float32}, Vector{Float32}, Tuple{Matrix{Float32}, Matrix{Float32}}}, Tuple{Matrix{Float32}, Matrix{Float32}}}, Dropout{Float64, Colon}, Flux.Recur{Flux.LSTMCell{Matrix{Float32}, Vector{Float32}, Tuple{Matrix{Float32}, Matrix{Float32}}}, Tuple{Matrix{Float32}, Matrix{Float32}}}, Dropout{Float64, Colon}, Dense{typeof(identity), Matrix{Float32}, Vector{Float32}}, Dropout{Float64, Colon}}}}, ::Vector{Float32}, ::Float32)
    @ Zygote ~/.julia/packages/Zygote/umM0L/src/compiler/interface2.jl:0
 [26] _apply(::Function, ::Vararg{Any})
    @ Core ./boot.jl:814
 [27] adjoint
    @ ~/.julia/packages/Zygote/umM0L/src/lib/lib.jl:200 [inlined]
 [28] _pullback
    @ ~/.julia/packages/ZygoteRules/AIbCs/src/adjoint.jl:65 [inlined]
 [29] _pullback
    @ ~/.julia/packages/Flux/BPPNj/src/optimise/train.jl:105 [inlined]
 [30] _pullback(::Zygote.Context, ::Flux.Optimise.var"#39#45"{var"#loss#2"{Dense{typeof(identity), Matrix{Float32}, Vector{Float32}}, Chain{Tuple{Flux.Recur{Flux.LSTMCell{Matrix{Float32}, Vector{Float32}, Tuple{Matrix{Float32}, Matrix{Float32}}}, Tuple{Matrix{Float32}, Matrix{Float32}}}, Dropout{Float64, Colon}, Flux.Recur{Flux.LSTMCell{Matrix{Float32}, Vector{Float32}, Tuple{Matrix{Float32}, Matrix{Float32}}}, Tuple{Matrix{Float32}, Matrix{Float32}}}, Dropout{Float64, Colon}, Dense{typeof(identity), Matrix{Float32}, Vector{Float32}}, Dropout{Float64, Colon}}}}, Tuple{Vector{Float32}, Float32}})
    @ Zygote ~/.julia/packages/Zygote/umM0L/src/compiler/interface2.jl:0
 [31] pullback(f::Function, ps::Zygote.Params)
    @ Zygote ~/.julia/packages/Zygote/umM0L/src/compiler/interface.jl:352
 [32] gradient(f::Function, args::Zygote.Params)
    @ Zygote ~/.julia/packages/Zygote/umM0L/src/compiler/interface.jl:75
 [33] macro expansion
    @ ~/.julia/packages/Flux/BPPNj/src/optimise/train.jl:104 [inlined]
 [34] macro expansion
    @ ~/.julia/packages/Juno/n6wyj/src/progress.jl:134 [inlined]
 [35] train!(loss::Function, ps::Zygote.Params, data::Base.Iterators.Zip{Tuple{Vector{Any}, Vector{Float32}}}, opt::ADAM; cb::Flux.var"#throttled#72"{Flux.var"#throttled#68#73"{Bool, Bool, var"#1#3", Int64}})
    @ Flux.Optimise ~/.julia/packages/Flux/BPPNj/src/optimise/train.jl:102
 [36] main()
    @ Main ~/Desktop/lstm_model.jl:116
 [37] top-level scope
    @ ~/Desktop/lstm_model.jl:120
in expression starting at /home/user/Desktop/lstm_model.jl:120

Code:

using Flux: @epochs, throttle
using Flux

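# Read the data sets from disk: each line of the x files is a printed Julia array of
# polynomial coefficients, and each line of the y files is the number of real roots
# of the corresponding polynomial.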
function input()
	## x_train
	lines = Tuple(readlines("/home/user/Desktop/train_x_data.txt"))
	x_train = []

	for i in lines
		push!(x_train, convert(Vector{Float32},eval(Meta.parse(i))))
	end

	## y_train
	lines = Tuple(readlines("/home/user/Desktop/train_y_data.txt"))
	y_train = []

	for i in lines
		push!(y_train, eval(Meta.parse(i)))
	end
	
	y_train = convert(Vector{Float32}, y_train)

	## x_test
	lines = Tuple(readlines("/home/user/Desktop/test_x_data.txt"))
	x_test = []

	for i in lines
		push!(x_test, convert(Vector{Float32},eval(Meta.parse(i))))
	end


	## y_test
	lines = Tuple(readlines("/home/user/Desktop/test_y_data.txt"))
	y_test = []

	for i in lines
		push!(y_test, eval(Meta.parse(i)))
	end
	
	y_test = convert(Vector{Float32}, y_test)

	return x_train, x_test, y_train, y_test
end

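# Build the network: two stacked LSTMs with dropout feeding a Dense layer (the scanner),
# plus a final Dense classifier (the encoder).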
function LSTM_model(N,num_of_classes)
	scanner = Chain(LSTM(N,200),
		        Dropout(0.2),
		        LSTM(200,200),
		        Dropout(0.1),
		        Dense(200,101),
		        Dropout(0.1))
	encoder = Dense(101,num_of_classes)
	return scanner, encoder
end

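# Run the scanner over the input, keep its last output, reset the recurrent state,
# and classify that output with the encoder.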
function model(x, scanner, encoder)
	state = scanner.(x)[end]
	reset!(scanner)
	encoder(state)
end

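# Load the data, build the model, and train it with a squared-error loss, the ADAM
# optimizer, and a throttled progress callback.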
function main()
	num_of_classes = 101
	num_epochs = 50

	x_train, x_test, y_train, y_test = input()

	
	N = size(x_train)[1]
	scanner, encoder = LSTM_model(N,num_of_classes)

	loss(x, y) = (model(x, scanner, encoder) - y)^2
	ps = Flux.params(scanner,encoder)

	# use the ADAM optimizer. It's a pretty good one!
	opt = Flux.ADAM(0.001)

	evalcb = () -> @show testloss()
	@info("Training...")
	Flux.train!(loss, ps, zip(x_train, y_train), opt, cb = throttle(evalcb, 10))
	
end

main()