I am trying to translate the Python script from the linked example into pure Julia:
example-2-calling-julia-from-python
The part giving me trouble is these lines:
loss = jl.seval("m -> (x, y) -> Flux.Losses.mse(m(x), y)")(model)
jl.Flux.train_b(
    loss,
    jl.Flux.params(model),
    jl.Vector[jl.Tuple]([batch(100) for _ in range(2000)]),
    jl.ADAM(0.01),
)
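If I understand the juliacall conventions correctly (seval evaluates a string of Julia code, and a trailing _b in a Python attribute name stands for Julia's !, so jl.Flux.train_b is Flux.train!), those lines build a loss closure over the model and then train it. So my understanding is that the direct Julia equivalent should look roughly like this (a sketch based on that assumption, not something I have verified):

# seval("m -> (x, y) -> ...")(model) is a curried closure capturing the model
loss = let m = model
    (x, y) -> Flux.Losses.mse(m(x), y)
end
data = [batch(100) for _ in 1:2000]    # a Vector of (x, y) tuples
Flux.train!(loss, Flux.params(model), data, Flux.ADAM(0.01))    # train_b => train!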
But I have not been able to get this working. This is as far as I have gotten:
using Plots
pyplot()
using Random
Random.seed!(1234)
function batch(n)
    x = sort(rand(-1.1:0.01:1.1, n))
    y = sin.(10x) .+ rand(n)
    return x, y
end
x, y = batch(100);
p1 = scatter(x, y, marker_z=(+), color=:blues, legend=false,
             label="data", markersize=3, alpha=0.3);
plot(p1)
using Flux
model = Chain(
    Dense(1, 10, relu),
    Dense(10, 10, relu),
    Dense(10, 10, relu),
    Dense(10, 1))
#loss(x, y) = Flux.Losses.mse(model(x), y)
loss = (m -> (x, y) -> Flux.Losses.mse(m(x), y))(model)
Flux.train!(loss, Flux.params(model),
            Vector{Tuple}([batch(100) for _ in 1:2000]),
            Flux.ADAM(0.01))
Running the Flux.train! call throws:

ERROR: DimensionMismatch("matrix A has dimensions (10,1), vector B has length 100")
Stacktrace:
[1] generic_matvecmul!(C::Vector{Float64}, tA::Char, A::Matrix{Float32}, B::Vector{Float64}, _add::LinearAlgebra.MulAddMul{true, true, Bool, Bool})
@ LinearAlgebra C:\Users\user\AppData\Local\Programs\Julia-1.7.3\share\julia\stdlib\v1.7\LinearAlgebra\src\matmul.jl:713
[2] mul!
@ C:\Users\user\AppData\Local\Programs\Julia-1.7.3\share\julia\stdlib\v1.7\LinearAlgebra\src\matmul.jl:81 [inlined]
[3] mul!
@ C:\Users\user\AppData\Local\Programs\Julia-1.7.3\share\julia\stdlib\v1.7\LinearAlgebra\src\matmul.jl:275 [inlined]
[4] *
@ C:\Users\user\AppData\Local\Programs\Julia-1.7.3\share\julia\stdlib\v1.7\LinearAlgebra\src\matmul.jl:47 [inlined]
[5] rrule
@ C:\Users\user\.julia\packages\ChainRules\uh22h\src\rulesets\Base\arraymath.jl:40 [inlined]
[6] rrule
@ C:\Users\user\.julia\packages\ChainRulesCore\GUvJT\src\rules.jl:134 [inlined]
[7] chain_rrule
@ C:\Users\user\.julia\packages\Zygote\DkIUK\src\compiler\chainrules.jl:217 [inlined]
[8] macro expansion
@ C:\Users\user\.julia\packages\Zygote\DkIUK\src\compiler\interface2.jl:0 [inlined]
[9] _pullback
@ C:\Users\user\.julia\packages\Zygote\DkIUK\src\compiler\interface2.jl:9 [inlined]
[10] _pullback
@ C:\Users\user\.julia\packages\Flux\js6mP\src\layers\basic.jl:159 [inlined]
[11] _pullback(ctx::Zygote.Context, f::Dense{typeof(relu), Matrix{Float32}, Vector{Float32}}, args::Vector{Float64})
@ Zygote C:\Users\user\.julia\packages\Zygote\DkIUK\src\compiler\interface2.jl:0
[12] macro expansion
@ C:\Users\user\.julia\packages\Flux\js6mP\src\layers\basic.jl:53 [inlined]
[13] _pullback
@ C:\Users\user\.julia\packages\Flux\js6mP\src\layers\basic.jl:53 [inlined]
[14] _pullback(::Zygote.Context, ::typeof(Flux.applychain), ::Tuple{Dense{typeof(relu), Matrix{Float32}, Vector{Float32}}, Dense{typeof(relu), Matrix{Float32}, Vector{Float32}}, Dense{typeof(relu), Matrix{Float32}, Vector{Float32}}, Dense{typeof(identity), Matrix{Float32}, Vector{Float32}}}, ::Vector{Float64})
@ Zygote C:\Users\user\.julia\packages\Zygote\DkIUK\src\compiler\interface2.jl:0
[15] _pullback
@ C:\Users\user\.julia\packages\Flux\js6mP\src\layers\basic.jl:51 [inlined]
[16] _pullback
@ f:\projects\Julia Flux\miejemplo.jl:33 [inlined]
[17] _pullback(::Zygote.Context, ::var"#6#8"{Chain{Tuple{Dense{typeof(relu), Matrix{Float32}, Vector{Float32}}, Dense{typeof(relu), Matrix{Float32}, Vector{Float32}}, Dense{typeof(relu), Matrix{Float32}, Vector{Float32}}, Dense{typeof(identity), Matrix{Float32}, Vector{Float32}}}}}, ::Vector{Float64}, ::Vector{Float64})
@ Zygote C:\Users\user\.julia\packages\Zygote\DkIUK\src\compiler\interface2.jl:0
[18] _apply
@ .\boot.jl:814 [inlined]
[19] adjoint
@ C:\Users\user\.julia\packages\Zygote\DkIUK\src\lib\lib.jl:204 [inlined]
[20] _pullback
@ C:\Users\user\.julia\packages\ZygoteRules\AIbCs\src\adjoint.jl:65 [inlined]
[21] _pullback
@ C:\Users\user\.julia\packages\Flux\js6mP\src\optimise\train.jl:120 [inlined]
[22] _pullback(::Zygote.Context, ::Flux.Optimise.var"#37#40"{var"#6#8"{Chain{Tuple{Dense{typeof(relu), Matrix{Float32}, Vector{Float32}}, Dense{typeof(relu), Matrix{Float32}, Vector{Float32}}, Dense{typeof(relu), Matrix{Float32}, Vector{Float32}}, Dense{typeof(identity), Matrix{Float32}, Vector{Float32}}}}}, Tuple{Vector{Float64}, Vector{Float64}}})
@ Zygote C:\Users\user\.julia\packages\Zygote\DkIUK\src\compiler\interface2.jl:0
[23] pullback(f::Function, ps::Zygote.Params{Zygote.Buffer{Any, Vector{Any}}})
@ Zygote C:\Users\user\.julia\packages\Zygote\DkIUK\src\compiler\interface.jl:352
[24] gradient(f::Function, args::Zygote.Params{Zygote.Buffer{Any, Vector{Any}}})
@ Zygote C:\Users\user\.julia\packages\Zygote\DkIUK\src\compiler\interface.jl:75
[25] macro expansion
@ C:\Users\user\.julia\packages\Flux\js6mP\src\optimise\train.jl:119 [inlined]
[26] macro expansion
@ C:\Users\user\.julia\packages\ProgressLogging\6KXlp\src\ProgressLogging.jl:328 [inlined]
[27] train!(loss::Function, ps::Zygote.Params{Zygote.Buffer{Any, Vector{Any}}}, data::Vector{Tuple}, opt::ADAM; cb::Flux.Optimise.var"#38#41")
@ Flux.Optimise C:\Users\user\.julia\packages\Flux\js6mP\src\optimise\train.jl:117
[28] train!(loss::Function, ps::Zygote.Params{Zygote.Buffer{Any, Vector{Any}}}, data::Vector{Tuple}, opt::ADAM)
@ Flux.Optimise C:\Users\user\.julia\packages\Flux\js6mP\src\optimise\train.jl:114
[29] top-level scope
@ f:\projects\Julia Flux\miejemplo.jl:34
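From the first frame, the failing operation is the (10,1) weight matrix of the first Dense(1, 10) layer multiplied by a length-100 vector: batch returns x as a plain Vector{Float64} of length n, while Flux layers expect a features × batchsize matrix, here 1×100 (the NumPy arrays in the Python example presumably already carry that (1, n) shape). If that is the cause, reshaping the batches should fix it; a sketch of what I mean, with a Float32 conversion only to match the layers' default weights:

function batch(n)
    x = sort(rand(-1.1:0.01:1.1, n))
    y = sin.(10x) .+ rand(n)
    # reshape the length-n vectors into 1×n matrices (features × batchsize)
    # and convert to Float32 to match the model's default Float32 weights
    return Float32.(reshape(x, 1, :)), Float32.(reshape(y, 1, :))
end

With 1×n inputs, model(x) returns a 1×n matrix, so Flux.Losses.mse(m(x), y) compares matching shapes. Is this the right way to port the example, or is there a more idiomatic Flux pattern?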