%%julia
# Load the data
println("start")
x_train, y_train,x_valid, y_valid=h5open("./data.h5", "r") do file
read(file, "tr_X","tr_Y","va_X","va_Y")
end|> gpu
println("read done")
x_train = reshape(x_train, (80,80,3,:))|> gpu
x_valid = reshape(x_valid, (80,80,3,:))|> gpu
println("reshape done")
# Add the channel layer
#x_train = Flux.unsqueeze(x_train, 3)
#x_valid = Flux.unsqueeze(x_valid, 3)
# Encode labels
#y_train = onehotbatch(y_train, 0:1)
#y_valid = onehotbatch(y_valid, 0:1)
# Create the full dataset
train_data = DataLoader(x_train, y_train, batchsize=32)|> gpu
println("generator done")
model = Chain(
Conv((3, 3), 3=>32, stride=1, relu),
Conv((3, 3), 32=>64, stride=1, relu),
MaxPool((2,2); pad = 0, stride = 1),
Dropout(0.25),
Conv((3, 3), 64=>64, stride=1, relu),
MaxPool((2,2); pad = 0, stride = 1),
Dropout(0.25),
Conv((3, 3), 64=>128, pad=1, stride=1, relu),
MaxPool((2,2); pad = 0, stride = 1),
Dropout(0.5),
GlobalMeanPool(),
flatten,
Dense(128, 64,relu),
Dropout(0.5),
Dense(64, 1,sigmoid)
)|> gpu
println("chain init done")
#ŷ = model(x_valid)
#println("Prediction of first image: $(ŷ[1])")
accuracy(ŷ, y) = mean(ŷ.== y) |> gpu
loss(x, y) = Flux.crossentropy(model(x), y) |> gpu
# learning rate
lr = 0.1|> gpu
opt = Descent(lr) |> gpu
ps = Flux.params(model) |> gpu
number_epochs = 10|> gpu
println("train start")
callback() = push!(loss_vector, loss(x_train, y_train))|> gpu
loss_vector = Vector{Float64}()|> gpu
@epochs number_epochs Flux.train!(loss, ps, train_data, opt,cb=callback) |> gpu
println("post epoch")
#accuracy(model(x_train), y_train) |> gpu
println("done!")
Above is my first deep lerarning project using flux and julia. Getting below error while trying to use GPU.
RuntimeError: <PyCall.jlwrap (in a Julia function called from Python)
JULIA: InvalidIRError: compiling kernel getindex_kernel(CUDA.CuKernelContext, CuDeviceArray{Float32,4,1}, CuDeviceArray{Float32,4,1}, NTuple{4,Int64}, Base.Slice{Base.OneTo{Int64}}, Base.Slice{Base.OneTo{Int64}}, Base.Slice{Base.OneTo{Int64}}, CuDeviceArray{Int64,1,1}) resulted in invalid LLVM IR
Reason: unsupported dynamic function invocation (call to pointerref(ptr::Core.LLVMPtr{T,A}, i::Int64, ::Val{align}) where {T, A, align} in LLVM.Interop at /root/.julia/packages/LLVM/F9DFY/src/interop/pointer.jl:7)
Any help woould be great. I am using Kaggle environment for this project.