For those interested, this is the complete script, together with a grid that lets students write their own digits… it works quite well.

(the actual classification script is adapted from various tutorials, mainly this one)
using Pkg
cd(".")
Pkg.activate(".")
#Pkg.add("Flux")
#Pkg.add("MLDatasets")
#Pkg.add("BetaML")
#Pkg.add("Images")
#Pkg.add("FileIO")
#Pkg.add("ImageTransformations")
#Pkg.add("MLDatasets")
using DelimitedFiles
using Statistics
using Flux
using Flux: Data.DataLoader
using Flux: onehotbatch, onecold, crossentropy
using Flux: @epochs
using MLDatasets # For loading the training data
using Images, FileIO, ImageTransformations # For loading the actual images
# Training of the model
x_train, y_train = MLDatasets.MNIST.traindata()
x_train = permutedims(x_train,(2,1,3)) # For correct img axis
x_train_imgs = convert(Array{Gray{N0f8},3},deepcopy(x_train)) # only needed if you want to visualise the digits
x_train = convert(Array{Float32,3},x_train)
x_train = reshape(x_train,(28,28,1,60000))
y_train = onehotbatch(y_train, 0:9)
train_data = DataLoader((x_train, y_train), batchsize=128)
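# (Not in the original post: a quick sanity check that the DataLoader yields
#  batches of the expected shape — 28x28x1x128 inputs and 10x128 one-hot labels.)
xb, yb = first(train_data)
size(xb), size(yb)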
model = Chain(
    # 28x28 => 14x14
    Conv((5, 5), 1=>8, pad=2, stride=2, relu),
    # 14x14 => 7x7
    Conv((3, 3), 8=>16, pad=1, stride=2, relu),
    # 7x7 => 4x4
    Conv((3, 3), 16=>32, pad=1, stride=2, relu),
    # 4x4 => 2x2
    Conv((3, 3), 32=>32, pad=1, stride=2, relu),
    # Average pooling on each width x height feature map
    GlobalMeanPool(),
    flatten,
    Dense(32, 10),
    softmax
)
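# (Not in the original post: shape check on a dummy input — a single random
#  28x28 grayscale image should produce one 10-element column of class probabilities.)
size(model(rand(Float32, 28, 28, 1, 1))) # expected (10, 1)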
accuracy(ŷ, y) = mean(onecold(ŷ) .== onecold(y))
loss(x, y) = Flux.crossentropy(model(x), y)
# plain gradient descent with learning rate 0.1
opt = Descent(0.1)
#opt = Flux.ADAM()
ps = Flux.params(model)
number_epochs = 10
@epochs number_epochs Flux.train!(loss, ps, train_data, opt)
accuracy(model(x_train), y_train) # 0.981
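# (Addition, not in the original post: out-of-sample check on the held-out MNIST
#  test set, preprocessed the same way as the training data above.)
x_test, y_test = MLDatasets.MNIST.testdata()
x_test = permutedims(x_test, (2, 1, 3))
x_test = convert(Array{Float32,3}, x_test)
x_test = reshape(x_test, (28, 28, 1, :))
y_test = onehotbatch(y_test, 0:9)
accuracy(model(x_test), y_test)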
# Loading imgs
# Set isolated faint pixels to pure black: a pixel at or below `threshold` is zeroed
# when every pixel within `radius` of it is also at or below the threshold, i.e. faint
# pixels next to an actual stroke are kept as anti-aliasing.
function cleanImg!(img, threshold=0.3, radius=0)
    (R, C) = size(img)
    for c in 1:C
        for r in 1:R
            if img[r, c] <= threshold
                allneighboursunderthreshold = true
                for c2 in max(1, c-radius):min(C, c+radius)
                    for r2 in max(1, r-radius):min(R, r+radius)
                        if img[r2, c2] > threshold
                            allneighboursunderthreshold = false
                            break
                        end
                    end
                end
                if allneighboursunderthreshold
                    img[r, c] = Gray(0.0)
                end
            end
        end
    end
    return img
end
imgs_y = convert(Array{Int64,1}, dropdims(readdlm("./data/img_labels.txt"), dims=2)) # true labels
imgs_path = ["./data/test$(i).png" for i in 1:24]
imgs = load.(imgs_path)
imgs = [Gray.(i) for i in imgs]              # convert to grayscale
imgs = [imresize(i, (28, 28)) for i in imgs] # resize to the MNIST resolution
imgs = [1.0 .- i for i in imgs]              # invert: MNIST digits are light on a dark background
imgs = cleanImg!.(imgs, 0.3, 1)              # zero out faint background noise
imgs = cat(imgs..., dims=3)
imgs = reshape(imgs, (28, 28, 1, size(imgs, 3)))
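# (Optional addition, not in the original post: depending on the Flux version it can be
#  safer to feed plain Float32 arrays rather than Gray values into the Conv layers.)
imgs = Float32.(imgs)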
# Doing the actual classification
imgs_est = model(imgs)
imgs_ŷ = onecold(imgs_est, 0:9)    # predicted digit for each image
probs = maximum(imgs_est, dims=1)  # confidence of each prediction
mean(imgs_ŷ .== imgs_y)            # share of correctly classified handwritten digits
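# (Addition: list the images the model got wrong, with predicted vs. true label,
#  to see which handwritten digits are hardest.)
wrong = findall(imgs_ŷ .!= imgs_y)
[imgs_path[wrong] imgs_ŷ[wrong] imgs_y[wrong]]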
Grid (template for students to write their own digits):