First, load the data. The loader below reads each class folder of the dataset into a dictionary of image arrays:
# Image side lengths fed to the network. Declared `const` so these globals are
# type-stable when read inside functions and top-level loops below.
const im_width = 128
const im_height = 128
"""
    get_data(width, height, type)

Load every image under `alzheimers/<type>/<label>/`, resizing each one to
`(width, height)`.

Returns `(labels, D)` where `labels` is the vector of class-folder names and
`D` maps each label to a `Float32` array of size `(width, height, 1, nfiles)`.

NOTE(review): assumes the images are single-channel (grayscale), so that
`channelview` yields a `(width, height)` matrix — confirm for this dataset.
"""
function get_data(width, height, type)
    root = "alzheimers/$type"
    labels = readdir(root)                 # one subfolder per class
    # Concrete value type so downstream reads of D are type-stable.
    D = Dict{String,Array{Float32,4}}()
    for label in labels
        dir = joinpath(root, label)
        files = readdir(dir)
        imgs = zeros(Float32, width, height, 1, length(files))
        for (i, file) in enumerate(files)
            im = imresize(load(joinpath(dir, file)), (width, height))
            imgs[:, :, 1, i] .= channelview(im)
        end
        D[label] = imgs
    end
    return labels, D
end
# Load the training split: `class` lists the label names, `D` maps each label
# to its (width, height, 1, n) image stack.
class, D = get_data(im_width, im_height, "train")

# Flatten the per-class dictionary into one big sample array plus a parallel
# label vector, iterating classes in the same order both times so the two
# stay aligned.
#
# NOTE(review): the original built these with a manual counter `c += 1` inside
# a top-level `for` loop; under Julia's soft-scope rule that assignment makes
# `c` a new loop-local in a non-interactive script and throws UndefVarError.
# Concatenation needs no loop-carried global and avoids the issue entirely.
datas = cat((D[k] for k in class)...; dims=4)
labels = reduce(vcat, [fill(k, size(D[k], 4)) for k in class])

datas = gpu(datas)
label_for_flux = Flux.onehotbatch(labels, class) |> gpu
datas_for_flux = Flux.DataLoader((datas, label_for_flux), batchsize=32, shuffle=true)
Next, define the model, optimizer, and loss:
# Small CNN: four conv(3×3, pad=1) + 2×2 max-pool stages, so each spatial
# dimension shrinks by a factor of 16 (128 → 8), then a dense head over the
# flattened 8×8×32 feature map, ending in 4 class probabilities.
model = Flux.Chain(
    Conv((3, 3), 1 => 4, relu, pad=1),
    MaxPool((2, 2)),
    Conv((3, 3), 4 => 8, relu, pad=1),
    MaxPool((2, 2)),
    Conv((3, 3), 8 => 16, relu, pad=1),
    MaxPool((2, 2)),
    Conv((3, 3), 16 => 32, relu, pad=1),
    MaxPool((2, 2)),
    Flux.flatten,
    # Integer division (÷) instead of floor(Int, ...) on float division:
    # the four 2×2 pools divide each spatial dim by exactly 16.
    Dense((im_width ÷ 16) * (im_height ÷ 16) * 32 => 20, Flux.relu),
    Flux.Dropout(0.1),
    Dense(20 => 4),
    # NOTE(review): Flux recommends emitting raw logits here and training with
    # Flux.logitcrossentropy for numerical stability; kept softmax to preserve
    # the model's probability-valued output.
    Flux.softmax
) |> gpu

opt = Flux.setup(Adam(0.001), model)

# Cross-entropy between predicted class probabilities and one-hot targets.
loss(m, x, y) = Flux.crossentropy(m(x), y)
# Train for 100 epochs over the shuffled DataLoader, logging the loss over the
# full training set after every epoch.
for epoch in 1:100
    CUDA.synchronize()   # drain pending GPU work before the next pass
    Flux.train!(loss, model, datas_for_flux, opt)
    epoch_loss = loss(model, datas, label_for_flux)
    @info epoch, epoch_loss
end
Finally, evaluate per-class accuracy on the test split:
# Load the held-out split and compute the per-class accuracy (in percent).
class, D = get_data(im_width, im_height, "test")
Flux.testmode!(model)   # disable Dropout so predictions are deterministic

purc = Float64[]
for i in eachindex(class)   # was hard-coded 1:4; follow the discovered classes
    Di = D[class[i]]
    truth = fill(class[i], size(Di, 4))
    pred = Flux.onecold(cpu(model(gpu(Di))), class)
    # Percentage of samples of this class that were predicted correctly.
    push!(purc, 100 * count(truth .== pred) / length(truth))
end
purc
To run all of the above, you will need the following packages:
using Flux,Images,CUDA,cuDNN