Is there a way to freeze a single weight instead of an entire layer?
The Flux.delete! function can only freeze a whole layer's parameter array, not a single entry within it.
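For example, layer-level freezing works by removing an entire parameter array from the Params collection (a minimal sketch, assuming the same model that is defined below):

ps = Flux.params(model)
delete!(ps, model[1].W)   # freezes every weight of the first Dense layer at once
# There is no element-wise version: model[1].W[2] is just a scalar value,
# so it is not an entry of ps and removing it changes nothing.

Here is the setup I am working with: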
using Flux

function micronet()
    simplenet = gpu(Chain(
        Dense(2, 2, relu),
        Dense(2, 1),
    ))
    return simplenet
end
function dummy_data_micronet(train::Int64, test::Int64)
    # inputs are train×2 / test×2; transposed on return so features are rows
    xtrain = rand(train, 2) |> cpu
    ytrain = rand(train) |> cpu
    xtest = rand(test, 2) |> cpu
    ytest = rand(test) |> cpu
    data = [(xtrain', ytrain')] |> cpu
    return data, xtest', ytest'
end
model = SimpleNet.micronet()
data,xtest,ytest = DataCostumizer.dummy_data_micronet(1000,100)
loss(x, y) = Flux.mse(model(x), y)
ps = params(model)
opt = ADAM()
evalcb = () -> println("removed edges: ", MagnitudePruning.compute_zero_entries(model), "\nloss: ", loss(xtest, ytest))
@Flux.epochs 10 Flux.train!(loss, ps, data, opt, cb = Flux.throttle(evalcb, 10))
# attempt to freeze one single weight; delete! expects a whole array,
# so the scalar model[1].W[2] is simply not found in ps
delete!(ps, model[1].W[2])
@Flux.epochs 10 Flux.train!(loss, ps, data, opt, cb = Flux.throttle(evalcb, 10))
The same limitation applies to Flux.trainable: it selects whole parameter arrays (fields of a layer), not individual entries.
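For instance, for a custom layer, trainable can only exclude complete fields (a minimal sketch with a hypothetical MyDense layer; in the Flux version I am using, trainable returns a tuple of fields):

struct MyDense
    W
    b
end
Flux.@functor MyDense
# only W is reported as trainable; b is frozen, but again as a whole array,
# not entry by entry
Flux.trainable(d::MyDense) = (d.W,)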