Below is my Julia/Flux code replicating the 1-D GAN example (the generator learns to produce 2-D points (x, x²) lying on a parabola):
"""
    generate_real_data(n)

Return a `2×n` matrix of "real" samples: row 1 holds `n` values drawn
uniformly from `[-0.5, 0.5)`, row 2 holds their squares, so each column is a
point on the parabola y = x².
"""
function generate_real_data(n)
    xs = rand(1, n) .- 0.5
    return vcat(xs, xs .* xs)
end
"""
    D()

Build the discriminator: a 2-input MLP ending in a single sigmoid unit, so
its output is a probability in (0, 1) that the input point is real.

NOTE(review): because the final layer already applies `σ`, the matching loss
is `binarycrossentropy`; pairing this with `logitbinarycrossentropy` applies
sigmoid twice.
"""
function D()
    return Chain(
        Dense(2, 25, relu),
        # Dense broadcasts its activation elementwise, so pass `σ` directly
        # instead of the original anonymous `x -> σ.(x)` wrapper (identical
        # behavior, idiomatic form).
        Dense(25, 1, σ)
    )
end
"""
    G(latent_dim::Int)

Build the generator: an MLP mapping a `latent_dim`-dimensional noise vector
to a 2-D point (intended to mimic `generate_real_data` samples). The final
layer is linear (no activation), so outputs are unbounded.
"""
function G(latent_dim::Int)
    hidden = 15
    return Chain(
        Dense(latent_dim, hidden, relu),
        Dense(hidden, 2)
    )
end
# Discriminator loss. `dscr` (a global) already ends in a sigmoid, so its
# output is a probability — use `binarycrossentropy`. The original
# `logitbinarycrossentropy` applied sigmoid a second time, which flattens the
# gradient and stalls training. The loss already aggregates to a scalar mean,
# so the original `sum(...)` wrapper was a no-op and is dropped.
loss_D(x, y) = Flux.Losses.binarycrossentropy(dscr(x), y)
"""
    trainDiscriminator!(dscr, gen, train_size; latent_dim=5)

Run one discriminator epoch: label `train_size` real samples 1 and
`train_size` generated samples 0, then take one optimiser step per minibatch
of 100. Reads the globals `opt` and `loss_D` (which itself closes over
`dscr`). `latent_dim` must match the input width of `gen` (5 for `G(5)` in
this script); it was previously hard-coded.
"""
function trainDiscriminator!(dscr, gen, train_size; latent_dim::Int=5)
    real = generate_real_data(train_size)
    fake = gen(rand(latent_dim, train_size))
    X = hcat(real, fake)
    # Labels: first `train_size` columns are real (1), the rest fake (0).
    Y = vcat(ones(train_size), zeros(train_size))
    data = Flux.Data.DataLoader(X, Y', batchsize=100, shuffle=true)
    for d in data
        gs = gradient(Flux.params(dscr)) do
            loss_D(d...)
        end
        Flux.update!(opt, Flux.params(dscr), gs)
    end
end
# Generator (non-saturating) loss: push the discriminator's score on fake
# samples toward the "real" label 1. As with `loss_D`, `dscr` already outputs
# probabilities, so use `binarycrossentropy` rather than the original
# `logitbinarycrossentropy` (which double-applied the sigmoid).
loss_G(x) = Flux.Losses.binarycrossentropy(dscr(x), 1)
"""
    trainGenerator!(gen, dscr, train_size; latent_dim=5)

Run one generator epoch over `train_size` noise samples, stepping the global
`opt` once per minibatch of 100, and return a fresh batch of generated
samples from the updated generator (the original also returned this; callers
in this script ignore it).

Bug fix: the original precomputed `gen(rand(5, train_size))` and then
iterated the DataLoader over that *output matrix*, so `Flux.params(gen)`
never appeared inside the gradient closure and the generator received zero
gradient. We must batch the *noise* and call `gen` inside the closure.
"""
function trainGenerator!(gen, dscr, train_size; latent_dim::Int=5)
    noise = rand(latent_dim, train_size)
    data = Flux.Data.DataLoader(noise, batchsize=100, shuffle=true)
    for d in data
        gs = gradient(Flux.params(gen)) do
            # `gen` runs inside the closure so its parameters are tracked.
            loss_G(gen(d))
        end
        Flux.update!(opt, Flux.params(gen), gs)
    end
    return gen(rand(latent_dim, train_size))
end
# --- Script entry: build the models, train, and visualize ------------------
# NOTE(review): assumes `using Flux, Statistics, Plots` (or equivalent)
# appears earlier in the file — not visible in this chunk.
gen = G(5)          # generator with latent dimension 5
dscr = D()
opt = ADAM()        # shared optimiser; read as a global by the train functions
train_size = 2000
epochs = 10000

for epoch in 1:epochs
    trainDiscriminator!(dscr, gen, train_size)
    trainGenerator!(gen, dscr, train_size)
    # Periodic progress check: discriminator's mean score on real vs. fake.
    if mod(epoch, 1000) == 0
        real_batch = generate_real_data(train_size)
        fake_batch = gen(rand(5, train_size))
        @show mean(dscr(real_batch)), mean(dscr(fake_batch))
    end
end

# Overlay 100 real points (the parabola) with 100 generated points.
real = generate_real_data(train_size)
fake = gen(rand(5, train_size))
scatter(real[1, 1:100], real[2, 1:100])
scatter!(fake[1, 1:100], fake[2, 1:100])