I found this example, but I can't find any documentation on how to build a model this way:
# Simple feed-forward network: Dense -> Dropout -> Dense.
#
# Fields are parametrized on the concrete layer types so Julia can
# specialize on them — the original untyped fields are `Any`-typed,
# which blocks specialization and slows every forward pass.
struct FFNetwork{F1,D,F2}
    fc_1::F1
    dropout::D
    fc_2::F2

    """
        FFNetwork(input_dim, hidden_dim, dropout, num_classes)

    Build a two-layer perceptron: a `Dense(input_dim, hidden_dim, relu)`
    layer, a `Dropout(dropout)` layer, and a final
    `Dense(hidden_dim, num_classes)` projection (no activation, matching
    the PyTorch original which returns raw logits).
    """
    function FFNetwork(
        input_dim::Int, hidden_dim::Int, dropout::Float32, num_classes::Int
    )
        fc_1 = Dense(input_dim, hidden_dim, relu)
        drop = Dropout(dropout)
        fc_2 = Dense(hidden_dim, num_classes)
        return new{typeof(fc_1),typeof(drop),typeof(fc_2)}(fc_1, drop, fc_2)
    end
end

# Register FFNetwork as a Flux layer container so that `Flux.params`,
# `gpu`/`cpu` movement, and `trainmode!`/`testmode!` recurse into the
# wrapped layers (per the "Advanced Model Building" section of the Flux
# docs — this is the missing piece discussed in this thread).
Flux.@functor FFNetwork
# Forward pass: flatten the input to (features, batch), then apply
# fc_1 (Dense with relu baked in) -> dropout -> fc_2. Returns raw
# logits, mirroring the PyTorch `forward`.
function (net::FFNetwork)(x)
    flat = Flux.flatten(x)
    hidden = net.fc_1(flat)
    regularized = net.dropout(hidden)
    return net.fc_2(regularized)
end
This is the PyTorch code I want to reproduce in Julia Flux:
class FFNetwork(Module):
    """Feed-forward classifier: Linear -> ReLU -> Dropout -> Linear.

    Flattens each input sample to ``prod(input_dims)`` features and
    returns raw logits of shape ``(batch, num_classes)``.
    """

    def __init__(self, input_dims, hidden_dim, dropout_ratio, num_classes):
        super(FFNetwork, self).__init__()
        # Number of features per sample once the image is flattened.
        self.flat_image_dims = np.prod(input_dims)
        self.fc_1 = torch.nn.Linear(self.flat_image_dims, hidden_dim)
        self.dropout = torch.nn.Dropout(dropout_ratio)
        self.fc_2 = torch.nn.Linear(hidden_dim, num_classes)

    def forward(self, x):
        # Collapse all non-batch dimensions into one feature axis.
        flattened = x.view(-1, self.flat_image_dims)
        hidden = F.relu(self.fc_1(flattened))
        # Dropout is applied after the activation, before the output layer.
        return self.fc_2(self.dropout(hidden))
Per the "Advanced Model Building" section of the Flux documentation, all you need to add to make the custom layer Flux-compatible is `@functor FFNetwork`.
3 Likes
Thanks, but I already figured it out…
Great that you already figured it out! In future, I would recommend posting what you found here and marking it as a solution as soon as you’ve done so. It saves us time answering a solved question, gives a solution for future readers and is generally good etiquette.
10 Likes