From 627bf370bc158d01b890f5951c2da0c11f6c1ad4 Mon Sep 17 00:00:00 2001
From: Dominik Roth
Date: Tue, 21 Sep 2021 09:17:01 +0200
Subject: [PATCH] Upped lr and dropout

---
 discriminator.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/discriminator.py b/discriminator.py
index f0ded06..ade1d2b 100644
--- a/discriminator.py
+++ b/discriminator.py
@@ -13,7 +13,7 @@ class Model(nn.Module):
             input_size=8,
             hidden_size=16,
             num_layers=3,
-            dropout=0.05,
+            dropout=0.1,
         )
         self.fc = nn.Linear(16, 1)
         self.out = nn.Sigmoid()
@@ -32,7 +32,7 @@ def train(model, seq_len=16*64):
     model.train()

     criterion = nn.BCELoss()
-    optimizer = optim.Adam(model.parameters(), lr=0.001)
+    optimizer = optim.Adam(model.parameters(), lr=0.01)

     for epoch in range(1024):
         state_h, state_c = model.init_state(seq_len)
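For context, a minimal sketch of how the patched values slot into discriminator.py. It assumes the configured layer is nn.LSTM (consistent with the input_size/hidden_size/num_layers/dropout kwargs in the hunk); the forward() signature and the state shapes returned by init_state() are assumptions, not shown in the patch.

import torch
from torch import nn, optim

class Model(nn.Module):
    def __init__(self):
        super().__init__()
        # Kwargs match the hunk at line 13; nn.LSTM itself is an assumption.
        self.lstm = nn.LSTM(
            input_size=8,
            hidden_size=16,
            num_layers=3,
            dropout=0.1,  # raised from 0.05 by this commit
        )
        self.fc = nn.Linear(16, 1)
        self.out = nn.Sigmoid()

    def forward(self, x, state):
        # x: (seq, batch, 8) with the default batch_first=False
        y, state = self.lstm(x, state)
        return self.out(self.fc(y)), state

    def init_state(self, seq_len):
        # Zero (h, c) states; using seq_len as the batch dimension mirrors the
        # call in train(), but is an assumption about the surrounding code.
        return (torch.zeros(3, seq_len, 16), torch.zeros(3, seq_len, 16))

model = Model()
criterion = nn.BCELoss()
optimizer = optim.Adam(model.parameters(), lr=0.01)  # raised from 0.001 by this commit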