Upped lr and dropout
parent f4444f3a9e
commit 627bf370bc
@@ -13,7 +13,7 @@ class Model(nn.Module):
             input_size=8,
             hidden_size=16,
             num_layers=3,
-            dropout=0.05,
+            dropout=0.1,
         )
         self.fc = nn.Linear(16, 1)
         self.out = nn.Sigmoid()
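For context, a minimal sketch of the class this hunk touches, reconstructed from the visible context lines. Only the keyword arguments and the two lines after the closing parenthesis appear in the diff; the nn.LSTM wiring, the init_state body, and the forward pass are assumptions for illustration.

import torch
import torch.nn as nn

class Model(nn.Module):
    def __init__(self):
        super().__init__()
        # Stacked LSTM; dropout is applied between the 3 layers.
        self.lstm = nn.LSTM(
            input_size=8,
            hidden_size=16,
            num_layers=3,
            dropout=0.1,  # this commit raises it from 0.05
        )
        self.fc = nn.Linear(16, 1)
        self.out = nn.Sigmoid()

    def init_state(self, seq_len):
        # Assumed: zeroed hidden/cell states of shape
        # (num_layers, batch, hidden_size); seq_len matches the call
        # site in train() but is unused in this sketch.
        return (torch.zeros(3, 1, 16), torch.zeros(3, 1, 16))

    def forward(self, x, state):
        # x: (seq_len, batch, input_size=8) -> per-step sigmoid probability.
        y, state = self.lstm(x, state)
        return self.out(self.fc(y)), state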
@@ -32,7 +32,7 @@ def train(model, seq_len=16*64):
     model.train()
 
     criterion = nn.BCELoss()
-    optimizer = optim.Adam(model.parameters(), lr=0.001)
+    optimizer = optim.Adam(model.parameters(), lr=0.01)
 
     for epoch in range(1024):
         state_h, state_c = model.init_state(seq_len)
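Likewise, a sketch of the train() function around the second hunk, under the same assumptions; the dummy batch and the zero_grad/backward/step calls are filled in for illustration and are not part of this commit.

import torch
import torch.nn as nn
import torch.optim as optim

def train(model, seq_len=16*64):
    model.train()

    criterion = nn.BCELoss()
    optimizer = optim.Adam(model.parameters(), lr=0.01)  # this commit raises it from 0.001

    for epoch in range(1024):
        state_h, state_c = model.init_state(seq_len)
        # Assumed dummy data: inputs (seq_len, batch=1, input_size=8),
        # binary targets shaped like the sigmoid output (seq_len, 1, 1).
        x = torch.randn(seq_len, 1, 8)
        y_true = torch.randint(0, 2, (seq_len, 1, 1)).float()

        optimizer.zero_grad()
        y_pred, _ = model(x, (state_h, state_c))
        loss = criterion(y_pred, y_true)
        loss.backward()
        optimizer.step()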