From 9fa94f3b08c9f25f49df5cd0cb2fd8e984488048 Mon Sep 17 00:00:00 2001
From: Franck Dary <franck.dary@lis-lab.fr>
Date: Thu, 15 Apr 2021 20:26:09 +0200
Subject: [PATCH] Increased number of parameters in the neural network and
 applied more dropout

---
 Networks.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/Networks.py b/Networks.py
index caddcee..2ab865b 100644
--- a/Networks.py
+++ b/Networks.py
@@ -9,13 +9,13 @@ class BaseNet(nn.Module):
     self.inputSize = inputSize
     self.outputSize = outputSize
     self.embeddings = {name : nn.Embedding(len(dicts.dicts[name]), self.embSize) for name in dicts.dicts.keys()}
-    self.fc1 = nn.Linear(inputSize * self.embSize, 128)
-    self.fc2 = nn.Linear(128, outputSize)
+    self.fc1 = nn.Linear(inputSize * self.embSize, 1600)
+    self.fc2 = nn.Linear(1600, outputSize)
     self.dropout = nn.Dropout(0.3)
 
   def forward(self, x) :
     x = self.dropout(self.embeddings["UPOS"](x).view(x.size(0), -1))
-    x = F.relu(self.fc1(x))
+    x = F.relu(self.dropout(self.fc1(x)))
     x = self.fc2(x)
     return x
 ################################################################################
-- 
GitLab
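
For reference, below is a minimal sketch of the whole BaseNet module as it stands after this patch. The imports, the constructor signature, and the embSize default are assumptions filled in from context; only the widened hidden layer (inputSize * embSize -> 1600 -> outputSize) and the dropout now applied to the fc1 output are taken from the diff itself.

# Sketch of BaseNet after this patch. Constructor signature and embSize=64
# are assumptions; layer sizes and dropout placement come from the diff.
import torch
import torch.nn as nn
import torch.nn.functional as F

class BaseNet(nn.Module) :
  def __init__(self, dicts, inputSize, outputSize, embSize=64) :
    super().__init__()
    self.embSize = embSize
    self.inputSize = inputSize
    self.outputSize = outputSize
    # One embedding table per feature column known to the dictionaries
    self.embeddings = {name : nn.Embedding(len(dicts.dicts[name]), self.embSize) for name in dicts.dicts.keys()}
    self.fc1 = nn.Linear(inputSize * self.embSize, 1600)  # hidden layer widened from 128 to 1600
    self.fc2 = nn.Linear(1600, outputSize)
    self.dropout = nn.Dropout(0.3)

  def forward(self, x) :
    # Embed the UPOS features, flatten per batch element, and apply dropout to the input
    x = self.dropout(self.embeddings["UPOS"](x).view(x.size(0), -1))
    # New in this patch: dropout is also applied to the fc1 pre-activation
    x = F.relu(self.dropout(self.fc1(x)))
    x = self.fc2(x)
    return x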