From d66aeb6be0402c9bdd803a88632b354aef556e6b Mon Sep 17 00:00:00 2001
From: hartbook <franck.dary@etu.univ-amu.fr>
Date: Mon, 9 Jul 2018 16:13:25 +0200
Subject: [PATCH] Training now prints MLP topology

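Before the first training iteration, each classifier that needs
training now prints its MLP topology (input, hidden and output layer
dimensions) to stderr, e.g.:

  parser topology: (1234->500->56)

(The classifier name and dimensions above are illustrative.) The patch
also raises the default hidden layer size from 200 to 500.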
---
 MLP/include/MLP.hpp                 |  1 +
 MLP/src/MLP.cpp                     | 15 +++++++++++++++
 tape_machine/include/Classifier.hpp |  1 +
 tape_machine/src/Classifier.cpp     |  8 +++++++-
 trainer/src/Trainer.cpp             |  6 +++++-
 5 files changed, 29 insertions(+), 2 deletions(-)

diff --git a/MLP/include/MLP.hpp b/MLP/include/MLP.hpp
index 91bfea8..a7efc5e 100644
--- a/MLP/include/MLP.hpp
+++ b/MLP/include/MLP.hpp
@@ -78,6 +78,7 @@ class MLP
   int trainOnBatch(Examples & examples, int start, int end);
   int getScoreOnBatch(Examples & examples, int start, int end);
   void save(const std::string & filename);
+  void printTopology(FILE * output);
 };
 
 #endif
diff --git a/MLP/src/MLP.cpp b/MLP/src/MLP.cpp
index 8978dcc..ac2594a 100644
--- a/MLP/src/MLP.cpp
+++ b/MLP/src/MLP.cpp
@@ -441,3 +441,18 @@ MLP::MLP(const std::string & filename)
   load(filename);
 }
 
+void MLP::printTopology(FILE * output)
+{
+  fprintf(output, "(");
+  for(unsigned int i = 0; i < layers.size(); i++)
+  {
+    auto & layer = layers[i];
+
+    if(i == 0)
+      fprintf(output, "%d", layer.input_dim);
+    fprintf(output, "->%d", layer.output_dim);
+  }
+
+  fprintf(output, ")\n");
+}
+
diff --git a/tape_machine/include/Classifier.hpp b/tape_machine/include/Classifier.hpp
index fa39bad..48a042f 100644
--- a/tape_machine/include/Classifier.hpp
+++ b/tape_machine/include/Classifier.hpp
@@ -54,6 +54,7 @@ class Classifier
   void initClassifier(Config & config);
   void save();
   bool needsTrain();
+  void printTopology(FILE * output);
 };
 
 #endif
diff --git a/tape_machine/src/Classifier.cpp b/tape_machine/src/Classifier.cpp
index 55dc6cd..77d218f 100644
--- a/tape_machine/src/Classifier.cpp
+++ b/tape_machine/src/Classifier.cpp
@@ -125,7 +125,7 @@ void Classifier::initClassifier(Config & config)
   }
 
   int nbInputs = 0;
-  int nbHidden = 200;
+  int nbHidden = 500;
   int nbOutputs = as->actions.size();
 
   auto fd = fm->getFeatureDescription(config);
@@ -226,3 +226,9 @@ bool Classifier::needsTrain()
   return type == Type::Prediction;
 }
 
+void Classifier::printTopology(FILE * output)
+{
+  fprintf(output, "%s topology : ", name.c_str());
+  mlp->printTopology(output);
+}
+
diff --git a/trainer/src/Trainer.cpp b/trainer/src/Trainer.cpp
index b550c37..0ce551c 100644
--- a/trainer/src/Trainer.cpp
+++ b/trainer/src/Trainer.cpp
@@ -127,6 +127,11 @@ void Trainer::trainBatched(int nbIter, int batchSize, bool mustShuffle)
   if(devMcd && devConfig)
     getExamplesByClassifier(devExamples, *devConfig);
 
+  auto & classifiers = tm.getClassifiers();
+  for(Classifier * cla : classifiers)
+    if(cla->needsTrain())
+      cla->printTopology(stderr);
+
   std::map< std::string, std::vector<float> > trainScores;
   std::map< std::string, std::vector<float> > devScores;
   std::map<std::string, int> bestIter;
@@ -154,7 +159,6 @@ void Trainer::trainBatched(int nbIter, int batchSize, bool mustShuffle)
     printIterationScores(stderr, nbExamplesTrain, nbExamplesDev,
                          trainScores, devScores, bestIter, nbIter, i);
 
-    auto & classifiers = tm.getClassifiers();
     for(Classifier * cla : classifiers)
       if(cla->needsTrain())
         if(bestIter[cla->name] == i)
-- 
GitLab