diff --git a/MLP/include/MLP.hpp b/MLP/include/MLP.hpp
index 91bfea8c0357496000616942abeb10afe6374885..a7efc5e615b49e16f57e63ce96123f38d0050012 100644
--- a/MLP/include/MLP.hpp
+++ b/MLP/include/MLP.hpp
@@ -78,6 +78,7 @@ class MLP
   int trainOnBatch(Examples & examples, int start, int end);
   int getScoreOnBatch(Examples & examples, int start, int end);
   void save(const std::string & filename);
+  void printTopology(FILE * output);
 };
 
 #endif
diff --git a/MLP/src/MLP.cpp b/MLP/src/MLP.cpp
index 8978dcc56fbef0c1534d02e5a4594c575a2dec51..ac2594aaff5663173adc679025162a7c5d6a544d 100644
--- a/MLP/src/MLP.cpp
+++ b/MLP/src/MLP.cpp
@@ -441,3 +441,22 @@ MLP::MLP(const std::string & filename)
   load(filename);
 }
 
+// Print the network topology to `output` as "(in->h1->...->out)\n",
+// where `in` is the first layer's input_dim and every following
+// number is a layer's output_dim. An MLP with no layers prints "()".
+void MLP::printTopology(FILE * output)
+{
+  fprintf(output, "(");
+  for(unsigned int i = 0; i < layers.size(); i++)
+  {
+    auto & layer = layers[i];
+
+    // Only the first layer contributes the network's input dimension.
+    if(i == 0)
+      fprintf(output, "%d", layer.input_dim);
+    fprintf(output, "->%d", layer.output_dim);
+  }
+
+  fprintf(output, ")\n");
+}
+
diff --git a/tape_machine/include/Classifier.hpp b/tape_machine/include/Classifier.hpp
index fa39badfc6a1e0d5ff1698d52c372065ba042d81..48a042f27364bcd6bb42f1c2b3d105a17c2857b6 100644
--- a/tape_machine/include/Classifier.hpp
+++ b/tape_machine/include/Classifier.hpp
@@ -54,6 +54,7 @@ class Classifier
   void initClassifier(Config & config);
   void save();
   bool needsTrain();
+  void printTopology(FILE * output);
 };
 
 #endif
diff --git a/tape_machine/src/Classifier.cpp b/tape_machine/src/Classifier.cpp
index 55dc6cd557cf6245b4e0e6e898484a6e2cf16740..77d218f3103333f53c225d62c0832c17d3160f4c 100644
--- a/tape_machine/src/Classifier.cpp
+++ b/tape_machine/src/Classifier.cpp
@@ -125,7 +125,7 @@ void Classifier::initClassifier(Config & config)
   }
 
   int nbInputs = 0;
-  int nbHidden = 200;
+  int nbHidden = 500;
   int nbOutputs = as->actions.size();
 
   auto fd = fm->getFeatureDescription(config);
@@ -226,3 +226,12 @@ bool Classifier::needsTrain()
   return type == Type::Prediction;
 }
 
+// Print "<name> topology : " followed by the underlying MLP's
+// topology, on `output`.
+// NOTE(review): assumes mlp is non-null — presumably guaranteed once
+// initClassifier has run; confirm callers only use this after init.
+void Classifier::printTopology(FILE * output)
+{
+  fprintf(output, "%s topology : ", name.c_str());
+  mlp->printTopology(output);
+}
+
diff --git a/trainer/src/Trainer.cpp b/trainer/src/Trainer.cpp
index b550c37977df4cdb227246ac96338651ad9edb47..0ce551c50dd045f828926a2726767442e0da0e62 100644
--- a/trainer/src/Trainer.cpp
+++ b/trainer/src/Trainer.cpp
@@ -127,6 +127,11 @@ void Trainer::trainBatched(int nbIter, int batchSize, bool mustShuffle)
   if(devMcd && devConfig)
     getExamplesByClassifier(devExamples, *devConfig);
 
+  auto & classifiers = tm.getClassifiers();
+  for(Classifier * cla : classifiers)
+    if(cla->needsTrain())
+      cla->printTopology(stderr);
+
   std::map< std::string, std::vector<float> > trainScores;
   std::map< std::string, std::vector<float> > devScores;
   std::map<std::string, int> bestIter;
@@ -154,7 +159,6 @@ void Trainer::trainBatched(int nbIter, int batchSize, bool mustShuffle)
     printIterationScores(stderr, nbExamplesTrain, nbExamplesDev,
                          trainScores, devScores, bestIter, nbIter, i);
 
-    auto & classifiers = tm.getClassifiers();
     for(Classifier * cla : classifiers)
       if(cla->needsTrain())
         if(bestIter[cla->name] == i)