diff --git a/MLP/include/MLP.hpp b/MLP/include/MLP.hpp
index 958741e25f2e4b9294cae3496d9294cd085895ea..d40715a6ce0e21c4c69da32fe01b4abd35f86b57 100644
--- a/MLP/include/MLP.hpp
+++ b/MLP/include/MLP.hpp
@@ -90,8 +90,6 @@ class MLP
   dynet::ParameterCollection model;
   /// @brief The training algorithm that will be used.
   std::unique_ptr<dynet::Trainer> trainer;
-  /// @brief Whether the program is in train mode or not (only in train mode the parameters will be updated).
-  bool trainMode;
   /// @brief Must the Layer dropout rate be taken into account during the computations ? Usually it is only during the training step.
   bool dropoutActive;
 
diff --git a/MLP/src/MLP.cpp b/MLP/src/MLP.cpp
index 93ed71c08b086e1b9516fb5667831bfc507998f5..8e1e0c431665c3edd41ff1f86ee43a2b6817e7de 100644
--- a/MLP/src/MLP.cpp
+++ b/MLP/src/MLP.cpp
@@ -83,7 +83,6 @@ void MLP::initDynet()
 MLP::MLP()
 {
   randomSeed = ProgramParameters::seed;
-  trainMode = true;
   dropoutActive = true;
   trainer.reset(createTrainer());
   initDynet();
@@ -126,9 +125,6 @@ void MLP::init(int nbInputs, const std::string & topology, int nbOutputs)
 
 dynet::Trainer * MLP::createTrainer()
 {
-  if (!trainMode)
-    return nullptr;
-
   auto optimizer = noAccentLower(ProgramParameters::optimizer);
 
   if (optimizer == "amsgrad")
@@ -137,6 +133,8 @@ dynet::Trainer * MLP::createTrainer()
     return new dynet::AdamTrainer(model, ProgramParameters::learningRate, ProgramParameters::beta1, ProgramParameters::beta2, ProgramParameters::bias);
   else if (optimizer == "sgd")
     return new dynet::SimpleSGDTrainer(model, ProgramParameters::learningRate);
+  else if (optimizer == "none")
+    return nullptr;
 
   fprintf(stderr, "ERROR (%s) : unknown optimizer \'%s\'. Aborting.\n", ERRINFO, optimizer.c_str());
 
@@ -512,8 +510,7 @@ void MLP::loadParameters(const std::string & filename)
 
 MLP::MLP(const std::string & filename)
 {
-  trainMode = false;
-  dropoutActive = false;
+  dropoutActive = true;
 
   randomSeed = ProgramParameters::seed;
   trainer.reset(createTrainer());
diff --git a/decoder/src/macaon_decode.cpp b/decoder/src/macaon_decode.cpp
index 4dd69599a67161fbef28d67a1fd1f8a935292f2c..87aa954a0b45c7897258547c1738810063cb88fa 100644
--- a/decoder/src/macaon_decode.cpp
+++ b/decoder/src/macaon_decode.cpp
@@ -128,6 +128,8 @@ int main(int argc, char * argv[])
   ProgramParameters::sequenceDelimiterTape = vm["sequenceDelimiterTape"].as<std::string>();
   ProgramParameters::sequenceDelimiter = vm["sequenceDelimiter"].as<std::string>();
   ProgramParameters::showFeatureRepresentation = vm["showFeatureRepresentation"].as<int>();
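+  // Parameters are never updated during decoding : no optimizer is needed.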
+  ProgramParameters::optimizer = "none";
   std::string featureModels = vm["featureModels"].as<std::string>();
   if (!featureModels.empty())
   {
diff --git a/maca_common/include/util.hpp b/maca_common/include/util.hpp
index 0ad31ce09049bc49a6cafa8ca1ba31bbbbee0ac8..ff7dbf5a5572e6cbd325b1fb5785213d89fc621c 100644
--- a/maca_common/include/util.hpp
+++ b/maca_common/include/util.hpp
@@ -168,6 +168,13 @@ std::string removeSuffix(const std::string & s, const std::string & suffix);
 /// @return Current system time.
 std::string getTime();
 
+/// @brief Test if a file exists on the filesystem
+///
+/// @param s The path to the file
+///
+/// @return True if the file exists
+bool fileExists(const std::string & s);
+
 /// @brief Macro giving informations about an error.
 #define ERRINFO (getFilenameFromPath(std::string(__FILE__))+ ":l." + std::to_string(__LINE__)).c_str()
 
diff --git a/maca_common/src/Dict.cpp b/maca_common/src/Dict.cpp
index 3e05a69ea8a0aff2c353ba4d27d99f1eefd73d8a..3c1b3367bab8662c98f38488b2ac0f52122aba9c 100644
--- a/maca_common/src/Dict.cpp
+++ b/maca_common/src/Dict.cpp
@@ -139,7 +139,12 @@ void Dict::initFromFile(dynet::ParameterCollection & pc)
   delete file;
 
   if (readIndex == -1) // No parameters to read
+  {
+    this->lookupParameter = pc.add_lookup_parameters(MAX_CAPACITY, {(unsigned int)dimension});
+    addEntry(nullValueStr);
+    addEntry(unknownValueStr);
     return;
+  }
 
   dynet::TextFileLoader loader(filename);
   lookupParameter = loader.load_lookup_param(pc, "lookup");
@@ -151,7 +156,8 @@ void Dict::saveDicts(const std::string & directory, const std::string & namePref
   {
     if(!strncmp(it.first.c_str(), namePrefix.c_str(), namePrefix.size()))
     {
-      if (!it.second->isTrained)
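+      // Save the Dict unless it is already trained and not modifiable anymore.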
+      if (!(it.second->isTrained && it.second->policy != Policy::Modifiable))
       {
         it.second->filename = directory + it.second->name + ".dict";
         it.second->save();
@@ -445,7 +450,16 @@ void Dict::readDicts(const std::string & directory, const std::string & filename
 
       if (std::string(pretrained) == "_")
       {
-        str2dict.insert(std::make_pair(name, std::unique_ptr<Dict>(new Dict(name, dim, str2mode(modeStr)))));
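+        // If we are resuming a training, reload the Dict that was saved in the experiment directory.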
+        if (ProgramParameters::newTemplatePath == ProgramParameters::expPath)
+        {
+          std::string probableFilename = ProgramParameters::expPath + name + std::string(".dict");
+          str2dict.insert(std::make_pair(name, std::unique_ptr<Dict>(new Dict(name, Policy::Modifiable, probableFilename))));
+        }
+        else
+        {
+          str2dict.insert(std::make_pair(name, std::unique_ptr<Dict>(new Dict(name, dim, str2mode(modeStr)))));
+        }
       }
       else
       {
diff --git a/maca_common/src/util.cpp b/maca_common/src/util.cpp
index 7f43046c38f1259d87b530420196fa1678ae647a..a65d8a177b0466dc62e198a26d132d81f43530af 100644
--- a/maca_common/src/util.cpp
+++ b/maca_common/src/util.cpp
@@ -388,3 +388,14 @@ std::string getTime()
   return std::string(buffer);
 }
 
+bool fileExists(const std::string & s)
+{
+  FILE * f = fopen(s.c_str(), "r");
+
+  if (!f)
+    return false;
+
+  fclose(f);
+  return true;
+}
+
diff --git a/trainer/include/TrainInfos.hpp b/trainer/include/TrainInfos.hpp
new file mode 100644
index 0000000000000000000000000000000000000000..55be116e5ddb524514ee81955da2b75ab0b523ea
--- /dev/null
+++ b/trainer/include/TrainInfos.hpp
@@ -0,0 +1,66 @@
+/// @file TrainInfos.hpp
+/// @author Franck Dary
+/// @version 1.0
+/// @date 2018-12-20
+
+#ifndef TRAININFOS__H
+#define TRAININFOS__H
+
+#include <string>
+#include <vector>
+#include <map>
+#include <cstdio>
+#include "ProgramParameters.hpp"
+
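+/// @brief Stores the losses, scores and "must save" decisions of every classifier for each epoch, and persists them to a file so that an interrupted training can resume where it stopped.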
+class TrainInfos
+{
+  private :
+
+  std::string filename;
+  int lastEpoch;
+  std::map< std::string, std::vector<float> > trainLossesPerClassifierPerEpoch;
+  std::map< std::string, std::vector<float> > devLossesPerClassifierPerEpoch;
+  std::map< std::string, std::vector<float> > trainScoresPerClassifierPerEpoch;
+  std::map< std::string, std::vector<float> > devScoresPerClassifierPerEpoch;
+  std::map< std::string, std::vector<bool> > mustSavePerClassifierPerEpoch;
+
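+  /// @brief For each classifier, a pair (number of examples successfully classified, number of examples seen) on the train set of the current epoch.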
+  std::map< std::string, std::pair<int,int> > trainCounter;
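+  /// @brief Same as trainCounter, for the dev set.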
+  std::map< std::string, std::pair<int,int> > devCounter;
+
+  std::map<std::string, bool> topologyPrinted;
+
+  private :
+
+  void readFromFilename();
+  void saveToFilename();
+  float computeTrainScore(const std::string & classifier);
+  float computeDevScore(const std::string & classifier);
+  void addTrainScore(const std::string & classifier, float score);
+  void addDevScore(const std::string & classifier, float score);
+
+  public :
+
+  TrainInfos();
+  void addTrainLoss(const std::string & classifier, float loss);
+  void addDevLoss(const std::string & classifier, float loss);
+  void addTrainExample(const std::string & classifier);
+  void addDevExample(const std::string & classifier);
+  void addTrainSuccess(const std::string & classifier);
+  void addDevSuccess(const std::string & classifier);
+  void resetCounters();
+  void computeTrainScores();
+  void computeDevScores();
+  void computeMustSaves();
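+  /// @brief Get the number (starting at 1) of the current epoch.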
+  int getEpoch();
+  bool isTopologyPrinted(const std::string & classifier);
+  void setTopologyPrinted(const std::string & classifier);
+  void nextEpoch();
+  bool mustSave(const std::string & classifier);
+  void printScores(FILE * output);
+};
+
+#endif
diff --git a/trainer/include/Trainer.hpp b/trainer/include/Trainer.hpp
index 045c8003615580be80cff9dbd58f9a88e8129837..2b4a666496dc3c417ebc73f4b0c7465969bc67f2 100644
--- a/trainer/include/Trainer.hpp
+++ b/trainer/include/Trainer.hpp
@@ -9,6 +9,7 @@
 #include "TransitionMachine.hpp"
 #include "BD.hpp"
 #include "Config.hpp"
+#include "TrainInfos.hpp"
 
 /// @brief An object capable of training a TransitionMachine given a BD initialized with training examples.
 class Trainer
@@ -31,20 +32,8 @@ class Trainer
   /// Can be nullptr if dev is not used in this training.
   Config * devConfig;
 
-  /// @brief For each classifier, a pair of number examples seen / number examples successfully classified
-  std::map< std::string, std::pair<int, int> > trainCounter;
-
-  /// @brief For each classifier, the train score for the current iteration.
-  std::map< std::string, float > scores;
-
-  /// @brief For each classifier, the best score seen on dev.
-  std::map< std::string, float > bestScores;
-
-  /// @brief Whether or not each Classifier topology has been printed.
-  std::map< std::string, bool > topologyPrinted;
-
-  /// @brief Current iteration.
-  int curIter;
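+  /// @brief Information about the current training : losses, scores, epoch. Saved to a file after each epoch so that training can be resumed.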
+  TrainInfos TI;
 
   public :
 
@@ -57,9 +45,7 @@ class Trainer
   void printScoresAndSave(FILE * output);
 
-  /// @brief Get the scores of the classifiers on the dev dataset.
-  ///
-  /// @return Map from each Classifier name to their score.
-  std::map<std::string, std::pair<float, std::pair<float, float> > > getScoreOnDev();
+  /// @brief Compute the scores of the classifiers on the dev dataset and store them into TI.
+  void computeScoreOnDev();
 
   public :
 
diff --git a/trainer/src/TrainInfos.cpp b/trainer/src/TrainInfos.cpp
new file mode 100644
index 0000000000000000000000000000000000000000..868048076b7baa6f9eb360f2e88a01e13b3ea3eb
--- /dev/null
+++ b/trainer/src/TrainInfos.cpp
@@ -0,0 +1,271 @@
+#include "TrainInfos.hpp"
+#include "File.hpp"
+#include "util.hpp"
+
+TrainInfos::TrainInfos()
+{
+  filename = ProgramParameters::expPath + "trainInfos.txt";
+  lastEpoch = 0;
+
+  if (fileExists(filename))
+  {
+    readFromFilename();
+  }
+}
+
+void TrainInfos::readFromFilename()
+{
+  File file(filename, "r");
+  FILE * filePtr = file.getDescriptor();
+
+  char buffer[10000];
+
+  auto badFormatAndExit = [](const std::string & errinfo)
+  {
+    fprintf(stderr, "ERROR (%s) : bad format. Aborting.\n", errinfo.c_str());
+    exit(1);
+  };
+
+  if (fscanf(filePtr, "%d\n", &lastEpoch) != 1)
+    badFormatAndExit(ERRINFO);
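+  // Read the train losses of each classifier.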
+  while (fscanf(filePtr, "%9999[^\n]\n", buffer) == 1)
+  {
+    auto splitted = split(buffer, '\t');
+    if (splitted.empty() || splitted[0] == "---")
+      break;
+
+    for (unsigned int i = 1; i < splitted.size(); i++)
+      trainLossesPerClassifierPerEpoch[splitted[0]].emplace_back(std::stof(splitted[i]));
+  }
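+  // Read the dev losses of each classifier.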
+  while (fscanf(filePtr, "%9999[^\n]\n", buffer) == 1)
+  {
+    auto splitted = split(buffer, '\t');
+    if (splitted.empty() || splitted[0] == "---")
+      break;
+
+    for (unsigned int i = 1; i < splitted.size(); i++)
+      devLossesPerClassifierPerEpoch[splitted[0]].emplace_back(std::stof(splitted[i]));
+  }
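+  // Read the train scores of each classifier.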
+  while (fscanf(filePtr, "%9999[^\n]\n", buffer) == 1)
+  {
+    auto splitted = split(buffer, '\t');
+    if (splitted.empty() || splitted[0] == "---")
+      break;
+
+    for (unsigned int i = 1; i < splitted.size(); i++)
+      trainScoresPerClassifierPerEpoch[splitted[0]].emplace_back(std::stof(splitted[i]));
+  }
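+  // Read the dev scores of each classifier.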
+  while (fscanf(filePtr, "%9999[^\n]\n", buffer) == 1)
+  {
+    auto splitted = split(buffer, '\t');
+    if (splitted.empty() || splitted[0] == "---")
+      break;
+
+    for (unsigned int i = 1; i < splitted.size(); i++)
+      devScoresPerClassifierPerEpoch[splitted[0]].emplace_back(std::stof(splitted[i]));
+  }
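+  // Read the "must save" flags of each classifier.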
+  while (fscanf(filePtr, "%9999[^\n]\n", buffer) == 1)
+  {
+    auto splitted = split(buffer, '\t');
+    if (splitted.empty() || splitted[0] == "---")
+      break;
+
+    for (unsigned int i = 1; i < splitted.size(); i++)
+      mustSavePerClassifierPerEpoch[splitted[0]].push_back(splitted[i] == "true");
+  }
+}
+
+void TrainInfos::saveToFilename()
+{
+  File file(filename, "w");
+  FILE * filePtr = file.getDescriptor();
+
+  fprintf(filePtr, "%d\n", lastEpoch);
+  for (auto & it : trainLossesPerClassifierPerEpoch)
+  {
+    fprintf(filePtr, "%s\t", it.first.c_str());
+    for (auto & val : it.second)
+      fprintf(filePtr, "%f\t", val);
+    fprintf(filePtr, "\n");
+  }
+  fprintf(filePtr, "---\n");
+  for (auto & it : devLossesPerClassifierPerEpoch)
+  {
+    fprintf(filePtr, "%s\t", it.first.c_str());
+    for (auto & val : it.second)
+      fprintf(filePtr, "%f\t", val);
+    fprintf(filePtr, "\n");
+  }
+  fprintf(filePtr, "---\n");
+  for (auto & it : trainScoresPerClassifierPerEpoch)
+  {
+    fprintf(filePtr, "%s\t", it.first.c_str());
+    for (auto & val : it.second)
+      fprintf(filePtr, "%f\t", val);
+    fprintf(filePtr, "\n");
+  }
+  fprintf(filePtr, "---\n");
+  for (auto & it : devScoresPerClassifierPerEpoch)
+  {
+    fprintf(filePtr, "%s\t", it.first.c_str());
+    for (auto & val : it.second)
+      fprintf(filePtr, "%f\t", val);
+    fprintf(filePtr, "\n");
+  }
+  fprintf(filePtr, "---\n");
+  for (auto & it : mustSavePerClassifierPerEpoch)
+  {
+    fprintf(filePtr, "%s\t", it.first.c_str());
+    for (unsigned int i = 0; i < it.second.size(); i++)
+      fprintf(filePtr, "%s\t", it.second[i] ? "true" : "false");
+    fprintf(filePtr, "\n");
+  }
+  fprintf(filePtr, "---\n");
+}
+
+void TrainInfos::addTrainLoss(const std::string & classifier, float loss)
+{
+  trainLossesPerClassifierPerEpoch[classifier].emplace_back(loss);
+}
+
+void TrainInfos::addDevLoss(const std::string & classifier, float loss)
+{
+  devLossesPerClassifierPerEpoch[classifier].emplace_back(loss);
+}
+
+void TrainInfos::addTrainScore(const std::string & classifier, float score)
+{
+  trainScoresPerClassifierPerEpoch[classifier].emplace_back(score);
+}
+
+void TrainInfos::addDevScore(const std::string & classifier, float score)
+{
+  devScoresPerClassifierPerEpoch[classifier].emplace_back(score);
+}
+
+float TrainInfos::computeTrainScore(const std::string & classifier)
+{
+  return 100.0*trainCounter[classifier].first / trainCounter[classifier].second;
+}
+
+float TrainInfos::computeDevScore(const std::string & classifier)
+{
+  return 100.0*devCounter[classifier].first / devCounter[classifier].second;
+}
+
+void TrainInfos::addTrainExample(const std::string & classifier)
+{
+  trainCounter[classifier].second++;
+}
+
+void TrainInfos::addDevExample(const std::string & classifier)
+{
+  devCounter[classifier].second++;
+}
+
+void TrainInfos::addTrainSuccess(const std::string & classifier)
+{
+  trainCounter[classifier].first++;
+}
+
+void TrainInfos::addDevSuccess(const std::string & classifier)
+{
+  devCounter[classifier].first++;
+}
+
+void TrainInfos::resetCounters()
+{
+  trainCounter.clear();
+  devCounter.clear();
+}
+
+void TrainInfos::computeTrainScores()
+{
+  for (auto & it : trainCounter)
+    addTrainScore(it.first, computeTrainScore(it.first));
+}
+
+void TrainInfos::computeDevScores()
+{
+  for (auto & it : devCounter)
+    addDevScore(it.first, computeDevScore(it.first));
+}
+
+int TrainInfos::getEpoch()
+{
+  return lastEpoch + 1;
+}
+
+bool TrainInfos::isTopologyPrinted(const std::string & classifier)
+{
+  return topologyPrinted.count(classifier) && topologyPrinted[classifier];
+}
+
+void TrainInfos::setTopologyPrinted(const std::string & classifier)
+{
+  topologyPrinted[classifier] = true;
+}
+
+void TrainInfos::nextEpoch()
+{
+  lastEpoch++;
+  saveToFilename();
+}
+
+void TrainInfos::computeMustSaves()
+{
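+  // A classifier must be saved when its latest score is at least as good as all of its previous ones : dev scores are used if available, train scores otherwise.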
+  if (!devScoresPerClassifierPerEpoch.empty())
+    for (auto & it : devScoresPerClassifierPerEpoch)
+    {
+      mustSavePerClassifierPerEpoch[it.first].push_back(true);
+      for (auto & score : it.second)
+        if (score > it.second.back())
+          mustSavePerClassifierPerEpoch[it.first].back() = false;
+    }
+  else
+    for (auto & it : trainScoresPerClassifierPerEpoch)
+    {
+      mustSavePerClassifierPerEpoch[it.first].push_back(true);
+      for (auto & score : it.second)
+        if (score > it.second.back())
+          mustSavePerClassifierPerEpoch[it.first].back() = false;
+    }
+}
+
+void TrainInfos::printScores(FILE * output)
+{
+  std::vector<std::string> names;
+  std::vector<std::string> acc;
+  std::vector<std::string> train;
+  std::vector<std::string> dev;
+  std::vector<std::string> savedStr;
+
+  for (auto & it : trainScoresPerClassifierPerEpoch)
+  {
+    names.emplace_back(it.first);
+    acc.emplace_back("accuracy");
+    train.emplace_back(": train(" + float2str(it.second.back(), "%.2f") + "%)");
+    dev.emplace_back(devScoresPerClassifierPerEpoch.empty() ? "" : "dev(" +float2str(devScoresPerClassifierPerEpoch[it.first].back(), "%.2f") + "%)");
+    savedStr.emplace_back(mustSavePerClassifierPerEpoch[it.first].back() ? "SAVED" : "");
+  }
+
+  if (ProgramParameters::interactive)
+    fprintf(stderr, "                            \r");
+  if (ProgramParameters::printTime)
+    fprintf(output, "[%s] ", getTime().c_str());
+  fprintf(output, "Iteration %d/%d :\n", getEpoch(), ProgramParameters::nbIter);
+
+  printColumns(output, {names, acc, train, dev, savedStr});
+}
+
+bool TrainInfos::mustSave(const std::string & classifier)
+{
+  return mustSavePerClassifierPerEpoch.count(classifier) && mustSavePerClassifierPerEpoch[classifier].back();
+}
+
diff --git a/trainer/src/Trainer.cpp b/trainer/src/Trainer.cpp
index 991cc15ba9912373f26d7c639521277f1795847c..d53b7138315be057daef34bfcc1db7a97b540bc8 100644
--- a/trainer/src/Trainer.cpp
+++ b/trainer/src/Trainer.cpp
@@ -12,16 +12,14 @@ Trainer::Trainer(TransitionMachine & tm, BD & bd, Config & config, BD * devBD, C
 {
 }
 
-std::map<std::string, std::pair<float, std::pair<float, float> > > Trainer::getScoreOnDev()
+void Trainer::computeScoreOnDev()
 {
   if (!devConfig)
-    return {};
+    return;
 
   tm.reset();
   devConfig->reset();
 
-  std::map< std::string, std::pair<int, int> > counts;
-
   if (ProgramParameters::debug)
     fprintf(stderr, "Computing score on dev set\n");
 
@@ -71,8 +69,9 @@ std::map<std::string, std::pair<float, std::pair<float, float> > > Trainer::getS
           break;
         }
 
-      counts[classifier->name].first++;
-      counts[classifier->name].second += pActionIsZeroCost ? 1 : 0;
+      TI.addDevExample(classifier->name);
+      if (pActionIsZeroCost)
+        TI.addDevSuccess(classifier->name);
 
       std::string actionName = pAction;
       Action * action = classifier->getAction(actionName);
@@ -113,25 +112,7 @@ std::map<std::string, std::pair<float, std::pair<float, float> > > Trainer::getS
     }
   }
 
-  std::map<std::string, std::pair<float,std::pair<float,float> > > scores;
-  for (auto & it : counts)
-  {
-    scores[it.first].first = 100.0 * it.second.second / it.second.first;
-    if (ProgramParameters::printEntropy)
-    {
-      for (float f : entropies)
-        scores[it.first].second.first += f;
-
-      scores[it.first].second.first /= entropies.size();
-
-      for (float f : entropies)
-        scores[it.first].second.second += (f-scores[it.first].second.first)*(f-scores[it.first].second.first);
-
-      scores[it.first].second.second /= entropies.size();
-    }
-  }
-
-  return scores;
+  TI.computeDevScores();
 }
 
 void Trainer::train()
@@ -150,13 +131,11 @@ void Trainer::train()
     if(ProgramParameters::shuffleExamples)
       trainConfig.shuffle(ProgramParameters::sequenceDelimiterTape, ProgramParameters::sequenceDelimiter);
 
-    for (auto & it : trainCounter)
-      it.second.first = it.second.second = 0;
+    TI.resetCounters();
   };
 
-  curIter = 0;
   int nbSteps = 0;
-  while (curIter < ProgramParameters::nbIter)
+  while (TI.getEpoch() <= ProgramParameters::nbIter)
   {
     resetAndShuffle();
     while (!trainConfig.isFinal())
@@ -186,9 +165,9 @@ void Trainer::train()
       }
       else
       {
-        if (!topologyPrinted.count(classifier->name))
+        if (!TI.isTopologyPrinted(classifier->name))
         {
-          topologyPrinted[classifier->name] = true;
+          TI.setTopologyPrinted(classifier->name);
           classifier->printTopology(stderr);
         }
 
@@ -244,15 +223,16 @@ void Trainer::train()
 
         classifier->trainOnExample(trainConfig, classifier->getActionIndex(oAction));
 
-        trainCounter[classifier->name].first++;
-        trainCounter[classifier->name].second += pActionIsZeroCost ? 1 : 0;
+        TI.addTrainExample(classifier->name);
+        if (pActionIsZeroCost)
+          TI.addTrainSuccess(classifier->name);
 
         int k = ProgramParameters::dynamicEpoch;
         int p = 1000*(1.0-ProgramParameters::dynamicProbability);
 
         std::string actionName = "";
 
-        if (curIter >= k && (rand() % 1000 >= p))
+        if (TI.getEpoch() >= k && (rand() % 1000 >= p))
         {
           actionName = pAction;
         }
@@ -286,9 +266,9 @@ void Trainer::train()
       {
         printScoresAndSave(stderr);
         nbSteps = 0;
-        curIter++;
+        TI.nextEpoch();
 
-        if (curIter >= ProgramParameters::nbIter)
+        if (TI.getEpoch() > ProgramParameters::nbIter)
           break;
       }
     }
@@ -297,9 +277,9 @@ void Trainer::train()
     {
       printScoresAndSave(stderr);
       nbSteps = 0;
-      curIter++;
+      TI.nextEpoch();
 
-      if (curIter >= ProgramParameters::nbIter)
+      if (TI.getEpoch() > ProgramParameters::nbIter)
         break;
     }
   }
@@ -307,76 +287,18 @@ void Trainer::train()
 
 void Trainer::printScoresAndSave(FILE * output)
 {
-  for (auto & it : trainCounter)
-    scores[it.first] = 100.0 * it.second.second / it.second.first;
-
-  std::vector<std::string> names;
-  std::vector<std::string> acc;
-  std::vector<std::string> train;
-  std::vector<std::string> dev;
-  std::vector<std::string> savedStr; 
-
-  std::map<std::string, bool> saved;
-
-  auto devScores = getScoreOnDev();
-
-  if (devConfig)
-  {
-    for (auto & it : devScores)
-    {
-      if (bestScores.count(it.first) == 0 || bestScores[it.first] < it.second.first)
-      {
-        bestScores[it.first] = it.second.first;
-        saved[it.first] = true;
-      }
-      else
-        saved[it.first] = false;
-    }
-  }
-  else
-  {
-    for (auto & it : scores)
-    {
-      if (bestScores.count(it.first) == 0 || bestScores[it.first] < it.second)
-      {
-        bestScores[it.first] = it.second;
-        saved[it.first] = true;
-      }
-      else
-        saved[it.first] = false;
-    }
-  }
+  TI.computeTrainScores();
+  computeScoreOnDev();
+  TI.computeMustSaves();
 
   auto classifiers = tm.getClassifiers();
   for (auto * cla : classifiers)
-  {
-    if (!saved.count(cla->name))
-      continue;
-
-    if (saved[cla->name])
+    if (TI.mustSave(cla->name))
     {
       cla->save(ProgramParameters::expPath + cla->name + ".model");
       Dict::saveDicts(ProgramParameters::expPath, cla->name);
     }
-  }
-
-  for (auto & it : saved)
-  {
-    names.emplace_back(it.first);
-    acc.emplace_back("accuracy");
-    train.emplace_back(": train(" + float2str(scores[it.first], "%.2f") + "%)");
-    dev.emplace_back(devConfig ? "dev(" +float2str(devScores[it.first].first, "%.2f") + "%)" : "");
-    savedStr.emplace_back(saved[it.first] ? "SAVED" : "");
-    if (ProgramParameters::printEntropy)
-      savedStr.back() += " Entropy[" + float2str(devScores[it.first].second.first, "%.2f") + "\u00B1" + float2str(devScores[it.first].second.second, "%.2f") + "]";
-  }
-
-  if (ProgramParameters::interactive)
-    fprintf(stderr, "                            \r");
-  if (ProgramParameters::printTime)
-    fprintf(output, "[%s] ", getTime().c_str());
-  fprintf(output, "Iteration %d/%d :\n", curIter+1, ProgramParameters::nbIter);
 
-  printColumns(output, {names, acc, train, dev, savedStr});
+  TI.printScores(output);
 }
 
diff --git a/trainer/src/macaon_train.cpp b/trainer/src/macaon_train.cpp
index 1e3c79dcb7e3cd170637e09d7b9d4d2e59a19a2d..3b89f2bb7c1cc615c2cc7272ffc89383e9be4c61 100644
--- a/trainer/src/macaon_train.cpp
+++ b/trainer/src/macaon_train.cpp
@@ -159,6 +159,11 @@ void updatePaths()
 /// @brief Create the folder containing the current experiment from the template frolder
 void createExpPath()
 {
+  // If the template path and the experiment path are the same, we are resuming
+  // a previously stopped training : there is no need to recreate the expPath.
+  if (ProgramParameters::newTemplatePath == ProgramParameters::expPath)
+    return;
+
 std::string decode = "\
 #! /bin/bash\n\
 \n\
@@ -311,7 +316,6 @@ int main(int argc, char * argv[])
     updatePaths();
     ProgramParameters::newTemplatePath = ProgramParameters::templatePath;
     createExpPath();
-    Dict::deleteDicts();
     launchTraining();
   }
 
diff --git a/transition_machine/src/Classifier.cpp b/transition_machine/src/Classifier.cpp
index 59d6d4a037820b4d4b40dace28f9c22d9f38ca71..48b6f7c0e68683377659ce809a0f9eacfec562b7 100644
--- a/transition_machine/src/Classifier.cpp
+++ b/transition_machine/src/Classifier.cpp
@@ -125,9 +125,11 @@ void Classifier::initClassifier(Config & config)
   if(mlp.get())
     return;
 
-  if(!trainMode)
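+  // If a saved model already exists (decode mode, or a resumed training), load it.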
+  std::string modelFilename = ProgramParameters::expPath + name + ".model";
+  if (fileExists(modelFilename))
   {
-    mlp.reset(new MLP(ProgramParameters::expPath + name + ".model"));
+    mlp.reset(new MLP(modelFilename));
     Dict::initDicts(mlp->getModel(), name);
     return;
   }