diff --git a/neural_network/include/GeneticAlgorithm.hpp b/neural_network/include/GeneticAlgorithm.hpp
index 73ecb8887d7477ac979a3b5f2b1f198ee482b6a0..e52b327b2625e559f66db7a7b877b4775ffc39d1 100644
--- a/neural_network/include/GeneticAlgorithm.hpp
+++ b/neural_network/include/GeneticAlgorithm.hpp
@@ -42,6 +42,8 @@ class GeneticAlgorithm : public NeuralNetwork
     /// @param topology The desired topology for the mlp.
     /// @param nbOutputs The size of the mlp output layer.
     Individual(dynet::ParameterCollection & model, int nbInputs, const std::string & topology, int nbOutputs);
+    /// @brief Create a blank individual, meant to be populated later from a saved model.
+    Individual();
     void becomeChildOf(Individual * other);
     void becomeChildOf(Individual * mom, Individual * dad);
     void mutate(float probability);
diff --git a/neural_network/include/MLPBase.hpp b/neural_network/include/MLPBase.hpp
index fb32913f94c19e78baa3591e7dd75500bbb6270c..b8bafcd08115227c4fbb28503b27c29e01ffa292 100644
--- a/neural_network/include/MLPBase.hpp
+++ b/neural_network/include/MLPBase.hpp
@@ -73,7 +73,8 @@ class MLPBase
   /// The file must have been written by the function saveStruct.
   /// @param model The dynet model that will contain the loaded parameters.
   /// @param filename The file from which the structure will be read.
-  void loadStruct(dynet::ParameterCollection & model, const std::string & filename);
+  /// @param index The zero-based index of the structure to load from the file.
+  void loadStruct(dynet::ParameterCollection & model, const std::string & filename, unsigned int index);
   /// @brief Load and populate the model with parameters from a file.
   ///
   /// The file must have been written by the function saveParameters.
diff --git a/neural_network/src/GeneticAlgorithm.cpp b/neural_network/src/GeneticAlgorithm.cpp
index 50c1abbf5f38fc060d60ef35663af73cdba24de0..16d1131162724d2252dd8ffdcf183fd0659eaa4f 100644
--- a/neural_network/src/GeneticAlgorithm.cpp
+++ b/neural_network/src/GeneticAlgorithm.cpp
@@ -149,18 +149,33 @@ void GeneticAlgorithm::load(const std::string & filename)
     fprintf(stderr, "ERROR (%s) : file \'%s\' bad format. Aborting.\n", ERRINFO, filename.c_str());
     exit(1);
   }
+
   delete file;
 
   this->nbInputs = i;
   this->nbOutputs = o;
   this->topology = buffer;
 
-  init(nbInputs, topology, nbOutputs);
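+  // The topology string is expected to hold two space-separated fields,
+  // the first one being the number of individuals in the generation.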
+  auto tokens = split(topology, ' ');
+  if (tokens.size() != 2 || !isNum(tokens[0]))
+  {
+    fprintf(stderr, "ERROR (%s) : wrong topology \'%s\'. Aborting.\n", ERRINFO, topology.c_str());
+    exit(1);
+  }
+
+  int nbElems = std::stoi(tokens[0]);
 
-  for (auto & individual : generation)
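+  // Create blank individuals first, then populate each one from the file.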
+  for (int i = 0; i < nbElems; i++)
+    generation.emplace_back(new Individual());
+
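+  // Each individual reads the structure stored at its own index in the file.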
+  for (int i = 0; i < nbElems; i++)
   {
-    individual->mlp.loadStruct(model, filename);
-    individual->mlp.loadParameters(model, filename);
+    generation[i]->mlp.loadStruct(model, filename, i);
+    generation[i]->mlp.loadParameters(model, filename);
   }
 }
 
@@ -174,6 +189,15 @@ GeneticAlgorithm::Individual::Individual(dynet::ParameterCollection & model, int
   mutate(1.0);
 }
 
+GeneticAlgorithm::Individual::Individual()
+  : mlp("MLP_" + std::to_string(idCount))
+{
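+  // The mlp starts out empty; loadStruct() and loadParameters() populate it later.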
+  this->id = idCount++;
+  this->loss = 0.0;
+  this->value = 0.0;
+}
+
 float GeneticAlgorithm::loss2value(float loss)
 {
   return 1000.0 / loss;
diff --git a/neural_network/src/MLP.cpp b/neural_network/src/MLP.cpp
index 90eac755b64d3a0cfaf87d698d1f3c973f53f6ad..47dc9453a24af6dc2a0bba5d0f0380f13e7a29e3 100644
--- a/neural_network/src/MLP.cpp
+++ b/neural_network/src/MLP.cpp
@@ -13,7 +13,7 @@ MLP::MLP(const std::string & filename) : mlp("MLP")
   trainer.reset(createTrainer());
   initDynet();
 
-  mlp.loadStruct(model, filename);
+  mlp.loadStruct(model, filename, 0);
   mlp.loadParameters(model, filename);
 }
 
diff --git a/neural_network/src/MLPBase.cpp b/neural_network/src/MLPBase.cpp
index 41520295e343e1b719f5e739071beefbe17cbc9b..3f0aa3fc50e4f4fe8e427c3d32ded2906498888b 100644
--- a/neural_network/src/MLPBase.cpp
+++ b/neural_network/src/MLPBase.cpp
@@ -293,7 +293,7 @@ void MLPBase::saveParameters(const std::string & filename)
   }
 }
 
-void MLPBase::loadStruct(dynet::ParameterCollection & model, const std::string & filename)
+void MLPBase::loadStruct(dynet::ParameterCollection & model, const std::string & filename, unsigned int index)
 {
   File file(filename, "r");
   FILE * fd = file.getDescriptor();
@@ -303,17 +303,34 @@ void MLPBase::loadStruct(dynet::ParameterCollection & model, const std::string &
   int output;
   float dropout;
 
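+  // Skip the structures stored in the file before the requested index.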
+  for (unsigned int i = 0; i < index; i++)
+  {
+    while (fscanf(fd, "Layer : %d %d %s %f\n", &input, &output, activation, &dropout) != 4)
+      if (fscanf(fd, "%10[^\n]\n", activation) != 1)
+      {
+        fprintf(stderr, "ERROR (%s) : Unexpected end of file \'%s\'. Aborting.\n", ERRINFO, file.getName().c_str());
+        exit(1);
+      }
+
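+    // Consume the remaining "Layer" lines of the structure being skipped.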
+    do
+    {
+    } while (fscanf(fd, "Layer : %d %d %s %f\n", &input, &output, activation, &dropout) == 4);
+  }
+
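+  // Read the layers of the structure at the requested index.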
   while (fscanf(fd, "Layer : %d %d %s %f\n", &input, &output, activation, &dropout) != 4)
-    if (fscanf(fd, "%[^\n]\n", activation) != 1)
+    if (fscanf(fd, "%10[^\n]\n", activation) != 1)
     {
-      fprintf(stderr, "ERROR (%s) : Unexpected end of file \'%s\'. Aborting.\n", ERRINFO, filename.c_str());
+      fprintf(stderr, "ERROR (%s) : Unexpected end of file \'%s\'. Aborting.\n", ERRINFO, file.getName().c_str());
       exit(1);
     }
 
-    do
-    {
-      layers.emplace_back(input, output, dropout, NeuralNetwork::str2activation(activation));
-    } while (fscanf(fd, "Layer : %d %d %s %f\n", &input, &output, activation, &dropout) == 4);
+  do
+  {
+    layers.emplace_back(input, output, dropout, NeuralNetwork::str2activation(activation));
+  } while (fscanf(fd, "Layer : %d %d %s %f\n", &input, &output, activation, &dropout) == 4);
 
   checkLayersCompatibility();
 
@@ -334,6 +351,8 @@ void MLPBase::loadParameters(dynet::ParameterCollection & model, const std::stri
       parameters[i][1] = loader.load_param(model, prefix + std::to_string(i) + "_b");
     } catch(const std::runtime_error e)
     {
+      fprintf(stderr, "WARNING (%s) : Could not find parameter with key \'%s\' in the model. Ignore this if this model was trained with an older version of Macaon.\n", ERRINFO, (prefix+std::to_string(i) + "_W").c_str());
       prefix = "Layer_";
       parameters[i][0] = loader.load_param(model, prefix + std::to_string(i) + "_W");
       parameters[i][1] = loader.load_param(model, prefix + std::to_string(i) + "_b");