diff --git a/neural_network/src/GeneticAlgorithm.cpp b/neural_network/src/GeneticAlgorithm.cpp
index 036a438c28ff09af303ab5ac2a08f1284492816d..a0c971679f35c2ef9402a1e643ee72d3093ba745 100644
--- a/neural_network/src/GeneticAlgorithm.cpp
+++ b/neural_network/src/GeneticAlgorithm.cpp
@@ -43,7 +43,7 @@ std::vector<float> GeneticAlgorithm::predict(FeatureModel::FeatureDescription &
 
   if (toAsk < 0 || toAsk > (int)generation.size())
   {
-    fprintf(stderr, "ERROR (%s) : trying to save \'%d\' individuals out of a population of \'%lu\'. Aborting.\n", ERRINFO, toAsk, generation.size());
+    fprintf(stderr, "ERROR (%s) : trying to use \'%d\' individuals out of a population of \'%zu\'. Aborting.\n", ERRINFO, toAsk, generation.size());
     exit(1);
   }
 
@@ -144,7 +144,7 @@ void GeneticAlgorithm::save(const std::string & filename)
 
   File * file = new File(filename, "w");
   for (int i = 0; i < toSave; i++)
-    fprintf(file->getDescriptor(), "%u\n", generation[i]->id);
+    fprintf(file->getDescriptor(), "#SAVED_ID# # {1,1} 0 %u\n", generation[i]->id);
   delete file;
   
   for (int i = 0; i < toSave; i++)
@@ -172,7 +172,7 @@ void GeneticAlgorithm::load(const std::string & filename)
   std::vector<int> ids;
   File * file = new File(filename, "r");
   unsigned int id;
-  while (fscanf(file->getDescriptor(), "%u\n", &id) == 1)
+  while (fscanf(file->getDescriptor(), "#SAVED_ID# # {1,1} 0 %u\n", &id) == 1)
     ids.emplace_back(id);
   delete file;
 
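Why the save() and load() format strings above must stay byte-for-byte identical: fscanf() matches every non-whitespace character of its format literally, and treats each space as "skip any run of whitespace", so load() only accepts lines carrying the exact '#SAVED_ID# # {1,1} 0' prefix that save() wrote. A minimal standalone sketch of that round-trip, assuming an illustrative filename (ids.txt) and sample ids; only the format string comes from the hunks above:

  #include <cstdio>

  int main()
  {
    // Write one id per line behind the literal marker, as save() does above.
    FILE * out = fopen("ids.txt", "w");
    unsigned int ids[] = {3, 1, 4};
    for (unsigned int id : ids)
      fprintf(out, "#SAVED_ID# # {1,1} 0 %u\n", id);
    fclose(out);

    // Read them back, as load() does: every marker character must match
    // literally, and each ' ' in the format skips any run of whitespace.
    FILE * in = fopen("ids.txt", "r");
    unsigned int id;
    while (fscanf(in, "#SAVED_ID# # {1,1} 0 %u\n", &id) == 1)
      printf("loaded id %u\n", id);
    fclose(in);

    return 0;
  }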
diff --git a/neural_network/src/MLPBase.cpp b/neural_network/src/MLPBase.cpp
index 3f0aa3fc50e4f4fe8e427c3d32ded2906498888b..8fd98e450db028d18a69d785469409af1690d8ac 100644
--- a/neural_network/src/MLPBase.cpp
+++ b/neural_network/src/MLPBase.cpp
@@ -277,7 +277,7 @@ void MLPBase::saveStruct(const std::string & filename)
 
   for (auto & layer : layers)
   {
-    fprintf(fd, "Layer : %d %d %s %.2f\n", layer.input_dim, layer.output_dim, NeuralNetwork::activation2str(layer.activation).c_str(), layer.dropout_rate);
+    fprintf(fd, "#TOPOLOGY# # {1,1} 0 Layer : %d %d %s %.2f\n", layer.input_dim, layer.output_dim, NeuralNetwork::activation2str(layer.activation).c_str(), layer.dropout_rate);
   }
 }
 
@@ -305,7 +305,7 @@ void MLPBase::loadStruct(dynet::ParameterCollection & model, const std::string &
 
   for (unsigned int i = 0; i < index; i++)
   {
-    while (fscanf(fd, "Layer : %d %d %s %f\n", &input, &output, activation, &dropout) != 4)
+    while (fscanf(fd, "#TOPOLOGY# # {1,1} 0 Layer : %d %d %s %f\n", &input, &output, activation, &dropout) != 4)
       if (fscanf(fd, "%10[^\n]\n", activation) != 1)
       {
         fprintf(stderr, "ERROR (%s) : Unexpected end of file \'%s\'. Aborting.\n", ERRINFO, file.getName().c_str());
@@ -314,10 +314,10 @@ void MLPBase::loadStruct(dynet::ParameterCollection & model, const std::string &
 
       do
       {
-      } while (fscanf(fd, "Layer : %d %d %s %f\n", &input, &output, activation, &dropout) == 4);
+      } while (fscanf(fd, "#TOPOLOGY# # {1,1} 0 Layer : %d %d %s %f\n", &input, &output, activation, &dropout) == 4);
   }
 
-  while (fscanf(fd, "Layer : %d %d %s %f\n", &input, &output, activation, &dropout) != 4)
+  while (fscanf(fd, "#TOPOLOGY# # {1,1} 0 Layer : %d %d %s %f\n", &input, &output, activation, &dropout) != 4)
     if (fscanf(fd, "%10[^\n]\n", activation) != 1)
     {
       fprintf(stderr, "ERROR (%s) : Unexpected end of file \'%s\'. Aborting.\n", ERRINFO, file.getName().c_str());
@@ -327,7 +327,7 @@ void MLPBase::loadStruct(dynet::ParameterCollection & model, const std::string &
   do
   {
     layers.emplace_back(input, output, dropout, NeuralNetwork::str2activation(activation));
-  } while (fscanf(fd, "Layer : %d %d %s %f\n", &input, &output, activation, &dropout) == 4);
+  } while (fscanf(fd, "#TOPOLOGY# # {1,1} 0 Layer : %d %d %s %f\n", &input, &output, activation, &dropout) == 4);
 
   checkLayersCompatibility();
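
The loadStruct() hunks above all touch the same resynchronization idiom: try to parse the current line as a marked Layer record; on failure, consume the offending line in chunks of at most 10 characters via "%10[^\n]\n" and retry; once one record matches, keep reading records until one fails. A minimal standalone sketch of that idiom under the same '#TOPOLOGY#' marker, assuming an illustrative filename (topology.txt), sample layer values, and a 128-byte buffer; only the format strings come from the hunks above:

  #include <cstdio>
  #include <cstdlib>

  int main()
  {
    // Build a file mixing an unmarked line with marked Layer records,
    // the situation the skip-and-retry loops are written for.
    FILE * out = fopen("topology.txt", "w");
    fprintf(out, "some unrelated header line\n");
    fprintf(out, "#TOPOLOGY# # {1,1} 0 Layer : %d %d %s %.2f\n", 100, 50, "RELU", 0.30f);
    fprintf(out, "#TOPOLOGY# # {1,1} 0 Layer : %d %d %s %.2f\n", 50, 10, "TANH", 0.00f);
    fclose(out);

    FILE * fd = fopen("topology.txt", "r");
    int input, output;
    float dropout;
    char activation[128];

    // Resynchronize: while the line at hand is not a marked record, eat up
    // to 10 characters of it and retry; a read failure here means end of file.
    while (fscanf(fd, "#TOPOLOGY# # {1,1} 0 Layer : %d %d %s %f\n", &input, &output, activation, &dropout) != 4)
      if (fscanf(fd, "%10[^\n]\n", activation) != 1)
      {
        fprintf(stderr, "Unexpected end of file. Aborting.\n");
        exit(1);
      }

    // Then consume every consecutive record that still matches.
    do
    {
      printf("layer %d -> %d (%s, dropout %.2f)\n", input, output, activation, dropout);
    } while (fscanf(fd, "#TOPOLOGY# # {1,1} 0 Layer : %d %d %s %f\n", &input, &output, activation, &dropout) == 4);

    fclose(fd);
    return 0;
  }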