diff --git a/maca_common/include/util.hpp b/maca_common/include/util.hpp
index ff7dbf5a5572e6cbd325b1fb5785213d89fc621c..17574130facaf0471cff1f7741060cf38c6cdb2a 100644
--- a/maca_common/include/util.hpp
+++ b/maca_common/include/util.hpp
@@ -175,6 +175,20 @@ std::string getTime();
 /// @return True if the file exists
 bool fileExists(const std::string & s);
 
+/// @brief Return true with a given probability.
+///
+/// @param probability The probability (between 0.0 and 1.0) of returning true.
+///
+/// @return True with the given probability, false otherwise.
+bool choiceWithProbability(float probability);
+
+/// @brief Return a random number between -range and +range.
+///
+/// @param range The random value will lie between -range and +range.
+///
+/// @return Random number between -range and +range.
+float getRandomValueInRange(int range);
+
 /// @brief Macro giving information about an error.
 #define ERRINFO (getFilenameFromPath(std::string(__FILE__))+ ":l." + std::to_string(__LINE__)).c_str()
 
diff --git a/maca_common/src/Dict.cpp b/maca_common/src/Dict.cpp
index 3c1b3367bab8662c98f38488b2ac0f52122aba9c..d974ca927cb5579cce10ecb3714db8ad60967113 100644
--- a/maca_common/src/Dict.cpp
+++ b/maca_common/src/Dict.cpp
@@ -323,13 +323,7 @@ void Dict::initEmbeddingRandom(unsigned int index)
   int range = 1;
 
   for (auto & val : vec)
-  {
-    float sign = (rand() % 100000) >= 50000 ? 1.0 : -1.0; 
-    float result = ((rand() % range) + 1) * sign;
-    float decimal = (rand() % 100000) / 100000.0;
-    result += decimal;
-    val = result;
-  }
+    val = getRandomValueInRange(range);
 
   lookupParameter.initialize(index, vec);
 }
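
Note that the Dict.cpp change above is more than a refactor: the old loop always added the fractional part after applying the sign, so with range = 1 it produced values in [-1, 0) ∪ [1, 2), whereas getRandomValueInRange(1) covers [-1, 1] roughly uniformly. A standalone sketch (not part of the patch), re-implementing both formulas, makes the shift visible:

```cpp
#include <algorithm>
#include <cstdio>
#include <cstdlib>

int main()
{
  const int range = 1;
  float oldMin = 2.0f, oldMax = -2.0f, newMin = 2.0f, newMax = -2.0f;

  for (int n = 0; n < 100000; n++)
  {
    // Old formula : integer part forced to at least 1, fraction always added.
    float sign = (rand() % 100000) >= 50000 ? 1.0f : -1.0f;
    float oldVal = ((rand() % range) + 1) * sign + (rand() % 100000) / 100000.0f;

    // New formula : sign times a uniform fraction of range.
    const int maxValue = 1000000;
    float newSign = (rand() % 100000) >= 50000 ? 1.0f : -1.0f;
    float newVal = newSign * range * (rand() % (maxValue + 1)) / (float)maxValue;

    oldMin = std::min(oldMin, oldVal); oldMax = std::max(oldMax, oldVal);
    newMin = std::min(newMin, newVal); newMax = std::max(newMax, newVal);
  }

  // Expected : old values in [-1, 0) or [1, 2) ; new values spanning [-1, 1].
  printf("old : [%f, %f]\nnew : [%f, %f]\n", oldMin, oldMax, newMin, newMax);
  return 0;
}
```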
diff --git a/maca_common/src/File.cpp b/maca_common/src/File.cpp
index 3f8eaf2b60206b86aa241d8629016761aa803dd0..42d7014c1e12cf0fbd61fee93c77189a8e2beeae 100644
--- a/maca_common/src/File.cpp
+++ b/maca_common/src/File.cpp
@@ -30,7 +30,7 @@ File::File(const std::string & filename, const std::string & mode)
 	this->filename = filename;
 	endHasBeenReached = false;
 
-	if (mode != "r" && mode != "w")
+	if (mode != "r" && mode != "w" && mode != "a")
 	{
 		fprintf(stderr, "ERROR (%s) : \"%s\" is an invalid mode when opening a file\n", ERRINFO, mode.c_str());
 
diff --git a/maca_common/src/util.cpp b/maca_common/src/util.cpp
index a65d8a177b0466dc62e198a26d132d81f43530af..e5fbd86d872ee42bc4f0afd4b4fa149dfd40ad03 100644
--- a/maca_common/src/util.cpp
+++ b/maca_common/src/util.cpp
@@ -399,3 +399,27 @@ bool fileExists(const std::string & s)
   return true;
 }
 
+float getRandomValueInRange(int range)
+{
+  int maxValue = 1000000;
+
+  float sign = choiceWithProbability(0.5) ? 1.0 : -1.0;
+  float result = sign*range*(rand() % (maxValue+1)) / maxValue;
+
+  return result;
+}
+
+bool choiceWithProbability(float probability)
+{
+  if (probability < 0 || probability > 1.0)
+  {
+    fprintf(stderr, "ERROR (%s) : invalid probability \'%f\'. Aborting.\n", ERRINFO, probability);
+    exit(1);
+  }
+
+  int maxVal = 100000;
+  int threshold = maxVal * probability;
+
+  return (rand() % maxVal) < threshold;
+}
+
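
Two caveats on choiceWithProbability: probabilities are quantised to steps of 1/100000, and rand() % maxVal carries the usual slight modulo bias. Both are negligible for the values used in this patch (0.01, 0.5, 1.0). A minimal empirical check, assuming it is linked against maca_common:

```cpp
#include <cstdio>
#include <cstdlib>
#include <initializer_list>

// Declared in maca_common/include/util.hpp.
bool choiceWithProbability(float probability);

int main()
{
  srand(42);

  for (float p : {0.01f, 0.5f, 0.9f})
  {
    const int trials = 1000000;
    int hits = 0;

    for (int n = 0; n < trials; n++)
      if (choiceWithProbability(p))
        hits++;

    // The observed frequency should sit close to p.
    printf("p = %.2f -> observed %.4f\n", p, hits / (float)trials);
  }

  return 0;
}
```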
diff --git a/neural_network/include/GeneticAlgorithm.hpp b/neural_network/include/GeneticAlgorithm.hpp
index 8b1df471c83ac2d096b554fd7e7ce212534e0fa9..73ecb8887d7477ac979a3b5f2b1f198ee482b6a0 100644
--- a/neural_network/include/GeneticAlgorithm.hpp
+++ b/neural_network/include/GeneticAlgorithm.hpp
@@ -22,6 +22,8 @@ class GeneticAlgorithm : public NeuralNetwork
   /// It can be evaluated against a particular metric, can mutate and reproduce with another of its kind.
   struct Individual
   {
+    /// @brief A counter for giving unique ids to objects of this struct.
+    static int idCount;
     /// @brief The neural network corresponding to this Individual.
     MLPBase mlp;
     /// @brief The value of this Individual.
@@ -29,6 +31,8 @@ class GeneticAlgorithm : public NeuralNetwork
     /// This metric is used to determine which are the best Individuals of the generation.
     /// For example it can be the inverse of the loss value of its MLP against the dev dataset.
     float value;
+    /// @brief The loss of the MLP of this Individual.
+    float loss;
     /// @brief Unique identifier for this individual.
     int id;
     /// @brief Create a new Individual from a certain topology.
@@ -39,10 +43,18 @@ class GeneticAlgorithm : public NeuralNetwork
     /// @param nbOutputs The size of the mlp output layer.
     Individual(dynet::ParameterCollection & model, int nbInputs, const std::string & topology, int nbOutputs);
     void becomeChildOf(Individual * other);
+    void becomeChildOf(Individual * mom, Individual * dad);
+    void mutate(float probability);
   };
 
   /// @brief The current generation.
   std::vector< std::unique_ptr<Individual> > generation;
+  /// @brief The topology of the GeneticAlgorithm
+  std::string topology;
+  /// @brief The input layer size of the Individual's MLP.
+  int nbInputs;
+  /// @brief The output layer size of the Individual's MLP.
+  int nbOutputs;
 
   private :
 
diff --git a/neural_network/include/MLPBase.hpp b/neural_network/include/MLPBase.hpp
index 8021f6d0efcfae77523f1681f44323603b808ae5..fb32913f94c19e78baa3591e7dd75500bbb6270c 100644
--- a/neural_network/include/MLPBase.hpp
+++ b/neural_network/include/MLPBase.hpp
@@ -20,6 +20,8 @@ class MLPBase
   public :
 
   using Layer = NeuralNetwork::Layer;
+  /// @brief The name of this MLP.
+  std::string name;
   /// @brief The Layers of the MLP.
   std::vector<Layer> layers;
   /// @brief The parameters corresponding to the layers of the MLP.
@@ -99,7 +101,9 @@ class MLPBase
   /// @param nbOutputs The size of the output layer of the MLP.
   void init(dynet::ParameterCollection & model, int nbInputs, const std::string & topology, int nbOutputs);
   /// @brief Construct a new MLP for training.
-  MLPBase();
+  ///
+  /// @param name The name of this MLP.
+  MLPBase(std::string name);
   /// @brief Give a score to each possible class, given an input.
   ///
   /// @param fd The input to use.
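
The name matters because several MLPs can now be serialised into one model file: it prefixes the dynet parameter keys, so two networks no longer clash on "Layer_0_W". A rough sketch of the intent (the topology string is a placeholder, not a claim about the real format):

```cpp
#include "MLPBase.hpp"

void saveTwoNetworks(const std::string & filename)
{
  dynet::ParameterCollection model;

  MLPBase a("MLP_0");
  MLPBase b("MLP_1");

  a.init(model, 10, "25 RELU 0.3", 4);
  b.init(model, 10, "25 RELU 0.3", 4);

  // Both networks land in the same file : saveStruct appends ("a" mode) and
  // the parameter keys are "MLP_0_Layer_i_{W,b}" vs "MLP_1_Layer_i_{W,b}".
  a.saveStruct(filename);
  a.saveParameters(filename);
  b.saveStruct(filename);
  b.saveParameters(filename);
}
```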
diff --git a/neural_network/src/GeneticAlgorithm.cpp b/neural_network/src/GeneticAlgorithm.cpp
index 347027e053b35139cea581dc74b5bb1d1a00c8d0..50c1abbf5f38fc060d60ef35663af73cdba24de0 100644
--- a/neural_network/src/GeneticAlgorithm.cpp
+++ b/neural_network/src/GeneticAlgorithm.cpp
@@ -2,6 +2,8 @@
 #include "ProgramParameters.hpp"
 #include "util.hpp"
 
+int GeneticAlgorithm::Individual::idCount = 0;
+
 GeneticAlgorithm::GeneticAlgorithm()
 {
   randomSeed = ProgramParameters::seed;
@@ -18,6 +20,10 @@ GeneticAlgorithm::GeneticAlgorithm(const std::string & filename)
 
 void GeneticAlgorithm::init(int nbInputs, const std::string & topology, int nbOutputs)
 {
+  this->nbInputs = nbInputs;
+  this->nbOutputs = nbOutputs;
+  this->topology = topology;
+
   auto splited = split(topology, ' ');
   if (splited.size() != 2 || !isNum(splited[0]))
   {
@@ -29,8 +35,6 @@ void GeneticAlgorithm::init(int nbInputs, const std::string & topology, int nbOu
 
   for (int i = 0; i < nbElems; i++)
     generation.emplace_back(new Individual(model, nbInputs, splited[1], nbOutputs));
-
-  fprintf(stderr, "Init is done !\n");
 }
 
 std::vector<float> GeneticAlgorithm::predict(FeatureModel::FeatureDescription & fd)
@@ -47,7 +51,8 @@ float GeneticAlgorithm::update(FeatureModel::FeatureDescription & fd, int gold)
     float loss = individual->mlp.update(fd, gold);
     if (loss != 0.0)
     {
-      individual->value = loss2value(loss);
+      individual->loss = loss / ProgramParameters::batchSize;
+      individual->value = loss2value(individual->loss);
       haveBeenUpdated = true;
     }
   }
@@ -61,20 +66,64 @@ float GeneticAlgorithm::update(FeatureModel::FeatureDescription & fd, int gold)
     return a->value > b->value;
   });
 
-  fprintf(stderr, "-----------------\n");
-  for (auto & individual : generation)
-    fprintf(stderr, "%d\t%f\n", individual->id, individual->value);
-  fprintf(stderr, "-----------------\n");
+  fprintf(stderr, "Best : %3d with value %.2f Worst : %3d with value %.2f\n", generation[0]->id, generation[0]->value, generation.back()->id, generation.back()->value);
+
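+  // Select a quarter of the population as reproductors, drawn with
+  // rank-weighted probabilities; the quarter is rounded up to an even count
+  // so that the mom/dad pairing below never picks the same individual twice.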
+  unsigned int quarter = generation.size() / 4;
+  if (quarter%2)
+    quarter++;
+  std::vector<unsigned int> reproductors;
+  std::vector<unsigned int> candidates;
+  for (unsigned int i = 0; i < generation.size(); i++)
+    candidates.push_back(i);
+
+  while (reproductors.size() < quarter)
+    for (unsigned int i = 0; i < candidates.size(); i++)
+    {
+      int parts = candidates.size() * ((candidates.size()+1) / 2.0);
+      float probability = (candidates.size()-i) / (1.0*parts);
+
+      if (choiceWithProbability(probability))
+      {
+        reproductors.push_back(candidates[i]);
+        candidates.erase(candidates.begin() + i);
+        i--;
+      }
+    }
+
+  while (reproductors.size() != quarter)
+    reproductors.pop_back();
 
-  for (unsigned int i = 1; i < generation.size(); i++)
+  // Reproduction
+  for (unsigned int i = 2*quarter; i < 3*quarter; i++)
   {
-    generation[i]->becomeChildOf(generation[0].get());
+    int momIndex = reproductors.size()-1-(i-2*quarter);
+    int dadIndex = i-2*quarter;
+
+    generation[i]->becomeChildOf(generation[reproductors[momIndex]].get(), generation[reproductors[dadIndex]].get());
   }
+
+  // Mutation
+  for (unsigned int i = 1*quarter; i < 2*quarter; i++)
+    generation[i]->mutate(0.01);
+
+  // Death and replace
+  for (unsigned int i = 3*quarter; i < generation.size(); i++)
+    generation[i]->mutate(1.0);
+
+  return generation[0]->loss;
 }
 
 void GeneticAlgorithm::save(const std::string & filename)
 {
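+  // Truncate any previous model file and write the header line; each
+  // individual's MLP struct and parameters are then appended below.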
+  File * file = new File(filename, "w");
+  fprintf(file->getDescriptor(), "%d %d %s\n", nbInputs, nbOutputs, topology.c_str());
+  delete file;
 
+  for (auto & individual : generation)
+  {
+    individual->mlp.saveStruct(filename);
+    individual->mlp.saveParameters(filename);
+  }
 }
 
 void GeneticAlgorithm::printTopology(FILE * output)
@@ -92,14 +141,37 @@ void GeneticAlgorithm::printTopology(FILE * output)
 
 void GeneticAlgorithm::load(const std::string & filename)
 {
+  File * file = new File(filename, "r");
+  int i, o;
+  char buffer[1024];
+  if (fscanf(file->getDescriptor(), "%d %d %1023[^\n]\n", &i, &o, buffer) != 3)
+  {
+    fprintf(stderr, "ERROR (%s) : file \'%s\' bad format. Aborting.\n", ERRINFO, filename.c_str());
+    exit(1);
+  }
+  delete file;
+
+  this->nbInputs = i;
+  this->nbOutputs = o;
+  this->topology = buffer;
+
+  init(nbInputs, topology, nbOutputs);
 
+  for (auto & individual : generation)
+  {
+    individual->mlp.loadStruct(model, filename);
+    individual->mlp.loadParameters(model, filename);
+  }
 }
 
-GeneticAlgorithm::Individual::Individual(dynet::ParameterCollection & model, int nbInputs, const std::string & topology, int nbOutputs)
+GeneticAlgorithm::Individual::Individual(dynet::ParameterCollection & model, int nbInputs, const std::string & topology, int nbOutputs) : mlp("MLP_" + std::to_string(idCount))
 {
-  static int id = 0;
-  this->id = id++;
+  this->id = idCount++;
+  this->loss = 0.0;
+  this->value = 0.0;
   mlp.init(model, nbInputs, topology, nbOutputs);
+  // mutate with probability 1.0 = random init
+  mutate(1.0);
 }
 
 float GeneticAlgorithm::loss2value(float loss)
@@ -128,8 +200,56 @@ void GeneticAlgorithm::Individual::becomeChildOf(Individual * other)
       unsigned int nbValues = thisParameter.values()->d.size();
 
       for (unsigned int k = 0; k < nbValues; k++)
-        if (rand() % 1000 >= 500)
+        if (choiceWithProbability(0.5))
           thisValues[k] = otherValues[k];
     }
 }
 
+void GeneticAlgorithm::Individual::becomeChildOf(Individual * mom, Individual * dad)
+{
+  auto & thisParameters = mlp.parameters;
+  auto & momParameters = mom->mlp.parameters;
+  auto & dadParameters = dad->mlp.parameters;
+
+  if (thisParameters.size() != momParameters.size() || thisParameters.size() != dadParameters.size())
+  {
+    fprintf(stderr, "ERROR (%s) : The three individuals are not compatibles. Sizes %lu and %lu and %lu. Aborting.\n", ERRINFO, thisParameters.size(), momParameters.size(), dadParameters.size());
+    exit(1);
+  }
+
+  for (unsigned int i = 0; i < thisParameters.size(); i++)
+    for (unsigned int j = 0; j < thisParameters[i].size(); j++)
+    {
+      auto & thisParameter = thisParameters[i][j];
+      auto & momParameter = momParameters[i][j];
+      auto & dadParameter = dadParameters[i][j];
+      float * thisValues = thisParameter.values()->v;
+      float * momValues = momParameter.values()->v;
+      float * dadValues = dadParameter.values()->v;
+      unsigned int nbValues = thisParameter.values()->d.size();
+
+      for (unsigned int k = 0; k < nbValues; k++)
+        if (choiceWithProbability(0.5))
+          thisValues[k] = momValues[k];
+        else
+          thisValues[k] = dadValues[k];
+    }
+}
+
+void GeneticAlgorithm::Individual::mutate(float probability)
+{
+  auto & thisParameters = mlp.parameters;
+
+  for (unsigned int i = 0; i < thisParameters.size(); i++)
+    for (unsigned int j = 0; j < thisParameters[i].size(); j++)
+    {
+      auto & thisParameter = thisParameters[i][j];
+      float * thisValues = thisParameter.values()->v;
+      unsigned int nbValues = thisParameter.values()->d.size();
+
+      for (unsigned int k = 0; k < nbValues; k++)
+        if (choiceWithProbability(probability))
+          thisValues[k] = getRandomValueInRange(1);
+    }
+}
+
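
After the sort, update() processes the generation in quarters: the best quarter survives untouched and supplies the parents, the second quarter is lightly mutated (p = 0.01), the third is overwritten by crossover children, and the last is fully re-randomised (mutate with p = 1.0). Parent selection is rank-weighted: with n remaining candidates, rank i (0 being the best) is drawn with probability (n - i) / (n(n+1)/2). A standalone check that these weights decrease with rank and sum to 1:

```cpp
#include <cstdio>

int main()
{
  const int n = 12;                        // remaining candidates
  const float parts = n * ((n + 1) / 2.0); // triangular number n(n+1)/2

  float total = 0.0;
  for (int i = 0; i < n; i++)
  {
    float probability = (n - i) / parts;
    total += probability;
    printf("rank %2d : %.4f\n", i, probability);
  }

  printf("sum : %.4f\n", total); // 1.0000
  return 0;
}
```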
diff --git a/neural_network/src/MLP.cpp b/neural_network/src/MLP.cpp
index 74e1113e9e975b2b5faef8ee9c8c24457f450b04..90eac755b64d3a0cfaf87d698d1f3c973f53f6ad 100644
--- a/neural_network/src/MLP.cpp
+++ b/neural_network/src/MLP.cpp
@@ -1,13 +1,13 @@
 #include "MLP.hpp"
 
-MLP::MLP()
+MLP::MLP() : mlp("MLP")
 {
   randomSeed = ProgramParameters::seed;
   trainer.reset(createTrainer());
   initDynet();
 }
 
-MLP::MLP(const std::string & filename)
+MLP::MLP(const std::string & filename) : mlp("MLP")
 {
   randomSeed = ProgramParameters::seed;
   trainer.reset(createTrainer());
@@ -58,6 +58,8 @@ float MLP::update(FeatureModel::FeatureDescription & fd, int gold)
 
 void MLP::save(const std::string & filename)
 {
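+  // Open in "w" and close immediately to truncate any previous model file,
+  // since saveStruct and saveParameters now append.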
+  File * file = new File(filename, "w");
+  delete file;
   mlp.saveStruct(filename);
   mlp.saveParameters(filename);
 }
diff --git a/neural_network/src/MLPBase.cpp b/neural_network/src/MLPBase.cpp
index 5b9355b7e2f92c1edcb9cb4cec9cc9a24a9d9b85..41520295e343e1b719f5e739071beefbe17cbc9b 100644
--- a/neural_network/src/MLPBase.cpp
+++ b/neural_network/src/MLPBase.cpp
@@ -1,7 +1,8 @@
 #include "MLPBase.hpp"
 
-MLPBase::MLPBase()
+MLPBase::MLPBase(std::string name)
 {
+  this->name = name;
   dropoutActive = true;
 }
 
@@ -271,7 +272,7 @@ void MLPBase::printParameters(FILE * output)
 
 void MLPBase::saveStruct(const std::string & filename)
 {
-  File file(filename, "w");
+  File file(filename, "a");
   FILE * fd = file.getDescriptor();
 
   for (auto & layer : layers)
@@ -283,7 +284,7 @@ void MLPBase::saveStruct(const std::string & filename)
 void MLPBase::saveParameters(const std::string & filename)
 {
   dynet::TextFileSaver s(filename, true);
-  std::string prefix("Layer_");
+  std::string prefix(name + "_Layer_");
 
   for(unsigned int i = 0; i < parameters.size(); i++)
   {
@@ -302,8 +303,17 @@ void MLPBase::loadStruct(dynet::ParameterCollection & model, const std::string &
   int output;
   float dropout;
 
-  while (fscanf(fd, "Layer : %d %d %s %f\n", &input, &output, activation, &dropout) == 4)
-    layers.emplace_back(input, output, dropout, NeuralNetwork::str2activation(activation));
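+  // Skip any non-layer lines (e.g. the GeneticAlgorithm header) until the
+  // first "Layer :" line matches.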
+  while (fscanf(fd, "Layer : %d %d %s %f\n", &input, &output, activation, &dropout) != 4)
+    if (fscanf(fd, "%[^\n]\n", activation) != 1)
+    {
+      fprintf(stderr, "ERROR (%s) : Unexpected end of file \'%s\'. Aborting.\n", ERRINFO, filename.c_str());
+      exit(1);
+    }
+
+  do
+  {
+    layers.emplace_back(input, output, dropout, NeuralNetwork::str2activation(activation));
+  } while (fscanf(fd, "Layer : %d %d %s %f\n", &input, &output, activation, &dropout) == 4);
 
   checkLayersCompatibility();
 
@@ -314,12 +324,20 @@ void MLPBase::loadStruct(dynet::ParameterCollection & model, const std::string &
 void MLPBase::loadParameters(dynet::ParameterCollection & model, const std::string & filename)
 {
   dynet::TextFileLoader loader(filename);
-  std::string prefix("Layer_");
+  std::string prefix(name + "_Layer_");
 
   for(unsigned int i = 0; i < parameters.size(); i++)
   {
-    parameters[i][0] = loader.load_param(model, prefix + std::to_string(i) + "_W");
-    parameters[i][1] = loader.load_param(model, prefix + std::to_string(i) + "_b");
+    try
+    {
+      parameters[i][0] = loader.load_param(model, prefix + std::to_string(i) + "_W");
+      parameters[i][1] = loader.load_param(model, prefix + std::to_string(i) + "_b");
+    } catch (const std::runtime_error & e)
+    {
+      prefix = "Layer_";
+      parameters[i][0] = loader.load_param(model, prefix + std::to_string(i) + "_W");
+      parameters[i][1] = loader.load_param(model, prefix + std::to_string(i) + "_b");
+    }
   }
 }
 
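
With saveStruct in append mode and the name prefix in place, a saved GeneticAlgorithm model file is laid out roughly as follows (the dynet parameter-section syntax is approximate); this is why loadStruct above skips the header line and stops at the first non-"Layer :" line:

```
<nbInputs> <nbOutputs> <topology>            header written by GeneticAlgorithm::save()
Layer : <in> <out> <activation> <dropout>    MLP_0 structure (saveStruct, mode "a")
#Parameter# MLP_0_Layer_0_W ...              MLP_0 weights (dynet TextFileSaver, append)
#Parameter# MLP_0_Layer_0_b ...
Layer : <in> <out> <activation> <dropout>    MLP_1 structure
#Parameter# MLP_1_Layer_0_W ...
...
```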
diff --git a/transition_machine/include/Classifier.hpp b/transition_machine/include/Classifier.hpp
index 2b523d875d739fac8b731b4a860cb01f42035ab8..958cf169d6cbbf1b496b5be8b4e12dca029ba89d 100644
--- a/transition_machine/include/Classifier.hpp
+++ b/transition_machine/include/Classifier.hpp
@@ -61,6 +61,20 @@ class Classifier
   /// For Classifier of type Information, the Oracle is used in train mode and decode mode too, it is simply a deterministic function that gives the correct Action given a Configuration.
   Oracle * oracle;
 
+  private :
+
+  /// @brief Create the correct type of NeuralNetwork.
+  ///
+  /// @return A pointer to the newly created NeuralNetwork.
+  NeuralNetwork * createNeuralNetwork();
+
+  /// @brief Create the correct type of NeuralNetwork, from a file.
+  ///
+  /// @param modelFilename The name of the file containing the NeuralNetwork description.
+  ///
+  /// @return A pointer to the newly created NeuralNetwork.
+  NeuralNetwork * createNeuralNetwork(const std::string & modelFilename);
+
   public :
 
   /// @brief Return how many errors will an action introduce.
diff --git a/transition_machine/src/Classifier.cpp b/transition_machine/src/Classifier.cpp
index ea4fdea3a720f31d680052cf34b3c7d60b87815e..deb4dbdc610559dcaed4b7696cd034c6ed98302b 100644
--- a/transition_machine/src/Classifier.cpp
+++ b/transition_machine/src/Classifier.cpp
@@ -130,12 +130,12 @@ void Classifier::initClassifier(Config & config)
   std::string modelFilename = ProgramParameters::expPath + name + ".model";
   if (fileExists(modelFilename))
   {
-    nn.reset(new MLP(modelFilename));
+    nn.reset(createNeuralNetwork(modelFilename));
     Dict::initDicts(nn->getModel(), name);
     return;
   }
 
-  nn.reset(new GeneticAlgorithm());
+  nn.reset(createNeuralNetwork());
 
   Dict::initDicts(nn->getModel(), name);
 
@@ -293,3 +293,23 @@ float Classifier::computeEntropy(WeightedActions & wa)
   return entropy;
 }
 
+NeuralNetwork * Classifier::createNeuralNetwork()
+{
+  auto splited = split(topology, ' ');
+
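+  // Per GeneticAlgorithm::init, a genetic topology has exactly two
+  // space-separated fields : "nbIndividuals mlpTopology".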
+  if (splited.size() == 2)
+    return new GeneticAlgorithm();
+
+  return new MLP();
+}
+
+NeuralNetwork * Classifier::createNeuralNetwork(const std::string & modelFilename)
+{
+  auto splited = split(topology, ' ');
+
+  if (splited.size() == 2)
+    return new GeneticAlgorithm(modelFilename);
+
+  return new MLP(modelFilename);
+}
+
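
The dispatch only checks the field count, while GeneticAlgorithm::init additionally requires isNum(splited[0]); an MLP topology that happened to contain exactly one space would therefore be routed to the genetic algorithm and abort in init. A minimal sketch of the heuristic, with a stand-in split() and illustrative topology strings:

```cpp
#include <cstdio>
#include <initializer_list>
#include <sstream>
#include <string>
#include <vector>

// Stand-in for the codebase's split() : cut on a single character.
std::vector<std::string> split(const std::string & s, char delim)
{
  std::vector<std::string> parts;
  std::stringstream ss(s);
  std::string part;

  while (std::getline(ss, part, delim))
    parts.push_back(part);

  return parts;
}

int main()
{
  // "10 (25,RELU,0.3)" -> 2 fields -> GeneticAlgorithm of 10 individuals.
  // "(25,RELU,0.3)"    -> 1 field  -> plain MLP.
  for (const std::string topology : {"10 (25,RELU,0.3)", "(25,RELU,0.3)"})
  {
    auto splited = split(topology, ' ');
    printf("\"%s\" -> %s\n", topology.c_str(),
           splited.size() == 2 ? "GeneticAlgorithm" : "MLP");
  }

  return 0;
}
```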