diff --git a/neural_network/include/GeneticAlgorithm.hpp b/neural_network/include/GeneticAlgorithm.hpp
index caf3e6eb1d35260b2052ed6436ab531b7cdf3573..8b1df471c83ac2d096b554fd7e7ce212534e0fa9 100644
--- a/neural_network/include/GeneticAlgorithm.hpp
+++ b/neural_network/include/GeneticAlgorithm.hpp
@@ -8,40 +8,54 @@
 #include <dynet/expr.h>
 #include <dynet/io.h>
 #include <string>
+#include <memory>
 #include "NeuralNetwork.hpp"
 #include "FeatureModel.hpp"
+#include "MLPBase.hpp"
 
 class GeneticAlgorithm : public NeuralNetwork
 {
   private :
 
-  /// @brief An individual is a MLP
-  class Individual
+  /// @brief An Individual is a part of the current population.
+  ///
+  /// It can be evaluated against a particular metric, and can mutate and reproduce with another of its kind.
+  struct Individual
   {
-    private :
-
-    /// @brief The Layers of the MLP.
-    std::vector<Layer> layers;
-    /// @brief The parameters corresponding to the layers of the MLP.
-    std::vector< std::vector<dynet::Parameter> > parameters;
-
-    public :
-
-    /// @brief Create a new individual for the population.
+    /// @brief The neural network corresponding to this Individual.
+    MLPBase mlp;
+    /// @brief The value of this Individual.
     ///
-    /// @param topology The topology the underlying MLP will take.
-    /// @param model The Collection of parameters of the GeneticAlgorithm.
-    /// @param nbInputs The size of the input layer of the MLP.
-    /// @param nbOutputs The size of the output layer of the MLP.
-    Individual(const std::string & topology, dynet::ParameterCollection & model, int nbInputs, int nbOutputs);
+    /// This metric is used to determine which are the best Individuals of the generation.
+    /// For example, it can be the inverse of the loss of its MLP on the dev dataset.
+    float value;
+    /// @brief Unique identifier for this Individual.
+    int id;
+    /// @brief Create a new Individual from a certain topology.
+    ///
+    /// @param model The dynet model that will contain the mlp parameters.
+    /// @param nbInputs The size of the mlp input layer.
+    /// @param topology The desired topology for the mlp.
+    /// @param nbOutputs The size of the mlp output layer.
+    Individual(dynet::ParameterCollection & model, int nbInputs, const std::string & topology, int nbOutputs);
+    /// @brief Turn this Individual into a child of itself and another Individual.
+    ///
+    /// For every weight of the underlying MLP, there is a 50% chance that it
+    /// is replaced by the corresponding weight of the other parent (uniform crossover).
+    ///
+    /// @param other The other parent.
+    void becomeChildOf(Individual * other);
   };
 
+  /// @brief The current generation.
+  std::vector< std::unique_ptr<Individual> > generation;
+
   private :
 
   /// @brief Load this GeneticAlgorithm from a file.
   ///
   /// @param filename The name of the file where the GeneticAlgorithm is stored.
   void load(const std::string & filename);
+  /// @brief Get the value of an Individual, given the loss of its MLP.
+  ///
+  /// @param loss The loss of the MLP.
+  ///
+  /// @return The value of the Individual.
+  static float loss2value(float loss);
 
   public :
 
diff --git a/neural_network/include/MLPBase.hpp b/neural_network/include/MLPBase.hpp
index 0411a45da1a0326aa86d125ad4b3193a30152306..8021f6d0efcfae77523f1681f44323603b808ae5 100644
--- a/neural_network/include/MLPBase.hpp
+++ b/neural_network/include/MLPBase.hpp
@@ -17,7 +17,7 @@
 /// Once trained, it can also be used to predict the class of a certain input.
 class MLPBase
 {
-  private :
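+  // Everything is public so that GeneticAlgorithm::Individual can directly
+  // access the underlying layers and parameters (see Individual::becomeChildOf).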
+  public :
 
   using Layer = NeuralNetwork::Layer;
   /// @brief The Layers of the MLP.
diff --git a/neural_network/src/GeneticAlgorithm.cpp b/neural_network/src/GeneticAlgorithm.cpp
index 6194b70c22838dd70d95707ab24ddc7d35e21d52..347027e053b35139cea581dc74b5bb1d1a00c8d0 100644
--- a/neural_network/src/GeneticAlgorithm.cpp
+++ b/neural_network/src/GeneticAlgorithm.cpp
@@ -1,5 +1,6 @@
 #include "GeneticAlgorithm.hpp"
 #include "ProgramParameters.hpp"
+#include "util.hpp"
 
 GeneticAlgorithm::GeneticAlgorithm()
 {
@@ -17,17 +18,58 @@ GeneticAlgorithm::GeneticAlgorithm(const std::string & filename)
 
 void GeneticAlgorithm::init(int nbInputs, const std::string & topology, int nbOutputs)
 {
-  fprintf(stderr, "init of genetic\n");
+  auto parts = split(topology, ' ');
+  if (parts.size() != 2 || !isNum(parts[0]))
+  {
+    fprintf(stderr, "ERROR (%s) : wrong topology \'%s\'. Aborting.\n", ERRINFO, topology.c_str());
+    exit(1);
+  }
+
+  int nbElems = std::stoi(parts[0]);
+
+  for (int i = 0; i < nbElems; i++)
+    generation.emplace_back(new Individual(model, nbInputs, parts[1], nbOutputs));
+
+  fprintf(stderr, "Init is done!\n");
 }
 
 std::vector<float> GeneticAlgorithm::predict(FeatureModel::FeatureDescription & fd)
 {
-
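+  // Predict with the best Individual of the generation (the generation is
+  // kept sorted by decreasing value in update()).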
+  return generation[0]->mlp.predict(fd);
 }
 
 float GeneticAlgorithm::update(FeatureModel::FeatureDescription & fd, int gold)
 {
+  bool haveBeenUpdated = false;
 
+  for (auto & individual : generation)
+  {
+    float loss = individual->mlp.update(fd, gold);
+    if (loss != 0.0)
+    {
+      individual->value = loss2value(loss);
+      haveBeenUpdated = true;
+    }
+  }
+
+  if (!haveBeenUpdated)
+    return 0.0;
+
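+  // Sort the generation from best (highest value) to worst.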
+  std::sort(generation.begin(), generation.end(),
+            [](const std::unique_ptr<Individual> & a, const std::unique_ptr<Individual> & b)
+  {
+    return a->value > b->value;
+  });
+
+  fprintf(stderr, "-----------------\n");
+  for (auto & individual : generation)
+    fprintf(stderr, "%d\t%f\n", individual->id, individual->value);
+  fprintf(stderr, "-----------------\n");
+
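+  // Every Individual except the best one becomes a child of the best one.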
+  for (unsigned int i = 1; i < generation.size(); i++)
+  {
+    generation[i]->becomeChildOf(generation[0].get());
+  }
+
+  // update must return a loss : return the loss of the best Individual,
+  // obtained by inverting loss2value (value = 1000 / loss).
+  return 1000.0 / generation[0]->value;
 }
 
 void GeneticAlgorithm::save(const std::string & filename)
@@ -37,7 +79,15 @@ void GeneticAlgorithm::save(const std::string & filename)
 
 void GeneticAlgorithm::printTopology(FILE * output)
 {
-
+  if (generation.empty())
+  {
+    fprintf(output, "0 x ()\n");
+  }
+  else
+  {
+    fprintf(output, "%lu x ", generation.size());
+    generation[0]->mlp.printTopology(output);
+  }
 }
 
 void GeneticAlgorithm::load(const std::string & filename)
@@ -45,52 +95,41 @@ void GeneticAlgorithm::load(const std::string & filename)
 
 }
 
-GeneticAlgorithm::Individual::Individual(const std::string & topology, dynet::ParameterCollection & model, int nbInputs, int nbOutputs)
+GeneticAlgorithm::Individual::Individual(dynet::ParameterCollection & model, int nbInputs, const std::string & topology, int nbOutputs)
 {
-  std::string topo = topology;
-  std::replace(topo.begin(), topo.end(), '(', ' ');
-  std::replace(topo.begin(), topo.end(), ')', ' ');
-
-  auto groups = split(topo);
-  for (auto group : groups)
-  {
-    if(group.empty())
-      continue;
-
-    std::replace(group.begin(), group.end(), ',', ' ');
-    auto layer = split(group);
-
-    if (layer.size() != 2)
-    {
-      fprintf(stderr, "ERROR (%s) : invalid topology \'%s\'. Aborting.\n", ERRINFO, topology.c_str());
-      exit(1);
-    }
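+  // Give every Individual a unique, monotonically increasing identifier.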
+  static int id = 0;
+  this->id = id++;
+  mlp.init(model, nbInputs, topology, nbOutputs);
+}
 
-    int input = layers.empty() ? nbInputs : layers.back().output_dim;
-    int output = std::stoi(layer[0]); 
-    layers.emplace_back(input, output, 0, str2activation(layer[1]));
-  }
+float GeneticAlgorithm::loss2value(float loss)
+{
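+  // The lower the loss, the higher the value : the best Individuals are
+  // those with the smallest loss.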
+  return 1000.0 / loss;
+}
 
-  layers.emplace_back(layers.back().output_dim, nbOutputs, 0.0, Activation::LINEAR);
+void GeneticAlgorithm::Individual::becomeChildOf(Individual * other)
+{
+  auto & thisParameters = mlp.parameters;
+  auto & otherParameters = other->mlp.parameters;
 
-  if(layers.empty())
+  if (thisParameters.size() != otherParameters.size())
   {
-    fprintf(stderr, "ERROR (%s) : constructed mlp with 0 layers. Aborting.\n", ERRINFO);
+    fprintf(stderr, "ERROR (%s) : The two individuals are not compatibles. Sizes %lu and %lu. Aborting.\n", ERRINFO, thisParameters.size(), otherParameters.size());
     exit(1);
   }
 
-  for(unsigned int i = 0; i < layers.size()-1; i++)
-    if(layers[i].output_dim != layers[i+1].input_dim)
+  for (unsigned int i = 0; i < thisParameters.size(); i++)
+    for (unsigned int j = 0; j < thisParameters[i].size(); j++)
     {
-      fprintf(stderr, "ERROR (%s) : constructed mlp with incompatible layers. Aborting.\n", ERRINFO);
-      exit(1);
+      auto & thisParameter = thisParameters[i][j];
+      auto & otherParameter = otherParameters[i][j];
+      float * thisValues = thisParameter.values()->v;
+      float * otherValues = otherParameter.values()->v;
+      unsigned int nbValues = thisParameter.values()->d.size();
+
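+      // Uniform crossover : every weight has a 50% chance of being replaced
+      // by the corresponding weight of the other parent.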
+      for (unsigned int k = 0; k < nbValues; k++)
+        if (rand() % 1000 >= 500)
+          thisValues[k] = otherValues[k];
     }
-
-  for (auto & layer : layers)
-  {
-    dynet::Parameter W = model.add_parameters({(unsigned)layer.output_dim, (unsigned)layer.input_dim});
-    dynet::Parameter b = model.add_parameters({(unsigned)layer.output_dim});
-    parameters.push_back({W,b});
-  }
 }
 
diff --git a/transition_machine/include/Classifier.hpp b/transition_machine/include/Classifier.hpp
index bfffadf35a6cceb46d12a0ffa1eb8d0c3e7b8838..2b523d875d739fac8b731b4a860cb01f42035ab8 100644
--- a/transition_machine/include/Classifier.hpp
+++ b/transition_machine/include/Classifier.hpp
@@ -12,7 +12,6 @@
 #include "ActionSet.hpp"
 #include "Oracle.hpp"
 #include "NeuralNetwork.hpp"
-#include "MLP.hpp"
 #include "ProgramParameters.hpp"
 
 /// @brief Given a Config, a Classifier is capable of weighting its ActionSet.
diff --git a/transition_machine/src/Classifier.cpp b/transition_machine/src/Classifier.cpp
index 7dc2ee21e07bb37f45ec20af6191281c46599540..ea4fdea3a720f31d680052cf34b3c7d60b87815e 100644
--- a/transition_machine/src/Classifier.cpp
+++ b/transition_machine/src/Classifier.cpp
@@ -1,6 +1,8 @@
 #include "Classifier.hpp"
 #include "File.hpp"
 #include "util.hpp"
+#include "MLP.hpp"
+#include "GeneticAlgorithm.hpp"
 
 Classifier::Classifier(const std::string & filename, bool trainMode)
 {
@@ -67,7 +69,7 @@ Classifier::Classifier(const std::string & filename, bool trainMode)
 
   as.reset(new ActionSet(ProgramParameters::expPath + buffer, false));
 
-  if(fscanf(fd, "Topology : %s\n", buffer) != 1)
+  if(fscanf(fd, "Topology : %[^\n]\n", buffer) != 1)
     badFormatAndAbort(ERRINFO);
 
   topology = buffer;
@@ -133,7 +135,7 @@ void Classifier::initClassifier(Config & config)
     return;
   }
 
-  nn.reset(new MLP());
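+  // Use a GeneticAlgorithm instead of a single MLP as the underlying network.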
+  nn.reset(new GeneticAlgorithm());
 
   Dict::initDicts(nn->getModel(), name);