From 42507e78703b65882ba9867fd411b192402c5899 Mon Sep 17 00:00:00 2001
From: Franck Dary <franck.dary@etu.univ-amu.fr>
Date: Thu, 17 Jan 2019 11:20:11 +0100
Subject: [PATCH] Make GeneticAlgorithm work at decode time

---
 neural_network/include/GeneticAlgorithm.hpp |  2 ++
 neural_network/include/MLPBase.hpp          |  3 ++-
 neural_network/src/GeneticAlgorithm.cpp     | 27 ++++++++++++++++---
 neural_network/src/MLP.cpp                  |  2 +-
 neural_network/src/MLPBase.cpp              | 29 ++++++++++++++++-----
 5 files changed, 50 insertions(+), 13 deletions(-)

diff --git a/neural_network/include/GeneticAlgorithm.hpp b/neural_network/include/GeneticAlgorithm.hpp
index 73ecb88..e52b327 100644
--- a/neural_network/include/GeneticAlgorithm.hpp
+++ b/neural_network/include/GeneticAlgorithm.hpp
@@ -42,6 +42,8 @@ class GeneticAlgorithm : public NeuralNetwork
     /// @param topology The desired topology for the mlp.
     /// @param nbOutputs The size of the mlp output layer.
     Individual(dynet::ParameterCollection & model, int nbInputs, const std::string & topology, int nbOutputs);
+    /// @brief Create a blank Individual, meant to be populated later from a saved model.
+    Individual();
     void becomeChildOf(Individual * other);
     void becomeChildOf(Individual * mom, Individual * dad);
     void mutate(float probability);
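
Note on the new blank constructor: it exists so that a saved generation can be
rebuilt before its networks are read back from disk. A minimal sketch of the
intended pattern (it mirrors the GeneticAlgorithm::load() hunk further down;
nbElems, generation, model and filename are the names used there):

    // Create one empty Individual per structure stored in the file ...
    for (int i = 0; i < nbElems; i++)
      generation.emplace_back(new Individual());

    // ... then populate each one from its own slot in the saved model.
    for (int i = 0; i < nbElems; i++)
    {
      generation[i]->mlp.loadStruct(model, filename, i);
      generation[i]->mlp.loadParameters(model, filename);
    }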
diff --git a/neural_network/include/MLPBase.hpp b/neural_network/include/MLPBase.hpp
index fb32913..b8bafcd 100644
--- a/neural_network/include/MLPBase.hpp
+++ b/neural_network/include/MLPBase.hpp
@@ -73,7 +73,8 @@ class MLPBase
   /// The file must have been written by the function saveStruct.
   /// @param model The dynet model that will contain the loaded parameters.
   /// @param filename The file from which the structure will be read.
-  void loadStruct(dynet::ParameterCollection & model, const std::string & filename);
+  /// @param index The zero-based index of the structure to read, for files containing several concatenated structures.
+  void loadStruct(dynet::ParameterCollection & model, const std::string & filename, unsigned int index);
   /// @brief Load and populate the model with parameters from a file.
   ///
   /// The file must have been written by the function saveParameters.
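
For reference, the two call patterns of the extended loadStruct() introduced by
this patch (taken from the MLP.cpp and GeneticAlgorithm.cpp hunks below):

    // A plain MLP model stores a single structure, so index 0 is requested:
    mlp.loadStruct(model, filename, 0);

    // A GeneticAlgorithm model stores one structure per Individual, so the
    // i-th Individual reads the i-th structure of the same file:
    generation[i]->mlp.loadStruct(model, filename, i);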
diff --git a/neural_network/src/GeneticAlgorithm.cpp b/neural_network/src/GeneticAlgorithm.cpp
index 50c1abb..16d1131 100644
--- a/neural_network/src/GeneticAlgorithm.cpp
+++ b/neural_network/src/GeneticAlgorithm.cpp
@@ -149,18 +149,29 @@ void GeneticAlgorithm::load(const std::string & filename)
     fprintf(stderr, "ERROR (%s) : file \'%s\' bad format. Aborting.\n", ERRINFO, filename.c_str());
     exit(1);
   }
+
   delete file;
 
   this->nbInputs = i;
   this->nbOutputs = o;
   this->topology = buffer;
 
-  init(nbInputs, topology, nbOutputs);
+  auto splited = split(topology, ' ');
+  if (splited.size() != 2 || !isNum(splited[0]))
+  {
+    fprintf(stderr, "ERROR (%s) : wrong topology \'%s\'. Aborting.\n", ERRINFO, topology.c_str());
+    exit(1);
+  }
+
+  int nbElems = std::stoi(splited[0]);
 
-  for (auto & individual : generation)
+  for (int i = 0; i < nbElems; i++)
+    generation.emplace_back(new Individual());
+
+  for (int i = 0; i < nbElems; i++)
   {
-    individual->mlp.loadStruct(model, filename);
-    individual->mlp.loadParameters(model, filename);
+    generation[i]->mlp.loadStruct(model, filename, i);
+    generation[i]->mlp.loadParameters(model, filename);
   }
 }
 
@@ -174,6 +185,14 @@ GeneticAlgorithm::Individual::Individual(dynet::ParameterCollection & model, int
   mutate(1.0);
 }
 
+GeneticAlgorithm::Individual::Individual()
+  : mlp("MLP_" + std::to_string(idCount))
+{
+  this->id = idCount++;
+  this->loss = 0.0;
+  this->value = 0.0;
+}
+
 float GeneticAlgorithm::loss2value(float loss)
 {
   return 1000.0 / loss;
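
The parse added to load() above assumes the stored topology field holds two
space-separated tokens, the first being the population size. A short sketch
with a hypothetical value (the per-individual MLP topology syntax itself is
not shown in this patch):

    // Hypothetical stored topology : "10 <per-individual mlp topology>"
    auto splited = split(topology, ' ');   // {"10", "<per-individual mlp topology>"}
    if (splited.size() != 2 || !isNum(splited[0]))
      { /* bad format, abort */ }
    int nbElems = std::stoi(splited[0]);   // 10 Individuals will be re-created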
diff --git a/neural_network/src/MLP.cpp b/neural_network/src/MLP.cpp
index 90eac75..47dc945 100644
--- a/neural_network/src/MLP.cpp
+++ b/neural_network/src/MLP.cpp
@@ -13,7 +13,7 @@ MLP::MLP(const std::string & filename) : mlp("MLP")
   trainer.reset(createTrainer());
   initDynet();
 
-  mlp.loadStruct(model, filename);
+  mlp.loadStruct(model, filename, 0);
   mlp.loadParameters(model, filename);
 }
 
diff --git a/neural_network/src/MLPBase.cpp b/neural_network/src/MLPBase.cpp
index 4152029..3f0aa3f 100644
--- a/neural_network/src/MLPBase.cpp
+++ b/neural_network/src/MLPBase.cpp
@@ -293,7 +293,7 @@ void MLPBase::saveParameters(const std::string & filename)
   }
 }
 
-void MLPBase::loadStruct(dynet::ParameterCollection & model, const std::string & filename)
+void MLPBase::loadStruct(dynet::ParameterCollection & model, const std::string & filename, unsigned int index)
 {
   File file(filename, "r");
   FILE * fd = file.getDescriptor();
@@ -303,17 +303,31 @@ void MLPBase::loadStruct(dynet::ParameterCollection & model, const std::string &
   int output;
   float dropout;
 
+  // Skip the first 'index' structures : each structure is a consecutive run
+  // of "Layer :" lines, and anything read before that run is discarded.
+  for (unsigned int i = 0; i < index; i++)
+  {
+    // Discard lines until the start of the next "Layer :" run ...
+    while (fscanf(fd, "Layer : %d %d %s %f\n", &input, &output, activation, &dropout) != 4)
+      if (fscanf(fd, "%10[^\n]\n", activation) != 1)
+      {
+        fprintf(stderr, "ERROR (%s) : Unexpected end of file \'%s\'. Aborting.\n", ERRINFO, file.getName().c_str());
+        exit(1);
+      }
+
+    // ... then consume the remainder of that run.
+    do
+    {
+    } while (fscanf(fd, "Layer : %d %d %s %f\n", &input, &output, activation, &dropout) == 4);
+  }
+
   while (fscanf(fd, "Layer : %d %d %s %f\n", &input, &output, activation, &dropout) != 4)
-    if (fscanf(fd, "%[^\n]\n", activation) != 1)
+    if (fscanf(fd, "%10[^\n]\n", activation) != 1)
     {
-      fprintf(stderr, "ERROR (%s) : Unexpected end of file \'%s\'. Aborting.\n", ERRINFO, filename.c_str());
+      fprintf(stderr, "ERROR (%s) : Unexpected end of file \'%s\'. Aborting.\n", ERRINFO, file.getName().c_str());
       exit(1);
     }
 
-    do
-    {
-      layers.emplace_back(input, output, dropout, NeuralNetwork::str2activation(activation));
-    } while (fscanf(fd, "Layer : %d %d %s %f\n", &input, &output, activation, &dropout) == 4);
+  do
+  {
+    layers.emplace_back(input, output, dropout, NeuralNetwork::str2activation(activation));
+  } while (fscanf(fd, "Layer : %d %d %s %f\n", &input, &output, activation, &dropout) == 4);
 
   checkLayersCompatibility();
 
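
To make the skipping logic above easier to follow, this is the file layout
loadStruct() now assumes: each saved structure is a consecutive run of
"Layer : ..." lines, preceded by at least one line that does not match that
pattern (whatever saveStruct writes before it; the exact header content is not
shown in this patch). loadStruct(model, filename, index) discards index
complete runs before reading its own. Illustrative content only, with made-up
sizes and activation names:

    <header line written by saveStruct>
    Layer : 150 200 RELU 0.3         <- structure 0, read when index == 0
    Layer : 200 75 RELU 0.0
    <header line written by saveStruct>
    Layer : 150 200 RELU 0.3         <- structure 1, read when index == 1
    Layer : 200 75 RELU 0.0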
@@ -334,6 +348,7 @@ void MLPBase::loadParameters(dynet::ParameterCollection & model, const std::stri
       parameters[i][1] = loader.load_param(model, prefix + std::to_string(i) + "_b");
     } catch(const std::runtime_error e)
     {
+      fprintf(stderr, "WARNING (%s) : Could not find parameter with key \'%s\' in the model. Ignore this if this model was trained with an older version of Macaon.\n", ERRINFO, (prefix+std::to_string(i) + "_W").c_str());
       prefix = "Layer_";
       parameters[i][0] = loader.load_param(model, prefix + std::to_string(i) + "_W");
       parameters[i][1] = loader.load_param(model, prefix + std::to_string(i) + "_b");     
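
For readers unfamiliar with this loader, a short summary of the lookup order in
loadParameters() (the initial value of prefix is assigned above this hunk and
is not shown here; only the legacy fallback is visible):

    // Lookup order for layer i :
    //   1. prefix + std::to_string(i) + "_W"   and   ... + "_b"
    //   2. on std::runtime_error (key not found) : print the new warning,
    //      set prefix = "Layer_", and retry with "Layer_<i>_W" / "Layer_<i>_b",
    //      which is what models saved by older Macaon versions used.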
-- 
GitLab