From 3905067c77dd691239c2ce2bf08f1b279199f649 Mon Sep 17 00:00:00 2001
From: Franck Dary <franck.dary@etu.univ-amu.fr>
Date: Fri, 25 Jan 2019 16:39:46 +0100
Subject: [PATCH] Changed the way models are saved and loaded to accommodate
 Dynet code.

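Each line written by GeneticAlgorithm::save() and MLPBase::saveStruct() is
now prefixed with a record header ("#SAVED_ID#" / "#TOPOLOGY#" followed by
"# {1,1} 0"), which appears to mirror the "#Tag# name {shape} ..." layout of
Dynet's text serialization, so these metadata lines can coexist in the same
file as Dynet's own records. The matching fscanf formats consume the prefix
again on load.

A minimal sketch of the round trip for one such line, for illustration only
(the file name and layer values below are made up and are not part of this
patch):

    #include <cstdio>

    int main()
    {
      // Write one topology line exactly as MLPBase::saveStruct() now does.
      FILE * fd = fopen("topology_demo.txt", "w");
      fprintf(fd, "#TOPOLOGY# # {1,1} 0 Layer : %d %d %s %.2f\n",
              1024, 256, "RELU", 0.30f);
      fclose(fd);

      // Read it back with the matching format from MLPBase::loadStruct().
      // fscanf matches the "#TOPOLOGY# # {1,1} 0" prefix literally, so it
      // only returns 4 on lines carrying this header.
      int input, output;
      char activation[1024];
      float dropout;
      fd = fopen("topology_demo.txt", "r");
      if (fscanf(fd, "#TOPOLOGY# # {1,1} 0 Layer : %d %d %s %f\n",
                 &input, &output, activation, &dropout) == 4)
        printf("Layer %d -> %d, %s, dropout %.2f\n",
               input, output, activation, dropout);
      fclose(fd);

      return 0;
    }

On lines without the header, fscanf returns short, so non-matching input
(e.g. Dynet's own records) is left to be discarded by the surrounding skip
loops in loadStruct().
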
---
 neural_network/src/GeneticAlgorithm.cpp |  6 +++---
 neural_network/src/MLPBase.cpp          | 10 +++++-----
 2 files changed, 8 insertions(+), 8 deletions(-)

diff --git a/neural_network/src/GeneticAlgorithm.cpp b/neural_network/src/GeneticAlgorithm.cpp
index 036a438..a0c9716 100644
--- a/neural_network/src/GeneticAlgorithm.cpp
+++ b/neural_network/src/GeneticAlgorithm.cpp
@@ -43,7 +43,7 @@ std::vector<float> GeneticAlgorithm::predict(FeatureModel::FeatureDescription &
 
   if (toAsk < 0 || toAsk > (int)generation.size())
   {
-    fprintf(stderr, "ERROR (%s) : trying to save \'%d\' individuals out of a population of \'%lu\'. Aborting.\n", ERRINFO, toAsk, generation.size());
+    fprintf(stderr, "ERROR (%s) : trying to use \'%d\' individuals out of a population of \'%lu\'. Aborting.\n", ERRINFO, toAsk, generation.size());
     exit(1);
   }
 
@@ -144,7 +144,7 @@ void GeneticAlgorithm::save(const std::string & filename)
 
   File * file = new File(filename, "w");
   for (int i = 0; i < toSave; i++)
-    fprintf(file->getDescriptor(), "%u\n", generation[i]->id);
+    fprintf(file->getDescriptor(), "#SAVED_ID# # {1,1} 0 %u\n", generation[i]->id);
   delete file;
   
   for (int i = 0; i < toSave; i++)
@@ -172,7 +172,7 @@ void GeneticAlgorithm::load(const std::string & filename)
   std::vector<int> ids;
   File * file = new File(filename, "r");
   unsigned int id;
-  while (fscanf(file->getDescriptor(), "%u\n", &id) == 1)
+  while (fscanf(file->getDescriptor(), "#SAVED_ID# # {1,1} 0 %u\n", &id) == 1)
     ids.emplace_back(id);
   delete file;
 
diff --git a/neural_network/src/MLPBase.cpp b/neural_network/src/MLPBase.cpp
index 3f0aa3f..8fd98e4 100644
--- a/neural_network/src/MLPBase.cpp
+++ b/neural_network/src/MLPBase.cpp
@@ -277,7 +277,7 @@ void MLPBase::saveStruct(const std::string & filename)
 
   for (auto & layer : layers)
   {
-    fprintf(fd, "Layer : %d %d %s %.2f\n", layer.input_dim, layer.output_dim, NeuralNetwork::activation2str(layer.activation).c_str(), layer.dropout_rate);
+    fprintf(fd, "#TOPOLOGY# # {1,1} 0 Layer : %d %d %s %.2f\n", layer.input_dim, layer.output_dim, NeuralNetwork::activation2str(layer.activation).c_str(), layer.dropout_rate);
   }
 }
 
@@ -305,7 +305,7 @@ void MLPBase::loadStruct(dynet::ParameterCollection & model, const std::string &
 
   for (unsigned int i = 0; i < index; i++)
   {
-    while (fscanf(fd, "Layer : %d %d %s %f\n", &input, &output, activation, &dropout) != 4)
+    while (fscanf(fd, "#TOPOLOGY# # {1,1} 0 Layer : %d %d %s %f\n", &input, &output, activation, &dropout) != 4)
       if (fscanf(fd, "%10[^\n]\n", activation) != 1)
       {
         fprintf(stderr, "ERROR (%s) : Unexpected end of file \'%s\'. Aborting.\n", ERRINFO, file.getName().c_str());
@@ -314,10 +314,10 @@ void MLPBase::loadStruct(dynet::ParameterCollection & model, const std::string &
 
       do
       {
-      } while (fscanf(fd, "Layer : %d %d %s %f\n", &input, &output, activation, &dropout) == 4);
+      } while (fscanf(fd, "#TOPOLOGY# # {1,1} 0 Layer : %d %d %s %f\n", &input, &output, activation, &dropout) == 4);
   }
 
-  while (fscanf(fd, "Layer : %d %d %s %f\n", &input, &output, activation, &dropout) != 4)
+  while (fscanf(fd, "#TOPOLOGY# # {1,1} 0 Layer : %d %d %s %f\n", &input, &output, activation, &dropout) != 4)
     if (fscanf(fd, "%10[^\n]\n", activation) != 1)
     {
       fprintf(stderr, "ERROR (%s) : Unexpected end of file \'%s\'. Aborting.\n", ERRINFO, file.getName().c_str());
@@ -327,7 +327,7 @@ void MLPBase::loadStruct(dynet::ParameterCollection & model, const std::string &
   do
   {
     layers.emplace_back(input, output, dropout, NeuralNetwork::str2activation(activation));
-  } while (fscanf(fd, "Layer : %d %d %s %f\n", &input, &output, activation, &dropout) == 4);
+  } while (fscanf(fd, "#TOPOLOGY# # {1,1} 0 Layer : %d %d %s %f\n", &input, &output, activation, &dropout) == 4);
 
   checkLayersCompatibility();
 
-- 
GitLab