From b0f2f464a4c9a3dc39a899599bb2d2ae8667bead Mon Sep 17 00:00:00 2001
From: Franck Dary <franck.dary@etu.univ-amu.fr>
Date: Mon, 30 Jul 2018 11:26:55 +0200
Subject: [PATCH] Ensure dynet is only initialized once

---
 MLP/include/MLP.hpp |  1 +
 MLP/src/MLP.cpp     | 15 +++++++++++++--
 2 files changed, 14 insertions(+), 2 deletions(-)

diff --git a/MLP/include/MLP.hpp b/MLP/include/MLP.hpp
index efe83ae..825d705 100644
--- a/MLP/include/MLP.hpp
+++ b/MLP/include/MLP.hpp
@@ -69,6 +69,7 @@ class MLP
   void loadStruct(const std::string & filename);
   void loadParameters(const std::string & filename);
   void load(const std::string & filename);
+  void initDynet();
 
   public :
 
diff --git a/MLP/src/MLP.cpp b/MLP/src/MLP.cpp
index da90235..7342adc 100644
--- a/MLP/src/MLP.cpp
+++ b/MLP/src/MLP.cpp
@@ -67,10 +67,21 @@ MLP::Activation MLP::str2activation(std::string s)
   return LINEAR;
 }
 
+void MLP::initDynet()
+{
+  static bool init = false;
+
+  if(init)
+    return;
+
+  init = true;
+  dynet::initialize(getDefaultParams());
+}
+
 MLP::MLP(std::vector<Layer> layers)
 : layers(layers), trainer(model, 0.001, 0.9, 0.999, 1e-8)
 {
-  dynet::initialize(getDefaultParams());
+  initDynet();
 
   trainMode = true;
   dropoutActive = true;
@@ -441,7 +452,7 @@ void MLP::loadParameters(const std::string & filename)
 MLP::MLP(const std::string & filename)
 : trainer(model, 0.001, 0.9, 0.999, 1e-8)
 {
-  dynet::initialize(getDefaultParams());
+  initDynet();
 
   trainMode = false;
   dropoutActive = false;
-- 
GitLab