diff --git a/MLP/include/MLP.hpp b/MLP/include/MLP.hpp
index efe83ae68fa27abd91d0769370b0c668f9ca751a..825d705a5b9b6083c314a19624678bf5013a3072 100644
--- a/MLP/include/MLP.hpp
+++ b/MLP/include/MLP.hpp
@@ -69,6 +69,7 @@ class MLP
   void loadStruct(const std::string & filename);
   void loadParameters(const std::string & filename);
   void load(const std::string & filename);
+  void initDynet(); // one-time dynet initialization shared by the constructors
 
   public :
 
diff --git a/MLP/src/MLP.cpp b/MLP/src/MLP.cpp
index da90235ca35a8d7f174e28eab3fa419eaafe6f96..7342adc722f948159800ffc9bcd28870fd7796e3 100644
--- a/MLP/src/MLP.cpp
+++ b/MLP/src/MLP.cpp
@@ -67,10 +67,24 @@ MLP::Activation MLP::str2activation(std::string s)
   return LINEAR;
 }
 
+void MLP::initDynet()
+{
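+  // dynet should be initialized at most once per process, but every MLP
+  // constructor calls initDynet(); this guard turns repeat calls into no-ops.
+  // Note: the check-then-set is not thread-safe (single-threaded use assumed).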
+  static bool init = false;
+
+  if(init)
+    return;
+
+  init = true;
+  dynet::initialize(getDefaultParams());
+}
+
 MLP::MLP(std::vector<Layer> layers)
 : layers(layers), trainer(model, 0.001, 0.9, 0.999, 1e-8)
 {
-  dynet::initialize(getDefaultParams());
+  initDynet();
 
   trainMode = true;
   dropoutActive = true;
@@ -441,7 +455,7 @@ void MLP::loadParameters(const std::string & filename)
 MLP::MLP(const std::string & filename)
 : trainer(model, 0.001, 0.9, 0.999, 1e-8)
 {
-  dynet::initialize(getDefaultParams());
+  initDynet();
 
   trainMode = false;
   dropoutActive = false;