diff --git a/MLP/include/MLP.hpp b/MLP/include/MLP.hpp
index f71c346d3cfb52fc5ebd85e866bcbbe7c20126a6..bb42cccc64015a90984d0d638d6ed3dd323cb459 100644
--- a/MLP/include/MLP.hpp
+++ b/MLP/include/MLP.hpp
@@ -43,6 +43,7 @@ class MLP
 
   std::vector<Layer> layers;
   std::vector< std::vector<dynet::Parameter> > parameters;
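+  // Maps a feature vector (by its pointer) to the dynet Parameter created from it,
+  // so each vector is added to the model only once (see featValue2parameter).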
+  std::map<void*,dynet::Parameter> ptr2parameter;
 
   dynet::ParameterCollection model;
   dynet::AmsgradTrainer trainer;
@@ -53,6 +54,9 @@ class MLP
   void addLayerToModel(Layer & layer);
   void checkLayersCompatibility();
   dynet::DynetParams & getDefaultParams();
+  dynet::Parameter & featValue2parameter(const FeatureModel::FeatureValue & fv);
+  dynet::Expression run(dynet::ComputationGraph & cg, dynet::Expression x);
+  inline dynet::Expression activate(dynet::Expression h, Activation f);
 
   public :
 
diff --git a/MLP/src/MLP.cpp b/MLP/src/MLP.cpp
index 219b027cfcb01b76b026f6a722046d0bd5ce56c7..7f2a84fb30370a67a39015a843600d480bbffded 100644
--- a/MLP/src/MLP.cpp
+++ b/MLP/src/MLP.cpp
@@ -1,6 +1,8 @@
 #include "MLP.hpp"
 #include "util.hpp"
 
+#include <dynet/param-init.h>
+
 std::string MLP::activation2str(Activation a)
 {
   switch(a)
@@ -107,11 +109,19 @@ std::vector<float> MLP::predict(FeatureModel::FeatureDescription & fd, int goldC
 {
   dynet::ComputationGraph cg;
 
+  std::vector<dynet::Expression> expressions;
+
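+  // Build one input expression per feature value; features with the Final policy
+  // are added as constants so they are never updated.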
   for (auto & featValue : fd.values)
   {
-    dynet::Parameter p(*featValue.vec);
+    if(featValue.policy == FeatureModel::Policy::Final)
+      expressions.emplace_back(dynet::const_parameter(cg, featValue2parameter(featValue)));
+    else
+      expressions.emplace_back(dynet::parameter(cg, featValue2parameter(featValue)));
   }
 
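+  // Concatenate all feature expressions into a single input and feed it through the network.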
+  dynet::Expression input = dynet::concatenate(expressions);
+  dynet::Expression output = run(cg, input);
+
   /*
   int nbInputs = layers[0].input_dim;
 
@@ -126,7 +136,7 @@ std::vector<float> MLP::predict(FeatureModel::FeatureDescription & fd, int goldC
   trainer.update();
   */
 
-  std::vector<float> res;
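+  // Run the graph forward and copy the output tensor into a plain float vector.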
+  std::vector<float> res = as_vector(cg.forward(output));
 
   return res;
 }
@@ -141,3 +151,82 @@ dynet::DynetParams & MLP::getDefaultParams()
   return params;
 }
 
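+// Returns the dynet Parameter associated with a feature vector, creating and caching it on first use.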
+dynet::Parameter & MLP::featValue2parameter(const FeatureModel::FeatureValue & fv)
+{
+  auto it = ptr2parameter.find(fv.vec);
+
+  if(it != ptr2parameter.end())
+    return it->second;
+
+  // Create the parameter once, cache it by pointer, and freeze it if the feature is marked Final.
+  it = ptr2parameter.emplace(fv.vec, model.add_parameters({1, static_cast<unsigned int>(fv.vec->size())}, dynet::ParameterInitFromVector(*fv.vec))).first;
+  it->second.set_updated(fv.policy != FeatureModel::Policy::Final);
+
+  return it->second;
+}
+
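+// Forward pass: applies each layer's affine transform followed by its activation function.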
+dynet::Expression MLP::run(dynet::ComputationGraph & cg, dynet::Expression x)
+{
+  // Expression for the current hidden state
+  dynet::Expression h_cur = x;
+
+  for(unsigned int l = 0; l < layers.size(); l++)
+  {
+    // Initialize parameters in computation graph
+    dynet::Expression W = parameter(cg, parameters[l][0]);
+    dynet::Expression b = parameter(cg, parameters[l][1]);
+    // Apply affine transform
+    dynet::Expression a = dynet::affine_transform({b, W, h_cur});
+    // Apply activation function
+    dynet::Expression h = activate(a, layers[l].activation);
+    h_cur = h;
+    // Take care of dropout
+    /*
+    dynet::Expression h_dropped;
+    if(layers[l].dropout_rate > 0){
+      if(dropout_active){
+        dynet::Expression mask = random_bernoulli(cg, 
+         {layers[l].output_dim}, 1 - layers[l].dropout_rate);
+        h_dropped = cmult(h, mask);
+      }
+      else{
+        h_dropped = h * (1 - layers[l].dropout_rate);
+      }
+    }
+    else{
+      h_dropped = h;
+    }
+
+    h_cur = h_dropped;
+  */
+  }
+
+  return h_cur;
+}
+
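+// Applies the activation function f to the expression h.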
+inline dynet::Expression MLP::activate(dynet::Expression h, Activation f)
+{
+  switch(f)
+  {
+    case LINEAR :
+      return h;
+    case RELU :
+      return rectify(h);
+    case SIGMOID :
+      return logistic(h);
+    case TANH :
+      return tanh(h);
+    case SOFTMAX :
+      return softmax(h);
+    default :
+      break;
+  }
+
+  return h;
+}
+
diff --git a/tape_machine/src/MCD.cpp b/tape_machine/src/MCD.cpp
index c1cc39c21c635ab5d75433689bb180581c817f7d..7c96d21e6899ca6ea1b9bf0e79d70efd580b1f7b 100644
--- a/tape_machine/src/MCD.cpp
+++ b/tape_machine/src/MCD.cpp
@@ -39,27 +39,67 @@ MCD::MCD(const std::string & filename)
 
 Dict * MCD::getDictOfLine(int num)
 {
-  return num2line[num]->dict;
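+  // Use find() rather than operator[] so an unknown key aborts instead of silently inserting an empty entry.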
+  auto it = num2line.find(num);
+
+  if(it == num2line.end())
+  {
+    fprintf(stderr, "ERROR (%s) : requestion line number %d in MCD. Aborting.\n", ERRINFO, num);
+    exit(1);
+  }
+
+  return it->second->dict;
 }
 
 Dict * MCD::getDictOfLine(const std::string & name)
 {
-  return name2line[name]->dict;
+  auto it = name2line.find(name);
+
+  if(it == name2line.end())
+  {
+    fprintf(stderr, "ERROR (%s) : requestion line \'%s\' in MCD. Aborting.\n", ERRINFO, name.c_str());
+    exit(1);
+  }
+
+  return it->second->dict;
 }
 
 Dict * MCD::getDictOfInputCol(int col)
 {
-  return col2line[col]->dict;
+  auto it = col2line.find(col);
+
+  if(it == col2line.end())
+  {
+    fprintf(stderr, "ERROR (%s) : requestion line of input column %d in MCD. Aborting.\n", ERRINFO, col);
+    exit(1);
+  }
+
+  return it->second->dict;
 }
 
 int MCD::getLineOfName(const std::string & name)
 {
-  return name2line[name]->num;
+  auto it = name2line.find(name);
+
+  if(it == name2line.end())
+  {
+    fprintf(stderr, "ERROR (%s) : requestion line %s in MCD. Aborting.\n", ERRINFO, name.c_str());
+    exit(1);
+  }
+
+  return it->second->num;
 }
 
 int MCD::getLineOfInputCol(int col)
 {
-  return col2line[col]->num;
+  auto it = col2line.find(col);
+
+  if(it == col2line.end())
+  {
+    fprintf(stderr, "ERROR (%s) : requestion line in MCD corresponding to input col %d. Aborting.\n", ERRINFO, col);
+    exit(1);
+  }
+
+  return it->second->num;
 }
 
 int MCD::getNbInputColumns()