diff --git a/MLP/include/MLP.hpp b/MLP/include/MLP.hpp
index 40c6ba706e46a950a2ea0c08bb15b58aa772ce7d..05d5ef3c00b25ce5273ba2bddb199e6123c1fb09 100644
--- a/MLP/include/MLP.hpp
+++ b/MLP/include/MLP.hpp
@@ -41,9 +41,14 @@ class MLP
 
   private :
 
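+  // Maximum number of rows in each per-Dict lookup table.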
+  static const unsigned int MAXLOOKUPSIZE = 200000;
+
   std::vector<Layer> layers;
   std::vector< std::vector<dynet::Parameter> > parameters;
-  std::map<void*,dynet::Parameter> ptr2parameter;
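+  // For each Dict : its LookupParameter (one row per distinct value) and a map
+  // from a value's address, as returned by the Dict, to its row in the table.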
+  std::map< Dict*, std::pair<dynet::LookupParameter, std::map<void*, unsigned int> > > lookupParameters;
 
   dynet::ParameterCollection model;
   dynet::AmsgradTrainer trainer;
@@ -54,7 +59,7 @@ class MLP
   void addLayerToModel(Layer & layer);
   void checkLayersCompatibility();
   dynet::DynetParams & getDefaultParams();
-  dynet::Parameter & featValue2parameter(const FeatureModel::FeatureValue & fv);
+  dynet::Expression featValue2Expression(dynet::ComputationGraph & cg, const FeatureModel::FeatureValue & fv);
   dynet::Expression run(dynet::ComputationGraph & cg, dynet::Expression x);
   inline dynet::Expression activate(dynet::Expression h, Activation f);
   void printParameters(FILE * output);
diff --git a/MLP/src/MLP.cpp b/MLP/src/MLP.cpp
index 4c1bc79d62c2703d91e1069aba0ba211d979f5b0..6213bf698c7badc46b18635883f77aaa3ae064bb 100644
--- a/MLP/src/MLP.cpp
+++ b/MLP/src/MLP.cpp
@@ -114,10 +114,7 @@ std::vector<float> MLP::predict(FeatureModel::FeatureDescription & fd, int goldC
 
   for (auto & featValue : fd.values)
   {
-    if(featValue.policy == FeatureModel::Policy::Final)
-      expressions.emplace_back(dynet::const_parameter(cg, featValue2parameter(featValue)));
-    else
-      expressions.emplace_back(dynet::parameter(cg, featValue2parameter(featValue)));
+    expressions.emplace_back(featValue2Expression(cg, featValue));
   }
 
   dynet::Expression input = dynet::concatenate(expressions);
@@ -143,19 +140,55 @@ dynet::DynetParams & MLP::getDefaultParams()
   return params;
 }
 
-dynet::Parameter & MLP::featValue2parameter(const FeatureModel::FeatureValue & fv)
+dynet::Expression MLP::featValue2Expression(dynet::ComputationGraph & cg, const FeatureModel::FeatureValue & fv)
 {
-  auto it = ptr2parameter.find(fv.vec);
+  Dict * dict = fv.dict;
+
+  auto entry = lookupParameters.find(dict);
+
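+  // Lazily create this Dict's lookup table : MAXLOOKUPSIZE rows of dimension getDimension() x 1.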
+  if(entry == lookupParameters.end())
+  {
+    lookupParameters[dict].first = model.add_lookup_parameters(MAXLOOKUPSIZE, {(unsigned)dict->getDimension(),1});
+  }
+
+  auto & ptr2index = lookupParameters[dict].second;
+  auto & lu = lookupParameters[dict].first;
+
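+  // Final features are frozen : const_lookup reads a row without making it
+  // a gradient target, whereas lookup lets the trainer update the row.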
+  bool isConst = fv.policy == FeatureModel::Policy::Final;
+
+  auto it = ptr2index.find(fv.vec);
+
+  if(it != ptr2index.end())
+  {
+    if(isConst)
+      return dynet::const_lookup(cg, lu, it->second);
+    else
+      return dynet::lookup(cg, lu, it->second);
+  }
 
-  if(it != ptr2parameter.end())
-    return it->second;
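+  // First occurrence of this value : assign it the next free row of the table.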
+  unsigned int newIndex = ptr2index.size();
+  it = ptr2index.emplace(fv.vec, newIndex).first;
 
-  ptr2parameter[fv.vec] = model.add_parameters({(unsigned)fv.vec->size(),1});
-  it = ptr2parameter.find(fv.vec);
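+  // Abort rather than write past the table's fixed capacity.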
+  unsigned int lookupSize = (unsigned int)lu.values()->size();
+  if(it->second >= lookupSize)
+  {
+    fprintf(stderr, "ERROR (%s) : MAXLOOKUPSIZE (%d) is too small. Aborting.\n", ERRINFO, MAXLOOKUPSIZE);
+    exit(1);
+  }
 
-  it->second.values()->v = fv.vec->data();
+  // Horrible trick : make the Tensor point directly at the Dict's data
+  // instead of copying it. Works only on CPU, where both live in host memory.
+  (*lu.values())[it->second].v = fv.vec->data();
 
-  return it->second;
+  if(isConst)
+    return dynet::const_lookup(cg, lu, it->second);
+  else
+    return dynet::lookup(cg, lu, it->second);
 }
 
 dynet::Expression MLP::run(dynet::ComputationGraph & cg, dynet::Expression x)
@@ -225,17 +258,7 @@ inline dynet::Expression MLP::activate(dynet::Expression h, Activation f)
 
 void MLP::printParameters(FILE * output)
 {
-  for(auto & it : ptr2parameter)
-  {
-    auto & param = it.second;
-    dynet::Tensor * tensor = param.values();
-    float * value = tensor->v;
-    int dim = tensor->d.size();
-    fprintf(output, "Param : ");
-    for(int i = 0; i < dim; i++)
-      fprintf(output, "%.2f ", value[i]);
-    fprintf(output, "\n");
-  }
+  fprintf(output, "Parameters : NOT IMPLEMENTED\n");
 }
 
 int MLP::trainOnBatch(std::vector<std::pair<int, FeatureModel::FeatureDescription> >::iterator & start, std::vector<std::pair<int, FeatureModel::FeatureDescription> >::iterator & end)
@@ -252,12 +275,7 @@ int MLP::trainOnBatch(std::vector<std::pair<int, FeatureModel::FeatureDescriptio
     expressions.clear();
 
     for (auto & featValue : it->second.values)
-    {
-      if(featValue.policy == FeatureModel::Policy::Final)
-        expressions.emplace_back(dynet::const_parameter(cg, featValue2parameter(featValue)));
-      else
-        expressions.emplace_back(dynet::parameter(cg, featValue2parameter(featValue)));
-    }
+      expressions.emplace_back(featValue2Expression(cg, featValue));
 
     inputs.emplace_back(dynet::concatenate(expressions));
     inputDim = inputs.back().dim().rows();
diff --git a/maca_common/include/Dict.hpp b/maca_common/include/Dict.hpp
index cb104449e6bd9db04b334cb97fc6e90e50b27e11..996aaa43929e2423fecac61113e2f78f666de9ac 100644
--- a/maca_common/include/Dict.hpp
+++ b/maca_common/include/Dict.hpp
@@ -58,6 +58,7 @@ class Dict
   void save();
   std::vector<float> * getValue(const std::string & s);
   std::vector<float> * getNullValue();
+  int getDimension() const;
 };
 
 #endif
diff --git a/maca_common/src/Dict.cpp b/maca_common/src/Dict.cpp
index 083dd40c600fb101fecf46306538a909aa505c30..125f24c4802c4d8ae32e819ca1c75ddb15775342 100644
--- a/maca_common/src/Dict.cpp
+++ b/maca_common/src/Dict.cpp
@@ -190,3 +190,8 @@ Dict * Dict::getDict(Policy policy, const std::string & filename)
   return str2dict[filename].get();
 }
 
+int Dict::getDimension() const
+{
+  return dimension;
+}
+
diff --git a/tape_machine/include/FeatureModel.hpp b/tape_machine/include/FeatureModel.hpp
index d092176d1c50b1c502aa0d7ada8a473305a251fa..4767930335bd5b8a7ae273b044ebae231a9a694b 100644
--- a/tape_machine/include/FeatureModel.hpp
+++ b/tape_machine/include/FeatureModel.hpp
@@ -17,6 +17,8 @@ class FeatureModel
 
   struct FeatureValue
   {
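+    // Dict this value comes from, telling MLP which lookup table to use.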
+    Dict * dict;
     std::string name;
     std::string * value;
     std::vector<float> * vec;
diff --git a/tape_machine/src/FeatureBank.cpp b/tape_machine/src/FeatureBank.cpp
index 845fed53dae3ec615be18b2890c9580c36c428ef..52d0af70b2b8741a2de4ad3f821d38cbd36371c2 100644
--- a/tape_machine/src/FeatureBank.cpp
+++ b/tape_machine/src/FeatureBank.cpp
@@ -36,9 +36,9 @@ FeatureModel::FeatureValue FeatureBank::simpleBufferAccess(Config & config, int
   int index = config.head + relativeIndex;
 
   if(index < 0 || index >= (int)tape.size())
-    return {featName+"(null)", &Dict::nullValueStr, dict->getNullValue(), policy};
+    return {dict, featName+"(null)", &Dict::nullValueStr, dict->getNullValue(), policy};
 
-  return {featName, &tape[index], dict->getValue(tape[index]), policy};
+  return {dict, featName, &tape[index], dict->getValue(tape[index]), policy};
 }
 
 FeatureModel::FeatureValue FeatureBank::simpleStackAccess(Config & config, int relativeIndex, const std::string & tapeName, const std::string & featName)
@@ -48,14 +48,14 @@ FeatureModel::FeatureValue FeatureBank::simpleStackAccess(Config & config, int r
   auto policy = dictPolicy2FeaturePolicy(dict->policy);
 
   if(relativeIndex < 0 || relativeIndex >= (int)config.stack.size())
-    return {featName+"(null)", &Dict::nullValueStr, dict->getNullValue(), policy};
+    return {dict, featName+"(null)", &Dict::nullValueStr, dict->getNullValue(), policy};
 
   int index = config.stack[config.stack.size()-1-relativeIndex];
 
   if(index < 0 || index >= (int)tape.size())
-    return {featName+"(null)", &Dict::nullValueStr, dict->getNullValue(), policy};
+    return {dict, featName+"(null)", &Dict::nullValueStr, dict->getNullValue(), policy};
 
-  return {featName, &tape[index], dict->getValue(tape[index]), policy};
+  return {dict, featName, &tape[index], dict->getValue(tape[index]), policy};
 }
 
 FeatureModel::Policy FeatureBank::dictPolicy2FeaturePolicy(Dict::Policy policy)
diff --git a/trainer/src/Trainer.cpp b/trainer/src/Trainer.cpp
index f3419110606f52535ef0d423d0cb17353fb5e841..d20ee64f128973763658c1f412982f02c786d49d 100644
--- a/trainer/src/Trainer.cpp
+++ b/trainer/src/Trainer.cpp
@@ -55,6 +55,8 @@ void Trainer::trainBatched()
 
   std::map<Classifier*, std::vector<Example> > examples;
 
+  fprintf(stderr, "Training of \'%s\' :\n", tm.name.c_str());
+
   while (!config.isFinal())
   {
     TapeMachine::State * currentState = tm.getCurrentState();