diff --git a/CMakeLists.txt b/CMakeLists.txt
index 9f9c4ab5e1b1b6b86f608cc95277a2fc2bac2ad4..ec5e2a61198dcf3d4f293cfd909e50a5116acffb 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -27,6 +27,7 @@ include_directories(maca_common/include)
 include_directories(transition_machine/include)
 include_directories(trainer/include)
 include_directories(decoder/include)
+include_directories(neural_network/include)
 include_directories(MLP/include)
 include_directories(error_correction/include)
 
@@ -34,6 +35,7 @@ add_subdirectory(maca_common)
 add_subdirectory(transition_machine)
 add_subdirectory(trainer)
 add_subdirectory(decoder)
+add_subdirectory(neural_network)
 add_subdirectory(MLP)
 add_subdirectory(error_correction)
 
diff --git a/neural_network/CMakeLists.txt b/neural_network/CMakeLists.txt
new file mode 100644
index 0000000000000000000000000000000000000000..a4237344cb6202fdeb49d6e8fc1e8c1f7462b9c2
--- /dev/null
+++ b/neural_network/CMakeLists.txt
@@ -0,0 +1,4 @@
+FILE(GLOB SOURCES src/*.cpp)
+
+#compiling library
+add_library(neural_network STATIC ${SOURCES})
diff --git a/neural_network/include/NeuralNetwork.hpp b/neural_network/include/NeuralNetwork.hpp
new file mode 100644
index 0000000000000000000000000000000000000000..46e9c1ba8c82345563d42d5fca28f78eaddd4443
--- /dev/null
+++ b/neural_network/include/NeuralNetwork.hpp
@@ -0,0 +1,67 @@
+#ifndef NEURALNETWORK__H
+#define NEURALNETWORK__H
+
+#include <dynet/nodes.h>
+#include <dynet/dynet.h>
+#include <dynet/training.h>
+#include <dynet/timing.h>
+#include <dynet/expr.h>
+#include "FeatureModel.hpp"
+
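+/// @brief Abstract interface for the neural networks used by this project.
+///
+/// Concrete subclasses implement the actual model behind init, predict, update, save and printTopology.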
+class NeuralNetwork
+{
+  public :
+
+  /// @brief Convert a dynet expression to a string (useful for debugging).
+  ///
+  /// @param expr The expression to convert.
+  ///
+  /// @return A string representing the expression.
+  static std::string expression2str(dynet::Expression & expr);
+
+  /// @brief Initialize a new untrained NeuralNetwork from a desired topology.
+  ///
+  /// @param nbInputs The size of the input layer of the NeuralNetwork.
+  /// @param topology Description of the NeuralNetwork.
+  /// @param nbOutputs The size of the output layer of the NeuralNetwork.
+  virtual void init(int nbInputs, const std::string & topology, int nbOutputs) = 0;
+
+  /// @brief Give a score to each possible class, given an input.
+  ///
+  /// @param fd The input to use.
+  ///
+  /// @return A vector containing one score per possible class.
+  virtual std::vector<float> predict(FeatureModel::FeatureDescription & fd) = 0;
+
+  /// @brief Update the parameters according to the given gold class.
+  ///
+  /// @param fd The input to use.
+  /// @param gold The gold class of this input.
+  ///
+  /// @return The loss.
+  virtual float update(FeatureModel::FeatureDescription & fd, int gold) = 0;
+
+  /// @brief Save the NeuralNetwork to a file.
+  /// 
+  /// @param filename The file to write the NeuralNetwork to.
+  virtual void save(const std::string & filename) = 0;
+
+  /// @brief Print the topology of the NeuralNetwork.
+  ///
+  /// @param output Where the topology will be printed.
+  virtual void printTopology(FILE * output) = 0;
+};
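+
+// Illustrative usage sketch: given a pointer nn to some concrete subclass,
+// a caller would proceed roughly as follows (all names below are placeholders,
+// and the topology string format is defined by the implementation):
+//
+//   nn->init(nbInputs, topology, nbOutputs);
+//   float loss = nn->update(fd, goldClass);        // one training step, returns the loss
+//   std::vector<float> scores = nn->predict(fd);   // one score per possible class
+//   nn->save(modelFilename);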
+
+#endif
diff --git a/neural_network/src/NeuralNetwork.cpp b/neural_network/src/NeuralNetwork.cpp
new file mode 100644
index 0000000000000000000000000000000000000000..8be87b89b50a248ba7832e05621a1c8fe986f2de
--- /dev/null
+++ b/neural_network/src/NeuralNetwork.cpp
@@ -0,0 +1,20 @@
+#include "NeuralNetwork.hpp"
+
+std::string NeuralNetwork::expression2str(dynet::Expression & expr)
+{
+  std::string result = "";
+
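+  // Evaluate the expression and copy its values into a flat std::vector<float>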
+  auto elem = dynet::as_vector(expr.value());
+
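+  // Concatenate the values, separated by single spaces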
+  for (auto & f : elem)
+    result += float2str(f, "%f") + " ";
+
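+  // Remove the trailing space left by the loop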
+  if (!result.empty())
+    result.pop_back();
+
+  return result;
+}
+