Commit a3f93cb0 authored by Marjorie Armando

initial commit

parent 9b8f2f13
@@ -3,15 +3,14 @@ project(macaon2)
find_package(FLEX)
find_package(BLAS)
-#find_package(DyNet)
+find_package(DyNet)
add_definitions("-Wall" )
SET(CMAKE_C_COMPILER g++)
SET(CMAKE_CXX_COMPILER g++)
-SET( CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11 -Ofast -DUSE_CBLAS -I/home/marjorie/Documents/LIB/dynet -I/home/marjorie/Documents/LIB/eigen")
-SET( CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -lm -lopenblas -L/home/marjorie/Documents/LIB/dynet/build/dynet -ldynet" )
+SET( CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11 -Ofast -DUSE_CBLAS" )
+SET( CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -lm -lopenblas -ldynet" )
if (${CMAKE_C_COMPILER_VERSION} VERSION_LESS 5.3)
......
@@ -26,7 +26,7 @@ Layer::Layer(unsigned input_dim, unsigned output_dim, Activation activation, flo
*/
MLP::MLP(ParameterCollection & model)
{
-    LAYERS = 0;
+    LAYERS = layers.size();
}
/**
@@ -34,10 +34,11 @@ MLP::MLP(ParameterCollection & model)
* \details Creates a feedforward multilayer perceptron based on a list of layer descriptions
*
* \param model : ParameterCollection (to contain parameters)
-* \param layers : Layers description
+* \param filename : file containing the MLP's structure
*/
-MLP::MLP(ParameterCollection& model, vector<Layer> layers)
+MLP::MLP(ParameterCollection& model, char* filename/*vector<Layer> layers*/)
{
+    read_struct_mlp(filename);
// Verify layers compatibility
for (unsigned l = 0; l < layers.size() - 1; ++l)
{
@@ -210,3 +211,25 @@ inline Expression MLP::activate(Expression h, Activation f)
break;
}
}
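Only the tail of MLP::activate appears in this hunk. For readability, a minimal sketch of the dispatch it closes, assuming DyNet's rectify builtin and only the Activation values visible elsewhere in this diff (RELU, LINEAR); the real function may cover more cases:

    inline Expression MLP::activate(Expression h, Activation f)
    {
        switch (f)
        {
            case LINEAR:
                return h;            // identity, used on the output layer
            case RELU:
                return rectify(h);   // dynet::rectify
            default:
                cerr << "Unknown activation" << endl;
                exit(EXIT_FAILURE);
                break;
        }
        return h;                    // unreachable; silences -Wall
    }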
+void MLP::read_struct_mlp(char* filename)
+{
+    ifstream file(filename, ios::in);
+    if(!file)
+    {
+        cerr << "Unable to open file " << filename << endl;
+        exit(EXIT_FAILURE);
+    }
+    unsigned input_dim, output_dim;
+    int activation;
+    float dropout;
+    // One layer per line: input_dim output_dim activation dropout
+    while(file >> input_dim >> output_dim >> activation >> dropout)
+    {
+        Layer tmp_layer(input_dim, output_dim, static_cast<Activation>(activation), dropout);
+        layers.push_back(tmp_layer);
+    }
+}
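For illustration, a minimal sketch of how this loader would be fed; the file name mlp.struct and its contents are hypothetical, and the integer activation codes are assumed to match the Activation enum:

    // mlp.struct (hypothetical): one layer per line,
    // formatted as "input_dim output_dim activation dropout":
    //   5   50   1  0.2
    //   50  100  1  0.2
    //   100 3    0  0.0
    dynet::ParameterCollection model;
    MLP nn(model, (char*)"mlp.struct");  // the new constructor calls read_struct_mlp()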
@@ -57,8 +57,9 @@
bool dropout_active = true;
public:
+void read_struct_mlp(char* filename);
MLP(dynet::ParameterCollection & model);
-MLP(dynet::ParameterCollection& model, std::vector<Layer> layers);
+MLP(dynet::ParameterCollection& model, char* filename/*std::vector<Layer> layers*/);
void append(dynet::ParameterCollection& model, Layer layer);
dynet::Expression run(dynet::Expression x, dynet::ComputationGraph& cg);
dynet::Expression get_nll(dynet::Expression x, std::vector</*dynet::real*/unsigned int> labels, dynet::ComputationGraph& cg);
......
@@ -3,6 +3,16 @@
./trainCFF train_file dev_file batch_size nb_epochs
**/
+/**
+Fix cmake so that it includes DyNet
+Read Flo and Seb's file
+init the vector of Layer (push_back)
+*
+One-hot representation
+Embedding representation
+**/
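The one-hot item above could, for instance, look like the sketch below; the helper name one_hot is illustrative and not part of this commit:

    // Builds a one-hot input expression: `dim` zeros with position `idx` set to 1.
    dynet::Expression one_hot(dynet::ComputationGraph& cg, unsigned dim, unsigned idx)
    {
        std::vector<float> v(dim, 0.f);
        v[idx] = 1.f;
        return dynet::input(cg, dynet::Dim({dim}), v);  // copies v into the graph
    }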
#include <iostream>
#include <fstream>
#include "train_cff.hpp"
@@ -21,7 +31,6 @@ using namespace dynet;
/**
* Retrieves the network's input dimension from an fm file
* */
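The function's body is elided from this hunk; a minimal sketch of such a reader, assuming the input dimension is the first token of the fm file (the name read_input_dim is hypothetical):

    unsigned read_input_dim(const char* fm_filename)
    {
        ifstream fm(fm_filename);
        if (!fm)
        {
            cerr << "Unable to open file " << fm_filename << endl;
            exit(EXIT_FAILURE);
        }
        unsigned dim = 0;
        fm >> dim;  // first token taken as the number of features
        return dim;
    }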
@@ -138,7 +147,7 @@ int main(int argc, char** argv)
// Create model
MLP nn(model, vector<Layer>({
-Layer(/* input_dim (number of features) */ 5, /* output_dim */ 50, /* activation */ RELU, /* dropout_rate */ 0.2),
+Layer(/* input_dim (number of features) */ input_dim, /* output_dim */ 50, /* activation */ RELU, /* dropout_rate */ 0.2),
Layer(/* input_dim */ 50, /* output_dim */ 100, /* activation */ RELU, /* dropout_rate */ 0.2),
Layer(/* input_dim */ 100, /* output_dim */ 150, /* activation */ RELU, /* dropout_rate */ 0.2),
Layer(/* input_dim */ 150, /* output_dim (number of possible classes) */ 3, /* activation */ LINEAR, /* dropout_rate */ 0.0)
......