Commit 6a9e3b32 authored by Franck Dary

Added a way to specify the .dict file as a program parameter

parent 38f7f9ad
@@ -53,6 +53,7 @@ struct ProgramParameters
   static std::string classifierName;
   static int batchSize;
   static std::string loss;
+  static std::string dicts;
   private :
@@ -25,6 +25,7 @@ std::string ProgramParameters::devFilename;
 std::string ProgramParameters::devName;
 std::string ProgramParameters::lang;
 std::string ProgramParameters::optimizer;
+std::string ProgramParameters::dicts;
 int ProgramParameters::nbIter;
 int ProgramParameters::seed;
 bool ProgramParameters::removeDuplicates;
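A side note on why the new member shows up in two files: a static data member is only declared inside the struct, and it still needs exactly one out-of-class definition in a .cpp file. A minimal sketch of that pattern (the struct name is a stand-in for illustration, not the project's real ProgramParameters header):

#include <string>

// Declaration, as it would sit in the header.
struct ParametersSketch
{
  static std::string dicts;   // declared here, not yet defined
};

// The one required out-of-class definition, placed in a single .cpp file.
// (With C++17 the member could instead be marked "inline" in the header.)
std::string ParametersSketch::dicts;

int main() { return 0; }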
@@ -41,6 +41,8 @@ po::options_description getOptionsDescription()
   ("help,h", "Produce this help message")
   ("debug,d", "Print infos on stderr")
   ("printEntropy", "Print mean entropy and standard deviation accross sequences")
+  ("dicts", po::value<std::string>()->default_value(""),
+    "The .dict file describing all the dictionaries to be used in the experiment. By default the filename specified in the .tm file will be used")
   ("optimizer", po::value<std::string>()->default_value("amsgrad"),
     "The learning algorithm to use : amsgrad | adam | sgd")
   ("loss", po::value<std::string>()->default_value("neglogsoftmax"),
@@ -265,6 +267,7 @@ int main(int argc, char * argv[])
   ProgramParameters::beta2 = vm["b2"].as<float>();
   ProgramParameters::bias = vm["bias"].as<float>();
   ProgramParameters::optimizer = vm["optimizer"].as<std::string>();
+  ProgramParameters::dicts = vm["dicts"].as<std::string>();
   ProgramParameters::loss = vm["loss"].as<std::string>();
   ProgramParameters::dynamicEpoch = vm["epochd"].as<int>();
   ProgramParameters::dynamicProbability = vm["proba"].as<float>();
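The two hunks above follow the usual boost::program_options flow: register --dicts with an empty default in getOptionsDescription(), then read it back from the variables_map after parsing. Below is a self-contained sketch of that flow; it reuses only the Boost calls visible in the diff, while the program structure and messages are illustrative rather than the project's real main():

#include <boost/program_options.hpp>
#include <iostream>
#include <string>

namespace po = boost::program_options;

int main(int argc, char * argv[])
{
  // Register --dicts with an empty string as its default value.
  po::options_description desc("Options");
  desc.add_options()
    ("dicts", po::value<std::string>()->default_value(""),
     "The .dict file describing the dictionaries to use");

  // Parse argv and expose the values through a variables_map.
  po::variables_map vm;
  po::store(po::parse_command_line(argc, argv, desc), vm);
  po::notify(vm);

  // default_value("") guarantees the key exists, so as<std::string>() is safe.
  std::string dicts = vm["dicts"].as<std::string>();
  std::cout << (dicts.empty() ? "(none, fall back to the .tm file)" : dicts) << "\n";
  return 0;
}

Run with --dicts something.dict and the string is forwarded; run without it and the empty default is what later triggers the fallback in the TransitionMachine hunk below.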
@@ -33,7 +33,10 @@ TransitionMachine::TransitionMachine(bool trainMode)
   if(fscanf(fd, "Dicts : %[^\n]\n", buffer) != 1)
     badFormatAndAbort(ERRINFO);
-  Dict::readDicts(ProgramParameters::expPath, ProgramParameters::expPath + buffer, trainMode);
+  if (ProgramParameters::dicts.empty())
+    ProgramParameters::dicts = ProgramParameters::expPath + buffer;
+  Dict::readDicts(ProgramParameters::expPath, ProgramParameters::dicts, trainMode);
   // Reading %CLASSIFIERS
   if(fscanf(fd, "%%%s\n", buffer) != 1 || buffer != std::string("CLASSIFIERS"))
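For clarity, here is the override-with-fallback rule the last hunk implements, reduced to a tiny standalone program; the function name and the file names are made up for the example:

#include <iostream>
#include <string>

// Same precedence as the hunk above: an explicit --dicts value wins,
// otherwise the filename read from the .tm file is used, relative to expPath.
std::string resolveDictsPath(const std::string & expPath,
                             const std::string & cliDicts,
                             const std::string & nameFromTmFile)
{
  if (cliDicts.empty())
    return expPath + nameFromTmFile;
  return cliDicts;
}

int main()
{
  std::cout << resolveDictsPath("exp/", "", "machine.dict") << "\n";                  // exp/machine.dict
  std::cout << resolveDictsPath("exp/", "/tmp/custom.dict", "machine.dict") << "\n";  // /tmp/custom.dict
  return 0;
}

The empty string doubles as the "not given" marker, which is why the option's default_value is "".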