Commit d0530fcd authored by Franck Dary

Added a way to specify the .fm file for each classifier as a command line argument

parent 2e407928
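For context: the new --featureModels option takes a comma-separated list of Classifier=file pairs, for example (values taken from the help string added below):

    --featureModels Parser=parser.fm,Tagger=tagger.fm

Each pair overrides the .fm path that would otherwise be read from that classifier's .cla file.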
@@ -6,6 +6,7 @@
 #define PROGRAMPARAMETERS__H
 #include <string>
+#include <map>
 struct ProgramParameters
 {
@@ -54,6 +55,7 @@ struct ProgramParameters
 static int batchSize;
 static std::string loss;
 static std::string dicts;
+static std::map<std::string,std::string> featureModelByClassifier;
 private :
...
@@ -49,3 +49,4 @@ std::string ProgramParameters::sequenceDelimiter;
 std::string ProgramParameters::classifierName;
 int ProgramParameters::batchSize;
 std::string ProgramParameters::loss;
+std::map<std::string,std::string> ProgramParameters::featureModelByClassifier;
@@ -43,6 +43,8 @@ po::options_description getOptionsDescription()
 ("printEntropy", "Print mean entropy and standard deviation across sequences")
 ("dicts", po::value<std::string>()->default_value(""),
  "The .dict file describing all the dictionaries to be used in the experiment. By default the filename specified in the .tm file will be used")
+("featureModels", po::value<std::string>()->default_value(""),
+ "For each classifier, specify which .fm (feature model) file to use. By default the filename specified in the .cla file will be used. Example : --featureModels Parser=parser.fm,Tagger=tagger.fm")
 ("optimizer", po::value<std::string>()->default_value("amsgrad"),
  "The learning algorithm to use : amsgrad | adam | sgd")
 ("loss", po::value<std::string>()->default_value("neglogsoftmax"),
@@ -273,6 +275,21 @@ int main(int argc, char * argv[])
 ProgramParameters::dynamicProbability = vm["proba"].as<float>();
 ProgramParameters::showFeatureRepresentation = vm["showFeatureRepresentation"].as<int>();
 ProgramParameters::iterationSize = vm["iterationSize"].as<int>();
+std::string featureModels = vm["featureModels"].as<std::string>();
+if (!featureModels.empty())
+{
+  auto byClassifiers = split(featureModels, ',');
+  for (auto & classifier : byClassifiers)
+  {
+    auto parts = split(classifier, '=');
+    if (parts.size() != 2)
+    {
+      fprintf(stderr, "ERROR (%s) : wrong format for argument of option featureModels. Aborting.\n", ERRINFO);
+      exit(1);
+    }
+    ProgramParameters::featureModelByClassifier[parts[0]] = parts[1];
+  }
+}
 if (ProgramParameters::nbTrain)
 {
...
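The parsing above relies on a split helper that is not part of this diff. A minimal sketch of what it is assumed to look like (the project's own utility may differ in name, location and signature):

#include <sstream>
#include <string>
#include <vector>

// Assumed helper : splits a string on a single-character delimiter.
std::vector<std::string> split(const std::string & s, char delimiter)
{
  std::vector<std::string> parts;
  std::stringstream ss(s);
  std::string part;
  while (std::getline(ss, part, delimiter))
    parts.push_back(part);
  return parts;
}

With this behaviour, "Parser=parser.fm,Tagger=tagger.fm" splits on ',' into two entries, each of which splits on '=' into exactly two parts; anything else triggers the error branch above.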
@@ -55,7 +55,12 @@ Classifier::Classifier(const std::string & filename, bool trainMode)
 if(fscanf(fd, "Feature Model : %s\n", buffer) != 1)
   badFormatAndAbort(ERRINFO);
-fm.reset(new FeatureModel(ProgramParameters::expPath + buffer));
+std::string fmFilename = ProgramParameters::expPath + buffer;
+if (ProgramParameters::featureModelByClassifier.count(this->name))
+  fmFilename = ProgramParameters::featureModelByClassifier[this->name];
+
+fm.reset(new FeatureModel(fmFilename));
+
 if(fscanf(fd, "Action Set : %s\n", buffer) != 1)
   badFormatAndAbort(ERRINFO);
...
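The Classifier change boils down to a lookup with a fallback. A hedged standalone sketch of that logic (resolveFeatureModelPath is a hypothetical name introduced only for illustration):

#include <map>
#include <string>

// Hypothetical illustration : a command line override for a given classifier
// name, when present, takes precedence over the path read from the .cla file.
std::string resolveFeatureModelPath(const std::string & classifierName,
                                    const std::string & pathFromClaFile,
                                    const std::map<std::string, std::string> & overrides)
{
  auto it = overrides.find(classifierName);
  if (it != overrides.end())
    return it->second;
  return pathFromClaFile;
}

Note that in the committed code the default remains ProgramParameters::expPath + buffer, and the command line value replaces it only when featureModelByClassifier contains an entry for this->name.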