Commit 813ca8e8 authored by Franck Dary

Added option alwaysSave for training

parent 03ae370d
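
The new --alwaysSave switch makes training write the model at every epoch. Without it, computeMustSaves marks an epoch's model for saving only when its dev (or train) score is at least as good as every previous epoch; with the switch set, that check is skipped and every epoch is saved.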
@@ -75,6 +75,7 @@ struct ProgramParameters
   static bool devLoss;
   static bool randomDebug;
   static float randomDebugProbability;
+  static bool alwaysSave;
   private :
...
@@ -69,4 +69,4 @@ bool ProgramParameters::devEvalOnGold;
 bool ProgramParameters::devLoss;
 bool ProgramParameters::randomDebug;
 float ProgramParameters::randomDebugProbability;
+bool ProgramParameters::alwaysSave;
@@ -41,6 +41,7 @@ po::options_description getTrainOptionsDescription()
   opt.add_options()
     ("help,h", "Produce this help message")
     ("debug,d", "Print infos on stderr")
+    ("alwaysSave", "Save the model at every iteration")
     ("randomDebug", "Print infos on stderr with a probability of randomDebugProbability")
     ("randomDebugProbability", po::value<float>()->default_value(0.001),
       "Probability that debug infos will be printed")
@@ -268,6 +269,7 @@ void loadTrainProgramParameters(int argc, char * argv[])
   ProgramParameters::bdName = vm["bd"].as<std::string>();
   ProgramParameters::mcdName = vm["mcd"].as<std::string>();
   ProgramParameters::debug = vm.count("debug") == 0 ? false : true;
+  ProgramParameters::alwaysSave = vm.count("alwaysSave") == 0 ? false : true;
   ProgramParameters::randomDebug = vm.count("randomDebug") == 0 ? false : true;
   ProgramParameters::printEntropy = vm.count("printEntropy") == 0 ? false : true;
   ProgramParameters::printTime = vm.count("printTime") == 0 ? false : true;
...
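
For context, a minimal self-contained sketch (hypothetical main, not code from this repository) of the Boost.Program_options idiom the hunks above rely on: a value-less switch is registered with add_options() and later converted to a bool by testing vm.count().

// Sketch of the flag-registration and parsing pattern; opt, vm and the
// main() wrapper are illustrative, only the option names come from the diff.
#include <boost/program_options.hpp>
#include <iostream>

namespace po = boost::program_options;

int main(int argc, char * argv[])
{
  po::options_description opt("Options");
  opt.add_options()
    ("help,h", "Produce this help message")
    ("alwaysSave", "Save the model at every iteration");

  po::variables_map vm;
  po::store(po::parse_command_line(argc, argv, opt), vm);
  po::notify(vm);

  // Same idiom as loadTrainProgramParameters: the switch is set iff count != 0.
  bool alwaysSave = vm.count("alwaysSave") != 0;
  std::cout << "alwaysSave = " << (alwaysSave ? "true" : "false") << std::endl;

  return 0;
}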
@@ -203,6 +203,7 @@ void TrainInfos::computeMustSaves()
   for (auto & it : devScoresPerClassifierPerEpoch)
   {
     mustSavePerClassifierPerEpoch[it.first].push_back(true);
+    if (!ProgramParameters::alwaysSave)
     for (auto & score : it.second)
       if (score > it.second.back())
         mustSavePerClassifierPerEpoch[it.first].back() = false;
@@ -214,6 +215,7 @@ void TrainInfos::computeMustSaves()
   for (auto & it : trainScoresPerClassifierPerEpoch)
   {
     mustSavePerClassifierPerEpoch[it.first].push_back(true);
+    if (!ProgramParameters::alwaysSave)
     for (auto & score : it.second)
       if (score > it.second.back())
         mustSavePerClassifierPerEpoch[it.first].back() = false;
...
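
Read in isolation, the two hunks implement the following rule, condensed below for a single classifier in a standalone sketch (the mustSave helper and the example scores are hypothetical, not code from the repository): the newest model is kept only when no earlier epoch scored strictly better, and alwaysSave short-circuits that check.

// Simplified version of the save decision; TrainInfos actually tracks one
// score vector per classifier, this collapses it to a single vector.
#include <iostream>
#include <vector>

bool mustSave(const std::vector<float> & scoresPerEpoch, bool alwaysSave)
{
  bool save = true;

  if (!alwaysSave)
    for (float score : scoresPerEpoch)
      if (score > scoresPerEpoch.back())   // an earlier epoch was strictly better
        save = false;

  return save;
}

int main()
{
  std::vector<float> devScores = {80.2f, 83.5f, 82.9f};  // example values only

  std::cout << "alwaysSave=false : " << (mustSave(devScores, false) ? "save" : "skip") << "\n";
  std::cout << "alwaysSave=true  : " << (mustSave(devScores, true)  ? "save" : "skip") << "\n";

  return 0;
}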