Skip to content
Snippets Groups Projects
Commit 5d1d99f1 authored by Franck Dary's avatar Franck Dary
Browse files

Added an option to choose the size of each training epoch

parent 66cbc6bb
No related branches found
No related tags found
No related merge requests found
...@@ -21,7 +21,7 @@ endif() ...@@ -21,7 +21,7 @@ endif()
set(CMAKE_CXX_FLAGS "-Wall -Wextra -std=c++11") set(CMAKE_CXX_FLAGS "-Wall -Wextra -std=c++11")
set(CMAKE_CXX_FLAGS_DEBUG "-g3 -Ofast") set(CMAKE_CXX_FLAGS_DEBUG "-g3 -Ofast")
set(CMAKE_CXX_FLAGS_RELEASE "-Ofast") set(CMAKE_CXX_FLAGS_RELEASE "-Ofast -march=native")
include_directories(maca_common/include) include_directories(maca_common/include)
include_directories(transition_machine/include) include_directories(transition_machine/include)
......
...@@ -37,6 +37,7 @@ struct ProgramParameters ...@@ -37,6 +37,7 @@ struct ProgramParameters
static int dynamicEpoch; static int dynamicEpoch;
static float dynamicProbability; static float dynamicProbability;
static bool showFeatureRepresentation; static bool showFeatureRepresentation;
static int iterationSize;
private : private :
......
...@@ -32,3 +32,4 @@ bool ProgramParameters::interactive; ...@@ -32,3 +32,4 @@ bool ProgramParameters::interactive;
int ProgramParameters::dynamicEpoch; int ProgramParameters::dynamicEpoch;
float ProgramParameters::dynamicProbability; float ProgramParameters::dynamicProbability;
bool ProgramParameters::showFeatureRepresentation; bool ProgramParameters::showFeatureRepresentation;
int ProgramParameters::iterationSize;
...@@ -98,15 +98,23 @@ void Trainer::train() ...@@ -98,15 +98,23 @@ void Trainer::train()
fprintf(stderr, "Training of \'%s\' :\n", tm.name.c_str()); fprintf(stderr, "Training of \'%s\' :\n", tm.name.c_str());
for (curIter = 0; curIter < ProgramParameters::nbIter; curIter++) auto resetAndShuffle = [this]()
{ {
tm.reset(); tm.reset();
trainConfig.reset(); trainConfig.reset();
if(ProgramParameters::shuffleExamples) if(ProgramParameters::shuffleExamples)
trainConfig.shuffle("EOS", "1"); trainConfig.shuffle("EOS", "1");
for (auto & it : trainCounter)
it.second.first = it.second.second = 0;
};
curIter = 0;
int nbSteps = 0;
while (curIter < ProgramParameters::nbIter)
{
resetAndShuffle();
while (!trainConfig.isFinal()) while (!trainConfig.isFinal())
{ {
TransitionMachine::State * currentState = tm.getCurrentState(); TransitionMachine::State * currentState = tm.getCurrentState();
...@@ -142,8 +150,12 @@ void Trainer::train() ...@@ -142,8 +150,12 @@ void Trainer::train()
// Print current iter advancement in percentage // Print current iter advancement in percentage
if (ProgramParameters::interactive) if (ProgramParameters::interactive)
if (trainConfig.head % 200 == 0 || trainConfig.tapes[0].ref.size()-trainConfig.head < 200) {
fprintf(stderr, "Current Iteration : %.2f%%\r", 100.0*trainConfig.head/trainConfig.tapes[0].ref.size()); int totalSize = ProgramParameters::iterationSize == -1 ? trainConfig.tapes[0].hyp.size() : ProgramParameters::iterationSize;
int steps = ProgramParameters::iterationSize == -1 ? trainConfig.head : nbSteps;
if (steps % 200 == 0 || totalSize-steps < 200)
fprintf(stderr, "Current Iteration : %.2f%%\r", 100.0*steps/totalSize);
}
auto weightedActions = classifier->weightActions(trainConfig); auto weightedActions = classifier->weightActions(trainConfig);
std::string pAction = ""; std::string pAction = "";
...@@ -215,9 +227,28 @@ void Trainer::train() ...@@ -215,9 +227,28 @@ void Trainer::train()
tm.takeTransition(transition); tm.takeTransition(transition);
trainConfig.moveHead(transition->headMvt); trainConfig.moveHead(transition->headMvt);
} }
nbSteps++;
if (ProgramParameters::iterationSize != -1 && nbSteps >= ProgramParameters::iterationSize)
{
printScoresAndSave(stderr);
nbSteps = 0;
curIter++;
if (curIter >= ProgramParameters::nbIter)
break;
}
} }
if (ProgramParameters::iterationSize == -1)
{
printScoresAndSave(stderr); printScoresAndSave(stderr);
nbSteps = 0;
curIter++;
if (curIter >= ProgramParameters::nbIter)
break;
}
} }
} }
......
...@@ -46,6 +46,8 @@ po::options_description getOptionsDescription() ...@@ -46,6 +46,8 @@ po::options_description getOptionsDescription()
"Language you are working with") "Language you are working with")
("nbiter,n", po::value<int>()->default_value(5), ("nbiter,n", po::value<int>()->default_value(5),
"Number of training epochs (iterations)") "Number of training epochs (iterations)")
("iterationSize", po::value<int>()->default_value(-1),
"The number of examples for each iteration. -1 means the whole training set.")
("lr", po::value<float>()->default_value(0.001), ("lr", po::value<float>()->default_value(0.001),
"Learning rate of the optimizer") "Learning rate of the optimizer")
("seed,s", po::value<int>()->default_value(100), ("seed,s", po::value<int>()->default_value(100),
...@@ -149,6 +151,7 @@ int main(int argc, char * argv[]) ...@@ -149,6 +151,7 @@ int main(int argc, char * argv[])
ProgramParameters::dynamicEpoch = vm["epochd"].as<int>(); ProgramParameters::dynamicEpoch = vm["epochd"].as<int>();
ProgramParameters::dynamicProbability = vm["proba"].as<float>(); ProgramParameters::dynamicProbability = vm["proba"].as<float>();
ProgramParameters::showFeatureRepresentation = vm["showFeatureRepresentation"].as<bool>(); ProgramParameters::showFeatureRepresentation = vm["showFeatureRepresentation"].as<bool>();
ProgramParameters::iterationSize = vm["iterationSize"].as<int>();
const char * MACAON_DIR = std::getenv("MACAON_DIR"); const char * MACAON_DIR = std::getenv("MACAON_DIR");
std::string slash = "/"; std::string slash = "/";
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or sign in to comment