Skip to content
Snippets Groups Projects
Commit b0bb4445 authored by Franck Dary's avatar Franck Dary
Browse files

Set amsgrad as default optimizer

parent c1c05d0e
No related branches found
No related tags found
No related merge requests found
......@@ -26,7 +26,6 @@ class Trainer
Trainer(ReadingMachine & machine);
void createDataset(SubConfig & goldConfig, bool debug);
float epoch(bool printAdvancement);
};
#endif
......@@ -58,7 +58,7 @@ void Trainer::createDataset(SubConfig & config, bool debug)
dataLoader = torch::data::make_data_loader(Dataset(contexts, classes).map(torch::data::transforms::Stack<>()), torch::data::DataLoaderOptions(batchSize).workers(0).max_jobs(0));
optimizer.reset(new torch::optim::Adam(machine.getClassifier()->getNN()->parameters(), torch::optim::AdamOptions(0.001).beta1(0.9).beta2(0.999)));
optimizer.reset(new torch::optim::Adam(machine.getClassifier()->getNN()->parameters(), torch::optim::AdamOptions(0.001).amsgrad(true).beta1(0.9).beta2(0.999)));
}
float Trainer::epoch(bool printAdvancement)
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment