Skip to content
Snippets Groups Projects
Commit 2ef613e3 authored by Franck Dary's avatar Franck Dary
Browse files

changed loss function

parent 1f6407c2
Branches
No related tags found
No related merge requests found
...@@ -284,7 +284,7 @@ dynet::Expression MLP::errorCorrectionLoss(dynet::ComputationGraph & cg, dynet:: ...@@ -284,7 +284,7 @@ dynet::Expression MLP::errorCorrectionLoss(dynet::ComputationGraph & cg, dynet::
unsigned int u = 0; unsigned int u = 0;
dynet::Expression c = dynet::pick(dynet::one_hot(cg, layers.back().output_dim, oneHotGolds[i]),u); dynet::Expression c = dynet::pick(dynet::one_hot(cg, layers.back().output_dim, oneHotGolds[i]),u);
dynet::Expression a = dynet::pick(dynet::softmax(dynet::pick_batch_elem(output,i)),u); dynet::Expression a = dynet::pick(dynet::softmax(dynet::pick_batch_elem(output,i)),u);
lossExpr.emplace_back(pickneglogsoftmax(dynet::pick_batch_elem(output, i), oneHotGolds[i]) + c-a*c); lossExpr.emplace_back(dynet::pickneglogsoftmax(dynet::pick_batch_elem(output, i),oneHotGolds[i])+2-c-a*c+(dynet::acos(a-1)*(c-1)));
if (ProgramParameters::debug) if (ProgramParameters::debug)
{ {
cg.forward(lossExpr.back()); cg.forward(lossExpr.back());
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or sign in to comment