Commit c85bd342 authored by Franck Dary

Added a way to obtain the old format when using the showFeatureRepresentation option with argument 2

parent 3c6fceb2
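Read together, the hunks below turn showFeatureRepresentation from a bool into an int and make the debug printing in MLP::run distinguish two display modes. A hedged summary of the values, inferred from this diff alone (the bare variable is illustrative and simply mirrors ProgramParameters::showFeatureRepresentation):

// A hedged reading of the new integer option, inferred from this diff only:
//   0   -> no feature-representation debug output (the new default)
//   1   -> the previous behaviour: labeled per-layer dumps between dashed lines
//   >=2 -> a compact dump: one unlabeled "| values |" block per expression
int showFeatureRepresentation = 0; // illustrative, mirrors ProgramParameters::showFeatureRepresentation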
@@ -270,10 +270,14 @@ dynet::Expression MLP::run(dynet::ComputationGraph & cg, dynet::Expression x)
   if (ProgramParameters::showFeatureRepresentation)
   {
+    if (ProgramParameters::showFeatureRepresentation == 1)
       for (unsigned int i = 0; i < 81; i++)
         fprintf(stderr, "%s", i == 80 ? "\n" : "-");
     exprForDebug.clear();
+    if (ProgramParameters::showFeatureRepresentation == 1)
       exprForDebug.emplace_back("Input layer", h_cur);
+    if (ProgramParameters::showFeatureRepresentation >= 2)
+      exprForDebug.emplace_back("", h_cur);
   }
   for(unsigned int l = 0; l < layers.size(); l++)
@@ -302,11 +306,19 @@ dynet::Expression MLP::run(dynet::ComputationGraph & cg, dynet::Expression x)
     }
     if (ProgramParameters::showFeatureRepresentation)
     {
+      if (ProgramParameters::showFeatureRepresentation == 1)
+      {
         exprForDebug.emplace_back("Result of h = h*W_" + std::to_string(l) + " + b_" + std::to_string(l), a);
         exprForDebug.emplace_back("Result of h = a_" + std::to_string(l) + "(h)", h);
         exprForDebug.emplace_back("Result of h = dropout_" + std::to_string(l) + "(h)", h_dropped);
+      }
+      else if (ProgramParameters::showFeatureRepresentation >= 2)
+      {
+        exprForDebug.emplace_back("", a);
+        exprForDebug.emplace_back("", h);
+      }
     }
     h_cur = h_dropped;
   }
@@ -314,11 +326,21 @@ dynet::Expression MLP::run(dynet::ComputationGraph & cg, dynet::Expression x)
   if (ProgramParameters::showFeatureRepresentation)
   {
     cg.forward(h_cur);
+    if (ProgramParameters::showFeatureRepresentation == 1)
+    {
       for (auto & it : exprForDebug)
         fprintf(stderr, "%s (dimension=%lu) :\n%s\n", it.first.c_str(), dynet::as_vector(it.second.value()).size(), expression2str(it.second).c_str());
       for (unsigned int i = 0; i < 81; i++)
         fprintf(stderr, "%s", i == 80 ? "\n" : "-");
+    }
+    else if (ProgramParameters::showFeatureRepresentation >= 2)
+    {
+      for (auto & it : exprForDebug)
+        fprintf(stderr, "| %s |", expression2str(it.second).c_str());
+      fprintf(stderr, "\n");
+    }
   }
   return h_cur;
 }
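To make the two modes concrete, here is a minimal, compilable sketch of the printing logic this hunk selects between, with std::vector<float> standing in for the dynet expressions and a local values2str replacing MLP::expression2str (everything outside the printing logic is illustrative, not the project's code):

#include <cstdio>
#include <string>
#include <utility>
#include <vector>

// Space-separated values, mimicking expression2str after this commit.
static std::string values2str(const std::vector<float> & v)
{
  std::string result;
  for (auto & f : v)
    result += std::to_string(f) + " ";
  if (!result.empty())
    result.pop_back();
  return result;
}

// showFeatureRepresentation == 1 : labeled, dimension-annotated dump ended by a dashed line.
// showFeatureRepresentation >= 2 : one compact "| values |" block per expression, on a single line.
void dumpDebug(int showFeatureRepresentation,
               const std::vector<std::pair<std::string, std::vector<float>>> & exprForDebug)
{
  if (showFeatureRepresentation == 1)
  {
    for (auto & it : exprForDebug)
      fprintf(stderr, "%s (dimension=%zu) :\n%s\n",
              it.first.c_str(), it.second.size(), values2str(it.second).c_str());
    for (unsigned int i = 0; i < 81; i++)
      fprintf(stderr, "%s", i == 80 ? "\n" : "-");
  }
  else if (showFeatureRepresentation >= 2)
  {
    for (auto & it : exprForDebug)
      fprintf(stderr, "| %s |", values2str(it.second).c_str());
    fprintf(stderr, "\n");
  }
}

Called as dumpDebug(2, {{"", {0.5f, 1.0f}}}), this prints a single line of the form | 0.500000 1.000000 |, which matches the compact format added for values >= 2.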
@@ -457,17 +479,16 @@ dynet::ParameterCollection & MLP::getModel()
 std::string MLP::expression2str(dynet::Expression & expr)
 {
-  std::string result = "<";
+  std::string result = "";
   auto elem = dynet::as_vector(expr.value());
   for (auto & f : elem)
     result += float2str(f, "%f") + " ";
   if (!result.empty())
     result.pop_back();
-  result += ">";
   return result;
 }
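For reference, a standalone sketch of what the simplified conversion now produces: values joined by single spaces, with the former '<' and '>' wrapping removed. Here std::vector<float> stands in for dynet::as_vector(expr.value()) and std::to_string replaces the project's float2str helper:

#include <string>
#include <vector>

// Joins the values with spaces, e.g. {0.5f, 1.0f} -> "0.500000 1.000000"
// (before this commit the result would have been "<0.500000 1.000000>").
std::string vector2str(const std::vector<float> & elem)
{
  std::string result = "";
  for (auto & f : elem)
    result += std::to_string(f) + " ";
  if (!result.empty())
    result.pop_back(); // drop the trailing space
  return result;
}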
@@ -42,7 +42,7 @@ po::options_description getOptionsDescription()
     "The name of the buffer's tape that contains the delimiter token for a sequence")
   ("sequenceDelimiter", po::value<std::string>()->default_value("1"),
     "The value of the token that act as a delimiter for sequences")
-  ("showFeatureRepresentation", po::value<bool>()->default_value(false),
+  ("showFeatureRepresentation", po::value<int>()->default_value(0),
     "For each state of the Config, show its feature representation")
   ("lang", po::value<std::string>()->default_value("fr"),
     "Language you are working with");
@@ -110,7 +110,7 @@ int main(int argc, char * argv[])
   ProgramParameters::lang = vm["lang"].as<std::string>();
   ProgramParameters::sequenceDelimiterTape = vm["sequenceDelimiterTape"].as<std::string>();
   ProgramParameters::sequenceDelimiter = vm["sequenceDelimiter"].as<std::string>();
-  ProgramParameters::showFeatureRepresentation = vm["showFeatureRepresentation"].as<bool>();
+  ProgramParameters::showFeatureRepresentation = vm["showFeatureRepresentation"].as<int>();
   const char * MACAON_DIR = std::getenv("MACAON_DIR");
   std::string slash = "/";
@@ -41,7 +41,7 @@ po::options_description getOptionsDescription()
     "The name of the buffer's tape that contains the delimiter token for a sequence")
   ("sequenceDelimiter", po::value<std::string>()->default_value("1"),
     "The value of the token that act as a delimiter for sequences")
-  ("showFeatureRepresentation", po::value<bool>()->default_value(false),
+  ("showFeatureRepresentation", po::value<int>()->default_value(0),
     "For each state of the Config, show its feature representation")
   ("lang", po::value<std::string>()->default_value("fr"),
     "Language you are working with");
@@ -109,7 +109,7 @@ int main(int argc, char * argv[])
   ProgramParameters::lang = vm["lang"].as<std::string>();
   ProgramParameters::sequenceDelimiterTape = vm["sequenceDelimiterTape"].as<std::string>();
   ProgramParameters::sequenceDelimiter = vm["sequenceDelimiter"].as<std::string>();
-  ProgramParameters::showFeatureRepresentation = vm["showFeatureRepresentation"].as<bool>();
+  ProgramParameters::showFeatureRepresentation = vm["showFeatureRepresentation"].as<int>();
   const char * MACAON_DIR = std::getenv("MACAON_DIR");
   std::string slash = "/";
@@ -61,7 +61,7 @@ po::options_description getOptionsDescription()
     "The number of models that will be trained, with only the random seed changing")
   ("duplicates", po::value<bool>()->default_value(true),
     "Remove identical training examples")
-  ("showFeatureRepresentation", po::value<bool>()->default_value(false),
+  ("showFeatureRepresentation", po::value<int>()->default_value(0),
     "For each state of the Config, show its feature representation")
   ("interactive", po::value<bool>()->default_value(true),
     "Is the shell interactive ? Display advancement informations")
@@ -515,7 +515,7 @@ int main(int argc, char * argv[])
   ProgramParameters::optimizer = vm["optimizer"].as<std::string>();
   ProgramParameters::dynamicEpoch = vm["epochd"].as<int>();
   ProgramParameters::dynamicProbability = vm["proba"].as<float>();
-  ProgramParameters::showFeatureRepresentation = vm["showFeatureRepresentation"].as<bool>();
+  ProgramParameters::showFeatureRepresentation = vm["showFeatureRepresentation"].as<int>();
   ProgramParameters::iterationSize = vm["iterationSize"].as<int>();
   if (ProgramParameters::nbTrain)
@@ -41,7 +41,7 @@ struct ProgramParameters
   static bool interactive;
   static int dynamicEpoch;
   static float dynamicProbability;
-  static bool showFeatureRepresentation;
+  static int showFeatureRepresentation;
   static int iterationSize;
   static int nbTrain;
   static bool randomEmbeddings;
@@ -36,7 +36,7 @@ float ProgramParameters::bias;
 bool ProgramParameters::interactive;
 int ProgramParameters::dynamicEpoch;
 float ProgramParameters::dynamicProbability;
-bool ProgramParameters::showFeatureRepresentation;
+int ProgramParameters::showFeatureRepresentation;
 bool ProgramParameters::randomEmbeddings;
 bool ProgramParameters::printEntropy;
 bool ProgramParameters::printTime;
@@ -59,7 +59,7 @@ po::options_description getOptionsDescription()
     "The number of models that will be trained, with only the random seed changing")
   ("duplicates", po::value<bool>()->default_value(true),
     "Remove identical training examples")
-  ("showFeatureRepresentation", po::value<bool>()->default_value(false),
+  ("showFeatureRepresentation", po::value<int>()->default_value(0),
     "For each state of the Config, show its feature representation")
   ("interactive", po::value<bool>()->default_value(true),
     "Is the shell interactive ? Display advancement informations")
@@ -262,7 +262,7 @@ int main(int argc, char * argv[])
   ProgramParameters::optimizer = vm["optimizer"].as<std::string>();
   ProgramParameters::dynamicEpoch = vm["epochd"].as<int>();
   ProgramParameters::dynamicProbability = vm["proba"].as<float>();
-  ProgramParameters::showFeatureRepresentation = vm["showFeatureRepresentation"].as<bool>();
+  ProgramParameters::showFeatureRepresentation = vm["showFeatureRepresentation"].as<int>();
   ProgramParameters::iterationSize = vm["iterationSize"].as<int>();
   if (ProgramParameters::nbTrain)
@@ -94,7 +94,7 @@ Classifier::WeightedActions Classifier::weightActions(Config & config)
   auto fd = fm->getFeatureDescription(config);
   auto scores = mlp->predict(fd);
-  if (ProgramParameters::showFeatureRepresentation)
+  if (ProgramParameters::showFeatureRepresentation == 1)
     fd.printForDebug(stderr);
   for (unsigned int i = 0; i < scores.size(); i++)