Skip to content
Snippets Groups Projects
Commit 91a5461f authored by Franck Dary's avatar Franck Dary
Browse files

Added the precise intermediate computation results to the showFeatureRepresentation output

parent 20b709cf
No related branches found
No related tags found
No related merge requests found
...@@ -167,6 +167,12 @@ class MLP ...@@ -167,6 +167,12 @@ class MLP
public : public :
/// @brief Convert a dynet expression to a string (useful for debug purposes)
///
/// @param expr The expression to convert.
///
/// @return A string representing the expression.
static std::string expression2str(dynet::Expression & expr);
/// @brief initialize a new untrained MLP from a desired topology. /// @brief initialize a new untrained MLP from a desired topology.
/// ///
/// topology example for 2 hidden layers : (150,RELU,0.3)(50,ELU,0.2)\n /// topology example for 2 hidden layers : (150,RELU,0.3)(50,ELU,0.2)\n
......
...@@ -247,9 +247,19 @@ dynet::Expression MLP::featValue2Expression(dynet::ComputationGraph & cg, const ...@@ -247,9 +247,19 @@ dynet::Expression MLP::featValue2Expression(dynet::ComputationGraph & cg, const
dynet::Expression MLP::run(dynet::ComputationGraph & cg, dynet::Expression x) dynet::Expression MLP::run(dynet::ComputationGraph & cg, dynet::Expression x)
{ {
static std::vector< std::pair<std::string,dynet::Expression> > exprForDebug;
// Expression for the current hidden state // Expression for the current hidden state
dynet::Expression h_cur = x; dynet::Expression h_cur = x;
if (ProgramParameters::showFeatureRepresentation)
{
for (unsigned int i = 0; i < 81; i++)
fprintf(stderr, "%s", i == 80 ? "\n" : "-");
exprForDebug.clear();
exprForDebug.emplace_back("Input layer", h_cur);
}
for(unsigned int l = 0; l < layers.size(); l++) for(unsigned int l = 0; l < layers.size(); l++)
{ {
// Initialize parameters in computation graph // Initialize parameters in computation graph
...@@ -275,9 +285,25 @@ dynet::Expression MLP::run(dynet::ComputationGraph & cg, dynet::Expression x) ...@@ -275,9 +285,25 @@ dynet::Expression MLP::run(dynet::ComputationGraph & cg, dynet::Expression x)
h_dropped = h; h_dropped = h;
} }
if (ProgramParameters::showFeatureRepresentation)
{
exprForDebug.emplace_back("Result of h = h*W_" + std::to_string(l) + " + b_" + std::to_string(l), a);
exprForDebug.emplace_back("Result of h = a_" + std::to_string(l) + "(h)", h);
exprForDebug.emplace_back("Result of h = dropout_" + std::to_string(l) + "(h)", h_dropped);
}
h_cur = h_dropped; h_cur = h_dropped;
} }
if (ProgramParameters::showFeatureRepresentation)
{
cg.forward(h_cur);
for (auto & it : exprForDebug)
fprintf(stderr, "%s (dimension=%lu) :\n%s\n", it.first.c_str(), dynet::as_vector(it.second.value()).size(), expression2str(it.second).c_str());
for (unsigned int i = 0; i < 81; i++)
fprintf(stderr, "%s", i == 80 ? "\n" : "-");
}
return h_cur; return h_cur;
} }
...@@ -413,3 +439,19 @@ dynet::ParameterCollection & MLP::getModel() ...@@ -413,3 +439,19 @@ dynet::ParameterCollection & MLP::getModel()
return model; return model;
} }
/// @brief Render the current value of a dynet expression as a human-readable string.
///
/// The expression must already have been evaluated (e.g. via cg.forward),
/// since expr.value() is read directly.
///
/// @param expr The expression whose value will be printed.
///
/// @return The expression's elements formatted as "<v0 v1 ... vn>";
///         an empty expression yields "<>".
std::string MLP::expression2str(dynet::Expression & expr)
{
  std::string result = "<";

  auto elem = dynet::as_vector(expr.value());

  for (auto & f : elem)
    result += float2str(f, "%f") + " ";

  // Drop the trailing separator only if something was appended;
  // otherwise pop_back() would erase the opening '<' and return ">".
  if (!elem.empty())
    result.pop_back();

  result += ">";

  return result;
}
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or to comment