Skip to content
GitLab
Explore
Sign in
Primary navigation
Search or go to…
Project
O
old_macaon
Manage
Activity
Members
Labels
Plan
Issues
Issue boards
Milestones
Code
Repository
Branches
Commits
Tags
Repository graph
Compare revisions
Deploy
Releases
Container registry
Model registry
Monitor
Incidents
Analyze
Value stream analytics
Contributor analytics
Repository analytics
Model experiments
Help
Help
Support
GitLab documentation
Compare GitLab plans
GitLab community forum
Contribute to GitLab
Provide feedback
Keyboard shortcuts
?
Snippets
Groups
Projects
Show more breadcrumbs
Franck Dary
old_macaon
Commits
91a5461f
Commit
91a5461f
authored
6 years ago
by
Franck Dary
Browse files
Options
Downloads
Patches
Plain Diff
Added precise intermediate computation results to the showFeatureRepresentation output
parent
20b709cf
No related branches found
No related tags found
No related merge requests found
Changes
2
Show whitespace changes
Inline
Side-by-side
Showing
2 changed files
MLP/include/MLP.hpp
+6
-0
6 additions, 0 deletions
MLP/include/MLP.hpp
MLP/src/MLP.cpp
+42
-0
42 additions, 0 deletions
MLP/src/MLP.cpp
with
48 additions
and
0 deletions
MLP/include/MLP.hpp
+
6
−
0
View file @
91a5461f
...
@@ -167,6 +167,12 @@ class MLP
...
@@ -167,6 +167,12 @@ class MLP
public
:
public
:
/// @brief Convert a dynet expression to a string (useful for debug purposes)
///
/// @param expr The expression to convert.
///
/// @return A string representing the expression.
static
std
::
string
expression2str
(
dynet
::
Expression
&
expr
);
/// @brief initialize a new untrained MLP from a desired topology.
/// @brief initialize a new untrained MLP from a desired topology.
///
///
/// topology example for 2 hidden layers : (150,RELU,0.3)(50,ELU,0.2)\n
/// topology example for 2 hidden layers : (150,RELU,0.3)(50,ELU,0.2)\n
...
...
This diff is collapsed.
Click to expand it.
MLP/src/MLP.cpp
+
42
−
0
View file @
91a5461f
...
@@ -247,9 +247,19 @@ dynet::Expression MLP::featValue2Expression(dynet::ComputationGraph & cg, const
...
@@ -247,9 +247,19 @@ dynet::Expression MLP::featValue2Expression(dynet::ComputationGraph & cg, const
dynet
::
Expression
MLP
::
run
(
dynet
::
ComputationGraph
&
cg
,
dynet
::
Expression
x
)
dynet
::
Expression
MLP
::
run
(
dynet
::
ComputationGraph
&
cg
,
dynet
::
Expression
x
)
{
{
static
std
::
vector
<
std
::
pair
<
std
::
string
,
dynet
::
Expression
>
>
exprForDebug
;
// Expression for the current hidden state
// Expression for the current hidden state
dynet
::
Expression
h_cur
=
x
;
dynet
::
Expression
h_cur
=
x
;
if
(
ProgramParameters
::
showFeatureRepresentation
)
{
for
(
unsigned
int
i
=
0
;
i
<
81
;
i
++
)
fprintf
(
stderr
,
"%s"
,
i
==
80
?
"
\n
"
:
"-"
);
exprForDebug
.
clear
();
exprForDebug
.
emplace_back
(
"Input layer"
,
h_cur
);
}
for
(
unsigned
int
l
=
0
;
l
<
layers
.
size
();
l
++
)
for
(
unsigned
int
l
=
0
;
l
<
layers
.
size
();
l
++
)
{
{
// Initialize parameters in computation graph
// Initialize parameters in computation graph
...
@@ -275,9 +285,25 @@ dynet::Expression MLP::run(dynet::ComputationGraph & cg, dynet::Expression x)
...
@@ -275,9 +285,25 @@ dynet::Expression MLP::run(dynet::ComputationGraph & cg, dynet::Expression x)
h_dropped
=
h
;
h_dropped
=
h
;
}
}
if
(
ProgramParameters
::
showFeatureRepresentation
)
{
exprForDebug
.
emplace_back
(
"Result of h = h*W_"
+
std
::
to_string
(
l
)
+
" + b_"
+
std
::
to_string
(
l
),
a
);
exprForDebug
.
emplace_back
(
"Result of h = a_"
+
std
::
to_string
(
l
)
+
"(h)"
,
h
);
exprForDebug
.
emplace_back
(
"Result of h = dropout_"
+
std
::
to_string
(
l
)
+
"(h)"
,
h_dropped
);
}
h_cur
=
h_dropped
;
h_cur
=
h_dropped
;
}
}
if
(
ProgramParameters
::
showFeatureRepresentation
)
{
cg
.
forward
(
h_cur
);
for
(
auto
&
it
:
exprForDebug
)
fprintf
(
stderr
,
"%s (dimension=%lu) :
\n
%s
\n
"
,
it
.
first
.
c_str
(),
dynet
::
as_vector
(
it
.
second
.
value
()).
size
(),
expression2str
(
it
.
second
).
c_str
());
for
(
unsigned
int
i
=
0
;
i
<
81
;
i
++
)
fprintf
(
stderr
,
"%s"
,
i
==
80
?
"
\n
"
:
"-"
);
}
return
h_cur
;
return
h_cur
;
}
}
...
@@ -413,3 +439,19 @@ dynet::ParameterCollection & MLP::getModel()
...
@@ -413,3 +439,19 @@ dynet::ParameterCollection & MLP::getModel()
return
model
;
return
model
;
}
}
/// @brief Convert a dynet expression to a string (useful for debug purposes).
///
/// The expression's computed value is flattened to a vector of floats and
/// rendered as "<v0 v1 ... vn>" using float2str with the "%f" format.
///
/// @param expr The expression to convert (its value must already be computable).
///
/// @return A string representation of the expression's values.
std::string MLP::expression2str(dynet::Expression & expr)
{
  std::string result = "<";

  // Flatten the expression's value into a plain vector of floats.
  auto elem = dynet::as_vector(expr.value());

  for (auto & f : elem)
    result += float2str(f, "%f") + " ";

  // Drop the trailing separator space — but only if at least one value was
  // appended; otherwise pop_back() would erase the opening '<' and yield ">".
  if (!elem.empty())
    result.pop_back();

  result += ">";

  return result;
}
This diff is collapsed.
Click to expand it.
Preview
0%
Loading
Try again
or
attach a new file
.
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Save comment
Cancel
Please
register
or
sign in
to comment