Commit 13a8c4bd, authored 5 years ago by Franck Dary
Added multiMLP
parent 8b5de980
Showing 3 changed files, with 267 additions and 0 deletions:
neural_network/include/MultiMLP.hpp : +93 −0
neural_network/src/MultiMLP.cpp : +170 −0
transition_machine/src/Classifier.cpp : +4 −0
neural_network/include/MultiMLP.hpp (new file, mode 100644, +93 −0)
/// \file MultiMLP.hpp
/// \author Franck Dary
/// @version 1.0
/// @date 2019-08-12

#ifndef MULTIMLP__H
#define MULTIMLP__H

#include "NeuralNetwork.hpp"
#include "MLPBase.hpp"
#include "ProgramParameters.hpp"

/// @brief Classifier consisting of one MLP per possible class.
///
/// It is capable of training itself given a batch of examples.\n
/// Once trained, it can also be used to predict the class of a certain input.
class MultiMLP : public NeuralNetwork
{
  private :

  /// @brief The MLPs that will each be trained to recognize one class.
  std::vector<MLPBase> mlps;

  /// @brief The training algorithm that will be used.
  std::unique_ptr<dynet::Trainer> trainer;

  public :

  /// @brief Initialize a new untrained MultiMLP from a desired topology.
  ///
  /// Topology example for 2 hidden layers : (150,RELU,0.3)(50,ELU,0.2)\n
  /// Of sizes 150 and 50, activation functions RELU and ELU, and dropout rates
  /// of 0.3 and 0.2.
  /// @param nbInputs The size of the input layer of each MLP.
  /// @param topology Description of each hidden layer of the MLPs.
  /// @param nbOutputs The number of possible classes (one MLP is created per class).
  void init(int nbInputs, const std::string & topology, int nbOutputs) override;

  /// @brief Construct a new MultiMLP for training.
  MultiMLP();

  /// @brief Read and construct a trained MultiMLP from a file.
  ///
  /// The file must have been written by save.
  /// @param filename The file to read the MultiMLP from.
  MultiMLP(const std::string & filename);

  /// @brief Give a score to each possible class, given an input.
  ///
  /// @param fd The input to use.
  ///
  /// @return A vector containing one score per possible class.
  std::vector<float> predict(FeatureModel::FeatureDescription & fd) override;

  /// @brief Update the parameters according to the given gold class.
  ///
  /// @param fd The input to use.
  /// @param gold The gold class of this input.
  ///
  /// @return The loss.
  float update(FeatureModel::FeatureDescription & fd, int gold) override;

  /// @brief Update the parameters according to the given gold vector.
  ///
  /// Not supported by MultiMLP : only classification is supported, so calling
  /// this overload aborts the program.
  /// @param fd The input to use.
  /// @param gold The gold vector for this input.
  ///
  /// @return The loss.
  float update(FeatureModel::FeatureDescription & fd, const std::vector<float> & gold) override;

  /// @brief Get the loss according to the given gold class.
  ///
  /// @param fd The input to use.
  /// @param gold The gold class of this input.
  ///
  /// @return The loss.
  float getLoss(FeatureModel::FeatureDescription & fd, int gold) override;

  /// @brief Get the loss according to the given gold vector.
  ///
  /// Not supported by MultiMLP : only classification is supported, so calling
  /// this overload aborts the program.
  /// @param fd The input to use.
  /// @param gold The gold vector for this input.
  ///
  /// @return The loss.
  float getLoss(FeatureModel::FeatureDescription & fd, const std::vector<float> & gold) override;

  /// @brief Save the MultiMLP to a file.
  ///
  /// @param filename The file to write the MultiMLP to.
  void save(const std::string & filename) override;

  /// @brief Print the topology (Layers) of the MultiMLP.
  ///
  /// @param output Where the topology will be printed.
  void printTopology(FILE * output) override;

  /// @brief Allocate the correct trainer type depending on the program parameters.
  ///
  /// @return A pointer to the newly allocated trainer.
  dynet::Trainer * createTrainer();

  /// @brief Propagate the end of a training iteration to every underlying MLP.
  void endOfIteration();
};

#endif
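For orientation (not part of the commit), here is a minimal training sketch against the interface declared above. The input size, topology string, class count, model path, and the Example holder are hypothetical placeholders; real FeatureDescription values come from the surrounding transition machine code.

#include <vector>
#include "MultiMLP.hpp"

// Hypothetical example holder; real inputs come from the feature model.
struct Example
{
  FeatureModel::FeatureDescription fd;
  int gold;
};

void trainSketch(std::vector<Example> & examples)
{
  MultiMLP network;
  // One binary MLP per class : nbOutputs is the number of classes.
  // The leading 'M' is the prefix Classifier uses to select MultiMLP;
  // MultiMLP::init strips it before configuring the MLPs.
  network.init(/*nbInputs=*/100, "M(150,RELU,0.3)(50,ELU,0.2)", /*nbOutputs=*/10);

  for (auto & example : examples)
    network.update(example.fd, example.gold);

  network.save("multimlp.model");
}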
neural_network/src/MultiMLP.cpp (new file, mode 100644, +170 −0)
#include "MultiMLP.hpp"

MultiMLP::MultiMLP()
{
  randomSeed = ProgramParameters::seed;
  trainer.reset(createTrainer());
  initDynet();
}
MultiMLP::MultiMLP(const std::string & filename)
{
  randomSeed = ProgramParameters::seed;
  trainer.reset(createTrainer());
  initDynet();

  File * file = new File(filename, "r");
  int nbMlps;
  if (fscanf(file->getDescriptor(), "#NBMLP:%d# # {1,1} 0 _\n", &nbMlps) != 1)
  {
    fprintf(stderr, "ERROR (%s) : Ill formatted model, aborting.\n", ERRINFO);
    exit(1);
  }
  delete file;

  for (int i = 0; i < nbMlps; i++)
  {
    mlps.emplace_back(std::string("MLP_") + std::to_string(i));
    mlps.back().loadStruct(model, filename, i);
    mlps.back().loadParameters(model, filename);
  }
}
void MultiMLP::init(int nbInputs, const std::string & topology, int nbOutputs)
{
  // Drop the first character of the topology : it is the 'M' prefix that
  // selected MultiMLP in Classifier::createNeuralNetwork.
  std::string safeTopology = "";
  for (unsigned int i = 1; i < topology.size(); i++)
    safeTopology.push_back(topology[i]);

  setBatchSize(0);

  if (mlps.empty())
  {
    // One MLP per possible class.
    for (int i = 0; i < nbOutputs; i++)
      mlps.emplace_back(std::string("MLP_") + std::to_string(i));
  }

  // Each per-class MLP is binary : 2 outputs (not-my-class / my-class).
  for (auto & mlp : mlps)
    mlp.init(model, nbInputs, safeTopology, 2);
}
dynet::Trainer * MultiMLP::createTrainer()
{
  auto optimizer = noAccentLower(ProgramParameters::optimizer);

  dynet::Trainer * trainer = nullptr;

  if (optimizer == "amsgrad")
    trainer = new dynet::AmsgradTrainer(model, ProgramParameters::learningRate, ProgramParameters::beta1, ProgramParameters::beta2, ProgramParameters::bias);
  else if (optimizer == "adam")
    trainer = new dynet::AdamTrainer(model, ProgramParameters::learningRate, ProgramParameters::beta1, ProgramParameters::beta2, ProgramParameters::bias);
  else if (optimizer == "sgd")
    trainer = new dynet::SimpleSGDTrainer(model, ProgramParameters::learningRate);
  else if (optimizer == "none")
    return nullptr;

  if (trainer)
  {
    trainer->sparse_updates_enabled = true;
    return trainer;
  }

  fprintf(stderr, "ERROR (%s) : unknown optimizer \'%s\'. Aborting.\n", ERRINFO, optimizer.c_str());
  exit(1);

  return nullptr;
}
std::vector<float> MultiMLP::predict(FeatureModel::FeatureDescription & fd)
{
  std::vector<float> prediction(mlps.size());

  for (unsigned int i = 0; i < mlps.size(); i++)
  {
    // Recover the class index from the MLP's name ("MLP_<id>").
    int id = std::stoi(split(mlps[i].name, '_')[1]);
    // Each MLP outputs two scores; index 1 is the score of its own class.
    auto value = mlps[i].predict(fd);
    prediction[id] = value[1];
  }

  return prediction;
}
float MultiMLP::update(FeatureModel::FeatureDescription & fd, int gold)
{
  float loss = 0.0;

  try
  {
    // Train every per-class MLP : the gold one toward label 1, all the
    // others toward label 0, and accumulate their losses.
    for (auto & mlp : mlps)
    {
      int id = std::stoi(split(mlp.name, '_')[1]);
      mlp.setBatchSize(getBatchSize());
      loss += mlp.update(fd, id == gold ? 1 : 0);
      trainer->update();
    }
  } catch (BatchNotFull &)
  {
    return 0.0;
  }

  return loss;
}
float MultiMLP::update(FeatureModel::FeatureDescription &, const std::vector<float> &)
{
  fprintf(stderr, "ERROR (%s) : only classification is supported. Aborting.\n", ERRINFO);
  exit(1);

  return 0.0;
}
float MultiMLP::getLoss(FeatureModel::FeatureDescription & fd, int gold)
{
  float loss = 0.0;

  try
  {
    for (auto & mlp : mlps)
    {
      int id = std::stoi(split(mlp.name, '_')[1]);
      mlp.setBatchSize(getBatchSize());
      loss += mlp.getLoss(fd, id == gold ? 1 : 0);
      trainer->update();
    }
  } catch (BatchNotFull &)
  {
    return 0.0;
  }

  return loss;
}
float MultiMLP::getLoss(FeatureModel::FeatureDescription &, const std::vector<float> &)
{
  fprintf(stderr, "ERROR (%s) : only classification is supported. Aborting.\n", ERRINFO);
  exit(1);

  return 0.0;
}
void MultiMLP::save(const std::string & filename)
{
  File * file = new File(filename, "w");
  // File layout : a "#NBMLP:<n>#" header line, followed by each MLP's
  // structure and parameters (written by MLPBase::saveStruct / saveParameters).
  fprintf(file->getDescriptor(), "#NBMLP:%lu# # {1,1} 0 _\n", mlps.size());
  delete file;

  for (auto & mlp : mlps)
  {
    mlp.saveStruct(filename);
    mlp.saveParameters(filename);
  }
}
void MultiMLP::printTopology(FILE * output)
{
  // Every MLP is initialized with the same topology, so printing one is enough.
  mlps.back().printTopology(output);
}
void MultiMLP::endOfIteration()
{
  for (auto & mlp : mlps)
    mlp.endOfIteration();
}
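On the prediction side, predict returns one score per class (each binary MLP's score for its own class), so a caller turns the vector into a class index with an argmax. A minimal sketch, assuming a constructed network and feature description; predictClass is a hypothetical helper:

#include <algorithm>
#include <vector>

// Sketch : pick the class whose dedicated MLP gives the highest score.
int predictClass(MultiMLP & network, FeatureModel::FeatureDescription & fd)
{
  std::vector<float> scores = network.predict(fd);
  return std::max_element(scores.begin(), scores.end()) - scores.begin();
}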
transition_machine/src/Classifier.cpp (+4 −0)
...
@@ -3,6 +3,7 @@
 #include "util.hpp"
 #include "MLP.hpp"
 #include "ReversedMLP.hpp"
+#include "MultiMLP.hpp"
 #include "GeneticAlgorithm.hpp"

 Classifier::Classifier(const std::string & filename, bool trainMode)
...
@@ -376,6 +377,9 @@ NeuralNetwork * Classifier::createNeuralNetwork()
   if (topology[0] == 'R')
     return new ReversedMLP();
+  if (topology[0] == 'M')
+    return new MultiMLP();
+
   return new MLP();
 }
...
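For reference, hypothetical topology strings and the network type each one selects in Classifier::createNeuralNetwork after this change:

// "(150,RELU,0.3)(50,ELU,0.2)"  -> MLP (default case)
// "R(150,RELU,0.3)(50,ELU,0.2)" -> ReversedMLP
// "M(150,RELU,0.3)(50,ELU,0.2)" -> MultiMLP (MultiMLP::init strips the
//                                  leading 'M' before configuring its MLPs)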