Commit f42d9f8e authored by Franck Dary

Implemented crude version of geneticAlgorithm

parent d1519b7e
GeneticAlgorithm.hpp
@@ -8,40 +8,54 @@
 #include <dynet/expr.h>
 #include <dynet/io.h>
 #include <string>
+#include <memory>
 #include "NeuralNetwork.hpp"
 #include "FeatureModel.hpp"
+#include "MLPBase.hpp"

 class GeneticAlgorithm : public NeuralNetwork
 {
   private :

-  /// @brief An individual is a MLP
-  class Individual
+  /// @brief An Individual is a part of the current population.
+  ///
+  /// It can be evaluated against a particular metric, and it can mutate and reproduce with another of its kind.
+  struct Individual
   {
-    private :
-
-    /// @brief The Layers of the MLP.
-    std::vector<Layer> layers;
-    /// @brief The parameters corresponding to the layers of the MLP.
-    std::vector< std::vector<dynet::Parameter> > parameters;
-
-    public :
-
-    /// @brief Create a new individual for the population.
+    /// @brief The neural network corresponding to this Individual.
+    MLPBase mlp;
+    /// @brief The value of this Individual.
+    ///
+    /// This metric is used to determine which are the best Individuals of the generation.
+    /// For example it can be the inverse of the loss value of its MLP against the dev dataset.
+    float value;
+    /// @brief Unique identifier for this Individual.
+    int id;
+    /// @brief Create a new Individual from a certain topology.
     ///
-    /// @param topology The topology the underlying MLP will take.
-    /// @param model The Collection of parameters of the GeneticAlgorithm.
-    /// @param nbInputs The size of the input layer of the MLP.
-    /// @param nbOutputs The size of the output layer of the MLP.
-    Individual(const std::string & topology, dynet::ParameterCollection & model, int nbInputs, int nbOutputs);
+    /// @param model The dynet model that will contain the mlp parameters.
+    /// @param nbInputs The size of the mlp input layer.
+    /// @param topology The desired topology for the mlp.
+    /// @param nbOutputs The size of the mlp output layer.
+    Individual(dynet::ParameterCollection & model, int nbInputs, const std::string & topology, int nbOutputs);
+    /// @brief Mix this Individual's weights with those of another, keeping roughly half of each.
+    void becomeChildOf(Individual * other);
   };

+  /// @brief The current generation.
+  std::vector< std::unique_ptr<Individual> > generation;
+
   private :

   /// @brief Load this GeneticAlgorithm from a file.
   ///
   /// @param filename The name of the file where the GeneticAlgorithm is stored.
   void load(const std::string & filename);
+  /// @brief Get the value of an Individual depending on the loss of its MLP.
+  ///
+  /// @param loss The loss of the MLP.
+  ///
+  /// @return The value of the Individual.
+  static float loss2value(float loss);

   public :
...
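Compared to the old header, an Individual now delegates all layer and parameter bookkeeping to an embedded MLPBase instead of keeping its own layers and parameters vectors, and it carries its fitness in value. Since loss2value computes 1000 / loss (see the .cpp below), a lower loss means a fitter Individual: a loss of 2.0 maps to a value of 500, a loss of 0.5 to a value of 2000.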
MLPBase.hpp
@@ -17,7 +17,7 @@
 /// Once trained, it can also be used to predict the class of a certain input.
 class MLPBase
 {
-  private :
+  public :

   using Layer = NeuralNetwork::Layer;
   /// @brief The Layers of the MLP.
...
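The private : to public : change in MLPBase is what lets GeneticAlgorithm::Individual::becomeChildOf (below) reach directly into another Individual's mlp.parameters when mixing weights.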
#include "GeneticAlgorithm.hpp" #include "GeneticAlgorithm.hpp"
#include "ProgramParameters.hpp" #include "ProgramParameters.hpp"
#include "util.hpp"
GeneticAlgorithm::GeneticAlgorithm() GeneticAlgorithm::GeneticAlgorithm()
{ {
...@@ -17,80 +18,118 @@ GeneticAlgorithm::GeneticAlgorithm(const std::string & filename) ...@@ -17,80 +18,118 @@ GeneticAlgorithm::GeneticAlgorithm(const std::string & filename)
void GeneticAlgorithm::init(int nbInputs, const std::string & topology, int nbOutputs) void GeneticAlgorithm::init(int nbInputs, const std::string & topology, int nbOutputs)
{ {
fprintf(stderr, "init of genetic\n"); auto splited = split(topology, ' ');
if (splited.size() != 2 || !isNum(splited[0]))
{
fprintf(stderr, "ERROR (%s) : wrong topology \'%s\'. Aborting.\n", ERRINFO, topology.c_str());
exit(1);
}
int nbElems = std::stoi(splited[0]);
for (int i = 0; i < nbElems; i++)
generation.emplace_back(new Individual(model, nbInputs, splited[1], nbOutputs));
fprintf(stderr, "Init is done !\n");
} }
std::vector<float> GeneticAlgorithm::predict(FeatureModel::FeatureDescription & fd) std::vector<float> GeneticAlgorithm::predict(FeatureModel::FeatureDescription & fd)
{ {
return generation[0]->mlp.predict(fd);
} }

 float GeneticAlgorithm::update(FeatureModel::FeatureDescription & fd, int gold)
 {
+  bool haveBeenUpdated = false;
+
+  for (auto & individual : generation)
+  {
+    float loss = individual->mlp.update(fd, gold);
+
+    if (loss != 0.0)
+    {
+      individual->value = loss2value(loss);
+      haveBeenUpdated = true;
+    }
+  }
+
+  if (!haveBeenUpdated)
+    return 0.0;
+
+  std::sort(generation.begin(), generation.end(),
+    [](const std::unique_ptr<Individual> & a, const std::unique_ptr<Individual> & b)
+    {
+      return a->value > b->value;
+    });
+
+  fprintf(stderr, "-----------------\n");
+  for (auto & individual : generation)
+    fprintf(stderr, "%d\t%f\n", individual->id, individual->value);
+  fprintf(stderr, "-----------------\n");
+
+  for (unsigned int i = 1; i < generation.size(); i++)
+  {
+    generation[i]->becomeChildOf(generation[0].get());
+  }
 }

 void GeneticAlgorithm::save(const std::string & filename)
 {
 }

 void GeneticAlgorithm::printTopology(FILE * output)
 {
+  if (generation.empty())
+  {
+    fprintf(output, "0 x ()\n");
+  }
+  else
+  {
+    fprintf(output, "%lu x ", generation.size());
+    generation[0]->mlp.printTopology(output);
+  }
 }

 void GeneticAlgorithm::load(const std::string & filename)
 {
 }

-GeneticAlgorithm::Individual::Individual(const std::string & topology, dynet::ParameterCollection & model, int nbInputs, int nbOutputs)
+GeneticAlgorithm::Individual::Individual(dynet::ParameterCollection & model, int nbInputs, const std::string & topology, int nbOutputs)
 {
-  std::string topo = topology;
-  std::replace(topo.begin(), topo.end(), '(', ' ');
-  std::replace(topo.begin(), topo.end(), ')', ' ');
-
-  auto groups = split(topo);
-  for (auto group : groups)
-  {
-    if(group.empty())
-      continue;
-
-    std::replace(group.begin(), group.end(), ',', ' ');
-    auto layer = split(group);
-
-    if (layer.size() != 2)
-    {
-      fprintf(stderr, "ERROR (%s) : invalid topology \'%s\'. Aborting.\n", ERRINFO, topology.c_str());
-      exit(1);
-    }
-
-    int input = layers.empty() ? nbInputs : layers.back().output_dim;
-    int output = std::stoi(layer[0]);
-    layers.emplace_back(input, output, 0, str2activation(layer[1]));
-  }
-
-  layers.emplace_back(layers.back().output_dim, nbOutputs, 0.0, Activation::LINEAR);
-
-  if(layers.empty())
-  {
-    fprintf(stderr, "ERROR (%s) : constructed mlp with 0 layers. Aborting.\n", ERRINFO);
-    exit(1);
-  }
-
-  for(unsigned int i = 0; i < layers.size()-1; i++)
-    if(layers[i].output_dim != layers[i+1].input_dim)
-    {
-      fprintf(stderr, "ERROR (%s) : constructed mlp with incompatible layers. Aborting.\n", ERRINFO);
-      exit(1);
-    }
-
-  for (auto & layer : layers)
-  {
-    dynet::Parameter W = model.add_parameters({(unsigned)layer.output_dim, (unsigned)layer.input_dim});
-    dynet::Parameter b = model.add_parameters({(unsigned)layer.output_dim});
-    parameters.push_back({W,b});
-  }
+  static int id = 0;
+  this->id = id++;
+  mlp.init(model, nbInputs, topology, nbOutputs);
 }
+
+float GeneticAlgorithm::loss2value(float loss)
+{
+  return 1000.0 / loss;
+}
+
+void GeneticAlgorithm::Individual::becomeChildOf(Individual * other)
+{
+  auto & thisParameters = mlp.parameters;
+  auto & otherParameters = other->mlp.parameters;
+
+  if (thisParameters.size() != otherParameters.size())
+  {
+    fprintf(stderr, "ERROR (%s) : The two individuals are not compatible. Sizes %lu and %lu. Aborting.\n", ERRINFO, thisParameters.size(), otherParameters.size());
+    exit(1);
+  }
+
+  for (unsigned int i = 0; i < thisParameters.size(); i++)
+    for (unsigned int j = 0; j < thisParameters[i].size(); j++)
+    {
+      auto & thisParameter = thisParameters[i][j];
+      auto & otherParameter = otherParameters[i][j];
+      float * thisValues = thisParameter.values()->v;
+      float * otherValues = otherParameter.values()->v;
+      unsigned int nbValues = thisParameter.values()->d.size();
+
+      for (unsigned int k = 0; k < nbValues; k++)
+        if (rand() % 1000 >= 500)
+          thisValues[k] = otherValues[k];
+    }
+}
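The reproduction scheme is deliberately crude: after every update call in which at least one Individual received a nonzero loss, the generation is sorted by descending value and every Individual except the best becomes a child of generation[0]. becomeChildOf itself is a uniform crossover with no mutation operator: rand() % 1000 >= 500 flips a near-fair coin per weight, so each value is overwritten by the corresponding weight of the fitter parent with probability 1/2. A minimal standalone sketch of that rule, separate from the commit's dynet-specific buffers (the function name and raw-array signature here are illustrative, not part of the commit):

    #include <cstdlib>

    // Uniform crossover over raw weight buffers: each of the n weights of
    // child is overwritten by the corresponding weight of parent with ~50%
    // probability, mirroring the loop at the end of becomeChildOf.
    void uniformCrossover(float * child, const float * parent, unsigned int n)
    {
      for (unsigned int k = 0; k < n; k++)
        if (std::rand() % 1000 >= 500)
          child[k] = parent[k];
    }

With no mutation step, variation in the population comes only from each Individual's random initialization and its own gradient updates between crossovers.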
Classifier.hpp
@@ -12,7 +12,6 @@
 #include "ActionSet.hpp"
 #include "Oracle.hpp"
 #include "NeuralNetwork.hpp"
-#include "MLP.hpp"
 #include "ProgramParameters.hpp"

 /// @brief Given a Config, a Classifier is capable of weighting its ActionSet.
...
#include "Classifier.hpp" #include "Classifier.hpp"
#include "File.hpp" #include "File.hpp"
#include "util.hpp" #include "util.hpp"
#include "MLP.hpp"
#include "GeneticAlgorithm.hpp"
Classifier::Classifier(const std::string & filename, bool trainMode) Classifier::Classifier(const std::string & filename, bool trainMode)
{ {
...@@ -67,7 +69,7 @@ Classifier::Classifier(const std::string & filename, bool trainMode) ...@@ -67,7 +69,7 @@ Classifier::Classifier(const std::string & filename, bool trainMode)
as.reset(new ActionSet(ProgramParameters::expPath + buffer, false)); as.reset(new ActionSet(ProgramParameters::expPath + buffer, false));
if(fscanf(fd, "Topology : %s\n", buffer) != 1) if(fscanf(fd, "Topology : %[^\n]\n", buffer) != 1)
badFormatAndAbort(ERRINFO); badFormatAndAbort(ERRINFO);
topology = buffer; topology = buffer;
...@@ -133,7 +135,7 @@ void Classifier::initClassifier(Config & config) ...@@ -133,7 +135,7 @@ void Classifier::initClassifier(Config & config)
return; return;
} }
nn.reset(new MLP()); nn.reset(new GeneticAlgorithm());
Dict::initDicts(nn->getModel(), name); Dict::initDicts(nn->getModel(), name);
......
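Two details tie the Classifier changes to the rest of the commit: fscanf now reads the topology with %[^\n] instead of %s, because %s stops at the first whitespace while a GeneticAlgorithm topology contains a space (a population size followed by an MLP topology); and initClassifier now instantiates a GeneticAlgorithm where it previously built an MLP. A rough usage sketch, assuming hypothetical nbInputs, nbOutputs, fd and gold coming from the usual training code, and an illustrative topology string in the "populationSize mlpTopology" form that init expects:

    // Sketch only: "8 (200,RELU)" is an assumed example topology, meaning a
    // population of 8 MLPs (the MLP part is parsed by MLPBase, not this commit).
    GeneticAlgorithm nn;
    nn.init(nbInputs, "8 (200,RELU)", nbOutputs);

    // One training step: every Individual trains on the example and gets a
    // fitness of 1000 / loss; the population is then sorted best-first and
    // every Individual but the best becomes a child of generation[0].
    nn.update(fd, gold);

    // Prediction goes through generation[0], the fittest Individual once
    // update has sorted the population.
    std::vector<float> scores = nn.predict(fd);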