Skip to content
Snippets Groups Projects
Commit 42507e78 authored by Franck Dary's avatar Franck Dary
Browse files

GeneticAlgorithm works on decode

parent 02255bac
No related branches found
No related tags found
No related merge requests found
......@@ -42,6 +42,8 @@ class GeneticAlgorithm : public NeuralNetwork
/// @param topology The desired topology for the mlp.
/// @param nbOutputs The size of the mlp output layer.
Individual(dynet::ParameterCollection & model, int nbInputs, const std::string & topology, int nbOutputs);
/// @brief Create a blank individual, so that it will be loaded from a saved model.
Individual();
void becomeChildOf(Individual * other);
void becomeChildOf(Individual * mom, Individual * dad);
void mutate(float probability);
......
......@@ -73,7 +73,8 @@ class MLPBase
/// The file must have been written by the function saveStruct.
/// @param model The dynet model that will contain the loaded parameters.
/// @param filename The file from which the structure will be read.
void loadStruct(dynet::ParameterCollection & model, const std::string & filename);
/// @param index The index of the structure in the file.
void loadStruct(dynet::ParameterCollection & model, const std::string & filename, unsigned int index);
/// @brief Load and populate the model with parameters from a file.
///
/// The file must have been written by the function saveParameters.
......
......@@ -149,18 +149,29 @@ void GeneticAlgorithm::load(const std::string & filename)
fprintf(stderr, "ERROR (%s) : file \'%s\' bad format. Aborting.\n", ERRINFO, filename.c_str());
exit(1);
}
delete file;
this->nbInputs = i;
this->nbOutputs = o;
this->topology = buffer;
init(nbInputs, topology, nbOutputs);
auto splited = split(topology, ' ');
if (splited.size() != 2 || !isNum(splited[0]))
{
fprintf(stderr, "ERROR (%s) : wrong topology \'%s\'. Aborting.\n", ERRINFO, topology.c_str());
exit(1);
}
int nbElems = std::stoi(splited[0]);
for (auto & individual : generation)
for (int i = 0; i < nbElems; i++)
generation.emplace_back(new Individual());
for (int i = 0; i < nbElems; i++)
{
individual->mlp.loadStruct(model, filename);
individual->mlp.loadParameters(model, filename);
generation[i]->mlp.loadStruct(model, filename, i);
generation[i]->mlp.loadParameters(model, filename);
}
}
......@@ -174,6 +185,14 @@ GeneticAlgorithm::Individual::Individual(dynet::ParameterCollection & model, int
mutate(1.0);
}
/// @brief Construct an empty Individual whose MLP is a blank shell,
/// meant to be populated later by loadStruct / loadParameters.
GeneticAlgorithm::Individual::Individual()
  : mlp("MLP_" + std::to_string(idCount))
{
  // idCount is read above (un-incremented) to name the mlp, then the
  // same value becomes this individual's id before being bumped, so
  // the mlp name and the id stay in sync.
  id = idCount++;
  loss = 0.0;
  value = 0.0;
}
float GeneticAlgorithm::loss2value(float loss)
{
return 1000.0 / loss;
......
......@@ -13,7 +13,7 @@ MLP::MLP(const std::string & filename) : mlp("MLP")
trainer.reset(createTrainer());
initDynet();
mlp.loadStruct(model, filename);
mlp.loadStruct(model, filename, 0);
mlp.loadParameters(model, filename);
}
......
......@@ -293,7 +293,7 @@ void MLPBase::saveParameters(const std::string & filename)
}
}
void MLPBase::loadStruct(dynet::ParameterCollection & model, const std::string & filename)
void MLPBase::loadStruct(dynet::ParameterCollection & model, const std::string & filename, unsigned int index)
{
File file(filename, "r");
FILE * fd = file.getDescriptor();
......@@ -303,10 +303,24 @@ void MLPBase::loadStruct(dynet::ParameterCollection & model, const std::string &
int output;
float dropout;
for (unsigned int i = 0; i < index; i++)
{
while (fscanf(fd, "Layer : %d %d %s %f\n", &input, &output, activation, &dropout) != 4)
if (fscanf(fd, "%10[^\n]\n", activation) != 1)
{
fprintf(stderr, "ERROR (%s) : Unexpected end of file \'%s\'. Aborting.\n", ERRINFO, file.getName().c_str());
exit(1);
}
do
{
} while (fscanf(fd, "Layer : %d %d %s %f\n", &input, &output, activation, &dropout) == 4);
}
while (fscanf(fd, "Layer : %d %d %s %f\n", &input, &output, activation, &dropout) != 4)
if (fscanf(fd, "%[^\n]\n", activation) != 1)
if (fscanf(fd, "%10[^\n]\n", activation) != 1)
{
fprintf(stderr, "ERROR (%s) : Unexpected end of file \'%s\'. Aborting.\n", ERRINFO, filename.c_str());
fprintf(stderr, "ERROR (%s) : Unexpected end of file \'%s\'. Aborting.\n", ERRINFO, file.getName().c_str());
exit(1);
}
......@@ -334,6 +348,7 @@ void MLPBase::loadParameters(dynet::ParameterCollection & model, const std::stri
parameters[i][1] = loader.load_param(model, prefix + std::to_string(i) + "_b");
} catch(const std::runtime_error e)
{
fprintf(stderr, "WARNING (%s) : Could not find parameter with key \'%s\' in the model. Ignore this if this model was trained with an older version of Macaon.\n", ERRINFO, (prefix+std::to_string(i) + "_W").c_str());
prefix = "Layer_";
parameters[i][0] = loader.load_param(model, prefix + std::to_string(i) + "_W");
parameters[i][1] = loader.load_param(model, prefix + std::to_string(i) + "_b");
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or sign in to comment