Commit 1473579c authored by Franck Dary

Added sanity check when loading pretrained word embeddings

parent 7e1a6789
@@ -72,6 +72,8 @@ void Submodule::loadPretrainedW2vEmbeddings(torch::nn::Embedding embeddings, std
  if (dictIndex >= embeddings->weight.size(0))
  {
    if ((unsigned long)dictIndex != embeddings->weight.size(0)+toAdd.size())
      util::myThrow(fmt::format("dictIndex == {}, weight.size == {}, toAdd.size == {}", dictIndex, embeddings->weight.size(0), toAdd.size()));
    toAdd.emplace_back();
    for (unsigned int i = 1; i < splited.size(); i++)
      toAdd.back().emplace_back(std::stof(splited[i]));
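The new check guards the assumption that out-of-vocabulary rows are appended contiguously: whenever a dictionary index falls past the current weight matrix, it must equal weight.size(0) plus the number of rows already queued in toAdd, otherwise the pretrained file and the dictionary are out of sync. A minimal standalone sketch of that invariant (nbRows and the index sequence are illustrative, not from the repository):

#include <cstdio>
#include <cstdlib>
#include <vector>

int main()
{
  long nbRows = 3;                       // stand-in for embeddings->weight.size(0)
  std::vector<std::vector<float>> toAdd; // rows pending insertion

  for (long dictIndex : {3, 4, 5, 7})    // 7 breaks the contiguity invariant
  {
    if (dictIndex >= nbRows)
    {
      // Same sanity check as the commit: the next new index must land
      // exactly one past the rows we already have plus those queued.
      if ((unsigned long)dictIndex != nbRows + toAdd.size())
      {
        std::fprintf(stderr, "dictIndex == %ld, weight.size == %ld, toAdd.size == %zu\n",
                     dictIndex, nbRows, toAdd.size());
        return EXIT_FAILURE;
      }
      toAdd.emplace_back();
    }
  }
  return EXIT_SUCCESS;
}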
@@ -166,7 +168,7 @@ std::function<std::string(const std::string &)> Submodule::getFunction(const std
  return [sequence](const std::string & s)
  {
    auto result = s;
    for (auto & f : sequence)
      result = f(result);
    return result;
......
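For context, the surrounding getFunction code composes a pipeline of string transforms by folding them left-to-right over the input. A minimal standalone sketch of the same pattern (the two transforms are hypothetical stand-ins, not repository code):

#include <cctype>
#include <functional>
#include <iostream>
#include <string>
#include <vector>

int main()
{
  using Fn = std::function<std::string(const std::string &)>;
  std::vector<Fn> sequence;

  // Hypothetical transforms standing in for the ones getFunction assembles.
  sequence.emplace_back([](const std::string & s)
  {
    std::string r = s;
    for (auto & c : r)
      c = std::tolower((unsigned char)c);
    return r;
  });
  sequence.emplace_back([](const std::string & s) { return "<" + s + ">"; });

  // Same shape as the lambda in the diff: apply each transform in order.
  Fn composed = [sequence](const std::string & s)
  {
    auto result = s;
    for (auto & f : sequence)
      result = f(result);
    return result;
  };

  std::cout << composed("HELLO") << "\n"; // prints "<hello>"
  return 0;
}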