FocusedColumnModule.cpp
    #include "FocusedColumnModule.hpp"
    
    FocusedColumnModuleImpl::FocusedColumnModuleImpl(std::string name, const std::string & definition, std::filesystem::path path) : path(path)
    {
      setName(name);
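      // Parses module definitions of the following shape (values here are a hypothetical example):
      //   Column{lower:FORM} NbElem{2} Buffer{-1 0} Stack{0} LSTM{1 1 0.3 1} In{64} Out{128} w2v{}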
      std::regex regex("(?:(?:\\s|\\t)*)Column\\{(.*)\\}(?:(?:\\s|\\t)*)NbElem\\{(.*)\\}(?:(?:\\s|\\t)*)Buffer\\{(.*)\\}(?:(?:\\s|\\t)*)Stack\\{(.*)\\}(?:(?:\\s|\\t)*)(\\S+)\\{(.*)\\}(?:(?:\\s|\\t)*)In\\{(.*)\\}(?:(?:\\s|\\t)*)Out\\{(.*)\\}(?:(?:\\s|\\t)*)w2v\\{(.*)\\}(?:(?:\\s|\\t)*)");
      if (!util::doIfNameMatch(regex, definition, [this,&definition](auto sm)
            {
              try
              {
                func = getFunction(sm.str(1));
                column = util::split(sm.str(1), ':').back();
                maxNbElements = std::stoi(sm.str(2));
    
                for (auto & index : util::split(sm.str(3), ' '))
                  focusedBuffer.emplace_back(std::stoi(index));
    
                for (auto & index : util::split(sm.str(4), ' '))
                  focusedStack.emplace_back(std::stoi(index));
    
                auto subModuleType = sm.str(5);
                auto subModuleArguments = util::split(sm.str(6), ' ');
    
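                // The four submodule arguments map, in order, onto the options below:
                // bidirectional, num_layers, dropout, complete.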
                auto options = MyModule::ModuleOptions(true)
                  .bidirectional(std::stoi(subModuleArguments[0]))
                  .num_layers(std::stoi(subModuleArguments[1]))
                  .dropout(std::stof(subModuleArguments[2]))
                  .complete(std::stoi(subModuleArguments[3]));
    
                inSize = std::stoi(sm.str(7));
                int outSize = std::stoi(sm.str(8));
    
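                // Build the submodule named in the definition (Concat only uses inSize).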
                if (subModuleType == "LSTM")
                  myModule = register_module("myModule", LSTM(inSize, outSize, options));
                else if (subModuleType == "GRU")
                  myModule = register_module("myModule", GRU(inSize, outSize, options));
                else if (subModuleType == "Concat")
                  myModule = register_module("myModule", Concat(inSize));
                else
                  util::myThrow(fmt::format("unknown sumodule type '{}'", subModuleType));
    
                w2vFiles = sm.str(9);
    
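                // w2v{} holds space-separated 'prefix,file' pairs of pretrained embeddings to load.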
                if (!w2vFiles.empty())
                {
                  auto paths = util::split(w2vFiles.string(), ' ');
                  for (auto & p : paths)
                  {
                    auto parts = util::split(p, ',');
                    if (parts.size() != 2)
                      util::myThrow("expected 'prefix,pretrained.w2v'");
                    auto pretrained = getDict().loadWord2Vec(this->path / parts[1], parts[0]);
                    if (pretrained)
                    {
                      getDict().setState(Dict::State::Closed);
                      dictSetPretrained(true);
                    }
                  }
                }
    
              } catch (std::exception & e) { util::myThrow(fmt::format("{} in '{}'", e.what(), definition)); }
            }))
        util::myThrow(fmt::format("invalid definition '{}'", definition));
    }
    
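    // Embeds each focused window of the input and runs it through the
    // submodule, producing one flattened tensor per focused element.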
    torch::Tensor FocusedColumnModuleImpl::forward(torch::Tensor input)
    {
      std::vector<torch::Tensor> outputs;
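      // Each focused buffer/stack element occupies a contiguous slice of
      // maxNbElements columns starting at firstInputIndex.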
      for (unsigned int i = 0; i < focusedBuffer.size()+focusedStack.size(); i++)
        outputs.emplace_back(myModule->forward(wordEmbeddings(input.narrow(1, firstInputIndex+i*maxNbElements, maxNbElements))).reshape({input.size(0), -1}));