HistoryModule.cpp
    #include "HistoryModule.hpp"
    
    HistoryModuleImpl::HistoryModuleImpl(std::string name, const std::string & definition)
    {
      setName(name);
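      // The definition string is expected to look like (values illustrative only):
      //   NbElem{10} LSTM{1 2 0.3 1} In{128} Out{64}
      // i.e. NbElem{N} <SubModuleType>{args...} In{embeddingSize} Out{hiddenSize}.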
      std::regex regex("(?:(?:\\s|\\t)*)NbElem\\{(.*)\\}(?:(?:\\s|\\t)*)(\\S+)\\{(.*)\\}(?:(?:\\s|\\t)*)In\\{(.*)\\}(?:(?:\\s|\\t)*)Out\\{(.*)\\}(?:(?:\\s|\\t)*)");
      if (!util::doIfNameMatch(regex, definition, [this,&definition](auto sm)
            {
              try
              {
                maxNbElements = std::stoi(sm.str(1));
    
                auto subModuleType = sm.str(2);
                auto subModuleArguments = util::split(sm.str(3), ' ');
    
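                // Submodule arguments, in order: bidirectional, num_layers,
                // dropout, complete.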
                auto options = MyModule::ModuleOptions(true)
                  .bidirectional(std::stoi(subModuleArguments[0]))
                  .num_layers(std::stoi(subModuleArguments[1]))
                  .dropout(std::stof(subModuleArguments[2]))
                  .complete(std::stoi(subModuleArguments[3]));
    
                inSize = std::stoi(sm.str(4));
                int outSize = std::stoi(sm.str(5));
    
                if (subModuleType == "LSTM")
                  myModule = register_module("myModule", LSTM(inSize, outSize, options));
                else if (subModuleType == "GRU")
                  myModule = register_module("myModule", GRU(inSize, outSize, options));
                else if (subModuleType == "CNN")
                  myModule = register_module("myModule", CNN(inSize, outSize, options));
                else if (subModuleType == "Concat")
                  myModule = register_module("myModule", Concat(inSize));
                else
                  util::myThrow(fmt::format("unknown sumodule type '{}'", subModuleType));
    
              } catch (std::exception & e) {util::myThrow(fmt::format("{} in '{}'",e.what(),definition));}
            }))
        util::myThrow(fmt::format("invalid definition '{}'", definition));
    }
    
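    // Select the maxNbElements history columns of the input starting at
    // firstInputIndex, embed them, run them through the wrapped submodule,
    // and flatten the result to (batchSize, features).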
    torch::Tensor HistoryModuleImpl::forward(torch::Tensor input)
    {
      return myModule->forward(wordEmbeddings(input.narrow(1, firstInputIndex, maxNbElements))).reshape({input.size(0), -1});
    }
    
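    // The output size is delegated to the wrapped submodule, since it depends
    // on the submodule's architecture and the number of history elements.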
    std::size_t HistoryModuleImpl::getOutputSize()
    {
      return myModule->getOutputSize(maxNbElements);
    }
    
    std::size_t HistoryModuleImpl::getInputSize()
    {
      return maxNbElements;
    }
    
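    // Append to every context row the dictionary index of each of the first
    // maxNbElements history items of the config, or the index of the null
    // value for history slots that are absent.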
    void HistoryModuleImpl::addToContext(std::vector<std::vector<long>> & context, const Config & config)
    {
      auto & dict = getDict();
    
      std::string prefix = "HISTORY";
    
      for (auto & contextElement : context)
        for (int i = 0; i < maxNbElements; i++)
          if (config.hasHistory(i))
            contextElement.emplace_back(dict.getIndexOrInsert(config.getHistory(i), prefix));
          else
            contextElement.emplace_back(dict.getIndexOrInsert(Dict::nullValueStr, prefix));
    }
    
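    // Lazily create the embedding table: one row per dictionary entry, each of
    // dimension inSize. Must run before forward(), which dereferences wordEmbeddings.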
    void HistoryModuleImpl::registerEmbeddings()
    {
      if (!wordEmbeddings)
        wordEmbeddings = register_module("embeddings", WordEmbeddings(getDict().size(), inSize));
    }
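
    // Minimal usage sketch (illustrative only: the definition values are made
    // up, and this assumes the usual TORCH_MODULE(HistoryModule) wrapper is
    // declared in HistoryModule.hpp):
    //
    //   HistoryModule history("history", "NbElem{10} GRU{1 1 0.0 1} In{64} Out{32}");
    //   history->registerEmbeddings();
    //   auto output = history->forward(batch); // batch: (batchSize, width) tensor of long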