diff --git a/decoder/src/Beam.cpp b/decoder/src/Beam.cpp
index 32e7360e02fd07bbb6e8c201463312a9e3fb867b..1b0aafae59b9d1f2146b0f14843382e28402361d 100644
--- a/decoder/src/Beam.cpp
+++ b/decoder/src/Beam.cpp
@@ -48,7 +48,7 @@ void Beam::update(ReadingMachine & machine, bool debug)
     elements[index].config.setAppliableTransitions(appliableTransitions);
 
     auto context = classifier.getNN()->extractContext(elements[index].config).back();
-    auto neuralInput = torch::from_blob(context.data(), {(long)context.size()}, torch::kLong).clone().to(NeuralNetworkImpl::device);
+    auto neuralInput = torch::from_blob(context.data(), {(long)context.size()}, torch::TensorOptions(torch::kLong).device(NeuralNetworkImpl::device));
 
     auto prediction = classifier.isRegression() ? classifier.getNN()(neuralInput).squeeze(0) : torch::softmax(classifier.getNN()(neuralInput).squeeze(0), 0);
     float entropy = classifier.isRegression() ? 0.0 : NeuralNetworkImpl::entropy(prediction);
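
The hunk above builds the network input straight from the feature buffer with explicit TensorOptions instead of cloning and then moving it. Below is a minimal sketch of that from_blob pattern; it is not part of the patch, and it assumes a CPU-resident std::vector<long> buffer (the `context`, `input` and `owned` names are made up for illustration).

#include <torch/torch.h>
#include <iostream>
#include <vector>

int main()
{
  // Hypothetical stand-in for the context features extracted by the classifier.
  std::vector<long> context{3, 1, 4, 1, 5};

  // from_blob wraps the existing buffer without copying, so the tensor does not
  // own its data and `context` must outlive every use of `input`.
  auto input = torch::from_blob(context.data(), {(long)context.size()},
                                torch::TensorOptions(torch::kLong));

  // An owning copy (what a .clone() provides) can still be requested explicitly
  // whenever the buffer's lifetime is not guaranteed.
  auto owned = input.clone();
  std::cout << owned << std::endl;
}

The same from_blob pattern is reused in Trainer::extractExamples further down.
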
diff --git a/reading_machine/src/Classifier.cpp b/reading_machine/src/Classifier.cpp
index aa11dc9704f6d72c1c1159ea1953ec8fa82ddcac..b5e929bfee0b6245bbee111e2ba1353909afa2f3 100644
--- a/reading_machine/src/Classifier.cpp
+++ b/reading_machine/src/Classifier.cpp
@@ -81,17 +81,15 @@ Classifier::Classifier(const std::string & name, std::filesystem::path path, std
   getNN()->loadDicts(path);
   getNN()->registerEmbeddings();
 
-  getNN()->to(torch::kCPU);
-
   if (!train)
   {
-    torch::load(getNN(), getBestFilename());
+    torch::load(getNN(), getBestFilename(), NeuralNetworkImpl::device);
     getNN()->registerEmbeddings();
     getNN()->to(NeuralNetworkImpl::device);
   }
   else if (std::filesystem::exists(getLastFilename()))
   {
-    torch::load(getNN(), getLastFilename());
+    torch::load(getNN(), getLastFilename(), NeuralNetworkImpl::device);
     getNN()->to(NeuralNetworkImpl::device);
     resetOptimizer();
     loadOptimizer();
@@ -185,7 +183,7 @@ void Classifier::loadOptimizer()
 {
   auto optimizerPath = std::filesystem::path(fmt::format("{}/{}_optimizer.pt", path.string(), name));
   if (std::filesystem::exists(optimizerPath))
-    torch::load(*optimizer, optimizerPath);
+    torch::load(*optimizer, optimizerPath, NeuralNetworkImpl::device);
 }
 
 void Classifier::saveOptimizer()
@@ -273,16 +271,12 @@ std::string Classifier::getLastFilename() const
 
 void Classifier::saveBest()
 {
-  getNN()->to(torch::kCPU);
   torch::save(getNN(), getBestFilename());
-  getNN()->to(NeuralNetworkImpl::device);
 }
 
 void Classifier::saveLast()
 {
-  getNN()->to(torch::kCPU);
   torch::save(getNN(), getLastFilename());
-  getNN()->to(NeuralNetworkImpl::device);
   saveOptimizer();
 }
 
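
torch::load forwards its trailing arguments to serialize::InputArchive::load_from, which accepts an optional target device, so a checkpoint can be mapped directly onto NeuralNetworkImpl::device instead of taking a detour through the CPU. A short sketch of that save/load-to-device pattern, with a hypothetical two-layer Sequential standing in for the classifier network:

#include <torch/torch.h>

int main()
{
  // Hypothetical network in place of getNN().
  auto net = torch::nn::Sequential(torch::nn::Linear(8, 16), torch::nn::Linear(16, 2));

  torch::Device device = torch::cuda::is_available() ? torch::kCUDA : torch::kCPU;
  net->to(device);

  // Parameters are serialized from whatever device they currently sit on; no
  // explicit to(torch::kCPU) round trip is required before saving.
  torch::save(net, "net.pt");

  // The trailing argument is forwarded to InputArchive::load_from as the target
  // device, mirroring torch::load(getNN(), getBestFilename(), NeuralNetworkImpl::device).
  torch::load(net, "net.pt", device);
}
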
diff --git a/torch_modules/src/NumericColumnModule.cpp b/torch_modules/src/NumericColumnModule.cpp
index 4899d5953c908cfe51373864d95ad6c788e0827c..15d2b19ef681539418dbd1c64dcffb27f7eabe54 100644
--- a/torch_modules/src/NumericColumnModule.cpp
+++ b/torch_modules/src/NumericColumnModule.cpp
@@ -46,7 +46,7 @@ NumericColumnModuleImpl::NumericColumnModuleImpl(std::string name, const std::st
 torch::Tensor NumericColumnModuleImpl::forward(torch::Tensor input)
 {
   auto context = input.narrow(1, firstInputIndex, getInputSize());
-  auto values = torch::from_blob(context.data_ptr(), context.sizes(), context.strides(), torch::TensorOptions(torch::kDouble).requires_grad(false).device(NeuralNetworkImpl::device)).to(torch::kFloat).unsqueeze(-1).clone();
+  auto values = torch::from_blob(context.data_ptr(), context.sizes(), context.strides(), torch::TensorOptions(torch::kDouble).requires_grad(false).device(NeuralNetworkImpl::device)).to(torch::kFloat).unsqueeze(-1);
   return myModule->forward(values).reshape({input.size(0), -1});
 }
 
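
The trailing .clone() in NumericColumnModuleImpl::forward could be dropped because a dtype-changing .to() already returns a freshly allocated tensor rather than a view of the from_blob buffer. A small illustration of that behaviour (not part of the patch, made-up tensors):

#include <torch/torch.h>
#include <iostream>

int main()
{
  auto doubles = torch::rand({2, 3}, torch::kDouble);

  // Converting kDouble -> kFloat materializes new storage, so the result does
  // not alias the original buffer and no extra .clone() is needed.
  auto floats = doubles.to(torch::kFloat);

  std::cout << std::boolalpha
            << (doubles.data_ptr() != floats.data_ptr()) << std::endl; // prints true
}
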
diff --git a/trainer/src/Trainer.cpp b/trainer/src/Trainer.cpp
index d3157426b9c38268897a42a377e90fbf03aa6632..a85fff0b08800ba7cc565ca5d91a1e08ccac5aef 100644
--- a/trainer/src/Trainer.cpp
+++ b/trainer/src/Trainer.cpp
@@ -93,7 +93,7 @@ void Trainer::extractExamples(std::vector<SubConfig> & configs, bool debug, std:
       if (dynamicOracle and util::choiceWithProbability(1.0) and config.getState() != "tokenizer" and config.getState() != "segmenter")
       {
         auto & classifier = *machine.getClassifier(config.getState());
-        auto neuralInput = torch::from_blob(context.data(), {(long)context.size()}, torch::kLong).clone().to(NeuralNetworkImpl::device);
+        auto neuralInput = torch::from_blob(context.data(), {(long)context.size()}, torch::TensorOptions(torch::kLong).device(NeuralNetworkImpl::device));
         auto prediction = classifier.isRegression() ? classifier.getNN()(neuralInput).squeeze(0) : torch::softmax(classifier.getNN()(neuralInput).squeeze(0), 0);
         entropy  = NeuralNetworkImpl::entropy(prediction);
     
@@ -291,7 +291,7 @@ void Trainer::Examples::saveIfNeeded(const std::string & state, std::filesystem:
 
   int nbClasses = classes[0].size(0);
 
-  auto tensorToSave = torch::cat({torch::stack(contexts), torch::stack(classes)}, 1).to(torch::kCPU);
+  auto tensorToSave = torch::cat({torch::stack(contexts), torch::stack(classes)}, 1);
   auto filename = fmt::format("{}-{}_{}-{}.{}.{}.tensor", state, nbClasses, lastSavedIndex, currentExampleIndex-1, epoch, dynamicOracle);
   torch::save(tensorToSave, dir/filename);
   lastSavedIndex = currentExampleIndex;
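
In saveIfNeeded, stack turns the per-example context and class vectors into matrices and cat glues them side by side before the result is written out; with the .to(torch::kCPU) hop removed, torch::save serializes the tensor as built. A compact sketch with made-up example tensors (the shapes and filename are illustrative only):

#include <torch/torch.h>
#include <vector>

int main()
{
  // Hypothetical per-example feature contexts and class rows.
  std::vector<torch::Tensor> contexts{torch::zeros({4}), torch::ones({4})};
  std::vector<torch::Tensor> classes{torch::ones({2}), torch::zeros({2})};

  // stack: N tensors of shape {D} -> {N, D}; cat along dim 1 appends the class
  // block to the context block, giving one row per training example.
  auto tensorToSave = torch::cat({torch::stack(contexts), torch::stack(classes)}, 1);

  // A plain tensor can be passed to torch::save directly, without moving it
  // to another device first.
  torch::save(tensorToSave, "examples.tensor");
}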