diff --git a/multiview_platform/MonoMultiViewClassifiers/Monoview/Additions/BoostUtils.py b/multiview_platform/MonoMultiViewClassifiers/Monoview/Additions/BoostUtils.py
index 3cd8bf9eb393ca5c21b4787367beb4bac4facb70..877e262d82110305e876b0d674b66312cc55c237 100644
--- a/multiview_platform/MonoMultiViewClassifiers/Monoview/Additions/BoostUtils.py
+++ b/multiview_platform/MonoMultiViewClassifiers/Monoview/Additions/BoostUtils.py
@@ -209,10 +209,11 @@ class TreeClassifiersGenerator(ClassifiersGenerator):
 
     def fit(self, X, y=None):
         estimators_ = []
-        self.attribute_indices = [self.sub_sample_attributes(X) for _ in range(self.n_trees)]
-        self.example_indices = [self.sub_sample_examples(X) for _ in range(self.n_trees)]
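+        # Store the per-tree attribute / example sub-samples as 2D arrays so choose() can later filter them together with the kept estimators.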
+        self.attribute_indices = np.array([self.sub_sample_attributes(X) for _ in range(self.n_trees)])
+        self.example_indices = np.array([self.sub_sample_examples(X) for _ in range(self.n_trees)])
         for i in range(self.n_trees):
-            estimators_.append(DecisionTreeClassifier(criterion=self.criterion, splitter=self.splitter, max_depth=self.max_depth).fit(X[:, self.attribute_indices[i]][self.example_indices[i], :], y[self.example_indices[i]]))
+            estimators_.append(DecisionTreeClassifier(criterion=self.criterion, splitter=self.splitter, max_depth=self.max_depth).fit(X[:, self.attribute_indices[i, :]][self.example_indices[i, :], :], y[self.example_indices[i, :]]))
         self.estimators_ = np.asarray(estimators_)
         return self
 
@@ -228,6 +228,12 @@ class TreeClassifiersGenerator(ClassifiersGenerator):
         kept_indices = self.random_state.choice(examples_indices, size=int(self.examples_ratio*n_examples), replace=True)
         return kept_indices
 
+    def choose(self, chosen_columns):
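+        """Keep only the estimators, attribute indices and example indices of the chosen columns."""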
+        self.estimators_ = self.estimators_[chosen_columns]
+        self.attribute_indices = self.attribute_indices[chosen_columns, :]
+        self.example_indices = self.example_indices[chosen_columns, :]
+
 
 
 
@@ -305,6 +310,11 @@ class StumpsClassifiersGenerator(ClassifiersGenerator):
         self.estimators_ = np.asarray(self.estimators_)
         return self
 
+    def choose(self, chosen_columns):
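+        """Keep only the estimators of the chosen columns."""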
+        self.estimators_ = self.estimators_[chosen_columns]
+
+
 def _as_matrix(element):
     """ Utility function to convert "anything" to a Numpy matrix.
     """
@@ -773,6 +782,7 @@ class BaseBoost(object):
 
     def _collect_probas(self, X, sub_sampled=False):
         if self.estimators_generator.__class__.__name__ == "TreeClassifiersGenerator":
+            print("frogom")
             return np.asarray([clf.predict_proba(X[:,attribute_indices]) for clf, attribute_indices in zip(self.estimators_generator.estimators_, self.estimators_generator.attribute_indices)])
         else:
             return np.asarray([clf.predict_proba(X) for clf in self.estimators_generator.estimators_])
diff --git a/multiview_platform/MonoMultiViewClassifiers/Monoview/Additions/CGDescUtils.py b/multiview_platform/MonoMultiViewClassifiers/Monoview/Additions/CGDescUtils.py
index aca484e1480da2132ff3fabd458f8818475d63fb..9b1f8f9aac0e2b750ffa1ade0d8c8565705fd7d2 100644
--- a/multiview_platform/MonoMultiViewClassifiers/Monoview/Additions/CGDescUtils.py
+++ b/multiview_platform/MonoMultiViewClassifiers/Monoview/Additions/CGDescUtils.py
@@ -124,8 +124,8 @@ class ColumnGenerationClassifierQar(BaseEstimator, ClassifierMixin, BaseBoost):
 
 
         self.nb_opposed_voters = self.check_opposed_voters()
-        self.estimators_generator.estimators_ = \
-        self.estimators_generator.estimators_[self.chosen_columns_]
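+        # Delegate the filtering to choose() so that tree generators also keep their attribute / example indices in sync.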
+        self.estimators_generator.choose(self.chosen_columns_)
 
         if self.save_train_data:
             self.X_train = self.classification_matrix[:, self.chosen_columns_]
@@ -155,6 +154,7 @@ class ColumnGenerationClassifierQar(BaseEstimator, ClassifierMixin, BaseBoost):
         end = time.time()
         self.predict_time = end - start
         self.step_predict(classification_matrix)
+        print(np.unique(signs_array))
         return signs_array
 
     def step_predict(self, classification_matrix):
@@ -338,6 +338,7 @@ class ColumnGenerationClassifierQar(BaseEstimator, ClassifierMixin, BaseBoost):
                 self_complemented=self.self_complemented)
         self.estimators_generator.fit(X, y)
         self.classification_matrix = self._binary_classification_matrix(X)
+        print(np.unique(y), np.unique(self.classification_matrix))
         self.train_shape = self.classification_matrix.shape
 
         m, n = self.classification_matrix.shape
@@ -489,6 +490,7 @@ class ColumnGenerationClassifierQar(BaseEstimator, ClassifierMixin, BaseBoost):
         return 1.0 / n_examples * np.ones((n_examples,))
 
     def get_step_decision_test_graph(self, directory, y_test):
+        print(np.unique(y_test))
         np.savetxt(directory + "y_test_step.csv", self.step_decisions, delimiter=',')
         step_metrics = []
         for step_index in range(self.step_decisions.shape[1]-1):
diff --git a/multiview_platform/MonoMultiViewClassifiers/Monoview/Additions/CQBoostUtils.py b/multiview_platform/MonoMultiViewClassifiers/Monoview/Additions/CQBoostUtils.py
index bf367423ab658215486431b322ca7a7a0a7ac297..401bcdcabef83269739d6d8f85e165b3dd1052c8 100644
--- a/multiview_platform/MonoMultiViewClassifiers/Monoview/Additions/CQBoostUtils.py
+++ b/multiview_platform/MonoMultiViewClassifiers/Monoview/Additions/CQBoostUtils.py
@@ -97,7 +97,7 @@ class ColumnGenerationClassifier(BaseEstimator, ClassifierMixin, BaseBoost):
         self.nb_opposed_voters = self.check_opposed_voters()
         self.compute_weights_(w)
         # self.weights_ = w
-        self.estimators_generator.estimators_ = self.estimators_generator.estimators_[self.chosen_columns_]
+        self.estimators_generator.choose(self.chosen_columns_)
         end = time.time()
 
         self.train_time = end-start