diff --git a/multiview_platform/MonoMultiViewClassifiers/Monoview/Additions/QarBoostUtils.py b/multiview_platform/MonoMultiViewClassifiers/Monoview/Additions/QarBoostUtils.py
index 0db72182653e7da4d6c9c753b81d4dd677c460fe..b2fd773de42b084a62e81e6cea1ca871801c2a05 100644
--- a/multiview_platform/MonoMultiViewClassifiers/Monoview/Additions/QarBoostUtils.py
+++ b/multiview_platform/MonoMultiViewClassifiers/Monoview/Additions/QarBoostUtils.py
@@ -20,7 +20,7 @@ class ColumnGenerationClassifierQar(BaseEstimator, ClassifierMixin, BaseBoost):
     def __init__(self, n_max_iterations=None, estimators_generator=None,
                  random_state=42, self_complemented=True, twice_the_same=False,
                  c_bound_choice=True, random_start=True,
-                 n_stumps_per_attribute=1, use_r=True, c_bound_sol=True,
+                 n_stumps=1, use_r=True, c_bound_sol=True,
                  plotted_metric=Metrics.zero_one_loss, save_train_data=True,
                  test_graph=True, mincq_tracking=False):
         super(ColumnGenerationClassifierQar, self).__init__()
@@ -62,7 +62,7 @@ class ColumnGenerationClassifierQar(BaseEstimator, ClassifierMixin, BaseBoost):
         self.c_bound_choice = c_bound_choice
         self.random_start = random_start
         self.plotted_metric = plotted_metric
-        self.n_stumps = n_stumps_per_attribute
+        self.n_stumps = n_stumps
         self.use_r = use_r
         self.c_bound_sol = c_bound_sol
         self.save_train_data = save_train_data
@@ -73,13 +73,6 @@ class ColumnGenerationClassifierQar(BaseEstimator, ClassifierMixin, BaseBoost):
                                        "n_stumps", "use_r", "c_bound_sol"]
         self.mincq_tracking = mincq_tracking
 
-    def get_params(self, deep=True):
-        return {"random_state":self.random_state, "n_max_iterations":self.n_max_iterations}
-
-    def set_params(self, **params):
-        self.n_max_iterations = params["n_max_iterations"]
-        return self
-
     def fit(self, X, y):
 
         start = time.time()
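
Dropping the hand-written get_params/set_params in this hunk lets ColumnGenerationClassifierQar fall back on scikit-learn's BaseEstimator introspection, which derives both methods from the __init__ signature, so the renamed n_stumps (and every other constructor keyword) becomes visible to the hyper-parameter search. A minimal sketch of that default behaviour, using a stand-alone toy class rather than the project's:

```python
# Toy class (illustrative only): BaseEstimator derives get_params/set_params
# from the constructor signature, so no manual override is needed.
from sklearn.base import BaseEstimator

class Toy(BaseEstimator):
    def __init__(self, n_max_iterations=None, n_stumps=1, random_state=42):
        self.n_max_iterations = n_max_iterations
        self.n_stumps = n_stumps
        self.random_state = random_state

toy = Toy()
print(toy.get_params())      # {'n_max_iterations': None, 'n_stumps': 1, 'random_state': 42}
toy.set_params(n_stumps=5)   # any constructor keyword can be set, not just n_max_iterations
```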
diff --git a/multiview_platform/MonoMultiViewClassifiers/Monoview/ExecClassifMonoView.py b/multiview_platform/MonoMultiViewClassifiers/Monoview/ExecClassifMonoView.py
index 778ab96a6456f467bac4c21a74e6a35d40a20d17..8a311c5df84fbb5acf215d943ad218720ce35f62 100644
--- a/multiview_platform/MonoMultiViewClassifiers/Monoview/ExecClassifMonoView.py
+++ b/multiview_platform/MonoMultiViewClassifiers/Monoview/ExecClassifMonoView.py
@@ -165,7 +165,7 @@ def getHPs(classifierModule, hyperParamSearch, nIter, CL_type, X_train, y_train,
         clKWARGS, testFoldsPreds = classifierHPSearch(X_train, y_train, randomState,
                                       outputFileName, classifierModule, CL_type,
                                       KFolds=KFolds, nbCores=nbCores,
-                                      metric=metrics[0], nIter=nIter)
+                                      metric=metrics[0], nIter=nIter, classifier_KWARGS=kwargs[CL_type + "KWARGS"])
         logging.debug("Done:\t " + hyperParamSearch + " best settings")
     else:
         clKWARGS = kwargs[CL_type + "KWARGS"]
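
The extra classifier_KWARGS argument forwards the per-classifier keyword dictionary into the hyper-parameter search, so the estimator built inside randomizedSearch carries the same fixed options (for instance the stump count) as the classifier trained afterwards. A hedged sketch of the forwarded dictionary, with hypothetical values:

```python
# Hypothetical kwargs layout for a CGDesc run; the inner keys mirror the
# classifier's __init__ keywords after the n_stumps rename.
kwargs = {"CGDescKWARGS": {"n_stumps": 10, "n_max_iterations": 500}}
CL_type = "CGDesc"

classifier_KWARGS = kwargs[CL_type + "KWARGS"]
# randomizedSearch then instantiates:
#   getattr(classifierModule, CL_type)(randomState, **classifier_KWARGS)
print(classifier_KWARGS)  # {'n_stumps': 10, 'n_max_iterations': 500}
```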
diff --git a/multiview_platform/MonoMultiViewClassifiers/Monoview/ExecPlot.py b/multiview_platform/MonoMultiViewClassifiers/Monoview/ExecPlot.py
index 2ea2f1d75e6579a74a2f0824a283174576081b6d..6be38474597c5a084e905b2a0afd91c9a56cc66b 100644
--- a/multiview_platform/MonoMultiViewClassifiers/Monoview/ExecPlot.py
+++ b/multiview_platform/MonoMultiViewClassifiers/Monoview/ExecPlot.py
@@ -1,89 +1,89 @@
-#!/usr/bin/env python
-
-""" Script whichs helps to replot preds from Feature Parameter Optimisation """
-
-# Import built-in modules
-import argparse  # for acommand line arguments
-import datetime  # for TimeStamp in CSVFile
-import os  # to geth path of the running script
-import matplotlib
-
-# matplotlib.use('Agg')
-# Import 3rd party modules
-import pandas as pd  # for Series
-import numpy as np  # for DataFrames
-
-# Import own modules
-import ExportResults  # Functions to render results
-
-# Author-Info
-__author__ = "Nikolas Huelsmann"
-__status__ = "Prototype"  # Production, Development, Prototype
-__date__ = 2016 - 03 - 25
-
-parser = argparse.ArgumentParser(
-    description='This method can be used to replot preds from Feature Parameter Optimisation',
-    formatter_class=argparse.ArgumentDefaultsHelpFormatter)
-args = parser.parse_args()
-args.name = "Caltech"
-args.valueStart = 2
-args.valueEnd = 25
-args.nCalcs = 5
-args.feature = "HSV"
-args.param = "HSV_V_Bins"
-args.show = False
-df_feat_res = pd.DataFrame.from_csv(
-    path="D:\\BitBucket\\multiview-machine-learning-omis\\Results\\Hydra\\2016_03_23-FPO-Caltech-HSV-HSV_V_Bins.csv",
-    sep=';')
-
-# Get data from result to show results in plot
-# logging.debug("Start:\t Plot Result")
-# Total time for feature extraction and classification
-tot_time = df_feat_res.b_feat_extr_time.values + df_feat_res.e_cl_time.values
-tot_time = np.asarray(tot_time)
-# Time for feature extraction
-feat_time = df_feat_res.b_feat_extr_time.values
-feat_time = np.asarray(feat_time)
-# Time for classification
-cl_time = df_feat_res.e_cl_time.values
-cl_time = np.asarray(cl_time)
-
-# Mean Score of all classes
-score = df_feat_res.f_cl_score.values
-score = np.asarray(score)
-
-# Range on X-Axis
-if args.nCalcs > 1:
-    step = float(args.valueEnd - args.valueStart) / float(args.nCalcs - 1)
-    rangeX = np.around(np.array(range(0, args.nCalcs)) * step) + args.valueStart
-else:
-    rangeX = [args.valueStart]
-rangeX = np.asarray(rangeX)
-
-# Description of Classification
-cl_desc = df_feat_res.c_cl_desc.values
-
-# Description of Feature
-feat_desc = df_feat_res.a_feat_desc.values
-
-dir = os.path.dirname(os.path.abspath(__file__)) + "/Results-FeatParaOpt/"
-# filename = datetime.datetime.now().strftime("%Y_%m_%d") + "-FPO-" + args.name + "-" + args.feature + "-" + args.param
-# ExportResults.exportPandasToCSV(df_feat_res, directory, filename)
-
-# Store or Show plot
-if args.show:
-    store = False
-else:
-    store = True
-
-fileName = datetime.datetime.now().strftime("%Y_%m_%d") + "-FPO-" + args.name + "-" + args.feature + "-" + args.param
-# Show Results for Calculation
-ExportResults.showScoreTime(dir, fileName + "-TotalTime", store, score, tot_time, rangeX, args.param, feat_desc,
-                            cl_desc, 'Results for Parameter Optimisation - DB:' + args.name + ' Feat:' + args.feature,
-                            'Precision', 'Total Time (Feature Extraction+Classification)\n [s]')
-ExportResults.showScoreTime(dir, fileName + "-FeatExtTime", store, score, feat_time, rangeX, args.param, feat_desc,
-                            cl_desc, 'Results for Parameter Optimisation - DB:' + args.name + ' Feat:' + args.feature,
-                            'Precision', 'Feature Extraction Time\n [s]')
-ExportResults.showScoreTime(dir, fileName + "-ClassTime", store, score, cl_time, rangeX, args.param, feat_desc, cl_desc,
-                            'Results for Parameter Optimisation - DB:' + args.name + ' Feat:' + args.feature,
-                            'Precision', 'Classification Time\n [s]')
+# #!/usr/bin/env python
+#
+# """ Script whichs helps to replot preds from Feature Parameter Optimisation """
+#
+# # Import built-in modules
+# import argparse  # for acommand line arguments
+# import datetime  # for TimeStamp in CSVFile
+# import os  # to geth path of the running script
+# import matplotlib
+#
+# # matplotlib.use('Agg')
+# # Import 3rd party modules
+# import pandas as pd  # for Series
+# import numpy as np  # for DataFrames
+#
+# # Import own modules
+# import ExportResults  # Functions to render results
+#
+# # Author-Info
+# __author__ = "Nikolas Huelsmann"
+# __status__ = "Prototype"  # Production, Development, Prototype
+# __date__ = 2016 - 03 - 25
+#
+# parser = argparse.ArgumentParser(
+#     description='This method can be used to replot preds from Feature Parameter Optimisation',
+#     formatter_class=argparse.ArgumentDefaultsHelpFormatter)
+# args = parser.parse_args()
+# args.name = "Caltech"
+# args.valueStart = 2
+# args.valueEnd = 25
+# args.nCalcs = 5
+# args.feature = "HSV"
+# args.param = "HSV_V_Bins"
+# args.show = False
+# df_feat_res = pd.DataFrame.from_csv(
+#     path="D:\\BitBucket\\multiview-machine-learning-omis\\Results\\Hydra\\2016_03_23-FPO-Caltech-HSV-HSV_V_Bins.csv",
+#     sep=';')
+#
+# # Get data from result to show results in plot
+# # logging.debug("Start:\t Plot Result")
+# # Total time for feature extraction and classification
+# tot_time = df_feat_res.b_feat_extr_time.values + df_feat_res.e_cl_time.values
+# tot_time = np.asarray(tot_time)
+# # Time for feature extraction
+# feat_time = df_feat_res.b_feat_extr_time.values
+# feat_time = np.asarray(feat_time)
+# # Time for classification
+# cl_time = df_feat_res.e_cl_time.values
+# cl_time = np.asarray(cl_time)
+#
+# # Mean Score of all classes
+# score = df_feat_res.f_cl_score.values
+# score = np.asarray(score)
+#
+# # Range on X-Axis
+# if args.nCalcs > 1:
+#     step = float(args.valueEnd - args.valueStart) / float(args.nCalcs - 1)
+#     rangeX = np.around(np.array(range(0, args.nCalcs)) * step) + args.valueStart
+# else:
+#     rangeX = [args.valueStart]
+# rangeX = np.asarray(rangeX)
+#
+# # Description of Classification
+# cl_desc = df_feat_res.c_cl_desc.values
+#
+# # Description of Feature
+# feat_desc = df_feat_res.a_feat_desc.values
+#
+# dir = os.path.dirname(os.path.abspath(__file__)) + "/Results-FeatParaOpt/"
+# # filename = datetime.datetime.now().strftime("%Y_%m_%d") + "-FPO-" + args.name + "-" + args.feature + "-" + args.param
+# # ExportResults.exportPandasToCSV(df_feat_res, directory, filename)
+#
+# # Store or Show plot
+# if args.show:
+#     store = False
+# else:
+#     store = True
+#
+# fileName = datetime.datetime.now().strftime("%Y_%m_%d") + "-FPO-" + args.name + "-" + args.feature + "-" + args.param
+# # Show Results for Calculation
+# ExportResults.showScoreTime(dir, fileName + "-TotalTime", store, score, tot_time, rangeX, args.param, feat_desc,
+#                             cl_desc, 'Results for Parameter Optimisation - DB:' + args.name + ' Feat:' + args.feature,
+#                             'Precision', 'Total Time (Feature Extraction+Classification)\n [s]')
+# ExportResults.showScoreTime(dir, fileName + "-FeatExtTime", store, score, feat_time, rangeX, args.param, feat_desc,
+#                             cl_desc, 'Results for Parameter Optimisation - DB:' + args.name + ' Feat:' + args.feature,
+#                             'Precision', 'Feature Extraction Time\n [s]')
+# ExportResults.showScoreTime(dir, fileName + "-ClassTime", store, score, cl_time, rangeX, args.param, feat_desc, cl_desc,
+#                             'Results for Parameter Optimisation - DB:' + args.name + ' Feat:' + args.feature,
+#                             'Precision', 'Classification Time\n [s]')
diff --git a/multiview_platform/MonoMultiViewClassifiers/Monoview/MonoviewUtils.py b/multiview_platform/MonoMultiViewClassifiers/Monoview/MonoviewUtils.py
index 44f12edbac25402773da287392e8a75cbeb1fe4a..7a9fa525c6ab7c01d977cb31e7f60c557772c7a7 100644
--- a/multiview_platform/MonoMultiViewClassifiers/Monoview/MonoviewUtils.py
+++ b/multiview_platform/MonoMultiViewClassifiers/Monoview/MonoviewUtils.py
@@ -15,9 +15,8 @@ __status__ = "Prototype"  # Production, Development, Prototype
 
 
 def randomizedSearch(X_train, y_train, randomState, outputFileName, classifierModule, CL_type, KFolds = 4, nbCores = 1,
-    metric = ["accuracy_score", None], nIter = 30):
-
-    estimator = getattr(classifierModule, CL_type)(randomState)
+    metric = ["accuracy_score", None], nIter = 30, classifier_KWARGS=None):
+    estimator = getattr(classifierModule, CL_type)(randomState, **classifier_KWARGS)
     params_dict = estimator.genDistribs()
     if params_dict:
         metricModule = getattr(Metrics, metric[0])
@@ -28,12 +27,11 @@ def randomizedSearch(X_train, y_train, randomState, outputFileName, classifierMo
         scorer = metricModule.get_scorer(**metricKWARGS)
         nb_possible_combinations = compute_possible_combinations(params_dict)
         min_list = np.array([min(nb_possible_combination, nIter) for nb_possible_combination in nb_possible_combinations])
-        print(nbCores)
         randomSearch = RandomizedSearchCV(estimator, n_iter=int(np.sum(min_list)), param_distributions=params_dict, refit=True,
                                           n_jobs=nbCores, scoring=scorer, cv=KFolds, random_state=randomState)
         detector = randomSearch.fit(X_train, y_train)
 
-        bestParams = estimator.genBestParams(detector)
+        bestParams = {key: value for key, value in estimator.genBestParams(detector).items() if key != "random_state"}
 
         scoresArray = detector.cv_results_['mean_test_score']
         params = estimator.genParamsFromDetector(detector)
@@ -148,6 +146,7 @@ class BaseMonoviewClassifier(object):
             else:
                 return self.weird_strings[param_name](self.get_params()[param_name])
         else:
+            print(self.get_params())
             return str(self.get_params()[param_name])
 
     def getFeatureImportance(self, directory, nb_considered_feats=50):
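
Dropping random_state from the best-parameter dictionary keeps the experiment-level seed instead of the one the search sampled; string identity (is not) is unreliable for that test, hence the != comparison. A small sketch of the filtering step on a hypothetical result dictionary:

```python
# Hypothetical dictionary as returned by genBestParams(detector); everything
# except the sampled random_state is forwarded to the final classifier.
best_params = {"n_max_iterations": 412, "n_stumps": 10, "random_state": 42}

clKWARGS = {key: value for key, value in best_params.items() if key != "random_state"}
print(clKWARGS)  # {'n_max_iterations': 412, 'n_stumps': 10}
```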
diff --git a/multiview_platform/MonoMultiViewClassifiers/MonoviewClassifiers/AdaboostGraalpy.py b/multiview_platform/MonoMultiViewClassifiers/MonoviewClassifiers/AdaboostGraalpy.py
index 7097231eb4cf6943208fd19a8c2fbcf354d313c8..04395dfec96f3069193fb96adf658ae858514116 100644
--- a/multiview_platform/MonoMultiViewClassifiers/MonoviewClassifiers/AdaboostGraalpy.py
+++ b/multiview_platform/MonoMultiViewClassifiers/MonoviewClassifiers/AdaboostGraalpy.py
@@ -148,8 +148,8 @@ class AdaboostGraalpy(AdaBoostGP, BaseMonoviewClassifier):
             n_iterations=n_iterations,
             n_stumps=n_stumps
         )
-        self.param_names = ["n_iterations",]
-        self.distribs = [CustomRandint(low=1, high=500)]
+        self.param_names = ["n_iterations","n_stumps", "random_state"]
+        self.distribs = [CustomRandint(low=1, high=500), [n_stumps], [random_state]]
         self.classed_params = []
         self.weird_strings = {}
         self.n_stumps = n_stumps
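
Appending [n_stumps] and [random_state] to distribs relies on RandomizedSearchCV treating a list as a discrete distribution to sample from: a one-element list pins the value while still letting the search set it explicitly on every candidate. A sketch of the mechanism with plain scikit-learn objects (the platform's CustomRandint wrapper is replaced by scipy.stats.randint here):

```python
# One-element lists act as "fixed" parameters inside a randomized search:
# the single value is re-applied to every sampled candidate.
from scipy.stats import randint
from sklearn.ensemble import AdaBoostClassifier
from sklearn.model_selection import RandomizedSearchCV

param_distributions = {
    "n_estimators": randint(1, 500),   # genuinely searched
    "random_state": [42],              # pinned to a single value
}
search = RandomizedSearchCV(AdaBoostClassifier(), param_distributions,
                            n_iter=10, cv=3)
# search.fit(X, y) samples n_estimators freely but always sets random_state=42.
```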
diff --git a/multiview_platform/MonoMultiViewClassifiers/MonoviewClassifiers/AdaboostPregen.py b/multiview_platform/MonoMultiViewClassifiers/MonoviewClassifiers/AdaboostPregen.py
index ad523bdefdd0dea7defb167137e21f8bfc768e93..ddb8f8649f0bf9d05707689dd9d07d20d2227016 100644
--- a/multiview_platform/MonoMultiViewClassifiers/MonoviewClassifiers/AdaboostPregen.py
+++ b/multiview_platform/MonoMultiViewClassifiers/MonoviewClassifiers/AdaboostPregen.py
@@ -24,9 +24,9 @@ class AdaboostPregen(AdaBoostClassifier, BaseBoost, BaseMonoviewClassifier):
             base_estimator=base_estimator,
             algorithm="SAMME"
             )
-        self.param_names = ["n_estimators", "base_estimator"]
+        self.param_names = ["n_estimators", "base_estimator", "n_stumps", "random_state"]
         self.classed_params = ["base_estimator"]
-        self.distribs = [CustomRandint(low=1, high=500), [DecisionTreeClassifier(max_depth=1)]]
+        self.distribs = [CustomRandint(low=1, high=500), [DecisionTreeClassifier(max_depth=1)], [n_stumps], [random_state]]
         self.weird_strings = {"base_estimator": "class_name"}
         self.plotted_metric = Metrics.zero_one_loss
         self.plotted_metric_name = "zero_one_loss"
@@ -61,6 +61,13 @@ class AdaboostPregen(AdaBoostClassifier, BaseBoost, BaseMonoviewClassifier):
             self.step_predictions = np.array([change_label_to_zero(step_pred) for step_pred in self.staged_predict(pregen_X)])
         return change_label_to_zero(pred)
 
+    # def set_params(self, **params):
+    #     super().set_params(**params)
+    #     self.random_state = params["random_state"]
+    #     self.n_stumps_per_attribute = params["n_stumps"]
+    #     return self
+
+
     def getInterpret(self, directory, y_test):
         interpretString = ""
         interpretString += self.getFeatureImportance(directory)
diff --git a/multiview_platform/MonoMultiViewClassifiers/MonoviewClassifiers/CGDesc.py b/multiview_platform/MonoMultiViewClassifiers/MonoviewClassifiers/CGDesc.py
index 209baad9e07d9ce57d6dab33d49ff4e69f89724d..e22ea27a7200b501babd106452541f03be6f103d 100644
--- a/multiview_platform/MonoMultiViewClassifiers/MonoviewClassifiers/CGDesc.py
+++ b/multiview_platform/MonoMultiViewClassifiers/MonoviewClassifiers/CGDesc.py
@@ -5,19 +5,20 @@ from ..Monoview.Additions.QarBoostUtils import ColumnGenerationClassifierQar
 
 class CGDesc(ColumnGenerationClassifierQar, BaseMonoviewClassifier):
 
-    def __init__(self, random_state=None, n_max_iterations=500, n_stumps_per_attribute=1, **kwargs):
+    def __init__(self, random_state=None, n_max_iterations=500, n_stumps=1, **kwargs):
         super(CGDesc, self).__init__(n_max_iterations=n_max_iterations,
             random_state=random_state,
             self_complemented=True,
             twice_the_same=True,
             c_bound_choice=True,
             random_start=False,
-            n_stumps_per_attribute=n_stumps_per_attribute,
+            n_stumps=n_stumps,
             use_r=True,
             c_bound_sol=True
             )
-        self.param_names = ["n_max_iterations"]
-        self.distribs = [CustomRandint(low=2, high=1000)]
+        self.param_names = ["n_max_iterations", "n_stumps", "random_state"]
+        self.distribs = [CustomRandint(low=2, high=1000), [n_stumps],
+                         [random_state]]
         self.classed_params = []
         self.weird_strings = {}
 
@@ -34,7 +35,7 @@ class CGDesc(ColumnGenerationClassifierQar, BaseMonoviewClassifier):
 
 def formatCmdArgs(args):
     """Used to format kwargs for the parsed args"""
-    kwargsDict = {"n_stumps_per_attribute":args.CGD_stumps,
+    kwargsDict = {"n_stumps":args.CGD_stumps,
     "n_max_iterations":args.CGD_n_iter}
     return kwargsDict
 
diff --git a/multiview_platform/MonoMultiViewClassifiers/MonoviewClassifiers/CGreed.py b/multiview_platform/MonoMultiViewClassifiers/MonoviewClassifiers/CGreed.py
index 351698c2faac20f5e910063657e07d92c9b95be3..e0e0916fbcb8ea39d3c1b0a1995a5391582180d8 100644
--- a/multiview_platform/MonoMultiViewClassifiers/MonoviewClassifiers/CGreed.py
+++ b/multiview_platform/MonoMultiViewClassifiers/MonoviewClassifiers/CGreed.py
@@ -5,20 +5,20 @@ from ..Monoview.Additions.QarBoostUtils import ColumnGenerationClassifierQar
 
 class CGreed(ColumnGenerationClassifierQar, BaseMonoviewClassifier):
 
-    def __init__(self, random_state=None, n_max_iterations=500, n_stumps_per_attribute=10, **kwargs):
+    def __init__(self, random_state=None, n_max_iterations=500, n_stumps=10, **kwargs):
         super(CGreed, self).__init__(n_max_iterations=n_max_iterations,
             random_state=random_state,
             self_complemented=True,
             twice_the_same=False,
             c_bound_choice=True,
             random_start=False,
-            n_stumps_per_attribute=n_stumps_per_attribute,
+            n_stumps=n_stumps,
             use_r=True,
             c_bound_sol=True
             )
 
-        self.param_names = ["n_max_iterations"]
-        self.distribs = [CustomRandint(low=2, high=1000)]
+        self.param_names = ["n_max_iterations", "n_stumps", "random_state"]
+        self.distribs = [CustomRandint(low=2, high=1000), [n_stumps], [random_state]]
         self.classed_params = []
         self.weird_strings = {}
 
@@ -35,7 +35,7 @@ class CGreed(ColumnGenerationClassifierQar, BaseMonoviewClassifier):
 
 def formatCmdArgs(args):
     """Used to format kwargs for the parsed args"""
-    kwargsDict = {"n_stumps_per_attribute":args.CGR_stumps,
+    kwargsDict = {"n_stumps":args.CGR_stumps,
     "n_max_iterations":args.CGR_n_iter}
     return kwargsDict
 
diff --git a/multiview_platform/MonoMultiViewClassifiers/MonoviewClassifiers/CQBoost.py b/multiview_platform/MonoMultiViewClassifiers/MonoviewClassifiers/CQBoost.py
index c1b59f11083af32cbe6d91ee5a147a12994c48e6..f52385d1e0578a113d79844f9b8abfbabdb0f29f 100644
--- a/multiview_platform/MonoMultiViewClassifiers/MonoviewClassifiers/CQBoost.py
+++ b/multiview_platform/MonoMultiViewClassifiers/MonoviewClassifiers/CQBoost.py
@@ -13,9 +13,9 @@ class CQBoost(ColumnGenerationClassifier, BaseMonoviewClassifier):
             mu=mu,
             epsilon=epsilon
         )
-        self.param_names = ["mu", "epsilon"]
+        self.param_names = ["mu", "epsilon", "n_stumps", "random_state"]
         self.distribs = [CustomUniform(loc=0.5, state=1.0, multiplier="e-"),
-                         CustomRandint(low=1, high=15, multiplier="e-")]
+                         CustomRandint(low=1, high=15, multiplier="e-"), [n_stumps], [random_state]]
         self.classed_params = []
         self.weird_strings = {}
         self.n_stumps = n_stumps
diff --git a/multiview_platform/MonoMultiViewClassifiers/MonoviewClassifiers/DecisionTree.py b/multiview_platform/MonoMultiViewClassifiers/MonoviewClassifiers/DecisionTree.py
index b511fdf28668db404b411827605dd98ec894be7d..7ff6129659de33f60edf56f42133c60206c07d0c 100644
--- a/multiview_platform/MonoMultiViewClassifiers/MonoviewClassifiers/DecisionTree.py
+++ b/multiview_platform/MonoMultiViewClassifiers/MonoviewClassifiers/DecisionTree.py
@@ -17,11 +17,11 @@ class DecisionTree(DecisionTreeClassifier, BaseMonoviewClassifier):
             splitter=splitter,
             random_state=random_state
             )
-        self.param_names = ["max_depth", "criterion", "splitter",]
+        self.param_names = ["max_depth", "criterion", "splitter",'random_state']
         self.classed_params = []
         self.distribs = [CustomRandint(low=1, high=300),
                          ["gini", "entropy"],
-                         ["best", "random"], ]
+                         ["best", "random"], [random_state]]
         self.weird_strings = {}
 
     def canProbas(self):
diff --git a/multiview_platform/MonoMultiViewClassifiers/MonoviewClassifiers/KNN.py b/multiview_platform/MonoMultiViewClassifiers/MonoviewClassifiers/KNN.py
index a8cb8fc6447a5ffe1ef3c97930931cbb1b6165d5..5fbcaeb825aac67d4d7c9ddac15fe5ae4f4ffd15 100644
--- a/multiview_platform/MonoMultiViewClassifiers/MonoviewClassifiers/KNN.py
+++ b/multiview_platform/MonoMultiViewClassifiers/MonoviewClassifiers/KNN.py
@@ -17,10 +17,10 @@ class KNN(KNeighborsClassifier, BaseMonoviewClassifier):
             algorithm=algorithm,
             p=p
             )
-        self.param_names = ["n_neighbors", "weights", "algorithm", "p"]
+        self.param_names = ["n_neighbors", "weights", "algorithm", "p", "random_state",]
         self.classed_params = []
         self.distribs = [CustomRandint(low=1, high=10), ["uniform", "distance"],
-                         ["auto", "ball_tree", "kd_tree", "brute"], [1, 2]]
+                         ["auto", "ball_tree", "kd_tree", "brute"], [1, 2], [random_state]]
         self.weird_strings = {}
         self.random_state=random_state
 
diff --git a/multiview_platform/MonoMultiViewClassifiers/MonoviewClassifiers/Lasso.py b/multiview_platform/MonoMultiViewClassifiers/MonoviewClassifiers/Lasso.py
index 79e2a750f9f3f1c608d632bfe3437775f03eabef..1e4d3abe954bc0faa7cdd29c630d585f25c50179 100644
--- a/multiview_platform/MonoMultiViewClassifiers/MonoviewClassifiers/Lasso.py
+++ b/multiview_platform/MonoMultiViewClassifiers/MonoviewClassifiers/Lasso.py
@@ -18,10 +18,10 @@ class Lasso(Lasso, BaseMonoviewClassifier):
             warm_start=warm_start,
             random_state=random_state
             )
-        self.param_names = ["max_iter", "alpha",]
+        self.param_names = ["max_iter", "alpha", "random_state"]
         self.classed_params = []
         self.distribs = [CustomRandint(low=1, high=300),
-                         CustomUniform(),]
+                         CustomUniform(), [random_state]]
         self.weird_strings = {}
 
     def fit(self, X, y, check_input=True):
diff --git a/multiview_platform/MonoMultiViewClassifiers/MonoviewClassifiers/MinCQ.py b/multiview_platform/MonoMultiViewClassifiers/MonoviewClassifiers/MinCQ.py
index 73133a3e093bf17d4ced59ed01c60efba9e76156..d8c6efe8e6b865ec536111702699f4d6503b39e7 100644
--- a/multiview_platform/MonoMultiViewClassifiers/MonoviewClassifiers/MinCQ.py
+++ b/multiview_platform/MonoMultiViewClassifiers/MonoviewClassifiers/MinCQ.py
@@ -525,15 +525,16 @@ class KernelVotersGenerator(VotersGenerator):
 
 class MinCQ(MinCqLearner, BaseMonoviewClassifier):
 
-    def __init__(self, random_state=None, mu=0.01, self_complemented=True , **kwargs):
+    def __init__(self, random_state=None, mu=0.01, self_complemented=True, n_stumps_per_attribute=10, **kwargs):
         super(MinCQ, self).__init__(mu=mu,
             voters_type='stumps',
-            n_stumps_per_attribute =10,
+            n_stumps_per_attribute=n_stumps_per_attribute,
             self_complemented=self_complemented
         )
-        self.param_names = ["mu"]
+        self.param_names = ["mu", "n_stumps_per_attribute", "random_state"]
         self.distribs = [CustomUniform(loc=0.5, state=2.0, multiplier="e-"),
-                         ]
+                         [n_stumps_per_attribute], [random_state]]
+        self.random_state = random_state
         self.classed_params = []
         self.weird_strings = {}
         if "nbCores" not in kwargs:
@@ -545,6 +546,16 @@ class MinCQ(MinCqLearner, BaseMonoviewClassifier):
         """Used to know if the classifier can return label probabilities"""
         return True
 
+    def set_params(self, **params):
+        self.mu = params["mu"]
+        self.random_state = params["random_state"]
+        self.n_stumps_per_attribute = params["n_stumps_per_attribute"]
+        return self
+
+    def get_params(self, deep=True):
+        return {"random_state": self.random_state, "mu": self.mu,
+                "n_stumps_per_attribute": self.n_stumps_per_attribute}
+
     def getInterpret(self, directory, y_test):
         interpret_string = "Train C_bound value : "+str(self.cbound_train)
         y_rework = np.copy(y_test)
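
The explicit set_params/get_params pair added above is what lets RandomizedSearchCV clone and re-parameterise MinCQ, since the wrapper's constructor signature differs from MinCqLearner's. A minimal sketch of the contract being satisfied, with a hypothetical toy wrapper:

```python
# Toy wrapper (illustrative only): clone() rebuilds the object from get_params(),
# and the search pushes sampled values through set_params() before each fit.
from sklearn.base import clone

class ToyMinCQ:
    def __init__(self, random_state=None, mu=0.01, n_stumps_per_attribute=10, **kwargs):
        self.random_state = random_state
        self.mu = mu
        self.n_stumps_per_attribute = n_stumps_per_attribute

    def get_params(self, deep=True):
        return {"random_state": self.random_state, "mu": self.mu,
                "n_stumps_per_attribute": self.n_stumps_per_attribute}

    def set_params(self, **params):
        self.mu = params["mu"]
        self.random_state = params["random_state"]
        self.n_stumps_per_attribute = params["n_stumps_per_attribute"]
        return self

copy = clone(ToyMinCQ(mu=0.05))
print(copy.get_params()["mu"])  # 0.05
```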
diff --git a/multiview_platform/MonoMultiViewClassifiers/MonoviewClassifiers/MinCQGraalpy.py b/multiview_platform/MonoMultiViewClassifiers/MonoviewClassifiers/MinCQGraalpy.py
index ae27aa91c57c040e3c021e551dd989ec343035f6..805ac493441c825b7d7afaecc335f24bdd63b537 100644
--- a/multiview_platform/MonoMultiViewClassifiers/MonoviewClassifiers/MinCQGraalpy.py
+++ b/multiview_platform/MonoMultiViewClassifiers/MonoviewClassifiers/MinCQGraalpy.py
@@ -43,7 +43,7 @@ class MinCqClassifier(VotingClassifier):
         if estimators is None:
             estimators = []
 
-        super().__init__(estimators=estimators, voting='soft')
+        super().__init__(estimators=estimators, voting='soft', flatten_transform=False)
+        self.clean_me = False
         self.estimators_generator = estimators_generator
         self.mu = mu
         self.omega = omega
@@ -86,11 +86,13 @@ class MinCqClassifier(VotingClassifier):
         else:
             self.le_ = LabelEncoder()
             self.le_.fit(y)
+            self.clean_me = True
 
             if isinstance(y, np.ma.MaskedArray):
                 transformed_y = np.ma.MaskedArray(self.le_.transform(y), y.mask)
             else:
-                transformed_y = self.le_.transform(y)
+                # transformed_y = self.le_.transform(y)
+                transformed_y = y
 
             self.estimators_generator.fit(X, transformed_y)
             self.estimators = [('ds{}'.format(i), estimator) for i, estimator in enumerate(self.estimators_generator.estimators_)]
@@ -106,7 +108,8 @@ class MinCqClassifier(VotingClassifier):
         # Preparation and resolution of the quadratic program
         # logger.info("Preparing and solving QP...")
         self.weights = self._solve(X, y)
-
+        if self.clean_me:
+            self.estimators = []
         return self
 
     # def evaluate_metrics(self, X, y, metrics_list=None, functions_list=None):
@@ -138,6 +141,16 @@ class MinCqClassifier(VotingClassifier):
 
         return (matrix - self.omega)
 
+    def predict(self, X):
+        if not self.estimators:
+            self.estimators = [('ds{}'.format(i), estimator) for i, estimator in
+                               enumerate(self.estimators_generator.estimators_)]
+            self.clean_me = True
+        pred = super().predict(X)
+        if self.clean_me:
+            self.estimators = []
+        return pred
+
     def _solve(self, X, y):
         y = self.le_.transform(y)
 
@@ -240,7 +253,6 @@ class RegularizedBinaryMinCqClassifier(MinCqClassifier):
         n_examples, n_voters = np.shape(classification_matrix)
 
         if self.zeta == 0:
-            print(classification_matrix.shape)
             np.transpose(classification_matrix)
             ftf = np.dot(np.transpose(classification_matrix),classification_matrix)
         else:
@@ -273,7 +285,12 @@ class RegularizedBinaryMinCqClassifier(MinCqClassifier):
         lower_bound = 0.0
         upper_bound = 1.0 / n_voters
 
-        weights = self._solve_qp(objective_matrix, objective_vector, equality_matrix, equality_vector, lower_bound, upper_bound)
+        try:
+            weights = self._solve_qp(objective_matrix, objective_vector, equality_matrix, equality_vector, lower_bound, upper_bound)
+        except ValueError as e:
+            if "domain error" in e.args:
+                weights = np.ones(len(self.estimators_))
+
 
         # Keep learning information for further use.
         self.learner_info_ = {}
@@ -330,9 +347,9 @@ class MinCQGraalpy(RegularizedBinaryMinCqClassifier, BaseMonoviewClassifier):
         super(MinCQGraalpy, self).__init__(mu=mu,
             estimators_generator=StumpsClassifiersGenerator(n_stumps_per_attribute=n_stumps_per_attribute, self_complemented=self_complemented),
         )
-        self.param_names = ["mu",]
-        self.distribs = [CustomUniform(loc=0.5, state=2.0, multiplier="e-"),
-                         ]
+        self.param_names = ["mu", "n_stumps_per_attribute", "random_state"]
+        self.distribs = [CustomUniform(loc=0.05, state=2.0, multiplier="e-"),
+                         [n_stumps_per_attribute], [random_state]]
         self.n_stumps_per_attribute = n_stumps_per_attribute
         self.classed_params = []
         self.weird_strings = {}
@@ -348,9 +365,12 @@ class MinCQGraalpy(RegularizedBinaryMinCqClassifier, BaseMonoviewClassifier):
 
     def set_params(self, **params):
         self.mu = params["mu"]
+        self.random_state = params["random_state"]
+        self.n_stumps_per_attribute = params["n_stumps_per_attribute"]
+        return self
 
     def get_params(self, deep=True):
-        return {"random_state":self.random_state, "mu":self.mu}
+        return {"random_state":self.random_state, "mu":self.mu, "n_stumps_per_attribute":self.n_stumps_per_attribute}
 
     def getInterpret(self, directory, y_test):
         interpret_string = ""
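
Two behavioural notes on this file: set_params now returns self, which scikit-learn's search machinery expects, and the QP solve is wrapped so that a cvxopt "domain error" degrades to uniform voter weights instead of aborting the run. A hedged sketch of that fallback pattern with a stand-in solver:

```python
# Stand-in solver (hypothetical) illustrating the fallback: if the QP raises
# the "domain error" ValueError, fall back to uniform weights; anything else
# is re-raised so real bugs stay visible.
import numpy as np

def solve_qp_or_uniform(solve_qp, n_voters, *args):
    try:
        return solve_qp(*args)
    except ValueError as e:
        if "domain error" in e.args:
            return np.ones(n_voters)
        raise

def failing_solver(*args):
    raise ValueError("domain error")

print(solve_qp_or_uniform(failing_solver, 3))  # [1. 1. 1.]
```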
diff --git a/multiview_platform/MonoMultiViewClassifiers/MonoviewClassifiers/RandomForest.py b/multiview_platform/MonoMultiViewClassifiers/MonoviewClassifiers/RandomForest.py
index 5aeb0f9a5afc1e5b3355379bf57dc2101d817326..307136f912a2a223a048f5678375bb920a4df87d 100644
--- a/multiview_platform/MonoMultiViewClassifiers/MonoviewClassifiers/RandomForest.py
+++ b/multiview_platform/MonoMultiViewClassifiers/MonoviewClassifiers/RandomForest.py
@@ -17,11 +17,11 @@ class RandomForest(RandomForestClassifier, BaseMonoviewClassifier):
             criterion=criterion,
             random_state=random_state
             )
-        self.param_names = ["n_estimators", "max_depth", "criterion",]
+        self.param_names = ["n_estimators", "max_depth", "criterion", "random_state"]
         self.classed_params = []
         self.distribs = [CustomRandint(low=1, high=300),
                          CustomRandint(low=1, high=300),
-                         ["gini", "entropy"], ]
+                         ["gini", "entropy"], [random_state] ]
         self.weird_strings = {}
 
     def canProbas(self):
diff --git a/multiview_platform/MonoMultiViewClassifiers/MonoviewClassifiers/SCM.py b/multiview_platform/MonoMultiViewClassifiers/MonoviewClassifiers/SCM.py
index 0543be7723e165bc1ff72503dd24a31e92f72ec5..adda1111dbcbfdf1649262dc65c803045774cd7e 100644
--- a/multiview_platform/MonoMultiViewClassifiers/MonoviewClassifiers/SCM.py
+++ b/multiview_platform/MonoMultiViewClassifiers/MonoviewClassifiers/SCM.py
@@ -51,10 +51,10 @@ class SCM(scm, BaseMonoviewClassifier):
             max_rules=max_rules,
             p=p
             )
-        self.param_names = ["model_type", "max_rules", "p"]
+        self.param_names = ["model_type", "max_rules", "p", "random_state"]
         self.distribs = [["conjunction", "disjunction"],
                          CustomRandint(low=1, high=15),
-                         CustomUniform(loc=0, state=1)]
+                         CustomUniform(loc=0, state=1), [random_state]]
         self.classed_params = []
         self.weird_strings = {}
 
diff --git a/multiview_platform/MonoMultiViewClassifiers/MonoviewClassifiers/SGD.py b/multiview_platform/MonoMultiViewClassifiers/MonoviewClassifiers/SGD.py
index 0cfb7f9305cb81c43ea5004a18281e985bd988b3..6ce00d837dc08c240927f6eda88f25cc1a13fe4d 100644
--- a/multiview_platform/MonoMultiViewClassifiers/MonoviewClassifiers/SGD.py
+++ b/multiview_platform/MonoMultiViewClassifiers/MonoviewClassifiers/SGD.py
@@ -17,11 +17,11 @@ class SGD(SGDClassifier, BaseMonoviewClassifier):
             alpha=alpha,
             random_state=random_state
             )
-        self.param_names = ["loss", "penalty", "alpha",]
+        self.param_names = ["loss", "penalty", "alpha", "random_state"]
         self.classed_params = []
         self.distribs = [['log', 'modified_huber'],
                          ["l1", "l2", "elasticnet"],
-                         CustomUniform(loc=0, state=1), ]
+                         CustomUniform(loc=0, state=1), [random_state]]
         self.weird_strings = {}
 
     def canProbas(self):
diff --git a/multiview_platform/MonoMultiViewClassifiers/MonoviewClassifiers/SVMLinear.py b/multiview_platform/MonoMultiViewClassifiers/MonoviewClassifiers/SVMLinear.py
index 80a0179470b05f17e442ba35b0332dccf05494e4..e545fce58c181ce78f6cce99619f71972a717c01 100644
--- a/multiview_platform/MonoMultiViewClassifiers/MonoviewClassifiers/SVMLinear.py
+++ b/multiview_platform/MonoMultiViewClassifiers/MonoviewClassifiers/SVMLinear.py
@@ -15,8 +15,8 @@ class SVMLinear(SVCClassifier, BaseMonoviewClassifier):
             kernel='linear',
             random_state=random_state
             )
-        self.param_names = ["C",]
-        self.distribs = [CustomUniform(loc=0, state=1), ]
+        self.param_names = ["C", "random_state"]
+        self.distribs = [CustomUniform(loc=0, state=1), [random_state]]
 
 
 
diff --git a/multiview_platform/MonoMultiViewClassifiers/MonoviewClassifiers/SVMPoly.py b/multiview_platform/MonoMultiViewClassifiers/MonoviewClassifiers/SVMPoly.py
index 662601d6a945b285a562c6919df935dd883d4767..454c1847d714dd3178c11f3ca65e45fa58791340 100644
--- a/multiview_platform/MonoMultiViewClassifiers/MonoviewClassifiers/SVMPoly.py
+++ b/multiview_platform/MonoMultiViewClassifiers/MonoviewClassifiers/SVMPoly.py
@@ -15,8 +15,8 @@ class SVMPoly(SVCClassifier, BaseMonoviewClassifier):
             degree=degree,
             random_state=random_state
         )
-        self.param_names = ["C", "degree"]
-        self.distribs = [CustomUniform(loc=0, state=1), CustomRandint(low=2, high=30)]
+        self.param_names = ["C", "degree", "random_state"]
+        self.distribs = [CustomUniform(loc=0, state=1), CustomRandint(low=2, high=30), [random_state]]
 
 
 def formatCmdArgs(args):
diff --git a/multiview_platform/MonoMultiViewClassifiers/MonoviewClassifiers/SVMRBF.py b/multiview_platform/MonoMultiViewClassifiers/MonoviewClassifiers/SVMRBF.py
index c5fcb0525534b931bfa38351c5effab516f6f9b9..63e7dea97c7b7fc5a19551ba594fe33762f810f5 100644
--- a/multiview_platform/MonoMultiViewClassifiers/MonoviewClassifiers/SVMRBF.py
+++ b/multiview_platform/MonoMultiViewClassifiers/MonoviewClassifiers/SVMRBF.py
@@ -14,8 +14,8 @@ class SVMRBF(SVCClassifier, BaseMonoviewClassifier):
             kernel='rbf',
             random_state=random_state
         )
-        self.param_names = ["C",]
-        self.distribs = [CustomUniform(loc=0, state=1),]
+        self.param_names = ["C", "random_state"]
+        self.distribs = [CustomUniform(loc=0, state=1), [random_state]]
 
 
 def formatCmdArgs(args):