diff --git a/config_files/config.yml b/config_files/config.yml
index 4baa1dae8537a94d62db23c4acb26a5dd1623c97..bcd321ed418dff029bdeda2adde64812b4aa3369 100644
--- a/config_files/config.yml
+++ b/config_files/config.yml
@@ -197,4 +197,9 @@ weighted_linear_late_fusion:
     decision_tree:
       max_depth: [1]
       criterion: ["gini"]
-      splitter: ["best"]
\ No newline at end of file
+      splitter: ["best"]
+
+mumbo:
+  base_estimator: [None]
+  n_estimators: [10]
+  best_view_mode: ["edge"]
\ No newline at end of file
diff --git a/config_files/config_test.yml b/config_files/config_test.yml
index 5a4d59df8fc4edc1066c69fb6d7d99b427f66f15..c2d2d87bfe8cb7dda8052dd6516aba82f45f1c40 100644
--- a/config_files/config_test.yml
+++ b/config_files/config_test.yml
@@ -24,7 +24,7 @@ Classification:
   classes:
   type: ["multiview"]
   algos_monoview: ["all"]
-  algos_multiview: ["mumbo"]
+  algos_multiview: ["mumbo", "easy_mkl"]
   stats_iter: 2
   metrics: ["accuracy_score", "f1_score"]
   metric_princ: "f1_score"
@@ -202,4 +202,9 @@ weighted_linear_late_fusion:
 mumbo:
   base_estimator: [None]
   n_estimators: [10]
-  best_view_mode: ["edge"]
\ No newline at end of file
+  best_view_mode: ["edge"]
+
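+# Hyper-parameters of the EasyMKL wrapper: polynomial kernel degrees (per view) and EasyMKL's lam parameter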
+easy_mkl:
+  degrees: [1]
+  lam: [0.1]
diff --git a/multiview_platform/mono_multi_view_classifiers/multiview_classifiers/easy_mkl.py b/multiview_platform/mono_multi_view_classifiers/multiview_classifiers/easy_mkl.py
new file mode 100644
index 0000000000000000000000000000000000000000..18c4a0ab1939d0d58720998c6a949364e7b7ceae
--- /dev/null
+++ b/multiview_platform/mono_multi_view_classifiers/multiview_classifiers/easy_mkl.py
@@ -0,0 +1,72 @@
+from MKLpy.algorithms import EasyMKL
+from MKLpy.metrics import pairwise
+import numpy as np
+
+from ..multiview.multiview_utils import BaseMultiviewClassifier, get_examples_views_indices
+from ..utils.hyper_parameter_search import CustomUniform
+
+
+classifier_class_name = "EasyMKLClassifier"
+
+class EasyMKLClassifier(BaseMultiviewClassifier, EasyMKL):
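+    """Multiview wrapper around MKLpy's EasyMKL: each view is mapped to a
+    homogeneous polynomial kernel and EasyMKL learns their combination."""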
+
+    def __init__(self, random_state=None, degrees=1, lam=0.1):
+        super().__init__(random_state)
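+        # Skip BaseMultiviewClassifier in the MRO so that lam reaches EasyMKL's __init__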
+        super(BaseMultiviewClassifier, self).__init__(lam=lam)
+        self.degrees = degrees
+        self.param_names = ["lam", "degrees"]
+        self.distribs = [CustomUniform(), DegreesGenerator()]
+
+    def fit(self, X, y, train_indices=None, views_indices=None):
+        train_indices, views_indices = get_examples_views_indices(X,
+                                                                  train_indices,
+                                                                  views_indices)
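+        # Resolve the per-view degrees: either draw one degree per view from a
+        # DegreesDistribution, or broadcast a single integer to every view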
+        if isinstance(self.degrees, DegreesDistribution):
+            self.degrees = self.degrees.draw(len(views_indices))
+        elif isinstance(self.degrees, int):
+            self.degrees = [self.degrees for _ in range(len(views_indices))]
+
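+        # Build one homogeneous polynomial kernel per view on the training examples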
+        kernels = [pairwise.homogeneous_polynomial_kernel(X.get_V(views_indices[index],
+                                                                  train_indices),
+                                                          degree=degree)
+                   for index, degree in enumerate(self.degrees)]
+        return super(EasyMKLClassifier, self).fit(kernels, y[train_indices])
+
+    def predict(self, X, example_indices=None, views_indices=None):
+        example_indices, views_indices = get_examples_views_indices(X,
+                                                                  example_indices,
+                                                                  views_indices)
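+        # Recompute the per-view polynomial kernels on the prediction examples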
+        kernels = [
+            pairwise.homogeneous_polynomial_kernel(X.get_V(views_indices[index],
+                                                           example_indices),
+                                                   degree=degree)
+            for index, degree in enumerate(self.degrees)]
+        return super(EasyMKLClassifier, self).predict(kernels)
+
+
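+# The two helpers below let the randomized hyper-parameter search draw the kernel
+# degrees: DegreesGenerator.rvs() returns a DegreesDistribution, from which fit()
+# later draws one degree per view once the number of views is known.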
+class DegreesGenerator:
+
+    def __init__(self):
+        pass
+
+    def rvs(self, random_state=None):
+        return DegreesDistribution(seed=random_state.randint(2 ** 31 - 1))
+
+
+class DegreesDistribution:
+
+    def __init__(self, seed=42):
+        self.random_state=np.random.RandomState(seed)
+
+    def draw(self, nb_view):
+        return self.random_state.randint(low=1,high=10,size=nb_view)
diff --git a/multiview_platform/mono_multi_view_classifiers/utils/hyper_parameter_search.py b/multiview_platform/mono_multi_view_classifiers/utils/hyper_parameter_search.py
index 8e3c104268599f2a528e48244ca4aab50eea9f05..d1ad6272f0c55dad2c14a54972b0388f9415dbb4 100644
--- a/multiview_platform/mono_multi_view_classifiers/utils/hyper_parameter_search.py
+++ b/multiview_platform/mono_multi_view_classifiers/utils/hyper_parameter_search.py
@@ -3,7 +3,7 @@ import sys
 
 import matplotlib.pyplot as plt
 import numpy as np
-from scipy.stats import  randint
+from scipy.stats import randint, uniform
 from sklearn.model_selection import RandomizedSearchCV
 
 
@@ -38,6 +38,24 @@ def grid_search(dataset, classifier_name, views_indices=None, k_folds=None, n_it
     """Used to perfom gridsearch on the classifiers"""
     pass
 
+class CustomUniform:
+    """Used as a distribution returning a float between loc and loc + scale..
+        It can be used with a multiplier agrument to be able to perform more complex generation
+        for example 10 e -(float)"""
+
+    def __init__(self, loc=0, state=1, multiplier=""):
+        self.uniform = uniform(loc, state)
+        self.multiplier = multiplier
+
+    def rvs(self, random_state=None):
+        unif = self.uniform.rvs(random_state=random_state)
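+        # With multiplier='e-', a draw u in [loc, loc + state] is mapped to 10 ** -u,
+        # e.g. loc=0, state=3 yields values in [1e-3, 1]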
+        if self.multiplier == 'e-':
+            return 10 ** -unif
+        else:
+            return unif
+
 class CustomRandint:
     """Used as a distribution returning a integer between low and high-1.
     It can be used with a multiplier agrument to be able to perform more complex generation
diff --git a/requirements.txt b/requirements.txt
index a99989254c35ff3dfa00bfb97997f888a40caa89..3899b3fa4a24155369d0b13c09a4f8639428e4c1 100755
--- a/requirements.txt
+++ b/requirements.txt
@@ -14,4 +14,5 @@ pandas==0.23.3
 m2r==0.2.1
 docutils==0.12
 pyyaml==3.12
-cvxopt==1.2.0
\ No newline at end of file
+cvxopt==1.2.0
+-e git+https://github.com/IvanoLauriola/MKLpy.git#egg=MKLpy
\ No newline at end of file
diff --git a/setup.py b/setup.py
index 3bc02d0f1fa3a6d2d666ac213262ba4f34a77a6d..f715ce8708c8a718f3229a381ddd929108cc226e 100644
--- a/setup.py
+++ b/setup.py
@@ -55,7 +55,7 @@ def setup_package():
     install_requires=['numpy>=1.16', 'scipy>=0.16','scikit-learn==0.19',
                       'matplotlib', 'h5py', 'joblib',
                       'pandas', 'm2r', 'pyyaml', 'pyscm @ git+https://github.com/aldro61/pyscm',
-                      'cvxopt'],
+                      'cvxopt', 'MKLpy @ git+https://github.com/IvanoLauriola/MKLpy'],
 
     # Il est d'usage de mettre quelques metadata à propos de sa lib
     # Pour que les robots puissent facilement la classer.