diff --git a/doc/tutorial/auto_examples/auto_examples_jupyter.zip b/doc/tutorial/auto_examples/auto_examples_jupyter.zip
index 406a6bbfecc33827931c4bf3d2a95aea32d71c28..a30ce0bbec3aa32afbeda1d6943444a8219a4ad9 100644
Binary files a/doc/tutorial/auto_examples/auto_examples_jupyter.zip and b/doc/tutorial/auto_examples/auto_examples_jupyter.zip differ
diff --git a/doc/tutorial/auto_examples/auto_examples_python.zip b/doc/tutorial/auto_examples/auto_examples_python.zip
index f97869f58a3c70107f1a8109c4f872df94909a07..455de39223fbe7205085e61edfda8b45f4bf4635 100644
Binary files a/doc/tutorial/auto_examples/auto_examples_python.zip and b/doc/tutorial/auto_examples/auto_examples_python.zip differ
diff --git a/doc/tutorial/auto_examples/index.rst b/doc/tutorial/auto_examples/index.rst
index 9faaee6ac2a401ff41f1c5f965aeafae4654aea1..b2d29ff53e20f07f3a7d6efe81c7f9f99c6e23e5 100644
--- a/doc/tutorial/auto_examples/index.rst
+++ b/doc/tutorial/auto_examples/index.rst
@@ -172,8 +172,8 @@ The following toy examples illustrate how the MVML algorithm
 .. _sphx_glr_tutorial_auto_examples_usecase:
 
 
-Use Case Examples
------------------
+Use Case Examples on Digit
+--------------------------
 
 The following toy examples illustrate how the multimodal as usecase on digit  dataset of sklearn
 
@@ -262,7 +262,7 @@ The following toy examples illustrate how the multimodal as usecase on digit  da
 
 .. raw:: html
 
-    <div class="sphx-glr-thumbcontainer" tooltip="Use Case MKL">
+    <div class="sphx-glr-thumbcontainer" tooltip="Use Case MKL on digit">
 
 .. only:: html
 
diff --git a/doc/tutorial/auto_examples/usecase/images/sphx_glr_plot_usecase_exampleMKL_001.png b/doc/tutorial/auto_examples/usecase/images/sphx_glr_plot_usecase_exampleMKL_001.png
index 8bd981e23ea21bfb199466d3e5c38026085be418..7a4af0bbb0335a7e7faf2cd04239b98efc2f6c4d 100644
Binary files a/doc/tutorial/auto_examples/usecase/images/sphx_glr_plot_usecase_exampleMKL_001.png and b/doc/tutorial/auto_examples/usecase/images/sphx_glr_plot_usecase_exampleMKL_001.png differ
diff --git a/doc/tutorial/auto_examples/usecase/images/sphx_glr_plot_usecase_exampleMVML_001.png b/doc/tutorial/auto_examples/usecase/images/sphx_glr_plot_usecase_exampleMVML_001.png
index d09ae25fafd53d988038e519f2279c06bba29604..087d4b634111b639b717f38ce1d964eabd236313 100644
Binary files a/doc/tutorial/auto_examples/usecase/images/sphx_glr_plot_usecase_exampleMVML_001.png and b/doc/tutorial/auto_examples/usecase/images/sphx_glr_plot_usecase_exampleMVML_001.png differ
diff --git a/doc/tutorial/auto_examples/usecase/images/sphx_glr_plot_usecase_exampleMuCuBo_001.png b/doc/tutorial/auto_examples/usecase/images/sphx_glr_plot_usecase_exampleMuCuBo_001.png
index f1cc7838da9a363efee549196cca618f2d3053b6..ed1278a49547cc866bec9a86ab0ffa2ded9bdce4 100644
Binary files a/doc/tutorial/auto_examples/usecase/images/sphx_glr_plot_usecase_exampleMuCuBo_001.png and b/doc/tutorial/auto_examples/usecase/images/sphx_glr_plot_usecase_exampleMuCuBo_001.png differ
diff --git a/doc/tutorial/auto_examples/usecase/images/sphx_glr_plot_usecase_exampleMumBo_001.png b/doc/tutorial/auto_examples/usecase/images/sphx_glr_plot_usecase_exampleMumBo_001.png
index 3d12ae79d5b7ef4dd0edae3f9c18681f2a54483d..942a30189fe22fb7d9d5759be05afb4353a80be7 100644
Binary files a/doc/tutorial/auto_examples/usecase/images/sphx_glr_plot_usecase_exampleMumBo_001.png and b/doc/tutorial/auto_examples/usecase/images/sphx_glr_plot_usecase_exampleMumBo_001.png differ
diff --git a/doc/tutorial/auto_examples/usecase/images/thumb/sphx_glr_plot_usecase_exampleMKL_thumb.png b/doc/tutorial/auto_examples/usecase/images/thumb/sphx_glr_plot_usecase_exampleMKL_thumb.png
index 263e55b4eb23246c463c2c136c48add3d89f7415..585e9a3c73ec9b68bd0ee01c4862094392a1f44e 100644
Binary files a/doc/tutorial/auto_examples/usecase/images/thumb/sphx_glr_plot_usecase_exampleMKL_thumb.png and b/doc/tutorial/auto_examples/usecase/images/thumb/sphx_glr_plot_usecase_exampleMKL_thumb.png differ
diff --git a/doc/tutorial/auto_examples/usecase/images/thumb/sphx_glr_plot_usecase_exampleMVML_thumb.png b/doc/tutorial/auto_examples/usecase/images/thumb/sphx_glr_plot_usecase_exampleMVML_thumb.png
index d97815a1e507bb0d37767c4760cf5c9cead64db9..accc9a2c7c21ff9cc54ac3465f86660bc4a9d1e2 100644
Binary files a/doc/tutorial/auto_examples/usecase/images/thumb/sphx_glr_plot_usecase_exampleMVML_thumb.png and b/doc/tutorial/auto_examples/usecase/images/thumb/sphx_glr_plot_usecase_exampleMVML_thumb.png differ
diff --git a/doc/tutorial/auto_examples/usecase/images/thumb/sphx_glr_plot_usecase_exampleMuCuBo_thumb.png b/doc/tutorial/auto_examples/usecase/images/thumb/sphx_glr_plot_usecase_exampleMuCuBo_thumb.png
index 50f786f3d3d3fe9d31f6fe08871304f10d21a31f..0e32449a5d1fbacf336d1efec1f8f99ae5ca6ad0 100644
Binary files a/doc/tutorial/auto_examples/usecase/images/thumb/sphx_glr_plot_usecase_exampleMuCuBo_thumb.png and b/doc/tutorial/auto_examples/usecase/images/thumb/sphx_glr_plot_usecase_exampleMuCuBo_thumb.png differ
diff --git a/doc/tutorial/auto_examples/usecase/images/thumb/sphx_glr_plot_usecase_exampleMumBo_thumb.png b/doc/tutorial/auto_examples/usecase/images/thumb/sphx_glr_plot_usecase_exampleMumBo_thumb.png
index 8bb42fc8451ce7220a8096162028acb6bcb564c3..9e485cc8de7adbeb12accbb2a719bd66d9188bf8 100644
Binary files a/doc/tutorial/auto_examples/usecase/images/thumb/sphx_glr_plot_usecase_exampleMumBo_thumb.png and b/doc/tutorial/auto_examples/usecase/images/thumb/sphx_glr_plot_usecase_exampleMumBo_thumb.png differ
diff --git a/doc/tutorial/auto_examples/usecase/plot_usecase_exampleMKL.ipynb b/doc/tutorial/auto_examples/usecase/plot_usecase_exampleMKL.ipynb
index 474b7854727ecc6c252333d1bd2e52ef88ae7502..e3fb15092c88f3346176a62b80f03ca65e485569 100644
--- a/doc/tutorial/auto_examples/usecase/plot_usecase_exampleMKL.ipynb
+++ b/doc/tutorial/auto_examples/usecase/plot_usecase_exampleMKL.ipynb
@@ -15,7 +15,7 @@
       "cell_type": "markdown",
       "metadata": {},
       "source": [
-        "\n# Use Case MKL\n\nUse case for all classifier of multimodallearn MKL\nmulti class digit from sklearn, multivue\n - vue 0 digit data (color of sklearn)\n - vue 1 gradiant of image in first direction\n - vue 2 gradiant of image in second direction\n\n\n"
+        "\n# Use Case MKL on digit\n\nUse case for all classifier of multimodallearn MKL\nmulti class digit from sklearn, multivue\n - vue 0 digit data (color of sklearn)\n - vue 1 gradiant of image in first direction\n - vue 2 gradiant of image in second direction\n\n\n"
       ]
     },
     {
@@ -26,7 +26,7 @@
       },
       "outputs": [],
       "source": [
-        "from __future__ import absolute_import\nimport numpy as np\nimport matplotlib.pyplot as plt\nfrom sklearn.multiclass import OneVsOneClassifier\nfrom sklearn.model_selection import train_test_split\nfrom sklearn.tree import DecisionTreeClassifier\nfrom multimodal.datasets.base import load_dict, save_dict\nfrom multimodal.tests.data.get_dataset_path import get_dataset_path\nfrom multimodal.datasets.data_sample import MultiModalArray\nfrom multimodal.kernels.mvml import MVML\nfrom multimodal.kernels.lpMKL import MKL\n\nfrom usecase_function import plot_subplot\n\nif __name__ == '__main__':\n    # file = get_dataset_path(\"digit_histogram.npy\")\n    file = get_dataset_path(\"digit_col_grad.npy\")\n    y = np.load(get_dataset_path(\"digit_y.npy\"))\n    base_estimator = DecisionTreeClassifier(max_depth=4)\n    dic_digit = load_dict(file)\n    XX =MultiModalArray(dic_digit)\n    X_train, X_test, y_train, y_test = train_test_split(XX, y)\n\n    est4 = OneVsOneClassifier(MKL(lmbda=0.1, nystrom_param=0.2)).fit(X_train, y_train)\n    y_pred4 = est4.predict(X_test)\n    y_pred44 = est4.predict(X_train)\n    print(\"result of MKL on digit with oneversone\")\n    result4 = np.mean(y_pred4.ravel() == y_test.ravel()) * 100\n    print(result4)\n\n    fig = plt.figure(figsize=(12., 11.))\n    fig.suptitle(\"MKL : result\" + str(result4), fontsize=16)\n    plot_subplot(X_train, y_train, y_pred44  ,0, (4, 1, 1), \"train vue 0\" )\n    plot_subplot(X_test, y_test,y_pred4 , 0, (4, 1, 2), \"test vue 0\" )\n    plot_subplot(X_test, y_test, y_pred4,1, (4, 1, 3), \"test vue 1\" )\n    plot_subplot(X_test, y_test,y_pred4, 2, (4, 1, 4), \"test vue 2\" )\n    # plt.legend()\n    plt.show()"
+        "import numpy as np\nimport matplotlib.pyplot as plt\nfrom sklearn.multiclass import OneVsOneClassifier\nfrom sklearn.model_selection import train_test_split\nfrom sklearn.tree import DecisionTreeClassifier\nfrom multimodal.datasets.base import load_dict, save_dict\nfrom multimodal.tests.data.get_dataset_path import get_dataset_path\nfrom multimodal.datasets.data_sample import MultiModalArray\nfrom multimodal.kernels.mvml import MVML\nfrom multimodal.kernels.lpMKL import MKL\n\nimport numpy as np\nimport matplotlib.pyplot as plt\nimport matplotlib._color_data as mcd\n\n\ndef plot_subplot(X, Y, Y_pred, vue, subplot, title):\n    cn = mcd.CSS4_COLORS\n    classes = np.unique(Y)\n    n_classes = len(np.unique(Y))\n    axs = plt.subplot(subplot[0],subplot[1],subplot[2])\n    axs.set_title(title)\n    #plt.scatter(X._extract_view(vue), X._extract_view(vue), s=40, c='gray',\n    #            edgecolors=(0, 0, 0))\n    for index, k in zip(range(n_classes), cn.keys()):\n         Y_class, = np.where(Y==classes[index])\n         Y_class_pred = np.intersect1d(np.where(Y_pred==classes[index])[0], np.where(Y_pred==Y)[0])\n         plt.scatter(X._extract_view(vue)[Y_class],\n                     X._extract_view(vue)[Y_class],\n                     s=40, c=cn[k], edgecolors='blue', linewidths=2, label=\"class real class: \"+str(index)) #\n         plt.scatter(X._extract_view(vue)[Y_class_pred],\n                     X._extract_view(vue)[Y_class_pred],\n                     s=160, edgecolors='orange', linewidths=2, label=\"class prediction: \"+str(index))\n\n\nif __name__ == '__main__':\n    # file = get_dataset_path(\"digit_histogram.npy\")\n    file = get_dataset_path(\"digit_col_grad.npy\")\n    y = np.load(get_dataset_path(\"digit_y.npy\"))\n    base_estimator = DecisionTreeClassifier(max_depth=4)\n    dic_digit = load_dict(file)\n    XX =MultiModalArray(dic_digit)\n    X_train, X_test, y_train, y_test = train_test_split(XX, y)\n\n    est4 = OneVsOneClassifier(MKL(lmbda=0.1, nystrom_param=0.2)).fit(X_train, y_train)\n    y_pred4 = est4.predict(X_test)\n    y_pred44 = est4.predict(X_train)\n    print(\"result of MKL on digit with oneversone\")\n    result4 = np.mean(y_pred4.ravel() == y_test.ravel()) * 100\n    print(result4)\n\n    fig = plt.figure(figsize=(12., 11.))\n    fig.suptitle(\"MKL : result\" + str(result4), fontsize=16)\n    plot_subplot(X_train, y_train, y_pred44  ,0, (4, 1, 1), \"train vue 0 color\" )\n    plot_subplot(X_test, y_test,y_pred4 , 0, (4, 1, 2), \"test vue 0 color\" )\n    plot_subplot(X_test, y_test, y_pred4,1, (4, 1, 3), \"test vue 1 gradiant 0\" )\n    plot_subplot(X_test, y_test,y_pred4, 2, (4, 1, 4), \"test vue 2 gradiant 1\" )\n    # plt.legend()\n    plt.show()"
       ]
     }
   ],
diff --git a/doc/tutorial/auto_examples/usecase/plot_usecase_exampleMKL.py b/doc/tutorial/auto_examples/usecase/plot_usecase_exampleMKL.py
index 77ecb7653b892481c80cd28373ccc7cf8bb9d009..27d6fbf446dd4ebbcc065836401d8cc387770b1e 100644
--- a/doc/tutorial/auto_examples/usecase/plot_usecase_exampleMKL.py
+++ b/doc/tutorial/auto_examples/usecase/plot_usecase_exampleMKL.py
@@ -1,8 +1,8 @@
 # -*- coding: utf-8 -*-
 """
-============
-Use Case MKL
-============
+=====================
+Use Case MKL on digit
+=====================
 Use case for all classifier of multimodallearn MKL
 multi class digit from sklearn, multivue
  - vue 0 digit data (color of sklearn)
@@ -10,7 +10,7 @@ multi class digit from sklearn, multivue
  - vue 2 gradiant of image in second direction
 
 """
-from __future__ import absolute_import
+
 import numpy as np
 import matplotlib.pyplot as plt
 from sklearn.multiclass import OneVsOneClassifier
@@ -22,7 +22,29 @@ from multimodal.datasets.data_sample import MultiModalArray
 from multimodal.kernels.mvml import MVML
 from multimodal.kernels.lpMKL import MKL
 
-from usecase_function import plot_subplot
+import numpy as np
+import matplotlib.pyplot as plt
+import matplotlib._color_data as mcd
+
+
+def plot_subplot(X, Y, Y_pred, vue, subplot, title):
+    cn = mcd.CSS4_COLORS
+    classes = np.unique(Y)
+    n_classes = len(np.unique(Y))
+    axs = plt.subplot(subplot[0],subplot[1],subplot[2])
+    axs.set_title(title)
+    #plt.scatter(X._extract_view(vue), X._extract_view(vue), s=40, c='gray',
+    #            edgecolors=(0, 0, 0))
+    for index, k in zip(range(n_classes), cn.keys()):
+         Y_class, = np.where(Y==classes[index])
+         Y_class_pred = np.intersect1d(np.where(Y_pred==classes[index])[0], np.where(Y_pred==Y)[0])
+         plt.scatter(X._extract_view(vue)[Y_class],
+                     X._extract_view(vue)[Y_class],
+                     s=40, c=cn[k], edgecolors='blue', linewidths=2, label="class real class: "+str(index)) #
+         plt.scatter(X._extract_view(vue)[Y_class_pred],
+                     X._extract_view(vue)[Y_class_pred],
+                     s=160, edgecolors='orange', linewidths=2, label="class prediction: "+str(index))
+
 
 if __name__ == '__main__':
     # file = get_dataset_path("digit_histogram.npy")
@@ -42,10 +64,10 @@ if __name__ == '__main__':
 
     fig = plt.figure(figsize=(12., 11.))
     fig.suptitle("MKL : result" + str(result4), fontsize=16)
-    plot_subplot(X_train, y_train, y_pred44  ,0, (4, 1, 1), "train vue 0" )
-    plot_subplot(X_test, y_test,y_pred4 , 0, (4, 1, 2), "test vue 0" )
-    plot_subplot(X_test, y_test, y_pred4,1, (4, 1, 3), "test vue 1" )
-    plot_subplot(X_test, y_test,y_pred4, 2, (4, 1, 4), "test vue 2" )
+    plot_subplot(X_train, y_train, y_pred44  ,0, (4, 1, 1), "train vue 0 color" )
+    plot_subplot(X_test, y_test,y_pred4 , 0, (4, 1, 2), "test vue 0 color" )
+    plot_subplot(X_test, y_test, y_pred4,1, (4, 1, 3), "test vue 1 gradiant 0" )
+    plot_subplot(X_test, y_test,y_pred4, 2, (4, 1, 4), "test vue 2 gradiant 1" )
     # plt.legend()
     plt.show()
 
diff --git a/doc/tutorial/auto_examples/usecase/plot_usecase_exampleMKL.py.md5 b/doc/tutorial/auto_examples/usecase/plot_usecase_exampleMKL.py.md5
index ebe72a235c6cada59357299814b8f422fe5dbfc3..eac7aeeb35124c4b624df10115ac5d442cc77a2a 100644
--- a/doc/tutorial/auto_examples/usecase/plot_usecase_exampleMKL.py.md5
+++ b/doc/tutorial/auto_examples/usecase/plot_usecase_exampleMKL.py.md5
@@ -1 +1 @@
-4f807359096f5f5b3a7ee6b3ea540b91
\ No newline at end of file
+f7b5c3f0fd24e4628f03aa7019eea376
\ No newline at end of file
diff --git a/doc/tutorial/auto_examples/usecase/plot_usecase_exampleMKL.rst b/doc/tutorial/auto_examples/usecase/plot_usecase_exampleMKL.rst
index f21e3d77caf22a3a5548c048f40a97b8fbf3d71c..8bde3b61aa649683c405f98ed2af1df39edcfef3 100644
--- a/doc/tutorial/auto_examples/usecase/plot_usecase_exampleMKL.rst
+++ b/doc/tutorial/auto_examples/usecase/plot_usecase_exampleMKL.rst
@@ -7,9 +7,9 @@
 .. _sphx_glr_tutorial_auto_examples_usecase_plot_usecase_exampleMKL.py:
 
 
-============
-Use Case MKL
-============
+=====================
+Use Case MKL on digit
+=====================
 Use case for all classifier of multimodallearn MKL
 multi class digit from sklearn, multivue
  - vue 0 digit data (color of sklearn)
@@ -30,8 +30,8 @@ multi class digit from sklearn, multivue
  .. code-block:: none
 
     result of MKL on digit with oneversone
-    98.44444444444444
-    /home/dominique/projets/ANR-Lives/scikit-multimodallearn/examples/usecase/plot_usecase_exampleMKL.py:50: UserWarning: Matplotlib is currently using agg, which is a non-GUI backend, so cannot show the figure.
+    96.88888888888889
+    /home/dominique/projets/ANR-Lives/scikit-multimodallearn/examples/usecase/plot_usecase_exampleMKL.py:72: UserWarning: Matplotlib is currently using agg, which is a non-GUI backend, so cannot show the figure.
       plt.show()
 
 
@@ -44,7 +44,7 @@ multi class digit from sklearn, multivue
 
 .. code-block:: default
 
-    from __future__ import absolute_import
+
     import numpy as np
     import matplotlib.pyplot as plt
     from sklearn.multiclass import OneVsOneClassifier
@@ -56,7 +56,29 @@ multi class digit from sklearn, multivue
     from multimodal.kernels.mvml import MVML
     from multimodal.kernels.lpMKL import MKL
 
-    from usecase_function import plot_subplot
+    import numpy as np
+    import matplotlib.pyplot as plt
+    import matplotlib._color_data as mcd
+
+
+    def plot_subplot(X, Y, Y_pred, vue, subplot, title):
+        cn = mcd.CSS4_COLORS
+        classes = np.unique(Y)
+        n_classes = len(np.unique(Y))
+        axs = plt.subplot(subplot[0],subplot[1],subplot[2])
+        axs.set_title(title)
+        #plt.scatter(X._extract_view(vue), X._extract_view(vue), s=40, c='gray',
+        #            edgecolors=(0, 0, 0))
+        for index, k in zip(range(n_classes), cn.keys()):
+             Y_class, = np.where(Y==classes[index])
+             Y_class_pred = np.intersect1d(np.where(Y_pred==classes[index])[0], np.where(Y_pred==Y)[0])
+             plt.scatter(X._extract_view(vue)[Y_class],
+                         X._extract_view(vue)[Y_class],
+                         s=40, c=cn[k], edgecolors='blue', linewidths=2, label="class real class: "+str(index)) #
+             plt.scatter(X._extract_view(vue)[Y_class_pred],
+                         X._extract_view(vue)[Y_class_pred],
+                         s=160, edgecolors='orange', linewidths=2, label="class prediction: "+str(index))
+
 
     if __name__ == '__main__':
         # file = get_dataset_path("digit_histogram.npy")
@@ -76,10 +98,10 @@ multi class digit from sklearn, multivue
 
         fig = plt.figure(figsize=(12., 11.))
         fig.suptitle("MKL : result" + str(result4), fontsize=16)
-        plot_subplot(X_train, y_train, y_pred44  ,0, (4, 1, 1), "train vue 0" )
-        plot_subplot(X_test, y_test,y_pred4 , 0, (4, 1, 2), "test vue 0" )
-        plot_subplot(X_test, y_test, y_pred4,1, (4, 1, 3), "test vue 1" )
-        plot_subplot(X_test, y_test,y_pred4, 2, (4, 1, 4), "test vue 2" )
+        plot_subplot(X_train, y_train, y_pred44  ,0, (4, 1, 1), "train vue 0 color" )
+        plot_subplot(X_test, y_test,y_pred4 , 0, (4, 1, 2), "test vue 0 color" )
+        plot_subplot(X_test, y_test, y_pred4,1, (4, 1, 3), "test vue 1 gradiant 0" )
+        plot_subplot(X_test, y_test,y_pred4, 2, (4, 1, 4), "test vue 2 gradiant 1" )
         # plt.legend()
         plt.show()
 
@@ -87,7 +109,7 @@ multi class digit from sklearn, multivue
 
 .. rst-class:: sphx-glr-timing
 
-   **Total running time of the script:** ( 0 minutes  12.697 seconds)
+   **Total running time of the script:** ( 0 minutes  20.457 seconds)
 
 
 .. _sphx_glr_download_tutorial_auto_examples_usecase_plot_usecase_exampleMKL.py:
diff --git a/doc/tutorial/auto_examples/usecase/plot_usecase_exampleMKL_codeobj.pickle b/doc/tutorial/auto_examples/usecase/plot_usecase_exampleMKL_codeobj.pickle
index 7d88fc9a5cfd32c1e3c34ae26b6fe0d4c51a39e0..a96580e139407f646205c7b6906300470ac0e7b7 100644
Binary files a/doc/tutorial/auto_examples/usecase/plot_usecase_exampleMKL_codeobj.pickle and b/doc/tutorial/auto_examples/usecase/plot_usecase_exampleMKL_codeobj.pickle differ
diff --git a/doc/tutorial/auto_examples/usecase/plot_usecase_exampleMVML.ipynb b/doc/tutorial/auto_examples/usecase/plot_usecase_exampleMVML.ipynb
index c8fcc01848df8fb831dc15d428dc29a45242aeb2..d6a3734a7bdac97290808f4e7f53804756e90b25 100644
--- a/doc/tutorial/auto_examples/usecase/plot_usecase_exampleMVML.ipynb
+++ b/doc/tutorial/auto_examples/usecase/plot_usecase_exampleMVML.ipynb
@@ -15,7 +15,7 @@
       "cell_type": "markdown",
       "metadata": {},
       "source": [
-        "\n# Use Case of MVML\n\nUse case for all classifier of multimodallearn MVML\n\nmulti class digit from sklearn, multivue\n - vue 0 digit data (color of sklearn)\n - vue 1 gradiant of image in first direction\n - vue 2 gradiant of image in second direction\n\n\n"
+        "\n# Use Case of MVML on digit\n\nUse case for all classifier of multimodallearn MVML\n\nmulti class digit from sklearn, multivue\n - vue 0 digit data (color of sklearn)\n - vue 1 gradiant of image in first direction\n - vue 2 gradiant of image in second direction\n\n\n"
       ]
     },
     {
@@ -26,7 +26,7 @@
       },
       "outputs": [],
       "source": [
-        "from __future__ import absolute_import\nimport numpy as np\nimport matplotlib.pyplot as plt\nfrom sklearn.multiclass import OneVsOneClassifier\nfrom sklearn.model_selection import train_test_split\nfrom multimodal.datasets.base import load_dict, save_dict\nfrom multimodal.tests.data.get_dataset_path import get_dataset_path\nfrom multimodal.datasets.data_sample import MultiModalArray\nfrom multimodal.kernels.mvml import MVML\nfrom usecase_function import plot_subplot\n\n\nif __name__ == '__main__':\n    # file = get_dataset_path(\"digit_histogram.npy\")\n    file = get_dataset_path(\"digit_col_grad.npy\")\n    y = np.load(get_dataset_path(\"digit_y.npy\"))\n    dic_digit = load_dict(file)\n    XX =MultiModalArray(dic_digit)\n    X_train, X_test, y_train, y_test = train_test_split(XX, y)\n    est1 = OneVsOneClassifier(MVML(lmbda=0.1, eta=1, nystrom_param=0.2)).fit(X_train, y_train)\n    y_pred1 = est1.predict(X_test)\n    y_pred11 = est1.predict(X_train)\n    print(\"result of MVML on digit with oneversone\")\n    result1 = np.mean(y_pred1.ravel() == y_test.ravel()) * 100\n    print(result1)\n\n    fig = plt.figure(figsize=(12., 11.))\n    fig.suptitle(\"MVML: result\" + str(result1), fontsize=16)\n    plot_subplot(X_train, y_train, y_pred11\n                 , 0, (4, 1, 1), \"train vue 0\" )\n    plot_subplot(X_test, y_test,y_pred1, 0, (4, 1, 2), \"test vue 0\" )\n    plot_subplot(X_test, y_test, y_pred1, 1, (4, 1, 3), \"test vue 1\" )\n    plot_subplot(X_test, y_test,y_pred1, 2, (4, 1, 4), \"test vue 2\" )\n    #plt.legend()\n    plt.show()"
+        "import numpy as np\nimport matplotlib.pyplot as plt\nfrom sklearn.multiclass import OneVsOneClassifier\nfrom sklearn.model_selection import train_test_split\nfrom multimodal.datasets.base import load_dict, save_dict\nfrom multimodal.tests.data.get_dataset_path import get_dataset_path\nfrom multimodal.datasets.data_sample import MultiModalArray\nfrom multimodal.kernels.mvml import MVML\nimport numpy as np\nimport matplotlib.pyplot as plt\nimport matplotlib._color_data as mcd\n\n\ndef plot_subplot(X, Y, Y_pred, vue, subplot, title):\n    cn = mcd.CSS4_COLORS\n    classes = np.unique(Y)\n    n_classes = len(np.unique(Y))\n    axs = plt.subplot(subplot[0],subplot[1],subplot[2])\n    axs.set_title(title)\n    #plt.scatter(X._extract_view(vue), X._extract_view(vue), s=40, c='gray',\n    #            edgecolors=(0, 0, 0))\n    for index, k in zip(range(n_classes), cn.keys()):\n         Y_class, = np.where(Y==classes[index])\n         Y_class_pred = np.intersect1d(np.where(Y_pred==classes[index])[0], np.where(Y_pred==Y)[0])\n         plt.scatter(X._extract_view(vue)[Y_class],\n                     X._extract_view(vue)[Y_class],\n                     s=40, c=cn[k], edgecolors='blue', linewidths=2, label=\"class real class: \"+str(index)) #\n         plt.scatter(X._extract_view(vue)[Y_class_pred],\n                     X._extract_view(vue)[Y_class_pred],\n                     s=160, edgecolors='orange', linewidths=2, label=\"class prediction: \"+str(index))\n\n\n\nif __name__ == '__main__':\n    # file = get_dataset_path(\"digit_histogram.npy\")\n    file = get_dataset_path(\"digit_col_grad.npy\")\n    y = np.load(get_dataset_path(\"digit_y.npy\"))\n    dic_digit = load_dict(file)\n    XX =MultiModalArray(dic_digit)\n    X_train, X_test, y_train, y_test = train_test_split(XX, y)\n    est1 = OneVsOneClassifier(MVML(lmbda=0.1, eta=1, nystrom_param=0.2)).fit(X_train, y_train)\n    y_pred1 = est1.predict(X_test)\n    y_pred11 = est1.predict(X_train)\n    print(\"result of MVML on digit with oneversone\")\n    result1 = np.mean(y_pred1.ravel() == y_test.ravel()) * 100\n    print(result1)\n\n    fig = plt.figure(figsize=(12., 11.))\n    fig.suptitle(\"MVML: result\" + str(result1), fontsize=16)\n    plot_subplot(X_train, y_train, y_pred11\n                 , 0, (4, 1, 1), \"train vue 0 color\" )\n    plot_subplot(X_test, y_test,y_pred1, 0, (4, 1, 2), \"test vue 0 color\" )\n    plot_subplot(X_test, y_test, y_pred1, 1, (4, 1, 3), \"test vue 1 gradiant 0\" )\n    plot_subplot(X_test, y_test,y_pred1, 2, (4, 1, 4), \"test vue 2 gradiant 1\" )\n    #plt.legend()\n    plt.show()"
       ]
     }
   ],
diff --git a/doc/tutorial/auto_examples/usecase/plot_usecase_exampleMVML.py b/doc/tutorial/auto_examples/usecase/plot_usecase_exampleMVML.py
index 8f86a8bc16b69427b32cbaef4c028d3db3b813ac..98fc33815ab0b775421b3c6750779e376fc6e7ee 100644
--- a/doc/tutorial/auto_examples/usecase/plot_usecase_exampleMVML.py
+++ b/doc/tutorial/auto_examples/usecase/plot_usecase_exampleMVML.py
@@ -1,8 +1,8 @@
 # -*- coding: utf-8 -*-
 """
-================
-Use Case of MVML
-================
+=========================
+Use Case of MVML on digit
+=========================
 Use case for all classifier of multimodallearn MVML
 
 multi class digit from sklearn, multivue
@@ -11,7 +11,7 @@ multi class digit from sklearn, multivue
  - vue 2 gradiant of image in second direction
 
 """
-from __future__ import absolute_import
+
 import numpy as np
 import matplotlib.pyplot as plt
 from sklearn.multiclass import OneVsOneClassifier
@@ -20,7 +20,29 @@ from multimodal.datasets.base import load_dict, save_dict
 from multimodal.tests.data.get_dataset_path import get_dataset_path
 from multimodal.datasets.data_sample import MultiModalArray
 from multimodal.kernels.mvml import MVML
-from usecase_function import plot_subplot
+import numpy as np
+import matplotlib.pyplot as plt
+import matplotlib._color_data as mcd
+
+
+def plot_subplot(X, Y, Y_pred, vue, subplot, title):
+    cn = mcd.CSS4_COLORS
+    classes = np.unique(Y)
+    n_classes = len(np.unique(Y))
+    axs = plt.subplot(subplot[0],subplot[1],subplot[2])
+    axs.set_title(title)
+    #plt.scatter(X._extract_view(vue), X._extract_view(vue), s=40, c='gray',
+    #            edgecolors=(0, 0, 0))
+    for index, k in zip(range(n_classes), cn.keys()):
+         Y_class, = np.where(Y==classes[index])
+         Y_class_pred = np.intersect1d(np.where(Y_pred==classes[index])[0], np.where(Y_pred==Y)[0])
+         plt.scatter(X._extract_view(vue)[Y_class],
+                     X._extract_view(vue)[Y_class],
+                     s=40, c=cn[k], edgecolors='blue', linewidths=2, label="class real class: "+str(index)) #
+         plt.scatter(X._extract_view(vue)[Y_class_pred],
+                     X._extract_view(vue)[Y_class_pred],
+                     s=160, edgecolors='orange', linewidths=2, label="class prediction: "+str(index))
+
 
 
 if __name__ == '__main__':
@@ -40,10 +62,10 @@ if __name__ == '__main__':
     fig = plt.figure(figsize=(12., 11.))
     fig.suptitle("MVML: result" + str(result1), fontsize=16)
     plot_subplot(X_train, y_train, y_pred11
-                 , 0, (4, 1, 1), "train vue 0" )
-    plot_subplot(X_test, y_test,y_pred1, 0, (4, 1, 2), "test vue 0" )
-    plot_subplot(X_test, y_test, y_pred1, 1, (4, 1, 3), "test vue 1" )
-    plot_subplot(X_test, y_test,y_pred1, 2, (4, 1, 4), "test vue 2" )
+                 , 0, (4, 1, 1), "train vue 0 color" )
+    plot_subplot(X_test, y_test,y_pred1, 0, (4, 1, 2), "test vue 0 color" )
+    plot_subplot(X_test, y_test, y_pred1, 1, (4, 1, 3), "test vue 1 gradiant 0" )
+    plot_subplot(X_test, y_test,y_pred1, 2, (4, 1, 4), "test vue 2 gradiant 1" )
     #plt.legend()
     plt.show()
 
diff --git a/doc/tutorial/auto_examples/usecase/plot_usecase_exampleMVML.py.md5 b/doc/tutorial/auto_examples/usecase/plot_usecase_exampleMVML.py.md5
index e39917a4ca1ab661c8575934a47b6493e22245c6..ec045359ca81cc9fa94c470f5d121e6a4b162394 100644
--- a/doc/tutorial/auto_examples/usecase/plot_usecase_exampleMVML.py.md5
+++ b/doc/tutorial/auto_examples/usecase/plot_usecase_exampleMVML.py.md5
@@ -1 +1 @@
-b4b4bb03418027ba62ce77c251085cf5
\ No newline at end of file
+c401fe6af938dc5fef9c977303a2fdcf
\ No newline at end of file
diff --git a/doc/tutorial/auto_examples/usecase/plot_usecase_exampleMVML.rst b/doc/tutorial/auto_examples/usecase/plot_usecase_exampleMVML.rst
index a843b90af0ffa20f0f03a5d549701f2a9801f0c8..00713953e2f2644460cd1400a2aae4237b6dd181 100644
--- a/doc/tutorial/auto_examples/usecase/plot_usecase_exampleMVML.rst
+++ b/doc/tutorial/auto_examples/usecase/plot_usecase_exampleMVML.rst
@@ -7,9 +7,9 @@
 .. _sphx_glr_tutorial_auto_examples_usecase_plot_usecase_exampleMVML.py:
 
 
-================
-Use Case of MVML
-================
+=========================
+Use Case of MVML on digit
+=========================
 Use case for all classifier of multimodallearn MVML
 
 multi class digit from sklearn, multivue
@@ -30,9 +30,54 @@ multi class digit from sklearn, multivue
 
  .. code-block:: none
 
+    WARNING:root:warning appears during fit process{'precond_A': 4, 'precond_A_1': 5}
+    WARNING:root:warning appears during fit process{'precond_A': 4, 'precond_A_1': 2}
+    WARNING:root:warning appears during fit process{'precond_A': 4, 'precond_A_1': 5}
+    WARNING:root:warning appears during fit process{'precond_A': 4, 'precond_A_1': 4}
+    WARNING:root:warning appears during fit process{'precond_A': 4, 'precond_A_1': 6}
+    WARNING:root:warning appears during fit process{'precond_A': 4, 'precond_A_1': 5}
+    WARNING:root:warning appears during fit process{'precond_A': 5, 'precond_A_1': 5}
+    WARNING:root:warning appears during fit process{'precond_A': 4, 'precond_A_1': 6}
+    WARNING:root:warning appears during fit process{'precond_A': 4, 'precond_A_1': 5}
+    WARNING:root:warning appears during fit process{'precond_A': 3, 'precond_A_1': 4}
+    WARNING:root:warning appears during fit process{'precond_A': 4, 'precond_A_1': 4}
+    WARNING:root:warning appears during fit process{'precond_A': 3, 'precond_A_1': 5}
+    WARNING:root:warning appears during fit process{'precond_A': 4, 'precond_A_1': 4}
+    WARNING:root:warning appears during fit process{'precond_A': 4, 'precond_A_1': 6}
+    WARNING:root:warning appears during fit process{'precond_A': 4, 'precond_A_1': 6}
+    WARNING:root:warning appears during fit process{'precond_A': 4, 'precond_A_1': 5}
+    WARNING:root:warning appears during fit process{'precond_A': 4, 'precond_A_1': 6}
+    WARNING:root:warning appears during fit process{'precond_A': 4, 'precond_A_1': 4}
+    WARNING:root:warning appears during fit process{'precond_A': 4, 'precond_A_1': 1}
+    WARNING:root:warning appears during fit process{'precond_A': 4, 'precond_A_1': 2}
+    WARNING:root:warning appears during fit process{'precond_A': 4, 'precond_A_1': 2}
+    WARNING:root:warning appears during fit process{'precond_A': 4, 'precond_A_1': 1}
+    WARNING:root:warning appears during fit process{'precond_A': 4, 'precond_A_1': 2}
+    WARNING:root:warning appears during fit process{'precond_A': 4, 'precond_A_1': 5}
+    WARNING:root:warning appears during fit process{'precond_A': 3, 'precond_A_1': 5}
+    WARNING:root:warning appears during fit process{'precond_A': 3, 'precond_A_1': 4}
+    WARNING:root:warning appears during fit process{'precond_A': 4, 'precond_A_1': 5}
+    WARNING:root:warning appears during fit process{'precond_A': 3, 'precond_A_1': 4}
+    WARNING:root:warning appears during fit process{'precond_A': 4, 'precond_A_1': 6}
+    WARNING:root:warning appears during fit process{'precond_A': 4, 'precond_A_1': 5}
+    WARNING:root:warning appears during fit process{'precond_A': 4, 'precond_A_1': 5}
+    WARNING:root:warning appears during fit process{'precond_A': 3, 'precond_A_1': 4}
+    WARNING:root:warning appears during fit process{'precond_A': 4, 'precond_A_1': 5}
+    WARNING:root:warning appears during fit process{'precond_A': 3, 'precond_A_1': 2}
+    WARNING:root:warning appears during fit process{'precond_A': 5, 'precond_A_1': 5}
+    WARNING:root:warning appears during fit process{'precond_A': 4, 'precond_A_1': 5}
+    WARNING:root:warning appears during fit process{'precond_A': 5, 'precond_A_1': 5}
+    WARNING:root:warning appears during fit process{'precond_A': 4, 'precond_A_1': 4}
+    WARNING:root:warning appears during fit process{'precond_A': 4, 'precond_A_1': 6}
+    WARNING:root:warning appears during fit process{'precond_A': 4, 'precond_A_1': 4}
+    WARNING:root:warning appears during fit process{'precond_A': 4, 'precond_A_1': 4}
+    WARNING:root:warning appears during fit process{'precond_A': 4, 'precond_A_1': 6}
+    WARNING:root:warning appears during fit process{'precond_A': 4, 'precond_A_1': 1}
+    WARNING:root:warning appears during fit process{'precond_A': 4, 'precond_A_1': 5}
+    WARNING:root:warning appears during fit process{'precond_A': 4, 'precond_A_1': 5}
     result of MVML on digit with oneversone
-    98.88888888888889
-    /home/dominique/projets/ANR-Lives/scikit-multimodallearn/examples/usecase/plot_usecase_exampleMVML.py:48: UserWarning: Matplotlib is currently using agg, which is a non-GUI backend, so cannot show the figure.
+    96.88888888888889
+    /home/dominique/projets/ANR-Lives/scikit-multimodallearn/examples/usecase/plot_usecase_exampleMVML.py:70: UserWarning: Matplotlib is currently using agg, which is a non-GUI backend, so cannot show the figure.
       plt.show()
 
 
@@ -45,7 +90,7 @@ multi class digit from sklearn, multivue
 
 .. code-block:: default
 
-    from __future__ import absolute_import
+
     import numpy as np
     import matplotlib.pyplot as plt
     from sklearn.multiclass import OneVsOneClassifier
@@ -54,7 +99,29 @@ multi class digit from sklearn, multivue
     from multimodal.tests.data.get_dataset_path import get_dataset_path
     from multimodal.datasets.data_sample import MultiModalArray
     from multimodal.kernels.mvml import MVML
-    from usecase_function import plot_subplot
+    import numpy as np
+    import matplotlib.pyplot as plt
+    import matplotlib._color_data as mcd
+
+
+    def plot_subplot(X, Y, Y_pred, vue, subplot, title):
+        cn = mcd.CSS4_COLORS
+        classes = np.unique(Y)
+        n_classes = len(np.unique(Y))
+        axs = plt.subplot(subplot[0],subplot[1],subplot[2])
+        axs.set_title(title)
+        #plt.scatter(X._extract_view(vue), X._extract_view(vue), s=40, c='gray',
+        #            edgecolors=(0, 0, 0))
+        for index, k in zip(range(n_classes), cn.keys()):
+             Y_class, = np.where(Y==classes[index])
+             Y_class_pred = np.intersect1d(np.where(Y_pred==classes[index])[0], np.where(Y_pred==Y)[0])
+             plt.scatter(X._extract_view(vue)[Y_class],
+                         X._extract_view(vue)[Y_class],
+                         s=40, c=cn[k], edgecolors='blue', linewidths=2, label="class real class: "+str(index)) #
+             plt.scatter(X._extract_view(vue)[Y_class_pred],
+                         X._extract_view(vue)[Y_class_pred],
+                         s=160, edgecolors='orange', linewidths=2, label="class prediction: "+str(index))
+
 
 
     if __name__ == '__main__':
@@ -74,10 +141,10 @@ multi class digit from sklearn, multivue
         fig = plt.figure(figsize=(12., 11.))
         fig.suptitle("MVML: result" + str(result1), fontsize=16)
         plot_subplot(X_train, y_train, y_pred11
-                     , 0, (4, 1, 1), "train vue 0" )
-        plot_subplot(X_test, y_test,y_pred1, 0, (4, 1, 2), "test vue 0" )
-        plot_subplot(X_test, y_test, y_pred1, 1, (4, 1, 3), "test vue 1" )
-        plot_subplot(X_test, y_test,y_pred1, 2, (4, 1, 4), "test vue 2" )
+                     , 0, (4, 1, 1), "train vue 0 color" )
+        plot_subplot(X_test, y_test,y_pred1, 0, (4, 1, 2), "test vue 0 color" )
+        plot_subplot(X_test, y_test, y_pred1, 1, (4, 1, 3), "test vue 1 gradiant 0" )
+        plot_subplot(X_test, y_test,y_pred1, 2, (4, 1, 4), "test vue 2 gradiant 1" )
         #plt.legend()
         plt.show()
 
@@ -85,7 +152,7 @@ multi class digit from sklearn, multivue
 
 .. rst-class:: sphx-glr-timing
 
-   **Total running time of the script:** ( 0 minutes  39.921 seconds)
+   **Total running time of the script:** ( 1 minutes  14.485 seconds)
 
 
 .. _sphx_glr_download_tutorial_auto_examples_usecase_plot_usecase_exampleMVML.py:
diff --git a/doc/tutorial/auto_examples/usecase/plot_usecase_exampleMVML_codeobj.pickle b/doc/tutorial/auto_examples/usecase/plot_usecase_exampleMVML_codeobj.pickle
index 4dd76ec965cb59222faf72499944a67476d843c4..5a1229e5a310fecdf045fbb051596b5208a1faad 100644
Binary files a/doc/tutorial/auto_examples/usecase/plot_usecase_exampleMVML_codeobj.pickle and b/doc/tutorial/auto_examples/usecase/plot_usecase_exampleMVML_codeobj.pickle differ
diff --git a/doc/tutorial/auto_examples/usecase/plot_usecase_exampleMuCuBo.ipynb b/doc/tutorial/auto_examples/usecase/plot_usecase_exampleMuCuBo.ipynb
index fdd32ebf31a3a9ec1e48aa58507a47f5e2e39aef..3dcb32d6a0dae557755fd069528a869242cc6406 100644
--- a/doc/tutorial/auto_examples/usecase/plot_usecase_exampleMuCuBo.ipynb
+++ b/doc/tutorial/auto_examples/usecase/plot_usecase_exampleMuCuBo.ipynb
@@ -15,7 +15,7 @@
       "cell_type": "markdown",
       "metadata": {},
       "source": [
-        "\n# Use Case MuCumBo\n\nUse case for all classifier of multimodallearn  MuCumBo\n\nmulti class digit from sklearn, multivue\n - vue 0 digit data (color of sklearn)\n - vue 1 gradiant of image in first direction\n - vue 2 gradiant of image in second direction\n\n\n"
+        "\n# Use Case MuCumBo on digit\n\nUse case for all classifier of multimodallearn  MuCumBo\n\nmulti class digit from sklearn, multivue\n - vue 0 digit data (color of sklearn)\n - vue 1 gradiant of image in first direction\n - vue 2 gradiant of image in second direction\n\n\n"
       ]
     },
     {
@@ -26,7 +26,7 @@
       },
       "outputs": [],
       "source": [
-        "from __future__ import absolute_import\nimport numpy as np\nimport matplotlib.pyplot as plt\nfrom sklearn.model_selection import train_test_split\nfrom sklearn.tree import DecisionTreeClassifier\nfrom multimodal.datasets.base import load_dict, save_dict\nfrom multimodal.tests.data.get_dataset_path import get_dataset_path\nfrom multimodal.datasets.data_sample import MultiModalArray\n\nfrom multimodal.boosting.cumbo import MuCumboClassifier\nfrom usecase_function import plot_subplot\n\n\nif __name__ == '__main__':\n    # file = get_dataset_path(\"digit_histogram.npy\")\n    file = get_dataset_path(\"digit_col_grad.npy\")\n    y = np.load(get_dataset_path(\"digit_y.npy\"))\n    base_estimator = DecisionTreeClassifier(max_depth=4)\n    dic_digit = load_dict(file)\n    XX =MultiModalArray(dic_digit)\n    X_train, X_test, y_train, y_test = train_test_split(XX, y)\n    est3 = MuCumboClassifier(base_estimator=base_estimator).fit(X_train, y_train)\n    y_pred3 = est3.predict(X_test)\n    y_pred33 = est3.predict(X_train)\n    print(\"result of MuCumboClassifier on digit \")\n    result3 = np.mean(y_pred3.ravel() == y_test.ravel()) * 100\n    print(result3)\n\n    fig = plt.figure(figsize=(12., 11.))\n    fig.suptitle(\"MuCumbo: result\" + str(result3), fontsize=16)\n    plot_subplot(X_train, y_train, y_pred33  ,0, (4, 1, 1), \"train vue 0\" )\n    plot_subplot(X_test, y_test,y_pred3 , 0, (4, 1, 2), \"test vue 0\" )\n    plot_subplot(X_test, y_test, y_pred3,1, (4, 1, 3), \"test vue 1\" )\n    plot_subplot(X_test, y_test,y_pred3, 2, (4, 1, 4), \"test vue 2\" )\n    # plt.legend()\n    plt.show()"
+        "import numpy as np\nimport matplotlib.pyplot as plt\nfrom sklearn.model_selection import train_test_split\nfrom sklearn.tree import DecisionTreeClassifier\nfrom multimodal.datasets.base import load_dict, save_dict\nfrom multimodal.tests.data.get_dataset_path import get_dataset_path\nfrom multimodal.datasets.data_sample import MultiModalArray\n\nfrom multimodal.boosting.cumbo import MuCumboClassifier\nimport numpy as np\nimport matplotlib.pyplot as plt\nimport matplotlib._color_data as mcd\n\n\ndef plot_subplot(X, Y, Y_pred, vue, subplot, title):\n    cn = mcd.CSS4_COLORS\n    classes = np.unique(Y)\n    n_classes = len(np.unique(Y))\n    axs = plt.subplot(subplot[0],subplot[1],subplot[2])\n    axs.set_title(title)\n    #plt.scatter(X._extract_view(vue), X._extract_view(vue), s=40, c='gray',\n    #            edgecolors=(0, 0, 0))\n    for index, k in zip(range(n_classes), cn.keys()):\n         Y_class, = np.where(Y==classes[index])\n         Y_class_pred = np.intersect1d(np.where(Y_pred==classes[index])[0], np.where(Y_pred==Y)[0])\n         plt.scatter(X._extract_view(vue)[Y_class],\n                     X._extract_view(vue)[Y_class],\n                     s=40, c=cn[k], edgecolors='blue', linewidths=2, label=\"class real class: \"+str(index)) #\n         plt.scatter(X._extract_view(vue)[Y_class_pred],\n                     X._extract_view(vue)[Y_class_pred],\n                     s=160, edgecolors='orange', linewidths=2, label=\"class prediction: \"+str(index))\n\n\n\nif __name__ == '__main__':\n    # file = get_dataset_path(\"digit_histogram.npy\")\n    file = get_dataset_path(\"digit_col_grad.npy\")\n    y = np.load(get_dataset_path(\"digit_y.npy\"))\n    base_estimator = DecisionTreeClassifier(max_depth=4)\n    dic_digit = load_dict(file)\n    XX =MultiModalArray(dic_digit)\n    X_train, X_test, y_train, y_test = train_test_split(XX, y)\n    est3 = MuCumboClassifier(base_estimator=base_estimator).fit(X_train, y_train)\n    y_pred3 = est3.predict(X_test)\n    y_pred33 = est3.predict(X_train)\n    print(\"result of MuCumboClassifier on digit \")\n    result3 = np.mean(y_pred3.ravel() == y_test.ravel()) * 100\n    print(result3)\n\n    fig = plt.figure(figsize=(12., 11.))\n    fig.suptitle(\"MuCumbo: result\" + str(result3), fontsize=16)\n    plot_subplot(X_train, y_train, y_pred33  ,0, (4, 1, 1), \"train vue 0 color\" )\n    plot_subplot(X_test, y_test,y_pred3 , 0, (4, 1, 2), \"test vue 0 color\" )\n    plot_subplot(X_test, y_test, y_pred3,1, (4, 1, 3), \"test vue 1 gradiant 0\" )\n    plot_subplot(X_test, y_test,y_pred3, 2, (4, 1, 4), \"test vue 2 gradiant 1\" )\n    # plt.legend()\n    plt.show()"
       ]
     }
   ],
diff --git a/doc/tutorial/auto_examples/usecase/plot_usecase_exampleMuCuBo.py b/doc/tutorial/auto_examples/usecase/plot_usecase_exampleMuCuBo.py
index fa7490a51c1e3410b91e7a4cbcdb85557277fe15..f3d72540d8740b13aeea34b95c561447780b75e8 100644
--- a/doc/tutorial/auto_examples/usecase/plot_usecase_exampleMuCuBo.py
+++ b/doc/tutorial/auto_examples/usecase/plot_usecase_exampleMuCuBo.py
@@ -1,8 +1,8 @@
 # -*- coding: utf-8 -*-
 """
-================
-Use Case MuCumBo
-================
+=========================
+Use Case MuCumBo on digit
+=========================
 Use case for all classifier of multimodallearn  MuCumBo
 
 multi class digit from sklearn, multivue
@@ -11,7 +11,7 @@ multi class digit from sklearn, multivue
  - vue 2 gradiant of image in second direction
 
 """
-from __future__ import absolute_import
+
 import numpy as np
 import matplotlib.pyplot as plt
 from sklearn.model_selection import train_test_split
@@ -21,7 +21,29 @@ from multimodal.tests.data.get_dataset_path import get_dataset_path
 from multimodal.datasets.data_sample import MultiModalArray
 
 from multimodal.boosting.cumbo import MuCumboClassifier
-from usecase_function import plot_subplot
+import numpy as np
+import matplotlib.pyplot as plt
+import matplotlib._color_data as mcd
+
+
+def plot_subplot(X, Y, Y_pred, vue, subplot, title):
+    cn = mcd.CSS4_COLORS
+    classes = np.unique(Y)
+    n_classes = len(np.unique(Y))
+    axs = plt.subplot(subplot[0],subplot[1],subplot[2])
+    axs.set_title(title)
+    #plt.scatter(X._extract_view(vue), X._extract_view(vue), s=40, c='gray',
+    #            edgecolors=(0, 0, 0))
+    for index, k in zip(range(n_classes), cn.keys()):
+         Y_class, = np.where(Y==classes[index])
+         Y_class_pred = np.intersect1d(np.where(Y_pred==classes[index])[0], np.where(Y_pred==Y)[0])
+         plt.scatter(X._extract_view(vue)[Y_class],
+                     X._extract_view(vue)[Y_class],
+                     s=40, c=cn[k], edgecolors='blue', linewidths=2, label="class real class: "+str(index)) #
+         plt.scatter(X._extract_view(vue)[Y_class_pred],
+                     X._extract_view(vue)[Y_class_pred],
+                     s=160, edgecolors='orange', linewidths=2, label="class prediction: "+str(index))
+
 
 
 if __name__ == '__main__':
@@ -41,9 +63,9 @@ if __name__ == '__main__':
 
     fig = plt.figure(figsize=(12., 11.))
     fig.suptitle("MuCumbo: result" + str(result3), fontsize=16)
-    plot_subplot(X_train, y_train, y_pred33  ,0, (4, 1, 1), "train vue 0" )
-    plot_subplot(X_test, y_test,y_pred3 , 0, (4, 1, 2), "test vue 0" )
-    plot_subplot(X_test, y_test, y_pred3,1, (4, 1, 3), "test vue 1" )
-    plot_subplot(X_test, y_test,y_pred3, 2, (4, 1, 4), "test vue 2" )
+    plot_subplot(X_train, y_train, y_pred33  ,0, (4, 1, 1), "train vue 0 color" )
+    plot_subplot(X_test, y_test,y_pred3 , 0, (4, 1, 2), "test vue 0 color" )
+    plot_subplot(X_test, y_test, y_pred3,1, (4, 1, 3), "test vue 1 gradiant 0" )
+    plot_subplot(X_test, y_test,y_pred3, 2, (4, 1, 4), "test vue 2 gradiant 1" )
     # plt.legend()
     plt.show()
\ No newline at end of file
diff --git a/doc/tutorial/auto_examples/usecase/plot_usecase_exampleMuCuBo.py.md5 b/doc/tutorial/auto_examples/usecase/plot_usecase_exampleMuCuBo.py.md5
index 72e532bf080371b7c41474a7b291d5f17dc3bd72..c2d73f1b2fb19dec0ca52b86bb645321d788572d 100644
--- a/doc/tutorial/auto_examples/usecase/plot_usecase_exampleMuCuBo.py.md5
+++ b/doc/tutorial/auto_examples/usecase/plot_usecase_exampleMuCuBo.py.md5
@@ -1 +1 @@
-41656262e29b2bcd048fa6cd8a96eaf4
\ No newline at end of file
+44b8d61f1868492460cd9a01408b1073
\ No newline at end of file
diff --git a/doc/tutorial/auto_examples/usecase/plot_usecase_exampleMuCuBo.rst b/doc/tutorial/auto_examples/usecase/plot_usecase_exampleMuCuBo.rst
index f67674fc284e7454cd759cb58970c7a033565953..bb4b9b2418e6127b3ee38ead11701e6fde011b6e 100644
--- a/doc/tutorial/auto_examples/usecase/plot_usecase_exampleMuCuBo.rst
+++ b/doc/tutorial/auto_examples/usecase/plot_usecase_exampleMuCuBo.rst
@@ -7,9 +7,9 @@
 .. _sphx_glr_tutorial_auto_examples_usecase_plot_usecase_exampleMuCuBo.py:
 
 
-================
-Use Case MuCumBo
-================
+=========================
+Use Case MuCumBo on digit
+=========================
 Use case for all classifier of multimodallearn  MuCumBo
 
 multi class digit from sklearn, multivue
@@ -31,8 +31,8 @@ multi class digit from sklearn, multivue
  .. code-block:: none
 
     result of MuCumboClassifier on digit 
-    85.33333333333334
-    /home/dominique/projets/ANR-Lives/scikit-multimodallearn/examples/usecase/plot_usecase_exampleMuCuBo.py:49: UserWarning: Matplotlib is currently using agg, which is a non-GUI backend, so cannot show the figure.
+    87.77777777777777
+    /home/dominique/projets/ANR-Lives/scikit-multimodallearn/examples/usecase/plot_usecase_exampleMuCuBo.py:71: UserWarning: Matplotlib is currently using agg, which is a non-GUI backend, so cannot show the figure.
       plt.show()
 
 
@@ -45,7 +45,7 @@ multi class digit from sklearn, multivue
 
 .. code-block:: default
 
-    from __future__ import absolute_import
+
     import numpy as np
     import matplotlib.pyplot as plt
     from sklearn.model_selection import train_test_split
@@ -55,7 +55,29 @@ multi class digit from sklearn, multivue
     from multimodal.datasets.data_sample import MultiModalArray
 
     from multimodal.boosting.cumbo import MuCumboClassifier
-    from usecase_function import plot_subplot
+    import numpy as np
+    import matplotlib.pyplot as plt
+    import matplotlib._color_data as mcd
+
+
+    def plot_subplot(X, Y, Y_pred, vue, subplot, title):
+        cn = mcd.CSS4_COLORS
+        classes = np.unique(Y)
+        n_classes = len(np.unique(Y))
+        axs = plt.subplot(subplot[0],subplot[1],subplot[2])
+        axs.set_title(title)
+        #plt.scatter(X._extract_view(vue), X._extract_view(vue), s=40, c='gray',
+        #            edgecolors=(0, 0, 0))
+        for index, k in zip(range(n_classes), cn.keys()):
+             Y_class, = np.where(Y==classes[index])
+             Y_class_pred = np.intersect1d(np.where(Y_pred==classes[index])[0], np.where(Y_pred==Y)[0])
+             plt.scatter(X._extract_view(vue)[Y_class],
+                         X._extract_view(vue)[Y_class],
+                         s=40, c=cn[k], edgecolors='blue', linewidths=2, label="class real class: "+str(index)) #
+             plt.scatter(X._extract_view(vue)[Y_class_pred],
+                         X._extract_view(vue)[Y_class_pred],
+                         s=160, edgecolors='orange', linewidths=2, label="class prediction: "+str(index))
+
 
 
     if __name__ == '__main__':
@@ -75,16 +97,16 @@ multi class digit from sklearn, multivue
 
         fig = plt.figure(figsize=(12., 11.))
         fig.suptitle("MuCumbo: result" + str(result3), fontsize=16)
-        plot_subplot(X_train, y_train, y_pred33  ,0, (4, 1, 1), "train vue 0" )
-        plot_subplot(X_test, y_test,y_pred3 , 0, (4, 1, 2), "test vue 0" )
-        plot_subplot(X_test, y_test, y_pred3,1, (4, 1, 3), "test vue 1" )
-        plot_subplot(X_test, y_test,y_pred3, 2, (4, 1, 4), "test vue 2" )
+        plot_subplot(X_train, y_train, y_pred33  ,0, (4, 1, 1), "train vue 0 color" )
+        plot_subplot(X_test, y_test,y_pred3 , 0, (4, 1, 2), "test vue 0 color" )
+        plot_subplot(X_test, y_test, y_pred3,1, (4, 1, 3), "test vue 1 gradiant 0" )
+        plot_subplot(X_test, y_test,y_pred3, 2, (4, 1, 4), "test vue 2 gradiant 1" )
         # plt.legend()
         plt.show()
 
 .. rst-class:: sphx-glr-timing
 
-   **Total running time of the script:** ( 0 minutes  11.436 seconds)
+   **Total running time of the script:** ( 0 minutes  14.171 seconds)
 
 
 .. _sphx_glr_download_tutorial_auto_examples_usecase_plot_usecase_exampleMuCuBo.py:
diff --git a/doc/tutorial/auto_examples/usecase/plot_usecase_exampleMuCuBo_codeobj.pickle b/doc/tutorial/auto_examples/usecase/plot_usecase_exampleMuCuBo_codeobj.pickle
index 17bb32a3b64f9f8683437434f73b239f034f8fbf..7776629f1c4112e0842acfe4f7c5dfd032c59212 100644
Binary files a/doc/tutorial/auto_examples/usecase/plot_usecase_exampleMuCuBo_codeobj.pickle and b/doc/tutorial/auto_examples/usecase/plot_usecase_exampleMuCuBo_codeobj.pickle differ
diff --git a/doc/tutorial/auto_examples/usecase/plot_usecase_exampleMumBo.ipynb b/doc/tutorial/auto_examples/usecase/plot_usecase_exampleMumBo.ipynb
index b7b3d45d0a03305b3aac3169ca4812d1ceee02fd..032b41560963c15797500a345d11e7c07e0d43cc 100644
--- a/doc/tutorial/auto_examples/usecase/plot_usecase_exampleMumBo.ipynb
+++ b/doc/tutorial/auto_examples/usecase/plot_usecase_exampleMumBo.ipynb
@@ -15,7 +15,7 @@
       "cell_type": "markdown",
       "metadata": {},
       "source": [
-        "\n# Use Case MumBo\n\nUse case for all classifier of multimodallearn MumBo\n\nmulti class digit from sklearn, multivue\n - vue 0 digit data (color of sklearn)\n - vue 1 gradiant of image in first direction\n - vue 2 gradiant of image in second direction\n\n\n"
+        "\n# Use Case MumBo on digit\n\nUse case for all classifier of multimodallearn MumBo\n\nmulti class digit from sklearn, multivue\n - vue 0 digit data (color of sklearn)\n - vue 1 gradiant of image in first direction\n - vue 2 gradiant of image in second direction\n\n\n"
       ]
     },
     {
@@ -26,7 +26,7 @@
       },
       "outputs": [],
       "source": [
-        "from __future__ import absolute_import\nimport numpy as np\nimport matplotlib.pyplot as plt\nfrom sklearn.model_selection import train_test_split\nfrom sklearn.tree import DecisionTreeClassifier\nfrom multimodal.datasets.base import load_dict, save_dict\nfrom multimodal.tests.data.get_dataset_path import get_dataset_path\nfrom multimodal.datasets.data_sample import MultiModalArray\n\nfrom multimodal.boosting.mumbo import MumboClassifier\n\nfrom usecase_function import plot_subplot\n\n\nif __name__ == '__main__':\n    # file = get_dataset_path(\"digit_histogram.npy\")\n    file = get_dataset_path(\"digit_col_grad.npy\")\n    y = np.load(get_dataset_path(\"digit_y.npy\"))\n    base_estimator = DecisionTreeClassifier(max_depth=4)\n    dic_digit = load_dict(file)\n    XX =MultiModalArray(dic_digit)\n    X_train, X_test, y_train, y_test = train_test_split(XX, y)\n\n    est2 = MumboClassifier(base_estimator=base_estimator).fit(X_train, y_train)\n    y_pred2 = est2.predict(X_test)\n    y_pred22 = est2.predict(X_train)\n    print(\"result of MumboClassifier on digit \")\n    result2 = np.mean(y_pred2.ravel() == y_test.ravel()) * 100\n    print(result2)\n\n    fig = plt.figure(figsize=(12., 11.))\n    fig.suptitle(\"Mumbo: result\" + str(result2), fontsize=16)\n    plot_subplot(X_train, y_train, y_pred22 , 0, (4, 1, 1), \"train vue 0\" )\n    plot_subplot(X_test, y_test,y_pred2, 0, (4, 1, 2), \"test vue 0\" )\n    plot_subplot(X_test, y_test, y_pred2, 1, (4, 1, 3), \"test vue 1\" )\n    plot_subplot(X_test, y_test,y_pred2, 2, (4, 1, 4), \"test vue 2\" )\n    # plt.legend()\n    plt.show()"
+        "import numpy as np\nimport matplotlib.pyplot as plt\nfrom sklearn.model_selection import train_test_split\nfrom sklearn.tree import DecisionTreeClassifier\nfrom multimodal.datasets.base import load_dict, save_dict\nfrom multimodal.tests.data.get_dataset_path import get_dataset_path\nfrom multimodal.datasets.data_sample import MultiModalArray\n\nfrom multimodal.boosting.mumbo import MumboClassifier\n\nimport numpy as np\nimport matplotlib.pyplot as plt\nimport matplotlib._color_data as mcd\n\n\ndef plot_subplot(X, Y, Y_pred, vue, subplot, title):\n    cn = mcd.CSS4_COLORS\n    classes = np.unique(Y)\n    n_classes = len(np.unique(Y))\n    axs = plt.subplot(subplot[0],subplot[1],subplot[2])\n    axs.set_title(title)\n    #plt.scatter(X._extract_view(vue), X._extract_view(vue), s=40, c='gray',\n    #            edgecolors=(0, 0, 0))\n    for index, k in zip(range(n_classes), cn.keys()):\n         Y_class, = np.where(Y==classes[index])\n         Y_class_pred = np.intersect1d(np.where(Y_pred==classes[index])[0], np.where(Y_pred==Y)[0])\n         plt.scatter(X._extract_view(vue)[Y_class],\n                     X._extract_view(vue)[Y_class],\n                     s=40, c=cn[k], edgecolors='blue', linewidths=2, label=\"class real class: \"+str(index)) #\n         plt.scatter(X._extract_view(vue)[Y_class_pred],\n                     X._extract_view(vue)[Y_class_pred],\n                     s=160, edgecolors='orange', linewidths=2, label=\"class prediction: \"+str(index))\n\n\n\nif __name__ == '__main__':\n    # file = get_dataset_path(\"digit_histogram.npy\")\n    file = get_dataset_path(\"digit_col_grad.npy\")\n    y = np.load(get_dataset_path(\"digit_y.npy\"))\n    base_estimator = DecisionTreeClassifier(max_depth=4)\n    dic_digit = load_dict(file)\n    XX =MultiModalArray(dic_digit)\n    X_train, X_test, y_train, y_test = train_test_split(XX, y)\n\n    est2 = MumboClassifier(base_estimator=base_estimator).fit(X_train, y_train)\n    y_pred2 = est2.predict(X_test)\n    y_pred22 = est2.predict(X_train)\n    print(\"result of MumboClassifier on digit \")\n    result2 = np.mean(y_pred2.ravel() == y_test.ravel()) * 100\n    print(result2)\n\n    fig = plt.figure(figsize=(12., 11.))\n    fig.suptitle(\"Mumbo: result\" + str(result2), fontsize=16)\n    plot_subplot(X_train, y_train, y_pred22 , 0, (4, 1, 1), \"train vue 0\" )\n    plot_subplot(X_test, y_test,y_pred2, 0, (4, 1, 2), \"test vue 0\" )\n    plot_subplot(X_test, y_test, y_pred2, 1, (4, 1, 3), \"test vue 1\" )\n    plot_subplot(X_test, y_test,y_pred2, 2, (4, 1, 4), \"test vue 2\" )\n    # plt.legend()\n    plt.show()"
       ]
     }
   ],
diff --git a/doc/tutorial/auto_examples/usecase/plot_usecase_exampleMumBo.py b/doc/tutorial/auto_examples/usecase/plot_usecase_exampleMumBo.py
index 76e4020e88fb204c8715668dd200bc447ac5297c..3f5d7d10fac1ee3df27e72eeed107fb7b6a8a118 100644
--- a/doc/tutorial/auto_examples/usecase/plot_usecase_exampleMumBo.py
+++ b/doc/tutorial/auto_examples/usecase/plot_usecase_exampleMumBo.py
@@ -1,8 +1,8 @@
 # -*- coding: utf-8 -*-
 """
-==============
-Use Case MumBo
-==============
+=======================
+Use Case MumBo on digit
+=======================
 Use case for all classifier of multimodallearn MumBo
 
 multi class digit from sklearn, multivue
@@ -11,7 +11,7 @@ multi class digit from sklearn, multivue
  - vue 2 gradiant of image in second direction
 
 """
-from __future__ import absolute_import
+
 import numpy as np
 import matplotlib.pyplot as plt
 from sklearn.model_selection import train_test_split
@@ -22,7 +22,29 @@ from multimodal.datasets.data_sample import MultiModalArray
 
 from multimodal.boosting.mumbo import MumboClassifier
 
-from usecase_function import plot_subplot
+# numpy and matplotlib.pyplot are already imported above; only the colour
+# table is needed by the inlined plot_subplot helper
+import matplotlib._color_data as mcd
+
+
+def plot_subplot(X, Y, Y_pred, vue, subplot, title):
+    cn = mcd.CSS4_COLORS
+    classes = np.unique(Y)
+    n_classes = len(np.unique(Y))
+    axs = plt.subplot(subplot[0],subplot[1],subplot[2])
+    axs.set_title(title)
+    #plt.scatter(X._extract_view(vue), X._extract_view(vue), s=40, c='gray',
+    #            edgecolors=(0, 0, 0))
+    for index, k in zip(range(n_classes), cn.keys()):
+         Y_class, = np.where(Y==classes[index])
+         Y_class_pred = np.intersect1d(np.where(Y_pred==classes[index])[0], np.where(Y_pred==Y)[0])
+         plt.scatter(X._extract_view(vue)[Y_class],
+                     X._extract_view(vue)[Y_class],
+                     s=40, c=cn[k], edgecolors='blue', linewidths=2, label="class real class: "+str(index)) #
+         plt.scatter(X._extract_view(vue)[Y_class_pred],
+                     X._extract_view(vue)[Y_class_pred],
+                     s=160, edgecolors='orange', linewidths=2, label="class prediction: "+str(index))
+
 
 
 if __name__ == '__main__':
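The example no longer imports ``plot_subplot`` from the local ``usecase_function`` module; the helper is inlined so the gallery script runs standalone. For each view it scatters every sample coloured by its true class and redraws the correctly predicted samples with a larger orange-edged marker. Below is a minimal, self-contained sketch of that overlay idea using synthetic plain-NumPy data (the random 2-D points and the direct array slicing are assumptions for illustration; the real helper works on ``MultiModalArray._extract_view`` and the digit features):

.. code-block:: python

    import numpy as np
    import matplotlib.pyplot as plt

    # Synthetic 2-class, single-view stand-in for the digit features.
    rng = np.random.RandomState(0)
    X_view = rng.rand(60, 2)
    y_true = np.repeat([0, 1], 30)
    y_pred = y_true.copy()
    y_pred[rng.choice(60, size=10, replace=False)] ^= 1  # flip a few predictions

    for label, color in zip(np.unique(y_true), ['tab:blue', 'tab:green']):
        members = np.where(y_true == label)[0]
        correct = np.intersect1d(members, np.where(y_pred == y_true)[0])
        # all samples of the class, small markers
        plt.scatter(X_view[members, 0], X_view[members, 1],
                    s=40, c=color, label="true class %d" % label)
        # correctly predicted samples, larger hollow markers on top
        plt.scatter(X_view[correct, 0], X_view[correct, 1],
                    s=160, facecolors='none', edgecolors='orange',
                    label="correctly predicted %d" % label)
    plt.legend()
    plt.show()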
diff --git a/doc/tutorial/auto_examples/usecase/plot_usecase_exampleMumBo.py.md5 b/doc/tutorial/auto_examples/usecase/plot_usecase_exampleMumBo.py.md5
index 17c46a09d474ecc1fe897c03793a97d9928f3f2d..f35b383b77c26a08dc053afa513fbd33c49fc56e 100644
--- a/doc/tutorial/auto_examples/usecase/plot_usecase_exampleMumBo.py.md5
+++ b/doc/tutorial/auto_examples/usecase/plot_usecase_exampleMumBo.py.md5
@@ -1 +1 @@
-2135145bcc76c1c0354a13e5fff1666c
\ No newline at end of file
+8e13b092f67c6a5d55b9a8b80feb306b
\ No newline at end of file
diff --git a/doc/tutorial/auto_examples/usecase/plot_usecase_exampleMumBo.rst b/doc/tutorial/auto_examples/usecase/plot_usecase_exampleMumBo.rst
index 08314501ac44488591661763dc83bd1386f19e0d..d848e2fb57da3e6e99a6ee5f0053d17a37269b1d 100644
--- a/doc/tutorial/auto_examples/usecase/plot_usecase_exampleMumBo.rst
+++ b/doc/tutorial/auto_examples/usecase/plot_usecase_exampleMumBo.rst
@@ -7,9 +7,9 @@
 .. _sphx_glr_tutorial_auto_examples_usecase_plot_usecase_exampleMumBo.py:
 
 
-==============
-Use Case MumBo
-==============
+=======================
+Use Case MumBo on digit
+=======================
 Use case for all classifier of multimodallearn MumBo
 
 multi class digit from sklearn, multivue
@@ -30,9 +30,11 @@ multi class digit from sklearn, multivue
 
  .. code-block:: none
 
+    /home/dominique/.virtualenvs/env_multiv/local/lib/python3.6/site-packages/sklearn/ensemble/weight_boosting.py:29: DeprecationWarning: numpy.core.umath_tests is an internal NumPy module and should not be imported. It will be removed in a future NumPy release.
+      from numpy.core.umath_tests import inner1d
     result of MumboClassifier on digit 
-    96.0
-    /home/dominique/projets/ANR-Lives/scikit-multimodallearn/examples/usecase/plot_usecase_exampleMumBo.py:51: UserWarning: Matplotlib is currently using agg, which is a non-GUI backend, so cannot show the figure.
+    98.22222222222223
+    /home/dominique/projets/ANR-Lives/scikit-multimodallearn/examples/usecase/plot_usecase_exampleMumBo.py:73: UserWarning: Matplotlib is currently using agg, which is a non-GUI backend, so cannot show the figure.
       plt.show()
 
 
@@ -45,7 +47,7 @@ multi class digit from sklearn, multivue
 
 .. code-block:: default
 
-    from __future__ import absolute_import
+
     import numpy as np
     import matplotlib.pyplot as plt
     from sklearn.model_selection import train_test_split
@@ -56,7 +58,29 @@ multi class digit from sklearn, multivue
 
     from multimodal.boosting.mumbo import MumboClassifier
 
-    from usecase_function import plot_subplot
+    # numpy and matplotlib.pyplot are already imported above; only the colour
+    # table is needed by the inlined plot_subplot helper
+    import matplotlib._color_data as mcd
+
+
+    def plot_subplot(X, Y, Y_pred, vue, subplot, title):
+        cn = mcd.CSS4_COLORS
+        classes = np.unique(Y)
+        n_classes = len(np.unique(Y))
+        axs = plt.subplot(subplot[0],subplot[1],subplot[2])
+        axs.set_title(title)
+        #plt.scatter(X._extract_view(vue), X._extract_view(vue), s=40, c='gray',
+        #            edgecolors=(0, 0, 0))
+        for index, k in zip(range(n_classes), cn.keys()):
+             Y_class, = np.where(Y==classes[index])
+             Y_class_pred = np.intersect1d(np.where(Y_pred==classes[index])[0], np.where(Y_pred==Y)[0])
+             plt.scatter(X._extract_view(vue)[Y_class],
+                         X._extract_view(vue)[Y_class],
+                         s=40, c=cn[k], edgecolors='blue', linewidths=2, label="class real class: "+str(index)) #
+             plt.scatter(X._extract_view(vue)[Y_class_pred],
+                         X._extract_view(vue)[Y_class_pred],
+                         s=160, edgecolors='orange', linewidths=2, label="class prediction: "+str(index))
+
 
 
     if __name__ == '__main__':
@@ -87,7 +111,7 @@ multi class digit from sklearn, multivue
 
 .. rst-class:: sphx-glr-timing
 
-   **Total running time of the script:** ( 0 minutes  5.520 seconds)
+   **Total running time of the script:** ( 0 minutes  6.374 seconds)
 
 
 .. _sphx_glr_download_tutorial_auto_examples_usecase_plot_usecase_exampleMumBo.py:
diff --git a/doc/tutorial/auto_examples/usecase/plot_usecase_exampleMumBo_codeobj.pickle b/doc/tutorial/auto_examples/usecase/plot_usecase_exampleMumBo_codeobj.pickle
index 2518b269c03ea056b7851ca059c0df264c1ac823..e0f1aabd0269750c2c6f298be6c7da6ed057b5a5 100644
Binary files a/doc/tutorial/auto_examples/usecase/plot_usecase_exampleMumBo_codeobj.pickle and b/doc/tutorial/auto_examples/usecase/plot_usecase_exampleMumBo_codeobj.pickle differ
diff --git a/doc/tutorial/auto_examples/usecase/sg_execution_times.rst b/doc/tutorial/auto_examples/usecase/sg_execution_times.rst
index 223ae1fc7d7d472e3a2466ade64b4707085dc797..1150b05363bcb88291d08c5cc1244c9ea66b14d4 100644
--- a/doc/tutorial/auto_examples/usecase/sg_execution_times.rst
+++ b/doc/tutorial/auto_examples/usecase/sg_execution_times.rst
@@ -5,16 +5,16 @@
 
 Computation times
 =================
-**01:09.574** total execution time for **tutorial_auto_examples_usecase** files:
+**01:55.487** total execution time for **tutorial_auto_examples_usecase** files:
 
 +------------------------------------------------------------------------------------------------------------------+-----------+--------+
-| :ref:`sphx_glr_tutorial_auto_examples_usecase_plot_usecase_exampleMVML.py` (``plot_usecase_exampleMVML.py``)     | 00:39.921 | 0.0 MB |
+| :ref:`sphx_glr_tutorial_auto_examples_usecase_plot_usecase_exampleMVML.py` (``plot_usecase_exampleMVML.py``)     | 01:14.485 | 0.0 MB |
 +------------------------------------------------------------------------------------------------------------------+-----------+--------+
-| :ref:`sphx_glr_tutorial_auto_examples_usecase_plot_usecase_exampleMKL.py` (``plot_usecase_exampleMKL.py``)       | 00:12.697 | 0.0 MB |
+| :ref:`sphx_glr_tutorial_auto_examples_usecase_plot_usecase_exampleMKL.py` (``plot_usecase_exampleMKL.py``)       | 00:20.457 | 0.0 MB |
 +------------------------------------------------------------------------------------------------------------------+-----------+--------+
-| :ref:`sphx_glr_tutorial_auto_examples_usecase_plot_usecase_exampleMuCuBo.py` (``plot_usecase_exampleMuCuBo.py``) | 00:11.436 | 0.0 MB |
+| :ref:`sphx_glr_tutorial_auto_examples_usecase_plot_usecase_exampleMuCuBo.py` (``plot_usecase_exampleMuCuBo.py``) | 00:14.171 | 0.0 MB |
 +------------------------------------------------------------------------------------------------------------------+-----------+--------+
-| :ref:`sphx_glr_tutorial_auto_examples_usecase_plot_usecase_exampleMumBo.py` (``plot_usecase_exampleMumBo.py``)   | 00:05.520 | 0.0 MB |
+| :ref:`sphx_glr_tutorial_auto_examples_usecase_plot_usecase_exampleMumBo.py` (``plot_usecase_exampleMumBo.py``)   | 00:06.374 | 0.0 MB |
 +------------------------------------------------------------------------------------------------------------------+-----------+--------+
 | :ref:`sphx_glr_tutorial_auto_examples_usecase_usecase_function.py` (``usecase_function.py``)                     | 00:00.000 | 0.0 MB |
 +------------------------------------------------------------------------------------------------------------------+-----------+--------+
diff --git a/doc/tutorial/auto_examples/usecase/usecase_function_codeobj.pickle b/doc/tutorial/auto_examples/usecase/usecase_function_codeobj.pickle
index 11a5d97962739de44a38fb53598691ab2a7bbee1..ad823d70d0d8c15936fac37922405dedd5018905 100644
Binary files a/doc/tutorial/auto_examples/usecase/usecase_function_codeobj.pickle and b/doc/tutorial/auto_examples/usecase/usecase_function_codeobj.pickle differ
diff --git a/multimodal/boosting/mumbo.py b/multimodal/boosting/mumbo.py
index 3ddee635db2147f90959b974b9d14f5264620953..84530d8fd8551aabb7836a293e659148cac44bf1 100644
--- a/multimodal/boosting/mumbo.py
+++ b/multimodal/boosting/mumbo.py
@@ -221,7 +221,12 @@ class MumboClassifier(BaseEnsemble, ClassifierMixin, UBoosting):
 
     def _compute_alphas(self, edges):
         """Compute values of confidence rate alpha given edge values."""
-        alphas = 0.5 * np.log((1.+edges) / (1.-edges))
+        alphas = 0.5 * np.log((1. + edges) / (1. - edges))
+        # an edge of +/-1 makes the log term diverge (inf) or become nan;
+        # fall back to a confidence rate of 1.0 for those views
+        if np.any(np.isinf(alphas)) or np.any(np.isnan(alphas)):
+            alphas[np.where(np.isnan(alphas))[0]] = 1.0
+            alphas[np.where(np.isinf(alphas))[0]] = 1.0
         return alphas
 
     def _compute_cost_global(self, label_score_global, best_predicted_classes,
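The guard added to ``_compute_alphas`` handles the degenerate case where an edge value reaches ±1 (a view whose weak classifier is perfect, or perfectly wrong, on the weighted sample): the term ``0.5 * log((1 + e) / (1 - e))`` then diverges. A minimal sketch of the behaviour outside the classifier follows; the function name is illustrative only, not part of the library API:

.. code-block:: python

    import numpy as np

    def compute_alphas_guarded(edges):
        # Same confidence-rate formula as MumboClassifier._compute_alphas.
        with np.errstate(divide='ignore', invalid='ignore'):
            alphas = 0.5 * np.log((1. + edges) / (1. - edges))
        # edge = +1 gives log(2/0) = +inf, edge = -1 gives log(0/2) = -inf;
        # the guard replaces these with a confidence rate of 1.0.
        alphas[np.isnan(alphas)] = 1.0
        alphas[np.isinf(alphas)] = 1.0
        return alphas

    edges = np.array([0.0, 0.5, 1.0])
    print(compute_alphas_guarded(edges))  # [0.     0.5493 1.    ]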