Commit d2e352c6 authored by Dominique Benielli

add usecase examples

parent c80f65e8
Pipeline #4160 failed
Showing with 887 additions and 0 deletions
%% Cell type:code id: tags:
``` python
%matplotlib inline
```
%% Cell type:markdown id: tags:
# Use Case MKL

Use case for the MKL classifier of multimodallearn: multi-class digits from sklearn, three views
 - view 0: digit data (sklearn colors)
 - view 1: image gradient in the first direction
 - view 2: image gradient in the second direction
%% Cell type:code id: tags:
``` python
from __future__ import absolute_import
import numpy as np
import matplotlib.pyplot as plt
from sklearn.multiclass import OneVsOneClassifier
from sklearn.model_selection import train_test_split
from sklearn.tree import DecisionTreeClassifier
from multimodal.datasets.base import load_dict, save_dict
from multimodal.tests.data.get_dataset_path import get_dataset_path
from multimodal.datasets.data_sample import MultiModalArray
from multimodal.kernels.mvml import MVML
from multimodal.kernels.lpMKL import MKL

from usecase_function import plot_subplot

if __name__ == '__main__':
    # file = get_dataset_path("digit_histogram.npy")
    file = get_dataset_path("digit_col_grad.npy")
    y = np.load(get_dataset_path("digit_y.npy"))
    base_estimator = DecisionTreeClassifier(max_depth=4)
    dic_digit = load_dict(file)
    XX = MultiModalArray(dic_digit)
    X_train, X_test, y_train, y_test = train_test_split(XX, y)

    est4 = OneVsOneClassifier(MKL(lmbda=0.1, nystrom_param=0.2)).fit(X_train, y_train)
    y_pred4 = est4.predict(X_test)
    y_pred44 = est4.predict(X_train)
    print("result of MKL on digit with oneversone")
    result4 = np.mean(y_pred4.ravel() == y_test.ravel()) * 100
    print(result4)

    fig = plt.figure(figsize=(12., 11.))
    fig.suptitle("MKL : result" + str(result4), fontsize=16)
    plot_subplot(X_train, y_train, y_pred44, 0, (4, 1, 1), "train vue 0")
    plot_subplot(X_test, y_test, y_pred4, 0, (4, 1, 2), "test vue 0")
    plot_subplot(X_test, y_test, y_pred4, 1, (4, 1, 3), "test vue 1")
    plot_subplot(X_test, y_test, y_pred4, 2, (4, 1, 4), "test vue 2")
    # plt.legend()
    plt.show()
```
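The dataset loaded above, `digit_col_grad.npy`, is a pre-computed dictionary of the three views described in the cell before it. The preprocessing that produced it is not part of this commit, so the sketch below is only an illustrative approximation of how such a three-view digits dictionary could be built: it assumes `np.gradient` for the two gradient views and that `MultiModalArray` accepts a dict mapping a view index to an `(n_samples, n_features)` array, as its use with `load_dict` above suggests.

``` python
import numpy as np
from sklearn import datasets

from multimodal.datasets.data_sample import MultiModalArray

digits = datasets.load_digits()

# view 0: the raw 8x8 grey-level images, flattened to 64 features
view0 = digits.data
# views 1 and 2: image gradients along the two axes, also flattened
view1 = np.array([np.gradient(img, axis=0).ravel() for img in digits.images])
view2 = np.array([np.gradient(img, axis=1).ravel() for img in digits.images])

# dict of {view index: feature array}, the multi-view layout used by the example
dic_digit = {0: view0, 1: view1, 2: view2}
X = MultiModalArray(dic_digit)
y = digits.target
```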
# -*- coding: utf-8 -*-
"""
============
Use Case MKL
============
Use case for the MKL classifier of multimodallearn: multi-class digits from sklearn, three views
 - view 0: digit data (sklearn colors)
 - view 1: image gradient in the first direction
 - view 2: image gradient in the second direction
"""
from __future__ import absolute_import
import numpy as np
import matplotlib.pyplot as plt
from sklearn.multiclass import OneVsOneClassifier
from sklearn.model_selection import train_test_split
from sklearn.tree import DecisionTreeClassifier
from multimodal.datasets.base import load_dict, save_dict
from multimodal.tests.data.get_dataset_path import get_dataset_path
from multimodal.datasets.data_sample import MultiModalArray
from multimodal.kernels.mvml import MVML
from multimodal.kernels.lpMKL import MKL

from usecase_function import plot_subplot

if __name__ == '__main__':
    # file = get_dataset_path("digit_histogram.npy")
    file = get_dataset_path("digit_col_grad.npy")
    y = np.load(get_dataset_path("digit_y.npy"))
    base_estimator = DecisionTreeClassifier(max_depth=4)
    dic_digit = load_dict(file)
    XX = MultiModalArray(dic_digit)
    X_train, X_test, y_train, y_test = train_test_split(XX, y)

    est4 = OneVsOneClassifier(MKL(lmbda=0.1, nystrom_param=0.2)).fit(X_train, y_train)
    y_pred4 = est4.predict(X_test)
    y_pred44 = est4.predict(X_train)
    print("result of MKL on digit with oneversone")
    result4 = np.mean(y_pred4.ravel() == y_test.ravel()) * 100
    print(result4)

    fig = plt.figure(figsize=(12., 11.))
    fig.suptitle("MKL : result" + str(result4), fontsize=16)
    plot_subplot(X_train, y_train, y_pred44, 0, (4, 1, 1), "train vue 0")
    plot_subplot(X_test, y_test, y_pred4, 0, (4, 1, 2), "test vue 0")
    plot_subplot(X_test, y_test, y_pred4, 1, (4, 1, 3), "test vue 1")
    plot_subplot(X_test, y_test, y_pred4, 2, (4, 1, 4), "test vue 2")
    # plt.legend()
    plt.show()
.. note::
    :class: sphx-glr-download-link-note

    Click :ref:`here <sphx_glr_download_tutorial_auto_examples_usecase_plot_usecase_exampleMKL.py>` to download the full example code

.. rst-class:: sphx-glr-example-title

.. _sphx_glr_tutorial_auto_examples_usecase_plot_usecase_exampleMKL.py:

============
Use Case MKL
============

Use case for the MKL classifier of multimodallearn: multi-class digits from sklearn, three views
 - view 0: digit data (sklearn colors)
 - view 1: image gradient in the first direction
 - view 2: image gradient in the second direction

.. image:: /tutorial/auto_examples/usecase/images/sphx_glr_plot_usecase_exampleMKL_001.png
    :class: sphx-glr-single-img

.. rst-class:: sphx-glr-script-out

 Out:

 .. code-block:: none

    result of MKL on digit with oneversone
    98.44444444444444
    /home/dominique/projets/ANR-Lives/scikit-multimodallearn/examples/usecase/plot_usecase_exampleMKL.py:50: UserWarning: Matplotlib is currently using agg, which is a non-GUI backend, so cannot show the figure.
      plt.show()
|
.. code-block:: default


    from __future__ import absolute_import
    import numpy as np
    import matplotlib.pyplot as plt
    from sklearn.multiclass import OneVsOneClassifier
    from sklearn.model_selection import train_test_split
    from sklearn.tree import DecisionTreeClassifier
    from multimodal.datasets.base import load_dict, save_dict
    from multimodal.tests.data.get_dataset_path import get_dataset_path
    from multimodal.datasets.data_sample import MultiModalArray
    from multimodal.kernels.mvml import MVML
    from multimodal.kernels.lpMKL import MKL

    from usecase_function import plot_subplot

    if __name__ == '__main__':
        # file = get_dataset_path("digit_histogram.npy")
        file = get_dataset_path("digit_col_grad.npy")
        y = np.load(get_dataset_path("digit_y.npy"))
        base_estimator = DecisionTreeClassifier(max_depth=4)
        dic_digit = load_dict(file)
        XX = MultiModalArray(dic_digit)
        X_train, X_test, y_train, y_test = train_test_split(XX, y)

        est4 = OneVsOneClassifier(MKL(lmbda=0.1, nystrom_param=0.2)).fit(X_train, y_train)
        y_pred4 = est4.predict(X_test)
        y_pred44 = est4.predict(X_train)
        print("result of MKL on digit with oneversone")
        result4 = np.mean(y_pred4.ravel() == y_test.ravel()) * 100
        print(result4)

        fig = plt.figure(figsize=(12., 11.))
        fig.suptitle("MKL : result" + str(result4), fontsize=16)
        plot_subplot(X_train, y_train, y_pred44, 0, (4, 1, 1), "train vue 0")
        plot_subplot(X_test, y_test, y_pred4, 0, (4, 1, 2), "test vue 0")
        plot_subplot(X_test, y_test, y_pred4, 1, (4, 1, 3), "test vue 1")
        plot_subplot(X_test, y_test, y_pred4, 2, (4, 1, 4), "test vue 2")
        # plt.legend()
        plt.show()
.. rst-class:: sphx-glr-timing
**Total running time of the script:** ( 0 minutes 12.697 seconds)
.. _sphx_glr_download_tutorial_auto_examples_usecase_plot_usecase_exampleMKL.py:
.. only :: html
.. container:: sphx-glr-footer
:class: sphx-glr-footer-example
.. container:: sphx-glr-download
:download:`Download Python source code: plot_usecase_exampleMKL.py <plot_usecase_exampleMKL.py>`
.. container:: sphx-glr-download
:download:`Download Jupyter notebook: plot_usecase_exampleMKL.ipynb <plot_usecase_exampleMKL.ipynb>`
.. only:: html
.. rst-class:: sphx-glr-signature
`Gallery generated by Sphinx-Gallery <https://sphinx-gallery.github.io>`_
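The MKL example above fixes `lmbda=0.1` and `nystrom_param=0.2` by hand. Because MKL is used here as a regular scikit-learn estimator inside `OneVsOneClassifier`, these two hyper-parameters could in principle be searched with `GridSearchCV`. The sketch below is hedged: it reuses `X_train` and `y_train` from the script above and assumes MKL exposes `lmbda` and `nystrom_param` through `get_params`, which should be checked against the class signature.

``` python
from sklearn.model_selection import GridSearchCV
from sklearn.multiclass import OneVsOneClassifier
from multimodal.kernels.lpMKL import MKL

# nested parameter names follow the usual sklearn convention:
# the wrapped MKL estimator is reached through the "estimator__" prefix
param_grid = {
    "estimator__lmbda": [0.01, 0.1, 1.0],
    "estimator__nystrom_param": [0.2, 0.5, 1.0],
}
search = GridSearchCV(OneVsOneClassifier(MKL(lmbda=0.1, nystrom_param=0.2)),
                      param_grid, cv=3)
search.fit(X_train, y_train)   # X_train, y_train as in the script above
print(search.best_params_, search.best_score_)
```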
File added
%% Cell type:code id: tags:
``` python
%matplotlib inline
```
%% Cell type:markdown id: tags:
# Use Case of MVML

Use case for the MVML classifier of multimodallearn: multi-class digits from sklearn, three views
 - view 0: digit data (sklearn colors)
 - view 1: image gradient in the first direction
 - view 2: image gradient in the second direction
%% Cell type:code id: tags:
``` python
from __future__ import absolute_import
import numpy as np
import matplotlib.pyplot as plt
from sklearn.multiclass import OneVsOneClassifier
from sklearn.model_selection import train_test_split
from multimodal.datasets.base import load_dict, save_dict
from multimodal.tests.data.get_dataset_path import get_dataset_path
from multimodal.datasets.data_sample import MultiModalArray
from multimodal.kernels.mvml import MVML
from usecase_function import plot_subplot


if __name__ == '__main__':
    # file = get_dataset_path("digit_histogram.npy")
    file = get_dataset_path("digit_col_grad.npy")
    y = np.load(get_dataset_path("digit_y.npy"))
    dic_digit = load_dict(file)
    XX = MultiModalArray(dic_digit)
    X_train, X_test, y_train, y_test = train_test_split(XX, y)
    est1 = OneVsOneClassifier(MVML(lmbda=0.1, eta=1, nystrom_param=0.2)).fit(X_train, y_train)
    y_pred1 = est1.predict(X_test)
    y_pred11 = est1.predict(X_train)
    print("result of MVML on digit with oneversone")
    result1 = np.mean(y_pred1.ravel() == y_test.ravel()) * 100
    print(result1)

    fig = plt.figure(figsize=(12., 11.))
    fig.suptitle("MVML: result" + str(result1), fontsize=16)
    plot_subplot(X_train, y_train, y_pred11, 0, (4, 1, 1), "train vue 0")
    plot_subplot(X_test, y_test, y_pred1, 0, (4, 1, 2), "test vue 0")
    plot_subplot(X_test, y_test, y_pred1, 1, (4, 1, 3), "test vue 1")
    plot_subplot(X_test, y_test, y_pred1, 2, (4, 1, 4), "test vue 2")
    # plt.legend()
    plt.show()
```
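The accuracy printed by the MVML cell above is computed by hand as `np.mean(y_pred1.ravel() == y_test.ravel()) * 100`. The same number (as a fraction of 1) plus a per-class breakdown can be obtained with standard scikit-learn metrics; a minimal sketch reusing `y_test` and `y_pred1` from the cell above.

``` python
from sklearn.metrics import accuracy_score, confusion_matrix

# equivalent to np.mean(y_pred1.ravel() == y_test.ravel()), expressed as a fraction
print(accuracy_score(y_test, y_pred1))
# rows are true digits, columns are predicted digits
print(confusion_matrix(y_test, y_pred1))
```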
# -*- coding: utf-8 -*-
"""
================
Use Case of MVML
================
Use case for the MVML classifier of multimodallearn: multi-class digits from sklearn, three views
 - view 0: digit data (sklearn colors)
 - view 1: image gradient in the first direction
 - view 2: image gradient in the second direction
"""
from __future__ import absolute_import
import numpy as np
import matplotlib.pyplot as plt
from sklearn.multiclass import OneVsOneClassifier
from sklearn.model_selection import train_test_split
from multimodal.datasets.base import load_dict, save_dict
from multimodal.tests.data.get_dataset_path import get_dataset_path
from multimodal.datasets.data_sample import MultiModalArray
from multimodal.kernels.mvml import MVML
from usecase_function import plot_subplot


if __name__ == '__main__':
    # file = get_dataset_path("digit_histogram.npy")
    file = get_dataset_path("digit_col_grad.npy")
    y = np.load(get_dataset_path("digit_y.npy"))
    dic_digit = load_dict(file)
    XX = MultiModalArray(dic_digit)
    X_train, X_test, y_train, y_test = train_test_split(XX, y)
    est1 = OneVsOneClassifier(MVML(lmbda=0.1, eta=1, nystrom_param=0.2)).fit(X_train, y_train)
    y_pred1 = est1.predict(X_test)
    y_pred11 = est1.predict(X_train)
    print("result of MVML on digit with oneversone")
    result1 = np.mean(y_pred1.ravel() == y_test.ravel()) * 100
    print(result1)

    fig = plt.figure(figsize=(12., 11.))
    fig.suptitle("MVML: result" + str(result1), fontsize=16)
    plot_subplot(X_train, y_train, y_pred11, 0, (4, 1, 1), "train vue 0")
    plot_subplot(X_test, y_test, y_pred1, 0, (4, 1, 2), "test vue 0")
    plot_subplot(X_test, y_test, y_pred1, 1, (4, 1, 3), "test vue 1")
    plot_subplot(X_test, y_test, y_pred1, 2, (4, 1, 4), "test vue 2")
    # plt.legend()
    plt.show()
.. note::
    :class: sphx-glr-download-link-note

    Click :ref:`here <sphx_glr_download_tutorial_auto_examples_usecase_plot_usecase_exampleMVML.py>` to download the full example code

.. rst-class:: sphx-glr-example-title

.. _sphx_glr_tutorial_auto_examples_usecase_plot_usecase_exampleMVML.py:

================
Use Case of MVML
================

Use case for the MVML classifier of multimodallearn: multi-class digits from sklearn, three views
 - view 0: digit data (sklearn colors)
 - view 1: image gradient in the first direction
 - view 2: image gradient in the second direction

.. image:: /tutorial/auto_examples/usecase/images/sphx_glr_plot_usecase_exampleMVML_001.png
    :class: sphx-glr-single-img

.. rst-class:: sphx-glr-script-out

 Out:

 .. code-block:: none

    result of MVML on digit with oneversone
    98.88888888888889
    /home/dominique/projets/ANR-Lives/scikit-multimodallearn/examples/usecase/plot_usecase_exampleMVML.py:48: UserWarning: Matplotlib is currently using agg, which is a non-GUI backend, so cannot show the figure.
      plt.show()
|
.. code-block:: default


    from __future__ import absolute_import
    import numpy as np
    import matplotlib.pyplot as plt
    from sklearn.multiclass import OneVsOneClassifier
    from sklearn.model_selection import train_test_split
    from multimodal.datasets.base import load_dict, save_dict
    from multimodal.tests.data.get_dataset_path import get_dataset_path
    from multimodal.datasets.data_sample import MultiModalArray
    from multimodal.kernels.mvml import MVML
    from usecase_function import plot_subplot


    if __name__ == '__main__':
        # file = get_dataset_path("digit_histogram.npy")
        file = get_dataset_path("digit_col_grad.npy")
        y = np.load(get_dataset_path("digit_y.npy"))
        dic_digit = load_dict(file)
        XX = MultiModalArray(dic_digit)
        X_train, X_test, y_train, y_test = train_test_split(XX, y)
        est1 = OneVsOneClassifier(MVML(lmbda=0.1, eta=1, nystrom_param=0.2)).fit(X_train, y_train)
        y_pred1 = est1.predict(X_test)
        y_pred11 = est1.predict(X_train)
        print("result of MVML on digit with oneversone")
        result1 = np.mean(y_pred1.ravel() == y_test.ravel()) * 100
        print(result1)

        fig = plt.figure(figsize=(12., 11.))
        fig.suptitle("MVML: result" + str(result1), fontsize=16)
        plot_subplot(X_train, y_train, y_pred11, 0, (4, 1, 1), "train vue 0")
        plot_subplot(X_test, y_test, y_pred1, 0, (4, 1, 2), "test vue 0")
        plot_subplot(X_test, y_test, y_pred1, 1, (4, 1, 3), "test vue 1")
        plot_subplot(X_test, y_test, y_pred1, 2, (4, 1, 4), "test vue 2")
        # plt.legend()
        plt.show()
.. rst-class:: sphx-glr-timing
**Total running time of the script:** ( 0 minutes 39.921 seconds)
.. _sphx_glr_download_tutorial_auto_examples_usecase_plot_usecase_exampleMVML.py:
.. only :: html
.. container:: sphx-glr-footer
:class: sphx-glr-footer-example
.. container:: sphx-glr-download
:download:`Download Python source code: plot_usecase_exampleMVML.py <plot_usecase_exampleMVML.py>`
.. container:: sphx-glr-download
:download:`Download Jupyter notebook: plot_usecase_exampleMVML.ipynb <plot_usecase_exampleMVML.ipynb>`
.. only:: html
.. rst-class:: sphx-glr-signature
`Gallery generated by Sphinx-Gallery <https://sphinx-gallery.github.io>`_
File added
%% Cell type:code id: tags:
``` python
%matplotlib inline
```
%% Cell type:markdown id: tags:
# Use Case MuCumBo

Use case for the MuCumBo classifier of multimodallearn: multi-class digits from sklearn, three views
 - view 0: digit data (sklearn colors)
 - view 1: image gradient in the first direction
 - view 2: image gradient in the second direction
%% Cell type:code id: tags:
``` python
from __future__ import absolute_import
import numpy as np
import matplotlib.pyplot as plt
from sklearn.model_selection import train_test_split
from sklearn.tree import DecisionTreeClassifier
from multimodal.datasets.base import load_dict, save_dict
from multimodal.tests.data.get_dataset_path import get_dataset_path
from multimodal.datasets.data_sample import MultiModalArray

from multimodal.boosting.cumbo import MuCumboClassifier
from usecase_function import plot_subplot


if __name__ == '__main__':
    # file = get_dataset_path("digit_histogram.npy")
    file = get_dataset_path("digit_col_grad.npy")
    y = np.load(get_dataset_path("digit_y.npy"))
    base_estimator = DecisionTreeClassifier(max_depth=4)
    dic_digit = load_dict(file)
    XX = MultiModalArray(dic_digit)
    X_train, X_test, y_train, y_test = train_test_split(XX, y)
    est3 = MuCumboClassifier(base_estimator=base_estimator).fit(X_train, y_train)
    y_pred3 = est3.predict(X_test)
    y_pred33 = est3.predict(X_train)
    print("result of MuCumboClassifier on digit ")
    result3 = np.mean(y_pred3.ravel() == y_test.ravel()) * 100
    print(result3)

    fig = plt.figure(figsize=(12., 11.))
    fig.suptitle("MuCumbo: result" + str(result3), fontsize=16)
    plot_subplot(X_train, y_train, y_pred33, 0, (4, 1, 1), "train vue 0")
    plot_subplot(X_test, y_test, y_pred3, 0, (4, 1, 2), "test vue 0")
    plot_subplot(X_test, y_test, y_pred3, 1, (4, 1, 3), "test vue 1")
    plot_subplot(X_test, y_test, y_pred3, 2, (4, 1, 4), "test vue 2")
    # plt.legend()
    plt.show()
```
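In the cell above, MuCumboClassifier boosts a depth-4 decision tree over the three views. The weak learner is set through `base_estimator`, the same parameter used in the example, so other scikit-learn classifiers can be plugged in. A minimal sketch of trying deeper trees, reusing `X_train`, `y_train`, `X_test` and `y_test` from the cell above; it assumes the estimator follows the usual scikit-learn `score` API, which should be checked against the class documentation.

``` python
from sklearn.tree import DecisionTreeClassifier
from multimodal.boosting.cumbo import MuCumboClassifier

# deeper trees as weak learners than the max_depth=4 used in the example
est_deep = MuCumboClassifier(base_estimator=DecisionTreeClassifier(max_depth=8))
est_deep.fit(X_train, y_train)           # X_train, y_train as in the cell above
print(est_deep.score(X_test, y_test))    # assumes the standard ClassifierMixin.score
```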
# -*- coding: utf-8 -*-
"""
================
Use Case MuCumBo
================
Use case for the MuCumBo classifier of multimodallearn: multi-class digits from sklearn, three views
 - view 0: digit data (sklearn colors)
 - view 1: image gradient in the first direction
 - view 2: image gradient in the second direction
"""
from __future__ import absolute_import
import numpy as np
import matplotlib.pyplot as plt
from sklearn.model_selection import train_test_split
from sklearn.tree import DecisionTreeClassifier
from multimodal.datasets.base import load_dict, save_dict
from multimodal.tests.data.get_dataset_path import get_dataset_path
from multimodal.datasets.data_sample import MultiModalArray

from multimodal.boosting.cumbo import MuCumboClassifier
from usecase_function import plot_subplot


if __name__ == '__main__':
    # file = get_dataset_path("digit_histogram.npy")
    file = get_dataset_path("digit_col_grad.npy")
    y = np.load(get_dataset_path("digit_y.npy"))
    base_estimator = DecisionTreeClassifier(max_depth=4)
    dic_digit = load_dict(file)
    XX = MultiModalArray(dic_digit)
    X_train, X_test, y_train, y_test = train_test_split(XX, y)
    est3 = MuCumboClassifier(base_estimator=base_estimator).fit(X_train, y_train)
    y_pred3 = est3.predict(X_test)
    y_pred33 = est3.predict(X_train)
    print("result of MuCumboClassifier on digit ")
    result3 = np.mean(y_pred3.ravel() == y_test.ravel()) * 100
    print(result3)

    fig = plt.figure(figsize=(12., 11.))
    fig.suptitle("MuCumbo: result" + str(result3), fontsize=16)
    plot_subplot(X_train, y_train, y_pred33, 0, (4, 1, 1), "train vue 0")
    plot_subplot(X_test, y_test, y_pred3, 0, (4, 1, 2), "test vue 0")
    plot_subplot(X_test, y_test, y_pred3, 1, (4, 1, 3), "test vue 1")
    plot_subplot(X_test, y_test, y_pred3, 2, (4, 1, 4), "test vue 2")
    # plt.legend()
    plt.show()
.. note::
    :class: sphx-glr-download-link-note

    Click :ref:`here <sphx_glr_download_tutorial_auto_examples_usecase_plot_usecase_exampleMuCuBo.py>` to download the full example code

.. rst-class:: sphx-glr-example-title

.. _sphx_glr_tutorial_auto_examples_usecase_plot_usecase_exampleMuCuBo.py:

================
Use Case MuCumBo
================

Use case for the MuCumBo classifier of multimodallearn: multi-class digits from sklearn, three views
 - view 0: digit data (sklearn colors)
 - view 1: image gradient in the first direction
 - view 2: image gradient in the second direction

.. image:: /tutorial/auto_examples/usecase/images/sphx_glr_plot_usecase_exampleMuCuBo_001.png
    :class: sphx-glr-single-img

.. rst-class:: sphx-glr-script-out

 Out:

 .. code-block:: none

    result of MuCumboClassifier on digit
    85.33333333333334
    /home/dominique/projets/ANR-Lives/scikit-multimodallearn/examples/usecase/plot_usecase_exampleMuCuBo.py:49: UserWarning: Matplotlib is currently using agg, which is a non-GUI backend, so cannot show the figure.
      plt.show()
|
.. code-block:: default


    from __future__ import absolute_import
    import numpy as np
    import matplotlib.pyplot as plt
    from sklearn.model_selection import train_test_split
    from sklearn.tree import DecisionTreeClassifier
    from multimodal.datasets.base import load_dict, save_dict
    from multimodal.tests.data.get_dataset_path import get_dataset_path
    from multimodal.datasets.data_sample import MultiModalArray

    from multimodal.boosting.cumbo import MuCumboClassifier
    from usecase_function import plot_subplot


    if __name__ == '__main__':
        # file = get_dataset_path("digit_histogram.npy")
        file = get_dataset_path("digit_col_grad.npy")
        y = np.load(get_dataset_path("digit_y.npy"))
        base_estimator = DecisionTreeClassifier(max_depth=4)
        dic_digit = load_dict(file)
        XX = MultiModalArray(dic_digit)
        X_train, X_test, y_train, y_test = train_test_split(XX, y)
        est3 = MuCumboClassifier(base_estimator=base_estimator).fit(X_train, y_train)
        y_pred3 = est3.predict(X_test)
        y_pred33 = est3.predict(X_train)
        print("result of MuCumboClassifier on digit ")
        result3 = np.mean(y_pred3.ravel() == y_test.ravel()) * 100
        print(result3)

        fig = plt.figure(figsize=(12., 11.))
        fig.suptitle("MuCumbo: result" + str(result3), fontsize=16)
        plot_subplot(X_train, y_train, y_pred33, 0, (4, 1, 1), "train vue 0")
        plot_subplot(X_test, y_test, y_pred3, 0, (4, 1, 2), "test vue 0")
        plot_subplot(X_test, y_test, y_pred3, 1, (4, 1, 3), "test vue 1")
        plot_subplot(X_test, y_test, y_pred3, 2, (4, 1, 4), "test vue 2")
        # plt.legend()
        plt.show()
.. rst-class:: sphx-glr-timing
**Total running time of the script:** ( 0 minutes 11.436 seconds)
.. _sphx_glr_download_tutorial_auto_examples_usecase_plot_usecase_exampleMuCuBo.py:
.. only :: html
.. container:: sphx-glr-footer
:class: sphx-glr-footer-example
.. container:: sphx-glr-download
:download:`Download Python source code: plot_usecase_exampleMuCuBo.py <plot_usecase_exampleMuCuBo.py>`
.. container:: sphx-glr-download
:download:`Download Jupyter notebook: plot_usecase_exampleMuCuBo.ipynb <plot_usecase_exampleMuCuBo.ipynb>`
.. only:: html
.. rst-class:: sphx-glr-signature
`Gallery generated by Sphinx-Gallery <https://sphinx-gallery.github.io>`_
File added
%% Cell type:code id: tags:
``` python
%matplotlib inline
```
%% Cell type:markdown id: tags:
# Use Case MumBo

Use case for the MumBo classifier of multimodallearn: multi-class digits from sklearn, three views
 - view 0: digit data (sklearn colors)
 - view 1: image gradient in the first direction
 - view 2: image gradient in the second direction
%% Cell type:code id: tags:
``` python
from __future__ import absolute_import
import numpy as np
import matplotlib.pyplot as plt
from sklearn.model_selection import train_test_split
from sklearn.tree import DecisionTreeClassifier
from multimodal.datasets.base import load_dict, save_dict
from multimodal.tests.data.get_dataset_path import get_dataset_path
from multimodal.datasets.data_sample import MultiModalArray

from multimodal.boosting.mumbo import MumboClassifier

from usecase_function import plot_subplot


if __name__ == '__main__':
    # file = get_dataset_path("digit_histogram.npy")
    file = get_dataset_path("digit_col_grad.npy")
    y = np.load(get_dataset_path("digit_y.npy"))
    base_estimator = DecisionTreeClassifier(max_depth=4)
    dic_digit = load_dict(file)
    XX = MultiModalArray(dic_digit)
    X_train, X_test, y_train, y_test = train_test_split(XX, y)

    est2 = MumboClassifier(base_estimator=base_estimator).fit(X_train, y_train)
    y_pred2 = est2.predict(X_test)
    y_pred22 = est2.predict(X_train)
    print("result of MumboClassifier on digit ")
    result2 = np.mean(y_pred2.ravel() == y_test.ravel()) * 100
    print(result2)

    fig = plt.figure(figsize=(12., 11.))
    fig.suptitle("Mumbo: result" + str(result2), fontsize=16)
    plot_subplot(X_train, y_train, y_pred22, 0, (4, 1, 1), "train vue 0")
    plot_subplot(X_test, y_test, y_pred2, 0, (4, 1, 2), "test vue 0")
    plot_subplot(X_test, y_test, y_pred2, 1, (4, 1, 3), "test vue 1")
    plot_subplot(X_test, y_test, y_pred2, 2, (4, 1, 4), "test vue 2")
    # plt.legend()
    plt.show()
```
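The Mumbo cell above computes predictions on both the training set (`y_pred22`) and the test set (`y_pred2`) but only prints the test accuracy. Comparing the two is a quick overfitting check for the boosted trees; a minimal sketch reusing the variables from the cell above.

``` python
import numpy as np

train_acc = np.mean(y_pred22.ravel() == y_train.ravel()) * 100
test_acc = np.mean(y_pred2.ravel() == y_test.ravel()) * 100
# a large gap between the two would suggest the boosted trees overfit the training views
print("train accuracy: %.2f  test accuracy: %.2f" % (train_acc, test_acc))
```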
# -*- coding: utf-8 -*-
"""
==============
Use Case MumBo
==============
Use case for the MumBo classifier of multimodallearn: multi-class digits from sklearn, three views
 - view 0: digit data (sklearn colors)
 - view 1: image gradient in the first direction
 - view 2: image gradient in the second direction
"""
from __future__ import absolute_import
import numpy as np
import matplotlib.pyplot as plt
from sklearn.model_selection import train_test_split
from sklearn.tree import DecisionTreeClassifier
from multimodal.datasets.base import load_dict, save_dict
from multimodal.tests.data.get_dataset_path import get_dataset_path
from multimodal.datasets.data_sample import MultiModalArray

from multimodal.boosting.mumbo import MumboClassifier

from usecase_function import plot_subplot


if __name__ == '__main__':
    # file = get_dataset_path("digit_histogram.npy")
    file = get_dataset_path("digit_col_grad.npy")
    y = np.load(get_dataset_path("digit_y.npy"))
    base_estimator = DecisionTreeClassifier(max_depth=4)
    dic_digit = load_dict(file)
    XX = MultiModalArray(dic_digit)
    X_train, X_test, y_train, y_test = train_test_split(XX, y)

    est2 = MumboClassifier(base_estimator=base_estimator).fit(X_train, y_train)
    y_pred2 = est2.predict(X_test)
    y_pred22 = est2.predict(X_train)
    print("result of MumboClassifier on digit ")
    result2 = np.mean(y_pred2.ravel() == y_test.ravel()) * 100
    print(result2)

    fig = plt.figure(figsize=(12., 11.))
    fig.suptitle("Mumbo: result" + str(result2), fontsize=16)
    plot_subplot(X_train, y_train, y_pred22, 0, (4, 1, 1), "train vue 0")
    plot_subplot(X_test, y_test, y_pred2, 0, (4, 1, 2), "test vue 0")
    plot_subplot(X_test, y_test, y_pred2, 1, (4, 1, 3), "test vue 1")
    plot_subplot(X_test, y_test, y_pred2, 2, (4, 1, 4), "test vue 2")
    # plt.legend()
    plt.show()
.. note::
    :class: sphx-glr-download-link-note

    Click :ref:`here <sphx_glr_download_tutorial_auto_examples_usecase_plot_usecase_exampleMumBo.py>` to download the full example code

.. rst-class:: sphx-glr-example-title

.. _sphx_glr_tutorial_auto_examples_usecase_plot_usecase_exampleMumBo.py:

==============
Use Case MumBo
==============

Use case for the MumBo classifier of multimodallearn: multi-class digits from sklearn, three views
 - view 0: digit data (sklearn colors)
 - view 1: image gradient in the first direction
 - view 2: image gradient in the second direction

.. image:: /tutorial/auto_examples/usecase/images/sphx_glr_plot_usecase_exampleMumBo_001.png
    :class: sphx-glr-single-img

.. rst-class:: sphx-glr-script-out

 Out:

 .. code-block:: none

    result of MumboClassifier on digit
    96.0
    /home/dominique/projets/ANR-Lives/scikit-multimodallearn/examples/usecase/plot_usecase_exampleMumBo.py:51: UserWarning: Matplotlib is currently using agg, which is a non-GUI backend, so cannot show the figure.
      plt.show()
|
.. code-block:: default


    from __future__ import absolute_import
    import numpy as np
    import matplotlib.pyplot as plt
    from sklearn.model_selection import train_test_split
    from sklearn.tree import DecisionTreeClassifier
    from multimodal.datasets.base import load_dict, save_dict
    from multimodal.tests.data.get_dataset_path import get_dataset_path
    from multimodal.datasets.data_sample import MultiModalArray

    from multimodal.boosting.mumbo import MumboClassifier

    from usecase_function import plot_subplot


    if __name__ == '__main__':
        # file = get_dataset_path("digit_histogram.npy")
        file = get_dataset_path("digit_col_grad.npy")
        y = np.load(get_dataset_path("digit_y.npy"))
        base_estimator = DecisionTreeClassifier(max_depth=4)
        dic_digit = load_dict(file)
        XX = MultiModalArray(dic_digit)
        X_train, X_test, y_train, y_test = train_test_split(XX, y)

        est2 = MumboClassifier(base_estimator=base_estimator).fit(X_train, y_train)
        y_pred2 = est2.predict(X_test)
        y_pred22 = est2.predict(X_train)
        print("result of MumboClassifier on digit ")
        result2 = np.mean(y_pred2.ravel() == y_test.ravel()) * 100
        print(result2)

        fig = plt.figure(figsize=(12., 11.))
        fig.suptitle("Mumbo: result" + str(result2), fontsize=16)
        plot_subplot(X_train, y_train, y_pred22, 0, (4, 1, 1), "train vue 0")
        plot_subplot(X_test, y_test, y_pred2, 0, (4, 1, 2), "test vue 0")
        plot_subplot(X_test, y_test, y_pred2, 1, (4, 1, 3), "test vue 1")
        plot_subplot(X_test, y_test, y_pred2, 2, (4, 1, 4), "test vue 2")
        # plt.legend()
        plt.show()
.. rst-class:: sphx-glr-timing
**Total running time of the script:** ( 0 minutes 5.520 seconds)
.. _sphx_glr_download_tutorial_auto_examples_usecase_plot_usecase_exampleMumBo.py:
.. only :: html
.. container:: sphx-glr-footer
:class: sphx-glr-footer-example
.. container:: sphx-glr-download
:download:`Download Python source code: plot_usecase_exampleMumBo.py <plot_usecase_exampleMumBo.py>`
.. container:: sphx-glr-download
:download:`Download Jupyter notebook: plot_usecase_exampleMumBo.ipynb <plot_usecase_exampleMumBo.ipynb>`
.. only:: html
.. rst-class:: sphx-glr-signature
`Gallery generated by Sphinx-Gallery <https://sphinx-gallery.github.io>`_
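Each captured output above ends with a UserWarning because the example scripts call `plt.show()` while Matplotlib is using the non-GUI `agg` backend, as happens during a headless documentation build. Saving the figure instead of showing it avoids the warning; a minimal sketch, with the output filename chosen here purely for illustration.

``` python
import matplotlib
matplotlib.use("Agg")        # headless backend, as in the captured output above
import matplotlib.pyplot as plt

fig = plt.figure(figsize=(12., 11.))
# ... build the subplots as in the examples above ...
fig.savefig("usecase_mumbo.png", dpi=150)   # instead of plt.show()
plt.close(fig)
```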
File added