Commit 94ab2676 authored by Dominique Benielli

one versus one

parent 3f3f5ee5
Pipeline #4156 passed
@@ -19,7 +19,7 @@ class MKL(BaseEstimator, ClassifierMixin, MKernel):
     lmbda : float coeficient for combined kernels
-    m_param : float (default : 1.0)
+    nystrom_param : float (default : 1.0)
         value between 0 and 1 indicating level of nyström approximation;
         1 = no approximation
@@ -74,13 +74,13 @@ class MKL(BaseEstimator, ClassifierMixin, MKernel):
     weights : learned weight for combining the solutions of views, learned in
     """
-    def __init__(self, lmbda, m_param=1.0, kernel="precomputed",
+    def __init__(self, lmbda, nystrom_param=1.0, kernel="linear",
                  kernel_params=None, use_approx=True, precision=1E-4, n_loops=50):
         # calculate nyström approximation (if used)
         self.lmbda = lmbda
         self.n_loops = n_loops
         self.use_approx = use_approx
-        self.m_param = m_param
+        self.nystrom_param = nystrom_param
         self.kernel= kernel
         self.kernel_params = kernel_params
         self.precision = precision
@@ -134,11 +134,12 @@ class MKL(BaseEstimator, ClassifierMixin, MKernel):
             y = y.astype(float)
             self.regression_ = True
         else:
-            raise ValueError("MKL algorithms is a binary classifier"
-                             " or performs regression with float target")
+            raise ValueError("MKL algorithms is a binary classifier")
+            # " or performs regression with float target")
         self.y_ = y
         n = self.K_.shape[0]
         self._calc_nystrom(self.K_, n)
         C, weights = self.learn_lpMKL()
         self.C = C
         self.weights = weights
@@ -175,7 +176,7 @@ class MKL(BaseEstimator, ClassifierMixin, MKernel):
             # gammas are fixed upon arrival to the loop
             # -> solve for alpha!
-            if self.m_param < 1 and self.use_approx:
+            if self.nystrom_param < 1 and self.use_approx:
                 combined_kernel = np.zeros((n, n))
                 for v in range(0, views):
                     combined_kernel = combined_kernel + weights[v] * kernels[v]
@@ -194,7 +195,7 @@ class MKL(BaseEstimator, ClassifierMixin, MKernel):
             # first the ||f_t||^2 todo wtf is the formula used here????
             ft2 = np.zeros(views)
             for v in range(0, views):
-                if self.m_param < 1 and self.use_approx:
+                if self.nystrom_param < 1 and self.use_approx:
                     # ft2[v,vv] = weights_old[v,vv] * np.dot(np.transpose(C), np.dot(np.dot(np.dot(data.U_dict[v],
                     #                                                                      np.transpose(data.U_dict[v])),
                     #                                                               np.dot(data.U_dict[vv],
@@ -247,6 +248,39 @@ class MKL(BaseEstimator, ClassifierMixin, MKernel):
         else:
             return C, weights
+
+    def decision_function(self, X):
+        """Compute the decision function of X.
+
+        Parameters
+        ----------
+        X : dict of views {array-like} with shape = (n_samples, n_features) per view,
+            or `MultiModalData` / `MultiModalArray`,
+            or array-like with shape = (n_samples, n_features).
+            Multi-view input samples; can also be a kernel matrix when the
+            attribute 'kernel' is set to "precomputed".
+
+        Returns
+        -------
+        dec_fun : numpy.ndarray, shape = (n_samples, )
+            Decision function of the input samples.
+            For binary classification,
+            values <= 0 mean classification in the first class in ``classes_``
+            and values > 0 mean classification in the second class in
+            ``classes_``.
+        """
+        check_is_fitted(self, ['X_', 'C', 'K_', 'y_', 'weights'])
+        X, K = self._global_kernel_transform(X,
+                                             views_ind=self.X_.views_ind,
+                                             Y=self.X_)
+        check_array(X)
+        C = self.C
+        weights = self.weights
+        pred = self.lpMKL_predict(K, C, weights)
+        return pred
+
     def predict(self, X):
         """
@@ -280,15 +314,7 @@ class MKL(BaseEstimator, ClassifierMixin, MKernel):
         y : numpy.ndarray, shape = (n_samples,)
             Predicted classes.
         """
-        check_is_fitted(self, ['X_', 'C', 'K_', 'y_', 'weights'])
-        X, K = self._global_kernel_transform(X,
-                                             views_ind=self.X_.views_ind,
-                                             Y=self.X_)
-        check_array(X)
-        C = self.C
-        weights = self.weights
-        pred = self.lpMKL_predict(K, C, weights)
+        pred = self.decision_function(X)
         pred = np.sign(pred)
         pred = pred.astype(int)
         pred = np.where(pred == -1, 0, pred)
@@ -315,7 +341,7 @@ class MKL(BaseEstimator, ClassifierMixin, MKernel):
         """
         views = X.n_views
         tt = X.shape[0]
-        m = self.K_.shape[0]  # self.m_param * n
+        m = self.K_.shape[0]  # self.nystrom_param * n
         # NO TEST KERNEL APPROXIMATION
         # kernel = weights[0] * self.data.test_kernel_dict[0]
@@ -325,7 +351,7 @@ class MKL(BaseEstimator, ClassifierMixin, MKernel):
             # TEST KERNEL APPROXIMATION
             kernel = np.zeros((tt, self.K_.shape[0]))
             for v in range(0, views):
-                if self.m_param < 1:
+                if self.nystrom_param < 1:
                     kernel = kernel + weights[v] * np.dot(np.dot(X.get_view(v)[:, 0:m], self.W_sqrootinv_dict[v]),
                                                           np.transpose(self.U_dict[v]))
                 else:
...
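For reference, a minimal usage sketch of the API after this change: the renamed nystrom_param keyword and the new MKL.decision_function. The import path and the dict-of-views input format are assumptions based on the docstrings and tests in this diff, not something the commit defines.

# Minimal usage sketch (assumed import path; dict-of-views input per the docstring above).
import numpy as np
from multimodal.kernels.mkl import MKL  # assumed module path, not shown in this diff

rng = np.random.RandomState(0)
views = {0: rng.rand(20, 5), 1: rng.rand(20, 8)}   # two toy views, 20 samples each
y = rng.randint(0, 2, 20)                          # binary labels

mkl = MKL(lmbda=3, nystrom_param=0.3,              # nystrom_param replaces m_param
          kernel=['rbf'], kernel_params=[{'gamma': 50}],
          use_approx=True, precision=1E-4, n_loops=50)
mkl.fit(views, y=y, views_ind=None)

scores = mkl.decision_function(views)   # raw scores, shape (20,)
labels = mkl.predict(views)             # sign of the scores mapped back onto classes_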
@@ -194,6 +194,7 @@ class MVML(MKernel, BaseEstimator, ClassifierMixin, RegressorMixin):
         if type_of_target(y) in "binary":
             self.classes_, y = np.unique(y, return_inverse=True)
             y[y==0] = -1.0
+            self.n_classes = len(self.classes_)
         elif type_of_target(y) in "continuous":
             y = y.astype(float)
             self.regression_ = True
@@ -434,13 +435,7 @@ class MVML(MKernel, BaseEstimator, ClassifierMixin, RegressorMixin):
         y : numpy.ndarray, shape = (n_samples,)
             Predicted classes.
         """
-        check_is_fitted(self, ['X_', 'U_dict', 'K_', 'y_'])  # , 'U_dict', 'K_' 'y_'
-        X, test_kernels = self._global_kernel_transform(X,
-                                                        views_ind=self.X_.views_ind,
-                                                        Y=self.X_)
-        check_array(X)
-        pred = self._predict_mvml(test_kernels, self.g, self.w).squeeze()
+        pred = self.decision_function(X)
         if self.regression_:
             return pred
         else:
@@ -449,6 +444,36 @@ class MVML(MKernel, BaseEstimator, ClassifierMixin, RegressorMixin):
             pred = np.where(pred == -1, 0, pred)
             return np.take(self.classes_, pred)
+
+    def decision_function(self, X):
+        """Compute the decision function of X.
+
+        Parameters
+        ----------
+        X : {array-like, sparse matrix}, shape = (n_samples, n_views * n_features)
+            Multi-view input samples; a `MultiModalData` object is also accepted.
+
+        Returns
+        -------
+        dec_fun : numpy.ndarray, shape = (n_samples, )
+            Decision function of the input samples.
+            For binary classification,
+            values <= 0 mean classification in the first class in ``classes_``
+            and values > 0 mean classification in the second class in
+            ``classes_``.
+        """
+        check_is_fitted(self, ['X_', 'U_dict', 'K_', 'y_'])
+        X, test_kernels = self._global_kernel_transform(X,
+                                                        views_ind=self.X_.views_ind,
+                                                        Y=self.X_)
+        check_array(X)
+        pred = self._predict_mvml(test_kernels, self.g, self.w).squeeze()
+        return pred
+
     def _predict_mvml(self, test_kernels, g, w):
         """
...
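The same split between decision_function and predict is applied to MVML above: decision_function returns the raw scores and predict only thresholds them. A minimal sketch of that thresholding logic, reproduced from the hunks above (the helper name and the clf/X placeholders are illustrative only):

import numpy as np

def predict_via_decision_function(clf, X):
    """Re-derive predict() from decision_function(), mirroring the refactor above."""
    scores = clf.decision_function(X)        # raw scores, shape (n_samples,)
    if getattr(clf, "regression_", False):
        return scores                        # regression: the scores are the output
    pred = np.sign(scores).astype(int)       # -1 / 0 / +1
    pred = np.where(pred == -1, 0, pred)     # negative scores -> class index 0
    return np.take(clf.classes_, pred)       # map indices back to the original labels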
@@ -32,10 +32,10 @@ class MKLTest(unittest.TestCase):
         clf.test_y = test_y

     def testInitMKL(self):
-        mkl = MKL(lmbda=3, m_param = 1.0, kernel = "precomputed",
+        mkl = MKL(lmbda=3, nystrom_param=1.0, kernel = "precomputed",
                   kernel_params = None, use_approx = True,
                   precision = 1E-4, n_loops = 50)
-        self.assertEqual(mkl.m_param, 1.0)
+        self.assertEqual(mkl.nystrom_param, 1.0)
         self.assertEqual(mkl.lmbda, 3)
         self.assertEqual(mkl.n_loops, 50)
         self.assertEqual(mkl.precision, 1E-4)
@@ -44,7 +44,7 @@ class MKLTest(unittest.TestCase):
         #######################################################
         # task with dict and not precomputed
         #######################################################
-        mkl = MKL(lmbda=3, m_param = 1.0, kernel=['rbf'], kernel_params=[{'gamma':50}],
+        mkl = MKL(lmbda=3, nystrom_param=1.0, kernel=['rbf'], kernel_params=[{'gamma':50}],
                   use_approx = True,
                   precision = 1E-4, n_loops = 50)
         mkl.fit(self.kernel_dict, y=self.y, views_ind=None)
@@ -56,8 +56,8 @@ class MKLTest(unittest.TestCase):
         #######################################################
         # task with dict and not precomputed
         #######################################################
-        mkl = MKL(lmbda=3, m_param = 0.3, kernel=['rbf'], kernel_params=[{'gamma':50}],
-                  use_approx = True,
+        mkl = MKL(lmbda=3, nystrom_param=0.3, kernel=['rbf'], kernel_params=[{'gamma':50}],
+                  use_approx=True,
                   precision = 1E-4, n_loops = 50)
         views_ind = [120, 240]
         mkl.fit(self.kernel_dict, y=self.y, views_ind=None)
@@ -71,7 +71,7 @@ class MKLTest(unittest.TestCase):
         # mvml = MVML.fit(self.kernel_dict, self.y)
         w_expected = np.array([[0.5], [0.5]])
         x_metricl = MultiModalArray(self.kernel_dict)
-        mkl2 = MKL(lmbda=3, m_param = 0.3, kernel=['rbf'], kernel_params=[{'gamma':50}],
+        mkl2 = MKL(lmbda=3, nystrom_param = 0.3, kernel=['rbf'], kernel_params=[{'gamma':50}],
                    use_approx = True,
                    precision = 1E0, n_loops = 50)
         with self.assertRaises(ValueError):
@@ -84,13 +84,13 @@ class MKLTest(unittest.TestCase):
         # mvml = MVML.fit(self.kernel_dict, self.y)
         w_expected = np.array([[0.5], [0.5]])
         x_metricl = MultiModalArray(self.kernel_dict)
-        mkl2 = MKL(lmbda=3, m_param = 0.3, kernel="precomputed",
+        mkl2 = MKL(lmbda=3, nystrom_param=0.3, kernel="precomputed",
                    use_approx = True,
                    precision = 1E-9, n_loops = 600)
         mkl2.fit(x_metricl, y=self.y, views_ind=None)

     def testPredictMVML_witoutFit(self):
-        mkl = MKL(lmbda=3, m_param = 0.3, kernel=['rbf'], kernel_params=[{'gamma':50}],
+        mkl = MKL(lmbda=3, nystrom_param=0.3, kernel=['rbf'], kernel_params=[{'gamma':50}],
                   use_approx = True,
                   precision = 1E-9, n_loops = 50)
         with self.assertRaises(NotFittedError):
@@ -98,7 +98,7 @@ class MKLTest(unittest.TestCase):
     def testPredictMVML_witoutFit(self):
         x_metric = MultiModalArray(self.kernel_dict)
-        mkl = MKL(lmbda=3, m_param = 0.3, kernel=['rbf'], kernel_params=[{'gamma':50}],
+        mkl = MKL(lmbda=3, nystrom_param = 0.3, kernel=['rbf'], kernel_params=[{'gamma':50}],
                   use_approx = True,
                   precision = 1E-9, n_loops = 50)
         mkl.fit(x_metric, y=self.y, views_ind=None)
...
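A possible follow-up test (not part of this commit) could check that the new decision_function stays consistent with predict; the sketch below assumes it lives in the same test module, reusing the MKLTest fixture attributes self.kernel_dict and self.y seen in the hunks above.

import numpy as np

class MKLDecisionFunctionTest(MKLTest):
    def testDecisionFunctionConsistentWithPredict(self):
        mkl = MKL(lmbda=3, nystrom_param=0.3, kernel=['rbf'],
                  kernel_params=[{'gamma': 50}],
                  use_approx=True, precision=1E-4, n_loops=50)
        mkl.fit(self.kernel_dict, y=self.y, views_ind=None)
        scores = mkl.decision_function(self.kernel_dict)
        pred = mkl.predict(self.kernel_dict)
        # predict() thresholds the decision function at 0 and maps back to classes_
        expected = np.take(mkl.classes_, (scores > 0).astype(int))
        np.testing.assert_array_equal(pred, expected)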