diff --git a/multimodal/boosting/combo.py b/multimodal/boosting/combo.py
index 5b67ba8215c3d44b59e69b8d2f03f7caddd6d1b4..c56ebf2428c28885de7d0b30d9903a369538ec7f 100644
--- a/multimodal/boosting/combo.py
+++ b/multimodal/boosting/combo.py
@@ -48,11 +48,11 @@ estimator for classification implemented in the ``MuComboClassifier`` class.
 import numpy as np
 from sklearn.base import ClassifierMixin
 from sklearn.ensemble import BaseEnsemble
-from sklearn.ensemble.forest import BaseForest
+from sklearn.ensemble._forest import BaseForest
 from sklearn.metrics import accuracy_score
 from sklearn.tree import DecisionTreeClassifier
 from sklearn.tree._tree import DTYPE
-from sklearn.tree.tree import BaseDecisionTree
+from sklearn.tree import BaseDecisionTree
 from sklearn.utils import check_array, check_X_y, check_random_state
 from sklearn.utils.multiclass import check_classification_targets
 from sklearn.utils.validation import check_is_fitted, has_fit_parameter
@@ -141,11 +141,11 @@ class MuComboClassifier(BaseEnsemble, ClassifierMixin, UBoosting):
     >>> base_estimator = DecisionTreeClassifier(max_depth=2)
     >>> clf = MuComboClassifier(base_estimator=base_estimator, random_state=1)
     >>> clf.fit(X, y, views_ind)  # doctest: +NORMALIZE_WHITESPACE
-    MuComboClassifier(base_estimator=DecisionTreeClassifier(class_weight=None, criterion='gini', max_depth=2,
+    MuComboClassifier(base_estimator=DecisionTreeClassifier(ccp_alpha=0.0, class_weight=None, criterion='gini', max_depth=2,
             max_features=None, max_leaf_nodes=None,
             min_impurity_decrease=0.0, min_impurity_split=None,
             min_samples_leaf=1, min_samples_split=2,
-            min_weight_fraction_leaf=0.0, presort=False, random_state=None,
+            min_weight_fraction_leaf=0.0, presort='deprecated', random_state=None,
             splitter='best'),
          n_estimators=50, random_state=1)
     >>> print(clf.predict([[ 5.,  3.,  1.,  1.]]))
@@ -222,8 +222,9 @@ class MuComboClassifier(BaseEnsemble, ClassifierMixin, UBoosting):
         dist = np.empty(cost.shape[:2], dtype=cost.dtype, order="C")
         # NOTE: In Sokol's PhD thesis, the formula for dist is mistakenly given
         # with a minus sign in section 2.2.2 page 31
-        dist[:, :] = cost[:, np.arange(n_samples), y] \
-            / np.sum(cost[:, np.arange(n_samples), y], axis=1)[:, np.newaxis]
+        sum_cost = np.sum(cost[:, np.arange(n_samples), y], axis=1)[:, np.newaxis]
+        sum_cost[sum_cost == 0] = 1  # guard: an all-zero normalizer would turn dist into nan
+        dist[:, :] = cost[:, np.arange(n_samples), y] / sum_cost
         return dist
 
     def _indicatrice(self, predicted_classes, y_i):
@@ -444,6 +445,8 @@ class MuComboClassifier(BaseEnsemble, ClassifierMixin, UBoosting):
         views_ind_, n_views = self.X_._validate_views_ind(self.X_.views_ind,
                                                           self.X_.shape[1])
         check_X_y(self.X_, y)
+        if not isinstance(y, np.ndarray):
+            y = np.asarray(y)
         check_classification_targets(y)
         self._validate_estimator()
 
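The zero-sum guard added to `_compute_dist` is easy to sanity-check outside the class. A minimal standalone sketch (toy shapes and values are hypothetical, not the classifier's data):

```python
import numpy as np

# Toy shapes: 2 views, 3 samples, 2 classes -- hypothetical values, not the real data.
cost = np.zeros((2, 3, 2))
cost[0] = [[0.5, 0.5], [0.2, 0.8], [0.3, 0.7]]  # view 1 stays all-zero on purpose
y = np.array([0, 1, 1])

picked = cost[:, np.arange(3), y]             # shape (n_views, n_samples)
sum_cost = picked.sum(axis=1)[:, np.newaxis]  # per-view normalizer, shape (n_views, 1)
sum_cost[sum_cost == 0] = 1                   # the guard: 0/0 would propagate nan
dist = picked / sum_cost
print(dist)  # the all-zero view comes out as zeros instead of nan
```

Without the guard, an all-zero cost row makes the division produce `nan` and the per-view distribution degenerates; mapping the zero normalizer to 1 leaves that view's distribution at zero instead.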
diff --git a/multimodal/boosting/mumbo.py b/multimodal/boosting/mumbo.py
index 4b5244b16f03a604360ca3b16da2bc813775c4f3..255c4f77e1844c3212988f68048d8d51ebcff67f 100644
--- a/multimodal/boosting/mumbo.py
+++ b/multimodal/boosting/mumbo.py
@@ -48,10 +48,10 @@ import numpy as np
 
 from sklearn.base import ClassifierMixin
 from sklearn.ensemble import BaseEnsemble
-from sklearn.ensemble.forest import BaseForest
+from sklearn.ensemble._forest import BaseForest
 from sklearn.metrics import accuracy_score
 from sklearn.tree import DecisionTreeClassifier
-from sklearn.tree.tree import BaseDecisionTree
+from sklearn.tree import BaseDecisionTree
 from sklearn.tree._tree import DTYPE
 from sklearn.utils import check_array, check_X_y, check_random_state
 from sklearn.utils.multiclass import check_classification_targets
@@ -143,11 +143,11 @@ class MumboClassifier(BaseEnsemble, ClassifierMixin, UBoosting):
     >>> base_estimator = DecisionTreeClassifier(max_depth=2)
     >>> clf = MumboClassifier(base_estimator=base_estimator, random_state=0)
     >>> clf.fit(X, y, views_ind)  # doctest: +NORMALIZE_WHITESPACE
-    MumboClassifier(base_estimator=DecisionTreeClassifier(class_weight=None,
+    MumboClassifier(base_estimator=DecisionTreeClassifier(ccp_alpha=0.0, class_weight=None,
             criterion='gini', max_depth=2, max_features=None,
             max_leaf_nodes=None, min_impurity_decrease=0.0,
             min_impurity_split=None, min_samples_leaf=1, min_samples_split=2,
-            min_weight_fraction_leaf=0.0, presort=False, random_state=None,
+            min_weight_fraction_leaf=0.0, presort='deprecated', random_state=None,
             splitter='best'),
         best_view_mode='edge', n_estimators=50, random_state=0)
     >>> print(clf.predict([[ 5.,  3.,  1.,  1.]]))
@@ -377,6 +377,8 @@ class MumboClassifier(BaseEnsemble, ClassifierMixin, UBoosting):
         views_ind_, n_views = self.X_._validate_views_ind(self.X_.views_ind,
                                                           self.X_.shape[1])
         check_X_y(self.X_, y, accept_sparse=accept_sparse, dtype=dtype)
+        if not isinstance(y, np.ndarray):
+            y = np.asarray(y)
         check_classification_targets(y)
         self._validate_estimator()
 
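Both files now import from the locations used since scikit-learn 0.22, when `sklearn.ensemble.forest` became the private `sklearn.ensemble._forest` and `BaseDecisionTree` became importable from `sklearn.tree` directly. If support for older releases were still needed, a try/except shim would be the usual pattern; this is a sketch of that alternative, not what the patch does:

```python
try:
    # scikit-learn >= 0.22: forest internals moved to a private module
    from sklearn.ensemble._forest import BaseForest
    from sklearn.tree import BaseDecisionTree
except ImportError:  # pragma: no cover -- older scikit-learn
    from sklearn.ensemble.forest import BaseForest
    from sklearn.tree.tree import BaseDecisionTree
```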
diff --git a/multimodal/kernels/mvml.py b/multimodal/kernels/mvml.py
index e261661636d5851369b896be336f0b9e3be975b7..0636077e11b6104e23eaee8de87a149e55ac8cec 100644
--- a/multimodal/kernels/mvml.py
+++ b/multimodal/kernels/mvml.py
@@ -230,9 +230,10 @@ class MVML(MKernel, BaseEstimator, ClassifierMixin, RegressorMixin):
         # if type_of_target(y) not in "binary":
         #     raise ValueError("target should be binary")
 
-        check_classification_targets(y)
+        # the classification-target check now happens inside the binary branch below
 
         if type_of_target(y) in "binary":
+            check_classification_targets(y)
             self.classes_, y = np.unique(y, return_inverse=True)
             y[y==0] = -1.0
             self.n_classes = len(self.classes_)
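Moving `check_classification_targets` inside the binary branch is what lets `MVML` (declared as both `ClassifierMixin` and `RegressorMixin`) accept continuous targets, since the check raises on regression-style `y`. A minimal sketch of the resulting dispatch, using an exact `==` comparison (equivalent here to the source's `in "binary"` substring test for the strings `type_of_target` returns); `prepare_targets` is a hypothetical name for illustration:

```python
import numpy as np
from sklearn.utils.multiclass import check_classification_targets, type_of_target

def prepare_targets(y):
    """Sketch of MVML's target dispatch, not its actual API."""
    y = np.asarray(y)
    if type_of_target(y) == "binary":
        check_classification_targets(y)
        classes, y = np.unique(y, return_inverse=True)
        y = y.astype(float)
        y[y == 0] = -1.0  # map the two classes to {-1, +1}
        return y, True
    return y, False       # continuous targets take the regression path

print(prepare_targets([0, 1, 1, 0]))     # (array([-1.,  1.,  1., -1.]), True)
print(prepare_targets([0.1, 0.9, 0.4]))  # (array([0.1, 0.9, 0.4]), False)
```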
@@ -342,7 +343,7 @@ class MVML(MKernel, BaseEstimator, ClassifierMixin, RegressorMixin):
                 else:
                     # A_inv = self._inverse_precond_LU(A + 1e-8 * np.eye(views * self.n_approx), pos="precond_A") # self._inverse_precond_jacobi(A + 1e-8 * np.eye(views * self.n_approx), pos="precond_A")
                     A_inv = self._inv_best_precond(A + 1e-8 * np.eye(views * self.n_approx), pos="precond_A")
-            except spli.LinAlgError:
+            except spli.LinAlgError:  # pragma: no cover
                 self.warning_message["LinAlgError"] = self.warning_message.get("LinAlgError", 0) + 1
                 try:
                     A_inv = spli.pinv(A + 1e-07 * np.eye(views * self.n_approx))
@@ -352,7 +353,7 @@ class MVML(MKernel, BaseEstimator, ClassifierMixin, RegressorMixin):
                     except ValueError:
                         self.warning_message["ValueError"] = self.warning_message.get("ValueError", 0) + 1
                         return A_prev, g_prev
-            except ValueError:
+            except ValueError:  # pragma: no cover
                 self.warning_message["ValueError"] = self.warning_message.get("ValueError", 0) + 1
                 return A_prev, g_prev, w_prev
             # print("A_inv ",np.sum(A_inv))
@@ -372,7 +373,7 @@ class MVML(MKernel, BaseEstimator, ClassifierMixin, RegressorMixin):
                 else:
                     # g = np.dot(self._inverse_precond_LU(A_inv, pos="precond_A_1"), g)
                     g = np.dot(self._inv_best_precond(A_inv, pos="precond_A_1"), g)
-            except spli.LinAlgError:
+            except spli.LinAlgError:  # pragma: no cover
                 self.warning_message["LinAlgError"] = self.warning_message.get("LinAlgError", 0) + 1
                 g = spli.solve(A_inv, g)
 
@@ -428,7 +429,7 @@ class MVML(MKernel, BaseEstimator, ClassifierMixin, RegressorMixin):
             A_inv = spli.pinv(A)
         return A_inv
 
-    def _inverse_precond_jacobi(self, A, pos="precond_A"):
+    def _inverse_precond_jacobi(self, A, pos="precond_A"):  # pragma: no cover
         J_1 = np.diag(1.0/np.diag(A))
         # J_1 = np.linalg.inv(J)
         P = np.dot(J_1, A)
@@ -438,10 +439,9 @@ class MVML(MKernel, BaseEstimator, ClassifierMixin, RegressorMixin):
             self.warning_message[pos] = self.warning_message.get(pos, 0) + 1
         else:
             A_inv = self._inverse_precond_LU(A, pos=pos)
-
         return A_inv
 
-    def _inverse_precond_LU(self, A, pos="precond_A"):
+    def _inverse_precond_LU(self, A, pos="precond_A"):  # pragma: no cover
         P, L, U = spli.lu(A)
         M = spli.inv(np.dot(L, U))
         P = np.dot(M, A)
@@ -606,17 +606,17 @@ class MVML(MKernel, BaseEstimator, ClassifierMixin, RegressorMixin):
         try:
             # minA_inv = np.min(np.absolute(A_prev)) , rcond=self.r_cond*minA_inv
             A_prev_inv = spli.pinv(A_prev)
-        except spli.LinAlgError:
+        except spli.LinAlgError:  # pragma: no cover
             try:
                 A_prev_inv = spli.pinv(A_prev + 1e-6 * np.eye(views * m))
             except spli.LinAlgError:
                 return A_prev
             except ValueError:
                 return A_prev
-        except ValueError:
+        except ValueError:  # pragma: no cover
             return A_prev
 
-        if np.any(np.isnan(A_prev_inv)):
+        if np.any(np.isnan(A_prev_inv)):  # pragma: no cover
             # just in case the inverse didn't return a proper solution (happened once or twice)
             return A_prev
 
@@ -625,9 +625,9 @@ class MVML(MKernel, BaseEstimator, ClassifierMixin, RegressorMixin):
         # if there is one small negative eigenvalue this gets rid of it
         try:
             val, vec = spli.eigh(A_tmp)
-        except spli.LinAlgError:
+        except spli.LinAlgError:  # pragma: no cover
             return A_prev
-        except ValueError:
+        except ValueError:  # pragma: no cover
             return A_prev
         val[val < 0] = 0
 
diff --git a/multimodal/tests/test_mvml.py b/multimodal/tests/test_mvml.py
index 9f33c3b5959835ed1e441d602e8ecc4202b02c6d..7b1cb4c3479d5d58486c7bba52204db1f33a9bb6 100644
--- a/multimodal/tests/test_mvml.py
+++ b/multimodal/tests/test_mvml.py
@@ -95,6 +95,21 @@ class MVMLTest(unittest.TestCase):
         w_expected = np.array([[0.5],[0.5]])
         np.testing.assert_almost_equal(mvml.w, w_expected, 8)
 
+    def testFitMVMLRegression(self):
+        #######################################################
+        # regression task with dict and not precomputed
+        #######################################################
+        y = self.y.copy()  # copy the fixture: the in-place += below must not mutate it
+        y += np.random.uniform(0, 1, size=y.shape)
+        mvml = MVML(lmbda=0.1, eta=1,
+                    kernel=['rbf'], kernel_params=[{'gamma':50}],
+                    nystrom_param=0.2)
+        mvml.fit(self.kernel_dict, y=y, views_ind=None)
+        self.assertEqual(mvml.A.shape, (48, 48))
+        self.assertEqual(mvml.g.shape,(48, 1))
+        w_expected = np.array([[0.5],[0.5]])
+        np.testing.assert_almost_equal(mvml.w, w_expected, 8)
+
     def testFitMVMLPrecision(self):
         #######################################################
         # task with dict and not precomputed
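A pitfall the new test has to avoid: binding `y = self.y` and then applying an in-place `+=` would silently mutate the fixture shared by every other test, which is why the version above copies first. A minimal illustration of the difference:

```python
import numpy as np

fixture = np.zeros(3)

alias = fixture   # a second name for the same array, no copy
alias += 1.0      # in-place add: mutates the shared fixture
print(fixture)    # [1. 1. 1.]

fixture = np.zeros(3)
safe = fixture.copy()  # independent array, as in the test above
safe += 1.0
print(fixture)         # [0. 0. 0.] -- the fixture is untouched
```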
diff --git a/setup.cfg b/setup.cfg
index 2b8e7f45560df1bf68c6b693c57d29a50f6e0434..f3b90c5a82a8077eedee2012c96b090952131fb2 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -14,3 +14,6 @@ branch = True
 source = multimodal
 include = */multimodal/*
 omit = */tests/*
+
+[coverage:report]
+exclude_lines = pragma: no cover
\ No newline at end of file
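The `[coverage:report]` section ties the build together with the `# pragma: no cover` markers added in `mvml.py`: coverage.py excludes any line matching an `exclude_lines` pattern, and since setting the option replaces the defaults (which are exactly `pragma: no cover`), restating it keeps the standard behavior explicit. A hedged sketch of how such a marker reads in context (`read_config` is a hypothetical helper):

```python
def read_config(path):
    """Hypothetical helper, only to show the marker in context."""
    try:
        with open(path) as f:
            return f.read()
    except OSError:  # pragma: no cover
        # coverage.py drops this line -- and, since the pragma sits on the
        # clause header, the whole except block -- from the report.
        return None
```

When the pragma sits on a clause header such as `except`, coverage.py excludes the entire block it introduces, not just the single line.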