Commit c90b5763 authored by Baptiste Bauvin

test graalbrain

parent f7806c28
from ..MonoviewUtils import change_label_to_minus
from .BoostUtils import StumpsClassifiersGenerator, BaseBoost


class PregenClassifier(BaseBoost):
    def pregen_voters(self, X, y=None):
        if y is not None:
            neg_y = change_label_to_minus(y)
            if self.estimators_generator is None:
                self.estimators_generator = StumpsClassifiersGenerator(
                    n_stumps_per_attribute=self.n_stumps,
                    self_complemented=self.self_complemented)
            self.estimators_generator.fit(X, neg_y)
        else:
            neg_y = None
        classification_matrix = self._binary_classification_matrix(X)
        return classification_matrix, neg_y
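
For context, the pregen-voters pattern this mixin implements can be illustrated with a self-contained sketch (hypothetical toy code, not part of this commit): one decision stump is fit per attribute, their signed votes form a classification matrix, and the downstream boosting estimator is trained on that matrix instead of the raw features.

# Illustrative sketch only, using plain scikit-learn so it runs standalone;
# class and variable names here are hypothetical stand-ins.
import numpy as np
from sklearn.tree import DecisionTreeClassifier
from sklearn.ensemble import AdaBoostClassifier


class ToyStumpPregen:
    """Pre-generates one stump per attribute and exposes their {-1, +1} votes."""

    def fit_stumps(self, X, y):
        # One depth-1 tree ("stump") per attribute, fit on that column only.
        self.stumps_ = [(j, DecisionTreeClassifier(max_depth=1).fit(X[:, [j]], y))
                        for j in range(X.shape[1])]
        return self

    def classification_matrix(self, X):
        # Column k holds the votes of stump k on every sample, mapped to {-1, +1}.
        votes = np.column_stack([stump.predict(X[:, [j]]) for j, stump in self.stumps_])
        return 2 * votes - 1


# Boost over the pre-generated votes instead of the raw features.
rng = np.random.RandomState(42)
X = rng.rand(100, 5)
y = (X[:, 0] > 0.5).astype(int)
pregen = ToyStumpPregen().fit_stumps(X, y)
booster = AdaBoostClassifier(n_estimators=10).fit(pregen.classification_matrix(X), y)
print(booster.score(pregen.classification_matrix(X), y))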
@@ -8,13 +8,14 @@ from ..Monoview.MonoviewUtils import CustomRandint, BaseMonoviewClassifier, chan
from ..Monoview.Additions.BoostUtils import get_accuracy_graph
from .. import Metrics
from ..Monoview.Additions.BoostUtils import get_accuracy_graph, StumpsClassifiersGenerator, BaseBoost
from ..Monoview.Additions.PregenUtils import PregenClassifier

# Author-Info
__author__ = "Baptiste Bauvin"
__status__ = "Prototype"  # Production, Development, Prototype


class AdaboostPregen(AdaBoostClassifier, BaseBoost, BaseMonoviewClassifier):
class AdaboostPregen(AdaBoostClassifier, BaseMonoviewClassifier, PregenClassifier):
    def __init__(self, random_state=None, n_estimators=50,
                 base_estimator=None, n_stumps=1, self_complemeted=True, **kwargs):
@@ -82,17 +83,17 @@ class AdaboostPregen(AdaBoostClassifier, BaseBoost, BaseMonoviewClassifier):
        np.savetxt(directory + "times.csv", np.array([self.train_time, self.pred_time]), delimiter=',')
        return interpretString

    def pregen_voters(self, X, y=None):
        if y is not None:
            neg_y = change_label_to_minus(y)
            if self.estimators_generator is None:
                self.estimators_generator = StumpsClassifiersGenerator(
                    n_stumps_per_attribute=self.n_stumps,
                    self_complemented=self.self_complemented)
            self.estimators_generator.fit(X, neg_y)
        else:
            neg_y = None
        classification_matrix = self._binary_classification_matrix(X)
    # def pregen_voters(self, X, y=None):
    #     if y is not None:
    #         neg_y = change_label_to_minus(y)
    #         if self.estimators_generator is None:
    #             self.estimators_generator = StumpsClassifiersGenerator(
    #                 n_stumps_per_attribute=self.n_stumps,
    #                 self_complemented=self.self_complemented)
    #         self.estimators_generator.fit(X, neg_y)
    #     else:
    #         neg_y = None
    #     classification_matrix = self._binary_classification_matrix(X)
        return classification_matrix, neg_y

def formatCmdArgs(args):
......
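
The class-signature change above swaps the direct BaseBoost base for the new PregenClassifier mixin. A minimal sketch of the multiple-inheritance behaviour this relies on (toy class names, not the project's): the left-most base wins method lookups, while the mixin listed last only contributes the methods the others lack, such as pregen_voters.

# Toy illustration of the base-class ordering, not code from this commit.
class SkBase:                       # stands in for AdaBoostClassifier
    def fit(self, X, y):
        return "sklearn fit"


class PregenMixin:                  # stands in for PregenClassifier
    def fit(self, X, y):
        return "mixin fit"

    def pregen_voters(self, X, y=None):
        return "pregen voters"


class Pregen(SkBase, PregenMixin):  # same ordering idea as the new AdaboostPregen bases
    pass


p = Pregen()
print(p.fit(None, None))        # -> "sklearn fit": the left-most base wins the MRO
print(p.pregen_voters(None))    # -> "pregen voters": only the mixin defines it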
@@ -5,11 +5,12 @@ import numpy as np
from ..Monoview.MonoviewUtils import CustomRandint, CustomUniform, BaseMonoviewClassifier, change_label_to_minus, change_label_to_zero
from ..Monoview.Additions.BoostUtils import StumpsClassifiersGenerator, BaseBoost
from ..Monoview.Additions.PregenUtils import PregenClassifier

# Author-Info
__author__ = "Baptiste Bauvin"
__status__ = "Prototype"  # Production, Development, Prototype


class SCMPregen(scm, BaseMonoviewClassifier, BaseBoost):
class SCMPregen(scm, BaseMonoviewClassifier, PregenClassifier):
    def __init__(self, random_state=None, model_type="conjunction",
                 max_rules=10, p=0.1, n_stumps=10, self_complemented=True, **kwargs):
@@ -35,44 +36,45 @@ class SCMPregen(scm, BaseMonoviewClassifier, BaseBoost):
        return self

    def predict(self, X):
        h = np.ones(X.shape)
        print('poul')
        pregen_h, _ = self.pregen_voters(h)
        print('from')
        pred = super(SCMPregen, self).predict(pregen_h)
        return pred

    def pregen_voters(self, X, y=None):
        if y is not None:
            if self.estimators_generator is None:
                self.estimators_generator = StumpsClassifiersGenerator(
                    n_stumps_per_attribute=self.n_stumps,
                    self_complemented=self.self_complemented)
            self.estimators_generator.fit(X, y)
        else:
            neg_y = None
        classification_matrix = self._binary_classification_matrix_t(X)
        return classification_matrix, y

    def _collect_probas_t(self, X):
        print('jb')
        for est in self.estimators_generator.estimators_:
            print(type(est))
            print(est.predict_proba_t(X))
        print('ha')
        return np.asarray([clf.predict_proba(X) for clf in self.estimators_generator.estimators_])

    def _binary_classification_matrix_t(self, X):
        probas = self._collect_probas_t(X)
        predicted_labels = np.argmax(probas, axis=2)
        predicted_labels[predicted_labels == 0] = -1
        values = np.max(probas, axis=2)
        return (predicted_labels * values).T

        pregen_h, _ = self.pregen_voters(X)
        return self.classes_[self.model_.predict(X)]

    def get_params(self, deep=True):
        return {"p": self.p, "model_type": self.model_type,
                "max_rules": self.max_rules,
                "random_state": self.random_state, "n_stumps": self.n_stumps}
    # def pregen_voters(self, X, y=None):
    #     if y is not None:
    #         if self.estimators_generator is None:
    #             self.estimators_generator = StumpsClassifiersGenerator(
    #                 n_stumps_per_attribute=self.n_stumps,
    #                 self_complemented=self.self_complemented)
    #         self.estimators_generator.fit(X, y)
    #     else:
    #         neg_y = None
    #     classification_matrix = self._binary_classification_matrix_t(X)
    #     return classification_matrix, y
    #
    # def _collect_probas_t(self, X):
    #     print('jb')
    #     for est in self.estimators_generator.estimators_:
    #         print(type(est))
    #         print(est.predict_proba_t(X))
    #     print('ha')
    #     return np.asarray([clf.predict_proba(X) for clf in self.estimators_generator.estimators_])
    #
    # def _binary_classification_matrix_t(self, X):
    #     probas = self._collect_probas_t(X)
    #     predicted_labels = np.argmax(probas, axis=2)
    #     predicted_labels[predicted_labels == 0] = -1
    #     values = np.max(probas, axis=2)
    #     return (predicted_labels * values).T

    def canProbas(self):
        """Used to know if the classifier can return label probabilities"""
        return True
        return False

    def getInterpret(self, directory, y_test):
        interpretString = "Model used : " + str(self.model_)
......
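
The commented-out _binary_classification_matrix_t above builds a signed, confidence-weighted vote matrix from per-voter probabilities. A standalone sketch of that computation (illustrative data, not from the commit): for probas of shape (n_voters, n_samples, 2), argmax over the class axis picks each voter's label, zeros are remapped to -1, the label is scaled by the winning probability, and the result is transposed to (n_samples, n_voters).

# Standalone numpy sketch of the confidence-weighted vote matrix; the input
# array here is made up for illustration.
import numpy as np

# 3 voters, 4 samples, 2 classes: probas[v, i] = (P(class 0), P(class 1))
probas = np.array([
    [[0.9, 0.1], [0.2, 0.8], [0.6, 0.4], [0.3, 0.7]],
    [[0.5, 0.5], [0.1, 0.9], [0.7, 0.3], [0.4, 0.6]],
    [[0.8, 0.2], [0.3, 0.7], [0.9, 0.1], [0.2, 0.8]],
])

predicted_labels = np.argmax(probas, axis=2)   # 0 or 1 per (voter, sample)
predicted_labels[predicted_labels == 0] = -1   # remap class 0 to the -1 vote
values = np.max(probas, axis=2)                # confidence of the winning class
classification_matrix = (predicted_labels * values).T

print(classification_matrix.shape)  # (4, 3): one signed, weighted vote per voter
print(classification_matrix)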