Skip to content
Snippets Groups Projects
Commit 40c7ee5a authored by Baptiste Bauvin's avatar Baptiste Bauvin
Browse files

Added get_params and removed mincq tracking

parent 805cbf47
No related branches found
No related tags found
No related merge requests found
......@@ -696,9 +696,6 @@ def get_accuracy_graph(plotted_data, classifier_name, file_name, name="Accuracie
class BaseBoost(object):
    """Shared plumbing for the boosting-based monoview classifiers."""

    def __init__(self):
        # Default number of decision stumps generated per attribute.
        self.n_stumps = 10

    def _collect_probas(self, X):
        """Stack the probability predictions of every generated estimator.

        Returns an ndarray built from each estimator's ``predict_proba(X)``;
        assuming the sklearn convention of (n_samples, n_classes) outputs,
        the result has shape (n_estimators, n_samples, n_classes).
        """
        per_estimator = [estimator.predict_proba(X)
                         for estimator in self.estimators_generator.estimators_]
        return np.asarray(per_estimator)
......
......@@ -19,9 +19,9 @@ class ColumnGenerationClassifierQar(BaseEstimator, ClassifierMixin, BaseBoost):
def __init__(self, n_max_iterations=None, estimators_generator=None,
random_state=42, self_complemented=True, twice_the_same=False,
c_bound_choice=True, random_start=True,
n_stumps_per_attribute=None, use_r=True, c_bound_sol=True,
n_stumps_per_attribute=1, use_r=True, c_bound_sol=True,
plotted_metric=Metrics.zero_one_loss, save_train_data=True,
test_graph=True, mincq_tracking=True):
test_graph=True, mincq_tracking=False):
super(ColumnGenerationClassifierQar, self).__init__()
r"""
......@@ -46,7 +46,6 @@ class ColumnGenerationClassifierQar(BaseEstimator, ClassifierMixin, BaseBoost):
plotted_metric : Metric module
The metric that will be plotted for each iteration of boosting.
"""
if type(random_state) is int:
self.random_state = np.random.RandomState(random_state)
else:
......@@ -62,7 +61,6 @@ class ColumnGenerationClassifierQar(BaseEstimator, ClassifierMixin, BaseBoost):
self.c_bound_choice = c_bound_choice
self.random_start = random_start
self.plotted_metric = plotted_metric
if n_stumps_per_attribute:
self.n_stumps = n_stumps_per_attribute
self.use_r = use_r
self.c_bound_sol = c_bound_sol
......@@ -74,15 +72,16 @@ class ColumnGenerationClassifierQar(BaseEstimator, ClassifierMixin, BaseBoost):
"n_stumps", "use_r", "c_bound_sol"]
self.mincq_tracking = mincq_tracking
def get_params(self, deep=True):
    """Return the hyper-parameters exposed for sklearn model selection.

    NOTE(review): only ``random_state`` and ``n_max_iterations`` are
    exposed here, although ``__init__`` accepts many more parameters;
    ``deep`` is accepted for API compatibility and ignored.
    """
    params = {"random_state": self.random_state}
    params["n_max_iterations"] = self.n_max_iterations
    return params
def set_params(self, **params):
    """Set hyper-parameters on this estimator, sklearn-style.

    The previous implementation read ``params["n_max_iterations"]``
    unconditionally, so it raised ``KeyError`` whenever that key was
    absent and silently ignored every other parameter — breaking the
    sklearn contract that ``set_params(**get_params())`` round-trips.
    Accept and assign any keyword instead.

    Returns
    -------
    self : this estimator, to allow call chaining.
    """
    for name, value in params.items():
        setattr(self, name, value)
    return self
def fit(self, X, y):
start = time.time()
formatted_X, formatted_y = self.format_X_y(X, y)
self.init_info_containers()
......
......@@ -23,7 +23,7 @@ from sklearn import metrics # For stastics on classification
# Author-Info
__author__ = "Nikolas Huelsmann"
__status__ = "Prototype" # Production, Development, Prototype
__date__ = 2016 - 03 - 25
__date__ = "2016-03-25"
#### Export Features to CSV
......
......@@ -28,7 +28,7 @@ def randomizedSearch(X_train, y_train, randomState, outputFileName, classifierMo
scorer = metricModule.get_scorer(**metricKWARGS)
nb_possible_combinations = compute_possible_combinations(params_dict)
min_list = np.array([min(nb_possible_combination, nIter) for nb_possible_combination in nb_possible_combinations])
randomSearch = RandomizedSearchCV(estimator, n_iter=np.sum(min_list), param_distributions=params_dict, refit=True,
randomSearch = RandomizedSearchCV(estimator, n_iter=int(np.sum(min_list)), param_distributions=params_dict, refit=True,
n_jobs=nbCores, scoring=scorer, cv=KFolds, random_state=randomState)
detector = randomSearch.fit(X_train, y_train)
......
......@@ -16,7 +16,6 @@ class CGDesc(ColumnGenerationClassifierQar, BaseMonoviewClassifier):
use_r=True,
c_bound_sol=True
)
self.param_names = ["n_max_iterations"]
self.distribs = [CustomRandint(low=2, high=1000)]
self.classed_params = []
......@@ -30,7 +29,7 @@ class CGDesc(ColumnGenerationClassifierQar, BaseMonoviewClassifier):
return self.getInterpretQar(directory, y_test)
def get_name_for_fusion(self):
return "CGr"
return "CGD"
def formatCmdArgs(args):
......
......@@ -5,7 +5,7 @@ from ..Monoview.Additions.QarBoostUtils import ColumnGenerationClassifierQar
class CGreed(ColumnGenerationClassifierQar, BaseMonoviewClassifier):
def __init__(self, random_state=None, n_max_iterations=500, n_stumps_per_attribute=10, **kwargs):
def __init__(self, random_state=None, n_max_iterations=500, n_stumps_per_attribute=1, **kwargs):
super(CGreed, self).__init__(n_max_iterations=n_max_iterations,
random_state=random_state,
self_complemented=True,
......
......@@ -7,7 +7,7 @@ import os
class CQBoost(ColumnGenerationClassifier, BaseMonoviewClassifier):
def __init__(self, random_state=None, mu=0.01, epsilon=1e-06, **kwargs):
def __init__(self, random_state=None, mu=0.01, epsilon=1e-06, n_stumps=10, **kwargs):
super(CQBoost, self).__init__(
random_state=random_state,
mu=mu,
......@@ -18,6 +18,7 @@ class CQBoost(ColumnGenerationClassifier, BaseMonoviewClassifier):
CustomRandint(low=1, high=15, multiplier="e-")]
self.classed_params = []
self.weird_strings = {}
self.n_stumps = n_stumps
if "nbCores" not in kwargs:
self.nbCores = 1
else:
......
from sklearn.tree import DecisionTreeClassifier
from sklearn.ensemble import GradientBoostingClassifier
from ..Monoview.MonoviewUtils import CustomRandint, BaseMonoviewClassifier
# Author-Info
__author__ = "Baptiste Bauvin"
__status__ = "Prototype" # Production, Development, Prototype
class GradientBoosting(GradientBoostingClassifier, BaseMonoviewClassifier):
    """Monoview wrapper around sklearn's GradientBoostingClassifier.

    Fixes versus the previous version:
    - ``max_depth`` default was ``1.0`` (float); sklearn validates this
      parameter as an integer, so use ``1``.
    - ``init`` default was a single ``DecisionTreeClassifier`` instance
      created at import time and shared by every ``GradientBoosting``
      object (mutable default argument). Use a ``None`` sentinel and
      build a fresh estimator per instance instead; the effective
      default behavior is unchanged.
    """

    def __init__(self, random_state=None, loss="exponential", max_depth=1,
                 n_estimators=100, init=None, **kwargs):
        if init is None:
            # Fresh per-instance initial estimator, equivalent to the old
            # shared default.
            init = DecisionTreeClassifier(max_depth=1)
        super(GradientBoosting, self).__init__(
            loss=loss,
            max_depth=max_depth,
            n_estimators=n_estimators,
            init=init,
            random_state=random_state
        )
        # Hyper-parameter search metadata used by the framework.
        self.param_names = ["n_estimators"]
        self.classed_params = []
        self.distribs = [CustomRandint(low=50, high=500)]
        self.weird_strings = {}

    def canProbas(self):
        """Used to know if the classifier can return label probabilities."""
        return True

    def getInterpret(self, directory, y_test):
        """Return an interpretation string (currently empty)."""
        interpretString = ""
        return interpretString
def formatCmdArgs(args):
    """Build the keyword-argument dict for GradientBoosting from parsed CLI args."""
    return {"n_estimators": args.GB_n_est}
def paramsToSet(nIter, randomState):
    """Draw ``nIter`` random hyper-parameter settings for GradientBoosting.

    ``randomState`` must expose ``randint(low, high)``; each drawn setting
    is a dict with a single ``n_estimators`` entry in [50, 500).
    """
    return [{"n_estimators": randomState.randint(50, 500)}
            for _ in range(nIter)]
\ No newline at end of file
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or sign in to comment