Commit 63ea5577 authored by Baptiste Bauvin

Removed mincq

parent 40c2f34e
@@ -110,15 +110,6 @@ cb_boost:
  n_max_iterations: [10]
  n_stumps: [1]
min_cq_graalpy:
  mu: [0.01]
  n_stumps_per_attribute: [1]
min_cq_graalpy_tree:
  mu: [0.01]
  n_stumps_per_attribute: [1]
  max_depth: [2]
lasso:
  alpha: [1]
  max_iter: [2]
@@ -126,10 +117,6 @@ lasso:
gradient_boosting:
  n_estimators: [2]
min_cq:
  mu: [0.01]
  n_stumps_per_attribute: [1]
######################################
# The Multiview Classifier arguments #
import numpy as np

from ..monoview.additions.BoostUtils import StumpsClassifiersGenerator
from ..monoview.additions.MinCQUtils import RegularizedBinaryMinCqClassifier
from ..monoview.monoview_utils import BaseMonoviewClassifier, CustomUniform

classifier_class_name = "MinCQGraalpy"


class MinCQGraalpy(RegularizedBinaryMinCqClassifier, BaseMonoviewClassifier):

    def __init__(self, random_state=None, mu=0.01, self_complemented=True,
                 n_stumps_per_attribute=1, **kwargs):
        super(MinCQGraalpy, self).__init__(
            mu=mu,
            estimators_generator=StumpsClassifiersGenerator(
                n_stumps_per_attribute=n_stumps_per_attribute,
                self_complemented=self_complemented),
        )
        self.param_names = ["mu", "n_stumps_per_attribute", "random_state"]
        self.distribs = [CustomUniform(loc=0.05, state=2.0, multiplier="e-"),
                         [n_stumps_per_attribute], [random_state]]
        self.n_stumps_per_attribute = n_stumps_per_attribute
        self.classed_params = []
        self.weird_strings = {}
        self.random_state = random_state
        if "nbCores" not in kwargs:
            self.nbCores = 1
        else:
            self.nbCores = kwargs["nbCores"]

    def canProbas(self):
        """Used to know if the classifier can return label probabilities"""
        return False

    def set_params(self, **params):
        self.mu = params["mu"]
        self.random_state = params["random_state"]
        self.n_stumps_per_attribute = params["n_stumps_per_attribute"]
        return self

    def get_params(self, deep=True):
        return {"random_state": self.random_state, "mu": self.mu,
                "n_stumps_per_attribute": self.n_stumps_per_attribute}

    def getInterpret(self, directory, y_test):
        interpret_string = "Cbound on train :" + str(self.train_cbound)
        np.savetxt(directory + "times.csv", np.array([self.train_time, 0]))
        # interpret_string += "Train C_bound value : "+str(self.cbound_train)
        # y_rework = np.copy(y_test)
        # y_rework[np.where(y_rework==0)] = -1
        # interpret_string += "\n Test c_bound value : "+str(self.majority_vote.cbound_value(self.x_test, y_rework))
        return interpret_string

    def get_name_for_fusion(self):
        return "MCG"


# def formatCmdArgs(args):
#     """Used to format kwargs for the parsed args"""
#     kwargsDict = {"mu": args.MCG_mu,
#                   "n_stumps_per_attribute": args.MCG_stumps}
#     return kwargsDict


def paramsToSet(nIter, random_state):
    """Used for weighted linear early fusion to generate random search sets"""
    paramsSet = []
    for _ in range(nIter):
        paramsSet.append({})
    return paramsSet
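
For orientation, the deleted class follows the platform's scikit-learn-like monoview interface. The minimal sketch below is not part of the commit: the fit/predict calls are assumed to be provided by RegularizedBinaryMinCqClassifier, and the data is synthetic.

import numpy as np

# Hypothetical usage of the removed classifier (assumption: fit/predict come
# from RegularizedBinaryMinCqClassifier with a scikit-learn-like API).
rng = np.random.RandomState(42)
X = rng.rand(50, 4)                     # synthetic feature matrix
y = rng.randint(0, 2, 50)               # synthetic binary labels

clf = MinCQGraalpy(random_state=42, mu=0.01, n_stumps_per_attribute=1)
clf.fit(X, y)                           # presumably builds the stump voters and fits the MinCq majority vote
print(clf.predict(X))                   # majority-vote predictions
print(clf.get_params())                 # {'random_state': 42, 'mu': 0.01, 'n_stumps_per_attribute': 1}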
import numpy as np

from ..monoview.additions.BoostUtils import TreeClassifiersGenerator
from ..monoview.additions.MinCQUtils import RegularizedBinaryMinCqClassifier
from ..monoview.monoview_utils import BaseMonoviewClassifier, CustomUniform

classifier_class_name = "MinCQGraalpyTree"


class MinCQGraalpyTree(RegularizedBinaryMinCqClassifier,
                       BaseMonoviewClassifier):

    def __init__(self, random_state=None, mu=0.01, self_complemented=True,
                 n_stumps_per_attribute=1, max_depth=2, **kwargs):
        super(MinCQGraalpyTree, self).__init__(
            mu=mu,
            estimators_generator=TreeClassifiersGenerator(
                n_trees=n_stumps_per_attribute,
                max_depth=max_depth,
                self_complemented=self_complemented),
        )
        self.param_names = ["mu", "n_stumps_per_attribute", "random_state",
                            "max_depth"]
        self.distribs = [CustomUniform(loc=0.05, state=2.0, multiplier="e-"),
                         [n_stumps_per_attribute], [random_state], [max_depth]]
        self.n_stumps_per_attribute = n_stumps_per_attribute
        self.classed_params = []
        self.weird_strings = {}
        self.max_depth = max_depth
        self.random_state = random_state
        if "nbCores" not in kwargs:
            self.nbCores = 1
        else:
            self.nbCores = kwargs["nbCores"]

    def canProbas(self):
        """Used to know if the classifier can return label probabilities"""
        return True

    def set_params(self, **params):
        self.mu = params["mu"]
        self.random_state = params["random_state"]
        self.n_stumps_per_attribute = params["n_stumps_per_attribute"]
        self.max_depth = params["max_depth"]
        return self

    def get_params(self, deep=True):
        return {"random_state": self.random_state, "mu": self.mu,
                "n_stumps_per_attribute": self.n_stumps_per_attribute,
                "max_depth": self.max_depth}

    def getInterpret(self, directory, y_test):
        interpret_string = "Cbound on train :" + str(self.train_cbound)
        np.savetxt(directory + "times.csv", np.array([self.train_time, 0]))
        # interpret_string += "Train C_bound value : "+str(self.cbound_train)
        # y_rework = np.copy(y_test)
        # y_rework[np.where(y_rework==0)] = -1
        # interpret_string += "\n Test c_bound value : "+str(self.majority_vote.cbound_value(self.x_test, y_rework))
        return interpret_string

    def get_name_for_fusion(self):
        return "MCG"


# def formatCmdArgs(args):
#     """Used to format kwargs for the parsed args"""
#     kwargsDict = {"mu": args.MCGT_mu,
#                   "n_stumps_per_attribute": args.MCGT_trees,
#                   "max_depth": args.MCGT_max_depth}
#     return kwargsDict


def paramsToSet(nIter, randomState):
    """Used for weighted linear early fusion to generate random search sets"""
    paramsSet = []
    for _ in range(nIter):
        paramsSet.append({})
    return paramsSet
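
As a companion sketch (also not part of the commit), the tree variant's set_params/get_params pair can be exercised in isolation; since set_params indexes every declared hyperparameter directly, all four keys must be supplied.

# Hypothetical example, not part of this commit.
clf = MinCQGraalpyTree(random_state=42, mu=0.01,
                       n_stumps_per_attribute=1, max_depth=2)

# set_params reads params["mu"], params["random_state"],
# params["n_stumps_per_attribute"] and params["max_depth"] directly,
# so every key has to be present.
clf.set_params(mu=0.05, random_state=42,
               n_stumps_per_attribute=2, max_depth=3)

assert clf.get_params() == {"random_state": 42, "mu": 0.05,
                            "n_stumps_per_attribute": 2, "max_depth": 3}

Note that set_params only updates the stored attributes; the TreeClassifiersGenerator built in __init__ is not rebuilt, so a new n_stumps_per_attribute or max_depth presumably only takes effect on a freshly constructed instance.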