Skip to content
Snippets Groups Projects
Commit 532aad57 authored by bbauvin's avatar bbauvin
Browse files

Added train values on global analysis

parent 4823cd17
No related branches found
No related tags found
No related merge requests found
......@@ -59,7 +59,7 @@ def execute(name, learningRate, nbFolds, nbCores, gridSearch, metrics, nIter, fe
metricKWARGS = dict((index, metricConfig) for index, metricConfig in enumerate(metric[1]))
else:
metricKWARGS = {}
metricsScores[metric[0]] = [np.mean(np.array([getattr(Metrics, metric[0]).score(y_test, y_test_pred) for y_test, y_test_pred in zip(y_tests, y_test_preds)])), "",
metricsScores[metric[0]] = [np.mean(np.array([getattr(Metrics, metric[0]).score(y_train, y_train_pred) for y_train, y_train_pred in zip(y_trains, y_train_preds)])), "",
np.mean(np.array([getattr(Metrics, metric[0]).score(y_test, y_test_pred) for y_test, y_test_pred in zip(y_tests, y_test_preds)]))]
stringAnalysis += "\n\n Classification took "+ str(hms(seconds=int(time)))
......
......@@ -100,9 +100,9 @@ def gridSearch(X_train, y_train, nbFolds=4, metric=["accuracy_score", None], nIt
def getConfig(config):
    """Return a human-readable description of an SCM hyper-parameter config.

    ``config`` may be either a sequence (indexed ``0``/``1``/``2``) or a
    dict keyed by the string indices ``"0"``/``"1"``/``"2"``; both layouts
    carry (max_attributes, c, p) in that order.
    """
    try:
        return "\n\t\t- SCM with max_attributes : "+str(config[0])+", c : "+str(config[1])+", p : "+str(config[2])
    except (KeyError, TypeError, IndexError):
        # Dict-style config: integer indexing raised, fall back to string keys.
        return "\n\t\t- SCM with max_attributes : "+str(config["0"])+", c : "+str(config["1"])+", p : "+str(config["2"])
def transformData(dataArray):
......
......@@ -112,6 +112,11 @@ class SCMForLinear(LateFusionClassifier):
packedDataset = dsetFile.get("temp_scm")
attributeClassification = BaptisteRuleClassifications(packedDataset, monoViewDecisions.shape[0])
self.SCMClassifier.fit(binaryAttributes, DATASET.get("Labels")[usedIndices], attribute_classifications=attributeClassification)
try:
dsetFile.close()
os.remove(name)
except:
pass
def getConfig(self, fusionMethodConfig, monoviewClassifiersNames,monoviewClassifiersConfigs):
configString = "with SCM for linear with max_attributes : "+str(self.config[1])+", p : "+str(self.config[0])+\
......
......@@ -7,6 +7,7 @@ import logging
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import numpy as np
#Import own Modules
import Metrics
......@@ -20,7 +21,7 @@ def autolabel(rects, ax):
for rect in rects:
height = rect.get_height()
ax.text(rect.get_x() + rect.get_width()/2., 1.01*height,
"%.2f" % round(height, 4),
"%.2f" % height,
ha='center', va='bottom')
......@@ -34,7 +35,10 @@ def resultAnalysis(benchmark, results, name, times, metrics):
nbResults = len(mono)+len(multi)
validationScores = [float(res[1][2][metric[0]][2]) for res in mono]
validationScores += [float(scores[metric[0]][2]) for a, b, scores in multi]
trainScores = [float(res[1][2][metric[0]][0]) for res in mono]
trainScores += [float(scores[metric[0]][0]) for a, b, scores in multi]
f = pylab.figure(figsize=(40, 30))
width = 0.35 # the width of the bars
fig = plt.gcf()
fig.subplots_adjust(bottom=105.0, top=105.01)
ax = f.add_axes([0.1, 0.1, 0.8, 0.8])
......@@ -43,9 +47,12 @@ def resultAnalysis(benchmark, results, name, times, metrics):
else:
metricKWARGS = {}
ax.set_title(getattr(Metrics, metric[0]).getConfig(**metricKWARGS)+" on validation set for each classifier")
rects = ax.bar(range(nbResults), validationScores, align='center')
rects = ax.bar(range(nbResults), validationScores, width, color="r")
rect2 = ax.bar(np.arange(nbResults)+width, trainScores, width, color="0.3")
autolabel(rects, ax)
ax.set_xticks(range(nbResults))
autolabel(rect2, ax)
ax.legend((rects[0], rect2[0]), ('Train', 'Test'))
ax.set_xticks(np.arange(nbResults)+width)
ax.set_xticklabels(names, rotation="vertical")
f.savefig("Results/"+time.strftime("%Y%m%d-%H%M%S")+"-"+name+"-"+metric[0]+".png")
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or sign in to comment