Commit bd9a80e9 authored by Baptiste Bauvin

Some bug correction

parent 2f357080
@@ -2,7 +2,6 @@
 # -*- encoding: utf-8
 import numpy as np
-from sklearn.ensemble import VotingClassifier
 # TODO :
 # Linear Weighted Fusion
@@ -10,31 +10,34 @@ from sklearn.svm import SVC
 # able to compute a score for each class in each mono-view classification
-# decisions : (nbExample * nbFeature * NB_CLASS) array with the OVO/OVA scores for each
+# monoViewDecisions : (nbExample * nbFeature * NB_CLASS) array with the OVO/OVA scores for each
 # example, feature and each class
 # weights : (nbFeature) array with the weights for each feature
-def weightedLinear(decisions, weights):
+def weightedLinear(monoViewDecisions, weights):
     # Normalize weights ?
     # weights = weights/float(max(weights))
     fusedExamples = np.array([sum(np.array([featureScores * weight for weight,featureScores\
-                                  in zip(weights, exampleDecisions)])) for exampleDecisions in decisions])
+                                  in zip(weights, exampleDecisions)])) for exampleDecisions in monoViewDecisions])
     # print fused
     return np.array([np.argmax(fusedExample) for fusedExample in fusedExamples])
 
 
 # The SVMClassifier is here used to find the right weights for linearfusion
-def SVMForLinearFusionTrain(decisions, labels):
+def SVMForLinearFusionTrain(monoViewDecisions, labels):
     SVMClassifier = SVC()
-    SVMClassifier.fit(decisions, labels)
+    SVMClassifier.fit(monoViewDecisions, labels)
     return SVMClassifier
 
 
-def SVMForLinearFusionFuse(decisions, SVMClassifier):
-    labels = SVMClassifier.predict(decisions)
+def SVMForLinearFusionFuse(monoViewDecisions, SVMClassifier):
+    labels = SVMClassifier.predict(monoViewDecisions)
     return labels
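weightedLinear above weights each view's score array, sums over the views, and takes an argmax per example. A minimal vectorized sketch of the same idea (illustrative only, not part of this commit; it assumes monoViewDecisions has shape (nbExample, nbFeature, NB_CLASS) and weights has shape (nbFeature,), as the comment at the top of the hunk describes):

import numpy as np

def weighted_linear_vectorized(monoViewDecisions, weights):
    # Contract the feature (view) axis against the per-view weights,
    # giving one fused score vector per example.
    fused = np.tensordot(monoViewDecisions, weights, axes=([1], [0]))
    # Predicted class = index of the highest fused score.
    return np.argmax(fused, axis=1)

# Example use (illustrative shapes only):
# scores = np.random.rand(4, 5, 3)   # 4 examples, 5 views, 3 classes
# view_weights = np.random.rand(5)
# weighted_linear_vectorized(scores, view_weights)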
@@ -42,11 +45,11 @@ def SVMForLinearFusionFuse(decisions, SVMClassifier):
 # For majority voting, we have a problem : we have 5 fetures and 101 classes
 # on Calthech, so if each feature votes for one class, we can't find a good
 # result
-def majorityVoting(decisions, NB_CLASS):
-    nbExample = len(decisions)
-    votes = np.array([np.zeros(NB_CLASS) for example in decisions])
+def majorityVoting(monoViewDecisions, NB_CLASS):
+    nbExample = len(monoViewDecisions)
+    votes = np.array([np.zeros(NB_CLASS) for example in monoViewDecisions])
     for exampleIndice in range(nbExample):
-        for featureClassification in decisions[exampleIndice]:
+        for featureClassification in monoViewDecisions[exampleIndice]:
             votes[exampleIndice, featureClassification]+=1
         nbMaximum = len(np.where(votes[exampleIndice]==max(votes[exampleIndice]))[0])
         try:
@@ -54,14 +57,12 @@ def majorityVoting(decisions, NB_CLASS):
         except:
             print "Majority voting can't decide, each classifier has voted for a different class"
             raise
     # Can be upgraded by restarting a new classification process if
     # there are multiple maximums ?:
     # while nbMaximum>1:
     #     relearn with only the classes that have a maximum number of vote
     #     votes = revote
     #     nbMaximum = len(np.where(votes==max(votes))[0])
     return np.array([np.argmax(exampleVotes) for exampleVotes in votes])
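majorityVoting above gives each view one vote per example and raises when several classes tie for the maximum. A minimal sketch of the same voting rule built on np.bincount (illustrative only, not part of this commit; it assumes monoViewDecisions is an (nbExample, nbFeature) array of integer class indices):

import numpy as np

def majority_voting_sketch(monoViewDecisions, NB_CLASS):
    fused = np.empty(len(monoViewDecisions), dtype=int)
    for i, example_votes in enumerate(monoViewDecisions):
        # One vote per view; counts[c] is the number of views that chose class c.
        counts = np.bincount(example_votes, minlength=NB_CLASS)
        if np.sum(counts == counts.max()) > 1:
            # Same failure mode the file reports: several classes are tied.
            raise ValueError("Majority voting can't decide: tie between classes")
        fused[i] = np.argmax(counts)
    return fused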
@@ -76,30 +77,30 @@ if __name__ == '__main__':
     LABELS = np.array([TRUE_CLASS for i in range(DATASET_LENGTH)])
     LABELS[0] = 0
 
-    decisionsEasy = np.array([np.array([np.zeros(NB_CLASS) for i in range(nbFeature)])for example in range(DATASET_LENGTH)])
-    for exampleDecisions in decisionsEasy:
+    monoViewDecisionsEasy = np.array([np.array([np.zeros(NB_CLASS) for i in range(nbFeature)])for example in range(DATASET_LENGTH)])
+    for exampleDecisions in monoViewDecisionsEasy:
         for decision in exampleDecisions:
             decision[TRUE_CLASS]=12
-    # print decisionsEasy
+    # print monoViewDecisionsEasy
 
-    decisionsHard = np.array([np.array([np.zeros(NB_CLASS) for i in range(nbFeature)])for example in range(DATASET_LENGTH)])
-    for exampleDecisions in decisionsHard:
+    monoViewDecisionsHard = np.array([np.array([np.zeros(NB_CLASS) for i in range(nbFeature)])for example in range(DATASET_LENGTH)])
+    for exampleDecisions in monoViewDecisionsHard:
         for decision in exampleDecisions:
             decision[TRUE_CLASS]=12
         exampleDecisions[nbFeature-2]=np.zeros(NB_CLASS)+1400
         exampleDecisions[nbFeature-2][TRUE_CLASS]-=110
 
-    decisionsMajority = np.array([np.array([TRUE_CLASS,TRUE_CLASS,TRUE_CLASS,1,5]) for example in range(DATASET_LENGTH)])
-    decisionsMajorityFail = np.array([np.array([1,2,3,4,5]) for example in range(DATASET_LENGTH)])
+    monoViewDecisionsMajority = np.array([np.array([TRUE_CLASS,TRUE_CLASS,TRUE_CLASS,1,5]) for example in range(DATASET_LENGTH)])
+    monoViewDecisionsMajorityFail = np.array([np.array([1,2,3,4,5]) for example in range(DATASET_LENGTH)])
 
     weights = np.random.rand(nbFeature)
     weights[nbFeature-2] = 2
 
-    SVMClassifier = SVMForLinearFusionTrain(decisionsMajority, LABELS)
+    SVMClassifier = SVMForLinearFusionTrain(monoViewDecisionsMajority, LABELS)
 
-    print weightedLinear(decisionsEasy, weights)
-    print weightedLinear(decisionsHard, weights)
-    print SVMForLinearFusionFuse(decisionsMajority, SVMClassifier)
-    print majorityVoting(decisionsMajority, NB_CLASS)
-    print majorityVoting(decisionsMajorityFail, NB_CLASS)
+    print weightedLinear(monoViewDecisionsEasy, weights)
+    print weightedLinear(monoViewDecisionsHard, weights)
+    print SVMForLinearFusionFuse(monoViewDecisionsMajority, SVMClassifier)
+    print majorityVoting(monoViewDecisionsMajority, NB_CLASS)
+    print majorityVoting(monoViewDecisionsMajorityFail, NB_CLASS)
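The demo in the hunk above still uses Python 2 print statements, so it will not run as-is under Python 3. A minimal Python 3 sketch of the SVM-based fusion part of that demo (illustrative data and shapes, not part of this commit):

import numpy as np
from sklearn.svm import SVC

def svm_fusion_train(monoViewDecisions, labels):
    # Fit an SVC directly on the per-view decisions, as SVMForLinearFusionTrain does.
    clf = SVC()
    clf.fit(monoViewDecisions, labels)
    return clf

def svm_fusion_fuse(monoViewDecisions, clf):
    # Predict fused labels, as SVMForLinearFusionFuse does.
    return clf.predict(monoViewDecisions)

if __name__ == '__main__':
    rng = np.random.RandomState(0)
    X = rng.randint(0, 3, size=(20, 5))   # 20 examples, 5 per-view class predictions
    y = rng.randint(0, 2, size=20)        # binary labels, just for the example
    clf = svm_fusion_train(X, y)
    print(svm_fusion_fuse(X, clf))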