Commit 2f357080 authored by Baptiste Bauvin

Modified fusion to take the whole dataset as input and added SVM in lateFusion

parent 727f2765
@@ -2,10 +2,12 @@
 # -*- encoding: utf-8
 import numpy as np
+from sklearn.ensemble import VotingClassifier
 
 # TODO :
 # Linear Weighted Fusion
 # Bayesian Inference /!\ Statistically independent => ?
+# SVM ?
 # Dempster-Shafer Theory /!\ Hard to understand
 # Dynamic Bayesian Networks ?
 # Neural Network ?
@@ -18,9 +20,9 @@ def linearWeightedFusion (toFuse, weights):
     # Normalize weights ?
     # weights = weights/float(max(weights))
-    weighted = np.array([feature*weights for (feature, weight) in zip(toFuse, weights)])
-    fused = weighted.flatten()
+    fused = np.array([np.array([feature*weight for (feature, weight) in zip(exampleToFuse, weights)]).flatten() for exampleToFuse in toFuse])
     return fused
 
 if __name__ == '__main__':
\ No newline at end of file
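A minimal sketch of what the reshaped linearWeightedFusion now computes, assuming toFuse is a (nbExample, nbFeature, featureDim) array and the function above is in scope; the sizes are invented for illustration, not taken from the commit:

import numpy as np

toFuse = np.random.rand(10, 5, 4)   # hypothetical (nbExample, nbFeature, featureDim)
weights = np.random.rand(5)         # one weight per feature
fused = linearWeightedFusion(toFuse, weights)
print(fused.shape)                  # (10, 20): weighted feature vectors, concatenated per example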
@@ -3,38 +3,52 @@
 import numpy as np
 import sys
+from sklearn.svm import SVC
 
 # Our method in multiclass classification will be One-vs-One or One-vs-All
 # classifiers, so if we can get the output of these classifiers, we are
 # able to compute a score for each class in each mono-view classification
 
-# decisions : (nbFeature * NB_CLASS) array with the OVO/OVA scores for each
-#             feature and each class
-# weights : (nbFeature) array with the weights for each feature
+# decisions : (nbExample * nbFeature * NB_CLASS) array with the OVO/OVA scores
+#             for each example, each feature and each class
+# weights : (nbFeature) array with the weights for each feature
 def weightedLinear(decisions, weights):
     # Normalize weights ?
     # weights = weights/float(max(weights))
-    fused = sum(np.array([featureScores * weight for weight, featureScores
-                          in zip(weights, decisions)]))
+    fusedExamples = np.array([sum(np.array([featureScores * weight for weight, featureScores
+                                            in zip(weights, exampleDecisions)]))
+                              for exampleDecisions in decisions])
     # print fused
-    return np.argmax(fused)
+    return np.array([np.argmax(fusedExample) for fusedExample in fusedExamples])
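To make the new decisions layout concrete, here is a small illustrative run of weightedLinear; the numbers are invented for this note (two examples, two features, three classes):

decisions = np.array([[[0.1, 0.7, 0.2],     # example 0, feature 0
                       [0.2, 0.6, 0.2]],    # example 0, feature 1
                      [[0.8, 0.1, 0.1],     # example 1, feature 0
                       [0.3, 0.3, 0.4]]])   # example 1, feature 1
weights = np.array([0.5, 1.0])
print(weightedLinear(decisions, weights))   # [1 0]: scores weighted, summed over features, argmax per example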
+# The SVM classifier is used here to find the right weights for linear fusion
+def SVMForLinearFusionTrain(decisions, labels):
+    SVMClassifier = SVC()
+    SVMClassifier.fit(decisions, labels)
+    return SVMClassifier
+
+def SVMForLinearFusionFuse(decisions, SVMClassifier):
+    labels = SVMClassifier.predict(decisions)
+    return labels
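One caveat: SVC.fit and SVC.predict expect a 2D (nbExample, nbFeature) array, which matches the per-feature label vectors used in the test main below but not the 3D score array documented above. A hypothetical reshape, with made-up sizes, if raw scores were fed to the SVM instead:

scores = np.random.rand(10, 5, 12)            # hypothetical (nbExample, nbFeature, NB_CLASS) scores
flatScores = scores.reshape(len(scores), -1)  # (10, 60), a shape SVC.fit accepts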
 # For majority voting, we have a problem: we have 5 features and 101 classes
 # on Caltech, so if each feature votes for one class, we can't find a good
 # result
 def majorityVoting(decisions, NB_CLASS):
-    votes = np.zeros(NB_CLASS)
-    nbFeature = len(decisions)
-    for featureClassification in decisions:
-        votes[featureClassification]+=1
-    nbMaximum = len(np.where(votes==max(votes))[0])
+    nbExample = len(decisions)
+    nbFeature = len(decisions[0])  # per-example feature count, needed by the tie check
+    votes = np.array([np.zeros(NB_CLASS) for example in decisions])
+    for exampleIndice in range(nbExample):
+        for featureClassification in decisions[exampleIndice]:
+            votes[exampleIndice, featureClassification] += 1
+        nbMaximum = len(np.where(votes[exampleIndice]==max(votes[exampleIndice]))[0])
         try:
             assert nbMaximum != nbFeature
         except:
@@ -42,39 +56,50 @@ def majorityVoting(decisions, NB_CLASS):
             raise
     # Can be upgraded by restarting a new classification process if
-    # there are multiple maximums :
+    # there are multiple maximums ?:
     # while nbMaximum>1:
     #     relearn with only the classes that have a maximum number of votes
     #     votes = revote
     #     nbMaximum = len(np.where(votes==max(votes))[0])
-    return np.argmax(votes)
+    return np.array([np.argmax(exampleVotes) for exampleVotes in votes])
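The tie problem described above is easy to reproduce: when every feature votes for a different class, each voted class is tied at one vote, nbMaximum equals nbFeature, and the assertion fires. A minimal sketch with invented values:

try:
    majorityVoting(np.array([[1, 2, 3, 4, 5]]), 12)   # one example, 5 features, 5 different votes
except AssertionError:
    print("tie: every feature voted for a different class")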
 # Main for testing
 if __name__ == '__main__':
     DATASET_LENGTH = 10
     nbFeature = 5
     NB_CLASS = 12
     TRUE_CLASS = 3
+    LABELS = np.array([TRUE_CLASS for i in range(DATASET_LENGTH)])
+    LABELS[0] = 0
 
-    decisionsEasy = np.array([np.zeros(NB_CLASS) for i in range(nbFeature)])
-    for decision in decisionsEasy:
-        decision[TRUE_CLASS] = 12
+    decisionsEasy = np.array([np.array([np.zeros(NB_CLASS) for i in range(nbFeature)])
+                              for example in range(DATASET_LENGTH)])
+    for exampleDecisions in decisionsEasy:
+        for decision in exampleDecisions:
+            decision[TRUE_CLASS] = 12
     # print decisionsEasy
 
-    decisionsHard = np.array([np.zeros(NB_CLASS) for i in range(nbFeature)])
-    for decision in decisionsHard:
-        decision[TRUE_CLASS] = 12
-    decisionsHard[nbFeature-2] = np.zeros(NB_CLASS)+1400
-    decisionsHard[nbFeature-2][TRUE_CLASS] -= 110
+    decisionsHard = np.array([np.array([np.zeros(NB_CLASS) for i in range(nbFeature)])
+                              for example in range(DATASET_LENGTH)])
+    for exampleDecisions in decisionsHard:
+        for decision in exampleDecisions:
+            decision[TRUE_CLASS] = 12
+        exampleDecisions[nbFeature-2] = np.zeros(NB_CLASS)+1400
+        exampleDecisions[nbFeature-2][TRUE_CLASS] -= 110
 
-    decisionsMajority = np.array([TRUE_CLASS, TRUE_CLASS, TRUE_CLASS, 1, 5])
-    decisionsMajorityFail = np.array([1, 2, 3, 4, 5])
+    decisionsMajority = np.array([np.array([TRUE_CLASS, TRUE_CLASS, TRUE_CLASS, 1, 5])
+                                  for example in range(DATASET_LENGTH)])
+    decisionsMajorityFail = np.array([np.array([1, 2, 3, 4, 5])
+                                      for example in range(DATASET_LENGTH)])
 
     weights = np.random.rand(nbFeature)
     weights[nbFeature-2] = 2
+    SVMClassifier = SVMForLinearFusionTrain(decisionsMajority, LABELS)
     print weightedLinear(decisionsEasy, weights)
     print weightedLinear(decisionsHard, weights)
+    print SVMForLinearFusionFuse(decisionsMajority, SVMClassifier)
     print majorityVoting(decisionsMajority, NB_CLASS)
     print majorityVoting(decisionsMajorityFail, NB_CLASS)
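Two properties of this smoke test follow directly from the data: the easy case is unanimous, so weightedLinear should predict TRUE_CLASS for every example, and the final call should raise, since every feature in decisionsMajorityFail votes for a different class. A hypothetical extra check for the first property:

predictions = weightedLinear(decisionsEasy, weights)
assert (predictions == TRUE_CLASS).all()   # all 10 examples fuse to class 3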