Commit 546d0e44 authored by Franck Dary

Show errors in script printResults

parent 3629e2c8
@@ -11,6 +11,12 @@ if __name__ == "__main__" :
   output = []
   outputByModelScore = dict()
+  for pathToFile in glob.iglob("" + '*stderr') :
+    for line in open(pathToFile, "r") :
+      if "Error" in line or "ERROR" in line or "error" in line :
+        print(pathToFile,":")
+        print("\t"+line,end="")
   for pathToFile in glob.iglob("" + '*stdout') :
     model = pathToFile.split("_UD_")[0]
     corpus = pathToFile.split("_UD_")[1].split('.')[0]
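For readers skimming the diff: the added block greps every *stderr file in the working directory and echoes any line mentioning an error, prefixed by the file it came from. A minimal standalone sketch of the same scan; the case-insensitive lower() match is a simplification of the commit's three literal checks, and the with-block for file handling is an addition, not the committed code:

  import glob

  # Print every stderr line that mentions an error, with its source file.
  # lower() subsumes the commit's "Error"/"ERROR"/"error" checks.
  for pathToFile in glob.iglob('*stderr') :
      with open(pathToFile, "r") as f :
          for line in f :
              if "error" in line.lower() :
                  print(pathToFile, ":")
                  print("\t" + line, end="")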
@@ -32,15 +38,19 @@ if __name__ == "__main__" :
   for model in outputByModelScore :
     for metric in outputByModelScore[model] :
       score = 0.0
-      standardDeviation = 0.0
       for exp in outputByModelScore[model][metric] :
         score += float(exp[2])
       score /= len(outputByModelScore[model][metric])
-      for exp in outputByModelScore[model][metric] :
-        standardDeviation += (float(exp[2])-score)**2
-      standardDeviation /= len(outputByModelScore[model][metric])-1
-      standardDeviation = math.sqrt(standardDeviation)
-      score = "%.2f[±%.2f]%%"%(score,standardDeviation)
+      standardDeviation = 0.0
+      if len(outputByModelScore[model][metric]) > 1 :
+        for exp in outputByModelScore[model][metric] :
+          standardDeviation += (float(exp[2])-score)**2
+        standardDeviation /= len(outputByModelScore[model][metric])-1
+        standardDeviation = math.sqrt(standardDeviation)
+      if standardDeviation > 0 :
+        score = "%.2f[±%.2f]%%"%(score,standardDeviation)
+      else :
+        score = "%.2f%%"%score
       output.append(outputByModelScore[model][metric][0])
       output[-1][2] = score
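The reworked block computes the sample standard deviation (Bessel's n-1 correction) only when a model/metric pair has more than one run, which fixes the division by zero the old code hit with a single run, and it drops the "[±...]" suffix when the deviation is zero. A minimal sketch of the same logic using the standard library; formatScore and its example inputs are hypothetical, not part of the repository:

  import statistics

  # Hypothetical helper mirroring the commit's logic: format a mean score,
  # appending the sample standard deviation only when several runs exist.
  def formatScore(values) :
      mean = sum(values) / len(values)
      if len(values) > 1 :
          # statistics.stdev uses the n-1 denominator, as in the diff
          sd = statistics.stdev(values)
          if sd > 0 :
              return "%.2f[±%.2f]%%" % (mean, sd)
      return "%.2f%%" % mean

  print(formatScore([87.1, 88.3, 86.9]))  # 87.43[±0.76]%
  print(formatScore([87.1]))              # 87.10%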