final cleanse for utils file
gicraveiro committed Feb 7, 2022
1 parent bfa0e13 commit 962e268
Showing 1 changed file with 5 additions and 12 deletions.
17 changes: 5 additions & 12 deletions utils.py
@@ -1,9 +1,8 @@
 import re
 import os
-from sklearn.metrics import precision_score, f1_score, recall_score # ,accuracy_score
+from sklearn.metrics import precision_score, f1_score, recall_score
 import json
 import numpy
-#import sklearn
 from sklearn.metrics import ConfusionMatrixDisplay
 import matplotlib.pyplot as plt

@@ -21,7 +20,8 @@ def reconstruct_hyphenated_words(corpus):
             i += 1
     return corpus

-# noun chunks that correspond to keywords
+# used to reconstruct noun chunks that correspond to keywords
+# merge the compound words specified in the keywords parameters into the same token
 def reconstruct_noun_chunks(corpus,keywords):
     i = 0
     while i < len(corpus):
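For context, the new comments describe merging multi-word keywords into single tokens. Below is a minimal, self-contained sketch of that idea, assuming plain string tokens; the helper name merge_keyword_tokens and the sample data are illustrative only and not taken from this repository, whose function operates on its own corpus objects.

def merge_keyword_tokens(tokens, keywords):
    # Merge adjacent tokens that together spell a known multi-word keyword
    # into a single token, mirroring what the comments above describe.
    merged = []
    i = 0
    while i < len(tokens):
        match = None
        for keyword in keywords:
            parts = keyword.split()
            if tokens[i:i + len(parts)] == parts:
                match = keyword
                break
        if match:
            merged.append(match)      # keep the whole keyword as one token
            i += len(match.split())
        else:
            merged.append(tokens[i])
            i += 1
    return merged

print(merge_keyword_tokens(["social", "media", "posts"], ["social media"]))
# ['social media', 'posts']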
@@ -65,16 +65,10 @@ def clean_corpus(corpus):

 # WRITE OUTPUT STATISTICS FILE
 def write_output_stats_file(path, name, ref_labels, pred_labels, labels):
-    #path = 'output/Simple Classifier/1labelPredictionsStats_'+name+'.txt'
-    #os.makedirs(os.path.dirname(path), exist_ok=True)
-    #print(ref_labels, pred_labels, labels)
     with open(path, 'a') as file:
-        print(name,"set:\n", file=file)
-        #print("Accuracy:",round( accuracy_score( ref_labels, pred_labels), 3), file=file)
-        #print("Precision micro:",round( precision_score( ref_labels, pred_labels, average="micro"), 3), file=file)
+        print(name,"set:\n", file=file) # Title
         print("Precision macro:",round( precision_score( ref_labels, pred_labels, average="macro"),3), file=file)
         print("Precision Individually:", numpy.round (precision_score( ref_labels, pred_labels, average=None, labels=labels),3), file=file)
-        #print("Recall micro:",round( recall_score( ref_labels, pred_labels, average="micro"),3), file=file)
         print("Recall macro:",round( recall_score( ref_labels, pred_labels, average="macro"),3), file=file)
         print("Recall Individually:", numpy.round(recall_score( ref_labels, pred_labels, average=None, labels=labels),3), file=file)
         print("F1 Score micro:",round( f1_score( ref_labels, pred_labels, average="micro"),3), file=file)
@@ -92,9 +86,8 @@ def write_predictions_file(name, pred_dict):

 # Creates a confusion matrix
 def create_confusion_matrix(refs, preds, normalize, path, labels, display_labels):
-    #print(sklearn.__version__)
     ConfusionMatrixDisplay.from_predictions(refs,preds, normalize=normalize, labels=labels, display_labels=display_labels)
     plt.xticks(rotation=45, ha="right")
     plt.subplots_adjust(bottom=0.4)
-    #plt.show()
+    #plt.show() # obs.: either show or save the confusion matrix
     plt.savefig(path)
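Similarly, a hypothetical call matching the signature above; the label lists, display labels, and output path are placeholders. The normalize argument is passed straight through to ConfusionMatrixDisplay.from_predictions, so the usual values "true", "pred", "all", or None apply.

from utils import create_confusion_matrix  # assumes utils.py is importable

refs  = ["positive", "negative", "positive", "negative"]
preds = ["positive", "positive", "negative", "negative"]

# Saves a row-normalized confusion matrix plot to the placeholder path.
create_confusion_matrix(refs, preds, "true", "output/confusion_matrix.png",
                        ["positive", "negative"], ["pos", "neg"])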
