test_vis.py
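"""Standalone check of the plotting helpers: draws a dummy per-class metrics
heatmap and training-size vs. metric curves from hard-coded values (no model
training is performed)."""

# The project pipeline imports below (data loader, model, trainer, evaluater,
# config helpers) are kept from the original file but are not used by the
# plotting checks in this script.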
from comet_ml import experiment
from data_loader.uts_classification_data_loader import UtsClassificationDataLoader
from models.uts_classification_model import UtsClassificationModel
from trainers.uts_classification_trainer import UtsClassificationTrainer
from evaluater.uts_classification_evaluater import UtsClassificationEvaluater
from utils.config import process_config_VisOverfit
from utils.dirs import create_dirs
from utils.utils import get_args
import pandas as pd
import numpy as np
import os
import time
import matplotlib
matplotlib.use('TkAgg')
import matplotlib.pyplot as plt
import seaborn as sns
def plot_metrics_matrix():
    """Render a dummy 3x3 per-class metrics heatmap and save it to disk."""
    metrics = np.arange(9).reshape(3, 3)
    col_labels = ["Cls." + str(i) for i in range(3)]
    row_labels = ['Precision', 'Recall', 'F1-score']
    sns.set(font_scale=2.5)
    fig, ax = plt.subplots(figsize=(25, 25))
    sns.heatmap(metrics,
                ax=ax,
                xticklabels=True,
                yticklabels=True,
                cmap="YlGnBu",
                cbar=True,
                annot=True,
                square=True,
                fmt='.2f',
                annot_kws={'size': 20})
    ax.set_xticklabels(col_labels, fontsize=18, horizontalalignment='right')
    ax.set_yticklabels(row_labels, fontsize=18, horizontalalignment='right')
    plt.title('Metrics')
    # Save before show(): show() blocks, and saving afterwards writes a blank canvas.
    plt.savefig('hhh.png', format='png')
    plt.show()
    plt.cla()
    plt.clf()
    plt.close('all')
def plot_trainingsize_metric2(data):
    """Plot accuracy and f1 against training set size and save the figure."""
    plt.figure()
    plt.plot(data["training_size"], data["accuracy"])
    plt.plot(data["training_size"], data["f1"])
    plt.title('model')
    plt.ylabel('metric', fontsize='large')
    plt.xlabel('training_size', fontsize='large')
    plt.legend(['accuracy', 'f1'], loc='upper left')
    # Save before show() so the written file is not blank.
    plt.savefig('hhhh.png', bbox_inches='tight')
    plt.show()
def plot_trainingsize_metric1(data):
    """Same plot as plot_trainingsize_metric2, but on an explicitly numbered figure."""
    plt.figure(num=3)
    plt.plot(data["training_size"], data["accuracy"])
    plt.plot(data["training_size"], data["f1"])
    plt.title('model')
    plt.ylabel('metric', fontsize='large')
    plt.xlabel('training_size', fontsize='large')
    plt.legend(['accuracy', 'f1'], loc='upper left')
    # A single show() at the end: the original's intermediate show() calls block
    # before anything is drawn and leave the saved file blank.
    plt.savefig('hh.png', bbox_inches='tight')
    plt.show()
# Build dummy metric values to exercise the plotting helpers above.
split = 10
training_size = []
accuracy = []
precision = []
recall = []
f1 = []
# for i in range(split):
#     training_size.append(i+1000)
#     accuracy.append(i+2)
#     precision.append(i+1)
#     recall.append(i+3)
#     f1.append(i+
res = pd.DataFrame(data=np.zeros((1, 4), dtype=float),  # np.float is removed in recent NumPy
                   index=[0],
                   columns=['precision', 'accuracy', 'recall', 'duration'])
res['accuracy'] = 0.8
res['precision'] = 0.9
res['recall'] = 1
res['duration'] = 2
d = res.loc[0, 'accuracy']
training_size = [36, 72, 108]
accuracy = [d, d, d]
f1 = [res.loc[0, 'recall']] * 3  # placeholder: f1 reuses the recall column
metrics = {"accuracy": accuracy, "precision": precision, "recall": recall,
           "f1": f1, "training_size": training_size}
plot_metrics_matrix()
# plot_trainingsize_metric2(metrics)
plot_trainingsize_metric1(metrics)
print('k')