import matplotlib.pyplot as plt
import numpy as np
# plot_confusion_matrix was removed in scikit-learn 1.2; ConfusionMatrixDisplay replaces it
from sklearn.metrics import ConfusionMatrixDisplay
from sklearn.metrics import confusion_matrix
from sklearn.metrics import classification_report
import pandas as pd
import seaborn as sns
# Builds a per-class metrics table (precision/recall/F1/support plus
# FP/FN/TP/TN counts) and saves a confusion-matrix plot to pathSave.
def evaluate_model(clf, X_test, y_test, y_pred, valid_y, classes, classesName, pathSave):
    precision = []
    recall = []
    f1 = []
    support = []
    df = pd.DataFrame(columns=['className', 'precision', 'recall', 'f1-score',
                               'support', 'FP', 'FN', 'TP', 'TN'])
    # classification_report expects (y_true, y_pred), in that order
    report = classification_report(valid_y, y_pred, output_dict=True)
    for c in classes:
        key = str(c)  # the report's per-class keys are the labels as strings
        precision.append(report[key]['precision'])
        recall.append(report[key]['recall'])
        f1.append(report[key]['f1-score'])
        support.append(report[key]['support'])
    accuracy = report['accuracy']
    weighted_avg = report['weighted avg']
    # Derive per-class counts from the confusion matrix: column sums minus
    # the diagonal give false positives, row sums minus the diagonal give
    # false negatives, and the diagonal itself gives true positives.
    cnf_matrix = confusion_matrix(valid_y, y_pred)
    FP = cnf_matrix.sum(axis=0) - np.diag(cnf_matrix)
    FN = cnf_matrix.sum(axis=1) - np.diag(cnf_matrix)
    TP = np.diag(cnf_matrix)
    TN = cnf_matrix.sum() - (FP + FN + TP)
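    # Worked micro-example of the derivation above (an illustrative sketch,
    # not from the original post): for cnf_matrix = [[5, 1], [2, 4]],
    #   FP = column sums - diagonal = [7, 5] - [5, 4] = [2, 1]
    #   FN = row sums    - diagonal = [6, 6] - [5, 4] = [1, 2]
    #   TP = diagonal               = [5, 4]
    #   TN = total - (FP + FN + TP) = 12 - [8, 7]    = [4, 5]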
    df['className'] = classesName
    df['precision'] = precision
    df['recall'] = recall
    df['f1-score'] = f1
    df['support'] = support
    df['FP'] = FP
    df['FN'] = FN
    df['TP'] = TP
    df['TN'] = TN
    # Plot and save the confusion matrix. plot_confusion_matrix was removed
    # in scikit-learn 1.2; ConfusionMatrixDisplay.from_estimator replaces it.
    plt.rcParams["font.size"] = 3
    ConfusionMatrixDisplay.from_estimator(clf, X_test, y_test)
    plt.savefig(pathSave)
    return df, accuracy, weighted_avg
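
# A quick end-to-end usage sketch. The iris data, LogisticRegression model,
# and label lists below are illustrative stand-ins, not part of the original
# post; any fitted scikit-learn classifier and held-out split would do.
from sklearn.datasets import load_iris
from sklearn.linear_model import LogisticRegression
from sklearn.model_selection import train_test_split

X, y = load_iris(return_X_y=True)
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=0)
clf = LogisticRegression(max_iter=1000).fit(X_train, y_train)
y_pred = clf.predict(X_test)

# y_test doubles as valid_y here, since both refer to the held-out labels.
classes = [0, 1, 2]
classesName = ['setosa', 'versicolor', 'virginica']
df, accuracy, weighted_avg = evaluate_model(
    clf, X_test, y_test, y_pred, y_test, classes, classesName,
    'confusion_matrix.png')
print(df)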