plot_functions.py
# Plot Functions
import matplotlib.pyplot as plt
import numpy as np
from sklearn import metrics


def plot_confusion_matrix(cm, classes, title, cmap):
    """Plot a 2x2 (binary) confusion matrix with TN/FP/FN/TP annotations."""
    plt.clf()
    plt.imshow(cm, interpolation='nearest', cmap=cmap)
    plt.title(title)
    plt.ylabel('True label')
    plt.xlabel('Predicted label')
    tick_marks = np.arange(len(classes))
    plt.xticks(tick_marks, classes, rotation=45)
    plt.yticks(tick_marks, classes)
    # Annotate each cell; the labels assume the sklearn convention
    # [[TN, FP], [FN, TP]] for a binary confusion matrix.
    labels = [['TN', 'FP'], ['FN', 'TP']]
    for i in range(2):
        for j in range(2):
            plt.text(j, i, labels[i][j] + " = " + str(cm[i][j]))
    plt.show()
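
# Illustrative usage sketch (not part of the original module): the matrix is
# expected to come from sklearn.metrics.confusion_matrix; y_test and y_pred
# below are hypothetical names for the true and predicted labels.
#
#     cm = metrics.confusion_matrix(y_test, y_pred)
#     plot_confusion_matrix(cm, classes=['negative', 'positive'],
#                           title='Confusion matrix', cmap=plt.cm.Blues)
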
def plot_roc_auc(named_models, X_test, y_test):
    """Plot ROC curves with AUC for an iterable of (name, fitted classifier) pairs.

    Each classifier must implement predict_proba; y_test holds the true
    binary labels for X_test.
    """
    plt.figure(figsize=(8, 6))
    for name, model in named_models:
        # Score the positive class, then compute the ROC curve and its AUC.
        y_score = model.predict_proba(X_test)[:, 1]
        fpr, tpr, _ = metrics.roc_curve(y_test, y_score)
        roc_auc = metrics.auc(fpr, tpr)
        plt.plot(fpr, tpr, lw=2, label='%s (area = %0.2f)' % (name, roc_auc))
    # Diagonal reference line for a random classifier.
    plt.plot([0, 1], [0, 1], color='navy', lw=2, linestyle='--')
    plt.xlim([0.0, 1.0])
    plt.ylim([0.0, 1.05])
    plt.xlabel('False Positive Rate')
    plt.ylabel('True Positive Rate')
    plt.title('ROC Curve')
    plt.legend(loc="lower right")
    plt.show()
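

# Minimal end-to-end sketch of how these helpers might be exercised. The
# dataset, model choices, and train/test split below are illustrative
# assumptions, not part of the original module.
if __name__ == "__main__":
    from sklearn.datasets import make_classification
    from sklearn.model_selection import train_test_split
    from sklearn.linear_model import LogisticRegression
    from sklearn.ensemble import RandomForestClassifier

    # Synthetic binary classification problem.
    X, y = make_classification(n_samples=500, n_features=10, random_state=0)
    X_train, X_test, y_train, y_test = train_test_split(
        X, y, test_size=0.25, random_state=0)

    # Fit a couple of classifiers that expose predict_proba.
    models = [
        ('Logistic Regression',
         LogisticRegression(max_iter=1000).fit(X_train, y_train)),
        ('Random Forest',
         RandomForestClassifier(random_state=0).fit(X_train, y_train)),
    ]

    # Confusion matrix for the first model.
    y_pred = models[0][1].predict(X_test)
    cm = metrics.confusion_matrix(y_test, y_pred)
    plot_confusion_matrix(cm, classes=['negative', 'positive'],
                          title='Logistic Regression', cmap=plt.cm.Blues)

    # ROC curves for both models on the held-out split.
    plot_roc_auc(models, X_test, y_test)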