evaluate.py
import numpy as np
from sklearn.metrics import confusion_matrix, f1_score, jaccard_score

# NOTE: the model-building function `model` and the test arrays x_test / y_test
# are assumed to be defined or imported elsewhere in the project (model
# definition and data-loading scripts); they are not created in this file.
model = model(input_size=(512, 512, 3))
model.summary()
model.load_weights('...')  # path to the trained weights

# Per-pixel probability maps for the test images
predictions = model.predict(x_test, batch_size=2, verbose=1)

# Flatten predictions and ground-truth masks into one column of pixels
y_scores = predictions.reshape(-1, 1)
print(y_scores.shape)
y_true = y_test.reshape(-1, 1)

# Binarize both at 0.5
y_scores = np.where(y_scores > 0.5, 1, 0)
y_true = np.where(y_true > 0.5, 1, 0)

# Confusion matrix
threshold_confusion = 0.5
print("\nConfusion matrix: custom threshold (for positive) of " + str(threshold_confusion))
# Vectorized thresholding (y_scores is already binary, so 0.5 is a pass-through)
y_pred = np.where(y_scores[:, 0] >= threshold_confusion, 1, 0)
confusion = confusion_matrix(y_true, y_pred)
print(confusion)

# Sensitivity (recall) = TP / (TP + FN)
sensitivity = 0
if float(confusion[1, 1] + confusion[1, 0]) != 0:
    sensitivity = float(confusion[1, 1]) / float(confusion[1, 1] + confusion[1, 0])
print("Sensitivity: " + str(sensitivity))

# Precision = TP / (TP + FP)
precision = 0
if float(confusion[1, 1] + confusion[0, 1]) != 0:
    precision = float(confusion[1, 1]) / float(confusion[1, 1] + confusion[0, 1])
print("Precision: " + str(precision))

# Jaccard similarity index (jaccard_similarity_score was removed from
# scikit-learn; jaccard_score is the current function for the binary IoU)
jaccard_index = jaccard_score(y_true, y_pred)
print("\nJaccard similarity score: " + str(jaccard_index))

# F1 score
F1_score = f1_score(y_true, y_pred, labels=None, average='binary', sample_weight=None)
print("\nF1 score (F-measure): " + str(F1_score))