# changedetection/utils/my_metrics.py
 1  #!/usr/bin/env python3
 2  # -*- coding: utf-8 -*-
 3  """
 4  Created on Thu Mar 10 08:08:56 2022
 5  
 6  @author: aleoikon
 7  """
 8  
'''
Recall      = Tp / (Tp + Fn) x 100
Accuracy    = (Tp + Tn) / (Tp + Fn + Fp + Tn) x 100
Specificity = Tn / (Tn + Fp) x 100
Precision   = Tp / (Tp + Fp) x 100
F_measure   = (2 x Recall x Precision) / (Recall + Precision)

TP: True Change, Predicted Change
TN: True No Change, Predicted No Change
FP: True No Change, Predicted Change
FN: True Change, Predicted No Change
'''
21  
22  
23  import numpy as np
24  from sklearn.metrics import confusion_matrix
25  import matplotlib.pyplot as plt
26  
27  y_true = np.array([0,0,0,1,1,1])
28  y_pred = np.array([0,1,0,1,0,0])
29  confusion_matrix(y_true, y_pred, labels=[0,1])
30  
31  def get_confusion_matrix(y_true, y_predicted):
32      y_true = y_true.flatten()
33      y_pred = y_predicted.flatten()
34      print(confusion_matrix(y_true, y_pred, labels=[0,1]))
35      
36  
37  def recall(y_true, y_predicted):
38      y_true = y_true.flatten()
39      y_pred = y_predicted.flatten()
40      tn, fp, fn, tp = confusion_matrix(y_true, y_pred, labels=[0,1]).ravel() 
41      #print(confusion_matrix(y_true, y_pred, labels=[0,1]))
42      recall = (tp / (tp+fn)) * 100
43      return recall
44  
45  def accuracy(y_true, y_predicted):
46      y_true = y_true.flatten()
47      y_pred = y_predicted.flatten()
48      tn, fp, fn, tp = confusion_matrix(y_true, y_pred, labels=[0,1]).ravel() 
49      accuracy = ((tp+tn)/(tp+fn+fp+tn))*100
50      return accuracy
51  
52  def specificity(y_true, y_predicted):
53      y_true = y_true.flatten()
54      y_pred = y_predicted.flatten()
55      tn, fp, fn, tp = confusion_matrix(y_true, y_pred, labels=[0,1]).ravel()
56      specificity = (tn / (tn+fp))*100
57      return specificity
58  
59  def precision(y_true, y_predicted):
60      y_true = y_true.flatten()
61      y_pred = y_predicted.flatten()
62      tn, fp, fn, tp = confusion_matrix(y_true, y_pred, labels=[0,1]).ravel()
63      precision = (tp/(tp+fp))*100
64      return precision
65  
66  def f_measure(y_true,y_predicted):
67      y_true = y_true.flatten()
68      y_pred = y_predicted.flatten()
69      tn, fp, fn, tp = confusion_matrix(y_true, y_pred, labels=[0,1]).ravel()
70      f = (2 * recall(y_true,y_pred) * precision(y_true,y_pred)) / (recall(y_true,y_pred) + precision(y_true,y_pred))
71      return f
72  
73  
74  from sklearn.metrics import roc_curve, roc_auc_score
75  
76  def get_roc(y_true,y_predicted):
77      y_true = y_true.flatten()
78      y_pred = y_predicted.flatten()
79      false_positive_rate, true_positive_rate, threshold = roc_curve(y_true, y_pred)
80      print('roc_auc_score: ', roc_auc_score(y_true, y_pred))
81  
82      plt.subplots(1, figsize=(10,10))
83      plt.title('Receiver Operating Characteristic')
84      plt.plot(false_positive_rate, true_positive_rate)
85      plt.plot([0, 1], ls="--")
86      plt.plot([0, 0], [1, 0] , c=".7"), plt.plot([1, 1] , c=".7")
87      plt.ylabel('True Positive Rate')
88      plt.xlabel('False Positive Rate')
89      plt.show()
90  
91  
92  
93  
94