Nekshay committed
Commit abb8bf0 · verified · 1 Parent(s): 16c3ce8

Update code.txt

Files changed (1): code.txt (+32 -0)
code.txt CHANGED
@@ -57,3 +57,35 @@ def main():
 
 if __name__ == "__main__":
     main()
+
+
+import numpy as np
+from sklearn.metrics import confusion_matrix, precision_score, recall_score, f1_score, accuracy_score
+
+# Assuming you have true labels and predicted labels
+y_true = [0, 1, 2, 1, 0, 1, 2, 2, 0]  # Replace with your true labels
+y_pred = [0, 0, 2, 1, 0, 1, 2, 1, 0]  # Replace with your predicted labels
+
+# Calculate the confusion matrix
+conf_matrix = confusion_matrix(y_true, y_pred)
+
+# Print the confusion matrix
+print("Confusion Matrix:")
+print(conf_matrix)
+
+# Calculate precision, recall, and F1-score for each label, plus overall accuracy
+precision = precision_score(y_true, y_pred, average=None)
+recall = recall_score(y_true, y_pred, average=None)
+f1 = f1_score(y_true, y_pred, average=None)
+accuracy = accuracy_score(y_true, y_pred)
+
+# Print precision, recall, and F1-score for each label
+for i in range(len(precision)):
+    print(f"Label {i}:")
+    print(f" Precision: {precision[i]:.4f}")
+    print(f" Recall: {recall[i]:.4f}")
+    print(f" F1-Score: {f1[i]:.4f}")
+    print()
+
+# Print overall accuracy
+print(f"Overall Accuracy: {accuracy:.4f}")
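
A side note on the metrics block added above: scikit-learn's classification_report produces the same per-label precision, recall, and F1 figures (plus support and overall accuracy) in a single call. A minimal sketch using the sample labels from this commit:

from sklearn.metrics import classification_report

# Sample labels copied from the diff above
y_true = [0, 1, 2, 1, 0, 1, 2, 2, 0]
y_pred = [0, 0, 2, 1, 0, 1, 2, 1, 0]

# Prints a table of per-label precision, recall, F1-score, and support,
# followed by overall accuracy (7/9 = 0.7778 for this sample).
print(classification_report(y_true, y_pred, digits=4))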