-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathMetaboDashboardConfig.py
26 lines (19 loc) · 1.05 KB
/
MetaboDashboardConfig.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
from sklearn.metrics import accuracy_score, f1_score, precision_score, confusion_matrix, roc_auc_score, recall_score
# Number of features to keep for the PCA step.
# NOTE(review): usage is not visible in this file — presumably a feature-selection
# cap applied before PCA in the dashboard pipeline; confirm against callers.
NUMBER_FEATURE_TO_KEEP_FOR_PCA = 40
def true_positive_rate(y_true, y_pred):
    """Return the number of true positives (confusion-matrix cell [1, 1]).

    NOTE(review): despite the name, this is a raw count, not a rate
    (it is not divided by TP + FN). Kept as-is because STATISTICS and
    possibly other callers depend on the current behavior.
    """
    cm = confusion_matrix(y_true=y_true, y_pred=y_pred)
    return cm[1, 1]
def false_positive_rate(y_true, y_pred):
    """Return the number of false positives (confusion-matrix cell [0, 1]).

    NOTE(review): a raw count, not a rate, despite the name — preserved
    for compatibility with existing callers.
    """
    cm = confusion_matrix(y_true=y_true, y_pred=y_pred)
    return cm[0, 1]
def true_negative_rate(y_true, y_pred):
    """Return the number of true negatives (confusion-matrix cell [0, 0]).

    NOTE(review): a raw count, not a rate, despite the name — preserved
    for compatibility with existing callers.
    """
    cm = confusion_matrix(y_true=y_true, y_pred=y_pred)
    return cm[0, 0]
def false_negative_rate(y_true, y_pred):
    """Return the number of false negatives (confusion-matrix cell [1, 0]).

    NOTE(review): a raw count, not a rate, despite the name — preserved
    for compatibility with existing callers.
    """
    cm = confusion_matrix(y_true=y_true, y_pred=y_pred)
    return cm[1, 0]
# Mapping of metric display name -> callable(y_true, y_pred), presumably
# iterated by the dashboard to compute each statistic.
# NOTE(review): the four "True/False positive/negative" entries return raw
# confusion-matrix counts, not rates, despite the helper names.
STATISTICS={"Accuracy": accuracy_score,
"ROC AUC Score": roc_auc_score,
#"f1-score": f1_score,
#"Precision": precision_score,
#"Sensitivity": recall_score, # positives predicted correctly (TP / (TP + FN))
"True positive": true_positive_rate, # raw TP count (confusion-matrix cell [1, 1])
"False positive": false_positive_rate,
"True negative": true_negative_rate,
"False negative": false_negative_rate}