
Machine Learning / Deep Learning Performance Metric Function

Kimhj 2023. 10. 12. 13:34

 

  • This function computes model performance metrics from the model's inference results.
  • The final print statement is optional; just pass in the ground-truth labels (y_true), the predicted probabilities (y_prob), and a threshold value. A usage example follows the function definition below.

 

import numpy as np
from sklearn.metrics import roc_auc_score, average_precision_score, confusion_matrix, precision_score, recall_score, f1_score


def eval_metrics(y_true, y_prob, cut_off=0.5):

    '''
        Function: 
            Model Performance Calculation 

        Parameters:
            y_true (List or Numpy Array) : Label (Ground Truth)
            y_prob (List or Numpy Array) : Prediction Probability (Inference Results)
            cut_off (Float) : Threshold value
        Returns:
            Calculated Metrics (Tuple of Floats) : AUROC, AUPRC, F1-score, Precision, Recall, Sensitivity, Specificity

    '''

    # Binarize the probabilities at the given threshold (np.asarray allows list input)
    y_prob = np.asarray(y_prob)
    y_pred = np.where(y_prob > cut_off, 1, 0)

    # AUROC / AUPRC are threshold-independent: they are computed from the raw probabilities
    auc = round(roc_auc_score(y_true, y_prob), 3)
    prc = round(average_precision_score(y_true, y_prob), 3)
    # Precision / Recall / F1 depend on the threshold through y_pred
    precision = round(precision_score(y_true, y_pred), 3)
    recall = round(recall_score(y_true, y_pred), 3)
    f1 = round(f1_score(y_true, y_pred), 3)

    # Confusion matrix (CM): tn, fp, fn, tp for the binary case
    tn, fp, fn, tp = confusion_matrix(y_true, y_pred).ravel()
    # Sensitivity (identical to recall for the positive class)
    sensitivity = round(tp / (tp + fn), 3)
    # Specificity (true negative rate)
    specificity = round(tn / (tn + fp), 3)

    print(f"AUROC: {auc}, AUPRC: {prc}, F1-score: {f1}, Precision: {precision}, Recall(Sensitivity): {recall}, Specificity: {specificity}")

    return auc, prc, f1, precision, recall, sensitivity, specificity
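
 

Below is a minimal usage sketch; the labels and probabilities are made-up values for illustration only.

 

y_true = [0, 0, 1, 1, 0, 1, 0, 1]                    # ground-truth labels (made-up)
y_prob = [0.1, 0.4, 0.8, 0.35, 0.2, 0.9, 0.6, 0.7]   # predicted probabilities (made-up)

auc, prc, f1, precision, recall, sensitivity, specificity = eval_metrics(y_true, y_prob, cut_off=0.5)

 

Note that AUROC and AUPRC are computed directly from y_prob, so changing cut_off only affects the confusion-matrix-based metrics (precision, recall, F1-score, sensitivity, specificity).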