

def get_metrics(y_true, y_pred, num_classes=None):
    """Print standard classification metrics for the given predictions.

    For binary problems (2 classes) the positive-class (pos_label=1)
    precision/recall/F1 are printed; for multiclass problems the
    'weighted' averages are printed instead. Accuracy, the confusion
    matrix, and sklearn's full classification report are always printed.

    Args:
        y_true: Ground-truth labels (array-like).
        y_pred: Predicted labels (array-like, same length as y_true).
        num_classes: Number of distinct classes. If None, falls back to a
            module-level ``num_classes`` global when one exists (original
            behavior), otherwise it is inferred from the labels present
            in ``y_true`` and ``y_pred``.

    Returns:
        None. All metrics are written to stdout.
    """
    if num_classes is None:
        # Original code read a module-level global; keep that behavior
        # when available, otherwise infer from the data so the function
        # no longer raises NameError when no global is defined.
        num_classes = globals().get('num_classes')
        if num_classes is None:
            num_classes = len(set(y_true) | set(y_pred))

    sep = '*' * 27
    if num_classes == 2:
        # Binary case: report metrics for the positive class (label 1).
        print(sep, 'precision_score:{:.3f}'.format(precision_score(y_true, y_pred, pos_label=1)))
        print(sep, 'recall_score:{:.3f}'.format(recall_score(y_true, y_pred, pos_label=1)))
        print(sep, 'f1_score:{:.3f}'.format(f1_score(y_true, y_pred, pos_label=1)))
    else:
        # Multiclass case: weighted averaging accounts for class imbalance.
        average = 'weighted'
        print(sep, average + '_precision_score:{:.3f}'.format(precision_score(y_true, y_pred, average=average)))
        # Fixed: was '{:.3}' (3 significant digits), inconsistent with the
        # '{:.3f}' (3 decimal places) used by every other metric here.
        print(sep, average + '_recall_score:{:.3f}'.format(recall_score(y_true, y_pred, average=average)))
        print(sep, average + '_f1_score:{:.3f}'.format(f1_score(y_true, y_pred, average=average)))

    print(sep, 'accuracy:{:.3f}'.format(accuracy_score(y_true, y_pred)))
    print(sep, 'confusion_matrix:\n', confusion_matrix(y_true, y_pred))
    print(sep, 'classification_report:\n', classification_report(y_true, y_pred))
