import os
from matplotlib.font_manager import FontProperties
import itertools
import matplotlib.pyplot as plt
import numpy as np
from sklearn.metrics import confusion_matrix
def print_cm(true, pred, location):
    """Print per-class recall, UAR, WAR and F1 for a 3-class task.

    Labels are assumed to be coded 0 = absent, 1 = soft, 2 = loud
    (this matches the hard-coded row/column indexing below).

    Parameters
    ----------
    true : sequence of int
        Ground-truth labels (values 0, 1, 2).
    pred : sequence of int
        Predicted labels (values 0, 1, 2).
    location : Any
        Identifier printed in the section header (e.g. auscultation site).
    """
    # Build a fixed 3x3 confusion matrix (rows = true, cols = predicted).
    # Filling it directly guarantees shape (3, 3) even when a class never
    # occurs in `true`/`pred`; sklearn's confusion_matrix(true, pred) would
    # shrink the matrix in that case and cm[2][2] below would IndexError.
    cm = np.zeros((3, 3), dtype=np.int64)
    for t, p in zip(true, pred):
        cm[t][p] += 1

    def _safe_div(num, den):
        # Avoid nan/inf (and numpy's divide warning) when a class count is 0.
        return num / den if den else 0.0

    # Recall per class = correct predictions / true occurrences of the class.
    Absent_num = np.sum(cm[0])
    Soft_num = np.sum(cm[1])
    Loud_num = np.sum(cm[2])
    Absent_recall = _safe_div(cm[0][0], Absent_num)
    Soft_recall = _safe_div(cm[1][1], Soft_num)
    Loud_recall = _safe_div(cm[2][2], Loud_num)
    All_num = Absent_num + Soft_num + Loud_num

    # UAR: unweighted average recall; WAR: recall weighted by class support
    # (equivalent to overall accuracy).
    UAR = (Absent_recall + Soft_recall + Loud_recall) / 3
    WAR = _safe_div(
        Absent_recall * Absent_num + Soft_recall * Soft_num + Loud_recall * Loud_num,
        All_num)

    print("------------------------------"+str(location)+"----------------------------")
    print("Absent_recall: %.4f, Soft_recall: %.4f, Loud_recall: %.4f, UAR: %.4f, WAR:  %.4f"
          % (Absent_recall, Soft_recall, Loud_recall, UAR ,WAR))

    # Precision per class = correct predictions / predicted occurrences
    # (column sums of the confusion matrix).
    a = np.sum(cm, 0)
    Absent_Precision = _safe_div(cm[0][0], a[0])
    Soft_Precision = _safe_div(cm[1][1], a[1])
    Loud_Precision = _safe_div(cm[2][2], a[2])

    # F1 = harmonic mean of precision and recall; 0 when both are 0.
    Absent_f1 = _safe_div(2 * Absent_recall * Absent_Precision, Absent_recall + Absent_Precision)
    Soft_f1 = _safe_div(2 * Soft_recall * Soft_Precision, Soft_recall + Soft_Precision)
    Loud_f1 = _safe_div(2 * Loud_recall * Loud_Precision, Loud_recall + Loud_Precision)
    UF1 = (Absent_f1 + Soft_f1 + Loud_f1) / 3
    print("Absent_F1: %.4f, Soft_F1: %.4f, Loud_F1: %.4f,UF1: %.4f"
          % (Absent_f1, Soft_f1, Loud_f1, UF1))


# Plot the confusion matrix
def plot_confusion_matrix(cm, classes, normalize=False, title='Confusion matrix', cmap=plt.cm.Blues):
    """Render a confusion matrix as an annotated heatmap and show it.

    Parameters
    ----------
    cm : ndarray
        Confusion-matrix values (rows = true labels, cols = predicted).
    classes : sequence of str
        Tick label for each row/column of the matrix.
    normalize : bool
        True to display row-normalized ratios, False to display raw counts.
    title : str
        Figure title.
    cmap : matplotlib colormap
        Colormap used for the heatmap cells.
    """
    if normalize:
        # Row-normalize so each true-label row sums to 1.
        row_totals = cm.sum(axis=1)[:, np.newaxis]
        cm = cm.astype('float') / row_totals
        print("显示百分比：")
        np.set_printoptions(formatter={'float': '{: 0.2f}'.format})

    plt.figure()
    plt.imshow(cm, interpolation='nearest', cmap=cmap)
    plt.title(title)

    ticks = np.arange(len(classes))
    plt.xticks(ticks, classes, rotation=45)
    plt.yticks(ticks, classes)
    # NOTE: on some matplotlib versions the top/bottom rows are clipped;
    # if that happens, call plt.ylim(len(classes) - 0.5, -0.5) here.

    # Annotate every cell; flip the text color on bright cells for contrast.
    cell_fmt = '.2f' if normalize else 'd'
    cutoff = cm.max() / 2.
    for row in range(cm.shape[0]):
        for col in range(cm.shape[1]):
            plt.text(col, row, format(cm[row, col], cell_fmt),
                     horizontalalignment="center",
                     color="white" if cm[row, col] > cutoff else "black")

    plt.tight_layout()
    plt.ylabel('True label')
    plt.xlabel('Predicted label')
    plt.show()


# cm1 = np.array([[151,10,0],[7,19,5],[0,0,10]])
# cm2 = np.array([[153,8,0],[8,19,4],[0,1,9]])
# cm3 = np.array([[148,12,0],[4,25,1],[0,1,10]])
# cm4 = np.array([[147,13,0],[6,22,2],[1,2,8]])
# cm5 = np.array([[148,12,0],[4,26,1],[0,2,8]])
# cm = cm1+cm2+cm3+cm4+cm5
# # cm = np.array([[319,24,0],[14,48,7],[1,5,24]])
# attack_types = ['absent', 'soft', 'loud']
# # 归一化
# # plot_confusion_matrix(cnf_matrix, classes=attack_types, normalize=True, title='Confusion matrix')
# # 不归一化
# result_path = 'E:/桌面/组会/实验结果图/'
#
# plot_confusion_matrix(cm, classes=attack_types, normalize=False, title='Confusion matrix')
# # plt.savefig('E:\桌面\实验结果\交叉验证混淆矩阵.jpg', dpi=300, bbox_inches='tight')
#
#
# #计算召回率 F1
# Absent_num = np.sum(cm[0])
# Soft_num = np.sum(cm[1])
# Loud_num = np.sum(cm[2])
# print("Absent: %d，Soft：%d，Loud：%d" %(Absent_num,Soft_num,Loud_num))
# Absent_recall = cm[0][0] / Absent_num
# Soft_recall = cm[1][1] / Soft_num
# Loud_recall = cm[2][2] / Loud_num
# UAR = (Absent_recall+Soft_recall+Loud_recall)/3
# WAR= (Absent_recall * Absent_num+Soft_recall*Soft_num+Loud_recall*Loud_num)/(Absent_num+Soft_num+Loud_num)
# print("------------------------------Patient result------------------------------" )
# print("Absent_recall: %.4f, Soft_recall: %.4f, Loud_recall: %.4f, UAR: %.4f, WAR: %.4f"
#       % (Absent_recall, Soft_recall, Loud_recall,UAR,WAR))
# a = np.sum(cm, 0)
# Absent_Precision = cm[0][0] / a[0]
# Soft_Precision = cm[1][1] / a[1]
# Loud_Precision = cm[2][2] / a[2]
#
# Absent_f1 = (2 * Absent_recall * Absent_Precision) / (Absent_recall + Absent_Precision)
# Soft_f1 = (2 * Soft_recall * Soft_Precision) / (Soft_recall + Soft_Precision)
# Loud_f1 = (2 * Loud_recall * Loud_Precision) / (Loud_recall + Loud_Precision)
# UAF = (Absent_f1+Soft_f1+Loud_f1)/3
#
# print("Absent_F1: %.4f, Soft_F1: %.4f, Loud_F1: %.4f, UAF: %.4f"
#       % (Absent_f1, Soft_f1, Loud_f1,UAF))
# acc =(cm[0][0]+cm[1][1]+cm[2][2])/sum(sum(cm))
# print("Acc: %.4f" % acc )