
import numpy as np
from keras.utils import np_utils
from sklearn.metrics import recall_score, confusion_matrix, roc_auc_score, precision_score, roc_curve, f1_score, \
    accuracy_score
from config import Config
from util.auto_str import auto_str
from entity.evaluation import Evaluation
from plot import Plot
from log import log
from dao.evaluationdao import EvaluationDao


@auto_str
class Predict:
    """Evaluate a trained classifier on a test set.

    Computes AUC/accuracy/F1/precision plus confusion-matrix-derived rates
    (TPR/TNR/FPR/FNR, KS), saves ROC and PR plots under storage/, and
    persists the record through EvaluationDao.
    """

    # NOTE(review): class-level attribute — shared by every Predict instance
    # and mutated in place by calc_metrics. Confirm this sharing is intended;
    # an instance attribute set in __init__ would be safer.
    evaluation = Evaluation()

    def _gety_pred(self, predict_list):
        """Convert rows of per-class scores into a one-hot prediction matrix.

        :param predict_list: 2-D array-like, one row of class scores per sample.
        :return: one-hot matrix with Config.NUM_CLASSES columns.
        """
        # Vectorized row-wise argmax replaces the original Python loop and
        # avoids shadowing the builtin `max`.
        class_indices = np.argmax(np.asarray(predict_list), axis=1)
        return np_utils.to_categorical(class_indices, Config.NUM_CLASSES)

    def calc_metrics(self, model, x_test, y_test, epoch, train_id: int):  # compute the evaluation metrics
        """Run `model` on the test set and record all evaluation metrics.

        :param model: trained model exposing `predict(x_test)`.
        :param x_test: test features.
        :param y_test: ground-truth labels, assumed one-hot encoded
                       (n4ton decodes them) — TODO confirm against caller.
        :param epoch: zero-based epoch index; persisted as epoch + 1.
        :param train_id: id of the training run this record belongs to.
        """
        log.info("Running at predict")
        y_pred = model.predict(x_test)          # raw per-class scores
        val_predict = self._gety_pred(y_pred)   # thresholded one-hot predictions
        val_targ = y_test
        # AUC is computed on the raw scores; the one-hot predictions are
        # used for the remaining (hard-decision) metrics.
        self.evaluation.auc_score = roc_auc_score(val_targ, y_pred)
        # NOTE(review): recall is computed but never stored on the
        # evaluation record — possible omission upstream; kept as-is.
        _val_recall = recall_score(val_targ, val_predict, average='macro')
        self.evaluation.accuracy_score = accuracy_score(val_targ, val_predict)
        self.evaluation.f1_score = f1_score(val_targ, val_predict, average='macro')
        self.evaluation.precision_score = precision_score(val_targ, val_predict, average="macro")
        roc_img_name = f"storage/{epoch + 1}ROC{train_id}.png"
        pr_img_name = f"storage/{epoch + 1}PR{train_id}.png"
        labels = [
            "Harmful",
            "Kitchen",
            "Other",
            "Recyclable",
            "micro",
            "macro",
        ]
        Plot().plot_ROC(val_targ, y_pred, labels, roc_img_name)
        Plot().plot_pr(val_targ, y_pred, labels, pr_img_name)
        # Per-class TP/FP/FN/TN derived from the multi-class confusion matrix.
        cnf_matrix: np.ndarray = confusion_matrix(self.n4ton(val_targ), self.n4ton(val_predict))
        FP = cnf_matrix.sum(axis=0) - np.diag(cnf_matrix).astype(float)
        FN = cnf_matrix.sum(axis=1) - np.diag(cnf_matrix).astype(float)
        TP = np.diag(cnf_matrix).astype(float)
        TN = cnf_matrix.sum() - (FP + FN + TP).astype(float)
        log.debug(TP)
        TPR = TP / (TP + FN)  # sensitivity / recall, per class
        TNR = TN / (TN + FP)  # specificity, per class
        FPR = FP / (FP + TN)  # fall-out, per class
        FNR = FN / (TP + FN)  # miss rate, per class
        # np.trace sums the diagonal (correct predictions) in one call,
        # replacing the original manual accumulation loop.
        acc = np.trace(cnf_matrix) / np.sum(cnf_matrix)
        log.debug("acc" + str(acc))
        # BUGFIX: these averages were divided by a hard-coded 4; use
        # Config.NUM_CLASSES so the code stays correct if the class count
        # ever changes (identical result for the current 4 classes).
        self.evaluation.fpr = FPR.sum() / Config.NUM_CLASSES
        self.evaluation.tpr = TPR.sum() / Config.NUM_CLASSES
        self.evaluation.tnr = TNR.sum() / Config.NUM_CLASSES
        self.evaluation.fnr = FNR.sum() / Config.NUM_CLASSES
        # Kolmogorov-Smirnov statistic: separation between TPR and FPR.
        self.evaluation.ks = self.evaluation.tpr - self.evaluation.fpr
        self.evaluation.epoch = epoch + 1
        self.evaluation.roc_img_name = roc_img_name
        self.evaluation.pr_img_name = pr_img_name
        self.evaluation.train_id = train_id
        self.evaluation.id = None  # let the persistence layer assign the key
        log.info(self.evaluation)
        ed = EvaluationDao()
        ed.setAll(self.evaluation)

    def n4ton(self, data):
        """Decode one-hot rows back to integer class indices.

        Preserves the original semantics exactly: for each row, the index of
        the FIRST element equal to 1 is taken; rows containing no 1 are
        skipped entirely (so the output may be shorter than the input).

        :param data: iterable of one-hot (0/1) rows.
        :return: 1-D np.ndarray of class indices.
        """
        res = []
        for row in data:
            hits = np.flatnonzero(np.asarray(row) == 1)
            if hits.size:
                res.append(int(hits[0]))
        return np.array(res)

