import logging
import shutil
import time

import editdistance as ed
from utils.ms_utils import CharsetMapper,onehot,blend_mask
import mindspore as ms
from mindspore import nn


class TextAccuracy(nn.Metric):
    """Text-recognition accuracy metric (MindSpore).

    Accumulates character accuracy (ccr), word accuracy (cwr), total edit
    distance (ted) and normalized edit distance (ned) across batches via
    ``update()``; ``eval()`` reports ``(char_acc, word_acc)``.
    """
    _names = ['ccr', 'cwr', 'ted', 'ned', 'ted/w']

    def __init__(self, charset_path, max_length, case_sensitive, model_eval):
        """
        Args:
            charset_path: path to the charset file consumed by CharsetMapper.
            max_length: maximum decoded sequence length (incl. end-token).
            case_sensitive: if False, texts are lower-cased before comparison.
            model_eval: which model branch to score ('vision', 'language' or
                'alignment'); falsy values default to 'alignment'.

        Raises:
            ValueError: if ``model_eval`` names an unknown branch.
        """
        super().__init__()  # fix: nn.Metric base class was never initialized
        self.charset_path = charset_path
        self.max_length = max_length
        self.case_sensitive = case_sensitive
        self.charset = CharsetMapper(charset_path, self.max_length)
        self.names = self._names

        self.model_eval = model_eval or 'alignment'
        # fix: raise instead of assert — asserts are stripped under `python -O`
        if self.model_eval not in ('vision', 'language', 'alignment'):
            raise ValueError(f'{self.model_eval} not in [vision, language, alignment]')

    def clear(self, **kwargs):
        """Reset all accumulators before a new evaluation pass."""
        self.total_num_char = 0.
        self.total_num_word = 0.
        self.correct_num_char = 0.
        self.correct_num_word = 0.
        self.total_ed = 0.
        self.total_ned = 0.

    def eval(self):
        """Return ``(char_accuracy, word_accuracy)`` over everything seen so far."""
        # fix: guard against ZeroDivisionError when eval() is called before
        # any update() (or on an empty dataset).
        if not self.total_num_char or not self.total_num_word:
            return 0., 0.
        char_acc = self.correct_num_char / self.total_num_char
        word_acc = self.correct_num_word / self.total_num_word
        return char_acc, word_acc

    def update(self, *inputs):
        """Accumulate statistics for one batch.

        Args:
            inputs[0]: model output — either a dict with 'logits' and
                'pt_lengths', or a list/tuple of such dicts keyed by 'name'.
            inputs[1]: ground-truth labels; either index form (B, T) or
                one-hot form (B, T, C).

        Raises:
            ValueError: if decoded lengths disagree with the model's
                reported 'pt_lengths'.
        """
        last_output = inputs[0]
        last_target = inputs[1]

        output = self._get_output(last_output)
        logits, pt_lengths = output['logits'], output['pt_lengths']
        pt_text, pt_scores, pt_lengths_ = self.decode(logits)
        # fix: explicit raise instead of assert (stripped under -O)
        if not (pt_lengths == pt_lengths_).all():
            raise ValueError(f'{pt_lengths} != {pt_lengths_} for {pt_text}')

        # Store the decoded text/scores back on the evaluated branch so
        # downstream consumers of the output can read them.
        last_output = self._update_output(
            last_output, {'pt_text': pt_text, 'pt_scores': pt_scores})

        pt_text = [self.charset.trim(t) for t in pt_text]
        label = last_target
        # fix: MindSpore tensors expose .ndim, not torch's .dim() method
        if label.ndim == 3:
            label = label.argmax(axis=-1)  # collapse one-hot labels to indices
        gt_text = [self.charset.get_text(l, trim=True) for l in label]

        for gt, pt in zip(gt_text, pt_text):
            if not self.case_sensitive:
                gt, pt = gt.lower(), pt.lower()
            distance = ed.eval(gt, pt)
            self.total_ed += distance
            # Normalize by ground-truth length; max(…, 1) avoids /0 on empty GT.
            self.total_ned += float(distance) / max(len(gt), 1)

            if gt == pt:
                self.correct_num_word += 1
            self.total_num_word += 1

            # Position-wise character matches over the common prefix length.
            for g_ch, p_ch in zip(gt, pt):
                if g_ch == p_ch:
                    self.correct_num_char += 1
            self.total_num_char += len(gt)

    def _get_output(self, last_output):
        """Select the branch named ``self.model_eval`` from a multi-branch output."""
        output = None
        if isinstance(last_output, (tuple, list)):
            for res in last_output:
                if res['name'] == self.model_eval:
                    output = res
        else:
            output = last_output
        # fix: previously fell through to an unbound-variable NameError
        # when no branch matched.
        if output is None:
            raise KeyError(f'no output named {self.model_eval}')
        return output

    def _update_output(self, last_output, items):
        """Merge ``items`` into the evaluated branch (or the single output dict)."""
        if isinstance(last_output, (tuple, list)):
            for res in last_output:
                if res['name'] == self.model_eval:
                    res.update(items)
        else:
            last_output.update(items)
        return last_output

    def decode(self, logit):
        """Greedily decode class scores into text.

        Args:
            logit: raw class scores, presumably (B, T, C) — softmax is taken
                over axis 2.

        Returns:
            tuple: (pt_text, pt_scores, pt_lengths) — decoded strings,
            confidence scores, and predicted lengths (incl. end-token)
            as an int64 tensor.
        """
        softmax = ms.ops.Softmax(axis=2)
        out = softmax(logit)
        pt_text, pt_scores, pt_lengths = [], [], []
        for o in out:
            text = self.charset.get_text(o.argmax(axis=1), padding=False, trim=False)
            text = text.split(self.charset.null_char)[0]  # truncate at end-token
            pt_text.append(text)
            # NOTE(review): MindSpore's Tensor.max(axis=1) returns the max
            # values directly (not torch's (values, indices) pair), so the
            # trailing [0] keeps only the first step's score — looks like a
            # torch-port artifact; confirm intended shape of pt_scores.
            pt_scores.append(o.max(axis=1)[0])
            pt_lengths.append(min(len(text) + 1, self.max_length))  # +1 for end-token
        pt_scores = ms.ops.Stack()(pt_scores)
        pt_lengths = ms.Tensor(pt_lengths, dtype=ms.int64)
        return pt_text, pt_scores, pt_lengths
