#! -*- encoding:utf-8 -*-
"""
@File    :   task_n.py
@Author  :   Zachary Li
@Contact :   li_zaaachary@163.com
@Dscpt   :   
"""
import logging
import pdb

# Console logger for training progress.
# NOTE: the logger's own level must be set explicitly — a handler's level
# alone does not enable INFO records, because the logger's effective level
# (WARNING, inherited from root by default) filters first. Without
# setLevel(INFO) every logger.info(...) in this module is silently dropped.
logger = logging.getLogger("trainer")
logger.setLevel(logging.INFO)

console = logging.StreamHandler()
console.setLevel(logging.INFO)
formatter = logging.Formatter('%(asctime)s %(name)s - %(message)s', datefmt=r"%y/%m/%d %H:%M")
console.setFormatter(formatter)
logger.addHandler(console)

from utils.base_trainer import BaseTrainer
from utils.common import get_device


class Trainer(BaseTrainer):
    """CSQA trainer.

    Extends BaseTrainer with mixed-precision (fp16) bookkeeping and optional
    per-batch sequence clipping (trimming shared trailing zero-padding).
    """

    def __init__(self,
        model, multi_gpu, device,
        print_step, eval_after_tacc,
        output_model_dir, fp16, clip_batch_off, exp_name='csqa'):
        """Forward the shared arguments to BaseTrainer and record the
        fp16 / clip_batch_off flags used by _forward.
        """
        super(Trainer, self).__init__(
            model, multi_gpu, device, print_step, eval_after_tacc, output_model_dir, v_num=3,
            exp_name=exp_name
        )

        self.fp16 = fp16
        self.clip_batch_off = clip_batch_off
        logger.info(f"fp16: {fp16}; clip_batch_off: {clip_batch_off}")

    @staticmethod
    def clip_batch(batch):
        """Trim trailing zero-padding so every tensor in the batch is cut to
        the longest real sequence length in the batch.

        Accepts a tuple of 3 tensors (input_ids, attention_mask,
        token_type_ids), 4 (+ labels), or 7 (+ long_* variants and labels),
        and returns a tuple of the same arity and order, with the *_ids /
        *_mask tensors sliced along the last dimension.
        """
        if len(batch) == 4:
            input_ids, attention_mask, token_type_ids, labels = batch
        elif len(batch) == 7:
            (input_ids, attention_mask, token_type_ids,
             long_input_ids, long_attention_mask, long_token_type_ids, labels) = batch
        else:
            input_ids, attention_mask, token_type_ids = batch

        # Drop the last column while it is padding (zero) for every example.
        # The `> 1` guard keeps at least one column, so an all-zero batch
        # cannot shrink to an empty tensor (the original per-row loop would
        # eventually fail with an index error in that degenerate case).
        while input_ids.size(-1) > 1 and bool((input_ids[..., -1] == 0).all()):
            input_ids = input_ids[..., :-1]

        max_seq_length = input_ids.size(-1)
        attention_mask = attention_mask[..., :max_seq_length]
        token_type_ids = token_type_ids[..., :max_seq_length]

        output = (input_ids, attention_mask, token_type_ids)
        if len(batch) == 4:
            output = output + (labels,)
        elif len(batch) == 7:
            # BUG FIX: clip the long_* tensors ONLY for 7-tuple batches.
            # The original ran this loop unconditionally, raising NameError
            # on `long_input_ids` for 3- and 4-tuple batches. The vectorized
            # all-zero check also covers every slot of dim 1, not just the
            # hard-coded first two.
            while long_input_ids.size(-1) > 1 and bool((long_input_ids[..., -1] == 0).all()):
                long_input_ids = long_input_ids[..., :-1]
            long_seq_length = long_input_ids.size(-1)
            long_attention_mask = long_attention_mask[..., :long_seq_length]
            long_token_type_ids = long_token_type_ids[..., :long_seq_length]
            output = output + (long_input_ids, long_attention_mask, long_token_type_ids, labels)
        return output

    def _forward(self, batch):
        """Run one forward pass; returns (loss, detached CPU predictions)."""
        if not self.clip_batch_off:
            batch = self.clip_batch(batch)
        batch = tuple(t.to(self.device) for t in batch)
        result = self.model(*batch)  # loss, right_num
        # Detach predictions so statistics never hold the graph alive.
        predicts = result[-1].detach().cpu()

        return result[0], predicts    # loss

    def _report(self, p, r, f1, loss, acc, mode='Train'):
        """Log precision / recall / f1 / accuracy (as percentages) and loss."""
        output_str = f"{mode}: loss {loss:.4f}; p: {p*100:.3f}%, r: {r*100:.3f}%, f1: {f1*100:.3f}%, acc {acc*100:.3f}%"

        logger.info(output_str)
