import sys
sys.path.append("..")
import torch
import torch.nn as nn
import torch.nn.functional as F
from transformers import AdamW, BertConfig, BertTokenizer, BertModel, AutoTokenizer
from torch.utils.data import DataLoader, random_split
from utils.data_utils import DatesetReader
import logging as wlog
from models.bert_model import MyBert
from utils.metrics_utils import MetricsUtil
from utils.storage_utils import ModelUtil
from utils.msg_utils import MsgRobot

class Instructor:
    """Trains and evaluates a MyBert stance-classification model.

    Owns the tokenizer, dataset splits, data loaders, model, optimizer and
    loss, and tracks the best validation scores seen so far. The best model
    (by validation F1) is checkpointed via ModelUtil and reloaded for the
    final test-set evaluation.
    """

    def __init__(self, opt) -> None:
        """Build tokenizer, datasets, loaders, model and optimizer from `opt`."""
        self.opt = opt
        # Best validation metrics observed so far.
        self.max_acc = 0
        self.max_f1 = 0
        self.max_f1_m = 0
        # Epoch at which the best model was saved. Bug fix: previously this
        # attribute was only created inside test() when a new best score was
        # reached, so the early-stopping check in train() could raise
        # AttributeError if the first epochs never improved both F1 scores.
        self.max_epoch = 0

        #region model initialization
        self.tokenizer = AutoTokenizer.from_pretrained(opt.plm, normalization=True)
        self.stance_dataset = DatesetReader(opt, self.tokenizer, dataset=opt.db)

        # Split the training data into train/validation subsets by ratio.
        train_len = int(len(self.stance_dataset.train_data) * opt.train_ratio)
        self.trainset, self.valset = random_split(
            self.stance_dataset.train_data,
            (train_len, len(self.stance_dataset.train_data) - train_len))
        self.train_data_loader = DataLoader(self.trainset, batch_size=opt.batch_size, shuffle=True)
        self.val_data_loader = DataLoader(self.valset, batch_size=opt.batch_size, shuffle=False)
        self.test_data_loader = DataLoader(self.stance_dataset.test_data, batch_size=opt.batch_size, shuffle=False)

        self.model = MyBert(self.opt).to(self.opt.device)
        # Freeze the BERT embedding layer; only the remaining (trainable)
        # parameters are handed to the optimizer.
        for n, p in self.model.named_parameters():
            if "bert.embeddings" in n:
                p.requires_grad = False
        self.optimizer = torch.optim.AdamW(
            filter(lambda p: p.requires_grad, self.model.parameters()),
            lr=opt.lr, weight_decay=opt.wd)

        # reduction='sum': per-batch loss is the summed (not mean) CE.
        self.bert_criterion = nn.CrossEntropyLoss(reduction='sum')
        #endregion

    def train(self):
        """Run the training loop with early stopping, then evaluate the best
        saved checkpoint on the test set.

        Returns:
            Tuple (max_f1, max_f1_m): best validation macro-F1 and averaged F1.
        """
        global_step = 0
        # Initialized up front so the end-of-epoch summary is safe even when
        # an epoch produces no (non-skipped) batches. Bug fix: `train_acc`
        # was previously assigned only inside the log-step branch and `loss`
        # only inside the batch loop, so either could be unbound at the
        # summary print (e.g. log_step larger than the number of batches).
        train_acc = 0.0
        loss = torch.tensor(0.0)
        epoch = 0

        for epoch in range(self.opt.num_epoch):
            print('epoch: ', epoch, '>' * 100)

            n_correct, n_total = 0, 0
            for _, sample_batched in enumerate(self.train_data_loader):
                global_step += 1
                self.model.train()
                self.optimizer.zero_grad()

                targets, texts, stances, input_idss, attention_masks, token_type_idss = sample_batched
                # A batch of one collapses after squeeze(); skip it.
                if len(stances) == 1:
                    continue
                labels = stances.to(self.opt.device)
                inputs = {
                    'input_ids': input_idss.squeeze().to(self.opt.device),
                    'attention_mask': attention_masks.squeeze().to(self.opt.device),
                    'token_type_ids': token_type_idss.squeeze().to(self.opt.device)
                }
                predict = self.model(inputs)

                loss = self.bert_criterion(predict, labels)

                loss.backward()
                self.optimizer.step()

                # Accumulate accuracy over every batch. Bug fix: the original
                # only counted batches that happened to fall on a log step, so
                # the reported accuracy covered a sparse sample of the epoch.
                n_correct += (torch.argmax(predict, -1) == labels).sum().item()
                n_total += len(predict)
                train_acc = n_correct / n_total if n_total else 0.0

                if global_step % self.opt.log_step == 0:
                    print('bert_loss: {:.4f}, acc: {:.4f}'.format(loss.item(), train_acc))

            val_acc, val_f1, val_f1_m = self.test(epoch, type='val')
            print('bert_loss: {:.4f}, acc: {:.4f}, val_acc: {:.4f}, val_f1: {:.4f}, val_f1_m: {:.4f}'.
                format(loss.item(), train_acc, val_acc, val_f1, val_f1_m))

            # Early stopping: no validation improvement for `endurance` epochs.
            if epoch - self.max_epoch > self.opt.endurance:
                print('early stop.')
                break

        # Reload the best checkpoint and evaluate once on the test set.
        print('load model...')
        self.model = MyBert(self.opt).to(self.opt.device)
        ModelUtil.load_model(self.model, self.opt.db)
        test_acc, test_f1, test_f1_m = self.test(epoch, type='test')

        result = f'{self.opt.db}, seed: {self.opt.seed}, lr: {self.opt.lr}, test_acc: {test_acc:.4f}, test_f1: {test_f1:.4f}, test_f1_m: {test_f1_m:.4f}'
        print(result)
        MsgRobot.send_msg(result)
        wlog.info(result)
        return self.max_f1, self.max_f1_m

    def test(self, epoch, type='val'):
        """Evaluate the model on the validation or test split.

        Saves a checkpoint and updates the best-score trackers when both F1
        metrics improve on a validation pass.

        Args:
            epoch: current epoch index (recorded when a new best is saved).
            type: 'val' evaluates the validation loader; anything else the
                test loader. (Name kept for caller compatibility even though
                it shadows the builtin.)

        Returns:
            Tuple (accuracy, f1_macro, f1_average) from MetricsUtil.
        """
        if type == 'val':
            data_loader = self.val_data_loader
        else:
            data_loader = self.test_data_loader

        self.model.eval()
        with torch.no_grad():
            all_labels = None
            all_predicts = None

            for _, t_sample_batched in enumerate(data_loader):
                targets, texts, stances, input_idss, attention_masks, token_type_idss = t_sample_batched
                # A batch of one collapses after squeeze(); skip it.
                if len(stances) == 1:
                    continue
                labels = stances.to(self.opt.device)
                inputs = {
                    'input_ids': input_idss.squeeze().to(self.opt.device),
                    'attention_mask': attention_masks.squeeze().to(self.opt.device),
                    'token_type_ids': token_type_idss.squeeze().to(self.opt.device)
                }
                predicts = self.model(inputs)

                if all_labels is None:
                    all_labels = labels
                    all_predicts = predicts
                else:
                    all_labels = torch.cat((all_labels, labels), dim=0)
                    all_predicts = torch.cat((all_predicts, predicts), dim=0)

        # NOTE(review): if the loader yields no usable batch, all_predicts is
        # still None here and MetricsUtil will receive None — verify upstream.
        metrics = MetricsUtil.cal_metrics(all_predicts, all_labels)
        accuracy = metrics['accuracy']
        f1 = metrics['f1_macro']
        f1_m = metrics['f1_average']

        # Checkpoint only when BOTH F1 metrics improve on the best so far.
        if f1 > self.max_f1 and f1_m > self.max_f1_m:
            print('model saved...')
            ModelUtil.save_model(self.model, self.opt.db)
            self.max_f1 = f1
            self.max_f1_m = f1_m
            self.max_epoch = epoch

        return accuracy, f1, f1_m