from models.base_model import BaseModel
from pytorch_transformers import BertForSequenceClassification, BertConfig
import torch
from torch import nn
import time
from os.path import join
from pytorch_transformers import AdamW
import re
from glob import glob
import numpy as np

pretrained_model_dir = '/home/njuciairs/wangshuai/pretrained_models/RoBERTa_zh_L12_PyTorch'  # /home/njuciairs/wangshuai/pretrained_models/RoBERTa_zh_Large_PyTorch


class BertMultiClass(BaseModel):
    """Three-way sequence classifier built on a pretrained RoBERTa/BERT model.

    Wraps ``BertForSequenceClassification`` and provides training (with an
    aggressive per-batch retry scheme in late epochs), validation, test-set
    inference, logit extraction, and best-checkpoint management.

    NOTE(review): relies on ``BaseModel`` to provide ``parameters()``,
    ``state_dict()``/``load_state_dict()``, ``self.writer`` (a TensorBoard
    SummaryWriter) and ``self.model_save_dir`` — confirm against base class.
    """

    def __init__(self, learning_rate=5e-6, version_id=None, continue_train=False, gamma=0.8,
                 model_name=None):
        """Build the classifier and its optimizer/scheduler.

        Args:
            learning_rate: AdamW learning rate.
            version_id: forwarded to ``BaseModel`` (versioning of save dirs).
            continue_train: forwarded to ``BaseModel``.
            gamma: per-epoch multiplicative LR decay for ``ExponentialLR``.
            model_name: logical model name; defaults to 'BertMultiClass_roberta'.
        """
        if model_name is None:
            model_name = 'BertMultiClass_roberta'
        self.model_name = model_name
        super().__init__(model_name=self.model_name, version_id=version_id, continue_train=continue_train)
        config = BertConfig.from_pretrained(pretrained_model_dir)
        config.num_labels = 3  # three-class classification head
        self.bertModel = BertForSequenceClassification.from_pretrained(pretrained_model_dir, config=config)

        self.optimizer = AdamW(self.parameters(), lr=learning_rate)
        self.crossEntropyLoss = nn.CrossEntropyLoss()
        self.steps = 0          # global optimizer-step counter (was duplicated in the original)
        self.negative_try = 0   # count of extra passes on batches that still had errors
        self.desperate_try = 0  # count of batches given up on after repeated retries
        self.epochs = 0         # epochs completed so far (gates the retry loop)
        self.lr_scheduler = torch.optim.lr_scheduler.ExponentialLR(self.optimizer, gamma, last_epoch=-1)

    def forward(self, inputs):
        """Run a supervised forward pass.

        Args:
            inputs: tuple ``(tokens, token_type_ids, attention_mask, labels)``.

        Returns:
            ``(loss, logits)`` as produced by ``BertForSequenceClassification``.
        """
        tokens, token_type_ids, attention_mask, labels = inputs
        outputs = self.bertModel(input_ids=tokens, token_type_ids=token_type_ids, attention_mask=attention_mask,
                                 labels=labels)
        loss, logits = outputs[:2]
        return loss, logits

    def predict(self, inputs):
        """Return raw logits for an unlabeled batch.

        Args:
            inputs: tuple ``(ids, tokens, token_type_ids, attention_mask)``;
                ``ids`` are sample identifiers and are ignored here.

        Returns:
            Logits tensor of shape (batch, 3).
        """
        _, tokens, token_type_ids, attention_mask = inputs
        with torch.no_grad():
            logits = self.bertModel(input_ids=tokens, token_type_ids=token_type_ids, attention_mask=attention_mask)[0]
        return logits

    def train_one_epoch(self, data_loader):
        """Train for one epoch, then step the LR scheduler.

        After epoch 16 (``self.epochs > 16``), any batch that is not
        classified perfectly is immediately retrained on ("negative try");
        after more than one such retry the batch is abandoned ("desperate
        try"). NOTE(review): on the abandon path the ``negative_try``
        scalar write is skipped because of the ``break`` — preserved as-is.
        """
        self.bertModel.train()
        total_len = len(data_loader)
        tik = time.time()
        for i, batch in enumerate(data_loader):
            self.steps += 1
            self.optimizer.zero_grad()
            loss, logits = self.forward(batch)
            loss.backward()
            torch.nn.utils.clip_grad_norm_(self.parameters(), 1)
            self.optimizer.step()
            self.writer.add_scalars('losses', {'classification_loss': loss.item()}, self.steps)
            print("\rstep:%d of %d, and negative try: %d,desperate try:%d" % (
                i, total_len, self.negative_try, self.desperate_try), end="")
            negative_repeat = 0
            # Retry loop: logits are from BEFORE the optimizer step, so the
            # first retry always fires if the pre-update model misclassified
            # anything; batch[-1] is the label tensor.
            while torch.sum(logits.argmax(dim=1) != batch[-1]) > 0 and self.epochs > 16:
                self.steps += 1
                self.negative_try += 1
                self.optimizer.zero_grad()
                loss, logits = self.forward(batch)
                loss.backward()
                torch.nn.utils.clip_grad_norm_(self.parameters(), 1)
                self.optimizer.step()
                self.writer.add_scalars('losses', {'classification_loss': loss.item()}, self.steps)
                print("\rstep:%d of %d, and negative try: %d,desperate try:%d" % (
                    i, total_len, self.negative_try, self.desperate_try), end="")
                negative_repeat += 1
                if negative_repeat > 1:
                    self.desperate_try += 1
                    break
                self.writer.add_scalars('negative_try',
                                        {'negative_try': self.negative_try, 'desperate_try': self.desperate_try},
                                        self.steps)

        tok = time.time()
        print(' used time:', tok - tik)
        self.lr_scheduler.step()

    def train_epochs(self, train_loader, val_loader, num_epoch, early_stop=5):
        """Train for up to ``num_epoch`` epochs with accuracy-based early stopping.

        Saves a checkpoint whenever validation accuracy improves; stops after
        ``early_stop`` consecutive epochs without improvement.
        """
        best_score = 0
        early_epochs = 0
        for e in range(1, num_epoch + 1):
            self.epochs += 1
            self.train_one_epoch(train_loader)
            acc_score = self.validate(val_loader)
            self.writer.add_scalars('validate', {'acc_score': acc_score}, e)
            print('epoch %d' % (e), 'acc_score :%.3f' % (acc_score))

            total_score = acc_score
            if total_score > best_score:
                best_score = total_score
                early_epochs = 0
                # Checkpoint filename encodes score/epoch/timestamp; the
                # 'score...' prefix is what load_best_model globs for.
                save_path = join(self.model_save_dir, 'score%f_epoch%d_time%s' % (
                    best_score, e, time.strftime("%Y%m%d%H%M%S", time.localtime())))
                torch.save(self.state_dict(), save_path)
            else:
                early_epochs += 1
                if early_epochs >= early_stop:
                    break

    def validate(self, data_loader):
        """Compute accuracy on a labeled loader.

        Returns:
            float accuracy in [0, 1].
        """
        self.bertModel.eval()
        with torch.no_grad():
            true_labels, predict_labels = [], []
            for batch in data_loader:
                labels = batch[-1]  # batch = (tokens, token_type_ids, attention_mask, labels)
                _, logits = self.forward(batch)
                predict_labels.append(logits.argmax(dim=1))
                true_labels.append(labels)
            true_labels = torch.cat(true_labels, dim=0)
            predict_labels = torch.cat(predict_labels, dim=0)
            score = torch.sum(true_labels == predict_labels).float() / len(true_labels)

        return score.item()

    def inference(self, test_loader):
        """Predict hard labels for an unlabeled loader.

        Returns:
            zip of ``(sample_id, predicted_label_int)`` pairs.
        """
        self.bertModel.eval()
        with torch.no_grad():
            predict_labels = []
            id_list = []
            batch_nums = len(test_loader)
            for i, batch in enumerate(test_loader):
                print('\r%d of %d' % (i, batch_nums), end='')
                ids = batch[0]  # batch = (ids, tokens, token_type_ids, attention_mask)
                logits = self.predict(batch)
                predict_labels.extend(logits.argmax(dim=1).tolist())
                id_list.extend(ids)
        return zip(id_list, predict_labels)

    def load_best_model(self):
        """Load the checkpoint with the highest encoded validation score.

        Raises:
            FileNotFoundError: if no 'score*' checkpoint exists in the save dir.
        """
        models = glob(join(self.model_save_dir, 'score*'))
        if not models:
            raise FileNotFoundError('no checkpoint matching score* in %s' % self.model_save_dir)
        rep = 'score([.0-9]*)_epoch'
        scores = [float(re.search(rep, p).group(1)) for p in models]
        best_model = models[np.argmax(scores)]
        self.load_state_dict(torch.load(best_model))
        self.score = best_model.split('/')[-1]
        print('loaded', self.score)

    def inference_for_features(self, data_loader):
        """Extract per-sample logit tensors (for downstream stacking/ensembling).

        Returns:
            zip of ``(sample_id, logits_tensor)`` pairs.
        """
        self.bertModel.eval()
        with torch.no_grad():
            predict_logits = []
            id_list = []
            batch_nums = len(data_loader)
            for i, batch in enumerate(data_loader):
                print('\rinference for features: %d of %d' % (i, batch_nums), end='')
                ids = batch[0]  # batch = (ids, tokens, token_type_ids, attention_mask)
                logits = self.predict(batch)
                predict_logits.extend(logits)
                id_list.extend(ids)
        return zip(id_list, predict_logits)
