from dataloader import ContentSet
from torch.utils.data import DataLoader
from pytorch_transformers import BertModel, BertTokenizer
import torch
import torch.nn as nn
from sklearn.metrics import accuracy_score
import os
import pandas as pd
import numpy as np
from utils import *


class Model1(object):
    """BERT encoder + linear head for 3-way content classification.

    Provides plain fine-tuning (``train``), adversarial fine-tuning against a
    9-way category discriminator (``adversarial_train``), dev-set evaluation
    (``valid``) and test-set inference (``infer``).

    NOTE(review): every module is moved to CUDA unconditionally, so a GPU is
    required to use this class.
    """

    def __init__(self, bert_dir="../bert_cn/", train_file="./data/train.csv", dev_file="./data/dev.csv",
                 test_file="./data/test_dataset.csv", dev_test_batchsize=100):
        """Load pretrained BERT, its tokenizer, and build the dev DataLoader.

        Args:
            bert_dir: directory with the pretrained BERT weights and vocab.
            train_file: CSV consumed by ``train`` / ``adversarial_train``.
            dev_file: CSV evaluated by ``valid``.
            test_file: CSV scored by ``infer``.
            dev_test_batchsize: batch size for the dev loader.
        """
        self.test_file = test_file
        self.train_file = train_file
        dev_set = ContentSet(dev_file)
        self.dev_loader = DataLoader(dev_set, batch_size=dev_test_batchsize)
        self.tokenizer = BertTokenizer.from_pretrained(bert_dir)
        self.model = BertModel.from_pretrained(bert_dir).cuda()
        # Classification head over BERT's 768-dim pooled output -> 3 classes.
        self.cls = nn.Linear(768, 3).cuda()
        self.loss_fn = nn.CrossEntropyLoss()

    def sent2vec(self, batch):
        """Run BERT over the sentences in ``batch``.

        Returns ``(hiddens, outs)``: per-token hidden states and the pooled
        sentence vectors.  Assumes ``batch[2]`` holds the raw sentence text
        consumed by ``encode_sent_list`` — TODO confirm against ContentSet.
        """
        input_ids = encode_sent_list(self.tokenizer, batch[2])
        ipt_tensor, mask_tensor = pad_sentences(input_ids, 0.0)
        hiddens, outs = self.model(ipt_tensor.cuda(), attention_mask=mask_tensor.cuda())
        return hiddens, outs

    def forward(self, batch):
        """Return the 3-class logits for ``batch`` (no softmax applied)."""
        hiddens, outs = self.sent2vec(batch)
        pred = self.cls(outs)
        return pred

    def adversarial_train(self, max_epoch=10, batchsize=20, saved_model="./model/adversarial_model.pkl"):
        """Fine-tune BERT adversarially against a category discriminator.

        Stage 1 pretrains a 9-way discriminator for ~100 batches on the
        encoder output; stage 2 trains the classifier while the combined loss
        ``rm_loss - 0.01 * adv_loss`` pushes the encoder to fool the
        discriminator.  The checkpoint with the lowest dev loss is saved to
        ``saved_model``.

        Assumes ``batch[1]`` holds 9-way category labels and ``batch[-1]``
        the 3-way classification labels — TODO confirm against ContentSet.
        """
        best_valid_acc = 0.0
        best_valid_loss = 10.0
        train_set = ContentSet(self.train_file)
        train_loader = DataLoader(train_set, batch_size=batchsize, shuffle=True)
        discriminator = nn.Sequential(
            nn.Linear(768, 768),
            nn.ReLU(inplace=True),
            nn.Linear(768, 256),
            nn.ReLU(inplace=True),
            nn.Linear(256, 9)
        ).cuda()
        # Lower lr for the pretrained encoder, higher for the fresh head.
        opt = torch.optim.Adam([{'params': self.model.parameters(), 'lr': 2e-5},
                                {'params': self.cls.parameters(), 'lr': 2e-3}])
        adver_opt = torch.optim.Adam([{'params': discriminator.parameters(), 'lr': 5e-4}])

        # ---- Stage 1: pretrain the discriminator (~100 batches). ----
        for step, batch in enumerate(train_loader):
            _, outs = self.sent2vec(batch)
            cat_pred = discriminator(outs)
            adv_loss = self.loss_fn(cat_pred, batch[1].cuda())
            # BUG FIX: gradients were never cleared in this loop, so the
            # discriminator's (and encoder's) grads accumulated over every
            # pretraining step.
            adver_opt.zero_grad()
            opt.zero_grad()
            adv_loss.backward()
            adver_opt.step()
            acc = accuracy_score(batch[1], cat_pred.cpu().argmax(dim=1))
            print("Descriminator Trainning:  %3d |%3d iterations, loss: %3.5f,  accuracy:%3.5f" % (
                step, len(train_loader), adv_loss, acc))
            if step % 100 == 99:
                break

        # ---- Stage 2: adversarial fine-tuning. ----
        # NOTE(review): the negative learning rate appears deliberate.
        # loss.backward() leaves -0.01 * d(adv_loss) in the discriminator
        # grads, so stepping with lr = -2e-5 still *descends* adv_loss for
        # the discriminator while the encoder (updated by `opt`) is pushed
        # to increase it — a gradient-reversal trick; confirm before changing.
        adver_opt = torch.optim.Adam([{'params': discriminator.parameters(), 'lr': -2e-5}])
        for epoch in range(max_epoch):
            for step, batch in enumerate(train_loader):
                _, outs = self.sent2vec(batch)
                cat_pred = discriminator(outs)
                adv_loss = self.loss_fn(cat_pred, batch[1].cuda())
                discriminator.zero_grad()
                pred = self.cls(outs)
                rm_loss = self.loss_fn(pred, batch[-1].cuda())
                loss = rm_loss - 0.01 * adv_loss
                acc = accuracy_score(batch[-1], pred.cpu().argmax(dim=1))
                opt.zero_grad()
                loss.backward()
                opt.step()
                adver_opt.step()
                print("%3d |%3d iterations, %3d | %3d epochs, loss: %3.5f, adv_loss:%3.5f, accuracy:%3.5f" % (
                    step, len(train_loader), epoch, max_epoch, rm_loss, adv_loss, acc))
                if step % 500 == 499:
                    mean_acc, mean_loss = self.valid()
                    print("######## %3d epochs ##### valid_loss | max_valid_loss:%3.5f | %3.5f, acc | best_acc: %3.5f | %3.5f" % (epoch, mean_loss, best_valid_loss, mean_acc, best_valid_acc))
                    if mean_acc > best_valid_acc:
                        best_valid_acc = mean_acc
                    # Checkpoint on best (lowest) dev loss only.
                    if mean_loss < best_valid_loss:
                        best_valid_loss = mean_loss
                        torch.save(
                            {
                                "bert": self.model.state_dict(),
                                "cls": self.cls.state_dict()
                            },
                            saved_model
                        )

    def train(self, max_epoch=10, batchsize=20, saved_model="./model/best_model1.pkl"):
        """Standard fine-tuning of BERT plus the classification head.

        Evaluates on the dev set every 500 steps and saves a checkpoint
        whenever the dev loss improves.  Assumes ``batch[-1]`` holds the
        3-way labels — TODO confirm against ContentSet.
        """
        best_valid_acc = 0.0
        best_valid_loss = 10.0
        train_set = ContentSet(self.train_file)
        train_loader = DataLoader(train_set, batch_size=batchsize, shuffle=True)

        # Lower lr for the pretrained encoder, higher for the fresh head.
        opt = torch.optim.Adam([{'params': self.model.parameters(), 'lr': 2e-5},
                                {'params': self.cls.parameters(), 'lr': 2e-3}])
        for epoch in range(max_epoch):
            for step, batch in enumerate(train_loader):
                pred = self.forward(batch)
                loss = self.loss_fn(pred, batch[-1].cuda())
                acc = accuracy_score(batch[-1], pred.cpu().argmax(dim=1))
                opt.zero_grad()
                loss.backward()
                opt.step()
                print("%3d |%3d iterations, %3d | %3d epochs, loss: %3.5f,  accuracy:%3.5f" % (
                    step, len(train_loader), epoch, max_epoch, loss, acc))
                if step % 500 == 499:
                    mean_acc, mean_loss = self.valid()
                    print("######## %3d epochs ##### valid_loss | max_valid_loss:%3.5f | %3.5f, acc | best_acc: %3.5f | %3.5f" % (epoch, mean_loss, best_valid_loss, mean_acc, best_valid_acc))
                    if mean_acc > best_valid_acc:
                        best_valid_acc = mean_acc
                    # Checkpoint on best (lowest) dev loss only.
                    if mean_loss < best_valid_loss:
                        best_valid_loss = mean_loss
                        torch.save(
                            {
                                "bert": self.model.state_dict(),
                                "cls": self.cls.state_dict()
                            },
                            saved_model
                        )

    def valid(self, model_file=None):
        """Evaluate on the dev set; return ``(mean_accuracy, mean_loss)``.

        If ``model_file`` is given and exists, that checkpoint is loaded
        into the encoder and head first.
        """
        if model_file is not None and os.path.exists(model_file):
            checkpoint = torch.load(model_file)
            self.model.load_state_dict(checkpoint["bert"])
            self.cls.load_state_dict(checkpoint["cls"])

        sum_acc = 0.0
        sum_losses = 0.0
        n_batches = 0
        with torch.no_grad():
            for batch in self.dev_loader:
                pred = self.forward(batch)
                loss = self.loss_fn(pred, batch[-1].cuda())
                sum_acc += accuracy_score(batch[-1], pred.cpu().argmax(dim=1))
                # .item(): accumulate plain floats rather than 0-dim tensors.
                sum_losses += loss.item()
                n_batches += 1
        # BUG FIX: the original divided by (step + 1) and raised
        # UnboundLocalError when the dev loader was empty.
        if n_batches == 0:
            return 0.0, 0.0
        return sum_acc / n_batches, sum_losses / n_batches

    def infer(self, model_file):
        """Score the test set and write submission probabilities to CSV.

        Loads ``model_file`` if present, softmaxes the 3-class logits, and
        duplicates the 3 probabilities across the 0c/2c/all column groups
        (via ``repeat``) to fill the 9-column submission format.  Output is
        written to ./data/test_model1_pred.csv.
        """
        test_set = ContentSet(self.test_file, label_type=-1)
        test_loader = DataLoader(test_set, batch_size=100)
        if os.path.exists(model_file):
            checkpoint = torch.load(model_file)
            self.model.load_state_dict(checkpoint["bert"])
            self.cls.load_state_dict(checkpoint["cls"])
        all_preds = []
        ids = []
        with torch.no_grad():
            for batch in test_loader:
                pred = self.forward(batch).softmax(dim=1)
                # Same 3 probabilities reused for all three label groups.
                all_preds.append(pred.repeat([1, 3]).cpu().numpy())
                ids.extend(list(batch[0]))
        df = pd.DataFrame(np.concatenate(all_preds, axis=0),
                          columns=['ncw_prob_label_0c', 'fake_prob_label_0c', 'real_prob_label_0c', 'ncw_prob_label_2c',
                                   'fake_prob_label_2c', 'real_prob_label_2c', 'ncw_prob_label_all',
                                   'fake_prob_label_all', 'real_prob_label_all'])
        df['id'] = np.array(ids)
        df.to_csv("./data/test_model1_pred.csv",
                  columns=['id', 'fake_prob_label_0c', 'fake_prob_label_2c', 'fake_prob_label_all',
                           'real_prob_label_0c', 'real_prob_label_2c', 'real_prob_label_all', 'ncw_prob_label_0c',
                           'ncw_prob_label_2c', 'ncw_prob_label_all'], index=False)


if __name__ == '__main__':
    # Fine-tune on the similarity-filtered training split; the best
    # checkpoint (by dev loss) is written to ./model/SimTrain.pkl.
    classifier = Model1(
        bert_dir="../bert_cn",
        train_file="./data/train_Sim09.csv",
        dev_file="./data/dev_sim09.csv",
    )
    classifier.train(saved_model="./model/SimTrain.pkl")