import sys, os, time
import random, numpy as np
import torch, torch.nn as nn
import shutil, logging, json
from tqdm import tqdm
from torch.utils.data import DataLoader, Dataset
from transformers import BertTokenizer, BertConfig
# from transformers import BertForSequenceClassification, AdamW
from collections import OrderedDict

from dataloader import QNLIDataset
from BaseModel.modeling_bert import BertForSequenceClassification
from BaseModel.modeling_bert_lowrank import BertForSequenceClassificationLowRank
from lowrank_method import svd_decompose, hessian_decompose
from train import train_model, evaluate_model
import warnings
warnings.filterwarnings("ignore", category=FutureWarning)

def obtain_model(bert_path, device, num_labels=2, lr_model=5e-5):
    """Load a pretrained BERT sequence-classification model and its tokenizer.

    Args:
        bert_path: Path to a pretrained BERT checkpoint/config directory.
        device: torch device the model is moved to.
        num_labels: Number of classification labels (default 2).
        lr_model: Learning rate stored on the model as ``learning_rate``
            so the training loop can read it (default 5e-5).

    Returns:
        Tuple ``(model, tokenizer)``; the model is already on ``device``.
    """
    # num_labels is passed straight to from_pretrained, so no separate
    # config.num_labels assignment is needed afterwards.
    bert_config = BertConfig.from_pretrained(bert_path, num_labels=num_labels)
    tokenizer_M = BertTokenizer.from_pretrained(bert_path)
    # NOTE(review): this overrides the checkpoint's configured activation
    # (typically GELU) — presumably intentional for this project; confirm.
    bert_config.hidden_act = "relu"
    # Cap tokenized sequences at 128 tokens.
    tokenizer_M.model_max_length = 128
    # Create the model with the modified config and move it to the device.
    bert = BertForSequenceClassification.from_pretrained(bert_path, config=bert_config).to(device)
    # Attach the learning rate so downstream training code can pick it up.
    bert.learning_rate = lr_model
    return bert, tokenizer_M


def print_size_of_model(model):
    """Print the serialized size of *model*'s state_dict in megabytes.

    Uses a unique temporary file instead of a fixed name in the current
    directory, so concurrent runs cannot clobber each other, and cleans
    the file up even if ``torch.save`` raises.
    """
    import tempfile  # stdlib; local import to keep top-of-file imports untouched
    fd, path = tempfile.mkstemp(suffix=".p")
    os.close(fd)  # torch.save reopens the path itself
    try:
        torch.save(model.state_dict(), path)
        print('Size (MB):', os.path.getsize(path)/1e6)
    finally:
        os.remove(path)

def train_qnli(bert_path, data_name):
    """Fine-tune BERT on QNLI, evaluate it, and save the trained weights.

    Args:
        bert_path: Path to the pretrained BERT checkpoint directory.
        data_name: Dataset name tag (currently unused; kept for interface
            compatibility with the caller).

    Returns:
        0 on completion.
    """
    # Build the dataloaders with a plain tokenizer (obtain_model returns
    # its own, separately configured, tokenizer for the model side).
    tokenizer = BertTokenizer.from_pretrained(bert_path)
    train_dataset = QNLIDataset("./glue_data/QNLI/train.tsv", tokenizer, num_samples=10000)
    test_dataset = QNLIDataset("./glue_data/QNLI/dev.tsv", tokenizer, num_samples=200)
    train_dataloader = DataLoader(train_dataset, batch_size=20, shuffle=True)
    test_dataloader = DataLoader(test_dataset, batch_size=20, shuffle=False)

    device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
    model, tokenizer_M = obtain_model(bert_path, device, num_labels=2)
    print_size_of_model(model)
    train_model(model, train_dataloader, test_dataloader, device, epochs=3)
    evaluate_model(model, test_dataloader, device)
    # Ensure the output directory exists — torch.save raises if the
    # parent directory is missing.
    save_path = "./model/model_trained/qnli_model.pth"
    os.makedirs(os.path.dirname(save_path), exist_ok=True)
    torch.save(model.state_dict(), save_path)
    return 0


if __name__ == "__main__":
    # Entry point: fine-tune BERT on QNLI using the local checkpoint.
    bert_path = "./model/bert_en"  # local pretrained BERT directory
    data_name = "QNLI"  # dataset tag (train_qnli does not use it yet)
    train_qnli(bert_path, data_name)