import sys
sys.path.append("../../")

import gc
import os
import pickle
import random
import shutil

import numpy as np
import torch
import torch.nn.functional as F
import fitlog
from torch.optim import Adam
from torch.optim.lr_scheduler import LambdaLR
from transformers import BertConfig
from transformers import BertTokenizer
from transformers import DistilBertConfig
from transformers import DistilBertTokenizer

from datareader import *
from metrics import *
from model import VanillaBert
from modeling_bert import AugBertForSequenceClassification

def Evaluate(
        model: torch.nn.Module,
        validation_evaluator: NLIEvaluator,
        best_acc,
        patience_counter: int,
        model_dir: str,
        domain_name: str
):
    """Run one validation pass; checkpoint and log when accuracy improves.

    Returns the (possibly updated) best accuracy and the early-stopping
    patience counter (reset to 0 on improvement, incremented otherwise).
    """
    (val_loss, acc, P, R, F1), _ = validation_evaluator.evaluate(model)
    print(f"Validation acc: {acc}")

    if acc <= best_acc:
        # No improvement: just bump the early-stopping counter.
        patience_counter += 1
        return best_acc, patience_counter

    # New best model: persist the weights and record the full metric set.
    best_acc = acc
    torch.save(model.state_dict(), f'{model_dir}/model_{domain_name}.pth')
    patience_counter = 0
    metrics = {
        'Validation accuracy': acc,
        'Validation Precision': P,
        'Validation Recall': R,
        'Validation F1': F1,
        'Validation loss': val_loss}
    fitlog.add_best_metric(metrics)
    print(metrics)
    return best_acc, patience_counter

def TrainLoader(source_domain:NLIDataset, target_domain:NLIDataset, batchsize=8):
    """Yield collated batches drawn from the shuffled union of both datasets.

    Global indices below len(source_domain) address the source set; the
    remainder are offset into the target set. Collation is delegated to the
    source dataset's collate_raw_batch.
    """
    n_source = len(source_domain)
    total = n_source + len(target_domain)
    # random.sample over the full range == a shuffled permutation of indices.
    order = random.sample(range(total), total)

    def fetch(idx):
        # Map a global index onto the underlying dataset it belongs to.
        if idx < n_source:
            return source_domain[idx]
        return target_domain[idx - n_source]

    for start in range(0, total, batchsize):
        # Python slicing clamps at the end, so no explicit min() is needed.
        yield source_domain.collate_raw_batch(
            [fetch(idx) for idx in order[start:start + batchsize]]
        )

def train(
        model: torch.nn.Module,
        source_domain: NLIDataset,
        target_domain: NLIDataset,
        optimizer: torch.optim.Optimizer,
        scheduler: LambdaLR,
        n_epochs: int,
        log_interval: int = 1,
        valid_interval: int = 100,
        model_dir: str = "wandb_local",
        batchsize=32,
        gradient_accumulation: int = 4
):
    """Contrastive (SimCSE-style) training over the union of two NLI datasets.

    Each batch is encoded twice; dropout makes the two encodings different
    views of the same sentences, and an InfoNCE loss over the pairwise
    cosine-similarity matrix pulls matching rows/columns together. Gradients
    are accumulated over `gradient_accumulation` batches per optimizer step,
    and a checkpoint is written every `valid_interval` batches.
    """
    epoch_counter = 0
    tau = 0.1  # softmax temperature for the contrastive loss
    # Start each accumulation window with clean gradients.
    optimizer.zero_grad()
    while epoch_counter < n_epochs:
        idx = 1
        for batch in TrainLoader(source_domain, target_domain, batchsize=batchsize):
            # labels/domains are unpacked for clarity but unused here: this
            # phase is purely self-supervised.
            input_ids, masks, seg_ids, labels, domains = batch[0], batch[1], batch[2], \
                                                            batch[3], batch[4]
            # Two forward passes -> two dropout-perturbed views of the batch.
            input_vecs1 = model.Sents2Vecs(input_ids,
                                           token_ids=seg_ids,
                                           attention_mask=masks)
            input_vecs2 = model.Sents2Vecs(input_ids,
                                           token_ids=seg_ids,
                                           attention_mask=masks)
            # Pairwise cosine similarity: entry (i, j) = cos(vecs1[i], vecs2[j]).
            cosine = torch.matmul(input_vecs1, input_vecs2.transpose(0, 1)) / \
                        torch.matmul(input_vecs1.norm(dim=1).unsqueeze(1),
                                     input_vecs2.norm(dim=1).unsqueeze(0))
            cosine = cosine / tau
            # The positive pair for row i sits on the diagonal (column i).
            contrastive_labels = torch.arange(len(cosine), device=cosine.device)
            # cross_entropy == nll_loss(log_softmax(...)) but is numerically
            # stable; the original softmax().log() yields -inf for tiny probs.
            loss = F.cross_entropy(cosine, contrastive_labels)
            loss = loss / gradient_accumulation
            loss.backward()
            if idx % gradient_accumulation == 0:
                optimizer.step()
                # BUGFIX: gradients were previously zeroed at the top of
                # EVERY batch, which discarded all but the last batch of each
                # accumulation window (making accumulation a no-op). Zero
                # only after the step so all window batches contribute.
                optimizer.zero_grad()
                if scheduler is not None:
                    scheduler.step()

            if (idx + 1) % log_interval == 0:
                fitlog.add_metric({
                    "Loss": loss.item()
                }, step=idx)
                print({
                    "Loss": loss.item()
                })

            if (idx + 1) % valid_interval == 0:
                # Integer division so checkpoints are named "_1" not "_1.0".
                torch.save(model.state_dict(),
                           f'{model_dir}/ContrastiveBert_Epoch_{(idx + 1) // valid_interval}.pth'
                           )
                # Removed the per-batch gc.collect() (very expensive in a hot
                # loop); collect only after the occasional checkpoint write.
                gc.collect()

            idx += 1
        epoch_counter += 1


if __name__ == "__main__":
    # Load the shared experiment arguments serialized by the project root.
    with open("../../args.pkl", 'rb') as fr:
        args = pickle.load(fr)
    args.model_dir = "./tmp"
    # args.model_dir = str(__file__).rstrip(".py")

    # Set all the seeds so the run is reproducible.
    seed = args.seed
    random.seed(seed)
    np.random.seed(seed)
    torch.manual_seed(seed)
    torch.cuda.manual_seed_all(seed)
    torch.backends.cudnn.deterministic = True
    torch.backends.cudnn.benchmark = False

    # Model configuration. (A verbatim duplicate of this assignment block
    # followed it in the original; it re-ran the same lines to no effect
    # and has been removed.)
    batch_size = args.batch_size
    lr = args.lr
    weight_decay = args.weight_decay
    n_epochs = args.n_epochs
    args.full_bert = True
    args.bertPath = "../../../bert_en/"
    print("====>", args.full_bert)

    bert_model = 'bert-base-uncased' if args.full_bert else 'distilbert-base-uncased'
    if args.full_bert:
        bert_config = BertConfig.from_pretrained(bert_model, num_labels=2) if args.bertPath is None else \
                        BertConfig.from_pretrained(args.bertPath, num_labels=2)
        tokenizer = BertTokenizer.from_pretrained(bert_model) if args.bertPath is None else \
                        BertTokenizer.from_pretrained(args.bertPath)
    else:
        bert_config = DistilBertConfig.from_pretrained(bert_model, num_labels=2) if args.distillBertPath is None else \
                        DistilBertConfig.from_pretrained(args.distillBertPath, num_labels=2)
        tokenizer = DistilBertTokenizer.from_pretrained(bert_model) if args.distillBertPath is None else \
                        DistilBertTokenizer.from_pretrained(args.distillBertPath)

    # Datasets: SNLI as the source domain, one MultiNLI genre as the target.
    domain_id = 2
    NLI_domain_list = list(NLI_domain_map.keys())
    SNLI_set = NLIDataset("../../../snli_1.0/snli_1.0_train.jsonl", tokenizer=tokenizer)
    new_domain_name = NLI_domain_list[domain_id - 1]
    test_set = NLIDataset(f"../../../multinli_1.0/Domain_{new_domain_name}.jsonl", tokenizer=tokenizer)

    accs, Ps, Rs, F1s = [], [], [], []

    # Start every run from an empty log directory. (Was shelling out to
    # `mkdir`/`rm -rf` via os.system, which is fragile with spaces in paths
    # and non-POSIX shells; stdlib calls also surface errors as exceptions.)
    log_dir = args.model_dir
    if os.path.exists(log_dir):
        shutil.rmtree(log_dir)
    os.makedirs(log_dir)
    fitlog.set_log_dir(log_dir)
    fitlog.add_hyper({
            "epochs": n_epochs,
            "learning_rate": lr,
            "warmup": args.warmup_steps,
            "weight_decay": weight_decay,
            "batch_size": batch_size,
            "train_split_percentage": args.train_pct,
            "bert_model": bert_model,
            "seed": seed,
            "pretrained_model": args.pretrained_model,
            "tags": ",".join(args.tags)
        }, name=args.run_name)

    # NLI is 3-way (entailment / neutral / contradiction): override the
    # 2-label value the config was loaded with above.
    bert_config.num_labels = 3
    bert_config.hidden_act = "relu"
    # Create the model
    bert = AugBertForSequenceClassification.from_pretrained(
                    bert_model, config=bert_config) if args.bertPath is None \
            else AugBertForSequenceClassification.from_pretrained(
                    args.bertPath, config=bert_config)

    model = VanillaBert(bert)
    model.DataParallel()

    # Create the optimizer; no weight decay on biases and LayerNorm params.
    no_decay = ['bias', 'LayerNorm.bias', 'LayerNorm.weight']
    optimizer_grouped_parameters = [
        {'params': [p for n, p in model.named_parameters() if not any(nd in n for nd in no_decay)],
         'weight_decay': weight_decay},
        {'params': [p for n, p in model.named_parameters() if any(nd in n for nd in no_decay)], 'weight_decay': 0.0}
    ]
    # NOTE(review): args.lr is ignored here -- 5e-7 looks like a deliberate
    # hard-coded override for the contrastive phase; confirm before reusing.
    optimizer = Adam(optimizer_grouped_parameters, lr=5e-7)

    # Train
    train(
        model,
        SNLI_set,
        test_set,
        optimizer,
        None,
        3,
        log_interval=1,
        valid_interval=1000,
        model_dir=args.model_dir,
        batchsize=96,
        gradient_accumulation=4
    )