# ------- BERT MASK: evaluate checkpoints on the GPT dev set -----------------
import pickle

import torch
from utils.datasets import BERTMlm
from torch.utils.data import DataLoader
from utils.models import Ours
from utils.functions import add_muti_mask_vec, drugseed_bert_ids, drug_names_bert_ids, BoxPlot
from utils import drug_names
from transformers import BertTokenizer, BertForMaskedLM
from confs import step2 as conf
from tqdm import tqdm
from sklearn.metrics import accuracy_score, precision_score, recall_score, f1_score
from torch.utils.tensorboard import SummaryWriter


def inf(model_name, device, topk, seedonly, type="dev", step="step3train"):
    """Score saved masked-LM checkpoints as a binary retrieval task.

    For each of 5 epoch checkpoints, run the dev set through the model,
    rank the vocabulary logits at each [MASK] position, and predict
    positive (1) when any known drug-token id appears in the top-k ranks.
    Accuracy/P/R/F1 against the gold labels are printed and logged to
    TensorBoard.

    Args:
        model_name: checkpoint name stem; files are loaded from
            ``./models/{step}/Epoch{ep}_{model_name}.pkl``.
        device: torch device string, e.g. ``"cuda:1"``.
        topk: rank cutoff for counting a drug token as retrieved.
        seedonly: if True, use the seed-drug id dictionary, else all drugs.
        type: dataset split forwarded to BERTMlm (name kept for callers,
            even though it shadows the builtin).
        step: checkpoint sub-directory. Was read from a module-level global
            (NameError when imported outside ``__main__``); now a
            backward-compatible parameter with the same default value.
    """
    dic = drugseed_bert_ids if seedonly else drug_names_bert_ids
    writer = SummaryWriter(f'./runstep2dev/{model_name}_top{topk}')
    print(f"{model_name}_top{topk}")
    # NOTE(review): mutates the shared conf module for all later users.
    conf.BATCHSIZE = 4

    tokenizer = BertTokenizer.from_pretrained("bert-base-uncased")
    model = Ours(conf).to(device)
    infset = BERTMlm(conf, tokenizer, type=type)
    # Dataset and batch size are epoch-invariant: build the loader once
    # instead of once per epoch.
    inf_loader = DataLoader(
        infset,
        batch_size=conf.BATCHSIZE,
        num_workers=4,
        pin_memory=True,
        shuffle=False
    )
    # Candidate drug-token ids, built once (was rebuilt for every row).
    drug_ids = set(dic.values())
    for ep in range(5):
        pred = []
        labels = torch.Tensor()
        model.load_state_dict(
            torch.load(f"./models/{step}/Epoch{ep}_{model_name}.pkl",
                       map_location=device))
        model.eval()
        with torch.no_grad():
            for batch in inf_loader:
                # batch[0]: tokenizer output dict, batch[1]: gold labels.
                for k in batch[0].keys():
                    batch[0][k] = batch[0][k].squeeze(1).to(device)
                labels = torch.cat([labels, batch[1]], dim=-1)
                output = model(batch, type="dev")
                logits = output.logits
                # Column indices of [MASK] per row; assumes exactly one
                # mask per sequence -- TODO confirm against BERTMlm.
                mask_token_index = (batch[0]["input_ids"] == tokenizer.mask_token_id).nonzero(as_tuple=True)[1]
                mask_logits_ = torch.stack([logits[i, j] for i, j in enumerate(mask_token_index)])
                mask_logits = add_muti_mask_vec(mask_logits_, tokenizer, seedonly=seedonly)
                # Vocabulary ids ranked by descending logit (don't shadow
                # the builtin `sorted`).
                _, indices = torch.sort(mask_logits, descending=True)
                for i in range(indices.shape[0]):
                    # Positive iff any drug id lands in the top-k ranks.
                    pred.append(1 if drug_ids & set(indices[i, :topk].tolist()) else 0)
        labels = labels.tolist()
        acc = accuracy_score(labels, pred)
        precision = precision_score(labels, pred, average='weighted')
        recall = recall_score(labels, pred, average='weighted')
        f1 = f1_score(labels, pred, average='weighted')

        print(f'Epoch: {ep}, Acc: {acc:.2f}, Precision: {precision:.2f}, Recall: {recall:.2f}, F1: {f1:.2f}')
        writer.add_scalar("Acc", acc, ep)
        writer.add_scalar("P", precision, ep)
        writer.add_scalar("R", recall, ep)
        writer.add_scalar("F1", f1, ep)
    # Flush and release the TensorBoard writer (was leaked).
    writer.close()


if __name__ == '__main__':
    # Evaluation configuration: target GPU, checkpoint directory,
    # and the trained model to score.
    device = "cuda:1"
    step = "step3train"
    model_name = "step4train_06_mlm"
    seedonly = False
    # Sweep the top-k cutoff used to decide whether a drug token counts
    # as retrieved at the mask position.
    for topk in (1, 5, 10, 20):
        inf(model_name, device, topk, type="dev", seedonly=seedonly)
