# --------- BERT MASK inference ---------
import torch
from utils.datasets import BERTMlm
from torch.utils.data import DataLoader
from utils.functions import add_muti_mask_vec, drugseed_bert_ids, drug_names_bert_ids, BoxPlot
from utils import emerging_euph, drug_names
from utils.models import Ours
from transformers import BertTokenizer, BertForMaskedLM
from confs import step2 as conf


def inf(model_name, device, topk, seedonly, type="checktarget"):
    """Run masked-LM inference over every emerging euphemism and report ranks.

    For each epoch checkpoint (0-3) and each euphemism in ``emerging_euph``,
    the model predicts the [MASK] token; we record the best rank at which a
    known drug-term token id appears (fed to ``box_plot``) and count how many
    examples place a drug-term id within the top ``topk`` predictions.

    Args:
        model_name: checkpoint name, used only for log lines (loading is
            currently commented out below).
        device: torch device string, e.g. "cuda:0".
        topk: cutoff for the hit count reported per euphemism.
        seedonly: if True, restrict the target vocabulary to seed drug ids
            (``drugseed_bert_ids``); otherwise use all drug names.
        type: dataset mode passed through to ``BERTMlm``.

    Side effects: mutates ``conf.BATCHSIZE``, prints per-euphemism counts,
    and draws the (cumulative) box plot once per epoch.
    """
    box_plot = BoxPlot({"type": [], "ranks": [], "epoch": []})
    # Pick the candidate vocabulary once; hoist the id set so membership
    # tests inside the nested rank loop are O(1) instead of scanning a view.
    dic = drugseed_bert_ids if seedonly else drug_names_bert_ids
    target_ids = set(dic.values())
    conf.BATCHSIZE = 4
    tokenizer = BertTokenizer.from_pretrained("bert-base-uncased")
    model = Ours(conf).to(device)
    for ep in [0, 1, 2, 3]:
        # Checkpoint loading intentionally disabled for now:
        # model.load_state_dict(torch.load(f"./models/{step}/Epoch{ep}_{model_name}.pkl", map_location=device))
        print(f"Epoch{ep}_{model_name}_topn{topk}_seedonly={seedonly}")
        total_cnt = 0
        for impeuph in emerging_euph.keys():
            cnt = 0
            infset = BERTMlm(conf, tokenizer, type=type, impeuph=impeuph)
            inf_loader = DataLoader(
                infset,
                batch_size=conf.BATCHSIZE,
                num_workers=4,
                pin_memory=True,
                shuffle=False
            )

            model.eval()
            with torch.no_grad():
                for batch in inf_loader:
                    # batch[0] holds the tokenizer tensors; drop the extra
                    # dim added by the Dataset and move them to the device.
                    for k in batch[0].keys():
                        batch[0][k] = batch[0][k].squeeze(1).to(device)

                    output = model(batch, type="check")
                    logits = output.logits
                    # Column index of the [MASK] token per row.
                    # NOTE(review): assumes exactly one [MASK] per sequence —
                    # with multiple masks the row/column pairing below would
                    # be wrong; confirm against BERTMlm.
                    mask_token_index = (batch[0]["input_ids"] == tokenizer.mask_token_id).nonzero(as_tuple=True)[1]
                    mask_logits_ = torch.stack([logits[i, j] for i, j in enumerate(mask_token_index)])
                    mask_logits = add_muti_mask_vec(mask_logits_, tokenizer, seedonly=seedonly)
                    # Only the ordering matters, so argsort replaces the
                    # original torch.sort whose values were discarded (and
                    # shadowed the builtin `sorted`).
                    indices = torch.argsort(mask_logits, descending=True)
                    # Record the best (lowest) rank at which a target drug
                    # id appears for each example.
                    for row in range(indices.shape[0]):
                        for rank in range(indices.shape[1]):
                            if indices[row, rank].item() in target_ids:
                                box_plot.datatemplate["type"].append(impeuph)
                                box_plot.datatemplate["ranks"].append(rank)
                                box_plot.datatemplate["epoch"].append(ep)
                                break
                    # Count examples with at least one target id in the top-k.
                    for row in range(indices.shape[0]):
                        if target_ids & set(indices[row, :topk].tolist()):
                            cnt += 1
            print(f"{impeuph}: {cnt}")
            total_cnt += cnt
        # Drawn inside the epoch loop, so each draw shows all epochs so far.
        box_plot.draw()
        print(f"Epoch{ep}_{model_name}_topn{topk}_totalcnt:{total_cnt}")


if __name__ == '__main__':
    # Inference configuration — edit here to switch checkpoints/settings.
    device = "cuda:0"
    step = "step3train"
    # model_name = "step4train_06_mlm"
    model_name = "step3train_06_mlm"
    seedonly = True
    # Other cutoffs previously tried: [1, 5, 10, 20]
    for topk in [10]:
        inf(model_name, device, topk, type="checktarget", seedonly=seedonly)
