# --------- BERT MASK inference ------------------
import torch
from utils.datasets import BERTBin
from torch.utils.data import DataLoader
from utils.functions import add_muti_mask_vec, drugseed_bert_ids, drug_names_bert_ids, BoxPlot
from utils import emerging_euph, drug_names
from utils.models import OursBin
from transformers import BertTokenizer
from confs import step2 as conf


def inf(model_name, device, type="checktarget"):
    """Run binary inference over every emerging euphemism for 10 checkpoints.

    For each saved epoch checkpoint of *model_name*, loads the weights,
    runs the inference set built for every key of ``emerging_euph``, and
    prints the per-euphemism and per-epoch counts of positive
    (predicted label == 1) examples.

    Args:
        model_name: checkpoint name suffix used to locate
            ``./models/step1train/Epoch{ep}_{model_name}.pkl``.
        device: torch device string, e.g. ``"cuda:0"``.
        type: mode flag forwarded to ``BERTBin`` and the model's forward
            pass. (Kept named ``type`` despite shadowing the builtin,
            because callers pass it by keyword.)
    """
    conf.BATCHSIZE = 4  # NOTE: mutates the shared conf module for this run
    tokenizer = BertTokenizer.from_pretrained("bert-base-uncased")
    model = OursBin(conf).to(device)
    for ep in range(10):
        cnt = 0
        # Pass the path directly to torch.load instead of an open() handle
        # that was never closed (resource leak in the original).
        state = torch.load(
            f"./models/step1train/Epoch{ep}_{model_name}.pkl",
            map_location=device,
        )
        model.load_state_dict(state)
        print(f"-------Epoch{ep}_{model_name}----------")
        model.eval()  # loop-invariant: set once per checkpoint, not per euphemism
        for impeuph in emerging_euph.keys():
            infset = BERTBin(conf, tokenizer, type=type, impeuph=impeuph)
            inf_loader = DataLoader(
                infset,
                batch_size=conf.BATCHSIZE,
                num_workers=4,
                pin_memory=True,
                shuffle=False
            )

            # Collect per-batch predictions and concatenate once, instead of
            # re-casting (`preds.long()`) and re-allocating the accumulated
            # tensor on every batch.
            batch_preds = []
            with torch.no_grad():
                for d in inf_loader:
                    # d[0] presumably holds the tokenizer's encoding dict
                    # (input_ids / attention_mask / ...) — TODO confirm in BERTBin
                    for k in d[0].keys():
                        d[0][k] = d[0][k].squeeze(1).to(device)
                    output = model(d, type=type)
                    _, predicted = torch.max(output.logits, dim=1)
                    batch_preds.append(predicted)
            if batch_preds:
                preds = torch.cat(batch_preds, dim=0).cpu()
            else:
                # Empty inference set: avoid torch.cat([]) raising.
                preds = torch.empty(0, dtype=torch.long)
            cnt_ = torch.sum(preds == 1)
            print(f"{impeuph}: {cnt_}")
            cnt += cnt_

        print(f"Epoch{ep}_impeuph: {cnt}")


if __name__ == '__main__':
    # Script entry point: run checktarget inference on GPU 0.
    device = "cuda:0"
    model_name = "step1train_mlm2"
    # Removed unused locals `step` and `seedonly` — assigned but never read
    # anywhere in this script.
    inf(model_name, device, type="checktarget")
