import torch
from torch import nn
from transformers import AutoTokenizer
from torch.utils.data import Dataset, DataLoader
from tqdm import tqdm

import sys
sys.path.append('..')

from utils import save_obj
from settings import BERT_PATH, raw_attr

import pandas as pd

from BERT_infer_data import BERT_data


# Pick GPU when available, otherwise fall back to CPU.
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")

# Inference configuration: batch size, compute device, pretrained BERT
# directory, and the fine-tuned classifier checkpoint to load.
config = {
    "batch": 16,
    "device": device,
    "model_path": BERT_PATH,
    "new_weight": "wuhan_cls.pth",
}
# NOTE(review): the original re-read `device = config["device"]` here — a
# redundant self-assignment (it is the same object), so it was dropped.

from transformers import BertForSequenceClassification

# 88-way sequence classifier initialised from the pretrained BERT checkpoint.
model = BertForSequenceClassification.from_pretrained(
    config["model_path"],
    num_labels=88,
    output_attentions=False,  # do not return attention weights
    output_hidden_states=False,  # do not return all hidden states
)
# BUG FIX: the original called model.cuda() unconditionally, which crashes on
# CPU-only machines even though `device` already falls back to CPU.
model.to(device)
# map_location keeps the load working when the checkpoint was saved on GPU
# but inference runs on CPU (and vice versa).
weights = torch.load(config["new_weight"], map_location=device)
model.load_state_dict(weights)

tokenizer = AutoTokenizer.from_pretrained(config["model_path"])


class InferDataset(Dataset):
    """Thin Dataset wrapper around a pre-built sequence of raw text samples.

    Samples are returned untokenized; tokenization is performed later in
    the DataLoader's collate function.
    """

    def __init__(self, infer_data, tokenizer):
        # The tokenizer is stored for API symmetry but not used here —
        # batching-time tokenization happens in collate_fn.
        self.tokenizer = tokenizer
        self.data = infer_data

    def __len__(self):
        """Number of samples available for inference."""
        return len(self.data)

    def __getitem__(self, idx):
        """Return the raw (untokenized) sample at position *idx*."""
        return self.data[idx]

# Wrap the raw inference texts; tokenization is deferred to collate_fn.
dataset = InferDataset(BERT_data, tokenizer)


def collate_fn(feature):
    """Tokenize a batch of raw strings into padded, truncated tensor batches.

    Uses the module-level ``tokenizer``. Every sequence is padded/truncated
    to exactly 512 tokens and the result is returned as PyTorch tensors
    (the dict of input_ids / attention_mask etc. that BERT expects).
    """
    encoded = tokenizer(
        feature,
        padding="max_length",
        truncation=True,
        max_length=512,
        return_tensors="pt",
    )
    return encoded


# Batched loader over the raw texts. shuffle=False keeps predictions aligned
# with the original order of BERT_data; pin_memory speeds up host->GPU
# transfers when CUDA is available.
infer_dataloader = DataLoader(
    dataset,
    batch_size=config["batch"],
    shuffle=False,
    collate_fn=collate_fn,
    pin_memory=True,
)

def predict(model, infer_dataloader):
    """Run the classifier over every batch and collect predicted label ids.

    Returns a flat list of int class indices, one per input sample, in
    dataloader order (relies on the module-level ``device`` for placement).
    """
    model.eval()  # disable dropout/batchnorm updates for inference
    predictions = []
    with torch.no_grad():  # no gradients needed at inference time
        for batch in tqdm(infer_dataloader):
            # Move every tensor in the batch to the compute device.
            batch = {name: tensor.to(device) for name, tensor in batch.items()}
            logits = model(**batch)["logits"]
            predictions.extend(logits.argmax(dim=-1).tolist())
    return predictions

# Predicted class index for every sample in BERT_data, in order.
res = predict(model, infer_dataloader)


# Optional spot check: print the first few inputs next to their predicted
# attribute names (raw_attr maps class index -> attribute label).
# for idx in range(64):
#     print(BERT_data[idx])
#     print(raw_attr[res[idx]])
#     print()

# Persist the prediction list for downstream processing.
save_obj(res, "BERT_infer_res.pkl")

# Run in the background: nohup python BERT_infer.py > BERT_infer.log 2>&1 &