# Inference interface wrapper
# Provides a convenient interface for model prediction
import torch
from transformers import BertTokenizerFast


class BertPredictor:
    """Thin inference wrapper around a BERT sequence-classification model.

    Holds the model in eval mode on a fixed device and exposes a
    single-text ``predict`` method returning the argmax class index.
    """

    def __init__(self, model, tokenizer_name, device=None):
        """
        Args:
            model: a loaded model exposing ``.to(device)``, ``.eval()`` and a
                call returning an object with a ``.logits`` tensor
                (e.g. ``BertForSequenceClassification``).
            tokenizer_name: name or path passed to
                ``BertTokenizerFast.from_pretrained``.
            device: target device string. Defaults to ``'cuda'`` when
                available, else ``'cpu'``. Resolved here at call time rather
                than in the signature, so availability is checked per
                instance instead of once at import.
        """
        if device is None:
            device = 'cuda' if torch.cuda.is_available() else 'cpu'
        self.model = model.to(device)
        self.model.eval()  # inference mode: disable dropout etc.
        self.tokenizer = BertTokenizerFast.from_pretrained(tokenizer_name)
        self.device = device

    def predict(self, text):
        """Predict the class index for a single text.

        Args:
            text: raw input string.

        Returns:
            int: index of the highest-scoring class in the model's logits.
        """
        # truncation=True prevents a runtime size-mismatch crash when the
        # text exceeds the model's maximum sequence length.
        inputs = self.tokenizer(
            text, return_tensors='pt', truncation=True
        ).to(self.device)
        with torch.no_grad():  # no autograd graph needed for inference
            outputs = self.model(**inputs)
        predictions = torch.argmax(outputs.logits, dim=1)
        return predictions.item()