import torch
from transformers import BertTokenizer,BertForSequenceClassification
device=torch.device("cuda" if torch.cuda.is_available() else "cpu")
from utils import Singleton
class TopicBinaryClassification(metaclass=Singleton):
    """Binary topic classifier wrapping a fine-tuned Chinese BERT model.

    Singleton: the model and tokenizer are loaded once per process and shared
    by all callers.
    """

    def __init__(self) -> None:
        # map_location=device lets a checkpoint saved on GPU load on a
        # CPU-only host (and vice versa) instead of crashing.
        # NOTE(review): torch.load unpickles arbitrary objects — only load
        # checkpoint files from a trusted source.
        self.twoModel = torch.load('./models/topic/sec_bert_topic.bin',
                                   map_location=device)
        self.twoModel.to(device)
        # eval() freezes dropout / batch-norm for deterministic inference;
        # torch.no_grad() alone does not do this.
        self.twoModel.eval()
        self.tokenizer = BertTokenizer.from_pretrained('./models/bert-base-chinese')

    def topciTest(self, text: str):
        """Classify `text` and return (predicted_class_index, confidence).

        Returns:
            tuple: (int class index, 0-dim tensor with the softmax probability
            of the predicted class; call .item() on it for a plain float).
        """
        inputs = self.tokenizer(text, return_tensors="pt", padding=True,
                                truncation=True, max_length=128).to(device)
        with torch.no_grad():
            outputs = self.twoModel(**inputs)
            probabilities = torch.softmax(outputs.logits, dim=1)
            predicted_class_idx = torch.argmax(outputs.logits, dim=1).item()
        return predicted_class_idx, probabilities[0][predicted_class_idx]

    # Correctly-spelled alias; the misspelled `topciTest` is kept above for
    # existing callers.
    topic_test = topciTest