import pandas as pd
import torch
import joblib

from models.preprocess_stage.bert_model import preprocess_bert, model

# TODO: later, add a way for the user to choose the maximum sequence length themselves
MAX_LEN = 100
# DEVICE = 'cpu'

# Pre-trained logistic regression classifier applied on top of the BERT embeddings
logreg = joblib.load('models/weights/LogRegBestWeights.sav')


def predict_1(text):
    # Tokenize the text and build its attention mask, padded/truncated to MAX_LEN
    preprocessed_text, attention_mask = preprocess_bert(text, MAX_LEN=MAX_LEN)
    # Add the batch dimension expected by the model
    preprocessed_text = torch.tensor(preprocessed_text).unsqueeze(0)
    attention_mask = torch.tensor([attention_mask])
    # model.to(DEVICE)
    with torch.inference_mode():
        # Use the [CLS] token embedding from the last hidden state as the sentence vector
        vector = model(preprocessed_text, attention_mask=attention_mask)[0][:, 0, :]
    # Classify the pooled embedding with the logistic regression head
    predict = logreg.predict(vector)
    return predict[-1]
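

# A minimal usage sketch (assumption: predict_1 takes a single raw text string
# and returns a class label; the sample input below is hypothetical):
if __name__ == '__main__':
    sample_text = "This is an example text to classify."  # hypothetical input
    label = predict_1(sample_text)
    print(f'Predicted label: {label}')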