import torch
import requests
from transformers import XLNetTokenizer
import gradio as gr

# Direct link to the raw model file on Hugging Face Spaces ('resolve' serves the file itself)
model_link = 'https://huggingface.co/spaces/AliArshad/SeverityPrediction/resolve/main/severitypredictor.pt'

# Download the model file and save it locally
response = requests.get(model_link)
response.raise_for_status()
model_path = 'severitypredictor.pt'
with open(model_path, 'wb') as f:
    f.write(response.content)
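# Note: a minimal alternative sketch using the huggingface_hub client instead of requests
# (assumption: the .pt file lives in this same Space repo; not part of the script above):
#   from huggingface_hub import hf_hub_download
#   model_path = hf_hub_download(repo_id="AliArshad/SeverityPrediction",
#                                filename="severitypredictor.pt", repo_type="space")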
# Try loading the downloaded file as a PyTorch model
try:
    # map_location ensures the model loads on CPU-only hardware
    model = torch.load(model_path, map_location=torch.device('cpu'))
    model.eval()  # inference mode: disables dropout
    tokenizer = XLNetTokenizer.from_pretrained('xlnet-base-cased')

    # Predict whether a bug report summary is Severe or Non-severe
    def xl_net_predict(text):
        inputs = tokenizer(text, return_tensors="pt", padding=True, truncation=True, max_length=100)
        with torch.no_grad():
            outputs = model(**inputs)
        logits = outputs.logits
        probabilities = torch.softmax(logits, dim=1)
        predicted_class = torch.argmax(probabilities).item()
        return "Severe" if predicted_class == 1 else "Non-severe"
    # Customizing the Gradio interface
    iface = gr.Interface(
        fn=xl_net_predict,
        inputs=gr.Textbox(lines=2, label="Summary", placeholder="Enter text here..."),
        outputs=gr.Textbox(label="Predicted Severity"),
        title="XLNet Based Bug Report Severity Prediction",
        description="Enter text and predict its severity (Severe or Non-severe).",
        theme="huggingface",
        examples=[
            ["Can't open multiple bookmarks at once from the bookmarks sidebar using the context menu"],
            ["Minor enhancements to make-source-package.sh"]
        ],
        allow_flagging=False
    )

    iface.launch()
except Exception as e:
    print(f"An error occurred: {e}")