import gradio as gr
from transformers import AutoModelForSequenceClassification, AutoTokenizer
import torch
# Load a BERT encoder with a 2-way classification head (the head is randomly
# initialized, so predictions are placeholders until the model is fine-tuned)
model = AutoModelForSequenceClassification.from_pretrained("bert-base-uncased", num_labels=2)
tokenizer = AutoTokenizer.from_pretrained("bert-base-uncased")
model.eval()
def classify_text(text):
    # Tokenize the input text and return PyTorch tensors
    encoded_text = tokenizer(text, truncation=True, padding=True, return_tensors="pt")
    # Run the model without tracking gradients
    with torch.no_grad():
        output = model(**encoded_text)
    logits = output.logits
    # Pick the highest-scoring class for the single input
    prediction = torch.argmax(logits, dim=1).item()
    # Map the class index to a human-readable label
    class_labels = ["positive", "negative"]
    # Return a single label string, which is what the Label output expects
    return class_labels[prediction]
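
# A quick sanity check outside the Gradio UI might look like this (hypothetical
# example input; the returned label is arbitrary because the head is untrained):
#   classify_text("This movie was great!")  # -> "positive" or "negative"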
# Define the Gradio interface
interface = gr.Interface(
    fn=classify_text,
    inputs=gr.Textbox(label="Enter text to classify:"),
    outputs=gr.Label(label="Predicted Label:"),
)
# Launch the Gradio interface
interface.launch()
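
# Note: because bert-base-uncased carries no fine-tuned sentiment head, a common
# alternative (a minimal sketch, assuming an off-the-shelf SST-2 checkpoint is
# acceptable) is to use a pipeline with an already fine-tuned model:
#
#   from transformers import pipeline
#   sentiment = pipeline(
#       "sentiment-analysis",
#       model="distilbert-base-uncased-finetuned-sst-2-english",
#   )
#   sentiment("I love this app!")  # -> [{'label': 'POSITIVE', 'score': ...}]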