import gradio as gr
import torch
from transformers import AutoModelForSequenceClassification, AutoTokenizer

# Load the pre-trained text classification model from Hugging Face
model = AutoModelForSequenceClassification.from_pretrained("bert-base-uncased", num_labels=2)
tokenizer = AutoTokenizer.from_pretrained("bert-base-uncased")
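# Note: bert-base-uncased ships without a fine-tuned classification head, so the
# two-label head above is randomly initialized; load a fine-tuned sentiment
# checkpoint instead if you need meaningful predictions. eval() disables dropout
# for inference.
model.eval()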

def classify_text(text):
    # Preprocess the text input
    encoded_text = tokenizer(text, truncation=True, padding=True, return_tensors="pt")

    # Make predictions using the pre-trained model
    with torch.no_grad():
        output = model(**encoded_text)
        logits = output.logits
        prediction = logits.argmax(dim=-1).item()

    # Convert the predicted class index to a class label
    class_labels = ["positive", "negative"]
    predicted_label = class_labels[prediction]

    # Return the predicted label as a plain string (what gr.Label expects)
    return predicted_label
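
# Example call (only meaningful once a fine-tuned sentiment checkpoint is loaded):
#   classify_text("I really enjoyed this film.")  # e.g. -> "positive"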

# Define the Gradio interface
interface = gr.Interface(
    fn=classify_text,
    inputs=gr.Textbox(label="Enter text to classify:"),
    outputs=gr.Label(label="Predicted Label:")
)

# Launch the Gradio interface
interface.launch()