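"""Gradio app for sentiment analysis.

Loads a fine-tuned sequence-classification model from the Hugging Face Hub
and serves a simple text box interface that returns the predicted sentiment
("positive" or "negative") with a confidence score.
"""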
import gradio as gr
import tensorflow as tf
import numpy as np
from transformers import TFAutoModelForSequenceClassification, DistilBertTokenizer

# Hugging Face Hub repository that hosts the fine-tuned model
repository_name = "MariamKili/my_bert_model"

# Load the tokenizer
tokenizer = DistilBertTokenizer.from_pretrained("distilbert-base-uncased")

# Load the model directly from the Hugging Face Hub
model = TFAutoModelForSequenceClassification.from_pretrained(repository_name)

# Prediction function used by the Gradio interface
def predict_sentiment(text):
    # Tokenize and encode the input text
    encoded_input = tokenizer.encode_plus(
        text,
        add_special_tokens=True,
        max_length=512,
        padding="max_length",
        return_attention_mask=True,
        truncation=True,
        return_tensors="tf",
    )
    # Run the model and convert the logits to class probabilities
    output = model(encoded_input)
    probabilities = tf.nn.softmax(output.logits, axis=1).numpy()[0]
    predicted_label = np.argmax(probabilities)
    confidence_score = probabilities[predicted_label]
    # Decode the predicted label
    label = "positive" if predicted_label == 1 else "negative"
    # Report the label together with its confidence, since the interface has a single text output
    return f"{label} (confidence: {confidence_score:.2f})"
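
# Example usage (illustrative values, not from a real run):
#   predict_sentiment("I really enjoyed this!")  ->  "positive (confidence: 0.97)"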

# Create the Gradio interface components
text_input = gr.components.Textbox(lines=5, label="Enter your text here")
output_text = gr.components.Textbox(label="Predicted Sentiment")

# Define the Gradio interface
iface = gr.Interface(
    fn=predict_sentiment,
    inputs=text_input,
    outputs=output_text,
    title="Sentiment Analysis Application",
)

# Launch the Gradio app
iface.launch(share=True)