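"""Streamlit demo for sentiment analysis of IMDb movie reviews.

Loads the nlptown/bert-base-multilingual-uncased-sentiment pipeline and serves
a Streamlit UI; a Gradio interface for the same classifier is launched in a
background thread on a free port.
"""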
import gradio as gr
from transformers import pipeline
import streamlit as st
import socket
import threading
# Specify the model name explicitly to avoid warnings
model_name = "nlptown/bert-base-multilingual-uncased-sentiment"
# Load the pre-trained sentiment-analysis pipeline
try:
    classifier = pipeline('sentiment-analysis', model=model_name)
except Exception as e:
    st.error(f"Error loading pipeline: {e}")
    st.stop()
# Function to classify sentiment
def classify_text(text):
    result = classifier(text)[0]
    return f"{result['label']} with score {result['score']}"
# Function to find an available port
def find_free_port():
    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
        s.bind(('', 0))
        return s.getsockname()[1]
# Function to run Gradio in a separate thread
def run_gradio():
    iface = gr.Interface(fn=classify_text, inputs="text", outputs="text")
    iface.launch(server_port=find_free_port())
# Start Gradio in a daemonized thread so it does not block Streamlit; guard with
# session state so Streamlit reruns do not spawn additional Gradio servers
if "gradio_started" not in st.session_state:
    threading.Thread(target=run_gradio, daemon=True).start()
    st.session_state.gradio_started = True
# Streamlit code
st.title('IMDb Sentiment Analysis')
st.write('This project performs sentiment analysis on IMDb movie reviews using Streamlit.')
st.text_input("Enter text for sentiment analysis", key="input_text")
if st.button("Classify"):
    text = st.session_state.input_text
    if text:
        result = classify_text(text)
        st.write(result)
    else:
        st.write("Please enter text for classification.")