import gradio as gr
from transformers import pipeline

# Load the pre-trained Arabic sentiment model and tokenizer from Hugging Face.
# Alternative model considered: "aubmindlab/bert-base-arabertv02-twitter"
model_name = "CAMeL-Lab/bert-base-arabic-camelbert-msa-sentiment"
sentiment_analysis = pipeline(
    "sentiment-analysis",
    model=model_name,
    tokenizer=model_name,
)


def analyze_sentiment(text):
    # Run the pipeline and take the top prediction for the input text
    result = sentiment_analysis(text)[0]
    return f"sentiment: {result['label']}, confidence: {result['score']}"


# Define the Gradio interface
iface = gr.Interface(
    fn=analyze_sentiment,
    inputs="text",
    outputs="text",
    title="Arabic Sentiment Analysis",
)

iface.launch(share=True)