# sentiment_analyzerss.py
import streamlit as st
from transformers import pipeline
import matplotlib.pyplot as plt
from PIL import Image, ImageFilter
import numpy as np
from tensorflow.keras.applications.mobilenet_v2 import MobileNetV2, preprocess_input, decode_predictions
from tensorflow.keras.preprocessing.image import img_to_array
# Load the sentiment analysis and image recognition models
sentiment_pipeline = pipeline("sentiment-analysis")
image_model = MobileNetV2(weights='imagenet')
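# Optional sketch (assumes Streamlit >= 1.18, which provides st.cache_resource):
# wrapping the loaders in cached functions avoids re-creating both models on
# every script rerun. Left commented out so the simple module-level loading
# above remains the active path; the function names are illustrative.
#
# @st.cache_resource
# def load_sentiment_pipeline():
#     return pipeline("sentiment-analysis")
#
# @st.cache_resource
# def load_image_model():
#     return MobileNetV2(weights="imagenet")
#
# sentiment_pipeline = load_sentiment_pipeline()
# image_model = load_image_model()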
def plot_sentiment(sentiment):
    """Draw a pie chart for a sentiment analysis result."""
    # The pipeline reports a confidence score for the predicted label only,
    # so label the first slice with that prediction and the remainder with
    # the opposite class.
    predicted = sentiment['label'].capitalize()
    other = 'Negative' if predicted == 'Positive' else 'Positive'
    labels = [predicted, other]
    sizes = [sentiment['score'], 1 - sentiment['score']]
    colors = ['#ff9999', '#66b3ff']
    fig, ax = plt.subplots()
    ax.pie(sizes, colors=colors, labels=labels, autopct='%1.1f%%', startangle=90)
    ax.axis('equal')  # keep the pie circular
    return fig
def classify_image(image):
    """Classify the image with MobileNetV2 and return the top predictions."""
    image = image.convert('RGB')  # MobileNetV2 expects 3 channels (PNG uploads may be RGBA)
    image = image.resize((224, 224))
    image = img_to_array(image)
    image = np.expand_dims(image, axis=0)
    image = preprocess_input(image)
    preds = image_model.predict(image)
    return decode_predictions(preds, top=3)[0]
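# For reference, decode_predictions yields (class_id, class_name, probability)
# tuples, which is why the app below indexes label[1] and label[2]. The values
# here are purely illustrative:
#   [('n02123045', 'tabby', 0.54),
#    ('n02123159', 'tiger_cat', 0.23),
#    ('n02124075', 'Egyptian_cat', 0.11)]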
def apply_filters(image):
    """Apply the filter selected in the sidebar and return the filtered image."""
    filter_name = st.sidebar.selectbox("Select a filter", ["Original", "Blur", "Contour", "Edge enhance"])
    if filter_name == "Blur":
        return image.filter(ImageFilter.BLUR)
    elif filter_name == "Contour":
        return image.filter(ImageFilter.CONTOUR)
    elif filter_name == "Edge enhance":
        return image.filter(ImageFilter.EDGE_ENHANCE)
    return image
# Application configuration
st.title("My Image Recognition and Sentiment Analysis Application")
st.sidebar.title("Options")
# Navigation bar
options = st.sidebar.radio("Choose a feature:", ['Image Recognition', 'Sentiment Analysis'])
# Image recognition
if options == 'Image Recognition':
    uploaded_file = st.file_uploader("Upload an image...", type=["jpg", "png", "jpeg"])
    if uploaded_file is not None:
        image = Image.open(uploaded_file)
        filtered_image = apply_filters(image)
        st.image(filtered_image, caption='Uploaded image', use_column_width=True)
        # Image classification
        st.write("Classifying...")
        labels = classify_image(filtered_image)
        for label in labels:
            st.write(f"{label[1]} ({label[2]*100:.2f}%)")
        # Sentiment analysis based on the predicted image labels
        sentiments = [sentiment_pipeline(label[1])[0] for label in labels]
        for label, sentiment in zip(labels, sentiments):
            st.write(f"Sentiment for '{label[1]}': {sentiment['label']} ({sentiment['score']:.2f})")
            fig = plot_sentiment(sentiment)
            st.pyplot(fig)
# Sentiment analysis
elif options == 'Sentiment Analysis':
    user_input = st.text_area("Enter the text to analyze", "Type here...")
    if st.button("Analyze"):
        result = sentiment_pipeline(user_input)[0]
        st.write(f"Sentiment: {result['label']}, Score: {result['score']:.2f}")
        fig = plot_sentiment(result)
        st.pyplot(fig)
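# To launch the app locally:
#   streamlit run sentiment_analyzerss.py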