# Facial expression classifier

import os

from fastai.vision.all import *
import gradio as gr

# Emotion model: predicts one of the FER2013 emotion classes
learn_emotion = load_learner('emotions_vgg19.pkl')
learn_emotion_labels = learn_emotion.dls.vocab

# Sentiment model: predicts the overall sentiment of the expression
learn_sentiment = load_learner('sentiment_vgg19.pkl')
learn_sentiment_labels = learn_sentiment.dls.vocab


def predict(img):
    """Run both learners on one image and return a combined label -> probability dict."""
    img = PILImage.create(img)

    pred_emotion, pred_emotion_idx, probs_emotion = learn_emotion.predict(img)
    pred_sentiment, pred_sentiment_idx, probs_sentiment = learn_sentiment.predict(img)

    emotions = {f'emotion_{learn_emotion_labels[i]}': float(probs_emotion[i])
                for i in range(len(learn_emotion_labels))}
    sentiments = {f'sentiment_{learn_sentiment_labels[i]}': float(probs_sentiment[i])
                  for i in range(len(learn_sentiment_labels))}

    return {**emotions, **sentiments}


# Gradio interface
title = "Facial Expression Sentiment Classifier"
description = ("A model that detects emotion and sentiment from facial expressions, "
               "trained on the FER2013 dataset with fastai. Created as a demo for an AI course.")
article = ("Sample images are taken from the VG and Aftenposten webpages. "
           "Copyrights belong to the respective brands. All rights reserved.")
interpretation = 'default'
enable_queue = True
examples = ['happy1.jpg', 'happy2.jpg',
            'angry1.jpg', 'angry2.jpg',
            'neutral1.jpg', 'neutral2.jpg']

# NOTE: shape=/image_mode= on gr.Image, interpretation= on gr.Interface and
# enable_queue= on launch() follow the Gradio 3.x API.
gr.Interface(fn=predict,
             inputs=gr.Image(shape=(48, 48), image_mode='L'),
             outputs=gr.Label(),
             title=title,
             description=description,
             article=article,
             examples=examples,
             interpretation=interpretation).launch(share=True, enable_queue=enable_queue)
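
# Optional quick check without the UI (a sketch, not part of the original app):
# uncomment the line below to print the combined probability dict for one of
# the bundled example images. Note that launch() above blocks while the server
# is running, so run this in a separate session or comment out the launch()
# call first.
#
# print(predict('happy1.jpg'))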