muneebashraf committed on
Commit
8f2f602
·
1 Parent(s): 2c9923d
Files changed (1) hide show
  1. app.py +1 -3
app.py CHANGED
@@ -1,9 +1,8 @@
1
  import gradio as gr
2
- from transformers import BlipProcessor, BlipForConditionalGeneration, RobertaTokenizer, RobertaForSequenceClassification
3
  from transformers import AutoTokenizer, AutoModelForSequenceClassification
4
  import torch
5
 
6
-
7
  # Load the image captioning model and tokenizer
8
  caption_model_name = "Salesforce/blip-image-captioning-large"
9
  caption_processor = BlipProcessor.from_pretrained(caption_model_name)
@@ -39,7 +38,6 @@ def generate_caption_and_analyze_emotions(image):
39
  emotion_label_id = emotion_outputs.logits.argmax().item()
40
  emotion_label = emotion_tokenizer.decode(emotion_label_id)
41
 
42
-
43
  # Prepare the final output with sentiment information
44
  final_output = f"The sentiment in the provided image shows: {emotion_label}.\n\nGenerated Caption: {decoded_caption}"
45
  return final_output
 
1
  import gradio as gr
2
+ from transformers import BlipProcessor, BlipForConditionalGeneration, AutoTokenizer, AutoModelForSequenceClassification
3
  from transformers import AutoTokenizer, AutoModelForSequenceClassification
4
  import torch
5
 
 
6
  # Load the image captioning model and tokenizer
7
  caption_model_name = "Salesforce/blip-image-captioning-large"
8
  caption_processor = BlipProcessor.from_pretrained(caption_model_name)
 
38
  emotion_label_id = emotion_outputs.logits.argmax().item()
39
  emotion_label = emotion_tokenizer.decode(emotion_label_id)
40
 
 
41
  # Prepare the final output with sentiment information
42
  final_output = f"The sentiment in the provided image shows: {emotion_label}.\n\nGenerated Caption: {decoded_caption}"
43
  return final_output