# Source snapshot: Hugging Face Space "app.py" by sms07, commit 7b36a49.
# (The original three lines here were web-page scraping residue, not Python.)
import streamlit as st
from bertopic import BERTopic
from PIL import Image
from transformers import (
    pipeline,
    BlenderbotTokenizer,
    BlenderbotForConditionalGeneration,
)


@st.cache_resource
def _load_models():
    """Load and cache the five-model chain once per server process.

    Streamlit re-executes the whole script on every widget interaction;
    without caching, every keystroke would re-download/re-initialize all
    five models. ``st.cache_resource`` keeps one shared copy alive.

    Returns:
        Tuple of (vqa_pipeline, bbu_pipeline, tokenizer, facebook_model,
        gpt2_pipeline, topic_model_1, topic_model_2).
    """
    # Model 1: visual question answering (User -> Model 1).
    vqa_pipeline = pipeline(
        task="visual-question-answering", model="dandelin/vilt-b32-finetuned-vqa"
    )
    # Model 2: fill-mask (Model 1 -> Model 2).
    bbu_pipeline = pipeline(task="fill-mask", model="bert-base-uncased")
    # Model 3: conversational Blenderbot (Model 2 -> Model 3).
    model_name = "facebook/blenderbot-400M-distill"
    tokenizer = BlenderbotTokenizer.from_pretrained(
        pretrained_model_name_or_path=model_name
    )
    facebook_model = BlenderbotForConditionalGeneration.from_pretrained(
        pretrained_model_name_or_path=model_name
    )
    # Model 4: free text generation (Model 3 -> Model 4).
    gpt2_pipeline = pipeline(task="text-generation", model="gpt2")
    # Model 5: two pretrained BERTopic models (Model 4 -> Model 5).
    topic_model_1 = BERTopic.load(path="davanstrien/chat_topics")
    topic_model_2 = BERTopic.load(path="MaartenGr/BERTopic_ArXiv")
    return (
        vqa_pipeline,
        bbu_pipeline,
        tokenizer,
        facebook_model,
        gpt2_pipeline,
        topic_model_1,
        topic_model_2,
    )


st.title("Georgios Ioannou's Visual Question Answering With Hugging Face")
st.write("Drag and drop an image file here.")

# Allow the user to upload an image file.
image = st.file_uploader("Choose an image...", type=["jpg", "jpeg", "png"])

# NOTE(review): the pasted source lost all indentation after this guard;
# the body below is reconstructed — everything that uses `image`/`question`
# must live inside it, since those names only exist once a file is uploaded.
if image is not None:
    # Display the uploaded image.
    image = Image.open(image)
    st.image(image, caption="Uploaded Image", use_column_width=True)
    question = st.text_input("What's your question?")

    # Run the (expensive) 5-model chain only once the user has actually
    # typed a question — st.text_input returns "" before any input.
    if question:
        (
            vqa_pipeline,
            bbu_pipeline,
            tokenizer,
            facebook_model,
            gpt2_pipeline,
            topic_model_1,
            topic_model_2,
        ) = _load_models()

        #######################################################################
        # 5 MODEL INFERENCES.
        # User Input = Image + Question About The Image.
        # User -> Model 1 -> Model 2 -> Model 3 -> Model 4 -> Model 5
        #######################################################################

        # Model 1: answer the question about the image; keep the top answer.
        vqa_pipeline_output = vqa_pipeline(image, question, top_k=5)[0]

        # Model 2: embed the answer in a [MASK] template and fill the mask.
        text = (
            "I love "
            + str(vqa_pipeline_output["answer"])
            + " and I would like to know how to [MASK]."
        )
        bbu_pipeline_output = bbu_pipeline(text)

        # Model 3: feed the best filled sequence to Blenderbot for a reply.
        utterance = bbu_pipeline_output[0]["sequence"]
        inputs = tokenizer(utterance, return_tensors="pt")
        result = facebook_model.generate(**inputs)
        facebook_model_output = tokenizer.decode(result[0])

        # Strip Blenderbot's special tokens from the decoded reply.
        facebook_model_output = facebook_model_output.replace("<s> ", "")
        facebook_model_output = facebook_model_output.replace("<s>", "")
        facebook_model_output = facebook_model_output.replace("</s>", "")

        # Model 4: continue the reply with GPT-2 free-text generation.
        gpt2_pipeline_output = gpt2_pipeline(facebook_model_output)[0][
            "generated_text"
        ]

        # Model 5: classify the generated text with both topic models and
        # pull the top representation word of the predicted topic.
        topic, prob = topic_model_1.transform(gpt2_pipeline_output)
        topic_model_1_output = topic_model_1.get_topic_info(topic[0])[
            "Representation"
        ][0]
        topic, prob = topic_model_2.transform(gpt2_pipeline_output)
        topic_model_2_output = topic_model_2.get_topic_info(topic[0])[
            "Representation"
        ][0]

        #######################################################################
        # Raw debug dump of every stage's output.
        st.write("-" * 150)
        st.write("vqa_pipeline_output =", vqa_pipeline_output)
        st.write("bbu_pipeline_output =", bbu_pipeline_output)
        st.write("facebook_model_output =", facebook_model_output)
        st.write("gpt2_pipeline_output =", gpt2_pipeline_output)
        st.write("topic_model_1_output =", topic_model_1_output)
        st.write("topic_model_2_output =", topic_model_2_output)
        st.write("-" * 150)

        # Human-readable summary of the whole chain.
        st.write("SUMMARY")
        st.subheader("Your Image:")
        st.image(image, caption="Your Image", use_column_width=True)
        st.subheader("Your Question:")
        st.write(question)
        st.write("-" * 100)
        st.subheader("1. Highest Predicted Answer For Your Question:")
        st.write(vqa_pipeline_output["answer"])
        st.write(text)
        st.subheader("2. Highest Predicted Sequence On [MASK] Based on 1.:")
        st.write(bbu_pipeline_output[0]["sequence"])
        st.subheader("3. Conversation Based On Previous Answer Based on 2.:")
        st.write(facebook_model_output)
        st.subheader("4. Text Generated Based On Previous Answer Based on 3.:")
        st.write(gpt2_pipeline_output)
        st.subheader(
            "5. Highest Predicted Topic Model_1 For Previous The Answer Based on 4.:"
        )
        st.write(topic_model_1_output)
        st.subheader(
            "6. Highest Predicted Topic Model_2 For Previous The Answer Based on 4.:"
        )
        st.write(topic_model_2_output)
        st.write("-" * 150)