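# Assumed dependencies (not pinned in this file): gradio, transformers,
# torch (backend for the transformers pipeline), and Pillow (for the PIL image input).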
import gradio as gr
from transformers import pipeline, BartTokenizer, BartForConditionalGeneration

# Initialize the image classification pipeline
classifier = pipeline("image-classification", model="google/vit-base-patch16-224")

# Initialize the tokenizer and model used to generate the tweet text
model_name = "facebook/bart-large-cnn"  # BART summarization model, used here as an example text generator
tokenizer = BartTokenizer.from_pretrained(model_name)
model = BartForConditionalGeneration.from_pretrained(model_name)

def generate_tweet(label):
    # Build a short prompt and let the BART model generate text from it
    prompt = f"Write a tweet about {label}:"
    inputs = tokenizer.encode(prompt, return_tensors="pt")
    # Note: max_length is counted in tokens, not characters, so the decoded text may exceed 280 characters
    outputs = model.generate(inputs, max_length=280, num_return_sequences=1)
    tweet = tokenizer.decode(outputs[0], skip_special_tokens=True)
    return tweet

def predict(image):
    predictions = classifier(image)
    # Sort predictions based on confidence and select the top one
    top_prediction = sorted(predictions, key=lambda x: x['score'], reverse=True)[0]
    label = top_prediction['label'].split(',')[0]  # ImageNet labels can list synonyms ("tabby, tabby cat"); keep the first
    
    # Generate the tweet
    tweet = generate_tweet(label)
    return tweet
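
# --- Illustrative sketch (an assumption, not part of the original app) ---
# Because model.generate() above limits tokens rather than characters, the decoded
# text can exceed Twitter's 280-character limit. A helper like this could be applied
# to the result inside predict(), e.g. `return trim_to_tweet(tweet)`.
def trim_to_tweet(text: str, limit: int = 280) -> str:
    """Truncate text to the tweet character limit, cutting at the last whole word."""
    if len(text) <= limit:
        return text
    return text[:limit].rsplit(" ", 1)[0]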

title = "Image Classifier to Generative Tweet"
description = "This demo classifies an uploaded image with the 'google/vit-base-patch16-224' model and generates a promotional tweet about the top prediction with the 'facebook/bart-large-cnn' text generation model."
input_component = gr.Image(type="pil", label="Upload an image here")
output_component = gr.Textbox(label="Generated Promotional Tweet")

gr.Interface(fn=predict, inputs=input_component, outputs=output_component, title=title, description=description).launch()