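# Streamlit mini-game: a hidden prompt (a short French sentence produced by GPT-3.5)
# is rendered with SDXL-Turbo, and the player has to guess that exact prompt.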
import os
import random

import numpy as np
import streamlit as st
import torch
from PIL import Image
from diffusers import AutoPipelineForText2Image
from openai import OpenAI

# OpenAI() picks up the OPENAI_API_KEY environment variable by default.
client = OpenAI()
@st.cache_data
def get_prompt_to_guess():
    # Cache the hidden prompt so it stays the same across Streamlit reruns
    # (otherwise every button click would pick a new prompt to guess).
    random_prompt = ["tree", "cat", "dog", "consultant", "artificial intelligence", "beauty", "immeuble", "plage", "cyborg", "futuristic"]
    response = client.chat.completions.create(
        model="gpt-3.5-turbo",
        messages=[
            {"role": "system", "content": "You are a helpful assistant to generate one simple prompt in order to generate an image. Your given prompt won't go over 10 words. You only return the prompt. You will also answer in french."},
            # i.e. "Give me a prompt to generate an image of <random theme>"
            {"role": "user", "content": f"Donne moi un prompt pour generer une image de {random.choice(random_prompt)}"},
        ],
    )
    return response.choices[0].message.content
@st.cache_resource
def get_model():
    # Load SDXL-Turbo once and keep it in memory across reruns.
    pipe = AutoPipelineForText2Image.from_pretrained("stabilityai/sdxl-turbo", torch_dtype=torch.float32, variant="fp16")
    return pipe
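# Note: on a GPU machine the same pipeline could instead be loaded with torch_dtype=torch.float16
# and moved to the device with pipe.to("cuda"); float32 is used here because this Space runs on CPU.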
@st.cache_data
def generate_image(_pipe, prompt):
    # SDXL-Turbo: one inference step, no guidance; the underscore keeps Streamlit from hashing the pipeline.
    return _pipe(prompt=prompt, num_inference_steps=1, guidance_scale=0.0, generator=torch.Generator("cpu").manual_seed(1)).images[0]
if "ask_answer" not in st.session_state: | |
st.session_state["ask_answer"] = False | |
if "testing" not in st.session_state: | |
st.session_state["testing"] = False | |
if "submit_guess" not in st.session_state: | |
st.session_state["submit_guess"] = False | |
if "real_ask_answer" not in st.session_state: | |
st.session_state["real_ask_answer"] = False | |
def check_prompt(prompt, prompt_to_guess):
    # The guess must match the hidden prompt exactly, ignoring surrounding whitespace.
    return prompt.strip() == prompt_to_guess.strip()
pipe = get_model()
prompt = get_prompt_to_guess()
im_to_guess = generate_image(pipe, prompt)
w, h = im_to_guess.size  # PIL's Image.size is (width, height)

st.title("Guess the prompt by Ekimetrics")
st.text("Rules: guess the prompt (in French, without any mistake) used to generate the left image with the SDXL-Turbo model")
st.text("Hint: use the right side to help you guess the prompt by testing a few of your own")
st.text("Disclosure: this runs on CPU, so generations are quite slow (even with SDXL-Turbo)")
col_1, col_2 = st.columns([0.5, 0.5])
with col_1:
    st.header("GUESS THE PROMPT")
    guessed_prompt = st.text_area("Input your guess prompt")
    st.session_state["submit_guess"] = st.button("guess the prompt")
    if st.session_state["submit_guess"]:
        if check_prompt(guessed_prompt, prompt):
            st.text("Good prompt! Try again in 24h!")
        else:
            st.text("Wrong prompt!")
    st.session_state["ask_answer"] = st.button("get the answer")
    if st.session_state["ask_answer"]:
        st.text(f"Cheater! But here is the prompt:\n{prompt}")
    st.image(im_to_guess)
if "testing" not in st.session_state: | |
st.session_state["testing"] = False | |
with col_2: | |
st.header("TEST THE PROMPT") | |
testing_prompt = st.text_area("Input your testing prompt") | |
if st.session_state["testing"]: | |
im = generate_image(pipe, testing_prompt) | |
st.session_state["testing"] = False | |
else: | |
im = np.zeros([h,w,3]) | |
st.session_state["testing"] = st.button("test the prompt") | |
st.image(im) | |
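# To try this locally (assuming the script is saved as app.py and OPENAI_API_KEY is set):
#   streamlit run app.py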