|
import gradio as gr |
|
from PIL import Image |
|
import base64 |
|
import io |
|
import glob |
|
import cv2 |
|
import numpy as np |
|
import torch |
|
from controlnet_aux import HEDdetector |
|
from diffusers import StableDiffusionControlNetPipeline, ControlNetModel, UniPCMultistepScheduler |
|
|
|
# Lazily-initialised singletons: loading the HED detector and the full
# Stable-Diffusion + ControlNet pipeline is very expensive (network download
# + weight init), so it must happen once per process, not once per click.
_HED = None
_PIPE = None


def _load_models():
    """Load (on first call) and return the HED detector and the SD-ControlNet pipeline."""
    global _HED, _PIPE
    if _HED is None:
        _HED = HEDdetector.from_pretrained('lllyasviel/Annotators')
    if _PIPE is None:
        controlnet = ControlNetModel.from_pretrained("lllyasviel/sd-controlnet-scribble")
        _PIPE = StableDiffusionControlNetPipeline.from_pretrained(
            "runwayml/stable-diffusion-v1-5", controlnet=controlnet
        )
        # UniPC reaches usable quality in far fewer steps than the default
        # scheduler — important here since we only run 10 inference steps.
        _PIPE.scheduler = UniPCMultistepScheduler.from_config(_PIPE.scheduler.config)
    return _HED, _PIPE


def predict(sketch, description):
    """Generate an image from a user sketch guided by a text description.

    Parameters
    ----------
    sketch : numpy.ndarray
        Pixel array delivered by the Gradio sketchpad component.
    description : str
        Text prompt describing the desired content/style.

    Returns
    -------
    PIL.Image.Image
        The first image produced by the ControlNet pipeline.

    Raises
    ------
    ValueError
        If no sketch was drawn (the sketchpad sent ``None``).
    """
    if sketch is None:
        # Fail fast with a clear message instead of crashing in Image.fromarray.
        raise ValueError("No sketch provided - please draw something first.")

    hed, pipe = _load_models()

    sketch_pil = Image.fromarray(sketch)
    # Convert the raw drawing into a scribble-style control image for ControlNet.
    control_image = hed(sketch_pil, scribble=True)

    result = pipe(description, control_image, num_inference_steps=10).images[0]
    return result
|
# Build the Gradio UI: a sketchpad + prompt on the input side, a generated
# image on the output side, plus a manual flagging button backed by CSVLogger.
with gr.Blocks() as iface:
    sketchpad = gr.Sketchpad(shape=(400, 300), brush_radius=5,
                             label="Sketchpad- Draw something")
    txt = gr.Textbox(lines=3, label="Description - Describe your sketch with style")
    im = gr.Image(label="Output Image", interactive=False)

    button = gr.Button(value="Submit")
    button.click(predict, inputs=[sketchpad, txt], outputs=im)

    # Manual flagging: persist (sketch, prompt, output) rows to CSV on demand.
    flag = gr.CSVLogger()
    flag.setup([sketchpad, txt, im], "flagged_data_points")
    button_flag = gr.Button(value="Flag")
    # preprocess=False forwards the raw component payloads straight to the logger.
    button_flag.click(lambda *args: flag.flag(args), [sketchpad, txt, im], None,
                      preprocess=False)

# Guard the server launch so importing this module (e.g. for tests or reuse
# of `predict`) does not start a web server as a side effect.
if __name__ == "__main__":
    iface.launch()
|
|