import gradio as gr

# Load the three text-to-image models from the Hugging Face Hub.
model1 = gr.load("models/pimpilikipilapi1/NSFW_master")
model2 = gr.load("models/DiegoJR1973/NSFW-TrioHMH-Flux")
model3 = gr.load("models/prashanth970/flux-lora-uncensored")

def generate_images(text):
    # Run the same prompt through each of the three loaded models.
    result_image1 = model1(text)
    result_image2 = model2(text)
    result_image3 = model3(text)

    # Log the raw return values to help debug what each loaded model produces.
    print(f"Result from model1: {type(result_image1)} - {result_image1}")
    print(f"Result from model2: {type(result_image2)} - {result_image2}")
    print(f"Result from model3: {type(result_image3)} - {result_image3}")

    # Some loaded models return a tuple (e.g. image plus extra metadata);
    # keep only the first element so gr.Image receives a single image.
    if isinstance(result_image1, tuple):
        result_image1 = result_image1[0]
    if isinstance(result_image2, tuple):
        result_image2 = result_image2[0]
    if isinstance(result_image3, tuple):
        result_image3 = result_image3[0]

    return result_image1, result_image2, result_image3

interface = gr.Interface(
    fn=generate_images,
    inputs=[
        gr.Textbox(label="Type here your imagination:", placeholder="Type or click an example..."),
    ],
    outputs=[
        gr.Image(label="Model 1 Output"),
        gr.Image(label="Model 2 Output"),
        gr.Image(label="Model 3 Output"),
    ],
    theme="huggingface",  # note: string themes such as "huggingface" may be deprecated in newer Gradio releases
    description="Sorry for the inconvenience. The models are currently running on the CPU, which might affect performance. We appreciate your understanding.",
)

interface.launch()