import os
import subprocess
import sys

# Install Flask into the current interpreter's environment at startup;
# `sys.executable -m pip` is more reliable than shelling out to a bare `pip`.
return_code = subprocess.call([sys.executable, '-m', 'pip', 'install', 'flask'])

if return_code != 0:
    raise RuntimeError("Failed to install Flask")

import gradio as gr
from random import randint
from all_models import models
from flask import Flask, request, send_file
from io import BytesIO
from PIL import Image, ImageChops

app = Flask(__name__)

# Cache of loaded gradio interfaces, keyed by model name.
models_load = {}


def load_model(model_name):
    """Load a gradio interface for model_name once and cache it in models_load."""
    global models_load
    if model_name not in models_load:
        try:
            m = gr.load(f'models/{model_name}')
            models_load[model_name] = m
        except Exception as error:
            print(f"Error loading model {model_name}: {error}")
            # Cache a dummy interface that always yields no image, so a
            # permanently broken model is not re-fetched on every request.
            models_load[model_name] = gr.Interface(lambda txt: None, ['text'], ['image'])
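
# A minimal usage sketch, assuming a Hub-hosted text-to-image model; the
# model id below is hypothetical, not one shipped with this repo:
#   load_model('stabilityai/stable-diffusion-2-1')
#   iface = models_load['stabilityai/stable-diffusion-2-1']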


def gen_fn(model_str, prompt, negative_prompt=None, noise=None, cfg_scale=None, num_inference_steps=None):
    """Generate an image; returns (image, error_message), one of which is None."""
    if model_str not in models_load:
        load_model(model_str)

    if model_str not in models_load:
        print(f"Model {model_str} not found")
        return None, f"Model {model_str} not found"

    if noise == "random":
        noise = str(randint(0, 99999999999))
    full_prompt = f'{prompt} {noise}' if noise else prompt
    try:
        if negative_prompt:
            # The loaded interface takes a single text input, so the negative
            # prompt is folded into the prompt with a leading '-' marker.
            full_prompt += f' -{negative_prompt}'

        call_params = [full_prompt]
        # Not every gradio interface exposes `parameters`, so look it up
        # defensively before forwarding the optional arguments.
        supported = getattr(models_load[model_str], 'parameters', [])
        if cfg_scale is not None:
            if 'cfg_scale' not in supported:
                return None, 'cfg_scale parameter is not supported by the model'
            call_params.append(cfg_scale)
        if num_inference_steps is not None:
            if 'num_inference_steps' not in supported:
                return None, 'num_inference_steps parameter is not supported by the model'
            call_params.append(num_inference_steps)

        result = models_load[model_str](*call_params)

        # Depending on the model, gradio may return a file path or a PIL image.
        if isinstance(result, str):
            if os.path.exists(result):
                image = Image.open(result)
            else:
                print(f"File path not found: {result}")
                return None, 'File path not found'
        elif isinstance(result, Image.Image):
            image = result
        else:
            print("Result is not an image:", type(result))
            return None, 'Result is not an image'

        # Reject all-black outputs (a common failure mode of safety filters).
        # Convert to RGB first so both operands of the diff share a mode.
        rgb = image.convert('RGB')
        black = Image.new('RGB', rgb.size, (0, 0, 0))
        if ImageChops.difference(rgb, black).getbbox() is None:
            return None, 'Generated image is completely black'

        return image, None

    except Exception as e:
        print("Error generating image:", e)
        return None, f"Error generating image: {e}"


@app.route('/', methods=['GET'])
def home():
    prompt = request.args.get('prompt', '')
    model = request.args.get('model', '')
    negative_prompt = request.args.get('Nprompt', None)
    noise = request.args.get('noise', None)
    cfg_scale = request.args.get('cfg_scale', None)
    num_inference_steps = request.args.get('steps', None)

    try:
        if cfg_scale is not None:
            cfg_scale = float(cfg_scale)
    except ValueError:
        return 'Invalid "cfg_scale" parameter. It should be a number.', 400

    try:
        if num_inference_steps is not None:
            num_inference_steps = int(num_inference_steps)
    except ValueError:
        return 'Invalid "steps" parameter. It should be an integer.', 400

    if not model:
        return 'Please provide a "model" query parameter in the URL.', 400

    if not prompt:
        return 'Please provide a "prompt" query parameter in the URL.', 400

    image, error_message = gen_fn(model, prompt, negative_prompt, noise, cfg_scale, num_inference_steps)
    if error_message:
        return error_message, 400

    if isinstance(image, Image.Image):
        # Stream the PNG from memory rather than writing it to disk.
        img_io = BytesIO()
        image.save(img_io, format='PNG')
        img_io.seek(0)
        return send_file(img_io, mimetype='image/png', as_attachment=False)

    return 'Failed to generate image.', 500
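
# Example request against a local run (hypothetical model id; URL-encode the
# prompt). Returns a PNG on success and a plain-text error otherwise:
#   curl -o out.png 'http://localhost:7860/?model=stabilityai/stable-diffusion-2-1&prompt=a+watercolor+fox&Nprompt=blurry&noise=random&cfg_scale=7.5&steps=30'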


if __name__ == '__main__':
    # Bind to all interfaces; 7860 is the default port for gradio apps and
    # Hugging Face Spaces.
    app.run(host='0.0.0.0', port=7860)