import os

# Install Flask at runtime if it is not already present
return_code = os.system('pip install flask')
if return_code != 0:
    raise RuntimeError("Failed to install Flask")

import gradio as gr
from random import randint
from all_models import models  # list of available model ids (not referenced directly below)
from flask import Flask, request, send_file
from io import BytesIO
from PIL import Image, ImageChops
app = Flask(__name__)
# グローバルγͺγƒ’γƒ‡γƒ«θΎžζ›Έ
models_load = {}

def load_model(model_name):
    global models_load
    if model_name not in models_load:
        try:
            m = gr.load(f'models/{model_name}')
            models_load[model_name] = m
        except Exception as error:
            print(f"Error loading model {model_name}: {error}")
            # Fall back to a stub interface that yields no image
            models_load[model_name] = gr.Interface(lambda txt: None, ['text'], ['image'])
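
# Note: gr.load('models/<id>') wraps the model's hosted inference endpoint, and
# the returned interface can be called like a plain function (as gen_fn does below).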

def gen_fn(model_str, prompt, negative_prompt=None, noise=None, cfg_scale=None, num_inference_steps=None):
    if model_str not in models_load:
        load_model(model_str)  # Load the model on first use
    if model_str in models_load:
        if noise == "random":
            noise = str(randint(0, 99999999999))
        full_prompt = f'{prompt} {noise}' if noise else prompt
        try:
            if negative_prompt:
                full_prompt += f' -{negative_prompt}'
            # Construct the call parameters dynamically
            call_params = [full_prompt]
            if cfg_scale is not None:
                # Only pass cfg_scale if the loaded model exposes it
                if 'cfg_scale' not in getattr(models_load[model_str], 'parameters', []):
                    return None, 'cfg_scale parameter is not supported by the model'
                call_params.append(cfg_scale)
            if num_inference_steps is not None:
                # Only pass num_inference_steps if the loaded model exposes it
                if 'num_inference_steps' not in getattr(models_load[model_str], 'parameters', []):
                    return None, 'num_inference_steps parameter is not supported by the model'
                call_params.append(num_inference_steps)
            result = models_load[model_str](*call_params)
            # The model may return either a file path or a PIL image
            if isinstance(result, str):
                if os.path.exists(result):
                    image = Image.open(result)
                else:
                    print(f"File path not found: {result}")
                    return None, 'File path not found'
            elif isinstance(result, Image.Image):
                image = result
            else:
                print("Result is not an image:", type(result))
                return None, 'Result is not an image'
            # Reject images that came back completely black
            black = Image.new('RGB', image.size, (0, 0, 0))
            if ImageChops.difference(image.convert('RGB'), black).getbbox() is None:
                return None, 'Generated image is completely black'
            return image, None
        except Exception as e:
            print("Error generating image:", e)
            return None, f"Error generating image: {e}"
    else:
        print(f"Model {model_str} not found")
        return None, f"Model {model_str} not found"

@app.route('/', methods=['GET'])
def home():
    prompt = request.args.get('prompt', '')
    model = request.args.get('model', '')
    negative_prompt = request.args.get('Nprompt', None)
    noise = request.args.get('noise', None)
    cfg_scale = request.args.get('cfg_scale', None)
    num_inference_steps = request.args.get('steps', None)
    try:
        if cfg_scale is not None:
            cfg_scale = float(cfg_scale)
    except ValueError:
        return 'Invalid "cfg_scale" parameter. It should be a number.', 400
    try:
        if num_inference_steps is not None:
            num_inference_steps = int(num_inference_steps)
    except ValueError:
        return 'Invalid "steps" parameter. It should be an integer.', 400
    if not model:
        return 'Please provide a "model" query parameter in the URL.', 400
    if not prompt:
        return 'Please provide a "prompt" query parameter in the URL.', 400
    # Generate the image
    image, error_message = gen_fn(model, prompt, negative_prompt, noise, cfg_scale, num_inference_steps)
    if error_message:
        return error_message, 400
    if isinstance(image, Image.Image):  # Ensure the result is a PIL image
        # Serve the image from an in-memory PNG buffer
        img_io = BytesIO()
        image.save(img_io, format='PNG')
        img_io.seek(0)
        return send_file(img_io, mimetype='image/png', as_attachment=False)
    return 'Failed to generate image.', 500
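
# Example request (model id is illustrative):
#   GET /?model=runwayml/stable-diffusion-v1-5&prompt=a+sunset+over+the+sea&noise=random&cfg_scale=7.5&steps=25
# On success the response body is the generated PNG; otherwise a plain-text
# error message is returned with status 400 or 500.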

if __name__ == '__main__':
    # Launch the Flask app (7860 is the default port for Hugging Face Spaces)
    app.run(host='0.0.0.0', port=7860)