import os
# Install Flask if not already installed
return_code = os.system('pip install flask')
if return_code != 0:
    raise RuntimeError("Failed to install Flask")
import gradio as gr
from random import randint
from all_models import models
from flask import Flask, request, send_file
from io import BytesIO
from PIL import Image, ImageChops
app = Flask(__name__)
# Global model dictionary
models_load = {}
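# load_model: lazily fetch a model via gr.load; on failure, register a stub
# Interface that returns no image so later lookups do not raise KeyError.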
def load_model(model_name):
    global models_load
    if model_name not in models_load:
        try:
            m = gr.load(f'models/{model_name}')
            models_load[model_name] = m
        except Exception as error:
            print(f"Error loading model {model_name}: {error}")
            models_load[model_name] = gr.Interface(lambda txt: None, ['text'], ['image'])
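# gen_fn: run one generation request against a loaded model and return a
# (PIL image, error message) pair; exactly one of the two is None.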
def gen_fn(model_str, prompt, negative_prompt=None, noise=None, cfg_scale=None, num_inference_steps=None):
    if model_str not in models_load:
        load_model(model_str)  # Load the model if it has not been loaded yet
    if model_str in models_load:
        if noise == "random":
            noise = str(randint(0, 99999999999))
        full_prompt = f'{prompt} {noise}' if noise else prompt
        # Construct the function call parameters dynamically
        inputs = [full_prompt]
        if negative_prompt:
            inputs.append(negative_prompt)
        if cfg_scale is not None:
            inputs.append(cfg_scale)
        if num_inference_steps is not None:
            inputs.append(num_inference_steps)
        try:
            result = models_load[model_str](*inputs)
            # Check if result is an image or a file path
            if isinstance(result, str):  # Assuming result might be a file path
                if os.path.exists(result):
                    image = Image.open(result)
                else:
                    print(f"File path not found: {result}")
                    return None, 'File path not found'
            elif isinstance(result, Image.Image):
                image = result
            else:
                print("Result is not an image:", type(result))
                return None, 'Result is not an image'
            # Check if the image is completely black.
            # Compare in RGB so the mode matches the black reference image.
            black = Image.new('RGB', image.size, (0, 0, 0))
            if ImageChops.difference(image.convert('RGB'), black).getbbox() is None:
                return None, 'The image is completely black. There may be a parameter that cannot be specified, or an error may have occurred internally.'
            return image, None
        except Exception as e:
            print("Error generating image:", e)
            return None, f"Error generating image: {e}"
    else:
        print(f"Model {model_str} not found")
        return None, f"Model {model_str} not found"
@app.route('/', methods=['GET'])
def home():
    prompt = request.args.get('prompt', '')
    model = request.args.get('model', '')
    negative_prompt = request.args.get('Nprompt', None)
    noise = request.args.get('noise', None)
    cfg_scale = request.args.get('cfg_scale', None)
    num_inference_steps = request.args.get('steps', None)
    try:
        if cfg_scale is not None:
            cfg_scale = float(cfg_scale)
    except ValueError:
        return 'Invalid "cfg_scale" parameter. It should be a number.', 400
    try:
        if num_inference_steps is not None:
            num_inference_steps = int(num_inference_steps)
    except ValueError:
        return 'Invalid "steps" parameter. It should be an integer.', 400
    if not model:
        return 'Please provide a "model" query parameter in the URL.', 400
    if not prompt:
        return 'Please provide a "prompt" query parameter in the URL.', 400
    # Generate the image
    image, error_message = gen_fn(model, prompt, negative_prompt, noise, cfg_scale, num_inference_steps)
    if error_message:
        return error_message, 400
    if isinstance(image, Image.Image):  # Ensure the result is a PIL image
        # Save image to BytesIO object
        img_io = BytesIO()
        image.save(img_io, format='PNG')
        img_io.seek(0)
        return send_file(img_io, mimetype='image/png', as_attachment=False)
    return 'Failed to generate image.', 500
if __name__ == '__main__':
    # Launch the Flask app
    app.run(host='0.0.0.0', port=7860)
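# Example request (the model name is only a placeholder, not a tested value):
#   http://localhost:7860/?model=<user>/<model>&prompt=a+red+fox&noise=random&cfg_scale=7.5&steps=20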