import os
import subprocess
import sys

# Install Flask if not already installed.
# NOTE(review): installing dependencies at import time is fragile — prefer a
# requirements file. Kept for compatibility, but invoked through the current
# interpreter (`sys.executable -m pip`) instead of a bare shell `pip`, which
# may belong to a different Python installation.
return_code = subprocess.run([sys.executable, '-m', 'pip', 'install', 'flask']).returncode
if return_code != 0:
    raise RuntimeError("Failed to install Flask")

import gradio as gr
from random import randint
from all_models import models
from flask import Flask, request, send_file
from io import BytesIO
from PIL import Image

app = Flask(__name__)

# Global cache of loaded model interfaces, keyed by model name.
# Populated lazily by load_model().
models_load = {}
def load_model(model_name):
    """Ensure *model_name* is present in the module-level ``models_load`` cache.

    On a successful ``gr.load`` the real interface is cached. If loading
    fails, a stub interface that always yields ``None`` is cached instead,
    so later requests for the same model do not retry the failing load.
    """
    global models_load
    if model_name in models_load:
        return  # already cached — nothing to do
    try:
        models_load[model_name] = gr.load(f'models/{model_name}')
    except Exception as error:
        print(f"Error loading model {model_name}: {error}")
        # Cache a do-nothing placeholder so the failure is not retried.
        models_load[model_name] = gr.Interface(lambda txt: None, ['text'], ['image'])
def gen_fn(model_str, prompt, negative_prompt=None, noise=None, cfg_scale=None, num_inference_steps=None):
    """Generate an image with the named model.

    Returns a ``PIL.Image.Image`` on success, otherwise ``None`` (unknown
    model, model error, or an unusable result). ``noise == "random"`` is
    replaced with a random numeric string appended to the prompt so repeated
    identical prompts still vary.
    """
    if model_str not in models_load:
        load_model(model_str)  # lazy-load on first use
    if model_str not in models_load:
        print(f"Model {model_str} not found")
        return None

    if noise == "random":
        noise = str(randint(0, 99999999999))
    full_prompt = f'{prompt} {noise}' if noise else prompt

    try:
        if negative_prompt:
            full_prompt += f' -{negative_prompt}'
        # Build the keyword arguments for the model call, including the
        # optional tuning knobs only when the caller supplied them.
        call_params = {'text': full_prompt}
        for key, value in (('cfg_scale', cfg_scale),
                           ('num_inference_steps', num_inference_steps)):
            if value is not None:
                call_params[key] = value
        result = models_load[model_str](**call_params)

        # The model may hand back a ready image or a path to one on disk.
        if isinstance(result, Image.Image):
            return result
        if isinstance(result, str):
            if os.path.exists(result):
                return Image.open(result)
            print(f"File path not found: {result}")
            return None
        print("Result is not an image:", type(result))
        return None
    except Exception as e:
        print("Error generating image:", e)
        return None
@app.route('/', methods=['GET'])
def home():
    """GET endpoint: generate an image from query parameters.

    Query string: ``model`` and ``prompt`` (required), ``Nprompt`` (negative
    prompt), ``noise``, ``cfg_scale`` (float), ``steps`` (int). Responds with
    a PNG on success, 400 on bad parameters, 500 on generation failure.
    """
    prompt = request.args.get('prompt', '')
    model = request.args.get('model', '')
    negative_prompt = request.args.get('Nprompt', None)
    noise = request.args.get('noise', None)
    cfg_scale = request.args.get('cfg_scale', None)
    num_inference_steps = request.args.get('steps', None)

    # Validate and convert the optional numeric parameters.
    if cfg_scale is not None:
        try:
            cfg_scale = float(cfg_scale)
        except ValueError:
            return 'Invalid "cfg_scale" parameter. It should be a number.', 400
    if num_inference_steps is not None:
        try:
            num_inference_steps = int(num_inference_steps)
        except ValueError:
            return 'Invalid "steps" parameter. It should be an integer.', 400

    # Both model and prompt are mandatory.
    if not model:
        return 'Please provide a "model" query parameter in the URL.', 400
    if not prompt:
        return 'Please provide a "prompt" query parameter in the URL.', 400

    image = gen_fn(model, prompt, negative_prompt, noise, cfg_scale, num_inference_steps)
    if not isinstance(image, Image.Image):  # generation failed or returned junk
        return 'Failed to generate image.', 500

    # Stream the PIL image back as an in-memory PNG.
    buffer = BytesIO()
    image.save(buffer, format='PNG')
    buffer.seek(0)
    return send_file(buffer, mimetype='image/png', as_attachment=False)
if __name__ == '__main__':
    # Launch Flask app
    # NOTE(review): 0.0.0.0 exposes the server on all interfaces; 7860 looks
    # like the Gradio/Spaces default port — confirm the deployment target.
    app.run(host='0.0.0.0', port=7860)  # Run Flask app