soiz committed on
Commit
e505e53
β€’
1 Parent(s): ca0e7fc

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +43 -89
app.py CHANGED
@@ -26,102 +26,56 @@ def load_model(model_name):
26
  models_load[model_name] = m
27
  except Exception as error:
28
  print(f"Error loading model {model_name}: {error}")
29
- models_load[model_name] = gr.Interface(lambda txt: None, ['text'], ['image'])
30
 
31
  def gen_fn(model_str, prompt, negative_prompt=None, noise=None, cfg_scale=None, num_inference_steps=None):
32
  if model_str not in models_load:
33
  load_model(model_str) # γƒ’γƒ‡γƒ«γŒγƒ­γƒΌγƒ‰γ•γ‚Œγ¦γ„γͺγ„ε ΄εˆγ―γƒ­γƒΌγƒ‰γ™γ‚‹
34
 
35
- if model_str in models_load:
36
- if noise == "random":
37
- noise = str(randint(0, 99999999999))
38
- full_prompt = f'{prompt} {noise}' if noise else prompt
39
- try:
40
- # Construct the function call parameters dynamically
41
- call_params = {'prompt': full_prompt}
42
- if negative_prompt:
43
- call_params['negative_prompt'] = negative_prompt
44
- if cfg_scale is not None:
45
- # Check if cfg_scale can be set
46
- if 'cfg_scale' not in models_load[model_str].parameters:
47
- return None, 'cfg_scale parameter is not supported by the model'
48
- call_params['cfg_scale'] = cfg_scale
49
- if num_inference_steps is not None:
50
- # Check if num_inference_steps can be set
51
- if 'num_inference_steps' not in models_load[model_str].parameters:
52
- return None, 'num_inference_steps parameter is not supported by the model'
53
- call_params['num_inference_steps'] = num_inference_steps
54
-
55
- result = models_load[model_str].predict(**call_params)
56
-
57
- # Check if result is an image or a file path
58
- if isinstance(result, str): # Assuming result might be a file path
59
- if os.path.exists(result):
60
- image = Image.open(result)
61
- else:
62
- print(f"File path not found: {result}")
63
- return None, 'File path not found'
64
- elif isinstance(result, Image.Image):
65
- image = result
66
- else:
67
- print("Result is not an image:", type(result))
68
- return None, 'Result is not an image'
69
-
70
- # Check if the image is completely black
71
- black = Image.new('RGB', image.size, (0, 0, 0))
72
- if ImageChops.difference(image, black).getbbox() is None:
73
- return None, 'The image is completely black. There may be a parameter that cannot be specified, or an error may have occurred internally.'
74
-
75
- return image, None
76
-
77
- except Exception as e:
78
- print("Error generating image:", e)
79
- return None, f"Error generating image: {e}"
80
- else:
81
- print(f"Model {model_str} not found")
82
- return None, f"Model {model_str} not found"
83
 
84
- @app.route('/', methods=['GET'])
85
- def home():
86
- prompt = request.args.get('prompt', '')
87
- model = request.args.get('model', '')
88
- negative_prompt = request.args.get('Nprompt', None)
89
- noise = request.args.get('noise', None)
90
- cfg_scale = request.args.get('cfg_scale', None)
91
- num_inference_steps = request.args.get('steps', None)
92
-
93
  try:
 
 
 
 
94
  if cfg_scale is not None:
95
- cfg_scale = float(cfg_scale)
96
- except ValueError:
97
- return 'Invalid "cfg_scale" parameter. It should be a number.', 400
98
-
99
- try:
100
  if num_inference_steps is not None:
101
- num_inference_steps = int(num_inference_steps)
102
- except ValueError:
103
- return 'Invalid "steps" parameter. It should be an integer.', 400
104
-
105
- if not model:
106
- return 'Please provide a "model" query parameter in the URL.', 400
 
 
 
 
 
 
107
 
108
- if not prompt:
109
- return 'Please provide a "prompt" query parameter in the URL.', 400
110
-
111
- # Generate the image
112
- image, error_message = gen_fn(model, prompt, negative_prompt, noise, cfg_scale, num_inference_steps)
113
- if error_message:
114
- return error_message, 400
115
-
116
- if isinstance(image, Image.Image): # Ensure the result is a PIL image
117
- # Save image to BytesIO object
118
- img_io = BytesIO()
119
- image.save(img_io, format='PNG')
120
- img_io.seek(0)
121
- return send_file(img_io, mimetype='image/png', as_attachment=False)
122
-
123
- return 'Failed to generate image.', 500
124
-
125
- if __name__ == '__main__':
126
- # Launch Flask app
127
- app.run(host='0.0.0.0', port=7860) # Run Flask app
 
26
  models_load[model_name] = m
27
  except Exception as error:
28
  print(f"Error loading model {model_name}: {error}")
29
+ models_load[model_name] = None # Set to None if loading fails
30
 
31
  def gen_fn(model_str, prompt, negative_prompt=None, noise=None, cfg_scale=None, num_inference_steps=None):
32
  if model_str not in models_load:
33
  load_model(model_str) # γƒ’γƒ‡γƒ«γŒγƒ­γƒΌγƒ‰γ•γ‚Œγ¦γ„γͺγ„ε ΄εˆγ―γƒ­γƒΌγƒ‰γ™γ‚‹
34
 
35
+ model = models_load.get(model_str)
36
+ if model is None:
37
+ return None, f"Model {model_str} not found or failed to load"
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
38
 
39
+ if noise == "random":
40
+ noise = str(randint(0, 99999999999))
41
+ full_prompt = f'{prompt} {noise}' if noise else prompt
 
 
 
 
 
 
42
  try:
43
+ # Construct the function call parameters dynamically
44
+ call_params = {'prompt': full_prompt}
45
+ if negative_prompt:
46
+ call_params['negative_prompt'] = negative_prompt
47
  if cfg_scale is not None:
48
+ # Check if cfg_scale can be set
49
+ if 'cfg_scale' not in model.parameters:
50
+ return None, 'cfg_scale parameter is not supported by the model'
51
+ call_params['cfg_scale'] = cfg_scale
 
52
  if num_inference_steps is not None:
53
+ # Check if num_inference_steps can be set
54
+ if 'num_inference_steps' not in model.parameters:
55
+ return None, 'num_inference_steps parameter is not supported by the model'
56
+ call_params['num_inference_steps'] = num_inference_steps
57
+
58
+ # Ensure that model's call method is used correctly
59
+ if hasattr(model, 'predict'):
60
+ result = model.predict(**call_params)
61
+ elif hasattr(model, 'process'):
62
+ result = model.process(**call_params)
63
+ else:
64
+ return None, f"Model {model_str} does not have a callable method"
65
 
66
+ # Check if result is an image or a file path
67
+ if isinstance(result, str): # Assuming result might be a file path
68
+ if os.path.exists(result):
69
+ image = Image.open(result)
70
+ else:
71
+ print(f"File path not found: {result}")
72
+ return None, 'File path not found'
73
+ elif isinstance(result, Image.Image):
74
+ image = result
75
+ else:
76
+ print("Result is not an image:", type(result))
77
+ return None, 'Result is not an image'
78
+
79
+ # Check if the image is completely black
80
+ black = Image.new('RGB', image.size, (0, 0, 0))
81
+ if ImageChops.difference(image, black).get