soiz committed
Commit c2fd988
1 Parent(s): 5db4e56

Update app.py

Files changed (1): app.py +1 -3
app.py CHANGED
@@ -38,8 +38,6 @@ def gen_fn(model_str, prompt, negative_prompt=None, noise=None, cfg_scale=None,
     try:
         # Construct the function call parameters dynamically
         call_params = [full_prompt]
-        if negative_prompt:
-            call_params.append(negative_prompt)
         if cfg_scale is not None:
             # Check if cfg_scale can be set
             if 'cfg_scale' not in models_load[model_str].parameters:
@@ -51,7 +49,7 @@ def gen_fn(model_str, prompt, negative_prompt=None, noise=None, cfg_scale=None,
                 return None, 'num_inference_steps parameter is not supported by the model'
             call_params.append(num_inference_steps)
 
-        result = models_load[model_str](*call_params)
+        result = models_load[model_str](*call_params, negative_prompt=negative_prompt)
 
         # Check if result is an image or a file path
         if isinstance(result, str): # Assuming result might be a file path
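
In short, the commit stops appending negative_prompt to the positional call_params list and instead passes it as a keyword argument when the loaded model is called. Below is a minimal sketch of that call-site change; the stub callable stands in for models_load[model_str] (whose real signature lives in app.py) and is an assumption used only to illustrate positional versus keyword passing.

# Minimal sketch, assuming a stand-in for models_load[model_str]; the real
# loaded model callable in app.py may accept different parameters.
def fake_model(prompt, cfg_scale=None, num_inference_steps=None, negative_prompt=None):
    # Echo back what was received so the argument binding is visible.
    return f"prompt={prompt!r}, negative_prompt={negative_prompt!r}"

call_params = ["a watercolor fox"]  # full_prompt plus any supported positional extras

# Before this commit, negative_prompt was appended to call_params, so it was sent
# positionally and could bind to the wrong parameter (here it would land in cfg_scale).
# After this commit, it is always passed by name, keeping the positional slots aligned.
result = fake_model(*call_params, negative_prompt="blurry, low quality")
print(result)  # prompt='a watercolor fox', negative_prompt='blurry, low quality'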