apolinario committed on
Commit
8dfb33f
1 Parent(s): 5ea000a

Better NSFW wording

Browse files
Files changed (1) hide show
  1. app.py +1 -1
app.py CHANGED
@@ -114,7 +114,7 @@ def run(prompt, steps, width, height, images, scale):
114
  if(sims.max()<18):
115
  all_samples_images.append(image_vector)
116
  else:
117
- return(None,None,"Sorry, NSFW content was detected on your outputs. Try again with different prompts. If you feel your prompt was not supposed to give NSFW outputs, this may be due to a bias in the model. Read more about biases in the Biases Acknowledgment section below.")
118
  #Image.fromarray(x_sample.astype(np.uint8)).save(os.path.join(sample_path, f"{base_count:04}.png"))
119
  base_count += 1
120
  all_samples.append(x_samples_ddim)
 
114
  if(sims.max()<18):
115
  all_samples_images.append(image_vector)
116
  else:
117
+ return(None,None,"Sorry, potential NSFW content was detected on your outputs by our NSFW detection model. Try again with different prompts. If you feel your prompt was not supposed to give NSFW outputs, this may be due to a bias in the model. Read more about biases in the Biases Acknowledgment section below.")
118
  #Image.fromarray(x_sample.astype(np.uint8)).save(os.path.join(sample_path, f"{base_count:04}.png"))
119
  base_count += 1
120
  all_samples.append(x_samples_ddim)