5dimension committed
Commit: b787e2d
Parent(s): f81c81e

Update app.py


better results

Files changed (1): app.py (+1, -4)
app.py CHANGED
@@ -162,10 +162,7 @@ def run(prompt, steps, width, height, images, scale):
     image_features /= image_features.norm(dim=-1, keepdim=True)
     query = image_features.cpu().detach().numpy().astype("float32")
     unsafe = is_unsafe(safety_model,query,0.5)
-    if(not unsafe):
-        all_samples_images.append(image_vector)
-    else:
-        return(None,None,"Sorry, potential NSFW content was detected on your outputs by our NSFW detection model. Try again with different prompts. If you feel your prompt was not supposed to give NSFW outputs, this may be due to a bias in the model. Read more about biases in the Biases Acknowledgment section below.")
+    all_samples_images.append(image_vector)
     #Image.fromarray(x_sample.astype(np.uint8)).save(os.path.join(sample_path, f"{base_count:04}.png"))
     base_count += 1
     all_samples.append(x_samples_ddim)
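
For context, the removed hunk is the NSFW gate: before this commit, an unsafe score at or above 0.5 triggered an early return with a warning message instead of the generated image; after it, is_unsafe is still evaluated but its result no longer affects the output, and the image vector is appended unconditionally. The sketch below is a simplified, self-contained rendering of that before/after behaviour; the dummy safety_model, the is_unsafe stub, and the collect_sample wrapper are illustrative assumptions, not code from this Space.

import numpy as np

def is_unsafe(safety_model, query, threshold):
    # Hypothetical stand-in for app.py's is_unsafe helper: the real one
    # scores normalized CLIP image features with a trained safety model.
    return float(safety_model(query)) >= threshold

def collect_sample(image_features, image_vector, safety_model,
                   all_samples_images, gate_nsfw=False):
    # L2-normalize the CLIP image features, as in the unchanged context lines.
    image_features = image_features / np.linalg.norm(image_features, axis=-1, keepdims=True)
    query = image_features.astype("float32")
    unsafe = is_unsafe(safety_model, query, 0.5)
    if gate_nsfw and unsafe:
        # Behaviour before b787e2d: refuse the output and return a warning.
        return None, None, "Sorry, potential NSFW content was detected ..."
    # Behaviour after b787e2d: the flag is computed but the image is kept.
    all_samples_images.append(image_vector)
    return image_vector, unsafe, None

# Example: a dummy safety model that scores every image as safe (0.0).
collected = []
collect_sample(np.random.rand(1, 512), "sample_0000.png", lambda q: 0.0, collected)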