PeterL1n committed
Commit edf024c
1 Parent(s): 7e82c0c

Update app.py

Files changed (1)
  1. app.py +5 -0
app.py CHANGED
@@ -26,12 +26,17 @@ unet.load_state_dict(load_file(hf_hub_download(repo, opts["4 Steps"][0])))
 pipe = StableDiffusionXLPipeline.from_pretrained(base, unet=unet, torch_dtype=dtype, variant="fp16").to(device, dtype)
 pipe.scheduler = EulerDiscreteScheduler.from_config(pipe.scheduler.config, timestep_spacing="trailing")

+with open("filter.txt") as f:
+    filter_words = {word for word in f.read().split("\n") if word}
+
 # Inference function.
 @spaces.GPU(enable_queue=True)
 def generate(prompt, option, progress=gr.Progress()):
     global step_loaded
     print(prompt, option)
     ckpt, step = opts[option]
+    if any(word in prompt for word in filter_words):
+        return None
     progress((0, step))
     if step != step_loaded:
         print(f"Switching checkpoint from {step_loaded} to {step}")