Update app.py
Browse files
app.py
CHANGED
@@ -22,8 +22,8 @@ device = "cuda"
|
|
22 |
#torch.backends.cudnn.benchmark = True
|
23 |
|
24 |
#When running locally, you won't have access to this, so you can remove this part
|
25 |
-
word_list_dataset = load_dataset("stabilityai/word-list", data_files="list.txt", use_auth_token=True)
|
26 |
-
word_list = word_list_dataset["train"]['text']
|
27 |
|
28 |
is_gpu_busy = False
|
29 |
def infer(prompt):
|
@@ -32,9 +32,9 @@ def infer(prompt):
|
|
32 |
steps = 50
|
33 |
scale = 7.5
|
34 |
#When running locally you can also remove this filter
|
35 |
-
for filter in word_list:
|
36 |
-
if re.search(rf"\b{filter}\b", prompt):
|
37 |
-
raise gr.Error("Unsafe content found. Please try again with different prompts.")
|
38 |
|
39 |
#generator = torch.Generator(device=device).manual_seed(seed)
|
40 |
#print("Is GPU busy? ", is_gpu_busy)
|
|
|
22 |
#torch.backends.cudnn.benchmark = True
|
23 |
|
24 |
#When running locally, you won't have access to this, so you can remove this part
|
25 |
+
# word_list_dataset = load_dataset("stabilityai/word-list", data_files="list.txt", use_auth_token=True)
|
26 |
+
# word_list = word_list_dataset["train"]['text']
|
27 |
|
28 |
is_gpu_busy = False
|
29 |
def infer(prompt):
|
|
|
32 |
steps = 50
|
33 |
scale = 7.5
|
34 |
#When running locally you can also remove this filter
|
35 |
+
# for filter in word_list:
|
36 |
+
# if re.search(rf"\b{filter}\b", prompt):
|
37 |
+
# raise gr.Error("Unsafe content found. Please try again with different prompts.")
|
38 |
|
39 |
#generator = torch.Generator(device=device).manual_seed(seed)
|
40 |
#print("Is GPU busy? ", is_gpu_busy)
|