Spaces:
Running
on
Zero
Running
on
Zero
Commit
•
4a24fa3
1
Parent(s):
6f88d2c
Update app.py (#5)
Browse files- Update app.py (03b327763577e2d8e127af844ea9e040a784adbb)
Co-authored-by: yoinked <yoinked@users.noreply.huggingface.co>
app.py
CHANGED
@@ -2,7 +2,7 @@ import spaces
|
|
2 |
import os
|
3 |
import gc
|
4 |
import gradio as gr
|
5 |
-
import gradio_client as grcl
|
6 |
import numpy as np
|
7 |
import torch
|
8 |
import json
|
@@ -13,7 +13,7 @@ from PIL import Image, PngImagePlugin
|
|
13 |
from datetime import datetime
|
14 |
from diffusers.models import AutoencoderKL
|
15 |
from diffusers import StableDiffusionXLPipeline, StableDiffusionXLImg2ImgPipeline
|
16 |
-
GRAD_CLIENT = grcl.Client("https://yoinked-da-nsfw-checker.hf.space/")
|
17 |
logging.basicConfig(level=logging.INFO)
|
18 |
logger = logging.getLogger(__name__)
|
19 |
|
@@ -83,7 +83,11 @@ def generate(
|
|
83 |
upscale_by: float = 1.5,
|
84 |
add_quality_tags: bool = True,
|
85 |
progress=gr.Progress(track_tqdm=True),
|
|
|
86 |
):
|
|
|
|
|
|
|
87 |
generator = utils.seed_everything(seed)
|
88 |
|
89 |
width, height = utils.aspect_ratio_handler(
|
@@ -195,16 +199,16 @@ def generate(
|
|
195 |
|
196 |
def genwrap(*args, **kwargs):
|
197 |
ipth, mtd = generate(*args, **kwargs)
|
198 |
-
r = GRAD_CLIENT.predict(ipth, "chen-evangelion", 0.4, False, False, api_name="/classify")
|
199 |
-
ratings = val[0]
|
200 |
-
rating = rating['confidences']
|
201 |
-
highestval, classtype = -1, "aa"
|
202 |
-
for o in rating:
|
203 |
-
if o['confidence'] > highestval:
|
204 |
-
highestval = o['confidence']
|
205 |
-
classtype = o['label']
|
206 |
-
if classtype not in ["general", "sensitive"]: # i hate code
|
207 |
-
return "https://upload.wikimedia.org/wikipedia/commons/b/bf/Bucephala-albeola-010.jpg", mtd
|
208 |
return ipth, mtd
|
209 |
if torch.cuda.is_available():
|
210 |
pipe = load_pipeline(MODEL)
|
@@ -336,6 +340,8 @@ with gr.Blocks(css="style.css", theme="NoCrypt/miku@1.2.1") as demo:
|
|
336 |
step=1,
|
337 |
value=28,
|
338 |
)
|
|
|
|
|
339 |
with gr.Column(scale=3):
|
340 |
with gr.Blocks():
|
341 |
run_button = gr.Button("Generate", variant="primary")
|
@@ -399,6 +405,7 @@ with gr.Blocks(css="style.css", theme="NoCrypt/miku@1.2.1") as demo:
|
|
399 |
upscaler_strength,
|
400 |
upscale_by,
|
401 |
add_quality_tags,
|
|
|
402 |
],
|
403 |
outputs=[result, gr_metadata],
|
404 |
api_name="run",
|
|
|
2 |
import os
|
3 |
import gc
|
4 |
import gradio as gr
|
5 |
+
# import gradio_client as grcl
|
6 |
import numpy as np
|
7 |
import torch
|
8 |
import json
|
|
|
13 |
from datetime import datetime
|
14 |
from diffusers.models import AutoencoderKL
|
15 |
from diffusers import StableDiffusionXLPipeline, StableDiffusionXLImg2ImgPipeline
|
16 |
+
#GRAD_CLIENT = grcl.Client("https://yoinked-da-nsfw-checker.hf.space/")
|
17 |
logging.basicConfig(level=logging.INFO)
|
18 |
logger = logging.getLogger(__name__)
|
19 |
|
|
|
83 |
upscale_by: float = 1.5,
|
84 |
add_quality_tags: bool = True,
|
85 |
progress=gr.Progress(track_tqdm=True),
|
86 |
+
nsfw_neg=True
|
87 |
):
|
88 |
+
if nsfw_neg:
|
89 |
+
prompt += "general, "
|
90 |
+
negative_prompt += ", explicit, questionable, nude, naked, pussy, penis, uncensored" # mikudayo
|
91 |
generator = utils.seed_everything(seed)
|
92 |
|
93 |
width, height = utils.aspect_ratio_handler(
|
|
|
199 |
|
200 |
def genwrap(*args, **kwargs):
|
201 |
ipth, mtd = generate(*args, **kwargs)
|
202 |
+
#r = GRAD_CLIENT.predict(ipth, "chen-evangelion", 0.4, False, False, api_name="/classify")
|
203 |
+
#ratings = val[0]
|
204 |
+
#rating = rating['confidences']
|
205 |
+
#highestval, classtype = -1, "aa"
|
206 |
+
#for o in rating:
|
207 |
+
# if o['confidence'] > highestval:
|
208 |
+
# highestval = o['confidence']
|
209 |
+
# classtype = o['label']
|
210 |
+
#if classtype not in ["general", "sensitive"]: # i hate code
|
211 |
+
# return "https://upload.wikimedia.org/wikipedia/commons/b/bf/Bucephala-albeola-010.jpg", mtd
|
212 |
return ipth, mtd
|
213 |
if torch.cuda.is_available():
|
214 |
pipe = load_pipeline(MODEL)
|
|
|
340 |
step=1,
|
341 |
value=28,
|
342 |
)
|
343 |
+
with gr.Accordion(label="lewdie.... lewd lewdie...", open=False):
|
344 |
+
nsfwtoggle = gr.Checkbox(label="Anti-NSFW [dont disable this if korean]", value=True)
|
345 |
with gr.Column(scale=3):
|
346 |
with gr.Blocks():
|
347 |
run_button = gr.Button("Generate", variant="primary")
|
|
|
405 |
upscaler_strength,
|
406 |
upscale_by,
|
407 |
add_quality_tags,
|
408 |
+
nsfwtoggle
|
409 |
],
|
410 |
outputs=[result, gr_metadata],
|
411 |
api_name="run",
|