Uthar committed on
Commit 0ed58ab
1 Parent(s): f629330

Update app.py

Files changed (1)
  1. app.py +65 -20
app.py CHANGED
@@ -4,9 +4,21 @@ from externalmod import gr_Interface_load, save_image, randomize_seed
 import asyncio
 import os
 from threading import RLock
+from datetime import datetime
+
+
+preSetPrompt = "cute tall slender athletic 20+ caucasian woman. gorgeous face. perky tits. sly smile. lifting shirt. explicit pose. artistic. photorealistic. cinematic. f1.4"
+# preSetPrompt = "cute tall slender athletic 20+ nude caucasian woman. gorgeous face. perky tits. gaping outie pussy. pussy juice. sly smile. explicit pose. artistic. photorealistic. cinematic. f1.4"
+negPreSetPrompt = "[deformed | disfigured], poorly drawn, [bad : wrong] anatomy, [extra | missing | floating | disconnected] limb, (mutated hands and fingers), blurry, text, fuzziness"
+
 lock = RLock()
 HF_TOKEN = os.environ.get("HF_TOKEN") if os.environ.get("HF_TOKEN") else None # If private or gated models aren't used, ENV setting is unnecessary.
 
+def get_current_time():
+    now = datetime.now()
+    now2 = now
+    current_time = now2.strftime("%y-%m-%d %H:%M:%S")
+    return current_time
 
 def load_fn(models):
     global models_load
@@ -26,7 +38,7 @@ load_fn(models)
 
 num_models = 6
 max_images = 6
-inference_timeout = 300
+inference_timeout = 400
 default_models = models[:num_models]
 MAX_SEED = 2**32-1
 
@@ -56,8 +68,7 @@ async def infer(model_str, prompt, nprompt="", height=0, width=0, steps=0, cfg=0
     if cfg > 0: cfg = kwargs["guidance_scale"] = cfg
     if seed == -1: kwargs["seed"] = randomize_seed()
     else: kwargs["seed"] = seed
-    task = asyncio.create_task(asyncio.to_thread(models_load[model_str].fn,
-                               prompt=prompt, negative_prompt=nprompt, **kwargs, token=HF_TOKEN))
+    task = asyncio.create_task(asyncio.to_thread(models_load[model_str].fn, prompt=prompt, negative_prompt=nprompt, **kwargs, token=HF_TOKEN))
     await asyncio.sleep(0)
     try:
         result = await asyncio.wait_for(task, timeout=timeout)
@@ -74,7 +85,10 @@ async def infer(model_str, prompt, nprompt="", height=0, width=0, steps=0, cfg=0
         raise Exception() from e
     if task.done() and result is not None and not isinstance(result, tuple):
         with lock:
-            png_path = "img.png"
+            # png_path = "img.png"
+            # png_path = get_current_time() + "_" + model_str.replace("/", "_") + ".png"
+            # png_path = model_str.replace("/", "_") + " - " + prompt + " - " + get_current_time() + ".png"
+            png_path = model_str.replace("/", "_") + " - " + get_current_time() + ".png"
             image = save_image(result, png_path, model_str, prompt, nprompt, height, width, steps, cfg, seed)
         return image
     return None
@@ -105,19 +119,46 @@ def add_gallery(image, model_str, gallery):
 CSS="""
 .gradio-container { max-width: 1200px; margin: 0 auto; !important; }
 .output { width=112px; height=112px; max_width=112px; max_height=112px; !important; }
-.gallery { min_width=512px; min_height=512px; max_height=1024px; !important; }
+.gallery { min_width=512px; min_height=512px; max_height=512px; !important; }
 .guide { text-align: center; !important; }
 """
 
+js_func = """
+function refresh() {
+    const url = new URL(window.location);
+
+    if (url.searchParams.get('__theme') !== 'dark') {
+        url.searchParams.set('__theme', 'dark');
+        window.location.href = url.href;
+    }
+}
+"""
+
+js_AutoSave="""
+
+console.log("Yo");
+
+var img1 = document.querySelector("div#component-355 .svelte-1kpcxni button.svelte-1kpcxni .svelte-1kpcxni img"),
+observer = new MutationObserver((changes) => {
+    changes.forEach(change => {
+        if(change.attributeName.includes('src')){
+            console.log(img1.src);
+            document.querySelector("div#component-355 .svelte-1kpcxni .svelte-sr71km a.svelte-1s8vnbx button").click();
+        }
+    });
+});
+observer.observe(img1, {attributes : true});
+
+"""
 
 with gr.Blocks(theme='NoCrypt/miku@>=1.2.2', fill_width=True, css=CSS) as demo:
-    gr.HTML(
-    )
-    with gr.Tab('Pr0n Diffusion'):
+# with gr.Blocks(theme='JohnSmith9982/small_and_pretty', fill_width=True, css=CSS, js=js_func) as demo:
+    gr.HTML("")
+    with gr.Tab('6 Models'):
         with gr.Column(scale=2):
             with gr.Group():
-                txt_input = gr.Textbox(label='Your prompt:', lines=4)
-                neg_input = gr.Textbox(label='Negative prompt:', lines=1)
+                txt_input = gr.Textbox(label='Your prompt:', value=preSetPrompt, lines=3, autofocus=1)
+                neg_input = gr.Textbox(label='Negative prompt:', value=negPreSetPrompt, lines=1)
                 with gr.Accordion("Advanced", open=False, visible=True):
                     with gr.Row():
                         width = gr.Slider(label="Width", info="If 0, the default value is used.", maximum=1216, step=32, value=0)
@@ -129,11 +170,11 @@ with gr.Blocks(theme='NoCrypt/miku@>=1.2.2', fill_width=True, css=CSS) as demo:
                         seed_rand = gr.Button("Randomize Seed 🎲", size="sm", variant="secondary")
                         seed_rand.click(randomize_seed, None, [seed], queue=False)
             with gr.Row():
-                gen_button = gr.Button(f'Generate up to {int(num_models)} images in up to 3 minutes total', variant='primary', scale=3)
-                random_button = gr.Button(f'Random {int(num_models)} 🎲', variant='secondary', scale=1)
+                gen_button = gr.Button(f'Generate up to {int(num_models)} images', variant='primary', scale=3)
+                random_button = gr.Button(f'Randomize Models', variant='secondary', scale=1)
                 #stop_button = gr.Button('Stop', variant='stop', interactive=False, scale=1)
                 #gen_button.click(lambda: gr.update(interactive=True), None, stop_button)
-            gr.Markdown("")
+            gr.Markdown("", elem_classes="guide")
 
         with gr.Column(scale=1):
             with gr.Group():
@@ -145,7 +186,7 @@ with gr.Blocks(theme='NoCrypt/miku@>=1.2.2', fill_width=True, css=CSS) as demo:
 
         with gr.Column(scale=2):
             gallery = gr.Gallery(label="Output", show_download_button=True, elem_classes="gallery",
-                                 interactive=False, show_share_button=True, container=True, format="png",
+                                 interactive=False, show_share_button=False, container=True, format="png",
                                  preview=True, object_fit="cover", columns=2, rows=2)
 
         for m, o in zip(current_models, output):
@@ -166,8 +207,10 @@ with gr.Blocks(theme='NoCrypt/miku@>=1.2.2', fill_width=True, css=CSS) as demo:
         with gr.Column(scale=2):
             model_choice2 = gr.Dropdown(models, label='Choose model', value=models[0])
             with gr.Group():
-                txt_input2 = gr.Textbox(label='Your prompt:', lines=4)
-                neg_input2 = gr.Textbox(label='Negative prompt:', lines=1)
+                # global preSetPrompt
+                # global negPreSetPrompt
+                txt_input2 = gr.Textbox(label='Your prompt:', value = preSetPrompt, lines=3, autofocus=1)
+                neg_input2 = gr.Textbox(label='Negative prompt:', value=negPreSetPrompt, lines=1)
                 with gr.Accordion("Advanced", open=False, visible=True):
                     with gr.Row():
                         width2 = gr.Slider(label="Width", info="If 0, the default value is used.", maximum=1216, step=32, value=0)
@@ -176,11 +219,11 @@ with gr.Blocks(theme='NoCrypt/miku@>=1.2.2', fill_width=True, css=CSS) as demo:
                         steps2 = gr.Slider(label="Number of inference steps", info="If 0, the default value is used.", maximum=100, step=1, value=0)
                         cfg2 = gr.Slider(label="Guidance scale", info="If 0, the default value is used.", maximum=30.0, step=0.1, value=0)
                         seed2 = gr.Slider(label="Seed", info="Randomize Seed if -1.", minimum=-1, maximum=MAX_SEED, step=1, value=-1)
-                        seed_rand2 = gr.Button("Randomize Seed 🎲", size="sm", variant="secondary")
+                        seed_rand2 = gr.Button("Randomize Seed", size="sm", variant="secondary")
                        seed_rand2.click(randomize_seed, None, [seed2], queue=False)
                 num_images = gr.Slider(1, max_images, value=max_images, step=1, label='Number of images')
             with gr.Row():
-                gen_button2 = gr.Button('Generate', variant='primary', scale=2)
+                gen_button2 = gr.Button('Let the machine halucinate', variant='primary', scale=2)
                 #stop_button2 = gr.Button('Stop', variant='stop', interactive=False, scale=1)
                 #gen_button2.click(lambda: gr.update(interactive=True), None, stop_button2)
 
@@ -206,8 +249,10 @@ with gr.Blocks(theme='NoCrypt/miku@>=1.2.2', fill_width=True, css=CSS) as demo:
                                 concurrency_limit=None, queue=False) # Be sure to delete ", queue=False" when activating the stop button
             o.change(add_gallery, [o, model_choice2, gallery2], [gallery2])
         #stop_button2.click(lambda: gr.update(interactive=False), None, stop_button2, cancels=[gen_event2])
-
+
+    # gr.Markdown(js_AutoSave)
     gr.Markdown("")
 
-#demo.queue(default_concurrency_limit=200, max_size=200)
+# demo.queue(default_concurrency_limit=200, max_size=200)
 demo.launch(show_api=False, max_threads=400)
+# demo.launch(show_api=False, max_threads=400, js=js_AutoSave)
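
For context on the infer() changes above (the single-line asyncio.to_thread task, the inference_timeout raised from 300 to 400, and the timestamped png_path), here is a minimal, self-contained sketch of the same pattern. It is an illustration only: fake_model_call, timestamped_png_path, and infer_once are hypothetical stand-ins, not the Space's models_load[...] client or its save_image helper; only the asyncio wiring and the strftime format mirror the diff.

```python
# Sketch: run a blocking inference call on a worker thread, bound it with a
# timeout, and build a timestamped output filename, as the updated infer() does.
import asyncio
import time
from datetime import datetime

inference_timeout = 400  # seconds, matching the value this commit sets

def fake_model_call(prompt: str, negative_prompt: str = "", seed: int = 0) -> str:
    """Hypothetical stand-in for a slow, blocking text-to-image call."""
    time.sleep(1)
    return f"<image for {prompt!r} (seed={seed})>"

def timestamped_png_path(model_str: str) -> str:
    # Same shape as the commit: "<model id with '/' replaced> - <yy-mm-dd HH:MM:SS>.png"
    return model_str.replace("/", "_") + " - " + datetime.now().strftime("%y-%m-%d %H:%M:%S") + ".png"

async def infer_once(model_str: str, prompt: str, timeout: float = inference_timeout):
    # Push the blocking call onto a thread and wrap it in a task so it can be
    # cancelled if it exceeds the timeout.
    task = asyncio.create_task(asyncio.to_thread(fake_model_call, prompt, seed=1))
    try:
        result = await asyncio.wait_for(task, timeout=timeout)
    except asyncio.TimeoutError:
        task.cancel()
        return None
    print("would save to:", timestamped_png_path(model_str))
    return result

if __name__ == "__main__":
    print(asyncio.run(infer_once("stabilityai/example-model", "a test prompt")))
```

One practical note on the new filename scheme: the "%H:%M:%S" format puts colons into the path, which is fine on the Linux containers Hugging Face Spaces run on but would be rejected on Windows filesystems.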
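The UI hunks are easier to follow against a stripped-down Blocks layout. The sketch below shows the same wiring shape introduced here: a prompt Textbox pre-filled from a module-level default (standing in for preSetPrompt), a seed Slider, and a "Randomize Seed" button whose click handler bypasses the queue. It assumes gradio is installed; DEFAULT_PROMPT and the local randomize_seed are illustrative, since the real Space imports randomize_seed from externalmod.

```python
# Minimal Gradio sketch of the prompt/seed wiring touched by this commit.
import random
import gradio as gr

MAX_SEED = 2**32 - 1
DEFAULT_PROMPT = "a placeholder prompt"  # stands in for preSetPrompt

def randomize_seed() -> int:
    # Illustrative helper; the Space uses externalmod.randomize_seed instead.
    return random.randint(0, MAX_SEED)

with gr.Blocks() as demo:
    # value= pre-fills the box the same way the commit's preSetPrompt default does.
    txt_input = gr.Textbox(label="Your prompt:", value=DEFAULT_PROMPT, lines=3)
    seed = gr.Slider(label="Seed", info="Randomize Seed if -1.", minimum=-1, maximum=MAX_SEED, step=1, value=-1)
    seed_rand = gr.Button("Randomize Seed 🎲", size="sm", variant="secondary")
    # Same wiring as the diff: the handler returns a new value for the seed
    # slider and skips the queue so the control updates immediately.
    seed_rand.click(randomize_seed, None, [seed], queue=False)

if __name__ == "__main__":
    demo.launch()
```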