
pcuenq and pngwn committed
Commit a43251a β€’ 1 Parent(s): e15494c

revert changes (#1917)


- Update app.py (adac32512de47ff3d9b752640101f3cc7cd6a780)
- Update README.md (9704a8e680b1b9d35205c96e85fba5c0799fffe9)


Co-authored-by: Pete <pngwn@users.noreply.huggingface.co>

Files changed (2)
  1. README.md +1 -1
  2. app.py +49 -58
README.md CHANGED
@@ -4,7 +4,7 @@ emoji: πŸ‹οΈ #πŸ‘•πŸŒŸ
colorFrom: red
colorTo: blue
sdk: gradio
- sdk_version: 4.36.0
+ sdk_version: 3.45.1
app_file: app.py
license: mit
disable_embedding: true
app.py CHANGED
@@ -84,14 +84,7 @@ word_list = word_list_dataset["train"]['text']

#gradio.helpers.CACHED_FOLDER="/data/cache"

-
- def infer(
-     prompt,
-     negative="low_quality",
-     scale=7,
-     style_name=None,
-     profile: gr.OAuthProfile | None = None,
- ):
+ def infer(prompt, negative="low_quality", scale=7, style_name=None, profile: gr.OAuthProfile | None = None):
    for filter in word_list:
        if re.search(rf"\b{filter}\b", prompt):
            raise gr.Error("Please try again with a different prompt")
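The retained body of `infer` (the context lines above) screens prompts against a word list with a word-boundary regex before anything is sent to the backend. A minimal, dependency-free sketch of that filtering pattern; the `word_list` below is a made-up placeholder, and a plain `ValueError` stands in for the app's `gr.Error`:

```python
import re

# Hypothetical stand-in for the Space's word_list (the real list is loaded
# from a private dataset at startup).
word_list = ["badword", "another banned phrase"]

def check_prompt(prompt: str) -> None:
    """Raise if the prompt contains any filtered entry as a whole word."""
    for filter_word in word_list:
        # \b anchors the match at word boundaries; re.escape (an addition in
        # this sketch) keeps regex metacharacters in the list from misfiring.
        if re.search(rf"\b{re.escape(filter_word)}\b", prompt):
            raise ValueError("Please try again with a different prompt")

check_prompt("A serious capybara at work, wearing a suit")  # passes silently
```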
@@ -111,10 +104,11 @@ def infer(

    for prediction in json_data["predictions"]:
        for image in prediction["images"]:
-             pil_image = Image.open(BytesIO(base64.b64decode(image)))
-             images.append(pil_image)
+             image_b64 = (f"data:image/jpeg;base64,{image}")
+             images.append(image_b64)

            if profile is not None: # avoid conversion on non-logged-in users
+                 pil_image = Image.open(BytesIO(base64.b64decode(image)))
                user_history.save_image( # save images + metadata to user history
                    label=prompt,
                    image=pil_image,
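This is the substance of the revert inside `infer`: each base64 payload is now wrapped as a `data:image/jpeg;base64,...` URI and passed to the gallery as a string (the handlers below use `postprocess=False`), while the PIL decode only happens when a logged-in user's history has to be written. A small, self-contained sketch of the two paths, assuming the JPEG payload arrives base64-encoded as in the Space's endpoint response:

```python
import base64
from io import BytesIO

from PIL import Image

def to_gallery_item(image_b64: str) -> str:
    # Cheap path: hand the browser a data URI and skip any server-side decode.
    return f"data:image/jpeg;base64,{image_b64}"

def to_history_image(image_b64: str) -> Image.Image:
    # Expensive path: decode to a PIL image only when it must be persisted
    # to the logged-in user's history.
    return Image.open(BytesIO(base64.b64decode(image_b64)))
```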
@@ -255,11 +249,32 @@ css = """
block = gr.Blocks()

examples = [
-     ["A serious capybara at work, wearing a suit", "low_quality", 7],
-     ["A Squirtle fine dining with a view to the London Eye", "low_quality", 7],
-     ["A tamale food cart in front of a Japanese Castle", "low_quality", 7],
-     ["a graffiti of a robot serving meals to people", "low_quality", 7],
-     ["a beautiful cabin in Attersee, Austria, 3d animation style", "low_quality", 7],
+     [
+         "A serious capybara at work, wearing a suit",
+         None,
+         None
+     ],
+     [
+         'A Squirtle fine dining with a view to the London Eye',
+         None,
+         None
+     ],
+     [
+         'A tamale food cart in front of a Japanese Castle',
+         None,
+         None
+     ],
+     [
+         'a graffiti of a robot serving meals to people',
+         None,
+         None
+     ],
+     [
+         'a beautiful cabin in Attersee, Austria, 3d animation style',
+         None,
+         None
+     ],
+
]

@@ -318,19 +333,19 @@ with block:
            </div>
        """
    )
-
-     with gr.Row(elem_id="prompt-container", equal_height=True):
-         text = gr.Textbox(
-             label="Enter your prompt",
-             show_label=False,
-             max_lines=1,
-             placeholder="Enter your prompt",
-             elem_id="prompt-text-input",
-         )
-         btn = gr.Button("Generate", scale=0, elem_id="gen-button")
+
+     with gr.Row(elem_id="prompt-container").style(mobile_collapse=False, equal_height=True):
+         text = gr.Textbox(
+             label="Enter your prompt",
+             show_label=False,
+             max_lines=1,
+             placeholder="Enter your prompt",
+             elem_id="prompt-text-input",
+         )
+         btn = gr.Button("Generate", scale=0, elem_id="gen-button")

    gallery = gr.Gallery(
-         label="Generated images", show_label=False, elem_id="gallery", rows=2, columns=2
+         label="Generated images", show_label=False, elem_id="gallery", grid=[2]
    )

@@ -357,40 +372,16 @@ with block:
        label="Guidance Scale", minimum=0, maximum=50, value=7.5, step=0.1
    )

-     ex = gr.Examples(
-         examples=examples,
-         fn=infer,
-         inputs=[text, negative, guidance_scale],
-         outputs=[gallery, community_group],
-         cache_examples=True,
-     )
-     negative.submit(
-         infer,
-         inputs=[text, negative, guidance_scale, style_selection],
-         outputs=[gallery, community_group],
-         concurrency_id="infer",
-         concurrency_limit=8,
-     )
-     text.submit(
-         infer,
-         inputs=[text, negative, guidance_scale, style_selection],
-         outputs=[gallery, community_group],
-         concurrency_id="infer",
-         concurrency_limit=8,
-     )
-     btn.click(
-         infer,
-         inputs=[text, negative, guidance_scale, style_selection],
-         outputs=[gallery, community_group],
-         concurrency_id="infer",
-         concurrency_limit=8,
-     )
-
+     ex = gr.Examples(examples=examples, fn=infer, inputs=[text, negative, guidance_scale], outputs=[gallery, community_group], cache_examples=True, postprocess=False)
+     negative.submit(infer, inputs=[text, negative, guidance_scale, style_selection], outputs=[gallery, community_group], postprocess=False)
+     text.submit(infer, inputs=[text, negative, guidance_scale, style_selection], outputs=[gallery, community_group], postprocess=False)
+     btn.click(infer, inputs=[text, negative, guidance_scale, style_selection], outputs=[gallery, community_group], postprocess=False)
+
    share_button.click(
        None,
        [],
        [],
-         js=share_js,
+         _js=share_js,
    )
    gr.HTML(
        """
@@ -417,5 +408,5 @@ with gr.Blocks(css=css) as block_with_history:
    with gr.Tab("Past generations"):
        user_history.render()

- block_with_history.queue(max_size=10, api_open=False).launch(show_api=False)
- #block_with_history.launch(server_name="0.0.0.0")
+ block_with_history.queue(concurrency_count=8, max_size=10, api_open=False).launch(show_api=False)
+ #block_with_history.launch(server_name="0.0.0.0")