Linaqruf committed on
Commit 34d223f
1 Parent(s): 1f66542

fix error while returning result

Files changed (3)
  1. app.py +40 -61
  2. demo.ipynb +112 -91
  3. utils.py +7 -4
app.py CHANGED
@@ -83,7 +83,7 @@ def generate(
     upscale_by: float = 1.5,
     add_quality_tags: bool = True,
     progress=gr.Progress(track_tqdm=True),
-) -> Image:
+):
     generator = utils.seed_everything(seed)
 
     width, height = utils.aspect_ratio_handler(
@@ -132,6 +132,7 @@ def generate(
         }
     else:
         metadata["use_upscaler"] = None
+    metadata["model"] = "Animagine XL 3.0"
     logger.info(json.dumps(metadata, indent=4))
 
     try:
@@ -169,12 +170,15 @@ def generate(
             output_type="pil",
         ).images
 
-        if images and IS_COLAB:
-            for image in images:
-                filepath = utils.save_image(image, metadata, OUTPUT_DIR)
-                logger.info(f"Image saved as {filepath} with metadata")
-
-        return images, metadata
+        if images:
+            image_paths = [
+                utils.save_image(image, metadata, OUTPUT_DIR, IS_COLAB) for image in images
+            ]
+
+            for image_path in image_paths:
+                logger.info(f"Image saved as {image_path} with metadata")
+
+            return image_paths, metadata
     except Exception as e:
         logger.exception(f"An error occurred: {e}")
         raise
@@ -221,14 +225,11 @@ with gr.Blocks(css="style.css") as demo:
                 placeholder="Enter your prompt",
                 container=False,
             )
-            run_button = gr.Button(
-                "Generate",
-                variant="primary",
-                scale=0
-            )
+            run_button = gr.Button("Generate", variant="primary", scale=0)
         result = gr.Gallery(
             label="Result",
             columns=1,
+            height="512px",
             preview=True,
             show_label=False
         )
@@ -239,10 +240,7 @@ with gr.Blocks(css="style.css") as demo:
             placeholder="Enter a negative prompt",
         )
         with gr.Row():
-            add_quality_tags = gr.Checkbox(
-                label="Add Quality Tags",
-                value=True
-            )
+            add_quality_tags = gr.Checkbox(label="Add Quality Tags", value=True)
             quality_selector = gr.Dropdown(
                 label="Quality Tags Presets",
                 interactive=True,
@@ -348,49 +346,12 @@ with gr.Blocks(css="style.css") as demo:
         api_name=False,
     )
 
-    inputs = [
-        prompt,
-        negative_prompt,
-        seed,
-        custom_width,
-        custom_height,
-        guidance_scale,
-        num_inference_steps,
-        sampler,
-        aspect_ratio_selector,
-        style_selector,
-        quality_selector,
-        use_upscaler,
-        upscaler_strength,
-        upscale_by,
-        add_quality_tags,
-    ]
-
-    prompt.submit(
-        fn=utils.randomize_seed_fn,
-        inputs=[seed, randomize_seed],
-        outputs=seed,
-        queue=False,
-        api_name=False,
-    ).then(
-        fn=generate,
-        inputs=inputs,
-        outputs=result,
-        api_name="run",
-    )
-    negative_prompt.submit(
-        fn=utils.randomize_seed_fn,
-        inputs=[seed, randomize_seed],
-        outputs=seed,
-        queue=False,
-        api_name=False,
-    ).then(
-        fn=generate,
-        inputs=inputs,
-        outputs=result,
-        api_name=False,
-    )
-    run_button.click(
+    gr.on(
+        triggers=[
+            prompt.submit,
+            negative_prompt.submit,
+            run_button.click,
+        ],
         fn=utils.randomize_seed_fn,
         inputs=[seed, randomize_seed],
         outputs=seed,
@@ -398,8 +359,26 @@ with gr.Blocks(css="style.css") as demo:
         api_name=False,
     ).then(
         fn=generate,
-        inputs=inputs,
+        inputs=[
+            prompt,
+            negative_prompt,
+            seed,
+            custom_width,
+            custom_height,
+            guidance_scale,
+            num_inference_steps,
+            sampler,
+            aspect_ratio_selector,
+            style_selector,
+            quality_selector,
+            use_upscaler,
+            upscaler_strength,
+            upscale_by,
+            add_quality_tags,
+        ],
         outputs=[result, gr_metadata],
-        api_name=False,
+        api_name="run",
     )
-demo.queue(max_size=20).launch(debug=IS_COLAB, share=IS_COLAB)
+
+if __name__ == "__main__":
+    demo.queue(max_size=20).launch(debug=IS_COLAB, share=IS_COLAB)
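The rewired event handling above replaces the three separate prompt.submit / negative_prompt.submit / run_button.click registrations with a single gr.on() call, the Gradio 4 mechanism for binding one handler chain to several triggers, and it routes generate()'s new (image_paths, metadata) return value to the [result, gr_metadata] outputs. A minimal standalone sketch of that gr.on() pattern follows; the echo handler and the components in it are illustrative placeholders, not code from this Space:

import gradio as gr

def echo(text: str) -> str:
    # Stand-in for the Space's generate() pipeline.
    return text.upper()

with gr.Blocks() as demo:
    prompt = gr.Textbox(label="Prompt")
    run_button = gr.Button("Generate", variant="primary")
    result = gr.Textbox(label="Result")

    # One gr.on() call registers the same handler for several triggers; the
    # returned dependency can still be chained with .then(), which is how the
    # Space runs seed randomization before generation.
    gr.on(
        triggers=[prompt.submit, run_button.click],
        fn=echo,
        inputs=prompt,
        outputs=result,
        api_name="run",
    )

if __name__ == "__main__":
    demo.queue(max_size=20).launch()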
demo.ipynb CHANGED
@@ -1,93 +1,114 @@
 {
- "cells": [
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "538a3f0c-50c1-4952-9fcc-070d365c9a0f",
-   "metadata": {
-    "scrolled": true
-   },
-   "outputs": [],
-   "source": [
-    "import os\n",
-    "import subprocess\n",
-    "from threading import Timer\n",
-    "from queue import Queue\n",
-    "\n",
-    "ROOT_DIR = \"/content\"\n",
-    "REPO_URL = \"https://huggingface.co/spaces/Linaqruf/animagine-xl\"\n",
-    "REPO_DIR = os.path.join(ROOT_DIR, \"cagliostro-webui\")\n",
-    "NGROK_TOKEN = \"\"\n",
-    "\n",
-    "os.environ[\"HF_TOKEN\"] = \"\"\n",
-    "os.environ[\"IS_COLAB\"] = \"1\"\n",
-    "os.environ[\"MODEL\"] = \"https://huggingface.co/cagliostrolab/animagine-xl-3.0/blob/main/animagine-xl-3.0.safetensors\"\n",
-    "os.environ[\"CACHE_EXAMPLES\"] = \"1\"\n",
-    "\n",
-    "def clone(url, dir, branch=None):\n",
-    "    subprocess.run([\"git\", \"clone\", url, dir], check=True)\n",
-    "    if branch:\n",
-    "        subprocess.run([\"git\", \"checkout\", branch], cwd=dir, check=True)\n",
-    "\n",
-    "def install_deps(dir):\n",
-    "    subprocess.run([\"pip\", \"install\", \"-r\", \"requirements.txt\"], cwd=dir, check=True)\n",
-    "\n",
-    "def ngrok_tunnel(port,queue,auth_token):\n",
-    "    ngrok.set_auth_token(auth_token)\n",
-    "    url = ngrok.connect(port)\n",
-    "    queue.put(url)\n",
-    "\n",
-    "def main():\n",
-    "    if not os.path.exists(REPO_DIR):\n",
-    "        print(f\"Cloning Repository to {REPO_DIR}\")\n",
-    "        clone(REPO_URL, REPO_DIR)\n",
-    "        print(f\"Installing required python libraries\")\n",
-    "        install_deps(REPO_DIR)\n",
-    "        print(\"Done!\")\n",
-    "\n",
-    "    os.chdir(REPO_DIR)\n",
-    "    \n",
-    "    if NGROK_TOKEN:\n",
-    "        try:\n",
-    "            from pyngrok import conf,ngrok\n",
-    "        except:\n",
-    "            !pip install -qqqq --upgrade setuptools\n",
-    "            !pip install -qqqq -U pyngrok\n",
-    "            from pyngrok import conf,ngrok\n",
-    "        \n",
-    "        ngrok_output_queue = Queue()\n",
-    "        ngrok_thread = Timer(2, ngrok_tunnel, args=(7860, ngrok_output_queue, NGROK_TOKEN))\n",
-    "        ngrok_thread.start()\n",
-    "        ngrok_thread.join()\n",
-    "        \n",
-    "        print(ngrok_output_queue.get()) \n",
-    "        \n",
-    "    !python app.py\n",
-    "\n",
-    "if __name__ == \"__main__\":\n",
-    "    main()"
-   ]
-  }
- ],
- "metadata": {
-  "kernelspec": {
-   "display_name": "Python 3 (ipykernel)",
-   "language": "python",
-   "name": "python3"
+ "cells": [
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "538a3f0c-50c1-4952-9fcc-070d365c9a0f",
+   "metadata": {
+    "scrolled": true,
+    "id": "538a3f0c-50c1-4952-9fcc-070d365c9a0f"
+   },
+   "outputs": [],
+   "source": [
+    "import os\n",
+    "import subprocess\n",
+    "from threading import Timer\n",
+    "from queue import Queue\n",
+    "\n",
+    "def is_colab():\n",
+    "    try:\n",
+    "        import google.colab\n",
+    "        return True\n",
+    "    except ImportError:\n",
+    "        return False\n",
+    "\n",
+    "ROOT_DIR = \"/workspace/\" if not is_colab() else \"/content/\"\n",
+    "REPO_URL = \"https://huggingface.co/spaces/Linaqruf/animagine-xl\"\n",
+    "REPO_DIR = os.path.join(ROOT_DIR, \"animagine-xl\")\n",
+    "\n",
+    "NGROK_TOKEN = \"\"\n",
+    "NGROK_SUBDOMAIN = \"\"\n",
+    "PORT = 7860\n",
+    "\n",
+    "# os.environ[\"HF_TOKEN\"] = \"\"\n",
+    "os.environ[\"IS_COLAB\"] = \"1\"\n",
+    "os.environ[\"MODEL\"] = \"https://huggingface.co/cagliostrolab/animagine-xl-3.0/blob/main/animagine-xl-3.0.safetensors\"\n",
+    "os.environ[\"CACHE_EXAMPLES\"] = \"1\"\n",
+    "\n",
+    "def clone_repository(url, directory, branch=None):\n",
+    "    subprocess.run([\"git\", \"clone\", url, directory], check=True)\n",
+    "    if branch:\n",
+    "        subprocess.run([\"git\", \"checkout\", branch], cwd=directory, check=True)\n",
+    "\n",
+    "def install_dependencies(directory):\n",
+    "    dependencies = [\"accelerate==0.27.2\", \"diffusers==0.26.3\", \"gradio==4.20.0\",\n",
+    "                    \"invisible-watermark==0.2.0\", \"spaces==0.24.0\", \"omegaconf==2.3.0\", \"timm==0.9.10\"]\n",
+    "    if is_colab():\n",
+    "        subprocess.run([\"pip\", \"install\"] + dependencies, check=True)\n",
+    "    else:\n",
+    "        requirements_path = os.path.join(directory, \"requirements.txt\")\n",
+    "        subprocess.run([\"pip\", \"install\", \"-r\", requirements_path], check=True)\n",
+    "\n",
+    "def setup_ngrok_tunnel(port, queue, auth_token, subdomain):\n",
+    "    ngrok.set_auth_token(auth_token)\n",
+    "    url = ngrok.connect(port, bind_tls=True, subdomain=subdomain)\n",
+    "    queue.put(url)\n",
+    "\n",
+    "def main():\n",
+    "    if not os.path.exists(REPO_DIR):\n",
+    "        print(f\"Cloning repository to {REPO_DIR}\")\n",
+    "        clone_repository(REPO_URL, REPO_DIR)\n",
+    "\n",
+    "    print(\"Installing required Python libraries\")\n",
+    "    install_dependencies(REPO_DIR)\n",
+    "    print(\"Done!\")\n",
+    "\n",
+    "    os.chdir(REPO_DIR)\n",
+    "\n",
+    "    if NGROK_TOKEN:\n",
+    "        try:\n",
+    "            from pyngrok import conf, ngrok\n",
+    "        except ImportError:\n",
+    "            subprocess.run([\"pip\", \"install\", \"-qqqq\", \"--upgrade\", \"setuptools\"], check=True)\n",
+    "            subprocess.run([\"pip\", \"install\", \"-qqqq\", \"-U\", \"pyngrok\"], check=True)\n",
+    "            from pyngrok import conf, ngrok\n",
+    "\n",
+    "        ngrok.kill()\n",
+    "        ngrok_output_queue = Queue()\n",
+    "        ngrok_thread = Timer(2, setup_ngrok_tunnel, args=(PORT, ngrok_output_queue, NGROK_TOKEN, NGROK_SUBDOMAIN))\n",
+    "        ngrok_thread.start()\n",
+    "        ngrok_thread.join()\n",
+    "        print(ngrok_output_queue.get())\n",
+    "\n",
+    "    !python app.py\n",
+    "\n",
+    "if __name__ == \"__main__\":\n",
+    "    main()"
+   ]
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "Python 3 (ipykernel)",
+   "language": "python",
+   "name": "python3"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3",
+   "version": "3.10.12"
+  },
+  "colab": {
+   "provenance": []
+  }
 },
-  "language_info": {
-   "codemirror_mode": {
-    "name": "ipython",
-    "version": 3
-   },
-   "file_extension": ".py",
-   "mimetype": "text/x-python",
-   "name": "python",
-   "nbconvert_exporter": "python",
-   "pygments_lexer": "ipython3",
-   "version": "3.10.12"
-  }
- },
- "nbformat": 4,
- "nbformat_minor": 5
-}
+ "nbformat": 4,
+ "nbformat_minor": 5
+}
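The rewritten notebook picks its root directory and install strategy by detecting whether it is running inside Colab, relying on the fact that google.colab is only importable in a Colab runtime. A standalone sketch of that check, using the same directory names the notebook uses:

import os

def is_colab() -> bool:
    # google.colab only imports successfully inside a Colab runtime,
    # so an ImportError means we are running somewhere else.
    try:
        import google.colab  # noqa: F401
        return True
    except ImportError:
        return False

ROOT_DIR = "/content/" if is_colab() else "/workspace/"
REPO_DIR = os.path.join(ROOT_DIR, "animagine-xl")

print(f"Colab runtime: {is_colab()}, repo directory: {REPO_DIR}")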
utils.py CHANGED
@@ -4,6 +4,7 @@ import random
 import numpy as np
 import json
 import torch
+import uuid
 from PIL import Image, PngImagePlugin
 from datetime import datetime
 from dataclasses import dataclass
@@ -158,12 +159,14 @@ def preprocess_image_dimensions(width, height):
     return width, height
 
 
-def save_image(image, metadata, output_dir):
-    current_time = datetime.now().strftime("%Y%m%d_%H%M%S")
+def save_image(image, metadata, output_dir, is_colab):
+    if is_colab:
+        current_time = datetime.now().strftime("%Y%m%d_%H%M%S")
+        filename = f"image_{current_time}.png"
+    else:
+        filename = str(uuid.uuid4()) + ".png"
     os.makedirs(output_dir, exist_ok=True)
-    filename = f"image_{current_time}.png"
     filepath = os.path.join(output_dir, filename)
-
     metadata_str = json.dumps(metadata)
     info = PngImagePlugin.PngInfo()
     info.add_text("metadata", metadata_str)
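With the extra is_colab argument, save_image keeps readable timestamped filenames on Colab and switches to UUID filenames elsewhere, so concurrent users of the hosted Space cannot collide on a name; the metadata dict is still embedded as a PNG text chunk. A sketch of the whole helper under that reading; the trailing image.save(..., pnginfo=info) and return filepath lines are not part of the hunk above and are assumptions here:

import json
import os
import uuid
from datetime import datetime

from PIL import Image, PngImagePlugin

def save_image(image: Image.Image, metadata: dict, output_dir: str, is_colab: bool) -> str:
    # Timestamped names are easy to browse on a mounted Colab drive; UUIDs
    # avoid filename collisions when many Space users generate at once.
    if is_colab:
        filename = f"image_{datetime.now().strftime('%Y%m%d_%H%M%S')}.png"
    else:
        filename = f"{uuid.uuid4()}.png"

    os.makedirs(output_dir, exist_ok=True)
    filepath = os.path.join(output_dir, filename)

    # Embed the generation parameters as a "metadata" text chunk in the PNG.
    info = PngImagePlugin.PngInfo()
    info.add_text("metadata", json.dumps(metadata))
    image.save(filepath, pnginfo=info)  # assumed: not shown in the hunk
    return filepath  # assumed: not shown in the hunk

if __name__ == "__main__":
    # Hypothetical usage with a blank image.
    print(save_image(Image.new("RGB", (64, 64), "white"), {"prompt": "test"}, "outputs", is_colab=False))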