Spaces: Running on Zero

Update

- README.md +1 -1
- app.py +3 -1
- requirements.txt +7 -6
README.md CHANGED

@@ -4,7 +4,7 @@ emoji: 🌍
 colorFrom: gray
 colorTo: purple
 sdk: gradio
-sdk_version:
+sdk_version: 4.8.0
 app_file: app.py
 license: mit
 pinned: false
app.py CHANGED

@@ -8,10 +8,11 @@ import random
 import gradio as gr
 import numpy as np
 import PIL.Image
+import spaces
 import torch
 from diffusers import AutoencoderKL, DiffusionPipeline

-DESCRIPTION = "#
+DESCRIPTION = "# SDXL"
 if not torch.cuda.is_available():
     DESCRIPTION += "\n<p>Running on CPU 🥶 This demo does not work on CPU.</p>"

@@ -62,6 +63,7 @@ def randomize_seed_fn(seed: int, randomize_seed: bool) -> int:
     return seed


+@spaces.GPU
 def generate(
     prompt: str,
     negative_prompt: str = "",
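The substantive change in app.py is the ZeroGPU hookup: `import spaces` plus the `@spaces.GPU` decorator on `generate`, so a GPU is attached only while that function runs. Below is a minimal sketch of the same pattern in a stand-alone Gradio app; the placeholder function body and the interface wiring are illustrative assumptions, not the Space's actual SDXL code.

```python
import gradio as gr
import spaces
import torch


@spaces.GPU  # on ZeroGPU hardware, a GPU is attached only for the duration of this call
def generate(prompt: str) -> str:
    # Placeholder work: the real Space runs an SDXL DiffusionPipeline here.
    device = "cuda" if torch.cuda.is_available() else "cpu"
    return f"(sketch) would generate an image for {prompt!r} on {device}"


demo = gr.Interface(fn=generate, inputs="text", outputs="text")

if __name__ == "__main__":
    demo.launch()
```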
requirements.txt CHANGED

@@ -1,7 +1,8 @@
-accelerate==0.
-diffusers==0.
-gradio==
+accelerate==0.25.0
+diffusers==0.24.0
+gradio==4.8.0
 invisible-watermark==0.2.0
-Pillow==10.0
-
-
+Pillow==10.1.0
+spaces==0.18.0
+torch==2.0.0
+transformers==4.35.2
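The Gradio pin in requirements.txt and the README's `sdk_version` are kept in sync (both 4.8.0 in this commit). As a quick sanity check, a small script like the one below can compare installed versions against the pins; the pin list is copied from the diff above, and it assumes the packages are already installed in the environment.

```python
from importlib.metadata import version

# Pins copied from the updated requirements.txt.
pins = {
    "accelerate": "0.25.0",
    "diffusers": "0.24.0",
    "gradio": "4.8.0",
    "invisible-watermark": "0.2.0",
    "Pillow": "10.1.0",
    "spaces": "0.18.0",
    "torch": "2.0.0",
    "transformers": "4.35.2",
}

for name, pinned in pins.items():
    installed = version(name)  # raises PackageNotFoundError if the package is missing
    flag = "OK" if installed == pinned else f"MISMATCH (pinned {pinned})"
    print(f"{name}=={installed}  {flag}")
```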