Spaces:
Running
on
Zero
Running
on
Zero
Update app.py
Browse files
app.py
CHANGED
@@ -6,12 +6,11 @@ import torch
|
|
6 |
from diffusers import DiffusionPipeline
|
7 |
from transformers import pipeline
|
8 |
|
9 |
-
# 번역 파이프라인
|
10 |
-
translator = pipeline("translation", model="Helsinki-NLP/opus-mt-ko-en")
|
11 |
-
|
12 |
-
dtype = torch.bfloat16
|
13 |
device = "cuda" if torch.cuda.is_available() else "cpu"
|
|
|
14 |
|
|
|
15 |
pipe = DiffusionPipeline.from_pretrained("black-forest-labs/FLUX.1-schnell", torch_dtype=dtype).to(device)
|
16 |
|
17 |
MAX_SEED = np.iinfo(np.int32).max
|
@@ -40,7 +39,10 @@ def infer(prompt, seed=42, randomize_seed=False, width=1024, height=1024, num_in
|
|
40 |
).images[0]
|
41 |
return image, seed
|
42 |
|
43 |
-
examples =
|
|
|
|
|
|
|
44 |
|
45 |
css = """
|
46 |
footer {
|
|
|
6 |
from diffusers import DiffusionPipeline
|
7 |
from transformers import pipeline
|
8 |
|
9 |
+
# 번역 파이프라인 및 하드웨어 설정
|
|
|
|
|
|
|
10 |
device = "cuda" if torch.cuda.is_available() else "cpu"
|
11 |
+
translator = pipeline("translation", model="Helsinki-NLP/opus-mt-ko-en", device=device)
|
12 |
|
13 |
+
dtype = torch.bfloat16
|
14 |
pipe = DiffusionPipeline.from_pretrained("black-forest-labs/FLUX.1-schnell", torch_dtype=dtype).to(device)
|
15 |
|
16 |
MAX_SEED = np.iinfo(np.int32).max
|
|
|
39 |
).images[0]
|
40 |
return image, seed
|
41 |
|
42 |
+
examples = [
|
43 |
+
["Create a new logo for a Tech Startup"],
|
44 |
+
["Design Rocket"]
|
45 |
+
]
|
46 |
|
47 |
css = """
|
48 |
footer {
|