Update app.py
app.py CHANGED
@@ -4,6 +4,10 @@ import random
 import spaces
 import torch
 from diffusers import DiffusionPipeline
+from transformers import pipeline
+
+# Initialize the translation pipeline
+translator = pipeline("translation", model="Helsinki-NLP/opus-mt-ko-en")
 
 dtype = torch.bfloat16
 device = "cuda" if torch.cuda.is_available() else "cpu"
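The new translator can be exercised on its own before it is wired into infer. A minimal sketch, assuming the Helsinki-NLP/opus-mt-ko-en checkpoint downloads successfully; the sample prompt and the printed output are illustrative, not taken from this Space:

from transformers import pipeline

# Same checkpoint the diff adds; downloads on first use.
translator = pipeline("translation", model="Helsinki-NLP/opus-mt-ko-en")

# A translation pipeline returns a list of dicts keyed by 'translation_text'.
result = translator("바다 위의 성", max_length=512)
print(result[0]['translation_text'])  # e.g. "A castle on the sea" (illustrative)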
@@ -18,6 +22,14 @@ def infer(prompt, seed=42, randomize_seed=False, width=1024, height=1024, num_in
     if randomize_seed:
         seed = random.randint(0, MAX_SEED)
     generator = torch.Generator().manual_seed(seed)
+
+    # Detect Korean input and translate it
+    if any('\uAC00' <= char <= '\uD7A3' for char in prompt):
+        print("Translating Korean prompt...")
+        translated_prompt = translator(prompt, max_length=512)[0]['translation_text']
+        print("Translated prompt:", translated_prompt)
+        prompt = translated_prompt
+
     image = pipe(
         prompt = prompt,
         width = width,
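The range check in the hunk above compares characters against U+AC00 through U+D7A3, which is the Hangul Syllables block, so it detects precomposed Korean syllables but not standalone jamo. A self-contained sketch of the same test, with made-up sample inputs:

def contains_hangul(text: str) -> bool:
    # U+AC00..U+D7A3 covers precomposed Hangul syllables only;
    # isolated jamo (U+1100..U+11FF) would slip through this check.
    return any('\uAC00' <= char <= '\uD7A3' for char in text)

print(contains_hangul("달 위의 고양이"))     # True: would be translated
print(contains_hangul("a cat on the moon"))  # False: passed through unchanged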
@@ -45,12 +57,8 @@ footer {
 }
 """
 
-with gr.Blocks(theme="Nymbo/Nymbo_Theme", css=css
-) as demo:
-
+with gr.Blocks(theme="Nymbo/Nymbo_Theme", css=css) as demo:
     with gr.Column(elem_id="col-container"):
-
-
         with gr.Row():
             prompt = gr.Text(
                 label="Prompt",
@@ -103,20 +111,18 @@ with gr.Blocks(theme="Nymbo/Nymbo_Theme", css=css
         )
 
         gr.Examples(
-            examples
-            fn
-            inputs
-            outputs
+            examples=examples,
+            fn=infer,
+            inputs=[prompt],
+            outputs=[result, seed],
             cache_examples="lazy"
         )
 
-        gr.
-
-
-
-
-
-            outputs = [result, seed]
-        )
+        gr.on(
+            triggers=[run_button.click, prompt.submit],
+            fn=infer,
+            inputs=[prompt, seed, randomize_seed, width, height, num_inference_steps],
+            outputs=[result, seed]
+        )
 
 demo.launch()
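The rewritten gr.Examples call now passes its arguments as proper keywords, with cache_examples="lazy" deferring example inference until an example is first clicked, and gr.on binds one handler to several triggers at once, here both clicking run_button and submitting the prompt textbox. A minimal sketch of the same gr.on pattern, with placeholder components and a hypothetical handler not taken from this Space:

import gradio as gr

with gr.Blocks() as demo:
    name = gr.Textbox(label="Name")          # hypothetical input
    greet_button = gr.Button("Greet")        # hypothetical trigger
    greeting = gr.Textbox(label="Greeting")  # hypothetical output

    # One handler bound to both the button click and the textbox submit,
    # mirroring gr.on(triggers=[run_button.click, prompt.submit], ...) above.
    gr.on(
        triggers=[greet_button.click, name.submit],
        fn=lambda n: f"Hello, {n}!",
        inputs=[name],
        outputs=[greeting],
    )

demo.launch()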