Spaces: Running on Zero

Update app.py #1
by honey90 - opened

app.py CHANGED
@@ -1,128 +1,200 @@
- import
- import numpy as np
import random
-
- import
import torch
from diffusers import DiffusionPipeline

- #
-
-

- #
-
-
-
-

-
-

-
-
-
-
-
-
- prompt=prompt
-
-
-
-
-
-

def set_prompt(example_text):
    return example_text

- #
example_prompts = {
- "
- """
- -
- -
- -
- -
    ],
- "
- """
KNOWLEDGE
- ├── ACQUISITION [
- │   ├── READING [
- │   ├── PRACTICE [
- │   └── OBSERVATION [
- ├── PROCESSING [
- │   ├── ANALYSIS [
- │   └── SYNTHESIS [
- ├── RETENTION [
- │   ├── SHORT-TERM [
- │   └── LONG-TERM [
└── APPLICATION
-     ├── CREATION [
-     └── INNOVATION [
    ],
- "
- """
- -
- -
- -
- -
    ],
- "
- """
- -
- -
- -
- -
- -
- -
- -
- -
- -
    ],
- "
- """
- -
- -
- -
- -
- -
- -
- -
- -
- - Clear, directional arrows connecting each phase
- - Iconography for each component (e.g., magnifying glass, lightbulb, gear, checklist)
- - Style: Vibrant, educational yet professional, balancing detailed annotations with visual simplicity
- - Layout: Structured with a clear hierarchy and color-coded sections to differentiate process stages"""
    ],
- "
- """
BUSINESS WORKFLOW
- ├──
- │   ├──
- │   └──
- ├──
- │   ├──
- │   └──
- ├──
- ├──
- └──
    ]
}

- #
css = """
- * {
-     box-sizing: border-box;
- }
body {
    background: linear-gradient(135deg, #667eea, #764ba2);
-     font-family: '
-     color: #333;
-     margin: 0;
-     padding: 0;
}
.gradio-container {
    background: rgba(255, 255, 255, 0.95);
@@ -131,200 +203,115 @@ body {
    box-shadow: 0 8px 30px rgba(0, 0, 0, 0.3);
    margin: 40px auto;
    width: 1200px;
-     overflow: visible !important;
}
.sidebar {
    background: rgba(255, 255, 255, 0.98);
    border-radius: 10px;
    padding: 20px;
    box-shadow: 0 4px 15px rgba(0, 0, 0, 0.2);
-     position: relative;
-     z-index: 10; /* bring sidebar above container edges */
    overflow: visible !important;
}
button, .btn {
    background: linear-gradient(90deg, #ff8a00, #e52e71);
-     border: none;
-
-
-
-     font-weight: bold;
-     letter-spacing: 1px;
-     border-radius: 5px;
-     cursor: pointer;
    transition: transform 0.2s ease-in-out;
}
- button:hover, .btn:hover {
-
- }
- .example-accordion {
-     width: 100% !important;
-     max-width: 100% !important;
- }
- .example-accordion button {
-     width: auto !important;
-     white-space: normal !important;
- }
"""

- with gr.Blocks(css=css, title="
    gr.Markdown(
        """
        <div style="text-align:center;">
-           <h1
-           <p
-           <p><strong
        </div>
        """
    )
    gr.HTML(
        """<a href="https://visitorbadge.io/status?path=https%3A%2F%2Fginigen-Workflow-Canvas.hf.space">
-            <img src="https://api.visitorbadge.io/api/visitors?path=https%3A%2F%2Fginigen-Workflow-Canvas.hf.space&countColor=%23263759" alt="
        </a>"""
    )
-
with gr.Row():
|
| 187 |
-
#
|
| 188 |
with gr.Column(scale=2, elem_classes="sidebar"):
|
| 189 |
-
gr.Markdown("###
|
| 190 |
-
|
| 191 |
-
|
| 192 |
-
|
| 193 |
-
|
| 194 |
-
|
| 195 |
-
|
| 196 |
-
|
| 197 |
-
|
| 198 |
-
|
        with gr.Column(scale=8):
            with gr.Tabs():
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-                 # Tab 3: Mockup
-                 with gr.Tab("Mockup"):
-                     mock_prompt = gr.Textbox(
-                         label="Mockup Prompt",
-                         placeholder="Enter a mockup description...",
-                         lines=5,
-                         value=example_prompts["Mockup"][0]
-                     )
-                     mock_generate = gr.Button("Generate Mockup")
-                     mock_image = gr.Image(label="Generated Mockup", value="w2.webp")
-                     with gr.Accordion("Example Prompts", open=True, elem_classes="example-accordion"):
-                         for ex in example_prompts["Mockup"]:
-                             gr.Button(ex, variant="secondary").click(fn=lambda ex=ex: set_prompt(ex), outputs=mock_prompt)
-
-                 # Tab 4: Infographic
-                 with gr.Tab("Infographic"):
-                     info_prompt = gr.Textbox(
-                         label="Infographic Prompt",
-                         placeholder="Enter an infographic description...",
-                         lines=5,
-                         value=example_prompts["Infographic"][0]
-                     )
-                     info_generate = gr.Button("Generate Infographic")
-                     info_image = gr.Image(label="Generated Infographic", value="w3.webp")
-                     with gr.Accordion("Example Prompts", open=True, elem_classes="example-accordion"):
-                         for ex in example_prompts["Infographic"]:
-                             gr.Button(ex, variant="secondary").click(fn=lambda ex=ex: set_prompt(ex), outputs=info_prompt)
-
-                 # Tab 5: Diagram
-                 with gr.Tab("Diagram"):
-                     diag_prompt = gr.Textbox(
-                         label="Diagram Prompt",
-                         placeholder="Enter a diagram description...",
-                         lines=5,
-                         value=example_prompts["Diagram"][0]
-                     )
-                     diag_generate = gr.Button("Generate Diagram")
-                     diag_image = gr.Image(label="Generated Diagram", value="w4.webp")
-                     with gr.Accordion("Example Prompts", open=True, elem_classes="example-accordion"):
-                         for ex in example_prompts["Diagram"]:
-                             gr.Button(ex, variant="secondary").click(fn=lambda ex=ex: set_prompt(ex), outputs=diag_prompt)
-
-                 # Tab 6: Flowchart
-                 with gr.Tab("Flowchart"):
-                     flow_prompt = gr.Textbox(
-                         label="Flowchart Prompt",
-                         placeholder="Enter a flowchart description...",
-                         lines=5,
-                         value=example_prompts["Flowchart"][0]
-                     )
-                     flow_generate = gr.Button("Generate Flowchart")
-                     flow_image = gr.Image(label="Generated Flowchart", value="w5.webp")
-                     with gr.Accordion("Example Prompts", open=True, elem_classes="example-accordion"):
-                         for ex in example_prompts["Flowchart"]:
-                             gr.Button(ex, variant="secondary").click(fn=lambda ex=ex: set_prompt(ex), outputs=flow_prompt)
-
-     # Bind events for generation buttons
-     pd_generate.click(
-         fn=generate_image,
-         inputs=[pd_prompt, seed_slider, randomize_seed, width_slider, height_slider, steps_slider, guidance_slider],
-         outputs=[pd_image, seed_slider]
-     )
-
-     mindmap_generate.click(
-         fn=generate_image,
-         inputs=[mindmap_prompt, seed_slider, randomize_seed, width_slider, height_slider, steps_slider, guidance_slider],
-         outputs=[mindmap_image, seed_slider]
-     )
-
-     mock_generate.click(
-         fn=generate_image,
-         inputs=[mock_prompt, seed_slider, randomize_seed, width_slider, height_slider, steps_slider, guidance_slider],
-         outputs=[mock_image, seed_slider]
-     )
-
-     info_generate.click(
-         fn=generate_image,
-         inputs=[info_prompt, seed_slider, randomize_seed, width_slider, height_slider, steps_slider, guidance_slider],
-         outputs=[info_image, seed_slider]
-     )
-
-     diag_generate.click(
-         fn=generate_image,
-         inputs=[diag_prompt, seed_slider, randomize_seed, width_slider, height_slider, steps_slider, guidance_slider],
-         outputs=[diag_image, seed_slider]
-     )
-
-     flow_generate.click(
-         fn=generate_image,
-         inputs=[flow_prompt, seed_slider, randomize_seed, width_slider, height_slider, steps_slider, guidance_slider],
-         outputs=[flow_image, seed_slider]
-     )

if __name__ == "__main__":
-     demo.queue()
    demo.launch(
        server_name="0.0.0.0",
        server_port=7860,
        share=False,
        show_error=True,
        debug=True
-     )

+ import os
import random
+ import numpy as np
+ import gradio as gr
+
+ # --- Hugging Face Spaces GPU decorator (no-op fallback so the app also runs without it) ---
+ try:
+     import spaces
+     GPU_DECORATOR = spaces.GPU
+ except Exception:
+     def GPU_DECORATOR(*args, **kwargs):
+         def _wrap(fn):
+             return fn
+         return _wrap
+
import torch
from diffusers import DiffusionPipeline

+ # -----------------------
+ # Automatic device/precision selection
+ # -----------------------
+ def select_device_dtype():
+     if torch.cuda.is_available():
+         device = "cuda"
+         dtype = torch.bfloat16 if torch.cuda.is_bf16_supported() else torch.float16
+     else:
+         device = "cpu"
+         dtype = torch.float32
+     return device, dtype
+
+ device, dtype = select_device_dtype()
+ if device == "cuda":
+     torch.backends.cuda.matmul.allow_tf32 = True
+
+ # Limits
+ MAX_SEED = np.iinfo(np.uint32).max
+ MAX_SIDE = 2048
+ MAX_TOTAL_PIXELS = 2_359_296  # cap on total pixels (roughly 2.36 MP)
+
+ # -----------------------
+ # Load the model (FLUX.1-schnell)
+ # -----------------------
+ MODEL_ID = "black-forest-labs/FLUX.1-schnell"
+ pipe = DiffusionPipeline.from_pretrained(MODEL_ID, torch_dtype=dtype).to(device)
+ if hasattr(pipe, "enable_vae_slicing"):
+     pipe.enable_vae_slicing()
+ if hasattr(pipe, "enable_vae_tiling"):
+     pipe.enable_vae_tiling()

+
# -----------------------
|
| 51 |
+
# ์ ํธ
|
| 52 |
+
# -----------------------
|
| 53 |
+
def _clamp_hw(width: int, height: int):
|
| 54 |
+
width = int(max(256, min(int(width), MAX_SIDE)))
|
| 55 |
+
height = int(max(256, min(int(height), MAX_SIDE)))
|
| 56 |
+
if width * height > MAX_TOTAL_PIXELS:
|
| 57 |
+
scale = (MAX_TOTAL_PIXELS / (width * height)) ** 0.5
|
| 58 |
+
width = int((width * scale) // 32 * 32)
|
| 59 |
+
height = int((height * scale) // 32 * 32)
|
| 60 |
+
width = max(256, min(width, MAX_SIDE))
|
| 61 |
+
height = max(256, min(height, MAX_SIDE))
|
| 62 |
+
return width, height
|
| 63 |
|
+ def _validate_params(prompt: str, steps: int, guidance: float):
+     if not prompt or not prompt.strip():
+         raise ValueError("Prompt is empty. Please enter some text.")
+     if not (1 <= steps <= 50):
+         raise ValueError("Inference steps must be between 1 and 50.")
+     if not (0.0 <= guidance <= 20.0):
+         raise ValueError("Guidance scale must be between 0.0 and 20.0.")

+ # -----------------------
+ # Generation function
+ # -----------------------
+ @GPU_DECORATOR()
+ def generate_image(prompt, seed, randomize_seed, width, height, steps, guidance_scale, progress=gr.Progress(track_tqdm=True)):
+     try:
+         prompt = prompt.strip()
+         _validate_params(prompt, steps, guidance_scale)
+
+         if randomize_seed:
+             seed = random.randint(0, MAX_SEED)
+         generator = torch.Generator(device=device).manual_seed(int(seed))
+
+         width, height = _clamp_hw(width, height)
+
+         progress(0.1, desc="Initializing…")
+         if device == "cuda":
+             autocast_ctx = torch.autocast(device_type="cuda", dtype=dtype)
+         elif device == "cpu":
+             autocast_ctx = torch.autocast(device_type="cpu", dtype=dtype) if dtype != torch.float32 else torch.no_grad()
+         else:
+             autocast_ctx = torch.no_grad()
+
+         with autocast_ctx:
+             progress(0.4, desc="Generating image…")
+             out = pipe(
+                 prompt=prompt,
+                 width=int(width),
+                 height=int(height),
+                 num_inference_steps=int(steps),
+                 generator=generator,
+                 guidance_scale=float(guidance_scale)
+             )
+             image = out.images[0]
+
+         progress(1.0, desc="Done")
+         return image, int(seed)
+     except Exception as e:
+         gr.Error(f"Error during generation: {type(e).__name__}: {e}")
+         return None, int(seed)

def set_prompt(example_text):
    return example_text

+ # -----------------------
+ # Example prompts
+ # -----------------------
example_prompts = {
+     "Product Design": [
+         """A sleek, industrial-style coffee machine concept sketch:
+ - Curved metal body with minimal bezels
+ - Touchscreen settings panel
+ - Modern matte black finish
+ - Hand-drawn (sketch) concept-art style"""
    ],
+     "Mind Map": [
+         """Hand-drawn style colorful mind map, educational, vivid colors, clear hierarchy, golden-ratio layout.
KNOWLEDGE
+ ├── ACQUISITION [brain struck by lightning ~60px]
+ │   ├── READING [glowing open book]
+ │   ├── PRACTICE [tool icon]
+ │   └── OBSERVATION [magnifying glass]
+ ├── PROCESSING [gear network ~50px]
+ │   ├── ANALYSIS [numeric chart]
+ │   └── SYNTHESIS [puzzle pieces]
+ ├── RETENTION [memory chip ~45px]
+ │   ├── SHORT-TERM […]
+ │   └── LONG-TERM [sturdy archive]
└── APPLICATION
+     ├── CREATION [palette]
+     └── INNOVATION [lightbulb]"""
    ],
+     "Mockup": [
+         """Hand-drawn wireframe-style mobile banking app mockup:
+ - Title screen with logo
+ - Login (ID, password, login button)
+ - Dashboard with three sections (balance, transaction history, quick actions)
+ - Bottom navigation (home, transfer, profile)"""
    ],
+     "Infographic": [
+         """Flat-style infographic for a large company's annual report:
+ - Title: "Global Renewable Energy Trends 2025"
+ - Subtitle: "Market share and growth analysis"
+ - Visual elements:
+   - Bar chart of solar, wind, and hydro output by region
+   - Energy-mix pie chart: solar (45%), wind (30%), hydro (25%)
+   - Year-over-year growth trend line
+   - Icons: minimal sun, wind turbine, water droplet
+ - Layout: grid-based, pastel accents, plenty of white space
+ - Callouts: key KPI figures and outlook"""
    ],
+     "Diagram": [
+         """End-to-end business workflow diagram (hand-drawn, educational and professional):
+ - Title: "Integrated Business Process"
+ - Components:
+   - Market analysis (charts, competitor map)
+   - Strategy development (brainstorming cloud, key focus)
+   - Product design (sketches, feedback loop)
+   - Implementation (timeline markers, resource icons)
+   - Post-launch review (metrics, continuous improvement)
+ - Clear directional arrows, stages distinguished by color"""
    ],
+     "Flowchart": [
+         """Hand-drawn style flowchart, vivid colors, minimal icons.
BUSINESS WORKFLOW
+ ├── START [green button ~40px]
+ │   ├── Requirements gathering [folder icon]
+ │   └── Data analysis [chart icon]
+ ├── IMPLEMENTATION [coding symbol ~50px]
+ │   ├── Frontend [browser icon]
+ │   └── Backend [server icon]
+ ├── TESTING & INTEGRATION [gear icon ~45px]
+ ├── DEPLOYMENT
+ └── END [checkered flag ~40px]"""
    ]
}

+ # -----------------------
+ # Gradio UI
+ # -----------------------
css = """
+ * { box-sizing: border-box; }
body {
    background: linear-gradient(135deg, #667eea, #764ba2);
+     font-family: 'Pretendard', 'Apple SD Gothic Neo', 'Noto Sans KR', 'Helvetica Neue', Arial, sans-serif;
+     color: #333; margin: 0; padding: 0;
}
.gradio-container {
    background: rgba(255, 255, 255, 0.95);
    box-shadow: 0 8px 30px rgba(0, 0, 0, 0.3);
    margin: 40px auto;
    width: 1200px;
+     overflow: visible !important;
}
.sidebar {
    background: rgba(255, 255, 255, 0.98);
    border-radius: 10px;
    padding: 20px;
    box-shadow: 0 4px 15px rgba(0, 0, 0, 0.2);
+     position: relative; z-index: 10;
    overflow: visible !important;
}
button, .btn {
    background: linear-gradient(90deg, #ff8a00, #e52e71);
+     border: none; color: #fff;
+     padding: 12px 24px; text-transform: uppercase;
+     font-weight: bold; letter-spacing: 1px;
+     border-radius: 5px; cursor: pointer;
    transition: transform 0.2s ease-in-out;
}
+ button:hover, .btn:hover { transform: scale(1.05); }
+ .example-accordion { width: 100% !important; max-width: 100% !important; }
+ .example-accordion button { width: auto !important; white-space: normal !important; }
"""

+
with gr.Blocks(css=css, title="์ํฌํ๋ก ์บ๋ฒ์ค") as demo:
|
| 230 |
gr.Markdown(
|
| 231 |
"""
|
| 232 |
<div style="text-align:center;">
|
| 233 |
+
<h1>์ํฌํ๋ก ์บ๋ฒ์ค</h1>
|
| 234 |
+
<p>์ฌ๋ฌ ํญ์์ ๋น์ฆ๋์ค์ ํ์ํ ๋์์ธ ์ปจ์
๊ณผ ์ํฌํ๋ก ๋ค์ด์ด๊ทธ๋จ์ ์์ฑํด ๋ณด์ธ์.</p>
|
| 235 |
+
<p><strong>์ปค๋ฎค๋ํฐ:</strong> <a href="https://discord.gg/openfreeai" target="_blank">https://discord.gg/openfreeai</a></p>
|
| 236 |
</div>
|
| 237 |
"""
|
| 238 |
)
|
| 239 |
gr.HTML(
|
| 240 |
"""<a href="https://visitorbadge.io/status?path=https%3A%2F%2Fginigen-Workflow-Canvas.hf.space">
|
| 241 |
+
<img src="https://api.visitorbadge.io/api/visitors?path=https%3A%2F%2Fginigen-Workflow-Canvas.hf.space&countColor=%23263759" alt="๋ฐฉ๋ฌธ์ ๋ฐฐ์ง"/>
|
| 242 |
</a>"""
|
| 243 |
)
|
| 244 |
+
|
    with gr.Row():
+         # Left sidebar: shared generation parameters
        with gr.Column(scale=2, elem_classes="sidebar"):
+             gr.Markdown("### Generation Parameters")
+
+             size_preset = gr.Dropdown(
+                 label="Resolution preset",
+                 choices=[
+                     "1024x1024", "1536x1024", "1024x1536",
+                     "1344x1344", "1536x1536", "1920x1080", "1080x1920"
+                 ],
+                 value="1024x1024"
+             )
+             width_slider = gr.Slider(label="Width (px)", minimum=256, maximum=MAX_SIDE, step=32, value=1024)
+             height_slider = gr.Slider(label="Height (px)", minimum=256, maximum=MAX_SIDE, step=32, value=1024)
+
+             def apply_preset(preset):
+                 w, h = map(int, preset.split("x"))
+                 w, h = _clamp_hw(w, h)
+                 return w, h
+             size_preset.change(fn=apply_preset, inputs=size_preset, outputs=[width_slider, height_slider])
+
+             seed_slider = gr.Slider(label="Seed", minimum=0, maximum=int(MAX_SEED), step=1, value=42)
+             randomize_seed = gr.Checkbox(label="Randomize seed", value=True)
+
+             def toggle_seed(disable):
+                 return gr.update(interactive=not disable)
+             randomize_seed.change(fn=toggle_seed, inputs=randomize_seed, outputs=seed_slider)
+
+             steps_slider = gr.Slider(label="Inference steps", minimum=1, maximum=50, step=1, value=20)
+             guidance_slider = gr.Slider(label="Guidance scale", minimum=0.0, maximum=20.0, step=0.5, value=7.5)
+
+         # Main area: tab UI
        with gr.Column(scale=8):
            with gr.Tabs():
+                 def build_tab(tab_title, ex_key, placeholder_text):
+                     with gr.Tab(tab_title):
+                         tb = gr.Textbox(
+                             label=f"{tab_title} Prompt",
+                             placeholder=placeholder_text,
+                             lines=5,
+                             value=example_prompts[ex_key][0]
+                         )
+                         btn = gr.Button(f"Generate {tab_title}")
+                         img = gr.Image(label=f"{tab_title} Result", type="pil", value=None, height=512)
+                         with gr.Accordion("Example Prompts", open=True, elem_classes="example-accordion"):
+                             for ex in example_prompts[ex_key]:
+                                 gr.Button(ex, variant="secondary").click(
+                                     fn=lambda ex=ex: set_prompt(ex),
+                                     outputs=tb
+                                 )
+                         btn.click(
+                             fn=generate_image,
+                             inputs=[tb, seed_slider, randomize_seed, width_slider, height_slider, steps_slider, guidance_slider],
+                             outputs=[img, seed_slider]
+                         )
+
+                 build_tab("Product Design", "Product Design", "Enter a product design concept…")
+                 build_tab("Mind Map", "Mind Map", "Enter a mind map description…")
+                 build_tab("Mockup", "Mockup", "Enter an app/web mockup description…")
+                 build_tab("Infographic", "Infographic", "Enter an infographic description…")
+                 build_tab("Diagram", "Diagram", "Enter a diagram description…")
+                 build_tab("Flowchart", "Flowchart", "Enter a flowchart description…")

if __name__ == "__main__":
+     demo.queue(concurrency_count=2, max_size=32)
    demo.launch(
        server_name="0.0.0.0",
        server_port=7860,
        share=False,
        show_error=True,
        debug=True
+     )
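
A note on the queue call in this revision: concurrency_count is the Gradio 3.x argument; Gradio 4.x removed it from Blocks.queue(), and the per-event limit is set with default_concurrency_limit instead. A minimal sketch of the same launch block under that assumed migration (illustrative only, not part of this PR):

if __name__ == "__main__":
    # Gradio 4.x spelling of the same queue limits (assumed API migration, not the PR's code)
    demo.queue(default_concurrency_limit=2, max_size=32)
    demo.launch(
        server_name="0.0.0.0",
        server_port=7860,
        share=False,
        show_error=True,
        debug=True,
    )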