Spaces: Running on CPU Upgrade
shweaung committed • Commit 451e2fe
1 Parent(s): 893d10a
Update app.py
app.py CHANGED
@@ -8,7 +8,6 @@ from PIL import Image
 from deep_translator import GoogleTranslator
 import json
 
-
 API_TOKEN = os.getenv("HF_READ_TOKEN")
 headers = {"Authorization": f"Bearer {API_TOKEN}"}
 timeout = 100
@@ -27,28 +26,21 @@ article_text = """
 """
 
 def query(lora_id, prompt, steps=28, cfg_scale=3.5, randomize_seed=True, seed=-1, width=1024, height=1024):
-    if ...
+    if not prompt:
         return None
 
-
-    lora_id = "black-forest-labs/FLUX.1-dev"
-
+    lora_id = lora_id.strip() or "black-forest-labs/FLUX.1-dev"
     key = random.randint(0, 999)
 
-    API_URL = "https://api-inference.huggingface.co/models/"
-
-    API_TOKEN = random.choice([os.getenv("HF_READ_TOKEN")])
-    headers = {"Authorization": f"Bearer {API_TOKEN}"}
+    API_URL = f"https://api-inference.huggingface.co/models/{lora_id}"
 
-
-
+    # Uncomment the line below to enable Myanmar-to-English prompt translation
+    prompt = GoogleTranslator(source='my', target='en').translate(prompt)
 
     prompt = f"{prompt} | ultra detail, ultra elaboration, ultra quality, perfect."
-    # print(f'\033[1mGeneration {key}:\033[0m {prompt}')
 
     # If seed is -1, generate a random seed and use it
-    if randomize_seed:
-        seed = random.randint(1, 4294967296)
+    seed = random.randint(1, 4294967296) if randomize_seed else seed
 
     payload = {
         "inputs": prompt,
@@ -56,15 +48,14 @@ def query(lora_id, prompt, steps=28, cfg_scale=3.5, randomize_seed=True, seed=-1
         "cfg_scale": cfg_scale,
         "seed": seed,
         "parameters": {
-            "width": width,
-            "height": height
+            "width": width,
+            "height": height
         }
     }
 
     response = requests.post(API_URL, headers=headers, json=payload, timeout=timeout)
     if response.status_code != 200:
-        print(f"Error: Failed to get image. ...
-        print(f"Response content: {response.text}")
+        print(f"Error: Failed to get image. Status: {response.status_code} - {response.text}")
         if response.status_code == 503:
             raise gr.Error(f"{response.status_code} : The model is being loaded")
         raise gr.Error(f"{response.status_code}")
@@ -98,37 +89,23 @@ with gr.Blocks(theme='Nymbo/Nymbo_Theme', css=css) as app:
     with gr.Column(elem_id="app-container"):
         with gr.Row():
             with gr.Column(elem_id="prompt-container"):
-                [old lines 101-113, the previous prompt/model inputs and settings widgets, are not recoverable from the rendered diff]
-                        cfg = gr.Slider(label="CFG Scale", value=3.5, minimum=1, maximum=20, step=0.5)
-                        # method = gr.Radio(label="Sampling method", value="DPM++ 2M Karras", choices=["DPM++ 2M Karras", "DPM++ SDE Karras", "Euler", "Euler a", "Heun", "DDIM"])
-
-                with gr.Row():
-                    text_button = gr.Button("Run", variant='primary', elem_id="gen-button")
-                with gr.Row():
-                    image_output = gr.Image(type="pil", label="Image Output", elem_id="gallery")
-                with gr.Row():
-                    seed_output = gr.Textbox(label="Seed Used", show_copy_button = True, elem_id="seed-output")
-
+                text_prompt = gr.Textbox(label="Prompt", placeholder="Enter a prompt here (English or Myanmar)", lines=2, elem_id="prompt-text-input")
+                custom_lora = gr.Textbox(label="Custom Model", info="Model path (choose a model path from the Walone LoRA Library)", placeholder="shweaung/mawc-cc")
+                with gr.Accordion("Advanced Settings", open=False):
+                    width = gr.Slider(label="Width", value=1024, minimum=64, maximum=1216, step=8)
+                    height = gr.Slider(label="Height", value=1024, minimum=64, maximum=1216, step=8)
+                    seed = gr.Slider(label="Seed", value=-1, minimum=-1, maximum=4294967296, step=1)
+                    randomize_seed = gr.Checkbox(label="Randomize seed", value=True)
+                    steps = gr.Slider(label="Sampling steps", value=28, minimum=1, maximum=100, step=1)
+                    cfg = gr.Slider(label="CFG Scale", value=3.5, minimum=1, maximum=20, step=0.5)
+
+                text_button = gr.Button("Run", variant='primary', elem_id="gen-button")
+                image_output = gr.Image(type="pil", label="Image Output", elem_id="gallery")
+                seed_output = gr.Textbox(label="Seed Used", show_copy_button=True, elem_id="seed-output")
         gr.Markdown(article_text)
 
-        gr.Examples(
-            examples = examples,
-            inputs = [text_prompt],
-        )
+        gr.Examples(examples=examples, inputs=[text_prompt])
 
-
-    text_button.click(query, inputs=[custom_lora, text_prompt, steps, cfg, randomize_seed, seed, width, height], outputs=[image_output,seed_output, seed])
+    text_button.click(query, inputs=[custom_lora, text_prompt, steps, cfg, randomize_seed, seed, width, height], outputs=[image_output, seed_output, seed])
 
 app.launch(show_api=False, share=True)
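For context, a minimal standalone sketch (not part of the commit) of the request flow the updated query() implements: the model id typed into the "Custom Model" box is interpolated into the serverless Inference API URL, the seed is randomized client-side when the checkbox is set, and the bytes returned by the endpoint are decoded with PIL. The generate() helper, its defaults, and whether the endpoint honors every payload field are assumptions for illustration; only the URL pattern, the HF_READ_TOKEN handling, and the payload shape are taken from the diff above.

# Illustrative sketch only: generate() is hypothetical; the payload mirrors the
# committed query() and may contain fields the endpoint simply ignores.
import io
import os
import random

import requests
from PIL import Image

API_TOKEN = os.getenv("HF_READ_TOKEN")              # same env var as the Space
HEADERS = {"Authorization": f"Bearer {API_TOKEN}"}


def generate(lora_id, prompt, seed=-1, randomize_seed=True, width=1024, height=1024):
    # Fall back to the base FLUX.1-dev model when no custom model path is given
    lora_id = (lora_id or "").strip() or "black-forest-labs/FLUX.1-dev"
    api_url = f"https://api-inference.huggingface.co/models/{lora_id}"

    # Same client-side seed handling as the commit: randomize unless a fixed seed is wanted
    seed = random.randint(1, 4294967296) if randomize_seed else seed

    payload = {
        "inputs": f"{prompt} | ultra detail, ultra elaboration, ultra quality, perfect.",
        "cfg_scale": 3.5,
        "seed": seed,
        "parameters": {"width": width, "height": height},
    }
    response = requests.post(api_url, headers=HEADERS, json=payload, timeout=100)
    response.raise_for_status()  # a 503 here usually means the model is still loading

    # Text-to-image endpoints return the image as raw bytes in the response body
    return Image.open(io.BytesIO(response.content)), seed


# Example call, using the placeholder model path from the diff:
# image, used_seed = generate("shweaung/mawc-cc", "a golden pagoda at sunset")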