fantaxy committed
Commit cb9c510
1 Parent(s): 67762a6

Update app.py

Files changed (1)
  1. app.py +26 -30
app.py CHANGED
@@ -16,7 +16,7 @@ from huggingface_hub import snapshot_download
 import spaces
 
 device = "cuda"
-root_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
+root_dir = os.getcwd()
 ckpt_dir = f'{root_dir}/weights/Kolors'
 
 snapshot_download(repo_id="Kwai-Kolors/Kolors", local_dir=ckpt_dir)
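
Note on the root_dir change above: the removed expression resolves to the parent of the folder that contains app.py, while os.getcwd() returns the directory the process was started from (on a Hugging Face Space this is normally the repository folder itself), so the Kolors weights now land inside the working directory rather than one level above the app. A minimal sketch of the difference; the variable names are illustrative only:

    import os

    # Removed behaviour: anchor to the source file, two dirname() calls up,
    # independent of where the interpreter was launched.
    script_anchored = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

    # New behaviour: anchor to the process's current working directory.
    cwd_anchored = os.getcwd()

    print(script_anchored)
    print(cwd_anchored)
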
@@ -48,9 +48,6 @@ pipe = StableDiffusionXLPipeline(
     force_zeros_for_empty_prompt=False
 ).to(device)
 
-#pipe = pipe.to(device)
-#pipe.enable_model_cpu_offload()
-
 if hasattr(pipe.unet, 'encoder_hid_proj'):
     pipe.unet.text_encoder_hid_proj = pipe.unet.encoder_hid_proj
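
The two deleted lines were commented-out leftovers; the second named the usual diffusers alternative to pinning the whole pipeline on the GPU with .to(device). For reference, a self-contained sketch of that alternative (the SDXL checkpoint here is only an example, not the Kolors weights this Space builds its pipeline from):

    import torch
    from diffusers import StableDiffusionXLPipeline

    # Example checkpoint; this Space assembles its own Kolors pipeline instead.
    pipe = StableDiffusionXLPipeline.from_pretrained(
        "stabilityai/stable-diffusion-xl-base-1.0", torch_dtype=torch.float16
    )
    # Instead of pipe.to("cuda"), let accelerate move each sub-model to the GPU
    # only while it runs, lowering peak VRAM at some cost in speed.
    pipe.enable_model_cpu_offload()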
 
@@ -85,9 +82,9 @@ def infer(prompt, ip_adapter_image, ip_adapter_scale=0.5, negative_prompt="", se
     return image, seed
 
 examples = [
-    ["A dog", "minta.jpeg", 0.4],
-    ["A capybara", "king-min.png", 0.5],
-    ["A cat", "blue_hair.png", 0.5],
+    ["강아지", "minta.jpeg", 0.4],
+    ["카피바라", "king-min.png", 0.5],
+    ["고양이", "blue_hair.png", 0.5],
     ["", "meow.jpeg", 1.0],
 ]
 
@@ -107,56 +104,56 @@ css="""
 with gr.Blocks(css=css) as demo:
     with gr.Column(elem_id="col-container"):
         gr.Markdown(f"""
-        # Kolors IP-Adapter - image reference and variations
+        # Kolors IP-Adapter - 이미지 참조 및 변형
         """)
-
+
         with gr.Row():
             prompt = gr.Text(
-                label="Prompt",
+                label="프롬프트",
                 show_label=False,
                 max_lines=1,
-                placeholder="Enter your prompt",
+                placeholder="프롬프트를 입력하세요",
                 container=False,
             )
-            run_button = gr.Button("Run", scale=0)
-
+            run_button = gr.Button("실행", scale=0)
+
        with gr.Row():
            with gr.Column():
-                ip_adapter_image = gr.Image(label="IP-Adapter Image", type="pil")
+                ip_adapter_image = gr.Image(label="IP-어댑터 이미지", type="pil")
                 ip_adapter_scale = gr.Slider(
-                    label="Image influence scale",
-                    info="Use 1 for creating variations",
+                    label="이미지 영향 척도",
+                    info="변형을 생성하려면 1을 사용하세요",
                     minimum=0.0,
                     maximum=1.0,
                     step=0.05,
                     value=0.5,
                 )
-            result = gr.Image(label="Result", elem_id="result")
+            result = gr.Image(label="결과", elem_id="result")
 
-        with gr.Accordion("Advanced Settings", open=False):
+        with gr.Accordion("고급 설정", open=False):
             negative_prompt = gr.Text(
-                label="Negative prompt",
+                label="부정적 프롬프트",
                 max_lines=1,
-                placeholder="Enter a negative prompt",
+                placeholder="부정적 프롬프트를 입력하세요",
             )
             seed = gr.Slider(
-                label="Seed",
+                label="시드",
                 minimum=0,
                 maximum=MAX_SEED,
                 step=1,
                 value=0,
             )
-            randomize_seed = gr.Checkbox(label="Randomize seed", value=True)
+            randomize_seed = gr.Checkbox(label="시드 무작위화", value=True)
             with gr.Row():
                 width = gr.Slider(
-                    label="Width",
+                    label="너비",
                     minimum=256,
                     maximum=MAX_IMAGE_SIZE,
                     step=32,
                     value=1024,
                 )
                 height = gr.Slider(
-                    label="Height",
+                    label="높이",
                     minimum=256,
                     maximum=MAX_IMAGE_SIZE,
                     step=32,
@@ -164,18 +161,18 @@ with gr.Blocks(css=css) as demo:
                 )
             with gr.Row():
                 guidance_scale = gr.Slider(
-                    label="Guidance scale",
+                    label="가이던스 척도",
                     minimum=0.0,
                     maximum=10.0,
                     step=0.1,
                     value=5.0,
                 )
                 num_inference_steps = gr.Slider(
-                    label="Number of inference steps",
+                    label="추론 단계 수",
                     minimum=1,
                     maximum=100,
                     step=1,
-                    value=100,
+                    value=50,
                 )
 
         gr.Examples(
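
For readers who do not read Korean, the strings introduced in the hunks above map back to the English strings they replace as follows (a reference table added for this summary, not code from app.py):

    # New Korean UI string -> English string it replaced
    KOREAN_TO_ENGLISH = {
        "강아지": "A dog",
        "카피바라": "A capybara",
        "고양이": "A cat",
        "이미지 참조 및 변형": "image reference and variations",
        "프롬프트": "Prompt",
        "프롬프트를 입력하세요": "Enter your prompt",
        "실행": "Run",
        "IP-어댑터 이미지": "IP-Adapter Image",
        "이미지 영향 척도": "Image influence scale",
        "변형을 생성하려면 1을 사용하세요": "Use 1 for creating variations",
        "결과": "Result",
        "고급 설정": "Advanced Settings",
        "부정적 프롬프트": "Negative prompt",
        "부정적 프롬프트를 입력하세요": "Enter a negative prompt",
        "시드": "Seed",
        "시드 무작위화": "Randomize seed",
        "너비": "Width",
        "높이": "Height",
        "가이던스 척도": "Guidance scale",
        "추론 단계 수": "Number of inference steps",
    }
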
@@ -193,6 +190,5 @@ with gr.Blocks(css=css) as demo:
         outputs=[result, seed]
     )
 
-# Set port 7890, enable the queue, enable the API
-demo.launch()
-
+# Launch the app
+demo.launch(share=True)
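
The removed Korean comment ("set port 7890, enable the queue, enable the API") described a launch configuration that the old demo.launch() call never actually applied; the new call only adds a public share link. If that configuration were wanted, it would look roughly like the sketch below; the port number comes from the removed comment, the rest is an assumption about intent, not code from this commit:

    import gradio as gr

    with gr.Blocks() as demo:
        gr.Markdown("placeholder UI")

    demo.queue()                      # enable the request queue
    demo.launch(
        server_name="0.0.0.0",        # listen on all interfaces
        server_port=7890,             # the port mentioned in the removed comment
        show_api=True,                # expose the auto-generated API docs
        share=True,                   # public share link, as the new code already does
    )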
 