hysts HF staff committed on
Commit
5b8e81a
β€’
1 Parent(s): 8a72943

Add duplicate badge

Browse files
Files changed (1) hide show
  1. app.py +17 -11
app.py CHANGED
@@ -1,3 +1,4 @@
 
1
 
2
  import gradio as gr
3
  import torch
@@ -141,18 +142,23 @@ css = """
141
  block = gr.Blocks(css=css)
142
 
143
  examples = [
144
-
145
  [
146
  'Thinking man in anime style'
147
  ],
148
-
149
  ]
150
 
 
 
151
  with block as demo:
152
- gr.Markdown("""
153
-
154
 
155
 [![Framework: PyTorch](https://img.shields.io/badge/Framework-PyTorch-orange.svg)](https://pytorch.org/) [![Huggingface space](https://img.shields.io/badge/🤗-Huggingface-yellow.svg)](https://huggingface.co/sberbank-ai/Kandinsky_2.0)
 
 
 
156
 [Official BlogPost](https://habr.com/ru/company/sberbank/blog/725282/)
157
 [Official Telegram Bot](https://t.me/kandinsky21_bot)
158
 [Official site](https://fusionbrain.ai/diffusion)
@@ -176,7 +182,7 @@ Kandinsky 2.1 was trained on a large-scale image-text dataset LAION HighRes and
176
 
177
  **Kandinsky 2.1** architecture overview:
178
  ![](kandi2.png)
179
-
180
  """
181
  )
182
  with gr.Group():
@@ -194,19 +200,19 @@ Kandinsky 2.1 was trained on a large-scale image-text dataset LAION HighRes and
194
  margin=False,
195
  rounded=(False, True, True, False),
196
  )
197
-
198
  gallery = gr.Gallery(label="Generated images", show_label=False, elem_id="generated_id").style(
199
  grid=[2], height="auto"
200
  )
201
-
202
  ex = gr.Examples(examples=examples, fn=infer, inputs=[text], outputs=gallery, cache_examples=True)
203
  ex.dataset.headers = [""]
204
-
205
  text.submit(infer, inputs=[text], outputs=gallery)
206
  btn.click(infer, inputs=[text], outputs=gallery)
207
  gr.Markdown("""
208
-
209
-
210
  # Authors
211
 
212
  + Arseniy Shakhmatov: [Github](https://github.com/cene555), [Blog](https://t.me/gradientdip)
@@ -219,5 +225,5 @@ gr.Markdown("""
219
 
220
  """
221
  )
222
-
223
  demo.queue(max_size=15).launch()
1
+ import os
2
 
3
  import gradio as gr
4
  import torch
142
  block = gr.Blocks(css=css)
143
 
144
  examples = [
145
+
146
  [
147
  'Thinking man in anime style'
148
  ],
149
+
150
  ]
151
 
152
+ SPACE_ID = os.getenv('SPACE_ID')
153
+
154
  with block as demo:
155
+ gr.Markdown(f"""
156
+
157
 
158
 [![Framework: PyTorch](https://img.shields.io/badge/Framework-PyTorch-orange.svg)](https://pytorch.org/) [![Huggingface space](https://img.shields.io/badge/🤗-Huggingface-yellow.svg)](https://huggingface.co/sberbank-ai/Kandinsky_2.0)
159
+
160
+ <p>For faster inference without waiting in queue, you may duplicate the space and upgrade to GPU in settings. <a href="https://huggingface.co/spaces/{SPACE_ID}?duplicate=true"><img style="display: inline; margin-top: 0em; margin-bottom: 0em" src="https://bit.ly/3gLdBN6" alt="Duplicate Space" /></a></p>
161
+
162
 [Official BlogPost](https://habr.com/ru/company/sberbank/blog/725282/)
163
 [Official Telegram Bot](https://t.me/kandinsky21_bot)
164
 [Official site](https://fusionbrain.ai/diffusion)
182
 
183
  **Kandinsky 2.1** architecture overview:
184
  ![](kandi2.png)
185
+
186
  """
187
  )
188
  with gr.Group():
200
  margin=False,
201
  rounded=(False, True, True, False),
202
  )
203
+
204
  gallery = gr.Gallery(label="Generated images", show_label=False, elem_id="generated_id").style(
205
  grid=[2], height="auto"
206
  )
207
+
208
  ex = gr.Examples(examples=examples, fn=infer, inputs=[text], outputs=gallery, cache_examples=True)
209
  ex.dataset.headers = [""]
210
+
211
  text.submit(infer, inputs=[text], outputs=gallery)
212
  btn.click(infer, inputs=[text], outputs=gallery)
213
  gr.Markdown("""
214
+
215
+
216
  # Authors
217
 
218
  + Arseniy Shakhmatov: [Github](https://github.com/cene555), [Blog](https://t.me/gradientdip)
225
 
226
  """
227
  )
228
+
229
  demo.queue(max_size=15).launch()