guipenedo (HF staff) committed
Commit 9234f11
Parent: b525961

Update gradio to fix queue issue (#56)


- Update gradio to fix queue issue (718a252e78c311eb612547132255828352e75222)

Files changed (1)
  1. app.py +5 -8
app.py CHANGED
@@ -1,10 +1,7 @@
-import json
 import os
-import shutil
-import requests
 
 import gradio as gr
-from huggingface_hub import Repository, InferenceClient
+from huggingface_hub import InferenceClient
 
 HF_TOKEN = os.environ.get("HF_TOKEN", None)
 API_URL = "https://api-inference.huggingface.co/models/tiiuae/falcon-180B-chat"
@@ -119,9 +116,9 @@ additional_inputs=[
 
 with gr.Blocks() as demo:
     with gr.Row():
-        with gr.Column(scale=0.4):
+        with gr.Column(scale=2):
             gr.Image("better_banner.jpeg", elem_id="banner-image", show_label=False)
-        with gr.Column():
+        with gr.Column(scale=5):
             gr.Markdown(
                 """# Falcon-180B Demo
 
@@ -140,9 +137,9 @@ with gr.Blocks() as demo:
             )
 
     gr.ChatInterface(
-        generate,
+        generate,
         examples=EXAMPLES,
         additional_inputs=additional_inputs,
     )
 
-demo.queue(concurrency_count=100, api_open=False).launch(show_api=False)
+demo.queue(api_open=False).launch(show_api=False)
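
The queue fix in the last hunk tracks a breaking change in newer Gradio releases: the concurrency_count argument was removed from Blocks.queue(), so the old call fails at startup, and the integer scale values in the second hunk reflect the same version bump. As a minimal sketch (assuming a Gradio 4.x environment, and using a placeholder echo handler rather than this Space's generate function), a comparable concurrency cap could instead be set with default_concurrency_limit:

import gradio as gr

# Placeholder chat handler for illustration only; the real Space uses its
# own generate() function that streams from the Falcon-180B inference API.
def echo(message, history):
    return message

with gr.Blocks() as demo:
    gr.ChatInterface(echo)

# concurrency_count no longer exists on queue(); in Gradio 4.x the comparable
# knob is default_concurrency_limit (assumption: a 4.x install).
demo.queue(default_concurrency_limit=100, api_open=False).launch(show_api=False)

If finer-grained control is needed, newer Gradio versions also accept a per-event concurrency_limit argument; either way, dropping the removed keyword, as this commit does, is enough to get the queue working again with the library's defaults.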