lewtun (HF staff) committed
Commit 7b8690f
1 Parent(s): 5341160

Tweak top_p

Files changed (2)
  1. app.ipynb +6 -6
  2. app.py +27 -27
app.ipynb CHANGED
@@ -663,14 +663,14 @@
    },
    {
     "cell_type": "code",
-    "execution_count": 31,
+    "execution_count": 33,
     "metadata": {},
     "outputs": [
      {
       "name": "stdout",
       "output_type": "stream",
       "text": [
-       "Running on local URL: http://127.0.0.1:7868\n",
+       "Running on local URL: http://127.0.0.1:7869\n",
        "\n",
        "To create a public link, set `share=True` in `launch()`.\n"
       ]
@@ -678,7 +678,7 @@
      {
       "data": {
        "text/html": [
-        "<div><iframe src=\"http://127.0.0.1:7868/\" width=\"100%\" height=\"500\" allow=\"autoplay; camera; microphone; clipboard-read; clipboard-write;\" frameborder=\"0\" allowfullscreen></iframe></div>"
+        "<div><iframe src=\"http://127.0.0.1:7869/\" width=\"100%\" height=\"500\" allow=\"autoplay; camera; microphone; clipboard-read; clipboard-write;\" frameborder=\"0\" allowfullscreen></iframe></div>"
        ],
        "text/plain": [
         "<IPython.core.display.HTML object>"
@@ -691,7 +691,7 @@
       "data": {
        "text/plain": []
       },
-      "execution_count": 31,
+      "execution_count": 33,
       "metadata": {},
       "output_type": "execute_result"
      }
@@ -741,7 +741,7 @@
    "top_p = gr.Slider(\n",
    "    minimum=-0,\n",
    "    maximum=1.0,\n",
-   "    value=0.95,\n",
+   "    value=0.8,\n",
    "    step=0.05,\n",
    "    interactive=True,\n",
    "    label=\"Top-p (nucleus sampling)\",\n",
@@ -817,7 +817,7 @@
    },
    {
     "cell_type": "code",
-    "execution_count": 32,
+    "execution_count": 34,
     "metadata": {},
     "outputs": [],
     "source": [
app.py CHANGED
@@ -146,9 +146,34 @@ with gr.Blocks(
         label="Chat Output",
     )
 
+    with gr.Column():
+        chat_input = gr.Textbox(lines=1, label="Chat Input")
+        chat_input.submit(
+            inference_chat,
+            [
+                model_id,
+                prompt_template,
+                chat_input,
+                temperature,
+                top_p,
+                state,
+            ],
+            [chatbot, state],
+        )
+
     with gr.Row():
-        chat_input = gr.Textbox(lines=1, label="Chat Input")
-        chat_input.submit(
+        clear_button = gr.Button(value="Clear", interactive=True)
+        clear_button.click(
+            lambda: ("", [], []),
+            [],
+            [chat_input, chatbot, state],
+            queue=False,
+        )
+
+        submit_button = gr.Button(
+            value="Submit", interactive=True, variant="primary"
+        )
+        submit_button.click(
             inference_chat,
             [
                 model_id,
@@ -160,30 +185,5 @@ with gr.Blocks(
             ],
             [chatbot, state],
         )
-
-    with gr.Row():
-        clear_button = gr.Button(value="Clear", interactive=True)
-        clear_button.click(
-            lambda: ("", [], []),
-            [],
-            [chat_input, chatbot, state],
-            queue=False,
-        )
-
-        submit_button = gr.Button(
-            value="Submit", interactive=True, variant="primary"
-        )
-        submit_button.click(
-            inference_chat,
-            [
-                model_id,
-                prompt_template,
-                chat_input,
-                temperature,
-                top_p,
-                state,
-            ],
-            [chatbot, state],
-        )
 iface.launch()
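The app.py change is structural rather than behavioural: the chat textbox and its submit handler move into a gr.Column, the Clear and Submit buttons share a gr.Row, and both the textbox submit and the Submit button dispatch to the same inference_chat callback. The following is a minimal, self-contained sketch of the resulting layout; the stub inference_chat and the placeholder model/prompt/temperature components are assumptions standing in for the app's real settings:

import gradio as gr

def inference_chat(model_id, prompt_template, text, temperature, top_p, history):
    # Stub: echo the input; the real app queries a hosted model with these settings.
    history = history + [(text, f"(echo from {model_id}) {text}")]
    return history, history

with gr.Blocks() as iface:
    state = gr.State([])
    model_id = gr.Textbox(value="example/model", label="Model")             # placeholder
    prompt_template = gr.Textbox(value="{input}", label="Prompt template")  # placeholder
    temperature = gr.Slider(0.0, 2.0, value=0.7, step=0.05, label="Temperature")
    top_p = gr.Slider(0.0, 1.0, value=0.8, step=0.05, label="Top-p (nucleus sampling)")
    chatbot = gr.Chatbot(label="Chat Output")

    # Column introduced by this commit: pressing Enter in the textbox
    # submits directly to inference_chat.
    with gr.Column():
        chat_input = gr.Textbox(lines=1, label="Chat Input")
        chat_input.submit(
            inference_chat,
            [model_id, prompt_template, chat_input, temperature, top_p, state],
            [chatbot, state],
        )

    # Row holding the two buttons; Submit reuses the same callback and inputs.
    with gr.Row():
        clear_button = gr.Button(value="Clear", interactive=True)
        clear_button.click(
            lambda: ("", [], []),
            [],
            [chat_input, chatbot, state],
            queue=False,
        )

        submit_button = gr.Button(value="Submit", interactive=True, variant="primary")
        submit_button.click(
            inference_chat,
            [model_id, prompt_template, chat_input, temperature, top_p, state],
            [chatbot, state],
        )

iface.launch()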