lewtun (HF staff) committed
Commit: 8494d2c
1 Parent(s): 7b8690f

Tweak layout

Files changed (2):
  1. app.ipynb +64 -51
  2. app.py +44 -47
app.ipynb CHANGED
@@ -663,14 +663,14 @@
  },
  {
  "cell_type": "code",
- "execution_count": 33,
+ "execution_count": 36,
  "metadata": {},
  "outputs": [
  {
  "name": "stdout",
  "output_type": "stream",
  "text": [
- "Running on local URL: http://127.0.0.1:7869\n",
+ "Running on local URL: http://127.0.0.1:7871\n",
  "\n",
  "To create a public link, set `share=True` in `launch()`.\n"
  ]
@@ -678,7 +678,7 @@
  {
  "data": {
  "text/html": [
- "<div><iframe src=\"http://127.0.0.1:7869/\" width=\"100%\" height=\"500\" allow=\"autoplay; camera; microphone; clipboard-read; clipboard-write;\" frameborder=\"0\" allowfullscreen></iframe></div>"
+ "<div><iframe src=\"http://127.0.0.1:7871/\" width=\"100%\" height=\"500\" allow=\"autoplay; camera; microphone; clipboard-read; clipboard-write;\" frameborder=\"0\" allowfullscreen></iframe></div>"
  ],
  "text/plain": [
  "<IPython.core.display.HTML object>"
@@ -691,9 +691,25 @@
  "data": {
  "text/plain": []
  },
- "execution_count": 33,
+ "execution_count": 36,
  "metadata": {},
  "output_type": "execute_result"
+ },
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "Traceback (most recent call last):\n",
+ " File \"/Users/lewtun/miniconda3/envs/hf/lib/python3.8/site-packages/gradio/routes.py\", line 337, in run_predict\n",
+ " output = await app.get_blocks().process_api(\n",
+ " File \"/Users/lewtun/miniconda3/envs/hf/lib/python3.8/site-packages/gradio/blocks.py\", line 1018, in process_api\n",
+ " data = self.postprocess_data(fn_index, result[\"prediction\"], state)\n",
+ " File \"/Users/lewtun/miniconda3/envs/hf/lib/python3.8/site-packages/gradio/blocks.py\", line 924, in postprocess_data\n",
+ " predictions = convert_component_dict_to_list(\n",
+ " File \"/Users/lewtun/miniconda3/envs/hf/lib/python3.8/site-packages/gradio/blocks.py\", line 397, in convert_component_dict_to_list\n",
+ " raise ValueError(\n",
+ "ValueError: Returned component chatbot not specified as output of function.\n"
+ ]
  }
  ],
  "source": [
@@ -749,52 +765,49 @@
  "\n",
  " with gr.Column(scale=1.8):\n",
  " with gr.Row():\n",
- " with gr.Column(\n",
- " scale=1.5,\n",
- " ):\n",
- " chatbot = gr.Chatbot(\n",
- " label=\"Chat Output\",\n",
- " )\n",
- "\n",
- " with gr.Column():\n",
- " chat_input = gr.Textbox(lines=1, label=\"Chat Input\")\n",
- " chat_input.submit(\n",
- " inference_chat,\n",
- " [\n",
- " model_id,\n",
- " prompt_template,\n",
- " chat_input,\n",
- " temperature,\n",
- " top_p,\n",
- " state,\n",
- " ],\n",
- " [chatbot, state],\n",
- " )\n",
- "\n",
- " with gr.Row():\n",
- " clear_button = gr.Button(value=\"Clear\", interactive=True)\n",
- " clear_button.click(\n",
- " lambda: (\"\", [], []),\n",
- " [],\n",
- " [chat_input, chatbot, state],\n",
- " queue=False,\n",
- " )\n",
- "\n",
- " submit_button = gr.Button(\n",
- " value=\"Submit\", interactive=True, variant=\"primary\"\n",
- " )\n",
- " submit_button.click(\n",
- " inference_chat,\n",
- " [\n",
- " model_id,\n",
- " prompt_template,\n",
- " chat_input,\n",
- " temperature,\n",
- " top_p,\n",
- " state,\n",
- " ],\n",
- " [chatbot, state],\n",
- " )\n",
+ " chatbot = gr.Chatbot(\n",
+ " label=\"Chat Output\",\n",
+ " )\n",
+ "\n",
+ " with gr.Row():\n",
+ " chat_input = gr.Textbox(lines=1, label=\"Chat Input\")\n",
+ " chat_input.submit(\n",
+ " inference_chat,\n",
+ " [\n",
+ " model_id,\n",
+ " prompt_template,\n",
+ " chat_input,\n",
+ " temperature,\n",
+ " top_p,\n",
+ " state,\n",
+ " ],\n",
+ " [chatbot, state],\n",
+ " )\n",
+ "\n",
+ " with gr.Row():\n",
+ " clear_button = gr.Button(value=\"Clear\", interactive=True)\n",
+ " clear_button.click(\n",
+ " lambda: (\"\", [], []),\n",
+ " [],\n",
+ " [chat_input, chatbot, state],\n",
+ " queue=False,\n",
+ " )\n",
+ "\n",
+ " submit_button = gr.Button(\n",
+ " value=\"Submit\", interactive=True, variant=\"primary\"\n",
+ " )\n",
+ " submit_button.click(\n",
+ " inference_chat,\n",
+ " [\n",
+ " model_id,\n",
+ " prompt_template,\n",
+ " chat_input,\n",
+ " temperature,\n",
+ " top_p,\n",
+ " state,\n",
+ " ],\n",
+ " [chatbot, state],\n",
+ " )\n",
  "iface.launch()\n"
  ]
  },
@@ -817,7 +830,7 @@
  },
  {
  "cell_type": "code",
- "execution_count": 34,
+ "execution_count": 37,
  "metadata": {},
  "outputs": [],
  "source": [
app.py CHANGED
@@ -131,7 +131,7 @@ with gr.Blocks(
  top_p = gr.Slider(
  minimum=-0,
  maximum=1.0,
- value=0.95,
+ value=0.8,
  step=0.05,
  interactive=True,
  label="Top-p (nucleus sampling)",
@@ -139,51 +139,48 @@ with gr.Blocks(
 
  with gr.Column(scale=1.8):
  with gr.Row():
- with gr.Column(
- scale=1.5,
- ):
- chatbot = gr.Chatbot(
- label="Chat Output",
- )
-
- with gr.Column():
- chat_input = gr.Textbox(lines=1, label="Chat Input")
- chat_input.submit(
- inference_chat,
- [
- model_id,
- prompt_template,
- chat_input,
- temperature,
- top_p,
- state,
- ],
- [chatbot, state],
- )
-
- with gr.Row():
- clear_button = gr.Button(value="Clear", interactive=True)
- clear_button.click(
- lambda: ("", [], []),
- [],
- [chat_input, chatbot, state],
- queue=False,
- )
-
- submit_button = gr.Button(
- value="Submit", interactive=True, variant="primary"
- )
- submit_button.click(
- inference_chat,
- [
- model_id,
- prompt_template,
- chat_input,
- temperature,
- top_p,
- state,
- ],
- [chatbot, state],
- )
+ chatbot = gr.Chatbot(
+ label="Chat Output",
+ )
+
+ with gr.Row():
+ chat_input = gr.Textbox(lines=1, label="Chat Input")
+ chat_input.submit(
+ inference_chat,
+ [
+ model_id,
+ prompt_template,
+ chat_input,
+ temperature,
+ top_p,
+ state,
+ ],
+ [chatbot, state],
+ )
+
+ with gr.Row():
+ clear_button = gr.Button(value="Clear", interactive=True)
+ clear_button.click(
+ lambda: ("", [], []),
+ [],
+ [chat_input, chatbot, state],
+ queue=False,
+ )
+
+ submit_button = gr.Button(
+ value="Submit", interactive=True, variant="primary"
+ )
+ submit_button.click(
+ inference_chat,
+ [
+ model_id,
+ prompt_template,
+ chat_input,
+ temperature,
+ top_p,
+ state,
+ ],
+ [chatbot, state],
+ )
  iface.launch()
 
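
For reference, a minimal, self-contained sketch of the layout this commit arrives at might look like the following. This is an illustrative sketch, not the app itself: it assumes a Gradio 3.x Blocks API, and `inference_chat` plus the model/prompt controls are stubbed here (with made-up placeholder values), since the real app defines them elsewhere in app.py.

# Minimal sketch of the post-commit layout (assumes a 3.x-era Gradio Blocks API).
import gradio as gr


def inference_chat(model_id, prompt_template, text, temperature, top_p, history):
    # Stub handler: echo the input instead of calling a model endpoint.
    history = history + [(text, f"(echo from {model_id}) {text}")]
    return history, history


with gr.Blocks() as iface:
    state = gr.State([])

    with gr.Row():
        with gr.Column(scale=1):
            # Placeholder controls standing in for the app's real widgets.
            model_id = gr.Textbox(value="some-model-id", label="Model")
            prompt_template = gr.Textbox(value="{query}", label="Prompt template")
            temperature = gr.Slider(0.0, 1.0, value=0.7, step=0.05, label="Temperature")
            top_p = gr.Slider(0.0, 1.0, value=0.8, step=0.05, label="Top-p (nucleus sampling)")

        with gr.Column(scale=1.8):
            # After this commit, the chatbot and inputs are stacked in Rows
            # rather than nested Columns.
            with gr.Row():
                chatbot = gr.Chatbot(label="Chat Output")

            with gr.Row():
                chat_input = gr.Textbox(lines=1, label="Chat Input")
                chat_input.submit(
                    inference_chat,
                    [model_id, prompt_template, chat_input, temperature, top_p, state],
                    [chatbot, state],
                )

            with gr.Row():
                clear_button = gr.Button(value="Clear", interactive=True)
                clear_button.click(
                    lambda: ("", [], []),
                    [],
                    [chat_input, chatbot, state],
                    queue=False,
                )

                submit_button = gr.Button(value="Submit", interactive=True, variant="primary")
                submit_button.click(
                    inference_chat,
                    [model_id, prompt_template, chat_input, temperature, top_p, state],
                    [chatbot, state],
                )

iface.launch()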