5to9 committed
Commit e527982 · 1 parent: b0caeb4

0.40 system prompts

Files changed (1): app.py (+26 −1)
app.py CHANGED
@@ -18,6 +18,13 @@ HF_TOKEN = os.environ.get("HF_TOKEN", None)
 login(token=HF_TOKEN)
 
 
+system_prompts = {
+    "English": "You are a helpful chatbot that adheres to the prompted request.",
+    "German": "Du bist ein hilfreicher Chatbot, der die gestellte Anfrage befolgt.",
+    "French": "Vous êtes un chatbot utile qui respecte la demande soumise.",
+    "Spanish": "Eres un chatbot útil que sigue la solicitud proporcionada."
+}
+
 model_info = [{"id": "NousResearch/Meta-Llama-3.1-8B-Instruct",
               "name": "Meta Llama 3.1 8B Instruct"},
              {"id": "mistralai/Mistral-7B-Instruct-v0.3",
@@ -202,7 +209,18 @@ with gr.Blocks() as demo:
     with gr.Column():
         gr.HTML("<center><h1>🤖le Royale</h1></center>")
         gr.Markdown(arena_notes)
-        system_prompt = gr.Textbox(lines=1, label="System Prompt", value="You are a helpful chatbot that adheres to the prompted request.", show_copy_button=True)
+
+        language_dropdown = gr.Dropdown(
+            choices=["English", "German", "French", "Spanish"],
+            label="Select Language for System Prompt",
+            value="English"
+        )
+        system_prompt = gr.Textbox(
+            lines=1,
+            label="System Prompt",
+            value=system_prompts["English"],
+            show_copy_button=True
+        )
     with gr.Row(variant="panel"):
         with gr.Column(scale=1):
             submit_btn = gr.Button(value="Generate", variant="primary")
@@ -221,6 +239,13 @@ with gr.Blocks() as demo:
                 top_p = gr.Slider(minimum=0.0, maximum=1.0, value=1.0, label="Top-p", step=0.01)
                 repetition_penalty = gr.Slider(minimum=0.1, maximum=2.0, value=1.1, label="Repetition Penalty", step=0.1)
 
+
+    language_dropdown.change(
+        lambda lang: system_prompts[lang],
+        inputs=[language_dropdown],
+        outputs=[system_prompt]
+    )
+
     better_bot.select(reveal_bot, inputs=[better_bot, chatbot_a, chatbot_b], outputs=[chatbot_a, chatbot_b]) #fckp outputs=[chatbot_a, chatbot_b]
     input_text.submit(generate_both, inputs=[system_prompt, input_text, chatbot_a, chatbot_b, max_new_tokens, temperature, top_p, repetition_penalty], outputs=[chatbot_a, chatbot_b])
     submit_btn.click(generate_both, inputs=[system_prompt, input_text, chatbot_a, chatbot_b, max_new_tokens, temperature, top_p, repetition_penalty], outputs=[chatbot_a, chatbot_b])
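For context, the change adds a language dropdown whose selection overwrites the system-prompt textbox via Gradio's .change() event. Below is a minimal, self-contained sketch of just that wiring, assuming a recent Gradio version; the rest of the arena UI, model setup, and the generate_both handler are omitted, so this is illustrative rather than a drop-in copy of app.py.

# Sketch of the dropdown -> textbox pattern introduced in this commit (language selector only).
import gradio as gr

system_prompts = {
    "English": "You are a helpful chatbot that adheres to the prompted request.",
    "German": "Du bist ein hilfreicher Chatbot, der die gestellte Anfrage befolgt.",
    "French": "Vous êtes un chatbot utile qui respecte la demande soumise.",
    "Spanish": "Eres un chatbot útil que sigue la solicitud proporcionada.",
}

with gr.Blocks() as demo:
    # Dropdown picks which language's default prompt to load.
    language_dropdown = gr.Dropdown(
        choices=list(system_prompts),
        label="Select Language for System Prompt",
        value="English",
    )
    # Textbox holds the (still editable) system prompt used for generation.
    system_prompt = gr.Textbox(
        lines=1,
        label="System Prompt",
        value=system_prompts["English"],
        show_copy_button=True,
    )
    # Returning the mapped string from the callback updates the textbox value.
    language_dropdown.change(
        lambda lang: system_prompts[lang],
        inputs=[language_dropdown],
        outputs=[system_prompt],
    )

if __name__ == "__main__":
    demo.launch()

Because the callback simply returns the new string, Gradio replaces the textbox value on every dropdown change; the (possibly hand-edited) textbox content is then passed as the first input to generate_both on submit, exactly as in the unchanged event handlers above.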