Mattral committed
Commit 7d2986f
Parent: 3a42962

Update app.py

Files changed (1):
  1. app.py +6 -7
app.py CHANGED
@@ -41,7 +41,7 @@ def chat_inf(prompt, history, seed, temp, tokens, top_p, rep_p):
         yield history
 
 def clear_fn():
-    return None, None, None
+    return None, None
 
 rand_val = random.randint(1, 1111111111111111)
 
@@ -52,14 +52,13 @@ def check_rand(inp, val):
     return gr.Slider(label="Seed", minimum=1, maximum=1111111111111111, value=int(val))
 
 with gr.Blocks() as app:
-    gr.HTML("""<center><h1 style='font-size:xx-large;'>Chatbot</h1><br><h3>running on Huggingface Inference Client</h3><br><h7>EXPERIMENTAL""")
+    gr.HTML("""<center><h1 style='font-size:xx-large;'>Chatbot</h1><br><h3>running on Huggingface Inference </h3><br><h7>EXPERIMENTAL""")
     with gr.Row():
         chat = gr.Chatbot(height=500)
     with gr.Group():
         with gr.Row():
             with gr.Column(scale=3):
-                inp = gr.Textbox(label="Prompt")
-                sys_inp = gr.HTML(value=f"<p>{system_prompt_text}</p>")  # Display the system prompt
+                inp = gr.Textbox(label="Prompt", lines=5, interactive=True)  # Increased lines and interactive
             with gr.Row():
                 with gr.Column(scale=2):
                     btn = gr.Button("Chat")
@@ -73,14 +72,14 @@ with gr.Blocks() as app:
                 seed = gr.Slider(label="Seed", minimum=1, maximum=1111111111111111, step=1, value=rand_val)
                 tokens = gr.Slider(label="Max new tokens", value=3840, minimum=0, maximum=8000, step=64, interactive=True, visible=True, info="The maximum number of tokens")
                 temp = gr.Slider(label="Temperature", step=0.01, minimum=0.01, maximum=1.0, value=0.9)
-                top_p = gr.Slider(label="Top-P", step=0.01, minimum=0.01, maximum=1.0, value=0.9)
-                rep_p = gr.Slider(label="Repetition Penalty", step=0.1, minimum=0.1, maximum=2.0, value=1.0)
+                top_p = gr.Slider(label="Top-P", step=0.01, minimum=0.01, maximum=1.0, value=0.9)
+                rep_p = gr.Slider(label="Repetition Penalty", step=0.1, minimum=0.1, maximum=2.0, value=1.0)
 
     hid1 = gr.Number(value=1, visible=False)
 
     go = btn.click(check_rand, [rand, seed], seed).then(chat_inf, [inp, chat, seed, temp, tokens, top_p, rep_p], chat)
 
     stop_btn.click(None, None, None, cancels=[go])
-    clear_btn.click(clear_fn, None, [inp, sys_inp, chat])
+    clear_btn.click(clear_fn, None, [inp, chat])
 
 app.queue(default_concurrency_limit=10).launch()
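For context, a minimal, self-contained sketch (not the full app.py) of the clear-button wiring after this commit. Gradio expects one return value per component listed in a click() outputs list, so dropping sys_inp from the outputs is what requires clear_fn to return two values instead of three. Component names follow the diff; everything else in the real app is omitted.

# Minimal sketch only -- illustrates the clear_fn / outputs relationship
# touched by this commit; chat_inf, sliders, etc. are intentionally left out.
import gradio as gr

def clear_fn():
    # One return value per output component: inp (Textbox) and chat (Chatbot)
    return None, None

with gr.Blocks() as app:
    chat = gr.Chatbot(height=500)
    inp = gr.Textbox(label="Prompt", lines=5, interactive=True)
    clear_btn = gr.Button("Clear")
    # After this commit the outputs list no longer contains sys_inp,
    # so clear_fn returns two Nones instead of three.
    clear_btn.click(clear_fn, None, [inp, chat])

app.queue(default_concurrency_limit=10).launch()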