Mattral committed on
Commit 166e47c
1 Parent(s): 6259c2f

Update app.py

Files changed (1)
  app.py +11 -7
app.py CHANGED
@@ -9,13 +9,17 @@ client = InferenceClient(model)
 # Embedded system prompt
 system_prompt_text = "You are a smart and helpful co-worker of Thailand based multi-national company PTT, and PTTEP. You help with any kind of request and provide a detailed answer to the question."
 
-def format_prompt_mixtral(message, history):
+# Read the content of the info.md file
+with open("info.md", "r") as file:
+    info_md_content = file.read()
+
+def format_prompt_mixtral(message, history, info_md_content):
     prompt = "<s>"
     if history:
         for user_prompt, bot_response in history:
             prompt += f"[INST] {user_prompt} [/INST]"
             prompt += f" {bot_response}</s> "
-    prompt += f"[INST] {message} [/INST]"
+    prompt += f"[INST] {info_md_content}\n\n{message} [/INST]"
     return prompt
 
 def chat_inf(prompt, history, seed, temp, tokens, top_p, rep_p):
@@ -31,7 +35,7 @@ def chat_inf(prompt, history, seed, temp, tokens, top_p, rep_p):
         seed=seed,
     )
 
-    formatted_prompt = format_prompt_mixtral(full_prompt, history)
+    formatted_prompt = format_prompt_mixtral(full_prompt, history, info_md_content)
     stream = client.text_generation(formatted_prompt, **generate_kwargs, stream=True, details=True, return_full_text=False)
     output = ""
     for response in stream:
@@ -51,8 +55,8 @@ def check_rand(inp, val):
     else:
         return gr.Slider(label="Seed", minimum=1, maximum=1111111111111111, value=int(val))
 
-with gr.Blocks() as app:
-    gr.HTML("""<center><h1 style='font-size:xx-large;'>Chatbot</h1><br><h3>running on Huggingface Inference </h3><br><h7>EXPERIMENTAL""")
+with gr.Blocks(auth=("Admin", "0112358")) as app:  # Add auth here
+    gr.HTML("""<center><h1 style='font-size:xx-large;'>Chatbot</h1><br><h3>running on Huggingface Inference Client</h3><br><h7>EXPERIMENTAL""")
     with gr.Row():
         chat = gr.Chatbot(height=500)
     with gr.Group():
@@ -72,8 +76,8 @@ with gr.Blocks() as app:
         seed = gr.Slider(label="Seed", minimum=1, maximum=1111111111111111, step=1, value=rand_val)
         tokens = gr.Slider(label="Max new tokens", value=3840, minimum=0, maximum=8000, step=64, interactive=True, visible=True, info="The maximum number of tokens")
         temp = gr.Slider(label="Temperature", step=0.01, minimum=0.01, maximum=1.0, value=0.9)
-        top_p = gr.Slider(label="Top-P", step=0.01, minimum=0.01, maximum=1.0, value=0.9)
-        rep_p = gr.Slider(label="Repetition Penalty", step=0.1, minimum=0.1, maximum=2.0, value=1.0)
+        top_p = gr.Slider(label="Top-P", step=0.01, minimum=0.01, maximum=1.0, value=0.9)
+        rep_p = gr.Slider(label="Repetition Penalty", step=0.1, minimum=0.1, maximum=2.0, value=1.0)
 
         hid1 = gr.Number(value=1, visible=False)
 
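
Note: for reference, a minimal standalone sketch of the prompt path this commit introduces is below. It assumes an info.md file sits next to app.py; the two-turn history in the demo at the bottom is hypothetical example data, not taken from the app.

# Sketch of the updated prompt formatting introduced by this commit.
# Assumes an info.md file exists next to app.py; the history used in the
# demo below is hypothetical example data.
def format_prompt_mixtral(message, history, info_md_content):
    prompt = "<s>"
    if history:
        for user_prompt, bot_response in history:
            prompt += f"[INST] {user_prompt} [/INST]"
            prompt += f" {bot_response}</s> "
    # The grounding document (info.md) is prepended to the latest user
    # message, so it is re-sent to the model on every turn.
    prompt += f"[INST] {info_md_content}\n\n{message} [/INST]"
    return prompt

if __name__ == "__main__":
    with open("info.md", "r") as file:
        info_md_content = file.read()
    history = [("Hello", "Hi, how can I help you today?")]
    print(format_prompt_mixtral("Summarise info.md in one paragraph.", history, info_md_content))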
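
Side note on the auth change: in Gradio, basic authentication is normally supplied to launch() as a (username, password) tuple rather than to the Blocks constructor, so whether Blocks(auth=...) takes effect depends on the Gradio version pinned by the Space. A minimal sketch of the launch-time form, reusing the commit's placeholder credentials:

import gradio as gr

# Build the UI as usual; no auth argument on the Blocks constructor.
with gr.Blocks() as app:
    gr.HTML("<h1>Chatbot</h1>")

# Basic auth is passed at launch time; ("Admin", "0112358") mirrors the
# placeholder credentials used in the commit.
app.launch(auth=("Admin", "0112358"))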