Mattral committed on
Commit
f26a1bd
1 Parent(s): 2cb01df

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +10 -7
app.py CHANGED
@@ -3,9 +3,12 @@ from huggingface_hub import InferenceClient
3
  import random
4
 
5
  # Define the model to be used
6
- model = "mistralai/Mixtral-8x7B-Instruct-v0.1" #"NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO"
7
  client = InferenceClient(model)
8
 
 
 
 
9
  def format_prompt_mixtral(message, history):
10
  prompt = "<s>"
11
  if history:
@@ -15,9 +18,9 @@ def format_prompt_mixtral(message, history):
15
  prompt += f"[INST] {message} [/INST]"
16
  return prompt
17
 
18
- def chat_inf(system_prompt, prompt, history, seed, temp, tokens, top_p, rep_p):
19
- if system_prompt:
20
- system_prompt = f'{system_prompt}, '
21
 
22
  generate_kwargs = dict(
23
  temperature=temp,
@@ -28,7 +31,7 @@ def chat_inf(system_prompt, prompt, history, seed, temp, tokens, top_p, rep_p):
28
  seed=seed,
29
  )
30
 
31
- formatted_prompt = format_prompt_mixtral(f"{system_prompt}{prompt}", history)
32
  stream = client.text_generation(formatted_prompt, **generate_kwargs, stream=True, details=True, return_full_text=False)
33
  output = ""
34
  for response in stream:
@@ -56,7 +59,7 @@ with gr.Blocks() as app:
56
  with gr.Row():
57
  with gr.Column(scale=3):
58
  inp = gr.Textbox(label="Prompt")
59
- sys_inp = gr.Textbox(label="System Prompt (optional)")
60
  with gr.Row():
61
  with gr.Column(scale=2):
62
  btn = gr.Button("Chat")
@@ -75,7 +78,7 @@ with gr.Blocks() as app:
75
 
76
  hid1 = gr.Number(value=1, visible=False)
77
 
78
- go = btn.click(check_rand, [rand, seed], seed).then(chat_inf, [sys_inp, inp, chat, seed, temp, tokens, top_p, rep_p], chat)
79
 
80
  stop_btn.click(None, None, None, cancels=[go])
81
  clear_btn.click(clear_fn, None, [inp, sys_inp, chat])
 
3
  import random
4
 
5
  # Define the model to be used
6
+ model = "mistralai/Mixtral-8x7B-Instruct-v0.1"
7
  client = InferenceClient(model)
8
 
9
+ # Embedded system prompt
10
+ system_prompt_text = "You are a smart and helpful co-worker of Thailand based multi-national company PTT, and PTTEP. You help with any kind of request and provide a detailed answer to the question."
11
+
12
  def format_prompt_mixtral(message, history):
13
  prompt = "<s>"
14
  if history:
 
18
  prompt += f"[INST] {message} [/INST]"
19
  return prompt
20
 
21
+ def chat_inf(prompt, history, seed, temp, tokens, top_p, rep_p):
22
+ # Prepend the system prompt to the user prompt
23
+ full_prompt = f"{system_prompt_text}, {prompt}"
24
 
25
  generate_kwargs = dict(
26
  temperature=temp,
 
31
  seed=seed,
32
  )
33
 
34
+ formatted_prompt = format_prompt_mixtral(full_prompt, history)
35
  stream = client.text_generation(formatted_prompt, **generate_kwargs, stream=True, details=True, return_full_text=False)
36
  output = ""
37
  for response in stream:
 
59
  with gr.Row():
60
  with gr.Column(scale=3):
61
  inp = gr.Textbox(label="Prompt")
62
+ sys_inp = gr.HTML(value=f"<p>{system_prompt_text}</p>", interactive=False) # Display the system prompt
63
  with gr.Row():
64
  with gr.Column(scale=2):
65
  btn = gr.Button("Chat")
 
78
 
79
  hid1 = gr.Number(value=1, visible=False)
80
 
81
+ go = btn.click(check_rand, [rand, seed], seed).then(chat_inf, [inp, chat, seed, temp, tokens, top_p, rep_p], chat)
82
 
83
  stop_btn.click(None, None, None, cancels=[go])
84
  clear_btn.click(clear_fn, None, [inp, sys_inp, chat])