randeom committed
Commit f328007
1 Parent(s): 17fba42

Update app.py

Files changed (1)
app.py +7 -6
app.py CHANGED
@@ -5,9 +5,10 @@ client = InferenceClient(
     "mistralai/Mistral-7B-Instruct-v0.1"
 )
 
-
-def format_prompt(message, history):
+def format_prompt(message, history, system_prompt=""):
     prompt = "<s>"
+    if system_prompt:
+        prompt += f"[SYS] {system_prompt} [/SYS] "
     for user_prompt, bot_response in history:
         prompt += f"[INST] {user_prompt} [/INST]"
         prompt += f" {bot_response}</s> "
@@ -15,7 +16,7 @@ def format_prompt(message, history):
     return prompt
 
 def generate(
-    prompt, history, temperature=0.9, max_new_tokens=256, top_p=0.95, repetition_penalty=1.0,
+    prompt, history, system_prompt="", temperature=0.9, max_new_tokens=256, top_p=0.95, repetition_penalty=1.0,
 ):
     temperature = float(temperature)
     if temperature < 1e-2:
@@ -31,7 +32,7 @@ def generate(
         seed=42,
     )
 
-    formatted_prompt = format_prompt(prompt, history)
+    formatted_prompt = format_prompt(prompt, history, system_prompt)
 
     stream = client.text_generation(formatted_prompt, **generate_kwargs, stream=True, details=True, return_full_text=False)
     output = ""
@@ -41,8 +42,8 @@ def generate(
         yield output
     return output
 
-
 additional_inputs=[
+    gr.Textbox("", label="Optional system prompt"),
     gr.Slider(
         label="Temperature",
         value=0.9,
@@ -99,4 +100,4 @@ with gr.Blocks(css=css) as demo:
         examples=[["What is the secret to life?"], ["Write me a recipe for pancakes."]]
     )
 
-demo.queue().launch(debug=True)
+demo.queue().launch(debug=True)
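
For quick reference, here is the prompt builder as it reads after this commit, reconstructed from the hunks above as a minimal runnable sketch. The closing [INST] {message} [/INST] line is not visible in the diff and is assumed from the usual Mistral chat template; the example values below are illustrative only.

# Sketch of format_prompt after this commit (reconstructed from the hunks above).
def format_prompt(message, history, system_prompt=""):
    prompt = "<s>"
    if system_prompt:
        prompt += f"[SYS] {system_prompt} [/SYS] "  # new in this commit
    for user_prompt, bot_response in history:
        prompt += f"[INST] {user_prompt} [/INST]"
        prompt += f" {bot_response}</s> "
    prompt += f"[INST] {message} [/INST]"  # assumed: outside the shown hunks
    return prompt

# Illustrative call (made-up values):
print(format_prompt(
    "Write me a recipe for pancakes.",
    history=[("Hi", "Hello! How can I help?")],
    system_prompt="Answer in one short paragraph.",
))
# -> <s>[SYS] Answer in one short paragraph. [/SYS] [INST] Hi [/INST] Hello! How can I help?</s> [INST] Write me a recipe for pancakes. [/INST]

The empty default (system_prompt="") keeps existing callers working: when the textbox is left blank, the prompt is built exactly as before.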
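
The new gr.Textbox is placed at the top of additional_inputs so that its value lands in the third positional slot of generate (system_prompt), ahead of the existing Temperature slider. Below is a minimal sketch of that wiring, assuming additional_inputs is forwarded through gr.ChatInterface as in the usual Space template; the real app.py nests this inside with gr.Blocks(css=css) as demo: (see the last hunk header), and the stub generate stands in for the streaming function above.

import gradio as gr

def generate(prompt, history, system_prompt="", temperature=0.9, max_new_tokens=256,
             top_p=0.95, repetition_penalty=1.0):
    # Stub standing in for the streaming generator in app.py.
    yield f"(system_prompt={system_prompt!r}, temperature={temperature}) {prompt}"

additional_inputs = [
    gr.Textbox("", label="Optional system prompt"),  # -> system_prompt (3rd parameter)
    gr.Slider(label="Temperature", value=0.9, minimum=0.0, maximum=1.0, step=0.05),  # -> temperature (range/step illustrative)
    # ...the remaining sliders map to max_new_tokens, top_p and repetition_penalty
]

# ChatInterface calls fn(message, history, *additional_inputs), so input order must
# match the parameter order in generate's signature.
demo = gr.ChatInterface(fn=generate, additional_inputs=additional_inputs)
# demo.queue().launch(debug=True)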