Tomoniai committed
Commit d40212f
Parent: fdc40ec

Update app.py

Files changed (1)
  1. app.py +6 -6
app.py CHANGED
@@ -12,7 +12,7 @@ def format_prompt(message, history):
     return prompt
 
 def generate(
-    prompt, history, system_prompt, temperature=0.3, max_new_tokens=256, top_p=0.9, repetition_penalty=1.0,
+    prompt, history, temperature=0.2, max_new_tokens=256, top_p=0.95, repetition_penalty=1.0,
 ):
     temperature = float(temperature)
     if temperature < 1e-2:
@@ -28,9 +28,8 @@ def generate(
         seed=42,
     )
 
-    system_prompt = "You are Mixtral, a gentle and smart AI assistant who is always ready to help and answer any questions truthfully"
+    formatted_prompt = format_prompt(prompt, history)
 
-    formatted_prompt = format_prompt(f"{system_prompt}, {prompt}", history)
     stream = client.text_generation(formatted_prompt, **generate_kwargs, stream=True, details=True, return_full_text=False)
     output = ""
 
@@ -39,12 +38,13 @@ def generate(
         yield output
     return output
 
+
 mychatbot = gr.Chatbot(
-    avatar_images=["./user.png", "./botm.png"], bubble_full_width=False, show_label=False, show_copy_button=True, likeable=True)
+    avatar_images=["./user.png", "./botm.png"], bubble_full_width=False, show_label=False, show_copy_button=True,)
 
-demo = gr.ChatInterface(fn=generate,
+demo = gr.ChatInterface(fn=generate,
     chatbot=mychatbot,
-    title="Tomoniai's Mixtral Chat",
+    title="Tomoniai Mixtral Chat",
     retry_btn=None,
     undo_btn=None
 )
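
Net effect of the commit: the hard-coded Mixtral system prompt is dropped and the user message goes straight to format_prompt, the default temperature falls from 0.3 to 0.2, top_p rises from 0.9 to 0.95, and the likeable flag is removed from gr.Chatbot. For context, below is a minimal sketch of how the updated pieces could fit together, assuming the usual huggingface_hub InferenceClient setup for this kind of Space; the model id, the body of format_prompt, the generate_kwargs block, and the launch call are assumptions that do not appear in this diff.

# Minimal sketch of app.py after this commit; items marked "assumption" are not shown in the diff.
import gradio as gr
from huggingface_hub import InferenceClient

# Assumption: the Space points at a Mixtral instruct model served by the HF Inference API.
client = InferenceClient("mistralai/Mixtral-8x7B-Instruct-v0.1")


def format_prompt(message, history):
    # Assumption: the usual Mixtral [INST] ... [/INST] chat template; only `return prompt` is visible in the diff.
    prompt = "<s>"
    for user_prompt, bot_response in history:
        prompt += f"[INST] {user_prompt} [/INST] {bot_response}</s> "
    prompt += f"[INST] {message} [/INST]"
    return prompt


def generate(
    prompt, history, temperature=0.2, max_new_tokens=256, top_p=0.95, repetition_penalty=1.0,
):
    temperature = float(temperature)
    if temperature < 1e-2:
        temperature = 1e-2

    # Assumption: standard sampling kwargs; only `seed=42,` and the closing parenthesis appear in the diff.
    generate_kwargs = dict(
        temperature=temperature,
        max_new_tokens=max_new_tokens,
        top_p=top_p,
        repetition_penalty=repetition_penalty,
        do_sample=True,
        seed=42,
    )

    # The hard-coded system prompt is gone: the user message is formatted directly.
    formatted_prompt = format_prompt(prompt, history)

    stream = client.text_generation(formatted_prompt, **generate_kwargs, stream=True, details=True, return_full_text=False)
    output = ""

    # Stream partial output back to the Chatbot token by token.
    for response in stream:
        output += response.token.text
        yield output
    return output


mychatbot = gr.Chatbot(
    avatar_images=["./user.png", "./botm.png"], bubble_full_width=False, show_label=False, show_copy_button=True,)

demo = gr.ChatInterface(fn=generate,
    chatbot=mychatbot,
    title="Tomoniai Mixtral Chat",
    retry_btn=None,
    undo_btn=None
)

# Assumption: the Space launches with queuing enabled; the launch line is not part of this diff.
demo.queue().launch(show_api=False)

With the system_prompt removed, the model now answers without an injected persona, so any steering has to come from the chat template in format_prompt or from the user's own messages.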