Mattral committed on
Commit
03afc21
1 Parent(s): 8def8b6

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +13 -12
app.py CHANGED
@@ -25,23 +25,24 @@ info2_md_chunks = textwrap.wrap(info2_md_content, chunk_size)
25
  # Combine both sets of chunks
26
  all_chunks = info_md_chunks + info2_md_chunks
27
 
28
- # Initialize history with these chunks
29
- history = []
30
- for chunk in all_chunks:
31
- history.append(("System Information", chunk))
32
-
33
- def format_prompt_mixtral(message, history):
34
  prompt = "<s>"
35
  prompt += f"{system_prompt_text}\n\n" # Add the system prompt
36
 
 
 
 
 
 
37
  if history:
38
  for user_prompt, bot_response in history:
39
- prompt += f"[INST] {user_prompt} [/INST]"
40
- prompt += f" {bot_response}</s> "
 
41
  prompt += f"[INST] {message} [/INST]"
42
  return prompt
43
 
44
- def chat_inf(prompt, history, seed, temp, tokens, top_p, rep_p):
45
  generate_kwargs = dict(
46
  temperature=temp,
47
  max_new_tokens=tokens,
@@ -51,13 +52,13 @@ def chat_inf(prompt, history, seed, temp, tokens, top_p, rep_p):
51
  seed=seed,
52
  )
53
 
54
- formatted_prompt = format_prompt_mixtral(prompt, history)
55
  stream = client.text_generation(formatted_prompt, **generate_kwargs, stream=True, details=True, return_full_text=False)
56
  output = ""
57
  for response in stream:
58
  output += response.token.text
59
- yield [(prompt, output)]
60
- history.append((prompt, output))
61
  yield history
62
 
63
  def clear_fn():
 
25
  # Combine both sets of chunks
26
  all_chunks = info_md_chunks + info2_md_chunks
27
 
28
def format_prompt_mixtral(message, history, chunks):
    """Build a Mixtral-instruct prompt string.

    The prompt opens with the BOS token and the module-level system prompt,
    then injects each document chunk as a synthetic "System Information"
    turn, replays the prior (user, bot) exchanges, and ends with the new
    user message left open for the model to answer.
    """
    pieces = ["<s>", f"{system_prompt_text}\n\n"]  # Add the system prompt

    # Include the initial context from the chunks
    for chunk in chunks:
        pieces.append(f"[INST] System Information [/INST] {chunk}</s> ")

    # Add conversation history (tolerates history being None or empty)
    for user_prompt, bot_response in history or []:
        pieces.append(f"[INST] {user_prompt} [/INST] {bot_response}</s> ")

    # Add the current user message
    pieces.append(f"[INST] {message} [/INST]")
    return "".join(pieces)
44
 
45
+ def chat_inf(message, history, seed, temp, tokens, top_p, rep_p):
46
  generate_kwargs = dict(
47
  temperature=temp,
48
  max_new_tokens=tokens,
 
52
  seed=seed,
53
  )
54
 
55
+ formatted_prompt = format_prompt_mixtral(message, history, all_chunks)
56
  stream = client.text_generation(formatted_prompt, **generate_kwargs, stream=True, details=True, return_full_text=False)
57
  output = ""
58
  for response in stream:
59
  output += response.token.text
60
+ yield [(message, output)]
61
+ history.append((message, output))
62
  yield history
63
 
64
  def clear_fn():