Ctaake committed
Commit bcb63c0
1 Parent(s): b5157e5

Changed implementation of system prompt

Files changed (1): app.py (+4 -2)
app.py CHANGED
@@ -17,8 +17,8 @@ def format_prompt(message, history, systemPrompt):
     # First everything is converted into role format
     # First a system prompt
     messages = []
-    messages.append({"role": "user", "content": systemPrompt})
-    messages.append({"role": "assistant", "content": ""})
+    #messages.append({"role": "user", "content": systemPrompt})
+    #messages.append({"role": "assistant", "content": ""})
     # Followed by the message history
     for user_message, bot_message in history:
         messages.append({"role": "user", "content": user_message})
@@ -27,6 +27,8 @@ def format_prompt(message, history, systemPrompt):
     messages.append({"role": "user", "content": message})
     # The tokenizer converts into the model format
     messages = tokenizer.apply_chat_template(messages, tokenize=False)
+    systemPromptMessage = f"[INST]{systemPrompt}[/INST]"
+    messages=systemPromptMessage+messages
     return messages
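
For context, a minimal sketch of how format_prompt reads after this commit. The tokenizer setup and the assistant turn inside the history loop are not visible in the diff, so the AutoTokenizer model name and that line are assumptions; the [INST] tags suggest a Mistral-style instruct model.

# Sketch only, not part of the commit. Assumes a transformers AutoTokenizer
# for a Mistral-style instruct model (model name is illustrative) and that
# the history loop also appends the assistant turn, which the diff omits.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("mistralai/Mistral-7B-Instruct-v0.1")  # assumed model

def format_prompt(message, history, systemPrompt):
    # First everything is converted into role format
    messages = []
    # Followed by the message history
    for user_message, bot_message in history:
        messages.append({"role": "user", "content": user_message})
        messages.append({"role": "assistant", "content": bot_message})  # assumed, not shown in the diff
    # Finally the current user message
    messages.append({"role": "user", "content": message})
    # The tokenizer converts into the model format
    messages = tokenizer.apply_chat_template(messages, tokenize=False)
    # New in this commit: the system prompt is prepended as a raw [INST] block
    # instead of being injected as a fake user/assistant turn in the role list
    systemPromptMessage = f"[INST]{systemPrompt}[/INST]"
    messages = systemPromptMessage + messages
    return messages

With an empty history, format_prompt("Hi", [], "Be brief.") would return roughly "[INST]Be brief.[/INST]<s>[INST] Hi [/INST]" for a Mistral-style chat template, so the system prompt now sits in front of the templated conversation rather than inside it.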