Fully comply with prompt specs

#6
by pcuenq - opened
Files changed (1)
  1. model.py +8 -3
model.py CHANGED
@@ -19,10 +19,15 @@ tokenizer = AutoTokenizer.from_pretrained(model_id)
 
 def get_prompt(message: str, chat_history: list[tuple[str, str]],
                system_prompt: str) -> str:
-    texts = [f'[INST] <<SYS>>\n{system_prompt}\n<</SYS>>\n\n']
+    texts = [f'<s>[INST] <<SYS>>\n{system_prompt}\n<</SYS>>\n\n']
+    # The first user input is _not_ stripped
+    do_strip = False
     for user_input, response in chat_history:
-        texts.append(f'{user_input.strip()} [/INST] {response.strip()} </s><s> [INST] ')
-    texts.append(f'{message.strip()} [/INST]')
+        user_input = user_input.strip() if do_strip else user_input
+        do_strip = True
+        texts.append(f'{user_input} [/INST] {response.strip()} </s><s>[INST] ')
+    message = message.strip() if do_strip else message
+    texts.append(f'{message} [/INST]')
     return ''.join(texts)
 
 
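For reference, here is the helper as it reads after this change, followed by an illustrative call; the system prompt, chat history, and message strings below are made-up placeholders, not part of the PR:

```python
# The updated get_prompt from the diff above, reproduced as-is,
# plus a placeholder usage example showing the resulting prompt string.

def get_prompt(message: str, chat_history: list[tuple[str, str]],
               system_prompt: str) -> str:
    texts = [f'<s>[INST] <<SYS>>\n{system_prompt}\n<</SYS>>\n\n']
    # The first user input is _not_ stripped
    do_strip = False
    for user_input, response in chat_history:
        user_input = user_input.strip() if do_strip else user_input
        do_strip = True
        texts.append(f'{user_input} [/INST] {response.strip()} </s><s>[INST] ')
    message = message.strip() if do_strip else message
    texts.append(f'{message} [/INST]')
    return ''.join(texts)


# Placeholder conversation, for illustration only.
prompt = get_prompt(
    message="How large is it?",
    chat_history=[("What is the capital of France?", "Paris.")],
    system_prompt="You are a helpful assistant.",
)
print(prompt)
# <s>[INST] <<SYS>>
# You are a helpful assistant.
# <</SYS>>
#
# What is the capital of France? [/INST] Paris. </s><s>[INST] How large is it? [/INST]
```

Note that only the first user turn (or the current message when the history is empty) is passed through unstripped; later turns and all responses are stripped, matching the comment in the diff.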