zqh11 committed
Commit f20ad0e
1 parent: 20ce98b

Update app.py

Files changed (1)
  1. app.py (+1, -1)
app.py CHANGED
@@ -55,7 +55,7 @@ def generate(
         conversation.extend([{"role": "user", "content": user}, {"role": "assistant", "content": assistant}])
     conversation.append({"role": "user", "content": message})
 
-    input_ids = tokenizer.apply_chat_template(conversation, return_tensors="pt")
+    input_ids = tokenizer.apply_chat_template(conversation, return_tensors="pt", add_generation_prompt=True)
     if input_ids.shape[1] > MAX_INPUT_TOKEN_LENGTH:
         input_ids = input_ids[:, -MAX_INPUT_TOKEN_LENGTH:]
         gr.Warning(f"Trimmed input from conversation as it was longer than {MAX_INPUT_TOKEN_LENGTH} tokens.")
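Note on the change: passing add_generation_prompt=True to tokenizer.apply_chat_template appends the assistant-turn header to the rendered prompt, so the model starts generating a reply instead of continuing the user's last message. A minimal sketch of the difference, assuming a hypothetical chat checkpoint (the model ID below is an illustrative example, not necessarily the one this Space loads):

from transformers import AutoTokenizer

# Example checkpoint for illustration only; any tokenizer with a chat template works.
tokenizer = AutoTokenizer.from_pretrained("HuggingFaceH4/zephyr-7b-beta")

conversation = [{"role": "user", "content": "Hello!"}]

# Without add_generation_prompt, the rendered template ends right after the user turn.
print(tokenizer.apply_chat_template(conversation, tokenize=False))

# With add_generation_prompt=True, the assistant header is appended,
# so decoding continues as the assistant's turn.
print(tokenizer.apply_chat_template(conversation, tokenize=False, add_generation_prompt=True))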