jbochi committed on
Commit
2553db6
1 Parent(s): 03a4d52

Fix bos_token_id

Browse files
Files changed (1) hide show
  1. app.py +1 -1
app.py CHANGED
@@ -17,7 +17,7 @@ print("T5ForConditionalGeneration loaded from pretrained.")
17
 
18
  def inference(max_length, input_text, history=[]):
19
  input_ids = tokenizer(input_text, return_tensors="pt").input_ids
20
- outputs = model.generate(input_ids, max_length=max_length, bos_token_id=0)
21
  result = tokenizer.decode(outputs[0], skip_special_tokens=True)
22
  history.append((input_text, result))
23
  return history, history
 
17
 
18
  def inference(max_length, input_text, history=[]):
19
  input_ids = tokenizer(input_text, return_tensors="pt").input_ids
20
+ outputs = model.generate(input_ids, max_length=max_length, bos_token_id=2)
21
  result = tokenizer.decode(outputs[0], skip_special_tokens=True)
22
  history.append((input_text, result))
23
  return history, history