insomeniaT committed on
Commit
ec478a0
1 Parent(s): ba1ed17

Increase token length in example code

Browse files
Files changed (1) hide show
  1. README.md +1 -1
README.md CHANGED
@@ -44,7 +44,7 @@ inputs = tokenizer(text, return_tensors="pt")
44
  inputs.to(device)
45
  model.to(device)
46
 
47
- outputs = model.generate(input_ids=inputs["input_ids"], attention_mask=inputs["attention_mask"], max_new_tokens=10, pad_token_id=tokenizer.eos_token_id)
48
  result = tokenizer.decode(outputs[0], skip_special_tokens=True)
49
  print(result)
50
  ```
 
44
  inputs.to(device)
45
  model.to(device)
46
 
47
+ outputs = model.generate(input_ids=inputs["input_ids"], attention_mask=inputs["attention_mask"], max_new_tokens=300, pad_token_id=tokenizer.eos_token_id)
48
  result = tokenizer.decode(outputs[0], skip_special_tokens=True)
49
  print(result)
50
  ```