lxuechen committed on
Commit
cc447a5
1 Parent(s): 0c60d55

Update README.md

Browse files
Files changed (1) hide show
  1. README.md +2 -2
README.md CHANGED
@@ -62,13 +62,13 @@ tokenizer = transformers.AutoTokenizer.from_pretrained(model_name_or_path)
62
  input_text = "### Human: Give me a good recipe for a chinese dish\n\n### Assistant:"
63
 
64
  outputs = model.generate(
65
- tokenizer(input_text, return_tensors="pt").to(model.device), max_length=1024,
 
66
  temperature=0.7,
67
  top_p=0.9,
68
  do_sample=True,
69
  pad_token_id=tokenizer.pad_token_id,
70
  eos_token_id=tokenizer.eos_token_id,
71
- max_new_tokens=1024
72
  )
73
  print(tokenizer.decode(outputs[0], skip_special_tokens=True))
74
  ```
 
62
  input_text = "### Human: Give me a good recipe for a chinese dish\n\n### Assistant:"
63
 
64
  outputs = model.generate(
65
+ tokenizer(input_text, return_tensors="pt").to(model.device),
66
+ max_length=1024,
67
  temperature=0.7,
68
  top_p=0.9,
69
  do_sample=True,
70
  pad_token_id=tokenizer.pad_token_id,
71
  eos_token_id=tokenizer.eos_token_id,
 
72
  )
73
  print(tokenizer.decode(outputs[0], skip_special_tokens=True))
74
  ```