GODELEV committed on
Commit
d702e45
·
verified ·
1 Parent(s): 1a5c696

Update README.md

Browse files
Files changed (1) hide show
  1. README.md +7 -2
README.md CHANGED
@@ -85,7 +85,7 @@ model = AutoModelForCausalLM.from_pretrained(
85
  )
86
 
87
  # Prepare Prompt
88
- prompt = "Once upon a time, Lily found a mysterious blue key."
89
  inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
90
 
91
  # Generate Story
@@ -95,7 +95,12 @@ output = model.generate(
95
  temperature=0.7,
96
  top_p=0.9,
97
  do_sample=True,
98
- repetition_penalty=1.1
 
 
 
99
  )
100
 
 
 
101
  print(tokenizer.decode(output[0], skip_special_tokens=True))
 
85
  )
86
 
87
  # Prepare Prompt
88
+ prompt = "Once upon a time, Tom found a blue car."
89
  inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
90
 
91
  # Generate Story
 
95
  temperature=0.7,
96
  top_p=0.9,
97
  do_sample=True,
98
+ repetition_penalty=1.1,
99
+ # CRITICAL ADDITIONS BELOW:
100
+ eos_token_id=tokenizer.eos_token_id,
101
+ pad_token_id=tokenizer.pad_token_id
102
  )
103
 
104
+ # Set skip_special_tokens=False first just to verify that the end-of-text token (<|endoftext|>) is present,
105
+ # then switch back to True for a clean output.
106
  print(tokenizer.decode(output[0], skip_special_tokens=True))