cahya committed
Commit 2be9212 (1 parent: e346883)

updated the generator to use temperature and sampling

Files changed (1): README.md (+9 -3)
README.md CHANGED
@@ -44,13 +44,19 @@ ARTICLE_TO_SUMMARIZE = ""
 # generate summary
 input_ids = tokenizer.encode(ARTICLE_TO_SUMMARIZE, return_tensors='pt')
 summary_ids = model.generate(input_ids,
-                             max_length=100,
-                             num_beams=2,
+                             min_length=20,
+                             max_length=80,
+                             num_beams=10,
                              repetition_penalty=2.5,
                              length_penalty=1.0,
                              early_stopping=True,
                              no_repeat_ngram_size=2,
-                             use_cache=True)
+                             use_cache=True,
+                             do_sample = True,
+                             temperature = 0.8,
+                             top_k = 50,
+                             top_p = 0.95)
+
 summary_text = tokenizer.decode(summary_ids[0], skip_special_tokens=True)
 print(summary_text)
 ```
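
For reference, the updated snippet as a self-contained sketch. The checkpoint id below is a placeholder (the actual repo id is not shown in this diff), and loading via AutoTokenizer/AutoModelForSeq2SeqLM is an assumption; the README may load the model differently.

```python
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM

# Placeholder checkpoint id -- substitute the actual model repo from this README.
MODEL_NAME = "cahya/your-summarization-checkpoint"

tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
model = AutoModelForSeq2SeqLM.from_pretrained(MODEL_NAME)

ARTICLE_TO_SUMMARIZE = ""  # paste the article text here

# generate summary with the parameters introduced in this commit
input_ids = tokenizer.encode(ARTICLE_TO_SUMMARIZE, return_tensors='pt')
summary_ids = model.generate(input_ids,
                             min_length=20,
                             max_length=80,
                             num_beams=10,
                             repetition_penalty=2.5,
                             length_penalty=1.0,
                             early_stopping=True,
                             no_repeat_ngram_size=2,
                             use_cache=True,
                             do_sample=True,
                             temperature=0.8,
                             top_k=50,
                             top_p=0.95)

summary_text = tokenizer.decode(summary_ids[0], skip_special_tokens=True)
print(summary_text)
```

Setting `do_sample=True` together with `num_beams=10` switches `generate` to beam-search multinomial sampling, where `temperature`, `top_k`, and `top_p` control how much randomness enters each beam step instead of always taking the most likely token.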