jpcorb20 committed
Commit 37ac972
1 Parent(s): acf9167

Update README.md

Files changed (1): README.md +8 -2
README.md CHANGED
@@ -36,7 +36,7 @@ Used the example/seq2seq/run_summarization.py script from the transformers source.
 max_target_length: 128
 
 ## Eval results
-
+
 eval_gen_len: 35.89,\
 eval_loss: 1.3807392120361328,\
 eval_rouge1: 47.3372,\
@@ -54,7 +54,13 @@ Used the example/seq2seq/run_summarization.py script from the transformers source.
 tokenizer = PegasusTokenizer.from_pretrained(model_name)
 model = PegasusForConditionalGeneration.from_pretrained(model_name)
 
-src_text = """Carter: Hey Alexis, I just wanted to let you know that I had a really nice time with you tonight.\r\n Alexis: Thanks Carter. Yeah, I really enjoyed myself as well.\r\n Carter: If you are up for it, I would really like to see you again soon.\r\n Alexis: Thanks Carter, I'm flattered. But I have a really busy week coming up.\r\n Carter: Yeah, no worries. I totally understand. But if you ever want to go grab dinner again, just let me know.\r\n Alexis: Yeah of course. Thanks again for tonight. Carter: Sure. Have a great night.\r\n"""
+src_text = """Carter: Hey Alexis, I just wanted to let you know that I had a really nice time with you tonight.\r\n
+Alexis: Thanks Carter. Yeah, I really enjoyed myself as well.\r\n
+Carter: If you are up for it, I would really like to see you again soon.\r\n
+Alexis: Thanks Carter, I'm flattered. But I have a really busy week coming up.\r\n
+Carter: Yeah, no worries. I totally understand. But if you ever want to go grab dinner again, just let me know.\r\n
+Alexis: Yeah of course. Thanks again for tonight. Carter: Sure. Have a great night.\r\n
+"""
 
 token_params = dict(max_length=512, truncation=True, padding='longest', return_tensors="pt")
 batch = tokenizer(src_text, **token_params)
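
The README snippet in the second hunk stops after tokenization. Below is a minimal sketch of the full inference path, assuming a placeholder checkpoint id (the diff never shows the value of `model_name`) and illustrative generation settings (`num_beams=4`, and `max_length=128` mirroring the `max_target_length: 128` configuration above):

```python
# A minimal end-to-end sketch completing the README snippet above.
# Assumptions (not shown in this diff): the checkpoint id behind `model_name`
# is a placeholder, and the generation settings (num_beams=4, max_length=128,
# the latter mirroring max_target_length above) are illustrative guesses.
from transformers import PegasusForConditionalGeneration, PegasusTokenizer

model_name = "google/pegasus-large"  # placeholder; substitute the fine-tuned checkpoint
tokenizer = PegasusTokenizer.from_pretrained(model_name)
model = PegasusForConditionalGeneration.from_pretrained(model_name)

# Abbreviated version of the dialogue added in the second hunk.
src_text = """Carter: Hey Alexis, I just wanted to let you know that I had a really nice time with you tonight.\r\n
Alexis: Thanks Carter. Yeah, I really enjoyed myself as well.\r\n
"""

# Tokenize exactly as in the README, then generate and decode the summary.
token_params = dict(max_length=512, truncation=True, padding='longest', return_tensors="pt")
batch = tokenizer(src_text, **token_params)
summary_ids = model.generate(**batch, num_beams=4, max_length=128)
print(tokenizer.batch_decode(summary_ids, skip_special_tokens=True)[0])
```

Note that `padding='longest'` only matters when several dialogues are tokenized together; for a single string it is a no-op.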
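The eval block in the first hunk reports ROUGE figures produced by run_summarization.py. Purely as an illustration of how such numbers are computed (this is not the author's evaluation code), a ROUGE score can be recomputed with Hugging Face's `evaluate` library; both strings below are invented:

```python
# Illustration only: how ROUGE figures like those in the first hunk can be
# recomputed with the `evaluate` library (pip install evaluate rouge_score).
# Both strings below are invented; this is not the author's evaluation code.
import evaluate

rouge = evaluate.load("rouge")
predictions = ["Carter had a nice time with Alexis and would like to see her again soon."]
references = ["Carter asks Alexis out again; she is busy this week but open to dinner later."]
print(rouge.compute(predictions=predictions, references=references))
# -> {'rouge1': ..., 'rouge2': ..., 'rougeL': ..., 'rougeLsum': ...}
```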