junyinc committed on
Commit
a084a8e
1 Parent(s): b2c6abf

Upload PegasusForConditionalGeneration

Browse files
Files changed (3) hide show
  1. config.json +2 -71
  2. generation_config.json +1 -0
  3. pytorch_model.bin +1 -1
config.json CHANGED
@@ -1,5 +1,5 @@
1
  {
2
- "_name_or_path": "google/pegasus-large",
3
  "activation_dropout": 0.1,
4
  "activation_function": "relu",
5
  "add_bias_logits": false,
@@ -10,7 +10,6 @@
10
  "attention_dropout": 0.1,
11
  "bos_token_id": 0,
12
  "classif_dropout": 0.0,
13
- "classifier_dropout": 0.0,
14
  "d_model": 1024,
15
  "decoder_attention_heads": 16,
16
  "decoder_ffn_dim": 4096,
@@ -24,9 +23,7 @@
24
  "encoder_layers": 16,
25
  "eos_token_id": 1,
26
  "extra_pos_embeddings": 1,
27
- "force_bos_token_to_be_generated": false,
28
  "forced_eos_token_id": 1,
29
- "gradient_checkpointing": false,
30
  "id2label": {
31
  "0": "LABEL_0",
32
  "1": "LABEL_1",
@@ -42,6 +39,7 @@
42
  "length_penalty": 0.8,
43
  "max_length": 350,
44
  "max_position_embeddings": 1024,
 
45
  "model_type": "pegasus",
46
  "normalize_before": true,
47
  "normalize_embedding": false,
@@ -50,73 +48,6 @@
50
  "pad_token_id": 0,
51
  "scale_embedding": true,
52
  "static_position_embeddings": true,
53
- "task_specific_params": {
54
- "summarization_aeslc": {
55
- "length_penalty": 0.6,
56
- "max_length": 32,
57
- "max_position_embeddings": 512
58
- },
59
- "summarization_arxiv": {
60
- "length_penalty": 0.8,
61
- "max_length": 256,
62
- "max_position_embeddings": 1024
63
- },
64
- "summarization_big_patent": {
65
- "length_penalty": 0.7,
66
- "max_length": 256,
67
- "max_position_embeddings": 1024
68
- },
69
- "summarization_billsum": {
70
- "length_penalty": 0.6,
71
- "max_length": 256,
72
- "max_position_embeddings": 1024
73
- },
74
- "summarization_cnn_dailymail": {
75
- "length_penalty": 0.8,
76
- "max_length": 128,
77
- "max_position_embeddings": 1024
78
- },
79
- "summarization_gigaword": {
80
- "length_penalty": 0.6,
81
- "max_length": 32,
82
- "max_position_embeddings": 128
83
- },
84
- "summarization_large": {
85
- "length_penalty": 0.8,
86
- "max_length": 256,
87
- "max_position_embeddings": 1024
88
- },
89
- "summarization_multi_news": {
90
- "length_penalty": 0.8,
91
- "max_length": 256,
92
- "max_position_embeddings": 1024
93
- },
94
- "summarization_newsroom": {
95
- "length_penalty": 0.8,
96
- "max_length": 128,
97
- "max_position_embeddings": 512
98
- },
99
- "summarization_pubmed": {
100
- "length_penalty": 0.8,
101
- "max_length": 256,
102
- "max_position_embeddings": 1024
103
- },
104
- "summarization_reddit_tifu": {
105
- "length_penalty": 0.6,
106
- "max_length": 128,
107
- "max_position_embeddings": 512
108
- },
109
- "summarization_wikihow": {
110
- "length_penalty": 0.6,
111
- "max_length": 256,
112
- "max_position_embeddings": 512
113
- },
114
- "summarization_xsum": {
115
- "length_penalty": 0.8,
116
- "max_length": 64,
117
- "max_position_embeddings": 512
118
- }
119
- },
120
  "torch_dtype": "float32",
121
  "transformers_version": "4.26.1",
122
  "use_cache": true,
 
1
  {
2
+ "_name_or_path": "google/pegasus-cnn_dailymail",
3
  "activation_dropout": 0.1,
4
  "activation_function": "relu",
5
  "add_bias_logits": false,
 
10
  "attention_dropout": 0.1,
11
  "bos_token_id": 0,
12
  "classif_dropout": 0.0,
 
13
  "d_model": 1024,
14
  "decoder_attention_heads": 16,
15
  "decoder_ffn_dim": 4096,
 
23
  "encoder_layers": 16,
24
  "eos_token_id": 1,
25
  "extra_pos_embeddings": 1,
 
26
  "forced_eos_token_id": 1,
 
27
  "id2label": {
28
  "0": "LABEL_0",
29
  "1": "LABEL_1",
 
39
  "length_penalty": 0.8,
40
  "max_length": 350,
41
  "max_position_embeddings": 1024,
42
+ "min_length": 32,
43
  "model_type": "pegasus",
44
  "normalize_before": true,
45
  "normalize_embedding": false,
 
48
  "pad_token_id": 0,
49
  "scale_embedding": true,
50
  "static_position_embeddings": true,
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
51
  "torch_dtype": "float32",
52
  "transformers_version": "4.26.1",
53
  "use_cache": true,
generation_config.json CHANGED
@@ -6,6 +6,7 @@
6
  "forced_eos_token_id": 1,
7
  "length_penalty": 0.8,
8
  "max_length": 350,
 
9
  "num_beams": 8,
10
  "pad_token_id": 0,
11
  "transformers_version": "4.26.1"
 
6
  "forced_eos_token_id": 1,
7
  "length_penalty": 0.8,
8
  "max_length": 350,
9
+ "min_length": 32,
10
  "num_beams": 8,
11
  "pad_token_id": 0,
12
  "transformers_version": "4.26.1"
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:b7562377c25363a15e60f02be22883dba346b5ecea95dfd1e82f05e6b6c1fce0
3
  size 2283804653
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:1a3ea506aeee2324453b9c727be77b5eeb901e2b59ab0d1af6131e293f23d440
3
  size 2283804653