ratishsp committed on
Commit
bbfc484
1 Parent(s): 80ba612

initial commit

README.md CHANGED
@@ -1,3 +1,265 @@
  ---
  license: apache-2.0
+ tags:
+ - generated_from_trainer
+ datasets:
+ - NewSHead
+ model-index:
+ - name: Centrum
+   results: []
  ---
+
+ <!-- This model card has been generated automatically according to the information the Trainer had access to. You
+ should probably proofread and complete it, then remove this comment. -->
+
+ # Centrum
+
+ This model is a fine-tuned version of [allenai/led-base-16384](https://huggingface.co/allenai/led-base-16384) on the NewSHead dataset.
+ It achieves the following results on the evaluation set:
+ - Loss: 3.5568
+
+ ## Model description
+
+ More information needed
+
+ ## Intended uses & limitations
+
+ More information needed
+
+ ## Training and evaluation data
+
+ More information needed
+
+ ## Training procedure
+
+ ### Training hyperparameters
+
+ The following hyperparameters were used during training:
+ - learning_rate: 3e-05
+ - train_batch_size: 1
+ - eval_batch_size: 4
+ - seed: 42
+ - distributed_type: multi-GPU
+ - num_devices: 4
+ - gradient_accumulation_steps: 4
+ - total_train_batch_size: 16
+ - total_eval_batch_size: 16
+ - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
+ - lr_scheduler_type: linear
+ - lr_scheduler_warmup_steps: 10000
+ - training_steps: 100000
+ - mixed_precision_training: Native AMP
+ - label_smoothing_factor: 0.1
+
+ ### Training results
+
+ | Training Loss | Epoch | Step | Validation Loss |
+ |:-------------:|:-----:|:------:|:---------------:|
+ | 4.1628 | 0.05 | 500 | 4.0732 |
+ | 4.0278 | 0.09 | 1000 | 3.9800 |
+ | 4.0008 | 0.14 | 1500 | 3.9283 |
+ | 3.9564 | 0.19 | 2000 | 3.8941 |
+ | 3.9193 | 0.23 | 2500 | 3.8780 |
+ | 3.9185 | 0.28 | 3000 | 3.8501 |
+ | 3.8881 | 0.32 | 3500 | 3.8334 |
+ | 3.8869 | 0.37 | 4000 | 3.8211 |
+ | 3.876 | 0.42 | 4500 | 3.8057 |
+ | 3.8552 | 0.46 | 5000 | 3.7954 |
+ | 3.8198 | 0.51 | 5500 | 3.7861 |
+ | 3.8016 | 0.56 | 6000 | 3.7750 |
+ | 3.8033 | 0.6 | 6500 | 3.7651 |
+ | 3.7927 | 0.65 | 7000 | 3.7528 |
+ | 3.7978 | 0.7 | 7500 | 3.7429 |
+ | 3.7727 | 0.74 | 8000 | 3.7367 |
+ | 3.7634 | 0.79 | 8500 | 3.7275 |
+ | 3.7395 | 0.83 | 9000 | 3.7158 |
+ | 3.7432 | 0.88 | 9500 | 3.7066 |
+ | 3.7623 | 0.93 | 10000 | 3.7039 |
+ | 3.7182 | 0.97 | 10500 | 3.6904 |
+ | 3.7146 | 1.02 | 11000 | 3.6881 |
+ | 3.681 | 1.07 | 11500 | 3.6797 |
+ | 3.6745 | 1.11 | 12000 | 3.6750 |
+ | 3.6794 | 1.16 | 12500 | 3.6748 |
+ | 3.6802 | 1.21 | 13000 | 3.6696 |
+ | 3.665 | 1.25 | 13500 | 3.6609 |
+ | 3.6516 | 1.3 | 14000 | 3.6633 |
+ | 3.6577 | 1.34 | 14500 | 3.6573 |
+ | 3.6409 | 1.39 | 15000 | 3.6519 |
+ | 3.6691 | 1.44 | 15500 | 3.6490 |
+ | 3.6521 | 1.48 | 16000 | 3.6475 |
+ | 3.6435 | 1.53 | 16500 | 3.6465 |
+ | 3.6466 | 1.58 | 17000 | 3.6392 |
+ | 3.644 | 1.62 | 17500 | 3.6419 |
+ | 3.6347 | 1.67 | 18000 | 3.6347 |
+ | 3.6205 | 1.71 | 18500 | 3.6328 |
+ | 3.6451 | 1.76 | 19000 | 3.6310 |
+ | 3.6327 | 1.81 | 19500 | 3.6284 |
+ | 3.6166 | 1.85 | 20000 | 3.6267 |
+ | 3.622 | 1.9 | 20500 | 3.6212 |
+ | 3.6164 | 1.95 | 21000 | 3.6199 |
+ | 3.6178 | 1.99 | 21500 | 3.6201 |
+ | 3.5892 | 2.04 | 22000 | 3.6201 |
+ | 3.5855 | 2.09 | 22500 | 3.6221 |
+ | 3.5658 | 2.13 | 23000 | 3.6193 |
+ | 3.5916 | 2.18 | 23500 | 3.6144 |
+ | 3.5767 | 2.22 | 24000 | 3.6101 |
+ | 3.5809 | 2.27 | 24500 | 3.6115 |
+ | 3.5561 | 2.32 | 25000 | 3.6110 |
+ | 3.5831 | 2.36 | 25500 | 3.6080 |
+ | 3.5551 | 2.41 | 26000 | 3.6121 |
+ | 3.5588 | 2.46 | 26500 | 3.6072 |
+ | 3.5645 | 2.5 | 27000 | 3.6056 |
+ | 3.5804 | 2.55 | 27500 | 3.6038 |
+ | 3.5712 | 2.6 | 28000 | 3.6052 |
+ | 3.5494 | 2.64 | 28500 | 3.6014 |
+ | 3.582 | 2.69 | 29000 | 3.5995 |
+ | 3.5487 | 2.73 | 29500 | 3.6051 |
+ | 3.5709 | 2.78 | 30000 | 3.5954 |
+ | 3.5546 | 2.83 | 30500 | 3.5941 |
+ | 3.5525 | 2.87 | 31000 | 3.5952 |
+ | 3.5603 | 2.92 | 31500 | 3.5972 |
+ | 3.5572 | 2.97 | 32000 | 3.5947 |
+ | 3.5106 | 3.01 | 32500 | 3.5952 |
+ | 3.5142 | 3.06 | 33000 | 3.5937 |
+ | 3.506 | 3.11 | 33500 | 3.5965 |
+ | 3.515 | 3.15 | 34000 | 3.5932 |
+ | 3.5247 | 3.2 | 34500 | 3.5951 |
+ | 3.5384 | 3.24 | 35000 | 3.5917 |
+ | 3.5165 | 3.29 | 35500 | 3.5887 |
+ | 3.5187 | 3.34 | 36000 | 3.5866 |
+ | 3.5097 | 3.38 | 36500 | 3.5895 |
+ | 3.5136 | 3.43 | 37000 | 3.5878 |
+ | 3.5095 | 3.48 | 37500 | 3.5839 |
+ | 3.5226 | 3.52 | 38000 | 3.5859 |
+ | 3.5277 | 3.57 | 38500 | 3.5827 |
+ | 3.4959 | 3.62 | 39000 | 3.5846 |
+ | 3.5003 | 3.66 | 39500 | 3.5823 |
+ | 3.5095 | 3.71 | 40000 | 3.5820 |
+ | 3.4814 | 3.75 | 40500 | 3.5854 |
+ | 3.5173 | 3.8 | 41000 | 3.5796 |
+ | 3.4968 | 3.85 | 41500 | 3.5810 |
+ | 3.5183 | 3.89 | 42000 | 3.5783 |
+ | 3.512 | 3.94 | 42500 | 3.5784 |
+ | 3.5069 | 3.99 | 43000 | 3.5775 |
+ | 3.5014 | 4.03 | 43500 | 3.5819 |
+ | 3.4787 | 4.08 | 44000 | 3.5836 |
+ | 3.4625 | 4.12 | 44500 | 3.5788 |
+ | 3.4902 | 4.17 | 45000 | 3.5784 |
+ | 3.4927 | 4.22 | 45500 | 3.5773 |
+ | 3.4813 | 4.26 | 46000 | 3.5769 |
+ | 3.4637 | 4.31 | 46500 | 3.5761 |
+ | 3.4731 | 4.36 | 47000 | 3.5771 |
+ | 3.4856 | 4.4 | 47500 | 3.5786 |
+ | 3.4579 | 4.45 | 48000 | 3.5790 |
+ | 3.5032 | 4.5 | 48500 | 3.5738 |
+ | 3.4826 | 4.54 | 49000 | 3.5749 |
+ | 3.4709 | 4.59 | 49500 | 3.5746 |
+ | 3.4916 | 4.63 | 50000 | 3.5745 |
+ | 3.4715 | 4.68 | 50500 | 3.5706 |
+ | 3.4926 | 4.73 | 51000 | 3.5729 |
+ | 3.4974 | 4.77 | 51500 | 3.5725 |
+ | 3.4796 | 4.82 | 52000 | 3.5683 |
+ | 3.4817 | 4.87 | 52500 | 3.5707 |
+ | 3.4683 | 4.91 | 53000 | 3.5721 |
+ | 3.4986 | 4.96 | 53500 | 3.5689 |
+ | 3.4763 | 5.01 | 54000 | 3.5716 |
+ | 3.4668 | 5.05 | 54500 | 3.5700 |
+ | 3.4274 | 5.1 | 55000 | 3.5724 |
+ | 3.4499 | 5.14 | 55500 | 3.5717 |
+ | 3.4507 | 5.19 | 56000 | 3.5706 |
+ | 3.4343 | 5.24 | 56500 | 3.5697 |
+ | 3.4151 | 5.28 | 57000 | 3.5710 |
+ | 3.4469 | 5.33 | 57500 | 3.5712 |
+ | 3.458 | 5.38 | 58000 | 3.5692 |
+ | 3.4559 | 5.42 | 58500 | 3.5680 |
+ | 3.4354 | 5.47 | 59000 | 3.5683 |
+ | 3.4479 | 5.52 | 59500 | 3.5703 |
+ | 3.4627 | 5.56 | 60000 | 3.5678 |
+ | 3.4478 | 5.61 | 60500 | 3.5659 |
+ | 3.4645 | 5.65 | 61000 | 3.5675 |
+ | 3.4658 | 5.7 | 61500 | 3.5666 |
+ | 3.4657 | 5.75 | 62000 | 3.5658 |
+ | 3.4618 | 5.79 | 62500 | 3.5653 |
+ | 3.4541 | 5.84 | 63000 | 3.5653 |
+ | 3.4552 | 5.89 | 63500 | 3.5648 |
+ | 3.4679 | 5.93 | 64000 | 3.5648 |
+ | 3.4423 | 5.98 | 64500 | 3.5652 |
+ | 3.3893 | 6.03 | 65000 | 3.5646 |
+ | 3.4239 | 6.07 | 65500 | 3.5668 |
+ | 3.4329 | 6.12 | 66000 | 3.5639 |
+ | 3.4151 | 6.16 | 66500 | 3.5649 |
+ | 3.4181 | 6.21 | 67000 | 3.5682 |
+ | 3.4314 | 6.26 | 67500 | 3.5669 |
+ | 3.4245 | 6.3 | 68000 | 3.5629 |
+ | 3.421 | 6.35 | 68500 | 3.5663 |
+ | 3.4329 | 6.4 | 69000 | 3.5660 |
+ | 3.4122 | 6.44 | 69500 | 3.5651 |
+ | 3.4362 | 6.49 | 70000 | 3.5628 |
+ | 3.4497 | 6.54 | 70500 | 3.5648 |
+ | 3.431 | 6.58 | 71000 | 3.5626 |
+ | 3.432 | 6.63 | 71500 | 3.5648 |
+ | 3.4208 | 6.67 | 72000 | 3.5635 |
+ | 3.4526 | 6.72 | 72500 | 3.5645 |
+ | 3.4139 | 6.77 | 73000 | 3.5621 |
+ | 3.4212 | 6.81 | 73500 | 3.5629 |
+ | 3.4352 | 6.86 | 74000 | 3.5597 |
+ | 3.4242 | 6.91 | 74500 | 3.5597 |
+ | 3.429 | 6.95 | 75000 | 3.5619 |
+ | 3.4133 | 7.0 | 75500 | 3.5592 |
+ | 3.4086 | 7.04 | 76000 | 3.5621 |
+ | 3.4056 | 7.09 | 76500 | 3.5604 |
+ | 3.4158 | 7.14 | 77000 | 3.5629 |
+ | 3.4153 | 7.18 | 77500 | 3.5609 |
+ | 3.4155 | 7.23 | 78000 | 3.5621 |
+ | 3.4117 | 7.28 | 78500 | 3.5626 |
+ | 3.407 | 7.32 | 79000 | 3.5638 |
+ | 3.3977 | 7.37 | 79500 | 3.5604 |
+ | 3.4134 | 7.42 | 80000 | 3.5611 |
+ | 3.4403 | 7.46 | 80500 | 3.5630 |
+ | 3.4002 | 7.51 | 81000 | 3.5601 |
+ | 3.4147 | 7.55 | 81500 | 3.5577 |
+ | 3.4068 | 7.6 | 82000 | 3.5588 |
+ | 3.4165 | 7.65 | 82500 | 3.5613 |
+ | 3.409 | 7.69 | 83000 | 3.5596 |
+ | 3.4213 | 7.74 | 83500 | 3.5583 |
+ | 3.403 | 7.79 | 84000 | 3.5601 |
+ | 3.3819 | 7.83 | 84500 | 3.5580 |
+ | 3.4182 | 7.88 | 85000 | 3.5570 |
+ | 3.4099 | 7.93 | 85500 | 3.5570 |
+ | 3.3845 | 7.97 | 86000 | 3.5582 |
+ | 3.411 | 8.02 | 86500 | 3.5610 |
+ | 3.3952 | 8.06 | 87000 | 3.5588 |
+ | 3.4211 | 8.11 | 87500 | 3.5588 |
+ | 3.4171 | 8.16 | 88000 | 3.5570 |
+ | 3.3825 | 8.2 | 88500 | 3.5607 |
+ | 3.3807 | 8.25 | 89000 | 3.5579 |
+ | 3.3842 | 8.3 | 89500 | 3.5583 |
+ | 3.3809 | 8.34 | 90000 | 3.5596 |
+ | 3.4033 | 8.39 | 90500 | 3.5590 |
+ | 3.4156 | 8.44 | 91000 | 3.5577 |
+ | 3.3927 | 8.48 | 91500 | 3.5585 |
+ | 3.4041 | 8.53 | 92000 | 3.5596 |
+ | 3.4006 | 8.57 | 92500 | 3.5600 |
+ | 3.4007 | 8.62 | 93000 | 3.5578 |
+ | 3.4047 | 8.67 | 93500 | 3.5572 |
+ | 3.3904 | 8.71 | 94000 | 3.5571 |
+ | 3.3888 | 8.76 | 94500 | 3.5581 |
+ | 3.3876 | 8.81 | 95000 | 3.5572 |
+ | 3.3872 | 8.85 | 95500 | 3.5575 |
+ | 3.3753 | 8.9 | 96000 | 3.5577 |
+ | 3.3961 | 8.95 | 96500 | 3.5568 |
+ | 3.4131 | 8.99 | 97000 | 3.5579 |
+ | 3.3647 | 9.04 | 97500 | 3.5573 |
+ | 3.3792 | 9.08 | 98000 | 3.5576 |
+ | 3.3755 | 9.13 | 98500 | 3.5575 |
+ | 3.3981 | 9.18 | 99000 | 3.5573 |
+ | 3.3914 | 9.22 | 99500 | 3.5573 |
+ | 3.4136 | 9.27 | 100000 | 3.5575 |
+
+
+ ### Framework versions
+
+ - Transformers 4.20.0.dev0
+ - Pytorch 1.11.0
+ - Datasets 2.2.2
+ - Tokenizers 0.12.1
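
For context beyond the auto-generated card: a minimal usage sketch, assuming the checkpoint is published under a repo ID such as `ratishsp/Centrum` (an assumption; substitute the actual ID). It joins the articles of one news cluster with the `<doc-sep>` separator registered in `added_tokens.json` below, and truncates to the model's 4096 encoder positions (see `config.json`).

```python
# Minimal usage sketch -- the repo ID "ratishsp/Centrum" is an assumption.
from transformers import LEDForConditionalGeneration, LEDTokenizer

model_id = "ratishsp/Centrum"  # hypothetical; substitute the real repo ID
tokenizer = LEDTokenizer.from_pretrained(model_id)
model = LEDForConditionalGeneration.from_pretrained(model_id)

# Join the articles of one news cluster with the <doc-sep> separator token.
docs = ["First article of the cluster ...", "Second article ..."]
inputs = tokenizer(" <doc-sep> ".join(docs), return_tensors="pt",
                   truncation=True, max_length=4096)

summary_ids = model.generate(**inputs, num_beams=5, max_length=256)
print(tokenizer.decode(summary_ids[0], skip_special_tokens=True))
```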
added_tokens.json ADDED
@@ -0,0 +1,3 @@
+ {
+   "<doc-sep>": 50265
+ }
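
This single entry extends the base tokenizer's 50,265-token vocabulary with a `<doc-sep>` document separator at id 50265, which is why `config.json` below reports `vocab_size: 50266`. A quick check, with the repo ID assumed as above:

```python
# Sketch: verify the separator token sits on top of the base vocabulary.
from transformers import LEDTokenizer

tok = LEDTokenizer.from_pretrained("ratishsp/Centrum")  # repo ID assumed
assert tok.convert_tokens_to_ids("<doc-sep>") == 50265
print(len(tok))  # 50266 = 50265 base tokens + 1 added separator
```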
all_results.json ADDED
@@ -0,0 +1,13 @@
+ {
+   "epoch": 9.27,
+   "eval_loss": 3.5567543506622314,
+   "eval_runtime": 23.716,
+   "eval_samples": 2522,
+   "eval_samples_per_second": 106.342,
+   "eval_steps_per_second": 6.662,
+   "train_loss": 3.524176220359802,
+   "train_runtime": 67760.9425,
+   "train_samples": 172615,
+   "train_samples_per_second": 23.612,
+   "train_steps_per_second": 1.476
+ }
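
The fractional `"epoch": 9.27` is consistent with the README hyperparameters: 172,615 training samples at a total train batch size of 16 give about 10,788 optimizer steps per epoch, so the 100,000-step budget ends roughly 27% into the tenth epoch. A quick sanity check:

```python
# Sanity check: training_steps vs. dataset size and effective batch size.
train_samples = 172_615           # "train_samples" above
total_train_batch_size = 16       # from the README hyperparameters
steps_per_epoch = train_samples / total_train_batch_size  # ~10788.4
print(round(100_000 / steps_per_epoch, 2))  # 9.27, matching "epoch"
```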
config.json ADDED
@@ -0,0 +1,54 @@
+ {
+   "_name_or_path": "Centrum",
+   "activation_dropout": 0.0,
+   "activation_function": "gelu",
+   "architectures": [
+     "LEDForConditionalGeneration"
+   ],
+   "attention_dropout": 0.0,
+   "attention_window": [
+     512,
+     512,
+     512,
+     512,
+     512,
+     512
+   ],
+   "bos_token_id": 0,
+   "classif_dropout": 0.0,
+   "classifier_dropout": 0.0,
+   "d_model": 768,
+   "decoder_attention_heads": 12,
+   "decoder_ffn_dim": 3072,
+   "decoder_layerdrop": 0.0,
+   "decoder_layers": 6,
+   "decoder_start_token_id": 2,
+   "dropout": 0.1,
+   "encoder_attention_heads": 12,
+   "encoder_ffn_dim": 3072,
+   "encoder_layerdrop": 0.0,
+   "encoder_layers": 6,
+   "eos_token_id": 2,
+   "gradient_checkpointing": false,
+   "id2label": {
+     "0": "LABEL_0",
+     "1": "LABEL_1",
+     "2": "LABEL_2"
+   },
+   "init_std": 0.02,
+   "is_encoder_decoder": true,
+   "label2id": {
+     "LABEL_0": 0,
+     "LABEL_1": 1,
+     "LABEL_2": 2
+   },
+   "max_decoder_position_embeddings": 1024,
+   "max_encoder_position_embeddings": 4096,
+   "model_type": "led",
+   "num_hidden_layers": 6,
+   "pad_token_id": 1,
+   "torch_dtype": "float32",
+   "transformers_version": "4.20.0.dev0",
+   "use_cache": true,
+   "vocab_size": 50266
+ }
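
The config describes a 6-layer encoder / 6-layer decoder LED with 512-token local attention windows and encoder position embeddings capped at 4096 (smaller than the base model's 16384). LED models also take a `global_attention_mask`; global attention on the first token is the usual LED convention, and extending it to the `<doc-sep>` positions is an assumption in the sketch below, not something this repo confirms.

```python
# Sketch (convention, not confirmed by this repo): global attention on the
# first token and on each <doc-sep> separator; local windows elsewhere.
import torch
from transformers import LEDForConditionalGeneration, LEDTokenizer

model_id = "ratishsp/Centrum"  # repo ID assumed
tok = LEDTokenizer.from_pretrained(model_id)
model = LEDForConditionalGeneration.from_pretrained(model_id)

inputs = tok("first doc ... <doc-sep> second doc ...", return_tensors="pt",
             truncation=True, max_length=4096)
sep_id = tok.convert_tokens_to_ids("<doc-sep>")

global_attention_mask = torch.zeros_like(inputs["input_ids"])
global_attention_mask[:, 0] = 1                           # <s> token
global_attention_mask[inputs["input_ids"] == sep_id] = 1  # separators

out = model.generate(**inputs, global_attention_mask=global_attention_mask,
                     num_beams=5, max_length=256)
print(tok.decode(out[0], skip_special_tokens=True))
```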
eval_results.json ADDED
@@ -0,0 +1,8 @@
+ {
+   "epoch": 9.27,
+   "eval_loss": 3.5567543506622314,
+   "eval_runtime": 23.716,
+   "eval_samples": 2522,
+   "eval_samples_per_second": 106.342,
+   "eval_steps_per_second": 6.662
+ }
merges.txt ADDED
The diff for this file is too large to render. See raw diff
 
pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6f32eb82307718433cdc1e4683b80a996130bc37ad24e5c459f1b503aee8ec9a
+ size 609932849
special_tokens_map.json ADDED
@@ -0,0 +1,51 @@
+ {
+   "bos_token": {
+     "content": "<s>",
+     "lstrip": false,
+     "normalized": true,
+     "rstrip": false,
+     "single_word": false
+   },
+   "cls_token": {
+     "content": "<s>",
+     "lstrip": false,
+     "normalized": true,
+     "rstrip": false,
+     "single_word": false
+   },
+   "eos_token": {
+     "content": "</s>",
+     "lstrip": false,
+     "normalized": true,
+     "rstrip": false,
+     "single_word": false
+   },
+   "mask_token": {
+     "content": "<mask>",
+     "lstrip": true,
+     "normalized": true,
+     "rstrip": false,
+     "single_word": false
+   },
+   "pad_token": {
+     "content": "<pad>",
+     "lstrip": false,
+     "normalized": true,
+     "rstrip": false,
+     "single_word": false
+   },
+   "sep_token": {
+     "content": "</s>",
+     "lstrip": false,
+     "normalized": true,
+     "rstrip": false,
+     "single_word": false
+   },
+   "unk_token": {
+     "content": "<unk>",
+     "lstrip": false,
+     "normalized": true,
+     "rstrip": false,
+     "single_word": false
+   }
+ }
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json ADDED
@@ -0,0 +1,65 @@
+ {
+   "add_prefix_space": false,
+   "bos_token": {
+     "__type": "AddedToken",
+     "content": "<s>",
+     "lstrip": false,
+     "normalized": true,
+     "rstrip": false,
+     "single_word": false
+   },
+   "cls_token": {
+     "__type": "AddedToken",
+     "content": "<s>",
+     "lstrip": false,
+     "normalized": true,
+     "rstrip": false,
+     "single_word": false
+   },
+   "eos_token": {
+     "__type": "AddedToken",
+     "content": "</s>",
+     "lstrip": false,
+     "normalized": true,
+     "rstrip": false,
+     "single_word": false
+   },
+   "errors": "replace",
+   "mask_token": {
+     "__type": "AddedToken",
+     "content": "<mask>",
+     "lstrip": true,
+     "normalized": true,
+     "rstrip": false,
+     "single_word": false
+   },
+   "model_max_length": 16384,
+   "name_or_path": "allenai/led-base-16384",
+   "pad_token": {
+     "__type": "AddedToken",
+     "content": "<pad>",
+     "lstrip": false,
+     "normalized": true,
+     "rstrip": false,
+     "single_word": false
+   },
+   "sep_token": {
+     "__type": "AddedToken",
+     "content": "</s>",
+     "lstrip": false,
+     "normalized": true,
+     "rstrip": false,
+     "single_word": false
+   },
+   "special_tokens_map_file": "/home/hpcpudu1/.cache/huggingface/transformers/05da652a7fca41c1c18027c1201e473217bb373e370d1283e3de49d5880cbf0c.cb2244924ab24d706b02fd7fcedaea4531566537687a539ebb94db511fd122a0",
+   "tokenizer_class": "LEDTokenizer",
+   "trim_offsets": true,
+   "unk_token": {
+     "__type": "AddedToken",
+     "content": "<unk>",
+     "lstrip": false,
+     "normalized": true,
+     "rstrip": false,
+     "single_word": false
+   }
+ }
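
Note that `model_max_length: 16384` is inherited from the base `allenai/led-base-16384` tokenizer, while `config.json` above caps encoder positions at 4096, so relying on the tokenizer default could overrun the model. Passing `max_length` explicitly, as in the earlier sketches, avoids this:

```python
# Sketch: the tokenizer default (16384) exceeds the model's 4096 encoder
# positions, so set max_length explicitly when encoding.
from transformers import LEDTokenizer

tok = LEDTokenizer.from_pretrained("ratishsp/Centrum")  # repo ID assumed
enc = tok("a very long multi-document input ...",
          truncation=True, max_length=4096, return_tensors="pt")
print(enc["input_ids"].shape[1])  # <= 4096
```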
train_results.json ADDED
@@ -0,0 +1,8 @@
+ {
+   "epoch": 9.27,
+   "train_loss": 3.524176220359802,
+   "train_runtime": 67760.9425,
+   "train_samples": 172615,
+   "train_samples_per_second": 23.612,
+   "train_steps_per_second": 1.476
+ }
trainer_state.json ADDED
The diff for this file is too large to render. See raw diff
 
training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:68fc8c7d6fb26c77dedb419d46b2f28a4b8b5cf38dcb730a791ffb6a206d2877
+ size 3439
vocab.json ADDED
The diff for this file is too large to render. See raw diff