Joshua Lochner committed on
Commit bd7202e (1 parent: 59eaa57)

Next training iteration (185k)

added_tokens.json CHANGED
@@ -1 +1 @@
- {"SHORT_HYPHENATED_TOKEN": 32105, "END_SPONSOR_TOKEN": 32113, "EXTRACT_SEGMENTS: ": 32100, "[Music]": 32107, "NUMBER_PERCENTAGE_TOKEN": 32103, "LONG_WORD_TOKEN": 32106, "HYPHENATED_URL_TOKEN": 32102, "START_INTERACTION_TOKEN": 32116, "URL_TOKEN": 32101, "END_INTERACTION_TOKEN": 32117, "END_SELFPROMO_TOKEN": 32115, "NUMBER_TOKEN": 32104, "NO_SEGMENT_TOKEN": 32111, "PROFANITY_TOKEN": 32110, "BETWEEN_SEGMENTS_TOKEN": 32118, "START_SELFPROMO_TOKEN": 32114, "[Laughter]": 32109, "[Applause]": 32108, "START_SPONSOR_TOKEN": 32112}
+ {"START_INTERACTION_TOKEN": 32116, "LONG_WORD_TOKEN": 32106, "NUMBER_PERCENTAGE_TOKEN": 32103, "END_INTERACTION_TOKEN": 32117, "START_SELFPROMO_TOKEN": 32114, "[Music]": 32107, "END_SPONSOR_TOKEN": 32113, "URL_TOKEN": 32101, "NUMBER_TOKEN": 32104, "PROFANITY_TOKEN": 32110, "HYPHENATED_URL_TOKEN": 32102, "END_SELFPROMO_TOKEN": 32115, "NO_SEGMENT_TOKEN": 32111, "SHORT_HYPHENATED_TOKEN": 32105, "BETWEEN_SEGMENTS_TOKEN": 32118, "START_SPONSOR_TOKEN": 32112, "EXTRACT_SEGMENTS: ": 32100, "[Applause]": 32108, "[Laughter]": 32109}
config.json CHANGED
@@ -22,7 +22,7 @@
   "relative_attention_num_buckets": 32,
   "tie_word_embeddings": false,
   "torch_dtype": "float32",
- "transformers_version": "4.15.0",
+ "transformers_version": "4.16.1",
   "use_cache": true,
   "vocab_size": 32119
 }
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:71bb817c2eb93b2299b2db3e9a4fdf54230ad4f8af14493ca0539096676da785
+ oid sha256:a23fa3d63f7b869dd2fcde0ae59b6f84a4d6a7c2e5e7cea918425d09eec82f7a
  size 990383053
rng_state.pth CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:5f5898f076f1662c347f647ca9305dacb1097aa523e328bb84cc7c4a92dda75a
+ oid sha256:c730413130fc63756f54d5820e7a7171dce6b48472fe73812d2eeee7d3ecc44d
  size 14503
scheduler.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:c2e5c883e4333a69700fe4d098d3261cf9eac2c32334388c85d05b1fa6f3a483
+ oid sha256:8fc17a095b50cb3c162e9244c249edb34b9632ba405100031ff659035a9789a7
  size 623
tokenizer.json CHANGED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json CHANGED
@@ -1 +1 @@
- {"eos_token": "</s>", "unk_token": "<unk>", "pad_token": "<pad>", "extra_ids": 100, "additional_special_tokens": ["<extra_id_0>", "<extra_id_1>", "<extra_id_2>", "<extra_id_3>", "<extra_id_4>", "<extra_id_5>", "<extra_id_6>", "<extra_id_7>", "<extra_id_8>", "<extra_id_9>", "<extra_id_10>", "<extra_id_11>", "<extra_id_12>", "<extra_id_13>", "<extra_id_14>", "<extra_id_15>", "<extra_id_16>", "<extra_id_17>", "<extra_id_18>", "<extra_id_19>", "<extra_id_20>", "<extra_id_21>", "<extra_id_22>", "<extra_id_23>", "<extra_id_24>", "<extra_id_25>", "<extra_id_26>", "<extra_id_27>", "<extra_id_28>", "<extra_id_29>", "<extra_id_30>", "<extra_id_31>", "<extra_id_32>", "<extra_id_33>", "<extra_id_34>", "<extra_id_35>", "<extra_id_36>", "<extra_id_37>", "<extra_id_38>", "<extra_id_39>", "<extra_id_40>", "<extra_id_41>", "<extra_id_42>", "<extra_id_43>", "<extra_id_44>", "<extra_id_45>", "<extra_id_46>", "<extra_id_47>", "<extra_id_48>", "<extra_id_49>", "<extra_id_50>", "<extra_id_51>", "<extra_id_52>", "<extra_id_53>", "<extra_id_54>", "<extra_id_55>", "<extra_id_56>", "<extra_id_57>", "<extra_id_58>", "<extra_id_59>", "<extra_id_60>", "<extra_id_61>", "<extra_id_62>", "<extra_id_63>", "<extra_id_64>", "<extra_id_65>", "<extra_id_66>", "<extra_id_67>", "<extra_id_68>", "<extra_id_69>", "<extra_id_70>", "<extra_id_71>", "<extra_id_72>", "<extra_id_73>", "<extra_id_74>", "<extra_id_75>", "<extra_id_76>", "<extra_id_77>", "<extra_id_78>", "<extra_id_79>", "<extra_id_80>", "<extra_id_81>", "<extra_id_82>", "<extra_id_83>", "<extra_id_84>", "<extra_id_85>", "<extra_id_86>", "<extra_id_87>", "<extra_id_88>", "<extra_id_89>", "<extra_id_90>", "<extra_id_91>", "<extra_id_92>", "<extra_id_93>", "<extra_id_94>", "<extra_id_95>", "<extra_id_96>", "<extra_id_97>", "<extra_id_98>", "<extra_id_99>"], "model_max_length": 512, "name_or_path": "google/t5-v1_1-base", "special_tokens_map_file": "/root/.cache/huggingface/transformers/76bf19bfedb85afbe644966ca9ab7b0404d753a41bf601115bced39f825ffa9c.c94798918c92ded6aeef2d2f0e666d2cc4145eca1aa6e1336fde07f2e13e2f46", "sp_model_kwargs": {}, "tokenizer_class": "T5Tokenizer"}
+ {"eos_token": "</s>", "unk_token": "<unk>", "pad_token": "<pad>", "extra_ids": 100, "additional_special_tokens": ["<extra_id_0>", "<extra_id_1>", "<extra_id_2>", "<extra_id_3>", "<extra_id_4>", "<extra_id_5>", "<extra_id_6>", "<extra_id_7>", "<extra_id_8>", "<extra_id_9>", "<extra_id_10>", "<extra_id_11>", "<extra_id_12>", "<extra_id_13>", "<extra_id_14>", "<extra_id_15>", "<extra_id_16>", "<extra_id_17>", "<extra_id_18>", "<extra_id_19>", "<extra_id_20>", "<extra_id_21>", "<extra_id_22>", "<extra_id_23>", "<extra_id_24>", "<extra_id_25>", "<extra_id_26>", "<extra_id_27>", "<extra_id_28>", "<extra_id_29>", "<extra_id_30>", "<extra_id_31>", "<extra_id_32>", "<extra_id_33>", "<extra_id_34>", "<extra_id_35>", "<extra_id_36>", "<extra_id_37>", "<extra_id_38>", "<extra_id_39>", "<extra_id_40>", "<extra_id_41>", "<extra_id_42>", "<extra_id_43>", "<extra_id_44>", "<extra_id_45>", "<extra_id_46>", "<extra_id_47>", "<extra_id_48>", "<extra_id_49>", "<extra_id_50>", "<extra_id_51>", "<extra_id_52>", "<extra_id_53>", "<extra_id_54>", "<extra_id_55>", "<extra_id_56>", "<extra_id_57>", "<extra_id_58>", "<extra_id_59>", "<extra_id_60>", "<extra_id_61>", "<extra_id_62>", "<extra_id_63>", "<extra_id_64>", "<extra_id_65>", "<extra_id_66>", "<extra_id_67>", "<extra_id_68>", "<extra_id_69>", "<extra_id_70>", "<extra_id_71>", "<extra_id_72>", "<extra_id_73>", "<extra_id_74>", "<extra_id_75>", "<extra_id_76>", "<extra_id_77>", "<extra_id_78>", "<extra_id_79>", "<extra_id_80>", "<extra_id_81>", "<extra_id_82>", "<extra_id_83>", "<extra_id_84>", "<extra_id_85>", "<extra_id_86>", "<extra_id_87>", "<extra_id_88>", "<extra_id_89>", "<extra_id_90>", "<extra_id_91>", "<extra_id_92>", "<extra_id_93>", "<extra_id_94>", "<extra_id_95>", "<extra_id_96>", "<extra_id_97>", "<extra_id_98>", "<extra_id_99>"], "model_max_length": 512, "name_or_path": "google/t5-v1_1-base", "max_length": 768, "special_tokens_map_file": "/root/.cache/huggingface/transformers/76bf19bfedb85afbe644966ca9ab7b0404d753a41bf601115bced39f825ffa9c.c94798918c92ded6aeef2d2f0e666d2cc4145eca1aa6e1336fde07f2e13e2f46", "sp_model_kwargs": {}, "tokenizer_class": "T5Tokenizer"}
trainer_state.json CHANGED
@@ -1,8 +1,8 @@
  {
    "best_metric": null,
    "best_model_checkpoint": null,
- "epoch": 0.1612045201747457,
- "global_step": 90000,
+ "epoch": 1.0634139611882645,
+ "global_step": 185000,
    "is_hyper_param_search": false,
    "is_local_process_zero": true,
    "is_world_process_zero": true,
@@ -122,11 +122,142 @@
    "learning_rate": 4.1939773991262716e-05,
    "loss": 0.0966,
    "step": 90000
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 4.149198365744398e-05,
+ "loss": 0.096,
+ "step": 95000
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 4.104419332362524e-05,
+ "loss": 0.0891,
+ "step": 100000
+ },
+ {
+ "epoch": 0.18,
+ "eval_loss": 0.08249569684267044,
+ "eval_runtime": 3211.3155,
+ "eval_samples_per_second": 19.317,
+ "eval_steps_per_second": 9.659,
+ "step": 100000
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 4.0596402989806505e-05,
+ "loss": 0.0897,
+ "step": 105000
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 4.0148612655987766e-05,
+ "loss": 0.0895,
+ "step": 110000
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 3.970082232216903e-05,
+ "loss": 0.0929,
+ "step": 115000
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 3.925303198835029e-05,
+ "loss": 0.0857,
+ "step": 120000
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 3.880524165453155e-05,
+ "loss": 0.0879,
+ "step": 125000
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 3.835745132071281e-05,
+ "loss": 0.0855,
+ "step": 130000
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 3.790966098689408e-05,
+ "loss": 0.0841,
+ "step": 135000
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 3.746187065307534e-05,
+ "loss": 0.0866,
+ "step": 140000
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 3.70140803192566e-05,
+ "loss": 0.0855,
+ "step": 145000
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 3.6566289985437866e-05,
+ "loss": 0.0825,
+ "step": 150000
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 3.611849965161912e-05,
+ "loss": 0.082,
+ "step": 155000
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 3.567070931780038e-05,
+ "loss": 0.0865,
+ "step": 160000
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 3.522291898398165e-05,
+ "loss": 0.0814,
+ "step": 165000
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 1.1404396210797388e-06,
+ "loss": 0.0811,
+ "step": 170000
+ },
+ {
+ "epoch": 1.0,
+ "step": 173968,
+ "total_flos": 1.727613380688722e+17,
+ "train_loss": 0.004168222484029031,
+ "train_runtime": 9992.3727,
+ "train_samples_per_second": 34.82,
+ "train_steps_per_second": 17.41
+ },
+ {
+ "epoch": 1.01,
+ "learning_rate": 4.997033937275821e-05,
+ "loss": 0.079,
+ "step": 175000
+ },
+ {
+ "epoch": 1.03,
+ "learning_rate": 4.982663478340844e-05,
+ "loss": 0.0806,
+ "step": 180000
+ },
+ {
+ "epoch": 1.06,
+ "learning_rate": 4.968293019405868e-05,
+ "loss": 0.0784,
+ "step": 185000
    }
  ],
- "max_steps": 558297,
- "num_train_epochs": 1,
- "total_flos": 8.885642742554112e+16,
+ "max_steps": 1739680,
+ "num_train_epochs": 10,
+ "total_flos": 1.849684070324828e+17,
    "trial_name": null,
    "trial_params": null
  }
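As a quick consistency check on the new state (all numbers taken from the diff above): the entry logged at epoch 1.0 puts one epoch at 173,968 optimizer steps, which matches both the reported epoch and the new max_steps:

    steps_per_epoch = 173968            # "step" logged at "epoch": 1.0
    print(185000 / steps_per_epoch)     # ~1.0634 -> "epoch": 1.0634139611882645
    print(steps_per_epoch * 10)         # 1739680 -> "max_steps" for num_train_epochs = 10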
training_args.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:da11e8af50c38dbf9430e2d7bbfab617719d848605add5ab7ec3aa4da77adca5
- size 3119
+ oid sha256:cbfa8ba174e9fc1e47a9566b4253523ff02ac3c21b714c241b7469d42c08648c
+ size 3183