jorgeortizfuentes committed
Commit
f3f7d28
1 Parent(s): 8bcca8b

Add best model

README.md ADDED
@@ -0,0 +1,76 @@
+ ---
+ language:
+ - es
+ license: cc-by-4.0
+ tags:
+ - generated_from_trainer
+ datasets:
+ - jorgeortizfuentes/toxicity_spanish_hate_speech_v2
+ metrics:
+ - f1
+ model-index:
+ - name: hate_speech-dv2-patana-chilean-spanish-bert-8k0iqdv2
+   results:
+   - task:
+       name: Text Classification
+       type: text-classification
+     dataset:
+       name: jorgeortizfuentes/toxicity_spanish_hate_speech_v2
+       type: jorgeortizfuentes/toxicity_spanish_hate_speech_v2
+       config: null
+       split: validation
+     metrics:
+     - name: F1
+       type: f1
+       value: 0.8160919540229885
+ ---
+
+ <!-- This model card has been generated automatically according to the information the Trainer had access to. You
+ should probably proofread and complete it, then remove this comment. -->
+
+ # hate_speech-dv2-patana-chilean-spanish-bert-8k0iqdv2
+
+ This model is a fine-tuned version of [dccuchile/patana-chilean-spanish-bert](https://huggingface.co/dccuchile/patana-chilean-spanish-bert) on the jorgeortizfuentes/toxicity_spanish_hate_speech_v2 dataset.
+ It achieves the following results on the evaluation set:
+ - Loss: 0.1628
+ - F1: 0.8161
+
+ ## Model description
+
+ More information needed
+
+ ## Intended uses & limitations
+
+ More information needed
+
+ ## Training and evaluation data
+
+ More information needed
+
+ ## Training procedure
+
+ ### Training hyperparameters
+
+ The following hyperparameters were used during training:
+ - learning_rate: 4e-05
+ - train_batch_size: 128
+ - eval_batch_size: 128
+ - seed: 13
+ - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
+ - lr_scheduler_type: linear
+ - num_epochs: 10
+
+ ### Training results
+
+ | Training Loss | Epoch | Step | Validation Loss | F1     |
+ |:-------------:|:-----:|:----:|:---------------:|:------:|
+ | 0.0528        | 5.0   | 430  | 0.1698          | 0.7376 |
+ | 0.003         | 10.0  | 860  | 0.1628          | 0.8161 |
+
+
+ ### Framework versions
+
+ - Transformers 4.30.2
+ - Pytorch 2.0.1+cu117
+ - Datasets 2.13.1
+ - Tokenizers 0.13.3
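For a quick smoke test of the model this card describes, a minimal inference sketch along these lines should work. Note the repo id is an assumption inferred from the committer's namespace plus the model-index name; the diff itself never states it:

```python
# Minimal sketch: run the fine-tuned Spanish hate-speech classifier.
# The repo id below is inferred (committer namespace + model-index name),
# and labels come back as LABEL_0/LABEL_1 because config.json sets no id2label.
from transformers import pipeline

classifier = pipeline(
    "text-classification",
    model="jorgeortizfuentes/hate_speech-dv2-patana-chilean-spanish-bert-8k0iqdv2",
)

for result in classifier(["Que tengas un buen día", "otro texto de ejemplo"]):
    print(result)  # e.g. {'label': 'LABEL_0', 'score': 0.99...}
```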
all_results.json ADDED
@@ -0,0 +1,8 @@
+ {
+   "epoch": 10.0,
+   "train_loss": 0.02791861295700073,
+   "train_runtime": 1253.6999,
+   "train_samples": 10892,
+   "train_samples_per_second": 86.879,
+   "train_steps_per_second": 0.686
+ }
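These figures are internally consistent: 10,892 training samples over 10 epochs in 1,253.7 s is about 86.9 samples per second, and the 860 total optimizer steps (see trainer_state.json below) give about 0.69 steps per second. A quick check:

```python
# Cross-check of the throughput values reported in all_results.json.
train_samples, epochs, runtime = 10892, 10.0, 1253.6999

print(train_samples * epochs / runtime)  # ~86.879 -> train_samples_per_second
print(860 / runtime)                     # ~0.686  -> train_steps_per_second
```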
checkpoint-500/config.json ADDED
@@ -0,0 +1,28 @@
+ {
+   "_name_or_path": "dccuchile/patana-chilean-spanish-bert",
+   "architectures": [
+     "BertForSequenceClassification"
+   ],
+   "attention_probs_dropout_prob": 0.1,
+   "classifier_dropout": null,
+   "gradient_checkpointing": false,
+   "hidden_act": "gelu",
+   "hidden_dropout_prob": 0.1,
+   "hidden_size": 768,
+   "initializer_range": 0.02,
+   "intermediate_size": 3072,
+   "layer_norm_eps": 1e-12,
+   "max_position_embeddings": 512,
+   "model_type": "bert",
+   "num_attention_heads": 12,
+   "num_hidden_layers": 12,
+   "output_past": true,
+   "pad_token_id": 1,
+   "position_embedding_type": "absolute",
+   "problem_type": "single_label_classification",
+   "torch_dtype": "float32",
+   "transformers_version": "4.30.2",
+   "type_vocab_size": 2,
+   "use_cache": true,
+   "vocab_size": 31002
+ }
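This is a stock BERT-base configuration (12 layers, 12 heads, hidden size 768) with a single-label classification head. A sketch for reloading the intermediate checkpoint from a local clone of the repo (the relative path is illustrative, not defined by the diff):

```python
# Sketch: reload the step-500 checkpoint; "./checkpoint-500" is an illustrative path.
from transformers import AutoModelForSequenceClassification

model = AutoModelForSequenceClassification.from_pretrained("./checkpoint-500")
print(model.config.num_labels)  # 2 (the default, since config.json sets no id2label)
print(model.num_parameters())   # ~110M parameters for this BERT-base architecture
```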
checkpoint-500/model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2f57e8823e33c33f434989923de54a95f04b81a615e652a5182bd0eea7c7ef1a
+ size 439437392
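These three lines are a Git LFS pointer, not the weights themselves: the ~439 MB safetensors payload lives in LFS storage, keyed by its SHA-256, and a plain clone without LFS fetches only this stub. One way to pull the real file programmatically, using the same inferred repo id as above:

```python
# Sketch: download the LFS-backed weights file itself rather than the 3-line pointer.
# The repo id is an assumption inferred from the model card, not stated in the diff.
from huggingface_hub import hf_hub_download

path = hf_hub_download(
    repo_id="jorgeortizfuentes/hate_speech-dv2-patana-chilean-spanish-bert-8k0iqdv2",
    filename="model.safetensors",
)
print(path)  # local cache path; the file should be 439437392 bytes, matching "size" above
```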
checkpoint-500/optimizer.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:30cf7c4b726a1bfee5f14681bbb4a13fa94e62f02873bceeb79a6352489f18f1
+ size 878937221
checkpoint-500/rng_state.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8d4e6b040ff015bd9b06563b5be216f512e8eb05cf5c40a77247d802ab88208b
+ size 14639
checkpoint-500/scheduler.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6c9fc6b0c905660fa8c460753a9a9c807f4dbd7b8107893a7b6adabd9e9130ac
+ size 627
checkpoint-500/special_tokens_map.json ADDED
@@ -0,0 +1,7 @@
+ {
+   "cls_token": "[CLS]",
+   "mask_token": "[MASK]",
+   "pad_token": "[PAD]",
+   "sep_token": "[SEP]",
+   "unk_token": "[UNK]"
+ }
checkpoint-500/tokenizer_config.json ADDED
@@ -0,0 +1,15 @@
+ {
+   "clean_up_tokenization_spaces": true,
+   "cls_token": "[CLS]",
+   "do_basic_tokenize": true,
+   "do_lower_case": false,
+   "mask_token": "[MASK]",
+   "model_max_length": 512,
+   "never_split": null,
+   "pad_token": "[PAD]",
+   "sep_token": "[SEP]",
+   "strip_accents": false,
+   "tokenize_chinese_chars": true,
+   "tokenizer_class": "BertTokenizer",
+   "unk_token": "[UNK]"
+ }
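Note the cased setup (`do_lower_case: false`, `strip_accents: false`), which matters for Spanish. A quick inspection sketch, again assuming an illustrative local clone:

```python
# Sketch: confirm the tokenizer settings declared in tokenizer_config.json.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("./checkpoint-500")  # illustrative local path
print(tok.model_max_length)                 # 512
print(tok.tokenize("¿Cómo estás, Chile?"))  # cased WordPiece pieces, accents preserved
```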
checkpoint-500/trainer_state.json ADDED
@@ -0,0 +1,31 @@
+ {
+   "best_metric": null,
+   "best_model_checkpoint": null,
+   "epoch": 5.813953488372093,
+   "global_step": 500,
+   "is_hyper_param_search": false,
+   "is_local_process_zero": true,
+   "is_world_process_zero": true,
+   "log_history": [
+     {
+       "epoch": 5.0,
+       "learning_rate": 2e-05,
+       "loss": 0.0528,
+       "step": 430
+     },
+     {
+       "epoch": 5.0,
+       "eval_f1": 0.7376237623762376,
+       "eval_loss": 0.1697508543729782,
+       "eval_runtime": 11.9306,
+       "eval_samples_per_second": 228.236,
+       "eval_steps_per_second": 1.844,
+       "step": 430
+     }
+   ],
+   "max_steps": 860,
+   "num_train_epochs": 10,
+   "total_flos": 1.66865031309312e+16,
+   "trial_name": null,
+   "trial_params": null
+ }
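The fractional `epoch` value is just the step schedule: 860 total steps over 10 epochs means 86 optimizer steps per epoch (consistent with ceil(10892 / 128) = 86 batches of size 128), so step 500 lands at 500 / 86 ≈ 5.814 epochs, exactly the value recorded above:

```python
# Why checkpoint-500/trainer_state.json records epoch 5.8139... at global_step 500.
import math

steps_per_epoch = math.ceil(10892 / 128)  # 86 batches of 128 per epoch
print(steps_per_epoch * 10)               # 860 = max_steps
print(500 / steps_per_epoch)              # 5.813953488372093 = recorded "epoch"
```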
checkpoint-500/training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:285887ee7fbb469bfaa4eb3d94b7655d5d9b9295c618ac1d0ddf4da4b61d80db
+ size 4027
checkpoint-500/vocab.txt ADDED
The diff for this file is too large to render. See raw diff
 
config.json ADDED
@@ -0,0 +1,28 @@
+ {
+   "_name_or_path": "dccuchile/patana-chilean-spanish-bert",
+   "architectures": [
+     "BertForSequenceClassification"
+   ],
+   "attention_probs_dropout_prob": 0.1,
+   "classifier_dropout": null,
+   "gradient_checkpointing": false,
+   "hidden_act": "gelu",
+   "hidden_dropout_prob": 0.1,
+   "hidden_size": 768,
+   "initializer_range": 0.02,
+   "intermediate_size": 3072,
+   "layer_norm_eps": 1e-12,
+   "max_position_embeddings": 512,
+   "model_type": "bert",
+   "num_attention_heads": 12,
+   "num_hidden_layers": 12,
+   "output_past": true,
+   "pad_token_id": 1,
+   "position_embedding_type": "absolute",
+   "problem_type": "single_label_classification",
+   "torch_dtype": "float32",
+   "transformers_version": "4.30.2",
+   "type_vocab_size": 2,
+   "use_cache": true,
+   "vocab_size": 31002
+ }
model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9e0ecabdde1a6ed35d4fa5d6164b63cd9a2d7f35fb475a45c369d43b174781fe
+ size 439437392
predict_results_hate_speech.txt ADDED
@@ -0,0 +1,801 @@
+ index prediction
The 800 data rows (one `index prediction` pair per line, for test indices 0-799) are too long to render in full here: the predicted label is 1 for indices 22, 171, 199, 267, 282, 316, 506, 511, 541, 590, 640, 685, 707 and 765, and 0 for every other index.
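For orientation, the file is whitespace-separated with `index` and `prediction` columns; a short sketch tallies the label distribution:

```python
# Sketch: summarize the test-set predictions added in this commit.
import pandas as pd

df = pd.read_csv("predict_results_hate_speech.txt", sep=r"\s+")
print(len(df))                          # 800 rows
print(df["prediction"].value_counts())  # 786 x 0, 14 x 1 (~1.8% flagged as hate speech)
```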
special_tokens_map.json ADDED
@@ -0,0 +1,7 @@
+ {
+   "cls_token": "[CLS]",
+   "mask_token": "[MASK]",
+   "pad_token": "[PAD]",
+   "sep_token": "[SEP]",
+   "unk_token": "[UNK]"
+ }
tokenizer_config.json ADDED
@@ -0,0 +1,15 @@
+ {
+   "clean_up_tokenization_spaces": true,
+   "cls_token": "[CLS]",
+   "do_basic_tokenize": true,
+   "do_lower_case": false,
+   "mask_token": "[MASK]",
+   "model_max_length": 512,
+   "never_split": null,
+   "pad_token": "[PAD]",
+   "sep_token": "[SEP]",
+   "strip_accents": false,
+   "tokenize_chinese_chars": true,
+   "tokenizer_class": "BertTokenizer",
+   "unk_token": "[UNK]"
+ }
train_results.json ADDED
@@ -0,0 +1,8 @@
+ {
+   "epoch": 10.0,
+   "train_loss": 0.02791861295700073,
+   "train_runtime": 1253.6999,
+   "train_samples": 10892,
+   "train_samples_per_second": 86.879,
+   "train_steps_per_second": 0.686
+ }
trainer_state.json ADDED
@@ -0,0 +1,55 @@
+ {
+   "best_metric": null,
+   "best_model_checkpoint": null,
+   "epoch": 10.0,
+   "global_step": 860,
+   "is_hyper_param_search": false,
+   "is_local_process_zero": true,
+   "is_world_process_zero": true,
+   "log_history": [
+     {
+       "epoch": 5.0,
+       "learning_rate": 2e-05,
+       "loss": 0.0528,
+       "step": 430
+     },
+     {
+       "epoch": 5.0,
+       "eval_f1": 0.7376237623762376,
+       "eval_loss": 0.1697508543729782,
+       "eval_runtime": 11.9306,
+       "eval_samples_per_second": 228.236,
+       "eval_steps_per_second": 1.844,
+       "step": 430
+     },
+     {
+       "epoch": 10.0,
+       "learning_rate": 0.0,
+       "loss": 0.003,
+       "step": 860
+     },
+     {
+       "epoch": 10.0,
+       "eval_f1": 0.8160919540229885,
+       "eval_loss": 0.16279307007789612,
+       "eval_runtime": 11.9395,
+       "eval_samples_per_second": 228.067,
+       "eval_steps_per_second": 1.843,
+       "step": 860
+     },
+     {
+       "epoch": 10.0,
+       "step": 860,
+       "total_flos": 2.86580561498112e+16,
+       "train_loss": 0.02791861295700073,
+       "train_runtime": 1253.6999,
+       "train_samples_per_second": 86.879,
+       "train_steps_per_second": 0.686
+     }
+   ],
+   "max_steps": 860,
+   "num_train_epochs": 10,
+   "total_flos": 2.86580561498112e+16,
+   "trial_name": null,
+   "trial_params": null
+ }
training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:285887ee7fbb469bfaa4eb3d94b7655d5d9b9295c618ac1d0ddf4da4b61d80db
+ size 4027
vocab.txt ADDED
The diff for this file is too large to render. See raw diff