transliterated-akk-en-t5-small-instruct-small-context
- README.md +235 -0
- added_tokens.json +0 -0
- config.json +33 -0
- generation_config.json +7 -0
- model.safetensors +3 -0
- special_tokens_map.json +125 -0
- spiece.model +3 -0
- tokenizer_config.json +0 -0
- training_args.bin +3 -0
README.md
ADDED
@@ -0,0 +1,235 @@
---
tags:
- generated_from_trainer
model-index:
- name: AraT5v2-base-1024-p-l-akk-en-20240811-231511
  results: []
---

<!-- This model card has been generated automatically according to the information the Trainer had access to. You
should probably proofread and complete it, then remove this comment. -->

# AraT5v2-base-1024-p-l-akk-en-20240811-231511

This model was trained from scratch on an unspecified dataset (the dataset name was not recorded by the Trainer).
It achieves the following results on the evaluation set:
- Loss: 0.4597

## Model description

More information needed. Judging from the repository name and the configuration below, this appears to be a T5 encoder-decoder model (`T5ForConditionalGeneration`, AraT5v2-base architecture) for translating transliterated Akkadian into English. The configuration's `_name_or_path` points to a checkpoint of an earlier run, so training appears to have continued from that checkpoint rather than strictly from scratch.

## Intended uses & limitations

More information needed
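
A minimal usage sketch is shown below. It is not an official example: the repository id is a placeholder, and the input formatting and generation settings are assumptions, since this card does not document how inputs were presented during training.

```python
# Hypothetical inference sketch for this model; the repo id/path is a placeholder.
from transformers import T5ForConditionalGeneration, T5Tokenizer

model_id = "path/or/repo-id-of-this-model"  # placeholder
tokenizer = T5Tokenizer.from_pretrained(model_id)
model = T5ForConditionalGeneration.from_pretrained(model_id)

# Illustrative transliterated Akkadian input; the exact prompt format used
# during training is not documented in this model card.
text = "a-na be-li2-ia qi2-bi2-ma"
inputs = tokenizer(text, return_tensors="pt")
outputs = model.generate(**inputs, max_new_tokens=128)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
```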

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training (a configuration sketch follows the list):
- learning_rate: 4e-05
- train_batch_size: 1
- eval_batch_size: 1
- seed: 42
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- num_epochs: 10
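
As an assumption (this is not the authors' training script), the values above map onto Hugging Face `Seq2SeqTrainingArguments` roughly as sketched below; `output_dir` is a placeholder and the 2,500-step evaluation interval is inferred from the results table.

```python
# Hypothetical mapping of the listed hyperparameters onto Seq2SeqTrainingArguments.
# Not the original training script; output_dir is a placeholder and eval_steps is
# inferred from the evaluation interval visible in the results table below.
from transformers import Seq2SeqTrainingArguments

training_args = Seq2SeqTrainingArguments(
    output_dir="./results",        # placeholder
    learning_rate=4e-5,
    per_device_train_batch_size=1,
    per_device_eval_batch_size=1,
    seed=42,
    adam_beta1=0.9,
    adam_beta2=0.999,
    adam_epsilon=1e-8,
    lr_scheduler_type="linear",
    num_train_epochs=10,
    eval_strategy="steps",         # evaluation every 2,500 steps (inferred)
    eval_steps=2500,
)
```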

### Training results

| Training Loss | Epoch | Step | Validation Loss |
|:-------------:|:------:|:------:|:---------------:|
| 0.364 | 0.0552 | 2500 | 0.4332 |
| 0.3744 | 0.1105 | 5000 | 0.4408 |
| 0.3825 | 0.1657 | 7500 | 0.4430 |
| 0.384 | 0.2210 | 10000 | 0.4377 |
| 0.3751 | 0.2762 | 12500 | 0.4421 |
| 0.4055 | 0.3314 | 15000 | 0.4372 |
| 0.355 | 0.3867 | 17500 | 0.4352 |
| 0.3871 | 0.4419 | 20000 | 0.4376 |
| 0.4159 | 0.4972 | 22500 | 0.4335 |
| 0.3782 | 0.5524 | 25000 | 0.4295 |
| 0.384 | 0.6077 | 27500 | 0.4305 |
| 0.3782 | 0.6629 | 30000 | 0.4311 |
| 0.3708 | 0.7181 | 32500 | 0.4332 |
| 0.3809 | 0.7734 | 35000 | 0.4263 |
| 0.3964 | 0.8286 | 37500 | 0.4280 |
| 0.3832 | 0.8839 | 40000 | 0.4253 |
| 0.4052 | 0.9391 | 42500 | 0.4320 |
| 0.4015 | 0.9943 | 45000 | 0.4261 |
| 0.352 | 1.0496 | 47500 | 0.4307 |
| 0.3456 | 1.1048 | 50000 | 0.4318 |
| 0.3726 | 1.1601 | 52500 | 0.4366 |
| 0.323 | 1.2153 | 55000 | 0.4357 |
| 0.3565 | 1.2705 | 57500 | 0.4285 |
| 0.3679 | 1.3258 | 60000 | 0.4329 |
| 0.3921 | 1.3810 | 62500 | 0.4257 |
| 0.3587 | 1.4363 | 65000 | 0.4248 |
| 0.3502 | 1.4915 | 67500 | 0.4283 |
| 0.3768 | 1.5468 | 70000 | 0.4283 |
| 0.3461 | 1.6020 | 72500 | 0.4226 |
| 0.3524 | 1.6572 | 75000 | 0.4238 |
| 0.3838 | 1.7125 | 77500 | 0.4220 |
| 0.3849 | 1.7677 | 80000 | 0.4213 |
| 0.3731 | 1.8230 | 82500 | 0.4184 |
| 0.3722 | 1.8782 | 85000 | 0.4212 |
| 0.3762 | 1.9334 | 87500 | 0.4179 |
| 0.3737 | 1.9887 | 90000 | 0.4229 |
| 0.3311 | 2.0439 | 92500 | 0.4277 |
| 0.3308 | 2.0992 | 95000 | 0.4245 |
| 0.3454 | 2.1544 | 97500 | 0.4258 |
| 0.2972 | 2.2097 | 100000 | 0.4362 |
| 0.3284 | 2.2649 | 102500 | 0.4290 |
| 0.3774 | 2.3201 | 105000 | 0.4302 |
| 0.3287 | 2.3754 | 107500 | 0.4250 |
| 0.3281 | 2.4306 | 110000 | 0.4219 |
| 0.3312 | 2.4859 | 112500 | 0.4249 |
| 0.3651 | 2.5411 | 115000 | 0.4222 |
| 0.3639 | 2.5963 | 117500 | 0.4243 |
| 0.3602 | 2.6516 | 120000 | 0.4187 |
| 0.3222 | 2.7068 | 122500 | 0.4256 |
| 0.3474 | 2.7621 | 125000 | 0.4204 |
| 0.3317 | 2.8173 | 127500 | 0.4246 |
| 0.3616 | 2.8725 | 130000 | 0.4148 |
| 0.3729 | 2.9278 | 132500 | 0.4191 |
| 0.352 | 2.9830 | 135000 | 0.4184 |
| 0.2849 | 3.0383 | 137500 | 0.4272 |
| 0.3148 | 3.0935 | 140000 | 0.4285 |
| 0.3032 | 3.1488 | 142500 | 0.4324 |
| 0.3306 | 3.2040 | 145000 | 0.4238 |
| 0.3377 | 3.2592 | 147500 | 0.4264 |
| 0.3373 | 3.3145 | 150000 | 0.4254 |
| 0.3079 | 3.3697 | 152500 | 0.4267 |
| 0.3165 | 3.4250 | 155000 | 0.4239 |
| 0.3469 | 3.4802 | 157500 | 0.4225 |
| 0.3102 | 3.5354 | 160000 | 0.4194 |
| 0.3231 | 3.5907 | 162500 | 0.4199 |
| 0.3383 | 3.6459 | 165000 | 0.4210 |
| 0.3048 | 3.7012 | 167500 | 0.4188 |
| 0.3222 | 3.7564 | 170000 | 0.4206 |
| 0.3505 | 3.8116 | 172500 | 0.4202 |
| 0.3209 | 3.8669 | 175000 | 0.4172 |
| 0.3146 | 3.9221 | 177500 | 0.4197 |
| 0.3237 | 3.9774 | 180000 | 0.4204 |
| 0.3087 | 4.0326 | 182500 | 0.4298 |
| 0.2979 | 4.0879 | 185000 | 0.4278 |
| 0.3046 | 4.1431 | 187500 | 0.4203 |
| 0.3145 | 4.1983 | 190000 | 0.4273 |
| 0.3511 | 4.2536 | 192500 | 0.4282 |
| 0.3845 | 4.3088 | 195000 | 0.4255 |
| 0.2889 | 4.3641 | 197500 | 0.4261 |
| 0.2764 | 4.4193 | 200000 | 0.4269 |
| 0.3089 | 4.4745 | 202500 | 0.4280 |
| 0.2928 | 4.5298 | 205000 | 0.4216 |
| 0.2982 | 4.5850 | 207500 | 0.4294 |
| 0.3008 | 4.6403 | 210000 | 0.4240 |
| 0.2997 | 4.6955 | 212500 | 0.4239 |
| 0.2964 | 4.7508 | 215000 | 0.4215 |
| 0.2822 | 4.8060 | 217500 | 0.4214 |
| 0.3216 | 4.8612 | 220000 | 0.4219 |
| 0.2873 | 4.9165 | 222500 | 0.4197 |
| 0.314 | 4.9717 | 225000 | 0.4214 |
| 0.3212 | 5.0270 | 227500 | 0.4292 |
| 0.2883 | 5.0822 | 230000 | 0.4333 |
| 0.2828 | 5.1374 | 232500 | 0.4341 |
| 0.2498 | 5.1927 | 235000 | 0.4357 |
| 0.2823 | 5.2479 | 237500 | 0.4289 |
| 0.2775 | 5.3032 | 240000 | 0.4352 |
| 0.3022 | 5.3584 | 242500 | 0.4329 |
| 0.269 | 5.4136 | 245000 | 0.4336 |
| 0.2769 | 5.4689 | 247500 | 0.4291 |
| 0.2627 | 5.5241 | 250000 | 0.4328 |
| 0.2632 | 5.5794 | 252500 | 0.4298 |
| 0.2856 | 5.6346 | 255000 | 0.4338 |
| 0.3124 | 5.6899 | 257500 | 0.4288 |
| 0.2662 | 5.7451 | 260000 | 0.4280 |
| 0.2849 | 5.8003 | 262500 | 0.4303 |
| 0.2972 | 5.8556 | 265000 | 0.4253 |
| 0.2866 | 5.9108 | 267500 | 0.4252 |
| 0.2689 | 5.9661 | 270000 | 0.4204 |
| 0.2459 | 6.0213 | 272500 | 0.4355 |
| 0.281 | 6.0765 | 275000 | 0.4386 |
| 0.29 | 6.1318 | 277500 | 0.4396 |
| 0.2587 | 6.1870 | 280000 | 0.4383 |
| 0.2892 | 6.2423 | 282500 | 0.4393 |
| 0.2761 | 6.2975 | 285000 | 0.4393 |
| 0.2796 | 6.3527 | 287500 | 0.4378 |
| 0.2586 | 6.4080 | 290000 | 0.4330 |
| 0.2397 | 6.4632 | 292500 | 0.4412 |
| 0.2823 | 6.5185 | 295000 | 0.4306 |
| 0.2903 | 6.5737 | 297500 | 0.4351 |
| 0.2675 | 6.6290 | 300000 | 0.4369 |
| 0.2949 | 6.6842 | 302500 | 0.4438 |
| 0.284 | 6.7394 | 305000 | 0.4361 |
| 0.2794 | 6.7947 | 307500 | 0.4304 |
| 0.2475 | 6.8499 | 310000 | 0.4399 |
| 0.2804 | 6.9052 | 312500 | 0.4317 |
| 0.2634 | 6.9604 | 315000 | 0.4359 |
| 0.2447 | 7.0156 | 317500 | 0.4418 |
| 0.2582 | 7.0709 | 320000 | 0.4471 |
| 0.2468 | 7.1261 | 322500 | 0.4492 |
| 0.2584 | 7.1814 | 325000 | 0.4436 |
| 0.2619 | 7.2366 | 327500 | 0.4444 |
| 0.2273 | 7.2919 | 330000 | 0.4458 |
| 0.2385 | 7.3471 | 332500 | 0.4434 |
| 0.2324 | 7.4023 | 335000 | 0.4470 |
| 0.2475 | 7.4576 | 337500 | 0.4475 |
| 0.2591 | 7.5128 | 340000 | 0.4456 |
| 0.2565 | 7.5681 | 342500 | 0.4451 |
| 0.2258 | 7.6233 | 345000 | 0.4424 |
| 0.2253 | 7.6785 | 347500 | 0.4444 |
| 0.2418 | 7.7338 | 350000 | 0.4470 |
| 0.2608 | 7.7890 | 352500 | 0.4465 |
| 0.2497 | 7.8443 | 355000 | 0.4472 |
| 0.2516 | 7.8995 | 357500 | 0.4446 |
| 0.2423 | 7.9547 | 360000 | 0.4426 |
| 0.2711 | 8.0100 | 362500 | 0.4470 |
| 0.2386 | 8.0652 | 365000 | 0.4530 |
| 0.2317 | 8.1205 | 367500 | 0.4550 |
| 0.243 | 8.1757 | 370000 | 0.4560 |
| 0.2273 | 8.2310 | 372500 | 0.4523 |
| 0.2463 | 8.2862 | 375000 | 0.4534 |
| 0.2435 | 8.3414 | 377500 | 0.4520 |
| 0.2805 | 8.3967 | 380000 | 0.4541 |
| 0.2437 | 8.4519 | 382500 | 0.4548 |
| 0.2583 | 8.5072 | 385000 | 0.4531 |
| 0.2241 | 8.5624 | 387500 | 0.4502 |
| 0.2531 | 8.6176 | 390000 | 0.4551 |
| 0.2393 | 8.6729 | 392500 | 0.4524 |
| 0.2506 | 8.7281 | 395000 | 0.4525 |
| 0.2222 | 8.7834 | 397500 | 0.4533 |
| 0.251 | 8.8386 | 400000 | 0.4518 |
| 0.2331 | 8.8938 | 402500 | 0.4555 |
| 0.2312 | 8.9491 | 405000 | 0.4507 |
| 0.2399 | 9.0043 | 407500 | 0.4557 |
| 0.2267 | 9.0596 | 410000 | 0.4574 |
| 0.2336 | 9.1148 | 412500 | 0.4580 |
| 0.263 | 9.1701 | 415000 | 0.4567 |
| 0.2207 | 9.2253 | 417500 | 0.4589 |
| 0.2457 | 9.2805 | 420000 | 0.4624 |
| 0.2577 | 9.3358 | 422500 | 0.4583 |
| 0.19 | 9.3910 | 425000 | 0.4600 |
| 0.2513 | 9.4463 | 427500 | 0.4575 |
| 0.2647 | 9.5015 | 430000 | 0.4587 |
| 0.2704 | 9.5567 | 432500 | 0.4577 |
| 0.2397 | 9.6120 | 435000 | 0.4592 |
| 0.2436 | 9.6672 | 437500 | 0.4601 |
| 0.2595 | 9.7225 | 440000 | 0.4591 |
| 0.2617 | 9.7777 | 442500 | 0.4595 |
| 0.231 | 9.8330 | 445000 | 0.4604 |
| 0.2375 | 9.8882 | 447500 | 0.4594 |
| 0.2295 | 9.9434 | 450000 | 0.4597 |
| 0.2289 | 9.9987 | 452500 | 0.4597 |

### Framework versions

- Transformers 4.44.0.dev0
- PyTorch 2.5.0.dev20240625
- Datasets 2.20.0
- Tokenizers 0.19.1
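
As an optional check (not part of the original card), the local environment can be compared against the versions listed above:

```python
# Quick comparison of installed library versions with the ones listed above.
import datasets
import tokenizers
import torch
import transformers

print("Transformers:", transformers.__version__)
print("PyTorch:", torch.__version__)
print("Datasets:", datasets.__version__)
print("Tokenizers:", tokenizers.__version__)
```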
added_tokens.json
ADDED
The diff for this file is too large to render. See raw diff.
config.json
ADDED
@@ -0,0 +1,33 @@
{
  "_name_or_path": "/Users/lee/GitHub/results/AraT5v2-base-1024-p-l-akk-en-20240811-100750/checkpoint-180000",
  "architectures": [
    "T5ForConditionalGeneration"
  ],
  "classifier_dropout": 0.0,
  "d_ff": 2048,
  "d_kv": 64,
  "d_model": 768,
  "decoder_start_token_id": 0,
  "dense_act_fn": "gelu_new",
  "dropout_rate": 0.1,
  "eos_token_id": 1,
  "feed_forward_proj": "gated-gelu",
  "initializer_factor": 1.0,
  "is_encoder_decoder": true,
  "is_gated_act": true,
  "layer_norm_epsilon": 1e-06,
  "model_type": "t5",
  "num_decoder_layers": 12,
  "num_heads": 12,
  "num_layers": 12,
  "output_past": true,
  "pad_token_id": 0,
  "relative_attention_max_distance": 128,
  "relative_attention_num_buckets": 32,
  "tie_word_embeddings": false,
  "tokenizer_class": "T5Tokenizer",
  "torch_dtype": "float32",
  "transformers_version": "4.44.0.dev0",
  "use_cache": true,
  "vocab_size": 127361
}
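
A brief sanity-check sketch, assuming the repository files are available locally or via a repo id (the path below is a placeholder):

```python
# Load the configuration shown above and confirm a few key dimensions.
from transformers import T5Config

config = T5Config.from_pretrained("path/or/repo-id-of-this-model")  # placeholder
assert config.architectures == ["T5ForConditionalGeneration"]
assert config.d_model == 768 and config.num_layers == 12 and config.num_heads == 12
assert config.vocab_size == 127361
```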
generation_config.json
ADDED
@@ -0,0 +1,7 @@
{
  "_from_model_config": true,
  "decoder_start_token_id": 0,
  "eos_token_id": 1,
  "pad_token_id": 0,
  "transformers_version": "4.44.0.dev0"
}
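
These defaults are picked up automatically by `generate()`; as a sketch (the repo id is a placeholder), they can also be inspected or overridden explicitly:

```python
# Inspect the generation defaults shown above; model.generate() applies them
# automatically, but they can be loaded and customised explicitly if desired.
from transformers import GenerationConfig

gen_config = GenerationConfig.from_pretrained("path/or/repo-id-of-this-model")  # placeholder
print(gen_config.decoder_start_token_id, gen_config.eos_token_id, gen_config.pad_token_id)
```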
model.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:4e0d1067719dab50b91ca8bccdbf66839b53f02fb00e8730dfe3ce1e4d4e858c
size 1575456624
special_tokens_map.json
ADDED
@@ -0,0 +1,125 @@
{
  "additional_special_tokens": [
    "<extra_id_0>", "<extra_id_1>", "<extra_id_2>", "<extra_id_3>", "<extra_id_4>",
    "<extra_id_5>", "<extra_id_6>", "<extra_id_7>", "<extra_id_8>", "<extra_id_9>",
    "<extra_id_10>", "<extra_id_11>", "<extra_id_12>", "<extra_id_13>", "<extra_id_14>",
    "<extra_id_15>", "<extra_id_16>", "<extra_id_17>", "<extra_id_18>", "<extra_id_19>",
    "<extra_id_20>", "<extra_id_21>", "<extra_id_22>", "<extra_id_23>", "<extra_id_24>",
    "<extra_id_25>", "<extra_id_26>", "<extra_id_27>", "<extra_id_28>", "<extra_id_29>",
    "<extra_id_30>", "<extra_id_31>", "<extra_id_32>", "<extra_id_33>", "<extra_id_34>",
    "<extra_id_35>", "<extra_id_36>", "<extra_id_37>", "<extra_id_38>", "<extra_id_39>",
    "<extra_id_40>", "<extra_id_41>", "<extra_id_42>", "<extra_id_43>", "<extra_id_44>",
    "<extra_id_45>", "<extra_id_46>", "<extra_id_47>", "<extra_id_48>", "<extra_id_49>",
    "<extra_id_50>", "<extra_id_51>", "<extra_id_52>", "<extra_id_53>", "<extra_id_54>",
    "<extra_id_55>", "<extra_id_56>", "<extra_id_57>", "<extra_id_58>", "<extra_id_59>",
    "<extra_id_60>", "<extra_id_61>", "<extra_id_62>", "<extra_id_63>", "<extra_id_64>",
    "<extra_id_65>", "<extra_id_66>", "<extra_id_67>", "<extra_id_68>", "<extra_id_69>",
    "<extra_id_70>", "<extra_id_71>", "<extra_id_72>", "<extra_id_73>", "<extra_id_74>",
    "<extra_id_75>", "<extra_id_76>", "<extra_id_77>", "<extra_id_78>", "<extra_id_79>",
    "<extra_id_80>", "<extra_id_81>", "<extra_id_82>", "<extra_id_83>", "<extra_id_84>",
    "<extra_id_85>", "<extra_id_86>", "<extra_id_87>", "<extra_id_88>", "<extra_id_89>",
    "<extra_id_90>", "<extra_id_91>", "<extra_id_92>", "<extra_id_93>", "<extra_id_94>",
    "<extra_id_95>", "<extra_id_96>", "<extra_id_97>", "<extra_id_98>", "<extra_id_99>"
  ],
  "eos_token": {
    "content": "</s>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "pad_token": {
    "content": "<pad>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "unk_token": {
    "content": "<unk>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  }
}
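
The `<extra_id_0>` through `<extra_id_99>` entries are the standard T5 sentinel tokens used for span corruption. As a small sketch (the repo id is a placeholder), they can be inspected through the tokenizer:

```python
# Check that the sentinel tokens listed above are registered with the tokenizer.
from transformers import T5Tokenizer

tokenizer = T5Tokenizer.from_pretrained("path/or/repo-id-of-this-model")  # placeholder
print(len(tokenizer.additional_special_tokens))          # expected: 100 sentinel tokens
print(tokenizer.convert_tokens_to_ids("<extra_id_0>"))   # a valid (non-unk) token id
```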
spiece.model
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:180428eb8e88be6c7d259fb04c9eb3a1c552d799a76741bcd6ee34fa0bf64386
size 2353338
tokenizer_config.json
ADDED
The diff for this file is too large to render. See raw diff.
training_args.bin
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:7dfe7f1b1905a39890e48bf31b0b13b279736d0bd253828ce1a978f315af01ce
size 5368