akk-en-UBC-NLP/AraT5v2-base-1024
Browse files
- README.md +337 -0
- added_tokens.json +0 -0
- config.json +33 -0
- generation_config.json +7 -0
- model.safetensors +3 -0
- special_tokens_map.json +125 -0
- spiece.model +3 -0
- tokenizer_config.json +0 -0
- training_args.bin +3 -0
README.md
ADDED
@@ -0,0 +1,337 @@
---
base_model: UBC-NLP/AraT5v2-base-1024
tags:
- generated_from_trainer
model-index:
- name: AraT5v2-base-1024-p-l-akk-en-20240712-212743
  results: []
---

<!-- This model card has been generated automatically according to the information the Trainer had access to. You
should probably proofread and complete it, then remove this comment. -->

# AraT5v2-base-1024-p-l-akk-en-20240712-212743

This model is a fine-tuned version of [UBC-NLP/AraT5v2-base-1024](https://huggingface.co/UBC-NLP/AraT5v2-base-1024) on an unspecified dataset (the repository name suggests an Akkadian-to-English parallel corpus).
It achieves the following results on the evaluation set:
- Loss: 0.1234
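The card ships no usage example, so here is a minimal inference sketch. It assumes the checkpoint loads as a standard T5 sequence-to-sequence model under this repository id and that the input is transliterated Akkadian; the exact preprocessing and any task prefix used during training are not documented here.

```python
# Minimal inference sketch -- repo id, input format, and decoding settings
# are assumptions, not documented by this card.
from transformers import AutoModelForSeq2SeqLM, AutoTokenizer

model_id = "akk-en-UBC-NLP/AraT5v2-base-1024"  # assumed Hub repo id
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForSeq2SeqLM.from_pretrained(model_id)

text = "..."  # replace with a transliterated Akkadian source line
inputs = tokenizer(text, return_tensors="pt", truncation=True, max_length=1024)
output_ids = model.generate(**inputs, num_beams=4, max_new_tokens=128)
print(tokenizer.decode(output_ids[0], skip_special_tokens=True))
```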

## Model description

More information needed

## Intended uses & limitations

More information needed

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training (a configuration sketch follows the list):
- learning_rate: 4e-05
- train_batch_size: 8
- eval_batch_size: 8
- seed: 42
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- num_epochs: 25
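A sketch of `Seq2SeqTrainingArguments` mirroring the values above; `output_dir` is hypothetical and the 500-step evaluation cadence is inferred from the results table below, not stated explicitly.

```python
# Sketch of training arguments matching the listed hyperparameters.
from transformers import Seq2SeqTrainingArguments

training_args = Seq2SeqTrainingArguments(
    output_dir="AraT5v2-base-1024-p-l-akk-en",  # hypothetical
    learning_rate=4e-5,
    per_device_train_batch_size=8,
    per_device_eval_batch_size=8,
    seed=42,
    adam_beta1=0.9,
    adam_beta2=0.999,
    adam_epsilon=1e-8,
    lr_scheduler_type="linear",
    num_train_epochs=25,
    evaluation_strategy="steps",  # inferred from the eval-every-500-steps table
    eval_steps=500,
)
```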

### Training results

| Training Loss | Epoch | Step | Validation Loss |
|:-------------:|:-------:|:------:|:---------------:|
| 7.7733 | 0.0884 | 500 | 0.7064 |
| 0.5896 | 0.1767 | 1000 | 0.4223 |
| 0.3694 | 0.2651 | 1500 | 0.3706 |
| 0.2998 | 0.3535 | 2000 | 0.3643 |
| 0.2951 | 0.4419 | 2500 | 0.2568 |
| 0.275 | 0.5302 | 3000 | 0.2534 |
| 0.2594 | 0.6186 | 3500 | 0.2449 |
| 0.2538 | 0.7070 | 4000 | 0.2313 |
| 0.2505 | 0.7953 | 4500 | 0.2303 |
| 0.2559 | 0.8837 | 5000 | 0.2237 |
| 0.2449 | 0.9721 | 5500 | 0.2190 |
| 0.2443 | 1.0604 | 6000 | 0.2165 |
| 0.2334 | 1.1488 | 6500 | 0.2142 |
| 0.2373 | 1.2372 | 7000 | 0.2116 |
| 0.2239 | 1.3256 | 7500 | 0.2103 |
| 0.2261 | 1.4139 | 8000 | 0.2079 |
| 0.2249 | 1.5023 | 8500 | 0.2057 |
| 0.2232 | 1.5907 | 9000 | 0.2055 |
| 0.214 | 1.6790 | 9500 | 0.2023 |
| 0.2176 | 1.7674 | 10000 | 0.2008 |
| 0.2062 | 1.8558 | 10500 | 0.1991 |
| 0.2132 | 1.9441 | 11000 | 0.1980 |
| 0.2016 | 2.0325 | 11500 | 0.2008 |
| 0.2103 | 2.1209 | 12000 | 0.2001 |
| 0.2611 | 2.2093 | 12500 | 0.1967 |
| 0.2045 | 2.2976 | 13000 | 0.1945 |
| 0.2027 | 2.3860 | 13500 | 0.1924 |
| 0.1992 | 2.4744 | 14000 | 0.1913 |
| 0.2019 | 2.5627 | 14500 | 0.1903 |
| 0.1985 | 2.6511 | 15000 | 0.1890 |
| 0.1936 | 2.7395 | 15500 | 0.1888 |
| 0.1981 | 2.8279 | 16000 | 0.1870 |
| 0.1986 | 2.9162 | 16500 | 0.1866 |
| 0.1919 | 3.0046 | 17000 | 0.1847 |
| 0.1888 | 3.0930 | 17500 | 0.1839 |
| 0.1957 | 3.1813 | 18000 | 0.1834 |
| 0.1919 | 3.2697 | 18500 | 0.1820 |
| 0.1845 | 3.3581 | 19000 | 0.1811 |
| 0.1924 | 3.4464 | 19500 | 0.1804 |
| 0.1861 | 3.5348 | 20000 | 0.1793 |
| 0.1773 | 3.6232 | 20500 | 0.1777 |
| 0.1826 | 3.7116 | 21000 | 0.1782 |
| 0.19 | 3.7999 | 21500 | 0.1765 |
| 0.1827 | 3.8883 | 22000 | 0.1752 |
| 0.1848 | 3.9767 | 22500 | 0.1751 |
| 0.1828 | 4.0650 | 23000 | 0.1736 |
| 0.1759 | 4.1534 | 23500 | 0.1741 |
| 0.179 | 4.2418 | 24000 | 0.1723 |
| 0.1812 | 4.3302 | 24500 | 0.1722 |
| 0.1788 | 4.4185 | 25000 | 0.1711 |
| 0.1808 | 4.5069 | 25500 | 0.1705 |
| 0.1757 | 4.5953 | 26000 | 0.1693 |
| 0.1694 | 4.6836 | 26500 | 0.1687 |
| 0.1701 | 4.7720 | 27000 | 0.1681 |
| 0.179 | 4.8604 | 27500 | 0.1676 |
| 0.1771 | 4.9487 | 28000 | 0.1664 |
| 0.1753 | 5.0371 | 28500 | 0.1665 |
| 0.1726 | 5.1255 | 29000 | 0.1653 |
| 0.1683 | 5.2139 | 29500 | 0.1644 |
| 0.1639 | 5.3022 | 30000 | 0.1641 |
| 0.1688 | 5.3906 | 30500 | 0.1637 |
| 0.1675 | 5.4790 | 31000 | 0.1631 |
| 0.1679 | 5.5673 | 31500 | 0.1622 |
| 0.1701 | 5.6557 | 32000 | 0.1619 |
| 0.1672 | 5.7441 | 32500 | 0.1613 |
| 0.1661 | 5.8324 | 33000 | 0.1604 |
| 0.1677 | 5.9208 | 33500 | 0.1595 |
| 0.1689 | 6.0092 | 34000 | 0.1595 |
| 0.1678 | 6.0976 | 34500 | 0.1590 |
| 0.165 | 6.1859 | 35000 | 0.1587 |
| 0.1636 | 6.2743 | 35500 | 0.1585 |
| 0.1641 | 6.3627 | 36000 | 0.1575 |
| 0.1598 | 6.4510 | 36500 | 0.1573 |
| 0.1563 | 6.5394 | 37000 | 0.1566 |
| 0.1612 | 6.6278 | 37500 | 0.1572 |
| 0.1668 | 6.7162 | 38000 | 0.1556 |
| 0.1625 | 6.8045 | 38500 | 0.1552 |
| 0.1561 | 6.8929 | 39000 | 0.1540 |
| 0.1571 | 6.9813 | 39500 | 0.1544 |
| 0.1628 | 7.0696 | 40000 | 0.1540 |
| 0.1582 | 7.1580 | 40500 | 0.1535 |
| 0.1481 | 7.2464 | 41000 | 0.1535 |
| 0.1537 | 7.3347 | 41500 | 0.1525 |
| 0.159 | 7.4231 | 42000 | 0.1519 |
| 0.1579 | 7.5115 | 42500 | 0.1512 |
| 0.1595 | 7.5999 | 43000 | 0.1518 |
| 0.1578 | 7.6882 | 43500 | 0.1504 |
| 0.1514 | 7.7766 | 44000 | 0.1505 |
| 0.1534 | 7.8650 | 44500 | 0.1501 |
| 0.157 | 7.9533 | 45000 | 0.1500 |
| 0.1558 | 8.0417 | 45500 | 0.1495 |
| 0.1545 | 8.1301 | 46000 | 0.1496 |
| 0.1506 | 8.2185 | 46500 | 0.1490 |
| 0.1525 | 8.3068 | 47000 | 0.1482 |
| 0.1546 | 8.3952 | 47500 | 0.1476 |
| 0.1544 | 8.4836 | 48000 | 0.1475 |
| 0.1482 | 8.5719 | 48500 | 0.1472 |
| 0.1483 | 8.6603 | 49000 | 0.1472 |
| 0.1455 | 8.7487 | 49500 | 0.1467 |
| 0.1514 | 8.8370 | 50000 | 0.1458 |
| 0.1537 | 8.9254 | 50500 | 0.1464 |
| 0.1508 | 9.0138 | 51000 | 0.1458 |
| 0.1428 | 9.1022 | 51500 | 0.1450 |
| 0.1478 | 9.1905 | 52000 | 0.1461 |
| 0.1472 | 9.2789 | 52500 | 0.1449 |
| 0.1498 | 9.3673 | 53000 | 0.1443 |
| 0.1502 | 9.4556 | 53500 | 0.1443 |
| 0.1458 | 9.5440 | 54000 | 0.1441 |
| 0.1441 | 9.6324 | 54500 | 0.1433 |
| 0.1525 | 9.7207 | 55000 | 0.1434 |
| 0.148 | 9.8091 | 55500 | 0.1426 |
| 0.1458 | 9.8975 | 56000 | 0.1429 |
| 0.1476 | 9.9859 | 56500 | 0.1425 |
| 0.1413 | 10.0742 | 57000 | 0.1426 |
| 0.1488 | 10.1626 | 57500 | 0.1421 |
| 0.1457 | 10.2510 | 58000 | 0.1415 |
| 0.1429 | 10.3393 | 58500 | 0.1417 |
| 0.1382 | 10.4277 | 59000 | 0.1416 |
| 0.1466 | 10.5161 | 59500 | 0.1413 |
| 0.1412 | 10.6045 | 60000 | 0.1410 |
| 0.1447 | 10.6928 | 60500 | 0.1408 |
| 0.1426 | 10.7812 | 61000 | 0.1406 |
| 0.1488 | 10.8696 | 61500 | 0.1402 |
| 0.1402 | 10.9579 | 62000 | 0.1396 |
| 0.1385 | 11.0463 | 62500 | 0.1393 |
| 0.1415 | 11.1347 | 63000 | 0.1390 |
| 0.1429 | 11.2230 | 63500 | 0.1397 |
| 0.1415 | 11.3114 | 64000 | 0.1389 |
| 0.1407 | 11.3998 | 64500 | 0.1387 |
| 0.1349 | 11.4882 | 65000 | 0.1384 |
| 0.1418 | 11.5765 | 65500 | 0.1388 |
| 0.1394 | 11.6649 | 66000 | 0.1378 |
| 0.1415 | 11.7533 | 66500 | 0.1376 |
| 0.134 | 11.8416 | 67000 | 0.1373 |
| 0.1435 | 11.9300 | 67500 | 0.1370 |
| 0.1386 | 12.0184 | 68000 | 0.1373 |
| 0.1295 | 12.1068 | 68500 | 0.1368 |
| 0.1379 | 12.1951 | 69000 | 0.1365 |
| 0.1436 | 12.2835 | 69500 | 0.1368 |
| 0.1312 | 12.3719 | 70000 | 0.1361 |
| 0.139 | 12.4602 | 70500 | 0.1358 |
| 0.1395 | 12.5486 | 71000 | 0.1358 |
| 0.1317 | 12.6370 | 71500 | 0.1356 |
| 0.1445 | 12.7253 | 72000 | 0.1352 |
| 0.1394 | 12.8137 | 72500 | 0.1355 |
| 0.1351 | 12.9021 | 73000 | 0.1346 |
| 0.1369 | 12.9905 | 73500 | 0.1347 |
| 0.1328 | 13.0788 | 74000 | 0.1352 |
| 0.132 | 13.1672 | 74500 | 0.1347 |
| 0.137 | 13.2556 | 75000 | 0.1344 |
| 0.1382 | 13.3439 | 75500 | 0.1342 |
| 0.1346 | 13.4323 | 76000 | 0.1334 |
| 0.1322 | 13.5207 | 76500 | 0.1334 |
| 0.1354 | 13.6090 | 77000 | 0.1333 |
| 0.1322 | 13.6974 | 77500 | 0.1335 |
| 0.1304 | 13.7858 | 78000 | 0.1331 |
| 0.1332 | 13.8742 | 78500 | 0.1332 |
| 0.136 | 13.9625 | 79000 | 0.1326 |
| 0.1361 | 14.0509 | 79500 | 0.1329 |
| 0.1324 | 14.1393 | 80000 | 0.1328 |
| 0.1321 | 14.2276 | 80500 | 0.1321 |
| 0.1349 | 14.3160 | 81000 | 0.1320 |
| 0.1336 | 14.4044 | 81500 | 0.1323 |
| 0.1272 | 14.4928 | 82000 | 0.1318 |
| 0.1317 | 14.5811 | 82500 | 0.1316 |
| 0.1274 | 14.6695 | 83000 | 0.1317 |
| 0.1331 | 14.7579 | 83500 | 0.1312 |
| 0.132 | 14.8462 | 84000 | 0.1312 |
| 0.1318 | 14.9346 | 84500 | 0.1307 |
| 0.128 | 15.0230 | 85000 | 0.1305 |
| 0.1282 | 15.1113 | 85500 | 0.1307 |
| 0.128 | 15.1997 | 86000 | 0.1305 |
| 0.1359 | 15.2881 | 86500 | 0.1304 |
| 0.1269 | 15.3765 | 87000 | 0.1304 |
| 0.1237 | 15.4648 | 87500 | 0.1303 |
| 0.1372 | 15.5532 | 88000 | 0.1302 |
| 0.1343 | 15.6416 | 88500 | 0.1300 |
| 0.1336 | 15.7299 | 89000 | 0.1297 |
| 0.1258 | 15.8183 | 89500 | 0.1295 |
| 0.1225 | 15.9067 | 90000 | 0.1298 |
| 0.1285 | 15.9951 | 90500 | 0.1291 |
| 0.1254 | 16.0834 | 91000 | 0.1295 |
| 0.1283 | 16.1718 | 91500 | 0.1294 |
| 0.1257 | 16.2602 | 92000 | 0.1297 |
| 0.1279 | 16.3485 | 92500 | 0.1292 |
| 0.1304 | 16.4369 | 93000 | 0.1291 |
| 0.1253 | 16.5253 | 93500 | 0.1290 |
| 0.1181 | 16.6136 | 94000 | 0.1285 |
| 0.1293 | 16.7020 | 94500 | 0.1287 |
| 0.1271 | 16.7904 | 95000 | 0.1293 |
| 0.1274 | 16.8788 | 95500 | 0.1287 |
| 0.1331 | 16.9671 | 96000 | 0.1284 |
| 0.1338 | 17.0555 | 96500 | 0.1286 |
| 0.1297 | 17.1439 | 97000 | 0.1283 |
| 0.1227 | 17.2322 | 97500 | 0.1280 |
| 0.1226 | 17.3206 | 98000 | 0.1280 |
| 0.1255 | 17.4090 | 98500 | 0.1280 |
| 0.1266 | 17.4973 | 99000 | 0.1277 |
| 0.1247 | 17.5857 | 99500 | 0.1274 |
| 0.1254 | 17.6741 | 100000 | 0.1275 |
| 0.1193 | 17.7625 | 100500 | 0.1277 |
| 0.1279 | 17.8508 | 101000 | 0.1276 |
| 0.1251 | 17.9392 | 101500 | 0.1270 |
| 0.1264 | 18.0276 | 102000 | 0.1271 |
| 0.1249 | 18.1159 | 102500 | 0.1270 |
| 0.1279 | 18.2043 | 103000 | 0.1267 |
| 0.1254 | 18.2927 | 103500 | 0.1266 |
| 0.1276 | 18.3811 | 104000 | 0.1269 |
| 0.1165 | 18.4694 | 104500 | 0.1263 |
| 0.122 | 18.5578 | 105000 | 0.1265 |
| 0.1281 | 18.6462 | 105500 | 0.1261 |
| 0.1224 | 18.7345 | 106000 | 0.1265 |
| 0.1209 | 18.8229 | 106500 | 0.1264 |
| 0.1233 | 18.9113 | 107000 | 0.1264 |
| 0.1218 | 18.9996 | 107500 | 0.1256 |
| 0.1217 | 19.0880 | 108000 | 0.1261 |
| 0.1227 | 19.1764 | 108500 | 0.1265 |
| 0.1303 | 19.2648 | 109000 | 0.1263 |
| 0.1188 | 19.3531 | 109500 | 0.1258 |
| 0.1221 | 19.4415 | 110000 | 0.1260 |
| 0.1249 | 19.5299 | 110500 | 0.1261 |
| 0.1295 | 19.6182 | 111000 | 0.1257 |
| 0.1226 | 19.7066 | 111500 | 0.1252 |
| 0.1199 | 19.7950 | 112000 | 0.1253 |
| 0.1177 | 19.8834 | 112500 | 0.1253 |
| 0.1193 | 19.9717 | 113000 | 0.1255 |
| 0.1181 | 20.0601 | 113500 | 0.1256 |
| 0.1207 | 20.1485 | 114000 | 0.1256 |
| 0.1235 | 20.2368 | 114500 | 0.1257 |
| 0.1209 | 20.3252 | 115000 | 0.1253 |
| 0.115 | 20.4136 | 115500 | 0.1251 |
| 0.1176 | 20.5019 | 116000 | 0.1252 |
| 0.1215 | 20.5903 | 116500 | 0.1249 |
| 0.124 | 20.6787 | 117000 | 0.1247 |
| 0.1211 | 20.7671 | 117500 | 0.1245 |
| 0.1222 | 20.8554 | 118000 | 0.1246 |
| 0.1205 | 20.9438 | 118500 | 0.1248 |
| 0.1251 | 21.0322 | 119000 | 0.1248 |
| 0.1212 | 21.1205 | 119500 | 0.1243 |
| 0.1151 | 21.2089 | 120000 | 0.1247 |
| 0.1197 | 21.2973 | 120500 | 0.1246 |
| 0.122 | 21.3856 | 121000 | 0.1248 |
| 0.1226 | 21.4740 | 121500 | 0.1248 |
| 0.1214 | 21.5624 | 122000 | 0.1247 |
| 0.1232 | 21.6508 | 122500 | 0.1242 |
| 0.118 | 21.7391 | 123000 | 0.1245 |
| 0.1179 | 21.8275 | 123500 | 0.1242 |
| 0.1201 | 21.9159 | 124000 | 0.1243 |
| 0.1205 | 22.0042 | 124500 | 0.1245 |
| 0.1182 | 22.0926 | 125000 | 0.1242 |
| 0.115 | 22.1810 | 125500 | 0.1243 |
| 0.1203 | 22.2694 | 126000 | 0.1239 |
| 0.1184 | 22.3577 | 126500 | 0.1240 |
| 0.1221 | 22.4461 | 127000 | 0.1239 |
| 0.1214 | 22.5345 | 127500 | 0.1238 |
| 0.1183 | 22.6228 | 128000 | 0.1239 |
| 0.1188 | 22.7112 | 128500 | 0.1242 |
| 0.1181 | 22.7996 | 129000 | 0.1237 |
| 0.1172 | 22.8879 | 129500 | 0.1237 |
| 0.122 | 22.9763 | 130000 | 0.1236 |
| 0.1194 | 23.0647 | 130500 | 0.1239 |
| 0.1171 | 23.1531 | 131000 | 0.1238 |
| 0.1178 | 23.2414 | 131500 | 0.1238 |
| 0.1192 | 23.3298 | 132000 | 0.1239 |
| 0.1193 | 23.4182 | 132500 | 0.1238 |
| 0.1201 | 23.5065 | 133000 | 0.1235 |
| 0.1208 | 23.5949 | 133500 | 0.1234 |
| 0.1194 | 23.6833 | 134000 | 0.1235 |
| 0.1155 | 23.7717 | 134500 | 0.1235 |
| 0.1177 | 23.8600 | 135000 | 0.1233 |
| 0.1187 | 23.9484 | 135500 | 0.1235 |
| 0.1167 | 24.0368 | 136000 | 0.1236 |
| 0.116 | 24.1251 | 136500 | 0.1235 |
| 0.1151 | 24.2135 | 137000 | 0.1235 |
| 0.1204 | 24.3019 | 137500 | 0.1235 |
| 0.1105 | 24.3902 | 138000 | 0.1235 |
| 0.1211 | 24.4786 | 138500 | 0.1234 |
| 0.1192 | 24.5670 | 139000 | 0.1235 |
| 0.1188 | 24.6554 | 139500 | 0.1234 |
| 0.1245 | 24.7437 | 140000 | 0.1234 |
| 0.1177 | 24.8321 | 140500 | 0.1234 |
| 0.1209 | 24.9205 | 141000 | 0.1234 |

### Framework versions

- Transformers 4.41.2
- Pytorch 2.5.0.dev20240625
- Datasets 2.20.0
- Tokenizers 0.19.1
added_tokens.json
ADDED
The diff for this file is too large to render; see the raw diff.
config.json
ADDED
@@ -0,0 +1,33 @@
{
  "_name_or_path": "UBC-NLP/AraT5v2-base-1024",
  "architectures": [
    "T5ForConditionalGeneration"
  ],
  "classifier_dropout": 0.0,
  "d_ff": 2048,
  "d_kv": 64,
  "d_model": 768,
  "decoder_start_token_id": 0,
  "dense_act_fn": "gelu_new",
  "dropout_rate": 0.1,
  "eos_token_id": 1,
  "feed_forward_proj": "gated-gelu",
  "initializer_factor": 1.0,
  "is_encoder_decoder": true,
  "is_gated_act": true,
  "layer_norm_epsilon": 1e-06,
  "model_type": "t5",
  "num_decoder_layers": 12,
  "num_heads": 12,
  "num_layers": 12,
  "output_past": true,
  "pad_token_id": 0,
  "relative_attention_max_distance": 128,
  "relative_attention_num_buckets": 32,
  "tie_word_embeddings": false,
  "tokenizer_class": "T5Tokenizer",
  "torch_dtype": "float32",
  "transformers_version": "4.41.2",
  "use_cache": true,
  "vocab_size": 126982
}
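A quick sanity-check sketch: load the config and confirm the T5 geometry and vocabulary size. Whether the enlarged vocabulary (126,982 entries) covers Akkadian transliteration tokens is an assumption; the repo does not say.

```python
# Config sanity check; repo id is assumed.
from transformers import AutoConfig

config = AutoConfig.from_pretrained("akk-en-UBC-NLP/AraT5v2-base-1024")
print(config.model_type, config.num_layers, config.num_heads, config.vocab_size)
# Expected per config.json: t5 12 12 126982
```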
generation_config.json
ADDED
@@ -0,0 +1,7 @@
{
  "_from_model_config": true,
  "decoder_start_token_id": 0,
  "eos_token_id": 1,
  "pad_token_id": 0,
  "transformers_version": "4.41.2"
}
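These defaults only pin the start, end, and pad token ids, so decoding strategy (beams, length limits) must be supplied at generation time. A small inspection sketch, repo id assumed:

```python
# Inspect the minimal generation defaults shipped with the model.
from transformers import GenerationConfig

gen = GenerationConfig.from_pretrained("akk-en-UBC-NLP/AraT5v2-base-1024")
print(gen.decoder_start_token_id, gen.eos_token_id, gen.pad_token_id)  # 0 1 0
```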
model.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:676f8cc36ab739ddb87096a61b350903c526d9d8cce2d5dbc75880c14cc10ff4
size 1573128048
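The three lines above are a Git LFS pointer, not the weights themselves; the roughly 1.57 GB safetensors file is resolved at download time. A fetch sketch with `huggingface_hub`, repo id assumed:

```python
# Download the actual weights behind the LFS pointer.
from huggingface_hub import hf_hub_download

weights_path = hf_hub_download(
    repo_id="akk-en-UBC-NLP/AraT5v2-base-1024",  # assumed
    filename="model.safetensors",
)
print(weights_path)
```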
special_tokens_map.json
ADDED
@@ -0,0 +1,125 @@
{
  "additional_special_tokens": [
    "<extra_id_0>",
    "<extra_id_1>",
    "<extra_id_2>",
    "<extra_id_3>",
    "<extra_id_4>",
    "<extra_id_5>",
    "<extra_id_6>",
    "<extra_id_7>",
    "<extra_id_8>",
    "<extra_id_9>",
    "<extra_id_10>",
    "<extra_id_11>",
    "<extra_id_12>",
    "<extra_id_13>",
    "<extra_id_14>",
    "<extra_id_15>",
    "<extra_id_16>",
    "<extra_id_17>",
    "<extra_id_18>",
    "<extra_id_19>",
    "<extra_id_20>",
    "<extra_id_21>",
    "<extra_id_22>",
    "<extra_id_23>",
    "<extra_id_24>",
    "<extra_id_25>",
    "<extra_id_26>",
    "<extra_id_27>",
    "<extra_id_28>",
    "<extra_id_29>",
    "<extra_id_30>",
    "<extra_id_31>",
    "<extra_id_32>",
    "<extra_id_33>",
    "<extra_id_34>",
    "<extra_id_35>",
    "<extra_id_36>",
    "<extra_id_37>",
    "<extra_id_38>",
    "<extra_id_39>",
    "<extra_id_40>",
    "<extra_id_41>",
    "<extra_id_42>",
    "<extra_id_43>",
    "<extra_id_44>",
    "<extra_id_45>",
    "<extra_id_46>",
    "<extra_id_47>",
    "<extra_id_48>",
    "<extra_id_49>",
    "<extra_id_50>",
    "<extra_id_51>",
    "<extra_id_52>",
    "<extra_id_53>",
    "<extra_id_54>",
    "<extra_id_55>",
    "<extra_id_56>",
    "<extra_id_57>",
    "<extra_id_58>",
    "<extra_id_59>",
    "<extra_id_60>",
    "<extra_id_61>",
    "<extra_id_62>",
    "<extra_id_63>",
    "<extra_id_64>",
    "<extra_id_65>",
    "<extra_id_66>",
    "<extra_id_67>",
    "<extra_id_68>",
    "<extra_id_69>",
    "<extra_id_70>",
    "<extra_id_71>",
    "<extra_id_72>",
    "<extra_id_73>",
    "<extra_id_74>",
    "<extra_id_75>",
    "<extra_id_76>",
    "<extra_id_77>",
    "<extra_id_78>",
    "<extra_id_79>",
    "<extra_id_80>",
    "<extra_id_81>",
    "<extra_id_82>",
    "<extra_id_83>",
    "<extra_id_84>",
    "<extra_id_85>",
    "<extra_id_86>",
    "<extra_id_87>",
    "<extra_id_88>",
    "<extra_id_89>",
    "<extra_id_90>",
    "<extra_id_91>",
    "<extra_id_92>",
    "<extra_id_93>",
    "<extra_id_94>",
    "<extra_id_95>",
    "<extra_id_96>",
    "<extra_id_97>",
    "<extra_id_98>",
    "<extra_id_99>"
  ],
  "eos_token": {
    "content": "</s>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "pad_token": {
    "content": "<pad>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "unk_token": {
    "content": "<unk>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  }
}
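The map above declares T5's 100 `<extra_id_*>` sentinel tokens plus `</s>`, `<pad>`, and `<unk>`. A sketch to confirm the tokenizer registers them, repo id assumed:

```python
# Verify the special tokens declared in special_tokens_map.json.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("akk-en-UBC-NLP/AraT5v2-base-1024")
print(tok.eos_token, tok.pad_token, tok.unk_token)  # </s> <pad> <unk>
print(tok.convert_tokens_to_ids("<extra_id_0>"))    # id of the first sentinel
```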
spiece.model
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:180428eb8e88be6c7d259fb04c9eb3a1c552d799a76741bcd6ee34fa0bf64386
size 2353338
tokenizer_config.json
ADDED
The diff for this file is too large to render; see the raw diff.
training_args.bin
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:be8e901c7e456a3b0056cac8451761f57aee2095aae9ef809017cef32bf15f04
size 5304