CreatorPhan committed
Commit 507c313 · 1 Parent(s): 3540d17

Upload folder using huggingface_hub (#2)
- Upload folder using huggingface_hub (edb2131e1c3823ba72b1d279d10abad09a5f6533)

Files changed:
- adapter_model.bin  +1 -1
- optimizer.pt       +1 -1
- scheduler.pt       +1 -1
- trainer_state.json +603 -3
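A commit with this message is typically produced by the huggingface_hub client. Below is a minimal sketch of the call that creates such a commit; the folder path and repo id are hypothetical placeholders (neither appears in this diff), while upload_folder and its create_pr flag are part of the real huggingface_hub API:

# Minimal sketch: create an "Upload folder using huggingface_hub" commit.
# folder_path and repo_id below are hypothetical placeholders.
from huggingface_hub import HfApi

api = HfApi()
api.upload_folder(
    folder_path="./output/checkpoint-300",  # local training output (placeholder)
    repo_id="CreatorPhan/adapter-repo",     # placeholder repo id
    commit_message="Upload folder using huggingface_hub",
    create_pr=True,  # opens a pull request, like the "#2" merged in this commit
)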
adapter_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:b39b2ac3c3f75772f547048fda7ecb323076b9689db7d79915bba156dc508f2f
 size 39409357
optimizer.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:c734ecaa394370ee4bcd94cc0b2ae016a26765122f3f76327b28c23f96a22732
 size 78844421
scheduler.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:7d8dcaf05375bb59f736a94e8f8b03d33cdc87bc02411e6527a29996e0a68b3b
 size 627
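All three files above are Git LFS pointer files rather than the binary payloads themselves: each pointer records only the spec version, the sha256 object id, and the byte size, and the diff shows the oid changing while the size stays the same. A small Python sketch (not part of this commit) that verifies a downloaded blob against its pointer:

# Sketch: check a downloaded LFS blob against its pointer file.
# A v1 pointer holds "version ...", "oid sha256:<hex>" and "size <bytes>".
import hashlib
import os

def check_lfs_object(pointer_path: str, blob_path: str) -> bool:
    fields = {}
    with open(pointer_path) as f:
        for line in f:
            key, _, value = line.strip().partition(" ")
            fields[key] = value
    algo, _, expected_hex = fields["oid"].partition(":")
    if algo != "sha256":
        raise ValueError(f"unexpected hash algorithm: {algo}")
    digest = hashlib.sha256()
    with open(blob_path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            digest.update(chunk)
    return (digest.hexdigest() == expected_hex
            and os.path.getsize(blob_path) == int(fields["size"]))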
trainer_state.json CHANGED
@@ -1,9 +1,9 @@
 {
   "best_metric": null,
   "best_model_checkpoint": null,
-  "epoch": 1.
+  "epoch": 1.675977653631285,
   "eval_steps": 500,
-  "global_step":
+  "global_step": 300,
   "is_hyper_param_search": false,
   "is_local_process_zero": true,
   "is_world_process_zero": true,
@@ -1207,13 +1207,613 @@
       "learning_rate": 0.0001441340782122905,
       "loss": 1.9591,
       "step": 200
+    },
+    {
+      "epoch": 1.12,
+      "learning_rate": 0.00014385474860335195,
+      "loss": 1.8722,
+      "step": 201
+    },
+    {
+      "epoch": 1.13,
+      "learning_rate": 0.00014357541899441342,
+      "loss": 1.8535,
+      "step": 202
+    },
+    {
+      "epoch": 1.13,
+      "learning_rate": 0.00014329608938547488,
+      "loss": 1.8676,
+      "step": 203
+    },
+    {
+      "epoch": 1.14,
+      "learning_rate": 0.00014301675977653632,
+      "loss": 1.8976,
+      "step": 204
+    },
+    {
+      "epoch": 1.15,
+      "learning_rate": 0.00014273743016759776,
+      "loss": 1.7723,
+      "step": 205
+    },
+    {
+      "epoch": 1.15,
+      "learning_rate": 0.00014245810055865923,
+      "loss": 1.8165,
+      "step": 206
+    },
+    {
+      "epoch": 1.16,
+      "learning_rate": 0.00014217877094972067,
+      "loss": 1.7811,
+      "step": 207
+    },
+    {
+      "epoch": 1.16,
+      "learning_rate": 0.00014189944134078214,
+      "loss": 1.908,
+      "step": 208
+    },
+    {
+      "epoch": 1.17,
+      "learning_rate": 0.00014162011173184357,
+      "loss": 1.7663,
+      "step": 209
+    },
+    {
+      "epoch": 1.17,
+      "learning_rate": 0.00014134078212290501,
+      "loss": 1.6779,
+      "step": 210
+    },
+    {
+      "epoch": 1.18,
+      "learning_rate": 0.0001410614525139665,
+      "loss": 1.9039,
+      "step": 211
+    },
+    {
+      "epoch": 1.18,
+      "learning_rate": 0.00014078212290502795,
+      "loss": 1.8033,
+      "step": 212
+    },
+    {
+      "epoch": 1.19,
+      "learning_rate": 0.00014050279329608939,
+      "loss": 1.8251,
+      "step": 213
+    },
+    {
+      "epoch": 1.2,
+      "learning_rate": 0.00014022346368715085,
+      "loss": 1.8505,
+      "step": 214
+    },
+    {
+      "epoch": 1.2,
+      "learning_rate": 0.0001399441340782123,
+      "loss": 1.8147,
+      "step": 215
+    },
+    {
+      "epoch": 1.21,
+      "learning_rate": 0.00013966480446927376,
+      "loss": 1.8419,
+      "step": 216
+    },
+    {
+      "epoch": 1.21,
+      "learning_rate": 0.0001393854748603352,
+      "loss": 1.8401,
+      "step": 217
+    },
+    {
+      "epoch": 1.22,
+      "learning_rate": 0.00013910614525139664,
+      "loss": 1.8912,
+      "step": 218
+    },
+    {
+      "epoch": 1.22,
+      "learning_rate": 0.0001388268156424581,
+      "loss": 1.7548,
+      "step": 219
+    },
+    {
+      "epoch": 1.23,
+      "learning_rate": 0.00013854748603351957,
+      "loss": 1.8741,
+      "step": 220
+    },
+    {
+      "epoch": 1.23,
+      "learning_rate": 0.000138268156424581,
+      "loss": 1.9549,
+      "step": 221
+    },
+    {
+      "epoch": 1.24,
+      "learning_rate": 0.00013798882681564248,
+      "loss": 1.9093,
+      "step": 222
+    },
+    {
+      "epoch": 1.25,
+      "learning_rate": 0.00013770949720670392,
+      "loss": 1.7896,
+      "step": 223
+    },
+    {
+      "epoch": 1.25,
+      "learning_rate": 0.00013743016759776538,
+      "loss": 1.8491,
+      "step": 224
+    },
+    {
+      "epoch": 1.26,
+      "learning_rate": 0.00013715083798882682,
+      "loss": 1.7851,
+      "step": 225
+    },
+    {
+      "epoch": 1.26,
+      "learning_rate": 0.00013687150837988826,
+      "loss": 1.6992,
+      "step": 226
+    },
+    {
+      "epoch": 1.27,
+      "learning_rate": 0.00013659217877094973,
+      "loss": 1.9765,
+      "step": 227
+    },
+    {
+      "epoch": 1.27,
+      "learning_rate": 0.00013631284916201117,
+      "loss": 1.8179,
+      "step": 228
+    },
+    {
+      "epoch": 1.28,
+      "learning_rate": 0.00013603351955307263,
+      "loss": 1.8548,
+      "step": 229
+    },
+    {
+      "epoch": 1.28,
+      "learning_rate": 0.0001357541899441341,
+      "loss": 1.8843,
+      "step": 230
+    },
+    {
+      "epoch": 1.29,
+      "learning_rate": 0.00013547486033519554,
+      "loss": 1.9105,
+      "step": 231
+    },
+    {
+      "epoch": 1.3,
+      "learning_rate": 0.00013519553072625698,
+      "loss": 1.8748,
+      "step": 232
+    },
+    {
+      "epoch": 1.3,
+      "learning_rate": 0.00013491620111731844,
+      "loss": 1.7976,
+      "step": 233
+    },
+    {
+      "epoch": 1.31,
+      "learning_rate": 0.00013463687150837988,
+      "loss": 1.7369,
+      "step": 234
+    },
+    {
+      "epoch": 1.31,
+      "learning_rate": 0.00013435754189944135,
+      "loss": 1.7808,
+      "step": 235
+    },
+    {
+      "epoch": 1.32,
+      "learning_rate": 0.0001340782122905028,
+      "loss": 1.8385,
+      "step": 236
+    },
+    {
+      "epoch": 1.32,
+      "learning_rate": 0.00013379888268156423,
+      "loss": 1.8295,
+      "step": 237
+    },
+    {
+      "epoch": 1.33,
+      "learning_rate": 0.00013351955307262572,
+      "loss": 1.757,
+      "step": 238
+    },
+    {
+      "epoch": 1.34,
+      "learning_rate": 0.00013324022346368716,
+      "loss": 1.7904,
+      "step": 239
+    },
+    {
+      "epoch": 1.34,
+      "learning_rate": 0.0001329608938547486,
+      "loss": 1.7632,
+      "step": 240
+    },
+    {
+      "epoch": 1.35,
+      "learning_rate": 0.00013268156424581007,
+      "loss": 1.7867,
+      "step": 241
+    },
+    {
+      "epoch": 1.35,
+      "learning_rate": 0.0001324022346368715,
+      "loss": 1.8259,
+      "step": 242
+    },
+    {
+      "epoch": 1.36,
+      "learning_rate": 0.00013212290502793297,
+      "loss": 1.6655,
+      "step": 243
+    },
+    {
+      "epoch": 1.36,
+      "learning_rate": 0.0001318435754189944,
+      "loss": 1.6848,
+      "step": 244
+    },
+    {
+      "epoch": 1.37,
+      "learning_rate": 0.00013156424581005585,
+      "loss": 1.7931,
+      "step": 245
+    },
+    {
+      "epoch": 1.37,
+      "learning_rate": 0.00013128491620111732,
+      "loss": 1.7868,
+      "step": 246
+    },
+    {
+      "epoch": 1.38,
+      "learning_rate": 0.00013100558659217879,
+      "loss": 1.7732,
+      "step": 247
+    },
+    {
+      "epoch": 1.39,
+      "learning_rate": 0.00013072625698324022,
+      "loss": 1.7851,
+      "step": 248
+    },
+    {
+      "epoch": 1.39,
+      "learning_rate": 0.0001304469273743017,
+      "loss": 1.7406,
+      "step": 249
+    },
+    {
+      "epoch": 1.4,
+      "learning_rate": 0.00013016759776536313,
+      "loss": 1.5853,
+      "step": 250
+    },
+    {
+      "epoch": 1.4,
+      "learning_rate": 0.0001298882681564246,
+      "loss": 1.8271,
+      "step": 251
+    },
+    {
+      "epoch": 1.41,
+      "learning_rate": 0.00012960893854748604,
+      "loss": 1.6054,
+      "step": 252
+    },
+    {
+      "epoch": 1.41,
+      "learning_rate": 0.00012932960893854748,
+      "loss": 1.6884,
+      "step": 253
+    },
+    {
+      "epoch": 1.42,
+      "learning_rate": 0.00012905027932960894,
+      "loss": 1.7333,
+      "step": 254
+    },
+    {
+      "epoch": 1.42,
+      "learning_rate": 0.00012877094972067038,
+      "loss": 1.803,
+      "step": 255
+    },
+    {
+      "epoch": 1.43,
+      "learning_rate": 0.00012849162011173185,
+      "loss": 1.63,
+      "step": 256
+    },
+    {
+      "epoch": 1.44,
+      "learning_rate": 0.00012821229050279331,
+      "loss": 1.8023,
+      "step": 257
+    },
+    {
+      "epoch": 1.44,
+      "learning_rate": 0.00012793296089385475,
+      "loss": 1.762,
+      "step": 258
+    },
+    {
+      "epoch": 1.45,
+      "learning_rate": 0.00012765363128491622,
+      "loss": 1.7565,
+      "step": 259
+    },
+    {
+      "epoch": 1.45,
+      "learning_rate": 0.00012737430167597766,
+      "loss": 1.5937,
+      "step": 260
+    },
+    {
+      "epoch": 1.46,
+      "learning_rate": 0.0001270949720670391,
+      "loss": 1.7208,
+      "step": 261
+    },
+    {
+      "epoch": 1.46,
+      "learning_rate": 0.00012681564245810057,
+      "loss": 1.8097,
+      "step": 262
+    },
+    {
+      "epoch": 1.47,
+      "learning_rate": 0.000126536312849162,
+      "loss": 1.7042,
+      "step": 263
+    },
+    {
+      "epoch": 1.47,
+      "learning_rate": 0.00012625698324022347,
+      "loss": 1.7892,
+      "step": 264
+    },
+    {
+      "epoch": 1.48,
+      "learning_rate": 0.00012597765363128494,
+      "loss": 1.752,
+      "step": 265
+    },
+    {
+      "epoch": 1.49,
+      "learning_rate": 0.00012569832402234638,
+      "loss": 1.7333,
+      "step": 266
+    },
+    {
+      "epoch": 1.49,
+      "learning_rate": 0.00012541899441340784,
+      "loss": 1.7492,
+      "step": 267
+    },
+    {
+      "epoch": 1.5,
+      "learning_rate": 0.00012513966480446928,
+      "loss": 1.818,
+      "step": 268
+    },
+    {
+      "epoch": 1.5,
+      "learning_rate": 0.00012486033519553072,
+      "loss": 1.7146,
+      "step": 269
+    },
+    {
+      "epoch": 1.51,
+      "learning_rate": 0.0001245810055865922,
+      "loss": 1.7958,
+      "step": 270
+    },
+    {
+      "epoch": 1.51,
+      "learning_rate": 0.00012430167597765363,
+      "loss": 1.7212,
+      "step": 271
+    },
+    {
+      "epoch": 1.52,
+      "learning_rate": 0.0001240223463687151,
+      "loss": 1.8835,
+      "step": 272
+    },
+    {
+      "epoch": 1.53,
+      "learning_rate": 0.00012374301675977656,
+      "loss": 1.7486,
+      "step": 273
+    },
+    {
+      "epoch": 1.53,
+      "learning_rate": 0.000123463687150838,
+      "loss": 1.8103,
+      "step": 274
+    },
+    {
+      "epoch": 1.54,
+      "learning_rate": 0.00012318435754189944,
+      "loss": 1.7557,
+      "step": 275
+    },
+    {
+      "epoch": 1.54,
+      "learning_rate": 0.0001229050279329609,
+      "loss": 1.6198,
+      "step": 276
+    },
+    {
+      "epoch": 1.55,
+      "learning_rate": 0.00012262569832402235,
+      "loss": 1.6971,
+      "step": 277
+    },
+    {
+      "epoch": 1.55,
+      "learning_rate": 0.0001223463687150838,
+      "loss": 1.668,
+      "step": 278
+    },
+    {
+      "epoch": 1.56,
+      "learning_rate": 0.00012206703910614525,
+      "loss": 1.8795,
+      "step": 279
+    },
+    {
+      "epoch": 1.56,
+      "learning_rate": 0.0001217877094972067,
+      "loss": 1.6412,
+      "step": 280
+    },
+    {
+      "epoch": 1.57,
+      "learning_rate": 0.00012150837988826816,
+      "loss": 1.7497,
+      "step": 281
+    },
+    {
+      "epoch": 1.58,
+      "learning_rate": 0.00012122905027932962,
+      "loss": 1.5577,
+      "step": 282
+    },
+    {
+      "epoch": 1.58,
+      "learning_rate": 0.00012094972067039108,
+      "loss": 1.8049,
+      "step": 283
+    },
+    {
+      "epoch": 1.59,
+      "learning_rate": 0.00012067039106145253,
+      "loss": 1.6834,
+      "step": 284
+    },
+    {
+      "epoch": 1.59,
+      "learning_rate": 0.00012039106145251397,
+      "loss": 1.7978,
+      "step": 285
+    },
+    {
+      "epoch": 1.6,
+      "learning_rate": 0.00012011173184357542,
+      "loss": 1.6558,
+      "step": 286
+    },
+    {
+      "epoch": 1.6,
+      "learning_rate": 0.00011983240223463687,
+      "loss": 1.7561,
+      "step": 287
+    },
+    {
+      "epoch": 1.61,
+      "learning_rate": 0.00011955307262569833,
+      "loss": 1.6807,
+      "step": 288
+    },
+    {
+      "epoch": 1.61,
+      "learning_rate": 0.00011927374301675978,
+      "loss": 1.6937,
+      "step": 289
+    },
+    {
+      "epoch": 1.62,
+      "learning_rate": 0.00011899441340782122,
+      "loss": 1.6591,
+      "step": 290
+    },
+    {
+      "epoch": 1.63,
+      "learning_rate": 0.0001187150837988827,
+      "loss": 1.6771,
+      "step": 291
+    },
+    {
+      "epoch": 1.63,
+      "learning_rate": 0.00011843575418994415,
+      "loss": 1.7743,
+      "step": 292
+    },
+    {
+      "epoch": 1.64,
+      "learning_rate": 0.00011815642458100559,
+      "loss": 1.5857,
+      "step": 293
+    },
+    {
+      "epoch": 1.64,
+      "learning_rate": 0.00011787709497206705,
+      "loss": 1.6999,
+      "step": 294
+    },
+    {
+      "epoch": 1.65,
+      "learning_rate": 0.0001175977653631285,
+      "loss": 1.5661,
+      "step": 295
+    },
+    {
+      "epoch": 1.65,
+      "learning_rate": 0.00011731843575418995,
+      "loss": 1.7235,
+      "step": 296
+    },
+    {
+      "epoch": 1.66,
+      "learning_rate": 0.0001170391061452514,
+      "loss": 1.607,
+      "step": 297
+    },
+    {
+      "epoch": 1.66,
+      "learning_rate": 0.00011675977653631284,
+      "loss": 1.68,
+      "step": 298
+    },
+    {
+      "epoch": 1.67,
+      "learning_rate": 0.0001164804469273743,
+      "loss": 1.6938,
+      "step": 299
+    },
+    {
+      "epoch": 1.68,
+      "learning_rate": 0.00011620111731843578,
+      "loss": 1.6315,
+      "step": 300
     }
   ],
   "logging_steps": 1,
   "max_steps": 716,
   "num_train_epochs": 4,
   "save_steps": 100,
-  "total_flos": 1.
+  "total_flos": 1.5369996759656448e+17,
   "trial_name": null,
   "trial_params": null
 }
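The 100 new log entries are internally consistent: the learning rate falls by a constant 2e-4 / 716 ≈ 2.7933e-7 per step (a linear decay schedule; the 2e-4 base rate is inferred from the logged values, not stated in the file), and the epochs track step / 179, since 716 max_steps over 4 num_train_epochs gives 179 steps per epoch (300 / 179 = 1.675977653631285, matching the updated header). A quick sanity check, assuming the standard trainer_state.json layout with its log_history list:

# Sketch: verify the schedule logged in trainer_state.json.
# base_lr = 2e-4 is inferred from the logged values, not stated in the file.
import json

with open("trainer_state.json") as f:
    state = json.load(f)

base_lr = 2e-4
max_steps = state["max_steps"]                            # 716
steps_per_epoch = max_steps // state["num_train_epochs"]  # 179

for entry in state["log_history"]:
    step = entry["step"]
    expected_lr = base_lr * (max_steps - step) / max_steps  # linear decay to 0
    assert abs(entry["learning_rate"] - expected_lr) < 1e-12
    # epochs are logged rounded to two decimals
    assert abs(entry["epoch"] - step / steps_per_epoch) < 0.005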