|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 1.9932659932659933, |
|
"eval_steps": 500, |
|
"global_step": 296, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.006734006734006734, |
|
"grad_norm": 10.587559734000028, |
|
"learning_rate": 3.3333333333333335e-07, |
|
"loss": 0.3649, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.013468013468013467, |
|
"grad_norm": 9.693900007266853, |
|
"learning_rate": 6.666666666666667e-07, |
|
"loss": 0.3686, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.020202020202020204, |
|
"grad_norm": 10.061524524104307, |
|
"learning_rate": 1.0000000000000002e-06, |
|
"loss": 0.3574, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.026936026936026935, |
|
"grad_norm": 8.726573115555482, |
|
"learning_rate": 1.3333333333333334e-06, |
|
"loss": 0.3702, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.03367003367003367, |
|
"grad_norm": 7.3549146274711585, |
|
"learning_rate": 1.6666666666666667e-06, |
|
"loss": 0.309, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.04040404040404041, |
|
"grad_norm": 8.093966841100372, |
|
"learning_rate": 2.0000000000000003e-06, |
|
"loss": 0.3129, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.04713804713804714, |
|
"grad_norm": 4.842714262161294, |
|
"learning_rate": 2.3333333333333336e-06, |
|
"loss": 0.2599, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.05387205387205387, |
|
"grad_norm": 3.6588226035329976, |
|
"learning_rate": 2.666666666666667e-06, |
|
"loss": 0.2195, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.06060606060606061, |
|
"grad_norm": 4.446495106085817, |
|
"learning_rate": 3e-06, |
|
"loss": 0.2421, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.06734006734006734, |
|
"grad_norm": 2.8148929686162267, |
|
"learning_rate": 3.3333333333333333e-06, |
|
"loss": 0.1974, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.07407407407407407, |
|
"grad_norm": 3.100744939706759, |
|
"learning_rate": 3.6666666666666666e-06, |
|
"loss": 0.226, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.08080808080808081, |
|
"grad_norm": 5.783874426714631, |
|
"learning_rate": 4.000000000000001e-06, |
|
"loss": 0.244, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.08754208754208755, |
|
"grad_norm": 3.931311548892614, |
|
"learning_rate": 4.333333333333334e-06, |
|
"loss": 0.2085, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.09427609427609428, |
|
"grad_norm": 3.160745880374999, |
|
"learning_rate": 4.666666666666667e-06, |
|
"loss": 0.193, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.10101010101010101, |
|
"grad_norm": 2.593859671610231, |
|
"learning_rate": 5e-06, |
|
"loss": 0.1918, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.10774410774410774, |
|
"grad_norm": 2.3411274601291923, |
|
"learning_rate": 5.333333333333334e-06, |
|
"loss": 0.1658, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.11447811447811448, |
|
"grad_norm": 2.4713577315394915, |
|
"learning_rate": 5.666666666666667e-06, |
|
"loss": 0.1631, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.12121212121212122, |
|
"grad_norm": 1.7868674672717264, |
|
"learning_rate": 6e-06, |
|
"loss": 0.1447, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.12794612794612795, |
|
"grad_norm": 2.691562015290166, |
|
"learning_rate": 6.333333333333333e-06, |
|
"loss": 0.1642, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.13468013468013468, |
|
"grad_norm": 2.656849943102207, |
|
"learning_rate": 6.666666666666667e-06, |
|
"loss": 0.1679, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.1414141414141414, |
|
"grad_norm": 1.8157646932459017, |
|
"learning_rate": 7e-06, |
|
"loss": 0.148, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.14814814814814814, |
|
"grad_norm": 1.8536112319855758, |
|
"learning_rate": 7.333333333333333e-06, |
|
"loss": 0.171, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.15488215488215487, |
|
"grad_norm": 1.7970217082935407, |
|
"learning_rate": 7.666666666666667e-06, |
|
"loss": 0.1543, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.16161616161616163, |
|
"grad_norm": 1.6194461511446632, |
|
"learning_rate": 8.000000000000001e-06, |
|
"loss": 0.1331, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.16835016835016836, |
|
"grad_norm": 1.6671876155213552, |
|
"learning_rate": 8.333333333333334e-06, |
|
"loss": 0.1997, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.1750841750841751, |
|
"grad_norm": 2.8083673280494987, |
|
"learning_rate": 8.666666666666668e-06, |
|
"loss": 0.149, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.18181818181818182, |
|
"grad_norm": 2.3945625699544983, |
|
"learning_rate": 9e-06, |
|
"loss": 0.2017, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.18855218855218855, |
|
"grad_norm": 2.547483046750297, |
|
"learning_rate": 9.333333333333334e-06, |
|
"loss": 0.1948, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.19528619528619529, |
|
"grad_norm": 2.1650784865978103, |
|
"learning_rate": 9.666666666666667e-06, |
|
"loss": 0.1371, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.20202020202020202, |
|
"grad_norm": 3.341335706430969, |
|
"learning_rate": 1e-05, |
|
"loss": 0.1475, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.20875420875420875, |
|
"grad_norm": 3.4978479659224306, |
|
"learning_rate": 9.999651284354774e-06, |
|
"loss": 0.1742, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.21548821548821548, |
|
"grad_norm": 2.8495339317165573, |
|
"learning_rate": 9.998605186060138e-06, |
|
"loss": 0.1318, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.2222222222222222, |
|
"grad_norm": 2.4440237950440045, |
|
"learning_rate": 9.996861851032426e-06, |
|
"loss": 0.1636, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.22895622895622897, |
|
"grad_norm": 2.606759351085706, |
|
"learning_rate": 9.99442152244292e-06, |
|
"loss": 0.1655, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.2356902356902357, |
|
"grad_norm": 2.4143459809986276, |
|
"learning_rate": 9.991284540683922e-06, |
|
"loss": 0.1492, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.24242424242424243, |
|
"grad_norm": 2.2585659926066226, |
|
"learning_rate": 9.98745134332128e-06, |
|
"loss": 0.1426, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.24915824915824916, |
|
"grad_norm": 2.2201124689491434, |
|
"learning_rate": 9.98292246503335e-06, |
|
"loss": 0.1692, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.2558922558922559, |
|
"grad_norm": 1.8870018630441274, |
|
"learning_rate": 9.97769853753642e-06, |
|
"loss": 0.1139, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.26262626262626265, |
|
"grad_norm": 1.571794239015075, |
|
"learning_rate": 9.971780289496585e-06, |
|
"loss": 0.1301, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.26936026936026936, |
|
"grad_norm": 1.8448276378849247, |
|
"learning_rate": 9.965168546428122e-06, |
|
"loss": 0.1707, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.2760942760942761, |
|
"grad_norm": 1.739556757210725, |
|
"learning_rate": 9.95786423057833e-06, |
|
"loss": 0.1229, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.2828282828282828, |
|
"grad_norm": 2.3063563017251667, |
|
"learning_rate": 9.949868360798893e-06, |
|
"loss": 0.1528, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.2895622895622896, |
|
"grad_norm": 1.6568054539708137, |
|
"learning_rate": 9.941182052403768e-06, |
|
"loss": 0.1402, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.2962962962962963, |
|
"grad_norm": 2.4925535277590254, |
|
"learning_rate": 9.931806517013612e-06, |
|
"loss": 0.1805, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.30303030303030304, |
|
"grad_norm": 1.4549013854977153, |
|
"learning_rate": 9.921743062386773e-06, |
|
"loss": 0.1167, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.30976430976430974, |
|
"grad_norm": 1.3094244266752977, |
|
"learning_rate": 9.910993092236878e-06, |
|
"loss": 0.125, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.3164983164983165, |
|
"grad_norm": 1.7107110369252345, |
|
"learning_rate": 9.899558106037039e-06, |
|
"loss": 0.1509, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.32323232323232326, |
|
"grad_norm": 1.2447980159357512, |
|
"learning_rate": 9.887439698810694e-06, |
|
"loss": 0.1101, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.32996632996632996, |
|
"grad_norm": 1.4004468828469865, |
|
"learning_rate": 9.874639560909118e-06, |
|
"loss": 0.1526, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.3367003367003367, |
|
"grad_norm": 1.7542103007123673, |
|
"learning_rate": 9.861159477775653e-06, |
|
"loss": 0.1446, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.3434343434343434, |
|
"grad_norm": 1.2222510349896305, |
|
"learning_rate": 9.847001329696653e-06, |
|
"loss": 0.118, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.3501683501683502, |
|
"grad_norm": 1.8427984079271709, |
|
"learning_rate": 9.832167091539215e-06, |
|
"loss": 0.142, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.3569023569023569, |
|
"grad_norm": 2.031779034578671, |
|
"learning_rate": 9.816658832475709e-06, |
|
"loss": 0.1314, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.36363636363636365, |
|
"grad_norm": 1.6115001478932334, |
|
"learning_rate": 9.800478715695165e-06, |
|
"loss": 0.1389, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.37037037037037035, |
|
"grad_norm": 1.3977600344091157, |
|
"learning_rate": 9.783628998101525e-06, |
|
"loss": 0.1223, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.3771043771043771, |
|
"grad_norm": 1.2541929470324433, |
|
"learning_rate": 9.766112029998847e-06, |
|
"loss": 0.1188, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.3838383838383838, |
|
"grad_norm": 1.1839769901258788, |
|
"learning_rate": 9.747930254763467e-06, |
|
"loss": 0.1445, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.39057239057239057, |
|
"grad_norm": 2.1758765383002476, |
|
"learning_rate": 9.729086208503174e-06, |
|
"loss": 0.1361, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.39730639730639733, |
|
"grad_norm": 1.308302111463449, |
|
"learning_rate": 9.70958251970347e-06, |
|
"loss": 0.1206, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.40404040404040403, |
|
"grad_norm": 1.2548920096786198, |
|
"learning_rate": 9.689421908860928e-06, |
|
"loss": 0.1547, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.4107744107744108, |
|
"grad_norm": 1.8106730974081224, |
|
"learning_rate": 9.668607188103708e-06, |
|
"loss": 0.1739, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.4175084175084175, |
|
"grad_norm": 1.7503742895673335, |
|
"learning_rate": 9.64714126079933e-06, |
|
"loss": 0.126, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.42424242424242425, |
|
"grad_norm": 1.9002464919585416, |
|
"learning_rate": 9.625027121149665e-06, |
|
"loss": 0.1209, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.43097643097643096, |
|
"grad_norm": 1.3005333196675646, |
|
"learning_rate": 9.602267853773301e-06, |
|
"loss": 0.1198, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.4377104377104377, |
|
"grad_norm": 1.534465904527491, |
|
"learning_rate": 9.578866633275289e-06, |
|
"loss": 0.1118, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.4444444444444444, |
|
"grad_norm": 1.2985721012757516, |
|
"learning_rate": 9.554826723804304e-06, |
|
"loss": 0.108, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.4511784511784512, |
|
"grad_norm": 1.7026939587233383, |
|
"learning_rate": 9.530151478597366e-06, |
|
"loss": 0.1229, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.45791245791245794, |
|
"grad_norm": 1.1948241076690054, |
|
"learning_rate": 9.504844339512096e-06, |
|
"loss": 0.1221, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.46464646464646464, |
|
"grad_norm": 1.4429708012011027, |
|
"learning_rate": 9.478908836546629e-06, |
|
"loss": 0.1154, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.4713804713804714, |
|
"grad_norm": 1.3983865141656082, |
|
"learning_rate": 9.452348587347224e-06, |
|
"loss": 0.1024, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.4781144781144781, |
|
"grad_norm": 1.316559372373498, |
|
"learning_rate": 9.425167296703655e-06, |
|
"loss": 0.1299, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.48484848484848486, |
|
"grad_norm": 0.8721397993491847, |
|
"learning_rate": 9.397368756032445e-06, |
|
"loss": 0.0838, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.49158249158249157, |
|
"grad_norm": 2.3999305030312006, |
|
"learning_rate": 9.368956842848014e-06, |
|
"loss": 0.1407, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.4983164983164983, |
|
"grad_norm": 1.2702381897041746, |
|
"learning_rate": 9.339935520221816e-06, |
|
"loss": 0.1185, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.5050505050505051, |
|
"grad_norm": 1.2224610902674924, |
|
"learning_rate": 9.310308836229548e-06, |
|
"loss": 0.1044, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.5117845117845118, |
|
"grad_norm": 1.0145648591712295, |
|
"learning_rate": 9.280080923386501e-06, |
|
"loss": 0.0793, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.5185185185185185, |
|
"grad_norm": 1.8590770931022098, |
|
"learning_rate": 9.249255998071127e-06, |
|
"loss": 0.1218, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.5252525252525253, |
|
"grad_norm": 1.1046753339013544, |
|
"learning_rate": 9.217838359936914e-06, |
|
"loss": 0.0982, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.531986531986532, |
|
"grad_norm": 1.5353842647876887, |
|
"learning_rate": 9.185832391312644e-06, |
|
"loss": 0.0976, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.5387205387205387, |
|
"grad_norm": 1.6322780255697593, |
|
"learning_rate": 9.153242556591115e-06, |
|
"loss": 0.1073, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.5454545454545454, |
|
"grad_norm": 1.4318238106918786, |
|
"learning_rate": 9.120073401606427e-06, |
|
"loss": 0.1535, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.5521885521885522, |
|
"grad_norm": 1.2692358298751287, |
|
"learning_rate": 9.08632955299989e-06, |
|
"loss": 0.1162, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.5589225589225589, |
|
"grad_norm": 0.9386668388367472, |
|
"learning_rate": 9.052015717574683e-06, |
|
"loss": 0.0819, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.5656565656565656, |
|
"grad_norm": 1.578041471845064, |
|
"learning_rate": 9.017136681639307e-06, |
|
"loss": 0.1438, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.5723905723905723, |
|
"grad_norm": 1.3747097561675499, |
|
"learning_rate": 8.981697310339972e-06, |
|
"loss": 0.1129, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.5791245791245792, |
|
"grad_norm": 1.2857978501576468, |
|
"learning_rate": 8.94570254698197e-06, |
|
"loss": 0.1153, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.5858585858585859, |
|
"grad_norm": 1.1071476461695378, |
|
"learning_rate": 8.90915741234015e-06, |
|
"loss": 0.1018, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.5925925925925926, |
|
"grad_norm": 1.1979772322060398, |
|
"learning_rate": 8.872067003958597e-06, |
|
"loss": 0.0939, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.5993265993265994, |
|
"grad_norm": 1.4043079567849108, |
|
"learning_rate": 8.834436495439588e-06, |
|
"loss": 0.1022, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.6060606060606061, |
|
"grad_norm": 1.182762910272131, |
|
"learning_rate": 8.796271135721944e-06, |
|
"loss": 0.1099, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.6127946127946128, |
|
"grad_norm": 0.9919737781684954, |
|
"learning_rate": 8.757576248348883e-06, |
|
"loss": 0.101, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.6195286195286195, |
|
"grad_norm": 1.0613946248613535, |
|
"learning_rate": 8.71835723072545e-06, |
|
"loss": 0.1074, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.6262626262626263, |
|
"grad_norm": 1.1119387505650915, |
|
"learning_rate": 8.67861955336566e-06, |
|
"loss": 0.0999, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.632996632996633, |
|
"grad_norm": 1.0663092046509486, |
|
"learning_rate": 8.638368759129433e-06, |
|
"loss": 0.1044, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.6397306397306397, |
|
"grad_norm": 1.5025873764978885, |
|
"learning_rate": 8.597610462449441e-06, |
|
"loss": 0.092, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.6464646464646465, |
|
"grad_norm": 1.1482831508449691, |
|
"learning_rate": 8.556350348547978e-06, |
|
"loss": 0.099, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.6531986531986532, |
|
"grad_norm": 1.1531128286621257, |
|
"learning_rate": 8.514594172643934e-06, |
|
"loss": 0.1122, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.6599326599326599, |
|
"grad_norm": 0.92890945229994, |
|
"learning_rate": 8.472347759150044e-06, |
|
"loss": 0.0934, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.6666666666666666, |
|
"grad_norm": 0.8710829747699658, |
|
"learning_rate": 8.429617000860441e-06, |
|
"loss": 0.0963, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.6734006734006734, |
|
"grad_norm": 1.5913618273955719, |
|
"learning_rate": 8.386407858128707e-06, |
|
"loss": 0.1017, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.6801346801346801, |
|
"grad_norm": 2.0477106016376423, |
|
"learning_rate": 8.342726358036473e-06, |
|
"loss": 0.1643, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.6868686868686869, |
|
"grad_norm": 1.310766070822457, |
|
"learning_rate": 8.298578593552737e-06, |
|
"loss": 0.1129, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.6936026936026936, |
|
"grad_norm": 1.0689180927391349, |
|
"learning_rate": 8.253970722683968e-06, |
|
"loss": 0.0805, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.7003367003367004, |
|
"grad_norm": 1.021500128946588, |
|
"learning_rate": 8.208908967615159e-06, |
|
"loss": 0.0854, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.7070707070707071, |
|
"grad_norm": 1.6146318951430105, |
|
"learning_rate": 8.163399613841903e-06, |
|
"loss": 0.1026, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.7138047138047138, |
|
"grad_norm": 1.0430932372821804, |
|
"learning_rate": 8.117449009293668e-06, |
|
"loss": 0.0911, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.7205387205387206, |
|
"grad_norm": 0.9627471656747238, |
|
"learning_rate": 8.071063563448341e-06, |
|
"loss": 0.0864, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.7272727272727273, |
|
"grad_norm": 0.978325652238678, |
|
"learning_rate": 8.024249746438189e-06, |
|
"loss": 0.0767, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.734006734006734, |
|
"grad_norm": 1.5716697095342131, |
|
"learning_rate": 7.977014088147375e-06, |
|
"loss": 0.117, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.7407407407407407, |
|
"grad_norm": 0.9226258206362579, |
|
"learning_rate": 7.929363177301124e-06, |
|
"loss": 0.0898, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.7474747474747475, |
|
"grad_norm": 0.979018465840073, |
|
"learning_rate": 7.881303660546684e-06, |
|
"loss": 0.096, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.7542087542087542, |
|
"grad_norm": 1.405839836354203, |
|
"learning_rate": 7.832842241526212e-06, |
|
"loss": 0.0969, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.7609427609427609, |
|
"grad_norm": 1.7228913938880963, |
|
"learning_rate": 7.78398567994171e-06, |
|
"loss": 0.1229, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.7676767676767676, |
|
"grad_norm": 1.7211262216477865, |
|
"learning_rate": 7.734740790612137e-06, |
|
"loss": 0.1156, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.7744107744107744, |
|
"grad_norm": 1.1471367685176275, |
|
"learning_rate": 7.685114442522831e-06, |
|
"loss": 0.0953, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.7811447811447811, |
|
"grad_norm": 1.0484067219863682, |
|
"learning_rate": 7.635113557867395e-06, |
|
"loss": 0.0957, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.7878787878787878, |
|
"grad_norm": 1.3643749264979563, |
|
"learning_rate": 7.584745111082128e-06, |
|
"loss": 0.0985, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.7946127946127947, |
|
"grad_norm": 1.1409003431687448, |
|
"learning_rate": 7.5340161278732e-06, |
|
"loss": 0.0948, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.8013468013468014, |
|
"grad_norm": 1.1845755031970973, |
|
"learning_rate": 7.482933684236654e-06, |
|
"loss": 0.1278, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.8080808080808081, |
|
"grad_norm": 1.2048614884456728, |
|
"learning_rate": 7.431504905471407e-06, |
|
"loss": 0.0866, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.8148148148148148, |
|
"grad_norm": 1.1024340198116762, |
|
"learning_rate": 7.379736965185369e-06, |
|
"loss": 0.0886, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.8215488215488216, |
|
"grad_norm": 1.1875877764472678, |
|
"learning_rate": 7.327637084294818e-06, |
|
"loss": 0.0876, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.8282828282828283, |
|
"grad_norm": 0.8892335379224572, |
|
"learning_rate": 7.2752125300171835e-06, |
|
"loss": 0.0829, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.835016835016835, |
|
"grad_norm": 1.1503853071251364, |
|
"learning_rate": 7.22247061485738e-06, |
|
"loss": 0.0945, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.8417508417508418, |
|
"grad_norm": 0.9116461855370438, |
|
"learning_rate": 7.169418695587791e-06, |
|
"loss": 0.0642, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.8484848484848485, |
|
"grad_norm": 0.9169291992757941, |
|
"learning_rate": 7.1160641722221255e-06, |
|
"loss": 0.0762, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.8552188552188552, |
|
"grad_norm": 0.8458786678974077, |
|
"learning_rate": 7.062414486983197e-06, |
|
"loss": 0.0782, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.8619528619528619, |
|
"grad_norm": 2.0440916025334688, |
|
"learning_rate": 7.008477123264849e-06, |
|
"loss": 0.1654, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.8686868686868687, |
|
"grad_norm": 1.2692910029856195, |
|
"learning_rate": 6.954259604588114e-06, |
|
"loss": 0.0993, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.8754208754208754, |
|
"grad_norm": 1.0371836145890874, |
|
"learning_rate": 6.8997694935518e-06, |
|
"loss": 0.0792, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.8821548821548821, |
|
"grad_norm": 0.8129848414149057, |
|
"learning_rate": 6.845014390777595e-06, |
|
"loss": 0.0631, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.8888888888888888, |
|
"grad_norm": 1.0818279823466028, |
|
"learning_rate": 6.7900019338499005e-06, |
|
"loss": 0.084, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.8956228956228957, |
|
"grad_norm": 1.2323415448858157, |
|
"learning_rate": 6.734739796250477e-06, |
|
"loss": 0.1037, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.9023569023569024, |
|
"grad_norm": 0.9072503259074285, |
|
"learning_rate": 6.6792356862881144e-06, |
|
"loss": 0.1022, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.9090909090909091, |
|
"grad_norm": 1.0444248401571283, |
|
"learning_rate": 6.6234973460234184e-06, |
|
"loss": 0.0952, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.9158249158249159, |
|
"grad_norm": 0.7905929983463482, |
|
"learning_rate": 6.567532550188908e-06, |
|
"loss": 0.0717, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.9225589225589226, |
|
"grad_norm": 0.7434134587240314, |
|
"learning_rate": 6.511349105104534e-06, |
|
"loss": 0.0944, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.9292929292929293, |
|
"grad_norm": 0.768332895445934, |
|
"learning_rate": 6.454954847588824e-06, |
|
"loss": 0.0678, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.936026936026936, |
|
"grad_norm": 1.0841144826502787, |
|
"learning_rate": 6.398357643865731e-06, |
|
"loss": 0.0845, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.9427609427609428, |
|
"grad_norm": 1.0657965630616948, |
|
"learning_rate": 6.341565388467425e-06, |
|
"loss": 0.0845, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.9494949494949495, |
|
"grad_norm": 0.7896126816989001, |
|
"learning_rate": 6.284586003133096e-06, |
|
"loss": 0.0922, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.9562289562289562, |
|
"grad_norm": 1.4442123017819188, |
|
"learning_rate": 6.227427435703997e-06, |
|
"loss": 0.1182, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.9629629629629629, |
|
"grad_norm": 0.9662733291916084, |
|
"learning_rate": 6.170097659014812e-06, |
|
"loss": 0.0915, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.9696969696969697, |
|
"grad_norm": 0.9920505746388618, |
|
"learning_rate": 6.112604669781572e-06, |
|
"loss": 0.093, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.9764309764309764, |
|
"grad_norm": 1.09207653298068, |
|
"learning_rate": 6.054956487486212e-06, |
|
"loss": 0.0919, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.9831649831649831, |
|
"grad_norm": 0.7024022249718929, |
|
"learning_rate": 5.997161153257963e-06, |
|
"loss": 0.0786, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.98989898989899, |
|
"grad_norm": 0.8553768429510301, |
|
"learning_rate": 5.939226728751733e-06, |
|
"loss": 0.091, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.9966329966329966, |
|
"grad_norm": 0.7368165994452094, |
|
"learning_rate": 5.88116129502361e-06, |
|
"loss": 0.0841, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.9966329966329966, |
|
"eval_loss": 0.0851789191365242, |
|
"eval_runtime": 74.3023, |
|
"eval_samples_per_second": 26.89, |
|
"eval_steps_per_second": 0.848, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 1.0033670033670035, |
|
"grad_norm": 1.3624837722625542, |
|
"learning_rate": 5.82297295140367e-06, |
|
"loss": 0.0942, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 1.0101010101010102, |
|
"grad_norm": 0.8091703688887699, |
|
"learning_rate": 5.764669814366231e-06, |
|
"loss": 0.0673, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 1.0168350168350169, |
|
"grad_norm": 1.1689964332508505, |
|
"learning_rate": 5.70626001639771e-06, |
|
"loss": 0.0904, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 1.0235690235690236, |
|
"grad_norm": 0.9720282544537069, |
|
"learning_rate": 5.647751704862263e-06, |
|
"loss": 0.0764, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 1.0303030303030303, |
|
"grad_norm": 0.7814529677090204, |
|
"learning_rate": 5.589153040865333e-06, |
|
"loss": 0.0766, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 1.037037037037037, |
|
"grad_norm": 0.6208562049624894, |
|
"learning_rate": 5.530472198115291e-06, |
|
"loss": 0.0546, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 1.0437710437710437, |
|
"grad_norm": 1.0609611170144315, |
|
"learning_rate": 5.471717361783312e-06, |
|
"loss": 0.0907, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 1.0505050505050506, |
|
"grad_norm": 0.9895862720799716, |
|
"learning_rate": 5.412896727361663e-06, |
|
"loss": 0.0756, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 1.0572390572390573, |
|
"grad_norm": 0.8283511990419397, |
|
"learning_rate": 5.354018499520536e-06, |
|
"loss": 0.0733, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 1.063973063973064, |
|
"grad_norm": 0.7064918637395274, |
|
"learning_rate": 5.2950908909636144e-06, |
|
"loss": 0.0669, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 1.0707070707070707, |
|
"grad_norm": 0.7673084442281245, |
|
"learning_rate": 5.2361221212825175e-06, |
|
"loss": 0.063, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 1.0774410774410774, |
|
"grad_norm": 0.7418675838307957, |
|
"learning_rate": 5.177120415810271e-06, |
|
"loss": 0.0556, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 1.0841750841750841, |
|
"grad_norm": 0.9302869463074369, |
|
"learning_rate": 5.11809400447399e-06, |
|
"loss": 0.0937, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 1.0909090909090908, |
|
"grad_norm": 1.2470452298913122, |
|
"learning_rate": 5.059051120646924e-06, |
|
"loss": 0.0827, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 1.0976430976430978, |
|
"grad_norm": 0.5572520947809881, |
|
"learning_rate": 5e-06, |
|
"loss": 0.0537, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 1.1043771043771045, |
|
"grad_norm": 0.9002485043341543, |
|
"learning_rate": 4.940948879353078e-06, |
|
"loss": 0.088, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 1.1111111111111112, |
|
"grad_norm": 0.7257311476037439, |
|
"learning_rate": 4.8819059955260105e-06, |
|
"loss": 0.0531, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 1.1178451178451179, |
|
"grad_norm": 0.7312827459622674, |
|
"learning_rate": 4.822879584189732e-06, |
|
"loss": 0.0734, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 1.1245791245791246, |
|
"grad_norm": 0.6288896372866642, |
|
"learning_rate": 4.763877878717484e-06, |
|
"loss": 0.0538, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 1.1313131313131313, |
|
"grad_norm": 1.002048278793203, |
|
"learning_rate": 4.704909109036387e-06, |
|
"loss": 0.0843, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 1.138047138047138, |
|
"grad_norm": 0.7883344068499383, |
|
"learning_rate": 4.645981500479466e-06, |
|
"loss": 0.0656, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 1.144781144781145, |
|
"grad_norm": 0.8382010718049546, |
|
"learning_rate": 4.587103272638339e-06, |
|
"loss": 0.0811, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 1.1515151515151516, |
|
"grad_norm": 0.7760482065843696, |
|
"learning_rate": 4.528282638216689e-06, |
|
"loss": 0.0504, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 1.1582491582491583, |
|
"grad_norm": 0.7830043876217352, |
|
"learning_rate": 4.46952780188471e-06, |
|
"loss": 0.0715, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 1.164983164983165, |
|
"grad_norm": 1.0630254048827579, |
|
"learning_rate": 4.410846959134667e-06, |
|
"loss": 0.0947, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 1.1717171717171717, |
|
"grad_norm": 1.0925071520075709, |
|
"learning_rate": 4.352248295137739e-06, |
|
"loss": 0.0879, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 1.1784511784511784, |
|
"grad_norm": 0.6169774600371488, |
|
"learning_rate": 4.293739983602292e-06, |
|
"loss": 0.0611, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 1.1851851851851851, |
|
"grad_norm": 0.9939160789185896, |
|
"learning_rate": 4.23533018563377e-06, |
|
"loss": 0.0883, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 1.1919191919191918, |
|
"grad_norm": 1.1279865240839675, |
|
"learning_rate": 4.17702704859633e-06, |
|
"loss": 0.0913, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 1.1986531986531987, |
|
"grad_norm": 0.9240576378263561, |
|
"learning_rate": 4.118838704976392e-06, |
|
"loss": 0.0766, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 1.2053872053872055, |
|
"grad_norm": 1.2001616277657225, |
|
"learning_rate": 4.06077327124827e-06, |
|
"loss": 0.1161, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 1.2121212121212122, |
|
"grad_norm": 0.8147954747129799, |
|
"learning_rate": 4.002838846742039e-06, |
|
"loss": 0.0735, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 1.2188552188552189, |
|
"grad_norm": 0.7054084380327903, |
|
"learning_rate": 3.94504351251379e-06, |
|
"loss": 0.0594, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 1.2255892255892256, |
|
"grad_norm": 1.1137616381347046, |
|
"learning_rate": 3.887395330218429e-06, |
|
"loss": 0.1081, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 1.2323232323232323, |
|
"grad_norm": 0.671871366714128, |
|
"learning_rate": 3.829902340985189e-06, |
|
"loss": 0.0697, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 1.239057239057239, |
|
"grad_norm": 0.7473723466728038, |
|
"learning_rate": 3.7725725642960047e-06, |
|
"loss": 0.0621, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 1.2457912457912457, |
|
"grad_norm": 0.592505978208727, |
|
"learning_rate": 3.7154139968669043e-06, |
|
"loss": 0.058, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 1.2525252525252526, |
|
"grad_norm": 0.8357383325436504, |
|
"learning_rate": 3.658434611532578e-06, |
|
"loss": 0.0637, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 1.2592592592592593, |
|
"grad_norm": 0.7866708646652821, |
|
"learning_rate": 3.6016423561342707e-06, |
|
"loss": 0.0627, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 1.265993265993266, |
|
"grad_norm": 0.714908923134987, |
|
"learning_rate": 3.545045152411178e-06, |
|
"loss": 0.0632, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 1.2727272727272727, |
|
"grad_norm": 0.5485873177126037, |
|
"learning_rate": 3.4886508948954656e-06, |
|
"loss": 0.0599, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 1.2794612794612794, |
|
"grad_norm": 1.09587990802126, |
|
"learning_rate": 3.4324674498110956e-06, |
|
"loss": 0.0613, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 1.2861952861952861, |
|
"grad_norm": 0.8460192690372663, |
|
"learning_rate": 3.3765026539765832e-06, |
|
"loss": 0.0847, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 1.2929292929292928, |
|
"grad_norm": 0.7434506857298966, |
|
"learning_rate": 3.3207643137118872e-06, |
|
"loss": 0.0592, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 1.2996632996632997, |
|
"grad_norm": 0.7460450427195592, |
|
"learning_rate": 3.2652602037495247e-06, |
|
"loss": 0.0579, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 1.3063973063973064, |
|
"grad_norm": 1.1738509828924104, |
|
"learning_rate": 3.2099980661501016e-06, |
|
"loss": 0.0788, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 1.3131313131313131, |
|
"grad_norm": 0.8215622674250468, |
|
"learning_rate": 3.154985609222405e-06, |
|
"loss": 0.0656, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 1.3198653198653199, |
|
"grad_norm": 0.7211793878206931, |
|
"learning_rate": 3.1002305064482006e-06, |
|
"loss": 0.0643, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 1.3265993265993266, |
|
"grad_norm": 0.975561078244494, |
|
"learning_rate": 3.045740395411886e-06, |
|
"loss": 0.0671, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 1.3333333333333333, |
|
"grad_norm": 0.5847136475401894, |
|
"learning_rate": 2.991522876735154e-06, |
|
"loss": 0.0483, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 1.34006734006734, |
|
"grad_norm": 0.7843079810152986, |
|
"learning_rate": 2.9375855130168046e-06, |
|
"loss": 0.0689, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 1.3468013468013469, |
|
"grad_norm": 0.8005399461255156, |
|
"learning_rate": 2.8839358277778758e-06, |
|
"loss": 0.0664, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 1.3535353535353536, |
|
"grad_norm": 0.6772138624158527, |
|
"learning_rate": 2.83058130441221e-06, |
|
"loss": 0.0615, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 1.3602693602693603, |
|
"grad_norm": 0.7191213036719818, |
|
"learning_rate": 2.7775293851426233e-06, |
|
"loss": 0.0774, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 1.367003367003367, |
|
"grad_norm": 0.7605152392156752, |
|
"learning_rate": 2.7247874699828186e-06, |
|
"loss": 0.0928, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 1.3737373737373737, |
|
"grad_norm": 0.6195090121798972, |
|
"learning_rate": 2.6723629157051844e-06, |
|
"loss": 0.0596, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 1.3804713804713804, |
|
"grad_norm": 0.6643501098994239, |
|
"learning_rate": 2.6202630348146323e-06, |
|
"loss": 0.0523, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 1.387205387205387, |
|
"grad_norm": 0.8554198672675404, |
|
"learning_rate": 2.5684950945285937e-06, |
|
"loss": 0.0903, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 1.393939393939394, |
|
"grad_norm": 0.8364523777121037, |
|
"learning_rate": 2.517066315763348e-06, |
|
"loss": 0.0758, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 1.4006734006734007, |
|
"grad_norm": 0.7135150317434039, |
|
"learning_rate": 2.4659838721268005e-06, |
|
"loss": 0.0603, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 1.4074074074074074, |
|
"grad_norm": 0.6321370814051162, |
|
"learning_rate": 2.4152548889178722e-06, |
|
"loss": 0.093, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 1.4141414141414141, |
|
"grad_norm": 0.600961736861407, |
|
"learning_rate": 2.364886442132606e-06, |
|
"loss": 0.0443, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 1.4208754208754208, |
|
"grad_norm": 0.9011898089044157, |
|
"learning_rate": 2.3148855574771706e-06, |
|
"loss": 0.0713, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 1.4276094276094276, |
|
"grad_norm": 0.572568082620369, |
|
"learning_rate": 2.265259209387867e-06, |
|
"loss": 0.0567, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 1.4343434343434343, |
|
"grad_norm": 0.5810025449791688, |
|
"learning_rate": 2.2160143200582906e-06, |
|
"loss": 0.0561, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 1.4410774410774412, |
|
"grad_norm": 0.5830304960007473, |
|
"learning_rate": 2.16715775847379e-06, |
|
"loss": 0.048, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 1.4478114478114479, |
|
"grad_norm": 0.6676748383092154, |
|
"learning_rate": 2.1186963394533165e-06, |
|
"loss": 0.0593, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 1.4545454545454546, |
|
"grad_norm": 0.6977658852399721, |
|
"learning_rate": 2.0706368226988772e-06, |
|
"loss": 0.0594, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 1.4612794612794613, |
|
"grad_norm": 0.506749158721991, |
|
"learning_rate": 2.0229859118526244e-06, |
|
"loss": 0.0556, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 1.468013468013468, |
|
"grad_norm": 0.7142907436715384, |
|
"learning_rate": 1.9757502535618137e-06, |
|
"loss": 0.0524, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 1.4747474747474747, |
|
"grad_norm": 0.6949958378386697, |
|
"learning_rate": 1.928936436551661e-06, |
|
"loss": 0.0666, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 1.4814814814814814, |
|
"grad_norm": 0.7555665471689874, |
|
"learning_rate": 1.8825509907063328e-06, |
|
"loss": 0.0716, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 1.4882154882154883, |
|
"grad_norm": 0.8498899722890332, |
|
"learning_rate": 1.8366003861580966e-06, |
|
"loss": 0.0521, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 1.494949494949495, |
|
"grad_norm": 0.7857371564300432, |
|
"learning_rate": 1.7910910323848435e-06, |
|
"loss": 0.0523, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 1.5016835016835017, |
|
"grad_norm": 0.6313660611784068, |
|
"learning_rate": 1.7460292773160315e-06, |
|
"loss": 0.0485, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 1.5084175084175084, |
|
"grad_norm": 0.790847117411599, |
|
"learning_rate": 1.7014214064472646e-06, |
|
"loss": 0.0808, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 1.5151515151515151, |
|
"grad_norm": 1.1225175609085682, |
|
"learning_rate": 1.6572736419635288e-06, |
|
"loss": 0.0875, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 1.5218855218855218, |
|
"grad_norm": 0.8723669974198242, |
|
"learning_rate": 1.6135921418712959e-06, |
|
"loss": 0.0614, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 1.5286195286195285, |
|
"grad_norm": 0.5803673000386498, |
|
"learning_rate": 1.5703829991395602e-06, |
|
"loss": 0.0521, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 1.5353535353535355, |
|
"grad_norm": 0.5913655803974819, |
|
"learning_rate": 1.5276522408499567e-06, |
|
"loss": 0.0567, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 1.542087542087542, |
|
"grad_norm": 0.5913682879212468, |
|
"learning_rate": 1.4854058273560667e-06, |
|
"loss": 0.0575, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 1.5488215488215489, |
|
"grad_norm": 0.6925238047248119, |
|
"learning_rate": 1.4436496514520253e-06, |
|
"loss": 0.0582, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 1.5555555555555556, |
|
"grad_norm": 0.6395725970290016, |
|
"learning_rate": 1.4023895375505608e-06, |
|
"loss": 0.0605, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 1.5622895622895623, |
|
"grad_norm": 0.5799228625915099, |
|
"learning_rate": 1.361631240870569e-06, |
|
"loss": 0.0572, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 1.569023569023569, |
|
"grad_norm": 1.1307976257157992, |
|
"learning_rate": 1.321380446634342e-06, |
|
"loss": 0.0683, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 1.5757575757575757, |
|
"grad_norm": 0.53126448503362, |
|
"learning_rate": 1.281642769274552e-06, |
|
"loss": 0.044, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 1.5824915824915826, |
|
"grad_norm": 0.5123950135517319, |
|
"learning_rate": 1.242423751651119e-06, |
|
"loss": 0.0804, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 1.589225589225589, |
|
"grad_norm": 0.6099923775928446, |
|
"learning_rate": 1.2037288642780575e-06, |
|
"loss": 0.0408, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 1.595959595959596, |
|
"grad_norm": 0.6248835012817582, |
|
"learning_rate": 1.165563504560413e-06, |
|
"loss": 0.0544, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 1.6026936026936027, |
|
"grad_norm": 0.6510387000168023, |
|
"learning_rate": 1.1279329960414047e-06, |
|
"loss": 0.0815, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 1.6094276094276094, |
|
"grad_norm": 0.6393553382001962, |
|
"learning_rate": 1.0908425876598512e-06, |
|
"loss": 0.0538, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 1.6161616161616161, |
|
"grad_norm": 0.5759287160512089, |
|
"learning_rate": 1.0542974530180327e-06, |
|
"loss": 0.0574, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 1.6228956228956228, |
|
"grad_norm": 0.7684356456332082, |
|
"learning_rate": 1.0183026896600284e-06, |
|
"loss": 0.0704, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 1.6296296296296298, |
|
"grad_norm": 0.7092834204839467, |
|
"learning_rate": 9.82863318360695e-07, |
|
"loss": 0.0688, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 1.6363636363636362, |
|
"grad_norm": 0.8525774187670475, |
|
"learning_rate": 9.479842824253182e-07, |
|
"loss": 0.058, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 1.6430976430976432, |
|
"grad_norm": 0.494283557256197, |
|
"learning_rate": 9.136704470001101e-07, |
|
"loss": 0.0455, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 1.6498316498316499, |
|
"grad_norm": 0.5072796987904575, |
|
"learning_rate": 8.799265983935734e-07, |
|
"loss": 0.0573, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 1.6565656565656566, |
|
"grad_norm": 0.6544970574576625, |
|
"learning_rate": 8.46757443408886e-07, |
|
"loss": 0.0606, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 1.6632996632996633, |
|
"grad_norm": 0.5325045241743026, |
|
"learning_rate": 8.141676086873574e-07, |
|
"loss": 0.0476, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 1.67003367003367, |
|
"grad_norm": 0.571954858670152, |
|
"learning_rate": 7.821616400630866e-07, |
|
"loss": 0.0553, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 1.676767676767677, |
|
"grad_norm": 0.7124362240685431, |
|
"learning_rate": 7.507440019288742e-07, |
|
"loss": 0.0648, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 1.6835016835016834, |
|
"grad_norm": 0.8074026482316966, |
|
"learning_rate": 7.199190766135001e-07, |
|
"loss": 0.0677, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 1.6902356902356903, |
|
"grad_norm": 0.49821428047955796, |
|
"learning_rate": 6.896911637704534e-07, |
|
"loss": 0.0414, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 1.696969696969697, |
|
"grad_norm": 0.6996554021349439, |
|
"learning_rate": 6.600644797781847e-07, |
|
"loss": 0.0666, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 1.7037037037037037, |
|
"grad_norm": 0.5640914695952624, |
|
"learning_rate": 6.310431571519865e-07, |
|
"loss": 0.076, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 1.7104377104377104, |
|
"grad_norm": 0.5394553999404826, |
|
"learning_rate": 6.026312439675553e-07, |
|
"loss": 0.0498, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 1.7171717171717171, |
|
"grad_norm": 0.47737348342117025, |
|
"learning_rate": 5.748327032963464e-07, |
|
"loss": 0.0446, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 1.723905723905724, |
|
"grad_norm": 0.5870399819271779, |
|
"learning_rate": 5.476514126527771e-07, |
|
"loss": 0.085, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 1.7306397306397305, |
|
"grad_norm": 0.6059552958362416, |
|
"learning_rate": 5.210911634533722e-07, |
|
"loss": 0.0628, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 1.7373737373737375, |
|
"grad_norm": 0.4612493978879547, |
|
"learning_rate": 4.951556604879049e-07, |
|
"loss": 0.045, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 1.7441077441077442, |
|
"grad_norm": 0.9732831834165256, |
|
"learning_rate": 4.698485214026349e-07, |
|
"loss": 0.0719, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 1.7508417508417509, |
|
"grad_norm": 0.7084062235433236, |
|
"learning_rate": 4.4517327619569784e-07, |
|
"loss": 0.0451, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 1.7575757575757576, |
|
"grad_norm": 0.5150893466864267, |
|
"learning_rate": 4.211333667247125e-07, |
|
"loss": 0.054, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 1.7643097643097643, |
|
"grad_norm": 0.7197694152938333, |
|
"learning_rate": 3.9773214622669974e-07, |
|
"loss": 0.0528, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 1.7710437710437712, |
|
"grad_norm": 0.7937673202702573, |
|
"learning_rate": 3.7497287885033763e-07, |
|
"loss": 0.0551, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 1.7777777777777777, |
|
"grad_norm": 0.7807725839915187, |
|
"learning_rate": 3.528587392006716e-07, |
|
"loss": 0.0503, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 1.7845117845117846, |
|
"grad_norm": 0.5505115937056173, |
|
"learning_rate": 3.313928118962906e-07, |
|
"loss": 0.0623, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 1.791245791245791, |
|
"grad_norm": 0.6073147135377622, |
|
"learning_rate": 3.105780911390738e-07, |
|
"loss": 0.0674, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 1.797979797979798, |
|
"grad_norm": 0.4386889000592362, |
|
"learning_rate": 2.904174802965293e-07, |
|
"loss": 0.0337, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 1.8047138047138047, |
|
"grad_norm": 0.5371720109059138, |
|
"learning_rate": 2.7091379149682683e-07, |
|
"loss": 0.056, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 1.8114478114478114, |
|
"grad_norm": 0.4897357275916543, |
|
"learning_rate": 2.520697452365345e-07, |
|
"loss": 0.0442, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 1.8181818181818183, |
|
"grad_norm": 0.6143440216760881, |
|
"learning_rate": 2.3388797000115427e-07, |
|
"loss": 0.0644, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 1.8249158249158248, |
|
"grad_norm": 0.6184051449052006, |
|
"learning_rate": 2.163710018984766e-07, |
|
"loss": 0.0494, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 1.8316498316498318, |
|
"grad_norm": 0.5174263740286721, |
|
"learning_rate": 1.9952128430483718e-07, |
|
"loss": 0.0733, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 1.8383838383838382, |
|
"grad_norm": 0.5711547890486989, |
|
"learning_rate": 1.8334116752429243e-07, |
|
"loss": 0.0546, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 1.8451178451178452, |
|
"grad_norm": 0.543912361463854, |
|
"learning_rate": 1.6783290846078714e-07, |
|
"loss": 0.0467, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 1.8518518518518519, |
|
"grad_norm": 0.6068680622136619, |
|
"learning_rate": 1.5299867030334815e-07, |
|
"loss": 0.0569, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 1.8585858585858586, |
|
"grad_norm": 0.5927823999637648, |
|
"learning_rate": 1.388405222243472e-07, |
|
"loss": 0.0533, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 1.8653198653198653, |
|
"grad_norm": 0.47694438499884656, |
|
"learning_rate": 1.253604390908819e-07, |
|
"loss": 0.0564, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 1.872053872053872, |
|
"grad_norm": 0.5019664348313649, |
|
"learning_rate": 1.1256030118930727e-07, |
|
"loss": 0.0421, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 1.878787878787879, |
|
"grad_norm": 0.5244072558220748, |
|
"learning_rate": 1.0044189396296144e-07, |
|
"loss": 0.0443, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 1.8855218855218854, |
|
"grad_norm": 0.5694979771175117, |
|
"learning_rate": 8.900690776312282e-08, |
|
"loss": 0.0519, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 1.8922558922558923, |
|
"grad_norm": 0.5383345898694687, |
|
"learning_rate": 7.825693761322861e-08, |
|
"loss": 0.0645, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 1.898989898989899, |
|
"grad_norm": 0.58996301402648, |
|
"learning_rate": 6.819348298638839e-08, |
|
"loss": 0.0534, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 1.9057239057239057, |
|
"grad_norm": 0.4519126591505613, |
|
"learning_rate": 5.881794759623194e-08, |
|
"loss": 0.0397, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 1.9124579124579124, |
|
"grad_norm": 0.459397062343545, |
|
"learning_rate": 5.013163920110864e-08, |
|
"loss": 0.0562, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 1.9191919191919191, |
|
"grad_norm": 0.6692683465122977, |
|
"learning_rate": 4.21357694216723e-08, |
|
"loss": 0.0536, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 1.925925925925926, |
|
"grad_norm": 0.5515458851174179, |
|
"learning_rate": 3.483145357187967e-08, |
|
"loss": 0.0709, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 1.9326599326599325, |
|
"grad_norm": 0.7465197849269988, |
|
"learning_rate": 2.8219710503416543e-08, |
|
"loss": 0.0656, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 1.9393939393939394, |
|
"grad_norm": 0.5871951635424402, |
|
"learning_rate": 2.230146246358256e-08, |
|
"loss": 0.0667, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 1.9461279461279462, |
|
"grad_norm": 0.4842964057678575, |
|
"learning_rate": 1.7077534966650767e-08, |
|
"loss": 0.0408, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 1.9528619528619529, |
|
"grad_norm": 0.5849783564235355, |
|
"learning_rate": 1.2548656678721404e-08, |
|
"loss": 0.067, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 1.9595959595959596, |
|
"grad_norm": 0.6264347265804303, |
|
"learning_rate": 8.715459316078756e-09, |
|
"loss": 0.0492, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 1.9663299663299663, |
|
"grad_norm": 0.5290968473577989, |
|
"learning_rate": 5.578477557081074e-09, |
|
"loss": 0.0543, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 1.9730639730639732, |
|
"grad_norm": 0.4771694715604212, |
|
"learning_rate": 3.1381489675746946e-09, |
|
"loss": 0.0419, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 1.9797979797979797, |
|
"grad_norm": 0.7387620656079369, |
|
"learning_rate": 1.3948139398628492e-09, |
|
"loss": 0.0613, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 1.9865319865319866, |
|
"grad_norm": 0.5259388355087269, |
|
"learning_rate": 3.487156452258722e-10, |
|
"loss": 0.0405, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 1.9932659932659933, |
|
"grad_norm": 0.7981313513015809, |
|
"learning_rate": 0.0, |
|
"loss": 0.0774, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 1.9932659932659933, |
|
"eval_loss": 0.06412597745656967, |
|
"eval_runtime": 70.2541, |
|
"eval_samples_per_second": 28.44, |
|
"eval_steps_per_second": 0.897, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 1.9932659932659933, |
|
"step": 296, |
|
"total_flos": 1.5329682729109094e+17, |
|
"train_loss": 0.098479394486325, |
|
"train_runtime": 5160.5268, |
|
"train_samples_per_second": 7.355, |
|
"train_steps_per_second": 0.057 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 296, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 2, |
|
"save_steps": 500, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 1.5329682729109094e+17, |
|
"train_batch_size": 8, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |