|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 2.0202020202020203, |
|
"eval_steps": 38, |
|
"global_step": 600, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.003367003367003367, |
|
"grad_norm": 2.0, |
|
"learning_rate": 2.2727272727272729e-07, |
|
"loss": 1.0833, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.003367003367003367, |
|
"eval_loss": 1.032954454421997, |
|
"eval_runtime": 8.7985, |
|
"eval_samples_per_second": 56.828, |
|
"eval_steps_per_second": 3.637, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.006734006734006734, |
|
"grad_norm": 2.265625, |
|
"learning_rate": 4.5454545454545457e-07, |
|
"loss": 0.996, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.010101010101010102, |
|
"grad_norm": 1.640625, |
|
"learning_rate": 6.818181818181818e-07, |
|
"loss": 1.0489, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.013468013468013467, |
|
"grad_norm": 2.0625, |
|
"learning_rate": 9.090909090909091e-07, |
|
"loss": 1.0546, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.016835016835016835, |
|
"grad_norm": 2.453125, |
|
"learning_rate": 1.1363636363636364e-06, |
|
"loss": 1.0341, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.020202020202020204, |
|
"grad_norm": 1.9921875, |
|
"learning_rate": 1.3636363636363636e-06, |
|
"loss": 1.0136, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.02356902356902357, |
|
"grad_norm": 2.640625, |
|
"learning_rate": 1.590909090909091e-06, |
|
"loss": 1.0305, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.026936026936026935, |
|
"grad_norm": 2.359375, |
|
"learning_rate": 1.8181818181818183e-06, |
|
"loss": 1.0226, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.030303030303030304, |
|
"grad_norm": 2.3125, |
|
"learning_rate": 2.0454545454545457e-06, |
|
"loss": 1.0745, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.03367003367003367, |
|
"grad_norm": 2.359375, |
|
"learning_rate": 2.2727272727272728e-06, |
|
"loss": 1.0232, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.037037037037037035, |
|
"grad_norm": 2.53125, |
|
"learning_rate": 2.5e-06, |
|
"loss": 1.0106, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.04040404040404041, |
|
"grad_norm": 2.125, |
|
"learning_rate": 2.7272727272727272e-06, |
|
"loss": 1.0541, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.04377104377104377, |
|
"grad_norm": 2.15625, |
|
"learning_rate": 2.954545454545455e-06, |
|
"loss": 1.0693, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.04713804713804714, |
|
"grad_norm": 1.8359375, |
|
"learning_rate": 3.181818181818182e-06, |
|
"loss": 1.0338, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.050505050505050504, |
|
"grad_norm": 2.0, |
|
"learning_rate": 3.409090909090909e-06, |
|
"loss": 1.0464, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.05387205387205387, |
|
"grad_norm": 2.34375, |
|
"learning_rate": 3.6363636363636366e-06, |
|
"loss": 1.0684, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.05723905723905724, |
|
"grad_norm": 1.8125, |
|
"learning_rate": 3.863636363636364e-06, |
|
"loss": 1.0117, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.06060606060606061, |
|
"grad_norm": 1.90625, |
|
"learning_rate": 4.0909090909090915e-06, |
|
"loss": 1.0138, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.06397306397306397, |
|
"grad_norm": 1.59375, |
|
"learning_rate": 4.3181818181818185e-06, |
|
"loss": 0.9873, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.06734006734006734, |
|
"grad_norm": 1.59375, |
|
"learning_rate": 4.5454545454545455e-06, |
|
"loss": 1.0524, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.0707070707070707, |
|
"grad_norm": 1.3984375, |
|
"learning_rate": 4.772727272727273e-06, |
|
"loss": 1.0479, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.07407407407407407, |
|
"grad_norm": 1.609375, |
|
"learning_rate": 5e-06, |
|
"loss": 0.9894, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.07744107744107744, |
|
"grad_norm": 1.3671875, |
|
"learning_rate": 5.2272727272727274e-06, |
|
"loss": 0.983, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.08080808080808081, |
|
"grad_norm": 1.6328125, |
|
"learning_rate": 5.4545454545454545e-06, |
|
"loss": 1.0563, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.08417508417508418, |
|
"grad_norm": 1.34375, |
|
"learning_rate": 5.681818181818183e-06, |
|
"loss": 0.9966, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.08754208754208755, |
|
"grad_norm": 1.25, |
|
"learning_rate": 5.90909090909091e-06, |
|
"loss": 0.9922, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.09090909090909091, |
|
"grad_norm": 1.3984375, |
|
"learning_rate": 6.136363636363637e-06, |
|
"loss": 1.0677, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.09427609427609428, |
|
"grad_norm": 1.3828125, |
|
"learning_rate": 6.363636363636364e-06, |
|
"loss": 1.0325, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.09764309764309764, |
|
"grad_norm": 1.1953125, |
|
"learning_rate": 6.590909090909091e-06, |
|
"loss": 1.0159, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.10101010101010101, |
|
"grad_norm": 1.3125, |
|
"learning_rate": 6.818181818181818e-06, |
|
"loss": 1.0384, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.10437710437710437, |
|
"grad_norm": 1.1171875, |
|
"learning_rate": 7.045454545454546e-06, |
|
"loss": 0.9949, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.10774410774410774, |
|
"grad_norm": 1.2421875, |
|
"learning_rate": 7.272727272727273e-06, |
|
"loss": 1.0284, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.1111111111111111, |
|
"grad_norm": 1.15625, |
|
"learning_rate": 7.500000000000001e-06, |
|
"loss": 1.0245, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.11447811447811448, |
|
"grad_norm": 1.1875, |
|
"learning_rate": 7.727272727272727e-06, |
|
"loss": 1.0103, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.11784511784511785, |
|
"grad_norm": 1.125, |
|
"learning_rate": 7.954545454545455e-06, |
|
"loss": 1.0129, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.12121212121212122, |
|
"grad_norm": 1.03125, |
|
"learning_rate": 8.181818181818183e-06, |
|
"loss": 0.965, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.12457912457912458, |
|
"grad_norm": 1.109375, |
|
"learning_rate": 8.40909090909091e-06, |
|
"loss": 1.0027, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.12794612794612795, |
|
"grad_norm": 1.0859375, |
|
"learning_rate": 8.636363636363637e-06, |
|
"loss": 1.0093, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.12794612794612795, |
|
"eval_loss": 0.9909602999687195, |
|
"eval_runtime": 8.9064, |
|
"eval_samples_per_second": 56.139, |
|
"eval_steps_per_second": 3.593, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.13131313131313133, |
|
"grad_norm": 0.96484375, |
|
"learning_rate": 8.863636363636365e-06, |
|
"loss": 0.98, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.13468013468013468, |
|
"grad_norm": 1.0078125, |
|
"learning_rate": 9.090909090909091e-06, |
|
"loss": 1.0, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.13804713804713806, |
|
"grad_norm": 1.109375, |
|
"learning_rate": 9.318181818181819e-06, |
|
"loss": 0.9658, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.1414141414141414, |
|
"grad_norm": 0.95703125, |
|
"learning_rate": 9.545454545454547e-06, |
|
"loss": 0.9656, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.1447811447811448, |
|
"grad_norm": 1.0, |
|
"learning_rate": 9.772727272727273e-06, |
|
"loss": 0.9528, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.14814814814814814, |
|
"grad_norm": 0.88671875, |
|
"learning_rate": 1e-05, |
|
"loss": 0.9546, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.15151515151515152, |
|
"grad_norm": 0.984375, |
|
"learning_rate": 9.999965606811854e-06, |
|
"loss": 0.9406, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.15488215488215487, |
|
"grad_norm": 1.0625, |
|
"learning_rate": 9.99986242772057e-06, |
|
"loss": 0.9975, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.15824915824915825, |
|
"grad_norm": 0.94140625, |
|
"learning_rate": 9.99969046414561e-06, |
|
"loss": 0.9567, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.16161616161616163, |
|
"grad_norm": 0.96484375, |
|
"learning_rate": 9.999449718452724e-06, |
|
"loss": 0.9707, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.16498316498316498, |
|
"grad_norm": 1.0, |
|
"learning_rate": 9.999140193953923e-06, |
|
"loss": 0.9559, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.16835016835016836, |
|
"grad_norm": 0.99609375, |
|
"learning_rate": 9.998761894907414e-06, |
|
"loss": 0.9398, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.1717171717171717, |
|
"grad_norm": 0.83203125, |
|
"learning_rate": 9.998314826517564e-06, |
|
"loss": 0.9329, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.1750841750841751, |
|
"grad_norm": 0.91015625, |
|
"learning_rate": 9.997798994934812e-06, |
|
"loss": 0.9478, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.17845117845117844, |
|
"grad_norm": 0.94140625, |
|
"learning_rate": 9.997214407255602e-06, |
|
"loss": 0.9062, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.18181818181818182, |
|
"grad_norm": 0.83203125, |
|
"learning_rate": 9.996561071522264e-06, |
|
"loss": 0.9089, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.18518518518518517, |
|
"grad_norm": 1.0078125, |
|
"learning_rate": 9.995838996722916e-06, |
|
"loss": 0.9561, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.18855218855218855, |
|
"grad_norm": 0.796875, |
|
"learning_rate": 9.995048192791341e-06, |
|
"loss": 0.8948, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.1919191919191919, |
|
"grad_norm": 0.91796875, |
|
"learning_rate": 9.994188670606845e-06, |
|
"loss": 0.9095, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.19528619528619529, |
|
"grad_norm": 0.8515625, |
|
"learning_rate": 9.993260441994116e-06, |
|
"loss": 0.9137, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.19865319865319866, |
|
"grad_norm": 0.8671875, |
|
"learning_rate": 9.992263519723046e-06, |
|
"loss": 0.9012, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.20202020202020202, |
|
"grad_norm": 0.8046875, |
|
"learning_rate": 9.99119791750857e-06, |
|
"loss": 0.9074, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.2053872053872054, |
|
"grad_norm": 1.0859375, |
|
"learning_rate": 9.990063650010473e-06, |
|
"loss": 0.9106, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.20875420875420875, |
|
"grad_norm": 1.0625, |
|
"learning_rate": 9.988860732833183e-06, |
|
"loss": 0.9012, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.21212121212121213, |
|
"grad_norm": 0.93359375, |
|
"learning_rate": 9.987589182525562e-06, |
|
"loss": 0.8928, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.21548821548821548, |
|
"grad_norm": 0.921875, |
|
"learning_rate": 9.98624901658068e-06, |
|
"loss": 0.9487, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.21885521885521886, |
|
"grad_norm": 0.92578125, |
|
"learning_rate": 9.984840253435569e-06, |
|
"loss": 0.8887, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.2222222222222222, |
|
"grad_norm": 0.8671875, |
|
"learning_rate": 9.983362912470967e-06, |
|
"loss": 0.9152, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.2255892255892256, |
|
"grad_norm": 0.83984375, |
|
"learning_rate": 9.981817014011066e-06, |
|
"loss": 0.8849, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.22895622895622897, |
|
"grad_norm": 0.94921875, |
|
"learning_rate": 9.980202579323212e-06, |
|
"loss": 0.9386, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.23232323232323232, |
|
"grad_norm": 0.8203125, |
|
"learning_rate": 9.978519630617632e-06, |
|
"loss": 0.8651, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.2356902356902357, |
|
"grad_norm": 0.7109375, |
|
"learning_rate": 9.976768191047109e-06, |
|
"loss": 0.9044, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.23905723905723905, |
|
"grad_norm": 1.03125, |
|
"learning_rate": 9.974948284706684e-06, |
|
"loss": 0.8702, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.24242424242424243, |
|
"grad_norm": 0.91796875, |
|
"learning_rate": 9.973059936633308e-06, |
|
"loss": 0.8748, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.24579124579124578, |
|
"grad_norm": 0.66796875, |
|
"learning_rate": 9.971103172805504e-06, |
|
"loss": 0.8443, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.24915824915824916, |
|
"grad_norm": 0.671875, |
|
"learning_rate": 9.969078020143013e-06, |
|
"loss": 0.8605, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.25252525252525254, |
|
"grad_norm": 0.6796875, |
|
"learning_rate": 9.966984506506415e-06, |
|
"loss": 0.8661, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.2558922558922559, |
|
"grad_norm": 0.953125, |
|
"learning_rate": 9.964822660696753e-06, |
|
"loss": 0.9169, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.2558922558922559, |
|
"eval_loss": 0.8667835593223572, |
|
"eval_runtime": 8.6819, |
|
"eval_samples_per_second": 57.591, |
|
"eval_steps_per_second": 3.686, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.25925925925925924, |
|
"grad_norm": 0.86328125, |
|
"learning_rate": 9.96259251245514e-06, |
|
"loss": 0.8719, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.26262626262626265, |
|
"grad_norm": 0.66796875, |
|
"learning_rate": 9.960294092462332e-06, |
|
"loss": 0.8438, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.265993265993266, |
|
"grad_norm": 0.66796875, |
|
"learning_rate": 9.957927432338332e-06, |
|
"loss": 0.8723, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.26936026936026936, |
|
"grad_norm": 1.3828125, |
|
"learning_rate": 9.95549256464193e-06, |
|
"loss": 0.8987, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.2727272727272727, |
|
"grad_norm": 0.69921875, |
|
"learning_rate": 9.952989522870275e-06, |
|
"loss": 0.8188, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.2760942760942761, |
|
"grad_norm": 0.9375, |
|
"learning_rate": 9.950418341458398e-06, |
|
"loss": 0.8521, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.27946127946127947, |
|
"grad_norm": 0.828125, |
|
"learning_rate": 9.947779055778752e-06, |
|
"loss": 0.8305, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.2828282828282828, |
|
"grad_norm": 0.9453125, |
|
"learning_rate": 9.945071702140716e-06, |
|
"loss": 0.856, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.28619528619528617, |
|
"grad_norm": 0.609375, |
|
"learning_rate": 9.9422963177901e-06, |
|
"loss": 0.8512, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.2895622895622896, |
|
"grad_norm": 1.546875, |
|
"learning_rate": 9.939452940908627e-06, |
|
"loss": 0.818, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.29292929292929293, |
|
"grad_norm": 0.640625, |
|
"learning_rate": 9.936541610613417e-06, |
|
"loss": 0.8071, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.2962962962962963, |
|
"grad_norm": 0.82421875, |
|
"learning_rate": 9.933562366956445e-06, |
|
"loss": 0.8559, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.2996632996632997, |
|
"grad_norm": 1.046875, |
|
"learning_rate": 9.930515250923985e-06, |
|
"loss": 0.8646, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.30303030303030304, |
|
"grad_norm": 0.76953125, |
|
"learning_rate": 9.927400304436048e-06, |
|
"loss": 0.8179, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.3063973063973064, |
|
"grad_norm": 1.09375, |
|
"learning_rate": 9.924217570345814e-06, |
|
"loss": 0.8093, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.30976430976430974, |
|
"grad_norm": 0.6015625, |
|
"learning_rate": 9.920967092439028e-06, |
|
"loss": 0.8073, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.31313131313131315, |
|
"grad_norm": 0.84375, |
|
"learning_rate": 9.917648915433413e-06, |
|
"loss": 0.9031, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.3164983164983165, |
|
"grad_norm": 0.76171875, |
|
"learning_rate": 9.914263084978041e-06, |
|
"loss": 0.8568, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.31986531986531985, |
|
"grad_norm": 0.8671875, |
|
"learning_rate": 9.910809647652716e-06, |
|
"loss": 0.7819, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.32323232323232326, |
|
"grad_norm": 0.84765625, |
|
"learning_rate": 9.907288650967324e-06, |
|
"loss": 0.8236, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.3265993265993266, |
|
"grad_norm": 0.70703125, |
|
"learning_rate": 9.903700143361185e-06, |
|
"loss": 0.8418, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.32996632996632996, |
|
"grad_norm": 0.94921875, |
|
"learning_rate": 9.900044174202389e-06, |
|
"loss": 0.8313, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.3333333333333333, |
|
"grad_norm": 0.8125, |
|
"learning_rate": 9.896320793787106e-06, |
|
"loss": 0.8158, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.3367003367003367, |
|
"grad_norm": 0.76171875, |
|
"learning_rate": 9.892530053338909e-06, |
|
"loss": 0.7956, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.3400673400673401, |
|
"grad_norm": 0.94921875, |
|
"learning_rate": 9.888672005008054e-06, |
|
"loss": 0.8154, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.3434343434343434, |
|
"grad_norm": 0.6328125, |
|
"learning_rate": 9.884746701870778e-06, |
|
"loss": 0.8322, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.3468013468013468, |
|
"grad_norm": 0.62890625, |
|
"learning_rate": 9.880754197928553e-06, |
|
"loss": 0.8133, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.3501683501683502, |
|
"grad_norm": 0.76953125, |
|
"learning_rate": 9.876694548107358e-06, |
|
"loss": 0.7882, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.35353535353535354, |
|
"grad_norm": 1.03125, |
|
"learning_rate": 9.87256780825691e-06, |
|
"loss": 0.8264, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.3569023569023569, |
|
"grad_norm": 0.7421875, |
|
"learning_rate": 9.868374035149906e-06, |
|
"loss": 0.7894, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.3602693602693603, |
|
"grad_norm": 0.6171875, |
|
"learning_rate": 9.864113286481237e-06, |
|
"loss": 0.7807, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.36363636363636365, |
|
"grad_norm": 1.015625, |
|
"learning_rate": 9.859785620867197e-06, |
|
"loss": 0.7895, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.367003367003367, |
|
"grad_norm": 1.0625, |
|
"learning_rate": 9.855391097844671e-06, |
|
"loss": 0.7936, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.37037037037037035, |
|
"grad_norm": 0.8984375, |
|
"learning_rate": 9.850929777870324e-06, |
|
"loss": 0.8024, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.37373737373737376, |
|
"grad_norm": 0.6640625, |
|
"learning_rate": 9.846401722319759e-06, |
|
"loss": 0.7542, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.3771043771043771, |
|
"grad_norm": 1.1640625, |
|
"learning_rate": 9.841806993486686e-06, |
|
"loss": 0.7689, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.38047138047138046, |
|
"grad_norm": 0.765625, |
|
"learning_rate": 9.837145654582055e-06, |
|
"loss": 0.7133, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.3838383838383838, |
|
"grad_norm": 0.87109375, |
|
"learning_rate": 9.832417769733185e-06, |
|
"loss": 0.795, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.3838383838383838, |
|
"eval_loss": 0.7676474452018738, |
|
"eval_runtime": 8.7789, |
|
"eval_samples_per_second": 56.955, |
|
"eval_steps_per_second": 3.645, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.3872053872053872, |
|
"grad_norm": 0.86328125, |
|
"learning_rate": 9.827623403982893e-06, |
|
"loss": 0.7992, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.39057239057239057, |
|
"grad_norm": 0.7421875, |
|
"learning_rate": 9.822762623288584e-06, |
|
"loss": 0.7678, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.3939393939393939, |
|
"grad_norm": 0.85546875, |
|
"learning_rate": 9.81783549452136e-06, |
|
"loss": 0.7861, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.39730639730639733, |
|
"grad_norm": 0.71484375, |
|
"learning_rate": 9.812842085465086e-06, |
|
"loss": 0.7447, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.4006734006734007, |
|
"grad_norm": 0.5703125, |
|
"learning_rate": 9.807782464815463e-06, |
|
"loss": 0.7218, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.40404040404040403, |
|
"grad_norm": 0.6171875, |
|
"learning_rate": 9.802656702179088e-06, |
|
"loss": 0.7637, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.4074074074074074, |
|
"grad_norm": 0.54296875, |
|
"learning_rate": 9.797464868072489e-06, |
|
"loss": 0.7588, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.4107744107744108, |
|
"grad_norm": 0.6171875, |
|
"learning_rate": 9.792207033921152e-06, |
|
"loss": 0.7539, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.41414141414141414, |
|
"grad_norm": 0.8046875, |
|
"learning_rate": 9.786883272058554e-06, |
|
"loss": 0.7524, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.4175084175084175, |
|
"grad_norm": 0.6953125, |
|
"learning_rate": 9.781493655725149e-06, |
|
"loss": 0.731, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.4208754208754209, |
|
"grad_norm": 0.64453125, |
|
"learning_rate": 9.776038259067375e-06, |
|
"loss": 0.8022, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.42424242424242425, |
|
"grad_norm": 0.48046875, |
|
"learning_rate": 9.770517157136625e-06, |
|
"loss": 0.7528, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.4276094276094276, |
|
"grad_norm": 0.5546875, |
|
"learning_rate": 9.764930425888216e-06, |
|
"loss": 0.7037, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.43097643097643096, |
|
"grad_norm": 0.546875, |
|
"learning_rate": 9.759278142180348e-06, |
|
"loss": 0.7519, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.43434343434343436, |
|
"grad_norm": 0.61328125, |
|
"learning_rate": 9.753560383773046e-06, |
|
"loss": 0.8163, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.4377104377104377, |
|
"grad_norm": 0.53125, |
|
"learning_rate": 9.747777229327085e-06, |
|
"loss": 0.7292, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.44107744107744107, |
|
"grad_norm": 0.58203125, |
|
"learning_rate": 9.741928758402912e-06, |
|
"loss": 0.7351, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.4444444444444444, |
|
"grad_norm": 0.5234375, |
|
"learning_rate": 9.736015051459551e-06, |
|
"loss": 0.7429, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.4478114478114478, |
|
"grad_norm": 0.65234375, |
|
"learning_rate": 9.730036189853497e-06, |
|
"loss": 0.7616, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.4511784511784512, |
|
"grad_norm": 0.53125, |
|
"learning_rate": 9.723992255837596e-06, |
|
"loss": 0.7263, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.45454545454545453, |
|
"grad_norm": 0.5546875, |
|
"learning_rate": 9.717883332559911e-06, |
|
"loss": 0.7427, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.45791245791245794, |
|
"grad_norm": 0.66796875, |
|
"learning_rate": 9.71170950406258e-06, |
|
"loss": 0.7261, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.4612794612794613, |
|
"grad_norm": 0.58984375, |
|
"learning_rate": 9.705470855280661e-06, |
|
"loss": 0.754, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.46464646464646464, |
|
"grad_norm": 0.6015625, |
|
"learning_rate": 9.699167472040965e-06, |
|
"loss": 0.7554, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.468013468013468, |
|
"grad_norm": 0.5546875, |
|
"learning_rate": 9.692799441060868e-06, |
|
"loss": 0.7488, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.4713804713804714, |
|
"grad_norm": 0.53125, |
|
"learning_rate": 9.686366849947126e-06, |
|
"loss": 0.7066, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.47474747474747475, |
|
"grad_norm": 0.55078125, |
|
"learning_rate": 9.679869787194664e-06, |
|
"loss": 0.7818, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.4781144781144781, |
|
"grad_norm": 0.4765625, |
|
"learning_rate": 9.673308342185366e-06, |
|
"loss": 0.7289, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.48148148148148145, |
|
"grad_norm": 0.62109375, |
|
"learning_rate": 9.666682605186834e-06, |
|
"loss": 0.7301, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.48484848484848486, |
|
"grad_norm": 0.54296875, |
|
"learning_rate": 9.659992667351157e-06, |
|
"loss": 0.7673, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.4882154882154882, |
|
"grad_norm": 0.486328125, |
|
"learning_rate": 9.653238620713652e-06, |
|
"loss": 0.6884, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.49158249158249157, |
|
"grad_norm": 0.6484375, |
|
"learning_rate": 9.646420558191596e-06, |
|
"loss": 0.7043, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.494949494949495, |
|
"grad_norm": 0.51171875, |
|
"learning_rate": 9.639538573582952e-06, |
|
"loss": 0.7122, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.4983164983164983, |
|
"grad_norm": 0.498046875, |
|
"learning_rate": 9.632592761565078e-06, |
|
"loss": 0.7232, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.5016835016835017, |
|
"grad_norm": 0.5625, |
|
"learning_rate": 9.625583217693419e-06, |
|
"loss": 0.7311, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.5050505050505051, |
|
"grad_norm": 0.63671875, |
|
"learning_rate": 9.618510038400203e-06, |
|
"loss": 0.7322, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.5084175084175084, |
|
"grad_norm": 0.75390625, |
|
"learning_rate": 9.611373320993104e-06, |
|
"loss": 0.6983, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.5117845117845118, |
|
"grad_norm": 0.53125, |
|
"learning_rate": 9.604173163653906e-06, |
|
"loss": 0.6999, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.5117845117845118, |
|
"eval_loss": 0.7242563366889954, |
|
"eval_runtime": 8.7081, |
|
"eval_samples_per_second": 57.418, |
|
"eval_steps_per_second": 3.675, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.5151515151515151, |
|
"grad_norm": 0.55859375, |
|
"learning_rate": 9.596909665437155e-06, |
|
"loss": 0.7469, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.5185185185185185, |
|
"grad_norm": 0.5078125, |
|
"learning_rate": 9.589582926268798e-06, |
|
"loss": 0.7176, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.5218855218855218, |
|
"grad_norm": 0.4921875, |
|
"learning_rate": 9.582193046944799e-06, |
|
"loss": 0.7455, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.5252525252525253, |
|
"grad_norm": 0.51171875, |
|
"learning_rate": 9.574740129129767e-06, |
|
"loss": 0.7204, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.5286195286195287, |
|
"grad_norm": 0.474609375, |
|
"learning_rate": 9.567224275355538e-06, |
|
"loss": 0.7321, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.531986531986532, |
|
"grad_norm": 0.53125, |
|
"learning_rate": 9.559645589019787e-06, |
|
"loss": 0.7455, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.5353535353535354, |
|
"grad_norm": 0.494140625, |
|
"learning_rate": 9.552004174384582e-06, |
|
"loss": 0.7464, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.5387205387205387, |
|
"grad_norm": 0.5234375, |
|
"learning_rate": 9.544300136574973e-06, |
|
"loss": 0.7138, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.5420875420875421, |
|
"grad_norm": 0.59375, |
|
"learning_rate": 9.536533581577526e-06, |
|
"loss": 0.7321, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.5454545454545454, |
|
"grad_norm": 0.57421875, |
|
"learning_rate": 9.528704616238875e-06, |
|
"loss": 0.7205, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.5488215488215489, |
|
"grad_norm": 0.484375, |
|
"learning_rate": 9.520813348264252e-06, |
|
"loss": 0.7107, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.5521885521885522, |
|
"grad_norm": 0.455078125, |
|
"learning_rate": 9.512859886216003e-06, |
|
"loss": 0.6975, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.5555555555555556, |
|
"grad_norm": 0.46875, |
|
"learning_rate": 9.504844339512096e-06, |
|
"loss": 0.7209, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.5589225589225589, |
|
"grad_norm": 0.50390625, |
|
"learning_rate": 9.496766818424612e-06, |
|
"loss": 0.6932, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.5622895622895623, |
|
"grad_norm": 0.69140625, |
|
"learning_rate": 9.488627434078232e-06, |
|
"loss": 0.6676, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.5656565656565656, |
|
"grad_norm": 0.443359375, |
|
"learning_rate": 9.480426298448706e-06, |
|
"loss": 0.6909, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.569023569023569, |
|
"grad_norm": 0.4609375, |
|
"learning_rate": 9.472163524361317e-06, |
|
"loss": 0.7142, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.5723905723905723, |
|
"grad_norm": 0.474609375, |
|
"learning_rate": 9.46383922548932e-06, |
|
"loss": 0.7168, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.5757575757575758, |
|
"grad_norm": 0.435546875, |
|
"learning_rate": 9.455453516352385e-06, |
|
"loss": 0.6618, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.5791245791245792, |
|
"grad_norm": 0.45703125, |
|
"learning_rate": 9.447006512315025e-06, |
|
"loss": 0.694, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.5824915824915825, |
|
"grad_norm": 0.59765625, |
|
"learning_rate": 9.438498329584995e-06, |
|
"loss": 0.6858, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.5858585858585859, |
|
"grad_norm": 0.48046875, |
|
"learning_rate": 9.429929085211711e-06, |
|
"loss": 0.7419, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.5892255892255892, |
|
"grad_norm": 0.4375, |
|
"learning_rate": 9.421298897084622e-06, |
|
"loss": 0.6734, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.5925925925925926, |
|
"grad_norm": 0.494140625, |
|
"learning_rate": 9.412607883931608e-06, |
|
"loss": 0.7188, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.5959595959595959, |
|
"grad_norm": 0.41796875, |
|
"learning_rate": 9.403856165317322e-06, |
|
"loss": 0.6732, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.5993265993265994, |
|
"grad_norm": 0.59765625, |
|
"learning_rate": 9.39504386164157e-06, |
|
"loss": 0.6594, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.6026936026936027, |
|
"grad_norm": 0.490234375, |
|
"learning_rate": 9.38617109413764e-06, |
|
"loss": 0.7153, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.6060606060606061, |
|
"grad_norm": 0.60546875, |
|
"learning_rate": 9.377237984870636e-06, |
|
"loss": 0.6762, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.6094276094276094, |
|
"grad_norm": 0.53125, |
|
"learning_rate": 9.3682446567358e-06, |
|
"loss": 0.6839, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.6127946127946128, |
|
"grad_norm": 0.515625, |
|
"learning_rate": 9.359191233456821e-06, |
|
"loss": 0.6949, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.6161616161616161, |
|
"grad_norm": 0.50390625, |
|
"learning_rate": 9.350077839584139e-06, |
|
"loss": 0.7045, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.6195286195286195, |
|
"grad_norm": 0.42578125, |
|
"learning_rate": 9.34090460049322e-06, |
|
"loss": 0.668, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.622895622895623, |
|
"grad_norm": 0.54296875, |
|
"learning_rate": 9.33167164238284e-06, |
|
"loss": 0.7144, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.6262626262626263, |
|
"grad_norm": 0.44921875, |
|
"learning_rate": 9.322379092273345e-06, |
|
"loss": 0.7084, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.6296296296296297, |
|
"grad_norm": 0.427734375, |
|
"learning_rate": 9.313027078004903e-06, |
|
"loss": 0.7555, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.632996632996633, |
|
"grad_norm": 0.44140625, |
|
"learning_rate": 9.303615728235753e-06, |
|
"loss": 0.6963, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.6363636363636364, |
|
"grad_norm": 0.515625, |
|
"learning_rate": 9.29414517244042e-06, |
|
"loss": 0.6785, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 0.6397306397306397, |
|
"grad_norm": 0.5078125, |
|
"learning_rate": 9.284615540907947e-06, |
|
"loss": 0.7246, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.6397306397306397, |
|
"eval_loss": 0.6989265084266663, |
|
"eval_runtime": 8.6934, |
|
"eval_samples_per_second": 57.515, |
|
"eval_steps_per_second": 3.681, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.6430976430976431, |
|
"grad_norm": 0.54296875, |
|
"learning_rate": 9.275026964740101e-06, |
|
"loss": 0.6995, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 0.6464646464646465, |
|
"grad_norm": 0.427734375, |
|
"learning_rate": 9.265379575849561e-06, |
|
"loss": 0.6988, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.6498316498316499, |
|
"grad_norm": 0.48046875, |
|
"learning_rate": 9.255673506958114e-06, |
|
"loss": 0.6569, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 0.6531986531986532, |
|
"grad_norm": 0.486328125, |
|
"learning_rate": 9.245908891594818e-06, |
|
"loss": 0.6645, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.6565656565656566, |
|
"grad_norm": 0.58984375, |
|
"learning_rate": 9.236085864094178e-06, |
|
"loss": 0.6745, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.6599326599326599, |
|
"grad_norm": 0.609375, |
|
"learning_rate": 9.226204559594284e-06, |
|
"loss": 0.7157, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.6632996632996633, |
|
"grad_norm": 0.59375, |
|
"learning_rate": 9.216265114034964e-06, |
|
"loss": 0.7298, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 0.6666666666666666, |
|
"grad_norm": 0.451171875, |
|
"learning_rate": 9.206267664155906e-06, |
|
"loss": 0.6439, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.67003367003367, |
|
"grad_norm": 0.447265625, |
|
"learning_rate": 9.19621234749478e-06, |
|
"loss": 0.6719, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 0.6734006734006734, |
|
"grad_norm": 0.41796875, |
|
"learning_rate": 9.186099302385345e-06, |
|
"loss": 0.6496, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.6767676767676768, |
|
"grad_norm": 0.482421875, |
|
"learning_rate": 9.175928667955547e-06, |
|
"loss": 0.709, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 0.6801346801346801, |
|
"grad_norm": 0.50390625, |
|
"learning_rate": 9.1657005841256e-06, |
|
"loss": 0.7054, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 0.6835016835016835, |
|
"grad_norm": 0.455078125, |
|
"learning_rate": 9.155415191606074e-06, |
|
"loss": 0.6941, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 0.6868686868686869, |
|
"grad_norm": 0.53515625, |
|
"learning_rate": 9.145072631895942e-06, |
|
"loss": 0.6919, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 0.6902356902356902, |
|
"grad_norm": 0.62890625, |
|
"learning_rate": 9.134673047280644e-06, |
|
"loss": 0.7098, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.6936026936026936, |
|
"grad_norm": 0.546875, |
|
"learning_rate": 9.12421658083013e-06, |
|
"loss": 0.6902, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 0.696969696969697, |
|
"grad_norm": 0.58984375, |
|
"learning_rate": 9.113703376396885e-06, |
|
"loss": 0.7228, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 0.7003367003367004, |
|
"grad_norm": 0.484375, |
|
"learning_rate": 9.103133578613959e-06, |
|
"loss": 0.6961, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 0.7037037037037037, |
|
"grad_norm": 0.515625, |
|
"learning_rate": 9.092507332892968e-06, |
|
"loss": 0.6689, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 0.7070707070707071, |
|
"grad_norm": 0.5390625, |
|
"learning_rate": 9.081824785422099e-06, |
|
"loss": 0.6552, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.7104377104377104, |
|
"grad_norm": 0.462890625, |
|
"learning_rate": 9.071086083164099e-06, |
|
"loss": 0.6739, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 0.7138047138047138, |
|
"grad_norm": 0.435546875, |
|
"learning_rate": 9.060291373854252e-06, |
|
"loss": 0.699, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 0.7171717171717171, |
|
"grad_norm": 0.49609375, |
|
"learning_rate": 9.04944080599834e-06, |
|
"loss": 0.6964, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 0.7205387205387206, |
|
"grad_norm": 0.47265625, |
|
"learning_rate": 9.038534528870618e-06, |
|
"loss": 0.678, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 0.7239057239057239, |
|
"grad_norm": 0.455078125, |
|
"learning_rate": 9.027572692511739e-06, |
|
"loss": 0.6847, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.7272727272727273, |
|
"grad_norm": 0.53515625, |
|
"learning_rate": 9.016555447726704e-06, |
|
"loss": 0.6811, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 0.7306397306397306, |
|
"grad_norm": 0.482421875, |
|
"learning_rate": 9.005482946082784e-06, |
|
"loss": 0.6831, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 0.734006734006734, |
|
"grad_norm": 0.474609375, |
|
"learning_rate": 8.994355339907429e-06, |
|
"loss": 0.6803, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 0.7373737373737373, |
|
"grad_norm": 0.443359375, |
|
"learning_rate": 8.98317278228618e-06, |
|
"loss": 0.6639, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 0.7407407407407407, |
|
"grad_norm": 0.474609375, |
|
"learning_rate": 8.971935427060563e-06, |
|
"loss": 0.6509, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.7441077441077442, |
|
"grad_norm": 0.47265625, |
|
"learning_rate": 8.960643428825965e-06, |
|
"loss": 0.647, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 0.7474747474747475, |
|
"grad_norm": 0.498046875, |
|
"learning_rate": 8.949296942929515e-06, |
|
"loss": 0.6686, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 0.7508417508417509, |
|
"grad_norm": 0.5078125, |
|
"learning_rate": 8.937896125467941e-06, |
|
"loss": 0.6512, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 0.7542087542087542, |
|
"grad_norm": 0.60546875, |
|
"learning_rate": 8.92644113328543e-06, |
|
"loss": 0.6544, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 0.7575757575757576, |
|
"grad_norm": 0.474609375, |
|
"learning_rate": 8.91493212397146e-06, |
|
"loss": 0.7091, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.7609427609427609, |
|
"grad_norm": 0.515625, |
|
"learning_rate": 8.90336925585864e-06, |
|
"loss": 0.6522, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 0.7643097643097643, |
|
"grad_norm": 0.4375, |
|
"learning_rate": 8.891752688020532e-06, |
|
"loss": 0.6923, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 0.7676767676767676, |
|
"grad_norm": 0.47265625, |
|
"learning_rate": 8.880082580269454e-06, |
|
"loss": 0.6873, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 0.7676767676767676, |
|
"eval_loss": 0.6816474795341492, |
|
"eval_runtime": 8.7387, |
|
"eval_samples_per_second": 57.217, |
|
"eval_steps_per_second": 3.662, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 0.7710437710437711, |
|
"grad_norm": 0.466796875, |
|
"learning_rate": 8.868359093154292e-06, |
|
"loss": 0.6989, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 0.7744107744107744, |
|
"grad_norm": 0.4921875, |
|
"learning_rate": 8.856582387958286e-06, |
|
"loss": 0.6424, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.7777777777777778, |
|
"grad_norm": 0.546875, |
|
"learning_rate": 8.84475262669681e-06, |
|
"loss": 0.6477, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 0.7811447811447811, |
|
"grad_norm": 0.44921875, |
|
"learning_rate": 8.832869972115148e-06, |
|
"loss": 0.6623, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 0.7845117845117845, |
|
"grad_norm": 0.5625, |
|
"learning_rate": 8.820934587686247e-06, |
|
"loss": 0.6642, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 0.7878787878787878, |
|
"grad_norm": 0.470703125, |
|
"learning_rate": 8.808946637608476e-06, |
|
"loss": 0.6819, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 0.7912457912457912, |
|
"grad_norm": 0.470703125, |
|
"learning_rate": 8.796906286803366e-06, |
|
"loss": 0.7039, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.7946127946127947, |
|
"grad_norm": 0.51171875, |
|
"learning_rate": 8.784813700913336e-06, |
|
"loss": 0.7232, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 0.797979797979798, |
|
"grad_norm": 0.44140625, |
|
"learning_rate": 8.77266904629942e-06, |
|
"loss": 0.6687, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 0.8013468013468014, |
|
"grad_norm": 0.462890625, |
|
"learning_rate": 8.760472490038974e-06, |
|
"loss": 0.6664, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 0.8047138047138047, |
|
"grad_norm": 0.5234375, |
|
"learning_rate": 8.748224199923378e-06, |
|
"loss": 0.6752, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 0.8080808080808081, |
|
"grad_norm": 0.5078125, |
|
"learning_rate": 8.735924344455732e-06, |
|
"loss": 0.6373, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.8114478114478114, |
|
"grad_norm": 0.46484375, |
|
"learning_rate": 8.723573092848535e-06, |
|
"loss": 0.656, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 0.8148148148148148, |
|
"grad_norm": 0.46875, |
|
"learning_rate": 8.71117061502135e-06, |
|
"loss": 0.6747, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 0.8181818181818182, |
|
"grad_norm": 0.39453125, |
|
"learning_rate": 8.698717081598484e-06, |
|
"loss": 0.6878, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 0.8215488215488216, |
|
"grad_norm": 0.3984375, |
|
"learning_rate": 8.68621266390662e-06, |
|
"loss": 0.6381, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 0.8249158249158249, |
|
"grad_norm": 0.44140625, |
|
"learning_rate": 8.673657533972477e-06, |
|
"loss": 0.7127, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.8282828282828283, |
|
"grad_norm": 0.408203125, |
|
"learning_rate": 8.661051864520434e-06, |
|
"loss": 0.6888, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 0.8316498316498316, |
|
"grad_norm": 0.5078125, |
|
"learning_rate": 8.64839582897015e-06, |
|
"loss": 0.6444, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 0.835016835016835, |
|
"grad_norm": 0.42578125, |
|
"learning_rate": 8.635689601434197e-06, |
|
"loss": 0.6876, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 0.8383838383838383, |
|
"grad_norm": 0.6015625, |
|
"learning_rate": 8.622933356715639e-06, |
|
"loss": 0.7125, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 0.8417508417508418, |
|
"grad_norm": 0.51171875, |
|
"learning_rate": 8.61012727030565e-06, |
|
"loss": 0.6826, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.8451178451178452, |
|
"grad_norm": 0.435546875, |
|
"learning_rate": 8.597271518381083e-06, |
|
"loss": 0.6719, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 0.8484848484848485, |
|
"grad_norm": 0.609375, |
|
"learning_rate": 8.584366277802057e-06, |
|
"loss": 0.6723, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 0.8518518518518519, |
|
"grad_norm": 0.5546875, |
|
"learning_rate": 8.571411726109518e-06, |
|
"loss": 0.6899, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 0.8552188552188552, |
|
"grad_norm": 0.5078125, |
|
"learning_rate": 8.558408041522801e-06, |
|
"loss": 0.7042, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 0.8585858585858586, |
|
"grad_norm": 0.455078125, |
|
"learning_rate": 8.545355402937174e-06, |
|
"loss": 0.6429, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.8619528619528619, |
|
"grad_norm": 0.439453125, |
|
"learning_rate": 8.532253989921377e-06, |
|
"loss": 0.6586, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 0.8653198653198653, |
|
"grad_norm": 0.435546875, |
|
"learning_rate": 8.519103982715158e-06, |
|
"loss": 0.6212, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 0.8686868686868687, |
|
"grad_norm": 0.486328125, |
|
"learning_rate": 8.505905562226784e-06, |
|
"loss": 0.6421, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 0.8720538720538721, |
|
"grad_norm": 0.50390625, |
|
"learning_rate": 8.492658910030558e-06, |
|
"loss": 0.6051, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 0.8754208754208754, |
|
"grad_norm": 0.482421875, |
|
"learning_rate": 8.479364208364322e-06, |
|
"loss": 0.6454, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.8787878787878788, |
|
"grad_norm": 0.48046875, |
|
"learning_rate": 8.466021640126946e-06, |
|
"loss": 0.6579, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 0.8821548821548821, |
|
"grad_norm": 0.41015625, |
|
"learning_rate": 8.452631388875814e-06, |
|
"loss": 0.6046, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 0.8855218855218855, |
|
"grad_norm": 0.451171875, |
|
"learning_rate": 8.439193638824296e-06, |
|
"loss": 0.6481, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 0.8888888888888888, |
|
"grad_norm": 0.41796875, |
|
"learning_rate": 8.425708574839221e-06, |
|
"loss": 0.6418, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 0.8922558922558923, |
|
"grad_norm": 0.48046875, |
|
"learning_rate": 8.412176382438327e-06, |
|
"loss": 0.7079, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.8956228956228957, |
|
"grad_norm": 0.5078125, |
|
"learning_rate": 8.398597247787706e-06, |
|
"loss": 0.7014, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 0.8956228956228957, |
|
"eval_loss": 0.66867995262146, |
|
"eval_runtime": 8.7476, |
|
"eval_samples_per_second": 57.158, |
|
"eval_steps_per_second": 3.658, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 0.898989898989899, |
|
"grad_norm": 0.404296875, |
|
"learning_rate": 8.384971357699255e-06, |
|
"loss": 0.631, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 0.9023569023569024, |
|
"grad_norm": 0.484375, |
|
"learning_rate": 8.371298899628091e-06, |
|
"loss": 0.691, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 0.9057239057239057, |
|
"grad_norm": 0.421875, |
|
"learning_rate": 8.357580061669985e-06, |
|
"loss": 0.6443, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 0.9090909090909091, |
|
"grad_norm": 0.4296875, |
|
"learning_rate": 8.343815032558768e-06, |
|
"loss": 0.6683, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.9124579124579124, |
|
"grad_norm": 0.43359375, |
|
"learning_rate": 8.330004001663734e-06, |
|
"loss": 0.6208, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 0.9158249158249159, |
|
"grad_norm": 0.5078125, |
|
"learning_rate": 8.316147158987036e-06, |
|
"loss": 0.6554, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 0.9191919191919192, |
|
"grad_norm": 0.484375, |
|
"learning_rate": 8.30224469516107e-06, |
|
"loss": 0.6884, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 0.9225589225589226, |
|
"grad_norm": 0.451171875, |
|
"learning_rate": 8.288296801445863e-06, |
|
"loss": 0.6633, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 0.9259259259259259, |
|
"grad_norm": 0.462890625, |
|
"learning_rate": 8.274303669726427e-06, |
|
"loss": 0.6521, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.9292929292929293, |
|
"grad_norm": 0.462890625, |
|
"learning_rate": 8.260265492510121e-06, |
|
"loss": 0.6429, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 0.9326599326599326, |
|
"grad_norm": 0.4765625, |
|
"learning_rate": 8.246182462924022e-06, |
|
"loss": 0.6604, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 0.936026936026936, |
|
"grad_norm": 0.408203125, |
|
"learning_rate": 8.232054774712239e-06, |
|
"loss": 0.6438, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 0.9393939393939394, |
|
"grad_norm": 0.4375, |
|
"learning_rate": 8.217882622233268e-06, |
|
"loss": 0.6319, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 0.9427609427609428, |
|
"grad_norm": 0.5, |
|
"learning_rate": 8.203666200457312e-06, |
|
"loss": 0.6349, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.9461279461279462, |
|
"grad_norm": 0.45703125, |
|
"learning_rate": 8.1894057049636e-06, |
|
"loss": 0.6657, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 0.9494949494949495, |
|
"grad_norm": 0.423828125, |
|
"learning_rate": 8.175101331937692e-06, |
|
"loss": 0.6866, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 0.9528619528619529, |
|
"grad_norm": 0.4296875, |
|
"learning_rate": 8.160753278168787e-06, |
|
"loss": 0.6544, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 0.9562289562289562, |
|
"grad_norm": 0.482421875, |
|
"learning_rate": 8.146361741047007e-06, |
|
"loss": 0.676, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 0.9595959595959596, |
|
"grad_norm": 0.416015625, |
|
"learning_rate": 8.131926918560692e-06, |
|
"loss": 0.6102, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.9629629629629629, |
|
"grad_norm": 0.474609375, |
|
"learning_rate": 8.117449009293668e-06, |
|
"loss": 0.6865, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 0.9663299663299664, |
|
"grad_norm": 0.423828125, |
|
"learning_rate": 8.102928212422519e-06, |
|
"loss": 0.6298, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 0.9696969696969697, |
|
"grad_norm": 0.439453125, |
|
"learning_rate": 8.08836472771384e-06, |
|
"loss": 0.6789, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 0.9730639730639731, |
|
"grad_norm": 0.4453125, |
|
"learning_rate": 8.073758755521506e-06, |
|
"loss": 0.6157, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 0.9764309764309764, |
|
"grad_norm": 0.404296875, |
|
"learning_rate": 8.059110496783891e-06, |
|
"loss": 0.6269, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.9797979797979798, |
|
"grad_norm": 0.427734375, |
|
"learning_rate": 8.044420153021123e-06, |
|
"loss": 0.6405, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 0.9831649831649831, |
|
"grad_norm": 0.46875, |
|
"learning_rate": 8.02968792633231e-06, |
|
"loss": 0.6657, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 0.9865319865319865, |
|
"grad_norm": 0.466796875, |
|
"learning_rate": 8.014914019392743e-06, |
|
"loss": 0.6373, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 0.98989898989899, |
|
"grad_norm": 0.59765625, |
|
"learning_rate": 8.00009863545113e-06, |
|
"loss": 0.6495, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 0.9932659932659933, |
|
"grad_norm": 0.478515625, |
|
"learning_rate": 7.985241978326786e-06, |
|
"loss": 0.6003, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.9966329966329966, |
|
"grad_norm": 0.40625, |
|
"learning_rate": 7.970344252406832e-06, |
|
"loss": 0.6543, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 0.6953125, |
|
"learning_rate": 7.955405662643384e-06, |
|
"loss": 0.6625, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 1.0033670033670035, |
|
"grad_norm": 0.412109375, |
|
"learning_rate": 7.940426414550732e-06, |
|
"loss": 0.6776, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 1.0067340067340067, |
|
"grad_norm": 0.419921875, |
|
"learning_rate": 7.925406714202518e-06, |
|
"loss": 0.6661, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 1.0101010101010102, |
|
"grad_norm": 0.4609375, |
|
"learning_rate": 7.910346768228893e-06, |
|
"loss": 0.6743, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 1.0134680134680134, |
|
"grad_norm": 0.41015625, |
|
"learning_rate": 7.895246783813679e-06, |
|
"loss": 0.6527, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 1.0168350168350169, |
|
"grad_norm": 0.462890625, |
|
"learning_rate": 7.880106968691516e-06, |
|
"loss": 0.6195, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 1.02020202020202, |
|
"grad_norm": 0.53125, |
|
"learning_rate": 7.864927531145012e-06, |
|
"loss": 0.5992, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 1.0235690235690236, |
|
"grad_norm": 0.376953125, |
|
"learning_rate": 7.849708680001862e-06, |
|
"loss": 0.6586, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 1.0235690235690236, |
|
"eval_loss": 0.6584768295288086, |
|
"eval_runtime": 8.7375, |
|
"eval_samples_per_second": 57.225, |
|
"eval_steps_per_second": 3.662, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 1.026936026936027, |
|
"grad_norm": 0.404296875, |
|
"learning_rate": 7.834450624631998e-06, |
|
"loss": 0.6381, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 1.0303030303030303, |
|
"grad_norm": 0.388671875, |
|
"learning_rate": 7.81915357494468e-06, |
|
"loss": 0.6492, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 1.0336700336700337, |
|
"grad_norm": 0.419921875, |
|
"learning_rate": 7.803817741385636e-06, |
|
"loss": 0.6632, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 1.037037037037037, |
|
"grad_norm": 0.39453125, |
|
"learning_rate": 7.788443334934148e-06, |
|
"loss": 0.6424, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 1.0404040404040404, |
|
"grad_norm": 0.384765625, |
|
"learning_rate": 7.773030567100158e-06, |
|
"loss": 0.6309, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 1.0437710437710437, |
|
"grad_norm": 0.431640625, |
|
"learning_rate": 7.757579649921354e-06, |
|
"loss": 0.6469, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 1.0471380471380471, |
|
"grad_norm": 0.41796875, |
|
"learning_rate": 7.742090795960258e-06, |
|
"loss": 0.6216, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 1.0505050505050506, |
|
"grad_norm": 0.453125, |
|
"learning_rate": 7.726564218301298e-06, |
|
"loss": 0.6249, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 1.0538720538720538, |
|
"grad_norm": 0.462890625, |
|
"learning_rate": 7.711000130547875e-06, |
|
"loss": 0.671, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 1.0572390572390573, |
|
"grad_norm": 0.451171875, |
|
"learning_rate": 7.695398746819431e-06, |
|
"loss": 0.6604, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 1.0606060606060606, |
|
"grad_norm": 0.40234375, |
|
"learning_rate": 7.679760281748492e-06, |
|
"loss": 0.626, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 1.063973063973064, |
|
"grad_norm": 0.369140625, |
|
"learning_rate": 7.664084950477732e-06, |
|
"loss": 0.6336, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 1.0673400673400673, |
|
"grad_norm": 0.421875, |
|
"learning_rate": 7.648372968656995e-06, |
|
"loss": 0.6813, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 1.0707070707070707, |
|
"grad_norm": 0.515625, |
|
"learning_rate": 7.632624552440337e-06, |
|
"loss": 0.6539, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 1.074074074074074, |
|
"grad_norm": 0.404296875, |
|
"learning_rate": 7.616839918483061e-06, |
|
"loss": 0.6378, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 1.0774410774410774, |
|
"grad_norm": 0.421875, |
|
"learning_rate": 7.601019283938718e-06, |
|
"loss": 0.7115, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 1.0808080808080809, |
|
"grad_norm": 0.443359375, |
|
"learning_rate": 7.58516286645613e-06, |
|
"loss": 0.6628, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 1.0841750841750841, |
|
"grad_norm": 0.52734375, |
|
"learning_rate": 7.569270884176401e-06, |
|
"loss": 0.633, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 1.0875420875420876, |
|
"grad_norm": 0.4375, |
|
"learning_rate": 7.553343555729904e-06, |
|
"loss": 0.6339, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 1.0909090909090908, |
|
"grad_norm": 0.40625, |
|
"learning_rate": 7.5373811002332785e-06, |
|
"loss": 0.6557, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 1.0942760942760943, |
|
"grad_norm": 0.4375, |
|
"learning_rate": 7.521383737286423e-06, |
|
"loss": 0.663, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 1.0976430976430978, |
|
"grad_norm": 0.451171875, |
|
"learning_rate": 7.505351686969458e-06, |
|
"loss": 0.6663, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 1.101010101010101, |
|
"grad_norm": 0.400390625, |
|
"learning_rate": 7.4892851698397174e-06, |
|
"loss": 0.6393, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 1.1043771043771045, |
|
"grad_norm": 0.435546875, |
|
"learning_rate": 7.4731844069286965e-06, |
|
"loss": 0.6323, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 1.1077441077441077, |
|
"grad_norm": 0.458984375, |
|
"learning_rate": 7.457049619739023e-06, |
|
"loss": 0.6084, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 1.1111111111111112, |
|
"grad_norm": 0.44921875, |
|
"learning_rate": 7.440881030241407e-06, |
|
"loss": 0.6615, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 1.1144781144781144, |
|
"grad_norm": 0.51171875, |
|
"learning_rate": 7.424678860871584e-06, |
|
"loss": 0.6438, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 1.1178451178451179, |
|
"grad_norm": 0.462890625, |
|
"learning_rate": 7.408443334527257e-06, |
|
"loss": 0.6653, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 1.121212121212121, |
|
"grad_norm": 0.4140625, |
|
"learning_rate": 7.3921746745650315e-06, |
|
"loss": 0.6286, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 1.1245791245791246, |
|
"grad_norm": 0.4375, |
|
"learning_rate": 7.3758731047973416e-06, |
|
"loss": 0.6356, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 1.127946127946128, |
|
"grad_norm": 0.416015625, |
|
"learning_rate": 7.3595388494893684e-06, |
|
"loss": 0.6423, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 1.1313131313131313, |
|
"grad_norm": 0.412109375, |
|
"learning_rate": 7.343172133355958e-06, |
|
"loss": 0.6313, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 1.1346801346801347, |
|
"grad_norm": 0.41015625, |
|
"learning_rate": 7.326773181558532e-06, |
|
"loss": 0.637, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 1.138047138047138, |
|
"grad_norm": 0.361328125, |
|
"learning_rate": 7.310342219701981e-06, |
|
"loss": 0.6153, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 1.1414141414141414, |
|
"grad_norm": 0.388671875, |
|
"learning_rate": 7.293879473831573e-06, |
|
"loss": 0.6582, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 1.144781144781145, |
|
"grad_norm": 0.45703125, |
|
"learning_rate": 7.2773851704298304e-06, |
|
"loss": 0.6364, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 1.1481481481481481, |
|
"grad_norm": 0.404296875, |
|
"learning_rate": 7.260859536413429e-06, |
|
"loss": 0.6126, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 1.1515151515151516, |
|
"grad_norm": 0.44921875, |
|
"learning_rate": 7.244302799130064e-06, |
|
"loss": 0.6532, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 1.1515151515151516, |
|
"eval_loss": 0.6510886549949646, |
|
"eval_runtime": 8.7236, |
|
"eval_samples_per_second": 57.316, |
|
"eval_steps_per_second": 3.668, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 1.1548821548821548, |
|
"grad_norm": 0.38671875, |
|
"learning_rate": 7.227715186355326e-06, |
|
"loss": 0.6067, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 1.1582491582491583, |
|
"grad_norm": 0.458984375, |
|
"learning_rate": 7.211096926289572e-06, |
|
"loss": 0.6796, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 1.1616161616161615, |
|
"grad_norm": 0.443359375, |
|
"learning_rate": 7.19444824755478e-06, |
|
"loss": 0.6242, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 1.164983164983165, |
|
"grad_norm": 0.4296875, |
|
"learning_rate": 7.1777693791914025e-06, |
|
"loss": 0.6288, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 1.1683501683501682, |
|
"grad_norm": 0.447265625, |
|
"learning_rate": 7.161060550655227e-06, |
|
"loss": 0.6908, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 1.1717171717171717, |
|
"grad_norm": 0.45703125, |
|
"learning_rate": 7.144321991814205e-06, |
|
"loss": 0.6809, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 1.1750841750841752, |
|
"grad_norm": 0.4140625, |
|
"learning_rate": 7.1275539329452984e-06, |
|
"loss": 0.6421, |
|
"step": 349 |
|
}, |
|
{ |
|
"epoch": 1.1784511784511784, |
|
"grad_norm": 0.400390625, |
|
"learning_rate": 7.110756604731307e-06, |
|
"loss": 0.6613, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 1.1818181818181819, |
|
"grad_norm": 0.419921875, |
|
"learning_rate": 7.0939302382577e-06, |
|
"loss": 0.6596, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 1.1851851851851851, |
|
"grad_norm": 0.40234375, |
|
"learning_rate": 7.0770750650094335e-06, |
|
"loss": 0.6539, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 1.1885521885521886, |
|
"grad_norm": 0.384765625, |
|
"learning_rate": 7.060191316867764e-06, |
|
"loss": 0.6451, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 1.1919191919191918, |
|
"grad_norm": 0.466796875, |
|
"learning_rate": 7.0432792261070605e-06, |
|
"loss": 0.6211, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 1.1952861952861953, |
|
"grad_norm": 0.404296875, |
|
"learning_rate": 7.026339025391615e-06, |
|
"loss": 0.6458, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 1.1986531986531987, |
|
"grad_norm": 0.486328125, |
|
"learning_rate": 7.00937094777243e-06, |
|
"loss": 0.6579, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 1.202020202020202, |
|
"grad_norm": 0.4140625, |
|
"learning_rate": 6.992375226684016e-06, |
|
"loss": 0.6337, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 1.2053872053872055, |
|
"grad_norm": 0.466796875, |
|
"learning_rate": 6.975352095941194e-06, |
|
"loss": 0.671, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 1.2087542087542087, |
|
"grad_norm": 0.455078125, |
|
"learning_rate": 6.958301789735853e-06, |
|
"loss": 0.6332, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 1.2121212121212122, |
|
"grad_norm": 0.484375, |
|
"learning_rate": 6.941224542633748e-06, |
|
"loss": 0.6147, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 1.2154882154882154, |
|
"grad_norm": 0.3984375, |
|
"learning_rate": 6.924120589571275e-06, |
|
"loss": 0.6628, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 1.2188552188552189, |
|
"grad_norm": 0.375, |
|
"learning_rate": 6.906990165852218e-06, |
|
"loss": 0.6444, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 1.2222222222222223, |
|
"grad_norm": 0.431640625, |
|
"learning_rate": 6.889833507144534e-06, |
|
"loss": 0.6607, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 1.2255892255892256, |
|
"grad_norm": 0.4765625, |
|
"learning_rate": 6.872650849477098e-06, |
|
"loss": 0.669, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 1.228956228956229, |
|
"grad_norm": 0.462890625, |
|
"learning_rate": 6.855442429236463e-06, |
|
"loss": 0.6476, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 1.2323232323232323, |
|
"grad_norm": 0.4140625, |
|
"learning_rate": 6.838208483163601e-06, |
|
"loss": 0.6087, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 1.2356902356902357, |
|
"grad_norm": 0.50390625, |
|
"learning_rate": 6.820949248350653e-06, |
|
"loss": 0.6628, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 1.239057239057239, |
|
"grad_norm": 0.5859375, |
|
"learning_rate": 6.803664962237665e-06, |
|
"loss": 0.6354, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 1.2424242424242424, |
|
"grad_norm": 0.4296875, |
|
"learning_rate": 6.786355862609315e-06, |
|
"loss": 0.6096, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 1.2457912457912457, |
|
"grad_norm": 0.375, |
|
"learning_rate": 6.769022187591654e-06, |
|
"loss": 0.5832, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 1.2491582491582491, |
|
"grad_norm": 0.390625, |
|
"learning_rate": 6.751664175648817e-06, |
|
"loss": 0.6322, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 1.2525252525252526, |
|
"grad_norm": 0.43359375, |
|
"learning_rate": 6.734282065579757e-06, |
|
"loss": 0.6183, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 1.2558922558922558, |
|
"grad_norm": 0.41796875, |
|
"learning_rate": 6.716876096514944e-06, |
|
"loss": 0.6339, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 1.2592592592592593, |
|
"grad_norm": 0.439453125, |
|
"learning_rate": 6.699446507913083e-06, |
|
"loss": 0.6411, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 1.2626262626262625, |
|
"grad_norm": 0.40625, |
|
"learning_rate": 6.681993539557828e-06, |
|
"loss": 0.6566, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 1.265993265993266, |
|
"grad_norm": 0.392578125, |
|
"learning_rate": 6.664517431554466e-06, |
|
"loss": 0.6109, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 1.2693602693602695, |
|
"grad_norm": 0.37109375, |
|
"learning_rate": 6.6470184243266235e-06, |
|
"loss": 0.6523, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 1.2727272727272727, |
|
"grad_norm": 0.404296875, |
|
"learning_rate": 6.6294967586129614e-06, |
|
"loss": 0.6139, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 1.2760942760942762, |
|
"grad_norm": 0.42578125, |
|
"learning_rate": 6.611952675463859e-06, |
|
"loss": 0.6616, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 1.2794612794612794, |
|
"grad_norm": 0.455078125, |
|
"learning_rate": 6.594386416238095e-06, |
|
"loss": 0.6334, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 1.2794612794612794, |
|
"eval_loss": 0.6462748050689697, |
|
"eval_runtime": 8.7236, |
|
"eval_samples_per_second": 57.316, |
|
"eval_steps_per_second": 3.668, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 1.2828282828282829, |
|
"grad_norm": 0.44921875, |
|
"learning_rate": 6.576798222599532e-06, |
|
"loss": 0.6525, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 1.2861952861952861, |
|
"grad_norm": 0.412109375, |
|
"learning_rate": 6.559188336513794e-06, |
|
"loss": 0.6356, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 1.2895622895622896, |
|
"grad_norm": 0.396484375, |
|
"learning_rate": 6.5415570002449315e-06, |
|
"loss": 0.6291, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 1.2929292929292928, |
|
"grad_norm": 0.5546875, |
|
"learning_rate": 6.523904456352089e-06, |
|
"loss": 0.6585, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 1.2962962962962963, |
|
"grad_norm": 0.37890625, |
|
"learning_rate": 6.5062309476861714e-06, |
|
"loss": 0.632, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 1.2996632996632997, |
|
"grad_norm": 0.349609375, |
|
"learning_rate": 6.488536717386504e-06, |
|
"loss": 0.6063, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 1.303030303030303, |
|
"grad_norm": 0.46484375, |
|
"learning_rate": 6.470822008877482e-06, |
|
"loss": 0.6473, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 1.3063973063973064, |
|
"grad_norm": 0.396484375, |
|
"learning_rate": 6.453087065865229e-06, |
|
"loss": 0.6384, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 1.3097643097643097, |
|
"grad_norm": 0.423828125, |
|
"learning_rate": 6.435332132334233e-06, |
|
"loss": 0.6375, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 1.3131313131313131, |
|
"grad_norm": 0.421875, |
|
"learning_rate": 6.4175574525440065e-06, |
|
"loss": 0.6372, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 1.3164983164983166, |
|
"grad_norm": 0.447265625, |
|
"learning_rate": 6.3997632710257095e-06, |
|
"loss": 0.6345, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 1.3198653198653199, |
|
"grad_norm": 0.490234375, |
|
"learning_rate": 6.381949832578796e-06, |
|
"loss": 0.6516, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 1.3232323232323233, |
|
"grad_norm": 0.416015625, |
|
"learning_rate": 6.364117382267642e-06, |
|
"loss": 0.6354, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 1.3265993265993266, |
|
"grad_norm": 0.390625, |
|
"learning_rate": 6.346266165418173e-06, |
|
"loss": 0.615, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 1.32996632996633, |
|
"grad_norm": 0.46484375, |
|
"learning_rate": 6.328396427614496e-06, |
|
"loss": 0.6414, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 1.3333333333333333, |
|
"grad_norm": 0.37890625, |
|
"learning_rate": 6.310508414695511e-06, |
|
"loss": 0.6424, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 1.3367003367003367, |
|
"grad_norm": 0.4140625, |
|
"learning_rate": 6.292602372751536e-06, |
|
"loss": 0.6381, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 1.34006734006734, |
|
"grad_norm": 0.369140625, |
|
"learning_rate": 6.274678548120921e-06, |
|
"loss": 0.6542, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 1.3434343434343434, |
|
"grad_norm": 0.431640625, |
|
"learning_rate": 6.25673718738665e-06, |
|
"loss": 0.6461, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 1.3468013468013469, |
|
"grad_norm": 0.37109375, |
|
"learning_rate": 6.238778537372966e-06, |
|
"loss": 0.6367, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 1.3501683501683501, |
|
"grad_norm": 0.369140625, |
|
"learning_rate": 6.2208028451419575e-06, |
|
"loss": 0.6461, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 1.3535353535353536, |
|
"grad_norm": 0.431640625, |
|
"learning_rate": 6.2028103579901725e-06, |
|
"loss": 0.6532, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 1.3569023569023568, |
|
"grad_norm": 0.361328125, |
|
"learning_rate": 6.184801323445208e-06, |
|
"loss": 0.629, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 1.3602693602693603, |
|
"grad_norm": 0.390625, |
|
"learning_rate": 6.166775989262309e-06, |
|
"loss": 0.6723, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 1.3636363636363638, |
|
"grad_norm": 0.4296875, |
|
"learning_rate": 6.1487346034209605e-06, |
|
"loss": 0.6587, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 1.367003367003367, |
|
"grad_norm": 0.361328125, |
|
"learning_rate": 6.130677414121474e-06, |
|
"loss": 0.6294, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 1.3703703703703702, |
|
"grad_norm": 0.439453125, |
|
"learning_rate": 6.112604669781572e-06, |
|
"loss": 0.6628, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 1.3737373737373737, |
|
"grad_norm": 0.439453125, |
|
"learning_rate": 6.094516619032975e-06, |
|
"loss": 0.6444, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 1.3771043771043772, |
|
"grad_norm": 0.4609375, |
|
"learning_rate": 6.076413510717973e-06, |
|
"loss": 0.6869, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 1.3804713804713804, |
|
"grad_norm": 0.421875, |
|
"learning_rate": 6.058295593886013e-06, |
|
"loss": 0.638, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 1.3838383838383839, |
|
"grad_norm": 0.404296875, |
|
"learning_rate": 6.040163117790261e-06, |
|
"loss": 0.6067, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 1.387205387205387, |
|
"grad_norm": 0.60546875, |
|
"learning_rate": 6.022016331884185e-06, |
|
"loss": 0.6231, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 1.3905723905723906, |
|
"grad_norm": 0.5, |
|
"learning_rate": 6.003855485818111e-06, |
|
"loss": 0.6267, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 1.393939393939394, |
|
"grad_norm": 0.4375, |
|
"learning_rate": 5.985680829435798e-06, |
|
"loss": 0.6307, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 1.3973063973063973, |
|
"grad_norm": 0.421875, |
|
"learning_rate": 5.967492612770999e-06, |
|
"loss": 0.647, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 1.4006734006734007, |
|
"grad_norm": 0.458984375, |
|
"learning_rate": 5.9492910860440135e-06, |
|
"loss": 0.6344, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 1.404040404040404, |
|
"grad_norm": 0.419921875, |
|
"learning_rate": 5.931076499658258e-06, |
|
"loss": 0.646, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 1.4074074074074074, |
|
"grad_norm": 0.3984375, |
|
"learning_rate": 5.91284910419681e-06, |
|
"loss": 0.5968, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 1.4074074074074074, |
|
"eval_loss": 0.6433593034744263, |
|
"eval_runtime": 8.8065, |
|
"eval_samples_per_second": 56.776, |
|
"eval_steps_per_second": 3.634, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 1.410774410774411, |
|
"grad_norm": 0.40625, |
|
"learning_rate": 5.894609150418964e-06, |
|
"loss": 0.6348, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 1.4141414141414141, |
|
"grad_norm": 0.439453125, |
|
"learning_rate": 5.876356889256788e-06, |
|
"loss": 0.6255, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 1.4175084175084174, |
|
"grad_norm": 0.359375, |
|
"learning_rate": 5.858092571811661e-06, |
|
"loss": 0.6316, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 1.4208754208754208, |
|
"grad_norm": 0.36328125, |
|
"learning_rate": 5.839816449350824e-06, |
|
"loss": 0.627, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 1.4242424242424243, |
|
"grad_norm": 0.41015625, |
|
"learning_rate": 5.821528773303929e-06, |
|
"loss": 0.6571, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 1.4276094276094276, |
|
"grad_norm": 0.37109375, |
|
"learning_rate": 5.803229795259566e-06, |
|
"loss": 0.6146, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 1.430976430976431, |
|
"grad_norm": 0.39453125, |
|
"learning_rate": 5.784919766961813e-06, |
|
"loss": 0.6388, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 1.4343434343434343, |
|
"grad_norm": 0.431640625, |
|
"learning_rate": 5.76659894030677e-06, |
|
"loss": 0.6474, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 1.4377104377104377, |
|
"grad_norm": 0.4296875, |
|
"learning_rate": 5.748267567339093e-06, |
|
"loss": 0.6553, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 1.4410774410774412, |
|
"grad_norm": 0.361328125, |
|
"learning_rate": 5.729925900248524e-06, |
|
"loss": 0.6037, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 1.4444444444444444, |
|
"grad_norm": 0.373046875, |
|
"learning_rate": 5.711574191366427e-06, |
|
"loss": 0.5933, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 1.4478114478114479, |
|
"grad_norm": 0.46484375, |
|
"learning_rate": 5.693212693162311e-06, |
|
"loss": 0.676, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 1.4511784511784511, |
|
"grad_norm": 0.369140625, |
|
"learning_rate": 5.674841658240365e-06, |
|
"loss": 0.6578, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 1.4545454545454546, |
|
"grad_norm": 0.380859375, |
|
"learning_rate": 5.656461339335968e-06, |
|
"loss": 0.6329, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 1.457912457912458, |
|
"grad_norm": 0.392578125, |
|
"learning_rate": 5.638071989312228e-06, |
|
"loss": 0.6635, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 1.4612794612794613, |
|
"grad_norm": 0.41015625, |
|
"learning_rate": 5.619673861156498e-06, |
|
"loss": 0.6239, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 1.4646464646464645, |
|
"grad_norm": 0.34765625, |
|
"learning_rate": 5.60126720797689e-06, |
|
"loss": 0.6189, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 1.468013468013468, |
|
"grad_norm": 0.4296875, |
|
"learning_rate": 5.5828522829987965e-06, |
|
"loss": 0.6171, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 1.4713804713804715, |
|
"grad_norm": 0.53125, |
|
"learning_rate": 5.564429339561411e-06, |
|
"loss": 0.6071, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 1.4747474747474747, |
|
"grad_norm": 0.54296875, |
|
"learning_rate": 5.5459986311142365e-06, |
|
"loss": 0.6652, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 1.4781144781144782, |
|
"grad_norm": 0.3671875, |
|
"learning_rate": 5.527560411213604e-06, |
|
"loss": 0.5801, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 1.4814814814814814, |
|
"grad_norm": 0.412109375, |
|
"learning_rate": 5.509114933519179e-06, |
|
"loss": 0.6077, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 1.4848484848484849, |
|
"grad_norm": 0.3515625, |
|
"learning_rate": 5.490662451790472e-06, |
|
"loss": 0.6046, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 1.4882154882154883, |
|
"grad_norm": 0.4609375, |
|
"learning_rate": 5.4722032198833595e-06, |
|
"loss": 0.6483, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 1.4915824915824916, |
|
"grad_norm": 0.4453125, |
|
"learning_rate": 5.453737491746572e-06, |
|
"loss": 0.6694, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 1.494949494949495, |
|
"grad_norm": 0.490234375, |
|
"learning_rate": 5.435265521418215e-06, |
|
"loss": 0.6048, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 1.4983164983164983, |
|
"grad_norm": 0.369140625, |
|
"learning_rate": 5.41678756302227e-06, |
|
"loss": 0.6167, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 1.5016835016835017, |
|
"grad_norm": 0.38671875, |
|
"learning_rate": 5.3983038707650955e-06, |
|
"loss": 0.6296, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 1.5050505050505052, |
|
"grad_norm": 0.392578125, |
|
"learning_rate": 5.379814698931935e-06, |
|
"loss": 0.6524, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 1.5084175084175084, |
|
"grad_norm": 0.4375, |
|
"learning_rate": 5.361320301883413e-06, |
|
"loss": 0.6326, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 1.5117845117845117, |
|
"grad_norm": 0.54296875, |
|
"learning_rate": 5.342820934052042e-06, |
|
"loss": 0.6675, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 1.5151515151515151, |
|
"grad_norm": 0.451171875, |
|
"learning_rate": 5.324316849938715e-06, |
|
"loss": 0.6346, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 1.5185185185185186, |
|
"grad_norm": 0.416015625, |
|
"learning_rate": 5.3058083041092145e-06, |
|
"loss": 0.6381, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 1.5218855218855218, |
|
"grad_norm": 0.37109375, |
|
"learning_rate": 5.2872955511906974e-06, |
|
"loss": 0.6551, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 1.5252525252525253, |
|
"grad_norm": 0.384765625, |
|
"learning_rate": 5.268778845868201e-06, |
|
"loss": 0.6186, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 1.5286195286195285, |
|
"grad_norm": 0.5, |
|
"learning_rate": 5.250258442881139e-06, |
|
"loss": 0.6483, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 1.531986531986532, |
|
"grad_norm": 0.42578125, |
|
"learning_rate": 5.231734597019792e-06, |
|
"loss": 0.6584, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 1.5353535353535355, |
|
"grad_norm": 0.36328125, |
|
"learning_rate": 5.2132075631218075e-06, |
|
"loss": 0.6366, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 1.5353535353535355, |
|
"eval_loss": 0.6413553357124329, |
|
"eval_runtime": 8.8625, |
|
"eval_samples_per_second": 56.418, |
|
"eval_steps_per_second": 3.611, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 1.5387205387205387, |
|
"grad_norm": 0.421875, |
|
"learning_rate": 5.194677596068689e-06, |
|
"loss": 0.6116, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 1.542087542087542, |
|
"grad_norm": 0.359375, |
|
"learning_rate": 5.176144950782296e-06, |
|
"loss": 0.6505, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 1.5454545454545454, |
|
"grad_norm": 0.435546875, |
|
"learning_rate": 5.1576098822213285e-06, |
|
"loss": 0.6524, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 1.5488215488215489, |
|
"grad_norm": 0.361328125, |
|
"learning_rate": 5.139072645377829e-06, |
|
"loss": 0.5802, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 1.5521885521885523, |
|
"grad_norm": 0.384765625, |
|
"learning_rate": 5.120533495273664e-06, |
|
"loss": 0.6071, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 1.5555555555555556, |
|
"grad_norm": 0.3671875, |
|
"learning_rate": 5.101992686957028e-06, |
|
"loss": 0.6279, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 1.5589225589225588, |
|
"grad_norm": 0.57421875, |
|
"learning_rate": 5.0834504754989225e-06, |
|
"loss": 0.6595, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 1.5622895622895623, |
|
"grad_norm": 0.443359375, |
|
"learning_rate": 5.064907115989655e-06, |
|
"loss": 0.6813, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 1.5656565656565657, |
|
"grad_norm": 0.46484375, |
|
"learning_rate": 5.046362863535327e-06, |
|
"loss": 0.6255, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 1.569023569023569, |
|
"grad_norm": 0.412109375, |
|
"learning_rate": 5.0278179732543244e-06, |
|
"loss": 0.6627, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 1.5723905723905722, |
|
"grad_norm": 0.44921875, |
|
"learning_rate": 5.009272700273804e-06, |
|
"loss": 0.6293, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 1.5757575757575757, |
|
"grad_norm": 0.373046875, |
|
"learning_rate": 4.990727299726196e-06, |
|
"loss": 0.6171, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 1.5791245791245792, |
|
"grad_norm": 0.39453125, |
|
"learning_rate": 4.972182026745678e-06, |
|
"loss": 0.6408, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 1.5824915824915826, |
|
"grad_norm": 0.376953125, |
|
"learning_rate": 4.953637136464675e-06, |
|
"loss": 0.597, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 1.5858585858585859, |
|
"grad_norm": 0.375, |
|
"learning_rate": 4.935092884010347e-06, |
|
"loss": 0.6075, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 1.589225589225589, |
|
"grad_norm": 0.390625, |
|
"learning_rate": 4.91654952450108e-06, |
|
"loss": 0.6153, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 1.5925925925925926, |
|
"grad_norm": 0.3515625, |
|
"learning_rate": 4.898007313042975e-06, |
|
"loss": 0.6299, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 1.595959595959596, |
|
"grad_norm": 0.359375, |
|
"learning_rate": 4.879466504726337e-06, |
|
"loss": 0.6415, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 1.5993265993265995, |
|
"grad_norm": 0.40625, |
|
"learning_rate": 4.860927354622173e-06, |
|
"loss": 0.646, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 1.6026936026936027, |
|
"grad_norm": 0.396484375, |
|
"learning_rate": 4.842390117778673e-06, |
|
"loss": 0.6473, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 1.606060606060606, |
|
"grad_norm": 0.419921875, |
|
"learning_rate": 4.8238550492177065e-06, |
|
"loss": 0.6569, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 1.6094276094276094, |
|
"grad_norm": 0.50390625, |
|
"learning_rate": 4.805322403931312e-06, |
|
"loss": 0.6664, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 1.612794612794613, |
|
"grad_norm": 0.404296875, |
|
"learning_rate": 4.786792436878193e-06, |
|
"loss": 0.6481, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 1.6161616161616161, |
|
"grad_norm": 0.3671875, |
|
"learning_rate": 4.768265402980209e-06, |
|
"loss": 0.5946, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 1.6195286195286194, |
|
"grad_norm": 0.427734375, |
|
"learning_rate": 4.749741557118863e-06, |
|
"loss": 0.6426, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 1.6228956228956228, |
|
"grad_norm": 0.38671875, |
|
"learning_rate": 4.7312211541318e-06, |
|
"loss": 0.6235, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 1.6262626262626263, |
|
"grad_norm": 0.359375, |
|
"learning_rate": 4.712704448809305e-06, |
|
"loss": 0.6413, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 1.6296296296296298, |
|
"grad_norm": 0.470703125, |
|
"learning_rate": 4.694191695890788e-06, |
|
"loss": 0.6307, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 1.632996632996633, |
|
"grad_norm": 0.365234375, |
|
"learning_rate": 4.6756831500612846e-06, |
|
"loss": 0.6458, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 1.6363636363636362, |
|
"grad_norm": 0.427734375, |
|
"learning_rate": 4.657179065947959e-06, |
|
"loss": 0.6619, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 1.6397306397306397, |
|
"grad_norm": 0.453125, |
|
"learning_rate": 4.638679698116588e-06, |
|
"loss": 0.6408, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 1.6430976430976432, |
|
"grad_norm": 0.4453125, |
|
"learning_rate": 4.620185301068067e-06, |
|
"loss": 0.6118, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 1.6464646464646466, |
|
"grad_norm": 0.37109375, |
|
"learning_rate": 4.601696129234907e-06, |
|
"loss": 0.6229, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 1.6498316498316499, |
|
"grad_norm": 0.396484375, |
|
"learning_rate": 4.583212436977731e-06, |
|
"loss": 0.6485, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 1.6531986531986531, |
|
"grad_norm": 0.4453125, |
|
"learning_rate": 4.564734478581785e-06, |
|
"loss": 0.6682, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 1.6565656565656566, |
|
"grad_norm": 0.3984375, |
|
"learning_rate": 4.546262508253429e-06, |
|
"loss": 0.6672, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 1.65993265993266, |
|
"grad_norm": 0.41796875, |
|
"learning_rate": 4.527796780116642e-06, |
|
"loss": 0.6653, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 1.6632996632996633, |
|
"grad_norm": 0.408203125, |
|
"learning_rate": 4.509337548209529e-06, |
|
"loss": 0.6126, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 1.6632996632996633, |
|
"eval_loss": 0.6399661302566528, |
|
"eval_runtime": 8.7141, |
|
"eval_samples_per_second": 57.378, |
|
"eval_steps_per_second": 3.672, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 1.6666666666666665, |
|
"grad_norm": 0.37109375, |
|
"learning_rate": 4.4908850664808245e-06, |
|
"loss": 0.6047, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 1.67003367003367, |
|
"grad_norm": 0.376953125, |
|
"learning_rate": 4.472439588786398e-06, |
|
"loss": 0.6404, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 1.6734006734006734, |
|
"grad_norm": 0.3671875, |
|
"learning_rate": 4.454001368885764e-06, |
|
"loss": 0.6162, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 1.676767676767677, |
|
"grad_norm": 0.359375, |
|
"learning_rate": 4.4355706604385905e-06, |
|
"loss": 0.6247, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 1.6801346801346801, |
|
"grad_norm": 0.451171875, |
|
"learning_rate": 4.417147717001205e-06, |
|
"loss": 0.6434, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 1.6835016835016834, |
|
"grad_norm": 0.37890625, |
|
"learning_rate": 4.3987327920231124e-06, |
|
"loss": 0.621, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 1.6868686868686869, |
|
"grad_norm": 0.412109375, |
|
"learning_rate": 4.380326138843503e-06, |
|
"loss": 0.6744, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 1.6902356902356903, |
|
"grad_norm": 0.51171875, |
|
"learning_rate": 4.3619280106877716e-06, |
|
"loss": 0.6396, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 1.6936026936026936, |
|
"grad_norm": 0.369140625, |
|
"learning_rate": 4.343538660664033e-06, |
|
"loss": 0.6274, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 1.696969696969697, |
|
"grad_norm": 0.43359375, |
|
"learning_rate": 4.325158341759638e-06, |
|
"loss": 0.649, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 1.7003367003367003, |
|
"grad_norm": 0.404296875, |
|
"learning_rate": 4.30678730683769e-06, |
|
"loss": 0.6166, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 1.7037037037037037, |
|
"grad_norm": 0.359375, |
|
"learning_rate": 4.2884258086335755e-06, |
|
"loss": 0.6649, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 1.7070707070707072, |
|
"grad_norm": 0.33203125, |
|
"learning_rate": 4.270074099751478e-06, |
|
"loss": 0.6178, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 1.7104377104377104, |
|
"grad_norm": 0.451171875, |
|
"learning_rate": 4.251732432660909e-06, |
|
"loss": 0.6422, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 1.7138047138047137, |
|
"grad_norm": 0.37109375, |
|
"learning_rate": 4.233401059693231e-06, |
|
"loss": 0.6151, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 1.7171717171717171, |
|
"grad_norm": 0.37890625, |
|
"learning_rate": 4.215080233038189e-06, |
|
"loss": 0.6273, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 1.7205387205387206, |
|
"grad_norm": 0.38671875, |
|
"learning_rate": 4.1967702047404365e-06, |
|
"loss": 0.6463, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 1.723905723905724, |
|
"grad_norm": 0.38671875, |
|
"learning_rate": 4.178471226696073e-06, |
|
"loss": 0.6308, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 1.7272727272727273, |
|
"grad_norm": 0.453125, |
|
"learning_rate": 4.160183550649176e-06, |
|
"loss": 0.6747, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 1.7306397306397305, |
|
"grad_norm": 0.41796875, |
|
"learning_rate": 4.141907428188341e-06, |
|
"loss": 0.6368, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 1.734006734006734, |
|
"grad_norm": 0.38671875, |
|
"learning_rate": 4.123643110743214e-06, |
|
"loss": 0.6121, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 1.7373737373737375, |
|
"grad_norm": 0.4375, |
|
"learning_rate": 4.105390849581037e-06, |
|
"loss": 0.5883, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 1.7407407407407407, |
|
"grad_norm": 0.392578125, |
|
"learning_rate": 4.087150895803192e-06, |
|
"loss": 0.6139, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 1.7441077441077442, |
|
"grad_norm": 0.41015625, |
|
"learning_rate": 4.0689235003417425e-06, |
|
"loss": 0.6331, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 1.7474747474747474, |
|
"grad_norm": 0.455078125, |
|
"learning_rate": 4.050708913955987e-06, |
|
"loss": 0.6562, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 1.7508417508417509, |
|
"grad_norm": 0.54296875, |
|
"learning_rate": 4.032507387229002e-06, |
|
"loss": 0.6649, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 1.7542087542087543, |
|
"grad_norm": 0.4453125, |
|
"learning_rate": 4.014319170564202e-06, |
|
"loss": 0.6331, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 1.7575757575757576, |
|
"grad_norm": 0.388671875, |
|
"learning_rate": 3.996144514181891e-06, |
|
"loss": 0.6033, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 1.7609427609427608, |
|
"grad_norm": 0.40234375, |
|
"learning_rate": 3.977983668115818e-06, |
|
"loss": 0.5911, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 1.7643097643097643, |
|
"grad_norm": 0.50390625, |
|
"learning_rate": 3.959836882209739e-06, |
|
"loss": 0.5901, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 1.7676767676767677, |
|
"grad_norm": 0.37109375, |
|
"learning_rate": 3.941704406113989e-06, |
|
"loss": 0.6544, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 1.7710437710437712, |
|
"grad_norm": 0.345703125, |
|
"learning_rate": 3.923586489282028e-06, |
|
"loss": 0.5829, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 1.7744107744107744, |
|
"grad_norm": 0.421875, |
|
"learning_rate": 3.905483380967027e-06, |
|
"loss": 0.6231, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 1.7777777777777777, |
|
"grad_norm": 0.390625, |
|
"learning_rate": 3.887395330218429e-06, |
|
"loss": 0.625, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 1.7811447811447811, |
|
"grad_norm": 0.419921875, |
|
"learning_rate": 3.869322585878527e-06, |
|
"loss": 0.6534, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 1.7845117845117846, |
|
"grad_norm": 0.328125, |
|
"learning_rate": 3.8512653965790395e-06, |
|
"loss": 0.6285, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 1.7878787878787878, |
|
"grad_norm": 0.376953125, |
|
"learning_rate": 3.8332240107376925e-06, |
|
"loss": 0.6146, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 1.791245791245791, |
|
"grad_norm": 0.365234375, |
|
"learning_rate": 3.815198676554794e-06, |
|
"loss": 0.6564, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 1.791245791245791, |
|
"eval_loss": 0.6390766501426697, |
|
"eval_runtime": 8.8208, |
|
"eval_samples_per_second": 56.684, |
|
"eval_steps_per_second": 3.628, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 1.7946127946127945, |
|
"grad_norm": 0.38671875, |
|
"learning_rate": 3.7971896420098296e-06, |
|
"loss": 0.6665, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 1.797979797979798, |
|
"grad_norm": 0.376953125, |
|
"learning_rate": 3.779197154858044e-06, |
|
"loss": 0.5799, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 1.8013468013468015, |
|
"grad_norm": 0.353515625, |
|
"learning_rate": 3.761221462627034e-06, |
|
"loss": 0.6358, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 1.8047138047138047, |
|
"grad_norm": 0.380859375, |
|
"learning_rate": 3.7432628126133497e-06, |
|
"loss": 0.6194, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 1.808080808080808, |
|
"grad_norm": 0.384765625, |
|
"learning_rate": 3.7253214518790814e-06, |
|
"loss": 0.6313, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 1.8114478114478114, |
|
"grad_norm": 0.37109375, |
|
"learning_rate": 3.7073976272484647e-06, |
|
"loss": 0.6127, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 1.8148148148148149, |
|
"grad_norm": 0.349609375, |
|
"learning_rate": 3.689491585304491e-06, |
|
"loss": 0.617, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 1.8181818181818183, |
|
"grad_norm": 0.40625, |
|
"learning_rate": 3.671603572385506e-06, |
|
"loss": 0.5871, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 1.8215488215488216, |
|
"grad_norm": 0.37109375, |
|
"learning_rate": 3.6537338345818273e-06, |
|
"loss": 0.667, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 1.8249158249158248, |
|
"grad_norm": 0.380859375, |
|
"learning_rate": 3.63588261773236e-06, |
|
"loss": 0.6431, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 1.8282828282828283, |
|
"grad_norm": 0.369140625, |
|
"learning_rate": 3.6180501674212052e-06, |
|
"loss": 0.6173, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 1.8316498316498318, |
|
"grad_norm": 0.375, |
|
"learning_rate": 3.600236728974292e-06, |
|
"loss": 0.6238, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 1.835016835016835, |
|
"grad_norm": 0.353515625, |
|
"learning_rate": 3.5824425474559956e-06, |
|
"loss": 0.6155, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 1.8383838383838382, |
|
"grad_norm": 0.3671875, |
|
"learning_rate": 3.5646678676657687e-06, |
|
"loss": 0.655, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 1.8417508417508417, |
|
"grad_norm": 0.40625, |
|
"learning_rate": 3.546912934134773e-06, |
|
"loss": 0.63, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 1.8451178451178452, |
|
"grad_norm": 0.353515625, |
|
"learning_rate": 3.529177991122519e-06, |
|
"loss": 0.6469, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 1.8484848484848486, |
|
"grad_norm": 0.400390625, |
|
"learning_rate": 3.5114632826134977e-06, |
|
"loss": 0.6211, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 1.8518518518518519, |
|
"grad_norm": 0.373046875, |
|
"learning_rate": 3.4937690523138302e-06, |
|
"loss": 0.6184, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 1.855218855218855, |
|
"grad_norm": 0.390625, |
|
"learning_rate": 3.4760955436479134e-06, |
|
"loss": 0.637, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 1.8585858585858586, |
|
"grad_norm": 0.39453125, |
|
"learning_rate": 3.4584429997550685e-06, |
|
"loss": 0.6394, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 1.861952861952862, |
|
"grad_norm": 0.443359375, |
|
"learning_rate": 3.440811663486206e-06, |
|
"loss": 0.6373, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 1.8653198653198653, |
|
"grad_norm": 0.376953125, |
|
"learning_rate": 3.423201777400469e-06, |
|
"loss": 0.6505, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 1.8686868686868687, |
|
"grad_norm": 0.3671875, |
|
"learning_rate": 3.4056135837619077e-06, |
|
"loss": 0.6813, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 1.872053872053872, |
|
"grad_norm": 0.50390625, |
|
"learning_rate": 3.388047324536143e-06, |
|
"loss": 0.6306, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 1.8754208754208754, |
|
"grad_norm": 0.4296875, |
|
"learning_rate": 3.3705032413870402e-06, |
|
"loss": 0.6565, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 1.878787878787879, |
|
"grad_norm": 0.390625, |
|
"learning_rate": 3.3529815756733773e-06, |
|
"loss": 0.6375, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 1.8821548821548821, |
|
"grad_norm": 0.3984375, |
|
"learning_rate": 3.335482568445536e-06, |
|
"loss": 0.623, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 1.8855218855218854, |
|
"grad_norm": 0.45703125, |
|
"learning_rate": 3.318006460442173e-06, |
|
"loss": 0.6253, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 1.8888888888888888, |
|
"grad_norm": 0.330078125, |
|
"learning_rate": 3.3005534920869175e-06, |
|
"loss": 0.6161, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 1.8922558922558923, |
|
"grad_norm": 0.369140625, |
|
"learning_rate": 3.2831239034850593e-06, |
|
"loss": 0.627, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 1.8956228956228958, |
|
"grad_norm": 0.3984375, |
|
"learning_rate": 3.265717934420244e-06, |
|
"loss": 0.6625, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 1.898989898989899, |
|
"grad_norm": 0.380859375, |
|
"learning_rate": 3.248335824351183e-06, |
|
"loss": 0.642, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 1.9023569023569022, |
|
"grad_norm": 0.380859375, |
|
"learning_rate": 3.230977812408348e-06, |
|
"loss": 0.618, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 1.9057239057239057, |
|
"grad_norm": 0.359375, |
|
"learning_rate": 3.213644137390686e-06, |
|
"loss": 0.6347, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 1.9090909090909092, |
|
"grad_norm": 0.392578125, |
|
"learning_rate": 3.196335037762337e-06, |
|
"loss": 0.6344, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 1.9124579124579124, |
|
"grad_norm": 0.3984375, |
|
"learning_rate": 3.1790507516493473e-06, |
|
"loss": 0.6328, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 1.9158249158249159, |
|
"grad_norm": 0.3671875, |
|
"learning_rate": 3.1617915168363994e-06, |
|
"loss": 0.6214, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 1.9191919191919191, |
|
"grad_norm": 0.416015625, |
|
"learning_rate": 3.1445575707635383e-06, |
|
"loss": 0.6296, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 1.9191919191919191, |
|
"eval_loss": 0.6386533379554749, |
|
"eval_runtime": 8.8263, |
|
"eval_samples_per_second": 56.649, |
|
"eval_steps_per_second": 3.626, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 1.9225589225589226, |
|
"grad_norm": 0.33984375, |
|
"learning_rate": 3.127349150522903e-06, |
|
"loss": 0.6003, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 1.925925925925926, |
|
"grad_norm": 0.427734375, |
|
"learning_rate": 3.110166492855468e-06, |
|
"loss": 0.636, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 1.9292929292929293, |
|
"grad_norm": 0.404296875, |
|
"learning_rate": 3.093009834147784e-06, |
|
"loss": 0.6082, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 1.9326599326599325, |
|
"grad_norm": 0.421875, |
|
"learning_rate": 3.0758794104287275e-06, |
|
"loss": 0.6572, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 1.936026936026936, |
|
"grad_norm": 0.384765625, |
|
"learning_rate": 3.0587754573662508e-06, |
|
"loss": 0.6485, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 1.9393939393939394, |
|
"grad_norm": 0.388671875, |
|
"learning_rate": 3.041698210264149e-06, |
|
"loss": 0.6575, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 1.942760942760943, |
|
"grad_norm": 0.388671875, |
|
"learning_rate": 3.0246479040588077e-06, |
|
"loss": 0.6596, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 1.9461279461279462, |
|
"grad_norm": 0.3671875, |
|
"learning_rate": 3.0076247733159846e-06, |
|
"loss": 0.6281, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 1.9494949494949494, |
|
"grad_norm": 0.40625, |
|
"learning_rate": 2.9906290522275738e-06, |
|
"loss": 0.6262, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 1.9528619528619529, |
|
"grad_norm": 0.39453125, |
|
"learning_rate": 2.9736609746083853e-06, |
|
"loss": 0.6568, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 1.9562289562289563, |
|
"grad_norm": 0.3671875, |
|
"learning_rate": 2.9567207738929395e-06, |
|
"loss": 0.5815, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 1.9595959595959596, |
|
"grad_norm": 0.4765625, |
|
"learning_rate": 2.939808683132238e-06, |
|
"loss": 0.643, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 1.9629629629629628, |
|
"grad_norm": 0.361328125, |
|
"learning_rate": 2.9229249349905686e-06, |
|
"loss": 0.6048, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 1.9663299663299663, |
|
"grad_norm": 0.39453125, |
|
"learning_rate": 2.906069761742302e-06, |
|
"loss": 0.6503, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 1.9696969696969697, |
|
"grad_norm": 0.365234375, |
|
"learning_rate": 2.889243395268696e-06, |
|
"loss": 0.63, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 1.9730639730639732, |
|
"grad_norm": 0.36328125, |
|
"learning_rate": 2.872446067054703e-06, |
|
"loss": 0.6258, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 1.9764309764309764, |
|
"grad_norm": 0.408203125, |
|
"learning_rate": 2.8556780081857966e-06, |
|
"loss": 0.6202, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 1.9797979797979797, |
|
"grad_norm": 0.36328125, |
|
"learning_rate": 2.8389394493447732e-06, |
|
"loss": 0.626, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 1.9831649831649831, |
|
"grad_norm": 0.3671875, |
|
"learning_rate": 2.8222306208085988e-06, |
|
"loss": 0.6197, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 1.9865319865319866, |
|
"grad_norm": 0.412109375, |
|
"learning_rate": 2.805551752445222e-06, |
|
"loss": 0.6703, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 1.98989898989899, |
|
"grad_norm": 0.3671875, |
|
"learning_rate": 2.7889030737104283e-06, |
|
"loss": 0.6288, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 1.9932659932659933, |
|
"grad_norm": 0.451171875, |
|
"learning_rate": 2.772284813644675e-06, |
|
"loss": 0.612, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 1.9966329966329965, |
|
"grad_norm": 0.384765625, |
|
"learning_rate": 2.755697200869937e-06, |
|
"loss": 0.6372, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"grad_norm": 0.37890625, |
|
"learning_rate": 2.7391404635865725e-06, |
|
"loss": 0.6565, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 2.0033670033670035, |
|
"grad_norm": 0.380859375, |
|
"learning_rate": 2.7226148295701704e-06, |
|
"loss": 0.6427, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 2.006734006734007, |
|
"grad_norm": 0.427734375, |
|
"learning_rate": 2.7061205261684307e-06, |
|
"loss": 0.6403, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 2.01010101010101, |
|
"grad_norm": 0.5546875, |
|
"learning_rate": 2.689657780298019e-06, |
|
"loss": 0.6027, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 2.0134680134680134, |
|
"grad_norm": 0.345703125, |
|
"learning_rate": 2.6732268184414695e-06, |
|
"loss": 0.5689, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 2.016835016835017, |
|
"grad_norm": 0.423828125, |
|
"learning_rate": 2.6568278666440417e-06, |
|
"loss": 0.6573, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 2.0202020202020203, |
|
"grad_norm": 0.404296875, |
|
"learning_rate": 2.6404611505106337e-06, |
|
"loss": 0.6421, |
|
"step": 600 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 891, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 3, |
|
"save_steps": 75, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": false |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 1.091501379551232e+18, |
|
"train_batch_size": 2, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|