|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 0.9990917347865577, |
|
"eval_steps": 184, |
|
"global_step": 550, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0018165304268846503, |
|
"grad_norm": 38.5, |
|
"learning_rate": 2.0000000000000002e-07, |
|
"loss": 0.8686, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.0018165304268846503, |
|
"eval_loss": 0.866981029510498, |
|
"eval_runtime": 34.9333, |
|
"eval_samples_per_second": 5.725, |
|
"eval_steps_per_second": 1.431, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.0036330608537693005, |
|
"grad_norm": 45.5, |
|
"learning_rate": 4.0000000000000003e-07, |
|
"loss": 0.8814, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.005449591280653951, |
|
"grad_norm": 34.75, |
|
"learning_rate": 6.000000000000001e-07, |
|
"loss": 0.8713, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.007266121707538601, |
|
"grad_norm": 26.625, |
|
"learning_rate": 8.000000000000001e-07, |
|
"loss": 0.8797, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.009082652134423252, |
|
"grad_norm": 48.0, |
|
"learning_rate": 1.0000000000000002e-06, |
|
"loss": 0.8693, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.010899182561307902, |
|
"grad_norm": 102.5, |
|
"learning_rate": 1.2000000000000002e-06, |
|
"loss": 0.877, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.012715712988192553, |
|
"grad_norm": 73.5, |
|
"learning_rate": 1.4000000000000001e-06, |
|
"loss": 0.8732, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.014532243415077202, |
|
"grad_norm": 143.0, |
|
"learning_rate": 1.6000000000000001e-06, |
|
"loss": 0.8666, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.01634877384196185, |
|
"grad_norm": 58.75, |
|
"learning_rate": 1.8000000000000001e-06, |
|
"loss": 0.8559, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.018165304268846504, |
|
"grad_norm": 56.0, |
|
"learning_rate": 2.0000000000000003e-06, |
|
"loss": 0.8493, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.019981834695731154, |
|
"grad_norm": 68.5, |
|
"learning_rate": 2.2e-06, |
|
"loss": 0.8313, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.021798365122615803, |
|
"grad_norm": 76.5, |
|
"learning_rate": 2.4000000000000003e-06, |
|
"loss": 0.8413, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.023614895549500452, |
|
"grad_norm": 73.0, |
|
"learning_rate": 2.6e-06, |
|
"loss": 0.8287, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.025431425976385105, |
|
"grad_norm": 21.875, |
|
"learning_rate": 2.8000000000000003e-06, |
|
"loss": 0.8349, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.027247956403269755, |
|
"grad_norm": 62.75, |
|
"learning_rate": 3e-06, |
|
"loss": 0.8052, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.029064486830154404, |
|
"grad_norm": 95.0, |
|
"learning_rate": 3.2000000000000003e-06, |
|
"loss": 0.8093, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.030881017257039057, |
|
"grad_norm": 26.625, |
|
"learning_rate": 3.4000000000000005e-06, |
|
"loss": 0.7839, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.0326975476839237, |
|
"grad_norm": 10.9375, |
|
"learning_rate": 3.6000000000000003e-06, |
|
"loss": 0.796, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.03451407811080836, |
|
"grad_norm": 10.5, |
|
"learning_rate": 3.8000000000000005e-06, |
|
"loss": 0.7889, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.03633060853769301, |
|
"grad_norm": 11.8125, |
|
"learning_rate": 4.000000000000001e-06, |
|
"loss": 0.776, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.03814713896457766, |
|
"grad_norm": 5.65625, |
|
"learning_rate": 4.2000000000000004e-06, |
|
"loss": 0.7824, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.03996366939146231, |
|
"grad_norm": 2.234375, |
|
"learning_rate": 4.4e-06, |
|
"loss": 0.7597, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.04178019981834696, |
|
"grad_norm": 16.25, |
|
"learning_rate": 4.600000000000001e-06, |
|
"loss": 0.7666, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.043596730245231606, |
|
"grad_norm": 11.75, |
|
"learning_rate": 4.800000000000001e-06, |
|
"loss": 0.7579, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.045413260672116255, |
|
"grad_norm": 67.5, |
|
"learning_rate": 5e-06, |
|
"loss": 0.7548, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.047229791099000905, |
|
"grad_norm": 78.5, |
|
"learning_rate": 5.2e-06, |
|
"loss": 0.754, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.04904632152588556, |
|
"grad_norm": 71.5, |
|
"learning_rate": 5.400000000000001e-06, |
|
"loss": 0.7631, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.05086285195277021, |
|
"grad_norm": 47.25, |
|
"learning_rate": 5.600000000000001e-06, |
|
"loss": 0.7508, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.05267938237965486, |
|
"grad_norm": 14.6875, |
|
"learning_rate": 5.8e-06, |
|
"loss": 0.7522, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.05449591280653951, |
|
"grad_norm": 7.5625, |
|
"learning_rate": 6e-06, |
|
"loss": 0.7282, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.05631244323342416, |
|
"grad_norm": 8.375, |
|
"learning_rate": 6.200000000000001e-06, |
|
"loss": 0.7432, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.05812897366030881, |
|
"grad_norm": 11.9375, |
|
"learning_rate": 6.4000000000000006e-06, |
|
"loss": 0.7436, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.05994550408719346, |
|
"grad_norm": 3.046875, |
|
"learning_rate": 6.600000000000001e-06, |
|
"loss": 0.7403, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.061762034514078114, |
|
"grad_norm": 3.6875, |
|
"learning_rate": 6.800000000000001e-06, |
|
"loss": 0.7307, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.06357856494096276, |
|
"grad_norm": 2.171875, |
|
"learning_rate": 7e-06, |
|
"loss": 0.7297, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.0653950953678474, |
|
"grad_norm": 35.0, |
|
"learning_rate": 7.2000000000000005e-06, |
|
"loss": 0.7254, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.06721162579473206, |
|
"grad_norm": 2.609375, |
|
"learning_rate": 7.4e-06, |
|
"loss": 0.726, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.06902815622161672, |
|
"grad_norm": 2.734375, |
|
"learning_rate": 7.600000000000001e-06, |
|
"loss": 0.7124, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.07084468664850137, |
|
"grad_norm": 2.234375, |
|
"learning_rate": 7.800000000000002e-06, |
|
"loss": 0.7159, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.07266121707538602, |
|
"grad_norm": 5.78125, |
|
"learning_rate": 8.000000000000001e-06, |
|
"loss": 0.7216, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.07447774750227067, |
|
"grad_norm": 6.4375, |
|
"learning_rate": 8.2e-06, |
|
"loss": 0.7005, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.07629427792915532, |
|
"grad_norm": 5.21875, |
|
"learning_rate": 8.400000000000001e-06, |
|
"loss": 0.7049, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.07811080835603997, |
|
"grad_norm": 5.28125, |
|
"learning_rate": 8.6e-06, |
|
"loss": 0.6968, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.07992733878292461, |
|
"grad_norm": 5.21875, |
|
"learning_rate": 8.8e-06, |
|
"loss": 0.6837, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.08174386920980926, |
|
"grad_norm": 2.25, |
|
"learning_rate": 9e-06, |
|
"loss": 0.6973, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.08356039963669391, |
|
"grad_norm": 24.25, |
|
"learning_rate": 9.200000000000002e-06, |
|
"loss": 0.6861, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.08537693006357856, |
|
"grad_norm": 14.375, |
|
"learning_rate": 9.4e-06, |
|
"loss": 0.6894, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.08719346049046321, |
|
"grad_norm": 7.84375, |
|
"learning_rate": 9.600000000000001e-06, |
|
"loss": 0.6908, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.08900999091734786, |
|
"grad_norm": 3.28125, |
|
"learning_rate": 9.800000000000001e-06, |
|
"loss": 0.696, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.09082652134423251, |
|
"grad_norm": 5.875, |
|
"learning_rate": 1e-05, |
|
"loss": 0.6732, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.09264305177111716, |
|
"grad_norm": 3.109375, |
|
"learning_rate": 1.02e-05, |
|
"loss": 0.6765, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.09445958219800181, |
|
"grad_norm": 3.953125, |
|
"learning_rate": 1.04e-05, |
|
"loss": 0.663, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.09627611262488647, |
|
"grad_norm": 3.203125, |
|
"learning_rate": 1.0600000000000002e-05, |
|
"loss": 0.678, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.09809264305177112, |
|
"grad_norm": 4.8125, |
|
"learning_rate": 1.0800000000000002e-05, |
|
"loss": 0.6655, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.09990917347865577, |
|
"grad_norm": 2.59375, |
|
"learning_rate": 1.1000000000000001e-05, |
|
"loss": 0.6566, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.10172570390554042, |
|
"grad_norm": 2.375, |
|
"learning_rate": 1.1200000000000001e-05, |
|
"loss": 0.6616, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.10354223433242507, |
|
"grad_norm": 4.4375, |
|
"learning_rate": 1.14e-05, |
|
"loss": 0.6524, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.10535876475930972, |
|
"grad_norm": 1.4375, |
|
"learning_rate": 1.16e-05, |
|
"loss": 0.6599, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.10717529518619437, |
|
"grad_norm": 1.8828125, |
|
"learning_rate": 1.18e-05, |
|
"loss": 0.6574, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.10899182561307902, |
|
"grad_norm": 3.0, |
|
"learning_rate": 1.2e-05, |
|
"loss": 0.6606, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.11080835603996367, |
|
"grad_norm": 3.234375, |
|
"learning_rate": 1.22e-05, |
|
"loss": 0.6541, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.11262488646684832, |
|
"grad_norm": 3.15625, |
|
"learning_rate": 1.2400000000000002e-05, |
|
"loss": 0.6331, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.11444141689373297, |
|
"grad_norm": 1.390625, |
|
"learning_rate": 1.2600000000000001e-05, |
|
"loss": 0.6632, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.11625794732061762, |
|
"grad_norm": 8.125, |
|
"learning_rate": 1.2800000000000001e-05, |
|
"loss": 0.6609, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.11807447774750227, |
|
"grad_norm": 6.875, |
|
"learning_rate": 1.3000000000000001e-05, |
|
"loss": 0.6529, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.11989100817438691, |
|
"grad_norm": 2.609375, |
|
"learning_rate": 1.3200000000000002e-05, |
|
"loss": 0.6448, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.12170753860127158, |
|
"grad_norm": 3.078125, |
|
"learning_rate": 1.3400000000000002e-05, |
|
"loss": 0.6516, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.12352406902815623, |
|
"grad_norm": 3.484375, |
|
"learning_rate": 1.3600000000000002e-05, |
|
"loss": 0.6437, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.12534059945504086, |
|
"grad_norm": 2.140625, |
|
"learning_rate": 1.38e-05, |
|
"loss": 0.635, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.1271571298819255, |
|
"grad_norm": 1.828125, |
|
"learning_rate": 1.4e-05, |
|
"loss": 0.6374, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.12897366030881016, |
|
"grad_norm": 1.7265625, |
|
"learning_rate": 1.4200000000000001e-05, |
|
"loss": 0.648, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.1307901907356948, |
|
"grad_norm": 2.015625, |
|
"learning_rate": 1.4400000000000001e-05, |
|
"loss": 0.6387, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.13260672116257946, |
|
"grad_norm": 1.7734375, |
|
"learning_rate": 1.46e-05, |
|
"loss": 0.6249, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.1344232515894641, |
|
"grad_norm": 1.453125, |
|
"learning_rate": 1.48e-05, |
|
"loss": 0.6266, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.1362397820163488, |
|
"grad_norm": 2.265625, |
|
"learning_rate": 1.5000000000000002e-05, |
|
"loss": 0.6143, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.13805631244323344, |
|
"grad_norm": 1.8203125, |
|
"learning_rate": 1.5200000000000002e-05, |
|
"loss": 0.6152, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.13987284287011809, |
|
"grad_norm": 1.7578125, |
|
"learning_rate": 1.54e-05, |
|
"loss": 0.6254, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.14168937329700274, |
|
"grad_norm": 1.9375, |
|
"learning_rate": 1.5600000000000003e-05, |
|
"loss": 0.6048, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.14350590372388738, |
|
"grad_norm": 1.5390625, |
|
"learning_rate": 1.58e-05, |
|
"loss": 0.6151, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.14532243415077203, |
|
"grad_norm": 1.5, |
|
"learning_rate": 1.6000000000000003e-05, |
|
"loss": 0.611, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.14713896457765668, |
|
"grad_norm": 1.53125, |
|
"learning_rate": 1.62e-05, |
|
"loss": 0.6195, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.14895549500454133, |
|
"grad_norm": 1.4140625, |
|
"learning_rate": 1.64e-05, |
|
"loss": 0.6244, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.15077202543142598, |
|
"grad_norm": 1.90625, |
|
"learning_rate": 1.66e-05, |
|
"loss": 0.6003, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.15258855585831063, |
|
"grad_norm": 1.4921875, |
|
"learning_rate": 1.6800000000000002e-05, |
|
"loss": 0.6004, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.15440508628519528, |
|
"grad_norm": 1.640625, |
|
"learning_rate": 1.7e-05, |
|
"loss": 0.6015, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.15622161671207993, |
|
"grad_norm": 1.453125, |
|
"learning_rate": 1.72e-05, |
|
"loss": 0.6015, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.15803814713896458, |
|
"grad_norm": 1.328125, |
|
"learning_rate": 1.7400000000000003e-05, |
|
"loss": 0.596, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.15985467756584923, |
|
"grad_norm": 1.6796875, |
|
"learning_rate": 1.76e-05, |
|
"loss": 0.6109, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.16167120799273388, |
|
"grad_norm": 1.5, |
|
"learning_rate": 1.7800000000000002e-05, |
|
"loss": 0.5912, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.16348773841961853, |
|
"grad_norm": 1.515625, |
|
"learning_rate": 1.8e-05, |
|
"loss": 0.6066, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.16530426884650318, |
|
"grad_norm": 1.328125, |
|
"learning_rate": 1.8200000000000002e-05, |
|
"loss": 0.6014, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.16712079927338783, |
|
"grad_norm": 1.6328125, |
|
"learning_rate": 1.8400000000000003e-05, |
|
"loss": 0.5858, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.16893732970027248, |
|
"grad_norm": 1.296875, |
|
"learning_rate": 1.86e-05, |
|
"loss": 0.6087, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.17075386012715713, |
|
"grad_norm": 1.4296875, |
|
"learning_rate": 1.88e-05, |
|
"loss": 0.609, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.17257039055404177, |
|
"grad_norm": 1.9375, |
|
"learning_rate": 1.9e-05, |
|
"loss": 0.5747, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.17438692098092642, |
|
"grad_norm": 1.3828125, |
|
"learning_rate": 1.9200000000000003e-05, |
|
"loss": 0.5767, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.17620345140781107, |
|
"grad_norm": 1.8828125, |
|
"learning_rate": 1.94e-05, |
|
"loss": 0.5859, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.17801998183469572, |
|
"grad_norm": 1.6484375, |
|
"learning_rate": 1.9600000000000002e-05, |
|
"loss": 0.5856, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.17983651226158037, |
|
"grad_norm": 1.859375, |
|
"learning_rate": 1.98e-05, |
|
"loss": 0.5988, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.18165304268846502, |
|
"grad_norm": 1.6328125, |
|
"learning_rate": 2e-05, |
|
"loss": 0.5928, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.18346957311534967, |
|
"grad_norm": 1.6484375, |
|
"learning_rate": 1.9999950652018585e-05, |
|
"loss": 0.5682, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.18528610354223432, |
|
"grad_norm": 1.3515625, |
|
"learning_rate": 1.999980260856137e-05, |
|
"loss": 0.5799, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.18710263396911897, |
|
"grad_norm": 1.75, |
|
"learning_rate": 1.99995558710895e-05, |
|
"loss": 0.5892, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.18891916439600362, |
|
"grad_norm": 1.25, |
|
"learning_rate": 1.9999210442038164e-05, |
|
"loss": 0.5722, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.1907356948228883, |
|
"grad_norm": 1.7890625, |
|
"learning_rate": 1.9998766324816606e-05, |
|
"loss": 0.573, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.19255222524977295, |
|
"grad_norm": 1.7890625, |
|
"learning_rate": 1.9998223523808092e-05, |
|
"loss": 0.5635, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.1943687556766576, |
|
"grad_norm": 1.421875, |
|
"learning_rate": 1.9997582044369843e-05, |
|
"loss": 0.5743, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.19618528610354224, |
|
"grad_norm": 1.609375, |
|
"learning_rate": 1.9996841892833e-05, |
|
"loss": 0.5653, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.1980018165304269, |
|
"grad_norm": 1.3828125, |
|
"learning_rate": 1.9996003076502567e-05, |
|
"loss": 0.5792, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.19981834695731154, |
|
"grad_norm": 1.515625, |
|
"learning_rate": 1.9995065603657317e-05, |
|
"loss": 0.5746, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.2016348773841962, |
|
"grad_norm": 1.1796875, |
|
"learning_rate": 1.9994029483549732e-05, |
|
"loss": 0.5647, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.20345140781108084, |
|
"grad_norm": 1.2890625, |
|
"learning_rate": 1.9992894726405894e-05, |
|
"loss": 0.5647, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.2052679382379655, |
|
"grad_norm": 1.2890625, |
|
"learning_rate": 1.9991661343425402e-05, |
|
"loss": 0.5704, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.20708446866485014, |
|
"grad_norm": 1.2890625, |
|
"learning_rate": 1.999032934678125e-05, |
|
"loss": 0.5676, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.2089009990917348, |
|
"grad_norm": 1.203125, |
|
"learning_rate": 1.9988898749619702e-05, |
|
"loss": 0.568, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.21071752951861944, |
|
"grad_norm": 1.40625, |
|
"learning_rate": 1.998736956606018e-05, |
|
"loss": 0.5689, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.2125340599455041, |
|
"grad_norm": 1.328125, |
|
"learning_rate": 1.9985741811195098e-05, |
|
"loss": 0.5741, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.21435059037238874, |
|
"grad_norm": 1.2890625, |
|
"learning_rate": 1.998401550108975e-05, |
|
"loss": 0.569, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.2161671207992734, |
|
"grad_norm": 1.265625, |
|
"learning_rate": 1.9982190652782122e-05, |
|
"loss": 0.5636, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.21798365122615804, |
|
"grad_norm": 1.28125, |
|
"learning_rate": 1.9980267284282718e-05, |
|
"loss": 0.5531, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.2198001816530427, |
|
"grad_norm": 1.0625, |
|
"learning_rate": 1.997824541457442e-05, |
|
"loss": 0.5594, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.22161671207992734, |
|
"grad_norm": 1.125, |
|
"learning_rate": 1.9976125063612254e-05, |
|
"loss": 0.5459, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.22343324250681199, |
|
"grad_norm": 1.3046875, |
|
"learning_rate": 1.997390625232324e-05, |
|
"loss": 0.5517, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.22524977293369663, |
|
"grad_norm": 1.609375, |
|
"learning_rate": 1.997158900260614e-05, |
|
"loss": 0.5602, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.22706630336058128, |
|
"grad_norm": 1.1328125, |
|
"learning_rate": 1.9969173337331283e-05, |
|
"loss": 0.5568, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.22888283378746593, |
|
"grad_norm": 1.2421875, |
|
"learning_rate": 1.99666592803403e-05, |
|
"loss": 0.5667, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.23069936421435058, |
|
"grad_norm": 1.5703125, |
|
"learning_rate": 1.9964046856445926e-05, |
|
"loss": 0.5614, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.23251589464123523, |
|
"grad_norm": 1.0703125, |
|
"learning_rate": 1.9961336091431728e-05, |
|
"loss": 0.5445, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.23433242506811988, |
|
"grad_norm": 1.5078125, |
|
"learning_rate": 1.995852701205186e-05, |
|
"loss": 0.5668, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.23614895549500453, |
|
"grad_norm": 1.15625, |
|
"learning_rate": 1.99556196460308e-05, |
|
"loss": 0.5474, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.23796548592188918, |
|
"grad_norm": 1.53125, |
|
"learning_rate": 1.9952614022063085e-05, |
|
"loss": 0.5516, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.23978201634877383, |
|
"grad_norm": 1.2734375, |
|
"learning_rate": 1.9949510169813006e-05, |
|
"loss": 0.5472, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.24159854677565848, |
|
"grad_norm": 1.34375, |
|
"learning_rate": 1.9946308119914323e-05, |
|
"loss": 0.5502, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.24341507720254316, |
|
"grad_norm": 1.5546875, |
|
"learning_rate": 1.994300790396999e-05, |
|
"loss": 0.5493, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.2452316076294278, |
|
"grad_norm": 1.3125, |
|
"learning_rate": 1.99396095545518e-05, |
|
"loss": 0.5548, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.24704813805631246, |
|
"grad_norm": 1.6328125, |
|
"learning_rate": 1.9936113105200085e-05, |
|
"loss": 0.5576, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.2488646684831971, |
|
"grad_norm": 1.2421875, |
|
"learning_rate": 1.9932518590423396e-05, |
|
"loss": 0.5506, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.2506811989100817, |
|
"grad_norm": 2.046875, |
|
"learning_rate": 1.9928826045698138e-05, |
|
"loss": 0.5462, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.2524977293369664, |
|
"grad_norm": 1.71875, |
|
"learning_rate": 1.992503550746824e-05, |
|
"loss": 0.5623, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.254314259763851, |
|
"grad_norm": 1.59375, |
|
"learning_rate": 1.9921147013144782e-05, |
|
"loss": 0.5529, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.2561307901907357, |
|
"grad_norm": 1.4375, |
|
"learning_rate": 1.9917160601105632e-05, |
|
"loss": 0.55, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.2579473206176203, |
|
"grad_norm": 1.421875, |
|
"learning_rate": 1.9913076310695068e-05, |
|
"loss": 0.5379, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.259763851044505, |
|
"grad_norm": 1.640625, |
|
"learning_rate": 1.990889418222339e-05, |
|
"loss": 0.5436, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.2615803814713896, |
|
"grad_norm": 1.3046875, |
|
"learning_rate": 1.9904614256966514e-05, |
|
"loss": 0.5423, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.2633969118982743, |
|
"grad_norm": 1.53125, |
|
"learning_rate": 1.990023657716558e-05, |
|
"loss": 0.5336, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.2652134423251589, |
|
"grad_norm": 1.1640625, |
|
"learning_rate": 1.989576118602651e-05, |
|
"loss": 0.5353, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.2670299727520436, |
|
"grad_norm": 1.65625, |
|
"learning_rate": 1.989118812771962e-05, |
|
"loss": 0.5512, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.2688465031789282, |
|
"grad_norm": 1.5546875, |
|
"learning_rate": 1.988651744737914e-05, |
|
"loss": 0.5402, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.2706630336058129, |
|
"grad_norm": 1.4453125, |
|
"learning_rate": 1.9881749191102807e-05, |
|
"loss": 0.5418, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.2724795640326976, |
|
"grad_norm": 1.390625, |
|
"learning_rate": 1.9876883405951378e-05, |
|
"loss": 0.5424, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.2742960944595822, |
|
"grad_norm": 1.28125, |
|
"learning_rate": 1.9871920139948193e-05, |
|
"loss": 0.522, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.2761126248864669, |
|
"grad_norm": 1.125, |
|
"learning_rate": 1.986685944207868e-05, |
|
"loss": 0.5352, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.2779291553133515, |
|
"grad_norm": 1.3984375, |
|
"learning_rate": 1.9861701362289892e-05, |
|
"loss": 0.5331, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.27974568574023617, |
|
"grad_norm": 1.1484375, |
|
"learning_rate": 1.9856445951489984e-05, |
|
"loss": 0.5345, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.2815622161671208, |
|
"grad_norm": 1.265625, |
|
"learning_rate": 1.985109326154774e-05, |
|
"loss": 0.5413, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.28337874659400547, |
|
"grad_norm": 1.1875, |
|
"learning_rate": 1.9845643345292055e-05, |
|
"loss": 0.5436, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.2851952770208901, |
|
"grad_norm": 1.265625, |
|
"learning_rate": 1.9840096256511398e-05, |
|
"loss": 0.5292, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.28701180744777477, |
|
"grad_norm": 1.265625, |
|
"learning_rate": 1.98344520499533e-05, |
|
"loss": 0.5216, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.2888283378746594, |
|
"grad_norm": 1.203125, |
|
"learning_rate": 1.9828710781323793e-05, |
|
"loss": 0.5282, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.29064486830154407, |
|
"grad_norm": 1.34375, |
|
"learning_rate": 1.982287250728689e-05, |
|
"loss": 0.5299, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.2924613987284287, |
|
"grad_norm": 1.3515625, |
|
"learning_rate": 1.9816937285463992e-05, |
|
"loss": 0.5237, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.29427792915531337, |
|
"grad_norm": 1.21875, |
|
"learning_rate": 1.981090517443334e-05, |
|
"loss": 0.5231, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.296094459582198, |
|
"grad_norm": 1.3203125, |
|
"learning_rate": 1.9804776233729446e-05, |
|
"loss": 0.544, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.29791099000908267, |
|
"grad_norm": 1.2578125, |
|
"learning_rate": 1.979855052384247e-05, |
|
"loss": 0.5439, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.2997275204359673, |
|
"grad_norm": 1.390625, |
|
"learning_rate": 1.979222810621766e-05, |
|
"loss": 0.5332, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.30154405086285196, |
|
"grad_norm": 1.2734375, |
|
"learning_rate": 1.978580904325472e-05, |
|
"loss": 0.5308, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.3033605812897366, |
|
"grad_norm": 1.234375, |
|
"learning_rate": 1.977929339830722e-05, |
|
"loss": 0.5259, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.30517711171662126, |
|
"grad_norm": 1.1796875, |
|
"learning_rate": 1.9772681235681936e-05, |
|
"loss": 0.5262, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.3069936421435059, |
|
"grad_norm": 1.6171875, |
|
"learning_rate": 1.976597262063825e-05, |
|
"loss": 0.5348, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.30881017257039056, |
|
"grad_norm": 1.2890625, |
|
"learning_rate": 1.9759167619387474e-05, |
|
"loss": 0.5412, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.3106267029972752, |
|
"grad_norm": 1.546875, |
|
"learning_rate": 1.9752266299092234e-05, |
|
"loss": 0.5194, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.31244323342415986, |
|
"grad_norm": 1.2421875, |
|
"learning_rate": 1.9745268727865774e-05, |
|
"loss": 0.5355, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.3142597638510445, |
|
"grad_norm": 1.3046875, |
|
"learning_rate": 1.9738174974771288e-05, |
|
"loss": 0.5241, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.31607629427792916, |
|
"grad_norm": 1.328125, |
|
"learning_rate": 1.9730985109821268e-05, |
|
"loss": 0.5317, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.3178928247048138, |
|
"grad_norm": 1.21875, |
|
"learning_rate": 1.9723699203976768e-05, |
|
"loss": 0.5216, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.31970935513169846, |
|
"grad_norm": 1.359375, |
|
"learning_rate": 1.971631732914674e-05, |
|
"loss": 0.5123, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.3215258855585831, |
|
"grad_norm": 1.09375, |
|
"learning_rate": 1.9708839558187313e-05, |
|
"loss": 0.5209, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.32334241598546776, |
|
"grad_norm": 1.3828125, |
|
"learning_rate": 1.970126596490106e-05, |
|
"loss": 0.5271, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.32515894641235243, |
|
"grad_norm": 1.15625, |
|
"learning_rate": 1.9693596624036294e-05, |
|
"loss": 0.5136, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.32697547683923706, |
|
"grad_norm": 1.34375, |
|
"learning_rate": 1.9685831611286312e-05, |
|
"loss": 0.5257, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.32879200726612173, |
|
"grad_norm": 1.375, |
|
"learning_rate": 1.9677971003288657e-05, |
|
"loss": 0.5186, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.33060853769300635, |
|
"grad_norm": 1.2578125, |
|
"learning_rate": 1.9670014877624353e-05, |
|
"loss": 0.5198, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.33242506811989103, |
|
"grad_norm": 1.203125, |
|
"learning_rate": 1.966196331281715e-05, |
|
"loss": 0.5207, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.33424159854677565, |
|
"grad_norm": 1.25, |
|
"learning_rate": 1.965381638833274e-05, |
|
"loss": 0.514, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.33424159854677565, |
|
"eval_loss": 0.5189763903617859, |
|
"eval_runtime": 34.9275, |
|
"eval_samples_per_second": 5.726, |
|
"eval_steps_per_second": 1.432, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.33605812897366033, |
|
"grad_norm": 1.109375, |
|
"learning_rate": 1.9645574184577982e-05, |
|
"loss": 0.5245, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.33787465940054495, |
|
"grad_norm": 1.328125, |
|
"learning_rate": 1.96372367829001e-05, |
|
"loss": 0.5097, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.33969118982742963, |
|
"grad_norm": 1.140625, |
|
"learning_rate": 1.9628804265585878e-05, |
|
"loss": 0.5277, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.34150772025431425, |
|
"grad_norm": 1.2734375, |
|
"learning_rate": 1.962027671586086e-05, |
|
"loss": 0.5154, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.34332425068119893, |
|
"grad_norm": 1.46875, |
|
"learning_rate": 1.961165421788852e-05, |
|
"loss": 0.5241, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 0.34514078110808355, |
|
"grad_norm": 1.171875, |
|
"learning_rate": 1.9602936856769432e-05, |
|
"loss": 0.5145, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.3469573115349682, |
|
"grad_norm": 1.3046875, |
|
"learning_rate": 1.959412471854043e-05, |
|
"loss": 0.5147, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 0.34877384196185285, |
|
"grad_norm": 1.1953125, |
|
"learning_rate": 1.958521789017376e-05, |
|
"loss": 0.5267, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.3505903723887375, |
|
"grad_norm": 1.09375, |
|
"learning_rate": 1.9576216459576222e-05, |
|
"loss": 0.5065, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 0.35240690281562215, |
|
"grad_norm": 1.15625, |
|
"learning_rate": 1.9567120515588307e-05, |
|
"loss": 0.5087, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.3542234332425068, |
|
"grad_norm": 1.2578125, |
|
"learning_rate": 1.9557930147983303e-05, |
|
"loss": 0.524, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.35603996366939145, |
|
"grad_norm": 1.3046875, |
|
"learning_rate": 1.9548645447466433e-05, |
|
"loss": 0.5176, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.3578564940962761, |
|
"grad_norm": 1.171875, |
|
"learning_rate": 1.9539266505673938e-05, |
|
"loss": 0.5081, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 0.35967302452316074, |
|
"grad_norm": 1.046875, |
|
"learning_rate": 1.952979341517219e-05, |
|
"loss": 0.5296, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.3614895549500454, |
|
"grad_norm": 1.140625, |
|
"learning_rate": 1.9520226269456767e-05, |
|
"loss": 0.4961, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 0.36330608537693004, |
|
"grad_norm": 1.1171875, |
|
"learning_rate": 1.9510565162951538e-05, |
|
"loss": 0.5091, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.3651226158038147, |
|
"grad_norm": 1.1015625, |
|
"learning_rate": 1.9500810191007717e-05, |
|
"loss": 0.5169, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 0.36693914623069934, |
|
"grad_norm": 1.015625, |
|
"learning_rate": 1.9490961449902946e-05, |
|
"loss": 0.5044, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 0.368755676657584, |
|
"grad_norm": 1.265625, |
|
"learning_rate": 1.948101903684032e-05, |
|
"loss": 0.5146, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 0.37057220708446864, |
|
"grad_norm": 1.1015625, |
|
"learning_rate": 1.9470983049947446e-05, |
|
"loss": 0.5171, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 0.3723887375113533, |
|
"grad_norm": 1.1484375, |
|
"learning_rate": 1.9460853588275454e-05, |
|
"loss": 0.5081, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.37420526793823794, |
|
"grad_norm": 1.1015625, |
|
"learning_rate": 1.945063075179805e-05, |
|
"loss": 0.5098, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 0.3760217983651226, |
|
"grad_norm": 1.0859375, |
|
"learning_rate": 1.94403146414105e-05, |
|
"loss": 0.5025, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 0.37783832879200724, |
|
"grad_norm": 1.125, |
|
"learning_rate": 1.9429905358928648e-05, |
|
"loss": 0.5231, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 0.3796548592188919, |
|
"grad_norm": 1.203125, |
|
"learning_rate": 1.9419403007087908e-05, |
|
"loss": 0.505, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 0.3814713896457766, |
|
"grad_norm": 1.203125, |
|
"learning_rate": 1.9408807689542257e-05, |
|
"loss": 0.4997, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.3832879200726612, |
|
"grad_norm": 1.0859375, |
|
"learning_rate": 1.9398119510863197e-05, |
|
"loss": 0.5099, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 0.3851044504995459, |
|
"grad_norm": 1.296875, |
|
"learning_rate": 1.9387338576538743e-05, |
|
"loss": 0.5036, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 0.3869209809264305, |
|
"grad_norm": 1.046875, |
|
"learning_rate": 1.9376464992972358e-05, |
|
"loss": 0.5158, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 0.3887375113533152, |
|
"grad_norm": 1.2421875, |
|
"learning_rate": 1.9365498867481926e-05, |
|
"loss": 0.5072, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 0.3905540417801998, |
|
"grad_norm": 1.0859375, |
|
"learning_rate": 1.9354440308298676e-05, |
|
"loss": 0.5058, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.3923705722070845, |
|
"grad_norm": 1.2421875, |
|
"learning_rate": 1.9343289424566122e-05, |
|
"loss": 0.4971, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 0.3941871026339691, |
|
"grad_norm": 1.15625, |
|
"learning_rate": 1.9332046326338985e-05, |
|
"loss": 0.504, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 0.3960036330608538, |
|
"grad_norm": 1.0859375, |
|
"learning_rate": 1.932071112458211e-05, |
|
"loss": 0.5174, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 0.3978201634877384, |
|
"grad_norm": 1.078125, |
|
"learning_rate": 1.930928393116936e-05, |
|
"loss": 0.5216, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 0.3996366939146231, |
|
"grad_norm": 1.15625, |
|
"learning_rate": 1.9297764858882516e-05, |
|
"loss": 0.4991, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.4014532243415077, |
|
"grad_norm": 1.0234375, |
|
"learning_rate": 1.9286154021410177e-05, |
|
"loss": 0.4929, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 0.4032697547683924, |
|
"grad_norm": 1.28125, |
|
"learning_rate": 1.9274451533346617e-05, |
|
"loss": 0.507, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 0.405086285195277, |
|
"grad_norm": 1.078125, |
|
"learning_rate": 1.926265751019067e-05, |
|
"loss": 0.4942, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 0.4069028156221617, |
|
"grad_norm": 1.2421875, |
|
"learning_rate": 1.925077206834458e-05, |
|
"loss": 0.4991, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 0.4087193460490463, |
|
"grad_norm": 1.1796875, |
|
"learning_rate": 1.9238795325112867e-05, |
|
"loss": 0.5008, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.410535876475931, |
|
"grad_norm": 1.09375, |
|
"learning_rate": 1.922672739870115e-05, |
|
"loss": 0.4975, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 0.4123524069028156, |
|
"grad_norm": 1.1796875, |
|
"learning_rate": 1.9214568408214986e-05, |
|
"loss": 0.4997, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 0.4141689373297003, |
|
"grad_norm": 0.984375, |
|
"learning_rate": 1.9202318473658707e-05, |
|
"loss": 0.5072, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 0.4159854677565849, |
|
"grad_norm": 1.3203125, |
|
"learning_rate": 1.9189977715934214e-05, |
|
"loss": 0.5099, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 0.4178019981834696, |
|
"grad_norm": 1.0390625, |
|
"learning_rate": 1.9177546256839814e-05, |
|
"loss": 0.508, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.4196185286103542, |
|
"grad_norm": 1.3046875, |
|
"learning_rate": 1.916502421906898e-05, |
|
"loss": 0.5061, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 0.4214350590372389, |
|
"grad_norm": 1.1171875, |
|
"learning_rate": 1.9152411726209176e-05, |
|
"loss": 0.511, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 0.4232515894641235, |
|
"grad_norm": 1.0703125, |
|
"learning_rate": 1.913970890274061e-05, |
|
"loss": 0.4973, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 0.4250681198910082, |
|
"grad_norm": 1.21875, |
|
"learning_rate": 1.912691587403503e-05, |
|
"loss": 0.5123, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 0.4268846503178928, |
|
"grad_norm": 1.0078125, |
|
"learning_rate": 1.9114032766354453e-05, |
|
"loss": 0.5033, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.4287011807447775, |
|
"grad_norm": 1.265625, |
|
"learning_rate": 1.9101059706849957e-05, |
|
"loss": 0.5, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 0.4305177111716621, |
|
"grad_norm": 1.1484375, |
|
"learning_rate": 1.9087996823560404e-05, |
|
"loss": 0.4999, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 0.4323342415985468, |
|
"grad_norm": 1.21875, |
|
"learning_rate": 1.907484424541117e-05, |
|
"loss": 0.5152, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 0.43415077202543145, |
|
"grad_norm": 0.98828125, |
|
"learning_rate": 1.9061602102212898e-05, |
|
"loss": 0.4996, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 0.4359673024523161, |
|
"grad_norm": 1.140625, |
|
"learning_rate": 1.9048270524660197e-05, |
|
"loss": 0.4941, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.43778383287920075, |
|
"grad_norm": 1.078125, |
|
"learning_rate": 1.903484964433035e-05, |
|
"loss": 0.4895, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 0.4396003633060854, |
|
"grad_norm": 1.2109375, |
|
"learning_rate": 1.902133959368203e-05, |
|
"loss": 0.5014, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 0.44141689373297005, |
|
"grad_norm": 0.96875, |
|
"learning_rate": 1.9007740506053983e-05, |
|
"loss": 0.4971, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 0.44323342415985467, |
|
"grad_norm": 1.09375, |
|
"learning_rate": 1.899405251566371e-05, |
|
"loss": 0.4969, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 0.44504995458673935, |
|
"grad_norm": 1.046875, |
|
"learning_rate": 1.8980275757606157e-05, |
|
"loss": 0.4883, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.44686648501362397, |
|
"grad_norm": 1.1015625, |
|
"learning_rate": 1.896641036785236e-05, |
|
"loss": 0.5083, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 0.44868301544050865, |
|
"grad_norm": 1.0078125, |
|
"learning_rate": 1.8952456483248117e-05, |
|
"loss": 0.4947, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 0.45049954586739327, |
|
"grad_norm": 1.21875, |
|
"learning_rate": 1.893841424151264e-05, |
|
"loss": 0.5, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 0.45231607629427795, |
|
"grad_norm": 1.0546875, |
|
"learning_rate": 1.892428378123718e-05, |
|
"loss": 0.4889, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 0.45413260672116257, |
|
"grad_norm": 1.2734375, |
|
"learning_rate": 1.891006524188368e-05, |
|
"loss": 0.4956, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.45594913714804725, |
|
"grad_norm": 1.28125, |
|
"learning_rate": 1.8895758763783383e-05, |
|
"loss": 0.5052, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 0.45776566757493187, |
|
"grad_norm": 1.328125, |
|
"learning_rate": 1.8881364488135448e-05, |
|
"loss": 0.4849, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 0.45958219800181654, |
|
"grad_norm": 1.25, |
|
"learning_rate": 1.8866882557005567e-05, |
|
"loss": 0.4914, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 0.46139872842870117, |
|
"grad_norm": 1.296875, |
|
"learning_rate": 1.8852313113324553e-05, |
|
"loss": 0.5086, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 0.46321525885558584, |
|
"grad_norm": 1.15625, |
|
"learning_rate": 1.8837656300886937e-05, |
|
"loss": 0.5043, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.46503178928247046, |
|
"grad_norm": 1.125, |
|
"learning_rate": 1.8822912264349535e-05, |
|
"loss": 0.4941, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 0.46684831970935514, |
|
"grad_norm": 1.21875, |
|
"learning_rate": 1.8808081149230036e-05, |
|
"loss": 0.497, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 0.46866485013623976, |
|
"grad_norm": 1.109375, |
|
"learning_rate": 1.8793163101905562e-05, |
|
"loss": 0.4948, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 0.47048138056312444, |
|
"grad_norm": 1.2890625, |
|
"learning_rate": 1.877815826961122e-05, |
|
"loss": 0.481, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 0.47229791099000906, |
|
"grad_norm": 1.0234375, |
|
"learning_rate": 1.8763066800438638e-05, |
|
"loss": 0.4863, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.47411444141689374, |
|
"grad_norm": 1.1875, |
|
"learning_rate": 1.8747888843334528e-05, |
|
"loss": 0.4854, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 0.47593097184377836, |
|
"grad_norm": 1.0546875, |
|
"learning_rate": 1.8732624548099204e-05, |
|
"loss": 0.487, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 0.47774750227066304, |
|
"grad_norm": 1.171875, |
|
"learning_rate": 1.8717274065385092e-05, |
|
"loss": 0.4972, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 0.47956403269754766, |
|
"grad_norm": 0.96484375, |
|
"learning_rate": 1.870183754669526e-05, |
|
"loss": 0.4982, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 0.48138056312443234, |
|
"grad_norm": 1.2265625, |
|
"learning_rate": 1.8686315144381914e-05, |
|
"loss": 0.5032, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.48319709355131696, |
|
"grad_norm": 1.0078125, |
|
"learning_rate": 1.86707070116449e-05, |
|
"loss": 0.4951, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 0.48501362397820164, |
|
"grad_norm": 1.234375, |
|
"learning_rate": 1.8655013302530193e-05, |
|
"loss": 0.487, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 0.4868301544050863, |
|
"grad_norm": 1.0546875, |
|
"learning_rate": 1.8639234171928355e-05, |
|
"loss": 0.4897, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 0.48864668483197093, |
|
"grad_norm": 1.375, |
|
"learning_rate": 1.862336977557304e-05, |
|
"loss": 0.4806, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 0.4904632152588556, |
|
"grad_norm": 1.0390625, |
|
"learning_rate": 1.860742027003944e-05, |
|
"loss": 0.4892, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.49227974568574023, |
|
"grad_norm": 1.21875, |
|
"learning_rate": 1.8591385812742724e-05, |
|
"loss": 0.4881, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 0.4940962761126249, |
|
"grad_norm": 1.015625, |
|
"learning_rate": 1.8575266561936526e-05, |
|
"loss": 0.4903, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 0.49591280653950953, |
|
"grad_norm": 1.1015625, |
|
"learning_rate": 1.855906267671133e-05, |
|
"loss": 0.4995, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 0.4977293369663942, |
|
"grad_norm": 1.0859375, |
|
"learning_rate": 1.8542774316992953e-05, |
|
"loss": 0.4713, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 0.49954586739327883, |
|
"grad_norm": 1.0859375, |
|
"learning_rate": 1.8526401643540924e-05, |
|
"loss": 0.4769, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.5013623978201635, |
|
"grad_norm": 1.015625, |
|
"learning_rate": 1.850994481794692e-05, |
|
"loss": 0.4778, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 0.5031789282470481, |
|
"grad_norm": 1.109375, |
|
"learning_rate": 1.8493404002633167e-05, |
|
"loss": 0.4859, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 0.5049954586739328, |
|
"grad_norm": 1.265625, |
|
"learning_rate": 1.8476779360850833e-05, |
|
"loss": 0.4899, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 0.5068119891008175, |
|
"grad_norm": 1.0, |
|
"learning_rate": 1.8460071056678424e-05, |
|
"loss": 0.4879, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 0.508628519527702, |
|
"grad_norm": 1.2265625, |
|
"learning_rate": 1.8443279255020153e-05, |
|
"loss": 0.485, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.5104450499545867, |
|
"grad_norm": 1.015625, |
|
"learning_rate": 1.8426404121604324e-05, |
|
"loss": 0.4809, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 0.5122615803814714, |
|
"grad_norm": 1.109375, |
|
"learning_rate": 1.8409445822981694e-05, |
|
"loss": 0.4929, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 0.5140781108083561, |
|
"grad_norm": 1.0, |
|
"learning_rate": 1.8392404526523816e-05, |
|
"loss": 0.4864, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 0.5158946412352406, |
|
"grad_norm": 1.1171875, |
|
"learning_rate": 1.837528040042142e-05, |
|
"loss": 0.4846, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 0.5177111716621253, |
|
"grad_norm": 0.9921875, |
|
"learning_rate": 1.8358073613682705e-05, |
|
"loss": 0.47, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.51952770208901, |
|
"grad_norm": 1.1875, |
|
"learning_rate": 1.8340784336131715e-05, |
|
"loss": 0.4803, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 0.5213442325158947, |
|
"grad_norm": 1.0390625, |
|
"learning_rate": 1.8323412738406638e-05, |
|
"loss": 0.4951, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 0.5231607629427792, |
|
"grad_norm": 0.9609375, |
|
"learning_rate": 1.830595899195813e-05, |
|
"loss": 0.4787, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 0.5249772933696639, |
|
"grad_norm": 0.98046875, |
|
"learning_rate": 1.828842326904762e-05, |
|
"loss": 0.4841, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 0.5267938237965486, |
|
"grad_norm": 1.0625, |
|
"learning_rate": 1.827080574274562e-05, |
|
"loss": 0.4903, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.5286103542234333, |
|
"grad_norm": 1.0703125, |
|
"learning_rate": 1.825310658693e-05, |
|
"loss": 0.4875, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 0.5304268846503178, |
|
"grad_norm": 1.21875, |
|
"learning_rate": 1.8235325976284276e-05, |
|
"loss": 0.4772, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 0.5322434150772025, |
|
"grad_norm": 1.1328125, |
|
"learning_rate": 1.8217464086295904e-05, |
|
"loss": 0.4805, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 0.5340599455040872, |
|
"grad_norm": 1.140625, |
|
"learning_rate": 1.8199521093254524e-05, |
|
"loss": 0.4964, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 0.5358764759309719, |
|
"grad_norm": 1.34375, |
|
"learning_rate": 1.8181497174250236e-05, |
|
"loss": 0.486, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.5376930063578564, |
|
"grad_norm": 1.0859375, |
|
"learning_rate": 1.816339250717184e-05, |
|
"loss": 0.4828, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 0.5395095367847411, |
|
"grad_norm": 1.1953125, |
|
"learning_rate": 1.8145207270705095e-05, |
|
"loss": 0.4772, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 0.5413260672116258, |
|
"grad_norm": 1.03125, |
|
"learning_rate": 1.812694164433094e-05, |
|
"loss": 0.4739, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 0.5431425976385105, |
|
"grad_norm": 1.1953125, |
|
"learning_rate": 1.8108595808323736e-05, |
|
"loss": 0.4746, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 0.5449591280653951, |
|
"grad_norm": 1.0546875, |
|
"learning_rate": 1.8090169943749477e-05, |
|
"loss": 0.4777, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.5467756584922797, |
|
"grad_norm": 1.2265625, |
|
"learning_rate": 1.8071664232464005e-05, |
|
"loss": 0.4825, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 0.5485921889191644, |
|
"grad_norm": 0.96875, |
|
"learning_rate": 1.8053078857111218e-05, |
|
"loss": 0.4796, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 0.5504087193460491, |
|
"grad_norm": 1.3203125, |
|
"learning_rate": 1.8034414001121278e-05, |
|
"loss": 0.4947, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 0.5522252497729337, |
|
"grad_norm": 1.015625, |
|
"learning_rate": 1.8015669848708768e-05, |
|
"loss": 0.4741, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 0.5540417801998183, |
|
"grad_norm": 1.328125, |
|
"learning_rate": 1.799684658487091e-05, |
|
"loss": 0.4742, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.555858310626703, |
|
"grad_norm": 1.078125, |
|
"learning_rate": 1.7977944395385713e-05, |
|
"loss": 0.4848, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 0.5576748410535877, |
|
"grad_norm": 1.3671875, |
|
"learning_rate": 1.795896346681016e-05, |
|
"loss": 0.4763, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 0.5594913714804723, |
|
"grad_norm": 1.2578125, |
|
"learning_rate": 1.7939903986478354e-05, |
|
"loss": 0.4814, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 0.5613079019073569, |
|
"grad_norm": 1.125, |
|
"learning_rate": 1.7920766142499673e-05, |
|
"loss": 0.4778, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 0.5631244323342416, |
|
"grad_norm": 1.421875, |
|
"learning_rate": 1.7901550123756906e-05, |
|
"loss": 0.4781, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.5649409627611263, |
|
"grad_norm": 0.953125, |
|
"learning_rate": 1.78822561199044e-05, |
|
"loss": 0.4747, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 0.5667574931880109, |
|
"grad_norm": 1.4296875, |
|
"learning_rate": 1.786288432136619e-05, |
|
"loss": 0.4736, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 0.5685740236148955, |
|
"grad_norm": 1.0078125, |
|
"learning_rate": 1.7843434919334103e-05, |
|
"loss": 0.4803, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 0.5703905540417802, |
|
"grad_norm": 1.234375, |
|
"learning_rate": 1.7823908105765883e-05, |
|
"loss": 0.4774, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 0.5722070844686649, |
|
"grad_norm": 1.15625, |
|
"learning_rate": 1.7804304073383298e-05, |
|
"loss": 0.4864, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.5740236148955495, |
|
"grad_norm": 1.1015625, |
|
"learning_rate": 1.7784623015670237e-05, |
|
"loss": 0.4705, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 0.5758401453224341, |
|
"grad_norm": 1.1015625, |
|
"learning_rate": 1.7764865126870788e-05, |
|
"loss": 0.4672, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 0.5776566757493188, |
|
"grad_norm": 1.3125, |
|
"learning_rate": 1.7745030601987338e-05, |
|
"loss": 0.4758, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 0.5794732061762035, |
|
"grad_norm": 1.0703125, |
|
"learning_rate": 1.7725119636778644e-05, |
|
"loss": 0.473, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 0.5812897366030881, |
|
"grad_norm": 1.1640625, |
|
"learning_rate": 1.7705132427757895e-05, |
|
"loss": 0.4677, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.5831062670299727, |
|
"grad_norm": 1.15625, |
|
"learning_rate": 1.7685069172190766e-05, |
|
"loss": 0.4733, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 0.5849227974568574, |
|
"grad_norm": 1.140625, |
|
"learning_rate": 1.76649300680935e-05, |
|
"loss": 0.4816, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 0.5867393278837421, |
|
"grad_norm": 1.1640625, |
|
"learning_rate": 1.764471531423092e-05, |
|
"loss": 0.4918, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 0.5885558583106267, |
|
"grad_norm": 0.98828125, |
|
"learning_rate": 1.762442511011448e-05, |
|
"loss": 0.4732, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 0.5903723887375113, |
|
"grad_norm": 1.0390625, |
|
"learning_rate": 1.7604059656000313e-05, |
|
"loss": 0.4685, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.592188919164396, |
|
"grad_norm": 1.0625, |
|
"learning_rate": 1.7583619152887222e-05, |
|
"loss": 0.4748, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 0.5940054495912807, |
|
"grad_norm": 0.95703125, |
|
"learning_rate": 1.756310380251472e-05, |
|
"loss": 0.4669, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 0.5958219800181653, |
|
"grad_norm": 0.921875, |
|
"learning_rate": 1.754251380736104e-05, |
|
"loss": 0.466, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 0.59763851044505, |
|
"grad_norm": 1.0078125, |
|
"learning_rate": 1.7521849370641116e-05, |
|
"loss": 0.4591, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 0.5994550408719346, |
|
"grad_norm": 0.95703125, |
|
"learning_rate": 1.7501110696304598e-05, |
|
"loss": 0.4607, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.6012715712988193, |
|
"grad_norm": 1.0234375, |
|
"learning_rate": 1.7480297989033824e-05, |
|
"loss": 0.4698, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 0.6030881017257039, |
|
"grad_norm": 0.9921875, |
|
"learning_rate": 1.7459411454241822e-05, |
|
"loss": 0.4713, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 0.6049046321525886, |
|
"grad_norm": 0.96484375, |
|
"learning_rate": 1.7438451298070252e-05, |
|
"loss": 0.4632, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 0.6067211625794732, |
|
"grad_norm": 1.0625, |
|
"learning_rate": 1.7417417727387392e-05, |
|
"loss": 0.4675, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 0.6085376930063578, |
|
"grad_norm": 1.046875, |
|
"learning_rate": 1.73963109497861e-05, |
|
"loss": 0.4812, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.6103542234332425, |
|
"grad_norm": 1.1171875, |
|
"learning_rate": 1.737513117358174e-05, |
|
"loss": 0.4746, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 0.6121707538601272, |
|
"grad_norm": 1.1484375, |
|
"learning_rate": 1.735387860781016e-05, |
|
"loss": 0.468, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 0.6139872842870118, |
|
"grad_norm": 1.03125, |
|
"learning_rate": 1.7332553462225604e-05, |
|
"loss": 0.4721, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 0.6158038147138964, |
|
"grad_norm": 0.96484375, |
|
"learning_rate": 1.7311155947298644e-05, |
|
"loss": 0.4696, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 0.6176203451407811, |
|
"grad_norm": 1.0703125, |
|
"learning_rate": 1.7289686274214116e-05, |
|
"loss": 0.478, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.6194368755676658, |
|
"grad_norm": 0.96875, |
|
"learning_rate": 1.726814465486903e-05, |
|
"loss": 0.47, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 0.6212534059945504, |
|
"grad_norm": 1.0390625, |
|
"learning_rate": 1.7246531301870467e-05, |
|
"loss": 0.4697, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 0.623069936421435, |
|
"grad_norm": 1.015625, |
|
"learning_rate": 1.7224846428533498e-05, |
|
"loss": 0.4752, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 0.6248864668483197, |
|
"grad_norm": 1.015625, |
|
"learning_rate": 1.720309024887907e-05, |
|
"loss": 0.4713, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 0.6267029972752044, |
|
"grad_norm": 1.1328125, |
|
"learning_rate": 1.718126297763189e-05, |
|
"loss": 0.4718, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.628519527702089, |
|
"grad_norm": 1.1484375, |
|
"learning_rate": 1.7159364830218312e-05, |
|
"loss": 0.4798, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 0.6303360581289736, |
|
"grad_norm": 1.0078125, |
|
"learning_rate": 1.7137396022764216e-05, |
|
"loss": 0.4691, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 0.6321525885558583, |
|
"grad_norm": 1.171875, |
|
"learning_rate": 1.7115356772092858e-05, |
|
"loss": 0.4685, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 0.633969118982743, |
|
"grad_norm": 1.109375, |
|
"learning_rate": 1.709324729572274e-05, |
|
"loss": 0.4716, |
|
"step": 349 |
|
}, |
|
{ |
|
"epoch": 0.6357856494096276, |
|
"grad_norm": 1.0546875, |
|
"learning_rate": 1.7071067811865477e-05, |
|
"loss": 0.4775, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.6376021798365122, |
|
"grad_norm": 1.078125, |
|
"learning_rate": 1.7048818539423616e-05, |
|
"loss": 0.473, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 0.6394187102633969, |
|
"grad_norm": 1.0625, |
|
"learning_rate": 1.7026499697988496e-05, |
|
"loss": 0.4677, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 0.6412352406902816, |
|
"grad_norm": 1.1953125, |
|
"learning_rate": 1.7004111507838067e-05, |
|
"loss": 0.4711, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 0.6430517711171662, |
|
"grad_norm": 1.2265625, |
|
"learning_rate": 1.698165418993473e-05, |
|
"loss": 0.4677, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 0.6448683015440508, |
|
"grad_norm": 1.1171875, |
|
"learning_rate": 1.6959127965923144e-05, |
|
"loss": 0.4761, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.6466848319709355, |
|
"grad_norm": 1.1171875, |
|
"learning_rate": 1.693653305812805e-05, |
|
"loss": 0.4593, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 0.6485013623978202, |
|
"grad_norm": 1.03125, |
|
"learning_rate": 1.6913869689552066e-05, |
|
"loss": 0.4805, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 0.6503178928247049, |
|
"grad_norm": 1.109375, |
|
"learning_rate": 1.6891138083873486e-05, |
|
"loss": 0.4702, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 0.6521344232515894, |
|
"grad_norm": 1.0625, |
|
"learning_rate": 1.6868338465444086e-05, |
|
"loss": 0.4714, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 0.6539509536784741, |
|
"grad_norm": 1.03125, |
|
"learning_rate": 1.684547105928689e-05, |
|
"loss": 0.4627, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.6557674841053588, |
|
"grad_norm": 1.171875, |
|
"learning_rate": 1.6822536091093967e-05, |
|
"loss": 0.4633, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 0.6575840145322435, |
|
"grad_norm": 1.046875, |
|
"learning_rate": 1.6799533787224192e-05, |
|
"loss": 0.4751, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 0.659400544959128, |
|
"grad_norm": 1.203125, |
|
"learning_rate": 1.6776464374701026e-05, |
|
"loss": 0.4731, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 0.6612170753860127, |
|
"grad_norm": 0.98046875, |
|
"learning_rate": 1.6753328081210244e-05, |
|
"loss": 0.4631, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 0.6630336058128974, |
|
"grad_norm": 1.0625, |
|
"learning_rate": 1.6730125135097736e-05, |
|
"loss": 0.4576, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.6648501362397821, |
|
"grad_norm": 1.0, |
|
"learning_rate": 1.6706855765367202e-05, |
|
"loss": 0.4645, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 0.6666666666666666, |
|
"grad_norm": 1.046875, |
|
"learning_rate": 1.6683520201677933e-05, |
|
"loss": 0.4628, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 0.6684831970935513, |
|
"grad_norm": 1.078125, |
|
"learning_rate": 1.666011867434252e-05, |
|
"loss": 0.4769, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 0.6684831970935513, |
|
"eval_loss": 0.4684128165245056, |
|
"eval_runtime": 34.9278, |
|
"eval_samples_per_second": 5.726, |
|
"eval_steps_per_second": 1.432, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 0.670299727520436, |
|
"grad_norm": 0.96875, |
|
"learning_rate": 1.6636651414324586e-05, |
|
"loss": 0.466, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 0.6721162579473207, |
|
"grad_norm": 1.0390625, |
|
"learning_rate": 1.661311865323652e-05, |
|
"loss": 0.4649, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.6739327883742052, |
|
"grad_norm": 1.046875, |
|
"learning_rate": 1.6589520623337173e-05, |
|
"loss": 0.4734, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 0.6757493188010899, |
|
"grad_norm": 1.0625, |
|
"learning_rate": 1.6565857557529567e-05, |
|
"loss": 0.4594, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 0.6775658492279746, |
|
"grad_norm": 0.94140625, |
|
"learning_rate": 1.6542129689358613e-05, |
|
"loss": 0.4647, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 0.6793823796548593, |
|
"grad_norm": 0.984375, |
|
"learning_rate": 1.651833725300879e-05, |
|
"loss": 0.4553, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 0.6811989100817438, |
|
"grad_norm": 0.953125, |
|
"learning_rate": 1.6494480483301836e-05, |
|
"loss": 0.4703, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.6830154405086285, |
|
"grad_norm": 1.0, |
|
"learning_rate": 1.6470559615694445e-05, |
|
"loss": 0.4627, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 0.6848319709355132, |
|
"grad_norm": 0.9375, |
|
"learning_rate": 1.6446574886275914e-05, |
|
"loss": 0.4641, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 0.6866485013623979, |
|
"grad_norm": 1.125, |
|
"learning_rate": 1.6422526531765846e-05, |
|
"loss": 0.4709, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 0.6884650317892824, |
|
"grad_norm": 0.9609375, |
|
"learning_rate": 1.6398414789511784e-05, |
|
"loss": 0.4615, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 0.6902815622161671, |
|
"grad_norm": 1.0234375, |
|
"learning_rate": 1.63742398974869e-05, |
|
"loss": 0.4565, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.6920980926430518, |
|
"grad_norm": 1.015625, |
|
"learning_rate": 1.6350002094287608e-05, |
|
"loss": 0.4635, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 0.6939146230699365, |
|
"grad_norm": 1.1015625, |
|
"learning_rate": 1.6325701619131246e-05, |
|
"loss": 0.4538, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 0.695731153496821, |
|
"grad_norm": 1.015625, |
|
"learning_rate": 1.6301338711853695e-05, |
|
"loss": 0.4598, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 0.6975476839237057, |
|
"grad_norm": 1.0546875, |
|
"learning_rate": 1.6276913612907005e-05, |
|
"loss": 0.4586, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 0.6993642143505904, |
|
"grad_norm": 0.97265625, |
|
"learning_rate": 1.6252426563357054e-05, |
|
"loss": 0.4695, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.701180744777475, |
|
"grad_norm": 1.015625, |
|
"learning_rate": 1.6227877804881126e-05, |
|
"loss": 0.4743, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 0.7029972752043597, |
|
"grad_norm": 0.9296875, |
|
"learning_rate": 1.6203267579765563e-05, |
|
"loss": 0.4701, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 0.7048138056312443, |
|
"grad_norm": 1.0390625, |
|
"learning_rate": 1.6178596130903345e-05, |
|
"loss": 0.4578, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 0.706630336058129, |
|
"grad_norm": 0.9375, |
|
"learning_rate": 1.6153863701791717e-05, |
|
"loss": 0.4635, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 0.7084468664850136, |
|
"grad_norm": 1.0078125, |
|
"learning_rate": 1.6129070536529767e-05, |
|
"loss": 0.4641, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.7102633969118983, |
|
"grad_norm": 1.046875, |
|
"learning_rate": 1.6104216879816027e-05, |
|
"loss": 0.4715, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 0.7120799273387829, |
|
"grad_norm": 0.96484375, |
|
"learning_rate": 1.6079302976946055e-05, |
|
"loss": 0.4641, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 0.7138964577656676, |
|
"grad_norm": 1.0625, |
|
"learning_rate": 1.6054329073810016e-05, |
|
"loss": 0.4569, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 0.7157129881925522, |
|
"grad_norm": 0.9921875, |
|
"learning_rate": 1.602929541689025e-05, |
|
"loss": 0.461, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 0.7175295186194369, |
|
"grad_norm": 0.94140625, |
|
"learning_rate": 1.6004202253258844e-05, |
|
"loss": 0.4575, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.7193460490463215, |
|
"grad_norm": 1.0390625, |
|
"learning_rate": 1.597904983057519e-05, |
|
"loss": 0.4693, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 0.7211625794732062, |
|
"grad_norm": 0.9765625, |
|
"learning_rate": 1.595383839708355e-05, |
|
"loss": 0.46, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 0.7229791099000908, |
|
"grad_norm": 1.0078125, |
|
"learning_rate": 1.5928568201610593e-05, |
|
"loss": 0.4654, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 0.7247956403269755, |
|
"grad_norm": 0.90625, |
|
"learning_rate": 1.5903239493562948e-05, |
|
"loss": 0.4575, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 0.7266121707538601, |
|
"grad_norm": 1.0, |
|
"learning_rate": 1.5877852522924733e-05, |
|
"loss": 0.4535, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.7284287011807448, |
|
"grad_norm": 0.92578125, |
|
"learning_rate": 1.5852407540255103e-05, |
|
"loss": 0.4591, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 0.7302452316076294, |
|
"grad_norm": 0.94140625, |
|
"learning_rate": 1.5826904796685763e-05, |
|
"loss": 0.4627, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 0.7320617620345141, |
|
"grad_norm": 0.91796875, |
|
"learning_rate": 1.5801344543918495e-05, |
|
"loss": 0.4614, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 0.7338782924613987, |
|
"grad_norm": 1.0, |
|
"learning_rate": 1.5775727034222675e-05, |
|
"loss": 0.4685, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 0.7356948228882834, |
|
"grad_norm": 1.0234375, |
|
"learning_rate": 1.575005252043279e-05, |
|
"loss": 0.4719, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.737511353315168, |
|
"grad_norm": 0.94921875, |
|
"learning_rate": 1.572432125594591e-05, |
|
"loss": 0.4637, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 0.7393278837420527, |
|
"grad_norm": 1.0078125, |
|
"learning_rate": 1.5698533494719238e-05, |
|
"loss": 0.459, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 0.7411444141689373, |
|
"grad_norm": 0.98828125, |
|
"learning_rate": 1.567268949126757e-05, |
|
"loss": 0.4762, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 0.742960944595822, |
|
"grad_norm": 0.93359375, |
|
"learning_rate": 1.5646789500660772e-05, |
|
"loss": 0.457, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 0.7447774750227066, |
|
"grad_norm": 1.0078125, |
|
"learning_rate": 1.5620833778521306e-05, |
|
"loss": 0.4666, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.7465940054495913, |
|
"grad_norm": 1.0390625, |
|
"learning_rate": 1.5594822581021673e-05, |
|
"loss": 0.4609, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 0.7484105358764759, |
|
"grad_norm": 1.03125, |
|
"learning_rate": 1.556875616488188e-05, |
|
"loss": 0.4581, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 0.7502270663033606, |
|
"grad_norm": 0.9375, |
|
"learning_rate": 1.5542634787366942e-05, |
|
"loss": 0.4659, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 0.7520435967302452, |
|
"grad_norm": 1.078125, |
|
"learning_rate": 1.5516458706284306e-05, |
|
"loss": 0.4686, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 0.7538601271571299, |
|
"grad_norm": 0.9453125, |
|
"learning_rate": 1.549022817998132e-05, |
|
"loss": 0.454, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.7556766575840145, |
|
"grad_norm": 1.078125, |
|
"learning_rate": 1.5463943467342694e-05, |
|
"loss": 0.4529, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 0.7574931880108992, |
|
"grad_norm": 1.0390625, |
|
"learning_rate": 1.5437604827787925e-05, |
|
"loss": 0.4668, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 0.7593097184377838, |
|
"grad_norm": 1.109375, |
|
"learning_rate": 1.541121252126876e-05, |
|
"loss": 0.4632, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 0.7611262488646685, |
|
"grad_norm": 1.234375, |
|
"learning_rate": 1.5384766808266603e-05, |
|
"loss": 0.4579, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 0.7629427792915532, |
|
"grad_norm": 0.9296875, |
|
"learning_rate": 1.5358267949789968e-05, |
|
"loss": 0.4507, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.7647593097184378, |
|
"grad_norm": 1.1015625, |
|
"learning_rate": 1.5331716207371888e-05, |
|
"loss": 0.4527, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 0.7665758401453224, |
|
"grad_norm": 1.0859375, |
|
"learning_rate": 1.5305111843067343e-05, |
|
"loss": 0.4684, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 0.7683923705722071, |
|
"grad_norm": 1.015625, |
|
"learning_rate": 1.5278455119450666e-05, |
|
"loss": 0.4613, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 0.7702089009990918, |
|
"grad_norm": 1.1953125, |
|
"learning_rate": 1.5251746299612959e-05, |
|
"loss": 0.4552, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 0.7720254314259763, |
|
"grad_norm": 0.96875, |
|
"learning_rate": 1.5224985647159489e-05, |
|
"loss": 0.4582, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.773841961852861, |
|
"grad_norm": 1.1796875, |
|
"learning_rate": 1.5198173426207095e-05, |
|
"loss": 0.4601, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 0.7756584922797457, |
|
"grad_norm": 0.96484375, |
|
"learning_rate": 1.5171309901381572e-05, |
|
"loss": 0.4487, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 0.7774750227066304, |
|
"grad_norm": 1.015625, |
|
"learning_rate": 1.5144395337815066e-05, |
|
"loss": 0.4581, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 0.779291553133515, |
|
"grad_norm": 0.890625, |
|
"learning_rate": 1.5117430001143451e-05, |
|
"loss": 0.4645, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 0.7811080835603996, |
|
"grad_norm": 1.0, |
|
"learning_rate": 1.5090414157503715e-05, |
|
"loss": 0.4586, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.7829246139872843, |
|
"grad_norm": 0.98828125, |
|
"learning_rate": 1.5063348073531325e-05, |
|
"loss": 0.4542, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 0.784741144414169, |
|
"grad_norm": 0.9765625, |
|
"learning_rate": 1.503623201635761e-05, |
|
"loss": 0.4485, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 0.7865576748410535, |
|
"grad_norm": 1.015625, |
|
"learning_rate": 1.5009066253607101e-05, |
|
"loss": 0.4696, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 0.7883742052679382, |
|
"grad_norm": 0.9375, |
|
"learning_rate": 1.498185105339491e-05, |
|
"loss": 0.4636, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 0.7901907356948229, |
|
"grad_norm": 1.0546875, |
|
"learning_rate": 1.4954586684324077e-05, |
|
"loss": 0.4597, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.7920072661217076, |
|
"grad_norm": 0.91015625, |
|
"learning_rate": 1.4927273415482916e-05, |
|
"loss": 0.453, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 0.7938237965485921, |
|
"grad_norm": 0.90234375, |
|
"learning_rate": 1.4899911516442367e-05, |
|
"loss": 0.457, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 0.7956403269754768, |
|
"grad_norm": 1.03125, |
|
"learning_rate": 1.4872501257253325e-05, |
|
"loss": 0.4625, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 0.7974568574023615, |
|
"grad_norm": 0.85546875, |
|
"learning_rate": 1.484504290844398e-05, |
|
"loss": 0.4496, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 0.7992733878292462, |
|
"grad_norm": 1.0390625, |
|
"learning_rate": 1.4817536741017153e-05, |
|
"loss": 0.4508, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.8010899182561307, |
|
"grad_norm": 0.88671875, |
|
"learning_rate": 1.4789983026447612e-05, |
|
"loss": 0.4589, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 0.8029064486830154, |
|
"grad_norm": 1.0, |
|
"learning_rate": 1.4762382036679393e-05, |
|
"loss": 0.4499, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 0.8047229791099001, |
|
"grad_norm": 1.0078125, |
|
"learning_rate": 1.4734734044123123e-05, |
|
"loss": 0.4476, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 0.8065395095367848, |
|
"grad_norm": 0.94921875, |
|
"learning_rate": 1.470703932165333e-05, |
|
"loss": 0.452, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 0.8083560399636693, |
|
"grad_norm": 0.96875, |
|
"learning_rate": 1.4679298142605735e-05, |
|
"loss": 0.4438, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.810172570390554, |
|
"grad_norm": 0.98828125, |
|
"learning_rate": 1.4651510780774585e-05, |
|
"loss": 0.4426, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 0.8119891008174387, |
|
"grad_norm": 1.046875, |
|
"learning_rate": 1.462367751040992e-05, |
|
"loss": 0.451, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 0.8138056312443234, |
|
"grad_norm": 1.046875, |
|
"learning_rate": 1.4595798606214882e-05, |
|
"loss": 0.4637, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 0.815622161671208, |
|
"grad_norm": 0.984375, |
|
"learning_rate": 1.4567874343342996e-05, |
|
"loss": 0.4576, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 0.8174386920980926, |
|
"grad_norm": 1.1171875, |
|
"learning_rate": 1.4539904997395468e-05, |
|
"loss": 0.4755, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.8192552225249773, |
|
"grad_norm": 1.0390625, |
|
"learning_rate": 1.4511890844418453e-05, |
|
"loss": 0.4501, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 0.821071752951862, |
|
"grad_norm": 1.0546875, |
|
"learning_rate": 1.4483832160900326e-05, |
|
"loss": 0.4471, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 0.8228882833787466, |
|
"grad_norm": 0.91796875, |
|
"learning_rate": 1.4455729223768966e-05, |
|
"loss": 0.4491, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 0.8247048138056312, |
|
"grad_norm": 0.890625, |
|
"learning_rate": 1.442758231038902e-05, |
|
"loss": 0.4429, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 0.8265213442325159, |
|
"grad_norm": 0.93359375, |
|
"learning_rate": 1.4399391698559153e-05, |
|
"loss": 0.4597, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.8283378746594006, |
|
"grad_norm": 0.99609375, |
|
"learning_rate": 1.437115766650933e-05, |
|
"loss": 0.4525, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 0.8301544050862852, |
|
"grad_norm": 1.03125, |
|
"learning_rate": 1.4342880492898048e-05, |
|
"loss": 0.4568, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 0.8319709355131698, |
|
"grad_norm": 1.0234375, |
|
"learning_rate": 1.4314560456809592e-05, |
|
"loss": 0.4491, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 0.8337874659400545, |
|
"grad_norm": 0.97265625, |
|
"learning_rate": 1.4286197837751286e-05, |
|
"loss": 0.4501, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 0.8356039963669392, |
|
"grad_norm": 1.0390625, |
|
"learning_rate": 1.4257792915650728e-05, |
|
"loss": 0.4601, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.8374205267938238, |
|
"grad_norm": 1.0078125, |
|
"learning_rate": 1.4229345970853032e-05, |
|
"loss": 0.4474, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 0.8392370572207084, |
|
"grad_norm": 1.0234375, |
|
"learning_rate": 1.4200857284118067e-05, |
|
"loss": 0.4454, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 0.8410535876475931, |
|
"grad_norm": 1.0703125, |
|
"learning_rate": 1.4172327136617656e-05, |
|
"loss": 0.4454, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 0.8428701180744778, |
|
"grad_norm": 1.2421875, |
|
"learning_rate": 1.4143755809932843e-05, |
|
"loss": 0.4471, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 0.8446866485013624, |
|
"grad_norm": 1.109375, |
|
"learning_rate": 1.411514358605109e-05, |
|
"loss": 0.4513, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.846503178928247, |
|
"grad_norm": 0.98828125, |
|
"learning_rate": 1.4086490747363492e-05, |
|
"loss": 0.4702, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 0.8483197093551317, |
|
"grad_norm": 1.0234375, |
|
"learning_rate": 1.4057797576662e-05, |
|
"loss": 0.4456, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 0.8501362397820164, |
|
"grad_norm": 0.93359375, |
|
"learning_rate": 1.4029064357136628e-05, |
|
"loss": 0.4347, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 0.851952770208901, |
|
"grad_norm": 1.0234375, |
|
"learning_rate": 1.4000291372372647e-05, |
|
"loss": 0.4546, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 0.8537693006357856, |
|
"grad_norm": 1.0, |
|
"learning_rate": 1.3971478906347806e-05, |
|
"loss": 0.4448, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.8555858310626703, |
|
"grad_norm": 1.1328125, |
|
"learning_rate": 1.3942627243429512e-05, |
|
"loss": 0.4443, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 0.857402361489555, |
|
"grad_norm": 1.171875, |
|
"learning_rate": 1.3913736668372027e-05, |
|
"loss": 0.4598, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 0.8592188919164396, |
|
"grad_norm": 0.96875, |
|
"learning_rate": 1.3884807466313664e-05, |
|
"loss": 0.444, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 0.8610354223433242, |
|
"grad_norm": 0.92578125, |
|
"learning_rate": 1.3855839922773968e-05, |
|
"loss": 0.4509, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 0.8628519527702089, |
|
"grad_norm": 0.98828125, |
|
"learning_rate": 1.3826834323650899e-05, |
|
"loss": 0.4655, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.8646684831970936, |
|
"grad_norm": 0.98828125, |
|
"learning_rate": 1.3797790955218014e-05, |
|
"loss": 0.4516, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 0.8664850136239782, |
|
"grad_norm": 0.90234375, |
|
"learning_rate": 1.3768710104121628e-05, |
|
"loss": 0.458, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 0.8683015440508629, |
|
"grad_norm": 1.1640625, |
|
"learning_rate": 1.3739592057378005e-05, |
|
"loss": 0.4561, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 0.8701180744777475, |
|
"grad_norm": 1.0234375, |
|
"learning_rate": 1.3710437102370511e-05, |
|
"loss": 0.4539, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 0.8719346049046321, |
|
"grad_norm": 0.9609375, |
|
"learning_rate": 1.3681245526846782e-05, |
|
"loss": 0.4428, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.8737511353315168, |
|
"grad_norm": 0.95703125, |
|
"learning_rate": 1.365201761891588e-05, |
|
"loss": 0.4426, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 0.8755676657584015, |
|
"grad_norm": 1.09375, |
|
"learning_rate": 1.3622753667045459e-05, |
|
"loss": 0.4518, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 0.8773841961852861, |
|
"grad_norm": 0.984375, |
|
"learning_rate": 1.3593453960058909e-05, |
|
"loss": 0.4453, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 0.8792007266121707, |
|
"grad_norm": 1.0234375, |
|
"learning_rate": 1.3564118787132507e-05, |
|
"loss": 0.4581, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 0.8810172570390554, |
|
"grad_norm": 1.0390625, |
|
"learning_rate": 1.3534748437792573e-05, |
|
"loss": 0.4543, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.8828337874659401, |
|
"grad_norm": 0.921875, |
|
"learning_rate": 1.350534320191259e-05, |
|
"loss": 0.4446, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 0.8846503178928247, |
|
"grad_norm": 1.0625, |
|
"learning_rate": 1.347590336971037e-05, |
|
"loss": 0.4394, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 0.8864668483197093, |
|
"grad_norm": 0.96875, |
|
"learning_rate": 1.344642923174517e-05, |
|
"loss": 0.4523, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 0.888283378746594, |
|
"grad_norm": 1.046875, |
|
"learning_rate": 1.3416921078914835e-05, |
|
"loss": 0.4602, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 0.8900999091734787, |
|
"grad_norm": 1.015625, |
|
"learning_rate": 1.3387379202452917e-05, |
|
"loss": 0.4418, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.8919164396003633, |
|
"grad_norm": 0.9609375, |
|
"learning_rate": 1.3357803893925807e-05, |
|
"loss": 0.4399, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 0.8937329700272479, |
|
"grad_norm": 1.046875, |
|
"learning_rate": 1.3328195445229869e-05, |
|
"loss": 0.4492, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 0.8955495004541326, |
|
"grad_norm": 1.0078125, |
|
"learning_rate": 1.329855414858853e-05, |
|
"loss": 0.4605, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 0.8973660308810173, |
|
"grad_norm": 0.9453125, |
|
"learning_rate": 1.3268880296549424e-05, |
|
"loss": 0.4497, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 0.8991825613079019, |
|
"grad_norm": 0.96875, |
|
"learning_rate": 1.3239174181981496e-05, |
|
"loss": 0.4604, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.9009990917347865, |
|
"grad_norm": 1.0234375, |
|
"learning_rate": 1.3209436098072095e-05, |
|
"loss": 0.4542, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 0.9028156221616712, |
|
"grad_norm": 0.94140625, |
|
"learning_rate": 1.317966633832411e-05, |
|
"loss": 0.4468, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 0.9046321525885559, |
|
"grad_norm": 0.98046875, |
|
"learning_rate": 1.3149865196553049e-05, |
|
"loss": 0.4623, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 0.9064486830154405, |
|
"grad_norm": 0.97265625, |
|
"learning_rate": 1.3120032966884151e-05, |
|
"loss": 0.4479, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 0.9082652134423251, |
|
"grad_norm": 0.9375, |
|
"learning_rate": 1.3090169943749475e-05, |
|
"loss": 0.456, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.9100817438692098, |
|
"grad_norm": 1.078125, |
|
"learning_rate": 1.306027642188501e-05, |
|
"loss": 0.4532, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 0.9118982742960945, |
|
"grad_norm": 0.953125, |
|
"learning_rate": 1.3030352696327741e-05, |
|
"loss": 0.4517, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 0.9137148047229791, |
|
"grad_norm": 1.0703125, |
|
"learning_rate": 1.3000399062412763e-05, |
|
"loss": 0.4304, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 0.9155313351498637, |
|
"grad_norm": 1.0, |
|
"learning_rate": 1.297041581577035e-05, |
|
"loss": 0.4352, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 0.9173478655767484, |
|
"grad_norm": 1.09375, |
|
"learning_rate": 1.294040325232304e-05, |
|
"loss": 0.4319, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.9191643960036331, |
|
"grad_norm": 0.93359375, |
|
"learning_rate": 1.2910361668282718e-05, |
|
"loss": 0.451, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 0.9209809264305178, |
|
"grad_norm": 0.95703125, |
|
"learning_rate": 1.2880291360147694e-05, |
|
"loss": 0.4431, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 0.9227974568574023, |
|
"grad_norm": 0.93359375, |
|
"learning_rate": 1.2850192624699762e-05, |
|
"loss": 0.4596, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 0.924613987284287, |
|
"grad_norm": 0.96875, |
|
"learning_rate": 1.2820065759001295e-05, |
|
"loss": 0.4516, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 0.9264305177111717, |
|
"grad_norm": 0.97265625, |
|
"learning_rate": 1.2789911060392295e-05, |
|
"loss": 0.4492, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.9282470481380564, |
|
"grad_norm": 0.9765625, |
|
"learning_rate": 1.2759728826487461e-05, |
|
"loss": 0.4476, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 0.9300635785649409, |
|
"grad_norm": 0.94921875, |
|
"learning_rate": 1.2729519355173254e-05, |
|
"loss": 0.4468, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 0.9318801089918256, |
|
"grad_norm": 1.015625, |
|
"learning_rate": 1.2699282944604968e-05, |
|
"loss": 0.4413, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 0.9336966394187103, |
|
"grad_norm": 0.9296875, |
|
"learning_rate": 1.2669019893203758e-05, |
|
"loss": 0.4409, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 0.935513169845595, |
|
"grad_norm": 0.96875, |
|
"learning_rate": 1.2638730499653731e-05, |
|
"loss": 0.4381, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 0.9373297002724795, |
|
"grad_norm": 0.98046875, |
|
"learning_rate": 1.2608415062898971e-05, |
|
"loss": 0.4573, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 0.9391462306993642, |
|
"grad_norm": 0.90625, |
|
"learning_rate": 1.25780738821406e-05, |
|
"loss": 0.4509, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 0.9409627611262489, |
|
"grad_norm": 1.078125, |
|
"learning_rate": 1.2547707256833823e-05, |
|
"loss": 0.4566, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 0.9427792915531336, |
|
"grad_norm": 0.93359375, |
|
"learning_rate": 1.2517315486684973e-05, |
|
"loss": 0.4379, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 0.9445958219800181, |
|
"grad_norm": 1.1171875, |
|
"learning_rate": 1.2486898871648552e-05, |
|
"loss": 0.439, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.9464123524069028, |
|
"grad_norm": 1.046875, |
|
"learning_rate": 1.2456457711924266e-05, |
|
"loss": 0.4341, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 0.9482288828337875, |
|
"grad_norm": 0.91015625, |
|
"learning_rate": 1.2425992307954075e-05, |
|
"loss": 0.4375, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 0.9500454132606722, |
|
"grad_norm": 1.0703125, |
|
"learning_rate": 1.2395502960419221e-05, |
|
"loss": 0.4627, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 0.9518619436875567, |
|
"grad_norm": 1.03125, |
|
"learning_rate": 1.236498997023725e-05, |
|
"loss": 0.445, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 0.9536784741144414, |
|
"grad_norm": 0.8671875, |
|
"learning_rate": 1.2334453638559057e-05, |
|
"loss": 0.4422, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.9554950045413261, |
|
"grad_norm": 1.0234375, |
|
"learning_rate": 1.2303894266765908e-05, |
|
"loss": 0.4506, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 0.9573115349682108, |
|
"grad_norm": 0.859375, |
|
"learning_rate": 1.2273312156466466e-05, |
|
"loss": 0.4475, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 0.9591280653950953, |
|
"grad_norm": 0.91015625, |
|
"learning_rate": 1.2242707609493814e-05, |
|
"loss": 0.436, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 0.96094459582198, |
|
"grad_norm": 0.9765625, |
|
"learning_rate": 1.2212080927902474e-05, |
|
"loss": 0.4527, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 0.9627611262488647, |
|
"grad_norm": 0.8671875, |
|
"learning_rate": 1.2181432413965428e-05, |
|
"loss": 0.445, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.9645776566757494, |
|
"grad_norm": 1.0625, |
|
"learning_rate": 1.2150762370171137e-05, |
|
"loss": 0.4447, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 0.9663941871026339, |
|
"grad_norm": 1.0546875, |
|
"learning_rate": 1.212007109922055e-05, |
|
"loss": 0.4446, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 0.9682107175295186, |
|
"grad_norm": 0.86328125, |
|
"learning_rate": 1.2089358904024117e-05, |
|
"loss": 0.4394, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 0.9700272479564033, |
|
"grad_norm": 0.98828125, |
|
"learning_rate": 1.2058626087698814e-05, |
|
"loss": 0.4407, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 0.971843778383288, |
|
"grad_norm": 0.9609375, |
|
"learning_rate": 1.2027872953565125e-05, |
|
"loss": 0.4396, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 0.9736603088101726, |
|
"grad_norm": 0.94140625, |
|
"learning_rate": 1.1997099805144071e-05, |
|
"loss": 0.4557, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 0.9754768392370572, |
|
"grad_norm": 0.90625, |
|
"learning_rate": 1.19663069461542e-05, |
|
"loss": 0.4415, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 0.9772933696639419, |
|
"grad_norm": 0.98046875, |
|
"learning_rate": 1.1935494680508606e-05, |
|
"loss": 0.4474, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 0.9791099000908265, |
|
"grad_norm": 0.8828125, |
|
"learning_rate": 1.1904663312311902e-05, |
|
"loss": 0.4546, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 0.9809264305177112, |
|
"grad_norm": 1.046875, |
|
"learning_rate": 1.187381314585725e-05, |
|
"loss": 0.4402, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.9827429609445958, |
|
"grad_norm": 0.90234375, |
|
"learning_rate": 1.1842944485623335e-05, |
|
"loss": 0.4257, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 0.9845594913714805, |
|
"grad_norm": 0.87109375, |
|
"learning_rate": 1.1812057636271374e-05, |
|
"loss": 0.4409, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 0.9863760217983651, |
|
"grad_norm": 0.99609375, |
|
"learning_rate": 1.17811529026421e-05, |
|
"loss": 0.4522, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 0.9881925522252498, |
|
"grad_norm": 0.953125, |
|
"learning_rate": 1.1750230589752763e-05, |
|
"loss": 0.4594, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 0.9900090826521344, |
|
"grad_norm": 1.0234375, |
|
"learning_rate": 1.1719291002794096e-05, |
|
"loss": 0.437, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 0.9918256130790191, |
|
"grad_norm": 1.0234375, |
|
"learning_rate": 1.1688334447127338e-05, |
|
"loss": 0.4266, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 0.9936421435059037, |
|
"grad_norm": 0.90234375, |
|
"learning_rate": 1.1657361228281198e-05, |
|
"loss": 0.4377, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 0.9954586739327884, |
|
"grad_norm": 0.9140625, |
|
"learning_rate": 1.1626371651948839e-05, |
|
"loss": 0.4373, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 0.997275204359673, |
|
"grad_norm": 1.046875, |
|
"learning_rate": 1.1595366023984864e-05, |
|
"loss": 0.4395, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 0.9990917347865577, |
|
"grad_norm": 0.92578125, |
|
"learning_rate": 1.156434465040231e-05, |
|
"loss": 0.4454, |
|
"step": 550 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 1100, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 2, |
|
"save_steps": 550, |
|
"total_flos": 6.492324211615334e+18, |
|
"train_batch_size": 1, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |