{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 9.982174688057041,
  "eval_steps": 500,
  "global_step": 2800,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0035650623885918,
      "grad_norm": 544.0,
      "learning_rate": 7.142857142857143e-07,
      "loss": 53.7211,
      "step": 1
    },
    {
      "epoch": 0.017825311942959002,
      "grad_norm": 592.0,
      "learning_rate": 3.5714285714285714e-06,
      "loss": 57.6044,
      "step": 5
    },
    {
      "epoch": 0.035650623885918005,
      "grad_norm": 608.0,
      "learning_rate": 7.142857142857143e-06,
      "loss": 57.8426,
      "step": 10
    },
    {
      "epoch": 0.053475935828877004,
      "grad_norm": 440.0,
      "learning_rate": 1.0714285714285714e-05,
      "loss": 48.446,
      "step": 15
    },
    {
      "epoch": 0.07130124777183601,
      "grad_norm": 374.0,
      "learning_rate": 1.4285714285714285e-05,
      "loss": 39.2254,
      "step": 20
    },
    {
      "epoch": 0.08912655971479501,
      "grad_norm": 66.5,
      "learning_rate": 1.785714285714286e-05,
      "loss": 27.6479,
      "step": 25
    },
    {
      "epoch": 0.10695187165775401,
      "grad_norm": 35.75,
      "learning_rate": 2.1428571428571428e-05,
      "loss": 24.2563,
      "step": 30
    },
    {
      "epoch": 0.12477718360071301,
      "grad_norm": 32.0,
      "learning_rate": 2.5e-05,
      "loss": 23.2193,
      "step": 35
    },
    {
      "epoch": 0.14260249554367202,
      "grad_norm": 19.75,
      "learning_rate": 2.857142857142857e-05,
      "loss": 21.5174,
      "step": 40
    },
    {
      "epoch": 0.16042780748663102,
      "grad_norm": 11.0625,
      "learning_rate": 3.2142857142857144e-05,
      "loss": 19.7135,
      "step": 45
    },
    {
      "epoch": 0.17825311942959002,
      "grad_norm": 7.1875,
      "learning_rate": 3.571428571428572e-05,
      "loss": 19.1551,
      "step": 50
    },
    {
      "epoch": 0.19607843137254902,
      "grad_norm": 9.0,
      "learning_rate": 3.928571428571429e-05,
      "loss": 18.2747,
      "step": 55
    },
    {
      "epoch": 0.21390374331550802,
      "grad_norm": 13.1875,
      "learning_rate": 4.2857142857142856e-05,
      "loss": 17.2428,
      "step": 60
    },
    {
      "epoch": 0.23172905525846701,
      "grad_norm": 23.25,
      "learning_rate": 4.642857142857143e-05,
      "loss": 16.4017,
      "step": 65
    },
    {
      "epoch": 0.24955436720142601,
      "grad_norm": 57.0,
      "learning_rate": 5e-05,
      "loss": 14.1658,
      "step": 70
    },
    {
      "epoch": 0.26737967914438504,
      "grad_norm": 57.5,
      "learning_rate": 5.3571428571428575e-05,
      "loss": 9.052,
      "step": 75
    },
    {
      "epoch": 0.28520499108734404,
      "grad_norm": 10.875,
      "learning_rate": 5.714285714285714e-05,
      "loss": 3.1244,
      "step": 80
    },
    {
      "epoch": 0.30303030303030304,
      "grad_norm": 10.0625,
      "learning_rate": 6.0714285714285715e-05,
      "loss": 2.0444,
      "step": 85
    },
    {
      "epoch": 0.32085561497326204,
      "grad_norm": 2.640625,
      "learning_rate": 6.428571428571429e-05,
      "loss": 1.7451,
      "step": 90
    },
    {
      "epoch": 0.33868092691622104,
      "grad_norm": 2.96875,
      "learning_rate": 6.785714285714286e-05,
      "loss": 1.5311,
      "step": 95
    },
    {
      "epoch": 0.35650623885918004,
      "grad_norm": 5.90625,
      "learning_rate": 7.142857142857143e-05,
      "loss": 1.4329,
      "step": 100
    },
    {
      "epoch": 0.37433155080213903,
      "grad_norm": 2.78125,
      "learning_rate": 7.500000000000001e-05,
      "loss": 1.2936,
      "step": 105
    },
    {
      "epoch": 0.39215686274509803,
      "grad_norm": 1.8125,
      "learning_rate": 7.857142857142858e-05,
      "loss": 1.2525,
      "step": 110
    },
    {
      "epoch": 0.40998217468805703,
      "grad_norm": 2.578125,
      "learning_rate": 8.214285714285714e-05,
      "loss": 1.2163,
      "step": 115
    },
    {
      "epoch": 0.42780748663101603,
      "grad_norm": 10.3125,
      "learning_rate": 8.571428571428571e-05,
      "loss": 1.1466,
      "step": 120
    },
    {
      "epoch": 0.44563279857397503,
      "grad_norm": 8.5625,
      "learning_rate": 8.92857142857143e-05,
      "loss": 1.1372,
      "step": 125
    },
    {
      "epoch": 0.46345811051693403,
      "grad_norm": 1.375,
      "learning_rate": 9.285714285714286e-05,
      "loss": 1.0929,
      "step": 130
    },
    {
      "epoch": 0.48128342245989303,
      "grad_norm": 3.53125,
      "learning_rate": 9.642857142857143e-05,
      "loss": 1.0724,
      "step": 135
    },
    {
      "epoch": 0.49910873440285203,
      "grad_norm": 15.125,
      "learning_rate": 0.0001,
      "loss": 1.0491,
      "step": 140
    },
    {
      "epoch": 0.5169340463458111,
      "grad_norm": 1.84375,
      "learning_rate": 0.00010357142857142859,
      "loss": 1.03,
      "step": 145
    },
    {
      "epoch": 0.5347593582887701,
      "grad_norm": 2.734375,
      "learning_rate": 0.00010714285714285715,
      "loss": 0.9893,
      "step": 150
    },
    {
      "epoch": 0.5525846702317291,
      "grad_norm": 2.71875,
      "learning_rate": 0.00011071428571428572,
      "loss": 0.9907,
      "step": 155
    },
    {
      "epoch": 0.5704099821746881,
      "grad_norm": 7.21875,
      "learning_rate": 0.00011428571428571428,
      "loss": 0.9565,
      "step": 160
    },
    {
      "epoch": 0.5882352941176471,
      "grad_norm": 7.375,
      "learning_rate": 0.00011785714285714287,
      "loss": 0.937,
      "step": 165
    },
    {
      "epoch": 0.6060606060606061,
      "grad_norm": 2.59375,
      "learning_rate": 0.00012142857142857143,
      "loss": 0.9235,
      "step": 170
    },
    {
      "epoch": 0.6238859180035651,
      "grad_norm": 5.1875,
      "learning_rate": 0.000125,
      "loss": 0.9509,
      "step": 175
    },
    {
      "epoch": 0.6417112299465241,
      "grad_norm": 2.28125,
      "learning_rate": 0.00012857142857142858,
      "loss": 0.9249,
      "step": 180
    },
    {
      "epoch": 0.6595365418894831,
      "grad_norm": 1.8984375,
      "learning_rate": 0.00013214285714285715,
      "loss": 0.9199,
      "step": 185
    },
    {
      "epoch": 0.6773618538324421,
      "grad_norm": 6.8125,
      "learning_rate": 0.00013571428571428572,
      "loss": 0.9006,
      "step": 190
    },
    {
      "epoch": 0.6951871657754011,
      "grad_norm": 2.140625,
      "learning_rate": 0.0001392857142857143,
      "loss": 0.9527,
      "step": 195
    },
    {
      "epoch": 0.7130124777183601,
      "grad_norm": 0.8828125,
      "learning_rate": 0.00014285714285714287,
      "loss": 0.9255,
      "step": 200
    },
    {
      "epoch": 0.7308377896613191,
      "grad_norm": 4.875,
      "learning_rate": 0.00014642857142857141,
      "loss": 0.9124,
      "step": 205
    },
    {
      "epoch": 0.7486631016042781,
      "grad_norm": 3.328125,
      "learning_rate": 0.00015000000000000001,
      "loss": 0.894,
      "step": 210
    },
    {
      "epoch": 0.7664884135472371,
      "grad_norm": 2.5625,
      "learning_rate": 0.0001535714285714286,
      "loss": 0.8926,
      "step": 215
    },
    {
      "epoch": 0.7843137254901961,
      "grad_norm": 5.5625,
      "learning_rate": 0.00015714285714285716,
      "loss": 0.8809,
      "step": 220
    },
    {
      "epoch": 0.8021390374331551,
      "grad_norm": 1.8984375,
      "learning_rate": 0.00016071428571428573,
      "loss": 0.88,
      "step": 225
    },
    {
      "epoch": 0.8199643493761141,
      "grad_norm": 2.171875,
      "learning_rate": 0.00016428571428571428,
      "loss": 0.8727,
      "step": 230
    },
    {
      "epoch": 0.8377896613190731,
      "grad_norm": 1.875,
      "learning_rate": 0.00016785714285714288,
      "loss": 0.854,
      "step": 235
    },
    {
      "epoch": 0.8556149732620321,
      "grad_norm": 5.84375,
      "learning_rate": 0.00017142857142857143,
      "loss": 0.8838,
      "step": 240
    },
    {
      "epoch": 0.8734402852049911,
      "grad_norm": 1.7109375,
      "learning_rate": 0.000175,
      "loss": 0.8374,
      "step": 245
    },
    {
      "epoch": 0.8912655971479501,
      "grad_norm": 1.3671875,
      "learning_rate": 0.0001785714285714286,
      "loss": 0.8766,
      "step": 250
    },
    {
      "epoch": 0.9090909090909091,
      "grad_norm": 1.015625,
      "learning_rate": 0.00018214285714285714,
      "loss": 0.8761,
      "step": 255
    },
    {
      "epoch": 0.9269162210338681,
      "grad_norm": 2.5,
      "learning_rate": 0.00018571428571428572,
      "loss": 0.8501,
      "step": 260
    },
    {
      "epoch": 0.9447415329768271,
      "grad_norm": 3.78125,
      "learning_rate": 0.0001892857142857143,
      "loss": 0.8405,
      "step": 265
    },
    {
      "epoch": 0.9625668449197861,
      "grad_norm": 2.671875,
      "learning_rate": 0.00019285714285714286,
      "loss": 0.8425,
      "step": 270
    },
    {
      "epoch": 0.9803921568627451,
      "grad_norm": 4.1875,
      "learning_rate": 0.00019642857142857144,
      "loss": 0.8156,
      "step": 275
    },
    {
      "epoch": 0.9982174688057041,
      "grad_norm": 1.640625,
      "learning_rate": 0.0002,
      "loss": 0.8742,
      "step": 280
    },
    {
      "epoch": 0.9982174688057041,
      "eval_loss": 2.193777322769165,
      "eval_runtime": 1.0432,
      "eval_samples_per_second": 4.793,
      "eval_steps_per_second": 1.917,
      "step": 280
    },
    {
      "epoch": 1.0160427807486632,
      "grad_norm": 1.7109375,
      "learning_rate": 0.00019999805729315381,
      "loss": 0.8053,
      "step": 285
    },
    {
      "epoch": 1.0338680926916222,
      "grad_norm": 15.875,
      "learning_rate": 0.0001999922292480975,
      "loss": 0.7993,
      "step": 290
    },
    {
      "epoch": 1.0516934046345812,
      "grad_norm": 0.703125,
      "learning_rate": 0.00019998251609127464,
      "loss": 0.7952,
      "step": 295
    },
    {
      "epoch": 1.0695187165775402,
      "grad_norm": 1.6953125,
      "learning_rate": 0.00019996891820008164,
      "loss": 0.8014,
      "step": 300
    },
    {
      "epoch": 1.0873440285204992,
      "grad_norm": 2.9375,
      "learning_rate": 0.00019995143610285277,
      "loss": 0.8063,
      "step": 305
    },
    {
      "epoch": 1.1051693404634582,
      "grad_norm": 1.2109375,
      "learning_rate": 0.00019993007047883988,
      "loss": 0.7976,
      "step": 310
    },
    {
      "epoch": 1.1229946524064172,
      "grad_norm": 3.84375,
      "learning_rate": 0.0001999048221581858,
      "loss": 0.8032,
      "step": 315
    },
    {
      "epoch": 1.1408199643493762,
      "grad_norm": 1.2109375,
      "learning_rate": 0.00019987569212189224,
      "loss": 0.803,
      "step": 320
    },
    {
      "epoch": 1.1586452762923352,
      "grad_norm": 1.0078125,
      "learning_rate": 0.00019984268150178167,
      "loss": 0.7784,
      "step": 325
    },
    {
      "epoch": 1.1764705882352942,
      "grad_norm": 2.03125,
      "learning_rate": 0.0001998057915804532,
      "loss": 0.8112,
      "step": 330
    },
    {
      "epoch": 1.1942959001782532,
      "grad_norm": 1.2109375,
      "learning_rate": 0.0001997650237912329,
      "loss": 0.8125,
      "step": 335
    },
    {
      "epoch": 1.2121212121212122,
      "grad_norm": 1.703125,
      "learning_rate": 0.00019972037971811802,
      "loss": 0.8039,
      "step": 340
    },
    {
      "epoch": 1.2299465240641712,
      "grad_norm": 2.8125,
      "learning_rate": 0.00019967186109571552,
      "loss": 0.8025,
      "step": 345
    },
    {
      "epoch": 1.2477718360071302,
      "grad_norm": 1.046875,
      "learning_rate": 0.00019961946980917456,
      "loss": 0.7937,
      "step": 350
    },
    {
      "epoch": 1.2655971479500892,
      "grad_norm": 1.1640625,
      "learning_rate": 0.00019956320789411337,
      "loss": 0.7789,
      "step": 355
    },
    {
      "epoch": 1.2834224598930482,
      "grad_norm": 0.99609375,
      "learning_rate": 0.00019950307753654017,
      "loss": 0.7762,
      "step": 360
    },
    {
      "epoch": 1.3012477718360071,
      "grad_norm": 1.8515625,
      "learning_rate": 0.00019943908107276798,
      "loss": 0.7617,
      "step": 365
    },
    {
      "epoch": 1.3190730837789661,
      "grad_norm": 1.3359375,
      "learning_rate": 0.00019937122098932428,
      "loss": 0.7645,
      "step": 370
    },
    {
      "epoch": 1.3368983957219251,
      "grad_norm": 0.65625,
      "learning_rate": 0.00019929949992285396,
      "loss": 0.764,
      "step": 375
    },
    {
      "epoch": 1.3547237076648841,
      "grad_norm": 0.8671875,
      "learning_rate": 0.00019922392066001722,
      "loss": 0.781,
      "step": 380
    },
    {
      "epoch": 1.3725490196078431,
      "grad_norm": 0.71484375,
      "learning_rate": 0.00019914448613738106,
      "loss": 0.7643,
      "step": 385
    },
    {
      "epoch": 1.3903743315508021,
      "grad_norm": 0.95703125,
      "learning_rate": 0.0001990611994413053,
      "loss": 0.7628,
      "step": 390
    },
    {
      "epoch": 1.4081996434937611,
      "grad_norm": 1.8125,
      "learning_rate": 0.00019897406380782261,
      "loss": 0.7739,
      "step": 395
    },
    {
      "epoch": 1.4260249554367201,
      "grad_norm": 0.7890625,
      "learning_rate": 0.00019888308262251285,
      "loss": 0.774,
      "step": 400
    },
    {
      "epoch": 1.4438502673796791,
      "grad_norm": 2.28125,
      "learning_rate": 0.00019878825942037148,
      "loss": 0.7485,
      "step": 405
    },
    {
      "epoch": 1.4616755793226381,
      "grad_norm": 0.5859375,
      "learning_rate": 0.00019868959788567212,
      "loss": 0.7735,
      "step": 410
    },
    {
      "epoch": 1.4795008912655971,
      "grad_norm": 1.1796875,
      "learning_rate": 0.0001985871018518236,
      "loss": 0.7649,
      "step": 415
    },
    {
      "epoch": 1.4973262032085561,
      "grad_norm": 2.953125,
      "learning_rate": 0.00019848077530122083,
      "loss": 0.7589,
      "step": 420
    },
    {
      "epoch": 1.5151515151515151,
      "grad_norm": 1.5859375,
      "learning_rate": 0.00019837062236509014,
      "loss": 0.7574,
      "step": 425
    },
    {
      "epoch": 1.5329768270944741,
      "grad_norm": 2.609375,
      "learning_rate": 0.00019825664732332884,
      "loss": 0.7551,
      "step": 430
    },
    {
      "epoch": 1.5508021390374331,
      "grad_norm": 0.74609375,
      "learning_rate": 0.00019813885460433879,
      "loss": 0.7595,
      "step": 435
    },
    {
      "epoch": 1.5686274509803921,
      "grad_norm": 0.9296875,
      "learning_rate": 0.00019801724878485438,
      "loss": 0.7716,
      "step": 440
    },
    {
      "epoch": 1.5864527629233511,
      "grad_norm": 1.484375,
      "learning_rate": 0.00019789183458976484,
      "loss": 0.7629,
      "step": 445
    },
    {
      "epoch": 1.6042780748663101,
      "grad_norm": 1.6328125,
      "learning_rate": 0.00019776261689193048,
      "loss": 0.7372,
      "step": 450
    },
    {
      "epoch": 1.6221033868092691,
      "grad_norm": 1.125,
      "learning_rate": 0.00019762960071199333,
      "loss": 0.7505,
      "step": 455
    },
    {
      "epoch": 1.6399286987522281,
      "grad_norm": 2.0625,
      "learning_rate": 0.00019749279121818235,
      "loss": 0.747,
      "step": 460
    },
    {
      "epoch": 1.6577540106951871,
      "grad_norm": 3.046875,
      "learning_rate": 0.00019735219372611233,
      "loss": 0.7641,
      "step": 465
    },
    {
      "epoch": 1.6755793226381461,
      "grad_norm": 1.4921875,
      "learning_rate": 0.00019720781369857746,
      "loss": 0.7568,
      "step": 470
    },
    {
      "epoch": 1.6934046345811051,
      "grad_norm": 0.76953125,
      "learning_rate": 0.0001970596567453391,
      "loss": 0.7767,
      "step": 475
    },
    {
      "epoch": 1.7112299465240641,
      "grad_norm": 0.53125,
      "learning_rate": 0.0001969077286229078,
      "loss": 0.7286,
      "step": 480
    },
    {
      "epoch": 1.7290552584670231,
      "grad_norm": 1.3359375,
      "learning_rate": 0.00019675203523431966,
      "loss": 0.742,
      "step": 485
    },
    {
      "epoch": 1.7468805704099821,
      "grad_norm": 3.15625,
      "learning_rate": 0.00019659258262890683,
      "loss": 0.7531,
      "step": 490
    },
    {
      "epoch": 1.7647058823529411,
      "grad_norm": 0.734375,
      "learning_rate": 0.0001964293770020628,
      "loss": 0.7452,
      "step": 495
    },
    {
      "epoch": 1.7825311942959001,
      "grad_norm": 4.25,
      "learning_rate": 0.0001962624246950012,
      "loss": 0.7418,
      "step": 500
    },
    {
      "epoch": 1.8003565062388591,
      "grad_norm": 1.4296875,
      "learning_rate": 0.00019609173219450998,
      "loss": 0.7584,
      "step": 505
    },
    {
      "epoch": 1.8181818181818183,
      "grad_norm": 5.0625,
      "learning_rate": 0.0001959173061326988,
      "loss": 0.7544,
      "step": 510
    },
    {
      "epoch": 1.8360071301247771,
      "grad_norm": 2.109375,
      "learning_rate": 0.0001957391532867418,
      "loss": 0.7601,
      "step": 515
    },
    {
      "epoch": 1.8538324420677363,
      "grad_norm": 1.8828125,
      "learning_rate": 0.0001955572805786141,
      "loss": 0.7313,
      "step": 520
    },
    {
      "epoch": 1.8716577540106951,
      "grad_norm": 1.0390625,
      "learning_rate": 0.0001953716950748227,
      "loss": 0.7241,
      "step": 525
    },
    {
      "epoch": 1.8894830659536543,
      "grad_norm": 1.34375,
      "learning_rate": 0.00019518240398613227,
      "loss": 0.7512,
      "step": 530
    },
    {
      "epoch": 1.9073083778966131,
      "grad_norm": 1.6328125,
      "learning_rate": 0.0001949894146672846,
      "loss": 0.7244,
      "step": 535
    },
    {
      "epoch": 1.9251336898395723,
      "grad_norm": 2.0625,
      "learning_rate": 0.0001947927346167132,
      "loss": 0.7434,
      "step": 540
    },
    {
      "epoch": 1.9429590017825311,
      "grad_norm": 1.28125,
      "learning_rate": 0.0001945923714762516,
      "loss": 0.7387,
      "step": 545
    },
    {
      "epoch": 1.9607843137254903,
      "grad_norm": 4.125,
      "learning_rate": 0.00019438833303083678,
      "loss": 0.7388,
      "step": 550
    },
    {
      "epoch": 1.9786096256684491,
      "grad_norm": 2.1875,
      "learning_rate": 0.00019418062720820637,
      "loss": 0.7361,
      "step": 555
    },
    {
      "epoch": 1.9964349376114083,
      "grad_norm": 1.84375,
      "learning_rate": 0.00019396926207859084,
      "loss": 0.7213,
      "step": 560
    },
    {
      "epoch": 2.0,
      "eval_loss": 2.146196126937866,
      "eval_runtime": 1.0228,
      "eval_samples_per_second": 4.888,
      "eval_steps_per_second": 1.955,
      "step": 561
    },
    {
      "epoch": 2.014260249554367,
      "grad_norm": 1.5078125,
      "learning_rate": 0.00019375424585439994,
      "loss": 0.6961,
      "step": 565
    },
    {
      "epoch": 2.0320855614973263,
      "grad_norm": 1.6171875,
      "learning_rate": 0.0001935355868899034,
      "loss": 0.6878,
      "step": 570
    },
    {
      "epoch": 2.049910873440285,
      "grad_norm": 1.2890625,
      "learning_rate": 0.00019331329368090666,
      "loss": 0.6976,
      "step": 575
    },
    {
      "epoch": 2.0677361853832443,
      "grad_norm": 1.15625,
      "learning_rate": 0.00019308737486442045,
      "loss": 0.6947,
      "step": 580
    },
    {
      "epoch": 2.085561497326203,
      "grad_norm": 1.0625,
      "learning_rate": 0.00019285783921832537,
      "loss": 0.6682,
      "step": 585
    },
    {
      "epoch": 2.1033868092691623,
      "grad_norm": 0.94921875,
      "learning_rate": 0.0001926246956610309,
      "loss": 0.6653,
      "step": 590
    },
    {
      "epoch": 2.121212121212121,
      "grad_norm": 1.3671875,
      "learning_rate": 0.0001923879532511287,
      "loss": 0.6826,
      "step": 595
    },
    {
      "epoch": 2.1390374331550803,
      "grad_norm": 1.015625,
      "learning_rate": 0.00019214762118704076,
      "loss": 0.6856,
      "step": 600
    },
    {
      "epoch": 2.156862745098039,
      "grad_norm": 0.828125,
      "learning_rate": 0.00019190370880666207,
      "loss": 0.7042,
      "step": 605
    },
    {
      "epoch": 2.1746880570409983,
      "grad_norm": 1.1484375,
      "learning_rate": 0.00019165622558699763,
      "loss": 0.6811,
      "step": 610
    },
    {
      "epoch": 2.192513368983957,
      "grad_norm": 2.015625,
      "learning_rate": 0.00019140518114379434,
      "loss": 0.6949,
      "step": 615
    },
    {
      "epoch": 2.2103386809269163,
      "grad_norm": 0.55859375,
      "learning_rate": 0.00019115058523116733,
      "loss": 0.6886,
      "step": 620
    },
    {
      "epoch": 2.228163992869875,
      "grad_norm": 0.73046875,
      "learning_rate": 0.0001908924477412211,
      "loss": 0.6918,
      "step": 625
    },
    {
      "epoch": 2.2459893048128343,
      "grad_norm": 0.64453125,
      "learning_rate": 0.000190630778703665,
      "loss": 0.6844,
      "step": 630
    },
    {
      "epoch": 2.263814616755793,
      "grad_norm": 0.92578125,
      "learning_rate": 0.00019036558828542367,
      "loss": 0.6939,
      "step": 635
    },
    {
      "epoch": 2.2816399286987523,
      "grad_norm": 0.6796875,
      "learning_rate": 0.0001900968867902419,
      "loss": 0.6966,
      "step": 640
    },
    {
      "epoch": 2.299465240641711,
      "grad_norm": 0.95703125,
      "learning_rate": 0.00018982468465828442,
      "loss": 0.6894,
      "step": 645
    },
    {
      "epoch": 2.3172905525846703,
      "grad_norm": 0.65234375,
      "learning_rate": 0.0001895489924657301,
      "loss": 0.6878,
      "step": 650
    },
    {
      "epoch": 2.335115864527629,
      "grad_norm": 2.921875,
      "learning_rate": 0.00018926982092436117,
      "loss": 0.6878,
      "step": 655
    },
    {
      "epoch": 2.3529411764705883,
      "grad_norm": 1.234375,
      "learning_rate": 0.0001889871808811469,
      "loss": 0.6829,
      "step": 660
    },
    {
      "epoch": 2.370766488413547,
      "grad_norm": 2.078125,
      "learning_rate": 0.00018870108331782217,
      "loss": 0.7115,
      "step": 665
    },
    {
      "epoch": 2.3885918003565063,
      "grad_norm": 1.53125,
      "learning_rate": 0.00018841153935046098,
      "loss": 0.6921,
      "step": 670
    },
    {
      "epoch": 2.406417112299465,
      "grad_norm": 0.6875,
      "learning_rate": 0.00018811856022904423,
      "loss": 0.6963,
      "step": 675
    },
    {
      "epoch": 2.4242424242424243,
      "grad_norm": 1.2265625,
      "learning_rate": 0.00018782215733702286,
      "loss": 0.6968,
      "step": 680
    },
    {
      "epoch": 2.442067736185383,
      "grad_norm": 1.6171875,
      "learning_rate": 0.00018752234219087538,
      "loss": 0.6953,
      "step": 685
    },
    {
      "epoch": 2.4598930481283423,
      "grad_norm": 1.09375,
      "learning_rate": 0.00018721912643966055,
      "loss": 0.6898,
      "step": 690
    },
    {
      "epoch": 2.477718360071301,
      "grad_norm": 1.1171875,
      "learning_rate": 0.00018691252186456465,
      "loss": 0.6777,
      "step": 695
    },
    {
      "epoch": 2.4955436720142603,
      "grad_norm": 1.015625,
      "learning_rate": 0.00018660254037844388,
      "loss": 0.7143,
      "step": 700
    },
    {
      "epoch": 2.5133689839572195,
      "grad_norm": 0.6328125,
      "learning_rate": 0.0001862891940253613,
      "loss": 0.708,
      "step": 705
    },
    {
      "epoch": 2.5311942959001783,
      "grad_norm": 0.8828125,
      "learning_rate": 0.00018597249498011903,
      "loss": 0.6775,
      "step": 710
    },
    {
      "epoch": 2.549019607843137,
      "grad_norm": 2.65625,
      "learning_rate": 0.00018565245554778517,
      "loss": 0.6673,
      "step": 715
    },
    {
      "epoch": 2.5668449197860963,
      "grad_norm": 0.53515625,
      "learning_rate": 0.00018532908816321558,
      "loss": 0.6801,
      "step": 720
    },
    {
      "epoch": 2.5846702317290555,
      "grad_norm": 0.96875,
      "learning_rate": 0.0001850024053905709,
      "loss": 0.6834,
      "step": 725
    },
    {
      "epoch": 2.6024955436720143,
      "grad_norm": 0.84375,
      "learning_rate": 0.00018467241992282843,
      "loss": 0.6809,
      "step": 730
    },
    {
      "epoch": 2.620320855614973,
      "grad_norm": 0.53125,
      "learning_rate": 0.0001843391445812886,
      "loss": 0.6793,
      "step": 735
    },
    {
      "epoch": 2.6381461675579323,
      "grad_norm": 0.56640625,
      "learning_rate": 0.00018400259231507717,
      "loss": 0.686,
      "step": 740
    },
    {
      "epoch": 2.6559714795008915,
      "grad_norm": 2.328125,
      "learning_rate": 0.00018366277620064197,
      "loss": 0.6829,
      "step": 745
    },
    {
      "epoch": 2.6737967914438503,
      "grad_norm": 1.5,
      "learning_rate": 0.0001833197094412449,
      "loss": 0.6862,
      "step": 750
    },
    {
      "epoch": 2.691622103386809,
      "grad_norm": 0.7265625,
      "learning_rate": 0.00018297340536644875,
      "loss": 0.6791,
      "step": 755
    },
    {
      "epoch": 2.7094474153297683,
      "grad_norm": 0.69921875,
      "learning_rate": 0.0001826238774315995,
      "loss": 0.6924,
      "step": 760
    },
    {
      "epoch": 2.7272727272727275,
      "grad_norm": 12.25,
      "learning_rate": 0.00018227113921730334,
      "loss": 0.6946,
      "step": 765
    },
    {
      "epoch": 2.7450980392156863,
      "grad_norm": 0.9375,
      "learning_rate": 0.0001819152044288992,
      "loss": 0.6999,
      "step": 770
    },
    {
      "epoch": 2.762923351158645,
      "grad_norm": 0.5234375,
      "learning_rate": 0.00018155608689592604,
      "loss": 0.694,
      "step": 775
    },
    {
      "epoch": 2.7807486631016043,
      "grad_norm": 0.58984375,
      "learning_rate": 0.00018119380057158568,
      "loss": 0.6958,
      "step": 780
    },
    {
      "epoch": 2.7985739750445635,
      "grad_norm": 0.5390625,
      "learning_rate": 0.00018082835953220054,
      "loss": 0.6951,
      "step": 785
    },
    {
      "epoch": 2.8163992869875223,
      "grad_norm": 0.72265625,
      "learning_rate": 0.00018045977797666684,
      "loss": 0.6826,
      "step": 790
    },
    {
      "epoch": 2.834224598930481,
      "grad_norm": 0.66015625,
      "learning_rate": 0.00018008807022590283,
      "loss": 0.6889,
      "step": 795
    },
    {
      "epoch": 2.8520499108734403,
      "grad_norm": 0.51171875,
      "learning_rate": 0.00017971325072229226,
      "loss": 0.6953,
      "step": 800
    },
    {
      "epoch": 2.8698752228163995,
      "grad_norm": 1.0078125,
      "learning_rate": 0.00017933533402912354,
      "loss": 0.6755,
      "step": 805
    },
    {
      "epoch": 2.8877005347593583,
      "grad_norm": 0.51953125,
      "learning_rate": 0.00017895433483002354,
      "loss": 0.6981,
      "step": 810
    },
    {
      "epoch": 2.905525846702317,
      "grad_norm": 0.625,
      "learning_rate": 0.00017857026792838737,
      "loss": 0.6827,
      "step": 815
    },
    {
      "epoch": 2.9233511586452763,
      "grad_norm": 1.3984375,
      "learning_rate": 0.000178183148246803,
      "loss": 0.6895,
      "step": 820
    },
    {
      "epoch": 2.9411764705882355,
      "grad_norm": 1.171875,
      "learning_rate": 0.00017779299082647148,
      "loss": 0.678,
      "step": 825
    },
    {
      "epoch": 2.9590017825311943,
      "grad_norm": 0.953125,
      "learning_rate": 0.00017739981082662276,
      "loss": 0.6959,
      "step": 830
    },
    {
      "epoch": 2.976827094474153,
      "grad_norm": 0.515625,
      "learning_rate": 0.0001770036235239263,
      "loss": 0.7065,
      "step": 835
    },
    {
      "epoch": 2.9946524064171123,
      "grad_norm": 0.451171875,
      "learning_rate": 0.0001766044443118978,
      "loss": 0.675,
      "step": 840
    },
    {
      "epoch": 2.998217468805704,
      "eval_loss": 2.1484053134918213,
      "eval_runtime": 1.0318,
      "eval_samples_per_second": 4.846,
      "eval_steps_per_second": 1.938,
      "step": 841
    },
    {
      "epoch": 3.0124777183600715,
      "grad_norm": 0.68359375,
      "learning_rate": 0.00017620228870030108,
      "loss": 0.6463,
      "step": 845
    },
    {
      "epoch": 3.0303030303030303,
      "grad_norm": 0.7734375,
      "learning_rate": 0.0001757971723145453,
      "loss": 0.6127,
      "step": 850
    },
    {
      "epoch": 3.0481283422459895,
      "grad_norm": 0.60546875,
      "learning_rate": 0.000175389110895078,
      "loss": 0.5995,
      "step": 855
    },
    {
      "epoch": 3.0659536541889483,
      "grad_norm": 0.6015625,
      "learning_rate": 0.00017497812029677344,
      "loss": 0.6274,
      "step": 860
    },
    {
      "epoch": 3.0837789661319075,
      "grad_norm": 0.8984375,
      "learning_rate": 0.00017456421648831655,
      "loss": 0.6126,
      "step": 865
    },
    {
      "epoch": 3.1016042780748663,
      "grad_norm": 0.80859375,
      "learning_rate": 0.00017414741555158266,
      "loss": 0.632,
      "step": 870
    },
    {
      "epoch": 3.1194295900178255,
      "grad_norm": 0.9765625,
      "learning_rate": 0.0001737277336810124,
      "loss": 0.6282,
      "step": 875
    },
    {
      "epoch": 3.1372549019607843,
      "grad_norm": 0.546875,
      "learning_rate": 0.00017330518718298264,
      "loss": 0.6381,
      "step": 880
    },
    {
      "epoch": 3.1550802139037435,
      "grad_norm": 0.84765625,
      "learning_rate": 0.00017287979247517286,
      "loss": 0.6231,
      "step": 885
    },
    {
      "epoch": 3.1729055258467023,
      "grad_norm": 0.5703125,
      "learning_rate": 0.00017245156608592727,
      "loss": 0.619,
      "step": 890
    },
    {
      "epoch": 3.1907308377896615,
      "grad_norm": 0.5078125,
      "learning_rate": 0.00017202052465361268,
      "loss": 0.6245,
      "step": 895
    },
    {
      "epoch": 3.2085561497326203,
      "grad_norm": 0.56640625,
      "learning_rate": 0.00017158668492597186,
      "loss": 0.626,
      "step": 900
    },
    {
      "epoch": 3.2263814616755795,
      "grad_norm": 0.89453125,
      "learning_rate": 0.00017115006375947303,
      "loss": 0.6136,
      "step": 905
    },
    {
      "epoch": 3.2442067736185383,
      "grad_norm": 0.58984375,
      "learning_rate": 0.00017071067811865476,
      "loss": 0.631,
      "step": 910
    },
    {
      "epoch": 3.2620320855614975,
      "grad_norm": 0.57421875,
      "learning_rate": 0.00017026854507546692,
      "loss": 0.6254,
      "step": 915
    },
    {
      "epoch": 3.2798573975044563,
      "grad_norm": 0.484375,
      "learning_rate": 0.00016982368180860728,
      "loss": 0.6268,
      "step": 920
    },
    {
      "epoch": 3.2976827094474155,
      "grad_norm": 0.54296875,
      "learning_rate": 0.00016937610560285418,
      "loss": 0.6283,
      "step": 925
    },
    {
      "epoch": 3.3155080213903743,
      "grad_norm": 0.50390625,
      "learning_rate": 0.0001689258338483947,
      "loss": 0.6185,
      "step": 930
    },
    {
      "epoch": 3.3333333333333335,
      "grad_norm": 0.451171875,
      "learning_rate": 0.00016847288404014935,
      "loss": 0.6363,
      "step": 935
    },
    {
      "epoch": 3.3511586452762923,
      "grad_norm": 0.5234375,
      "learning_rate": 0.00016801727377709194,
      "loss": 0.6231,
      "step": 940
    },
    {
      "epoch": 3.3689839572192515,
      "grad_norm": 0.73046875,
      "learning_rate": 0.00016755902076156604,
      "loss": 0.6178,
      "step": 945
    },
    {
      "epoch": 3.3868092691622103,
      "grad_norm": 0.67578125,
      "learning_rate": 0.00016709814279859702,
      "loss": 0.6196,
      "step": 950
    },
    {
      "epoch": 3.4046345811051695,
      "grad_norm": 0.5390625,
      "learning_rate": 0.0001666346577952004,
      "loss": 0.6196,
      "step": 955
    },
    {
      "epoch": 3.4224598930481283,
      "grad_norm": 0.5625,
      "learning_rate": 0.00016616858375968595,
      "loss": 0.6329,
      "step": 960
    },
    {
      "epoch": 3.4402852049910875,
      "grad_norm": 1.03125,
      "learning_rate": 0.00016569993880095806,
      "loss": 0.631,
      "step": 965
    },
    {
      "epoch": 3.4581105169340463,
      "grad_norm": 1.28125,
      "learning_rate": 0.00016522874112781213,
      "loss": 0.6481,
      "step": 970
    },
    {
      "epoch": 3.4759358288770055,
      "grad_norm": 1.0078125,
      "learning_rate": 0.00016475500904822706,
      "loss": 0.6349,
      "step": 975
    },
    {
      "epoch": 3.4937611408199643,
      "grad_norm": 0.6953125,
      "learning_rate": 0.00016427876096865394,
      "loss": 0.6409,
      "step": 980
    },
    {
      "epoch": 3.5115864527629235,
      "grad_norm": 1.265625,
      "learning_rate": 0.0001638000153933009,
      "loss": 0.6424,
      "step": 985
    },
    {
      "epoch": 3.5294117647058822,
      "grad_norm": 0.79296875,
      "learning_rate": 0.000163318790923414,
      "loss": 0.6337,
      "step": 990
    },
    {
      "epoch": 3.5472370766488415,
      "grad_norm": 1.7734375,
      "learning_rate": 0.00016283510625655472,
      "loss": 0.6283,
      "step": 995
    },
    {
      "epoch": 3.5650623885918002,
      "grad_norm": 1.5078125,
      "learning_rate": 0.00016234898018587337,
      "loss": 0.6275,
      "step": 1000
    },
    {
      "epoch": 3.5828877005347595,
      "grad_norm": 1.125,
      "learning_rate": 0.00016186043159937882,
      "loss": 0.6319,
      "step": 1005
    },
    {
      "epoch": 3.6007130124777182,
      "grad_norm": 0.92578125,
      "learning_rate": 0.00016136947947920476,
      "loss": 0.6268,
      "step": 1010
    },
    {
      "epoch": 3.6185383244206775,
      "grad_norm": 0.5859375,
      "learning_rate": 0.00016087614290087208,
      "loss": 0.6329,
      "step": 1015
    },
    {
      "epoch": 3.6363636363636362,
      "grad_norm": 0.7734375,
      "learning_rate": 0.00016038044103254775,
      "loss": 0.6183,
      "step": 1020
    },
    {
      "epoch": 3.6541889483065955,
      "grad_norm": 0.84375,
      "learning_rate": 0.00015988239313430004,
      "loss": 0.6359,
      "step": 1025
    },
    {
      "epoch": 3.6720142602495542,
      "grad_norm": 0.578125,
      "learning_rate": 0.00015938201855735014,
      "loss": 0.6376,
      "step": 1030
    },
    {
      "epoch": 3.6898395721925135,
      "grad_norm": 0.734375,
      "learning_rate": 0.00015887933674332046,
      "loss": 0.6314,
      "step": 1035
    },
    {
      "epoch": 3.7076648841354722,
      "grad_norm": 0.75390625,
      "learning_rate": 0.000158374367223479,
      "loss": 0.631,
      "step": 1040
    },
    {
      "epoch": 3.7254901960784315,
      "grad_norm": 0.5078125,
      "learning_rate": 0.0001578671296179806,
      "loss": 0.6348,
      "step": 1045
    },
    {
      "epoch": 3.7433155080213902,
      "grad_norm": 0.75,
      "learning_rate": 0.0001573576436351046,
      "loss": 0.6371,
      "step": 1050
    },
    {
      "epoch": 3.7611408199643495,
      "grad_norm": 1.6953125,
      "learning_rate": 0.00015684592907048926,
      "loss": 0.6365,
      "step": 1055
    },
    {
      "epoch": 3.7789661319073082,
      "grad_norm": 1.15625,
      "learning_rate": 0.0001563320058063622,
      "loss": 0.6477,
      "step": 1060
    },
    {
      "epoch": 3.7967914438502675,
      "grad_norm": 1.015625,
      "learning_rate": 0.0001558158938107684,
      "loss": 0.6472,
      "step": 1065
    },
    {
      "epoch": 3.8146167557932262,
      "grad_norm": 1.0234375,
      "learning_rate": 0.00015529761313679393,
      "loss": 0.6352,
      "step": 1070
    },
    {
      "epoch": 3.8324420677361855,
      "grad_norm": 0.73828125,
      "learning_rate": 0.00015477718392178716,
      "loss": 0.6299,
      "step": 1075
    },
    {
      "epoch": 3.8502673796791442,
      "grad_norm": 1.0390625,
      "learning_rate": 0.00015425462638657595,
      "loss": 0.6255,
      "step": 1080
    },
    {
      "epoch": 3.8680926916221035,
      "grad_norm": 1.171875,
      "learning_rate": 0.0001537299608346824,
      "loss": 0.6318,
      "step": 1085
    },
    {
      "epoch": 3.8859180035650622,
      "grad_norm": 0.62890625,
      "learning_rate": 0.00015320320765153367,
      "loss": 0.6308,
      "step": 1090
    },
    {
      "epoch": 3.9037433155080214,
      "grad_norm": 0.73046875,
      "learning_rate": 0.0001526743873036701,
      "loss": 0.6436,
      "step": 1095
    },
    {
      "epoch": 3.9215686274509802,
      "grad_norm": 1.546875,
      "learning_rate": 0.0001521435203379498,
      "loss": 0.6412,
      "step": 1100
    },
    {
      "epoch": 3.9393939393939394,
      "grad_norm": 0.56640625,
      "learning_rate": 0.00015161062738075067,
      "loss": 0.6399,
      "step": 1105
    },
    {
      "epoch": 3.9572192513368982,
      "grad_norm": 0.859375,
      "learning_rate": 0.00015107572913716858,
      "loss": 0.631,
      "step": 1110
    },
    {
      "epoch": 3.9750445632798574,
      "grad_norm": 0.6171875,
      "learning_rate": 0.0001505388463902131,
      "loss": 0.6188,
      "step": 1115
    },
    {
      "epoch": 3.9928698752228167,
      "grad_norm": 0.546875,
      "learning_rate": 0.00015000000000000001,
      "loss": 0.6439,
      "step": 1120
    },
    {
      "epoch": 4.0,
      "eval_loss": 2.214867115020752,
      "eval_runtime": 1.0238,
      "eval_samples_per_second": 4.884,
      "eval_steps_per_second": 1.954,
      "step": 1122
    },
    {
      "epoch": 4.010695187165775,
      "grad_norm": 0.609375,
      "learning_rate": 0.00014945921090294076,
      "loss": 0.5755,
      "step": 1125
    },
    {
      "epoch": 4.028520499108734,
      "grad_norm": 0.80078125,
      "learning_rate": 0.00014891650011092896,
      "loss": 0.5648,
      "step": 1130
    },
    {
      "epoch": 4.046345811051693,
      "grad_norm": 1.2265625,
      "learning_rate": 0.000148371888710524,
      "loss": 0.5641,
      "step": 1135
    },
    {
      "epoch": 4.064171122994653,
      "grad_norm": 0.64453125,
      "learning_rate": 0.00014782539786213183,
      "loss": 0.5618,
      "step": 1140
    },
    {
      "epoch": 4.081996434937611,
      "grad_norm": 0.79296875,
      "learning_rate": 0.0001472770487991827,
      "loss": 0.5628,
      "step": 1145
    },
    {
      "epoch": 4.09982174688057,
      "grad_norm": 0.69140625,
      "learning_rate": 0.0001467268628273062,
      "loss": 0.558,
      "step": 1150
    },
    {
      "epoch": 4.117647058823529,
      "grad_norm": 0.83203125,
      "learning_rate": 0.00014617486132350343,
      "loss": 0.5556,
      "step": 1155
    },
    {
      "epoch": 4.135472370766489,
      "grad_norm": 1.171875,
      "learning_rate": 0.0001456210657353163,
      "loss": 0.565,
      "step": 1160
    },
    {
      "epoch": 4.153297682709447,
      "grad_norm": 0.91796875,
      "learning_rate": 0.00014506549757999454,
      "loss": 0.5682,
      "step": 1165
    },
    {
      "epoch": 4.171122994652406,
      "grad_norm": 0.98046875,
      "learning_rate": 0.00014450817844365921,
      "loss": 0.5699,
      "step": 1170
    },
    {
      "epoch": 4.188948306595365,
      "grad_norm": 1.0625,
      "learning_rate": 0.0001439491299804645,
      "loss": 0.5665,
      "step": 1175
    },
    {
      "epoch": 4.206773618538325,
      "grad_norm": 1.15625,
      "learning_rate": 0.00014338837391175582,
      "loss": 0.5795,
      "step": 1180
    },
    {
      "epoch": 4.224598930481283,
      "grad_norm": 0.8828125,
      "learning_rate": 0.00014282593202522627,
      "loss": 0.5707,
      "step": 1185
    },
    {
      "epoch": 4.242424242424242,
      "grad_norm": 0.765625,
      "learning_rate": 0.00014226182617406996,
      "loss": 0.5649,
      "step": 1190
    },
    {
      "epoch": 4.260249554367201,
      "grad_norm": 0.7265625,
      "learning_rate": 0.00014169607827613283,
      "loss": 0.5594,
      "step": 1195
    },
    {
      "epoch": 4.278074866310161,
      "grad_norm": 0.99609375,
      "learning_rate": 0.00014112871031306119,
      "loss": 0.5871,
      "step": 1200
    },
    {
      "epoch": 4.295900178253119,
      "grad_norm": 0.96875,
      "learning_rate": 0.00014055974432944753,
      "loss": 0.5714,
      "step": 1205
    },
    {
      "epoch": 4.313725490196078,
      "grad_norm": 1.359375,
      "learning_rate": 0.00013998920243197407,
      "loss": 0.5731,
      "step": 1210
    },
    {
      "epoch": 4.331550802139038,
      "grad_norm": 1.109375,
      "learning_rate": 0.00013941710678855396,
      "loss": 0.5717,
      "step": 1215
    },
    {
      "epoch": 4.349376114081997,
      "grad_norm": 1.109375,
      "learning_rate": 0.00013884347962746948,
      "loss": 0.5773,
      "step": 1220
    },
    {
      "epoch": 4.367201426024955,
      "grad_norm": 1.2265625,
      "learning_rate": 0.000138268343236509,
      "loss": 0.5818,
      "step": 1225
    },
    {
      "epoch": 4.385026737967914,
      "grad_norm": 1.234375,
      "learning_rate": 0.00013769171996210052,
      "loss": 0.5575,
      "step": 1230
    },
    {
      "epoch": 4.402852049910873,
      "grad_norm": 0.796875,
      "learning_rate": 0.00013711363220844379,
      "loss": 0.5638,
      "step": 1235
    },
    {
      "epoch": 4.420677361853833,
      "grad_norm": 0.625,
      "learning_rate": 0.00013653410243663952,
      "loss": 0.5702,
      "step": 1240
    },
    {
      "epoch": 4.438502673796791,
      "grad_norm": 0.55078125,
      "learning_rate": 0.00013595315316381677,
      "loss": 0.5773,
      "step": 1245
    },
    {
      "epoch": 4.45632798573975,
      "grad_norm": 0.73828125,
      "learning_rate": 0.00013537080696225814,
      "loss": 0.5686,
      "step": 1250
    },
    {
      "epoch": 4.47415329768271,
      "grad_norm": 0.80859375,
      "learning_rate": 0.00013478708645852272,
      "loss": 0.5714,
      "step": 1255
    },
    {
      "epoch": 4.491978609625669,
      "grad_norm": 0.76953125,
      "learning_rate": 0.00013420201433256689,
      "loss": 0.5718,
      "step": 1260
    },
    {
      "epoch": 4.509803921568627,
      "grad_norm": 1.09375,
      "learning_rate": 0.0001336156133168631,
      "loss": 0.5846,
      "step": 1265
    },
    {
      "epoch": 4.527629233511586,
      "grad_norm": 0.7734375,
      "learning_rate": 0.00013302790619551674,
      "loss": 0.5761,
      "step": 1270
    },
    {
      "epoch": 4.545454545454545,
      "grad_norm": 0.609375,
      "learning_rate": 0.00013243891580338072,
      "loss": 0.5737,
      "step": 1275
    },
    {
      "epoch": 4.563279857397505,
      "grad_norm": 0.56640625,
      "learning_rate": 0.00013184866502516845,
      "loss": 0.5854,
      "step": 1280
    },
    {
      "epoch": 4.581105169340463,
      "grad_norm": 0.57421875,
      "learning_rate": 0.00013125717679456447,
      "loss": 0.589,
      "step": 1285
    },
    {
      "epoch": 4.598930481283422,
      "grad_norm": 0.90234375,
      "learning_rate": 0.00013066447409333345,
      "loss": 0.5706,
      "step": 1290
    },
    {
      "epoch": 4.616755793226382,
      "grad_norm": 0.5390625,
      "learning_rate": 0.00013007057995042732,
      "loss": 0.5733,
      "step": 1295
    },
    {
      "epoch": 4.634581105169341,
      "grad_norm": 0.54296875,
      "learning_rate": 0.00012947551744109043,
      "loss": 0.5871,
      "step": 1300
    },
    {
      "epoch": 4.652406417112299,
      "grad_norm": 0.51171875,
      "learning_rate": 0.000128879309685963,
      "loss": 0.5843,
      "step": 1305
    },
    {
      "epoch": 4.670231729055258,
      "grad_norm": 0.75390625,
      "learning_rate": 0.00012828197985018276,
      "loss": 0.5677,
      "step": 1310
    },
    {
      "epoch": 4.688057040998218,
      "grad_norm": 0.6015625,
      "learning_rate": 0.00012768355114248494,
      "loss": 0.5654,
      "step": 1315
    },
    {
      "epoch": 4.705882352941177,
      "grad_norm": 0.58984375,
      "learning_rate": 0.00012708404681430053,
      "loss": 0.5741,
      "step": 1320
    },
    {
      "epoch": 4.723707664884135,
      "grad_norm": 0.58984375,
      "learning_rate": 0.00012648349015885273,
      "loss": 0.5678,
      "step": 1325
    },
    {
      "epoch": 4.741532976827094,
      "grad_norm": 0.70703125,
      "learning_rate": 0.00012588190451025207,
      "loss": 0.5756,
      "step": 1330
    },
    {
      "epoch": 4.759358288770054,
      "grad_norm": 0.625,
      "learning_rate": 0.00012527931324258975,
      "loss": 0.572,
      "step": 1335
    },
    {
      "epoch": 4.777183600713013,
      "grad_norm": 0.5703125,
      "learning_rate": 0.00012467573976902935,
      "loss": 0.5911,
      "step": 1340
    },
    {
      "epoch": 4.795008912655971,
      "grad_norm": 0.4765625,
      "learning_rate": 0.00012407120754089732,
      "loss": 0.5817,
      "step": 1345
    },
    {
      "epoch": 4.81283422459893,
      "grad_norm": 0.53515625,
      "learning_rate": 0.00012346574004677154,
      "loss": 0.5788,
      "step": 1350
    },
    {
      "epoch": 4.83065953654189,
      "grad_norm": 0.53125,
      "learning_rate": 0.00012285936081156897,
      "loss": 0.5816,
      "step": 1355
    },
    {
      "epoch": 4.848484848484849,
      "grad_norm": 0.53515625,
      "learning_rate": 0.00012225209339563145,
      "loss": 0.5847,
      "step": 1360
    },
    {
      "epoch": 4.866310160427807,
      "grad_norm": 0.5859375,
      "learning_rate": 0.00012164396139381029,
      "loss": 0.5869,
      "step": 1365
    },
    {
      "epoch": 4.884135472370766,
      "grad_norm": 0.82421875,
      "learning_rate": 0.00012103498843454959,
      "loss": 0.5827,
      "step": 1370
    },
    {
      "epoch": 4.901960784313726,
      "grad_norm": 0.5703125,
      "learning_rate": 0.00012042519817896804,
      "loss": 0.576,
      "step": 1375
    },
    {
      "epoch": 4.919786096256685,
      "grad_norm": 0.5546875,
      "learning_rate": 0.00011981461431993977,
      "loss": 0.5958,
      "step": 1380
    },
    {
      "epoch": 4.937611408199643,
      "grad_norm": 0.52734375,
      "learning_rate": 0.00011920326058117364,
      "loss": 0.585,
      "step": 1385
    },
    {
      "epoch": 4.955436720142602,
      "grad_norm": 0.96875,
      "learning_rate": 0.00011859116071629149,
      "loss": 0.5904,
      "step": 1390
    },
    {
      "epoch": 4.973262032085562,
      "grad_norm": 0.57421875,
      "learning_rate": 0.00011797833850790527,
      "loss": 0.5828,
      "step": 1395
    },
    {
      "epoch": 4.991087344028521,
      "grad_norm": 0.57421875,
      "learning_rate": 0.00011736481776669306,
      "loss": 0.569,
      "step": 1400
    },
    {
      "epoch": 4.998217468805704,
      "eval_loss": 2.3224310874938965,
      "eval_runtime": 1.0378,
      "eval_samples_per_second": 4.818,
      "eval_steps_per_second": 1.927,
      "step": 1402
    },
    {
      "epoch": 5.008912655971479,
      "grad_norm": 0.54296875,
      "learning_rate": 0.00011675062233047364,
      "loss": 0.5598,
      "step": 1405
    },
    {
      "epoch": 5.026737967914438,
      "grad_norm": 0.78515625,
      "learning_rate": 0.00011613577606328068,
      "loss": 0.5118,
      "step": 1410
    },
    {
      "epoch": 5.044563279857398,
      "grad_norm": 0.59765625,
      "learning_rate": 0.00011552030285443515,
      "loss": 0.5109,
      "step": 1415
    },
    {
      "epoch": 5.062388591800357,
      "grad_norm": 0.57421875,
      "learning_rate": 0.00011490422661761744,
      "loss": 0.5033,
      "step": 1420
    },
    {
      "epoch": 5.080213903743315,
      "grad_norm": 0.578125,
      "learning_rate": 0.00011428757128993802,
      "loss": 0.5113,
      "step": 1425
    },
    {
      "epoch": 5.098039215686274,
      "grad_norm": 0.83984375,
      "learning_rate": 0.00011367036083100735,
      "loss": 0.4987,
      "step": 1430
    },
    {
      "epoch": 5.115864527629234,
      "grad_norm": 0.69140625,
      "learning_rate": 0.00011305261922200519,
      "loss": 0.5111,
      "step": 1435
    },
    {
      "epoch": 5.133689839572193,
      "grad_norm": 0.65234375,
      "learning_rate": 0.00011243437046474853,
      "loss": 0.5186,
      "step": 1440
    },
    {
      "epoch": 5.151515151515151,
      "grad_norm": 0.7109375,
      "learning_rate": 0.0001118156385807593,
      "loss": 0.5054,
      "step": 1445
    },
    {
      "epoch": 5.16934046345811,
      "grad_norm": 0.55859375,
      "learning_rate": 0.00011119644761033078,
      "loss": 0.5073,
      "step": 1450
    },
    {
      "epoch": 5.18716577540107,
      "grad_norm": 0.66015625,
      "learning_rate": 0.00011057682161159379,
      "loss": 0.5298,
      "step": 1455
    },
    {
      "epoch": 5.204991087344029,
      "grad_norm": 0.59765625,
      "learning_rate": 0.00010995678465958168,
      "loss": 0.504,
      "step": 1460
    },
    {
      "epoch": 5.222816399286987,
      "grad_norm": 0.5625,
      "learning_rate": 0.00010933636084529506,
      "loss": 0.5361,
      "step": 1465
    },
    {
      "epoch": 5.240641711229946,
      "grad_norm": 0.6171875,
      "learning_rate": 0.00010871557427476583,
      "loss": 0.521,
      "step": 1470
    },
    {
      "epoch": 5.258467023172906,
      "grad_norm": 0.66015625,
      "learning_rate": 0.00010809444906812033,
      "loss": 0.522,
      "step": 1475
    },
    {
      "epoch": 5.276292335115865,
      "grad_norm": 0.859375,
      "learning_rate": 0.00010747300935864243,
      "loss": 0.5188,
      "step": 1480
    },
    {
      "epoch": 5.294117647058823,
      "grad_norm": 0.78515625,
      "learning_rate": 0.00010685127929183567,
      "loss": 0.5203,
      "step": 1485
    },
    {
      "epoch": 5.311942959001782,
      "grad_norm": 0.62890625,
      "learning_rate": 0.00010622928302448523,
      "loss": 0.5182,
      "step": 1490
    },
    {
      "epoch": 5.329768270944742,
      "grad_norm": 0.5859375,
      "learning_rate": 0.00010560704472371919,
      "loss": 0.5317,
      "step": 1495
    },
    {
      "epoch": 5.347593582887701,
      "grad_norm": 0.75390625,
      "learning_rate": 0.00010498458856606972,
      "loss": 0.5186,
      "step": 1500
    },
    {
      "epoch": 5.365418894830659,
      "grad_norm": 0.63671875,
      "learning_rate": 0.00010436193873653361,
      "loss": 0.5155,
      "step": 1505
    },
    {
      "epoch": 5.383244206773618,
      "grad_norm": 0.80859375,
      "learning_rate": 0.0001037391194276326,
      "loss": 0.5198,
      "step": 1510
    },
    {
      "epoch": 5.401069518716578,
      "grad_norm": 0.62109375,
      "learning_rate": 0.00010311615483847332,
      "loss": 0.5184,
      "step": 1515
    },
    {
      "epoch": 5.418894830659537,
      "grad_norm": 0.59765625,
      "learning_rate": 0.0001024930691738073,
      "loss": 0.5256,
      "step": 1520
    },
    {
      "epoch": 5.436720142602495,
      "grad_norm": 0.66015625,
      "learning_rate": 0.00010186988664309023,
      "loss": 0.5203,
      "step": 1525
    },
    {
      "epoch": 5.454545454545454,
      "grad_norm": 0.7109375,
      "learning_rate": 0.00010124663145954152,
      "loss": 0.5166,
      "step": 1530
    },
    {
      "epoch": 5.472370766488414,
      "grad_norm": 0.62890625,
      "learning_rate": 0.00010062332783920336,
      "loss": 0.5168,
      "step": 1535
    },
    {
      "epoch": 5.490196078431373,
      "grad_norm": 0.70703125,
      "learning_rate": 0.0001,
      "loss": 0.5153,
      "step": 1540
    },
    {
      "epoch": 5.508021390374331,
      "grad_norm": 0.7109375,
      "learning_rate": 9.937667216079665e-05,
      "loss": 0.5308,
      "step": 1545
    },
    {
      "epoch": 5.52584670231729,
      "grad_norm": 0.609375,
      "learning_rate": 9.875336854045851e-05,
      "loss": 0.5148,
      "step": 1550
    },
    {
      "epoch": 5.54367201426025,
      "grad_norm": 0.6015625,
      "learning_rate": 9.813011335690981e-05,
      "loss": 0.5247,
      "step": 1555
    },
    {
      "epoch": 5.561497326203209,
      "grad_norm": 0.87890625,
      "learning_rate": 9.750693082619273e-05,
      "loss": 0.5219,
      "step": 1560
    },
    {
      "epoch": 5.579322638146167,
      "grad_norm": 0.71875,
      "learning_rate": 9.688384516152672e-05,
      "loss": 0.5213,
      "step": 1565
    },
    {
      "epoch": 5.597147950089127,
      "grad_norm": 0.734375,
      "learning_rate": 9.626088057236745e-05,
      "loss": 0.5279,
      "step": 1570
    },
    {
      "epoch": 5.614973262032086,
      "grad_norm": 0.69140625,
      "learning_rate": 9.563806126346642e-05,
      "loss": 0.5256,
      "step": 1575
    },
    {
      "epoch": 5.632798573975045,
      "grad_norm": 0.76171875,
      "learning_rate": 9.501541143393028e-05,
      "loss": 0.529,
      "step": 1580
    },
    {
      "epoch": 5.650623885918003,
      "grad_norm": 0.65234375,
      "learning_rate": 9.439295527628081e-05,
      "loss": 0.5163,
      "step": 1585
    },
    {
      "epoch": 5.668449197860962,
      "grad_norm": 0.5703125,
      "learning_rate": 9.37707169755148e-05,
      "loss": 0.5255,
      "step": 1590
    },
    {
      "epoch": 5.686274509803922,
      "grad_norm": 0.578125,
      "learning_rate": 9.314872070816434e-05,
      "loss": 0.5236,
      "step": 1595
    },
    {
      "epoch": 5.704099821746881,
      "grad_norm": 0.5703125,
      "learning_rate": 9.252699064135758e-05,
      "loss": 0.5272,
      "step": 1600
    },
    {
      "epoch": 5.721925133689839,
      "grad_norm": 0.546875,
      "learning_rate": 9.190555093187967e-05,
      "loss": 0.5276,
      "step": 1605
    },
    {
      "epoch": 5.739750445632799,
      "grad_norm": 0.58984375,
      "learning_rate": 9.128442572523417e-05,
      "loss": 0.519,
      "step": 1610
    },
    {
      "epoch": 5.757575757575758,
      "grad_norm": 0.58984375,
      "learning_rate": 9.066363915470495e-05,
      "loss": 0.5245,
      "step": 1615
    },
    {
      "epoch": 5.775401069518717,
      "grad_norm": 0.6640625,
      "learning_rate": 9.004321534041835e-05,
      "loss": 0.5296,
      "step": 1620
    },
    {
      "epoch": 5.793226381461675,
      "grad_norm": 0.578125,
      "learning_rate": 8.942317838840623e-05,
      "loss": 0.5191,
      "step": 1625
    },
    {
      "epoch": 5.811051693404634,
      "grad_norm": 0.6640625,
      "learning_rate": 8.880355238966923e-05,
      "loss": 0.5236,
      "step": 1630
    },
    {
      "epoch": 5.828877005347594,
      "grad_norm": 0.6875,
      "learning_rate": 8.818436141924072e-05,
      "loss": 0.5321,
      "step": 1635
    },
    {
      "epoch": 5.846702317290553,
      "grad_norm": 0.58203125,
      "learning_rate": 8.756562953525152e-05,
      "loss": 0.5367,
      "step": 1640
    },
    {
      "epoch": 5.864527629233511,
      "grad_norm": 0.55859375,
      "learning_rate": 8.694738077799488e-05,
      "loss": 0.5208,
      "step": 1645
    },
    {
      "epoch": 5.882352941176471,
      "grad_norm": 0.5703125,
      "learning_rate": 8.632963916899268e-05,
      "loss": 0.5223,
      "step": 1650
    },
    {
      "epoch": 5.90017825311943,
      "grad_norm": 0.5703125,
      "learning_rate": 8.571242871006202e-05,
      "loss": 0.5221,
      "step": 1655
    },
    {
      "epoch": 5.918003565062389,
      "grad_norm": 0.59375,
      "learning_rate": 8.509577338238255e-05,
      "loss": 0.5299,
      "step": 1660
    },
    {
      "epoch": 5.935828877005347,
      "grad_norm": 0.6328125,
      "learning_rate": 8.447969714556484e-05,
      "loss": 0.5341,
      "step": 1665
    },
    {
      "epoch": 5.953654188948306,
      "grad_norm": 0.71484375,
      "learning_rate": 8.386422393671933e-05,
      "loss": 0.524,
      "step": 1670
    },
    {
      "epoch": 5.971479500891266,
      "grad_norm": 0.56640625,
      "learning_rate": 8.324937766952638e-05,
      "loss": 0.5377,
      "step": 1675
    },
    {
      "epoch": 5.989304812834225,
      "grad_norm": 0.765625,
      "learning_rate": 8.263518223330697e-05,
      "loss": 0.5317,
      "step": 1680
    },
    {
      "epoch": 6.0,
      "eval_loss": 2.4839205741882324,
      "eval_runtime": 1.0218,
      "eval_samples_per_second": 4.893,
      "eval_steps_per_second": 1.957,
      "step": 1683
    },
    {
      "epoch": 6.007130124777183,
      "grad_norm": 0.6171875,
      "learning_rate": 8.202166149209474e-05,
      "loss": 0.4958,
      "step": 1685
    },
    {
      "epoch": 6.024955436720143,
      "grad_norm": 1.0703125,
      "learning_rate": 8.140883928370855e-05,
      "loss": 0.4602,
      "step": 1690
    },
    {
      "epoch": 6.042780748663102,
      "grad_norm": 0.63671875,
      "learning_rate": 8.07967394188264e-05,
      "loss": 0.4597,
      "step": 1695
    },
    {
      "epoch": 6.0606060606060606,
      "grad_norm": 0.91015625,
      "learning_rate": 8.018538568006027e-05,
      "loss": 0.4568,
      "step": 1700
    },
    {
      "epoch": 6.078431372549019,
      "grad_norm": 0.59375,
      "learning_rate": 7.957480182103198e-05,
      "loss": 0.4586,
      "step": 1705
    },
    {
      "epoch": 6.096256684491979,
      "grad_norm": 0.6640625,
      "learning_rate": 7.896501156545045e-05,
      "loss": 0.4695,
      "step": 1710
    },
    {
      "epoch": 6.114081996434938,
      "grad_norm": 0.64453125,
      "learning_rate": 7.835603860618972e-05,
      "loss": 0.4623,
      "step": 1715
    },
    {
      "epoch": 6.1319073083778965,
      "grad_norm": 0.6796875,
      "learning_rate": 7.774790660436858e-05,
      "loss": 0.4671,
      "step": 1720
    },
    {
      "epoch": 6.149732620320855,
      "grad_norm": 0.640625,
      "learning_rate": 7.714063918843106e-05,
      "loss": 0.4631,
      "step": 1725
    },
    {
      "epoch": 6.167557932263815,
      "grad_norm": 0.640625,
      "learning_rate": 7.653425995322851e-05,
      "loss": 0.4692,
      "step": 1730
    },
    {
      "epoch": 6.185383244206774,
      "grad_norm": 0.578125,
      "learning_rate": 7.592879245910273e-05,
      "loss": 0.468,
      "step": 1735
    },
    {
      "epoch": 6.2032085561497325,
      "grad_norm": 1.1328125,
      "learning_rate": 7.532426023097063e-05,
      "loss": 0.4651,
      "step": 1740
    },
    {
      "epoch": 6.221033868092691,
      "grad_norm": 0.58984375,
      "learning_rate": 7.472068675741025e-05,
      "loss": 0.4745,
      "step": 1745
    },
    {
      "epoch": 6.238859180035651,
      "grad_norm": 0.609375,
      "learning_rate": 7.411809548974792e-05,
      "loss": 0.4688,
      "step": 1750
    },
    {
      "epoch": 6.25668449197861,
      "grad_norm": 0.5859375,
      "learning_rate": 7.351650984114728e-05,
      "loss": 0.4645,
      "step": 1755
    },
    {
      "epoch": 6.2745098039215685,
      "grad_norm": 0.609375,
      "learning_rate": 7.291595318569951e-05,
      "loss": 0.4657,
      "step": 1760
    },
    {
      "epoch": 6.292335115864527,
      "grad_norm": 0.6484375,
      "learning_rate": 7.231644885751507e-05,
      "loss": 0.4659,
      "step": 1765
    },
    {
      "epoch": 6.310160427807487,
      "grad_norm": 0.87890625,
      "learning_rate": 7.171802014981726e-05,
      "loss": 0.4715,
      "step": 1770
    },
    {
      "epoch": 6.327985739750446,
      "grad_norm": 0.65234375,
      "learning_rate": 7.112069031403704e-05,
      "loss": 0.4681,
      "step": 1775
    },
    {
      "epoch": 6.3458110516934045,
      "grad_norm": 0.625,
      "learning_rate": 7.052448255890957e-05,
      "loss": 0.4625,
      "step": 1780
    },
    {
      "epoch": 6.363636363636363,
      "grad_norm": 0.69921875,
      "learning_rate": 6.992942004957271e-05,
      "loss": 0.4675,
      "step": 1785
    },
    {
      "epoch": 6.381461675579323,
      "grad_norm": 0.66015625,
      "learning_rate": 6.933552590666659e-05,
      "loss": 0.4713,
      "step": 1790
    },
    {
      "epoch": 6.399286987522282,
      "grad_norm": 0.57421875,
      "learning_rate": 6.874282320543557e-05,
      "loss": 0.4707,
      "step": 1795
    },
    {
      "epoch": 6.4171122994652405,
      "grad_norm": 0.625,
      "learning_rate": 6.815133497483157e-05,
      "loss": 0.4678,
      "step": 1800
    },
    {
      "epoch": 6.434937611408199,
      "grad_norm": 0.609375,
      "learning_rate": 6.756108419661931e-05,
      "loss": 0.4701,
      "step": 1805
    },
    {
      "epoch": 6.452762923351159,
      "grad_norm": 0.6484375,
      "learning_rate": 6.697209380448333e-05,
      "loss": 0.4721,
      "step": 1810
    },
    {
      "epoch": 6.470588235294118,
      "grad_norm": 0.67578125,
      "learning_rate": 6.638438668313695e-05,
      "loss": 0.4767,
      "step": 1815
    },
    {
      "epoch": 6.4884135472370765,
      "grad_norm": 0.68359375,
      "learning_rate": 6.579798566743314e-05,
      "loss": 0.4772,
      "step": 1820
    },
    {
      "epoch": 6.506238859180035,
      "grad_norm": 0.71875,
      "learning_rate": 6.521291354147727e-05,
      "loss": 0.4749,
      "step": 1825
    },
    {
      "epoch": 6.524064171122995,
      "grad_norm": 0.61328125,
      "learning_rate": 6.462919303774186e-05,
      "loss": 0.4686,
      "step": 1830
    },
    {
      "epoch": 6.541889483065954,
      "grad_norm": 0.609375,
      "learning_rate": 6.404684683618325e-05,
      "loss": 0.4761,
      "step": 1835
    },
    {
      "epoch": 6.5597147950089125,
      "grad_norm": 0.59765625,
      "learning_rate": 6.34658975633605e-05,
      "loss": 0.4785,
      "step": 1840
    },
    {
      "epoch": 6.577540106951871,
      "grad_norm": 0.64453125,
      "learning_rate": 6.288636779155621e-05,
      "loss": 0.473,
      "step": 1845
    },
    {
      "epoch": 6.595365418894831,
      "grad_norm": 0.609375,
      "learning_rate": 6.230828003789949e-05,
      "loss": 0.4684,
      "step": 1850
    },
    {
      "epoch": 6.61319073083779,
      "grad_norm": 0.63671875,
      "learning_rate": 6.173165676349103e-05,
      "loss": 0.4638,
      "step": 1855
    },
    {
      "epoch": 6.6310160427807485,
      "grad_norm": 0.58203125,
      "learning_rate": 6.115652037253053e-05,
      "loss": 0.473,
      "step": 1860
    },
    {
      "epoch": 6.648841354723707,
      "grad_norm": 0.6328125,
      "learning_rate": 6.058289321144608e-05,
      "loss": 0.4724,
      "step": 1865
    },
    {
      "epoch": 6.666666666666667,
      "grad_norm": 0.65625,
      "learning_rate": 6.001079756802592e-05,
      "loss": 0.4811,
      "step": 1870
    },
    {
      "epoch": 6.684491978609626,
      "grad_norm": 0.62890625,
      "learning_rate": 5.944025567055251e-05,
      "loss": 0.4698,
      "step": 1875
    },
    {
      "epoch": 6.7023172905525845,
      "grad_norm": 0.69140625,
      "learning_rate": 5.887128968693887e-05,
      "loss": 0.4789,
      "step": 1880
    },
    {
      "epoch": 6.720142602495543,
      "grad_norm": 0.6171875,
      "learning_rate": 5.830392172386723e-05,
      "loss": 0.459,
      "step": 1885
    },
    {
      "epoch": 6.737967914438503,
      "grad_norm": 0.625,
      "learning_rate": 5.773817382593008e-05,
      "loss": 0.4668,
      "step": 1890
    },
    {
      "epoch": 6.755793226381462,
      "grad_norm": 0.69140625,
      "learning_rate": 5.717406797477372e-05,
      "loss": 0.467,
      "step": 1895
    },
    {
      "epoch": 6.7736185383244205,
      "grad_norm": 0.65234375,
      "learning_rate": 5.6611626088244194e-05,
      "loss": 0.4729,
      "step": 1900
    },
    {
      "epoch": 6.791443850267379,
      "grad_norm": 0.6796875,
      "learning_rate": 5.6050870019535494e-05,
      "loss": 0.4677,
      "step": 1905
    },
    {
      "epoch": 6.809269162210339,
      "grad_norm": 0.66796875,
      "learning_rate": 5.549182155634076e-05,
      "loss": 0.4672,
      "step": 1910
    },
    {
      "epoch": 6.827094474153298,
      "grad_norm": 0.70703125,
      "learning_rate": 5.493450242000546e-05,
      "loss": 0.4764,
      "step": 1915
    },
    {
      "epoch": 6.8449197860962565,
      "grad_norm": 0.7109375,
|
"learning_rate": 5.43789342646837e-05, |
|
"loss": 0.4708, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 6.862745098039216, |
|
"grad_norm": 0.66796875, |
|
"learning_rate": 5.382513867649663e-05, |
|
"loss": 0.4603, |
|
"step": 1925 |
|
}, |
|
{ |
|
"epoch": 6.880570409982175, |
|
"grad_norm": 0.63671875, |
|
"learning_rate": 5.32731371726938e-05, |
|
"loss": 0.4689, |
|
"step": 1930 |
|
}, |
|
{ |
|
"epoch": 6.898395721925134, |
|
"grad_norm": 0.67578125, |
|
"learning_rate": 5.272295120081732e-05, |
|
"loss": 0.4701, |
|
"step": 1935 |
|
}, |
|
{ |
|
"epoch": 6.9162210338680925, |
|
"grad_norm": 0.62109375, |
|
"learning_rate": 5.217460213786821e-05, |
|
"loss": 0.472, |
|
"step": 1940 |
|
}, |
|
{ |
|
"epoch": 6.934046345811051, |
|
"grad_norm": 0.60546875, |
|
"learning_rate": 5.162811128947602e-05, |
|
"loss": 0.4717, |
|
"step": 1945 |
|
}, |
|
{ |
|
"epoch": 6.951871657754011, |
|
"grad_norm": 0.70703125, |
|
"learning_rate": 5.108349988907111e-05, |
|
"loss": 0.4766, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 6.96969696969697, |
|
"grad_norm": 0.6015625, |
|
"learning_rate": 5.054078909705926e-05, |
|
"loss": 0.465, |
|
"step": 1955 |
|
}, |
|
{ |
|
"epoch": 6.9875222816399285, |
|
"grad_norm": 0.6640625, |
|
"learning_rate": 5.000000000000002e-05, |
|
"loss": 0.472, |
|
"step": 1960 |
|
}, |
|
{ |
|
"epoch": 6.998217468805704, |
|
"eval_loss": 2.6540169715881348, |
|
"eval_runtime": 1.0444, |
|
"eval_samples_per_second": 4.787, |
|
"eval_steps_per_second": 1.915, |
|
"step": 1963 |
|
}, |
|
    { "epoch": 7.005347593582887, "grad_norm": 0.5625, "learning_rate": 4.946115360978696e-05, "loss": 0.4574, "step": 1965 },
    { "epoch": 7.023172905525847, "grad_norm": 0.6015625, "learning_rate": 4.892427086283147e-05, "loss": 0.4256, "step": 1970 },
    { "epoch": 7.040998217468806, "grad_norm": 0.6875, "learning_rate": 4.8389372619249326e-05, "loss": 0.4282, "step": 1975 },
    { "epoch": 7.0588235294117645, "grad_norm": 0.66796875, "learning_rate": 4.78564796620502e-05, "loss": 0.4247, "step": 1980 },
    { "epoch": 7.076648841354723, "grad_norm": 0.6328125, "learning_rate": 4.732561269632992e-05, "loss": 0.4263, "step": 1985 },
    { "epoch": 7.094474153297683, "grad_norm": 0.703125, "learning_rate": 4.6796792348466356e-05, "loss": 0.4238, "step": 1990 },
    { "epoch": 7.112299465240642, "grad_norm": 0.67578125, "learning_rate": 4.6270039165317605e-05, "loss": 0.4357, "step": 1995 },
    { "epoch": 7.1301247771836005, "grad_norm": 0.74609375, "learning_rate": 4.574537361342407e-05, "loss": 0.4243, "step": 2000 },
    { "epoch": 7.14795008912656, "grad_norm": 0.6328125, "learning_rate": 4.522281607821288e-05, "loss": 0.4226, "step": 2005 },
    { "epoch": 7.165775401069519, "grad_norm": 0.7109375, "learning_rate": 4.470238686320606e-05, "loss": 0.4256, "step": 2010 },
    { "epoch": 7.183600713012478, "grad_norm": 0.58984375, "learning_rate": 4.4184106189231625e-05, "loss": 0.4238, "step": 2015 },
    { "epoch": 7.2014260249554365, "grad_norm": 0.609375, "learning_rate": 4.3667994193637796e-05, "loss": 0.429, "step": 2020 },
    { "epoch": 7.219251336898395, "grad_norm": 0.640625, "learning_rate": 4.315407092951078e-05, "loss": 0.4252, "step": 2025 },
    { "epoch": 7.237076648841355, "grad_norm": 0.66015625, "learning_rate": 4.264235636489542e-05, "loss": 0.425, "step": 2030 },
    { "epoch": 7.254901960784314, "grad_norm": 0.63671875, "learning_rate": 4.213287038201943e-05, "loss": 0.4201, "step": 2035 },
    { "epoch": 7.2727272727272725, "grad_norm": 0.62890625, "learning_rate": 4.1625632776521037e-05, "loss": 0.4321, "step": 2040 },
    { "epoch": 7.290552584670232, "grad_norm": 0.62109375, "learning_rate": 4.112066325667954e-05, "loss": 0.4215, "step": 2045 },
    { "epoch": 7.308377896613191, "grad_norm": 0.63671875, "learning_rate": 4.0617981442649855e-05, "loss": 0.4248, "step": 2050 },
    { "epoch": 7.32620320855615, "grad_norm": 0.73828125, "learning_rate": 4.0117606865699975e-05, "loss": 0.4308, "step": 2055 },
    { "epoch": 7.3440285204991085, "grad_norm": 0.6640625, "learning_rate": 3.961955896745224e-05, "loss": 0.4227, "step": 2060 },
    { "epoch": 7.361853832442068, "grad_norm": 0.66015625, "learning_rate": 3.9123857099127936e-05, "loss": 0.4292, "step": 2065 },
    { "epoch": 7.379679144385027, "grad_norm": 0.62890625, "learning_rate": 3.863052052079528e-05, "loss": 0.4275, "step": 2070 },
    { "epoch": 7.397504456327986, "grad_norm": 0.62109375, "learning_rate": 3.813956840062118e-05, "loss": 0.429, "step": 2075 },
    { "epoch": 7.4153297682709445, "grad_norm": 0.68359375, "learning_rate": 3.7651019814126654e-05, "loss": 0.4301, "step": 2080 },
    { "epoch": 7.433155080213904, "grad_norm": 0.6328125, "learning_rate": 3.716489374344527e-05, "loss": 0.4264, "step": 2085 },
    { "epoch": 7.450980392156863, "grad_norm": 0.64453125, "learning_rate": 3.668120907658603e-05, "loss": 0.4255, "step": 2090 },
    { "epoch": 7.468805704099822, "grad_norm": 0.7421875, "learning_rate": 3.6199984606699155e-05, "loss": 0.4261, "step": 2095 },
    { "epoch": 7.4866310160427805, "grad_norm": 0.640625, "learning_rate": 3.5721239031346066e-05, "loss": 0.4319, "step": 2100 },
    { "epoch": 7.50445632798574, "grad_norm": 0.640625, "learning_rate": 3.5244990951772974e-05, "loss": 0.4233, "step": 2105 },
    { "epoch": 7.522281639928699, "grad_norm": 0.65625, "learning_rate": 3.477125887218792e-05, "loss": 0.4264, "step": 2110 },
    { "epoch": 7.540106951871658, "grad_norm": 0.63671875, "learning_rate": 3.430006119904197e-05, "loss": 0.4343, "step": 2115 },
    { "epoch": 7.5579322638146165, "grad_norm": 0.66015625, "learning_rate": 3.383141624031408e-05, "loss": 0.434, "step": 2120 },
    { "epoch": 7.575757575757576, "grad_norm": 0.60546875, "learning_rate": 3.336534220479961e-05, "loss": 0.4326, "step": 2125 },
    { "epoch": 7.593582887700535, "grad_norm": 0.6796875, "learning_rate": 3.290185720140301e-05, "loss": 0.4299, "step": 2130 },
    { "epoch": 7.611408199643494, "grad_norm": 0.625, "learning_rate": 3.244097923843398e-05, "loss": 0.4219, "step": 2135 },
    { "epoch": 7.6292335115864525, "grad_norm": 0.62890625, "learning_rate": 3.198272622290804e-05, "loss": 0.4262, "step": 2140 },
    { "epoch": 7.647058823529412, "grad_norm": 0.63671875, "learning_rate": 3.152711595985065e-05, "loss": 0.428, "step": 2145 },
    { "epoch": 7.664884135472371, "grad_norm": 0.63671875, "learning_rate": 3.10741661516053e-05, "loss": 0.4282, "step": 2150 },
    { "epoch": 7.68270944741533, "grad_norm": 0.63671875, "learning_rate": 3.062389439714584e-05, "loss": 0.4346, "step": 2155 },
    { "epoch": 7.7005347593582885, "grad_norm": 0.578125, "learning_rate": 3.0176318191392726e-05, "loss": 0.4253, "step": 2160 },
    { "epoch": 7.718360071301248, "grad_norm": 0.609375, "learning_rate": 2.9731454924533086e-05, "loss": 0.4218, "step": 2165 },
    { "epoch": 7.736185383244207, "grad_norm": 0.640625, "learning_rate": 2.9289321881345254e-05, "loss": 0.4284, "step": 2170 },
    { "epoch": 7.754010695187166, "grad_norm": 0.62890625, "learning_rate": 2.8849936240527008e-05, "loss": 0.4359, "step": 2175 },
    { "epoch": 7.7718360071301245, "grad_norm": 0.69921875, "learning_rate": 2.8413315074028158e-05, "loss": 0.4255, "step": 2180 },
    { "epoch": 7.789661319073084, "grad_norm": 0.625, "learning_rate": 2.797947534638736e-05, "loss": 0.4396, "step": 2185 },
    { "epoch": 7.807486631016043, "grad_norm": 0.64453125, "learning_rate": 2.7548433914072734e-05, "loss": 0.4244, "step": 2190 },
    { "epoch": 7.825311942959002, "grad_norm": 0.671875, "learning_rate": 2.7120207524827168e-05, "loss": 0.4363, "step": 2195 },
    { "epoch": 7.8431372549019605, "grad_norm": 0.6875, "learning_rate": 2.669481281701739e-05, "loss": 0.4212, "step": 2200 },
    { "epoch": 7.86096256684492, "grad_norm": 0.64453125, "learning_rate": 2.6272266318987603e-05, "loss": 0.4321, "step": 2205 },
    { "epoch": 7.878787878787879, "grad_norm": 0.640625, "learning_rate": 2.5852584448417328e-05, "loss": 0.4362, "step": 2210 },
    { "epoch": 7.896613190730838, "grad_norm": 0.671875, "learning_rate": 2.5435783511683443e-05, "loss": 0.4327, "step": 2215 },
    { "epoch": 7.9144385026737964, "grad_norm": 0.62890625, "learning_rate": 2.502187970322657e-05, "loss": 0.4261, "step": 2220 },
    { "epoch": 7.932263814616756, "grad_norm": 0.6875, "learning_rate": 2.461088910492202e-05, "loss": 0.4314, "step": 2225 },
    { "epoch": 7.950089126559715, "grad_norm": 0.6796875, "learning_rate": 2.420282768545469e-05, "loss": 0.4356, "step": 2230 },
    { "epoch": 7.967914438502674, "grad_norm": 0.64453125, "learning_rate": 2.3797711299698923e-05, "loss": 0.429, "step": 2235 },
    { "epoch": 7.9857397504456324, "grad_norm": 0.69140625, "learning_rate": 2.339555568810221e-05, "loss": 0.4306, "step": 2240 },
    { "epoch": 8.0, "eval_loss": 2.8791282176971436, "eval_runtime": 1.0241, "eval_samples_per_second": 4.883, "eval_steps_per_second": 1.953, "step": 2244 },
    { "epoch": 8.003565062388592, "grad_norm": 0.578125, "learning_rate": 2.2996376476073723e-05, "loss": 0.4213, "step": 2245 },
    { "epoch": 8.02139037433155, "grad_norm": 0.58984375, "learning_rate": 2.260018917337726e-05, "loss": 0.4101, "step": 2250 },
    { "epoch": 8.03921568627451, "grad_norm": 0.625, "learning_rate": 2.2207009173528527e-05, "loss": 0.4112, "step": 2255 },
    { "epoch": 8.057040998217468, "grad_norm": 0.6015625, "learning_rate": 2.181685175319702e-05, "loss": 0.4117, "step": 2260 },
    { "epoch": 8.074866310160427, "grad_norm": 0.6328125, "learning_rate": 2.142973207161265e-05, "loss": 0.4099, "step": 2265 },
    { "epoch": 8.092691622103386, "grad_norm": 0.62109375, "learning_rate": 2.1045665169976468e-05, "loss": 0.4074, "step": 2270 },
    { "epoch": 8.110516934046347, "grad_norm": 0.65625, "learning_rate": 2.0664665970876496e-05, "loss": 0.4103, "step": 2275 },
    { "epoch": 8.128342245989305, "grad_norm": 0.57421875, "learning_rate": 2.0286749277707782e-05, "loss": 0.4073, "step": 2280 },
    { "epoch": 8.146167557932264, "grad_norm": 0.66796875, "learning_rate": 1.9911929774097215e-05, "loss": 0.4024, "step": 2285 },
    { "epoch": 8.163992869875223, "grad_norm": 0.62890625, "learning_rate": 1.9540222023333166e-05, "loss": 0.4016, "step": 2290 },
    { "epoch": 8.181818181818182, "grad_norm": 0.63671875, "learning_rate": 1.917164046779948e-05, "loss": 0.414, "step": 2295 },
    { "epoch": 8.19964349376114, "grad_norm": 0.703125, "learning_rate": 1.880619942841435e-05, "loss": 0.4106, "step": 2300 },
    { "epoch": 8.2174688057041, "grad_norm": 0.61328125, "learning_rate": 1.8443913104073983e-05, "loss": 0.3989, "step": 2305 },
    { "epoch": 8.235294117647058, "grad_norm": 0.62890625, "learning_rate": 1.808479557110081e-05, "loss": 0.4134, "step": 2310 },
    { "epoch": 8.253119429590019, "grad_norm": 0.62109375, "learning_rate": 1.7728860782696664e-05, "loss": 0.4121, "step": 2315 },
    { "epoch": 8.270944741532977, "grad_norm": 0.69921875, "learning_rate": 1.7376122568400532e-05, "loss": 0.4034, "step": 2320 },
    { "epoch": 8.288770053475936, "grad_norm": 0.671875, "learning_rate": 1.702659463355125e-05, "loss": 0.4085, "step": 2325 },
    { "epoch": 8.306595365418895, "grad_norm": 0.6484375, "learning_rate": 1.668029055875512e-05, "loss": 0.4109, "step": 2330 },
    { "epoch": 8.324420677361854, "grad_norm": 0.625, "learning_rate": 1.6337223799358026e-05, "loss": 0.4103, "step": 2335 },
    { "epoch": 8.342245989304812, "grad_norm": 0.61328125, "learning_rate": 1.5997407684922862e-05, "loss": 0.4104, "step": 2340 },
    { "epoch": 8.360071301247771, "grad_norm": 0.66796875, "learning_rate": 1.566085541871145e-05, "loss": 0.408, "step": 2345 },
    { "epoch": 8.37789661319073, "grad_norm": 0.6171875, "learning_rate": 1.5327580077171587e-05, "loss": 0.4095, "step": 2350 },
    { "epoch": 8.39572192513369, "grad_norm": 0.6328125, "learning_rate": 1.4997594609429088e-05, "loss": 0.4044, "step": 2355 },
    { "epoch": 8.41354723707665, "grad_norm": 0.64453125, "learning_rate": 1.467091183678444e-05, "loss": 0.4029, "step": 2360 },
    { "epoch": 8.431372549019608, "grad_norm": 0.609375, "learning_rate": 1.4347544452214868e-05, "loss": 0.4042, "step": 2365 },
    { "epoch": 8.449197860962567, "grad_norm": 0.609375, "learning_rate": 1.402750501988097e-05, "loss": 0.4043, "step": 2370 },
    { "epoch": 8.467023172905526, "grad_norm": 0.609375, "learning_rate": 1.3710805974638696e-05, "loss": 0.4041, "step": 2375 },
    { "epoch": 8.484848484848484, "grad_norm": 0.59375, "learning_rate": 1.339745962155613e-05, "loss": 0.4155, "step": 2380 },
    { "epoch": 8.502673796791443, "grad_norm": 0.6875, "learning_rate": 1.308747813543536e-05, "loss": 0.4106, "step": 2385 },
    { "epoch": 8.520499108734402, "grad_norm": 0.6640625, "learning_rate": 1.2780873560339468e-05, "loss": 0.4112, "step": 2390 },
    { "epoch": 8.538324420677363, "grad_norm": 0.640625, "learning_rate": 1.2477657809124631e-05, "loss": 0.4062, "step": 2395 },
    { "epoch": 8.556149732620321, "grad_norm": 0.65234375, "learning_rate": 1.2177842662977135e-05, "loss": 0.4053, "step": 2400 },
    { "epoch": 8.57397504456328, "grad_norm": 0.63671875, "learning_rate": 1.188143977095576e-05, "loss": 0.4135, "step": 2405 },
    { "epoch": 8.591800356506239, "grad_norm": 0.6015625, "learning_rate": 1.1588460649539035e-05, "loss": 0.4011, "step": 2410 },
    { "epoch": 8.609625668449198, "grad_norm": 0.66796875, "learning_rate": 1.129891668217783e-05, "loss": 0.4061, "step": 2415 },
    { "epoch": 8.627450980392156, "grad_norm": 0.62109375, "learning_rate": 1.1012819118853147e-05, "loss": 0.4119, "step": 2420 },
    { "epoch": 8.645276292335115, "grad_norm": 0.66796875, "learning_rate": 1.0730179075638868e-05, "loss": 0.409, "step": 2425 },
    { "epoch": 8.663101604278076, "grad_norm": 0.60546875, "learning_rate": 1.0451007534269907e-05, "loss": 0.4089, "step": 2430 },
    { "epoch": 8.680926916221035, "grad_norm": 0.61328125, "learning_rate": 1.0175315341715597e-05, "loss": 0.4066, "step": 2435 },
    { "epoch": 8.698752228163993, "grad_norm": 0.59375, "learning_rate": 9.903113209758096e-06, "loss": 0.4117, "step": 2440 },
    { "epoch": 8.716577540106952, "grad_norm": 0.60546875, "learning_rate": 9.634411714576353e-06, "loss": 0.4017, "step": 2445 },
    { "epoch": 8.73440285204991, "grad_norm": 0.625, "learning_rate": 9.369221296335006e-06, "loss": 0.4036, "step": 2450 },
    { "epoch": 8.75222816399287, "grad_norm": 0.62109375, "learning_rate": 9.107552258778907e-06, "loss": 0.4019, "step": 2455 },
    { "epoch": 8.770053475935828, "grad_norm": 0.65234375, "learning_rate": 8.849414768832687e-06, "loss": 0.4119, "step": 2460 },
    { "epoch": 8.787878787878787, "grad_norm": 0.65625, "learning_rate": 8.5948188562057e-06, "loss": 0.4065, "step": 2465 },
    { "epoch": 8.805704099821746, "grad_norm": 0.625, "learning_rate": 8.343774413002381e-06, "loss": 0.405, "step": 2470 },
    { "epoch": 8.823529411764707, "grad_norm": 0.6328125, "learning_rate": 8.096291193337934e-06, "loss": 0.4049, "step": 2475 },
    { "epoch": 8.841354723707665, "grad_norm": 0.6328125, "learning_rate": 7.852378812959227e-06, "loss": 0.4033, "step": 2480 },
    { "epoch": 8.859180035650624, "grad_norm": 0.58203125, "learning_rate": 7.612046748871327e-06, "loss": 0.4044, "step": 2485 },
    { "epoch": 8.877005347593583, "grad_norm": 0.71484375, "learning_rate": 7.375304338969136e-06, "loss": 0.4068, "step": 2490 },
    { "epoch": 8.894830659536542, "grad_norm": 0.6171875, "learning_rate": 7.142160781674645e-06, "loss": 0.4035, "step": 2495 },
    { "epoch": 8.9126559714795, "grad_norm": 0.6328125, "learning_rate": 6.9126251355795864e-06, "loss": 0.4083, "step": 2500 },
    { "epoch": 8.93048128342246, "grad_norm": 0.62109375, "learning_rate": 6.68670631909335e-06, "loss": 0.4059, "step": 2505 },
    { "epoch": 8.94830659536542, "grad_norm": 0.66796875, "learning_rate": 6.464413110096601e-06, "loss": 0.4114, "step": 2510 },
    { "epoch": 8.966131907308379, "grad_norm": 0.625, "learning_rate": 6.245754145600091e-06, "loss": 0.4132, "step": 2515 },
    { "epoch": 8.983957219251337, "grad_norm": 0.640625, "learning_rate": 6.030737921409169e-06, "loss": 0.4106, "step": 2520 },
    { "epoch": 8.998217468805704, "eval_loss": 3.001107692718506, "eval_runtime": 1.0503, "eval_samples_per_second": 4.76, "eval_steps_per_second": 1.904, "step": 2524 },
    { "epoch": 9.001782531194296, "grad_norm": 0.578125, "learning_rate": 5.8193727917936536e-06, "loss": 0.4088, "step": 2525 },
    { "epoch": 9.019607843137255, "grad_norm": 0.65234375, "learning_rate": 5.611666969163243e-06, "loss": 0.4124, "step": 2530 },
    { "epoch": 9.037433155080214, "grad_norm": 0.5859375, "learning_rate": 5.4076285237483984e-06, "loss": 0.405, "step": 2535 },
    { "epoch": 9.055258467023172, "grad_norm": 0.62890625, "learning_rate": 5.20726538328683e-06, "loss": 0.3947, "step": 2540 },
    { "epoch": 9.073083778966131, "grad_norm": 0.6015625, "learning_rate": 5.0105853327154e-06, "loss": 0.4077, "step": 2545 },
    { "epoch": 9.090909090909092, "grad_norm": 0.58984375, "learning_rate": 4.817596013867764e-06, "loss": 0.402, "step": 2550 },
    { "epoch": 9.10873440285205, "grad_norm": 0.58984375, "learning_rate": 4.628304925177318e-06, "loss": 0.4053, "step": 2555 },
    { "epoch": 9.12655971479501, "grad_norm": 0.6015625, "learning_rate": 4.442719421385922e-06, "loss": 0.4063, "step": 2560 },
    { "epoch": 9.144385026737968, "grad_norm": 0.6171875, "learning_rate": 4.260846713258193e-06, "loss": 0.4066, "step": 2565 },
    { "epoch": 9.162210338680927, "grad_norm": 0.6484375, "learning_rate": 4.082693867301224e-06, "loss": 0.4103, "step": 2570 },
    { "epoch": 9.180035650623886, "grad_norm": 0.61328125, "learning_rate": 3.908267805490051e-06, "loss": 0.4052, "step": 2575 },
    { "epoch": 9.197860962566844, "grad_norm": 0.59765625, "learning_rate": 3.7375753049987973e-06, "loss": 0.4004, "step": 2580 },
    { "epoch": 9.215686274509803, "grad_norm": 0.62109375, "learning_rate": 3.570622997937234e-06, "loss": 0.4049, "step": 2585 },
    { "epoch": 9.233511586452764, "grad_norm": 0.62109375, "learning_rate": 3.40741737109318e-06, "loss": 0.4048, "step": 2590 },
    { "epoch": 9.251336898395722, "grad_norm": 0.6171875, "learning_rate": 3.247964765680389e-06, "loss": 0.4052, "step": 2595 },
    { "epoch": 9.269162210338681, "grad_norm": 0.6640625, "learning_rate": 3.092271377092215e-06, "loss": 0.4098, "step": 2600 },
    { "epoch": 9.28698752228164, "grad_norm": 0.6171875, "learning_rate": 2.9403432546609043e-06, "loss": 0.4011, "step": 2605 },
    { "epoch": 9.304812834224599, "grad_norm": 0.6015625, "learning_rate": 2.7921863014225503e-06, "loss": 0.4075, "step": 2610 },
    { "epoch": 9.322638146167558, "grad_norm": 0.59375, "learning_rate": 2.647806273887665e-06, "loss": 0.4045, "step": 2615 },
    { "epoch": 9.340463458110516, "grad_norm": 0.609375, "learning_rate": 2.5072087818176382e-06, "loss": 0.399, "step": 2620 },
    { "epoch": 9.358288770053475, "grad_norm": 0.625, "learning_rate": 2.3703992880066638e-06, "loss": 0.4012, "step": 2625 },
    { "epoch": 9.376114081996436, "grad_norm": 0.62109375, "learning_rate": 2.237383108069546e-06, "loss": 0.4092, "step": 2630 },
    { "epoch": 9.393939393939394, "grad_norm": 0.58984375, "learning_rate": 2.1081654102351635e-06, "loss": 0.3956, "step": 2635 },
    { "epoch": 9.411764705882353, "grad_norm": 0.60546875, "learning_rate": 1.9827512151456173e-06, "loss": 0.4007, "step": 2640 },
    { "epoch": 9.429590017825312, "grad_norm": 0.6328125, "learning_rate": 1.8611453956612347e-06, "loss": 0.4005, "step": 2645 },
    { "epoch": 9.44741532976827, "grad_norm": 0.6015625, "learning_rate": 1.7433526766711728e-06, "loss": 0.4045, "step": 2650 },
    { "epoch": 9.46524064171123, "grad_norm": 0.58984375, "learning_rate": 1.6293776349098677e-06, "loss": 0.4041, "step": 2655 },
    { "epoch": 9.483065953654188, "grad_norm": 0.6015625, "learning_rate": 1.5192246987791981e-06, "loss": 0.403, "step": 2660 },
    { "epoch": 9.500891265597147, "grad_norm": 0.64453125, "learning_rate": 1.4128981481764115e-06, "loss": 0.3989, "step": 2665 },
    { "epoch": 9.518716577540108, "grad_norm": 0.65625, "learning_rate": 1.3104021143278911e-06, "loss": 0.4132, "step": 2670 },
    { "epoch": 9.536541889483066, "grad_norm": 0.640625, "learning_rate": 1.2117405796285286e-06, "loss": 0.401, "step": 2675 },
    { "epoch": 9.554367201426025, "grad_norm": 0.6640625, "learning_rate": 1.1169173774871478e-06, "loss": 0.3957, "step": 2680 },
    { "epoch": 9.572192513368984, "grad_norm": 0.62890625, "learning_rate": 1.0259361921774013e-06, "loss": 0.4003, "step": 2685 },
    { "epoch": 9.590017825311943, "grad_norm": 0.640625, "learning_rate": 9.388005586947191e-07, "loss": 0.4009, "step": 2690 },
    { "epoch": 9.607843137254902, "grad_norm": 0.66796875, "learning_rate": 8.555138626189618e-07, "loss": 0.4052, "step": 2695 },
    { "epoch": 9.62566844919786, "grad_norm": 0.5859375, "learning_rate": 7.760793399827937e-07, "loss": 0.4009, "step": 2700 },
    { "epoch": 9.643493761140821, "grad_norm": 0.609375, "learning_rate": 7.00500077146038e-07, "loss": 0.4017, "step": 2705 },
    { "epoch": 9.66131907308378, "grad_norm": 0.6171875, "learning_rate": 6.287790106757396e-07, "loss": 0.3964, "step": 2710 },
    { "epoch": 9.679144385026738, "grad_norm": 0.62109375, "learning_rate": 5.609189272320237e-07, "loss": 0.4022, "step": 2715 },
    { "epoch": 9.696969696969697, "grad_norm": 0.625, "learning_rate": 4.969224634598591e-07, "loss": 0.404, "step": 2720 },
    { "epoch": 9.714795008912656, "grad_norm": 0.609375, "learning_rate": 4.3679210588661866e-07, "loss": 0.409, "step": 2725 },
    { "epoch": 9.732620320855615, "grad_norm": 0.6328125, "learning_rate": 3.805301908254455e-07, "loss": 0.4033, "step": 2730 },
    { "epoch": 9.750445632798574, "grad_norm": 0.62890625, "learning_rate": 3.281389042844918e-07, "loss": 0.4111, "step": 2735 },
    { "epoch": 9.768270944741532, "grad_norm": 0.61328125, "learning_rate": 2.7962028188198706e-07, "loss": 0.3937, "step": 2740 },
    { "epoch": 9.786096256684491, "grad_norm": 0.58984375, "learning_rate": 2.3497620876711257e-07, "loss": 0.4115, "step": 2745 },
    { "epoch": 9.803921568627452, "grad_norm": 0.625, "learning_rate": 1.9420841954681525e-07, "loss": 0.4025, "step": 2750 },
    { "epoch": 9.82174688057041, "grad_norm": 0.640625, "learning_rate": 1.5731849821833954e-07, "loss": 0.4012, "step": 2755 },
    { "epoch": 9.83957219251337, "grad_norm": 0.63671875, "learning_rate": 1.2430787810776555e-07, "loss": 0.402, "step": 2760 },
    { "epoch": 9.857397504456328, "grad_norm": 0.59375, "learning_rate": 9.517784181422019e-08, "loss": 0.4128, "step": 2765 },
    { "epoch": 9.875222816399287, "grad_norm": 0.58984375, "learning_rate": 6.992952116013918e-08, "loss": 0.4023, "step": 2770 },
    { "epoch": 9.893048128342246, "grad_norm": 0.60546875, "learning_rate": 4.856389714723575e-08, "loss": 0.4023, "step": 2775 },
    { "epoch": 9.910873440285204, "grad_norm": 0.61328125, "learning_rate": 3.1081799918375454e-08, "loss": 0.4105, "step": 2780 },
    { "epoch": 9.928698752228165, "grad_norm": 0.625, "learning_rate": 1.7483908725357545e-08, "loss": 0.4085, "step": 2785 },
    { "epoch": 9.946524064171124, "grad_norm": 0.5859375, "learning_rate": 7.770751902513862e-09, "loss": 0.3979, "step": 2790 },
    { "epoch": 9.964349376114082, "grad_norm": 0.65234375, "learning_rate": 1.9427068461808083e-09, "loss": 0.4084, "step": 2795 },
    { "epoch": 9.982174688057041, "grad_norm": 0.609375, "learning_rate": 0.0, "loss": 0.4021, "step": 2800 },
    { "epoch": 9.982174688057041, "eval_loss": 3.022949695587158, "eval_runtime": 1.033, "eval_samples_per_second": 4.84, "eval_steps_per_second": 1.936, "step": 2800 },
    { "epoch": 9.982174688057041, "step": 2800, "total_flos": 4.2808448846481326e+18, "train_loss": 1.3059567311831883, "train_runtime": 23112.9867, "train_samples_per_second": 1.939, "train_steps_per_second": 0.121 }
  ],
"logging_steps": 5, |
|
"max_steps": 2800, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 10, |
|
"save_steps": 100, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 4.2808448846481326e+18, |
|
"train_batch_size": 4, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|