|
{ |
|
"best_metric": 0.9375, |
|
"best_model_checkpoint": "./resnet50/checkpoint-2436", |
|
"epoch": 30.0, |
|
"eval_steps": 500, |
|
"global_step": 2520, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.11904761904761904, |
|
"grad_norm": 0.48410382866859436, |
|
"learning_rate": 7.936507936507937e-07, |
|
"loss": 1.3866, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.23809523809523808, |
|
"grad_norm": 0.4765987694263458, |
|
"learning_rate": 1.5873015873015873e-06, |
|
"loss": 1.3861, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.35714285714285715, |
|
"grad_norm": 0.43654772639274597, |
|
"learning_rate": 2.380952380952381e-06, |
|
"loss": 1.3859, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.47619047619047616, |
|
"grad_norm": 0.5733897686004639, |
|
"learning_rate": 3.1746031746031746e-06, |
|
"loss": 1.3855, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.5952380952380952, |
|
"grad_norm": 0.457210898399353, |
|
"learning_rate": 3.968253968253968e-06, |
|
"loss": 1.385, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.7142857142857143, |
|
"grad_norm": 0.5107349157333374, |
|
"learning_rate": 4.761904761904762e-06, |
|
"loss": 1.3843, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.8333333333333334, |
|
"grad_norm": 0.6200265288352966, |
|
"learning_rate": 5.555555555555557e-06, |
|
"loss": 1.3832, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.9523809523809523, |
|
"grad_norm": 0.6969241499900818, |
|
"learning_rate": 6.349206349206349e-06, |
|
"loss": 1.382, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"eval_accuracy": 0.58375, |
|
"eval_loss": 1.3806676864624023, |
|
"eval_runtime": 5.5764, |
|
"eval_samples_per_second": 143.462, |
|
"eval_steps_per_second": 17.933, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 1.0714285714285714, |
|
"grad_norm": 0.616682231426239, |
|
"learning_rate": 7.1428571428571436e-06, |
|
"loss": 1.3805, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 1.1904761904761905, |
|
"grad_norm": 0.774887204170227, |
|
"learning_rate": 7.936507936507936e-06, |
|
"loss": 1.3786, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 1.3095238095238095, |
|
"grad_norm": 0.862555205821991, |
|
"learning_rate": 8.730158730158731e-06, |
|
"loss": 1.3754, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 1.4285714285714286, |
|
"grad_norm": 0.7880147695541382, |
|
"learning_rate": 9.523809523809525e-06, |
|
"loss": 1.3714, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 1.5476190476190477, |
|
"grad_norm": 0.9495492577552795, |
|
"learning_rate": 1.031746031746032e-05, |
|
"loss": 1.3663, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 1.6666666666666665, |
|
"grad_norm": 1.0720769166946411, |
|
"learning_rate": 1.1111111111111113e-05, |
|
"loss": 1.3595, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 1.7857142857142856, |
|
"grad_norm": 1.4009815454483032, |
|
"learning_rate": 1.1904761904761905e-05, |
|
"loss": 1.3506, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 1.9047619047619047, |
|
"grad_norm": 1.4374886751174927, |
|
"learning_rate": 1.2698412698412699e-05, |
|
"loss": 1.3401, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"eval_accuracy": 0.44875, |
|
"eval_loss": 1.31219482421875, |
|
"eval_runtime": 5.4479, |
|
"eval_samples_per_second": 146.845, |
|
"eval_steps_per_second": 18.356, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 2.0238095238095237, |
|
"grad_norm": 1.4388487339019775, |
|
"learning_rate": 1.3492063492063494e-05, |
|
"loss": 1.3256, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 2.142857142857143, |
|
"grad_norm": 2.0602617263793945, |
|
"learning_rate": 1.4285714285714287e-05, |
|
"loss": 1.3085, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 2.261904761904762, |
|
"grad_norm": 2.5268349647521973, |
|
"learning_rate": 1.507936507936508e-05, |
|
"loss": 1.2787, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 2.380952380952381, |
|
"grad_norm": 2.7516028881073, |
|
"learning_rate": 1.5873015873015872e-05, |
|
"loss": 1.2603, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"grad_norm": 2.6878774166107178, |
|
"learning_rate": 1.6666666666666667e-05, |
|
"loss": 1.2355, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 2.619047619047619, |
|
"grad_norm": 1.6872291564941406, |
|
"learning_rate": 1.7460317460317463e-05, |
|
"loss": 1.201, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 2.738095238095238, |
|
"grad_norm": 1.6558163166046143, |
|
"learning_rate": 1.8253968253968254e-05, |
|
"loss": 1.18, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 2.857142857142857, |
|
"grad_norm": 2.9151134490966797, |
|
"learning_rate": 1.904761904761905e-05, |
|
"loss": 1.1485, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 2.9761904761904763, |
|
"grad_norm": 1.5142790079116821, |
|
"learning_rate": 1.9841269841269845e-05, |
|
"loss": 1.1534, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"eval_accuracy": 0.5, |
|
"eval_loss": 1.121809482574463, |
|
"eval_runtime": 5.3391, |
|
"eval_samples_per_second": 149.838, |
|
"eval_steps_per_second": 18.73, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 3.0952380952380953, |
|
"grad_norm": 1.396998643875122, |
|
"learning_rate": 1.9938271604938272e-05, |
|
"loss": 1.1074, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 3.2142857142857144, |
|
"grad_norm": 1.8247098922729492, |
|
"learning_rate": 1.9850088183421517e-05, |
|
"loss": 1.1146, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 3.3333333333333335, |
|
"grad_norm": 3.359285831451416, |
|
"learning_rate": 1.9761904761904763e-05, |
|
"loss": 1.0917, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 3.4523809523809526, |
|
"grad_norm": 2.1544318199157715, |
|
"learning_rate": 1.9673721340388008e-05, |
|
"loss": 1.0922, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 3.571428571428571, |
|
"grad_norm": 1.6087664365768433, |
|
"learning_rate": 1.9585537918871253e-05, |
|
"loss": 1.076, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 3.6904761904761907, |
|
"grad_norm": 2.3240392208099365, |
|
"learning_rate": 1.94973544973545e-05, |
|
"loss": 1.0453, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 3.8095238095238093, |
|
"grad_norm": 2.1408257484436035, |
|
"learning_rate": 1.9409171075837744e-05, |
|
"loss": 1.0366, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 3.928571428571429, |
|
"grad_norm": 4.0489583015441895, |
|
"learning_rate": 1.932098765432099e-05, |
|
"loss": 1.0343, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 4.0, |
|
"eval_accuracy": 0.67625, |
|
"eval_loss": 1.0234503746032715, |
|
"eval_runtime": 5.3529, |
|
"eval_samples_per_second": 149.453, |
|
"eval_steps_per_second": 18.682, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 4.0476190476190474, |
|
"grad_norm": 3.3416860103607178, |
|
"learning_rate": 1.9232804232804235e-05, |
|
"loss": 1.0142, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 4.166666666666667, |
|
"grad_norm": 2.0407114028930664, |
|
"learning_rate": 1.914462081128748e-05, |
|
"loss": 0.9883, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 4.285714285714286, |
|
"grad_norm": 1.7006970643997192, |
|
"learning_rate": 1.9056437389770726e-05, |
|
"loss": 0.9741, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 4.404761904761905, |
|
"grad_norm": 2.021171808242798, |
|
"learning_rate": 1.8968253968253968e-05, |
|
"loss": 0.9754, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 4.523809523809524, |
|
"grad_norm": 4.418124675750732, |
|
"learning_rate": 1.8880070546737216e-05, |
|
"loss": 0.9428, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 4.642857142857143, |
|
"grad_norm": 2.6121156215667725, |
|
"learning_rate": 1.8791887125220462e-05, |
|
"loss": 0.9401, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 4.761904761904762, |
|
"grad_norm": 2.1322250366210938, |
|
"learning_rate": 1.8703703703703707e-05, |
|
"loss": 0.9409, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 4.880952380952381, |
|
"grad_norm": 1.571338415145874, |
|
"learning_rate": 1.861552028218695e-05, |
|
"loss": 0.9062, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 5.0, |
|
      "grad_norm": null,
|
"learning_rate": 1.853615520282187e-05, |
|
"loss": 0.9117, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 5.0, |
|
"eval_accuracy": 0.7175, |
|
"eval_loss": 0.9020006656646729, |
|
"eval_runtime": 5.4379, |
|
"eval_samples_per_second": 147.116, |
|
"eval_steps_per_second": 18.389, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 5.119047619047619, |
|
"grad_norm": 4.1433868408203125, |
|
"learning_rate": 1.8447971781305116e-05, |
|
"loss": 0.8893, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 5.238095238095238, |
|
"grad_norm": 2.4193081855773926, |
|
"learning_rate": 1.835978835978836e-05, |
|
"loss": 0.8883, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 5.357142857142857, |
|
"grad_norm": 3.0261456966400146, |
|
"learning_rate": 1.8271604938271607e-05, |
|
"loss": 0.8375, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 5.476190476190476, |
|
"grad_norm": 4.649360656738281, |
|
"learning_rate": 1.8183421516754852e-05, |
|
"loss": 0.8617, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 5.595238095238095, |
|
"grad_norm": 2.217705011367798, |
|
"learning_rate": 1.8095238095238097e-05, |
|
"loss": 0.8377, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 5.714285714285714, |
|
"grad_norm": 3.888078212738037, |
|
"learning_rate": 1.8007054673721343e-05, |
|
"loss": 0.8027, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 5.833333333333333, |
|
"grad_norm": 3.240481376647949, |
|
"learning_rate": 1.7918871252204585e-05, |
|
"loss": 0.8258, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 5.9523809523809526, |
|
"grad_norm": 2.535261631011963, |
|
"learning_rate": 1.783068783068783e-05, |
|
"loss": 0.8169, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 6.0, |
|
"eval_accuracy": 0.7925, |
|
"eval_loss": 0.8129910230636597, |
|
"eval_runtime": 5.2312, |
|
"eval_samples_per_second": 152.927, |
|
"eval_steps_per_second": 19.116, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 6.071428571428571, |
|
"grad_norm": 3.822288751602173, |
|
"learning_rate": 1.774250440917108e-05, |
|
"loss": 0.7932, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 6.190476190476191, |
|
"grad_norm": 2.448084592819214, |
|
"learning_rate": 1.7654320987654324e-05, |
|
"loss": 0.7807, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 6.309523809523809, |
|
"grad_norm": 4.276307106018066, |
|
"learning_rate": 1.7566137566137566e-05, |
|
"loss": 0.7736, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 6.428571428571429, |
|
"grad_norm": 3.5429012775421143, |
|
"learning_rate": 1.747795414462081e-05, |
|
"loss": 0.7453, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 6.5476190476190474, |
|
"grad_norm": 2.4782090187072754, |
|
"learning_rate": 1.7389770723104057e-05, |
|
"loss": 0.7327, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 6.666666666666667, |
|
"grad_norm": 5.342073440551758, |
|
"learning_rate": 1.7301587301587302e-05, |
|
"loss": 0.7247, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 6.785714285714286, |
|
"grad_norm": 10.06567668914795, |
|
"learning_rate": 1.7213403880070548e-05, |
|
"loss": 0.7088, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 6.904761904761905, |
|
"grad_norm": 3.217452049255371, |
|
"learning_rate": 1.713403880070547e-05, |
|
"loss": 0.7058, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 7.0, |
|
"eval_accuracy": 0.815, |
|
"eval_loss": 0.7232482433319092, |
|
"eval_runtime": 5.4248, |
|
"eval_samples_per_second": 147.471, |
|
"eval_steps_per_second": 18.434, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 7.023809523809524, |
|
"grad_norm": 4.111032009124756, |
|
"learning_rate": 1.7045855379188714e-05, |
|
"loss": 0.6987, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 7.142857142857143, |
|
"grad_norm": 2.667541027069092, |
|
"learning_rate": 1.695767195767196e-05, |
|
"loss": 0.684, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 7.261904761904762, |
|
"grad_norm": 8.819822311401367, |
|
"learning_rate": 1.6869488536155205e-05, |
|
"loss": 0.674, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 7.380952380952381, |
|
"grad_norm": 3.9701991081237793, |
|
"learning_rate": 1.6781305114638447e-05, |
|
"loss": 0.6199, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 7.5, |
|
"grad_norm": 8.835700035095215, |
|
"learning_rate": 1.6693121693121696e-05, |
|
"loss": 0.6241, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 7.619047619047619, |
|
"grad_norm": 3.7322373390197754, |
|
"learning_rate": 1.660493827160494e-05, |
|
"loss": 0.6087, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 7.738095238095238, |
|
"grad_norm": 4.442748069763184, |
|
"learning_rate": 1.6516754850088187e-05, |
|
"loss": 0.589, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 7.857142857142857, |
|
"grad_norm": 3.6890928745269775, |
|
"learning_rate": 1.642857142857143e-05, |
|
"loss": 0.5804, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 7.976190476190476, |
|
"grad_norm": 7.095566749572754, |
|
"learning_rate": 1.6340388007054674e-05, |
|
"loss": 0.5556, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 8.0, |
|
"eval_accuracy": 0.83375, |
|
"eval_loss": 0.58147794008255, |
|
"eval_runtime": 6.0471, |
|
"eval_samples_per_second": 132.295, |
|
"eval_steps_per_second": 16.537, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 8.095238095238095, |
|
"grad_norm": 4.2159318923950195, |
|
"learning_rate": 1.625220458553792e-05, |
|
"loss": 0.577, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 8.214285714285714, |
|
"grad_norm": 3.634282350540161, |
|
"learning_rate": 1.6164021164021168e-05, |
|
"loss": 0.5226, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 8.333333333333334, |
|
"grad_norm": 3.7204463481903076, |
|
"learning_rate": 1.607583774250441e-05, |
|
"loss": 0.5397, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 8.452380952380953, |
|
"grad_norm": 8.489058494567871, |
|
"learning_rate": 1.5987654320987655e-05, |
|
"loss": 0.515, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 8.571428571428571, |
|
"grad_norm": 8.01997184753418, |
|
"learning_rate": 1.58994708994709e-05, |
|
"loss": 0.5017, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 8.69047619047619, |
|
"grad_norm": 5.835849761962891, |
|
"learning_rate": 1.5811287477954146e-05, |
|
"loss": 0.5008, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 8.80952380952381, |
|
"grad_norm": 10.542766571044922, |
|
"learning_rate": 1.572310405643739e-05, |
|
"loss": 0.476, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 8.928571428571429, |
|
"grad_norm": 7.684937953948975, |
|
"learning_rate": 1.5634920634920637e-05, |
|
"loss": 0.4527, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 9.0, |
|
"eval_accuracy": 0.86625, |
|
"eval_loss": 0.4813559651374817, |
|
"eval_runtime": 6.5617, |
|
"eval_samples_per_second": 121.92, |
|
"eval_steps_per_second": 15.24, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 9.047619047619047, |
|
"grad_norm": 6.5577569007873535, |
|
"learning_rate": 1.5546737213403882e-05, |
|
"loss": 0.4875, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 9.166666666666666, |
|
"grad_norm": 6.667921543121338, |
|
"learning_rate": 1.5458553791887128e-05, |
|
"loss": 0.4581, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 9.285714285714286, |
|
"grad_norm": 5.33371114730835, |
|
"learning_rate": 1.537037037037037e-05, |
|
"loss": 0.4269, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 9.404761904761905, |
|
"grad_norm": 4.12121057510376, |
|
"learning_rate": 1.5282186948853618e-05, |
|
"loss": 0.4173, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 9.523809523809524, |
|
"grad_norm": 7.042411804199219, |
|
"learning_rate": 1.5194003527336862e-05, |
|
"loss": 0.4312, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 9.642857142857142, |
|
"grad_norm": 6.229002952575684, |
|
"learning_rate": 1.5105820105820109e-05, |
|
"loss": 0.421, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 9.761904761904763, |
|
"grad_norm": 9.01046085357666, |
|
"learning_rate": 1.5017636684303351e-05, |
|
"loss": 0.4182, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 9.880952380952381, |
|
"grad_norm": 9.333113670349121, |
|
"learning_rate": 1.4929453262786598e-05, |
|
"loss": 0.4065, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 10.0, |
|
"grad_norm": 5.7903876304626465, |
|
"learning_rate": 1.4841269841269843e-05, |
|
"loss": 0.3994, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 10.0, |
|
"eval_accuracy": 0.86875, |
|
"eval_loss": 0.43535658717155457, |
|
"eval_runtime": 6.424, |
|
"eval_samples_per_second": 124.534, |
|
"eval_steps_per_second": 15.567, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 10.119047619047619, |
|
"grad_norm": 4.672136306762695, |
|
"learning_rate": 1.4753086419753087e-05, |
|
"loss": 0.3692, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 10.238095238095237, |
|
"grad_norm": 9.825118064880371, |
|
"learning_rate": 1.4664902998236332e-05, |
|
"loss": 0.3946, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 10.357142857142858, |
|
"grad_norm": 10.594053268432617, |
|
"learning_rate": 1.4576719576719578e-05, |
|
"loss": 0.3836, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 10.476190476190476, |
|
"grad_norm": 6.456384658813477, |
|
"learning_rate": 1.4488536155202823e-05, |
|
"loss": 0.3922, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 10.595238095238095, |
|
"grad_norm": 7.883987903594971, |
|
"learning_rate": 1.4400352733686067e-05, |
|
"loss": 0.3767, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 10.714285714285714, |
|
"grad_norm": 3.752121686935425, |
|
"learning_rate": 1.4312169312169312e-05, |
|
"loss": 0.3731, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 10.833333333333334, |
|
"grad_norm": 5.878788948059082, |
|
"learning_rate": 1.422398589065256e-05, |
|
"loss": 0.3581, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 10.952380952380953, |
|
"grad_norm": 10.106928825378418, |
|
"learning_rate": 1.4135802469135805e-05, |
|
"loss": 0.3399, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 11.0, |
|
"eval_accuracy": 0.89, |
|
"eval_loss": 0.3747052252292633, |
|
"eval_runtime": 6.1766, |
|
"eval_samples_per_second": 129.52, |
|
"eval_steps_per_second": 16.19, |
|
"step": 924 |
|
}, |
|
{ |
|
"epoch": 11.071428571428571, |
|
"grad_norm": 13.221107482910156, |
|
"learning_rate": 1.4047619047619048e-05, |
|
"loss": 0.3512, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 11.19047619047619, |
|
"grad_norm": 5.013436317443848, |
|
"learning_rate": 1.3959435626102294e-05, |
|
"loss": 0.3217, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 11.30952380952381, |
|
"grad_norm": 4.782650470733643, |
|
"learning_rate": 1.3871252204585539e-05, |
|
"loss": 0.3603, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 11.428571428571429, |
|
"grad_norm": 7.278000354766846, |
|
"learning_rate": 1.3783068783068784e-05, |
|
"loss": 0.3193, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 11.547619047619047, |
|
"grad_norm": 13.20319652557373, |
|
"learning_rate": 1.3694885361552028e-05, |
|
"loss": 0.3429, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 11.666666666666666, |
|
"grad_norm": 10.208937644958496, |
|
"learning_rate": 1.3606701940035273e-05, |
|
"loss": 0.3391, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 11.785714285714286, |
|
"grad_norm": 8.55504322052002, |
|
"learning_rate": 1.351851851851852e-05, |
|
"loss": 0.3286, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 11.904761904761905, |
|
"grad_norm": 9.174301147460938, |
|
"learning_rate": 1.3430335097001766e-05, |
|
"loss": 0.3157, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 12.0, |
|
"eval_accuracy": 0.895, |
|
"eval_loss": 0.33855053782463074, |
|
"eval_runtime": 5.5685, |
|
"eval_samples_per_second": 143.664, |
|
"eval_steps_per_second": 17.958, |
|
"step": 1008 |
|
}, |
|
{ |
|
"epoch": 12.023809523809524, |
|
"grad_norm": 9.196775436401367, |
|
"learning_rate": 1.334215167548501e-05, |
|
"loss": 0.32, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 12.142857142857142, |
|
"grad_norm": 7.491143703460693, |
|
"learning_rate": 1.3253968253968255e-05, |
|
"loss": 0.3211, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 12.261904761904763, |
|
"grad_norm": 10.411942481994629, |
|
"learning_rate": 1.31657848324515e-05, |
|
"loss": 0.31, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 12.380952380952381, |
|
"grad_norm": 7.7979021072387695, |
|
"learning_rate": 1.3077601410934746e-05, |
|
"loss": 0.301, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 12.5, |
|
"grad_norm": 16.540781021118164, |
|
"learning_rate": 1.298941798941799e-05, |
|
"loss": 0.2879, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 12.619047619047619, |
|
"grad_norm": 8.34303092956543, |
|
"learning_rate": 1.2901234567901235e-05, |
|
"loss": 0.2837, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 12.738095238095237, |
|
"grad_norm": 5.1471405029296875, |
|
"learning_rate": 1.2813051146384482e-05, |
|
"loss": 0.2943, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 12.857142857142858, |
|
"grad_norm": 12.514649391174316, |
|
"learning_rate": 1.2724867724867727e-05, |
|
"loss": 0.3033, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 12.976190476190476, |
|
"grad_norm": 14.337882041931152, |
|
"learning_rate": 1.263668430335097e-05, |
|
"loss": 0.3094, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 13.0, |
|
"eval_accuracy": 0.90375, |
|
"eval_loss": 0.3166072368621826, |
|
"eval_runtime": 5.6029, |
|
"eval_samples_per_second": 142.783, |
|
"eval_steps_per_second": 17.848, |
|
"step": 1092 |
|
}, |
|
{ |
|
"epoch": 13.095238095238095, |
|
"grad_norm": 7.664865493774414, |
|
"learning_rate": 1.2548500881834216e-05, |
|
"loss": 0.3027, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 13.214285714285714, |
|
"grad_norm": 7.5357489585876465, |
|
"learning_rate": 1.2460317460317461e-05, |
|
"loss": 0.303, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 13.333333333333334, |
|
"grad_norm": 10.726656913757324, |
|
"learning_rate": 1.2372134038800707e-05, |
|
"loss": 0.2897, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 13.452380952380953, |
|
"grad_norm": 12.0068359375, |
|
"learning_rate": 1.228395061728395e-05, |
|
"loss": 0.2949, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 13.571428571428571, |
|
"grad_norm": 9.883105278015137, |
|
"learning_rate": 1.2195767195767196e-05, |
|
"loss": 0.2573, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 13.69047619047619, |
|
"grad_norm": 9.254776000976562, |
|
"learning_rate": 1.2107583774250443e-05, |
|
"loss": 0.2838, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 13.80952380952381, |
|
"grad_norm": 6.0406880378723145, |
|
"learning_rate": 1.2019400352733688e-05, |
|
"loss": 0.2558, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 13.928571428571429, |
|
"grad_norm": 6.664582252502441, |
|
"learning_rate": 1.1931216931216932e-05, |
|
"loss": 0.2839, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 14.0, |
|
"eval_accuracy": 0.90125, |
|
"eval_loss": 0.31680428981781006, |
|
"eval_runtime": 5.4208, |
|
"eval_samples_per_second": 147.579, |
|
"eval_steps_per_second": 18.447, |
|
"step": 1176 |
|
}, |
|
{ |
|
"epoch": 14.047619047619047, |
|
"grad_norm": 11.964251518249512, |
|
"learning_rate": 1.1843033509700177e-05, |
|
"loss": 0.2688, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 14.166666666666666, |
|
"grad_norm": 6.548354148864746, |
|
"learning_rate": 1.1754850088183423e-05, |
|
"loss": 0.2545, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 14.285714285714286, |
|
"grad_norm": 14.916826248168945, |
|
"learning_rate": 1.1666666666666668e-05, |
|
"loss": 0.2618, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 14.404761904761905, |
|
"grad_norm": 15.267316818237305, |
|
"learning_rate": 1.1578483245149912e-05, |
|
"loss": 0.2442, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 14.523809523809524, |
|
"grad_norm": 8.463184356689453, |
|
"learning_rate": 1.1490299823633157e-05, |
|
"loss": 0.2618, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 14.642857142857142, |
|
"grad_norm": 13.68342113494873, |
|
"learning_rate": 1.1402116402116404e-05, |
|
"loss": 0.2447, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 14.761904761904763, |
|
"grad_norm": 4.8457112312316895, |
|
"learning_rate": 1.131393298059965e-05, |
|
"loss": 0.2361, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 14.880952380952381, |
|
"grad_norm": 6.4722185134887695, |
|
"learning_rate": 1.1225749559082893e-05, |
|
"loss": 0.2519, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 15.0, |
|
"grad_norm": 15.844517707824707, |
|
"learning_rate": 1.1137566137566138e-05, |
|
"loss": 0.2658, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 15.0, |
|
"eval_accuracy": 0.91, |
|
"eval_loss": 0.2803078591823578, |
|
"eval_runtime": 5.3029, |
|
"eval_samples_per_second": 150.862, |
|
"eval_steps_per_second": 18.858, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 15.119047619047619, |
|
"grad_norm": 20.211238861083984, |
|
"learning_rate": 1.1049382716049384e-05, |
|
"loss": 0.2386, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 15.238095238095237, |
|
"grad_norm": 6.853188991546631, |
|
"learning_rate": 1.0961199294532629e-05, |
|
"loss": 0.2326, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 15.357142857142858, |
|
"grad_norm": 8.602685928344727, |
|
"learning_rate": 1.0873015873015873e-05, |
|
"loss": 0.2553, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 15.476190476190476, |
|
"grad_norm": 4.291443824768066, |
|
"learning_rate": 1.0784832451499118e-05, |
|
"loss": 0.2434, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 15.595238095238095, |
|
"grad_norm": 6.306275367736816, |
|
"learning_rate": 1.0696649029982365e-05, |
|
"loss": 0.2277, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 15.714285714285714, |
|
"grad_norm": 5.804137706756592, |
|
"learning_rate": 1.060846560846561e-05, |
|
"loss": 0.2248, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 15.833333333333334, |
|
"grad_norm": 13.754738807678223, |
|
"learning_rate": 1.0520282186948854e-05, |
|
"loss": 0.2313, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 15.952380952380953, |
|
"grad_norm": 8.410860061645508, |
|
"learning_rate": 1.04320987654321e-05, |
|
"loss": 0.2331, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 16.0, |
|
"eval_accuracy": 0.9125, |
|
"eval_loss": 0.26975059509277344, |
|
"eval_runtime": 5.4078, |
|
"eval_samples_per_second": 147.934, |
|
"eval_steps_per_second": 18.492, |
|
"step": 1344 |
|
}, |
|
{ |
|
"epoch": 16.071428571428573, |
|
"grad_norm": 9.222555160522461, |
|
"learning_rate": 1.0352733686067021e-05, |
|
"loss": 0.2394, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 16.19047619047619, |
|
"grad_norm": 9.493171691894531, |
|
"learning_rate": 1.0264550264550266e-05, |
|
"loss": 0.2213, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 16.30952380952381, |
|
"grad_norm": 4.863091945648193, |
|
"learning_rate": 1.0176366843033512e-05, |
|
"loss": 0.2447, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 16.428571428571427, |
|
"grad_norm": 9.915203094482422, |
|
"learning_rate": 1.0088183421516755e-05, |
|
"loss": 0.2369, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 16.547619047619047, |
|
"grad_norm": 7.485960960388184, |
|
"learning_rate": 1e-05, |
|
"loss": 0.213, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 16.666666666666668, |
|
"grad_norm": 5.043095111846924, |
|
"learning_rate": 9.911816578483246e-06, |
|
"loss": 0.2035, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 16.785714285714285, |
|
"grad_norm": 12.636078834533691, |
|
"learning_rate": 9.823633156966492e-06, |
|
"loss": 0.2312, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 16.904761904761905, |
|
"grad_norm": 13.119421005249023, |
|
"learning_rate": 9.735449735449735e-06, |
|
"loss": 0.2271, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 17.0, |
|
"eval_accuracy": 0.90625, |
|
"eval_loss": 0.2662568986415863, |
|
"eval_runtime": 5.5739, |
|
"eval_samples_per_second": 143.526, |
|
"eval_steps_per_second": 17.941, |
|
"step": 1428 |
|
}, |
|
{ |
|
"epoch": 17.023809523809526, |
|
"grad_norm": 19.145736694335938, |
|
"learning_rate": 9.64726631393298e-06, |
|
"loss": 0.2089, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 17.142857142857142, |
|
"grad_norm": 13.143487930297852, |
|
"learning_rate": 9.559082892416226e-06, |
|
"loss": 0.2036, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 17.261904761904763, |
|
"grad_norm": 13.245809555053711, |
|
"learning_rate": 9.470899470899471e-06, |
|
"loss": 0.1902, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 17.38095238095238, |
|
"grad_norm": 15.815811157226562, |
|
"learning_rate": 9.382716049382717e-06, |
|
"loss": 0.2234, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 17.5, |
|
"grad_norm": 8.950321197509766, |
|
"learning_rate": 9.294532627865962e-06, |
|
"loss": 0.2258, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 17.61904761904762, |
|
"grad_norm": 6.937575817108154, |
|
"learning_rate": 9.206349206349207e-06, |
|
"loss": 0.212, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 17.738095238095237, |
|
"grad_norm": 9.044116020202637, |
|
"learning_rate": 9.118165784832453e-06, |
|
"loss": 0.2084, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 17.857142857142858, |
|
"grad_norm": 9.224007606506348, |
|
"learning_rate": 9.029982363315696e-06, |
|
"loss": 0.2036, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 17.976190476190474, |
|
"grad_norm": 5.575143814086914, |
|
"learning_rate": 8.941798941798942e-06, |
|
"loss": 0.1895, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 18.0, |
|
"eval_accuracy": 0.92375, |
|
"eval_loss": 0.2639971375465393, |
|
"eval_runtime": 5.9356, |
|
"eval_samples_per_second": 134.779, |
|
"eval_steps_per_second": 16.847, |
|
"step": 1512 |
|
}, |
|
{ |
|
"epoch": 18.095238095238095, |
|
"grad_norm": 10.496304512023926, |
|
"learning_rate": 8.853615520282187e-06, |
|
"loss": 0.1908, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 18.214285714285715, |
|
"grad_norm": 8.395342826843262, |
|
"learning_rate": 8.765432098765432e-06, |
|
"loss": 0.202, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 18.333333333333332, |
|
"grad_norm": 6.879480838775635, |
|
"learning_rate": 8.677248677248678e-06, |
|
"loss": 0.1825, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 18.452380952380953, |
|
"grad_norm": 6.519415855407715, |
|
"learning_rate": 8.589065255731923e-06, |
|
"loss": 0.2025, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 18.571428571428573, |
|
"grad_norm": 14.108360290527344, |
|
"learning_rate": 8.500881834215169e-06, |
|
"loss": 0.2156, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 18.69047619047619, |
|
"grad_norm": 8.98913288116455, |
|
"learning_rate": 8.412698412698414e-06, |
|
"loss": 0.1895, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 18.80952380952381, |
|
"grad_norm": 7.803181171417236, |
|
"learning_rate": 8.324514991181658e-06, |
|
"loss": 0.1829, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 18.928571428571427, |
|
"grad_norm": 6.913029193878174, |
|
"learning_rate": 8.236331569664903e-06, |
|
"loss": 0.1914, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 19.0, |
|
"eval_accuracy": 0.9225, |
|
"eval_loss": 0.23814032971858978, |
|
"eval_runtime": 6.1225, |
|
"eval_samples_per_second": 130.665, |
|
"eval_steps_per_second": 16.333, |
|
"step": 1596 |
|
}, |
|
{ |
|
"epoch": 19.047619047619047, |
|
"grad_norm": 8.117836952209473, |
|
"learning_rate": 8.148148148148148e-06, |
|
"loss": 0.1953, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 19.166666666666668, |
|
"grad_norm": 7.4035139083862305, |
|
"learning_rate": 8.059964726631394e-06, |
|
"loss": 0.1755, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 19.285714285714285, |
|
"grad_norm": 9.961386680603027, |
|
"learning_rate": 7.971781305114639e-06, |
|
"loss": 0.1826, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 19.404761904761905, |
|
"grad_norm": 3.902977705001831, |
|
"learning_rate": 7.883597883597884e-06, |
|
"loss": 0.2038, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 19.523809523809526, |
|
"grad_norm": 11.7597074508667, |
|
"learning_rate": 7.79541446208113e-06, |
|
"loss": 0.1755, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 19.642857142857142, |
|
"grad_norm": 5.295619964599609, |
|
"learning_rate": 7.707231040564375e-06, |
|
"loss": 0.1821, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 19.761904761904763, |
|
"grad_norm": 8.691162109375, |
|
"learning_rate": 7.61904761904762e-06, |
|
"loss": 0.1931, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 19.88095238095238, |
|
"grad_norm": 14.22559928894043, |
|
"learning_rate": 7.530864197530865e-06, |
|
"loss": 0.1842, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 20.0, |
|
"grad_norm": 19.08734130859375, |
|
"learning_rate": 7.4426807760141095e-06, |
|
"loss": 0.1741, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 20.0, |
|
"eval_accuracy": 0.92125, |
|
"eval_loss": 0.2286679595708847, |
|
"eval_runtime": 6.3863, |
|
"eval_samples_per_second": 125.269, |
|
"eval_steps_per_second": 15.659, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 20.11904761904762, |
|
"grad_norm": 11.843003273010254, |
|
"learning_rate": 7.354497354497355e-06, |
|
"loss": 0.1704, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 20.238095238095237, |
|
"grad_norm": 4.4936299324035645, |
|
"learning_rate": 7.2663139329806e-06, |
|
"loss": 0.1833, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 20.357142857142858, |
|
"grad_norm": 3.9449779987335205, |
|
"learning_rate": 7.178130511463846e-06, |
|
"loss": 0.1779, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 20.476190476190474, |
|
"grad_norm": 9.936880111694336, |
|
"learning_rate": 7.08994708994709e-06, |
|
"loss": 0.1789, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 20.595238095238095, |
|
"grad_norm": 5.483986854553223, |
|
"learning_rate": 7.0017636684303355e-06, |
|
"loss": 0.1776, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 20.714285714285715, |
|
"grad_norm": 8.04780101776123, |
|
"learning_rate": 6.913580246913581e-06, |
|
"loss": 0.1737, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 20.833333333333332, |
|
"grad_norm": 6.709885120391846, |
|
"learning_rate": 6.825396825396826e-06, |
|
"loss": 0.1855, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 20.952380952380953, |
|
"grad_norm": 7.882541656494141, |
|
"learning_rate": 6.737213403880071e-06, |
|
"loss": 0.1682, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 21.0, |
|
"eval_accuracy": 0.92625, |
|
"eval_loss": 0.21940070390701294, |
|
"eval_runtime": 5.4374, |
|
"eval_samples_per_second": 147.128, |
|
"eval_steps_per_second": 18.391, |
|
"step": 1764 |
|
}, |
|
{ |
|
"epoch": 21.071428571428573, |
|
"grad_norm": 5.953088760375977, |
|
"learning_rate": 6.649029982363316e-06, |
|
"loss": 0.1741, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 21.19047619047619, |
|
"grad_norm": 7.106684684753418, |
|
"learning_rate": 6.560846560846561e-06, |
|
"loss": 0.1745, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 21.30952380952381, |
|
"grad_norm": 13.026501655578613, |
|
"learning_rate": 6.472663139329807e-06, |
|
"loss": 0.1844, |
|
"step": 1790 |
|
}, |
|
{ |
|
"epoch": 21.428571428571427, |
|
"grad_norm": 3.5783817768096924, |
|
"learning_rate": 6.384479717813051e-06, |
|
"loss": 0.1636, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 21.547619047619047, |
|
"grad_norm": 10.339872360229492, |
|
"learning_rate": 6.296296296296297e-06, |
|
"loss": 0.1766, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 21.666666666666668, |
|
"grad_norm": 13.413084030151367, |
|
"learning_rate": 6.208112874779542e-06, |
|
"loss": 0.1643, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 21.785714285714285, |
|
"grad_norm": 6.75968074798584, |
|
"learning_rate": 6.119929453262787e-06, |
|
"loss": 0.1589, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 21.904761904761905, |
|
"grad_norm": 3.4033656120300293, |
|
"learning_rate": 6.031746031746032e-06, |
|
"loss": 0.1569, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 22.0, |
|
"eval_accuracy": 0.93375, |
|
"eval_loss": 0.21772493422031403, |
|
"eval_runtime": 5.4159, |
|
"eval_samples_per_second": 147.714, |
|
"eval_steps_per_second": 18.464, |
|
"step": 1848 |
|
}, |
|
{ |
|
"epoch": 22.023809523809526, |
|
"grad_norm": 28.122211456298828, |
|
"learning_rate": 5.943562610229277e-06, |
|
"loss": 0.1648, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 22.142857142857142, |
|
"grad_norm": 9.448488235473633, |
|
"learning_rate": 5.855379188712523e-06, |
|
"loss": 0.1397, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 22.261904761904763, |
|
"grad_norm": 7.661059379577637, |
|
"learning_rate": 5.767195767195768e-06, |
|
"loss": 0.1756, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 22.38095238095238, |
|
"grad_norm": 10.659194946289062, |
|
"learning_rate": 5.6790123456790125e-06, |
|
"loss": 0.1588, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 22.5, |
|
"grad_norm": 3.608710527420044, |
|
"learning_rate": 5.590828924162258e-06, |
|
"loss": 0.1656, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 22.61904761904762, |
|
"grad_norm": 10.583845138549805, |
|
"learning_rate": 5.502645502645503e-06, |
|
"loss": 0.1618, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 22.738095238095237, |
|
"grad_norm": 8.316349029541016, |
|
"learning_rate": 5.4144620811287486e-06, |
|
"loss": 0.1541, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 22.857142857142858, |
|
"grad_norm": 9.836575508117676, |
|
"learning_rate": 5.326278659611993e-06, |
|
"loss": 0.1588, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 22.976190476190474, |
|
"grad_norm": 7.263607501983643, |
|
"learning_rate": 5.2380952380952384e-06, |
|
"loss": 0.144, |
|
"step": 1930 |
|
}, |
|
{ |
|
"epoch": 23.0, |
|
"eval_accuracy": 0.93375, |
|
"eval_loss": 0.21350684762001038, |
|
"eval_runtime": 5.4573, |
|
"eval_samples_per_second": 146.592, |
|
"eval_steps_per_second": 18.324, |
|
"step": 1932 |
|
}, |
|
{ |
|
"epoch": 23.095238095238095, |
|
"grad_norm": 9.257414817810059, |
|
"learning_rate": 5.149911816578484e-06, |
|
"loss": 0.1624, |
|
"step": 1940 |
|
}, |
|
{ |
|
"epoch": 23.214285714285715, |
|
"grad_norm": 23.810653686523438, |
|
"learning_rate": 5.061728395061729e-06, |
|
"loss": 0.1528, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 23.333333333333332, |
|
"grad_norm": 8.397602081298828, |
|
"learning_rate": 4.973544973544974e-06, |
|
"loss": 0.1506, |
|
"step": 1960 |
|
}, |
|
{ |
|
"epoch": 23.452380952380953, |
|
"grad_norm": 3.9385476112365723, |
|
"learning_rate": 4.885361552028219e-06, |
|
"loss": 0.1427, |
|
"step": 1970 |
|
}, |
|
{ |
|
"epoch": 23.571428571428573, |
|
"grad_norm": 9.915690422058105, |
|
"learning_rate": 4.7971781305114636e-06, |
|
"loss": 0.1548, |
|
"step": 1980 |
|
}, |
|
{ |
|
"epoch": 23.69047619047619, |
|
"grad_norm": 10.970438003540039, |
|
"learning_rate": 4.708994708994709e-06, |
|
"loss": 0.165, |
|
"step": 1990 |
|
}, |
|
{ |
|
"epoch": 23.80952380952381, |
|
"grad_norm": 12.917925834655762, |
|
"learning_rate": 4.620811287477954e-06, |
|
"loss": 0.1658, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 23.928571428571427, |
|
"grad_norm": 16.563697814941406, |
|
"learning_rate": 4.5326278659612e-06, |
|
"loss": 0.1581, |
|
"step": 2010 |
|
}, |
|
{ |
|
"epoch": 24.0, |
|
"eval_accuracy": 0.9325, |
|
"eval_loss": 0.20941905677318573, |
|
"eval_runtime": 6.2026, |
|
"eval_samples_per_second": 128.978, |
|
"eval_steps_per_second": 16.122, |
|
"step": 2016 |
|
}, |
|
{ |
|
"epoch": 24.047619047619047, |
|
"grad_norm": 5.663702487945557, |
|
"learning_rate": 4.444444444444444e-06, |
|
"loss": 0.1742, |
|
"step": 2020 |
|
}, |
|
{ |
|
"epoch": 24.166666666666668, |
|
"grad_norm": 21.897550582885742, |
|
"learning_rate": 4.3562610229276895e-06, |
|
"loss": 0.1663, |
|
"step": 2030 |
|
}, |
|
{ |
|
"epoch": 24.285714285714285, |
|
"grad_norm": 7.413302421569824, |
|
"learning_rate": 4.268077601410935e-06, |
|
"loss": 0.1478, |
|
"step": 2040 |
|
}, |
|
{ |
|
"epoch": 24.404761904761905, |
|
"grad_norm": 9.5319242477417, |
|
"learning_rate": 4.17989417989418e-06, |
|
"loss": 0.1616, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 24.523809523809526, |
|
"grad_norm": 7.299759864807129, |
|
"learning_rate": 4.091710758377425e-06, |
|
"loss": 0.1601, |
|
"step": 2060 |
|
}, |
|
{ |
|
"epoch": 24.642857142857142, |
|
"grad_norm": 7.582053184509277, |
|
"learning_rate": 4.00352733686067e-06, |
|
"loss": 0.1481, |
|
"step": 2070 |
|
}, |
|
{ |
|
"epoch": 24.761904761904763, |
|
"grad_norm": 6.607138633728027, |
|
"learning_rate": 3.9153439153439155e-06, |
|
"loss": 0.1447, |
|
"step": 2080 |
|
}, |
|
{ |
|
"epoch": 24.88095238095238, |
|
"grad_norm": 5.343249320983887, |
|
"learning_rate": 3.827160493827161e-06, |
|
"loss": 0.1366, |
|
"step": 2090 |
|
}, |
|
{ |
|
"epoch": 25.0, |
|
"grad_norm": 11.388031005859375, |
|
"learning_rate": 3.7389770723104058e-06, |
|
"loss": 0.1426, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 25.0, |
|
"eval_accuracy": 0.935, |
|
"eval_loss": 0.2057761698961258, |
|
"eval_runtime": 6.4445, |
|
"eval_samples_per_second": 124.136, |
|
"eval_steps_per_second": 15.517, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 25.11904761904762, |
|
"grad_norm": 13.238511085510254, |
|
"learning_rate": 3.6507936507936507e-06, |
|
"loss": 0.1451, |
|
"step": 2110 |
|
}, |
|
{ |
|
"epoch": 25.238095238095237, |
|
"grad_norm": 18.473859786987305, |
|
"learning_rate": 3.562610229276896e-06, |
|
"loss": 0.146, |
|
"step": 2120 |
|
}, |
|
{ |
|
"epoch": 25.357142857142858, |
|
"grad_norm": 8.533565521240234, |
|
"learning_rate": 3.474426807760141e-06, |
|
"loss": 0.15, |
|
"step": 2130 |
|
}, |
|
{ |
|
"epoch": 25.476190476190474, |
|
"grad_norm": 8.290474891662598, |
|
"learning_rate": 3.3862433862433864e-06, |
|
"loss": 0.1367, |
|
"step": 2140 |
|
}, |
|
{ |
|
"epoch": 25.595238095238095, |
|
"grad_norm": 3.6419670581817627, |
|
"learning_rate": 3.2980599647266313e-06, |
|
"loss": 0.1392, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 25.714285714285715, |
|
"grad_norm": 16.031980514526367, |
|
"learning_rate": 3.2098765432098767e-06, |
|
"loss": 0.1496, |
|
"step": 2160 |
|
}, |
|
{ |
|
"epoch": 25.833333333333332, |
|
"grad_norm": 16.360395431518555, |
|
"learning_rate": 3.1216931216931216e-06, |
|
"loss": 0.1705, |
|
"step": 2170 |
|
}, |
|
{ |
|
"epoch": 25.952380952380953, |
|
"grad_norm": 7.6555280685424805, |
|
"learning_rate": 3.033509700176367e-06, |
|
"loss": 0.1409, |
|
"step": 2180 |
|
}, |
|
{ |
|
"epoch": 26.0, |
|
"eval_accuracy": 0.93375, |
|
"eval_loss": 0.2027110904455185, |
|
"eval_runtime": 6.4093, |
|
"eval_samples_per_second": 124.818, |
|
"eval_steps_per_second": 15.602, |
|
"step": 2184 |
|
}, |
|
{ |
|
"epoch": 26.071428571428573, |
|
"grad_norm": 13.298584938049316, |
|
"learning_rate": 2.945326278659612e-06, |
|
"loss": 0.1339, |
|
"step": 2190 |
|
}, |
|
{ |
|
"epoch": 26.19047619047619, |
|
"grad_norm": 5.4013800621032715, |
|
"learning_rate": 2.8571428571428573e-06, |
|
"loss": 0.1388, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 26.30952380952381, |
|
"grad_norm": 6.846310138702393, |
|
"learning_rate": 2.768959435626102e-06, |
|
"loss": 0.1379, |
|
"step": 2210 |
|
}, |
|
{ |
|
"epoch": 26.428571428571427, |
|
"grad_norm": 8.436290740966797, |
|
"learning_rate": 2.6807760141093476e-06, |
|
"loss": 0.1404, |
|
"step": 2220 |
|
}, |
|
{ |
|
"epoch": 26.547619047619047, |
|
"grad_norm": 8.173290252685547, |
|
"learning_rate": 2.5925925925925925e-06, |
|
"loss": 0.1526, |
|
"step": 2230 |
|
}, |
|
{ |
|
"epoch": 26.666666666666668, |
|
"grad_norm": 7.008592128753662, |
|
"learning_rate": 2.504409171075838e-06, |
|
"loss": 0.1656, |
|
"step": 2240 |
|
}, |
|
{ |
|
"epoch": 26.785714285714285, |
|
"grad_norm": 14.528263092041016, |
|
"learning_rate": 2.416225749559083e-06, |
|
"loss": 0.1403, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 26.904761904761905, |
|
"grad_norm": 19.173906326293945, |
|
"learning_rate": 2.328042328042328e-06, |
|
"loss": 0.1445, |
|
"step": 2260 |
|
}, |
|
{ |
|
"epoch": 27.0, |
|
"eval_accuracy": 0.9275, |
|
"eval_loss": 0.20720510184764862, |
|
"eval_runtime": 5.4406, |
|
"eval_samples_per_second": 147.044, |
|
"eval_steps_per_second": 18.38, |
|
"step": 2268 |
|
}, |
|
{ |
|
"epoch": 27.023809523809526, |
|
"grad_norm": 7.463446140289307, |
|
"learning_rate": 2.239858906525573e-06, |
|
"loss": 0.1377, |
|
"step": 2270 |
|
}, |
|
{ |
|
"epoch": 27.142857142857142, |
|
"grad_norm": 11.728429794311523, |
|
"learning_rate": 2.1516754850088184e-06, |
|
"loss": 0.1315, |
|
"step": 2280 |
|
}, |
|
{ |
|
"epoch": 27.261904761904763, |
|
"grad_norm": 4.965158939361572, |
|
"learning_rate": 2.0634920634920634e-06, |
|
"loss": 0.1466, |
|
"step": 2290 |
|
}, |
|
{ |
|
"epoch": 27.38095238095238, |
|
"grad_norm": 5.408288955688477, |
|
"learning_rate": 1.9753086419753087e-06, |
|
"loss": 0.1418, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 27.5, |
|
"grad_norm": 5.822543621063232, |
|
"learning_rate": 1.887125220458554e-06, |
|
"loss": 0.1189, |
|
"step": 2310 |
|
}, |
|
{ |
|
"epoch": 27.61904761904762, |
|
"grad_norm": 16.357952117919922, |
|
"learning_rate": 1.798941798941799e-06, |
|
"loss": 0.1595, |
|
"step": 2320 |
|
}, |
|
{ |
|
"epoch": 27.738095238095237, |
|
"grad_norm": 7.142815589904785, |
|
"learning_rate": 1.7107583774250442e-06, |
|
"loss": 0.1362, |
|
"step": 2330 |
|
}, |
|
{ |
|
"epoch": 27.857142857142858, |
|
"grad_norm": 3.4048264026641846, |
|
"learning_rate": 1.6225749559082893e-06, |
|
"loss": 0.1226, |
|
"step": 2340 |
|
}, |
|
{ |
|
"epoch": 27.976190476190474, |
|
"grad_norm": 3.4568896293640137, |
|
"learning_rate": 1.5343915343915345e-06, |
|
"loss": 0.1472, |
|
"step": 2350 |
|
}, |
|
{ |
|
"epoch": 28.0, |
|
"eval_accuracy": 0.93375, |
|
"eval_loss": 0.2012936919927597, |
|
"eval_runtime": 5.3691, |
|
"eval_samples_per_second": 149.0, |
|
"eval_steps_per_second": 18.625, |
|
"step": 2352 |
|
}, |
|
{ |
|
"epoch": 28.095238095238095, |
|
"grad_norm": 25.373470306396484, |
|
"learning_rate": 1.4462081128747796e-06, |
|
"loss": 0.1368, |
|
"step": 2360 |
|
}, |
|
{ |
|
"epoch": 28.214285714285715, |
|
"grad_norm": 14.415323257446289, |
|
"learning_rate": 1.3580246913580248e-06, |
|
"loss": 0.145, |
|
"step": 2370 |
|
}, |
|
{ |
|
"epoch": 28.333333333333332, |
|
"grad_norm": 9.555523872375488, |
|
"learning_rate": 1.26984126984127e-06, |
|
"loss": 0.1556, |
|
"step": 2380 |
|
}, |
|
{ |
|
"epoch": 28.452380952380953, |
|
"grad_norm": 13.204785346984863, |
|
"learning_rate": 1.181657848324515e-06, |
|
"loss": 0.1495, |
|
"step": 2390 |
|
}, |
|
{ |
|
"epoch": 28.571428571428573, |
|
"grad_norm": 13.89194393157959, |
|
"learning_rate": 1.0934744268077602e-06, |
|
"loss": 0.1221, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 28.69047619047619, |
|
"grad_norm": 21.069202423095703, |
|
"learning_rate": 1.0052910052910054e-06, |
|
"loss": 0.1376, |
|
"step": 2410 |
|
}, |
|
{ |
|
"epoch": 28.80952380952381, |
|
"grad_norm": 13.420437812805176, |
|
"learning_rate": 9.171075837742504e-07, |
|
"loss": 0.135, |
|
"step": 2420 |
|
}, |
|
{ |
|
"epoch": 28.928571428571427, |
|
"grad_norm": 6.938393592834473, |
|
"learning_rate": 8.289241622574956e-07, |
|
"loss": 0.1329, |
|
"step": 2430 |
|
}, |
|
{ |
|
"epoch": 29.0, |
|
"eval_accuracy": 0.9375, |
|
"eval_loss": 0.19804717600345612, |
|
"eval_runtime": 5.2492, |
|
"eval_samples_per_second": 152.406, |
|
"eval_steps_per_second": 19.051, |
|
"step": 2436 |
|
}, |
|
{ |
|
"epoch": 29.047619047619047, |
|
"grad_norm": 6.732052803039551, |
|
"learning_rate": 7.407407407407407e-07, |
|
"loss": 0.1373, |
|
"step": 2440 |
|
}, |
|
{ |
|
"epoch": 29.166666666666668, |
|
"grad_norm": 11.9608154296875, |
|
"learning_rate": 6.525573192239859e-07, |
|
"loss": 0.1288, |
|
"step": 2450 |
|
}, |
|
{ |
|
"epoch": 29.285714285714285, |
|
"grad_norm": 9.937846183776855, |
|
"learning_rate": 5.64373897707231e-07, |
|
"loss": 0.1267, |
|
"step": 2460 |
|
}, |
|
{ |
|
"epoch": 29.404761904761905, |
|
"grad_norm": 6.138334274291992, |
|
"learning_rate": 4.7619047619047623e-07, |
|
"loss": 0.1358, |
|
"step": 2470 |
|
}, |
|
{ |
|
"epoch": 29.523809523809526, |
|
"grad_norm": 5.230373859405518, |
|
"learning_rate": 3.880070546737214e-07, |
|
"loss": 0.1473, |
|
"step": 2480 |
|
}, |
|
{ |
|
"epoch": 29.642857142857142, |
|
"grad_norm": 8.851503372192383, |
|
"learning_rate": 2.9982363315696647e-07, |
|
"loss": 0.1507, |
|
"step": 2490 |
|
}, |
|
{ |
|
"epoch": 29.761904761904763, |
|
"grad_norm": 5.106894493103027, |
|
"learning_rate": 2.1164021164021165e-07, |
|
"loss": 0.1262, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 29.88095238095238, |
|
"grad_norm": 9.42648696899414, |
|
"learning_rate": 1.234567901234568e-07, |
|
"loss": 0.1397, |
|
"step": 2510 |
|
}, |
|
{ |
|
"epoch": 30.0, |
|
"grad_norm": 8.953765869140625, |
|
"learning_rate": 3.527336860670194e-08, |
|
"loss": 0.132, |
|
"step": 2520 |
|
}, |
|
{ |
|
"epoch": 30.0, |
|
"eval_accuracy": 0.93625, |
|
"eval_loss": 0.1968877911567688, |
|
"eval_runtime": 5.803, |
|
"eval_samples_per_second": 137.859, |
|
"eval_steps_per_second": 17.232, |
|
"step": 2520 |
|
} |
|
], |
|
"logging_steps": 10, |
|
"max_steps": 2520, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 30, |
|
"save_steps": 500, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 1.019477021294592e+19, |
|
"train_batch_size": 192, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|