{ |
|
"best_metric": 0.4069381058216095, |
|
"best_model_checkpoint": "mikhail_panzo/zlm-fil_b64_le5_s8000/checkpoint-5500", |
|
"epoch": 260.8695652173913, |
|
"eval_steps": 500, |
|
"global_step": 6000, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 2.1739130434782608, |
|
"grad_norm": 5.992666721343994, |
|
"learning_rate": 2.5000000000000004e-07, |
|
"loss": 0.9248, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 4.3478260869565215, |
|
"grad_norm": 3.4944448471069336, |
|
"learning_rate": 4.95e-07, |
|
"loss": 0.8709, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 6.521739130434782, |
|
"grad_norm": 1.9902064800262451, |
|
"learning_rate": 7.450000000000001e-07, |
|
"loss": 0.8005, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 8.695652173913043, |
|
"grad_norm": 1.4649031162261963, |
|
"learning_rate": 9.950000000000002e-07, |
|
"loss": 0.733, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 10.869565217391305, |
|
"grad_norm": 1.4645055532455444, |
|
"learning_rate": 1.2450000000000002e-06, |
|
"loss": 0.6758, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 13.043478260869565, |
|
"grad_norm": 1.5862590074539185, |
|
"learning_rate": 1.495e-06, |
|
"loss": 0.6246, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 15.217391304347826, |
|
"grad_norm": 1.198890209197998, |
|
"learning_rate": 1.745e-06, |
|
"loss": 0.5917, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 17.391304347826086, |
|
"grad_norm": 1.324212908744812, |
|
"learning_rate": 1.9950000000000004e-06, |
|
"loss": 0.5721, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 19.565217391304348, |
|
"grad_norm": 1.1638662815093994, |
|
"learning_rate": 2.245e-06, |
|
"loss": 0.5635, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 21.73913043478261, |
|
"grad_norm": 0.9293428063392639, |
|
"learning_rate": 2.4950000000000003e-06, |
|
"loss": 0.5541, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 21.73913043478261, |
|
"eval_loss": 0.49769046902656555, |
|
"eval_runtime": 12.8521, |
|
"eval_samples_per_second": 12.527, |
|
"eval_steps_per_second": 1.634, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 23.91304347826087, |
|
"grad_norm": 0.7982977628707886, |
|
"learning_rate": 2.7450000000000004e-06, |
|
"loss": 0.5487, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 26.08695652173913, |
|
"grad_norm": 0.8491495251655579, |
|
"learning_rate": 2.995e-06, |
|
"loss": 0.5373, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 28.26086956521739, |
|
"grad_norm": 0.7158030867576599, |
|
"learning_rate": 3.2450000000000003e-06, |
|
"loss": 0.5271, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 30.434782608695652, |
|
"grad_norm": 1.2020411491394043, |
|
"learning_rate": 3.495e-06, |
|
"loss": 0.5201, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 32.608695652173914, |
|
"grad_norm": 0.7339745163917542, |
|
"learning_rate": 3.745e-06, |
|
"loss": 0.5126, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 34.78260869565217, |
|
"grad_norm": 1.1129218339920044, |
|
"learning_rate": 3.995000000000001e-06, |
|
"loss": 0.5136, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 36.95652173913044, |
|
"grad_norm": 0.7733961939811707, |
|
"learning_rate": 4.245e-06, |
|
"loss": 0.5099, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 39.130434782608695, |
|
"grad_norm": 1.1559356451034546, |
|
"learning_rate": 4.495e-06, |
|
"loss": 0.5047, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 41.30434782608695, |
|
"grad_norm": 0.873518705368042, |
|
"learning_rate": 4.745e-06, |
|
"loss": 0.4955, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 43.47826086956522, |
|
"grad_norm": 0.703760027885437, |
|
"learning_rate": 4.9950000000000005e-06, |
|
"loss": 0.4931, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 43.47826086956522, |
|
"eval_loss": 0.4528730511665344, |
|
"eval_runtime": 12.5404, |
|
"eval_samples_per_second": 12.838, |
|
"eval_steps_per_second": 1.675, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 45.65217391304348, |
|
"grad_norm": 0.9317522644996643, |
|
"learning_rate": 5.245e-06, |
|
"loss": 0.4922, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 47.82608695652174, |
|
"grad_norm": 0.9118973016738892, |
|
"learning_rate": 5.495000000000001e-06, |
|
"loss": 0.4855, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 50.0, |
|
"grad_norm": 2.0197670459747314, |
|
"learning_rate": 5.745000000000001e-06, |
|
"loss": 0.485, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 52.17391304347826, |
|
"grad_norm": 1.1067094802856445, |
|
"learning_rate": 5.995000000000001e-06, |
|
"loss": 0.4872, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 54.34782608695652, |
|
"grad_norm": 1.20389723777771, |
|
"learning_rate": 6.245000000000001e-06, |
|
"loss": 0.4836, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 56.52173913043478, |
|
"grad_norm": 0.9784926176071167, |
|
"learning_rate": 6.4950000000000005e-06, |
|
"loss": 0.4793, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 58.69565217391305, |
|
"grad_norm": 1.4276039600372314, |
|
"learning_rate": 6.745000000000001e-06, |
|
"loss": 0.4774, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 60.869565217391305, |
|
"grad_norm": 1.117233157157898, |
|
"learning_rate": 6.995000000000001e-06, |
|
"loss": 0.4776, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 63.04347826086956, |
|
"grad_norm": 0.9267759919166565, |
|
"learning_rate": 7.245000000000001e-06, |
|
"loss": 0.4682, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 65.21739130434783, |
|
"grad_norm": 0.8514929413795471, |
|
"learning_rate": 7.495000000000001e-06, |
|
"loss": 0.4695, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 65.21739130434783, |
|
"eval_loss": 0.43296581506729126, |
|
"eval_runtime": 12.858, |
|
"eval_samples_per_second": 12.521, |
|
"eval_steps_per_second": 1.633, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 67.3913043478261, |
|
"grad_norm": 1.6638849973678589, |
|
"learning_rate": 7.745e-06, |
|
"loss": 0.4674, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 69.56521739130434, |
|
"grad_norm": 0.8030112981796265, |
|
"learning_rate": 7.995e-06, |
|
"loss": 0.4652, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 71.73913043478261, |
|
"grad_norm": 1.2407337427139282, |
|
"learning_rate": 8.245000000000002e-06, |
|
"loss": 0.4638, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 73.91304347826087, |
|
"grad_norm": 1.0331224203109741, |
|
"learning_rate": 8.495e-06, |
|
"loss": 0.4685, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 76.08695652173913, |
|
"grad_norm": 2.4681615829467773, |
|
"learning_rate": 8.745000000000002e-06, |
|
"loss": 0.4627, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 78.26086956521739, |
|
"grad_norm": 0.9344178438186646, |
|
"learning_rate": 8.995000000000001e-06, |
|
"loss": 0.4575, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 80.43478260869566, |
|
"grad_norm": 1.0561341047286987, |
|
"learning_rate": 9.245e-06, |
|
"loss": 0.4621, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 82.6086956521739, |
|
"grad_norm": 0.9816989302635193, |
|
"learning_rate": 9.495000000000001e-06, |
|
"loss": 0.4544, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 84.78260869565217, |
|
"grad_norm": 1.4779256582260132, |
|
"learning_rate": 9.745e-06, |
|
"loss": 0.4579, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 86.95652173913044, |
|
"grad_norm": 1.1634950637817383, |
|
"learning_rate": 9.995000000000002e-06, |
|
"loss": 0.4518, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 86.95652173913044, |
|
"eval_loss": 0.42301931977272034, |
|
"eval_runtime": 12.725, |
|
"eval_samples_per_second": 12.652, |
|
"eval_steps_per_second": 1.65, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 89.1304347826087, |
|
"grad_norm": 1.0197867155075073, |
|
"learning_rate": 9.918333333333335e-06, |
|
"loss": 0.4501, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 91.30434782608695, |
|
"grad_norm": 1.2543045282363892, |
|
"learning_rate": 9.835000000000002e-06, |
|
"loss": 0.4499, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 93.47826086956522, |
|
"grad_norm": 0.8574827909469604, |
|
"learning_rate": 9.751666666666667e-06, |
|
"loss": 0.4485, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 95.65217391304348, |
|
"grad_norm": 1.3784998655319214, |
|
"learning_rate": 9.668333333333334e-06, |
|
"loss": 0.4507, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 97.82608695652173, |
|
"grad_norm": 0.9518347978591919, |
|
"learning_rate": 9.585e-06, |
|
"loss": 0.4486, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 100.0, |
|
"grad_norm": 2.422320604324341, |
|
"learning_rate": 9.501666666666667e-06, |
|
"loss": 0.4455, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 102.17391304347827, |
|
"grad_norm": 1.3441377878189087, |
|
"learning_rate": 9.418333333333334e-06, |
|
"loss": 0.45, |
|
"step": 2350 |
|
}, |
|
{ |
|
"epoch": 104.34782608695652, |
|
"grad_norm": 0.969160795211792, |
|
"learning_rate": 9.335000000000001e-06, |
|
"loss": 0.4454, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 106.52173913043478, |
|
"grad_norm": 2.1275179386138916, |
|
"learning_rate": 9.251666666666668e-06, |
|
"loss": 0.4459, |
|
"step": 2450 |
|
}, |
|
{ |
|
"epoch": 108.69565217391305, |
|
"grad_norm": 1.3673362731933594, |
|
"learning_rate": 9.168333333333333e-06, |
|
"loss": 0.4442, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 108.69565217391305, |
|
"eval_loss": 0.4178585410118103, |
|
"eval_runtime": 13.0534, |
|
"eval_samples_per_second": 12.334, |
|
"eval_steps_per_second": 1.609, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 110.8695652173913, |
|
"grad_norm": 1.3769398927688599, |
|
"learning_rate": 9.085e-06, |
|
"loss": 0.4406, |
|
"step": 2550 |
|
}, |
|
{ |
|
"epoch": 113.04347826086956, |
|
"grad_norm": 0.9913681745529175, |
|
"learning_rate": 9.001666666666667e-06, |
|
"loss": 0.4389, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 115.21739130434783, |
|
"grad_norm": 1.1747106313705444, |
|
"learning_rate": 8.918333333333334e-06, |
|
"loss": 0.4386, |
|
"step": 2650 |
|
}, |
|
{ |
|
"epoch": 117.3913043478261, |
|
"grad_norm": 1.0514781475067139, |
|
"learning_rate": 8.835000000000001e-06, |
|
"loss": 0.4393, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 119.56521739130434, |
|
"grad_norm": 1.8967915773391724, |
|
"learning_rate": 8.751666666666668e-06, |
|
"loss": 0.4421, |
|
"step": 2750 |
|
}, |
|
{ |
|
"epoch": 121.73913043478261, |
|
"grad_norm": 0.8795832395553589, |
|
"learning_rate": 8.668333333333335e-06, |
|
"loss": 0.436, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 123.91304347826087, |
|
"grad_norm": 0.7928704023361206, |
|
"learning_rate": 8.585000000000002e-06, |
|
"loss": 0.4333, |
|
"step": 2850 |
|
}, |
|
{ |
|
"epoch": 126.08695652173913, |
|
"grad_norm": 1.2805510759353638, |
|
"learning_rate": 8.501666666666667e-06, |
|
"loss": 0.4326, |
|
"step": 2900 |
|
}, |
|
{ |
|
"epoch": 128.2608695652174, |
|
"grad_norm": 1.420920968055725, |
|
"learning_rate": 8.418333333333334e-06, |
|
"loss": 0.4317, |
|
"step": 2950 |
|
}, |
|
{ |
|
"epoch": 130.43478260869566, |
|
"grad_norm": 0.7063888907432556, |
|
"learning_rate": 8.335e-06, |
|
"loss": 0.4344, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 130.43478260869566, |
|
"eval_loss": 0.41350311040878296, |
|
"eval_runtime": 12.8066, |
|
"eval_samples_per_second": 12.572, |
|
"eval_steps_per_second": 1.64, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 132.6086956521739, |
|
"grad_norm": 1.8065855503082275, |
|
"learning_rate": 8.251666666666668e-06, |
|
"loss": 0.4361, |
|
"step": 3050 |
|
}, |
|
{ |
|
"epoch": 134.7826086956522, |
|
"grad_norm": 0.8073704838752747, |
|
"learning_rate": 8.168333333333334e-06, |
|
"loss": 0.4339, |
|
"step": 3100 |
|
}, |
|
{ |
|
"epoch": 136.95652173913044, |
|
"grad_norm": 1.2890065908432007, |
|
"learning_rate": 8.085000000000001e-06, |
|
"loss": 0.4325, |
|
"step": 3150 |
|
}, |
|
{ |
|
"epoch": 139.1304347826087, |
|
"grad_norm": 1.336401104927063, |
|
"learning_rate": 8.001666666666668e-06, |
|
"loss": 0.4334, |
|
"step": 3200 |
|
}, |
|
{ |
|
"epoch": 141.30434782608697, |
|
"grad_norm": 1.2965891361236572, |
|
"learning_rate": 7.918333333333333e-06, |
|
"loss": 0.4298, |
|
"step": 3250 |
|
}, |
|
{ |
|
"epoch": 143.47826086956522, |
|
"grad_norm": 0.8761409521102905, |
|
"learning_rate": 7.835e-06, |
|
"loss": 0.4231, |
|
"step": 3300 |
|
}, |
|
{ |
|
"epoch": 145.65217391304347, |
|
"grad_norm": 1.1475930213928223, |
|
"learning_rate": 7.751666666666667e-06, |
|
"loss": 0.4316, |
|
"step": 3350 |
|
}, |
|
{ |
|
"epoch": 147.82608695652175, |
|
"grad_norm": 0.8305974006652832, |
|
"learning_rate": 7.668333333333334e-06, |
|
"loss": 0.4277, |
|
"step": 3400 |
|
}, |
|
{ |
|
"epoch": 150.0, |
|
"grad_norm": 1.6335935592651367, |
|
"learning_rate": 7.585e-06, |
|
"loss": 0.4248, |
|
"step": 3450 |
|
}, |
|
{ |
|
"epoch": 152.17391304347825, |
|
"grad_norm": 1.1171984672546387, |
|
"learning_rate": 7.501666666666667e-06, |
|
"loss": 0.4318, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 152.17391304347825, |
|
"eval_loss": 0.4111216962337494, |
|
"eval_runtime": 13.0991, |
|
"eval_samples_per_second": 12.291, |
|
"eval_steps_per_second": 1.603, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 154.34782608695653, |
|
"grad_norm": 0.8932999968528748, |
|
"learning_rate": 7.418333333333334e-06, |
|
"loss": 0.4287, |
|
"step": 3550 |
|
}, |
|
{ |
|
"epoch": 156.52173913043478, |
|
"grad_norm": 0.9644981622695923, |
|
"learning_rate": 7.335000000000001e-06, |
|
"loss": 0.4253, |
|
"step": 3600 |
|
}, |
|
{ |
|
"epoch": 158.69565217391303, |
|
"grad_norm": 1.5559085607528687, |
|
"learning_rate": 7.251666666666667e-06, |
|
"loss": 0.4282, |
|
"step": 3650 |
|
}, |
|
{ |
|
"epoch": 160.8695652173913, |
|
"grad_norm": 1.0696306228637695, |
|
"learning_rate": 7.168333333333334e-06, |
|
"loss": 0.4221, |
|
"step": 3700 |
|
}, |
|
{ |
|
"epoch": 163.04347826086956, |
|
"grad_norm": 1.0170419216156006, |
|
"learning_rate": 7.085000000000001e-06, |
|
"loss": 0.4254, |
|
"step": 3750 |
|
}, |
|
{ |
|
"epoch": 165.2173913043478, |
|
"grad_norm": 0.7487155795097351, |
|
"learning_rate": 7.001666666666668e-06, |
|
"loss": 0.4269, |
|
"step": 3800 |
|
}, |
|
{ |
|
"epoch": 167.3913043478261, |
|
"grad_norm": 0.7589255571365356, |
|
"learning_rate": 6.918333333333334e-06, |
|
"loss": 0.4252, |
|
"step": 3850 |
|
}, |
|
{ |
|
"epoch": 169.56521739130434, |
|
"grad_norm": 0.9557852745056152, |
|
"learning_rate": 6.835000000000001e-06, |
|
"loss": 0.4224, |
|
"step": 3900 |
|
}, |
|
{ |
|
"epoch": 171.7391304347826, |
|
"grad_norm": 0.7511025667190552, |
|
"learning_rate": 6.7516666666666675e-06, |
|
"loss": 0.4201, |
|
"step": 3950 |
|
}, |
|
{ |
|
"epoch": 173.91304347826087, |
|
"grad_norm": 0.9032562375068665, |
|
"learning_rate": 6.668333333333334e-06, |
|
"loss": 0.4201, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 173.91304347826087, |
|
"eval_loss": 0.41097140312194824, |
|
"eval_runtime": 12.8806, |
|
"eval_samples_per_second": 12.499, |
|
"eval_steps_per_second": 1.63, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 176.08695652173913, |
|
"grad_norm": 0.9379887580871582, |
|
"learning_rate": 6.5866666666666666e-06, |
|
"loss": 0.426, |
|
"step": 4050 |
|
}, |
|
{ |
|
"epoch": 178.2608695652174, |
|
"grad_norm": 1.2516026496887207, |
|
"learning_rate": 6.5033333333333335e-06, |
|
"loss": 0.4244, |
|
"step": 4100 |
|
}, |
|
{ |
|
"epoch": 180.43478260869566, |
|
"grad_norm": 0.898951530456543, |
|
"learning_rate": 6.42e-06, |
|
"loss": 0.4212, |
|
"step": 4150 |
|
}, |
|
{ |
|
"epoch": 182.6086956521739, |
|
"grad_norm": 0.810229480266571, |
|
"learning_rate": 6.336666666666667e-06, |
|
"loss": 0.4188, |
|
"step": 4200 |
|
}, |
|
{ |
|
"epoch": 184.7826086956522, |
|
"grad_norm": 0.8796959519386292, |
|
"learning_rate": 6.253333333333333e-06, |
|
"loss": 0.4196, |
|
"step": 4250 |
|
}, |
|
{ |
|
"epoch": 186.95652173913044, |
|
"grad_norm": 0.8476919531822205, |
|
"learning_rate": 6.17e-06, |
|
"loss": 0.4215, |
|
"step": 4300 |
|
}, |
|
{ |
|
"epoch": 189.1304347826087, |
|
"grad_norm": 0.8933534622192383, |
|
"learning_rate": 6.086666666666667e-06, |
|
"loss": 0.4209, |
|
"step": 4350 |
|
}, |
|
{ |
|
"epoch": 191.30434782608697, |
|
"grad_norm": 0.965189516544342, |
|
"learning_rate": 6.003333333333334e-06, |
|
"loss": 0.4157, |
|
"step": 4400 |
|
}, |
|
{ |
|
"epoch": 193.47826086956522, |
|
"grad_norm": 0.6911047101020813, |
|
"learning_rate": 5.92e-06, |
|
"loss": 0.4156, |
|
"step": 4450 |
|
}, |
|
{ |
|
"epoch": 195.65217391304347, |
|
"grad_norm": 0.98875492811203, |
|
"learning_rate": 5.836666666666667e-06, |
|
"loss": 0.4185, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 195.65217391304347, |
|
"eval_loss": 0.40913259983062744, |
|
"eval_runtime": 13.1748, |
|
"eval_samples_per_second": 12.22, |
|
"eval_steps_per_second": 1.594, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 197.82608695652175, |
|
"grad_norm": 0.8193331360816956, |
|
"learning_rate": 5.753333333333334e-06, |
|
"loss": 0.4171, |
|
"step": 4550 |
|
}, |
|
{ |
|
"epoch": 200.0, |
|
"grad_norm": 2.4391345977783203, |
|
"learning_rate": 5.67e-06, |
|
"loss": 0.4156, |
|
"step": 4600 |
|
}, |
|
{ |
|
"epoch": 202.17391304347825, |
|
"grad_norm": 0.7653400301933289, |
|
"learning_rate": 5.586666666666667e-06, |
|
"loss": 0.4198, |
|
"step": 4650 |
|
}, |
|
{ |
|
"epoch": 204.34782608695653, |
|
"grad_norm": 0.9531933069229126, |
|
"learning_rate": 5.503333333333334e-06, |
|
"loss": 0.4165, |
|
"step": 4700 |
|
}, |
|
{ |
|
"epoch": 206.52173913043478, |
|
"grad_norm": 1.7444751262664795, |
|
"learning_rate": 5.420000000000001e-06, |
|
"loss": 0.417, |
|
"step": 4750 |
|
}, |
|
{ |
|
"epoch": 208.69565217391303, |
|
"grad_norm": 0.9747681021690369, |
|
"learning_rate": 5.336666666666667e-06, |
|
"loss": 0.4133, |
|
"step": 4800 |
|
}, |
|
{ |
|
"epoch": 210.8695652173913, |
|
"grad_norm": 1.063536524772644, |
|
"learning_rate": 5.2533333333333336e-06, |
|
"loss": 0.4153, |
|
"step": 4850 |
|
}, |
|
{ |
|
"epoch": 213.04347826086956, |
|
"grad_norm": 0.8719898462295532, |
|
"learning_rate": 5.1700000000000005e-06, |
|
"loss": 0.4104, |
|
"step": 4900 |
|
}, |
|
{ |
|
"epoch": 215.2173913043478, |
|
"grad_norm": 0.7953347563743591, |
|
"learning_rate": 5.086666666666667e-06, |
|
"loss": 0.4204, |
|
"step": 4950 |
|
}, |
|
{ |
|
"epoch": 217.3913043478261, |
|
"grad_norm": 0.9870523810386658, |
|
"learning_rate": 5.0033333333333334e-06, |
|
"loss": 0.4153, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 217.3913043478261, |
|
"eval_loss": 0.4096957743167877, |
|
"eval_runtime": 13.2951, |
|
"eval_samples_per_second": 12.11, |
|
"eval_steps_per_second": 1.58, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 219.56521739130434, |
|
"grad_norm": 0.9140251874923706, |
|
"learning_rate": 4.92e-06, |
|
"loss": 0.4142, |
|
"step": 5050 |
|
}, |
|
{ |
|
"epoch": 221.7391304347826, |
|
"grad_norm": 0.7339205741882324, |
|
"learning_rate": 4.836666666666667e-06, |
|
"loss": 0.4126, |
|
"step": 5100 |
|
}, |
|
{ |
|
"epoch": 223.91304347826087, |
|
"grad_norm": 0.8861171007156372, |
|
"learning_rate": 4.753333333333333e-06, |
|
"loss": 0.4189, |
|
"step": 5150 |
|
}, |
|
{ |
|
"epoch": 226.08695652173913, |
|
"grad_norm": 0.7763547301292419, |
|
"learning_rate": 4.670000000000001e-06, |
|
"loss": 0.415, |
|
"step": 5200 |
|
}, |
|
{ |
|
"epoch": 228.2608695652174, |
|
"grad_norm": 0.913635790348053, |
|
"learning_rate": 4.586666666666667e-06, |
|
"loss": 0.413, |
|
"step": 5250 |
|
}, |
|
{ |
|
"epoch": 230.43478260869566, |
|
"grad_norm": 1.0177322626113892, |
|
"learning_rate": 4.503333333333333e-06, |
|
"loss": 0.4119, |
|
"step": 5300 |
|
}, |
|
{ |
|
"epoch": 232.6086956521739, |
|
"grad_norm": 1.0655298233032227, |
|
"learning_rate": 4.42e-06, |
|
"loss": 0.4122, |
|
"step": 5350 |
|
}, |
|
{ |
|
"epoch": 234.7826086956522, |
|
"grad_norm": 0.84236079454422, |
|
"learning_rate": 4.336666666666667e-06, |
|
"loss": 0.4104, |
|
"step": 5400 |
|
}, |
|
{ |
|
"epoch": 236.95652173913044, |
|
"grad_norm": 1.0191460847854614, |
|
"learning_rate": 4.253333333333334e-06, |
|
"loss": 0.4128, |
|
"step": 5450 |
|
}, |
|
{ |
|
"epoch": 239.1304347826087, |
|
"grad_norm": 0.7806400060653687, |
|
"learning_rate": 4.17e-06, |
|
"loss": 0.414, |
|
"step": 5500 |
|
}, |
|
{ |
|
"epoch": 239.1304347826087, |
|
"eval_loss": 0.4069381058216095, |
|
"eval_runtime": 13.5672, |
|
"eval_samples_per_second": 11.867, |
|
"eval_steps_per_second": 1.548, |
|
"step": 5500 |
|
}, |
|
{ |
|
"epoch": 241.30434782608697, |
|
"grad_norm": 0.866113007068634, |
|
"learning_rate": 4.086666666666667e-06, |
|
"loss": 0.4117, |
|
"step": 5550 |
|
}, |
|
{ |
|
"epoch": 243.47826086956522, |
|
"grad_norm": 0.8896365165710449, |
|
"learning_rate": 4.003333333333334e-06, |
|
"loss": 0.4161, |
|
"step": 5600 |
|
}, |
|
{ |
|
"epoch": 245.65217391304347, |
|
"grad_norm": 0.8311805129051208, |
|
"learning_rate": 3.920000000000001e-06, |
|
"loss": 0.4094, |
|
"step": 5650 |
|
}, |
|
{ |
|
"epoch": 247.82608695652175, |
|
"grad_norm": 1.686919927597046, |
|
"learning_rate": 3.836666666666667e-06, |
|
"loss": 0.4128, |
|
"step": 5700 |
|
}, |
|
{ |
|
"epoch": 250.0, |
|
"grad_norm": 1.9034643173217773, |
|
"learning_rate": 3.753333333333334e-06, |
|
"loss": 0.4143, |
|
"step": 5750 |
|
}, |
|
{ |
|
"epoch": 252.17391304347825, |
|
"grad_norm": 0.8296219706535339, |
|
"learning_rate": 3.6700000000000004e-06, |
|
"loss": 0.4123, |
|
"step": 5800 |
|
}, |
|
{ |
|
"epoch": 254.34782608695653, |
|
"grad_norm": 0.6855341792106628, |
|
"learning_rate": 3.5866666666666673e-06, |
|
"loss": 0.4099, |
|
"step": 5850 |
|
}, |
|
{ |
|
"epoch": 256.5217391304348, |
|
"grad_norm": 1.0125635862350464, |
|
"learning_rate": 3.5033333333333334e-06, |
|
"loss": 0.4104, |
|
"step": 5900 |
|
}, |
|
{ |
|
"epoch": 258.69565217391306, |
|
"grad_norm": 0.8589877486228943, |
|
"learning_rate": 3.4200000000000007e-06, |
|
"loss": 0.4069, |
|
"step": 5950 |
|
}, |
|
{ |
|
"epoch": 260.8695652173913, |
|
"grad_norm": 0.9898667931556702, |
|
"learning_rate": 3.3366666666666668e-06, |
|
"loss": 0.4113, |
|
"step": 6000 |
|
}, |
|
{ |
|
"epoch": 260.8695652173913, |
|
"eval_loss": 0.40804117918014526, |
|
"eval_runtime": 13.2074, |
|
"eval_samples_per_second": 12.19, |
|
"eval_steps_per_second": 1.59, |
|
"step": 6000 |
|
} |
|
], |
|
"logging_steps": 50, |
|
"max_steps": 8000, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 348, |
|
"save_steps": 500, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": false |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 7.271570164893091e+16, |
|
"train_batch_size": 32, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |