{
  "best_metric": 0.40008437633514404,
  "best_model_checkpoint": "mikhail-panzo/zlm-ceb_b64_le5_s8000/checkpoint-6000",
  "epoch": 313.72549019607845,
  "eval_steps": 500,
  "global_step": 8000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 1.9607843137254903,
      "grad_norm": 1.6111680269241333,
      "learning_rate": 2.5e-06,
      "loss": 0.821,
      "step": 50
    },
    {
      "epoch": 3.9215686274509802,
      "grad_norm": 1.9582017660140991,
      "learning_rate": 5e-06,
      "loss": 0.673,
      "step": 100
    },
    {
      "epoch": 5.882352941176471,
      "grad_norm": 0.9988609552383423,
      "learning_rate": 7.5e-06,
      "loss": 0.5518,
      "step": 150
    },
    {
      "epoch": 7.8431372549019605,
      "grad_norm": 1.0111173391342163,
      "learning_rate": 1e-05,
      "loss": 0.5172,
      "step": 200
    },
    {
      "epoch": 9.803921568627452,
      "grad_norm": 0.9361169338226318,
      "learning_rate": 1.25e-05,
      "loss": 0.4999,
      "step": 250
    },
    {
      "epoch": 11.764705882352942,
      "grad_norm": 0.8657887578010559,
      "learning_rate": 1.5e-05,
      "loss": 0.4911,
      "step": 300
    },
    {
      "epoch": 13.72549019607843,
      "grad_norm": 0.8616572618484497,
      "learning_rate": 1.75e-05,
      "loss": 0.4757,
      "step": 350
    },
    {
      "epoch": 15.686274509803921,
      "grad_norm": 1.100374698638916,
      "learning_rate": 2e-05,
      "loss": 0.47,
      "step": 400
    },
    {
      "epoch": 17.647058823529413,
      "grad_norm": 1.3860764503479004,
      "learning_rate": 2.25e-05,
      "loss": 0.4671,
      "step": 450
    },
    {
      "epoch": 19.607843137254903,
      "grad_norm": 1.2481627464294434,
      "learning_rate": 2.5e-05,
      "loss": 0.4626,
      "step": 500
    },
    {
      "epoch": 19.607843137254903,
      "eval_loss": 0.42633864283561707,
      "eval_runtime": 6.9188,
      "eval_samples_per_second": 26.016,
      "eval_steps_per_second": 3.324,
      "step": 500
    },
    {
      "epoch": 21.568627450980394,
      "grad_norm": 1.079897165298462,
      "learning_rate": 2.7500000000000004e-05,
      "loss": 0.4593,
      "step": 550
    },
    {
      "epoch": 23.529411764705884,
      "grad_norm": 0.9019867777824402,
      "learning_rate": 3e-05,
      "loss": 0.4474,
      "step": 600
    },
    {
      "epoch": 25.49019607843137,
      "grad_norm": 1.900229811668396,
      "learning_rate": 3.2500000000000004e-05,
      "loss": 0.4439,
      "step": 650
    },
    {
      "epoch": 27.45098039215686,
      "grad_norm": 1.3894062042236328,
      "learning_rate": 3.5e-05,
      "loss": 0.4397,
      "step": 700
    },
    {
      "epoch": 29.41176470588235,
      "grad_norm": 2.318305253982544,
      "learning_rate": 3.7500000000000003e-05,
      "loss": 0.4376,
      "step": 750
    },
    {
      "epoch": 31.372549019607842,
      "grad_norm": 0.9606600999832153,
      "learning_rate": 4e-05,
      "loss": 0.4394,
      "step": 800
    },
    {
      "epoch": 33.333333333333336,
      "grad_norm": 1.4766446352005005,
      "learning_rate": 4.25e-05,
      "loss": 0.4376,
      "step": 850
    },
    {
      "epoch": 35.294117647058826,
      "grad_norm": 1.1099992990493774,
      "learning_rate": 4.5e-05,
      "loss": 0.4333,
      "step": 900
    },
    {
      "epoch": 37.254901960784316,
      "grad_norm": 1.1501879692077637,
      "learning_rate": 4.75e-05,
      "loss": 0.4281,
      "step": 950
    },
    {
      "epoch": 39.21568627450981,
      "grad_norm": 0.9793707728385925,
      "learning_rate": 5e-05,
      "loss": 0.4288,
      "step": 1000
    },
    {
      "epoch": 39.21568627450981,
      "eval_loss": 0.4076782763004303,
      "eval_runtime": 6.5279,
      "eval_samples_per_second": 27.574,
      "eval_steps_per_second": 3.523,
      "step": 1000
    },
    {
      "epoch": 41.1764705882353,
      "grad_norm": 1.812929630279541,
      "learning_rate": 5.25e-05,
      "loss": 0.4224,
      "step": 1050
    },
    {
      "epoch": 43.13725490196079,
      "grad_norm": 1.066340446472168,
      "learning_rate": 5.500000000000001e-05,
      "loss": 0.4231,
      "step": 1100
    },
    {
      "epoch": 45.09803921568628,
      "grad_norm": 2.465958595275879,
      "learning_rate": 5.745e-05,
      "loss": 0.4261,
      "step": 1150
    },
    {
      "epoch": 47.05882352941177,
      "grad_norm": 1.3723503351211548,
      "learning_rate": 5.995000000000001e-05,
      "loss": 0.416,
      "step": 1200
    },
    {
      "epoch": 49.01960784313726,
      "grad_norm": 1.6065536737442017,
      "learning_rate": 6.245000000000001e-05,
      "loss": 0.416,
      "step": 1250
    },
    {
      "epoch": 50.98039215686274,
      "grad_norm": 3.8591885566711426,
      "learning_rate": 6.494999999999999e-05,
      "loss": 0.4162,
      "step": 1300
    },
    {
      "epoch": 52.94117647058823,
      "grad_norm": 2.415721893310547,
      "learning_rate": 6.745e-05,
      "loss": 0.4121,
      "step": 1350
    },
    {
      "epoch": 54.90196078431372,
      "grad_norm": 1.969329833984375,
      "learning_rate": 6.995e-05,
      "loss": 0.4104,
      "step": 1400
    },
    {
      "epoch": 56.86274509803921,
      "grad_norm": 3.190781593322754,
      "learning_rate": 7.245000000000001e-05,
      "loss": 0.4055,
      "step": 1450
    },
    {
      "epoch": 58.8235294117647,
      "grad_norm": 1.4585760831832886,
      "learning_rate": 7.495e-05,
      "loss": 0.4109,
      "step": 1500
    },
    {
      "epoch": 58.8235294117647,
      "eval_loss": 0.4012959897518158,
      "eval_runtime": 6.3508,
      "eval_samples_per_second": 28.343,
      "eval_steps_per_second": 3.622,
      "step": 1500
    },
    {
      "epoch": 60.78431372549019,
      "grad_norm": 4.141477584838867,
      "learning_rate": 7.745e-05,
      "loss": 0.4117,
      "step": 1550
    },
    {
      "epoch": 62.745098039215684,
      "grad_norm": 5.334190845489502,
      "learning_rate": 7.995e-05,
      "loss": 0.413,
      "step": 1600
    },
    {
      "epoch": 64.70588235294117,
      "grad_norm": 3.1039328575134277,
      "learning_rate": 8.245e-05,
      "loss": 0.405,
      "step": 1650
    },
    {
      "epoch": 66.66666666666667,
      "grad_norm": 2.6190948486328125,
      "learning_rate": 8.495e-05,
      "loss": 0.4098,
      "step": 1700
    },
    {
      "epoch": 68.62745098039215,
      "grad_norm": 1.8761863708496094,
      "learning_rate": 8.745000000000001e-05,
      "loss": 0.4035,
      "step": 1750
    },
    {
      "epoch": 70.58823529411765,
      "grad_norm": 0.9362647533416748,
      "learning_rate": 8.995e-05,
      "loss": 0.4253,
      "step": 1800
    },
    {
      "epoch": 72.54901960784314,
      "grad_norm": 1.6635262966156006,
      "learning_rate": 9.245e-05,
      "loss": 0.4295,
      "step": 1850
    },
    {
      "epoch": 74.50980392156863,
      "grad_norm": 1.3353509902954102,
      "learning_rate": 9.495e-05,
      "loss": 0.4078,
      "step": 1900
    },
    {
      "epoch": 76.47058823529412,
      "grad_norm": 8.328239440917969,
      "learning_rate": 9.745000000000001e-05,
      "loss": 0.4051,
      "step": 1950
    },
    {
      "epoch": 78.43137254901961,
      "grad_norm": 1.1230006217956543,
      "learning_rate": 9.995e-05,
      "loss": 0.3978,
      "step": 2000
    },
    {
      "epoch": 78.43137254901961,
      "eval_loss": 0.40350666642189026,
      "eval_runtime": 6.5593,
      "eval_samples_per_second": 27.442,
      "eval_steps_per_second": 3.506,
      "step": 2000
    },
    {
      "epoch": 80.3921568627451,
      "grad_norm": 3.8058626651763916,
      "learning_rate": 9.918333333333334e-05,
      "loss": 0.3997,
      "step": 2050
    },
    {
      "epoch": 82.3529411764706,
      "grad_norm": 2.170177936553955,
      "learning_rate": 9.835e-05,
      "loss": 0.3988,
      "step": 2100
    },
    {
      "epoch": 84.31372549019608,
      "grad_norm": 0.6957865357398987,
      "learning_rate": 9.751666666666666e-05,
      "loss": 0.3938,
      "step": 2150
    },
    {
      "epoch": 86.27450980392157,
      "grad_norm": 1.1778301000595093,
      "learning_rate": 9.668333333333334e-05,
      "loss": 0.4008,
      "step": 2200
    },
    {
      "epoch": 88.23529411764706,
      "grad_norm": 1.1749225854873657,
      "learning_rate": 9.585000000000001e-05,
      "loss": 0.406,
      "step": 2250
    },
    {
      "epoch": 90.19607843137256,
      "grad_norm": 1.1235101222991943,
      "learning_rate": 9.501666666666668e-05,
      "loss": 0.3906,
      "step": 2300
    },
    {
      "epoch": 92.15686274509804,
      "grad_norm": 5.676591873168945,
      "learning_rate": 9.418333333333334e-05,
      "loss": 0.3954,
      "step": 2350
    },
    {
      "epoch": 94.11764705882354,
      "grad_norm": 1.4196738004684448,
      "learning_rate": 9.335e-05,
      "loss": 0.4018,
      "step": 2400
    },
    {
      "epoch": 96.07843137254902,
      "grad_norm": 2.848329544067383,
      "learning_rate": 9.251666666666667e-05,
      "loss": 0.387,
      "step": 2450
    },
    {
      "epoch": 98.03921568627452,
      "grad_norm": 1.7598848342895508,
      "learning_rate": 9.168333333333333e-05,
      "loss": 0.3898,
      "step": 2500
    },
    {
      "epoch": 98.03921568627452,
      "eval_loss": 0.40130773186683655,
      "eval_runtime": 6.4746,
      "eval_samples_per_second": 27.801,
      "eval_steps_per_second": 3.552,
      "step": 2500
    },
    {
      "epoch": 100.0,
      "grad_norm": 5.990013599395752,
      "learning_rate": 9.085e-05,
      "loss": 0.3982,
      "step": 2550
    },
    {
      "epoch": 101.96078431372548,
      "grad_norm": 2.984307050704956,
      "learning_rate": 9.001666666666667e-05,
      "loss": 0.3889,
      "step": 2600
    },
    {
      "epoch": 103.92156862745098,
      "grad_norm": 1.976651668548584,
      "learning_rate": 8.918333333333334e-05,
      "loss": 0.382,
      "step": 2650
    },
    {
      "epoch": 105.88235294117646,
      "grad_norm": 1.1698123216629028,
      "learning_rate": 8.834999999999999e-05,
      "loss": 0.3877,
      "step": 2700
    },
    {
      "epoch": 107.84313725490196,
      "grad_norm": 1.8311396837234497,
      "learning_rate": 8.751666666666668e-05,
      "loss": 0.3848,
      "step": 2750
    },
    {
      "epoch": 109.80392156862744,
      "grad_norm": 3.1962406635284424,
      "learning_rate": 8.668333333333334e-05,
      "loss": 0.384,
      "step": 2800
    },
    {
      "epoch": 111.76470588235294,
      "grad_norm": 1.4986674785614014,
      "learning_rate": 8.585000000000001e-05,
      "loss": 0.3737,
      "step": 2850
    },
    {
      "epoch": 113.72549019607843,
      "grad_norm": 4.846043109893799,
      "learning_rate": 8.501666666666667e-05,
      "loss": 0.3787,
      "step": 2900
    },
    {
      "epoch": 115.68627450980392,
      "grad_norm": 2.5160129070281982,
      "learning_rate": 8.418333333333334e-05,
      "loss": 0.3707,
      "step": 2950
    },
    {
      "epoch": 117.6470588235294,
      "grad_norm": 1.209688663482666,
      "learning_rate": 8.335e-05,
      "loss": 0.373,
      "step": 3000
    },
    {
      "epoch": 117.6470588235294,
      "eval_loss": 0.40103283524513245,
      "eval_runtime": 6.6876,
      "eval_samples_per_second": 26.916,
      "eval_steps_per_second": 3.439,
      "step": 3000
    },
    {
      "epoch": 119.6078431372549,
      "grad_norm": 1.4071707725524902,
      "learning_rate": 8.251666666666668e-05,
      "loss": 0.3731,
      "step": 3050
    },
    {
      "epoch": 121.56862745098039,
      "grad_norm": 2.3906798362731934,
      "learning_rate": 8.168333333333333e-05,
      "loss": 0.3705,
      "step": 3100
    },
    {
      "epoch": 123.52941176470588,
      "grad_norm": 2.833566188812256,
      "learning_rate": 8.085e-05,
      "loss": 0.3754,
      "step": 3150
    },
    {
      "epoch": 125.49019607843137,
      "grad_norm": 1.7871581315994263,
      "learning_rate": 8.001666666666667e-05,
      "loss": 0.3734,
      "step": 3200
    },
    {
      "epoch": 127.45098039215686,
      "grad_norm": 1.0529006719589233,
      "learning_rate": 7.918333333333334e-05,
      "loss": 0.3792,
      "step": 3250
    },
    {
      "epoch": 129.41176470588235,
      "grad_norm": 2.939656972885132,
      "learning_rate": 7.835000000000001e-05,
      "loss": 0.3693,
      "step": 3300
    },
    {
      "epoch": 131.37254901960785,
      "grad_norm": 0.9791694283485413,
      "learning_rate": 7.751666666666668e-05,
      "loss": 0.366,
      "step": 3350
    },
    {
      "epoch": 133.33333333333334,
      "grad_norm": 1.6775403022766113,
      "learning_rate": 7.668333333333335e-05,
      "loss": 0.3709,
      "step": 3400
    },
    {
      "epoch": 135.2941176470588,
      "grad_norm": 1.0679121017456055,
      "learning_rate": 7.585e-05,
      "loss": 0.3677,
      "step": 3450
    },
    {
      "epoch": 137.2549019607843,
      "grad_norm": 2.068532705307007,
      "learning_rate": 7.501666666666667e-05,
      "loss": 0.3644,
      "step": 3500
    },
    {
      "epoch": 137.2549019607843,
      "eval_loss": 0.4005117118358612,
      "eval_runtime": 6.7669,
      "eval_samples_per_second": 26.6,
      "eval_steps_per_second": 3.399,
      "step": 3500
    },
    {
      "epoch": 139.2156862745098,
      "grad_norm": 0.9116536378860474,
      "learning_rate": 7.418333333333334e-05,
      "loss": 0.365,
      "step": 3550
    },
    {
      "epoch": 141.1764705882353,
      "grad_norm": 1.6524943113327026,
      "learning_rate": 7.335000000000001e-05,
      "loss": 0.362,
      "step": 3600
    },
    {
      "epoch": 143.13725490196077,
      "grad_norm": 0.9939906597137451,
      "learning_rate": 7.251666666666666e-05,
      "loss": 0.3641,
      "step": 3650
    },
    {
      "epoch": 145.09803921568627,
      "grad_norm": 1.7879794836044312,
      "learning_rate": 7.168333333333333e-05,
      "loss": 0.3649,
      "step": 3700
    },
    {
      "epoch": 147.05882352941177,
      "grad_norm": 2.4735560417175293,
      "learning_rate": 7.085e-05,
      "loss": 0.3591,
      "step": 3750
    },
    {
      "epoch": 149.01960784313727,
      "grad_norm": 1.4222075939178467,
      "learning_rate": 7.001666666666667e-05,
      "loss": 0.3605,
      "step": 3800
    },
    {
      "epoch": 150.98039215686273,
      "grad_norm": 2.095998525619507,
      "learning_rate": 6.918333333333334e-05,
      "loss": 0.3597,
      "step": 3850
    },
    {
      "epoch": 152.94117647058823,
      "grad_norm": 3.8910038471221924,
      "learning_rate": 6.835000000000001e-05,
      "loss": 0.3605,
      "step": 3900
    },
    {
      "epoch": 154.90196078431373,
      "grad_norm": 0.7127563953399658,
      "learning_rate": 6.751666666666668e-05,
      "loss": 0.3629,
      "step": 3950
    },
    {
      "epoch": 156.86274509803923,
      "grad_norm": 1.66582190990448,
      "learning_rate": 6.668333333333333e-05,
      "loss": 0.3569,
      "step": 4000
    },
    {
      "epoch": 156.86274509803923,
      "eval_loss": 0.4029478132724762,
      "eval_runtime": 6.4438,
      "eval_samples_per_second": 27.934,
      "eval_steps_per_second": 3.569,
      "step": 4000
    },
    {
      "epoch": 158.8235294117647,
      "grad_norm": 1.4073445796966553,
      "learning_rate": 6.585e-05,
      "loss": 0.3648,
      "step": 4050
    },
    {
      "epoch": 160.7843137254902,
      "grad_norm": 1.070461392402649,
      "learning_rate": 6.501666666666667e-05,
      "loss": 0.3569,
      "step": 4100
    },
    {
      "epoch": 162.7450980392157,
      "grad_norm": 2.141096353530884,
      "learning_rate": 6.418333333333334e-05,
      "loss": 0.3607,
      "step": 4150
    },
    {
      "epoch": 164.7058823529412,
      "grad_norm": 1.1931374073028564,
      "learning_rate": 6.335e-05,
      "loss": 0.3575,
      "step": 4200
    },
    {
      "epoch": 166.66666666666666,
      "grad_norm": 1.0879640579223633,
      "learning_rate": 6.251666666666666e-05,
      "loss": 0.356,
      "step": 4250
    },
    {
      "epoch": 168.62745098039215,
      "grad_norm": 1.5755019187927246,
      "learning_rate": 6.168333333333333e-05,
      "loss": 0.362,
      "step": 4300
    },
    {
      "epoch": 170.58823529411765,
      "grad_norm": 9.463594436645508,
      "learning_rate": 6.085000000000001e-05,
      "loss": 0.3612,
      "step": 4350
    },
    {
      "epoch": 172.54901960784315,
      "grad_norm": 5.174690246582031,
      "learning_rate": 6.0016666666666664e-05,
      "loss": 0.3537,
      "step": 4400
    },
    {
      "epoch": 174.50980392156862,
      "grad_norm": 1.342671513557434,
      "learning_rate": 5.918333333333333e-05,
      "loss": 0.3521,
      "step": 4450
    },
    {
      "epoch": 176.47058823529412,
      "grad_norm": 1.7866413593292236,
      "learning_rate": 5.835e-05,
      "loss": 0.3515,
      "step": 4500
    },
    {
      "epoch": 176.47058823529412,
      "eval_loss": 0.4038984179496765,
      "eval_runtime": 6.8698,
      "eval_samples_per_second": 26.202,
      "eval_steps_per_second": 3.348,
      "step": 4500
    },
    {
      "epoch": 178.4313725490196,
      "grad_norm": 2.6986000537872314,
      "learning_rate": 5.751666666666667e-05,
      "loss": 0.3539,
      "step": 4550
    },
    {
      "epoch": 180.3921568627451,
      "grad_norm": 1.255347728729248,
      "learning_rate": 5.668333333333333e-05,
      "loss": 0.3578,
      "step": 4600
    },
    {
      "epoch": 182.35294117647058,
      "grad_norm": 2.3221983909606934,
      "learning_rate": 5.585e-05,
      "loss": 0.3594,
      "step": 4650
    },
    {
      "epoch": 184.31372549019608,
      "grad_norm": 1.0801488161087036,
      "learning_rate": 5.501666666666667e-05,
      "loss": 0.3481,
      "step": 4700
    },
    {
      "epoch": 186.27450980392157,
      "grad_norm": 0.7307039499282837,
      "learning_rate": 5.4183333333333334e-05,
      "loss": 0.3486,
      "step": 4750
    },
    {
      "epoch": 188.23529411764707,
      "grad_norm": 0.9629709720611572,
      "learning_rate": 5.335e-05,
      "loss": 0.3465,
      "step": 4800
    },
    {
      "epoch": 190.19607843137254,
      "grad_norm": 0.5935477614402771,
      "learning_rate": 5.251666666666667e-05,
      "loss": 0.3481,
      "step": 4850
    },
    {
      "epoch": 192.15686274509804,
      "grad_norm": 1.965649127960205,
      "learning_rate": 5.168333333333334e-05,
      "loss": 0.3468,
      "step": 4900
    },
    {
      "epoch": 194.11764705882354,
      "grad_norm": 0.9675062894821167,
      "learning_rate": 5.0849999999999996e-05,
      "loss": 0.3529,
      "step": 4950
    },
    {
      "epoch": 196.07843137254903,
      "grad_norm": 0.7859313488006592,
      "learning_rate": 5.0016666666666665e-05,
      "loss": 0.3443,
      "step": 5000
    },
    {
      "epoch": 196.07843137254903,
      "eval_loss": 0.4005332589149475,
      "eval_runtime": 6.4088,
      "eval_samples_per_second": 28.086,
      "eval_steps_per_second": 3.589,
      "step": 5000
    },
    {
      "epoch": 198.0392156862745,
      "grad_norm": 0.9622226357460022,
      "learning_rate": 4.9183333333333334e-05,
      "loss": 0.349,
      "step": 5050
    },
    {
      "epoch": 200.0,
      "grad_norm": 0.7713702321052551,
      "learning_rate": 4.835e-05,
      "loss": 0.3446,
      "step": 5100
    },
    {
      "epoch": 201.9607843137255,
      "grad_norm": 1.2544292211532593,
      "learning_rate": 4.751666666666667e-05,
      "loss": 0.3443,
      "step": 5150
    },
    {
      "epoch": 203.92156862745097,
      "grad_norm": 0.7886931300163269,
      "learning_rate": 4.6683333333333334e-05,
      "loss": 0.3424,
      "step": 5200
    },
    {
      "epoch": 205.88235294117646,
      "grad_norm": 1.3799712657928467,
      "learning_rate": 4.585e-05,
      "loss": 0.348,
      "step": 5250
    },
    {
      "epoch": 207.84313725490196,
      "grad_norm": 0.9509637355804443,
      "learning_rate": 4.5016666666666665e-05,
      "loss": 0.3447,
      "step": 5300
    },
    {
      "epoch": 209.80392156862746,
      "grad_norm": 1.3633594512939453,
      "learning_rate": 4.4183333333333334e-05,
      "loss": 0.343,
      "step": 5350
    },
    {
      "epoch": 211.76470588235293,
      "grad_norm": 3.379939317703247,
      "learning_rate": 4.335e-05,
      "loss": 0.3451,
      "step": 5400
    },
    {
      "epoch": 213.72549019607843,
      "grad_norm": 0.7113842368125916,
      "learning_rate": 4.251666666666667e-05,
      "loss": 0.3429,
      "step": 5450
    },
    {
      "epoch": 215.68627450980392,
      "grad_norm": 0.8408060073852539,
      "learning_rate": 4.1683333333333335e-05,
      "loss": 0.3469,
      "step": 5500
    },
    {
      "epoch": 215.68627450980392,
      "eval_loss": 0.40183132886886597,
      "eval_runtime": 6.5032,
      "eval_samples_per_second": 27.679,
      "eval_steps_per_second": 3.537,
      "step": 5500
    },
    {
      "epoch": 217.64705882352942,
      "grad_norm": 1.2636085748672485,
      "learning_rate": 4.085e-05,
      "loss": 0.3371,
      "step": 5550
    },
    {
      "epoch": 219.6078431372549,
      "grad_norm": 2.5460524559020996,
      "learning_rate": 4.0016666666666666e-05,
      "loss": 0.3449,
      "step": 5600
    },
    {
      "epoch": 221.5686274509804,
      "grad_norm": 0.6922531723976135,
      "learning_rate": 3.9183333333333335e-05,
      "loss": 0.3432,
      "step": 5650
    },
    {
      "epoch": 223.52941176470588,
      "grad_norm": 0.5262002944946289,
      "learning_rate": 3.8350000000000004e-05,
      "loss": 0.3408,
      "step": 5700
    },
    {
      "epoch": 225.49019607843138,
      "grad_norm": 0.9883546233177185,
      "learning_rate": 3.7516666666666666e-05,
      "loss": 0.3399,
      "step": 5750
    },
    {
      "epoch": 227.45098039215685,
      "grad_norm": 0.7871970534324646,
      "learning_rate": 3.6683333333333335e-05,
      "loss": 0.3417,
      "step": 5800
    },
    {
      "epoch": 229.41176470588235,
      "grad_norm": 0.7385707497596741,
      "learning_rate": 3.585e-05,
      "loss": 0.3399,
      "step": 5850
    },
    {
      "epoch": 231.37254901960785,
      "grad_norm": 0.5304046869277954,
      "learning_rate": 3.501666666666667e-05,
      "loss": 0.3388,
      "step": 5900
    },
    {
      "epoch": 233.33333333333334,
      "grad_norm": 0.5726514458656311,
      "learning_rate": 3.4183333333333335e-05,
      "loss": 0.3361,
      "step": 5950
    },
    {
      "epoch": 235.2941176470588,
      "grad_norm": 0.49944621324539185,
      "learning_rate": 3.3350000000000004e-05,
      "loss": 0.3427,
      "step": 6000
    },
    {
      "epoch": 235.2941176470588,
      "eval_loss": 0.40008437633514404,
      "eval_runtime": 6.512,
      "eval_samples_per_second": 27.641,
      "eval_steps_per_second": 3.532,
      "step": 6000
    },
    {
      "epoch": 237.2549019607843,
      "grad_norm": 1.1355714797973633,
      "learning_rate": 3.2516666666666666e-05,
      "loss": 0.3377,
      "step": 6050
    },
    {
      "epoch": 239.2156862745098,
      "grad_norm": 0.7759362459182739,
      "learning_rate": 3.1683333333333335e-05,
      "loss": 0.339,
      "step": 6100
    },
    {
      "epoch": 241.1764705882353,
      "grad_norm": 1.1163146495819092,
      "learning_rate": 3.0850000000000004e-05,
      "loss": 0.337,
      "step": 6150
    },
    {
      "epoch": 243.13725490196077,
      "grad_norm": 0.8132624626159668,
      "learning_rate": 3.001666666666667e-05,
      "loss": 0.3378,
      "step": 6200
    },
    {
      "epoch": 245.09803921568627,
      "grad_norm": 0.7331820130348206,
      "learning_rate": 2.9183333333333336e-05,
      "loss": 0.3323,
      "step": 6250
    },
    {
      "epoch": 247.05882352941177,
      "grad_norm": 0.9192725419998169,
      "learning_rate": 2.8349999999999998e-05,
      "loss": 0.3382,
      "step": 6300
    },
    {
      "epoch": 249.01960784313727,
      "grad_norm": 3.9876034259796143,
      "learning_rate": 2.7516666666666667e-05,
      "loss": 0.3384,
      "step": 6350
    },
    {
      "epoch": 250.98039215686273,
      "grad_norm": 0.6646643280982971,
      "learning_rate": 2.6683333333333333e-05,
      "loss": 0.3365,
      "step": 6400
    },
    {
      "epoch": 252.94117647058823,
      "grad_norm": 0.5878006815910339,
      "learning_rate": 2.585e-05,
      "loss": 0.3403,
      "step": 6450
    },
    {
      "epoch": 254.90196078431373,
      "grad_norm": 0.8833298087120056,
      "learning_rate": 2.5016666666666667e-05,
      "loss": 0.3401,
      "step": 6500
    },
    {
      "epoch": 254.90196078431373,
      "eval_loss": 0.4041617810726166,
      "eval_runtime": 6.702,
      "eval_samples_per_second": 26.858,
      "eval_steps_per_second": 3.432,
      "step": 6500
    },
    {
      "epoch": 256.8627450980392,
      "grad_norm": 0.751977801322937,
      "learning_rate": 2.4183333333333336e-05,
      "loss": 0.3319,
      "step": 6550
    },
    {
      "epoch": 258.8235294117647,
      "grad_norm": 1.289437174797058,
      "learning_rate": 2.3350000000000002e-05,
      "loss": 0.3382,
      "step": 6600
    },
    {
      "epoch": 260.7843137254902,
      "grad_norm": 0.5142917633056641,
      "learning_rate": 2.2516666666666667e-05,
      "loss": 0.334,
      "step": 6650
    },
    {
      "epoch": 262.7450980392157,
      "grad_norm": 0.4598856568336487,
      "learning_rate": 2.1683333333333333e-05,
      "loss": 0.338,
      "step": 6700
    },
    {
      "epoch": 264.70588235294116,
      "grad_norm": 0.47424769401550293,
      "learning_rate": 2.085e-05,
      "loss": 0.3369,
      "step": 6750
    },
    {
      "epoch": 266.6666666666667,
      "grad_norm": 0.6651924848556519,
      "learning_rate": 2.0016666666666668e-05,
      "loss": 0.3379,
      "step": 6800
    },
    {
      "epoch": 268.62745098039215,
      "grad_norm": 0.6406331062316895,
      "learning_rate": 1.9183333333333333e-05,
      "loss": 0.3409,
      "step": 6850
    },
    {
      "epoch": 270.5882352941176,
      "grad_norm": 0.8162440061569214,
      "learning_rate": 1.8350000000000002e-05,
      "loss": 0.3336,
      "step": 6900
    },
    {
      "epoch": 272.54901960784315,
      "grad_norm": 0.5905545353889465,
      "learning_rate": 1.7516666666666668e-05,
      "loss": 0.3324,
      "step": 6950
    },
    {
      "epoch": 274.5098039215686,
      "grad_norm": 0.7422587871551514,
      "learning_rate": 1.6700000000000003e-05,
      "loss": 0.3419,
      "step": 7000
    },
    {
      "epoch": 274.5098039215686,
      "eval_loss": 0.40541204810142517,
      "eval_runtime": 6.8059,
      "eval_samples_per_second": 26.447,
      "eval_steps_per_second": 3.379,
      "step": 7000
    },
    {
      "epoch": 276.47058823529414,
      "grad_norm": 0.5827329158782959,
      "learning_rate": 1.586666666666667e-05,
      "loss": 0.3372,
      "step": 7050
    },
    {
      "epoch": 278.4313725490196,
      "grad_norm": 0.6354573369026184,
      "learning_rate": 1.5033333333333336e-05,
      "loss": 0.3316,
      "step": 7100
    },
    {
      "epoch": 280.3921568627451,
      "grad_norm": 0.47346118092536926,
      "learning_rate": 1.42e-05,
      "loss": 0.335,
      "step": 7150
    },
    {
      "epoch": 282.3529411764706,
      "grad_norm": 0.6653403043746948,
      "learning_rate": 1.3366666666666667e-05,
      "loss": 0.333,
      "step": 7200
    },
    {
      "epoch": 284.3137254901961,
      "grad_norm": 0.44320473074913025,
      "learning_rate": 1.2533333333333332e-05,
      "loss": 0.3322,
      "step": 7250
    },
    {
      "epoch": 286.27450980392155,
      "grad_norm": 0.40339425206184387,
      "learning_rate": 1.1700000000000001e-05,
      "loss": 0.3286,
      "step": 7300
    },
    {
      "epoch": 288.2352941176471,
      "grad_norm": 0.6550925374031067,
      "learning_rate": 1.0866666666666667e-05,
      "loss": 0.3351,
      "step": 7350
    },
    {
      "epoch": 290.19607843137254,
      "grad_norm": 0.5210171341896057,
      "learning_rate": 1.0033333333333333e-05,
      "loss": 0.3309,
      "step": 7400
    },
    {
      "epoch": 292.15686274509807,
      "grad_norm": 0.4037918448448181,
      "learning_rate": 9.2e-06,
      "loss": 0.3291,
      "step": 7450
    },
    {
      "epoch": 294.11764705882354,
      "grad_norm": 0.7901822328567505,
      "learning_rate": 8.366666666666667e-06,
      "loss": 0.3318,
      "step": 7500
    },
    {
      "epoch": 294.11764705882354,
      "eval_loss": 0.405687540769577,
      "eval_runtime": 6.5967,
      "eval_samples_per_second": 27.286,
      "eval_steps_per_second": 3.487,
      "step": 7500
    },
    {
      "epoch": 296.078431372549,
      "grad_norm": 0.6064783930778503,
      "learning_rate": 7.533333333333334e-06,
      "loss": 0.3314,
      "step": 7550
    },
    {
      "epoch": 298.03921568627453,
      "grad_norm": 0.5984176397323608,
      "learning_rate": 6.700000000000001e-06,
      "loss": 0.3312,
      "step": 7600
    },
    {
      "epoch": 300.0,
      "grad_norm": 0.6286676526069641,
      "learning_rate": 5.866666666666667e-06,
      "loss": 0.3286,
      "step": 7650
    },
    {
      "epoch": 301.96078431372547,
      "grad_norm": 0.45633211731910706,
      "learning_rate": 5.033333333333334e-06,
      "loss": 0.3285,
      "step": 7700
    },
    {
      "epoch": 303.921568627451,
      "grad_norm": 0.41933074593544006,
      "learning_rate": 4.2000000000000004e-06,
      "loss": 0.3272,
      "step": 7750
    },
    {
      "epoch": 305.88235294117646,
      "grad_norm": 0.4385930597782135,
      "learning_rate": 3.3666666666666665e-06,
      "loss": 0.3297,
      "step": 7800
    },
    {
      "epoch": 307.84313725490193,
      "grad_norm": 0.49151408672332764,
      "learning_rate": 2.5333333333333334e-06,
      "loss": 0.3315,
      "step": 7850
    },
    {
      "epoch": 309.80392156862746,
      "grad_norm": 0.5050336718559265,
      "learning_rate": 1.7000000000000002e-06,
      "loss": 0.331,
      "step": 7900
    },
    {
      "epoch": 311.7647058823529,
      "grad_norm": 0.3783471882343292,
      "learning_rate": 8.666666666666667e-07,
      "loss": 0.3269,
      "step": 7950
    },
    {
      "epoch": 313.72549019607845,
      "grad_norm": 0.4698627293109894,
      "learning_rate": 3.3333333333333334e-08,
      "loss": 0.3312,
      "step": 8000
    },
    {
      "epoch": 313.72549019607845,
      "eval_loss": 0.4050500988960266,
      "eval_runtime": 6.4964,
      "eval_samples_per_second": 27.708,
      "eval_steps_per_second": 3.54,
      "step": 8000
    }
  ],
  "logging_steps": 50,
  "max_steps": 8000,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 320,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 8.70355911863679e+16,
  "train_batch_size": 32,
  "trial_name": null,
  "trial_params": null
}