|
{
  "best_metric": 0.3785873353481293,
  "best_model_checkpoint": "mikhail-panzo/zlm_b64_le5_s8000/checkpoint-7000",
  "epoch": 5.8626465661641545,
  "eval_steps": 500,
  "global_step": 7000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.04187604690117253,
      "grad_norm": 5.415271759033203,
      "learning_rate": 2.4000000000000003e-07,
      "loss": 1.1819,
      "step": 50
    },
    {
      "epoch": 0.08375209380234507,
      "grad_norm": 16.256921768188477,
      "learning_rate": 4.900000000000001e-07,
      "loss": 1.0035,
      "step": 100
    },
    {
      "epoch": 0.12562814070351758,
      "grad_norm": 3.2439260482788086,
      "learning_rate": 7.350000000000001e-07,
      "loss": 0.9742,
      "step": 150
    },
    {
      "epoch": 0.16750418760469013,
      "grad_norm": 4.24526834487915,
      "learning_rate": 9.85e-07,
      "loss": 0.9361,
      "step": 200
    },
    {
      "epoch": 0.20938023450586266,
      "grad_norm": 3.625537872314453,
      "learning_rate": 1.235e-06,
      "loss": 0.8733,
      "step": 250
    },
    {
      "epoch": 0.25125628140703515,
      "grad_norm": 4.1455464363098145,
      "learning_rate": 1.485e-06,
      "loss": 0.8471,
      "step": 300
    },
    {
      "epoch": 0.2931323283082077,
      "grad_norm": 3.164158582687378,
      "learning_rate": 1.7350000000000001e-06,
      "loss": 0.838,
      "step": 350
    },
    {
      "epoch": 0.33500837520938026,
      "grad_norm": 4.24931001663208,
      "learning_rate": 1.985e-06,
      "loss": 0.7467,
      "step": 400
    },
    {
      "epoch": 0.3768844221105528,
      "grad_norm": 2.332688331604004,
      "learning_rate": 2.235e-06,
      "loss": 0.7389,
      "step": 450
    },
    {
      "epoch": 0.4187604690117253,
      "grad_norm": 2.370961904525757,
      "learning_rate": 2.4850000000000003e-06,
      "loss": 0.7074,
      "step": 500
    },
    {
      "epoch": 0.4187604690117253,
      "eval_loss": 0.6029486060142517,
      "eval_runtime": 220.2253,
      "eval_samples_per_second": 38.547,
      "eval_steps_per_second": 4.822,
      "step": 500
    },
    {
      "epoch": 0.46063651591289784,
      "grad_norm": 14.466177940368652,
      "learning_rate": 2.7350000000000005e-06,
      "loss": 0.6738,
      "step": 550
    },
    {
      "epoch": 0.5025125628140703,
      "grad_norm": 3.282449722290039,
      "learning_rate": 2.9850000000000002e-06,
      "loss": 0.6517,
      "step": 600
    },
    {
      "epoch": 0.5443886097152428,
      "grad_norm": 2.621645212173462,
      "learning_rate": 3.2350000000000004e-06,
      "loss": 0.6479,
      "step": 650
    },
    {
      "epoch": 0.5862646566164154,
      "grad_norm": 2.4493019580841064,
      "learning_rate": 3.485e-06,
      "loss": 0.6321,
      "step": 700
    },
    {
      "epoch": 0.628140703517588,
      "grad_norm": 1.919048547744751,
      "learning_rate": 3.7350000000000002e-06,
      "loss": 0.6407,
      "step": 750
    },
    {
      "epoch": 0.6700167504187605,
      "grad_norm": 2.8260602951049805,
      "learning_rate": 3.985000000000001e-06,
      "loss": 0.6279,
      "step": 800
    },
    {
      "epoch": 0.711892797319933,
      "grad_norm": 3.333841323852539,
      "learning_rate": 4.235e-06,
      "loss": 0.6143,
      "step": 850
    },
    {
      "epoch": 0.7537688442211056,
      "grad_norm": 1.7883094549179077,
      "learning_rate": 4.485e-06,
      "loss": 0.5949,
      "step": 900
    },
    {
      "epoch": 0.7956448911222781,
      "grad_norm": 2.48634934425354,
      "learning_rate": 4.735e-06,
      "loss": 0.5875,
      "step": 950
    },
    {
      "epoch": 0.8375209380234506,
      "grad_norm": 5.4277873039245605,
      "learning_rate": 4.9850000000000006e-06,
      "loss": 0.5916,
      "step": 1000
    },
    {
      "epoch": 0.8375209380234506,
      "eval_loss": 0.49684029817581177,
      "eval_runtime": 219.2621,
      "eval_samples_per_second": 38.716,
      "eval_steps_per_second": 4.844,
      "step": 1000
    },
    {
      "epoch": 0.8793969849246231,
      "grad_norm": 2.675995349884033,
      "learning_rate": 5.235e-06,
      "loss": 0.5676,
      "step": 1050
    },
    {
      "epoch": 0.9212730318257957,
      "grad_norm": 2.507474422454834,
      "learning_rate": 5.485e-06,
      "loss": 0.5535,
      "step": 1100
    },
    {
      "epoch": 0.9631490787269682,
      "grad_norm": 5.360454082489014,
      "learning_rate": 5.735e-06,
      "loss": 0.569,
      "step": 1150
    },
    {
      "epoch": 1.0050251256281406,
      "grad_norm": 2.9445533752441406,
      "learning_rate": 5.985000000000001e-06,
      "loss": 0.5385,
      "step": 1200
    },
    {
      "epoch": 1.0469011725293131,
      "grad_norm": 2.3764073848724365,
      "learning_rate": 6.235000000000001e-06,
      "loss": 0.5647,
      "step": 1250
    },
    {
      "epoch": 1.0887772194304857,
      "grad_norm": 4.364608287811279,
      "learning_rate": 6.485000000000001e-06,
      "loss": 0.5554,
      "step": 1300
    },
    {
      "epoch": 1.1306532663316582,
      "grad_norm": 4.688488006591797,
      "learning_rate": 6.735000000000001e-06,
      "loss": 0.5676,
      "step": 1350
    },
    {
      "epoch": 1.1725293132328307,
      "grad_norm": 3.0410969257354736,
      "learning_rate": 6.985000000000001e-06,
      "loss": 0.5374,
      "step": 1400
    },
    {
      "epoch": 1.2144053601340032,
      "grad_norm": 6.28264856338501,
      "learning_rate": 7.235000000000001e-06,
      "loss": 0.5275,
      "step": 1450
    },
    {
      "epoch": 1.2562814070351758,
      "grad_norm": 3.7520318031311035,
      "learning_rate": 7.485000000000001e-06,
      "loss": 0.5206,
      "step": 1500
    },
    {
      "epoch": 1.2562814070351758,
      "eval_loss": 0.4592318534851074,
      "eval_runtime": 220.2748,
      "eval_samples_per_second": 38.538,
      "eval_steps_per_second": 4.821,
      "step": 1500
    },
    {
      "epoch": 1.2981574539363483,
      "grad_norm": 2.9252817630767822,
      "learning_rate": 7.735e-06,
      "loss": 0.5175,
      "step": 1550
    },
    {
      "epoch": 1.3400335008375208,
      "grad_norm": 2.6018381118774414,
      "learning_rate": 7.985e-06,
      "loss": 0.5191,
      "step": 1600
    },
    {
      "epoch": 1.3819095477386933,
      "grad_norm": 2.215420722961426,
      "learning_rate": 8.235e-06,
      "loss": 0.5274,
      "step": 1650
    },
    {
      "epoch": 1.4237855946398659,
      "grad_norm": 2.787269353866577,
      "learning_rate": 8.485000000000001e-06,
      "loss": 0.5201,
      "step": 1700
    },
    {
      "epoch": 1.4656616415410384,
      "grad_norm": 3.644702434539795,
      "learning_rate": 8.735000000000002e-06,
      "loss": 0.5157,
      "step": 1750
    },
    {
      "epoch": 1.507537688442211,
      "grad_norm": 2.4051408767700195,
      "learning_rate": 8.985000000000001e-06,
      "loss": 0.5091,
      "step": 1800
    },
    {
      "epoch": 1.5494137353433834,
      "grad_norm": 3.729388952255249,
      "learning_rate": 9.235e-06,
      "loss": 0.5134,
      "step": 1850
    },
    {
      "epoch": 1.591289782244556,
      "grad_norm": 3.360603094100952,
      "learning_rate": 9.485000000000002e-06,
      "loss": 0.5041,
      "step": 1900
    },
    {
      "epoch": 1.6331658291457285,
      "grad_norm": 3.6210439205169678,
      "learning_rate": 9.735e-06,
      "loss": 0.4993,
      "step": 1950
    },
    {
      "epoch": 1.675041876046901,
      "grad_norm": 2.6245505809783936,
      "learning_rate": 9.985000000000002e-06,
      "loss": 0.4979,
      "step": 2000
    },
    {
      "epoch": 1.675041876046901,
      "eval_loss": 0.4388435482978821,
      "eval_runtime": 220.5956,
      "eval_samples_per_second": 38.482,
      "eval_steps_per_second": 4.814,
      "step": 2000
    },
    {
      "epoch": 1.7169179229480735,
      "grad_norm": 3.5152196884155273,
      "learning_rate": 9.921666666666667e-06,
      "loss": 0.4917,
      "step": 2050
    },
    {
      "epoch": 1.758793969849246,
      "grad_norm": 2.4508583545684814,
      "learning_rate": 9.838333333333334e-06,
      "loss": 0.5161,
      "step": 2100
    },
    {
      "epoch": 1.8006700167504186,
      "grad_norm": 3.44022536277771,
      "learning_rate": 9.755e-06,
      "loss": 0.4933,
      "step": 2150
    },
    {
      "epoch": 1.8425460636515911,
      "grad_norm": 2.480679512023926,
      "learning_rate": 9.671666666666668e-06,
      "loss": 0.4983,
      "step": 2200
    },
    {
      "epoch": 1.8844221105527639,
      "grad_norm": 2.595881462097168,
      "learning_rate": 9.588333333333334e-06,
      "loss": 0.4797,
      "step": 2250
    },
    {
      "epoch": 1.9262981574539364,
      "grad_norm": 2.0328593254089355,
      "learning_rate": 9.505000000000001e-06,
      "loss": 0.483,
      "step": 2300
    },
    {
      "epoch": 1.968174204355109,
      "grad_norm": 3.7075388431549072,
      "learning_rate": 9.421666666666668e-06,
      "loss": 0.4922,
      "step": 2350
    },
    {
      "epoch": 2.0100502512562812,
      "grad_norm": 2.3989317417144775,
      "learning_rate": 9.338333333333333e-06,
      "loss": 0.4846,
      "step": 2400
    },
    {
      "epoch": 2.0519262981574538,
      "grad_norm": 2.803882360458374,
      "learning_rate": 9.255e-06,
      "loss": 0.4832,
      "step": 2450
    },
    {
      "epoch": 2.0938023450586263,
      "grad_norm": 3.1467437744140625,
      "learning_rate": 9.171666666666667e-06,
      "loss": 0.4852,
      "step": 2500
    },
    {
      "epoch": 2.0938023450586263,
      "eval_loss": 0.4210895597934723,
      "eval_runtime": 222.2617,
      "eval_samples_per_second": 38.194,
      "eval_steps_per_second": 4.778,
      "step": 2500
    },
    {
      "epoch": 2.135678391959799,
      "grad_norm": 3.0684900283813477,
      "learning_rate": 9.088333333333334e-06,
      "loss": 0.4787,
      "step": 2550
    },
    {
      "epoch": 2.1775544388609713,
      "grad_norm": 2.283045768737793,
      "learning_rate": 9.005000000000001e-06,
      "loss": 0.4781,
      "step": 2600
    },
    {
      "epoch": 2.219430485762144,
      "grad_norm": 2.2983710765838623,
      "learning_rate": 8.921666666666668e-06,
      "loss": 0.4745,
      "step": 2650
    },
    {
      "epoch": 2.2613065326633164,
      "grad_norm": 2.7612133026123047,
      "learning_rate": 8.838333333333335e-06,
      "loss": 0.4659,
      "step": 2700
    },
    {
      "epoch": 2.3031825795644894,
      "grad_norm": 1.8307946920394897,
      "learning_rate": 8.755e-06,
      "loss": 0.4741,
      "step": 2750
    },
    {
      "epoch": 2.3450586264656614,
      "grad_norm": 2.60187029838562,
      "learning_rate": 8.671666666666667e-06,
      "loss": 0.4603,
      "step": 2800
    },
    {
      "epoch": 2.3869346733668344,
      "grad_norm": 1.789478063583374,
      "learning_rate": 8.588333333333334e-06,
      "loss": 0.4587,
      "step": 2850
    },
    {
      "epoch": 2.4288107202680065,
      "grad_norm": 2.2422680854797363,
      "learning_rate": 8.505e-06,
      "loss": 0.4614,
      "step": 2900
    },
    {
      "epoch": 2.4706867671691795,
      "grad_norm": 2.4102437496185303,
      "learning_rate": 8.421666666666668e-06,
      "loss": 0.4618,
      "step": 2950
    },
    {
      "epoch": 2.5125628140703515,
      "grad_norm": 1.9738932847976685,
      "learning_rate": 8.338333333333335e-06,
      "loss": 0.4615,
      "step": 3000
    },
    {
      "epoch": 2.5125628140703515,
      "eval_loss": 0.4087553322315216,
      "eval_runtime": 221.1861,
      "eval_samples_per_second": 38.379,
      "eval_steps_per_second": 4.801,
      "step": 3000
    },
    {
      "epoch": 2.5544388609715245,
      "grad_norm": 2.5020742416381836,
      "learning_rate": 8.255000000000001e-06,
      "loss": 0.4505,
      "step": 3050
    },
    {
      "epoch": 2.5963149078726966,
      "grad_norm": 1.6484034061431885,
      "learning_rate": 8.171666666666668e-06,
      "loss": 0.4548,
      "step": 3100
    },
    {
      "epoch": 2.6381909547738696,
      "grad_norm": 1.7585675716400146,
      "learning_rate": 8.088333333333334e-06,
      "loss": 0.4668,
      "step": 3150
    },
    {
      "epoch": 2.6800670016750416,
      "grad_norm": 2.0797314643859863,
      "learning_rate": 8.006666666666667e-06,
      "loss": 0.4583,
      "step": 3200
    },
    {
      "epoch": 2.7219430485762146,
      "grad_norm": 5.132874011993408,
      "learning_rate": 7.923333333333334e-06,
      "loss": 0.4573,
      "step": 3250
    },
    {
      "epoch": 2.7638190954773867,
      "grad_norm": 2.005979537963867,
      "learning_rate": 7.840000000000001e-06,
      "loss": 0.449,
      "step": 3300
    },
    {
      "epoch": 2.8056951423785597,
      "grad_norm": 2.802713394165039,
      "learning_rate": 7.756666666666666e-06,
      "loss": 0.448,
      "step": 3350
    },
    {
      "epoch": 2.8475711892797317,
      "grad_norm": 1.8899844884872437,
      "learning_rate": 7.673333333333333e-06,
      "loss": 0.4558,
      "step": 3400
    },
    {
      "epoch": 2.8894472361809047,
      "grad_norm": 3.7795069217681885,
      "learning_rate": 7.590000000000001e-06,
      "loss": 0.4533,
      "step": 3450
    },
    {
      "epoch": 2.931323283082077,
      "grad_norm": 4.659352779388428,
      "learning_rate": 7.506666666666668e-06,
      "loss": 0.4521,
      "step": 3500
    },
    {
      "epoch": 2.931323283082077,
      "eval_loss": 0.400156170129776,
      "eval_runtime": 222.3025,
      "eval_samples_per_second": 38.187,
      "eval_steps_per_second": 4.777,
      "step": 3500
    },
    {
      "epoch": 2.9731993299832498,
      "grad_norm": 2.298712730407715,
      "learning_rate": 7.423333333333333e-06,
      "loss": 0.4398,
      "step": 3550
    },
    {
      "epoch": 3.0150753768844223,
      "grad_norm": 2.995213508605957,
      "learning_rate": 7.340000000000001e-06,
      "loss": 0.4496,
      "step": 3600
    },
    {
      "epoch": 3.056951423785595,
      "grad_norm": 2.17159366607666,
      "learning_rate": 7.256666666666668e-06,
      "loss": 0.4519,
      "step": 3650
    },
    {
      "epoch": 3.0988274706867673,
      "grad_norm": 2.4102556705474854,
      "learning_rate": 7.173333333333335e-06,
      "loss": 0.4497,
      "step": 3700
    },
    {
      "epoch": 3.14070351758794,
      "grad_norm": 1.7901294231414795,
      "learning_rate": 7.09e-06,
      "loss": 0.4545,
      "step": 3750
    },
    {
      "epoch": 3.1825795644891124,
      "grad_norm": 2.869340181350708,
      "learning_rate": 7.006666666666667e-06,
      "loss": 0.4492,
      "step": 3800
    },
    {
      "epoch": 3.224455611390285,
      "grad_norm": 2.1695075035095215,
      "learning_rate": 6.9233333333333345e-06,
      "loss": 0.4505,
      "step": 3850
    },
    {
      "epoch": 3.2663316582914574,
      "grad_norm": 2.9931163787841797,
      "learning_rate": 6.8400000000000014e-06,
      "loss": 0.4421,
      "step": 3900
    },
    {
      "epoch": 3.30820770519263,
      "grad_norm": 3.8682026863098145,
      "learning_rate": 6.756666666666667e-06,
      "loss": 0.4439,
      "step": 3950
    },
    {
      "epoch": 3.3500837520938025,
      "grad_norm": 1.6634626388549805,
      "learning_rate": 6.6733333333333335e-06,
      "loss": 0.4431,
      "step": 4000
    },
    {
      "epoch": 3.3500837520938025,
      "eval_loss": 0.39482244849205017,
      "eval_runtime": 222.4171,
      "eval_samples_per_second": 38.167,
      "eval_steps_per_second": 4.775,
      "step": 4000
    },
    {
      "epoch": 3.391959798994975,
      "grad_norm": 3.006056308746338,
      "learning_rate": 6.5900000000000004e-06,
      "loss": 0.4615,
      "step": 4050
    },
    {
      "epoch": 3.4338358458961475,
      "grad_norm": 1.9082704782485962,
      "learning_rate": 6.5066666666666665e-06,
      "loss": 0.4486,
      "step": 4100
    },
    {
      "epoch": 3.47571189279732,
      "grad_norm": 3.1924006938934326,
      "learning_rate": 6.423333333333333e-06,
      "loss": 0.4526,
      "step": 4150
    },
    {
      "epoch": 3.5175879396984926,
      "grad_norm": 1.9817140102386475,
      "learning_rate": 6.34e-06,
      "loss": 0.4428,
      "step": 4200
    },
    {
      "epoch": 3.559463986599665,
      "grad_norm": 1.3777679204940796,
      "learning_rate": 6.256666666666667e-06,
      "loss": 0.443,
      "step": 4250
    },
    {
      "epoch": 3.6013400335008376,
      "grad_norm": 1.8882936239242554,
      "learning_rate": 6.173333333333333e-06,
      "loss": 0.4404,
      "step": 4300
    },
    {
      "epoch": 3.64321608040201,
      "grad_norm": 1.632260799407959,
      "learning_rate": 6.09e-06,
      "loss": 0.4451,
      "step": 4350
    },
    {
      "epoch": 3.6850921273031827,
      "grad_norm": 7.229835033416748,
      "learning_rate": 6.006666666666667e-06,
      "loss": 0.4352,
      "step": 4400
    },
    {
      "epoch": 3.726968174204355,
      "grad_norm": 1.7340707778930664,
      "learning_rate": 5.923333333333334e-06,
      "loss": 0.433,
      "step": 4450
    },
    {
      "epoch": 3.7688442211055277,
      "grad_norm": 1.81380295753479,
      "learning_rate": 5.84e-06,
      "loss": 0.4393,
      "step": 4500
    },
    {
      "epoch": 3.7688442211055277,
      "eval_loss": 0.3913716673851013,
      "eval_runtime": 221.2245,
      "eval_samples_per_second": 38.373,
      "eval_steps_per_second": 4.801,
      "step": 4500
    },
    {
      "epoch": 3.8107202680067003,
      "grad_norm": 2.9950456619262695,
      "learning_rate": 5.756666666666667e-06,
      "loss": 0.4303,
      "step": 4550
    },
    {
      "epoch": 3.852596314907873,
      "grad_norm": 1.537866473197937,
      "learning_rate": 5.673333333333334e-06,
      "loss": 0.4322,
      "step": 4600
    },
    {
      "epoch": 3.8944723618090453,
      "grad_norm": 2.1190896034240723,
      "learning_rate": 5.590000000000001e-06,
      "loss": 0.4299,
      "step": 4650
    },
    {
      "epoch": 3.936348408710218,
      "grad_norm": 1.5541598796844482,
      "learning_rate": 5.506666666666667e-06,
      "loss": 0.4259,
      "step": 4700
    },
    {
      "epoch": 3.9782244556113904,
      "grad_norm": 2.0810546875,
      "learning_rate": 5.423333333333334e-06,
      "loss": 0.4265,
      "step": 4750
    },
    {
      "epoch": 4.0201005025125625,
      "grad_norm": 1.5855557918548584,
      "learning_rate": 5.3400000000000005e-06,
      "loss": 0.4343,
      "step": 4800
    },
    {
      "epoch": 4.061976549413735,
      "grad_norm": 2.2456538677215576,
      "learning_rate": 5.256666666666667e-06,
      "loss": 0.432,
      "step": 4850
    },
    {
      "epoch": 4.1038525963149075,
      "grad_norm": 2.1106362342834473,
      "learning_rate": 5.1733333333333335e-06,
      "loss": 0.4212,
      "step": 4900
    },
    {
      "epoch": 4.1457286432160805,
      "grad_norm": 2.1016457080841064,
      "learning_rate": 5.09e-06,
      "loss": 0.4343,
      "step": 4950
    },
    {
      "epoch": 4.187604690117253,
      "grad_norm": 2.1289258003234863,
      "learning_rate": 5.006666666666667e-06,
      "loss": 0.4271,
      "step": 5000
    },
    {
      "epoch": 4.187604690117253,
      "eval_loss": 0.3861308693885803,
      "eval_runtime": 221.7311,
      "eval_samples_per_second": 38.285,
      "eval_steps_per_second": 4.79,
      "step": 5000
    },
    {
      "epoch": 4.2294807370184255,
      "grad_norm": 1.7459477186203003,
      "learning_rate": 4.923333333333334e-06,
      "loss": 0.4294,
      "step": 5050
    },
    {
      "epoch": 4.271356783919598,
      "grad_norm": 2.2993977069854736,
      "learning_rate": 4.84e-06,
      "loss": 0.4305,
      "step": 5100
    },
    {
      "epoch": 4.313232830820771,
      "grad_norm": 2.3953640460968018,
      "learning_rate": 4.756666666666667e-06,
      "loss": 0.4297,
      "step": 5150
    },
    {
      "epoch": 4.355108877721943,
      "grad_norm": 1.3711094856262207,
      "learning_rate": 4.673333333333333e-06,
      "loss": 0.4277,
      "step": 5200
    },
    {
      "epoch": 4.396984924623116,
      "grad_norm": 1.373367428779602,
      "learning_rate": 4.590000000000001e-06,
      "loss": 0.424,
      "step": 5250
    },
    {
      "epoch": 4.438860971524288,
      "grad_norm": 2.1267800331115723,
      "learning_rate": 4.506666666666667e-06,
      "loss": 0.4244,
      "step": 5300
    },
    {
      "epoch": 4.480737018425461,
      "grad_norm": 1.67445707321167,
      "learning_rate": 4.423333333333334e-06,
      "loss": 0.4246,
      "step": 5350
    },
    {
      "epoch": 4.522613065326633,
      "grad_norm": 2.8858883380889893,
      "learning_rate": 4.34e-06,
      "loss": 0.4363,
      "step": 5400
    },
    {
      "epoch": 4.564489112227806,
      "grad_norm": 1.7339367866516113,
      "learning_rate": 4.256666666666668e-06,
      "loss": 0.4257,
      "step": 5450
    },
    {
      "epoch": 4.606365159128979,
      "grad_norm": 1.892198085784912,
      "learning_rate": 4.173333333333334e-06,
      "loss": 0.4317,
      "step": 5500
    },
    {
      "epoch": 4.606365159128979,
      "eval_loss": 0.3835948705673218,
      "eval_runtime": 220.3794,
      "eval_samples_per_second": 38.52,
      "eval_steps_per_second": 4.819,
      "step": 5500
    },
    {
      "epoch": 4.648241206030151,
      "grad_norm": 1.3021107912063599,
      "learning_rate": 4.09e-06,
      "loss": 0.4239,
      "step": 5550
    },
    {
      "epoch": 4.690117252931323,
      "grad_norm": 1.9206578731536865,
      "learning_rate": 4.006666666666667e-06,
      "loss": 0.4296,
      "step": 5600
    },
    {
      "epoch": 4.731993299832496,
      "grad_norm": 1.5515390634536743,
      "learning_rate": 3.923333333333334e-06,
      "loss": 0.4253,
      "step": 5650
    },
    {
      "epoch": 4.773869346733669,
      "grad_norm": 1.9020607471466064,
      "learning_rate": 3.8400000000000005e-06,
      "loss": 0.4232,
      "step": 5700
    },
    {
      "epoch": 4.815745393634841,
      "grad_norm": 1.831719160079956,
      "learning_rate": 3.756666666666667e-06,
      "loss": 0.4264,
      "step": 5750
    },
    {
      "epoch": 4.857621440536013,
      "grad_norm": 1.7857838869094849,
      "learning_rate": 3.673333333333334e-06,
      "loss": 0.4217,
      "step": 5800
    },
    {
      "epoch": 4.899497487437186,
      "grad_norm": 1.8334425687789917,
      "learning_rate": 3.5900000000000004e-06,
      "loss": 0.4256,
      "step": 5850
    },
    {
      "epoch": 4.941373534338359,
      "grad_norm": 1.8758697509765625,
      "learning_rate": 3.5066666666666673e-06,
      "loss": 0.4238,
      "step": 5900
    },
    {
      "epoch": 4.983249581239531,
      "grad_norm": 2.417583465576172,
      "learning_rate": 3.4233333333333333e-06,
      "loss": 0.4194,
      "step": 5950
    },
    {
      "epoch": 5.025125628140704,
      "grad_norm": 1.6724933385849,
      "learning_rate": 3.3400000000000006e-06,
      "loss": 0.4265,
      "step": 6000
    },
    {
      "epoch": 5.025125628140704,
      "eval_loss": 0.38090234994888306,
      "eval_runtime": 221.6516,
      "eval_samples_per_second": 38.299,
      "eval_steps_per_second": 4.791,
      "step": 6000
    },
    {
      "epoch": 5.067001675041876,
      "grad_norm": 1.7485418319702148,
      "learning_rate": 3.2566666666666667e-06,
      "loss": 0.4154,
      "step": 6050
    },
    {
      "epoch": 5.108877721943049,
      "grad_norm": 1.8903107643127441,
      "learning_rate": 3.173333333333334e-06,
      "loss": 0.4224,
      "step": 6100
    },
    {
      "epoch": 5.150753768844221,
      "grad_norm": 1.360568881034851,
      "learning_rate": 3.09e-06,
      "loss": 0.4207,
      "step": 6150
    },
    {
      "epoch": 5.192629815745394,
      "grad_norm": 1.8849430084228516,
      "learning_rate": 3.0066666666666674e-06,
      "loss": 0.4193,
      "step": 6200
    },
    {
      "epoch": 5.234505862646566,
      "grad_norm": 1.2618883848190308,
      "learning_rate": 2.9233333333333334e-06,
      "loss": 0.4174,
      "step": 6250
    },
    {
      "epoch": 5.276381909547739,
      "grad_norm": 1.923030972480774,
      "learning_rate": 2.84e-06,
      "loss": 0.4188,
      "step": 6300
    },
    {
      "epoch": 5.318257956448911,
      "grad_norm": 1.4617573022842407,
      "learning_rate": 2.756666666666667e-06,
      "loss": 0.4195,
      "step": 6350
    },
    {
      "epoch": 5.360134003350084,
      "grad_norm": 1.308645248413086,
      "learning_rate": 2.6733333333333333e-06,
      "loss": 0.4265,
      "step": 6400
    },
    {
      "epoch": 5.402010050251256,
      "grad_norm": 1.4184455871582031,
      "learning_rate": 2.59e-06,
      "loss": 0.4202,
      "step": 6450
    },
    {
      "epoch": 5.443886097152429,
      "grad_norm": 1.2012479305267334,
      "learning_rate": 2.5066666666666667e-06,
      "loss": 0.424,
      "step": 6500
    },
    {
      "epoch": 5.443886097152429,
      "eval_loss": 0.3793572783470154,
      "eval_runtime": 221.7764,
      "eval_samples_per_second": 38.277,
      "eval_steps_per_second": 4.789,
      "step": 6500
    },
    {
      "epoch": 5.485762144053601,
      "grad_norm": 1.3103199005126953,
      "learning_rate": 2.4233333333333336e-06,
      "loss": 0.4179,
      "step": 6550
    },
    {
      "epoch": 5.527638190954773,
      "grad_norm": 1.4100350141525269,
      "learning_rate": 2.3400000000000005e-06,
      "loss": 0.4201,
      "step": 6600
    },
    {
      "epoch": 5.569514237855946,
      "grad_norm": 1.944542646408081,
      "learning_rate": 2.2566666666666665e-06,
      "loss": 0.4157,
      "step": 6650
    },
    {
      "epoch": 5.611390284757119,
      "grad_norm": 1.759690284729004,
      "learning_rate": 2.1733333333333334e-06,
      "loss": 0.4135,
      "step": 6700
    },
    {
      "epoch": 5.653266331658291,
      "grad_norm": 1.6196409463882446,
      "learning_rate": 2.09e-06,
      "loss": 0.4218,
      "step": 6750
    },
    {
      "epoch": 5.695142378559464,
      "grad_norm": 1.4138773679733276,
      "learning_rate": 2.006666666666667e-06,
      "loss": 0.4228,
      "step": 6800
    },
    {
      "epoch": 5.7370184254606365,
      "grad_norm": 1.7974828481674194,
      "learning_rate": 1.9233333333333333e-06,
      "loss": 0.4242,
      "step": 6850
    },
    {
      "epoch": 5.778894472361809,
      "grad_norm": 1.697729229927063,
      "learning_rate": 1.8400000000000002e-06,
      "loss": 0.4133,
      "step": 6900
    },
    {
      "epoch": 5.8207705192629815,
      "grad_norm": 1.2323369979858398,
      "learning_rate": 1.7566666666666669e-06,
      "loss": 0.4205,
      "step": 6950
    },
    {
      "epoch": 5.8626465661641545,
      "grad_norm": 1.0918774604797363,
      "learning_rate": 1.6733333333333335e-06,
      "loss": 0.4123,
      "step": 7000
    },
    {
      "epoch": 5.8626465661641545,
      "eval_loss": 0.3785873353481293,
      "eval_runtime": 222.4775,
      "eval_samples_per_second": 38.157,
      "eval_steps_per_second": 4.774,
      "step": 7000
    }
  ],
  "logging_steps": 50,
  "max_steps": 8000,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 7,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 6.790045125555648e+16,
  "train_batch_size": 32,
  "trial_name": null,
  "trial_params": null
}
|
|