{ |
|
"best_metric": 0.3162495493888855, |
|
"best_model_checkpoint": "mikhail_panzo/zlm_b64_le4_s12000/checkpoint-9500", |
|
"epoch": 7.956448911222781, |
|
"eval_steps": 500, |
|
"global_step": 9500, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.04187604690117253, |
|
"grad_norm": 3.0184223651885986, |
|
"learning_rate": 2.35e-06, |
|
"loss": 1.1206, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.08375209380234507, |
|
"grad_norm": 3.4442005157470703, |
|
"learning_rate": 4.85e-06, |
|
"loss": 0.8185, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.12562814070351758, |
|
"grad_norm": 2.6812493801116943, |
|
"learning_rate": 7.35e-06, |
|
"loss": 0.7576, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.16750418760469013, |
|
"grad_norm": 4.518482685089111, |
|
"learning_rate": 9.85e-06, |
|
"loss": 0.6938, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.20938023450586266, |
|
"grad_norm": 4.683146953582764, |
|
"learning_rate": 1.235e-05, |
|
"loss": 0.6386, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.25125628140703515, |
|
"grad_norm": 2.7392578125, |
|
"learning_rate": 1.485e-05, |
|
"loss": 0.6226, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.2931323283082077, |
|
"grad_norm": 2.017056465148926, |
|
"learning_rate": 1.7349999999999998e-05, |
|
"loss": 0.6128, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.33500837520938026, |
|
"grad_norm": 3.6669230461120605, |
|
"learning_rate": 1.985e-05, |
|
"loss": 0.5675, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.3768844221105528, |
|
"grad_norm": 4.16896915435791, |
|
"learning_rate": 2.235e-05, |
|
"loss": 0.5655, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.4187604690117253, |
|
"grad_norm": 3.691459894180298, |
|
"learning_rate": 2.485e-05, |
|
"loss": 0.5487, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.4187604690117253, |
|
"eval_loss": 0.47458764910697937, |
|
"eval_runtime": 214.7196, |
|
"eval_samples_per_second": 39.535, |
|
"eval_steps_per_second": 4.946, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.46063651591289784, |
|
"grad_norm": 3.3792996406555176, |
|
"learning_rate": 2.7350000000000004e-05, |
|
"loss": 0.5232, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.5025125628140703, |
|
"grad_norm": 3.6263363361358643, |
|
"learning_rate": 2.985e-05, |
|
"loss": 0.5132, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.5443886097152428, |
|
"grad_norm": 2.652538776397705, |
|
"learning_rate": 3.235e-05, |
|
"loss": 0.5067, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.5862646566164154, |
|
"grad_norm": 3.5355172157287598, |
|
"learning_rate": 3.485e-05, |
|
"loss": 0.4981, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.628140703517588, |
|
"grad_norm": 2.5035619735717773, |
|
"learning_rate": 3.735e-05, |
|
"loss": 0.5094, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.6700167504187605, |
|
"grad_norm": 2.9585518836975098, |
|
"learning_rate": 3.9850000000000006e-05, |
|
"loss": 0.5053, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.711892797319933, |
|
"grad_norm": 9.997116088867188, |
|
"learning_rate": 4.235e-05, |
|
"loss": 0.502, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.7537688442211056, |
|
"grad_norm": 1.8630377054214478, |
|
"learning_rate": 4.4850000000000006e-05, |
|
"loss": 0.483, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.7956448911222781, |
|
"grad_norm": 1.5534099340438843, |
|
"learning_rate": 4.735e-05, |
|
"loss": 0.4743, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 0.8375209380234506, |
|
"grad_norm": 2.8155503273010254, |
|
"learning_rate": 4.9850000000000006e-05, |
|
"loss": 0.483, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.8375209380234506, |
|
"eval_loss": 0.4226582944393158, |
|
"eval_runtime": 213.2339, |
|
"eval_samples_per_second": 39.811, |
|
"eval_steps_per_second": 4.98, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.8793969849246231, |
|
"grad_norm": 4.2074055671691895, |
|
"learning_rate": 5.235e-05, |
|
"loss": 0.4625, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 0.9212730318257957, |
|
"grad_norm": 2.215416431427002, |
|
"learning_rate": 5.485e-05, |
|
"loss": 0.461, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 0.9631490787269682, |
|
"grad_norm": 3.3321831226348877, |
|
"learning_rate": 5.7350000000000005e-05, |
|
"loss": 0.4807, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 1.0050251256281406, |
|
"grad_norm": 3.5796420574188232, |
|
"learning_rate": 5.9850000000000005e-05, |
|
"loss": 0.4456, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 1.0469011725293131, |
|
"grad_norm": 2.3565518856048584, |
|
"learning_rate": 6.235000000000001e-05, |
|
"loss": 0.4664, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 1.0887772194304857, |
|
"grad_norm": 3.147794485092163, |
|
"learning_rate": 6.485e-05, |
|
"loss": 0.4681, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 1.1306532663316582, |
|
"grad_norm": 3.028409957885742, |
|
"learning_rate": 6.735e-05, |
|
"loss": 0.4663, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 1.1725293132328307, |
|
"grad_norm": 4.820915222167969, |
|
"learning_rate": 6.985e-05, |
|
"loss": 0.4493, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 1.2144053601340032, |
|
"grad_norm": 1.93254816532135, |
|
"learning_rate": 7.235000000000001e-05, |
|
"loss": 0.4413, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 1.2562814070351758, |
|
"grad_norm": 2.237015724182129, |
|
"learning_rate": 7.485e-05, |
|
"loss": 0.432, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 1.2562814070351758, |
|
"eval_loss": 0.39829039573669434, |
|
"eval_runtime": 217.3782, |
|
"eval_samples_per_second": 39.052, |
|
"eval_steps_per_second": 4.885, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 1.2981574539363483, |
|
"grad_norm": 1.709120273590088, |
|
"learning_rate": 7.735e-05, |
|
"loss": 0.4361, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 1.3400335008375208, |
|
"grad_norm": 3.733328104019165, |
|
"learning_rate": 7.985e-05, |
|
"loss": 0.4357, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 1.3819095477386933, |
|
"grad_norm": 4.131324768066406, |
|
"learning_rate": 8.235000000000001e-05, |
|
"loss": 0.4445, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 1.4237855946398659, |
|
"grad_norm": 2.2893927097320557, |
|
"learning_rate": 8.485e-05, |
|
"loss": 0.4379, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 1.4656616415410384, |
|
"grad_norm": 2.1464734077453613, |
|
"learning_rate": 8.735000000000001e-05, |
|
"loss": 0.4393, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 1.507537688442211, |
|
"grad_norm": 2.01698637008667, |
|
"learning_rate": 8.985e-05, |
|
"loss": 0.4341, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 1.5494137353433834, |
|
"grad_norm": 2.259115219116211, |
|
"learning_rate": 9.235000000000001e-05, |
|
"loss": 0.4324, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 1.591289782244556, |
|
"grad_norm": 2.0072858333587646, |
|
"learning_rate": 9.485e-05, |
|
"loss": 0.4327, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 1.6331658291457285, |
|
"grad_norm": 2.060303211212158, |
|
"learning_rate": 9.735000000000001e-05, |
|
"loss": 0.4279, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 1.675041876046901, |
|
"grad_norm": 1.2846741676330566, |
|
"learning_rate": 9.985000000000001e-05, |
|
"loss": 0.429, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 1.675041876046901, |
|
"eval_loss": 0.3953319191932678, |
|
"eval_runtime": 217.8108, |
|
"eval_samples_per_second": 38.974, |
|
"eval_steps_per_second": 4.876, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 1.7169179229480735, |
|
"grad_norm": 2.253840923309326, |
|
"learning_rate": 9.953e-05, |
|
"loss": 0.4264, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 1.758793969849246, |
|
"grad_norm": 1.3114299774169922, |
|
"learning_rate": 9.903e-05, |
|
"loss": 0.4355, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 1.8006700167504186, |
|
"grad_norm": 1.8178610801696777, |
|
"learning_rate": 9.853e-05, |
|
"loss": 0.4256, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 1.8425460636515911, |
|
"grad_norm": 2.1193735599517822, |
|
"learning_rate": 9.803e-05, |
|
"loss": 0.4305, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 1.8844221105527639, |
|
"grad_norm": 1.6871263980865479, |
|
"learning_rate": 9.753e-05, |
|
"loss": 0.4106, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 1.9262981574539364, |
|
"grad_norm": 1.2699921131134033, |
|
"learning_rate": 9.703000000000001e-05, |
|
"loss": 0.4156, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 1.968174204355109, |
|
"grad_norm": 1.7966769933700562, |
|
"learning_rate": 9.653000000000001e-05, |
|
"loss": 0.4257, |
|
"step": 2350 |
|
}, |
|
{ |
|
"epoch": 2.0100502512562812, |
|
"grad_norm": 1.7838683128356934, |
|
"learning_rate": 9.603000000000001e-05, |
|
"loss": 0.4156, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 2.0519262981574538, |
|
"grad_norm": 1.4161527156829834, |
|
"learning_rate": 9.553e-05, |
|
"loss": 0.4096, |
|
"step": 2450 |
|
}, |
|
{ |
|
"epoch": 2.0938023450586263, |
|
"grad_norm": 1.5864896774291992, |
|
"learning_rate": 9.503e-05, |
|
"loss": 0.4168, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 2.0938023450586263, |
|
"eval_loss": 0.3701198697090149, |
|
"eval_runtime": 220.4399, |
|
"eval_samples_per_second": 38.509, |
|
"eval_steps_per_second": 4.818, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 2.135678391959799, |
|
"grad_norm": 1.2460856437683105, |
|
"learning_rate": 9.453e-05, |
|
"loss": 0.415, |
|
"step": 2550 |
|
}, |
|
{ |
|
"epoch": 2.1775544388609713, |
|
"grad_norm": 1.994315266609192, |
|
"learning_rate": 9.403e-05, |
|
"loss": 0.4075, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 2.219430485762144, |
|
"grad_norm": 2.148324966430664, |
|
"learning_rate": 9.353000000000001e-05, |
|
"loss": 0.4077, |
|
"step": 2650 |
|
}, |
|
{ |
|
"epoch": 2.2613065326633164, |
|
"grad_norm": 1.7242035865783691, |
|
"learning_rate": 9.303000000000001e-05, |
|
"loss": 0.4044, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 2.3031825795644894, |
|
"grad_norm": 1.912110447883606, |
|
"learning_rate": 9.253000000000001e-05, |
|
"loss": 0.4075, |
|
"step": 2750 |
|
}, |
|
{ |
|
"epoch": 2.3450586264656614, |
|
"grad_norm": 1.3661189079284668, |
|
"learning_rate": 9.203000000000001e-05, |
|
"loss": 0.4041, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 2.3869346733668344, |
|
"grad_norm": 1.7083531618118286, |
|
"learning_rate": 9.153e-05, |
|
"loss": 0.396, |
|
"step": 2850 |
|
}, |
|
{ |
|
"epoch": 2.4288107202680065, |
|
"grad_norm": 1.7829670906066895, |
|
"learning_rate": 9.103e-05, |
|
"loss": 0.4042, |
|
"step": 2900 |
|
}, |
|
{ |
|
"epoch": 2.4706867671691795, |
|
"grad_norm": 2.188753366470337, |
|
"learning_rate": 9.053e-05, |
|
"loss": 0.3963, |
|
"step": 2950 |
|
}, |
|
{ |
|
"epoch": 2.5125628140703515, |
|
"grad_norm": 1.2608036994934082, |
|
"learning_rate": 9.003e-05, |
|
"loss": 0.4021, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 2.5125628140703515, |
|
"eval_loss": 0.3613213300704956, |
|
"eval_runtime": 214.7226, |
|
"eval_samples_per_second": 39.535, |
|
"eval_steps_per_second": 4.946, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 2.5544388609715245, |
|
"grad_norm": 1.501607060432434, |
|
"learning_rate": 8.953e-05, |
|
"loss": 0.3897, |
|
"step": 3050 |
|
}, |
|
{ |
|
"epoch": 2.5963149078726966, |
|
"grad_norm": 1.321588397026062, |
|
"learning_rate": 8.903e-05, |
|
"loss": 0.3948, |
|
"step": 3100 |
|
}, |
|
{ |
|
"epoch": 2.6381909547738696, |
|
"grad_norm": 1.5581566095352173, |
|
"learning_rate": 8.853000000000001e-05, |
|
"loss": 0.4005, |
|
"step": 3150 |
|
}, |
|
{ |
|
"epoch": 2.6800670016750416, |
|
"grad_norm": 1.3824856281280518, |
|
"learning_rate": 8.803e-05, |
|
"loss": 0.3981, |
|
"step": 3200 |
|
}, |
|
{ |
|
"epoch": 2.7219430485762146, |
|
"grad_norm": 1.7730051279067993, |
|
"learning_rate": 8.753e-05, |
|
"loss": 0.3982, |
|
"step": 3250 |
|
}, |
|
{ |
|
"epoch": 2.7638190954773867, |
|
"grad_norm": 1.516622543334961, |
|
"learning_rate": 8.703e-05, |
|
"loss": 0.3906, |
|
"step": 3300 |
|
}, |
|
{ |
|
"epoch": 2.8056951423785597, |
|
"grad_norm": 1.2814849615097046, |
|
"learning_rate": 8.653e-05, |
|
"loss": 0.3859, |
|
"step": 3350 |
|
}, |
|
{ |
|
"epoch": 2.8475711892797317, |
|
"grad_norm": 1.239619255065918, |
|
"learning_rate": 8.603e-05, |
|
"loss": 0.3942, |
|
"step": 3400 |
|
}, |
|
{ |
|
"epoch": 2.8894472361809047, |
|
"grad_norm": 2.24064040184021, |
|
"learning_rate": 8.553e-05, |
|
"loss": 0.3938, |
|
"step": 3450 |
|
}, |
|
{ |
|
"epoch": 2.931323283082077, |
|
"grad_norm": 1.1459012031555176, |
|
"learning_rate": 8.503e-05, |
|
"loss": 0.3925, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 2.931323283082077, |
|
"eval_loss": 0.35087427496910095, |
|
"eval_runtime": 217.1018, |
|
"eval_samples_per_second": 39.101, |
|
"eval_steps_per_second": 4.892, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 2.9731993299832498, |
|
"grad_norm": 1.8844960927963257, |
|
"learning_rate": 8.453e-05, |
|
"loss": 0.3803, |
|
"step": 3550 |
|
}, |
|
{ |
|
"epoch": 3.0150753768844223, |
|
"grad_norm": 1.5738004446029663, |
|
"learning_rate": 8.403000000000001e-05, |
|
"loss": 0.3895, |
|
"step": 3600 |
|
}, |
|
{ |
|
"epoch": 3.056951423785595, |
|
"grad_norm": 1.19874906539917, |
|
"learning_rate": 8.353000000000001e-05, |
|
"loss": 0.3902, |
|
"step": 3650 |
|
}, |
|
{ |
|
"epoch": 3.0988274706867673, |
|
"grad_norm": 2.4810609817504883, |
|
"learning_rate": 8.303000000000001e-05, |
|
"loss": 0.3863, |
|
"step": 3700 |
|
}, |
|
{ |
|
"epoch": 3.14070351758794, |
|
"grad_norm": 1.4734585285186768, |
|
"learning_rate": 8.253000000000001e-05, |
|
"loss": 0.391, |
|
"step": 3750 |
|
}, |
|
{ |
|
"epoch": 3.1825795644891124, |
|
"grad_norm": 1.6449118852615356, |
|
"learning_rate": 8.203000000000001e-05, |
|
"loss": 0.3888, |
|
"step": 3800 |
|
}, |
|
{ |
|
"epoch": 3.224455611390285, |
|
"grad_norm": 2.5607736110687256, |
|
"learning_rate": 8.153e-05, |
|
"loss": 0.3893, |
|
"step": 3850 |
|
}, |
|
{ |
|
"epoch": 3.2663316582914574, |
|
"grad_norm": 1.0670469999313354, |
|
"learning_rate": 8.103e-05, |
|
"loss": 0.3853, |
|
"step": 3900 |
|
}, |
|
{ |
|
"epoch": 3.30820770519263, |
|
"grad_norm": 2.191486120223999, |
|
"learning_rate": 8.053e-05, |
|
"loss": 0.3832, |
|
"step": 3950 |
|
}, |
|
{ |
|
"epoch": 3.3500837520938025, |
|
"grad_norm": 1.0259532928466797, |
|
"learning_rate": 8.003e-05, |
|
"loss": 0.3839, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 3.3500837520938025, |
|
"eval_loss": 0.35058465600013733, |
|
"eval_runtime": 214.7925, |
|
"eval_samples_per_second": 39.522, |
|
"eval_steps_per_second": 4.944, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 3.391959798994975, |
|
"grad_norm": 2.1106534004211426, |
|
"learning_rate": 7.953e-05, |
|
"loss": 0.4088, |
|
"step": 4050 |
|
}, |
|
{ |
|
"epoch": 3.4338358458961475, |
|
"grad_norm": 1.7350833415985107, |
|
"learning_rate": 7.903000000000001e-05, |
|
"loss": 0.3923, |
|
"step": 4100 |
|
}, |
|
{ |
|
"epoch": 3.47571189279732, |
|
"grad_norm": 1.1461760997772217, |
|
"learning_rate": 7.853000000000001e-05, |
|
"loss": 0.3924, |
|
"step": 4150 |
|
}, |
|
{ |
|
"epoch": 3.5175879396984926, |
|
"grad_norm": 1.3420217037200928, |
|
"learning_rate": 7.803e-05, |
|
"loss": 0.3833, |
|
"step": 4200 |
|
}, |
|
{ |
|
"epoch": 3.559463986599665, |
|
"grad_norm": 1.1494131088256836, |
|
"learning_rate": 7.753e-05, |
|
"loss": 0.3821, |
|
"step": 4250 |
|
}, |
|
{ |
|
"epoch": 3.6013400335008376, |
|
"grad_norm": 2.058499813079834, |
|
"learning_rate": 7.703e-05, |
|
"loss": 0.3784, |
|
"step": 4300 |
|
}, |
|
{ |
|
"epoch": 3.64321608040201, |
|
"grad_norm": 1.8158782720565796, |
|
"learning_rate": 7.653e-05, |
|
"loss": 0.3833, |
|
"step": 4350 |
|
}, |
|
{ |
|
"epoch": 3.6850921273031827, |
|
"grad_norm": 1.8433794975280762, |
|
"learning_rate": 7.603e-05, |
|
"loss": 0.3768, |
|
"step": 4400 |
|
}, |
|
{ |
|
"epoch": 3.726968174204355, |
|
"grad_norm": 1.6182619333267212, |
|
"learning_rate": 7.553e-05, |
|
"loss": 0.3772, |
|
"step": 4450 |
|
}, |
|
{ |
|
"epoch": 3.7688442211055277, |
|
"grad_norm": 1.5222620964050293, |
|
"learning_rate": 7.503e-05, |
|
"loss": 0.3798, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 3.7688442211055277, |
|
"eval_loss": 0.3422691822052002, |
|
"eval_runtime": 214.805, |
|
"eval_samples_per_second": 39.52, |
|
"eval_steps_per_second": 4.944, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 3.8107202680067003, |
|
"grad_norm": 1.981819748878479, |
|
"learning_rate": 7.453e-05, |
|
"loss": 0.3739, |
|
"step": 4550 |
|
}, |
|
{ |
|
"epoch": 3.852596314907873, |
|
"grad_norm": 1.1871320009231567, |
|
"learning_rate": 7.403e-05, |
|
"loss": 0.3769, |
|
"step": 4600 |
|
}, |
|
{ |
|
"epoch": 3.8944723618090453, |
|
"grad_norm": 1.5213547945022583, |
|
"learning_rate": 7.353e-05, |
|
"loss": 0.3734, |
|
"step": 4650 |
|
}, |
|
{ |
|
"epoch": 3.936348408710218, |
|
"grad_norm": 1.6221413612365723, |
|
"learning_rate": 7.303e-05, |
|
"loss": 0.3704, |
|
"step": 4700 |
|
}, |
|
{ |
|
"epoch": 3.9782244556113904, |
|
"grad_norm": 2.075160264968872, |
|
"learning_rate": 7.253e-05, |
|
"loss": 0.3701, |
|
"step": 4750 |
|
}, |
|
{ |
|
"epoch": 4.0201005025125625, |
|
"grad_norm": 1.217353105545044, |
|
"learning_rate": 7.203000000000001e-05, |
|
"loss": 0.3747, |
|
"step": 4800 |
|
}, |
|
{ |
|
"epoch": 4.061976549413735, |
|
"grad_norm": 1.9273918867111206, |
|
"learning_rate": 7.153000000000001e-05, |
|
"loss": 0.3718, |
|
"step": 4850 |
|
}, |
|
{ |
|
"epoch": 4.1038525963149075, |
|
"grad_norm": 1.2843226194381714, |
|
"learning_rate": 7.103e-05, |
|
"loss": 0.3689, |
|
"step": 4900 |
|
}, |
|
{ |
|
"epoch": 4.1457286432160805, |
|
"grad_norm": 2.8989481925964355, |
|
"learning_rate": 7.053e-05, |
|
"loss": 0.3785, |
|
"step": 4950 |
|
}, |
|
{ |
|
"epoch": 4.187604690117253, |
|
"grad_norm": 1.6432912349700928, |
|
"learning_rate": 7.003e-05, |
|
"loss": 0.3693, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 4.187604690117253, |
|
"eval_loss": 0.3375495374202728, |
|
"eval_runtime": 215.1803, |
|
"eval_samples_per_second": 39.451, |
|
"eval_steps_per_second": 4.935, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 4.2294807370184255, |
|
"grad_norm": 1.5097157955169678, |
|
"learning_rate": 6.953000000000001e-05, |
|
"loss": 0.3731, |
|
"step": 5050 |
|
}, |
|
{ |
|
"epoch": 4.271356783919598, |
|
"grad_norm": 1.1934735774993896, |
|
"learning_rate": 6.903000000000001e-05, |
|
"loss": 0.3683, |
|
"step": 5100 |
|
}, |
|
{ |
|
"epoch": 4.313232830820771, |
|
"grad_norm": 1.1494152545928955, |
|
"learning_rate": 6.853000000000001e-05, |
|
"loss": 0.3752, |
|
"step": 5150 |
|
}, |
|
{ |
|
"epoch": 4.355108877721943, |
|
"grad_norm": 1.404390811920166, |
|
"learning_rate": 6.803000000000001e-05, |
|
"loss": 0.3697, |
|
"step": 5200 |
|
}, |
|
{ |
|
"epoch": 4.396984924623116, |
|
"grad_norm": 1.0370264053344727, |
|
"learning_rate": 6.753e-05, |
|
"loss": 0.3688, |
|
"step": 5250 |
|
}, |
|
{ |
|
"epoch": 4.438860971524288, |
|
"grad_norm": 1.6091570854187012, |
|
"learning_rate": 6.703e-05, |
|
"loss": 0.3692, |
|
"step": 5300 |
|
}, |
|
{ |
|
"epoch": 4.480737018425461, |
|
"grad_norm": 2.325129270553589, |
|
"learning_rate": 6.653e-05, |
|
"loss": 0.3711, |
|
"step": 5350 |
|
}, |
|
{ |
|
"epoch": 4.522613065326633, |
|
"grad_norm": 1.2980619668960571, |
|
"learning_rate": 6.603e-05, |
|
"loss": 0.379, |
|
"step": 5400 |
|
}, |
|
{ |
|
"epoch": 4.564489112227806, |
|
"grad_norm": 1.407894253730774, |
|
"learning_rate": 6.553e-05, |
|
"loss": 0.3678, |
|
"step": 5450 |
|
}, |
|
{ |
|
"epoch": 4.606365159128979, |
|
"grad_norm": 1.9543007612228394, |
|
"learning_rate": 6.503e-05, |
|
"loss": 0.3712, |
|
"step": 5500 |
|
}, |
|
{ |
|
"epoch": 4.606365159128979, |
|
"eval_loss": 0.33667415380477905, |
|
"eval_runtime": 214.7408, |
|
"eval_samples_per_second": 39.531, |
|
"eval_steps_per_second": 4.945, |
|
"step": 5500 |
|
}, |
|
{ |
|
"epoch": 4.648241206030151, |
|
"grad_norm": 1.7211672067642212, |
|
"learning_rate": 6.453000000000001e-05, |
|
"loss": 0.369, |
|
"step": 5550 |
|
}, |
|
{ |
|
"epoch": 4.690117252931323, |
|
"grad_norm": 1.2072224617004395, |
|
"learning_rate": 6.403e-05, |
|
"loss": 0.3661, |
|
"step": 5600 |
|
}, |
|
{ |
|
"epoch": 4.731993299832496, |
|
"grad_norm": 1.994110345840454, |
|
"learning_rate": 6.353e-05, |
|
"loss": 0.3685, |
|
"step": 5650 |
|
}, |
|
{ |
|
"epoch": 4.773869346733669, |
|
"grad_norm": 1.9967550039291382, |
|
"learning_rate": 6.303e-05, |
|
"loss": 0.3647, |
|
"step": 5700 |
|
}, |
|
{ |
|
"epoch": 4.815745393634841, |
|
"grad_norm": 1.83970046043396, |
|
"learning_rate": 6.253e-05, |
|
"loss": 0.369, |
|
"step": 5750 |
|
}, |
|
{ |
|
"epoch": 4.857621440536013, |
|
"grad_norm": 1.1974071264266968, |
|
"learning_rate": 6.203e-05, |
|
"loss": 0.3662, |
|
"step": 5800 |
|
}, |
|
{ |
|
"epoch": 4.899497487437186, |
|
"grad_norm": 1.50179123878479, |
|
"learning_rate": 6.153e-05, |
|
"loss": 0.3663, |
|
"step": 5850 |
|
}, |
|
{ |
|
"epoch": 4.941373534338359, |
|
"grad_norm": 1.105136513710022, |
|
"learning_rate": 6.103e-05, |
|
"loss": 0.3654, |
|
"step": 5900 |
|
}, |
|
{ |
|
"epoch": 4.983249581239531, |
|
"grad_norm": 1.079147219657898, |
|
"learning_rate": 6.053e-05, |
|
"loss": 0.3613, |
|
"step": 5950 |
|
}, |
|
{ |
|
"epoch": 5.025125628140704, |
|
"grad_norm": 1.25212824344635, |
|
"learning_rate": 6.003e-05, |
|
"loss": 0.3668, |
|
"step": 6000 |
|
}, |
|
{ |
|
"epoch": 5.025125628140704, |
|
"eval_loss": 0.3315671682357788, |
|
"eval_runtime": 216.0413, |
|
"eval_samples_per_second": 39.293, |
|
"eval_steps_per_second": 4.916, |
|
"step": 6000 |
|
}, |
|
{ |
|
"epoch": 5.067001675041876, |
|
"grad_norm": 1.2671592235565186, |
|
"learning_rate": 5.953000000000001e-05, |
|
"loss": 0.3589, |
|
"step": 6050 |
|
}, |
|
{ |
|
"epoch": 5.108877721943049, |
|
"grad_norm": 2.1920173168182373, |
|
"learning_rate": 5.903000000000001e-05, |
|
"loss": 0.3647, |
|
"step": 6100 |
|
}, |
|
{ |
|
"epoch": 5.150753768844221, |
|
"grad_norm": 0.9611026048660278, |
|
"learning_rate": 5.8530000000000004e-05, |
|
"loss": 0.3639, |
|
"step": 6150 |
|
}, |
|
{ |
|
"epoch": 5.192629815745394, |
|
"grad_norm": 1.5600687265396118, |
|
"learning_rate": 5.8030000000000005e-05, |
|
"loss": 0.3608, |
|
"step": 6200 |
|
}, |
|
{ |
|
"epoch": 5.234505862646566, |
|
"grad_norm": 1.410090446472168, |
|
"learning_rate": 5.7530000000000007e-05, |
|
"loss": 0.3602, |
|
"step": 6250 |
|
}, |
|
{ |
|
"epoch": 5.276381909547739, |
|
"grad_norm": 1.2535935640335083, |
|
"learning_rate": 5.703000000000001e-05, |
|
"loss": 0.3611, |
|
"step": 6300 |
|
}, |
|
{ |
|
"epoch": 5.318257956448911, |
|
"grad_norm": 1.2604366540908813, |
|
"learning_rate": 5.653e-05, |
|
"loss": 0.3609, |
|
"step": 6350 |
|
}, |
|
{ |
|
"epoch": 5.360134003350084, |
|
"grad_norm": 1.4509762525558472, |
|
"learning_rate": 5.6030000000000004e-05, |
|
"loss": 0.366, |
|
"step": 6400 |
|
}, |
|
{ |
|
"epoch": 5.402010050251256, |
|
"grad_norm": 2.065939426422119, |
|
"learning_rate": 5.5530000000000005e-05, |
|
"loss": 0.3622, |
|
"step": 6450 |
|
}, |
|
{ |
|
"epoch": 5.443886097152429, |
|
"grad_norm": 1.6851164102554321, |
|
"learning_rate": 5.503000000000001e-05, |
|
"loss": 0.3635, |
|
"step": 6500 |
|
}, |
|
{ |
|
"epoch": 5.443886097152429, |
|
"eval_loss": 0.3290642201900482, |
|
"eval_runtime": 216.182, |
|
"eval_samples_per_second": 39.268, |
|
"eval_steps_per_second": 4.913, |
|
"step": 6500 |
|
}, |
|
{ |
|
"epoch": 5.485762144053601, |
|
"grad_norm": 1.2512905597686768, |
|
"learning_rate": 5.453e-05, |
|
"loss": 0.3595, |
|
"step": 6550 |
|
}, |
|
{ |
|
"epoch": 5.527638190954773, |
|
"grad_norm": 1.0366383790969849, |
|
"learning_rate": 5.403e-05, |
|
"loss": 0.3654, |
|
"step": 6600 |
|
}, |
|
{ |
|
"epoch": 5.569514237855946, |
|
"grad_norm": 1.6947450637817383, |
|
"learning_rate": 5.3530000000000004e-05, |
|
"loss": 0.3573, |
|
"step": 6650 |
|
}, |
|
{ |
|
"epoch": 5.611390284757119, |
|
"grad_norm": 1.5837823152542114, |
|
"learning_rate": 5.303e-05, |
|
"loss": 0.3575, |
|
"step": 6700 |
|
}, |
|
{ |
|
"epoch": 5.653266331658291, |
|
"grad_norm": 1.6993337869644165, |
|
"learning_rate": 5.253e-05, |
|
"loss": 0.3606, |
|
"step": 6750 |
|
}, |
|
{ |
|
"epoch": 5.695142378559464, |
|
"grad_norm": 1.208640456199646, |
|
"learning_rate": 5.203e-05, |
|
"loss": 0.3608, |
|
"step": 6800 |
|
}, |
|
{ |
|
"epoch": 5.7370184254606365, |
|
"grad_norm": 1.5352587699890137, |
|
"learning_rate": 5.153e-05, |
|
"loss": 0.3613, |
|
"step": 6850 |
|
}, |
|
{ |
|
"epoch": 5.778894472361809, |
|
"grad_norm": 1.9643837213516235, |
|
"learning_rate": 5.103e-05, |
|
"loss": 0.3542, |
|
"step": 6900 |
|
}, |
|
{ |
|
"epoch": 5.8207705192629815, |
|
"grad_norm": 0.9766552448272705, |
|
"learning_rate": 5.053e-05, |
|
"loss": 0.3579, |
|
"step": 6950 |
|
}, |
|
{ |
|
"epoch": 5.8626465661641545, |
|
"grad_norm": 1.5910046100616455, |
|
"learning_rate": 5.003e-05, |
|
"loss": 0.3543, |
|
"step": 7000 |
|
}, |
|
{ |
|
"epoch": 5.8626465661641545, |
|
"eval_loss": 0.3250139653682709, |
|
"eval_runtime": 213.3758, |
|
"eval_samples_per_second": 39.784, |
|
"eval_steps_per_second": 4.977, |
|
"step": 7000 |
|
}, |
|
{ |
|
"epoch": 5.9045226130653266, |
|
"grad_norm": 2.036315679550171, |
|
"learning_rate": 4.953e-05, |
|
"loss": 0.3615, |
|
"step": 7050 |
|
}, |
|
{ |
|
"epoch": 5.9463986599664995, |
|
"grad_norm": 1.7725523710250854, |
|
"learning_rate": 4.903e-05, |
|
"loss": 0.3569, |
|
"step": 7100 |
|
}, |
|
{ |
|
"epoch": 5.988274706867672, |
|
"grad_norm": 0.9128499031066895, |
|
"learning_rate": 4.8530000000000005e-05, |
|
"loss": 0.3607, |
|
"step": 7150 |
|
}, |
|
{ |
|
"epoch": 6.030150753768845, |
|
"grad_norm": 1.2767729759216309, |
|
"learning_rate": 4.8030000000000006e-05, |
|
"loss": 0.3548, |
|
"step": 7200 |
|
}, |
|
{ |
|
"epoch": 6.072026800670017, |
|
"grad_norm": 1.359195351600647, |
|
"learning_rate": 4.753e-05, |
|
"loss": 0.3595, |
|
"step": 7250 |
|
}, |
|
{ |
|
"epoch": 6.11390284757119, |
|
"grad_norm": 1.290307641029358, |
|
"learning_rate": 4.703e-05, |
|
"loss": 0.3515, |
|
"step": 7300 |
|
}, |
|
{ |
|
"epoch": 6.155778894472362, |
|
"grad_norm": 1.5797748565673828, |
|
"learning_rate": 4.6530000000000003e-05, |
|
"loss": 0.3594, |
|
"step": 7350 |
|
}, |
|
{ |
|
"epoch": 6.197654941373535, |
|
"grad_norm": 3.1323928833007812, |
|
"learning_rate": 4.603e-05, |
|
"loss": 0.3531, |
|
"step": 7400 |
|
}, |
|
{ |
|
"epoch": 6.239530988274707, |
|
"grad_norm": 1.1986955404281616, |
|
"learning_rate": 4.553e-05, |
|
"loss": 0.3503, |
|
"step": 7450 |
|
}, |
|
{ |
|
"epoch": 6.28140703517588, |
|
"grad_norm": 2.56941294670105, |
|
"learning_rate": 4.503e-05, |
|
"loss": 0.3526, |
|
"step": 7500 |
|
}, |
|
{ |
|
"epoch": 6.28140703517588, |
|
"eval_loss": 0.32207000255584717, |
|
"eval_runtime": 213.5905, |
|
"eval_samples_per_second": 39.744, |
|
"eval_steps_per_second": 4.972, |
|
"step": 7500 |
|
}, |
|
{ |
|
"epoch": 6.323283082077052, |
|
"grad_norm": 1.4174665212631226, |
|
"learning_rate": 4.453e-05, |
|
"loss": 0.3597, |
|
"step": 7550 |
|
}, |
|
{ |
|
"epoch": 6.365159128978225, |
|
"grad_norm": 1.4059972763061523, |
|
"learning_rate": 4.4030000000000004e-05, |
|
"loss": 0.3495, |
|
"step": 7600 |
|
}, |
|
{ |
|
"epoch": 6.407035175879397, |
|
"grad_norm": 1.2972431182861328, |
|
"learning_rate": 4.3530000000000005e-05, |
|
"loss": 0.3534, |
|
"step": 7650 |
|
}, |
|
{ |
|
"epoch": 6.44891122278057, |
|
"grad_norm": 1.3449029922485352, |
|
"learning_rate": 4.3030000000000006e-05, |
|
"loss": 0.3505, |
|
"step": 7700 |
|
}, |
|
{ |
|
"epoch": 6.490787269681742, |
|
"grad_norm": 1.2508536577224731, |
|
"learning_rate": 4.253e-05, |
|
"loss": 0.3583, |
|
"step": 7750 |
|
}, |
|
{ |
|
"epoch": 6.532663316582915, |
|
"grad_norm": 1.881706714630127, |
|
"learning_rate": 4.203e-05, |
|
"loss": 0.3519, |
|
"step": 7800 |
|
}, |
|
{ |
|
"epoch": 6.574539363484087, |
|
"grad_norm": 1.0013474225997925, |
|
"learning_rate": 4.1530000000000004e-05, |
|
"loss": 0.3512, |
|
"step": 7850 |
|
}, |
|
{ |
|
"epoch": 6.61641541038526, |
|
"grad_norm": 1.5754601955413818, |
|
"learning_rate": 4.103e-05, |
|
"loss": 0.3517, |
|
"step": 7900 |
|
}, |
|
{ |
|
"epoch": 6.658291457286432, |
|
"grad_norm": 1.0650445222854614, |
|
"learning_rate": 4.053e-05, |
|
"loss": 0.352, |
|
"step": 7950 |
|
}, |
|
{ |
|
"epoch": 6.700167504187605, |
|
"grad_norm": 0.8827164769172668, |
|
"learning_rate": 4.003e-05, |
|
"loss": 0.3525, |
|
"step": 8000 |
|
}, |
|
{ |
|
"epoch": 6.700167504187605, |
|
"eval_loss": 0.32177916169166565, |
|
"eval_runtime": 212.2379, |
|
"eval_samples_per_second": 39.998, |
|
"eval_steps_per_second": 5.004, |
|
"step": 8000 |
|
}, |
|
{ |
|
"epoch": 6.742043551088777, |
|
"grad_norm": 1.0491342544555664, |
|
"learning_rate": 3.953e-05, |
|
"loss": 0.3508, |
|
"step": 8050 |
|
}, |
|
{ |
|
"epoch": 6.78391959798995, |
|
"grad_norm": 1.8081963062286377, |
|
"learning_rate": 3.903e-05, |
|
"loss": 0.3598, |
|
"step": 8100 |
|
}, |
|
{ |
|
"epoch": 6.825795644891122, |
|
"grad_norm": 1.4452039003372192, |
|
"learning_rate": 3.853e-05, |
|
"loss": 0.3475, |
|
"step": 8150 |
|
}, |
|
{ |
|
"epoch": 6.867671691792295, |
|
"grad_norm": 1.9516081809997559, |
|
"learning_rate": 3.803000000000001e-05, |
|
"loss": 0.3576, |
|
"step": 8200 |
|
}, |
|
{ |
|
"epoch": 6.909547738693467, |
|
"grad_norm": 2.1795125007629395, |
|
"learning_rate": 3.753e-05, |
|
"loss": 0.3523, |
|
"step": 8250 |
|
}, |
|
{ |
|
"epoch": 6.95142378559464, |
|
"grad_norm": 1.8348519802093506, |
|
"learning_rate": 3.703e-05, |
|
"loss": 0.3554, |
|
"step": 8300 |
|
}, |
|
{ |
|
"epoch": 6.993299832495812, |
|
"grad_norm": 1.1464494466781616, |
|
"learning_rate": 3.6530000000000004e-05, |
|
"loss": 0.3558, |
|
"step": 8350 |
|
}, |
|
{ |
|
"epoch": 7.035175879396985, |
|
"grad_norm": 1.2294760942459106, |
|
"learning_rate": 3.6030000000000006e-05, |
|
"loss": 0.3502, |
|
"step": 8400 |
|
}, |
|
{ |
|
"epoch": 7.077051926298157, |
|
"grad_norm": 4.840690612792969, |
|
"learning_rate": 3.553e-05, |
|
"loss": 0.3534, |
|
"step": 8450 |
|
}, |
|
{ |
|
"epoch": 7.11892797319933, |
|
"grad_norm": 1.1307451725006104, |
|
"learning_rate": 3.503e-05, |
|
"loss": 0.3513, |
|
"step": 8500 |
|
}, |
|
{ |
|
"epoch": 7.11892797319933, |
|
"eval_loss": 0.31820428371429443, |
|
"eval_runtime": 211.821, |
|
"eval_samples_per_second": 40.076, |
|
"eval_steps_per_second": 5.014, |
|
"step": 8500 |
|
}, |
|
{ |
|
"epoch": 7.160804020100502, |
|
"grad_norm": 1.680982232093811, |
|
"learning_rate": 3.453e-05, |
|
"loss": 0.3464, |
|
"step": 8550 |
|
}, |
|
{ |
|
"epoch": 7.202680067001675, |
|
"grad_norm": 1.16056489944458, |
|
"learning_rate": 3.403e-05, |
|
"loss": 0.353, |
|
"step": 8600 |
|
}, |
|
{ |
|
"epoch": 7.244556113902847, |
|
"grad_norm": 1.0772472620010376, |
|
"learning_rate": 3.353e-05, |
|
"loss": 0.3515, |
|
"step": 8650 |
|
}, |
|
{ |
|
"epoch": 7.28643216080402, |
|
"grad_norm": 1.8330378532409668, |
|
"learning_rate": 3.303e-05, |
|
"loss": 0.3487, |
|
"step": 8700 |
|
}, |
|
{ |
|
"epoch": 7.328308207705192, |
|
"grad_norm": 3.8991479873657227, |
|
"learning_rate": 3.253e-05, |
|
"loss": 0.3504, |
|
"step": 8750 |
|
}, |
|
{ |
|
"epoch": 7.370184254606365, |
|
"grad_norm": 1.5016411542892456, |
|
"learning_rate": 3.2029999999999997e-05, |
|
"loss": 0.3451, |
|
"step": 8800 |
|
}, |
|
{ |
|
"epoch": 7.4120603015075375, |
|
"grad_norm": 1.6319200992584229, |
|
"learning_rate": 3.154e-05, |
|
"loss": 0.3487, |
|
"step": 8850 |
|
}, |
|
{ |
|
"epoch": 7.45393634840871, |
|
"grad_norm": 1.5115652084350586, |
|
"learning_rate": 3.104e-05, |
|
"loss": 0.3451, |
|
"step": 8900 |
|
}, |
|
{ |
|
"epoch": 7.4958123953098825, |
|
"grad_norm": 1.1861990690231323, |
|
"learning_rate": 3.054e-05, |
|
"loss": 0.3467, |
|
"step": 8950 |
|
}, |
|
{ |
|
"epoch": 7.5376884422110555, |
|
"grad_norm": 1.0466668605804443, |
|
"learning_rate": 3.004e-05, |
|
"loss": 0.346, |
|
"step": 9000 |
|
}, |
|
{ |
|
"epoch": 7.5376884422110555, |
|
"eval_loss": 0.316290020942688, |
|
"eval_runtime": 212.3656, |
|
"eval_samples_per_second": 39.974, |
|
"eval_steps_per_second": 5.001, |
|
"step": 9000 |
|
}, |
|
{ |
|
"epoch": 7.579564489112228, |
|
"grad_norm": 1.9901808500289917, |
|
"learning_rate": 2.9540000000000002e-05, |
|
"loss": 0.3469, |
|
"step": 9050 |
|
}, |
|
{ |
|
"epoch": 7.6214405360134005, |
|
"grad_norm": 1.5441765785217285, |
|
"learning_rate": 2.904e-05, |
|
"loss": 0.3452, |
|
"step": 9100 |
|
}, |
|
{ |
|
"epoch": 7.663316582914573, |
|
"grad_norm": 1.1541210412979126, |
|
"learning_rate": 2.854e-05, |
|
"loss": 0.3453, |
|
"step": 9150 |
|
}, |
|
{ |
|
"epoch": 7.705192629815746, |
|
"grad_norm": 1.121564269065857, |
|
"learning_rate": 2.804e-05, |
|
"loss": 0.3464, |
|
"step": 9200 |
|
}, |
|
{ |
|
"epoch": 7.747068676716918, |
|
"grad_norm": 1.4487674236297607, |
|
"learning_rate": 2.754e-05, |
|
"loss": 0.3535, |
|
"step": 9250 |
|
}, |
|
{ |
|
"epoch": 7.788944723618091, |
|
"grad_norm": 1.9906679391860962, |
|
"learning_rate": 2.704e-05, |
|
"loss": 0.35, |
|
"step": 9300 |
|
}, |
|
{ |
|
"epoch": 7.830820770519263, |
|
"grad_norm": 1.2831981182098389, |
|
"learning_rate": 2.6540000000000003e-05, |
|
"loss": 0.3494, |
|
"step": 9350 |
|
}, |
|
{ |
|
"epoch": 7.872696817420436, |
|
"grad_norm": 1.4484026432037354, |
|
"learning_rate": 2.6050000000000003e-05, |
|
"loss": 0.3494, |
|
"step": 9400 |
|
}, |
|
{ |
|
"epoch": 7.914572864321608, |
|
"grad_norm": 0.8587632179260254, |
|
"learning_rate": 2.555e-05, |
|
"loss": 0.3439, |
|
"step": 9450 |
|
}, |
|
{ |
|
"epoch": 7.956448911222781, |
|
"grad_norm": 1.0778433084487915, |
|
"learning_rate": 2.5050000000000002e-05, |
|
"loss": 0.3448, |
|
"step": 9500 |
|
}, |
|
{ |
|
"epoch": 7.956448911222781, |
|
"eval_loss": 0.3162495493888855, |
|
"eval_runtime": 212.2033, |
|
"eval_samples_per_second": 40.004, |
|
"eval_steps_per_second": 5.005, |
|
"step": 9500 |
|
} |
|
], |
|
"logging_steps": 50, |
|
"max_steps": 12000, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 11, |
|
"save_steps": 500, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": false |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 9.2392487225299e+16, |
|
"train_batch_size": 32, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |