|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 1.9992927864214993, |
|
"eval_steps": 35, |
|
"global_step": 712, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 51.75, |
|
"learning_rate": 8.571428571428572e-08, |
|
"loss": 1.9845, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"eval_loss": 2.040191650390625, |
|
"eval_runtime": 20.1394, |
|
"eval_samples_per_second": 64.997, |
|
"eval_steps_per_second": 64.997, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 48.75, |
|
"learning_rate": 1.7142857142857143e-07, |
|
"loss": 1.9423, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 45.5, |
|
"learning_rate": 2.5714285714285716e-07, |
|
"loss": 1.9446, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 56.25, |
|
"learning_rate": 3.4285714285714286e-07, |
|
"loss": 2.1833, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 55.25, |
|
"learning_rate": 4.2857142857142857e-07, |
|
"loss": 2.0796, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 49.75, |
|
"learning_rate": 5.142857142857143e-07, |
|
"loss": 1.9704, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 53.0, |
|
"learning_rate": 6.000000000000001e-07, |
|
"loss": 1.9781, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 49.0, |
|
"learning_rate": 6.857142857142857e-07, |
|
"loss": 1.9854, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 51.5, |
|
"learning_rate": 7.714285714285714e-07, |
|
"loss": 1.927, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 51.5, |
|
"learning_rate": 8.571428571428571e-07, |
|
"loss": 2.0879, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 48.5, |
|
"learning_rate": 9.428571428571429e-07, |
|
"loss": 1.8861, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 49.5, |
|
"learning_rate": 1.0285714285714286e-06, |
|
"loss": 1.9962, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 42.5, |
|
"learning_rate": 1.1142857142857143e-06, |
|
"loss": 1.904, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 38.0, |
|
"learning_rate": 1.2000000000000002e-06, |
|
"loss": 1.8107, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 37.25, |
|
"learning_rate": 1.2857142857142856e-06, |
|
"loss": 1.8908, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 35.75, |
|
"learning_rate": 1.3714285714285715e-06, |
|
"loss": 1.9207, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 32.25, |
|
"learning_rate": 1.4571428571428571e-06, |
|
"loss": 1.8448, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 28.625, |
|
"learning_rate": 1.5428571428571428e-06, |
|
"loss": 1.7089, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 25.5, |
|
"learning_rate": 1.6285714285714284e-06, |
|
"loss": 1.6403, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 23.75, |
|
"learning_rate": 1.7142857142857143e-06, |
|
"loss": 1.867, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 21.875, |
|
"learning_rate": 1.8e-06, |
|
"loss": 1.6772, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 20.0, |
|
"learning_rate": 1.8857142857142858e-06, |
|
"loss": 1.4738, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 19.375, |
|
"learning_rate": 1.9714285714285714e-06, |
|
"loss": 1.584, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 18.25, |
|
"learning_rate": 2.0571428571428573e-06, |
|
"loss": 1.5077, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 15.625, |
|
"learning_rate": 2.142857142857143e-06, |
|
"loss": 1.3541, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 17.375, |
|
"learning_rate": 2.2285714285714286e-06, |
|
"loss": 1.4149, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 14.25, |
|
"learning_rate": 2.3142857142857145e-06, |
|
"loss": 1.2507, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 13.125, |
|
"learning_rate": 2.4000000000000003e-06, |
|
"loss": 1.2992, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 10.75, |
|
"learning_rate": 2.4857142857142858e-06, |
|
"loss": 1.2994, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 9.1875, |
|
"learning_rate": 2.571428571428571e-06, |
|
"loss": 1.2327, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 9.3125, |
|
"learning_rate": 2.657142857142857e-06, |
|
"loss": 1.3238, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 5.71875, |
|
"learning_rate": 2.742857142857143e-06, |
|
"loss": 1.2781, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 6.5, |
|
"learning_rate": 2.8285714285714288e-06, |
|
"loss": 1.2028, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 5.96875, |
|
"learning_rate": 2.9142857142857142e-06, |
|
"loss": 1.3073, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 4.875, |
|
"learning_rate": 3e-06, |
|
"loss": 1.1666, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"eval_loss": 1.2127766609191895, |
|
"eval_runtime": 20.1589, |
|
"eval_samples_per_second": 64.934, |
|
"eval_steps_per_second": 64.934, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 4.71875, |
|
"learning_rate": 3.0857142857142855e-06, |
|
"loss": 1.1807, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 4.65625, |
|
"learning_rate": 3.1714285714285714e-06, |
|
"loss": 1.178, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 4.125, |
|
"learning_rate": 3.257142857142857e-06, |
|
"loss": 1.08, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 4.0625, |
|
"learning_rate": 3.342857142857143e-06, |
|
"loss": 1.2125, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 3.84375, |
|
"learning_rate": 3.4285714285714285e-06, |
|
"loss": 1.2026, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 3.875, |
|
"learning_rate": 3.5142857142857144e-06, |
|
"loss": 1.1884, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 3.875, |
|
"learning_rate": 3.6e-06, |
|
"loss": 1.3009, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 3.984375, |
|
"learning_rate": 3.685714285714286e-06, |
|
"loss": 1.0866, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 3.96875, |
|
"learning_rate": 3.7714285714285716e-06, |
|
"loss": 1.1396, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 3.796875, |
|
"learning_rate": 3.857142857142858e-06, |
|
"loss": 1.0933, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 3.90625, |
|
"learning_rate": 3.942857142857143e-06, |
|
"loss": 1.0827, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 3.515625, |
|
"learning_rate": 4.028571428571429e-06, |
|
"loss": 1.1054, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 3.453125, |
|
"learning_rate": 4.114285714285715e-06, |
|
"loss": 1.1369, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 3.265625, |
|
"learning_rate": 4.2e-06, |
|
"loss": 1.086, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 3.46875, |
|
"learning_rate": 4.285714285714286e-06, |
|
"loss": 1.0688, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 3.421875, |
|
"learning_rate": 4.371428571428571e-06, |
|
"loss": 1.0986, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 2.84375, |
|
"learning_rate": 4.457142857142857e-06, |
|
"loss": 1.0268, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 2.796875, |
|
"learning_rate": 4.542857142857143e-06, |
|
"loss": 1.0134, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 2.625, |
|
"learning_rate": 4.628571428571429e-06, |
|
"loss": 1.0973, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 2.59375, |
|
"learning_rate": 4.714285714285714e-06, |
|
"loss": 1.1536, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 2.203125, |
|
"learning_rate": 4.800000000000001e-06, |
|
"loss": 1.034, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 2.265625, |
|
"learning_rate": 4.885714285714286e-06, |
|
"loss": 1.1263, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 2.0, |
|
"learning_rate": 4.9714285714285715e-06, |
|
"loss": 1.1458, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 2.125, |
|
"learning_rate": 5.057142857142857e-06, |
|
"loss": 1.0678, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 2.046875, |
|
"learning_rate": 5.142857142857142e-06, |
|
"loss": 1.0277, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 1.84375, |
|
"learning_rate": 5.228571428571429e-06, |
|
"loss": 1.0104, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 1.9609375, |
|
"learning_rate": 5.314285714285714e-06, |
|
"loss": 1.2226, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 1.9296875, |
|
"learning_rate": 5.4e-06, |
|
"loss": 1.1612, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 1.875, |
|
"learning_rate": 5.485714285714286e-06, |
|
"loss": 1.0489, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 1.7734375, |
|
"learning_rate": 5.571428571428572e-06, |
|
"loss": 1.0256, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 1.7890625, |
|
"learning_rate": 5.6571428571428576e-06, |
|
"loss": 1.0704, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 1.9140625, |
|
"learning_rate": 5.742857142857143e-06, |
|
"loss": 1.0114, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 1.90625, |
|
"learning_rate": 5.8285714285714284e-06, |
|
"loss": 0.999, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 1.8359375, |
|
"learning_rate": 5.914285714285715e-06, |
|
"loss": 1.0967, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 1.8203125, |
|
"learning_rate": 6e-06, |
|
"loss": 1.0044, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"eval_loss": 1.0669639110565186, |
|
"eval_runtime": 20.1538, |
|
"eval_samples_per_second": 64.951, |
|
"eval_steps_per_second": 64.951, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 1.921875, |
|
"learning_rate": 5.999986378009738e-06, |
|
"loss": 0.9961, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 1.9375, |
|
"learning_rate": 5.9999455121763995e-06, |
|
"loss": 1.141, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 1.90625, |
|
"learning_rate": 5.999877402912338e-06, |
|
"loss": 0.976, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 1.828125, |
|
"learning_rate": 5.999782050904801e-06, |
|
"loss": 0.998, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 1.890625, |
|
"learning_rate": 5.9996594571159225e-06, |
|
"loss": 1.0277, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 2.03125, |
|
"learning_rate": 5.9995096227827206e-06, |
|
"loss": 1.0644, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 1.9921875, |
|
"learning_rate": 5.999332549417079e-06, |
|
"loss": 1.0319, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 1.875, |
|
"learning_rate": 5.999128238805728e-06, |
|
"loss": 1.1474, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 1.9296875, |
|
"learning_rate": 5.99889669301024e-06, |
|
"loss": 1.0158, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 2.234375, |
|
"learning_rate": 5.998637914366994e-06, |
|
"loss": 1.0571, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 2.09375, |
|
"learning_rate": 5.99835190548716e-06, |
|
"loss": 1.1034, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 2.234375, |
|
"learning_rate": 5.998038669256674e-06, |
|
"loss": 1.0618, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 2.03125, |
|
"learning_rate": 5.997698208836202e-06, |
|
"loss": 0.9915, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 2.515625, |
|
"learning_rate": 5.997330527661113e-06, |
|
"loss": 1.0523, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 2.21875, |
|
"learning_rate": 5.996935629441444e-06, |
|
"loss": 0.9552, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 2.765625, |
|
"learning_rate": 5.996513518161861e-06, |
|
"loss": 1.0611, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 2.765625, |
|
"learning_rate": 5.996064198081619e-06, |
|
"loss": 0.9936, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 3.046875, |
|
"learning_rate": 5.9955876737345236e-06, |
|
"loss": 1.0302, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 3.0625, |
|
"learning_rate": 5.995083949928876e-06, |
|
"loss": 1.043, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 3.484375, |
|
"learning_rate": 5.994553031747435e-06, |
|
"loss": 1.0136, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 3.40625, |
|
"learning_rate": 5.993994924547356e-06, |
|
"loss": 1.1077, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 3.875, |
|
"learning_rate": 5.993409633960142e-06, |
|
"loss": 0.9621, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 3.375, |
|
"learning_rate": 5.99279716589159e-06, |
|
"loss": 1.0648, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 3.171875, |
|
"learning_rate": 5.992157526521724e-06, |
|
"loss": 0.9725, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 3.109375, |
|
"learning_rate": 5.991490722304738e-06, |
|
"loss": 1.0358, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 3.03125, |
|
"learning_rate": 5.990796759968929e-06, |
|
"loss": 1.1569, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 3.03125, |
|
"learning_rate": 5.990075646516628e-06, |
|
"loss": 1.0382, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 4.34375, |
|
"learning_rate": 5.989327389224132e-06, |
|
"loss": 0.9393, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 4.6875, |
|
"learning_rate": 5.988551995641629e-06, |
|
"loss": 1.0306, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 3.515625, |
|
"learning_rate": 5.987749473593123e-06, |
|
"loss": 1.092, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 3.625, |
|
"learning_rate": 5.98691983117635e-06, |
|
"loss": 0.962, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 2.984375, |
|
"learning_rate": 5.986063076762707e-06, |
|
"loss": 0.941, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 2.125, |
|
"learning_rate": 5.985179218997154e-06, |
|
"loss": 0.9805, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 1.59375, |
|
"learning_rate": 5.9842682667981374e-06, |
|
"loss": 0.938, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 1.5234375, |
|
"learning_rate": 5.983330229357497e-06, |
|
"loss": 1.0686, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"eval_loss": 1.0165659189224243, |
|
"eval_runtime": 20.3105, |
|
"eval_samples_per_second": 64.449, |
|
"eval_steps_per_second": 64.449, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 1.46875, |
|
"learning_rate": 5.982365116140371e-06, |
|
"loss": 1.1855, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 1.4921875, |
|
"learning_rate": 5.981372936885102e-06, |
|
"loss": 0.9464, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 1.609375, |
|
"learning_rate": 5.980353701603138e-06, |
|
"loss": 0.9249, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 1.359375, |
|
"learning_rate": 5.979307420578934e-06, |
|
"loss": 0.9662, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 1.5, |
|
"learning_rate": 5.978234104369846e-06, |
|
"loss": 0.9772, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 1.4453125, |
|
"learning_rate": 5.9771337638060235e-06, |
|
"loss": 1.0412, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 1.5, |
|
"learning_rate": 5.9760064099903025e-06, |
|
"loss": 1.0262, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 1.5234375, |
|
"learning_rate": 5.974852054298093e-06, |
|
"loss": 0.9824, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 1.5, |
|
"learning_rate": 5.973670708377263e-06, |
|
"loss": 0.9425, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 1.453125, |
|
"learning_rate": 5.972462384148021e-06, |
|
"loss": 1.0243, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 2.796875, |
|
"learning_rate": 5.971227093802798e-06, |
|
"loss": 0.9326, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 1.7109375, |
|
"learning_rate": 5.9699648498061216e-06, |
|
"loss": 1.0175, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 1.46875, |
|
"learning_rate": 5.968675664894493e-06, |
|
"loss": 1.013, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 1.5390625, |
|
"learning_rate": 5.967359552076256e-06, |
|
"loss": 1.0109, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 1.5078125, |
|
"learning_rate": 5.966016524631466e-06, |
|
"loss": 0.9164, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 1.4921875, |
|
"learning_rate": 5.964646596111758e-06, |
|
"loss": 1.0451, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 1.546875, |
|
"learning_rate": 5.9632497803402104e-06, |
|
"loss": 0.9873, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 1.484375, |
|
"learning_rate": 5.961826091411198e-06, |
|
"loss": 0.9989, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 1.5390625, |
|
"learning_rate": 5.960375543690261e-06, |
|
"loss": 1.1006, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 1.546875, |
|
"learning_rate": 5.9588981518139525e-06, |
|
"loss": 1.0773, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 1.453125, |
|
"learning_rate": 5.957393930689694e-06, |
|
"loss": 0.905, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 1.359375, |
|
"learning_rate": 5.955862895495621e-06, |
|
"loss": 0.9727, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 1.46875, |
|
"learning_rate": 5.9543050616804355e-06, |
|
"loss": 1.0183, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 1.5, |
|
"learning_rate": 5.952720444963247e-06, |
|
"loss": 1.008, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 1.6171875, |
|
"learning_rate": 5.951109061333413e-06, |
|
"loss": 0.9817, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 1.6171875, |
|
"learning_rate": 5.94947092705038e-06, |
|
"loss": 1.0044, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 1.5546875, |
|
"learning_rate": 5.947806058643517e-06, |
|
"loss": 1.0978, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 1.5, |
|
"learning_rate": 5.946114472911952e-06, |
|
"loss": 0.9711, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 1.5390625, |
|
"learning_rate": 5.944396186924398e-06, |
|
"loss": 1.0369, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 1.4375, |
|
"learning_rate": 5.942651218018985e-06, |
|
"loss": 1.0105, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 1.5078125, |
|
"learning_rate": 5.940879583803084e-06, |
|
"loss": 1.0037, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 1.609375, |
|
"learning_rate": 5.939081302153127e-06, |
|
"loss": 1.0426, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 1.421875, |
|
"learning_rate": 5.937256391214429e-06, |
|
"loss": 1.0015, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 1.5625, |
|
"learning_rate": 5.935404869401004e-06, |
|
"loss": 0.9964, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 1.3359375, |
|
"learning_rate": 5.933526755395378e-06, |
|
"loss": 1.0053, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"eval_loss": 1.0025994777679443, |
|
"eval_runtime": 20.1779, |
|
"eval_samples_per_second": 64.873, |
|
"eval_steps_per_second": 64.873, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 1.3515625, |
|
"learning_rate": 5.931622068148405e-06, |
|
"loss": 1.0178, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 1.4453125, |
|
"learning_rate": 5.929690826879072e-06, |
|
"loss": 0.9558, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 1.3984375, |
|
"learning_rate": 5.927733051074303e-06, |
|
"loss": 0.9541, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 1.5, |
|
"learning_rate": 5.925748760488767e-06, |
|
"loss": 0.9655, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 1.71875, |
|
"learning_rate": 5.923737975144677e-06, |
|
"loss": 0.9171, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 1.390625, |
|
"learning_rate": 5.921700715331588e-06, |
|
"loss": 1.0064, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 1.78125, |
|
"learning_rate": 5.919637001606191e-06, |
|
"loss": 0.9423, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 1.515625, |
|
"learning_rate": 5.917546854792106e-06, |
|
"loss": 1.0746, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 1.40625, |
|
"learning_rate": 5.915430295979674e-06, |
|
"loss": 0.9868, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 1.3984375, |
|
"learning_rate": 5.913287346525743e-06, |
|
"loss": 0.9514, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 1.4765625, |
|
"learning_rate": 5.911118028053448e-06, |
|
"loss": 1.0203, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 1.53125, |
|
"learning_rate": 5.908922362452005e-06, |
|
"loss": 0.9735, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 1.3671875, |
|
"learning_rate": 5.906700371876474e-06, |
|
"loss": 1.036, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 1.359375, |
|
"learning_rate": 5.904452078747547e-06, |
|
"loss": 0.979, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 1.3984375, |
|
"learning_rate": 5.90217750575132e-06, |
|
"loss": 0.9801, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 1.4140625, |
|
"learning_rate": 5.899876675839058e-06, |
|
"loss": 0.9683, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 1.375, |
|
"learning_rate": 5.897549612226973e-06, |
|
"loss": 0.9777, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 1.5, |
|
"learning_rate": 5.895196338395979e-06, |
|
"loss": 1.0319, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 1.4140625, |
|
"learning_rate": 5.892816878091468e-06, |
|
"loss": 0.9227, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 1.4921875, |
|
"learning_rate": 5.8904112553230535e-06, |
|
"loss": 0.9632, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 1.4375, |
|
"learning_rate": 5.887979494364347e-06, |
|
"loss": 0.9928, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 1.46875, |
|
"learning_rate": 5.885521619752696e-06, |
|
"loss": 1.0652, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 1.453125, |
|
"learning_rate": 5.883037656288951e-06, |
|
"loss": 0.9787, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 1.3203125, |
|
"learning_rate": 5.880527629037203e-06, |
|
"loss": 0.9502, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 1.4765625, |
|
"learning_rate": 5.877991563324541e-06, |
|
"loss": 0.9333, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 1.421875, |
|
"learning_rate": 5.875429484740787e-06, |
|
"loss": 0.9805, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 1.4453125, |
|
"learning_rate": 5.872841419138246e-06, |
|
"loss": 1.0546, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 1.4765625, |
|
"learning_rate": 5.870227392631439e-06, |
|
"loss": 0.9738, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 1.3984375, |
|
"learning_rate": 5.8675874315968425e-06, |
|
"loss": 0.945, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 1.5546875, |
|
"learning_rate": 5.8649215626726225e-06, |
|
"loss": 0.9291, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 1.453125, |
|
"learning_rate": 5.8622298127583635e-06, |
|
"loss": 0.9397, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 1.4453125, |
|
"learning_rate": 5.859512209014801e-06, |
|
"loss": 0.8954, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 1.515625, |
|
"learning_rate": 5.856768778863542e-06, |
|
"loss": 0.9867, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 1.390625, |
|
"learning_rate": 5.853999549986795e-06, |
|
"loss": 0.9465, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 1.359375, |
|
"learning_rate": 5.851204550327085e-06, |
|
"loss": 0.9735, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"eval_loss": 0.9932471513748169, |
|
"eval_runtime": 20.1373, |
|
"eval_samples_per_second": 65.004, |
|
"eval_steps_per_second": 65.004, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 1.46875, |
|
"learning_rate": 5.848383808086973e-06, |
|
"loss": 1.112, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 1.375, |
|
"learning_rate": 5.845537351728772e-06, |
|
"loss": 0.9184, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 1.4765625, |
|
"learning_rate": 5.842665209974259e-06, |
|
"loss": 1.0099, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 1.3359375, |
|
"learning_rate": 5.839767411804391e-06, |
|
"loss": 1.0246, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 1.3671875, |
|
"learning_rate": 5.836843986459001e-06, |
|
"loss": 0.9687, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 1.390625, |
|
"learning_rate": 5.833894963436514e-06, |
|
"loss": 0.9731, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 1.3828125, |
|
"learning_rate": 5.8309203724936425e-06, |
|
"loss": 1.0483, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 1.46875, |
|
"learning_rate": 5.82792024364509e-06, |
|
"loss": 0.9702, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 1.5, |
|
"learning_rate": 5.824894607163246e-06, |
|
"loss": 0.9713, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 1.53125, |
|
"learning_rate": 5.82184349357788e-06, |
|
"loss": 1.0378, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 1.390625, |
|
"learning_rate": 5.818766933675838e-06, |
|
"loss": 1.0155, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 1.3359375, |
|
"learning_rate": 5.815664958500725e-06, |
|
"loss": 1.0087, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 1.4609375, |
|
"learning_rate": 5.812537599352597e-06, |
|
"loss": 0.9576, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 1.4921875, |
|
"learning_rate": 5.8093848877876455e-06, |
|
"loss": 0.9713, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 1.4609375, |
|
"learning_rate": 5.8062068556178745e-06, |
|
"loss": 0.9434, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 1.5546875, |
|
"learning_rate": 5.803003534910781e-06, |
|
"loss": 1.0159, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 1.4296875, |
|
"learning_rate": 5.799774957989036e-06, |
|
"loss": 0.9089, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 1.53125, |
|
"learning_rate": 5.796521157430152e-06, |
|
"loss": 1.073, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 1.4453125, |
|
"learning_rate": 5.79324216606616e-06, |
|
"loss": 1.0105, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 1.453125, |
|
"learning_rate": 5.7899380169832715e-06, |
|
"loss": 1.2134, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 1.4140625, |
|
"learning_rate": 5.786608743521553e-06, |
|
"loss": 0.932, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 1.4921875, |
|
"learning_rate": 5.783254379274581e-06, |
|
"loss": 0.9971, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 1.484375, |
|
"learning_rate": 5.779874958089108e-06, |
|
"loss": 1.02, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 1.4453125, |
|
"learning_rate": 5.7764705140647235e-06, |
|
"loss": 0.9966, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 1.7578125, |
|
"learning_rate": 5.773041081553503e-06, |
|
"loss": 0.9188, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 1.4765625, |
|
"learning_rate": 5.769586695159664e-06, |
|
"loss": 0.8953, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 1.59375, |
|
"learning_rate": 5.7661073897392245e-06, |
|
"loss": 0.9489, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 1.4765625, |
|
"learning_rate": 5.7626032003996355e-06, |
|
"loss": 0.909, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 2.078125, |
|
"learning_rate": 5.759074162499441e-06, |
|
"loss": 0.9914, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 1.5859375, |
|
"learning_rate": 5.7555203116479166e-06, |
|
"loss": 0.9092, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 1.4453125, |
|
"learning_rate": 5.7519416837047065e-06, |
|
"loss": 0.9878, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 1.4921875, |
|
"learning_rate": 5.748338314779465e-06, |
|
"loss": 0.9933, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 1.671875, |
|
"learning_rate": 5.744710241231496e-06, |
|
"loss": 1.0544, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 1.2890625, |
|
"learning_rate": 5.741057499669377e-06, |
|
"loss": 1.0482, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 1.5234375, |
|
"learning_rate": 5.737380126950598e-06, |
|
"loss": 0.9966, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"eval_loss": 0.9859724044799805, |
|
"eval_runtime": 20.1941, |
|
"eval_samples_per_second": 64.821, |
|
"eval_steps_per_second": 64.821, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 1.84375, |
|
"learning_rate": 5.733678160181186e-06, |
|
"loss": 1.0631, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 1.3984375, |
|
"learning_rate": 5.729951636715328e-06, |
|
"loss": 0.8644, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 1.46875, |
|
"learning_rate": 5.726200594155e-06, |
|
"loss": 0.9526, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 1.4921875, |
|
"learning_rate": 5.722425070349584e-06, |
|
"loss": 0.9792, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 1.40625, |
|
"learning_rate": 5.718625103395486e-06, |
|
"loss": 0.9734, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 1.40625, |
|
"learning_rate": 5.7148007316357526e-06, |
|
"loss": 1.07, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 1.390625, |
|
"learning_rate": 5.7109519936596835e-06, |
|
"loss": 1.0494, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 1.390625, |
|
"learning_rate": 5.707078928302444e-06, |
|
"loss": 0.9834, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 1.4765625, |
|
"learning_rate": 5.703181574644667e-06, |
|
"loss": 0.9465, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 1.640625, |
|
"learning_rate": 5.699259972012069e-06, |
|
"loss": 1.0075, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 1.5703125, |
|
"learning_rate": 5.695314159975044e-06, |
|
"loss": 1.0926, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 1.484375, |
|
"learning_rate": 5.691344178348268e-06, |
|
"loss": 0.9195, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 1.546875, |
|
"learning_rate": 5.687350067190298e-06, |
|
"loss": 0.9602, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 1.5625, |
|
"learning_rate": 5.683331866803166e-06, |
|
"loss": 0.9602, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 1.375, |
|
"learning_rate": 5.679289617731972e-06, |
|
"loss": 1.0241, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 1.46875, |
|
"learning_rate": 5.675223360764478e-06, |
|
"loss": 0.9545, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 1.5625, |
|
"learning_rate": 5.671133136930693e-06, |
|
"loss": 1.0943, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 1.75, |
|
"learning_rate": 5.667018987502462e-06, |
|
"loss": 0.9773, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 1.609375, |
|
"learning_rate": 5.6628809539930465e-06, |
|
"loss": 0.8895, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 1.7734375, |
|
"learning_rate": 5.658719078156706e-06, |
|
"loss": 0.9635, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 1.4375, |
|
"learning_rate": 5.654533401988281e-06, |
|
"loss": 0.9368, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 1.4609375, |
|
"learning_rate": 5.650323967722762e-06, |
|
"loss": 0.9458, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 1.5390625, |
|
"learning_rate": 5.646090817834871e-06, |
|
"loss": 0.8872, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 1.4296875, |
|
"learning_rate": 5.641833995038629e-06, |
|
"loss": 0.9918, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 1.4375, |
|
"learning_rate": 5.63755354228692e-06, |
|
"loss": 0.9063, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 1.4453125, |
|
"learning_rate": 5.633249502771071e-06, |
|
"loss": 1.0154, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 1.4296875, |
|
"learning_rate": 5.6289219199203996e-06, |
|
"loss": 1.0322, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 1.4765625, |
|
"learning_rate": 5.624570837401791e-06, |
|
"loss": 0.9797, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 1.4296875, |
|
"learning_rate": 5.620196299119248e-06, |
|
"loss": 1.0433, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 1.34375, |
|
"learning_rate": 5.61579834921345e-06, |
|
"loss": 0.8911, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 1.40625, |
|
"learning_rate": 5.611377032061308e-06, |
|
"loss": 1.0065, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 1.34375, |
|
"learning_rate": 5.60693239227552e-06, |
|
"loss": 0.9498, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 1.4375, |
|
"learning_rate": 5.602464474704111e-06, |
|
"loss": 0.8877, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 1.6015625, |
|
"learning_rate": 5.597973324429997e-06, |
|
"loss": 1.0399, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 1.5234375, |
|
"learning_rate": 5.593458986770512e-06, |
|
"loss": 1.0804, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"eval_loss": 0.9796280264854431, |
|
"eval_runtime": 20.1507, |
|
"eval_samples_per_second": 64.96, |
|
"eval_steps_per_second": 64.96, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 1.4140625, |
|
"learning_rate": 5.588921507276964e-06, |
|
"loss": 0.9844, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 1.5234375, |
|
"learning_rate": 5.584360931734168e-06, |
|
"loss": 0.9852, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 1.3984375, |
|
"learning_rate": 5.5797773061599876e-06, |
|
"loss": 0.9387, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 1.5703125, |
|
"learning_rate": 5.57517067680487e-06, |
|
"loss": 0.9369, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 1.3671875, |
|
"learning_rate": 5.570541090151377e-06, |
|
"loss": 0.9211, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 1.609375, |
|
"learning_rate": 5.565888592913721e-06, |
|
"loss": 1.0429, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 1.5234375, |
|
"learning_rate": 5.561213232037286e-06, |
|
"loss": 0.939, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 1.5078125, |
|
"learning_rate": 5.556515054698165e-06, |
|
"loss": 1.0304, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 1.4765625, |
|
"learning_rate": 5.5517941083026704e-06, |
|
"loss": 0.8778, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 1.3671875, |
|
"learning_rate": 5.547050440486868e-06, |
|
"loss": 0.9531, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 1.65625, |
|
"learning_rate": 5.542284099116085e-06, |
|
"loss": 1.0964, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 1.5390625, |
|
"learning_rate": 5.537495132284442e-06, |
|
"loss": 1.0354, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 1.578125, |
|
"learning_rate": 5.53268358831435e-06, |
|
"loss": 0.9608, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 1.3984375, |
|
"learning_rate": 5.527849515756036e-06, |
|
"loss": 0.9937, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 1.328125, |
|
"learning_rate": 5.522992963387047e-06, |
|
"loss": 0.9523, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 1.515625, |
|
"learning_rate": 5.518113980211762e-06, |
|
"loss": 0.929, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 1.4375, |
|
"learning_rate": 5.51321261546089e-06, |
|
"loss": 0.9928, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 1.3125, |
|
"learning_rate": 5.5082889185909834e-06, |
|
"loss": 0.8508, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 1.609375, |
|
"learning_rate": 5.503342939283931e-06, |
|
"loss": 0.9067, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 1.5078125, |
|
"learning_rate": 5.498374727446461e-06, |
|
"loss": 0.9878, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 1.578125, |
|
"learning_rate": 5.493384333209634e-06, |
|
"loss": 0.8992, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 1.375, |
|
"learning_rate": 5.488371806928342e-06, |
|
"loss": 1.0696, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 1.5078125, |
|
"learning_rate": 5.483337199180793e-06, |
|
"loss": 0.9069, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 1.5703125, |
|
"learning_rate": 5.4782805607680085e-06, |
|
"loss": 0.9608, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 1.4609375, |
|
"learning_rate": 5.473201942713307e-06, |
|
"loss": 1.0443, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 1.4921875, |
|
"learning_rate": 5.46810139626179e-06, |
|
"loss": 0.888, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 1.3359375, |
|
"learning_rate": 5.46297897287982e-06, |
|
"loss": 0.9668, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 1.53125, |
|
"learning_rate": 5.457834724254512e-06, |
|
"loss": 0.9828, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 1.40625, |
|
"learning_rate": 5.452668702293203e-06, |
|
"loss": 0.9154, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 1.5625, |
|
"learning_rate": 5.447480959122929e-06, |
|
"loss": 0.9498, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 1.3828125, |
|
"learning_rate": 5.442271547089904e-06, |
|
"loss": 0.9782, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 1.5234375, |
|
"learning_rate": 5.437040518758986e-06, |
|
"loss": 0.9729, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 1.46875, |
|
"learning_rate": 5.431787926913152e-06, |
|
"loss": 0.9455, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 1.515625, |
|
"learning_rate": 5.42651382455296e-06, |
|
"loss": 0.9834, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 1.359375, |
|
"learning_rate": 5.421218264896019e-06, |
|
"loss": 0.8952, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"eval_loss": 0.9743253588676453, |
|
"eval_runtime": 20.2702, |
|
"eval_samples_per_second": 64.578, |
|
"eval_steps_per_second": 64.578, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 1.4375, |
|
"learning_rate": 5.415901301376447e-06, |
|
"loss": 1.1117, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 1.3984375, |
|
"learning_rate": 5.4105629876443405e-06, |
|
"loss": 0.9905, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 1.5546875, |
|
"learning_rate": 5.40520337756522e-06, |
|
"loss": 1.0321, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 1.484375, |
|
"learning_rate": 5.3998225252195e-06, |
|
"loss": 0.9664, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 1.5390625, |
|
"learning_rate": 5.394420484901934e-06, |
|
"loss": 1.0156, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 1.4296875, |
|
"learning_rate": 5.38899731112107e-06, |
|
"loss": 0.8818, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 1.59375, |
|
"learning_rate": 5.383553058598702e-06, |
|
"loss": 0.9483, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 1.4609375, |
|
"learning_rate": 5.378087782269314e-06, |
|
"loss": 0.9671, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 1.3046875, |
|
"learning_rate": 5.37260153727953e-06, |
|
"loss": 0.915, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 1.4140625, |
|
"learning_rate": 5.367094378987554e-06, |
|
"loss": 0.9215, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 1.7421875, |
|
"learning_rate": 5.361566362962613e-06, |
|
"loss": 0.9655, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 1.4375, |
|
"learning_rate": 5.356017544984396e-06, |
|
"loss": 0.9641, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 1.4296875, |
|
"learning_rate": 5.350447981042491e-06, |
|
"loss": 0.9303, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 1.4296875, |
|
"learning_rate": 5.344857727335822e-06, |
|
"loss": 0.9223, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 1.59375, |
|
"learning_rate": 5.339246840272079e-06, |
|
"loss": 1.0255, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 1.375, |
|
"learning_rate": 5.333615376467149e-06, |
|
"loss": 0.9899, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 1.3203125, |
|
"learning_rate": 5.3279633927445486e-06, |
|
"loss": 0.9336, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 1.4296875, |
|
"learning_rate": 5.322290946134844e-06, |
|
"loss": 0.9654, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 1.5, |
|
"learning_rate": 5.316598093875082e-06, |
|
"loss": 1.0265, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 1.4921875, |
|
"learning_rate": 5.310884893408211e-06, |
|
"loss": 0.9132, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 1.6796875, |
|
"learning_rate": 5.305151402382497e-06, |
|
"loss": 0.9248, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 1.4765625, |
|
"learning_rate": 5.299397678650947e-06, |
|
"loss": 0.9737, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 1.34375, |
|
"learning_rate": 5.293623780270723e-06, |
|
"loss": 0.9366, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 1.4375, |
|
"learning_rate": 5.287829765502556e-06, |
|
"loss": 0.9379, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 1.5625, |
|
"learning_rate": 5.282015692810159e-06, |
|
"loss": 1.0213, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 1.3671875, |
|
"learning_rate": 5.276181620859639e-06, |
|
"loss": 1.013, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 1.4453125, |
|
"learning_rate": 5.270327608518898e-06, |
|
"loss": 1.0134, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 1.5625, |
|
"learning_rate": 5.264453714857047e-06, |
|
"loss": 0.9575, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 1.4140625, |
|
"learning_rate": 5.258559999143809e-06, |
|
"loss": 0.8686, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 1.4375, |
|
"learning_rate": 5.252646520848912e-06, |
|
"loss": 1.0265, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 1.4453125, |
|
"learning_rate": 5.2467133396415016e-06, |
|
"loss": 0.88, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 1.3984375, |
|
"learning_rate": 5.24076051538953e-06, |
|
"loss": 0.9549, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 1.4609375, |
|
"learning_rate": 5.2347881081591565e-06, |
|
"loss": 0.9798, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 1.6015625, |
|
"learning_rate": 5.228796178214138e-06, |
|
"loss": 0.8834, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 1.3828125, |
|
"learning_rate": 5.222784786015225e-06, |
|
"loss": 0.9394, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"eval_loss": 0.9702575206756592, |
|
"eval_runtime": 20.1318, |
|
"eval_samples_per_second": 65.022, |
|
"eval_steps_per_second": 65.022, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 1.4140625, |
|
"learning_rate": 5.216753992219547e-06, |
|
"loss": 0.9804, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 1.4609375, |
|
"learning_rate": 5.210703857680003e-06, |
|
"loss": 0.9201, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 1.421875, |
|
"learning_rate": 5.204634443444651e-06, |
|
"loss": 0.9839, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 1.390625, |
|
"learning_rate": 5.198545810756079e-06, |
|
"loss": 1.024, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 1.4296875, |
|
"learning_rate": 5.192438021050806e-06, |
|
"loss": 0.9319, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 1.515625, |
|
"learning_rate": 5.186311135958647e-06, |
|
"loss": 0.8863, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 1.484375, |
|
"learning_rate": 5.180165217302099e-06, |
|
"loss": 0.9621, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 1.359375, |
|
"learning_rate": 5.174000327095711e-06, |
|
"loss": 0.8818, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 2.953125, |
|
"learning_rate": 5.167816527545466e-06, |
|
"loss": 0.807, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 1.328125, |
|
"learning_rate": 5.161613881048147e-06, |
|
"loss": 0.9364, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 1.4765625, |
|
"learning_rate": 5.155392450190707e-06, |
|
"loss": 0.9787, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 1.46875, |
|
"learning_rate": 5.149152297749646e-06, |
|
"loss": 0.9272, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 1.5234375, |
|
"learning_rate": 5.142893486690366e-06, |
|
"loss": 0.8953, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 1.5234375, |
|
"learning_rate": 5.136616080166545e-06, |
|
"loss": 0.952, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 1.7265625, |
|
"learning_rate": 5.1303201415194946e-06, |
|
"loss": 1.046, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 1.5078125, |
|
"learning_rate": 5.1240057342775204e-06, |
|
"loss": 0.9028, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 1.53125, |
|
"learning_rate": 5.1176729221552865e-06, |
|
"loss": 0.9819, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 1.3671875, |
|
"learning_rate": 5.111321769053166e-06, |
|
"loss": 0.9189, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 1.515625, |
|
"learning_rate": 5.104952339056601e-06, |
|
"loss": 0.9145, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 1.3828125, |
|
"learning_rate": 5.098564696435452e-06, |
|
"loss": 0.8783, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 1.46875, |
|
"learning_rate": 5.092158905643353e-06, |
|
"loss": 1.0541, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 1.5078125, |
|
"learning_rate": 5.085735031317061e-06, |
|
"loss": 0.8956, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 1.5859375, |
|
"learning_rate": 5.0792931382758e-06, |
|
"loss": 1.0593, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 1.40625, |
|
"learning_rate": 5.072833291520609e-06, |
|
"loss": 0.9345, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 1.5625, |
|
"learning_rate": 5.066355556233688e-06, |
|
"loss": 0.9642, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 1.40625, |
|
"learning_rate": 5.059859997777741e-06, |
|
"loss": 0.8651, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 1.5234375, |
|
"learning_rate": 5.053346681695309e-06, |
|
"loss": 0.9767, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 1.3671875, |
|
"learning_rate": 5.046815673708117e-06, |
|
"loss": 0.8192, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 1.484375, |
|
"learning_rate": 5.040267039716409e-06, |
|
"loss": 0.856, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 1.3671875, |
|
"learning_rate": 5.033700845798278e-06, |
|
"loss": 0.9764, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 1.421875, |
|
"learning_rate": 5.0271171582090065e-06, |
|
"loss": 0.9693, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 1.59375, |
|
"learning_rate": 5.020516043380393e-06, |
|
"loss": 0.8832, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 1.390625, |
|
"learning_rate": 5.013897567920083e-06, |
|
"loss": 0.9449, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 1.3984375, |
|
"learning_rate": 5.007261798610898e-06, |
|
"loss": 0.9854, |
|
"step": 349 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 1.5546875, |
|
"learning_rate": 5.00060880241016e-06, |
|
"loss": 0.8984, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"eval_loss": 0.9668439626693726, |
|
"eval_runtime": 20.1513, |
|
"eval_samples_per_second": 64.958, |
|
"eval_steps_per_second": 64.958, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 1.4375, |
|
"learning_rate": 4.993938646449017e-06, |
|
"loss": 0.9833, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 1.7109375, |
|
"learning_rate": 4.9872513980317635e-06, |
|
"loss": 0.8848, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 1.328125, |
|
"learning_rate": 4.9805471246351666e-06, |
|
"loss": 0.9356, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 1.5625, |
|
"learning_rate": 4.973825893907778e-06, |
|
"loss": 1.0285, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 1.4609375, |
|
"learning_rate": 4.9670877736692575e-06, |
|
"loss": 1.0421, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"grad_norm": 1.4453125, |
|
"learning_rate": 4.960332831909685e-06, |
|
"loss": 0.9617, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"grad_norm": 1.53125, |
|
"learning_rate": 4.953561136788877e-06, |
|
"loss": 0.9683, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"grad_norm": 1.4453125, |
|
"learning_rate": 4.946772756635694e-06, |
|
"loss": 0.9737, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 1.4453125, |
|
"learning_rate": 4.939967759947358e-06, |
|
"loss": 1.0571, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 1.5625, |
|
"learning_rate": 4.933146215388759e-06, |
|
"loss": 0.8903, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"grad_norm": 1.3984375, |
|
"learning_rate": 4.926308191791757e-06, |
|
"loss": 0.893, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"grad_norm": 1.3671875, |
|
"learning_rate": 4.919453758154493e-06, |
|
"loss": 0.9087, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"grad_norm": 1.359375, |
|
"learning_rate": 4.912582983640692e-06, |
|
"loss": 0.8752, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"grad_norm": 1.40625, |
|
"learning_rate": 4.905695937578965e-06, |
|
"loss": 0.9943, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"grad_norm": 1.390625, |
|
"learning_rate": 4.898792689462106e-06, |
|
"loss": 0.9087, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"grad_norm": 1.4453125, |
|
"learning_rate": 4.891873308946399e-06, |
|
"loss": 0.9659, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"grad_norm": 1.4921875, |
|
"learning_rate": 4.884937865850902e-06, |
|
"loss": 0.924, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"grad_norm": 1.4296875, |
|
"learning_rate": 4.877986430156756e-06, |
|
"loss": 0.87, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"grad_norm": 1.375, |
|
"learning_rate": 4.871019072006473e-06, |
|
"loss": 1.0459, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"grad_norm": 1.515625, |
|
"learning_rate": 4.864035861703225e-06, |
|
"loss": 0.8948, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"grad_norm": 1.4609375, |
|
"learning_rate": 4.8570368697101415e-06, |
|
"loss": 0.8847, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"grad_norm": 1.3828125, |
|
"learning_rate": 4.850022166649592e-06, |
|
"loss": 0.9415, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"grad_norm": 1.4609375, |
|
"learning_rate": 4.84299182330248e-06, |
|
"loss": 0.9833, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"grad_norm": 1.46875, |
|
"learning_rate": 4.8359459106075204e-06, |
|
"loss": 0.9669, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"grad_norm": 1.4296875, |
|
"learning_rate": 4.828884499660533e-06, |
|
"loss": 0.8532, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"grad_norm": 1.328125, |
|
"learning_rate": 4.8218076617137175e-06, |
|
"loss": 0.9317, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"grad_norm": 1.515625, |
|
"learning_rate": 4.814715468174939e-06, |
|
"loss": 0.8942, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"grad_norm": 1.4140625, |
|
"learning_rate": 4.807607990607007e-06, |
|
"loss": 0.8847, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"grad_norm": 1.4296875, |
|
"learning_rate": 4.8004853007269504e-06, |
|
"loss": 0.9326, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"grad_norm": 1.3984375, |
|
"learning_rate": 4.793347470405296e-06, |
|
"loss": 0.8939, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"grad_norm": 1.453125, |
|
"learning_rate": 4.786194571665346e-06, |
|
"loss": 0.8817, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"grad_norm": 1.609375, |
|
"learning_rate": 4.7790266766824435e-06, |
|
"loss": 0.9413, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"grad_norm": 1.3515625, |
|
"learning_rate": 4.771843857783254e-06, |
|
"loss": 0.8537, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"grad_norm": 1.359375, |
|
"learning_rate": 4.7646461874450294e-06, |
|
"loss": 0.9405, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"grad_norm": 1.421875, |
|
"learning_rate": 4.757433738294874e-06, |
|
"loss": 0.9424, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"eval_loss": 0.9648303389549255, |
|
"eval_runtime": 20.2432, |
|
"eval_samples_per_second": 64.664, |
|
"eval_steps_per_second": 64.664, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"grad_norm": 1.359375, |
|
"learning_rate": 4.750206583109022e-06, |
|
"loss": 0.9327, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"grad_norm": 1.4296875, |
|
"learning_rate": 4.742964794812093e-06, |
|
"loss": 0.8816, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"grad_norm": 1.4375, |
|
"learning_rate": 4.735708446476361e-06, |
|
"loss": 0.8918, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"grad_norm": 1.4453125, |
|
"learning_rate": 4.728437611321015e-06, |
|
"loss": 0.958, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"grad_norm": 1.390625, |
|
"learning_rate": 4.721152362711423e-06, |
|
"loss": 0.8336, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"grad_norm": 1.390625, |
|
"learning_rate": 4.713852774158389e-06, |
|
"loss": 1.0331, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"grad_norm": 1.4296875, |
|
"learning_rate": 4.706538919317411e-06, |
|
"loss": 0.8288, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"grad_norm": 1.515625, |
|
"learning_rate": 4.6992108719879445e-06, |
|
"loss": 0.8429, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"grad_norm": 1.4921875, |
|
"learning_rate": 4.691868706112643e-06, |
|
"loss": 0.8227, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"grad_norm": 1.53125, |
|
"learning_rate": 4.684512495776631e-06, |
|
"loss": 1.0182, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"grad_norm": 1.4375, |
|
"learning_rate": 4.6771423152067395e-06, |
|
"loss": 0.915, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"grad_norm": 1.4453125, |
|
"learning_rate": 4.669758238770767e-06, |
|
"loss": 0.9564, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"grad_norm": 1.390625, |
|
"learning_rate": 4.662360340976728e-06, |
|
"loss": 0.9006, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"grad_norm": 1.3828125, |
|
"learning_rate": 4.654948696472097e-06, |
|
"loss": 0.9841, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"grad_norm": 1.4296875, |
|
"learning_rate": 4.6475233800430586e-06, |
|
"loss": 0.8723, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"grad_norm": 1.4453125, |
|
"learning_rate": 4.6400844666137525e-06, |
|
"loss": 1.0067, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"grad_norm": 1.4453125, |
|
"learning_rate": 4.632632031245516e-06, |
|
"loss": 0.9236, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"grad_norm": 1.625, |
|
"learning_rate": 4.6251661491361285e-06, |
|
"loss": 0.9849, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"grad_norm": 1.4296875, |
|
"learning_rate": 4.617686895619052e-06, |
|
"loss": 0.923, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"grad_norm": 1.2421875, |
|
"learning_rate": 4.610194346162673e-06, |
|
"loss": 0.9378, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"grad_norm": 1.515625, |
|
"learning_rate": 4.602688576369531e-06, |
|
"loss": 1.0061, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"grad_norm": 1.328125, |
|
"learning_rate": 4.595169661975573e-06, |
|
"loss": 0.9105, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"grad_norm": 1.4453125, |
|
"learning_rate": 4.5876376788493754e-06, |
|
"loss": 0.9587, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"grad_norm": 1.390625, |
|
"learning_rate": 4.580092702991383e-06, |
|
"loss": 0.877, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"grad_norm": 1.3359375, |
|
"learning_rate": 4.572534810533142e-06, |
|
"loss": 0.923, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"grad_norm": 1.4609375, |
|
"learning_rate": 4.564964077736531e-06, |
|
"loss": 0.9448, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"grad_norm": 1.421875, |
|
"learning_rate": 4.5573805809929946e-06, |
|
"loss": 0.9546, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"grad_norm": 1.3125, |
|
"learning_rate": 4.549784396822769e-06, |
|
"loss": 0.9835, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"grad_norm": 1.4296875, |
|
"learning_rate": 4.542175601874111e-06, |
|
"loss": 0.8655, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"grad_norm": 1.4140625, |
|
"learning_rate": 4.534554272922525e-06, |
|
"loss": 0.8878, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"grad_norm": 1.4609375, |
|
"learning_rate": 4.526920486869986e-06, |
|
"loss": 0.9834, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"grad_norm": 1.4140625, |
|
"learning_rate": 4.5192743207441685e-06, |
|
"loss": 0.8446, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"grad_norm": 1.453125, |
|
"learning_rate": 4.511615851697668e-06, |
|
"loss": 0.9525, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"grad_norm": 1.359375, |
|
"learning_rate": 4.503945157007216e-06, |
|
"loss": 1.0056, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"grad_norm": 1.359375, |
|
"learning_rate": 4.4962623140729074e-06, |
|
"loss": 0.9117, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"eval_loss": 0.9622918367385864, |
|
"eval_runtime": 20.1675, |
|
"eval_samples_per_second": 64.906, |
|
"eval_steps_per_second": 64.906, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"grad_norm": 1.5, |
|
"learning_rate": 4.488567400417417e-06, |
|
"loss": 0.9416, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"grad_norm": 1.4140625, |
|
"learning_rate": 4.48086049368522e-06, |
|
"loss": 0.9492, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"grad_norm": 1.3984375, |
|
"learning_rate": 4.473141671641803e-06, |
|
"loss": 0.8472, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"grad_norm": 1.4921875, |
|
"learning_rate": 4.4654110121728855e-06, |
|
"loss": 0.9374, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"grad_norm": 1.421875, |
|
"learning_rate": 4.457668593283625e-06, |
|
"loss": 0.8029, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"grad_norm": 1.3984375, |
|
"learning_rate": 4.449914493097844e-06, |
|
"loss": 0.8902, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"grad_norm": 1.390625, |
|
"learning_rate": 4.442148789857225e-06, |
|
"loss": 0.9008, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"grad_norm": 1.4296875, |
|
"learning_rate": 4.434371561920539e-06, |
|
"loss": 0.87, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"grad_norm": 1.359375, |
|
"learning_rate": 4.426582887762836e-06, |
|
"loss": 0.9065, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"grad_norm": 1.34375, |
|
"learning_rate": 4.418782845974669e-06, |
|
"loss": 0.9354, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"grad_norm": 1.390625, |
|
"learning_rate": 4.410971515261291e-06, |
|
"loss": 0.8697, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"grad_norm": 1.6015625, |
|
"learning_rate": 4.403148974441867e-06, |
|
"loss": 0.8788, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"grad_norm": 1.3828125, |
|
"learning_rate": 4.3953153024486716e-06, |
|
"loss": 0.9644, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"grad_norm": 1.453125, |
|
"learning_rate": 4.387470578326305e-06, |
|
"loss": 1.0109, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"grad_norm": 1.4140625, |
|
"learning_rate": 4.379614881230879e-06, |
|
"loss": 0.9348, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"grad_norm": 1.53125, |
|
"learning_rate": 4.371748290429231e-06, |
|
"loss": 0.9769, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"grad_norm": 1.4453125, |
|
"learning_rate": 4.363870885298119e-06, |
|
"loss": 0.9734, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"grad_norm": 1.5546875, |
|
"learning_rate": 4.355982745323421e-06, |
|
"loss": 0.8801, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"grad_norm": 1.3828125, |
|
"learning_rate": 4.348083950099335e-06, |
|
"loss": 0.9323, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"grad_norm": 1.421875, |
|
"learning_rate": 4.340174579327574e-06, |
|
"loss": 0.9672, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"grad_norm": 1.4140625, |
|
"learning_rate": 4.332254712816559e-06, |
|
"loss": 0.9541, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"grad_norm": 1.3359375, |
|
"learning_rate": 4.32432443048062e-06, |
|
"loss": 0.8939, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"grad_norm": 1.3984375, |
|
"learning_rate": 4.316383812339188e-06, |
|
"loss": 0.9381, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"grad_norm": 1.3671875, |
|
"learning_rate": 4.30843293851598e-06, |
|
"loss": 0.8943, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"grad_norm": 1.40625, |
|
"learning_rate": 4.300471889238203e-06, |
|
"loss": 1.0116, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"grad_norm": 1.34375, |
|
"learning_rate": 4.292500744835735e-06, |
|
"loss": 0.9023, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"grad_norm": 1.4765625, |
|
"learning_rate": 4.284519585740317e-06, |
|
"loss": 0.8961, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"grad_norm": 1.3828125, |
|
"learning_rate": 4.276528492484743e-06, |
|
"loss": 0.9492, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"grad_norm": 1.359375, |
|
"learning_rate": 4.2685275457020465e-06, |
|
"loss": 0.8869, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"grad_norm": 1.3515625, |
|
"learning_rate": 4.260516826124686e-06, |
|
"loss": 0.8692, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"grad_norm": 1.484375, |
|
"learning_rate": 4.2524964145837305e-06, |
|
"loss": 0.857, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"grad_norm": 1.2890625, |
|
"learning_rate": 4.244466392008045e-06, |
|
"loss": 0.853, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"grad_norm": 1.40625, |
|
"learning_rate": 4.236426839423474e-06, |
|
"loss": 0.8563, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"grad_norm": 1.5078125, |
|
"learning_rate": 4.2283778379520235e-06, |
|
"loss": 0.9452, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"grad_norm": 1.3125, |
|
"learning_rate": 4.22031946881104e-06, |
|
"loss": 0.9345, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"eval_loss": 0.9603658318519592, |
|
"eval_runtime": 20.3177, |
|
"eval_samples_per_second": 64.427, |
|
"eval_steps_per_second": 64.427, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"grad_norm": 1.40625, |
|
"learning_rate": 4.212251813312395e-06, |
|
"loss": 0.8672, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"grad_norm": 1.453125, |
|
"learning_rate": 4.204174952861663e-06, |
|
"loss": 0.9066, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"grad_norm": 1.40625, |
|
"learning_rate": 4.196088968957298e-06, |
|
"loss": 0.9464, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"grad_norm": 1.328125, |
|
"learning_rate": 4.187993943189815e-06, |
|
"loss": 0.921, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"grad_norm": 1.46875, |
|
"learning_rate": 4.179889957240963e-06, |
|
"loss": 0.8512, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"grad_norm": 1.3203125, |
|
"learning_rate": 4.171777092882905e-06, |
|
"loss": 0.8376, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"grad_norm": 1.4140625, |
|
"learning_rate": 4.1636554319773865e-06, |
|
"loss": 0.961, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"grad_norm": 1.4921875, |
|
"learning_rate": 4.155525056474917e-06, |
|
"loss": 0.9431, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"grad_norm": 1.4296875, |
|
"learning_rate": 4.147386048413936e-06, |
|
"loss": 0.9156, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"grad_norm": 1.4375, |
|
"learning_rate": 4.1392384899199916e-06, |
|
"loss": 1.0053, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"grad_norm": 1.4765625, |
|
"learning_rate": 4.13108246320491e-06, |
|
"loss": 0.9785, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"grad_norm": 1.390625, |
|
"learning_rate": 4.122918050565959e-06, |
|
"loss": 1.0079, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"grad_norm": 1.40625, |
|
"learning_rate": 4.11474533438503e-06, |
|
"loss": 0.8462, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"grad_norm": 1.4140625, |
|
"learning_rate": 4.1065643971277945e-06, |
|
"loss": 0.9099, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"grad_norm": 1.359375, |
|
"learning_rate": 4.098375321342883e-06, |
|
"loss": 0.9068, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"grad_norm": 1.671875, |
|
"learning_rate": 4.090178189661043e-06, |
|
"loss": 0.8288, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"grad_norm": 1.5078125, |
|
"learning_rate": 4.08197308479431e-06, |
|
"loss": 0.9653, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"grad_norm": 1.5390625, |
|
"learning_rate": 4.073760089535173e-06, |
|
"loss": 0.9364, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"grad_norm": 1.4921875, |
|
"learning_rate": 4.065539286755734e-06, |
|
"loss": 0.9165, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"grad_norm": 1.3828125, |
|
"learning_rate": 4.057310759406881e-06, |
|
"loss": 0.9906, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"grad_norm": 1.34375, |
|
"learning_rate": 4.0490745905174425e-06, |
|
"loss": 0.8814, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"grad_norm": 1.4453125, |
|
"learning_rate": 4.040830863193354e-06, |
|
"loss": 0.9109, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"grad_norm": 1.40625, |
|
"learning_rate": 4.032579660616816e-06, |
|
"loss": 0.8054, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"grad_norm": 1.4140625, |
|
"learning_rate": 4.024321066045462e-06, |
|
"loss": 0.95, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"grad_norm": 1.3203125, |
|
"learning_rate": 4.016055162811508e-06, |
|
"loss": 0.9861, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"grad_norm": 1.359375, |
|
"learning_rate": 4.00778203432092e-06, |
|
"loss": 0.8921, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"grad_norm": 1.3984375, |
|
"learning_rate": 3.999501764052571e-06, |
|
"loss": 0.9639, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"grad_norm": 1.296875, |
|
"learning_rate": 3.991214435557392e-06, |
|
"loss": 0.8409, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"grad_norm": 1.4140625, |
|
"learning_rate": 3.982920132457538e-06, |
|
"loss": 0.9423, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"grad_norm": 1.3828125, |
|
"learning_rate": 3.97461893844554e-06, |
|
"loss": 0.9888, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"grad_norm": 1.484375, |
|
"learning_rate": 3.966310937283461e-06, |
|
"loss": 0.9327, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"grad_norm": 1.3359375, |
|
"learning_rate": 3.957996212802049e-06, |
|
"loss": 0.9888, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"grad_norm": 1.46875, |
|
"learning_rate": 3.949674848899893e-06, |
|
"loss": 0.9688, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"grad_norm": 1.4296875, |
|
"learning_rate": 3.9413469295425764e-06, |
|
"loss": 0.853, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"grad_norm": 1.53125, |
|
"learning_rate": 3.933012538761832e-06, |
|
"loss": 0.951, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"eval_loss": 0.9591352343559265, |
|
"eval_runtime": 20.1897, |
|
"eval_samples_per_second": 64.835, |
|
"eval_steps_per_second": 64.835, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"grad_norm": 1.4375, |
|
"learning_rate": 3.924671760654688e-06, |
|
"loss": 0.9298, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"grad_norm": 1.59375, |
|
"learning_rate": 3.916324679382624e-06, |
|
"loss": 0.8488, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"grad_norm": 1.4296875, |
|
"learning_rate": 3.907971379170722e-06, |
|
"loss": 0.8833, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"grad_norm": 1.46875, |
|
"learning_rate": 3.899611944306816e-06, |
|
"loss": 0.8187, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"grad_norm": 1.453125, |
|
"learning_rate": 3.891246459140637e-06, |
|
"loss": 0.9175, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"grad_norm": 1.3203125, |
|
"learning_rate": 3.88287500808297e-06, |
|
"loss": 0.9665, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"grad_norm": 1.3984375, |
|
"learning_rate": 3.874497675604795e-06, |
|
"loss": 0.9449, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"grad_norm": 1.5, |
|
"learning_rate": 3.866114546236441e-06, |
|
"loss": 1.0137, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"grad_norm": 1.3203125, |
|
"learning_rate": 3.857725704566727e-06, |
|
"loss": 0.8451, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"grad_norm": 1.3203125, |
|
"learning_rate": 3.849331235242112e-06, |
|
"loss": 0.8812, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"grad_norm": 1.4375, |
|
"learning_rate": 3.8409312229658385e-06, |
|
"loss": 0.9713, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"grad_norm": 1.4296875, |
|
"learning_rate": 3.8325257524970825e-06, |
|
"loss": 0.9424, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"grad_norm": 1.5234375, |
|
"learning_rate": 3.8241149086500925e-06, |
|
"loss": 0.9523, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"grad_norm": 1.5078125, |
|
"learning_rate": 3.815698776293337e-06, |
|
"loss": 0.8275, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"grad_norm": 1.4765625, |
|
"learning_rate": 3.807277440348651e-06, |
|
"loss": 0.8957, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"grad_norm": 1.4296875, |
|
"learning_rate": 3.7988509857903683e-06, |
|
"loss": 0.9782, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"grad_norm": 1.4296875, |
|
"learning_rate": 3.7904194976444786e-06, |
|
"loss": 0.8956, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"grad_norm": 1.453125, |
|
"learning_rate": 3.7819830609877576e-06, |
|
"loss": 0.8747, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"grad_norm": 1.390625, |
|
"learning_rate": 3.773541760946916e-06, |
|
"loss": 0.9444, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"grad_norm": 1.4453125, |
|
"learning_rate": 3.7650956826977353e-06, |
|
"loss": 0.873, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"grad_norm": 1.453125, |
|
"learning_rate": 3.7566449114642136e-06, |
|
"loss": 0.9871, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"grad_norm": 1.4375, |
|
"learning_rate": 3.7481895325177007e-06, |
|
"loss": 0.8154, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"grad_norm": 1.390625, |
|
"learning_rate": 3.7397296311760422e-06, |
|
"loss": 0.9796, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"grad_norm": 1.4140625, |
|
"learning_rate": 3.731265292802713e-06, |
|
"loss": 0.8966, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"grad_norm": 1.40625, |
|
"learning_rate": 3.722796602805963e-06, |
|
"loss": 0.9269, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"grad_norm": 1.375, |
|
"learning_rate": 3.7143236466379477e-06, |
|
"loss": 0.8601, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"grad_norm": 1.4921875, |
|
"learning_rate": 3.705846509793875e-06, |
|
"loss": 0.939, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"grad_norm": 1.359375, |
|
"learning_rate": 3.69736527781113e-06, |
|
"loss": 0.9095, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"grad_norm": 1.53125, |
|
"learning_rate": 3.6888800362684264e-06, |
|
"loss": 1.0163, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"grad_norm": 1.375, |
|
"learning_rate": 3.6803908707849324e-06, |
|
"loss": 0.8745, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"grad_norm": 1.71875, |
|
"learning_rate": 3.6718978670194105e-06, |
|
"loss": 1.003, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"grad_norm": 1.3828125, |
|
"learning_rate": 3.663401110669353e-06, |
|
"loss": 0.9032, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"grad_norm": 1.6484375, |
|
"learning_rate": 3.654900687470116e-06, |
|
"loss": 0.8658, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"grad_norm": 1.3984375, |
|
"learning_rate": 3.646396683194058e-06, |
|
"loss": 0.9152, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"grad_norm": 1.4609375, |
|
"learning_rate": 3.6378891836496702e-06, |
|
"loss": 0.9348, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"eval_loss": 0.9573694467544556, |
|
"eval_runtime": 20.2011, |
|
"eval_samples_per_second": 64.798, |
|
"eval_steps_per_second": 64.798, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"grad_norm": 1.3984375, |
|
"learning_rate": 3.629378274680713e-06, |
|
"loss": 0.9352, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"grad_norm": 1.4921875, |
|
"learning_rate": 3.6208640421653465e-06, |
|
"loss": 0.9221, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"grad_norm": 1.546875, |
|
"learning_rate": 3.612346572015271e-06, |
|
"loss": 0.8442, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"grad_norm": 1.4140625, |
|
"learning_rate": 3.603825950174853e-06, |
|
"loss": 0.9139, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"grad_norm": 1.34375, |
|
"learning_rate": 3.5953022626202615e-06, |
|
"loss": 0.9462, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"grad_norm": 1.375, |
|
"learning_rate": 3.5867755953585975e-06, |
|
"loss": 0.9609, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"grad_norm": 1.46875, |
|
"learning_rate": 3.578246034427033e-06, |
|
"loss": 0.9211, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"grad_norm": 1.359375, |
|
"learning_rate": 3.5697136658919335e-06, |
|
"loss": 0.959, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"grad_norm": 1.3984375, |
|
"learning_rate": 3.5611785758479966e-06, |
|
"loss": 0.8621, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"grad_norm": 1.3828125, |
|
"learning_rate": 3.552640850417381e-06, |
|
"loss": 0.9998, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"grad_norm": 1.4453125, |
|
"learning_rate": 3.544100575748835e-06, |
|
"loss": 0.8606, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"grad_norm": 1.484375, |
|
"learning_rate": 3.535557838016833e-06, |
|
"loss": 1.0495, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"grad_norm": 1.390625, |
|
"learning_rate": 3.527012723420701e-06, |
|
"loss": 0.9242, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"grad_norm": 1.4140625, |
|
"learning_rate": 3.518465318183748e-06, |
|
"loss": 0.9776, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"grad_norm": 1.3984375, |
|
"learning_rate": 3.5099157085523975e-06, |
|
"loss": 0.9708, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"grad_norm": 1.4296875, |
|
"learning_rate": 3.501363980795315e-06, |
|
"loss": 0.9329, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"grad_norm": 1.3671875, |
|
"learning_rate": 3.4928102212025395e-06, |
|
"loss": 1.007, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"grad_norm": 1.5078125, |
|
"learning_rate": 3.484254516084612e-06, |
|
"loss": 0.9819, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"grad_norm": 1.3671875, |
|
"learning_rate": 3.475696951771704e-06, |
|
"loss": 0.8111, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"grad_norm": 1.359375, |
|
"learning_rate": 3.467137614612746e-06, |
|
"loss": 0.9027, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"grad_norm": 1.421875, |
|
"learning_rate": 3.4585765909745595e-06, |
|
"loss": 0.9196, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"grad_norm": 1.4375, |
|
"learning_rate": 3.450013967240982e-06, |
|
"loss": 0.9815, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"grad_norm": 1.296875, |
|
"learning_rate": 3.441449829811996e-06, |
|
"loss": 0.8236, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"grad_norm": 1.359375, |
|
"learning_rate": 3.4328842651028592e-06, |
|
"loss": 0.87, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"grad_norm": 1.4140625, |
|
"learning_rate": 3.4243173595432297e-06, |
|
"loss": 0.9304, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"grad_norm": 1.4375, |
|
"learning_rate": 3.4157491995762946e-06, |
|
"loss": 0.957, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"grad_norm": 1.5234375, |
|
"learning_rate": 3.4071798716579008e-06, |
|
"loss": 0.9195, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"grad_norm": 1.7421875, |
|
"learning_rate": 3.3986094622556796e-06, |
|
"loss": 0.8709, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"grad_norm": 1.3046875, |
|
"learning_rate": 3.3900380578481727e-06, |
|
"loss": 0.9077, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"grad_norm": 1.3671875, |
|
"learning_rate": 3.3814657449239643e-06, |
|
"loss": 1.0009, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"grad_norm": 1.421875, |
|
"learning_rate": 3.372892609980805e-06, |
|
"loss": 0.9224, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"grad_norm": 1.328125, |
|
"learning_rate": 3.3643187395247386e-06, |
|
"loss": 0.894, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"grad_norm": 1.375, |
|
"learning_rate": 3.3557442200692337e-06, |
|
"loss": 0.9728, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"grad_norm": 1.3515625, |
|
"learning_rate": 3.3471691381343042e-06, |
|
"loss": 0.9809, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"grad_norm": 1.4140625, |
|
"learning_rate": 3.3385935802456405e-06, |
|
"loss": 0.9539, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"eval_loss": 0.9559576511383057, |
|
"eval_runtime": 20.2178, |
|
"eval_samples_per_second": 64.745, |
|
"eval_steps_per_second": 64.745, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"grad_norm": 1.4140625, |
|
"learning_rate": 3.330017632933736e-06, |
|
"loss": 0.9418, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"grad_norm": 1.3671875, |
|
"learning_rate": 3.3214413827330156e-06, |
|
"loss": 0.9327, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"grad_norm": 1.4921875, |
|
"learning_rate": 3.3128649161809558e-06, |
|
"loss": 0.8283, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"grad_norm": 1.390625, |
|
"learning_rate": 3.3042883198172203e-06, |
|
"loss": 0.8813, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"grad_norm": 1.390625, |
|
"learning_rate": 3.2957116801827805e-06, |
|
"loss": 0.983, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"grad_norm": 1.40625, |
|
"learning_rate": 3.287135083819045e-06, |
|
"loss": 0.91, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"grad_norm": 1.4140625, |
|
"learning_rate": 3.278558617266985e-06, |
|
"loss": 0.8266, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"grad_norm": 1.3828125, |
|
"learning_rate": 3.2699823670662644e-06, |
|
"loss": 0.9438, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"grad_norm": 1.4140625, |
|
"learning_rate": 3.2614064197543607e-06, |
|
"loss": 0.8858, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"grad_norm": 1.3125, |
|
"learning_rate": 3.2528308618656974e-06, |
|
"loss": 0.8525, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"grad_norm": 1.5234375, |
|
"learning_rate": 3.2442557799307675e-06, |
|
"loss": 0.9663, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"grad_norm": 1.375, |
|
"learning_rate": 3.235681260475262e-06, |
|
"loss": 1.0201, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"grad_norm": 1.40625, |
|
"learning_rate": 3.2271073900191958e-06, |
|
"loss": 0.9675, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"grad_norm": 1.3984375, |
|
"learning_rate": 3.2185342550760365e-06, |
|
"loss": 0.9893, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"grad_norm": 1.7109375, |
|
"learning_rate": 3.2099619421518285e-06, |
|
"loss": 0.9938, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"grad_norm": 1.296875, |
|
"learning_rate": 3.2013905377443207e-06, |
|
"loss": 0.8972, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"grad_norm": 1.5, |
|
"learning_rate": 3.1928201283421e-06, |
|
"loss": 0.8968, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"grad_norm": 1.421875, |
|
"learning_rate": 3.1842508004237066e-06, |
|
"loss": 0.9195, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"grad_norm": 1.4296875, |
|
"learning_rate": 3.175682640456772e-06, |
|
"loss": 0.9322, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"grad_norm": 1.4140625, |
|
"learning_rate": 3.1671157348971415e-06, |
|
"loss": 0.8878, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"grad_norm": 1.4140625, |
|
"learning_rate": 3.1585501701880053e-06, |
|
"loss": 0.8621, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"grad_norm": 1.3828125, |
|
"learning_rate": 3.1499860327590182e-06, |
|
"loss": 0.966, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"grad_norm": 1.3984375, |
|
"learning_rate": 3.1414234090254413e-06, |
|
"loss": 0.8862, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"grad_norm": 1.3515625, |
|
"learning_rate": 3.1328623853872558e-06, |
|
"loss": 0.9531, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"grad_norm": 1.484375, |
|
"learning_rate": 3.1243030482282973e-06, |
|
"loss": 0.9865, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"grad_norm": 1.4609375, |
|
"learning_rate": 3.1157454839153885e-06, |
|
"loss": 0.8452, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"grad_norm": 1.4296875, |
|
"learning_rate": 3.1071897787974613e-06, |
|
"loss": 0.8975, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"grad_norm": 1.4609375, |
|
"learning_rate": 3.0986360192046853e-06, |
|
"loss": 0.8988, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"grad_norm": 1.3984375, |
|
"learning_rate": 3.0900842914476024e-06, |
|
"loss": 0.855, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"grad_norm": 1.53125, |
|
"learning_rate": 3.0815346818162534e-06, |
|
"loss": 0.9749, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"grad_norm": 1.3828125, |
|
"learning_rate": 3.0729872765793e-06, |
|
"loss": 0.9262, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"grad_norm": 1.3125, |
|
"learning_rate": 3.0644421619831683e-06, |
|
"loss": 0.8623, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"grad_norm": 1.375, |
|
"learning_rate": 3.0558994242511667e-06, |
|
"loss": 0.9703, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"grad_norm": 1.3671875, |
|
"learning_rate": 3.047359149582621e-06, |
|
"loss": 0.936, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"grad_norm": 1.484375, |
|
"learning_rate": 3.038821424152004e-06, |
|
"loss": 1.2291, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"eval_loss": 0.954813539981842, |
|
"eval_runtime": 20.1791, |
|
"eval_samples_per_second": 64.869, |
|
"eval_steps_per_second": 64.869, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"grad_norm": 1.4375, |
|
"learning_rate": 3.030286334108068e-06, |
|
"loss": 0.8685, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"grad_norm": 1.4140625, |
|
"learning_rate": 3.021753965572967e-06, |
|
"loss": 0.8731, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"grad_norm": 1.3828125, |
|
"learning_rate": 3.0132244046414032e-06, |
|
"loss": 0.9185, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"grad_norm": 1.3125, |
|
"learning_rate": 3.0046977373797396e-06, |
|
"loss": 0.893, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"grad_norm": 1.3671875, |
|
"learning_rate": 2.9961740498251477e-06, |
|
"loss": 0.8581, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"grad_norm": 1.3984375, |
|
"learning_rate": 2.987653427984729e-06, |
|
"loss": 0.8867, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"grad_norm": 1.375, |
|
"learning_rate": 2.9791359578346543e-06, |
|
"loss": 0.8389, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"grad_norm": 1.4140625, |
|
"learning_rate": 2.9706217253192877e-06, |
|
"loss": 0.8954, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"grad_norm": 1.71875, |
|
"learning_rate": 2.962110816350331e-06, |
|
"loss": 0.922, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"grad_norm": 1.3671875, |
|
"learning_rate": 2.953603316805943e-06, |
|
"loss": 0.8986, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"grad_norm": 1.5234375, |
|
"learning_rate": 2.9450993125298848e-06, |
|
"loss": 0.8654, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"grad_norm": 1.53125, |
|
"learning_rate": 2.936598889330648e-06, |
|
"loss": 0.88, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"grad_norm": 1.4609375, |
|
"learning_rate": 2.9281021329805902e-06, |
|
"loss": 0.8357, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"grad_norm": 1.34375, |
|
"learning_rate": 2.9196091292150675e-06, |
|
"loss": 0.8443, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"grad_norm": 1.484375, |
|
"learning_rate": 2.911119963731574e-06, |
|
"loss": 0.9116, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"grad_norm": 1.4453125, |
|
"learning_rate": 2.902634722188871e-06, |
|
"loss": 0.8414, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"grad_norm": 1.34375, |
|
"learning_rate": 2.894153490206126e-06, |
|
"loss": 1.032, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"grad_norm": 1.3984375, |
|
"learning_rate": 2.885676353362053e-06, |
|
"loss": 1.0082, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"grad_norm": 1.34375, |
|
"learning_rate": 2.8772033971940384e-06, |
|
"loss": 0.8801, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"grad_norm": 1.4140625, |
|
"learning_rate": 2.8687347071972875e-06, |
|
"loss": 0.9127, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"grad_norm": 1.375, |
|
"learning_rate": 2.8602703688239585e-06, |
|
"loss": 0.8387, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"grad_norm": 1.4375, |
|
"learning_rate": 2.8518104674823e-06, |
|
"loss": 0.9205, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"grad_norm": 1.6015625, |
|
"learning_rate": 2.8433550885357863e-06, |
|
"loss": 0.8927, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"grad_norm": 1.4140625, |
|
"learning_rate": 2.8349043173022655e-06, |
|
"loss": 0.8627, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"grad_norm": 1.4140625, |
|
"learning_rate": 2.8264582390530854e-06, |
|
"loss": 0.8549, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"grad_norm": 1.5, |
|
"learning_rate": 2.818016939012243e-06, |
|
"loss": 0.8739, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"grad_norm": 1.6328125, |
|
"learning_rate": 2.809580502355522e-06, |
|
"loss": 1.0039, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"grad_norm": 1.5390625, |
|
"learning_rate": 2.8011490142096316e-06, |
|
"loss": 0.9061, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"grad_norm": 1.4453125, |
|
"learning_rate": 2.79272255965135e-06, |
|
"loss": 0.9938, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"grad_norm": 1.546875, |
|
"learning_rate": 2.7843012237066627e-06, |
|
"loss": 1.0029, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"grad_norm": 1.40625, |
|
"learning_rate": 2.775885091349909e-06, |
|
"loss": 0.9585, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"grad_norm": 1.359375, |
|
"learning_rate": 2.767474247502919e-06, |
|
"loss": 0.8371, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"grad_norm": 1.4375, |
|
"learning_rate": 2.7590687770341626e-06, |
|
"loss": 0.8878, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"grad_norm": 1.484375, |
|
"learning_rate": 2.7506687647578895e-06, |
|
"loss": 0.9514, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"grad_norm": 1.4140625, |
|
"learning_rate": 2.7422742954332735e-06, |
|
"loss": 0.9397, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"eval_loss": 0.9540159106254578, |
|
"eval_runtime": 20.3197, |
|
"eval_samples_per_second": 64.42, |
|
"eval_steps_per_second": 64.42, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"grad_norm": 1.4609375, |
|
"learning_rate": 2.7338854537635585e-06, |
|
"loss": 0.9597, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"grad_norm": 1.4140625, |
|
"learning_rate": 2.725502324395205e-06, |
|
"loss": 0.9125, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"grad_norm": 1.390625, |
|
"learning_rate": 2.7171249919170304e-06, |
|
"loss": 0.9403, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"grad_norm": 1.46875, |
|
"learning_rate": 2.7087535408593643e-06, |
|
"loss": 0.9128, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"grad_norm": 1.375, |
|
"learning_rate": 2.7003880556931855e-06, |
|
"loss": 0.8996, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"grad_norm": 1.4453125, |
|
"learning_rate": 2.6920286208292777e-06, |
|
"loss": 0.9428, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"grad_norm": 1.484375, |
|
"learning_rate": 2.683675320617376e-06, |
|
"loss": 0.8807, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"grad_norm": 1.4765625, |
|
"learning_rate": 2.6753282393453134e-06, |
|
"loss": 1.0051, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"grad_norm": 1.4453125, |
|
"learning_rate": 2.6669874612381685e-06, |
|
"loss": 0.926, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"grad_norm": 1.375, |
|
"learning_rate": 2.6586530704574235e-06, |
|
"loss": 0.9606, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"grad_norm": 1.4375, |
|
"learning_rate": 2.650325151100108e-06, |
|
"loss": 0.8918, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"grad_norm": 1.3515625, |
|
"learning_rate": 2.642003787197952e-06, |
|
"loss": 0.9572, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"grad_norm": 1.328125, |
|
"learning_rate": 2.6336890627165397e-06, |
|
"loss": 0.8793, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"grad_norm": 1.3984375, |
|
"learning_rate": 2.6253810615544603e-06, |
|
"loss": 0.8986, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"grad_norm": 1.4609375, |
|
"learning_rate": 2.6170798675424617e-06, |
|
"loss": 0.904, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"grad_norm": 1.40625, |
|
"learning_rate": 2.608785564442609e-06, |
|
"loss": 0.9042, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"grad_norm": 1.375, |
|
"learning_rate": 2.600498235947431e-06, |
|
"loss": 0.9596, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"grad_norm": 1.4921875, |
|
"learning_rate": 2.5922179656790796e-06, |
|
"loss": 0.8224, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"grad_norm": 1.5390625, |
|
"learning_rate": 2.5839448371884926e-06, |
|
"loss": 0.9048, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"grad_norm": 1.4453125, |
|
"learning_rate": 2.5756789339545387e-06, |
|
"loss": 0.9585, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"grad_norm": 1.5546875, |
|
"learning_rate": 2.567420339383184e-06, |
|
"loss": 0.9117, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"grad_norm": 1.4765625, |
|
"learning_rate": 2.5591691368066475e-06, |
|
"loss": 0.9752, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"grad_norm": 1.4375, |
|
"learning_rate": 2.550925409482559e-06, |
|
"loss": 0.9207, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"grad_norm": 1.3828125, |
|
"learning_rate": 2.542689240593119e-06, |
|
"loss": 0.8864, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"grad_norm": 1.4375, |
|
"learning_rate": 2.5344607132442667e-06, |
|
"loss": 0.8982, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"grad_norm": 1.5625, |
|
"learning_rate": 2.5262399104648286e-06, |
|
"loss": 0.9327, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"grad_norm": 1.5, |
|
"learning_rate": 2.51802691520569e-06, |
|
"loss": 0.8947, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"grad_norm": 1.3671875, |
|
"learning_rate": 2.509821810338957e-06, |
|
"loss": 0.9742, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"grad_norm": 1.4140625, |
|
"learning_rate": 2.5016246786571174e-06, |
|
"loss": 0.8728, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"grad_norm": 1.375, |
|
"learning_rate": 2.493435602872206e-06, |
|
"loss": 0.9128, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"grad_norm": 1.5390625, |
|
"learning_rate": 2.485254665614971e-06, |
|
"loss": 0.9039, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"grad_norm": 1.3359375, |
|
"learning_rate": 2.4770819494340417e-06, |
|
"loss": 0.8575, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"grad_norm": 1.3828125, |
|
"learning_rate": 2.4689175367950906e-06, |
|
"loss": 1.0352, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"grad_norm": 1.390625, |
|
"learning_rate": 2.4607615100800083e-06, |
|
"loss": 0.9921, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"grad_norm": 1.3515625, |
|
"learning_rate": 2.452613951586065e-06, |
|
"loss": 0.985, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"eval_loss": 0.9532026052474976, |
|
"eval_runtime": 20.1343, |
|
"eval_samples_per_second": 65.014, |
|
"eval_steps_per_second": 65.014, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"grad_norm": 1.5, |
|
"learning_rate": 2.4444749435250837e-06, |
|
"loss": 0.8722, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"grad_norm": 1.5234375, |
|
"learning_rate": 2.436344568022614e-06, |
|
"loss": 0.8208, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"grad_norm": 1.4453125, |
|
"learning_rate": 2.4282229071170965e-06, |
|
"loss": 0.9407, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"grad_norm": 1.578125, |
|
"learning_rate": 2.4201100427590365e-06, |
|
"loss": 0.8911, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"grad_norm": 1.34375, |
|
"learning_rate": 2.4120060568101856e-06, |
|
"loss": 0.8809, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"grad_norm": 1.328125, |
|
"learning_rate": 2.403911031042702e-06, |
|
"loss": 0.8505, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"grad_norm": 1.375, |
|
"learning_rate": 2.3958250471383373e-06, |
|
"loss": 0.9088, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"grad_norm": 1.53125, |
|
"learning_rate": 2.387748186687606e-06, |
|
"loss": 0.826, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"grad_norm": 1.4140625, |
|
"learning_rate": 2.3796805311889613e-06, |
|
"loss": 0.8774, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"grad_norm": 1.390625, |
|
"learning_rate": 2.3716221620479772e-06, |
|
"loss": 0.8676, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"grad_norm": 1.3984375, |
|
"learning_rate": 2.3635731605765264e-06, |
|
"loss": 0.9997, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"grad_norm": 1.5859375, |
|
"learning_rate": 2.3555336079919567e-06, |
|
"loss": 0.8903, |
|
"step": 677 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"grad_norm": 1.3359375, |
|
"learning_rate": 2.3475035854162707e-06, |
|
"loss": 0.8055, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"grad_norm": 1.5390625, |
|
"learning_rate": 2.3394831738753154e-06, |
|
"loss": 0.8353, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"grad_norm": 1.359375, |
|
"learning_rate": 2.3314724542979543e-06, |
|
"loss": 0.9792, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"grad_norm": 1.4453125, |
|
"learning_rate": 2.323471507515257e-06, |
|
"loss": 0.8619, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"grad_norm": 1.4296875, |
|
"learning_rate": 2.3154804142596834e-06, |
|
"loss": 0.8911, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"grad_norm": 1.53125, |
|
"learning_rate": 2.307499255164266e-06, |
|
"loss": 0.9566, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"grad_norm": 1.3515625, |
|
"learning_rate": 2.2995281107617967e-06, |
|
"loss": 0.8491, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"grad_norm": 1.59375, |
|
"learning_rate": 2.291567061484021e-06, |
|
"loss": 0.9986, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"grad_norm": 1.4140625, |
|
"learning_rate": 2.2836161876608136e-06, |
|
"loss": 0.9711, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"grad_norm": 1.3203125, |
|
"learning_rate": 2.27567556951938e-06, |
|
"loss": 0.9344, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"grad_norm": 1.390625, |
|
"learning_rate": 2.267745287183442e-06, |
|
"loss": 0.9524, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"grad_norm": 1.3828125, |
|
"learning_rate": 2.2598254206724275e-06, |
|
"loss": 0.8871, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"grad_norm": 1.3984375, |
|
"learning_rate": 2.251916049900664e-06, |
|
"loss": 1.095, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"grad_norm": 1.5390625, |
|
"learning_rate": 2.2440172546765796e-06, |
|
"loss": 0.8317, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"grad_norm": 1.4140625, |
|
"learning_rate": 2.2361291147018816e-06, |
|
"loss": 0.8479, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"grad_norm": 1.4375, |
|
"learning_rate": 2.2282517095707697e-06, |
|
"loss": 0.9132, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"grad_norm": 1.4921875, |
|
"learning_rate": 2.2203851187691216e-06, |
|
"loss": 0.8728, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"grad_norm": 1.5, |
|
"learning_rate": 2.2125294216736956e-06, |
|
"loss": 0.927, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"grad_norm": 1.5859375, |
|
"learning_rate": 2.2046846975513275e-06, |
|
"loss": 0.8764, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"grad_norm": 1.421875, |
|
"learning_rate": 2.1968510255581348e-06, |
|
"loss": 0.9311, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"grad_norm": 1.5234375, |
|
"learning_rate": 2.1890284847387107e-06, |
|
"loss": 0.9595, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"grad_norm": 1.4609375, |
|
"learning_rate": 2.1812171540253324e-06, |
|
"loss": 0.9611, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"grad_norm": 1.4453125, |
|
"learning_rate": 2.1734171122371646e-06, |
|
"loss": 0.9543, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"eval_loss": 0.9524132609367371, |
|
"eval_runtime": 20.1667, |
|
"eval_samples_per_second": 64.909, |
|
"eval_steps_per_second": 64.909, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"grad_norm": 1.4296875, |
|
"learning_rate": 2.165628438079463e-06, |
|
"loss": 0.9835, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"grad_norm": 1.4921875, |
|
"learning_rate": 2.1578512101427746e-06, |
|
"loss": 0.9356, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"grad_norm": 1.4453125, |
|
"learning_rate": 2.1500855069021567e-06, |
|
"loss": 0.906, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"grad_norm": 1.578125, |
|
"learning_rate": 2.1423314067163747e-06, |
|
"loss": 0.9937, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"grad_norm": 1.4453125, |
|
"learning_rate": 2.1345889878271152e-06, |
|
"loss": 0.9486, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"grad_norm": 1.453125, |
|
"learning_rate": 2.126858328358197e-06, |
|
"loss": 0.9641, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"grad_norm": 1.5, |
|
"learning_rate": 2.11913950631478e-06, |
|
"loss": 0.866, |
|
"step": 707 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"grad_norm": 1.5078125, |
|
"learning_rate": 2.111432599582582e-06, |
|
"loss": 0.9687, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"grad_norm": 1.5546875, |
|
"learning_rate": 2.1037376859270937e-06, |
|
"loss": 0.9431, |
|
"step": 709 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"grad_norm": 1.3671875, |
|
"learning_rate": 2.0960548429927853e-06, |
|
"loss": 0.8683, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"grad_norm": 1.40625, |
|
"learning_rate": 2.088384148302332e-06, |
|
"loss": 0.895, |
|
"step": 711 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"grad_norm": 1.5234375, |
|
"learning_rate": 2.0807256792558305e-06, |
|
"loss": 0.919, |
|
"step": 712 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 1059, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 3, |
|
"save_steps": 89, |
|
"total_flos": 2.1011521993955082e+18, |
|
"train_batch_size": 1, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|