{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 5.913669064748202,
  "eval_steps": 500,
  "global_step": 204,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.03,
      "learning_rate": 7.142857142857143e-06,
      "loss": 3.7385,
      "step": 1
    },
    {
      "epoch": 0.06,
      "learning_rate": 1.4285714285714285e-05,
      "loss": 2.9618,
      "step": 2
    },
    {
      "epoch": 0.09,
      "learning_rate": 2.1428571428571428e-05,
      "loss": 3.7649,
      "step": 3
    },
    {
      "epoch": 0.12,
      "learning_rate": 2.857142857142857e-05,
      "loss": 2.5575,
      "step": 4
    },
    {
      "epoch": 0.14,
      "learning_rate": 3.571428571428572e-05,
      "loss": 3.2366,
      "step": 5
    },
    {
      "epoch": 0.17,
      "learning_rate": 4.2857142857142856e-05,
      "loss": 3.7946,
      "step": 6
    },
    {
      "epoch": 0.2,
      "learning_rate": 5e-05,
      "loss": 3.4406,
      "step": 7
    },
    {
      "epoch": 0.23,
      "learning_rate": 4.999682116415026e-05,
      "loss": 3.254,
      "step": 8
    },
    {
      "epoch": 0.26,
      "learning_rate": 4.998728546500082e-05,
      "loss": 3.2319,
      "step": 9
    },
    {
      "epoch": 0.29,
      "learning_rate": 4.9971395327545466e-05,
      "loss": 3.8141,
      "step": 10
    },
    {
      "epoch": 0.32,
      "learning_rate": 4.9949154792755286e-05,
      "loss": 2.9063,
      "step": 11
    },
    {
      "epoch": 0.35,
      "learning_rate": 4.992056951655103e-05,
      "loss": 2.4719,
      "step": 12
    },
    {
      "epoch": 0.37,
      "learning_rate": 4.988564676836475e-05,
      "loss": 2.3808,
      "step": 13
    },
    {
      "epoch": 0.4,
      "learning_rate": 4.984439542929117e-05,
      "loss": 2.312,
      "step": 14
    },
    {
      "epoch": 0.43,
      "learning_rate": 4.979682598982912e-05,
      "loss": 2.8406,
      "step": 15
    },
    {
      "epoch": 0.46,
      "learning_rate": 4.974295054721377e-05,
      "loss": 3.0506,
      "step": 16
    },
    {
      "epoch": 0.49,
      "learning_rate": 4.9682782802340184e-05,
      "loss": 2.7309,
      "step": 17
    },
    {
      "epoch": 0.52,
      "learning_rate": 4.9616338056279124e-05,
      "loss": 2.8111,
      "step": 18
    },
    {
      "epoch": 0.55,
      "learning_rate": 4.9543633206385834e-05,
      "loss": 3.6213,
      "step": 19
    },
    {
      "epoch": 0.58,
      "learning_rate": 4.9464686742003006e-05,
      "loss": 3.5676,
      "step": 20
    },
    {
      "epoch": 0.6,
      "learning_rate": 4.937951873975871e-05,
      "loss": 2.8303,
      "step": 21
    },
    {
      "epoch": 0.63,
      "learning_rate": 4.928815085846087e-05,
      "loss": 3.3833,
      "step": 22
    },
    {
      "epoch": 0.66,
      "learning_rate": 4.9190606333589194e-05,
      "loss": 2.9999,
      "step": 23
    },
    {
      "epoch": 0.69,
      "learning_rate": 4.9086909971386305e-05,
      "loss": 3.1962,
      "step": 24
    },
    {
      "epoch": 0.72,
      "learning_rate": 4.8977088142549285e-05,
      "loss": 2.6578,
      "step": 25
    },
    {
      "epoch": 0.75,
      "learning_rate": 4.886116877552347e-05,
      "loss": 3.2361,
      "step": 26
    },
    {
      "epoch": 0.78,
      "learning_rate": 4.873918134940002e-05,
      "loss": 4.1256,
      "step": 27
    },
    {
      "epoch": 0.81,
      "learning_rate": 4.8611156886419206e-05,
      "loss": 2.683,
      "step": 28
    },
    {
      "epoch": 0.83,
      "learning_rate": 4.847712794408124e-05,
      "loss": 3.4159,
      "step": 29
    },
    {
      "epoch": 0.86,
      "learning_rate": 4.833712860686666e-05,
      "loss": 3.918,
      "step": 30
    },
    {
      "epoch": 0.89,
      "learning_rate": 4.8191194477568435e-05,
      "loss": 3.1473,
      "step": 31
    },
    {
      "epoch": 0.92,
      "learning_rate": 4.803936266823792e-05,
      "loss": 3.9101,
      "step": 32
    },
    {
      "epoch": 0.95,
      "learning_rate": 4.7881671790747e-05,
      "loss": 3.0017,
      "step": 33
    },
    {
      "epoch": 0.98,
      "learning_rate": 4.7718161946968835e-05,
      "loss": 2.9023,
      "step": 34
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.754887471857969e-05,
      "loss": 2.7649,
      "step": 35
    },
    {
      "epoch": 1.04,
      "learning_rate": 4.7373853156484406e-05,
      "loss": 2.9823,
      "step": 36
    },
    {
      "epoch": 1.06,
      "learning_rate": 4.7193141769868265e-05,
      "loss": 2.9701,
      "step": 37
    },
    {
      "epoch": 1.09,
      "learning_rate": 4.7006786514877997e-05,
      "loss": 2.5226,
      "step": 38
    },
    {
      "epoch": 1.12,
      "learning_rate": 4.6814834782934844e-05,
      "loss": 2.3836,
      "step": 39
    },
    {
      "epoch": 1.15,
      "learning_rate": 4.6617335388682556e-05,
      "loss": 3.2099,
      "step": 40
    },
    {
      "epoch": 1.18,
      "learning_rate": 4.641433855757351e-05,
      "loss": 3.3312,
      "step": 41
    },
    {
      "epoch": 1.21,
      "learning_rate": 4.620589591309603e-05,
      "loss": 2.9884,
      "step": 42
    },
    {
      "epoch": 1.24,
      "learning_rate": 4.59920604636462e-05,
      "loss": 2.4634,
      "step": 43
    },
    {
      "epoch": 1.27,
      "learning_rate": 4.577288658904741e-05,
      "loss": 2.8223,
      "step": 44
    },
    {
      "epoch": 1.29,
      "learning_rate": 4.554843002672129e-05,
      "loss": 4.1285,
      "step": 45
    },
    {
      "epoch": 1.32,
      "learning_rate": 4.531874785751317e-05,
      "loss": 3.5577,
      "step": 46
    },
    {
      "epoch": 1.35,
      "learning_rate": 4.5083898491176136e-05,
      "loss": 3.3524,
      "step": 47
    },
    {
      "epoch": 1.38,
      "learning_rate": 4.4843941651517e-05,
      "loss": 2.518,
      "step": 48
    },
    {
      "epoch": 1.41,
      "learning_rate": 4.4598938361208095e-05,
      "loss": 2.948,
      "step": 49
    },
    {
      "epoch": 1.44,
      "learning_rate": 4.434895092626883e-05,
      "loss": 2.4119,
      "step": 50
    },
    {
      "epoch": 1.49,
      "learning_rate": 4.409404292022081e-05,
      "loss": 1.5223,
      "step": 51
    },
    {
      "epoch": 1.52,
      "learning_rate": 4.38342791679207e-05,
      "loss": 1.6446,
      "step": 52
    },
    {
      "epoch": 1.55,
      "learning_rate": 4.356972572907473e-05,
      "loss": 2.1176,
      "step": 53
    },
    {
      "epoch": 1.58,
      "learning_rate": 4.3300449881439375e-05,
      "loss": 2.3363,
      "step": 54
    },
    {
      "epoch": 1.6,
      "learning_rate": 4.302652010371205e-05,
      "loss": 1.4723,
      "step": 55
    },
    {
      "epoch": 1.63,
      "learning_rate": 4.274800605811658e-05,
      "loss": 1.7024,
      "step": 56
    },
    {
      "epoch": 1.66,
      "learning_rate": 4.246497857268759e-05,
      "loss": 1.7637,
      "step": 57
    },
    {
      "epoch": 1.69,
      "learning_rate": 4.2177509623258456e-05,
      "loss": 1.6597,
      "step": 58
    },
    {
      "epoch": 1.72,
      "learning_rate": 4.1885672315157346e-05,
      "loss": 1.3964,
      "step": 59
    },
    {
      "epoch": 1.75,
      "learning_rate": 4.1589540864616025e-05,
      "loss": 1.7824,
      "step": 60
    },
    {
      "epoch": 1.78,
      "learning_rate": 4.128919057989622e-05,
      "loss": 2.6526,
      "step": 61
    },
    {
      "epoch": 1.81,
      "learning_rate": 4.098469784213812e-05,
      "loss": 1.5922,
      "step": 62
    },
    {
      "epoch": 1.83,
      "learning_rate": 4.0676140085936186e-05,
      "loss": 1.863,
      "step": 63
    },
    {
      "epoch": 1.86,
      "learning_rate": 4.0363595779647e-05,
      "loss": 1.9858,
      "step": 64
    },
    {
      "epoch": 1.89,
      "learning_rate": 4.0047144405434175e-05,
      "loss": 1.7664,
      "step": 65
    },
    {
      "epoch": 1.92,
      "learning_rate": 3.972686643905558e-05,
      "loss": 2.3141,
      "step": 66
    },
    {
      "epoch": 1.95,
      "learning_rate": 3.940284332939771e-05,
      "loss": 1.2004,
      "step": 67
    },
    {
      "epoch": 1.98,
      "learning_rate": 3.9075157477762744e-05,
      "loss": 1.5104,
      "step": 68
    },
    {
      "epoch": 2.01,
      "learning_rate": 3.874389221691329e-05,
      "loss": 1.6063,
      "step": 69
    },
    {
      "epoch": 2.04,
      "learning_rate": 3.84091317898803e-05,
      "loss": 1.3248,
      "step": 70
    },
    {
      "epoch": 2.06,
      "learning_rate": 3.8070961328539525e-05,
      "loss": 1.5846,
      "step": 71
    },
    {
      "epoch": 2.09,
      "learning_rate": 3.772946683196179e-05,
      "loss": 1.1226,
      "step": 72
    },
    {
      "epoch": 2.12,
      "learning_rate": 3.738473514454297e-05,
      "loss": 1.3365,
      "step": 73
    },
    {
      "epoch": 2.15,
      "learning_rate": 3.7036853933918784e-05,
      "loss": 1.863,
      "step": 74
    },
    {
      "epoch": 2.18,
      "learning_rate": 3.668591166867035e-05,
      "loss": 1.3578,
      "step": 75
    },
    {
      "epoch": 2.21,
      "learning_rate": 3.633199759582596e-05,
      "loss": 1.3557,
      "step": 76
    },
    {
      "epoch": 2.24,
      "learning_rate": 3.597520171816503e-05,
      "loss": 1.414,
      "step": 77
    },
    {
      "epoch": 2.27,
      "learning_rate": 3.5615614771329706e-05,
      "loss": 1.5178,
      "step": 78
    },
    {
      "epoch": 2.29,
      "learning_rate": 3.5253328200750224e-05,
      "loss": 2.2556,
      "step": 79
    },
    {
      "epoch": 2.32,
      "learning_rate": 3.488843413838963e-05,
      "loss": 1.9742,
      "step": 80
    },
    {
      "epoch": 2.35,
      "learning_rate": 3.452102537931408e-05,
      "loss": 2.1719,
      "step": 81
    },
    {
      "epoch": 2.38,
      "learning_rate": 3.4151195358094365e-05,
      "loss": 1.1524,
      "step": 82
    },
    {
      "epoch": 2.41,
      "learning_rate": 3.377903812504487e-05,
      "loss": 1.7081,
      "step": 83
    },
    {
      "epoch": 2.44,
      "learning_rate": 3.340464832230592e-05,
      "loss": 1.2862,
      "step": 84
    },
    {
      "epoch": 2.47,
      "learning_rate": 3.3028121159775656e-05,
      "loss": 3.1442,
      "step": 85
    },
    {
      "epoch": 2.5,
      "learning_rate": 3.2649552390897494e-05,
      "loss": 3.1724,
      "step": 86
    },
    {
      "epoch": 2.53,
      "learning_rate": 3.226903828830935e-05,
      "loss": 2.821,
      "step": 87
    },
    {
      "epoch": 2.55,
      "learning_rate": 3.1886675619360885e-05,
      "loss": 3.2655,
      "step": 88
    },
    {
      "epoch": 2.58,
      "learning_rate": 3.1502561621504874e-05,
      "loss": 3.2369,
      "step": 89
    },
    {
      "epoch": 2.61,
      "learning_rate": 3.111679397756906e-05,
      "loss": 2.6636,
      "step": 90
    },
    {
      "epoch": 2.64,
      "learning_rate": 3.072947079091472e-05,
      "loss": 2.289,
      "step": 91
    },
    {
      "epoch": 2.67,
      "learning_rate": 3.034069056048837e-05,
      "loss": 2.9397,
      "step": 92
    },
    {
      "epoch": 2.7,
      "learning_rate": 2.9950552155772743e-05,
      "loss": 3.18,
      "step": 93
    },
    {
      "epoch": 2.73,
      "learning_rate": 2.95591547916436e-05,
      "loss": 2.362,
      "step": 94
    },
    {
      "epoch": 2.76,
      "learning_rate": 2.9166598003138766e-05,
      "loss": 3.0609,
      "step": 95
    },
    {
      "epoch": 2.78,
      "learning_rate": 2.8772981620145623e-05,
      "loss": 3.0864,
      "step": 96
    },
    {
      "epoch": 2.81,
      "learning_rate": 2.8378405742013713e-05,
      "loss": 2.7795,
      "step": 97
    },
    {
      "epoch": 2.84,
      "learning_rate": 2.7982970712098794e-05,
      "loss": 3.1472,
      "step": 98
    },
    {
      "epoch": 2.87,
      "learning_rate": 2.7586777092244804e-05,
      "loss": 3.9248,
      "step": 99
    },
    {
      "epoch": 2.9,
      "learning_rate": 2.7189925637210323e-05,
      "loss": 3.9677,
      "step": 100
    },
    {
      "epoch": 2.95,
      "learning_rate": 2.6792517269045943e-05,
      "loss": 1.1242,
      "step": 101
    },
    {
      "epoch": 2.98,
      "learning_rate": 2.6394653051429064e-05,
      "loss": 1.2223,
      "step": 102
    },
    {
      "epoch": 3.01,
      "learning_rate": 2.5996434163962763e-05,
      "loss": 1.2138,
      "step": 103
    },
    {
      "epoch": 3.04,
      "learning_rate": 2.5597961876445077e-05,
      "loss": 0.9312,
      "step": 104
    },
    {
      "epoch": 3.06,
      "learning_rate": 2.5199337523115418e-05,
      "loss": 1.0011,
      "step": 105
    },
    {
      "epoch": 3.09,
      "learning_rate": 2.480066247688459e-05,
      "loss": 0.6636,
      "step": 106
    },
    {
      "epoch": 3.12,
      "learning_rate": 2.4402038123554933e-05,
      "loss": 0.6459,
      "step": 107
    },
    {
      "epoch": 3.15,
      "learning_rate": 2.4003565836037246e-05,
      "loss": 1.0171,
      "step": 108
    },
    {
      "epoch": 3.18,
      "learning_rate": 2.360534694857094e-05,
      "loss": 0.6442,
      "step": 109
    },
    {
      "epoch": 3.21,
      "learning_rate": 2.3207482730954063e-05,
      "loss": 0.6365,
      "step": 110
    },
    {
      "epoch": 3.24,
      "learning_rate": 2.2810074362789676e-05,
      "loss": 0.7385,
      "step": 111
    },
    {
      "epoch": 3.27,
      "learning_rate": 2.2413222907755195e-05,
      "loss": 0.6871,
      "step": 112
    },
    {
      "epoch": 3.29,
      "learning_rate": 2.2017029287901212e-05,
      "loss": 0.9302,
      "step": 113
    },
    {
      "epoch": 3.32,
      "learning_rate": 2.162159425798629e-05,
      "loss": 0.8288,
      "step": 114
    },
    {
      "epoch": 3.35,
      "learning_rate": 2.1227018379854383e-05,
      "loss": 0.8842,
      "step": 115
    },
    {
      "epoch": 3.38,
      "learning_rate": 2.083340199686124e-05,
      "loss": 0.431,
      "step": 116
    },
    {
      "epoch": 3.41,
      "learning_rate": 2.0440845208356402e-05,
      "loss": 0.8126,
      "step": 117
    },
    {
      "epoch": 3.44,
      "learning_rate": 2.0049447844227266e-05,
      "loss": 0.5205,
      "step": 118
    },
    {
      "epoch": 3.47,
      "learning_rate": 1.9659309439511628e-05,
      "loss": 0.8926,
      "step": 119
    },
    {
      "epoch": 3.5,
      "learning_rate": 1.927052920908528e-05,
      "loss": 1.1858,
      "step": 120
    },
    {
      "epoch": 3.53,
      "learning_rate": 1.8883206022430956e-05,
      "loss": 1.3177,
      "step": 121
    },
    {
      "epoch": 3.55,
      "learning_rate": 1.849743837849513e-05,
      "loss": 1.3189,
      "step": 122
    },
    {
      "epoch": 3.58,
      "learning_rate": 1.8113324380639117e-05,
      "loss": 1.7173,
      "step": 123
    },
    {
      "epoch": 3.61,
      "learning_rate": 1.7730961711690655e-05,
      "loss": 1.2899,
      "step": 124
    },
    {
      "epoch": 3.64,
      "learning_rate": 1.735044760910251e-05,
      "loss": 1.3921,
      "step": 125
    },
    {
      "epoch": 3.67,
      "learning_rate": 1.6971878840224346e-05,
      "loss": 1.913,
      "step": 126
    },
    {
      "epoch": 3.7,
      "learning_rate": 1.6595351677694083e-05,
      "loss": 2.1296,
      "step": 127
    },
    {
      "epoch": 3.73,
      "learning_rate": 1.6220961874955134e-05,
      "loss": 1.4972,
      "step": 128
    },
    {
      "epoch": 3.76,
      "learning_rate": 1.5848804641905634e-05,
      "loss": 1.9327,
      "step": 129
    },
    {
      "epoch": 3.78,
      "learning_rate": 1.547897462068592e-05,
      "loss": 1.7836,
      "step": 130
    },
    {
      "epoch": 3.81,
      "learning_rate": 1.5111565861610378e-05,
      "loss": 1.4168,
      "step": 131
    },
    {
      "epoch": 3.84,
      "learning_rate": 1.4746671799249784e-05,
      "loss": 1.8149,
      "step": 132
    },
    {
      "epoch": 3.87,
      "learning_rate": 1.4384385228670288e-05,
      "loss": 2.3297,
      "step": 133
    },
    {
      "epoch": 3.9,
      "learning_rate": 1.4024798281834966e-05,
      "loss": 2.8803,
      "step": 134
    },
    {
      "epoch": 3.93,
      "learning_rate": 1.3668002404174047e-05,
      "loss": 4.1083,
      "step": 135
    },
    {
      "epoch": 3.96,
      "learning_rate": 1.331408833132966e-05,
      "loss": 3.8829,
      "step": 136
    },
    {
      "epoch": 3.99,
      "learning_rate": 1.2963146066081217e-05,
      "loss": 3.0687,
      "step": 137
    },
    {
      "epoch": 4.01,
      "learning_rate": 1.2615264855457038e-05,
      "loss": 3.2044,
      "step": 138
    },
    {
      "epoch": 4.04,
      "learning_rate": 1.2270533168038217e-05,
      "loss": 2.6155,
      "step": 139
    },
    {
      "epoch": 4.07,
      "learning_rate": 1.1929038671460486e-05,
      "loss": 3.357,
      "step": 140
    },
    {
      "epoch": 4.1,
      "learning_rate": 1.1590868210119692e-05,
      "loss": 3.2517,
      "step": 141
    },
    {
      "epoch": 4.13,
      "learning_rate": 1.125610778308672e-05,
      "loss": 3.5836,
      "step": 142
    },
    {
      "epoch": 4.16,
      "learning_rate": 1.0924842522237267e-05,
      "loss": 3.9635,
      "step": 143
    },
    {
      "epoch": 4.19,
      "learning_rate": 1.0597156670602299e-05,
      "loss": 3.369,
      "step": 144
    },
    {
      "epoch": 4.22,
      "learning_rate": 1.027313356094443e-05,
      "loss": 3.3562,
      "step": 145
    },
    {
      "epoch": 4.24,
      "learning_rate": 9.95285559456583e-06,
      "loss": 2.6965,
      "step": 146
    },
    {
      "epoch": 4.27,
      "learning_rate": 9.636404220353013e-06,
      "loss": 3.0411,
      "step": 147
    },
    {
      "epoch": 4.3,
      "learning_rate": 9.323859914063814e-06,
      "loss": 3.3667,
      "step": 148
    },
    {
      "epoch": 4.33,
      "learning_rate": 9.015302157861883e-06,
      "loss": 2.4606,
      "step": 149
    },
    {
      "epoch": 4.36,
      "learning_rate": 8.710809420103789e-06,
      "loss": 3.3676,
      "step": 150
    },
    {
      "epoch": 4.39,
      "learning_rate": 8.41045913538398e-06,
      "loss": 3.3801,
      "step": 151
    },
    {
      "epoch": 4.42,
      "learning_rate": 8.11432768484267e-06,
      "loss": 3.2172,
      "step": 152
    },
    {
      "epoch": 4.45,
      "learning_rate": 7.822490376741554e-06,
      "loss": 2.8255,
      "step": 153
    },
    {
      "epoch": 4.47,
      "learning_rate": 7.535021427312417e-06,
      "loss": 2.1001,
      "step": 154
    },
    {
      "epoch": 4.5,
      "learning_rate": 7.251993941883428e-06,
      "loss": 3.0872,
      "step": 155
    },
    {
      "epoch": 4.53,
      "learning_rate": 6.9734798962879575e-06,
      "loss": 3.0398,
      "step": 156
    },
    {
      "epoch": 4.56,
      "learning_rate": 6.699550118560632e-06,
      "loss": 2.4247,
      "step": 157
    },
    {
      "epoch": 4.59,
      "learning_rate": 6.430274270925271e-06,
      "loss": 3.318,
      "step": 158
    },
    {
      "epoch": 4.62,
      "learning_rate": 6.1657208320793054e-06,
      "loss": 2.9561,
      "step": 159
    },
    {
      "epoch": 4.65,
      "learning_rate": 5.905957079779187e-06,
      "loss": 4.0259,
      "step": 160
    },
    {
      "epoch": 4.68,
      "learning_rate": 5.6510490737311735e-06,
      "loss": 3.2479,
      "step": 161
    },
    {
      "epoch": 4.71,
      "learning_rate": 5.4010616387919095e-06,
      "loss": 2.8332,
      "step": 162
    },
    {
      "epoch": 4.73,
      "learning_rate": 5.156058348483006e-06,
      "loss": 3.157,
      "step": 163
    },
    {
      "epoch": 4.76,
      "learning_rate": 4.916101508823873e-06,
      "loss": 2.7395,
      "step": 164
    },
    {
      "epoch": 4.79,
      "learning_rate": 4.681252142486841e-06,
      "loss": 2.4334,
      "step": 165
    },
    {
      "epoch": 4.82,
      "learning_rate": 4.451569973278719e-06,
      "loss": 2.7141,
      "step": 166
    },
    {
      "epoch": 4.85,
      "learning_rate": 4.227113410952585e-06,
      "loss": 3.8611,
      "step": 167
    },
    {
      "epoch": 4.88,
      "learning_rate": 4.007939536353805e-06,
      "loss": 2.5086,
      "step": 168
    },
    {
      "epoch": 4.91,
      "learning_rate": 3.7941040869039714e-06,
      "loss": 3.7843,
      "step": 169
    },
    {
      "epoch": 4.94,
      "learning_rate": 3.585661442426494e-06,
      "loss": 3.5859,
      "step": 170
    },
    {
      "epoch": 4.96,
      "learning_rate": 3.382664611317446e-06,
      "loss": 4.9826,
      "step": 171
    },
    {
      "epoch": 4.99,
      "learning_rate": 3.1851652170651584e-06,
      "loss": 2.6905,
      "step": 172
    },
    {
      "epoch": 5.02,
      "learning_rate": 2.9932134851220038e-06,
      "loss": 3.2184,
      "step": 173
    },
    {
      "epoch": 5.05,
      "learning_rate": 2.8068582301317425e-06,
      "loss": 3.0139,
      "step": 174
    },
    {
      "epoch": 5.08,
      "learning_rate": 2.6261468435155978e-06,
      "loss": 2.9526,
      "step": 175
    },
    {
      "epoch": 5.11,
      "learning_rate": 2.4511252814203107e-06,
      "loss": 2.4834,
      "step": 176
    },
    {
      "epoch": 5.14,
      "learning_rate": 2.2818380530311655e-06,
      "loss": 4.3975,
      "step": 177
    },
    {
      "epoch": 5.17,
      "learning_rate": 2.1183282092530065e-06,
      "loss": 3.4159,
      "step": 178
    },
    {
      "epoch": 5.19,
      "learning_rate": 1.960637331762091e-06,
      "loss": 3.2516,
      "step": 179
    },
    {
      "epoch": 5.22,
      "learning_rate": 1.8088055224315697e-06,
      "loss": 2.7591,
      "step": 180
    },
    {
      "epoch": 5.25,
      "learning_rate": 1.6628713931333445e-06,
      "loss": 2.6279,
      "step": 181
    },
    {
      "epoch": 5.28,
      "learning_rate": 1.5228720559187642e-06,
      "loss": 2.847,
      "step": 182
    },
    {
      "epoch": 5.31,
      "learning_rate": 1.3888431135807956e-06,
      "loss": 2.089,
      "step": 183
    },
    {
      "epoch": 5.34,
      "learning_rate": 1.2608186505999847e-06,
      "loss": 3.5639,
      "step": 184
    },
    {
      "epoch": 5.37,
      "learning_rate": 1.138831224476533e-06,
      "loss": 3.0349,
      "step": 185
    },
    {
      "epoch": 5.4,
      "learning_rate": 1.0229118574507174e-06,
      "loss": 3.6886,
      "step": 186
    },
    {
      "epoch": 5.42,
      "learning_rate": 9.130900286137001e-07,
      "loss": 2.7077,
      "step": 187
    },
    {
      "epoch": 5.45,
      "learning_rate": 8.093936664108071e-07,
      "loss": 2.3523,
      "step": 188
    },
    {
      "epoch": 5.48,
      "learning_rate": 7.118491415391337e-07,
      "loss": 3.7049,
      "step": 189
    },
    {
      "epoch": 5.51,
      "learning_rate": 6.204812602412902e-07,
      "loss": 5.1024,
      "step": 190
    },
    {
      "epoch": 5.54,
      "learning_rate": 5.353132579969972e-07,
      "loss": 3.0828,
      "step": 191
    },
    {
      "epoch": 5.57,
      "learning_rate": 4.56366793614163e-07,
      "loss": 2.5269,
      "step": 192
    },
    {
      "epoch": 5.6,
      "learning_rate": 3.8366194372088384e-07,
      "loss": 2.7201,
      "step": 193
    },
    {
      "epoch": 5.63,
      "learning_rate": 3.1721719765981926e-07,
      "loss": 2.9555,
      "step": 194
    },
    {
      "epoch": 5.65,
      "learning_rate": 2.5704945278623436e-07,
      "loss": 3.5292,
      "step": 195
    },
    {
      "epoch": 5.68,
      "learning_rate": 2.0317401017088122e-07,
      "loss": 2.9735,
      "step": 196
    },
    {
      "epoch": 5.71,
      "learning_rate": 1.5560457070883105e-07,
      "loss": 3.2746,
      "step": 197
    },
    {
      "epoch": 5.74,
      "learning_rate": 1.1435323163525025e-07,
      "loss": 2.8847,
      "step": 198
    },
    {
      "epoch": 5.77,
      "learning_rate": 7.94304834489723e-08,
      "loss": 2.6234,
      "step": 199
    },
    {
      "epoch": 5.8,
      "learning_rate": 5.0845207244715196e-08,
      "loss": 3.2828,
      "step": 200
    },
    {
      "epoch": 5.83,
      "learning_rate": 2.8604672454538018e-08,
      "loss": 2.3275,
      "step": 201
    },
    {
      "epoch": 5.86,
      "learning_rate": 1.2714534999183625e-08,
      "loss": 3.3622,
      "step": 202
    },
    {
      "epoch": 5.88,
      "learning_rate": 3.1788358497431005e-09,
      "loss": 3.235,
      "step": 203
    },
    {
      "epoch": 5.91,
      "learning_rate": 0.0,
      "loss": 2.0377,
      "step": 204
    },
    {
      "epoch": 5.91,
      "step": 204,
      "total_flos": 0.0,
      "train_loss": 1.2812642963493572,
      "train_runtime": 30830.0379,
      "train_samples_per_second": 13.815,
      "train_steps_per_second": 0.007
    }
  ],
  "logging_steps": 1.0,
  "max_steps": 204,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 6,
  "save_steps": 50,
  "total_flos": 0.0,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
}