|
{
  "best_metric": 1.4545527696609497,
  "best_model_checkpoint": "./lora-outB/checkpoint-150",
  "epoch": 4.491017964071856,
  "eval_steps": 25,
  "global_step": 150,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.03,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 1.866,
      "step": 1
    },
    {
      "epoch": 0.06,
      "learning_rate": 4.000000000000001e-06,
      "loss": 1.7383,
      "step": 2
    },
    {
      "epoch": 0.09,
      "learning_rate": 6e-06,
      "loss": 1.8306,
      "step": 3
    },
    {
      "epoch": 0.12,
      "learning_rate": 8.000000000000001e-06,
      "loss": 1.9213,
      "step": 4
    },
    {
      "epoch": 0.15,
      "learning_rate": 1e-05,
      "loss": 1.9524,
      "step": 5
    },
    {
      "epoch": 0.18,
      "learning_rate": 1.2e-05,
      "loss": 1.7308,
      "step": 6
    },
    {
      "epoch": 0.21,
      "learning_rate": 1.4e-05,
      "loss": 1.7334,
      "step": 7
    },
    {
      "epoch": 0.24,
      "learning_rate": 1.6000000000000003e-05,
      "loss": 1.6749,
      "step": 8
    },
    {
      "epoch": 0.27,
      "learning_rate": 1.8e-05,
      "loss": 1.805,
      "step": 9
    },
    {
      "epoch": 0.3,
      "learning_rate": 2e-05,
      "loss": 1.778,
      "step": 10
    },
    {
      "epoch": 0.33,
      "learning_rate": 1.9997946042345128e-05,
      "loss": 1.7804,
      "step": 11
    },
    {
      "epoch": 0.36,
      "learning_rate": 1.9991785013128922e-05,
      "loss": 1.8191,
      "step": 12
    },
    {
      "epoch": 0.39,
      "learning_rate": 1.998151944325001e-05,
      "loss": 1.7089,
      "step": 13
    },
    {
      "epoch": 0.42,
      "learning_rate": 1.996715354971755e-05,
      "loss": 1.7131,
      "step": 14
    },
    {
      "epoch": 0.45,
      "learning_rate": 1.994869323391895e-05,
      "loss": 1.917,
      "step": 15
    },
    {
      "epoch": 0.48,
      "learning_rate": 1.9926146079195597e-05,
      "loss": 1.6701,
      "step": 16
    },
    {
      "epoch": 0.51,
      "learning_rate": 1.989952134772769e-05,
      "loss": 1.7119,
      "step": 17
    },
    {
      "epoch": 0.54,
      "learning_rate": 1.9868829976729444e-05,
      "loss": 1.7037,
      "step": 18
    },
    {
      "epoch": 0.57,
      "learning_rate": 1.983408457395613e-05,
      "loss": 1.7314,
      "step": 19
    },
    {
      "epoch": 0.6,
      "learning_rate": 1.9795299412524948e-05,
      "loss": 1.6771,
      "step": 20
    },
    {
      "epoch": 0.63,
      "learning_rate": 1.975249042505174e-05,
      "loss": 1.6734,
      "step": 21
    },
    {
      "epoch": 0.66,
      "learning_rate": 1.970567519710602e-05,
      "loss": 1.689,
      "step": 22
    },
    {
      "epoch": 0.69,
      "learning_rate": 1.9654872959986936e-05,
      "loss": 1.7452,
      "step": 23
    },
    {
      "epoch": 0.72,
      "learning_rate": 1.960010458282326e-05,
      "loss": 1.7126,
      "step": 24
    },
    {
      "epoch": 0.75,
      "learning_rate": 1.954139256400049e-05,
      "loss": 1.5629,
      "step": 25
    },
    {
      "epoch": 0.75,
      "eval_loss": 1.6511253118515015,
      "eval_runtime": 5.4427,
      "eval_samples_per_second": 5.145,
      "eval_steps_per_second": 0.735,
      "step": 25
    },
    {
      "epoch": 0.78,
      "learning_rate": 1.947876102191873e-05,
      "loss": 1.6948,
      "step": 26
    },
    {
      "epoch": 0.81,
      "learning_rate": 1.9412235685085034e-05,
      "loss": 1.5969,
      "step": 27
    },
    {
      "epoch": 0.84,
      "learning_rate": 1.9341843881544372e-05,
      "loss": 1.5367,
      "step": 28
    },
    {
      "epoch": 0.87,
      "learning_rate": 1.926761452765349e-05,
      "loss": 1.463,
      "step": 29
    },
    {
      "epoch": 0.9,
      "learning_rate": 1.918957811620231e-05,
      "loss": 1.4575,
      "step": 30
    },
    {
      "epoch": 0.93,
      "learning_rate": 1.9107766703887764e-05,
      "loss": 1.5928,
      "step": 31
    },
    {
      "epoch": 0.96,
      "learning_rate": 1.9022213898145176e-05,
      "loss": 1.5604,
      "step": 32
    },
    {
      "epoch": 0.99,
      "learning_rate": 1.893295484334259e-05,
      "loss": 1.6508,
      "step": 33
    },
    {
      "epoch": 1.02,
      "learning_rate": 1.8840026206343786e-05,
      "loss": 1.4583,
      "step": 34
    },
    {
      "epoch": 1.05,
      "learning_rate": 1.8743466161445823e-05,
      "loss": 1.7124,
      "step": 35
    },
    {
      "epoch": 1.08,
      "learning_rate": 1.8643314374697377e-05,
      "loss": 1.462,
      "step": 36
    },
    {
      "epoch": 1.11,
      "learning_rate": 1.853961198760426e-05,
      "loss": 1.6664,
      "step": 37
    },
    {
      "epoch": 1.14,
      "learning_rate": 1.8432401600228823e-05,
      "loss": 1.6264,
      "step": 38
    },
    {
      "epoch": 1.17,
      "learning_rate": 1.832172725369024e-05,
      "loss": 1.6704,
      "step": 39
    },
    {
      "epoch": 1.2,
      "learning_rate": 1.8207634412072765e-05,
      "loss": 1.5221,
      "step": 40
    },
    {
      "epoch": 1.23,
      "learning_rate": 1.8090169943749477e-05,
      "loss": 1.5411,
      "step": 41
    },
    {
      "epoch": 1.26,
      "learning_rate": 1.7969382102129153e-05,
      "loss": 1.4493,
      "step": 42
    },
    {
      "epoch": 1.29,
      "learning_rate": 1.7845320505834176e-05,
      "loss": 1.4783,
      "step": 43
    },
    {
      "epoch": 1.32,
      "learning_rate": 1.771803611831762e-05,
      "loss": 1.5882,
      "step": 44
    },
    {
      "epoch": 1.35,
      "learning_rate": 1.758758122692791e-05,
      "loss": 1.5848,
      "step": 45
    },
    {
      "epoch": 1.38,
      "learning_rate": 1.74540094214296e-05,
      "loss": 1.4439,
      "step": 46
    },
    {
      "epoch": 1.41,
      "learning_rate": 1.7317375571989158e-05,
      "loss": 1.5398,
      "step": 47
    },
    {
      "epoch": 1.44,
      "learning_rate": 1.717773580663479e-05,
      "loss": 1.3689,
      "step": 48
    },
    {
      "epoch": 1.47,
      "learning_rate": 1.703514748819948e-05,
      "loss": 1.5311,
      "step": 49
    },
    {
      "epoch": 1.5,
      "learning_rate": 1.688966919075687e-05,
      "loss": 1.5253,
      "step": 50
    },
    {
      "epoch": 1.5,
      "eval_loss": 1.5729879140853882,
      "eval_runtime": 5.4053,
      "eval_samples_per_second": 5.18,
      "eval_steps_per_second": 0.74,
      "step": 50
    },
    {
      "epoch": 1.53,
      "learning_rate": 1.6741360675559475e-05,
      "loss": 1.3689,
      "step": 51
    },
    {
      "epoch": 1.56,
      "learning_rate": 1.659028286648932e-05,
      "loss": 1.4167,
      "step": 52
    },
    {
      "epoch": 1.59,
      "learning_rate": 1.6436497825030886e-05,
      "loss": 1.6581,
      "step": 53
    },
    {
      "epoch": 1.62,
      "learning_rate": 1.6280068724776795e-05,
      "loss": 1.5004,
      "step": 54
    },
    {
      "epoch": 1.65,
      "learning_rate": 1.612105982547663e-05,
      "loss": 1.4912,
      "step": 55
    },
    {
      "epoch": 1.68,
      "learning_rate": 1.5959536446639572e-05,
      "loss": 1.5128,
      "step": 56
    },
    {
      "epoch": 1.71,
      "learning_rate": 1.57955649407017e-05,
      "loss": 1.4908,
      "step": 57
    },
    {
      "epoch": 1.74,
      "learning_rate": 1.562921266576898e-05,
      "loss": 1.4826,
      "step": 58
    },
    {
      "epoch": 1.77,
      "learning_rate": 1.5460547957947105e-05,
      "loss": 1.423,
      "step": 59
    },
    {
      "epoch": 1.8,
      "learning_rate": 1.5289640103269626e-05,
      "loss": 1.5394,
      "step": 60
    },
    {
      "epoch": 1.83,
      "learning_rate": 1.5116559309235825e-05,
      "loss": 1.4102,
      "step": 61
    },
    {
      "epoch": 1.86,
      "learning_rate": 1.4941376675970058e-05,
      "loss": 1.3556,
      "step": 62
    },
    {
      "epoch": 1.89,
      "learning_rate": 1.4764164167014451e-05,
      "loss": 1.5444,
      "step": 63
    },
    {
      "epoch": 1.92,
      "learning_rate": 1.4584994579766865e-05,
      "loss": 1.4466,
      "step": 64
    },
    {
      "epoch": 1.95,
      "learning_rate": 1.4403941515576344e-05,
      "loss": 1.4467,
      "step": 65
    },
    {
      "epoch": 1.98,
      "learning_rate": 1.422107934950832e-05,
      "loss": 1.4268,
      "step": 66
    },
    {
      "epoch": 2.01,
      "learning_rate": 1.4036483199791949e-05,
      "loss": 1.4073,
      "step": 67
    },
    {
      "epoch": 2.04,
      "learning_rate": 1.3850228896962178e-05,
      "loss": 1.5186,
      "step": 68
    },
    {
      "epoch": 2.07,
      "learning_rate": 1.366239295270923e-05,
      "loss": 1.5014,
      "step": 69
    },
    {
      "epoch": 2.1,
      "learning_rate": 1.3473052528448203e-05,
      "loss": 1.3933,
      "step": 70
    },
    {
      "epoch": 2.13,
      "learning_rate": 1.3282285403621864e-05,
      "loss": 1.4067,
      "step": 71
    },
    {
      "epoch": 2.16,
      "learning_rate": 1.3090169943749475e-05,
      "loss": 1.5052,
      "step": 72
    },
    {
      "epoch": 2.19,
      "learning_rate": 1.2896785068234925e-05,
      "loss": 1.3016,
      "step": 73
    },
    {
      "epoch": 2.22,
      "learning_rate": 1.2702210217947289e-05,
      "loss": 1.3951,
      "step": 74
    },
    {
      "epoch": 2.25,
      "learning_rate": 1.2506525322587207e-05,
      "loss": 1.3363,
      "step": 75
    },
    {
      "epoch": 2.25,
      "eval_loss": 1.5013550519943237,
      "eval_runtime": 5.4016,
      "eval_samples_per_second": 5.184,
      "eval_steps_per_second": 0.741,
      "step": 75
    },
    {
      "epoch": 2.28,
      "learning_rate": 1.2309810767852435e-05,
      "loss": 1.3622,
      "step": 76
    },
    {
      "epoch": 2.31,
      "learning_rate": 1.2112147362416076e-05,
      "loss": 1.3339,
      "step": 77
    },
    {
      "epoch": 2.34,
      "learning_rate": 1.1913616304731064e-05,
      "loss": 1.3832,
      "step": 78
    },
    {
      "epoch": 2.37,
      "learning_rate": 1.1714299149674538e-05,
      "loss": 1.2172,
      "step": 79
    },
    {
      "epoch": 2.4,
      "learning_rate": 1.1514277775045768e-05,
      "loss": 1.2421,
      "step": 80
    },
    {
      "epoch": 2.43,
      "learning_rate": 1.1313634347931466e-05,
      "loss": 1.3649,
      "step": 81
    },
    {
      "epoch": 2.46,
      "learning_rate": 1.1112451290952238e-05,
      "loss": 1.5337,
      "step": 82
    },
    {
      "epoch": 2.49,
      "learning_rate": 1.0910811248404064e-05,
      "loss": 1.2464,
      "step": 83
    },
    {
      "epoch": 2.51,
      "learning_rate": 1.070879705230873e-05,
      "loss": 1.4806,
      "step": 84
    },
    {
      "epoch": 2.54,
      "learning_rate": 1.0506491688387128e-05,
      "loss": 1.3603,
      "step": 85
    },
    {
      "epoch": 2.57,
      "learning_rate": 1.030397826196943e-05,
      "loss": 1.2416,
      "step": 86
    },
    {
      "epoch": 2.6,
      "learning_rate": 1.0101339963856112e-05,
      "loss": 1.2974,
      "step": 87
    },
    {
      "epoch": 2.63,
      "learning_rate": 9.898660036143893e-06,
      "loss": 1.6356,
      "step": 88
    },
    {
      "epoch": 2.66,
      "learning_rate": 9.696021738030575e-06,
      "loss": 1.2952,
      "step": 89
    },
    {
      "epoch": 2.69,
      "learning_rate": 9.493508311612874e-06,
      "loss": 1.3857,
      "step": 90
    },
    {
      "epoch": 2.72,
      "learning_rate": 9.291202947691272e-06,
      "loss": 1.3712,
      "step": 91
    },
    {
      "epoch": 2.75,
      "learning_rate": 9.089188751595937e-06,
      "loss": 1.3713,
      "step": 92
    },
    {
      "epoch": 2.78,
      "learning_rate": 8.887548709047765e-06,
      "loss": 1.4597,
      "step": 93
    },
    {
      "epoch": 2.81,
      "learning_rate": 8.686365652068536e-06,
      "loss": 1.225,
      "step": 94
    },
    {
      "epoch": 2.84,
      "learning_rate": 8.485722224954237e-06,
      "loss": 1.3307,
      "step": 95
    },
    {
      "epoch": 2.87,
      "learning_rate": 8.285700850325467e-06,
      "loss": 1.3423,
      "step": 96
    },
    {
      "epoch": 2.9,
      "learning_rate": 8.086383695268937e-06,
      "loss": 1.352,
      "step": 97
    },
    {
      "epoch": 2.93,
      "learning_rate": 7.887852637583927e-06,
      "loss": 1.3908,
      "step": 98
    },
    {
      "epoch": 2.96,
      "learning_rate": 7.690189232147566e-06,
      "loss": 1.406,
      "step": 99
    },
    {
      "epoch": 2.99,
      "learning_rate": 7.493474677412795e-06,
      "loss": 1.4017,
      "step": 100
    },
    {
      "epoch": 2.99,
      "eval_loss": 1.4689786434173584,
      "eval_runtime": 5.4385,
      "eval_samples_per_second": 5.148,
      "eval_steps_per_second": 0.735,
      "step": 100
    },
    {
      "epoch": 3.02,
      "learning_rate": 7.297789782052716e-06,
      "loss": 1.2587,
      "step": 101
    },
    {
      "epoch": 3.05,
      "learning_rate": 7.10321493176508e-06,
      "loss": 1.2309,
      "step": 102
    },
    {
      "epoch": 3.08,
      "learning_rate": 6.909830056250527e-06,
      "loss": 1.3232,
      "step": 103
    },
    {
      "epoch": 3.11,
      "learning_rate": 6.717714596378138e-06,
      "loss": 1.2715,
      "step": 104
    },
    {
      "epoch": 3.14,
      "learning_rate": 6.526947471551799e-06,
      "loss": 1.3015,
      "step": 105
    },
    {
      "epoch": 3.17,
      "learning_rate": 6.337607047290774e-06,
      "loss": 1.3752,
      "step": 106
    },
    {
      "epoch": 3.2,
      "learning_rate": 6.149771103037821e-06,
      "loss": 1.3156,
      "step": 107
    },
    {
      "epoch": 3.23,
      "learning_rate": 5.963516800208056e-06,
      "loss": 1.3419,
      "step": 108
    },
    {
      "epoch": 3.26,
      "learning_rate": 5.7789206504916815e-06,
      "loss": 1.4729,
      "step": 109
    },
    {
      "epoch": 3.29,
      "learning_rate": 5.5960584844236565e-06,
      "loss": 1.3619,
      "step": 110
    },
    {
      "epoch": 3.32,
      "learning_rate": 5.415005420233141e-06,
      "loss": 1.4708,
      "step": 111
    },
    {
      "epoch": 3.35,
      "learning_rate": 5.235835832985552e-06,
      "loss": 1.4684,
      "step": 112
    },
    {
      "epoch": 3.38,
      "learning_rate": 5.058623324029944e-06,
      "loss": 1.2379,
      "step": 113
    },
    {
      "epoch": 3.41,
      "learning_rate": 4.8834406907641784e-06,
      "loss": 1.2921,
      "step": 114
    },
    {
      "epoch": 3.44,
      "learning_rate": 4.710359896730379e-06,
      "loss": 1.3846,
      "step": 115
    },
    {
      "epoch": 3.47,
      "learning_rate": 4.539452042052901e-06,
      "loss": 1.218,
      "step": 116
    },
    {
      "epoch": 3.5,
      "learning_rate": 4.370787334231026e-06,
      "loss": 1.2349,
      "step": 117
    },
    {
      "epoch": 3.53,
      "learning_rate": 4.204435059298303e-06,
      "loss": 1.1808,
      "step": 118
    },
    {
      "epoch": 3.56,
      "learning_rate": 4.040463553360431e-06,
      "loss": 1.2824,
      "step": 119
    },
    {
      "epoch": 3.59,
      "learning_rate": 3.878940174523371e-06,
      "loss": 1.4456,
      "step": 120
    },
    {
      "epoch": 3.62,
      "learning_rate": 3.7199312752232053e-06,
      "loss": 1.3098,
      "step": 121
    },
    {
      "epoch": 3.65,
      "learning_rate": 3.563502174969117e-06,
      "loss": 1.2517,
      "step": 122
    },
    {
      "epoch": 3.68,
      "learning_rate": 3.409717133510683e-06,
      "loss": 1.3281,
      "step": 123
    },
    {
      "epoch": 3.71,
      "learning_rate": 3.258639324440527e-06,
      "loss": 1.3068,
      "step": 124
    },
    {
      "epoch": 3.74,
      "learning_rate": 3.110330809243134e-06,
      "loss": 1.2677,
      "step": 125
    },
    {
      "epoch": 3.74,
      "eval_loss": 1.4592927694320679,
      "eval_runtime": 5.3759,
      "eval_samples_per_second": 5.208,
      "eval_steps_per_second": 0.744,
      "step": 125
    },
    {
      "epoch": 3.77,
      "learning_rate": 2.964852511800519e-06,
      "loss": 1.2642,
      "step": 126
    },
    {
      "epoch": 3.8,
      "learning_rate": 2.822264193365212e-06,
      "loss": 1.2629,
      "step": 127
    },
    {
      "epoch": 3.83,
      "learning_rate": 2.6826244280108438e-06,
      "loss": 1.4554,
      "step": 128
    },
    {
      "epoch": 3.86,
      "learning_rate": 2.545990578570404e-06,
      "loss": 1.4209,
      "step": 129
    },
    {
      "epoch": 3.89,
      "learning_rate": 2.4124187730720916e-06,
      "loss": 1.3152,
      "step": 130
    },
    {
      "epoch": 3.92,
      "learning_rate": 2.2819638816823796e-06,
      "loss": 1.2729,
      "step": 131
    },
    {
      "epoch": 3.95,
      "learning_rate": 2.154679494165829e-06,
      "loss": 1.3608,
      "step": 132
    },
    {
      "epoch": 3.98,
      "learning_rate": 2.030617897870851e-06,
      "loss": 1.3871,
      "step": 133
    },
    {
      "epoch": 4.01,
      "learning_rate": 1.9098300562505266e-06,
      "loss": 1.3255,
      "step": 134
    },
    {
      "epoch": 4.04,
      "learning_rate": 1.7923655879272395e-06,
      "loss": 1.3443,
      "step": 135
    },
    {
      "epoch": 4.07,
      "learning_rate": 1.6782727463097626e-06,
      "loss": 1.41,
      "step": 136
    },
    {
      "epoch": 4.1,
      "learning_rate": 1.5675983997711797e-06,
      "loss": 1.2783,
      "step": 137
    },
    {
      "epoch": 4.13,
      "learning_rate": 1.4603880123957448e-06,
      "loss": 1.169,
      "step": 138
    },
    {
      "epoch": 4.16,
      "learning_rate": 1.356685625302625e-06,
      "loss": 1.1928,
      "step": 139
    },
    {
      "epoch": 4.19,
      "learning_rate": 1.2565338385541792e-06,
      "loss": 1.4349,
      "step": 140
    },
    {
      "epoch": 4.22,
      "learning_rate": 1.159973793656215e-06,
      "loss": 1.4173,
      "step": 141
    },
    {
      "epoch": 4.25,
      "learning_rate": 1.0670451566574102e-06,
      "loss": 1.3533,
      "step": 142
    },
    {
      "epoch": 4.28,
      "learning_rate": 9.77786101854825e-07,
      "loss": 1.188,
      "step": 143
    },
    {
      "epoch": 4.31,
      "learning_rate": 8.92233296112236e-07,
      "loss": 1.2933,
      "step": 144
    },
    {
      "epoch": 4.34,
      "learning_rate": 8.10421883797694e-07,
      "loss": 1.2848,
      "step": 145
    },
    {
      "epoch": 4.37,
      "learning_rate": 7.32385472346514e-07,
      "loss": 1.3775,
      "step": 146
    },
    {
      "epoch": 4.4,
      "learning_rate": 6.581561184556296e-07,
      "loss": 1.2679,
      "step": 147
    },
    {
      "epoch": 4.43,
      "learning_rate": 5.877643149149669e-07,
      "loss": 1.2643,
      "step": 148
    },
    {
      "epoch": 4.46,
      "learning_rate": 5.212389780812733e-07,
      "loss": 1.3166,
      "step": 149
    },
    {
      "epoch": 4.49,
      "learning_rate": 4.5860743599951186e-07,
      "loss": 1.351,
      "step": 150
    },
    {
      "epoch": 4.49,
      "eval_loss": 1.4545527696609497,
      "eval_runtime": 5.4009,
      "eval_samples_per_second": 5.184,
      "eval_steps_per_second": 0.741,
      "step": 150
    }
  ],
  "logging_steps": 1,
  "max_steps": 165,
  "num_train_epochs": 5,
  "save_steps": 50,
  "total_flos": 1.7018825258041344e+17,
  "trial_name": null,
  "trial_params": null
}
|
|