{
  "best_metric": 1.707595944404602,
  "best_model_checkpoint": "miner_id_24/checkpoint-200",
  "epoch": 1.3093289689034369,
  "eval_steps": 50,
  "global_step": 200,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.006546644844517185,
      "grad_norm": 2.13655686378479,
      "learning_rate": 1e-05,
      "loss": 8.7017,
      "step": 1
    },
    {
      "epoch": 0.006546644844517185,
      "eval_loss": 2.8909528255462646,
      "eval_runtime": 26.1547,
      "eval_samples_per_second": 9.864,
      "eval_steps_per_second": 2.485,
      "step": 1
    },
    {
      "epoch": 0.01309328968903437,
      "grad_norm": 1.9584050178527832,
      "learning_rate": 2e-05,
      "loss": 8.7593,
      "step": 2
    },
    {
      "epoch": 0.019639934533551555,
      "grad_norm": 1.92738938331604,
      "learning_rate": 3e-05,
      "loss": 9.2191,
      "step": 3
    },
    {
      "epoch": 0.02618657937806874,
      "grad_norm": 1.802982211112976,
      "learning_rate": 4e-05,
      "loss": 8.9038,
      "step": 4
    },
    {
      "epoch": 0.03273322422258593,
      "grad_norm": 1.6323221921920776,
      "learning_rate": 5e-05,
      "loss": 9.051,
      "step": 5
    },
    {
      "epoch": 0.03927986906710311,
      "grad_norm": 1.6458439826965332,
      "learning_rate": 6e-05,
      "loss": 9.0919,
      "step": 6
    },
    {
      "epoch": 0.04582651391162029,
      "grad_norm": 1.7354305982589722,
      "learning_rate": 7e-05,
      "loss": 9.5186,
      "step": 7
    },
    {
      "epoch": 0.05237315875613748,
      "grad_norm": 1.9769362211227417,
      "learning_rate": 8e-05,
      "loss": 9.3298,
      "step": 8
    },
    {
      "epoch": 0.058919803600654665,
      "grad_norm": 1.8338909149169922,
      "learning_rate": 9e-05,
      "loss": 10.5342,
      "step": 9
    },
    {
      "epoch": 0.06546644844517185,
      "grad_norm": 1.890850305557251,
      "learning_rate": 0.0001,
      "loss": 8.1525,
      "step": 10
    },
    {
      "epoch": 0.07201309328968904,
      "grad_norm": 2.500300645828247,
      "learning_rate": 9.999316524962345e-05,
      "loss": 10.1409,
      "step": 11
    },
    {
      "epoch": 0.07855973813420622,
      "grad_norm": 2.272092342376709,
      "learning_rate": 9.997266286704631e-05,
      "loss": 10.0184,
      "step": 12
    },
    {
      "epoch": 0.0851063829787234,
      "grad_norm": 2.830733299255371,
      "learning_rate": 9.993849845741524e-05,
      "loss": 9.3191,
      "step": 13
    },
    {
      "epoch": 0.09165302782324058,
      "grad_norm": 3.044830322265625,
      "learning_rate": 9.989068136093873e-05,
      "loss": 8.7616,
      "step": 14
    },
    {
      "epoch": 0.09819967266775777,
      "grad_norm": 3.9173166751861572,
      "learning_rate": 9.98292246503335e-05,
      "loss": 9.8097,
      "step": 15
    },
    {
      "epoch": 0.10474631751227496,
      "grad_norm": 4.72042989730835,
      "learning_rate": 9.975414512725057e-05,
      "loss": 7.531,
      "step": 16
    },
    {
      "epoch": 0.11129296235679215,
      "grad_norm": 4.051272392272949,
      "learning_rate": 9.966546331768191e-05,
      "loss": 7.9077,
      "step": 17
    },
    {
      "epoch": 0.11783960720130933,
      "grad_norm": 3.737215042114258,
      "learning_rate": 9.956320346634876e-05,
      "loss": 9.7853,
      "step": 18
    },
    {
      "epoch": 0.12438625204582651,
      "grad_norm": 4.385887145996094,
      "learning_rate": 9.944739353007344e-05,
      "loss": 8.0269,
      "step": 19
    },
    {
      "epoch": 0.1309328968903437,
      "grad_norm": 3.413221597671509,
      "learning_rate": 9.931806517013612e-05,
      "loss": 8.4059,
      "step": 20
    },
    {
      "epoch": 0.13747954173486088,
      "grad_norm": 3.395505905151367,
      "learning_rate": 9.917525374361912e-05,
      "loss": 7.9983,
      "step": 21
    },
    {
      "epoch": 0.14402618657937807,
      "grad_norm": 3.604326009750366,
      "learning_rate": 9.901899829374047e-05,
      "loss": 7.5998,
      "step": 22
    },
    {
      "epoch": 0.15057283142389524,
      "grad_norm": 3.5756168365478516,
      "learning_rate": 9.884934153917997e-05,
      "loss": 8.0325,
      "step": 23
    },
    {
      "epoch": 0.15711947626841244,
      "grad_norm": 3.7345871925354004,
      "learning_rate": 9.86663298624003e-05,
      "loss": 7.4147,
      "step": 24
    },
    {
      "epoch": 0.16366612111292964,
      "grad_norm": 3.4550774097442627,
      "learning_rate": 9.847001329696653e-05,
      "loss": 7.8503,
      "step": 25
    },
    {
      "epoch": 0.1702127659574468,
      "grad_norm": 3.4074184894561768,
      "learning_rate": 9.826044551386744e-05,
      "loss": 6.5266,
      "step": 26
    },
    {
      "epoch": 0.176759410801964,
      "grad_norm": 3.3474767208099365,
      "learning_rate": 9.803768380684242e-05,
      "loss": 8.3397,
      "step": 27
    },
    {
      "epoch": 0.18330605564648117,
      "grad_norm": 3.2262606620788574,
      "learning_rate": 9.780178907671789e-05,
      "loss": 7.8356,
      "step": 28
    },
    {
      "epoch": 0.18985270049099837,
      "grad_norm": 3.6275148391723633,
      "learning_rate": 9.755282581475769e-05,
      "loss": 8.2803,
      "step": 29
    },
    {
      "epoch": 0.19639934533551553,
      "grad_norm": 3.781080722808838,
      "learning_rate": 9.729086208503174e-05,
      "loss": 8.1983,
      "step": 30
    },
    {
      "epoch": 0.20294599018003273,
      "grad_norm": 3.650963068008423,
      "learning_rate": 9.701596950580806e-05,
      "loss": 8.1172,
      "step": 31
    },
    {
      "epoch": 0.20949263502454993,
      "grad_norm": 4.347227573394775,
      "learning_rate": 9.672822322997305e-05,
      "loss": 9.1505,
      "step": 32
    },
    {
      "epoch": 0.2160392798690671,
      "grad_norm": 4.019195556640625,
      "learning_rate": 9.642770192448536e-05,
      "loss": 6.9112,
      "step": 33
    },
    {
      "epoch": 0.2225859247135843,
      "grad_norm": 4.088239669799805,
      "learning_rate": 9.611448774886924e-05,
      "loss": 7.8147,
      "step": 34
    },
    {
      "epoch": 0.22913256955810146,
      "grad_norm": 4.491867542266846,
      "learning_rate": 9.578866633275288e-05,
      "loss": 7.8134,
      "step": 35
    },
    {
      "epoch": 0.23567921440261866,
      "grad_norm": 4.543848514556885,
      "learning_rate": 9.545032675245813e-05,
      "loss": 6.7855,
      "step": 36
    },
    {
      "epoch": 0.24222585924713586,
      "grad_norm": 4.584990978240967,
      "learning_rate": 9.509956150664796e-05,
      "loss": 9.0826,
      "step": 37
    },
    {
      "epoch": 0.24877250409165302,
      "grad_norm": 5.8984479904174805,
      "learning_rate": 9.473646649103818e-05,
      "loss": 9.0719,
      "step": 38
    },
    {
      "epoch": 0.2553191489361702,
      "grad_norm": 13.235140800476074,
      "learning_rate": 9.43611409721806e-05,
      "loss": 7.4419,
      "step": 39
    },
    {
      "epoch": 0.2618657937806874,
      "grad_norm": 8.171952247619629,
      "learning_rate": 9.397368756032445e-05,
      "loss": 7.5932,
      "step": 40
    },
    {
      "epoch": 0.2684124386252046,
      "grad_norm": 5.803350448608398,
      "learning_rate": 9.357421218136386e-05,
      "loss": 7.1615,
      "step": 41
    },
    {
      "epoch": 0.27495908346972175,
      "grad_norm": 5.395884037017822,
      "learning_rate": 9.316282404787871e-05,
      "loss": 7.2399,
      "step": 42
    },
    {
      "epoch": 0.281505728314239,
      "grad_norm": 4.930261135101318,
      "learning_rate": 9.273963562927695e-05,
      "loss": 6.9501,
      "step": 43
    },
    {
      "epoch": 0.28805237315875615,
      "grad_norm": 4.3326826095581055,
      "learning_rate": 9.230476262104677e-05,
      "loss": 6.7461,
      "step": 44
    },
    {
      "epoch": 0.2945990180032733,
      "grad_norm": 3.443617820739746,
      "learning_rate": 9.185832391312644e-05,
      "loss": 7.678,
      "step": 45
    },
    {
      "epoch": 0.3011456628477905,
      "grad_norm": 3.4096052646636963,
      "learning_rate": 9.140044155740101e-05,
      "loss": 8.0259,
      "step": 46
    },
    {
      "epoch": 0.3076923076923077,
      "grad_norm": 3.3578953742980957,
      "learning_rate": 9.093124073433463e-05,
      "loss": 8.1257,
      "step": 47
    },
    {
      "epoch": 0.3142389525368249,
      "grad_norm": 3.4864139556884766,
      "learning_rate": 9.045084971874738e-05,
      "loss": 7.6784,
      "step": 48
    },
    {
      "epoch": 0.32078559738134205,
      "grad_norm": 3.0312955379486084,
      "learning_rate": 8.995939984474624e-05,
      "loss": 7.3808,
      "step": 49
    },
    {
      "epoch": 0.32733224222585927,
      "grad_norm": 3.1818759441375732,
      "learning_rate": 8.945702546981969e-05,
      "loss": 7.7415,
      "step": 50
    },
    {
      "epoch": 0.32733224222585927,
      "eval_loss": 1.9966988563537598,
      "eval_runtime": 26.5888,
      "eval_samples_per_second": 9.703,
      "eval_steps_per_second": 2.445,
      "step": 50
    },
    {
      "epoch": 0.33387888707037644,
      "grad_norm": 2.8749489784240723,
      "learning_rate": 8.894386393810563e-05,
      "loss": 7.5203,
      "step": 51
    },
    {
      "epoch": 0.3404255319148936,
      "grad_norm": 3.1056835651397705,
      "learning_rate": 8.842005554284296e-05,
      "loss": 6.7367,
      "step": 52
    },
    {
      "epoch": 0.3469721767594108,
      "grad_norm": 2.9506490230560303,
      "learning_rate": 8.788574348801675e-05,
      "loss": 8.1926,
      "step": 53
    },
    {
      "epoch": 0.353518821603928,
      "grad_norm": 2.955796003341675,
      "learning_rate": 8.73410738492077e-05,
      "loss": 7.9538,
      "step": 54
    },
    {
      "epoch": 0.36006546644844517,
      "grad_norm": 3.287029981613159,
      "learning_rate": 8.678619553365659e-05,
      "loss": 7.2937,
      "step": 55
    },
    {
      "epoch": 0.36661211129296234,
      "grad_norm": 3.352778911590576,
      "learning_rate": 8.622126023955446e-05,
      "loss": 6.9685,
      "step": 56
    },
    {
      "epoch": 0.37315875613747956,
      "grad_norm": 3.1772475242614746,
      "learning_rate": 8.564642241456986e-05,
      "loss": 8.2336,
      "step": 57
    },
    {
      "epoch": 0.37970540098199673,
      "grad_norm": 3.9878532886505127,
      "learning_rate": 8.506183921362443e-05,
      "loss": 7.6858,
      "step": 58
    },
    {
      "epoch": 0.3862520458265139,
      "grad_norm": 3.363985300064087,
      "learning_rate": 8.44676704559283e-05,
      "loss": 7.5955,
      "step": 59
    },
    {
      "epoch": 0.39279869067103107,
      "grad_norm": 3.496302366256714,
      "learning_rate": 8.386407858128706e-05,
      "loss": 8.1299,
      "step": 60
    },
    {
      "epoch": 0.3993453355155483,
      "grad_norm": 3.4857332706451416,
      "learning_rate": 8.32512286056924e-05,
      "loss": 6.8698,
      "step": 61
    },
    {
      "epoch": 0.40589198036006546,
      "grad_norm": 3.369488477706909,
      "learning_rate": 8.262928807620843e-05,
      "loss": 6.6206,
      "step": 62
    },
    {
      "epoch": 0.41243862520458263,
      "grad_norm": 4.747243404388428,
      "learning_rate": 8.199842702516583e-05,
      "loss": 8.7977,
      "step": 63
    },
    {
      "epoch": 0.41898527004909986,
      "grad_norm": 4.112165927886963,
      "learning_rate": 8.135881792367686e-05,
      "loss": 7.1464,
      "step": 64
    },
    {
      "epoch": 0.425531914893617,
      "grad_norm": 4.025830268859863,
      "learning_rate": 8.07106356344834e-05,
      "loss": 7.6691,
      "step": 65
    },
    {
      "epoch": 0.4320785597381342,
      "grad_norm": 3.832612991333008,
      "learning_rate": 8.005405736415126e-05,
      "loss": 8.5289,
      "step": 66
    },
    {
      "epoch": 0.4386252045826514,
      "grad_norm": 3.802485466003418,
      "learning_rate": 7.938926261462366e-05,
      "loss": 8.9616,
      "step": 67
    },
    {
      "epoch": 0.4451718494271686,
      "grad_norm": 3.8989100456237793,
      "learning_rate": 7.871643313414718e-05,
      "loss": 9.464,
      "step": 68
    },
    {
      "epoch": 0.45171849427168576,
      "grad_norm": 5.011821269989014,
      "learning_rate": 7.803575286758364e-05,
      "loss": 8.5757,
      "step": 69
    },
    {
      "epoch": 0.4582651391162029,
      "grad_norm": 3.615846633911133,
      "learning_rate": 7.734740790612136e-05,
      "loss": 7.5682,
      "step": 70
    },
    {
      "epoch": 0.46481178396072015,
      "grad_norm": 3.701805830001831,
      "learning_rate": 7.66515864363997e-05,
      "loss": 5.8679,
      "step": 71
    },
    {
      "epoch": 0.4713584288052373,
      "grad_norm": 4.6807661056518555,
      "learning_rate": 7.594847868906076e-05,
      "loss": 8.063,
      "step": 72
    },
    {
      "epoch": 0.4779050736497545,
      "grad_norm": 3.485200881958008,
      "learning_rate": 7.52382768867422e-05,
      "loss": 7.4695,
      "step": 73
    },
    {
      "epoch": 0.4844517184942717,
      "grad_norm": 4.052950382232666,
      "learning_rate": 7.452117519152542e-05,
      "loss": 7.3515,
      "step": 74
    },
    {
      "epoch": 0.4909983633387889,
      "grad_norm": 4.987173557281494,
      "learning_rate": 7.379736965185368e-05,
      "loss": 8.0323,
      "step": 75
    },
    {
      "epoch": 0.49754500818330605,
      "grad_norm": 7.222501754760742,
      "learning_rate": 7.30670581489344e-05,
      "loss": 10.0476,
      "step": 76
    },
    {
      "epoch": 0.5040916530278232,
      "grad_norm": 4.9196391105651855,
      "learning_rate": 7.233044034264034e-05,
      "loss": 6.0554,
      "step": 77
    },
    {
      "epoch": 0.5106382978723404,
      "grad_norm": 6.087227821350098,
      "learning_rate": 7.158771761692464e-05,
      "loss": 7.014,
      "step": 78
    },
    {
      "epoch": 0.5171849427168577,
      "grad_norm": 5.477786064147949,
      "learning_rate": 7.083909302476453e-05,
      "loss": 5.7058,
      "step": 79
    },
    {
      "epoch": 0.5237315875613748,
      "grad_norm": 4.938673496246338,
      "learning_rate": 7.008477123264848e-05,
      "loss": 6.7502,
      "step": 80
    },
    {
      "epoch": 0.530278232405892,
      "grad_norm": 4.272306442260742,
      "learning_rate": 6.932495846462261e-05,
      "loss": 6.1399,
      "step": 81
    },
    {
      "epoch": 0.5368248772504092,
      "grad_norm": 4.495388031005859,
      "learning_rate": 6.855986244591104e-05,
      "loss": 7.7363,
      "step": 82
    },
    {
      "epoch": 0.5433715220949263,
      "grad_norm": 3.923799991607666,
      "learning_rate": 6.778969234612584e-05,
      "loss": 6.6252,
      "step": 83
    },
    {
      "epoch": 0.5499181669394435,
      "grad_norm": 3.6672651767730713,
      "learning_rate": 6.701465872208216e-05,
      "loss": 6.3417,
      "step": 84
    },
    {
      "epoch": 0.5564648117839607,
      "grad_norm": 3.5456626415252686,
      "learning_rate": 6.623497346023418e-05,
      "loss": 7.1854,
      "step": 85
    },
    {
      "epoch": 0.563011456628478,
      "grad_norm": 3.7435622215270996,
      "learning_rate": 6.545084971874738e-05,
      "loss": 7.2925,
      "step": 86
    },
    {
      "epoch": 0.5695581014729951,
      "grad_norm": 3.5279946327209473,
      "learning_rate": 6.466250186922325e-05,
      "loss": 7.2659,
      "step": 87
    },
    {
      "epoch": 0.5761047463175123,
      "grad_norm": 3.6771609783172607,
      "learning_rate": 6.387014543809223e-05,
      "loss": 7.4484,
      "step": 88
    },
    {
      "epoch": 0.5826513911620295,
      "grad_norm": 3.353963613510132,
      "learning_rate": 6.307399704769099e-05,
      "loss": 7.9794,
      "step": 89
    },
    {
      "epoch": 0.5891980360065466,
      "grad_norm": 3.9403250217437744,
      "learning_rate": 6.227427435703997e-05,
      "loss": 7.3322,
      "step": 90
    },
    {
      "epoch": 0.5957446808510638,
      "grad_norm": 3.8401012420654297,
      "learning_rate": 6.147119600233758e-05,
      "loss": 7.652,
      "step": 91
    },
    {
      "epoch": 0.602291325695581,
      "grad_norm": 4.258552074432373,
      "learning_rate": 6.066498153718735e-05,
      "loss": 7.9399,
      "step": 92
    },
    {
      "epoch": 0.6088379705400983,
      "grad_norm": 3.9695637226104736,
      "learning_rate": 5.985585137257401e-05,
      "loss": 8.1528,
      "step": 93
    },
    {
      "epoch": 0.6153846153846154,
      "grad_norm": 3.8052663803100586,
      "learning_rate": 5.90440267166055e-05,
      "loss": 6.5618,
      "step": 94
    },
    {
      "epoch": 0.6219312602291326,
      "grad_norm": 3.762793779373169,
      "learning_rate": 5.8229729514036705e-05,
      "loss": 7.6469,
      "step": 95
    },
    {
      "epoch": 0.6284779050736498,
      "grad_norm": 3.9404263496398926,
      "learning_rate": 5.74131823855921e-05,
      "loss": 6.7442,
      "step": 96
    },
    {
      "epoch": 0.6350245499181669,
      "grad_norm": 4.887734889984131,
      "learning_rate": 5.6594608567103456e-05,
      "loss": 7.1085,
      "step": 97
    },
    {
      "epoch": 0.6415711947626841,
      "grad_norm": 3.9277775287628174,
      "learning_rate": 5.577423184847932e-05,
      "loss": 6.8363,
      "step": 98
    },
    {
      "epoch": 0.6481178396072013,
      "grad_norm": 4.448163986206055,
      "learning_rate": 5.495227651252315e-05,
      "loss": 8.6857,
      "step": 99
    },
    {
      "epoch": 0.6546644844517185,
      "grad_norm": 3.6832666397094727,
      "learning_rate": 5.4128967273616625e-05,
      "loss": 7.4869,
      "step": 100
    },
    {
      "epoch": 0.6546644844517185,
      "eval_loss": 1.8067591190338135,
      "eval_runtime": 26.62,
      "eval_samples_per_second": 9.692,
      "eval_steps_per_second": 2.442,
      "step": 100
    },
    {
      "epoch": 0.6612111292962357,
      "grad_norm": 4.730930328369141,
      "learning_rate": 5.330452921628497e-05,
      "loss": 7.4568,
      "step": 101
    },
    {
      "epoch": 0.6677577741407529,
      "grad_norm": 4.326946258544922,
      "learning_rate": 5.247918773366112e-05,
      "loss": 7.3844,
      "step": 102
    },
    {
      "epoch": 0.67430441898527,
      "grad_norm": 4.695581436157227,
      "learning_rate": 5.165316846586541e-05,
      "loss": 7.6806,
      "step": 103
    },
    {
      "epoch": 0.6808510638297872,
      "grad_norm": 4.098830223083496,
      "learning_rate": 5.0826697238317935e-05,
      "loss": 6.1029,
      "step": 104
    },
    {
      "epoch": 0.6873977086743044,
      "grad_norm": 3.6151130199432373,
      "learning_rate": 5e-05,
      "loss": 7.0723,
      "step": 105
    },
    {
      "epoch": 0.6939443535188216,
      "grad_norm": 4.55037784576416,
      "learning_rate": 4.917330276168208e-05,
      "loss": 7.8156,
      "step": 106
    },
    {
      "epoch": 0.7004909983633388,
      "grad_norm": 3.7407169342041016,
      "learning_rate": 4.834683153413459e-05,
      "loss": 7.0921,
      "step": 107
    },
    {
      "epoch": 0.707037643207856,
      "grad_norm": 4.5362372398376465,
      "learning_rate": 4.7520812266338885e-05,
      "loss": 8.2413,
      "step": 108
    },
    {
      "epoch": 0.7135842880523732,
      "grad_norm": 5.322324275970459,
      "learning_rate": 4.669547078371504e-05,
      "loss": 8.7592,
      "step": 109
    },
    {
      "epoch": 0.7201309328968903,
      "grad_norm": 4.402954578399658,
      "learning_rate": 4.5871032726383386e-05,
      "loss": 6.0549,
      "step": 110
    },
    {
      "epoch": 0.7266775777414075,
      "grad_norm": 4.464359283447266,
      "learning_rate": 4.504772348747687e-05,
      "loss": 6.4,
      "step": 111
    },
    {
      "epoch": 0.7332242225859247,
      "grad_norm": 4.0333333015441895,
      "learning_rate": 4.4225768151520694e-05,
      "loss": 5.8919,
      "step": 112
    },
    {
      "epoch": 0.7397708674304418,
      "grad_norm": 4.631279468536377,
      "learning_rate": 4.3405391432896555e-05,
      "loss": 6.8713,
      "step": 113
    },
    {
      "epoch": 0.7463175122749591,
      "grad_norm": 5.974384784698486,
      "learning_rate": 4.2586817614407895e-05,
      "loss": 7.7068,
      "step": 114
    },
    {
      "epoch": 0.7528641571194763,
      "grad_norm": 3.7521626949310303,
      "learning_rate": 4.17702704859633e-05,
      "loss": 4.9648,
      "step": 115
    },
    {
      "epoch": 0.7594108019639935,
      "grad_norm": 3.2828927040100098,
      "learning_rate": 4.095597328339452e-05,
      "loss": 4.7717,
      "step": 116
    },
    {
      "epoch": 0.7659574468085106,
      "grad_norm": 3.8715484142303467,
      "learning_rate": 4.0144148627425993e-05,
      "loss": 5.6252,
      "step": 117
    },
    {
      "epoch": 0.7725040916530278,
      "grad_norm": 3.565499782562256,
      "learning_rate": 3.933501846281267e-05,
      "loss": 5.0633,
      "step": 118
    },
    {
      "epoch": 0.779050736497545,
      "grad_norm": 4.171899318695068,
      "learning_rate": 3.852880399766243e-05,
      "loss": 5.5828,
      "step": 119
    },
    {
      "epoch": 0.7855973813420621,
      "grad_norm": 3.9527981281280518,
      "learning_rate": 3.772572564296005e-05,
      "loss": 6.6647,
      "step": 120
    },
    {
      "epoch": 0.7921440261865794,
      "grad_norm": 4.4996137619018555,
      "learning_rate": 3.6926002952309016e-05,
      "loss": 6.0745,
      "step": 121
    },
    {
      "epoch": 0.7986906710310966,
      "grad_norm": 4.248125076293945,
      "learning_rate": 3.612985456190778e-05,
      "loss": 6.2583,
      "step": 122
    },
    {
      "epoch": 0.8052373158756138,
      "grad_norm": 4.524069309234619,
      "learning_rate": 3.533749813077677e-05,
      "loss": 7.6514,
      "step": 123
    },
    {
      "epoch": 0.8117839607201309,
      "grad_norm": 4.165614128112793,
      "learning_rate": 3.4549150281252636e-05,
      "loss": 6.5651,
      "step": 124
    },
    {
      "epoch": 0.8183306055646481,
      "grad_norm": 3.7812719345092773,
      "learning_rate": 3.3765026539765834e-05,
      "loss": 8.0841,
      "step": 125
    },
    {
      "epoch": 0.8248772504091653,
      "grad_norm": 3.8515408039093018,
      "learning_rate": 3.298534127791785e-05,
      "loss": 6.645,
      "step": 126
    },
    {
      "epoch": 0.8314238952536824,
      "grad_norm": 3.675601005554199,
      "learning_rate": 3.221030765387417e-05,
      "loss": 6.6233,
      "step": 127
    },
    {
      "epoch": 0.8379705400981997,
      "grad_norm": 4.184865474700928,
      "learning_rate": 3.144013755408895e-05,
      "loss": 6.5936,
      "step": 128
    },
    {
      "epoch": 0.8445171849427169,
      "grad_norm": 3.9905803203582764,
      "learning_rate": 3.0675041535377405e-05,
      "loss": 6.6051,
      "step": 129
    },
    {
      "epoch": 0.851063829787234,
      "grad_norm": 3.789395570755005,
      "learning_rate": 2.991522876735154e-05,
      "loss": 6.7708,
      "step": 130
    },
    {
      "epoch": 0.8576104746317512,
      "grad_norm": 4.332606792449951,
      "learning_rate": 2.916090697523549e-05,
      "loss": 6.4427,
      "step": 131
    },
    {
      "epoch": 0.8641571194762684,
      "grad_norm": 4.295475959777832,
      "learning_rate": 2.8412282383075363e-05,
      "loss": 5.8015,
      "step": 132
    },
    {
      "epoch": 0.8707037643207856,
      "grad_norm": 4.5518388748168945,
      "learning_rate": 2.766955965735968e-05,
      "loss": 7.3128,
      "step": 133
    },
    {
      "epoch": 0.8772504091653028,
      "grad_norm": 4.419987201690674,
      "learning_rate": 2.693294185106562e-05,
      "loss": 7.4535,
      "step": 134
    },
    {
      "epoch": 0.88379705400982,
      "grad_norm": 4.4990458488464355,
      "learning_rate": 2.6202630348146324e-05,
      "loss": 6.4881,
      "step": 135
    },
    {
      "epoch": 0.8903436988543372,
      "grad_norm": 5.589775085449219,
      "learning_rate": 2.547882480847461e-05,
      "loss": 6.4211,
      "step": 136
    },
    {
      "epoch": 0.8968903436988543,
      "grad_norm": 4.943357944488525,
      "learning_rate": 2.476172311325783e-05,
      "loss": 7.0361,
      "step": 137
    },
    {
      "epoch": 0.9034369885433715,
      "grad_norm": 5.135087490081787,
      "learning_rate": 2.405152131093926e-05,
      "loss": 8.3705,
      "step": 138
    },
    {
      "epoch": 0.9099836333878887,
      "grad_norm": 4.697838306427002,
      "learning_rate": 2.3348413563600325e-05,
      "loss": 6.7204,
      "step": 139
    },
    {
      "epoch": 0.9165302782324058,
      "grad_norm": 4.322061538696289,
      "learning_rate": 2.2652592093878666e-05,
      "loss": 8.1387,
      "step": 140
    },
    {
      "epoch": 0.9230769230769231,
      "grad_norm": 4.582803249359131,
      "learning_rate": 2.196424713241637e-05,
      "loss": 6.67,
      "step": 141
    },
    {
      "epoch": 0.9296235679214403,
      "grad_norm": 4.9027838706970215,
      "learning_rate": 2.128356686585282e-05,
      "loss": 7.9627,
      "step": 142
    },
    {
      "epoch": 0.9361702127659575,
      "grad_norm": 4.5149827003479,
      "learning_rate": 2.061073738537635e-05,
      "loss": 6.4197,
      "step": 143
    },
    {
      "epoch": 0.9427168576104746,
      "grad_norm": 5.014720916748047,
      "learning_rate": 1.9945942635848748e-05,
      "loss": 7.2758,
      "step": 144
    },
    {
      "epoch": 0.9492635024549918,
      "grad_norm": 4.3964362144470215,
      "learning_rate": 1.928936436551661e-05,
      "loss": 6.5388,
      "step": 145
    },
    {
      "epoch": 0.955810147299509,
      "grad_norm": 4.305325508117676,
      "learning_rate": 1.8641182076323148e-05,
      "loss": 7.328,
      "step": 146
    },
    {
      "epoch": 0.9623567921440261,
      "grad_norm": 4.718265056610107,
      "learning_rate": 1.800157297483417e-05,
      "loss": 8.0182,
      "step": 147
    },
    {
      "epoch": 0.9689034369885434,
      "grad_norm": 5.577218055725098,
      "learning_rate": 1.7370711923791567e-05,
      "loss": 6.0249,
      "step": 148
    },
    {
      "epoch": 0.9754500818330606,
      "grad_norm": 4.048355579376221,
      "learning_rate": 1.6748771394307585e-05,
      "loss": 5.1523,
      "step": 149
    },
    {
      "epoch": 0.9819967266775778,
      "grad_norm": 4.773705959320068,
      "learning_rate": 1.6135921418712956e-05,
      "loss": 6.3304,
      "step": 150
    },
    {
      "epoch": 0.9819967266775778,
      "eval_loss": 1.7272412776947021,
      "eval_runtime": 26.6126,
      "eval_samples_per_second": 9.695,
      "eval_steps_per_second": 2.442,
      "step": 150
    },
    {
      "epoch": 0.9885433715220949,
      "grad_norm": 6.138540744781494,
      "learning_rate": 1.553232954407171e-05,
      "loss": 8.892,
      "step": 151
    },
    {
      "epoch": 0.9950900163666121,
      "grad_norm": 6.4282402992248535,
      "learning_rate": 1.4938160786375572e-05,
      "loss": 7.8377,
      "step": 152
    },
    {
      "epoch": 1.0016366612111294,
      "grad_norm": 3.81278657913208,
      "learning_rate": 1.435357758543015e-05,
      "loss": 7.4905,
      "step": 153
    },
    {
      "epoch": 1.0081833060556464,
      "grad_norm": 2.5406858921051025,
      "learning_rate": 1.3778739760445552e-05,
      "loss": 4.6057,
      "step": 154
    },
    {
      "epoch": 1.0147299509001637,
      "grad_norm": 2.9028007984161377,
      "learning_rate": 1.3213804466343421e-05,
      "loss": 4.4537,
      "step": 155
    },
    {
      "epoch": 1.0212765957446808,
      "grad_norm": 3.118882417678833,
      "learning_rate": 1.2658926150792322e-05,
      "loss": 4.9172,
      "step": 156
    },
    {
      "epoch": 1.027823240589198,
      "grad_norm": 3.208533525466919,
      "learning_rate": 1.2114256511983274e-05,
      "loss": 4.9149,
      "step": 157
    },
    {
      "epoch": 1.034369885433715,
      "grad_norm": 3.223609209060669,
      "learning_rate": 1.157994445715706e-05,
      "loss": 4.4849,
      "step": 158
    },
    {
      "epoch": 1.0409165302782324,
      "grad_norm": 3.3039231300354004,
      "learning_rate": 1.1056136061894384e-05,
      "loss": 5.4302,
      "step": 159
    },
    {
      "epoch": 1.0474631751227497,
      "grad_norm": 3.211501121520996,
      "learning_rate": 1.0542974530180327e-05,
      "loss": 4.5527,
      "step": 160
    },
    {
      "epoch": 1.0540098199672667,
      "grad_norm": 3.086197853088379,
      "learning_rate": 1.0040600155253765e-05,
      "loss": 6.0466,
      "step": 161
    },
    {
      "epoch": 1.060556464811784,
      "grad_norm": 3.426628589630127,
      "learning_rate": 9.549150281252633e-06,
      "loss": 5.1313,
      "step": 162
    },
    {
      "epoch": 1.067103109656301,
      "grad_norm": 3.1073648929595947,
      "learning_rate": 9.068759265665384e-06,
      "loss": 6.1329,
      "step": 163
    },
    {
      "epoch": 1.0736497545008183,
      "grad_norm": 3.4100728034973145,
      "learning_rate": 8.599558442598998e-06,
      "loss": 5.1229,
      "step": 164
    },
    {
      "epoch": 1.0801963993453354,
      "grad_norm": 3.2149834632873535,
      "learning_rate": 8.141676086873572e-06,
      "loss": 5.9124,
      "step": 165
    },
    {
      "epoch": 1.0867430441898527,
      "grad_norm": 3.5418028831481934,
      "learning_rate": 7.695237378953223e-06,
      "loss": 5.2255,
      "step": 166
    },
    {
      "epoch": 1.09328968903437,
      "grad_norm": 3.696084976196289,
      "learning_rate": 7.260364370723044e-06,
      "loss": 5.2342,
      "step": 167
    },
    {
      "epoch": 1.099836333878887,
      "grad_norm": 3.730708360671997,
      "learning_rate": 6.837175952121306e-06,
      "loss": 5.8431,
      "step": 168
    },
    {
      "epoch": 1.1063829787234043,
      "grad_norm": 3.742860794067383,
      "learning_rate": 6.425787818636131e-06,
      "loss": 4.6969,
      "step": 169
    },
    {
      "epoch": 1.1129296235679214,
      "grad_norm": 3.598694086074829,
      "learning_rate": 6.026312439675552e-06,
      "loss": 5.7549,
      "step": 170
    },
    {
      "epoch": 1.1194762684124386,
      "grad_norm": 3.616276502609253,
      "learning_rate": 5.6388590278194096e-06,
      "loss": 5.1967,
      "step": 171
    },
    {
      "epoch": 1.1260229132569557,
      "grad_norm": 3.9378201961517334,
      "learning_rate": 5.263533508961827e-06,
      "loss": 5.3633,
      "step": 172
    },
    {
      "epoch": 1.132569558101473,
      "grad_norm": 3.6692304611206055,
      "learning_rate": 4.900438493352055e-06,
      "loss": 5.5043,
      "step": 173
    },
    {
      "epoch": 1.1391162029459903,
      "grad_norm": 3.8457822799682617,
      "learning_rate": 4.549673247541875e-06,
      "loss": 5.2013,
      "step": 174
    },
    {
      "epoch": 1.1456628477905073,
      "grad_norm": 4.024327754974365,
      "learning_rate": 4.2113336672471245e-06,
      "loss": 5.0389,
      "step": 175
    },
    {
      "epoch": 1.1522094926350246,
      "grad_norm": 3.874255895614624,
      "learning_rate": 3.885512251130763e-06,
      "loss": 5.1974,
      "step": 176
    },
    {
      "epoch": 1.1587561374795416,
      "grad_norm": 4.2925920486450195,
      "learning_rate": 3.5722980755146517e-06,
      "loss": 6.2995,
      "step": 177
    },
    {
      "epoch": 1.165302782324059,
      "grad_norm": 4.385538578033447,
      "learning_rate": 3.271776770026963e-06,
      "loss": 5.41,
      "step": 178
    },
    {
      "epoch": 1.171849427168576,
      "grad_norm": 4.054061412811279,
      "learning_rate": 2.9840304941919415e-06,
      "loss": 5.9913,
      "step": 179
    },
    {
      "epoch": 1.1783960720130933,
      "grad_norm": 4.442548751831055,
      "learning_rate": 2.7091379149682685e-06,
      "loss": 6.638,
      "step": 180
    },
    {
      "epoch": 1.1849427168576105,
      "grad_norm": 3.944359302520752,
      "learning_rate": 2.4471741852423237e-06,
      "loss": 5.7074,
      "step": 181
    },
    {
      "epoch": 1.1914893617021276,
      "grad_norm": 4.47265625,
      "learning_rate": 2.1982109232821178e-06,
      "loss": 6.308,
      "step": 182
    },
    {
      "epoch": 1.1980360065466449,
      "grad_norm": 4.241458892822266,
      "learning_rate": 1.962316193157593e-06,
      "loss": 5.8567,
      "step": 183
    },
    {
      "epoch": 1.204582651391162,
      "grad_norm": 4.236311435699463,
      "learning_rate": 1.7395544861325718e-06,
      "loss": 6.3607,
      "step": 184
    },
    {
      "epoch": 1.2111292962356792,
      "grad_norm": 4.7658467292785645,
      "learning_rate": 1.5299867030334814e-06,
      "loss": 6.0578,
      "step": 185
    },
    {
      "epoch": 1.2176759410801963,
      "grad_norm": 4.109133720397949,
      "learning_rate": 1.333670137599713e-06,
      "loss": 4.8959,
      "step": 186
    },
    {
      "epoch": 1.2242225859247136,
      "grad_norm": 4.497995853424072,
      "learning_rate": 1.1506584608200367e-06,
      "loss": 5.8,
      "step": 187
    },
    {
      "epoch": 1.2307692307692308,
      "grad_norm": 4.011194229125977,
      "learning_rate": 9.810017062595322e-07,
      "loss": 4.6202,
      "step": 188
    },
    {
      "epoch": 1.237315875613748,
      "grad_norm": 4.688493251800537,
      "learning_rate": 8.247462563808817e-07,
      "loss": 5.2817,
      "step": 189
    },
    {
      "epoch": 1.2438625204582652,
      "grad_norm": 5.393063068389893,
      "learning_rate": 6.819348298638839e-07,
      "loss": 6.4032,
      "step": 190
    },
    {
      "epoch": 1.2504091653027825,
      "grad_norm": 5.186883449554443,
      "learning_rate": 5.526064699265753e-07,
      "loss": 6.4349,
      "step": 191
    },
    {
      "epoch": 1.2569558101472995,
      "grad_norm": 2.460773229598999,
      "learning_rate": 4.367965336512403e-07,
      "loss": 3.6578,
      "step": 192
    },
    {
      "epoch": 1.2635024549918166,
      "grad_norm": 3.062981128692627,
      "learning_rate": 3.3453668231809286e-07,
      "loss": 3.9411,
      "step": 193
    },
    {
      "epoch": 1.2700490998363339,
      "grad_norm": 3.388601541519165,
      "learning_rate": 2.458548727494292e-07,
      "loss": 4.6924,
      "step": 194
    },
    {
      "epoch": 1.2765957446808511,
      "grad_norm": 3.355208158493042,
      "learning_rate": 1.7077534966650766e-07,
      "loss": 4.653,
      "step": 195
    },
    {
      "epoch": 1.2831423895253682,
      "grad_norm": 3.150766611099243,
      "learning_rate": 1.0931863906127327e-07,
      "loss": 4.5391,
      "step": 196
    },
    {
      "epoch": 1.2896890343698855,
      "grad_norm": 3.37044095993042,
      "learning_rate": 6.150154258476315e-08,
      "loss": 5.5631,
      "step": 197
    },
    {
      "epoch": 1.2962356792144027,
      "grad_norm": 3.706301212310791,
      "learning_rate": 2.7337132953697554e-08,
      "loss": 5.4415,
      "step": 198
    },
    {
      "epoch": 1.3027823240589198,
      "grad_norm": 3.9071426391601562,
      "learning_rate": 6.834750376549792e-09,
      "loss": 5.121,
      "step": 199
    },
    {
      "epoch": 1.3093289689034369,
      "grad_norm": 3.6013264656066895,
      "learning_rate": 0.0,
      "loss": 6.7051,
      "step": 200
    },
    {
      "epoch": 1.3093289689034369,
      "eval_loss": 1.707595944404602,
      "eval_runtime": 26.5748,
      "eval_samples_per_second": 9.708,
      "eval_steps_per_second": 2.446,
      "step": 200
    }
  ],
  "logging_steps": 1,
  "max_steps": 200,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 2,
  "save_steps": 50,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 5,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 5.09187566075904e+17,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}