{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.0,
  "eval_steps": 500,
  "global_step": 5268,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0056947608200455585,
      "grad_norm": 5.042048454284668,
      "learning_rate": 4.990508731966591e-05,
      "loss": 1.2902,
      "step": 10
    },
    {
      "epoch": 0.011389521640091117,
      "grad_norm": 1.2201155424118042,
      "learning_rate": 4.981017463933182e-05,
      "loss": 0.6163,
      "step": 20
    },
    {
      "epoch": 0.017084282460136675,
      "grad_norm": 1.153208613395691,
      "learning_rate": 4.971526195899772e-05,
      "loss": 0.2969,
      "step": 30
    },
    {
      "epoch": 0.022779043280182234,
      "grad_norm": 1.3441071510314941,
      "learning_rate": 4.962034927866363e-05,
      "loss": 0.427,
      "step": 40
    },
    {
      "epoch": 0.02847380410022779,
      "grad_norm": 1.4184179306030273,
      "learning_rate": 4.9525436598329536e-05,
      "loss": 0.2912,
      "step": 50
    },
    {
      "epoch": 0.03416856492027335,
      "grad_norm": 1.7669013738632202,
      "learning_rate": 4.9430523917995447e-05,
      "loss": 0.2581,
      "step": 60
    },
    {
      "epoch": 0.03986332574031891,
      "grad_norm": 2.6280012130737305,
      "learning_rate": 4.933561123766136e-05,
      "loss": 0.191,
      "step": 70
    },
    {
      "epoch": 0.04555808656036447,
      "grad_norm": 2.000683069229126,
      "learning_rate": 4.924069855732726e-05,
      "loss": 0.1709,
      "step": 80
    },
    {
      "epoch": 0.05125284738041002,
      "grad_norm": 2.97003436088562,
      "learning_rate": 4.9145785876993165e-05,
      "loss": 0.1989,
      "step": 90
    },
    {
      "epoch": 0.05694760820045558,
      "grad_norm": 1.8411054611206055,
      "learning_rate": 4.9050873196659076e-05,
      "loss": 0.1736,
      "step": 100
    },
    {
      "epoch": 0.06264236902050115,
      "grad_norm": 7.188544750213623,
      "learning_rate": 4.895596051632499e-05,
      "loss": 0.2518,
      "step": 110
    },
    {
      "epoch": 0.0683371298405467,
      "grad_norm": 2.3247721195220947,
      "learning_rate": 4.886104783599089e-05,
      "loss": 0.1648,
      "step": 120
    },
    {
      "epoch": 0.07403189066059225,
      "grad_norm": 0.8464665412902832,
      "learning_rate": 4.87661351556568e-05,
      "loss": 0.1625,
      "step": 130
    },
    {
      "epoch": 0.07972665148063782,
      "grad_norm": 2.508469343185425,
      "learning_rate": 4.8671222475322705e-05,
      "loss": 0.1449,
      "step": 140
    },
    {
      "epoch": 0.08542141230068337,
      "grad_norm": 0.8359629511833191,
      "learning_rate": 4.857630979498861e-05,
      "loss": 0.1114,
      "step": 150
    },
    {
      "epoch": 0.09111617312072894,
      "grad_norm": 1.0664496421813965,
      "learning_rate": 4.848139711465452e-05,
      "loss": 0.0721,
      "step": 160
    },
    {
      "epoch": 0.09681093394077449,
      "grad_norm": 1.8065450191497803,
      "learning_rate": 4.838648443432043e-05,
      "loss": 0.1384,
      "step": 170
    },
    {
      "epoch": 0.10250569476082004,
      "grad_norm": 1.9225945472717285,
      "learning_rate": 4.8291571753986335e-05,
      "loss": 0.0997,
      "step": 180
    },
    {
      "epoch": 0.1082004555808656,
      "grad_norm": 4.79730224609375,
      "learning_rate": 4.8196659073652246e-05,
      "loss": 0.0918,
      "step": 190
    },
    {
      "epoch": 0.11389521640091116,
      "grad_norm": 0.7483096122741699,
      "learning_rate": 4.810174639331815e-05,
      "loss": 0.0932,
      "step": 200
    },
    {
      "epoch": 0.11958997722095673,
      "grad_norm": 4.0233473777771,
      "learning_rate": 4.8006833712984054e-05,
      "loss": 0.1084,
      "step": 210
    },
    {
      "epoch": 0.1252847380410023,
      "grad_norm": 3.508195638656616,
      "learning_rate": 4.7911921032649964e-05,
      "loss": 0.103,
      "step": 220
    },
    {
      "epoch": 0.13097949886104784,
      "grad_norm": 1.099330186843872,
      "learning_rate": 4.7817008352315875e-05,
      "loss": 0.1048,
      "step": 230
    },
    {
      "epoch": 0.1366742596810934,
      "grad_norm": 0.9204321503639221,
      "learning_rate": 4.772209567198178e-05,
      "loss": 0.1044,
      "step": 240
    },
    {
      "epoch": 0.14236902050113895,
      "grad_norm": 0.697127103805542,
      "learning_rate": 4.762718299164768e-05,
      "loss": 0.1003,
      "step": 250
    },
    {
      "epoch": 0.1480637813211845,
      "grad_norm": 2.3334975242614746,
      "learning_rate": 4.7532270311313594e-05,
      "loss": 0.0891,
      "step": 260
    },
    {
      "epoch": 0.15375854214123008,
      "grad_norm": 2.2273714542388916,
      "learning_rate": 4.74373576309795e-05,
      "loss": 0.1135,
      "step": 270
    },
    {
      "epoch": 0.15945330296127563,
      "grad_norm": 1.4537644386291504,
      "learning_rate": 4.734244495064541e-05,
      "loss": 0.0991,
      "step": 280
    },
    {
      "epoch": 0.16514806378132119,
      "grad_norm": 2.1925878524780273,
      "learning_rate": 4.724753227031132e-05,
      "loss": 0.0885,
      "step": 290
    },
    {
      "epoch": 0.17084282460136674,
      "grad_norm": 1.6182861328125,
      "learning_rate": 4.715261958997722e-05,
      "loss": 0.0959,
      "step": 300
    },
    {
      "epoch": 0.1765375854214123,
      "grad_norm": 0.6557925343513489,
      "learning_rate": 4.705770690964313e-05,
      "loss": 0.1518,
      "step": 310
    },
    {
      "epoch": 0.18223234624145787,
      "grad_norm": 1.7739641666412354,
      "learning_rate": 4.696279422930904e-05,
      "loss": 0.1077,
      "step": 320
    },
    {
      "epoch": 0.18792710706150342,
      "grad_norm": 0.9851402044296265,
      "learning_rate": 4.686788154897495e-05,
      "loss": 0.1562,
      "step": 330
    },
    {
      "epoch": 0.19362186788154898,
      "grad_norm": 0.23916186392307281,
      "learning_rate": 4.677296886864085e-05,
      "loss": 0.0787,
      "step": 340
    },
    {
      "epoch": 0.19931662870159453,
      "grad_norm": 1.2982560396194458,
      "learning_rate": 4.6678056188306763e-05,
      "loss": 0.0843,
      "step": 350
    },
    {
      "epoch": 0.20501138952164008,
      "grad_norm": 0.8306344747543335,
      "learning_rate": 4.658314350797267e-05,
      "loss": 0.096,
      "step": 360
    },
    {
      "epoch": 0.21070615034168566,
      "grad_norm": 1.477241039276123,
      "learning_rate": 4.648823082763857e-05,
      "loss": 0.0799,
      "step": 370
    },
    {
      "epoch": 0.2164009111617312,
      "grad_norm": 2.520557165145874,
      "learning_rate": 4.639331814730448e-05,
      "loss": 0.12,
      "step": 380
    },
    {
      "epoch": 0.22209567198177677,
      "grad_norm": 5.481024742126465,
      "learning_rate": 4.629840546697039e-05,
      "loss": 0.0756,
      "step": 390
    },
    {
      "epoch": 0.22779043280182232,
      "grad_norm": 7.298396587371826,
      "learning_rate": 4.62034927866363e-05,
      "loss": 0.0926,
      "step": 400
    },
    {
      "epoch": 0.23348519362186787,
      "grad_norm": 1.9556351900100708,
      "learning_rate": 4.61085801063022e-05,
      "loss": 0.061,
      "step": 410
    },
    {
      "epoch": 0.23917995444191345,
      "grad_norm": 1.6126965284347534,
      "learning_rate": 4.601366742596811e-05,
      "loss": 0.0913,
      "step": 420
    },
    {
      "epoch": 0.244874715261959,
      "grad_norm": 1.855068325996399,
      "learning_rate": 4.5918754745634016e-05,
      "loss": 0.0881,
      "step": 430
    },
    {
      "epoch": 0.2505694760820046,
      "grad_norm": 4.383277416229248,
      "learning_rate": 4.5823842065299926e-05,
      "loss": 0.0948,
      "step": 440
    },
    {
      "epoch": 0.25626423690205014,
      "grad_norm": 4.287411212921143,
      "learning_rate": 4.572892938496584e-05,
      "loss": 0.1398,
      "step": 450
    },
    {
      "epoch": 0.2619589977220957,
      "grad_norm": 1.1114435195922852,
      "learning_rate": 4.563401670463174e-05,
      "loss": 0.105,
      "step": 460
    },
    {
      "epoch": 0.26765375854214124,
      "grad_norm": 0.9561439156532288,
      "learning_rate": 4.5539104024297645e-05,
      "loss": 0.1039,
      "step": 470
    },
    {
      "epoch": 0.2733485193621868,
      "grad_norm": 2.2738897800445557,
      "learning_rate": 4.5444191343963556e-05,
      "loss": 0.1049,
      "step": 480
    },
    {
      "epoch": 0.27904328018223234,
      "grad_norm": 2.5520405769348145,
      "learning_rate": 4.5349278663629466e-05,
      "loss": 0.1059,
      "step": 490
    },
    {
      "epoch": 0.2847380410022779,
      "grad_norm": 1.6588718891143799,
      "learning_rate": 4.525436598329537e-05,
      "loss": 0.07,
      "step": 500
    },
    {
      "epoch": 0.29043280182232345,
      "grad_norm": 1.141545057296753,
      "learning_rate": 4.515945330296128e-05,
      "loss": 0.1057,
      "step": 510
    },
    {
      "epoch": 0.296127562642369,
      "grad_norm": 1.383521556854248,
      "learning_rate": 4.5064540622627185e-05,
      "loss": 0.041,
      "step": 520
    },
    {
      "epoch": 0.30182232346241455,
      "grad_norm": 2.6926703453063965,
      "learning_rate": 4.496962794229309e-05,
      "loss": 0.0825,
      "step": 530
    },
    {
      "epoch": 0.30751708428246016,
      "grad_norm": 5.290349006652832,
      "learning_rate": 4.4874715261959e-05,
      "loss": 0.0614,
      "step": 540
    },
    {
      "epoch": 0.3132118451025057,
      "grad_norm": 3.191392183303833,
      "learning_rate": 4.477980258162491e-05,
      "loss": 0.1721,
      "step": 550
    },
    {
      "epoch": 0.31890660592255127,
      "grad_norm": 1.4004778861999512,
      "learning_rate": 4.4684889901290815e-05,
      "loss": 0.0448,
      "step": 560
    },
    {
      "epoch": 0.3246013667425968,
      "grad_norm": 1.8395417928695679,
      "learning_rate": 4.4589977220956725e-05,
      "loss": 0.0967,
      "step": 570
    },
    {
      "epoch": 0.33029612756264237,
      "grad_norm": 2.5588178634643555,
      "learning_rate": 4.449506454062263e-05,
      "loss": 0.0741,
      "step": 580
    },
    {
      "epoch": 0.3359908883826879,
      "grad_norm": 3.3830788135528564,
      "learning_rate": 4.440015186028853e-05,
      "loss": 0.0723,
      "step": 590
    },
    {
      "epoch": 0.3416856492027335,
      "grad_norm": 2.411625862121582,
      "learning_rate": 4.4305239179954444e-05,
      "loss": 0.1199,
      "step": 600
    },
    {
      "epoch": 0.34738041002277903,
      "grad_norm": 0.10270216315984726,
      "learning_rate": 4.4210326499620355e-05,
      "loss": 0.051,
      "step": 610
    },
    {
      "epoch": 0.3530751708428246,
      "grad_norm": 3.844116687774658,
      "learning_rate": 4.411541381928626e-05,
      "loss": 0.0484,
      "step": 620
    },
    {
      "epoch": 0.35876993166287013,
      "grad_norm": 1.4396392107009888,
      "learning_rate": 4.402050113895216e-05,
      "loss": 0.0355,
      "step": 630
    },
    {
      "epoch": 0.36446469248291574,
      "grad_norm": 2.1004416942596436,
      "learning_rate": 4.3925588458618073e-05,
      "loss": 0.0854,
      "step": 640
    },
    {
      "epoch": 0.3701594533029613,
      "grad_norm": 1.7273060083389282,
      "learning_rate": 4.383067577828398e-05,
      "loss": 0.068,
      "step": 650
    },
    {
      "epoch": 0.37585421412300685,
      "grad_norm": 1.6584464311599731,
      "learning_rate": 4.373576309794989e-05,
      "loss": 0.0591,
      "step": 660
    },
    {
      "epoch": 0.3815489749430524,
      "grad_norm": 5.8685150146484375,
      "learning_rate": 4.36408504176158e-05,
      "loss": 0.0703,
      "step": 670
    },
    {
      "epoch": 0.38724373576309795,
      "grad_norm": 1.8818565607070923,
      "learning_rate": 4.35459377372817e-05,
      "loss": 0.0864,
      "step": 680
    },
    {
      "epoch": 0.3929384965831435,
      "grad_norm": 0.8174405694007874,
      "learning_rate": 4.345102505694761e-05,
      "loss": 0.0541,
      "step": 690
    },
    {
      "epoch": 0.39863325740318906,
      "grad_norm": 2.2386655807495117,
      "learning_rate": 4.335611237661352e-05,
      "loss": 0.0779,
      "step": 700
    },
    {
      "epoch": 0.4043280182232346,
      "grad_norm": 1.5718015432357788,
      "learning_rate": 4.326119969627943e-05,
      "loss": 0.0827,
      "step": 710
    },
    {
      "epoch": 0.41002277904328016,
      "grad_norm": 0.7225226759910583,
      "learning_rate": 4.316628701594533e-05,
      "loss": 0.0961,
      "step": 720
    },
    {
      "epoch": 0.4157175398633257,
      "grad_norm": 2.2464139461517334,
      "learning_rate": 4.307137433561124e-05,
      "loss": 0.0432,
      "step": 730
    },
    {
      "epoch": 0.4214123006833713,
      "grad_norm": 1.8811899423599243,
      "learning_rate": 4.297646165527715e-05,
      "loss": 0.0752,
      "step": 740
    },
    {
      "epoch": 0.4271070615034169,
      "grad_norm": 1.1304991245269775,
      "learning_rate": 4.288154897494305e-05,
      "loss": 0.0684,
      "step": 750
    },
    {
      "epoch": 0.4328018223234624,
      "grad_norm": 5.151347637176514,
      "learning_rate": 4.278663629460896e-05,
      "loss": 0.0498,
      "step": 760
    },
    {
      "epoch": 0.438496583143508,
      "grad_norm": 0.0376850925385952,
      "learning_rate": 4.269172361427487e-05,
      "loss": 0.1001,
      "step": 770
    },
    {
      "epoch": 0.44419134396355353,
      "grad_norm": 0.9273952841758728,
      "learning_rate": 4.2596810933940777e-05,
      "loss": 0.0773,
      "step": 780
    },
    {
      "epoch": 0.4498861047835991,
      "grad_norm": 2.6929280757904053,
      "learning_rate": 4.250189825360668e-05,
      "loss": 0.0555,
      "step": 790
    },
    {
      "epoch": 0.45558086560364464,
      "grad_norm": 0.7927613854408264,
      "learning_rate": 4.240698557327259e-05,
      "loss": 0.0659,
      "step": 800
    },
    {
      "epoch": 0.4612756264236902,
      "grad_norm": 0.3811888098716736,
      "learning_rate": 4.2312072892938495e-05,
      "loss": 0.0361,
      "step": 810
    },
    {
      "epoch": 0.46697038724373574,
      "grad_norm": 8.167976379394531,
      "learning_rate": 4.2217160212604406e-05,
      "loss": 0.0742,
      "step": 820
    },
    {
      "epoch": 0.47266514806378135,
      "grad_norm": 2.267798662185669,
      "learning_rate": 4.212224753227032e-05,
      "loss": 0.0655,
      "step": 830
    },
    {
      "epoch": 0.4783599088838269,
      "grad_norm": 0.3650657832622528,
      "learning_rate": 4.202733485193622e-05,
      "loss": 0.0508,
      "step": 840
    },
    {
      "epoch": 0.48405466970387245,
      "grad_norm": 2.446582078933716,
      "learning_rate": 4.1932422171602125e-05,
      "loss": 0.0824,
      "step": 850
    },
    {
      "epoch": 0.489749430523918,
      "grad_norm": 2.046130418777466,
      "learning_rate": 4.1837509491268035e-05,
      "loss": 0.0443,
      "step": 860
    },
    {
      "epoch": 0.49544419134396356,
      "grad_norm": 2.4211316108703613,
      "learning_rate": 4.174259681093394e-05,
      "loss": 0.0642,
      "step": 870
    },
    {
      "epoch": 0.5011389521640092,
      "grad_norm": 3.668614625930786,
      "learning_rate": 4.164768413059985e-05,
      "loss": 0.1067,
      "step": 880
    },
    {
      "epoch": 0.5068337129840547,
      "grad_norm": 0.860970675945282,
      "learning_rate": 4.155277145026576e-05,
      "loss": 0.0583,
      "step": 890
    },
    {
      "epoch": 0.5125284738041003,
      "grad_norm": 5.353702068328857,
      "learning_rate": 4.1457858769931665e-05,
      "loss": 0.0761,
      "step": 900
    },
    {
      "epoch": 0.5182232346241458,
      "grad_norm": 4.8318681716918945,
      "learning_rate": 4.136294608959757e-05,
      "loss": 0.0878,
      "step": 910
    },
    {
      "epoch": 0.5239179954441914,
      "grad_norm": 0.8994714617729187,
      "learning_rate": 4.126803340926348e-05,
      "loss": 0.0502,
      "step": 920
    },
    {
      "epoch": 0.5296127562642369,
      "grad_norm": 1.8920825719833374,
      "learning_rate": 4.117312072892939e-05,
      "loss": 0.0952,
      "step": 930
    },
    {
      "epoch": 0.5353075170842825,
      "grad_norm": 2.3858070373535156,
      "learning_rate": 4.1078208048595294e-05,
      "loss": 0.0797,
      "step": 940
    },
    {
      "epoch": 0.541002277904328,
      "grad_norm": 3.2179317474365234,
      "learning_rate": 4.0983295368261205e-05,
      "loss": 0.1181,
      "step": 950
    },
    {
      "epoch": 0.5466970387243736,
      "grad_norm": 0.5987422466278076,
      "learning_rate": 4.088838268792711e-05,
      "loss": 0.0765,
      "step": 960
    },
    {
      "epoch": 0.5523917995444191,
      "grad_norm": 1.7444469928741455,
      "learning_rate": 4.079347000759301e-05,
      "loss": 0.0519,
      "step": 970
    },
    {
      "epoch": 0.5580865603644647,
      "grad_norm": 0.14087039232254028,
      "learning_rate": 4.0698557327258924e-05,
      "loss": 0.0438,
      "step": 980
    },
    {
      "epoch": 0.5637813211845103,
      "grad_norm": 2.490300416946411,
      "learning_rate": 4.0603644646924834e-05,
      "loss": 0.0796,
      "step": 990
    },
    {
      "epoch": 0.5694760820045558,
      "grad_norm": 0.5218722820281982,
      "learning_rate": 4.050873196659074e-05,
      "loss": 0.0595,
      "step": 1000
    },
    {
      "epoch": 0.5751708428246014,
      "grad_norm": 2.9636070728302,
      "learning_rate": 4.041381928625664e-05,
      "loss": 0.0439,
      "step": 1010
    },
    {
      "epoch": 0.5808656036446469,
      "grad_norm": 1.2304065227508545,
      "learning_rate": 4.031890660592255e-05,
      "loss": 0.0421,
      "step": 1020
    },
    {
      "epoch": 0.5865603644646925,
      "grad_norm": 0.5061107277870178,
      "learning_rate": 4.022399392558846e-05,
      "loss": 0.0698,
      "step": 1030
    },
    {
      "epoch": 0.592255125284738,
      "grad_norm": 2.6270368099212646,
      "learning_rate": 4.012908124525437e-05,
      "loss": 0.0456,
      "step": 1040
    },
    {
      "epoch": 0.5979498861047836,
      "grad_norm": 2.7538509368896484,
      "learning_rate": 4.003416856492028e-05,
      "loss": 0.0934,
      "step": 1050
    },
    {
      "epoch": 0.6036446469248291,
      "grad_norm": 0.8339662551879883,
      "learning_rate": 3.993925588458618e-05,
      "loss": 0.0493,
      "step": 1060
    },
    {
      "epoch": 0.6093394077448747,
      "grad_norm": 2.7625882625579834,
      "learning_rate": 3.9844343204252087e-05,
      "loss": 0.0661,
      "step": 1070
    },
    {
      "epoch": 0.6150341685649203,
      "grad_norm": 0.10445995628833771,
      "learning_rate": 3.9749430523918e-05,
      "loss": 0.0565,
      "step": 1080
    },
    {
      "epoch": 0.6207289293849658,
      "grad_norm": 4.233523845672607,
      "learning_rate": 3.965451784358391e-05,
      "loss": 0.0608,
      "step": 1090
    },
    {
      "epoch": 0.6264236902050114,
      "grad_norm": 2.9015579223632812,
      "learning_rate": 3.955960516324981e-05,
      "loss": 0.0394,
      "step": 1100
    },
    {
      "epoch": 0.6321184510250569,
      "grad_norm": 1.122717261314392,
      "learning_rate": 3.946469248291572e-05,
      "loss": 0.0297,
      "step": 1110
    },
    {
      "epoch": 0.6378132118451025,
      "grad_norm": 0.9211141467094421,
      "learning_rate": 3.936977980258163e-05,
      "loss": 0.0733,
      "step": 1120
    },
    {
      "epoch": 0.643507972665148,
      "grad_norm": 1.9820640087127686,
      "learning_rate": 3.927486712224753e-05,
      "loss": 0.0814,
      "step": 1130
    },
    {
      "epoch": 0.6492027334851936,
      "grad_norm": 0.8466345071792603,
      "learning_rate": 3.917995444191344e-05,
      "loss": 0.0933,
      "step": 1140
    },
    {
      "epoch": 0.6548974943052391,
      "grad_norm": 1.1416703462600708,
      "learning_rate": 3.908504176157935e-05,
      "loss": 0.0826,
      "step": 1150
    },
    {
      "epoch": 0.6605922551252847,
      "grad_norm": 1.0590317249298096,
      "learning_rate": 3.8990129081245256e-05,
      "loss": 0.1039,
      "step": 1160
    },
    {
      "epoch": 0.6662870159453302,
      "grad_norm": 0.13635486364364624,
      "learning_rate": 3.889521640091117e-05,
      "loss": 0.0717,
      "step": 1170
    },
    {
      "epoch": 0.6719817767653758,
      "grad_norm": 0.7193166017532349,
      "learning_rate": 3.880030372057707e-05,
      "loss": 0.0554,
      "step": 1180
    },
    {
      "epoch": 0.6776765375854215,
      "grad_norm": 1.9214146137237549,
      "learning_rate": 3.8705391040242975e-05,
      "loss": 0.0405,
      "step": 1190
    },
    {
      "epoch": 0.683371298405467,
      "grad_norm": 3.638381242752075,
      "learning_rate": 3.8610478359908886e-05,
      "loss": 0.069,
      "step": 1200
    },
    {
      "epoch": 0.6890660592255126,
      "grad_norm": 0.2034996747970581,
      "learning_rate": 3.8515565679574796e-05,
      "loss": 0.0473,
      "step": 1210
    },
    {
      "epoch": 0.6947608200455581,
      "grad_norm": 2.54274582862854,
      "learning_rate": 3.84206529992407e-05,
      "loss": 0.0405,
      "step": 1220
    },
    {
      "epoch": 0.7004555808656037,
      "grad_norm": 1.0374627113342285,
      "learning_rate": 3.8325740318906604e-05,
      "loss": 0.0593,
      "step": 1230
    },
    {
      "epoch": 0.7061503416856492,
      "grad_norm": 1.1265807151794434,
      "learning_rate": 3.8230827638572515e-05,
      "loss": 0.0367,
      "step": 1240
    },
    {
      "epoch": 0.7118451025056948,
      "grad_norm": 1.830880045890808,
      "learning_rate": 3.813591495823842e-05,
      "loss": 0.0589,
      "step": 1250
    },
    {
      "epoch": 0.7175398633257403,
      "grad_norm": 1.4103747606277466,
      "learning_rate": 3.804100227790433e-05,
      "loss": 0.0602,
      "step": 1260
    },
    {
      "epoch": 0.7232346241457859,
      "grad_norm": 1.1360421180725098,
      "learning_rate": 3.794608959757024e-05,
      "loss": 0.0843,
      "step": 1270
    },
    {
      "epoch": 0.7289293849658315,
      "grad_norm": 1.648220181465149,
      "learning_rate": 3.7851176917236145e-05,
      "loss": 0.1067,
      "step": 1280
    },
    {
      "epoch": 0.734624145785877,
      "grad_norm": 2.033055305480957,
      "learning_rate": 3.775626423690205e-05,
      "loss": 0.0531,
      "step": 1290
    },
    {
      "epoch": 0.7403189066059226,
      "grad_norm": 2.301596164703369,
      "learning_rate": 3.766135155656796e-05,
      "loss": 0.0332,
      "step": 1300
    },
    {
      "epoch": 0.7460136674259681,
      "grad_norm": 0.5601398944854736,
      "learning_rate": 3.756643887623387e-05,
      "loss": 0.0452,
      "step": 1310
    },
    {
      "epoch": 0.7517084282460137,
      "grad_norm": 4.6115312576293945,
      "learning_rate": 3.7471526195899774e-05,
      "loss": 0.0737,
      "step": 1320
    },
    {
      "epoch": 0.7574031890660592,
      "grad_norm": 2.644883632659912,
      "learning_rate": 3.7376613515565685e-05,
      "loss": 0.0558,
      "step": 1330
    },
    {
      "epoch": 0.7630979498861048,
      "grad_norm": 2.106394052505493,
      "learning_rate": 3.728170083523159e-05,
      "loss": 0.033,
      "step": 1340
    },
    {
      "epoch": 0.7687927107061503,
      "grad_norm": 1.5199148654937744,
      "learning_rate": 3.718678815489749e-05,
      "loss": 0.0811,
      "step": 1350
    },
    {
      "epoch": 0.7744874715261959,
      "grad_norm": 2.3490426540374756,
      "learning_rate": 3.7091875474563403e-05,
      "loss": 0.0554,
      "step": 1360
    },
    {
      "epoch": 0.7801822323462415,
      "grad_norm": 2.133591413497925,
      "learning_rate": 3.6996962794229314e-05,
      "loss": 0.0792,
      "step": 1370
    },
    {
      "epoch": 0.785876993166287,
      "grad_norm": 0.3369489014148712,
      "learning_rate": 3.690205011389522e-05,
      "loss": 0.0436,
      "step": 1380
    },
    {
      "epoch": 0.7915717539863326,
      "grad_norm": 1.3656107187271118,
      "learning_rate": 3.680713743356112e-05,
      "loss": 0.0543,
      "step": 1390
    },
    {
      "epoch": 0.7972665148063781,
      "grad_norm": 0.7134440541267395,
      "learning_rate": 3.671222475322703e-05,
      "loss": 0.0329,
      "step": 1400
    },
    {
      "epoch": 0.8029612756264237,
      "grad_norm": 0.5661123991012573,
      "learning_rate": 3.661731207289294e-05,
      "loss": 0.0285,
      "step": 1410
    },
    {
      "epoch": 0.8086560364464692,
      "grad_norm": 1.2485991716384888,
      "learning_rate": 3.652239939255885e-05,
      "loss": 0.0908,
      "step": 1420
    },
    {
      "epoch": 0.8143507972665148,
      "grad_norm": 2.110455274581909,
      "learning_rate": 3.642748671222476e-05,
      "loss": 0.0284,
      "step": 1430
    },
    {
      "epoch": 0.8200455580865603,
      "grad_norm": 12.661782264709473,
      "learning_rate": 3.633257403189066e-05,
      "loss": 0.0719,
      "step": 1440
    },
    {
      "epoch": 0.8257403189066059,
      "grad_norm": 5.139870643615723,
      "learning_rate": 3.6237661351556566e-05,
      "loss": 0.084,
      "step": 1450
    },
    {
      "epoch": 0.8314350797266514,
      "grad_norm": 1.949857234954834,
      "learning_rate": 3.614274867122248e-05,
      "loss": 0.0396,
      "step": 1460
    },
    {
      "epoch": 0.837129840546697,
      "grad_norm": 3.2489545345306396,
      "learning_rate": 3.604783599088839e-05,
      "loss": 0.0521,
      "step": 1470
    },
    {
      "epoch": 0.8428246013667426,
      "grad_norm": 0.4927772879600525,
      "learning_rate": 3.595292331055429e-05,
      "loss": 0.0584,
      "step": 1480
    },
    {
      "epoch": 0.8485193621867881,
      "grad_norm": 0.08044728636741638,
      "learning_rate": 3.58580106302202e-05,
      "loss": 0.0478,
      "step": 1490
    },
    {
      "epoch": 0.8542141230068337,
      "grad_norm": 1.3953468799591064,
      "learning_rate": 3.5763097949886106e-05,
      "loss": 0.097,
      "step": 1500
    },
    {
      "epoch": 0.8599088838268792,
      "grad_norm": 0.017015213146805763,
      "learning_rate": 3.566818526955201e-05,
      "loss": 0.0659,
      "step": 1510
    },
    {
      "epoch": 0.8656036446469249,
      "grad_norm": 0.984900951385498,
      "learning_rate": 3.557327258921792e-05,
      "loss": 0.0674,
      "step": 1520
    },
    {
      "epoch": 0.8712984054669703,
      "grad_norm": 1.6620683670043945,
      "learning_rate": 3.547835990888383e-05,
      "loss": 0.0546,
      "step": 1530
    },
    {
      "epoch": 0.876993166287016,
      "grad_norm": 4.897571086883545,
      "learning_rate": 3.5383447228549736e-05,
      "loss": 0.0754,
      "step": 1540
    },
    {
      "epoch": 0.8826879271070615,
      "grad_norm": 1.2931139469146729,
      "learning_rate": 3.528853454821565e-05,
      "loss": 0.0508,
      "step": 1550
    },
    {
      "epoch": 0.8883826879271071,
      "grad_norm": 4.596437931060791,
      "learning_rate": 3.519362186788155e-05,
      "loss": 0.085,
      "step": 1560
    },
    {
      "epoch": 0.8940774487471527,
      "grad_norm": 2.0353763103485107,
      "learning_rate": 3.5098709187547455e-05,
      "loss": 0.0462,
      "step": 1570
    },
    {
      "epoch": 0.8997722095671982,
      "grad_norm": 1.4557256698608398,
      "learning_rate": 3.5003796507213365e-05,
      "loss": 0.067,
      "step": 1580
    },
    {
      "epoch": 0.9054669703872438,
      "grad_norm": 1.3519636392593384,
      "learning_rate": 3.4908883826879276e-05,
      "loss": 0.0541,
      "step": 1590
    },
    {
      "epoch": 0.9111617312072893,
      "grad_norm": 0.38374024629592896,
      "learning_rate": 3.481397114654518e-05,
      "loss": 0.0696,
      "step": 1600
    },
    {
      "epoch": 0.9168564920273349,
      "grad_norm": 0.17080959677696228,
      "learning_rate": 3.4719058466211084e-05,
      "loss": 0.0974,
      "step": 1610
    },
    {
      "epoch": 0.9225512528473804,
      "grad_norm": 0.33803385496139526,
      "learning_rate": 3.4624145785876995e-05,
      "loss": 0.0237,
      "step": 1620
    },
    {
      "epoch": 0.928246013667426,
      "grad_norm": 1.0675870180130005,
      "learning_rate": 3.45292331055429e-05,
      "loss": 0.1002,
      "step": 1630
    },
    {
      "epoch": 0.9339407744874715,
      "grad_norm": 0.23425699770450592,
      "learning_rate": 3.443432042520881e-05,
      "loss": 0.028,
      "step": 1640
    },
    {
      "epoch": 0.9396355353075171,
      "grad_norm": 1.695246696472168,
      "learning_rate": 3.433940774487472e-05,
      "loss": 0.0434,
      "step": 1650
    },
    {
      "epoch": 0.9453302961275627,
      "grad_norm": 1.134783148765564,
      "learning_rate": 3.4244495064540624e-05,
      "loss": 0.0608,
      "step": 1660
    },
    {
      "epoch": 0.9510250569476082,
      "grad_norm": 0.09808668494224548,
      "learning_rate": 3.414958238420653e-05,
      "loss": 0.027,
      "step": 1670
    },
    {
      "epoch": 0.9567198177676538,
      "grad_norm": 1.7201132774353027,
      "learning_rate": 3.405466970387244e-05,
      "loss": 0.0744,
      "step": 1680
    },
    {
      "epoch": 0.9624145785876993,
      "grad_norm": 0.9982559084892273,
      "learning_rate": 3.395975702353835e-05,
      "loss": 0.039,
      "step": 1690
    },
    {
      "epoch": 0.9681093394077449,
      "grad_norm": 0.573174238204956,
      "learning_rate": 3.3864844343204254e-05,
      "loss": 0.0448,
      "step": 1700
    },
    {
      "epoch": 0.9738041002277904,
      "grad_norm": 0.15374533832073212,
      "learning_rate": 3.3769931662870164e-05,
      "loss": 0.0307,
      "step": 1710
    },
    {
      "epoch": 0.979498861047836,
      "grad_norm": 0.28979602456092834,
      "learning_rate": 3.367501898253607e-05,
      "loss": 0.0312,
      "step": 1720
    },
    {
      "epoch": 0.9851936218678815,
      "grad_norm": 0.4142756760120392,
      "learning_rate": 3.358010630220197e-05,
      "loss": 0.0671,
      "step": 1730
    },
    {
      "epoch": 0.9908883826879271,
      "grad_norm": 1.2099072933197021,
      "learning_rate": 3.348519362186788e-05,
      "loss": 0.0613,
      "step": 1740
    },
    {
      "epoch": 0.9965831435079726,
      "grad_norm": 2.8961591720581055,
      "learning_rate": 3.3390280941533794e-05,
      "loss": 0.0468,
      "step": 1750
    },
    {
      "epoch": 1.0,
      "eval_accuracy": 0.9834836297060943,
      "eval_f1": 0.9111092563225106,
      "eval_loss": 0.06205601245164871,
      "eval_precision": 0.9033432638199271,
      "eval_recall": 0.9190099343323792,
      "eval_runtime": 13.7445,
      "eval_samples_per_second": 236.458,
      "eval_steps_per_second": 29.612,
      "step": 1756
    },
    {
      "epoch": 1.0022779043280183,
      "grad_norm": 1.1877965927124023,
      "learning_rate": 3.32953682611997e-05,
      "loss": 0.0464,
      "step": 1760
    },
    {
      "epoch": 1.0079726651480638,
      "grad_norm": 1.4486593008041382,
      "learning_rate": 3.32004555808656e-05,
      "loss": 0.0177,
      "step": 1770
    },
    {
      "epoch": 1.0136674259681093,
      "grad_norm": 0.045917004346847534,
      "learning_rate": 3.310554290053151e-05,
      "loss": 0.0293,
      "step": 1780
    },
    {
      "epoch": 1.0193621867881548,
      "grad_norm": 0.501386821269989,
      "learning_rate": 3.3010630220197417e-05,
      "loss": 0.0117,
      "step": 1790
    },
    {
      "epoch": 1.0250569476082005,
      "grad_norm": 4.940465927124023,
      "learning_rate": 3.291571753986333e-05,
      "loss": 0.0227,
      "step": 1800
    },
    {
      "epoch": 1.030751708428246,
      "grad_norm": 0.1896640509366989,
      "learning_rate": 3.282080485952924e-05,
      "loss": 0.0157,
      "step": 1810
    },
    {
      "epoch": 1.0364464692482915,
      "grad_norm": 0.2719975411891937,
      "learning_rate": 3.272589217919514e-05,
      "loss": 0.0366,
      "step": 1820
    },
    {
      "epoch": 1.042141230068337,
      "grad_norm": 1.6215999126434326,
      "learning_rate": 3.2630979498861046e-05,
      "loss": 0.0206,
      "step": 1830
    },
    {
      "epoch": 1.0478359908883828,
      "grad_norm": 0.15671390295028687,
      "learning_rate": 3.253606681852696e-05,
      "loss": 0.0142,
      "step": 1840
    },
    {
      "epoch": 1.0535307517084282,
      "grad_norm": 0.4000178575515747,
      "learning_rate": 3.244115413819286e-05,
      "loss": 0.0246,
      "step": 1850
    },
    {
      "epoch": 1.0592255125284737,
      "grad_norm": 0.03926578164100647,
      "learning_rate": 3.234624145785877e-05,
      "loss": 0.0249,
      "step": 1860
    },
    {
      "epoch": 1.0649202733485195,
      "grad_norm": 0.6636160612106323,
      "learning_rate": 3.225132877752468e-05,
      "loss": 0.0614,
      "step": 1870
    },
    {
      "epoch": 1.070615034168565,
      "grad_norm": 0.16424879431724548,
      "learning_rate": 3.2156416097190586e-05,
      "loss": 0.0411,
      "step": 1880
    },
    {
      "epoch": 1.0763097949886105,
      "grad_norm": 4.489077568054199,
      "learning_rate": 3.206150341685649e-05,
      "loss": 0.053,
      "step": 1890
    },
    {
      "epoch": 1.082004555808656,
      "grad_norm": 0.028950968757271767,
      "learning_rate": 3.19665907365224e-05,
      "loss": 0.0244,
      "step": 1900
    },
    {
      "epoch": 1.0876993166287017,
      "grad_norm": 3.7143993377685547,
      "learning_rate": 3.187167805618831e-05,
      "loss": 0.0291,
      "step": 1910
    },
    {
      "epoch": 1.0933940774487472,
      "grad_norm": 2.972627639770508,
      "learning_rate": 3.1776765375854216e-05,
      "loss": 0.0206,
      "step": 1920
    },
    {
      "epoch": 1.0990888382687927,
      "grad_norm": 1.4652332067489624,
      "learning_rate": 3.1681852695520126e-05,
      "loss": 0.0169,
      "step": 1930
    },
    {
      "epoch": 1.1047835990888384,
      "grad_norm": 1.768967866897583,
      "learning_rate": 3.158694001518603e-05,
      "loss": 0.0216,
      "step": 1940
    },
    {
      "epoch": 1.1104783599088839,
      "grad_norm": 0.3902505338191986,
      "learning_rate": 3.1492027334851934e-05,
      "loss": 0.0343,
      "step": 1950
    },
    {
      "epoch": 1.1161731207289294,
      "grad_norm": 0.0570041760802269,
      "learning_rate": 3.1397114654517845e-05,
      "loss": 0.0224,
      "step": 1960
    },
    {
      "epoch": 1.1218678815489749,
      "grad_norm": 3.5211033821105957,
      "learning_rate": 3.1302201974183756e-05,
      "loss": 0.0142,
      "step": 1970
    },
    {
      "epoch": 1.1275626423690206,
      "grad_norm": 4.0271077156066895,
      "learning_rate": 3.120728929384966e-05,
      "loss": 0.0188,
      "step": 1980
    },
    {
      "epoch": 1.133257403189066,
      "grad_norm": 0.030941655859351158,
      "learning_rate": 3.1112376613515564e-05,
      "loss": 0.0338,
      "step": 1990
    },
    {
      "epoch": 1.1389521640091116,
      "grad_norm": 0.20438140630722046,
      "learning_rate": 3.1017463933181475e-05,
      "loss": 0.0127,
      "step": 2000
    },
    {
      "epoch": 1.144646924829157,
      "grad_norm": 0.011589931324124336,
      "learning_rate": 3.092255125284738e-05,
      "loss": 0.0188,
      "step": 2010
    },
    {
      "epoch": 1.1503416856492028,
      "grad_norm": 0.036334265023469925,
      "learning_rate": 3.082763857251329e-05,
      "loss": 0.0254,
      "step": 2020
    },
    {
      "epoch": 1.1560364464692483,
      "grad_norm": 0.12188951671123505,
      "learning_rate": 3.07327258921792e-05,
      "loss": 0.0319,
      "step": 2030
    },
    {
      "epoch": 1.1617312072892938,
      "grad_norm": 3.9816770553588867,
      "learning_rate": 3.0637813211845104e-05,
      "loss": 0.0809,
      "step": 2040
    },
    {
      "epoch": 1.1674259681093395,
      "grad_norm": 2.992051124572754,
      "learning_rate": 3.054290053151101e-05,
      "loss": 0.0255,
      "step": 2050
    },
    {
      "epoch": 1.173120728929385,
      "grad_norm": 0.4495603144168854,
      "learning_rate": 3.044798785117692e-05,
      "loss": 0.0351,
      "step": 2060
    },
    {
      "epoch": 1.1788154897494305,
      "grad_norm": 2.132361888885498,
      "learning_rate": 3.0353075170842826e-05,
      "loss": 0.0317,
      "step": 2070
    },
    {
      "epoch": 1.184510250569476,
      "grad_norm": 1.9215974807739258,
      "learning_rate": 3.0258162490508733e-05,
      "loss": 0.0453,
      "step": 2080
    },
    {
      "epoch": 1.1902050113895217,
      "grad_norm": 3.614654302597046,
      "learning_rate": 3.0163249810174644e-05,
      "loss": 0.0302,
      "step": 2090
    },
    {
      "epoch": 1.1958997722095672,
      "grad_norm": 0.05617928504943848,
      "learning_rate": 3.0068337129840545e-05,
      "loss": 0.0376,
      "step": 2100
    },
    {
      "epoch": 1.2015945330296127,
      "grad_norm": 0.08208244293928146,
      "learning_rate": 2.9973424449506455e-05,
      "loss": 0.0393,
      "step": 2110
    },
    {
      "epoch": 1.2072892938496582,
      "grad_norm": 0.10671478509902954,
      "learning_rate": 2.9878511769172363e-05,
      "loss": 0.0152,
      "step": 2120
    },
    {
      "epoch": 1.212984054669704,
      "grad_norm": 0.021128954365849495,
      "learning_rate": 2.978359908883827e-05,
      "loss": 0.0091,
      "step": 2130
    },
    {
      "epoch": 1.2186788154897494,
      "grad_norm": 0.9037052392959595,
      "learning_rate": 2.968868640850418e-05,
      "loss": 0.0213,
      "step": 2140
    },
    {
      "epoch": 1.224373576309795,
      "grad_norm": 0.2880910336971283,
      "learning_rate": 2.959377372817008e-05,
      "loss": 0.0205,
      "step": 2150
    },
    {
      "epoch": 1.2300683371298406,
      "grad_norm": 0.014215152710676193,
      "learning_rate": 2.949886104783599e-05,
      "loss": 0.0177,
      "step": 2160
    },
    {
      "epoch": 1.2357630979498861,
      "grad_norm": 0.01584666594862938,
      "learning_rate": 2.94039483675019e-05,
      "loss": 0.0304,
      "step": 2170
    },
    {
      "epoch": 1.2414578587699316,
      "grad_norm": 0.027049539610743523,
      "learning_rate": 2.9309035687167807e-05,
      "loss": 0.022,
      "step": 2180
    },
    {
      "epoch": 1.2471526195899771,
      "grad_norm": 4.972292900085449,
      "learning_rate": 2.9214123006833714e-05,
      "loss": 0.0345,
      "step": 2190
    },
    {
      "epoch": 1.2528473804100229,
      "grad_norm": 1.9279311895370483,
      "learning_rate": 2.9119210326499625e-05,
      "loss": 0.0078,
      "step": 2200
    },
    {
      "epoch": 1.2585421412300684,
      "grad_norm": 4.7130255699157715,
      "learning_rate": 2.9024297646165526e-05,
      "loss": 0.0195,
      "step": 2210
    },
    {
      "epoch": 1.2642369020501139,
      "grad_norm": 0.13271582126617432,
      "learning_rate": 2.8929384965831436e-05,
      "loss": 0.0147,
      "step": 2220
    },
    {
      "epoch": 1.2699316628701594,
      "grad_norm": 2.073155641555786,
      "learning_rate": 2.8834472285497344e-05,
      "loss": 0.0295,
      "step": 2230
    },
    {
      "epoch": 1.275626423690205,
      "grad_norm": 0.2916022837162018,
      "learning_rate": 2.873955960516325e-05,
      "loss": 0.0108,
      "step": 2240
    },
    {
      "epoch": 1.2813211845102506,
      "grad_norm": 0.059762142598629,
      "learning_rate": 2.8644646924829162e-05,
      "loss": 0.051,
      "step": 2250
    },
    {
      "epoch": 1.287015945330296,
      "grad_norm": 0.08399085700511932,
      "learning_rate": 2.8549734244495063e-05,
      "loss": 0.028,
      "step": 2260
    },
    {
      "epoch": 1.2927107061503418,
      "grad_norm": 1.698492169380188,
      "learning_rate": 2.8454821564160973e-05,
      "loss": 0.0478,
      "step": 2270
    },
    {
      "epoch": 1.2984054669703873,
      "grad_norm": 0.1704496294260025,
      "learning_rate": 2.835990888382688e-05,
      "loss": 0.0403,
      "step": 2280
    },
    {
      "epoch": 1.3041002277904328,
      "grad_norm": 0.056637831032276154,
      "learning_rate": 2.8264996203492788e-05,
      "loss": 0.0215,
      "step": 2290
    },
    {
      "epoch": 1.3097949886104785,
      "grad_norm": 0.02712276019155979,
      "learning_rate": 2.8170083523158695e-05,
      "loss": 0.0216,
      "step": 2300
    },
    {
      "epoch": 1.315489749430524,
      "grad_norm": 1.20741605758667,
      "learning_rate": 2.8075170842824606e-05,
      "loss": 0.0161,
      "step": 2310
    },
    {
      "epoch": 1.3211845102505695,
      "grad_norm": 2.321183443069458,
      "learning_rate": 2.7980258162490507e-05,
      "loss": 0.0611,
      "step": 2320
    },
    {
      "epoch": 1.326879271070615,
      "grad_norm": 0.0435294434428215,
      "learning_rate": 2.7885345482156417e-05,
      "loss": 0.0203,
      "step": 2330
    },
    {
      "epoch": 1.3325740318906605,
      "grad_norm": 2.0571095943450928,
      "learning_rate": 2.7790432801822325e-05,
      "loss": 0.039,
      "step": 2340
    },
    {
      "epoch": 1.3382687927107062,
      "grad_norm": 0.1287350356578827,
      "learning_rate": 2.7695520121488232e-05,
      "loss": 0.0211,
      "step": 2350
    },
    {
      "epoch": 1.3439635535307517,
      "grad_norm": 0.2631838917732239,
      "learning_rate": 2.7600607441154143e-05,
      "loss": 0.0196,
      "step": 2360
    },
    {
      "epoch": 1.3496583143507972,
      "grad_norm": 0.11948617547750473,
      "learning_rate": 2.7505694760820043e-05,
      "loss": 0.0225,
      "step": 2370
    },
    {
      "epoch": 1.355353075170843,
      "grad_norm": 0.30161651968955994,
      "learning_rate": 2.7410782080485954e-05,
      "loss": 0.0425,
      "step": 2380
    },
    {
      "epoch": 1.3610478359908884,
      "grad_norm": 0.09938052296638489,
      "learning_rate": 2.731586940015186e-05,
      "loss": 0.02,
      "step": 2390
    },
    {
      "epoch": 1.366742596810934,
      "grad_norm": 0.12348034232854843,
      "learning_rate": 2.722095671981777e-05,
      "loss": 0.0312,
      "step": 2400
    },
    {
      "epoch": 1.3724373576309796,
      "grad_norm": 1.968189001083374,
      "learning_rate": 2.7126044039483676e-05,
      "loss": 0.0296,
      "step": 2410
    },
    {
      "epoch": 1.3781321184510251,
      "grad_norm": 2.6236233711242676,
      "learning_rate": 2.7031131359149587e-05,
      "loss": 0.026,
      "step": 2420
    },
    {
      "epoch": 1.3838268792710706,
      "grad_norm": 0.05339749529957771,
      "learning_rate": 2.6936218678815488e-05,
      "loss": 0.0235,
      "step": 2430
    },
    {
      "epoch": 1.3895216400911161,
      "grad_norm": 0.15053918957710266,
      "learning_rate": 2.68413059984814e-05,
      "loss": 0.022,
      "step": 2440
    },
    {
      "epoch": 1.3952164009111616,
      "grad_norm": 0.2259766161441803,
      "learning_rate": 2.6746393318147306e-05,
      "loss": 0.0268,
      "step": 2450
    },
    {
      "epoch": 1.4009111617312073,
      "grad_norm": 0.04061640426516533,
      "learning_rate": 2.6651480637813213e-05,
      "loss": 0.0229,
      "step": 2460
    },
    {
      "epoch": 1.4066059225512528,
      "grad_norm": 0.10259843617677689,
      "learning_rate": 2.6556567957479124e-05,
      "loss": 0.0214,
      "step": 2470
    },
    {
      "epoch": 1.4123006833712983,
      "grad_norm": 0.5361473560333252,
      "learning_rate": 2.6461655277145024e-05,
      "loss": 0.0321,
      "step": 2480
    },
    {
      "epoch": 1.417995444191344,
      "grad_norm": 1.511304497718811,
      "learning_rate": 2.6366742596810935e-05,
      "loss": 0.0083,
      "step": 2490
    },
    {
      "epoch": 1.4236902050113895,
      "grad_norm": 0.5383846759796143,
      "learning_rate": 2.6271829916476843e-05,
      "loss": 0.0255,
      "step": 2500
    },
    {
      "epoch": 1.429384965831435,
      "grad_norm": 6.834687232971191,
      "learning_rate": 2.617691723614275e-05,
      "loss": 0.0324,
      "step": 2510
    },
    {
      "epoch": 1.4350797266514808,
      "grad_norm": 0.4993496835231781,
      "learning_rate": 2.608200455580866e-05,
      "loss": 0.047,
      "step": 2520
    },
    {
      "epoch": 1.4407744874715263,
      "grad_norm": 0.32794803380966187,
      "learning_rate": 2.5987091875474568e-05,
      "loss": 0.0127,
      "step": 2530
    },
    {
      "epoch": 1.4464692482915718,
      "grad_norm": 2.7348170280456543,
      "learning_rate": 2.589217919514047e-05,
      "loss": 0.0373,
      "step": 2540
    },
    {
      "epoch": 1.4521640091116172,
      "grad_norm": 0.02981397695839405,
      "learning_rate": 2.579726651480638e-05,
      "loss": 0.0403,
      "step": 2550
    },
    {
      "epoch": 1.4578587699316627,
      "grad_norm": 0.2732754945755005,
      "learning_rate": 2.5702353834472287e-05,
      "loss": 0.0193,
      "step": 2560
    },
    {
      "epoch": 1.4635535307517085,
      "grad_norm": 0.0971960499882698,
      "learning_rate": 2.5607441154138194e-05,
      "loss": 0.015,
      "step": 2570
    },
    {
      "epoch": 1.469248291571754,
      "grad_norm": 0.6999448537826538,
      "learning_rate": 2.5512528473804105e-05,
      "loss": 0.0112,
      "step": 2580
    },
    {
      "epoch": 1.4749430523917995,
      "grad_norm": 4.170445919036865,
      "learning_rate": 2.5417615793470005e-05,
      "loss": 0.0296,
      "step": 2590
    },
    {
      "epoch": 1.4806378132118452,
      "grad_norm": 0.017274674028158188,
      "learning_rate": 2.5322703113135916e-05,
      "loss": 0.0096,
      "step": 2600
    },
    {
      "epoch": 1.4863325740318907,
      "grad_norm": 2.0988516807556152,
      "learning_rate": 2.5227790432801824e-05,
      "loss": 0.0219,
      "step": 2610
    },
    {
      "epoch": 1.4920273348519362,
      "grad_norm": 0.33241793513298035,
      "learning_rate": 2.513287775246773e-05,
      "loss": 0.0337,
      "step": 2620
    },
    {
      "epoch": 1.497722095671982,
      "grad_norm": 0.34468936920166016,
      "learning_rate": 2.503796507213364e-05,
      "loss": 0.0209,
      "step": 2630
    },
    {
      "epoch": 1.5034168564920274,
      "grad_norm": 0.026434799656271935,
      "learning_rate": 2.4943052391799546e-05,
      "loss": 0.017,
      "step": 2640
    },
    {
      "epoch": 1.5091116173120729,
      "grad_norm": 0.00831619743257761,
      "learning_rate": 2.4848139711465453e-05,
      "loss": 0.0318,
      "step": 2650
    },
    {
      "epoch": 1.5148063781321186,
      "grad_norm": 0.5614336133003235,
      "learning_rate": 2.475322703113136e-05,
      "loss": 0.0349,
      "step": 2660
    },
    {
      "epoch": 1.5205011389521639,
      "grad_norm": 0.6730136871337891,
      "learning_rate": 2.4658314350797268e-05,
      "loss": 0.0163,
      "step": 2670
    },
    {
      "epoch": 1.5261958997722096,
      "grad_norm": 0.08429904282093048,
      "learning_rate": 2.4563401670463175e-05,
      "loss": 0.0182,
      "step": 2680
    },
    {
      "epoch": 1.531890660592255,
      "grad_norm": 0.017787586897611618,
      "learning_rate": 2.4468488990129082e-05,
      "loss": 0.0746,
      "step": 2690
    },
    {
      "epoch": 1.5375854214123006,
      "grad_norm": 0.2954597771167755,
      "learning_rate": 2.437357630979499e-05,
      "loss": 0.0093,
      "step": 2700
    },
    {
      "epoch": 1.5432801822323463,
      "grad_norm": 1.5878958702087402,
      "learning_rate": 2.4278663629460897e-05,
      "loss": 0.023,
      "step": 2710
    },
    {
      "epoch": 1.5489749430523918,
      "grad_norm": 0.09025635570287704,
      "learning_rate": 2.4183750949126804e-05,
      "loss": 0.0492,
      "step": 2720
    },
    {
      "epoch": 1.5546697038724373,
      "grad_norm": 0.3361518979072571,
      "learning_rate": 2.4088838268792712e-05,
      "loss": 0.0336,
      "step": 2730
    },
    {
      "epoch": 1.560364464692483,
      "grad_norm": 1.164107322692871,
      "learning_rate": 2.399392558845862e-05,
      "loss": 0.0158,
      "step": 2740
    },
    {
      "epoch": 1.5660592255125285,
      "grad_norm": 2.972662925720215,
      "learning_rate": 2.3899012908124527e-05,
      "loss": 0.0198,
      "step": 2750
    },
    {
      "epoch": 1.571753986332574,
      "grad_norm": 0.5792711973190308,
      "learning_rate": 2.3804100227790434e-05,
      "loss": 0.0186,
      "step": 2760
    },
    {
      "epoch": 1.5774487471526197,
      "grad_norm": 3.0818066596984863,
      "learning_rate": 2.370918754745634e-05,
      "loss": 0.0402,
      "step": 2770
    },
    {
      "epoch": 1.583143507972665,
      "grad_norm": 0.48252251744270325,
      "learning_rate": 2.361427486712225e-05,
      "loss": 0.0335,
      "step": 2780
    },
    {
      "epoch": 1.5888382687927107,
      "grad_norm": 0.024195630103349686,
      "learning_rate": 2.3519362186788156e-05,
      "loss": 0.0482,
      "step": 2790
    },
    {
      "epoch": 1.5945330296127562,
      "grad_norm": 2.509986400604248,
      "learning_rate": 2.3424449506454063e-05,
      "loss": 0.0164,
      "step": 2800
    },
    {
      "epoch": 1.6002277904328017,
      "grad_norm": 0.16147683560848236,
      "learning_rate": 2.332953682611997e-05,
      "loss": 0.0172,
      "step": 2810
    },
    {
      "epoch": 1.6059225512528474,
      "grad_norm": 6.9054484367370605,
      "learning_rate": 2.3234624145785878e-05,
      "loss": 0.0249,
      "step": 2820
    },
    {
      "epoch": 1.611617312072893,
      "grad_norm": 0.02088954672217369,
      "learning_rate": 2.3139711465451785e-05,
      "loss": 0.0442,
      "step": 2830
    },
    {
      "epoch": 1.6173120728929384,
      "grad_norm": 0.334330677986145,
      "learning_rate": 2.3044798785117693e-05,
      "loss": 0.0128,
      "step": 2840
    },
    {
      "epoch": 1.6230068337129842,
      "grad_norm": 0.7511960864067078,
      "learning_rate": 2.29498861047836e-05,
      "loss": 0.0134,
      "step": 2850
    },
    {
      "epoch": 1.6287015945330297,
      "grad_norm": 0.07080753147602081,
      "learning_rate": 2.2854973424449508e-05,
      "loss": 0.015,
      "step": 2860
    },
    {
      "epoch": 1.6343963553530751,
      "grad_norm": 1.2584781646728516,
      "learning_rate": 2.2760060744115415e-05,
      "loss": 0.0277,
      "step": 2870
    },
    {
      "epoch": 1.6400911161731209,
      "grad_norm": 1.7000540494918823,
      "learning_rate": 2.2665148063781322e-05,
      "loss": 0.0361,
      "step": 2880
    },
    {
      "epoch": 1.6457858769931661,
      "grad_norm": 0.07065621763467789,
      "learning_rate": 2.257023538344723e-05,
      "loss": 0.0204,
      "step": 2890
    },
    {
      "epoch": 1.6514806378132119,
      "grad_norm": 1.1046833992004395,
      "learning_rate": 2.2475322703113137e-05,
      "loss": 0.0279,
      "step": 2900
    },
    {
      "epoch": 1.6571753986332574,
      "grad_norm": 0.12607507407665253,
      "learning_rate": 2.2380410022779044e-05,
      "loss": 0.0126,
      "step": 2910
    },
    {
      "epoch": 1.6628701594533029,
      "grad_norm": 0.016763895750045776,
      "learning_rate": 2.2285497342444952e-05,
      "loss": 0.0198,
      "step": 2920
    },
    {
      "epoch": 1.6685649202733486,
      "grad_norm": 0.35705041885375977,
      "learning_rate": 2.219058466211086e-05,
      "loss": 0.0196,
      "step": 2930
    },
    {
      "epoch": 1.674259681093394,
      "grad_norm": 2.3731415271759033,
      "learning_rate": 2.2095671981776766e-05,
      "loss": 0.0195,
      "step": 2940
    },
    {
      "epoch": 1.6799544419134396,
      "grad_norm": 0.042373090982437134,
      "learning_rate": 2.2000759301442674e-05,
      "loss": 0.0532,
      "step": 2950
    },
    {
      "epoch": 1.6856492027334853,
      "grad_norm": 0.13854794204235077,
      "learning_rate": 2.190584662110858e-05,
      "loss": 0.0093,
      "step": 2960
    },
    {
      "epoch": 1.6913439635535308,
      "grad_norm": 1.4668946266174316,
      "learning_rate": 2.181093394077449e-05,
      "loss": 0.0155,
      "step": 2970
    },
    {
      "epoch": 1.6970387243735763,
      "grad_norm": 0.01016306784003973,
      "learning_rate": 2.1716021260440396e-05,
      "loss": 0.0254,
      "step": 2980
    },
    {
      "epoch": 1.702733485193622,
      "grad_norm": 1.037170171737671,
      "learning_rate": 2.1621108580106303e-05,
      "loss": 0.0315,
      "step": 2990
    },
    {
      "epoch": 1.7084282460136673,
      "grad_norm": 1.5667529106140137,
      "learning_rate": 2.152619589977221e-05,
      "loss": 0.026,
      "step": 3000
    },
    {
      "epoch": 1.714123006833713,
      "grad_norm": 2.287351608276367,
      "learning_rate": 2.1431283219438118e-05,
      "loss": 0.0186,
      "step": 3010
    },
    {
      "epoch": 1.7198177676537585,
      "grad_norm": 0.9441989064216614,
      "learning_rate": 2.1336370539104025e-05,
      "loss": 0.029,
      "step": 3020
    },
    {
      "epoch": 1.725512528473804,
      "grad_norm": 0.032152529805898666,
      "learning_rate": 2.1241457858769933e-05,
      "loss": 0.0175,
      "step": 3030
    },
    {
      "epoch": 1.7312072892938497,
      "grad_norm": 0.23306457698345184,
      "learning_rate": 2.114654517843584e-05,
      "loss": 0.0168,
      "step": 3040
    },
    {
      "epoch": 1.7369020501138952,
      "grad_norm": 0.6797131896018982,
      "learning_rate": 2.1051632498101747e-05,
      "loss": 0.0147,
      "step": 3050
    },
    {
      "epoch": 1.7425968109339407,
      "grad_norm": 0.3807680904865265,
      "learning_rate": 2.0956719817767655e-05,
      "loss": 0.0184,
      "step": 3060
    },
    {
      "epoch": 1.7482915717539864,
      "grad_norm": 0.3467291295528412,
      "learning_rate": 2.0861807137433562e-05,
      "loss": 0.0142,
      "step": 3070
    },
    {
      "epoch": 1.753986332574032,
      "grad_norm": 1.8795088529586792,
      "learning_rate": 2.076689445709947e-05,
      "loss": 0.048,
      "step": 3080
    },
    {
      "epoch": 1.7596810933940774,
      "grad_norm": 2.4153385162353516,
      "learning_rate": 2.0671981776765377e-05,
      "loss": 0.017,
      "step": 3090
    },
    {
      "epoch": 1.7653758542141231,
      "grad_norm": 1.2200242280960083,
      "learning_rate": 2.0577069096431284e-05,
      "loss": 0.0244,
      "step": 3100
    },
    {
      "epoch": 1.7710706150341684,
      "grad_norm": 0.9201875925064087,
      "learning_rate": 2.048215641609719e-05,
      "loss": 0.0377,
      "step": 3110
    },
    {
      "epoch": 1.7767653758542141,
      "grad_norm": 0.008683672174811363,
      "learning_rate": 2.03872437357631e-05,
      "loss": 0.0292,
      "step": 3120
    },
    {
      "epoch": 1.7824601366742598,
      "grad_norm": 0.28299397230148315,
      "learning_rate": 2.0292331055429006e-05,
      "loss": 0.0309,
      "step": 3130
    },
    {
      "epoch": 1.7881548974943051,
      "grad_norm": 0.22945231199264526,
      "learning_rate": 2.0197418375094914e-05,
      "loss": 0.0249,
      "step": 3140
    },
    {
      "epoch": 1.7938496583143508,
      "grad_norm": 0.12214020639657974,
      "learning_rate": 2.010250569476082e-05,
      "loss": 0.0089,
      "step": 3150
    },
    {
      "epoch": 1.7995444191343963,
      "grad_norm": 0.06660139560699463,
      "learning_rate": 2.000759301442673e-05,
      "loss": 0.0189,
      "step": 3160
    },
    {
      "epoch": 1.8052391799544418,
      "grad_norm": 5.765466213226318,
      "learning_rate": 1.9912680334092636e-05,
      "loss": 0.0715,
      "step": 3170
    },
    {
      "epoch": 1.8109339407744875,
      "grad_norm": 0.15886840224266052,
      "learning_rate": 1.9817767653758543e-05,
      "loss": 0.0066,
      "step": 3180
    },
    {
      "epoch": 1.816628701594533,
      "grad_norm": 1.608310341835022,
      "learning_rate": 1.972285497342445e-05,
      "loss": 0.027,
      "step": 3190
    },
    {
      "epoch": 1.8223234624145785,
      "grad_norm": 2.088512420654297,
      "learning_rate": 1.9627942293090358e-05,
      "loss": 0.0348,
      "step": 3200
    },
    {
      "epoch": 1.8280182232346243,
      "grad_norm": 0.33316129446029663,
      "learning_rate": 1.9533029612756265e-05,
      "loss": 0.056,
      "step": 3210
    },
    {
      "epoch": 1.8337129840546698,
      "grad_norm": 0.11885243654251099,
      "learning_rate": 1.9438116932422173e-05,
      "loss": 0.0231,
      "step": 3220
    },
    {
      "epoch": 1.8394077448747153,
      "grad_norm": 5.114065647125244,
      "learning_rate": 1.934320425208808e-05,
      "loss": 0.0324,
      "step": 3230
    },
    {
      "epoch": 1.845102505694761,
      "grad_norm": 3.247770309448242,
      "learning_rate": 1.9248291571753987e-05,
      "loss": 0.0437,
      "step": 3240
    },
    {
      "epoch": 1.8507972665148062,
      "grad_norm": 4.07563591003418,
      "learning_rate": 1.9153378891419895e-05,
      "loss": 0.0152,
      "step": 3250
    },
    {
      "epoch": 1.856492027334852,
      "grad_norm": 0.7992623448371887,
      "learning_rate": 1.9058466211085802e-05,
      "loss": 0.04,
      "step": 3260
    },
    {
      "epoch": 1.8621867881548975,
      "grad_norm": 2.1190977096557617,
      "learning_rate": 1.896355353075171e-05,
      "loss": 0.0205,
      "step": 3270
    },
    {
      "epoch": 1.867881548974943,
      "grad_norm": 0.08607299625873566,
      "learning_rate": 1.8868640850417617e-05,
      "loss": 0.0614,
      "step": 3280
    },
    {
      "epoch": 1.8735763097949887,
      "grad_norm": 0.11223249137401581,
      "learning_rate": 1.8773728170083524e-05,
      "loss": 0.0152,
      "step": 3290
    },
    {
      "epoch": 1.8792710706150342,
      "grad_norm": 0.09186819195747375,
      "learning_rate": 1.867881548974943e-05,
      "loss": 0.0607,
      "step": 3300
    },
    {
      "epoch": 1.8849658314350797,
      "grad_norm": 1.1647542715072632,
      "learning_rate": 1.858390280941534e-05,
      "loss": 0.0312,
      "step": 3310
    },
    {
      "epoch": 1.8906605922551254,
      "grad_norm": 0.19422082602977753,
      "learning_rate": 1.8488990129081246e-05,
      "loss": 0.0085,
      "step": 3320
    },
    {
      "epoch": 1.896355353075171,
      "grad_norm": 2.4002015590667725,
      "learning_rate": 1.8394077448747154e-05,
      "loss": 0.0264,
      "step": 3330
    },
    {
      "epoch": 1.9020501138952164,
      "grad_norm": 0.8677728176116943,
      "learning_rate": 1.829916476841306e-05,
      "loss": 0.0296,
      "step": 3340
    },
    {
      "epoch": 1.907744874715262,
      "grad_norm": 2.0761377811431885,
      "learning_rate": 1.8204252088078968e-05,
      "loss": 0.0192,
      "step": 3350
    },
    {
      "epoch": 1.9134396355353074,
      "grad_norm": 0.7011535167694092,
      "learning_rate": 1.8109339407744876e-05,
      "loss": 0.0147,
      "step": 3360
    },
    {
      "epoch": 1.919134396355353,
      "grad_norm": 0.8465079069137573,
      "learning_rate": 1.8014426727410783e-05,
      "loss": 0.0287,
      "step": 3370
    },
    {
      "epoch": 1.9248291571753986,
      "grad_norm": 0.30490973591804504,
      "learning_rate": 1.791951404707669e-05,
      "loss": 0.0111,
      "step": 3380
    },
    {
      "epoch": 1.930523917995444,
      "grad_norm": 0.2505026161670685,
      "learning_rate": 1.7824601366742598e-05,
      "loss": 0.01,
      "step": 3390
    },
    {
      "epoch": 1.9362186788154898,
      "grad_norm": 0.8068025708198547,
      "learning_rate": 1.7729688686408505e-05,
      "loss": 0.0286,
      "step": 3400
    },
    {
      "epoch": 1.9419134396355353,
      "grad_norm": 2.2277448177337646,
      "learning_rate": 1.7634776006074412e-05,
      "loss": 0.0198,
      "step": 3410
    },
    {
      "epoch": 1.9476082004555808,
      "grad_norm": 0.13520954549312592,
      "learning_rate": 1.753986332574032e-05,
      "loss": 0.0232,
      "step": 3420
    },
    {
      "epoch": 1.9533029612756265,
      "grad_norm": 0.0674479603767395,
      "learning_rate": 1.7444950645406227e-05,
      "loss": 0.0176,
      "step": 3430
    },
    {
      "epoch": 1.958997722095672,
      "grad_norm": 0.018033193424344063,
      "learning_rate": 1.7350037965072134e-05,
      "loss": 0.0262,
      "step": 3440
    },
    {
      "epoch": 1.9646924829157175,
      "grad_norm": 0.09576301276683807,
      "learning_rate": 1.7255125284738042e-05,
      "loss": 0.0217,
      "step": 3450
    },
    {
      "epoch": 1.9703872437357632,
      "grad_norm": 0.9586463570594788,
      "learning_rate": 1.716021260440395e-05,
      "loss": 0.0123,
      "step": 3460
    },
    {
      "epoch": 1.9760820045558085,
      "grad_norm": 0.006530022248625755,
      "learning_rate": 1.7065299924069857e-05,
      "loss": 0.0163,
      "step": 3470
    },
    {
      "epoch": 1.9817767653758542,
      "grad_norm": 1.7253230810165405,
      "learning_rate": 1.6970387243735764e-05,
      "loss": 0.0126,
      "step": 3480
    },
    {
      "epoch": 1.9874715261958997,
      "grad_norm": 0.07670151442289352,
      "learning_rate": 1.687547456340167e-05,
      "loss": 0.0096,
      "step": 3490
    },
    {
      "epoch": 1.9931662870159452,
      "grad_norm": 0.013690901920199394,
      "learning_rate": 1.678056188306758e-05,
      "loss": 0.0168,
      "step": 3500
    },
    {
      "epoch": 1.998861047835991,
      "grad_norm": 0.4434203505516052,
      "learning_rate": 1.6685649202733486e-05,
      "loss": 0.0178,
      "step": 3510
    },
    {
      "epoch": 2.0,
      "eval_accuracy": 0.9870868472820054,
      "eval_f1": 0.9356108180524156,
      "eval_loss": 0.05325901880860329,
      "eval_precision": 0.9305463024650233,
      "eval_recall": 0.9407307627546725,
      "eval_runtime": 13.1682,
      "eval_samples_per_second": 246.807,
      "eval_steps_per_second": 30.908,
      "step": 3512
    },
{ |
|
"epoch": 2.0045558086560367, |
|
"grad_norm": 1.626996397972107, |
|
"learning_rate": 1.6590736522399393e-05, |
|
"loss": 0.0158, |
|
"step": 3520 |
|
}, |
|
{ |
|
"epoch": 2.010250569476082, |
|
"grad_norm": 0.17399589717388153, |
|
"learning_rate": 1.64958238420653e-05, |
|
"loss": 0.0077, |
|
"step": 3530 |
|
}, |
|
{ |
|
"epoch": 2.0159453302961277, |
|
"grad_norm": 0.027639882639050484, |
|
"learning_rate": 1.6400911161731208e-05, |
|
"loss": 0.0236, |
|
"step": 3540 |
|
}, |
|
{ |
|
"epoch": 2.021640091116173, |
|
"grad_norm": 0.7574236392974854, |
|
"learning_rate": 1.6305998481397115e-05, |
|
"loss": 0.0153, |
|
"step": 3550 |
|
}, |
|
{ |
|
"epoch": 2.0273348519362187, |
|
"grad_norm": 0.023957232013344765, |
|
"learning_rate": 1.6211085801063023e-05, |
|
"loss": 0.0046, |
|
"step": 3560 |
|
}, |
|
{ |
|
"epoch": 2.0330296127562644, |
|
"grad_norm": 2.448194980621338, |
|
"learning_rate": 1.611617312072893e-05, |
|
"loss": 0.011, |
|
"step": 3570 |
|
}, |
|
{ |
|
"epoch": 2.0387243735763096, |
|
"grad_norm": 0.5603412985801697, |
|
"learning_rate": 1.6021260440394838e-05, |
|
"loss": 0.0129, |
|
"step": 3580 |
|
}, |
|
{ |
|
"epoch": 2.0444191343963554, |
|
"grad_norm": 0.7681556940078735, |
|
"learning_rate": 1.5926347760060745e-05, |
|
"loss": 0.0129, |
|
"step": 3590 |
|
}, |
|
{ |
|
"epoch": 2.050113895216401, |
|
"grad_norm": 0.09505771845579147, |
|
"learning_rate": 1.5831435079726652e-05, |
|
"loss": 0.0093, |
|
"step": 3600 |
|
}, |
|
{ |
|
"epoch": 2.0558086560364464, |
|
"grad_norm": 0.05849296599626541, |
|
"learning_rate": 1.573652239939256e-05, |
|
"loss": 0.0311, |
|
"step": 3610 |
|
}, |
|
{ |
|
"epoch": 2.061503416856492, |
|
"grad_norm": 0.5222179293632507, |
|
"learning_rate": 1.5641609719058467e-05, |
|
"loss": 0.0083, |
|
"step": 3620 |
|
}, |
|
{ |
|
"epoch": 2.067198177676538, |
|
"grad_norm": 1.0224918127059937, |
|
"learning_rate": 1.5546697038724374e-05, |
|
"loss": 0.0159, |
|
"step": 3630 |
|
}, |
|
{ |
|
"epoch": 2.072892938496583, |
|
"grad_norm": 1.3902862071990967, |
|
"learning_rate": 1.545178435839028e-05, |
|
"loss": 0.0077, |
|
"step": 3640 |
|
}, |
|
{ |
|
"epoch": 2.078587699316629, |
|
"grad_norm": 0.2441783845424652, |
|
"learning_rate": 1.535687167805619e-05, |
|
"loss": 0.0079, |
|
"step": 3650 |
|
}, |
|
{ |
|
"epoch": 2.084282460136674, |
|
"grad_norm": 0.012967357411980629, |
|
"learning_rate": 1.5261958997722096e-05, |
|
"loss": 0.0068, |
|
"step": 3660 |
|
}, |
|
{ |
|
"epoch": 2.08997722095672, |
|
"grad_norm": 0.5421155095100403, |
|
"learning_rate": 1.5167046317388004e-05, |
|
"loss": 0.0108, |
|
"step": 3670 |
|
}, |
|
{ |
|
"epoch": 2.0956719817767655, |
|
"grad_norm": 0.24783940613269806, |
|
"learning_rate": 1.5072133637053911e-05, |
|
"loss": 0.0023, |
|
"step": 3680 |
|
}, |
|
{ |
|
"epoch": 2.1013667425968108, |
|
"grad_norm": 0.03946458548307419, |
|
"learning_rate": 1.4977220956719817e-05, |
|
"loss": 0.0036, |
|
"step": 3690 |
|
}, |
|
{ |
|
"epoch": 2.1070615034168565, |
|
"grad_norm": 0.009126793593168259, |
|
"learning_rate": 1.4882308276385726e-05, |
|
"loss": 0.0064, |
|
"step": 3700 |
|
}, |
|
{ |
|
"epoch": 2.112756264236902, |
|
"grad_norm": 1.0558329820632935, |
|
"learning_rate": 1.4787395596051635e-05, |
|
"loss": 0.0095, |
|
"step": 3710 |
|
}, |
|
{ |
|
"epoch": 2.1184510250569475, |
|
"grad_norm": 0.3603816032409668, |
|
"learning_rate": 1.469248291571754e-05, |
|
"loss": 0.0038, |
|
"step": 3720 |
|
}, |
|
{ |
|
"epoch": 2.124145785876993, |
|
"grad_norm": 0.16135656833648682, |
|
"learning_rate": 1.4597570235383448e-05, |
|
"loss": 0.0019, |
|
"step": 3730 |
|
}, |
|
{ |
|
"epoch": 2.129840546697039, |
|
"grad_norm": 0.006461753975600004, |
|
"learning_rate": 1.4502657555049357e-05, |
|
"loss": 0.0122, |
|
"step": 3740 |
|
}, |
|
{ |
|
"epoch": 2.135535307517084, |
|
"grad_norm": 1.8980814218521118, |
|
"learning_rate": 1.4407744874715263e-05, |
|
"loss": 0.0058, |
|
"step": 3750 |
|
}, |
|
{ |
|
"epoch": 2.14123006833713, |
|
"grad_norm": 1.94465970993042, |
|
"learning_rate": 1.431283219438117e-05, |
|
"loss": 0.0196, |
|
"step": 3760 |
|
}, |
|
{ |
|
"epoch": 2.146924829157175, |
|
"grad_norm": 0.01323908194899559, |
|
"learning_rate": 1.4217919514047076e-05, |
|
"loss": 0.0141, |
|
"step": 3770 |
|
}, |
|
{ |
|
"epoch": 2.152619589977221, |
|
"grad_norm": 0.07417018711566925, |
|
"learning_rate": 1.4123006833712985e-05, |
|
"loss": 0.0076, |
|
"step": 3780 |
|
}, |
|
{ |
|
"epoch": 2.1583143507972666, |
|
"grad_norm": 0.02077096700668335, |
|
"learning_rate": 1.4028094153378892e-05, |
|
"loss": 0.0033, |
|
"step": 3790 |
|
}, |
|
{ |
|
"epoch": 2.164009111617312, |
|
"grad_norm": 0.01688476838171482, |
|
"learning_rate": 1.3933181473044798e-05, |
|
"loss": 0.0198, |
|
"step": 3800 |
|
}, |
|
{ |
|
"epoch": 2.1697038724373576, |
|
"grad_norm": 0.04020548611879349, |
|
"learning_rate": 1.3838268792710707e-05, |
|
"loss": 0.0214, |
|
"step": 3810 |
|
}, |
|
{ |
|
"epoch": 2.1753986332574033, |
|
"grad_norm": 0.01843833364546299, |
|
"learning_rate": 1.3743356112376616e-05, |
|
"loss": 0.0049, |
|
"step": 3820 |
|
}, |
|
{ |
|
"epoch": 2.1810933940774486, |
|
"grad_norm": 0.12003644555807114, |
|
"learning_rate": 1.3648443432042522e-05, |
|
"loss": 0.006, |
|
"step": 3830 |
|
}, |
|
{ |
|
"epoch": 2.1867881548974943, |
|
"grad_norm": 1.9108054637908936, |
|
"learning_rate": 1.3553530751708429e-05, |
|
"loss": 0.0211, |
|
"step": 3840 |
|
}, |
|
{ |
|
"epoch": 2.19248291571754, |
|
"grad_norm": 0.5751833319664001, |
|
"learning_rate": 1.3458618071374338e-05, |
|
"loss": 0.008, |
|
"step": 3850 |
|
}, |
|
{ |
|
"epoch": 2.1981776765375853, |
|
"grad_norm": 0.5237908959388733, |
|
"learning_rate": 1.3363705391040244e-05, |
|
"loss": 0.0122, |
|
"step": 3860 |
|
}, |
|
{ |
|
"epoch": 2.203872437357631, |
|
"grad_norm": 1.8080928325653076, |
|
"learning_rate": 1.3268792710706151e-05, |
|
"loss": 0.0074, |
|
"step": 3870 |
|
}, |
|
{ |
|
"epoch": 2.2095671981776768, |
|
"grad_norm": 1.2654815912246704, |
|
"learning_rate": 1.3173880030372057e-05, |
|
"loss": 0.0098, |
|
"step": 3880 |
|
}, |
|
{ |
|
"epoch": 2.215261958997722, |
|
"grad_norm": 0.4974677860736847, |
|
"learning_rate": 1.3078967350037966e-05, |
|
"loss": 0.0038, |
|
"step": 3890 |
|
}, |
|
{ |
|
"epoch": 2.2209567198177678, |
|
"grad_norm": 2.2507166862487793, |
|
"learning_rate": 1.2984054669703875e-05, |
|
"loss": 0.0184, |
|
"step": 3900 |
|
}, |
|
{ |
|
"epoch": 2.226651480637813, |
|
"grad_norm": 0.04915757104754448, |
|
"learning_rate": 1.2889141989369779e-05, |
|
"loss": 0.003, |
|
"step": 3910 |
|
}, |
|
{ |
|
"epoch": 2.2323462414578588, |
|
"grad_norm": 0.03872552514076233, |
|
"learning_rate": 1.2794229309035688e-05, |
|
"loss": 0.0138, |
|
"step": 3920 |
|
}, |
|
{ |
|
"epoch": 2.2380410022779045, |
|
"grad_norm": 0.7878251671791077, |
|
"learning_rate": 1.2699316628701597e-05, |
|
"loss": 0.0129, |
|
"step": 3930 |
|
}, |
|
{ |
|
"epoch": 2.2437357630979498, |
|
"grad_norm": 0.011351537890732288, |
|
"learning_rate": 1.2604403948367503e-05, |
|
"loss": 0.0159, |
|
"step": 3940 |
|
}, |
|
{ |
|
"epoch": 2.2494305239179955, |
|
"grad_norm": 0.012801927514374256, |
|
"learning_rate": 1.250949126803341e-05, |
|
"loss": 0.0084, |
|
"step": 3950 |
|
}, |
|
{ |
|
"epoch": 2.255125284738041, |
|
"grad_norm": 0.8789533376693726, |
|
"learning_rate": 1.2414578587699317e-05, |
|
"loss": 0.0137, |
|
"step": 3960 |
|
}, |
|
{ |
|
"epoch": 2.2608200455580865, |
|
"grad_norm": 0.008565433323383331, |
|
"learning_rate": 1.2319665907365225e-05, |
|
"loss": 0.0018, |
|
"step": 3970 |
|
}, |
|
{ |
|
"epoch": 2.266514806378132, |
|
"grad_norm": 0.04799957945942879, |
|
"learning_rate": 1.2224753227031132e-05, |
|
"loss": 0.0114, |
|
"step": 3980 |
|
}, |
|
{ |
|
"epoch": 2.2722095671981775, |
|
"grad_norm": 0.27682027220726013, |
|
"learning_rate": 1.212984054669704e-05, |
|
"loss": 0.0102, |
|
"step": 3990 |
|
}, |
|
{ |
|
"epoch": 2.277904328018223, |
|
"grad_norm": 0.008524512872099876, |
|
"learning_rate": 1.2034927866362947e-05, |
|
"loss": 0.016, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 2.283599088838269, |
|
"grad_norm": 0.14340077340602875, |
|
"learning_rate": 1.1940015186028854e-05, |
|
"loss": 0.0052, |
|
"step": 4010 |
|
}, |
|
{ |
|
"epoch": 2.289293849658314, |
|
"grad_norm": 0.16430233418941498, |
|
"learning_rate": 1.1845102505694761e-05, |
|
"loss": 0.0066, |
|
"step": 4020 |
|
}, |
|
{ |
|
"epoch": 2.29498861047836, |
|
"grad_norm": 0.14387188851833344, |
|
"learning_rate": 1.1750189825360669e-05, |
|
"loss": 0.0066, |
|
"step": 4030 |
|
}, |
|
{ |
|
"epoch": 2.3006833712984056, |
|
"grad_norm": 0.16657856106758118, |
|
"learning_rate": 1.1655277145026576e-05, |
|
"loss": 0.0172, |
|
"step": 4040 |
|
}, |
|
{ |
|
"epoch": 2.306378132118451, |
|
"grad_norm": 0.9251142740249634, |
|
"learning_rate": 1.1560364464692483e-05, |
|
"loss": 0.0204, |
|
"step": 4050 |
|
}, |
|
{ |
|
"epoch": 2.3120728929384966, |
|
"grad_norm": 0.04931972548365593, |
|
"learning_rate": 1.146545178435839e-05, |
|
"loss": 0.0097, |
|
"step": 4060 |
|
}, |
|
{ |
|
"epoch": 2.3177676537585423, |
|
"grad_norm": 4.824743747711182, |
|
"learning_rate": 1.1370539104024298e-05, |
|
"loss": 0.0169, |
|
"step": 4070 |
|
}, |
|
{ |
|
"epoch": 2.3234624145785876, |
|
"grad_norm": 0.01028421986848116, |
|
"learning_rate": 1.1275626423690206e-05, |
|
"loss": 0.0049, |
|
"step": 4080 |
|
}, |
|
{ |
|
"epoch": 2.3291571753986333, |
|
"grad_norm": 0.04389754310250282, |
|
"learning_rate": 1.1180713743356113e-05, |
|
"loss": 0.0051, |
|
"step": 4090 |
|
}, |
|
{ |
|
"epoch": 2.334851936218679, |
|
"grad_norm": 0.7398139238357544, |
|
"learning_rate": 1.108580106302202e-05, |
|
"loss": 0.0062, |
|
"step": 4100 |
|
}, |
|
{ |
|
"epoch": 2.3405466970387243, |
|
"grad_norm": 0.2955804467201233, |
|
"learning_rate": 1.0990888382687928e-05, |
|
"loss": 0.0092, |
|
"step": 4110 |
|
}, |
|
{ |
|
"epoch": 2.34624145785877, |
|
"grad_norm": 0.026279909536242485, |
|
"learning_rate": 1.0895975702353835e-05, |
|
"loss": 0.0241, |
|
"step": 4120 |
|
}, |
|
{ |
|
"epoch": 2.3519362186788153, |
|
"grad_norm": 2.0768744945526123, |
|
"learning_rate": 1.0801063022019742e-05, |
|
"loss": 0.0175, |
|
"step": 4130 |
|
}, |
|
{ |
|
"epoch": 2.357630979498861, |
|
"grad_norm": 1.5808554887771606, |
|
"learning_rate": 1.070615034168565e-05, |
|
"loss": 0.0137, |
|
"step": 4140 |
|
}, |
|
{ |
|
"epoch": 2.3633257403189067, |
|
"grad_norm": 2.1162478923797607, |
|
"learning_rate": 1.0611237661351557e-05, |
|
"loss": 0.011, |
|
"step": 4150 |
|
}, |
|
{ |
|
"epoch": 2.369020501138952, |
|
"grad_norm": 1.039797306060791, |
|
"learning_rate": 1.0516324981017464e-05, |
|
"loss": 0.026, |
|
"step": 4160 |
|
}, |
|
{ |
|
"epoch": 2.3747152619589977, |
|
"grad_norm": 0.1136997640132904, |
|
"learning_rate": 1.0421412300683372e-05, |
|
"loss": 0.0123, |
|
"step": 4170 |
|
}, |
|
{ |
|
"epoch": 2.3804100227790435, |
|
"grad_norm": 0.648858904838562, |
|
"learning_rate": 1.032649962034928e-05, |
|
"loss": 0.0145, |
|
"step": 4180 |
|
}, |
|
{ |
|
"epoch": 2.3861047835990887, |
|
"grad_norm": 0.03447713330388069, |
|
"learning_rate": 1.0231586940015187e-05, |
|
"loss": 0.0014, |
|
"step": 4190 |
|
}, |
|
{ |
|
"epoch": 2.3917995444191344, |
|
"grad_norm": 0.10330228507518768, |
|
"learning_rate": 1.0136674259681094e-05, |
|
"loss": 0.0191, |
|
"step": 4200 |
|
}, |
|
{ |
|
"epoch": 2.3974943052391797, |
|
"grad_norm": 0.018602780997753143, |
|
"learning_rate": 1.0041761579347001e-05, |
|
"loss": 0.0071, |
|
"step": 4210 |
|
}, |
|
{ |
|
"epoch": 2.4031890660592254, |
|
"grad_norm": 0.14088431000709534, |
|
"learning_rate": 9.946848899012909e-06, |
|
"loss": 0.0028, |
|
"step": 4220 |
|
}, |
|
{ |
|
"epoch": 2.408883826879271, |
|
"grad_norm": 0.19464652240276337, |
|
"learning_rate": 9.851936218678816e-06, |
|
"loss": 0.0035, |
|
"step": 4230 |
|
}, |
|
{ |
|
"epoch": 2.4145785876993164, |
|
"grad_norm": 2.1453051567077637, |
|
"learning_rate": 9.757023538344723e-06, |
|
"loss": 0.0067, |
|
"step": 4240 |
|
}, |
|
{ |
|
"epoch": 2.420273348519362, |
|
"grad_norm": 0.0253062192350626, |
|
"learning_rate": 9.66211085801063e-06, |
|
"loss": 0.0128, |
|
"step": 4250 |
|
}, |
|
{ |
|
"epoch": 2.425968109339408, |
|
"grad_norm": 0.06526502221822739, |
|
"learning_rate": 9.567198177676538e-06, |
|
"loss": 0.0183, |
|
"step": 4260 |
|
}, |
|
{ |
|
"epoch": 2.431662870159453, |
|
"grad_norm": 0.4081474244594574, |
|
"learning_rate": 9.472285497342445e-06, |
|
"loss": 0.0013, |
|
"step": 4270 |
|
}, |
|
{ |
|
"epoch": 2.437357630979499, |
|
"grad_norm": 1.4686610698699951, |
|
"learning_rate": 9.377372817008353e-06, |
|
"loss": 0.0067, |
|
"step": 4280 |
|
}, |
|
{ |
|
"epoch": 2.4430523917995446, |
|
"grad_norm": 0.048870667815208435, |
|
"learning_rate": 9.28246013667426e-06, |
|
"loss": 0.0156, |
|
"step": 4290 |
|
}, |
|
{ |
|
"epoch": 2.44874715261959, |
|
"grad_norm": 0.04144718497991562, |
|
"learning_rate": 9.187547456340167e-06, |
|
"loss": 0.007, |
|
"step": 4300 |
|
}, |
|
{ |
|
"epoch": 2.4544419134396356, |
|
"grad_norm": 0.01028160285204649, |
|
"learning_rate": 9.092634776006075e-06, |
|
"loss": 0.004, |
|
"step": 4310 |
|
}, |
|
{ |
|
"epoch": 2.4601366742596813, |
|
"grad_norm": 0.11557559669017792, |
|
"learning_rate": 8.997722095671982e-06, |
|
"loss": 0.0014, |
|
"step": 4320 |
|
}, |
|
{ |
|
"epoch": 2.4658314350797266, |
|
"grad_norm": 0.014574232511222363, |
|
"learning_rate": 8.90280941533789e-06, |
|
"loss": 0.001, |
|
"step": 4330 |
|
}, |
|
{ |
|
"epoch": 2.4715261958997723, |
|
"grad_norm": 0.010948601178824902, |
|
"learning_rate": 8.807896735003797e-06, |
|
"loss": 0.0381, |
|
"step": 4340 |
|
}, |
|
{ |
|
"epoch": 2.477220956719818, |
|
"grad_norm": 0.011354812420904636, |
|
"learning_rate": 8.712984054669704e-06, |
|
"loss": 0.0058, |
|
"step": 4350 |
|
}, |
|
{ |
|
"epoch": 2.4829157175398633, |
|
"grad_norm": 0.03828594833612442, |
|
"learning_rate": 8.618071374335612e-06, |
|
"loss": 0.0022, |
|
"step": 4360 |
|
}, |
|
{ |
|
"epoch": 2.488610478359909, |
|
"grad_norm": 2.171107053756714, |
|
"learning_rate": 8.523158694001519e-06, |
|
"loss": 0.0201, |
|
"step": 4370 |
|
}, |
|
{ |
|
"epoch": 2.4943052391799543, |
|
"grad_norm": 2.7769832611083984, |
|
"learning_rate": 8.428246013667426e-06, |
|
"loss": 0.03, |
|
"step": 4380 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"grad_norm": 0.3383297324180603, |
|
"learning_rate": 8.333333333333334e-06, |
|
"loss": 0.0013, |
|
"step": 4390 |
|
}, |
|
{ |
|
"epoch": 2.5056947608200457, |
|
"grad_norm": 0.008414714597165585, |
|
"learning_rate": 8.238420652999241e-06, |
|
"loss": 0.0015, |
|
"step": 4400 |
|
}, |
|
{ |
|
"epoch": 2.511389521640091, |
|
"grad_norm": 0.7041877508163452, |
|
"learning_rate": 8.143507972665148e-06, |
|
"loss": 0.0564, |
|
"step": 4410 |
|
}, |
|
{ |
|
"epoch": 2.5170842824601367, |
|
"grad_norm": 2.449838399887085, |
|
"learning_rate": 8.048595292331056e-06, |
|
"loss": 0.0127, |
|
"step": 4420 |
|
}, |
|
{ |
|
"epoch": 2.522779043280182, |
|
"grad_norm": 5.027844429016113, |
|
"learning_rate": 7.953682611996963e-06, |
|
"loss": 0.0122, |
|
"step": 4430 |
|
}, |
|
{ |
|
"epoch": 2.5284738041002277, |
|
"grad_norm": 1.5973525047302246, |
|
"learning_rate": 7.85876993166287e-06, |
|
"loss": 0.0088, |
|
"step": 4440 |
|
}, |
|
{ |
|
"epoch": 2.5341685649202734, |
|
"grad_norm": 0.02022445946931839, |
|
"learning_rate": 7.763857251328778e-06, |
|
"loss": 0.0011, |
|
"step": 4450 |
|
}, |
|
{ |
|
"epoch": 2.5398633257403187, |
|
"grad_norm": 0.20087704062461853, |
|
"learning_rate": 7.668944570994685e-06, |
|
"loss": 0.0077, |
|
"step": 4460 |
|
}, |
|
{ |
|
"epoch": 2.5455580865603644, |
|
"grad_norm": 0.006311175879091024, |
|
"learning_rate": 7.574031890660592e-06, |
|
"loss": 0.0023, |
|
"step": 4470 |
|
}, |
|
{ |
|
"epoch": 2.55125284738041, |
|
"grad_norm": 0.057606592774391174, |
|
"learning_rate": 7.4791192103265e-06, |
|
"loss": 0.0036, |
|
"step": 4480 |
|
}, |
|
{ |
|
"epoch": 2.5569476082004554, |
|
"grad_norm": 0.029715605080127716, |
|
"learning_rate": 7.384206529992407e-06, |
|
"loss": 0.0085, |
|
"step": 4490 |
|
}, |
|
{ |
|
"epoch": 2.562642369020501, |
|
"grad_norm": 0.17613981664180756, |
|
"learning_rate": 7.289293849658315e-06, |
|
"loss": 0.0073, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 2.568337129840547, |
|
"grad_norm": 0.09365126490592957, |
|
"learning_rate": 7.194381169324223e-06, |
|
"loss": 0.0119, |
|
"step": 4510 |
|
}, |
|
{ |
|
"epoch": 2.574031890660592, |
|
"grad_norm": 0.19684773683547974, |
|
"learning_rate": 7.099468488990129e-06, |
|
"loss": 0.0064, |
|
"step": 4520 |
|
}, |
|
{ |
|
"epoch": 2.579726651480638, |
|
"grad_norm": 1.106364130973816, |
|
"learning_rate": 7.004555808656037e-06, |
|
"loss": 0.0025, |
|
"step": 4530 |
|
}, |
|
{ |
|
"epoch": 2.5854214123006836, |
|
"grad_norm": 0.040543317794799805, |
|
"learning_rate": 6.909643128321943e-06, |
|
"loss": 0.0065, |
|
"step": 4540 |
|
}, |
|
{ |
|
"epoch": 2.591116173120729, |
|
"grad_norm": 0.00573074072599411, |
|
"learning_rate": 6.814730447987852e-06, |
|
"loss": 0.0195, |
|
"step": 4550 |
|
}, |
|
{ |
|
"epoch": 2.5968109339407746, |
|
"grad_norm": 0.04998818784952164, |
|
"learning_rate": 6.719817767653759e-06, |
|
"loss": 0.0012, |
|
"step": 4560 |
|
}, |
|
{ |
|
"epoch": 2.6025056947608203, |
|
"grad_norm": 0.502973198890686, |
|
"learning_rate": 6.624905087319666e-06, |
|
"loss": 0.0172, |
|
"step": 4570 |
|
}, |
|
{ |
|
"epoch": 2.6082004555808656, |
|
"grad_norm": 0.06730928272008896, |
|
"learning_rate": 6.529992406985573e-06, |
|
"loss": 0.003, |
|
"step": 4580 |
|
}, |
|
{ |
|
"epoch": 2.6138952164009113, |
|
"grad_norm": 0.007552881259471178, |
|
"learning_rate": 6.435079726651482e-06, |
|
"loss": 0.0045, |
|
"step": 4590 |
|
}, |
|
{ |
|
"epoch": 2.619589977220957, |
|
"grad_norm": 0.015098507516086102, |
|
"learning_rate": 6.340167046317388e-06, |
|
"loss": 0.0039, |
|
"step": 4600 |
|
}, |
|
{ |
|
"epoch": 2.6252847380410023, |
|
"grad_norm": 1.1533013582229614, |
|
"learning_rate": 6.245254365983296e-06, |
|
"loss": 0.0024, |
|
"step": 4610 |
|
}, |
|
{ |
|
"epoch": 2.630979498861048, |
|
"grad_norm": 1.244232177734375, |
|
"learning_rate": 6.150341685649203e-06, |
|
"loss": 0.0234, |
|
"step": 4620 |
|
}, |
|
{ |
|
"epoch": 2.6366742596810933, |
|
"grad_norm": 0.011134306900203228, |
|
"learning_rate": 6.05542900531511e-06, |
|
"loss": 0.0126, |
|
"step": 4630 |
|
}, |
|
{ |
|
"epoch": 2.642369020501139, |
|
"grad_norm": 0.008693058975040913, |
|
"learning_rate": 5.960516324981018e-06, |
|
"loss": 0.0013, |
|
"step": 4640 |
|
}, |
|
{ |
|
"epoch": 2.6480637813211843, |
|
"grad_norm": 0.4049939513206482, |
|
"learning_rate": 5.865603644646925e-06, |
|
"loss": 0.0045, |
|
"step": 4650 |
|
}, |
|
{ |
|
"epoch": 2.65375854214123, |
|
"grad_norm": 0.01917268894612789, |
|
"learning_rate": 5.7706909643128325e-06, |
|
"loss": 0.0011, |
|
"step": 4660 |
|
}, |
|
{ |
|
"epoch": 2.6594533029612757, |
|
"grad_norm": 0.032446008175611496, |
|
"learning_rate": 5.67577828397874e-06, |
|
"loss": 0.0034, |
|
"step": 4670 |
|
}, |
|
{ |
|
"epoch": 2.665148063781321, |
|
"grad_norm": 0.3410187065601349, |
|
"learning_rate": 5.580865603644647e-06, |
|
"loss": 0.0179, |
|
"step": 4680 |
|
}, |
|
{ |
|
"epoch": 2.6708428246013667, |
|
"grad_norm": 0.021253902465105057, |
|
"learning_rate": 5.4859529233105546e-06, |
|
"loss": 0.0147, |
|
"step": 4690 |
|
}, |
|
{ |
|
"epoch": 2.6765375854214124, |
|
"grad_norm": 0.011832083575427532, |
|
"learning_rate": 5.391040242976462e-06, |
|
"loss": 0.0157, |
|
"step": 4700 |
|
}, |
|
{ |
|
"epoch": 2.6822323462414577, |
|
"grad_norm": 0.011258887127041817, |
|
"learning_rate": 5.296127562642369e-06, |
|
"loss": 0.0054, |
|
"step": 4710 |
|
}, |
|
{ |
|
"epoch": 2.6879271070615034, |
|
"grad_norm": 0.6816171407699585, |
|
"learning_rate": 5.201214882308277e-06, |
|
"loss": 0.0103, |
|
"step": 4720 |
|
}, |
|
{ |
|
"epoch": 2.693621867881549, |
|
"grad_norm": 0.015361886471509933, |
|
"learning_rate": 5.106302201974184e-06, |
|
"loss": 0.0177, |
|
"step": 4730 |
|
}, |
|
{ |
|
"epoch": 2.6993166287015944, |
|
"grad_norm": 1.1560934782028198, |
|
"learning_rate": 5.011389521640091e-06, |
|
"loss": 0.0121, |
|
"step": 4740 |
|
}, |
|
{ |
|
"epoch": 2.70501138952164, |
|
"grad_norm": 0.2196737378835678, |
|
"learning_rate": 4.916476841305999e-06, |
|
"loss": 0.0174, |
|
"step": 4750 |
|
}, |
|
{ |
|
"epoch": 2.710706150341686, |
|
"grad_norm": 0.00927172601222992, |
|
"learning_rate": 4.821564160971906e-06, |
|
"loss": 0.0069, |
|
"step": 4760 |
|
}, |
|
{ |
|
"epoch": 2.716400911161731, |
|
"grad_norm": 0.0045639206655323505, |
|
"learning_rate": 4.7266514806378134e-06, |
|
"loss": 0.0041, |
|
"step": 4770 |
|
}, |
|
{ |
|
"epoch": 2.722095671981777, |
|
"grad_norm": 0.026646023616194725, |
|
"learning_rate": 4.631738800303721e-06, |
|
"loss": 0.0138, |
|
"step": 4780 |
|
}, |
|
{ |
|
"epoch": 2.7277904328018225, |
|
"grad_norm": 0.021237283945083618, |
|
"learning_rate": 4.536826119969628e-06, |
|
"loss": 0.0036, |
|
"step": 4790 |
|
}, |
|
{ |
|
"epoch": 2.733485193621868, |
|
"grad_norm": 0.008988427929580212, |
|
"learning_rate": 4.4419134396355355e-06, |
|
"loss": 0.0048, |
|
"step": 4800 |
|
}, |
|
{ |
|
"epoch": 2.7391799544419135, |
|
"grad_norm": 0.04954913258552551, |
|
"learning_rate": 4.347000759301443e-06, |
|
"loss": 0.0085, |
|
"step": 4810 |
|
}, |
|
{ |
|
"epoch": 2.7448747152619593, |
|
"grad_norm": 1.486095666885376, |
|
"learning_rate": 4.25208807896735e-06, |
|
"loss": 0.0072, |
|
"step": 4820 |
|
}, |
|
{ |
|
"epoch": 2.7505694760820045, |
|
"grad_norm": 0.6007639169692993, |
|
"learning_rate": 4.157175398633258e-06, |
|
"loss": 0.0045, |
|
"step": 4830 |
|
}, |
|
{ |
|
"epoch": 2.7562642369020502, |
|
"grad_norm": 0.008939745835959911, |
|
"learning_rate": 4.062262718299165e-06, |
|
"loss": 0.0034, |
|
"step": 4840 |
|
}, |
|
{ |
|
"epoch": 2.7619589977220955, |
|
"grad_norm": 1.5877684354782104, |
|
"learning_rate": 3.967350037965072e-06, |
|
"loss": 0.0211, |
|
"step": 4850 |
|
}, |
|
{ |
|
"epoch": 2.7676537585421412, |
|
"grad_norm": 0.029445238411426544, |
|
"learning_rate": 3.87243735763098e-06, |
|
"loss": 0.0165, |
|
"step": 4860 |
|
}, |
|
{ |
|
"epoch": 2.7733485193621865, |
|
"grad_norm": 0.021546538919210434, |
|
"learning_rate": 3.777524677296887e-06, |
|
"loss": 0.0023, |
|
"step": 4870 |
|
}, |
|
{ |
|
"epoch": 2.7790432801822322, |
|
"grad_norm": 0.044492632150650024, |
|
"learning_rate": 3.682611996962795e-06, |
|
"loss": 0.0013, |
|
"step": 4880 |
|
}, |
|
{ |
|
"epoch": 2.784738041002278, |
|
"grad_norm": 0.24219924211502075, |
|
"learning_rate": 3.5876993166287018e-06, |
|
"loss": 0.0231, |
|
"step": 4890 |
|
}, |
|
{ |
|
"epoch": 2.7904328018223232, |
|
"grad_norm": 0.03521392121911049, |
|
"learning_rate": 3.492786636294609e-06, |
|
"loss": 0.0014, |
|
"step": 4900 |
|
}, |
|
{ |
|
"epoch": 2.796127562642369, |
|
"grad_norm": 1.6615145206451416, |
|
"learning_rate": 3.3978739559605165e-06, |
|
"loss": 0.007, |
|
"step": 4910 |
|
}, |
|
{ |
|
"epoch": 2.8018223234624147, |
|
"grad_norm": 0.3814733028411865, |
|
"learning_rate": 3.302961275626424e-06, |
|
"loss": 0.0028, |
|
"step": 4920 |
|
}, |
|
{ |
|
"epoch": 2.80751708428246, |
|
"grad_norm": 1.6600357294082642, |
|
"learning_rate": 3.208048595292331e-06, |
|
"loss": 0.0098, |
|
"step": 4930 |
|
}, |
|
{ |
|
"epoch": 2.8132118451025057, |
|
"grad_norm": 0.09767989814281464, |
|
"learning_rate": 3.1131359149582386e-06, |
|
"loss": 0.0125, |
|
"step": 4940 |
|
}, |
|
{ |
|
"epoch": 2.8189066059225514, |
|
"grad_norm": 0.004134484566748142, |
|
"learning_rate": 3.018223234624146e-06, |
|
"loss": 0.001, |
|
"step": 4950 |
|
}, |
|
{ |
|
"epoch": 2.8246013667425967, |
|
"grad_norm": 0.3089199960231781, |
|
"learning_rate": 2.9233105542900533e-06, |
|
"loss": 0.0153, |
|
"step": 4960 |
|
}, |
|
{ |
|
"epoch": 2.8302961275626424, |
|
"grad_norm": 1.1428207159042358, |
|
"learning_rate": 2.8283978739559607e-06, |
|
"loss": 0.0217, |
|
"step": 4970 |
|
}, |
|
{ |
|
"epoch": 2.835990888382688, |
|
"grad_norm": 1.542417049407959, |
|
"learning_rate": 2.733485193621868e-06, |
|
"loss": 0.0077, |
|
"step": 4980 |
|
}, |
|
{ |
|
"epoch": 2.8416856492027334, |
|
"grad_norm": 0.7124744653701782, |
|
"learning_rate": 2.6385725132877754e-06, |
|
"loss": 0.025, |
|
"step": 4990 |
|
}, |
|
{ |
|
"epoch": 2.847380410022779, |
|
"grad_norm": 0.045164983719587326, |
|
"learning_rate": 2.5436598329536827e-06, |
|
"loss": 0.0071, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 2.853075170842825, |
|
"grad_norm": 0.14568312466144562, |
|
"learning_rate": 2.44874715261959e-06, |
|
"loss": 0.0009, |
|
"step": 5010 |
|
}, |
|
{ |
|
"epoch": 2.85876993166287, |
|
"grad_norm": 1.8170379400253296, |
|
"learning_rate": 2.3538344722854975e-06, |
|
"loss": 0.0071, |
|
"step": 5020 |
|
}, |
|
{ |
|
"epoch": 2.864464692482916, |
|
"grad_norm": 0.018464110791683197, |
|
"learning_rate": 2.258921791951405e-06, |
|
"loss": 0.018, |
|
"step": 5030 |
|
}, |
|
{ |
|
"epoch": 2.8701594533029615, |
|
"grad_norm": 0.01644555851817131, |
|
"learning_rate": 2.164009111617312e-06, |
|
"loss": 0.0014, |
|
"step": 5040 |
|
}, |
|
{ |
|
"epoch": 2.875854214123007, |
|
"grad_norm": 1.8252747058868408, |
|
"learning_rate": 2.0690964312832195e-06, |
|
"loss": 0.0063, |
|
"step": 5050 |
|
}, |
|
{ |
|
"epoch": 2.8815489749430525, |
|
"grad_norm": 2.68369197845459, |
|
"learning_rate": 1.974183750949127e-06, |
|
"loss": 0.0068, |
|
"step": 5060 |
|
}, |
|
{ |
|
"epoch": 2.887243735763098, |
|
"grad_norm": 0.1895146667957306, |
|
"learning_rate": 1.879271070615034e-06, |
|
"loss": 0.0013, |
|
"step": 5070 |
|
}, |
|
{ |
|
"epoch": 2.8929384965831435, |
|
"grad_norm": 0.9527952671051025, |
|
"learning_rate": 1.7843583902809414e-06, |
|
"loss": 0.0214, |
|
"step": 5080 |
|
}, |
|
{ |
|
"epoch": 2.8986332574031892, |
|
"grad_norm": 0.3007192313671112, |
|
"learning_rate": 1.6894457099468492e-06, |
|
"loss": 0.0059, |
|
"step": 5090 |
|
}, |
|
{ |
|
"epoch": 2.9043280182232345, |
|
"grad_norm": 1.1584299802780151, |
|
"learning_rate": 1.5945330296127566e-06, |
|
"loss": 0.0323, |
|
"step": 5100 |
|
}, |
|
{ |
|
"epoch": 2.91002277904328, |
|
"grad_norm": 0.17047913372516632, |
|
"learning_rate": 1.4996203492786637e-06, |
|
"loss": 0.0075, |
|
"step": 5110 |
|
}, |
|
{ |
|
"epoch": 2.9157175398633255, |
|
"grad_norm": 0.019094351679086685, |
|
"learning_rate": 1.404707668944571e-06, |
|
"loss": 0.0111, |
|
"step": 5120 |
|
}, |
|
{ |
|
"epoch": 2.921412300683371, |
|
"grad_norm": 0.030679482966661453, |
|
"learning_rate": 1.3097949886104784e-06, |
|
"loss": 0.0066, |
|
"step": 5130 |
|
}, |
|
{ |
|
"epoch": 2.927107061503417, |
|
"grad_norm": 2.1198318004608154, |
|
"learning_rate": 1.2148823082763858e-06, |
|
"loss": 0.0182, |
|
"step": 5140 |
|
}, |
|
{ |
|
"epoch": 2.932801822323462, |
|
"grad_norm": 0.2060464471578598, |
|
"learning_rate": 1.1199696279422931e-06, |
|
"loss": 0.0084, |
|
"step": 5150 |
|
}, |
|
{ |
|
"epoch": 2.938496583143508, |
|
"grad_norm": 0.4984743893146515, |
|
"learning_rate": 1.0250569476082005e-06, |
|
"loss": 0.0106, |
|
"step": 5160 |
|
}, |
|
{ |
|
"epoch": 2.9441913439635536, |
|
"grad_norm": 2.510512351989746, |
|
"learning_rate": 9.301442672741079e-07, |
|
"loss": 0.0032, |
|
"step": 5170 |
|
}, |
|
{ |
|
"epoch": 2.949886104783599, |
|
"grad_norm": 1.7853169441223145, |
|
"learning_rate": 8.352315869400152e-07, |
|
"loss": 0.0278, |
|
"step": 5180 |
|
}, |
|
{ |
|
"epoch": 2.9555808656036446, |
|
"grad_norm": 0.020138094201683998, |
|
"learning_rate": 7.403189066059226e-07, |
|
"loss": 0.0105, |
|
"step": 5190 |
|
}, |
|
{ |
|
"epoch": 2.9612756264236904, |
|
"grad_norm": 0.009497715160250664, |
|
"learning_rate": 6.4540622627183e-07, |
|
"loss": 0.0011, |
|
"step": 5200 |
|
}, |
|
{ |
|
"epoch": 2.9669703872437356, |
|
"grad_norm": 0.5586445331573486, |
|
"learning_rate": 5.504935459377373e-07, |
|
"loss": 0.0179, |
|
"step": 5210 |
|
}, |
|
{ |
|
"epoch": 2.9726651480637813, |
|
"grad_norm": 0.4381959140300751, |
|
"learning_rate": 4.5558086560364467e-07, |
|
"loss": 0.0251, |
|
"step": 5220 |
|
}, |
|
{ |
|
"epoch": 2.978359908883827, |
|
"grad_norm": 0.004315070807933807, |
|
"learning_rate": 3.6066818526955203e-07, |
|
"loss": 0.0243, |
|
"step": 5230 |
|
}, |
|
{ |
|
"epoch": 2.9840546697038723, |
|
"grad_norm": 0.5453450083732605, |
|
"learning_rate": 2.657555049354594e-07, |
|
"loss": 0.0085, |
|
"step": 5240 |
|
}, |
|
{ |
|
"epoch": 2.989749430523918, |
|
"grad_norm": 0.2711639404296875, |
|
"learning_rate": 1.7084282460136675e-07, |
|
"loss": 0.0167, |
|
"step": 5250 |
|
}, |
|
{ |
|
"epoch": 2.995444191343964, |
|
"grad_norm": 2.088967800140381, |
|
"learning_rate": 7.593014426727411e-08, |
|
"loss": 0.0021, |
|
"step": 5260 |
|
} |
|
], |
|
"logging_steps": 10, |
|
"max_steps": 5268, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 3, |
|
"save_steps": 500, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 1376049275709696.0, |
|
"train_batch_size": 8, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|