bodo-bert-mlm-base / trainer_state.json
Sanjib Narzary · commit 0332d80 · "Bodo BERT base pretrained model with line by line"
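
The JSON below is the trainer state written by the Hugging Face Transformers Trainer at the end of the run: log_history records the training loss and the linearly decaying learning rate every 500 optimizer steps across 3 epochs (168,252 steps in total), followed by a closing summary record. A minimal sketch for inspecting it, assuming Python with matplotlib installed and a local copy of this file saved as trainer_state.json (the path and the plotting library are assumptions, not part of the original upload):

    import json
    import matplotlib.pyplot as plt

    # Hypothetical local path; point it at a downloaded copy of this file.
    with open("trainer_state.json", "r", encoding="utf-8") as f:
        state = json.load(f)

    # Every log_history entry except the closing summary carries "step",
    # "loss", and "learning_rate", logged every 500 optimizer steps.
    logs = [entry for entry in state["log_history"] if "loss" in entry]
    steps = [entry["step"] for entry in logs]
    losses = [entry["loss"] for entry in logs]

    plt.plot(steps, losses)
    plt.xlabel("optimizer step")
    plt.ylabel("masked-LM training loss")
    plt.title("bodo-bert-mlm-base pretraining loss")
    plt.show()
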
{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 3.0,
"global_step": 168252,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.01,
"learning_rate": 4.9851413356156244e-05,
"loss": 9.0623,
"step": 500
},
{
"epoch": 0.02,
"learning_rate": 4.9702826712312485e-05,
"loss": 8.8042,
"step": 1000
},
{
"epoch": 0.03,
"learning_rate": 4.9554240068468726e-05,
"loss": 8.6835,
"step": 1500
},
{
"epoch": 0.04,
"learning_rate": 4.940565342462497e-05,
"loss": 8.721,
"step": 2000
},
{
"epoch": 0.04,
"learning_rate": 4.9257066780781215e-05,
"loss": 8.7979,
"step": 2500
},
{
"epoch": 0.05,
"learning_rate": 4.910848013693745e-05,
"loss": 8.7957,
"step": 3000
},
{
"epoch": 0.06,
"learning_rate": 4.89598934930937e-05,
"loss": 8.7586,
"step": 3500
},
{
"epoch": 0.07,
"learning_rate": 4.881130684924994e-05,
"loss": 8.7277,
"step": 4000
},
{
"epoch": 0.08,
"learning_rate": 4.866272020540618e-05,
"loss": 8.754,
"step": 4500
},
{
"epoch": 0.09,
"learning_rate": 4.851413356156242e-05,
"loss": 8.6946,
"step": 5000
},
{
"epoch": 0.1,
"learning_rate": 4.836554691771866e-05,
"loss": 8.6201,
"step": 5500
},
{
"epoch": 0.11,
"learning_rate": 4.82169602738749e-05,
"loss": 8.7833,
"step": 6000
},
{
"epoch": 0.12,
"learning_rate": 4.8068373630031144e-05,
"loss": 8.5384,
"step": 6500
},
{
"epoch": 0.12,
"learning_rate": 4.7919786986187386e-05,
"loss": 8.6775,
"step": 7000
},
{
"epoch": 0.13,
"learning_rate": 4.7771200342343634e-05,
"loss": 8.5872,
"step": 7500
},
{
"epoch": 0.14,
"learning_rate": 4.7622613698499875e-05,
"loss": 8.7197,
"step": 8000
},
{
"epoch": 0.15,
"learning_rate": 4.747402705465611e-05,
"loss": 8.6843,
"step": 8500
},
{
"epoch": 0.16,
"learning_rate": 4.732544041081236e-05,
"loss": 8.466,
"step": 9000
},
{
"epoch": 0.17,
"learning_rate": 4.71768537669686e-05,
"loss": 8.8049,
"step": 9500
},
{
"epoch": 0.18,
"learning_rate": 4.702826712312484e-05,
"loss": 8.5617,
"step": 10000
},
{
"epoch": 0.19,
"learning_rate": 4.687968047928108e-05,
"loss": 8.4742,
"step": 10500
},
{
"epoch": 0.2,
"learning_rate": 4.673109383543732e-05,
"loss": 8.7015,
"step": 11000
},
{
"epoch": 0.21,
"learning_rate": 4.658250719159356e-05,
"loss": 8.5935,
"step": 11500
},
{
"epoch": 0.21,
"learning_rate": 4.6433920547749804e-05,
"loss": 8.5818,
"step": 12000
},
{
"epoch": 0.22,
"learning_rate": 4.6285333903906045e-05,
"loss": 8.594,
"step": 12500
},
{
"epoch": 0.23,
"learning_rate": 4.613674726006229e-05,
"loss": 8.5958,
"step": 13000
},
{
"epoch": 0.24,
"learning_rate": 4.5988160616218534e-05,
"loss": 8.6729,
"step": 13500
},
{
"epoch": 0.25,
"learning_rate": 4.583957397237477e-05,
"loss": 8.5583,
"step": 14000
},
{
"epoch": 0.26,
"learning_rate": 4.569098732853102e-05,
"loss": 8.4468,
"step": 14500
},
{
"epoch": 0.27,
"learning_rate": 4.554240068468726e-05,
"loss": 8.4181,
"step": 15000
},
{
"epoch": 0.28,
"learning_rate": 4.53938140408435e-05,
"loss": 8.549,
"step": 15500
},
{
"epoch": 0.29,
"learning_rate": 4.524522739699974e-05,
"loss": 8.4542,
"step": 16000
},
{
"epoch": 0.29,
"learning_rate": 4.509664075315598e-05,
"loss": 8.4954,
"step": 16500
},
{
"epoch": 0.3,
"learning_rate": 4.494805410931223e-05,
"loss": 8.3789,
"step": 17000
},
{
"epoch": 0.31,
"learning_rate": 4.4799467465468464e-05,
"loss": 8.4243,
"step": 17500
},
{
"epoch": 0.32,
"learning_rate": 4.4650880821624705e-05,
"loss": 8.5108,
"step": 18000
},
{
"epoch": 0.33,
"learning_rate": 4.450229417778095e-05,
"loss": 8.5746,
"step": 18500
},
{
"epoch": 0.34,
"learning_rate": 4.435370753393719e-05,
"loss": 8.5546,
"step": 19000
},
{
"epoch": 0.35,
"learning_rate": 4.4205120890093435e-05,
"loss": 8.5514,
"step": 19500
},
{
"epoch": 0.36,
"learning_rate": 4.4056534246249676e-05,
"loss": 8.4347,
"step": 20000
},
{
"epoch": 0.37,
"learning_rate": 4.390794760240592e-05,
"loss": 8.5224,
"step": 20500
},
{
"epoch": 0.37,
"learning_rate": 4.375936095856216e-05,
"loss": 8.4994,
"step": 21000
},
{
"epoch": 0.38,
"learning_rate": 4.36107743147184e-05,
"loss": 8.4593,
"step": 21500
},
{
"epoch": 0.39,
"learning_rate": 4.346218767087464e-05,
"loss": 8.4521,
"step": 22000
},
{
"epoch": 0.4,
"learning_rate": 4.331360102703089e-05,
"loss": 8.5219,
"step": 22500
},
{
"epoch": 0.41,
"learning_rate": 4.3165014383187123e-05,
"loss": 8.4422,
"step": 23000
},
{
"epoch": 0.42,
"learning_rate": 4.3016427739343365e-05,
"loss": 8.6163,
"step": 23500
},
{
"epoch": 0.43,
"learning_rate": 4.286784109549961e-05,
"loss": 8.5638,
"step": 24000
},
{
"epoch": 0.44,
"learning_rate": 4.271925445165585e-05,
"loss": 8.6115,
"step": 24500
},
{
"epoch": 0.45,
"learning_rate": 4.2570667807812095e-05,
"loss": 8.4891,
"step": 25000
},
{
"epoch": 0.45,
"learning_rate": 4.2422081163968336e-05,
"loss": 8.4262,
"step": 25500
},
{
"epoch": 0.46,
"learning_rate": 4.227349452012458e-05,
"loss": 8.477,
"step": 26000
},
{
"epoch": 0.47,
"learning_rate": 4.212490787628082e-05,
"loss": 8.4471,
"step": 26500
},
{
"epoch": 0.48,
"learning_rate": 4.197632123243706e-05,
"loss": 8.5222,
"step": 27000
},
{
"epoch": 0.49,
"learning_rate": 4.18277345885933e-05,
"loss": 8.5666,
"step": 27500
},
{
"epoch": 0.5,
"learning_rate": 4.167914794474955e-05,
"loss": 8.4356,
"step": 28000
},
{
"epoch": 0.51,
"learning_rate": 4.153056130090578e-05,
"loss": 8.4847,
"step": 28500
},
{
"epoch": 0.52,
"learning_rate": 4.138197465706203e-05,
"loss": 8.4716,
"step": 29000
},
{
"epoch": 0.53,
"learning_rate": 4.123338801321827e-05,
"loss": 8.5387,
"step": 29500
},
{
"epoch": 0.53,
"learning_rate": 4.1084801369374507e-05,
"loss": 8.5522,
"step": 30000
},
{
"epoch": 0.54,
"learning_rate": 4.0936214725530755e-05,
"loss": 8.6269,
"step": 30500
},
{
"epoch": 0.55,
"learning_rate": 4.0787628081686996e-05,
"loss": 8.4528,
"step": 31000
},
{
"epoch": 0.56,
"learning_rate": 4.063904143784324e-05,
"loss": 8.5515,
"step": 31500
},
{
"epoch": 0.57,
"learning_rate": 4.049045479399948e-05,
"loss": 8.3859,
"step": 32000
},
{
"epoch": 0.58,
"learning_rate": 4.034186815015572e-05,
"loss": 8.5035,
"step": 32500
},
{
"epoch": 0.59,
"learning_rate": 4.019328150631197e-05,
"loss": 8.3759,
"step": 33000
},
{
"epoch": 0.6,
"learning_rate": 4.00446948624682e-05,
"loss": 8.6423,
"step": 33500
},
{
"epoch": 0.61,
"learning_rate": 3.989610821862444e-05,
"loss": 8.4445,
"step": 34000
},
{
"epoch": 0.62,
"learning_rate": 3.974752157478069e-05,
"loss": 8.5676,
"step": 34500
},
{
"epoch": 0.62,
"learning_rate": 3.959893493093693e-05,
"loss": 8.558,
"step": 35000
},
{
"epoch": 0.63,
"learning_rate": 3.9450348287093166e-05,
"loss": 8.5935,
"step": 35500
},
{
"epoch": 0.64,
"learning_rate": 3.9301761643249414e-05,
"loss": 8.5513,
"step": 36000
},
{
"epoch": 0.65,
"learning_rate": 3.9153174999405655e-05,
"loss": 8.5013,
"step": 36500
},
{
"epoch": 0.66,
"learning_rate": 3.9004588355561897e-05,
"loss": 8.367,
"step": 37000
},
{
"epoch": 0.67,
"learning_rate": 3.885600171171814e-05,
"loss": 8.3926,
"step": 37500
},
{
"epoch": 0.68,
"learning_rate": 3.870741506787438e-05,
"loss": 8.5125,
"step": 38000
},
{
"epoch": 0.69,
"learning_rate": 3.855882842403063e-05,
"loss": 8.5391,
"step": 38500
},
{
"epoch": 0.7,
"learning_rate": 3.841024178018686e-05,
"loss": 8.6012,
"step": 39000
},
{
"epoch": 0.7,
"learning_rate": 3.82616551363431e-05,
"loss": 8.5837,
"step": 39500
},
{
"epoch": 0.71,
"learning_rate": 3.811306849249935e-05,
"loss": 8.4399,
"step": 40000
},
{
"epoch": 0.72,
"learning_rate": 3.796448184865559e-05,
"loss": 8.3717,
"step": 40500
},
{
"epoch": 0.73,
"learning_rate": 3.781589520481183e-05,
"loss": 8.3921,
"step": 41000
},
{
"epoch": 0.74,
"learning_rate": 3.7667308560968074e-05,
"loss": 8.6485,
"step": 41500
},
{
"epoch": 0.75,
"learning_rate": 3.7518721917124315e-05,
"loss": 8.6172,
"step": 42000
},
{
"epoch": 0.76,
"learning_rate": 3.737013527328056e-05,
"loss": 8.4443,
"step": 42500
},
{
"epoch": 0.77,
"learning_rate": 3.72215486294368e-05,
"loss": 8.5358,
"step": 43000
},
{
"epoch": 0.78,
"learning_rate": 3.707296198559304e-05,
"loss": 8.5569,
"step": 43500
},
{
"epoch": 0.78,
"learning_rate": 3.6924375341749286e-05,
"loss": 8.4957,
"step": 44000
},
{
"epoch": 0.79,
"learning_rate": 3.677578869790552e-05,
"loss": 8.6387,
"step": 44500
},
{
"epoch": 0.8,
"learning_rate": 3.662720205406177e-05,
"loss": 8.4732,
"step": 45000
},
{
"epoch": 0.81,
"learning_rate": 3.647861541021801e-05,
"loss": 8.4356,
"step": 45500
},
{
"epoch": 0.82,
"learning_rate": 3.633002876637425e-05,
"loss": 8.5194,
"step": 46000
},
{
"epoch": 0.83,
"learning_rate": 3.618144212253049e-05,
"loss": 8.5896,
"step": 46500
},
{
"epoch": 0.84,
"learning_rate": 3.6032855478686734e-05,
"loss": 8.5769,
"step": 47000
},
{
"epoch": 0.85,
"learning_rate": 3.5884268834842975e-05,
"loss": 8.4959,
"step": 47500
},
{
"epoch": 0.86,
"learning_rate": 3.5735682190999216e-05,
"loss": 8.4584,
"step": 48000
},
{
"epoch": 0.86,
"learning_rate": 3.558709554715546e-05,
"loss": 8.5566,
"step": 48500
},
{
"epoch": 0.87,
"learning_rate": 3.54385089033117e-05,
"loss": 8.4576,
"step": 49000
},
{
"epoch": 0.88,
"learning_rate": 3.5289922259467946e-05,
"loss": 8.6795,
"step": 49500
},
{
"epoch": 0.89,
"learning_rate": 3.514133561562418e-05,
"loss": 8.5897,
"step": 50000
},
{
"epoch": 0.9,
"learning_rate": 3.499274897178043e-05,
"loss": 8.576,
"step": 50500
},
{
"epoch": 0.91,
"learning_rate": 3.484416232793667e-05,
"loss": 8.5107,
"step": 51000
},
{
"epoch": 0.92,
"learning_rate": 3.469557568409291e-05,
"loss": 8.5208,
"step": 51500
},
{
"epoch": 0.93,
"learning_rate": 3.454698904024915e-05,
"loss": 8.523,
"step": 52000
},
{
"epoch": 0.94,
"learning_rate": 3.439840239640539e-05,
"loss": 8.6138,
"step": 52500
},
{
"epoch": 0.95,
"learning_rate": 3.4249815752561634e-05,
"loss": 8.5042,
"step": 53000
},
{
"epoch": 0.95,
"learning_rate": 3.4101229108717876e-05,
"loss": 8.6146,
"step": 53500
},
{
"epoch": 0.96,
"learning_rate": 3.395264246487412e-05,
"loss": 8.5214,
"step": 54000
},
{
"epoch": 0.97,
"learning_rate": 3.3804055821030365e-05,
"loss": 8.4696,
"step": 54500
},
{
"epoch": 0.98,
"learning_rate": 3.3655469177186606e-05,
"loss": 8.451,
"step": 55000
},
{
"epoch": 0.99,
"learning_rate": 3.350688253334284e-05,
"loss": 8.4613,
"step": 55500
},
{
"epoch": 1.0,
"learning_rate": 3.335829588949909e-05,
"loss": 8.4817,
"step": 56000
},
{
"epoch": 1.01,
"learning_rate": 3.320970924565533e-05,
"loss": 8.4691,
"step": 56500
},
{
"epoch": 1.02,
"learning_rate": 3.306112260181157e-05,
"loss": 8.5732,
"step": 57000
},
{
"epoch": 1.03,
"learning_rate": 3.291253595796781e-05,
"loss": 8.4901,
"step": 57500
},
{
"epoch": 1.03,
"learning_rate": 3.276394931412405e-05,
"loss": 8.4497,
"step": 58000
},
{
"epoch": 1.04,
"learning_rate": 3.2615362670280294e-05,
"loss": 8.5303,
"step": 58500
},
{
"epoch": 1.05,
"learning_rate": 3.2466776026436535e-05,
"loss": 8.6241,
"step": 59000
},
{
"epoch": 1.06,
"learning_rate": 3.2318189382592776e-05,
"loss": 8.4288,
"step": 59500
},
{
"epoch": 1.07,
"learning_rate": 3.2169602738749024e-05,
"loss": 8.4964,
"step": 60000
},
{
"epoch": 1.08,
"learning_rate": 3.2021016094905265e-05,
"loss": 8.446,
"step": 60500
},
{
"epoch": 1.09,
"learning_rate": 3.18724294510615e-05,
"loss": 8.4443,
"step": 61000
},
{
"epoch": 1.1,
"learning_rate": 3.172384280721775e-05,
"loss": 8.5108,
"step": 61500
},
{
"epoch": 1.11,
"learning_rate": 3.157525616337399e-05,
"loss": 8.6207,
"step": 62000
},
{
"epoch": 1.11,
"learning_rate": 3.142666951953023e-05,
"loss": 8.59,
"step": 62500
},
{
"epoch": 1.12,
"learning_rate": 3.127808287568647e-05,
"loss": 8.6877,
"step": 63000
},
{
"epoch": 1.13,
"learning_rate": 3.112949623184271e-05,
"loss": 8.4827,
"step": 63500
},
{
"epoch": 1.14,
"learning_rate": 3.098090958799896e-05,
"loss": 8.5279,
"step": 64000
},
{
"epoch": 1.15,
"learning_rate": 3.0832322944155195e-05,
"loss": 8.4369,
"step": 64500
},
{
"epoch": 1.16,
"learning_rate": 3.0683736300311436e-05,
"loss": 8.6061,
"step": 65000
},
{
"epoch": 1.17,
"learning_rate": 3.0535149656467684e-05,
"loss": 8.4595,
"step": 65500
},
{
"epoch": 1.18,
"learning_rate": 3.0386563012623925e-05,
"loss": 8.3962,
"step": 66000
},
{
"epoch": 1.19,
"learning_rate": 3.0237976368780163e-05,
"loss": 8.5392,
"step": 66500
},
{
"epoch": 1.19,
"learning_rate": 3.0089389724936407e-05,
"loss": 8.3932,
"step": 67000
},
{
"epoch": 1.2,
"learning_rate": 2.994080308109265e-05,
"loss": 8.4278,
"step": 67500
},
{
"epoch": 1.21,
"learning_rate": 2.9792216437248886e-05,
"loss": 8.372,
"step": 68000
},
{
"epoch": 1.22,
"learning_rate": 2.964362979340513e-05,
"loss": 8.4326,
"step": 68500
},
{
"epoch": 1.23,
"learning_rate": 2.9495043149561376e-05,
"loss": 8.394,
"step": 69000
},
{
"epoch": 1.24,
"learning_rate": 2.9346456505717617e-05,
"loss": 8.3595,
"step": 69500
},
{
"epoch": 1.25,
"learning_rate": 2.9197869861873855e-05,
"loss": 8.4364,
"step": 70000
},
{
"epoch": 1.26,
"learning_rate": 2.90492832180301e-05,
"loss": 8.4903,
"step": 70500
},
{
"epoch": 1.27,
"learning_rate": 2.8900696574186344e-05,
"loss": 8.5403,
"step": 71000
},
{
"epoch": 1.27,
"learning_rate": 2.875210993034258e-05,
"loss": 8.4901,
"step": 71500
},
{
"epoch": 1.28,
"learning_rate": 2.8603523286498823e-05,
"loss": 8.5069,
"step": 72000
},
{
"epoch": 1.29,
"learning_rate": 2.8454936642655067e-05,
"loss": 8.4554,
"step": 72500
},
{
"epoch": 1.3,
"learning_rate": 2.8306349998811312e-05,
"loss": 8.6517,
"step": 73000
},
{
"epoch": 1.31,
"learning_rate": 2.815776335496755e-05,
"loss": 8.4631,
"step": 73500
},
{
"epoch": 1.32,
"learning_rate": 2.800917671112379e-05,
"loss": 8.4751,
"step": 74000
},
{
"epoch": 1.33,
"learning_rate": 2.7860590067280035e-05,
"loss": 8.589,
"step": 74500
},
{
"epoch": 1.34,
"learning_rate": 2.7712003423436276e-05,
"loss": 8.515,
"step": 75000
},
{
"epoch": 1.35,
"learning_rate": 2.7563416779592514e-05,
"loss": 8.5457,
"step": 75500
},
{
"epoch": 1.36,
"learning_rate": 2.741483013574876e-05,
"loss": 8.4698,
"step": 76000
},
{
"epoch": 1.36,
"learning_rate": 2.7266243491905003e-05,
"loss": 8.5481,
"step": 76500
},
{
"epoch": 1.37,
"learning_rate": 2.711765684806124e-05,
"loss": 8.5733,
"step": 77000
},
{
"epoch": 1.38,
"learning_rate": 2.6969070204217482e-05,
"loss": 8.4624,
"step": 77500
},
{
"epoch": 1.39,
"learning_rate": 2.6820483560373727e-05,
"loss": 8.4875,
"step": 78000
},
{
"epoch": 1.4,
"learning_rate": 2.667189691652997e-05,
"loss": 8.4949,
"step": 78500
},
{
"epoch": 1.41,
"learning_rate": 2.652331027268621e-05,
"loss": 8.5104,
"step": 79000
},
{
"epoch": 1.42,
"learning_rate": 2.637472362884245e-05,
"loss": 8.6732,
"step": 79500
},
{
"epoch": 1.43,
"learning_rate": 2.6226136984998695e-05,
"loss": 8.6028,
"step": 80000
},
{
"epoch": 1.44,
"learning_rate": 2.607755034115494e-05,
"loss": 8.6293,
"step": 80500
},
{
"epoch": 1.44,
"learning_rate": 2.5928963697311177e-05,
"loss": 8.6331,
"step": 81000
},
{
"epoch": 1.45,
"learning_rate": 2.578037705346742e-05,
"loss": 8.4385,
"step": 81500
},
{
"epoch": 1.46,
"learning_rate": 2.5631790409623663e-05,
"loss": 8.4303,
"step": 82000
},
{
"epoch": 1.47,
"learning_rate": 2.54832037657799e-05,
"loss": 8.3661,
"step": 82500
},
{
"epoch": 1.48,
"learning_rate": 2.5334617121936145e-05,
"loss": 8.3943,
"step": 83000
},
{
"epoch": 1.49,
"learning_rate": 2.5186030478092386e-05,
"loss": 8.4089,
"step": 83500
},
{
"epoch": 1.5,
"learning_rate": 2.503744383424863e-05,
"loss": 8.4719,
"step": 84000
},
{
"epoch": 1.51,
"learning_rate": 2.4888857190404872e-05,
"loss": 8.5825,
"step": 84500
},
{
"epoch": 1.52,
"learning_rate": 2.4740270546561113e-05,
"loss": 8.4867,
"step": 85000
},
{
"epoch": 1.52,
"learning_rate": 2.459168390271735e-05,
"loss": 8.3653,
"step": 85500
},
{
"epoch": 1.53,
"learning_rate": 2.4443097258873596e-05,
"loss": 8.4881,
"step": 86000
},
{
"epoch": 1.54,
"learning_rate": 2.4294510615029837e-05,
"loss": 8.3916,
"step": 86500
},
{
"epoch": 1.55,
"learning_rate": 2.4145923971186078e-05,
"loss": 8.4061,
"step": 87000
},
{
"epoch": 1.56,
"learning_rate": 2.399733732734232e-05,
"loss": 8.5029,
"step": 87500
},
{
"epoch": 1.57,
"learning_rate": 2.3848750683498564e-05,
"loss": 8.3934,
"step": 88000
},
{
"epoch": 1.58,
"learning_rate": 2.3700164039654805e-05,
"loss": 8.4483,
"step": 88500
},
{
"epoch": 1.59,
"learning_rate": 2.3551577395811046e-05,
"loss": 8.4115,
"step": 89000
},
{
"epoch": 1.6,
"learning_rate": 2.3402990751967287e-05,
"loss": 8.599,
"step": 89500
},
{
"epoch": 1.6,
"learning_rate": 2.3254404108123532e-05,
"loss": 8.4504,
"step": 90000
},
{
"epoch": 1.61,
"learning_rate": 2.3105817464279773e-05,
"loss": 8.3569,
"step": 90500
},
{
"epoch": 1.62,
"learning_rate": 2.2957230820436014e-05,
"loss": 8.6043,
"step": 91000
},
{
"epoch": 1.63,
"learning_rate": 2.2808644176592255e-05,
"loss": 8.3064,
"step": 91500
},
{
"epoch": 1.64,
"learning_rate": 2.2660057532748497e-05,
"loss": 8.6221,
"step": 92000
},
{
"epoch": 1.65,
"learning_rate": 2.251147088890474e-05,
"loss": 8.4029,
"step": 92500
},
{
"epoch": 1.66,
"learning_rate": 2.236288424506098e-05,
"loss": 8.5238,
"step": 93000
},
{
"epoch": 1.67,
"learning_rate": 2.2214297601217223e-05,
"loss": 8.3069,
"step": 93500
},
{
"epoch": 1.68,
"learning_rate": 2.2065710957373465e-05,
"loss": 8.5189,
"step": 94000
},
{
"epoch": 1.68,
"learning_rate": 2.191712431352971e-05,
"loss": 8.5181,
"step": 94500
},
{
"epoch": 1.69,
"learning_rate": 2.1768537669685947e-05,
"loss": 8.586,
"step": 95000
},
{
"epoch": 1.7,
"learning_rate": 2.1619951025842188e-05,
"loss": 8.3729,
"step": 95500
},
{
"epoch": 1.71,
"learning_rate": 2.1471364381998433e-05,
"loss": 8.6504,
"step": 96000
},
{
"epoch": 1.72,
"learning_rate": 2.1322777738154674e-05,
"loss": 8.493,
"step": 96500
},
{
"epoch": 1.73,
"learning_rate": 2.1174191094310915e-05,
"loss": 8.3185,
"step": 97000
},
{
"epoch": 1.74,
"learning_rate": 2.1025604450467156e-05,
"loss": 8.439,
"step": 97500
},
{
"epoch": 1.75,
"learning_rate": 2.08770178066234e-05,
"loss": 8.5533,
"step": 98000
},
{
"epoch": 1.76,
"learning_rate": 2.0728431162779642e-05,
"loss": 8.3284,
"step": 98500
},
{
"epoch": 1.77,
"learning_rate": 2.0579844518935883e-05,
"loss": 8.315,
"step": 99000
},
{
"epoch": 1.77,
"learning_rate": 2.0431257875092124e-05,
"loss": 8.4237,
"step": 99500
},
{
"epoch": 1.78,
"learning_rate": 2.0282671231248365e-05,
"loss": 8.4191,
"step": 100000
},
{
"epoch": 1.79,
"learning_rate": 2.013408458740461e-05,
"loss": 8.4631,
"step": 100500
},
{
"epoch": 1.8,
"learning_rate": 1.9985497943560848e-05,
"loss": 8.2988,
"step": 101000
},
{
"epoch": 1.81,
"learning_rate": 1.9836911299717092e-05,
"loss": 8.3866,
"step": 101500
},
{
"epoch": 1.82,
"learning_rate": 1.9688324655873334e-05,
"loss": 8.4536,
"step": 102000
},
{
"epoch": 1.83,
"learning_rate": 1.9539738012029578e-05,
"loss": 8.5572,
"step": 102500
},
{
"epoch": 1.84,
"learning_rate": 1.9391151368185816e-05,
"loss": 8.366,
"step": 103000
},
{
"epoch": 1.85,
"learning_rate": 1.924256472434206e-05,
"loss": 8.3163,
"step": 103500
},
{
"epoch": 1.85,
"learning_rate": 1.90939780804983e-05,
"loss": 8.3835,
"step": 104000
},
{
"epoch": 1.86,
"learning_rate": 1.8945391436654543e-05,
"loss": 8.5343,
"step": 104500
},
{
"epoch": 1.87,
"learning_rate": 1.8796804792810784e-05,
"loss": 8.3379,
"step": 105000
},
{
"epoch": 1.88,
"learning_rate": 1.8648218148967025e-05,
"loss": 8.4609,
"step": 105500
},
{
"epoch": 1.89,
"learning_rate": 1.849963150512327e-05,
"loss": 8.5256,
"step": 106000
},
{
"epoch": 1.9,
"learning_rate": 1.835104486127951e-05,
"loss": 8.3157,
"step": 106500
},
{
"epoch": 1.91,
"learning_rate": 1.8202458217435752e-05,
"loss": 8.6021,
"step": 107000
},
{
"epoch": 1.92,
"learning_rate": 1.8053871573591993e-05,
"loss": 8.4711,
"step": 107500
},
{
"epoch": 1.93,
"learning_rate": 1.7905284929748238e-05,
"loss": 8.4117,
"step": 108000
},
{
"epoch": 1.93,
"learning_rate": 1.775669828590448e-05,
"loss": 8.3728,
"step": 108500
},
{
"epoch": 1.94,
"learning_rate": 1.760811164206072e-05,
"loss": 8.475,
"step": 109000
},
{
"epoch": 1.95,
"learning_rate": 1.745952499821696e-05,
"loss": 8.4445,
"step": 109500
},
{
"epoch": 1.96,
"learning_rate": 1.7310938354373202e-05,
"loss": 8.4425,
"step": 110000
},
{
"epoch": 1.97,
"learning_rate": 1.7162351710529444e-05,
"loss": 8.4416,
"step": 110500
},
{
"epoch": 1.98,
"learning_rate": 1.7013765066685685e-05,
"loss": 8.4447,
"step": 111000
},
{
"epoch": 1.99,
"learning_rate": 1.686517842284193e-05,
"loss": 8.5124,
"step": 111500
},
{
"epoch": 2.0,
"learning_rate": 1.671659177899817e-05,
"loss": 8.187,
"step": 112000
},
{
"epoch": 2.01,
"learning_rate": 1.656800513515441e-05,
"loss": 8.3755,
"step": 112500
},
{
"epoch": 2.01,
"learning_rate": 1.6419418491310653e-05,
"loss": 8.3418,
"step": 113000
},
{
"epoch": 2.02,
"learning_rate": 1.6270831847466897e-05,
"loss": 8.4131,
"step": 113500
},
{
"epoch": 2.03,
"learning_rate": 1.612224520362314e-05,
"loss": 8.3132,
"step": 114000
},
{
"epoch": 2.04,
"learning_rate": 1.597365855977938e-05,
"loss": 8.4248,
"step": 114500
},
{
"epoch": 2.05,
"learning_rate": 1.582507191593562e-05,
"loss": 8.3797,
"step": 115000
},
{
"epoch": 2.06,
"learning_rate": 1.5676485272091862e-05,
"loss": 8.3197,
"step": 115500
},
{
"epoch": 2.07,
"learning_rate": 1.5527898628248107e-05,
"loss": 8.3627,
"step": 116000
},
{
"epoch": 2.08,
"learning_rate": 1.5379311984404344e-05,
"loss": 8.3708,
"step": 116500
},
{
"epoch": 2.09,
"learning_rate": 1.5230725340560589e-05,
"loss": 8.3659,
"step": 117000
},
{
"epoch": 2.1,
"learning_rate": 1.508213869671683e-05,
"loss": 8.4066,
"step": 117500
},
{
"epoch": 2.1,
"learning_rate": 1.4933552052873073e-05,
"loss": 8.4507,
"step": 118000
},
{
"epoch": 2.11,
"learning_rate": 1.4784965409029314e-05,
"loss": 8.3325,
"step": 118500
},
{
"epoch": 2.12,
"learning_rate": 1.4636378765185554e-05,
"loss": 8.4277,
"step": 119000
},
{
"epoch": 2.13,
"learning_rate": 1.4487792121341798e-05,
"loss": 8.3021,
"step": 119500
},
{
"epoch": 2.14,
"learning_rate": 1.4339205477498038e-05,
"loss": 8.4499,
"step": 120000
},
{
"epoch": 2.15,
"learning_rate": 1.4190618833654282e-05,
"loss": 8.3656,
"step": 120500
},
{
"epoch": 2.16,
"learning_rate": 1.4042032189810522e-05,
"loss": 8.4882,
"step": 121000
},
{
"epoch": 2.17,
"learning_rate": 1.3893445545966766e-05,
"loss": 8.5041,
"step": 121500
},
{
"epoch": 2.18,
"learning_rate": 1.3744858902123006e-05,
"loss": 8.2957,
"step": 122000
},
{
"epoch": 2.18,
"learning_rate": 1.359627225827925e-05,
"loss": 8.3517,
"step": 122500
},
{
"epoch": 2.19,
"learning_rate": 1.344768561443549e-05,
"loss": 8.3476,
"step": 123000
},
{
"epoch": 2.2,
"learning_rate": 1.3299098970591731e-05,
"loss": 8.3239,
"step": 123500
},
{
"epoch": 2.21,
"learning_rate": 1.3150512326747974e-05,
"loss": 8.3705,
"step": 124000
},
{
"epoch": 2.22,
"learning_rate": 1.3001925682904215e-05,
"loss": 8.2817,
"step": 124500
},
{
"epoch": 2.23,
"learning_rate": 1.2853339039060458e-05,
"loss": 8.386,
"step": 125000
},
{
"epoch": 2.24,
"learning_rate": 1.2704752395216699e-05,
"loss": 8.3455,
"step": 125500
},
{
"epoch": 2.25,
"learning_rate": 1.2556165751372942e-05,
"loss": 8.3228,
"step": 126000
},
{
"epoch": 2.26,
"learning_rate": 1.2407579107529183e-05,
"loss": 8.3176,
"step": 126500
},
{
"epoch": 2.26,
"learning_rate": 1.2258992463685424e-05,
"loss": 8.4304,
"step": 127000
},
{
"epoch": 2.27,
"learning_rate": 1.2110405819841667e-05,
"loss": 8.4209,
"step": 127500
},
{
"epoch": 2.28,
"learning_rate": 1.1961819175997908e-05,
"loss": 8.3362,
"step": 128000
},
{
"epoch": 2.29,
"learning_rate": 1.1813232532154151e-05,
"loss": 8.3753,
"step": 128500
},
{
"epoch": 2.3,
"learning_rate": 1.1664645888310392e-05,
"loss": 8.2538,
"step": 129000
},
{
"epoch": 2.31,
"learning_rate": 1.1516059244466634e-05,
"loss": 8.1921,
"step": 129500
},
{
"epoch": 2.32,
"learning_rate": 1.1367472600622876e-05,
"loss": 8.3222,
"step": 130000
},
{
"epoch": 2.33,
"learning_rate": 1.1218885956779118e-05,
"loss": 8.2967,
"step": 130500
},
{
"epoch": 2.34,
"learning_rate": 1.1070299312935359e-05,
"loss": 8.33,
"step": 131000
},
{
"epoch": 2.34,
"learning_rate": 1.0921712669091602e-05,
"loss": 8.4005,
"step": 131500
},
{
"epoch": 2.35,
"learning_rate": 1.0773126025247843e-05,
"loss": 8.3699,
"step": 132000
},
{
"epoch": 2.36,
"learning_rate": 1.0624539381404084e-05,
"loss": 8.3546,
"step": 132500
},
{
"epoch": 2.37,
"learning_rate": 1.0475952737560327e-05,
"loss": 8.4229,
"step": 133000
},
{
"epoch": 2.38,
"learning_rate": 1.0327366093716568e-05,
"loss": 8.3003,
"step": 133500
},
{
"epoch": 2.39,
"learning_rate": 1.0178779449872811e-05,
"loss": 8.3456,
"step": 134000
},
{
"epoch": 2.4,
"learning_rate": 1.0030192806029052e-05,
"loss": 8.2217,
"step": 134500
},
{
"epoch": 2.41,
"learning_rate": 9.881606162185295e-06,
"loss": 8.3187,
"step": 135000
},
{
"epoch": 2.42,
"learning_rate": 9.733019518341534e-06,
"loss": 8.3815,
"step": 135500
},
{
"epoch": 2.42,
"learning_rate": 9.584432874497777e-06,
"loss": 8.2689,
"step": 136000
},
{
"epoch": 2.43,
"learning_rate": 9.435846230654018e-06,
"loss": 8.3191,
"step": 136500
},
{
"epoch": 2.44,
"learning_rate": 9.287259586810261e-06,
"loss": 8.4162,
"step": 137000
},
{
"epoch": 2.45,
"learning_rate": 9.138672942966502e-06,
"loss": 8.3997,
"step": 137500
},
{
"epoch": 2.46,
"learning_rate": 8.990086299122745e-06,
"loss": 8.3898,
"step": 138000
},
{
"epoch": 2.47,
"learning_rate": 8.841499655278986e-06,
"loss": 8.3515,
"step": 138500
},
{
"epoch": 2.48,
"learning_rate": 8.69291301143523e-06,
"loss": 8.402,
"step": 139000
},
{
"epoch": 2.49,
"learning_rate": 8.54432636759147e-06,
"loss": 8.5421,
"step": 139500
},
{
"epoch": 2.5,
"learning_rate": 8.395739723747713e-06,
"loss": 8.284,
"step": 140000
},
{
"epoch": 2.51,
"learning_rate": 8.247153079903953e-06,
"loss": 8.3907,
"step": 140500
},
{
"epoch": 2.51,
"learning_rate": 8.098566436060196e-06,
"loss": 8.2416,
"step": 141000
},
{
"epoch": 2.52,
"learning_rate": 7.949979792216437e-06,
"loss": 8.2786,
"step": 141500
},
{
"epoch": 2.53,
"learning_rate": 7.80139314837268e-06,
"loss": 8.218,
"step": 142000
},
{
"epoch": 2.54,
"learning_rate": 7.652806504528921e-06,
"loss": 8.3371,
"step": 142500
},
{
"epoch": 2.55,
"learning_rate": 7.504219860685163e-06,
"loss": 8.4112,
"step": 143000
},
{
"epoch": 2.56,
"learning_rate": 7.355633216841405e-06,
"loss": 8.3448,
"step": 143500
},
{
"epoch": 2.57,
"learning_rate": 7.207046572997647e-06,
"loss": 8.3472,
"step": 144000
},
{
"epoch": 2.58,
"learning_rate": 7.058459929153889e-06,
"loss": 8.256,
"step": 144500
},
{
"epoch": 2.59,
"learning_rate": 6.909873285310129e-06,
"loss": 8.362,
"step": 145000
},
{
"epoch": 2.59,
"learning_rate": 6.761286641466371e-06,
"loss": 8.3393,
"step": 145500
},
{
"epoch": 2.6,
"learning_rate": 6.612699997622613e-06,
"loss": 8.2448,
"step": 146000
},
{
"epoch": 2.61,
"learning_rate": 6.464113353778855e-06,
"loss": 8.3471,
"step": 146500
},
{
"epoch": 2.62,
"learning_rate": 6.315526709935097e-06,
"loss": 8.2684,
"step": 147000
},
{
"epoch": 2.63,
"learning_rate": 6.1669400660913394e-06,
"loss": 8.3226,
"step": 147500
},
{
"epoch": 2.64,
"learning_rate": 6.0183534222475815e-06,
"loss": 8.3493,
"step": 148000
},
{
"epoch": 2.65,
"learning_rate": 5.869766778403823e-06,
"loss": 8.3861,
"step": 148500
},
{
"epoch": 2.66,
"learning_rate": 5.721180134560065e-06,
"loss": 8.2889,
"step": 149000
},
{
"epoch": 2.67,
"learning_rate": 5.572593490716307e-06,
"loss": 8.3556,
"step": 149500
},
{
"epoch": 2.67,
"learning_rate": 5.424006846872549e-06,
"loss": 8.2095,
"step": 150000
},
{
"epoch": 2.68,
"learning_rate": 5.275420203028791e-06,
"loss": 8.1834,
"step": 150500
},
{
"epoch": 2.69,
"learning_rate": 5.126833559185032e-06,
"loss": 8.3183,
"step": 151000
},
{
"epoch": 2.7,
"learning_rate": 4.978246915341274e-06,
"loss": 8.2933,
"step": 151500
},
{
"epoch": 2.71,
"learning_rate": 4.829660271497516e-06,
"loss": 8.2567,
"step": 152000
},
{
"epoch": 2.72,
"learning_rate": 4.681073627653758e-06,
"loss": 8.3184,
"step": 152500
},
{
"epoch": 2.73,
"learning_rate": 4.53248698381e-06,
"loss": 8.4259,
"step": 153000
},
{
"epoch": 2.74,
"learning_rate": 4.383900339966241e-06,
"loss": 8.3777,
"step": 153500
},
{
"epoch": 2.75,
"learning_rate": 4.235313696122483e-06,
"loss": 8.2875,
"step": 154000
},
{
"epoch": 2.75,
"learning_rate": 4.086727052278725e-06,
"loss": 8.3118,
"step": 154500
},
{
"epoch": 2.76,
"learning_rate": 3.938140408434967e-06,
"loss": 8.2465,
"step": 155000
},
{
"epoch": 2.77,
"learning_rate": 3.7895537645912088e-06,
"loss": 8.4145,
"step": 155500
},
{
"epoch": 2.78,
"learning_rate": 3.6409671207474504e-06,
"loss": 8.1984,
"step": 156000
},
{
"epoch": 2.79,
"learning_rate": 3.4923804769036924e-06,
"loss": 8.3681,
"step": 156500
},
{
"epoch": 2.8,
"learning_rate": 3.343793833059934e-06,
"loss": 8.1982,
"step": 157000
},
{
"epoch": 2.81,
"learning_rate": 3.195207189216176e-06,
"loss": 8.303,
"step": 157500
},
{
"epoch": 2.82,
"learning_rate": 3.0466205453724176e-06,
"loss": 8.3355,
"step": 158000
},
{
"epoch": 2.83,
"learning_rate": 2.898033901528659e-06,
"loss": 8.3953,
"step": 158500
},
{
"epoch": 2.84,
"learning_rate": 2.749447257684901e-06,
"loss": 8.4069,
"step": 159000
},
{
"epoch": 2.84,
"learning_rate": 2.6008606138411432e-06,
"loss": 8.3092,
"step": 159500
},
{
"epoch": 2.85,
"learning_rate": 2.452273969997385e-06,
"loss": 8.3073,
"step": 160000
},
{
"epoch": 2.86,
"learning_rate": 2.303687326153627e-06,
"loss": 8.3872,
"step": 160500
},
{
"epoch": 2.87,
"learning_rate": 2.1551006823098684e-06,
"loss": 8.3404,
"step": 161000
},
{
"epoch": 2.88,
"learning_rate": 2.0065140384661104e-06,
"loss": 8.2552,
"step": 161500
},
{
"epoch": 2.89,
"learning_rate": 1.8579273946223525e-06,
"loss": 8.2809,
"step": 162000
},
{
"epoch": 2.9,
"learning_rate": 1.709340750778594e-06,
"loss": 8.3951,
"step": 162500
},
{
"epoch": 2.91,
"learning_rate": 1.5607541069348359e-06,
"loss": 8.3388,
"step": 163000
},
{
"epoch": 2.92,
"learning_rate": 1.4121674630910779e-06,
"loss": 8.2567,
"step": 163500
},
{
"epoch": 2.92,
"learning_rate": 1.2635808192473197e-06,
"loss": 8.4246,
"step": 164000
},
{
"epoch": 2.93,
"learning_rate": 1.1149941754035615e-06,
"loss": 8.2949,
"step": 164500
},
{
"epoch": 2.94,
"learning_rate": 9.66407531559803e-07,
"loss": 8.3421,
"step": 165000
},
{
"epoch": 2.95,
"learning_rate": 8.17820887716045e-07,
"loss": 8.1758,
"step": 165500
},
{
"epoch": 2.96,
"learning_rate": 6.692342438722869e-07,
"loss": 8.305,
"step": 166000
},
{
"epoch": 2.97,
"learning_rate": 5.206476000285286e-07,
"loss": 8.3029,
"step": 166500
},
{
"epoch": 2.98,
"learning_rate": 3.720609561847705e-07,
"loss": 8.2702,
"step": 167000
},
{
"epoch": 2.99,
"learning_rate": 2.234743123410123e-07,
"loss": 8.3746,
"step": 167500
},
{
"epoch": 3.0,
"learning_rate": 7.488766849725412e-08,
"loss": 8.2764,
"step": 168000
},
{
"epoch": 3.0,
"step": 168252,
"total_flos": 6046176456633600.0,
"train_loss": 8.453391505526772,
"train_runtime": 13355.768,
"train_samples_per_second": 25.195,
"train_steps_per_second": 12.598
}
],
"max_steps": 168252,
"num_train_epochs": 3,
"total_flos": 6046176456633600.0,
"trial_name": null,
"trial_params": null
}