{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.0,
  "global_step": 261603,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01,
      "learning_rate": 4.990443534668945e-05,
      "loss": 2.4122,
      "step": 500
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.98088706933789e-05,
      "loss": 2.0865,
      "step": 1000
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.971330604006835e-05,
      "loss": 1.9392,
      "step": 1500
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.96177413867578e-05,
      "loss": 1.8163,
      "step": 2000
    },
    {
      "epoch": 0.03,
      "learning_rate": 4.952217673344725e-05,
      "loss": 1.7553,
      "step": 2500
    },
    {
      "epoch": 0.03,
      "learning_rate": 4.9426612080136694e-05,
      "loss": 1.6586,
      "step": 3000
    },
    {
      "epoch": 0.04,
      "learning_rate": 4.933104742682615e-05,
      "loss": 1.6214,
      "step": 3500
    },
    {
      "epoch": 0.05,
      "learning_rate": 4.9235482773515594e-05,
      "loss": 1.58,
      "step": 4000
    },
    {
      "epoch": 0.05,
      "learning_rate": 4.913991812020505e-05,
      "loss": 1.5398,
      "step": 4500
    },
    {
      "epoch": 0.06,
      "learning_rate": 4.9044353466894494e-05,
      "loss": 1.5098,
      "step": 5000
    },
    {
      "epoch": 0.06,
      "learning_rate": 4.894878881358394e-05,
      "loss": 1.4674,
      "step": 5500
    },
    {
      "epoch": 0.07,
      "learning_rate": 4.885322416027339e-05,
      "loss": 1.4478,
      "step": 6000
    },
    {
      "epoch": 0.07,
      "learning_rate": 4.875765950696284e-05,
      "loss": 1.4264,
      "step": 6500
    },
    {
      "epoch": 0.08,
      "learning_rate": 4.866209485365229e-05,
      "loss": 1.413,
      "step": 7000
    },
    {
      "epoch": 0.09,
      "learning_rate": 4.856653020034174e-05,
      "loss": 1.3918,
      "step": 7500
    },
    {
      "epoch": 0.09,
      "learning_rate": 4.8470965547031185e-05,
      "loss": 1.3834,
      "step": 8000
    },
    {
      "epoch": 0.1,
      "learning_rate": 4.837540089372064e-05,
      "loss": 1.3515,
      "step": 8500
    },
    {
      "epoch": 0.1,
      "learning_rate": 4.8279836240410085e-05,
      "loss": 1.3263,
      "step": 9000
    },
    {
      "epoch": 0.11,
      "learning_rate": 4.818427158709954e-05,
      "loss": 1.3411,
      "step": 9500
    },
    {
      "epoch": 0.11,
      "learning_rate": 4.808870693378899e-05,
      "loss": 1.3176,
      "step": 10000
    },
    {
      "epoch": 0.12,
      "learning_rate": 4.7993142280478434e-05,
      "loss": 1.3117,
      "step": 10500
    },
    {
      "epoch": 0.13,
      "learning_rate": 4.789757762716789e-05,
      "loss": 1.305,
      "step": 11000
    },
    {
      "epoch": 0.13,
      "learning_rate": 4.7802012973857334e-05,
      "loss": 1.2867,
      "step": 11500
    },
    {
      "epoch": 0.14,
      "learning_rate": 4.7706448320546784e-05,
      "loss": 1.2586,
      "step": 12000
    },
    {
      "epoch": 0.14,
      "learning_rate": 4.7610883667236234e-05,
      "loss": 1.2645,
      "step": 12500
    },
    {
      "epoch": 0.15,
      "learning_rate": 4.751531901392568e-05,
      "loss": 1.2608,
      "step": 13000
    },
    {
      "epoch": 0.15,
      "learning_rate": 4.741975436061513e-05,
      "loss": 1.2586,
      "step": 13500
    },
    {
      "epoch": 0.16,
      "learning_rate": 4.732418970730458e-05,
      "loss": 1.2374,
      "step": 14000
    },
    {
      "epoch": 0.17,
      "learning_rate": 4.722862505399403e-05,
      "loss": 1.2306,
      "step": 14500
    },
    {
      "epoch": 0.17,
      "learning_rate": 4.713306040068348e-05,
      "loss": 1.2437,
      "step": 15000
    },
    {
      "epoch": 0.18,
      "learning_rate": 4.7037495747372926e-05,
      "loss": 1.2214,
      "step": 15500
    },
    {
      "epoch": 0.18,
      "learning_rate": 4.694193109406238e-05,
      "loss": 1.2365,
      "step": 16000
    },
    {
      "epoch": 0.19,
      "learning_rate": 4.6846366440751825e-05,
      "loss": 1.2184,
      "step": 16500
    },
    {
      "epoch": 0.19,
      "learning_rate": 4.675080178744128e-05,
      "loss": 1.2098,
      "step": 17000
    },
    {
      "epoch": 0.2,
      "learning_rate": 4.6655237134130725e-05,
      "loss": 1.1901,
      "step": 17500
    },
    {
      "epoch": 0.21,
      "learning_rate": 4.6559672480820174e-05,
      "loss": 1.1875,
      "step": 18000
    },
    {
      "epoch": 0.21,
      "learning_rate": 4.6464107827509624e-05,
      "loss": 1.1871,
      "step": 18500
    },
    {
      "epoch": 0.22,
      "learning_rate": 4.6368543174199074e-05,
      "loss": 1.1909,
      "step": 19000
    },
    {
      "epoch": 0.22,
      "learning_rate": 4.6272978520888524e-05,
      "loss": 1.176,
      "step": 19500
    },
    {
      "epoch": 0.23,
      "learning_rate": 4.6177413867577974e-05,
      "loss": 1.18,
      "step": 20000
    },
    {
      "epoch": 0.24,
      "learning_rate": 4.608184921426742e-05,
      "loss": 1.1598,
      "step": 20500
    },
    {
      "epoch": 0.24,
      "learning_rate": 4.598628456095687e-05,
      "loss": 1.1607,
      "step": 21000
    },
    {
      "epoch": 0.25,
      "learning_rate": 4.589071990764632e-05,
      "loss": 1.1591,
      "step": 21500
    },
    {
      "epoch": 0.25,
      "learning_rate": 4.579515525433577e-05,
      "loss": 1.1464,
      "step": 22000
    },
    {
      "epoch": 0.26,
      "learning_rate": 4.569959060102522e-05,
      "loss": 1.1605,
      "step": 22500
    },
    {
      "epoch": 0.26,
      "learning_rate": 4.5604025947714666e-05,
      "loss": 1.1387,
      "step": 23000
    },
    {
      "epoch": 0.27,
      "learning_rate": 4.550846129440412e-05,
      "loss": 1.1553,
      "step": 23500
    },
    {
      "epoch": 0.28,
      "learning_rate": 4.5412896641093565e-05,
      "loss": 1.1238,
      "step": 24000
    },
    {
      "epoch": 0.28,
      "learning_rate": 4.5317331987783015e-05,
      "loss": 1.1291,
      "step": 24500
    },
    {
      "epoch": 0.29,
      "learning_rate": 4.5221767334472465e-05,
      "loss": 1.1154,
      "step": 25000
    },
    {
      "epoch": 0.29,
      "learning_rate": 4.5126202681161914e-05,
      "loss": 1.1228,
      "step": 25500
    },
    {
      "epoch": 0.3,
      "learning_rate": 4.5030638027851364e-05,
      "loss": 1.1204,
      "step": 26000
    },
    {
      "epoch": 0.3,
      "learning_rate": 4.4935073374540814e-05,
      "loss": 1.113,
      "step": 26500
    },
    {
      "epoch": 0.31,
      "learning_rate": 4.4839508721230264e-05,
      "loss": 1.1165,
      "step": 27000
    },
    {
      "epoch": 0.32,
      "learning_rate": 4.4743944067919714e-05,
      "loss": 1.0987,
      "step": 27500
    },
    {
      "epoch": 0.32,
      "learning_rate": 4.464837941460916e-05,
      "loss": 1.1014,
      "step": 28000
    },
    {
      "epoch": 0.33,
      "learning_rate": 4.455281476129861e-05,
      "loss": 1.1041,
      "step": 28500
    },
    {
      "epoch": 0.33,
      "learning_rate": 4.4457250107988056e-05,
      "loss": 1.0836,
      "step": 29000
    },
    {
      "epoch": 0.34,
      "learning_rate": 4.436168545467751e-05,
      "loss": 1.1053,
      "step": 29500
    },
    {
      "epoch": 0.34,
      "learning_rate": 4.426612080136696e-05,
      "loss": 1.0941,
      "step": 30000
    },
    {
      "epoch": 0.35,
      "learning_rate": 4.4170556148056406e-05,
      "loss": 1.0864,
      "step": 30500
    },
    {
      "epoch": 0.36,
      "learning_rate": 4.407499149474586e-05,
      "loss": 1.1057,
      "step": 31000
    },
    {
      "epoch": 0.36,
      "learning_rate": 4.3979426841435305e-05,
      "loss": 1.0829,
      "step": 31500
    },
    {
      "epoch": 0.37,
      "learning_rate": 4.3883862188124755e-05,
      "loss": 1.077,
      "step": 32000
    },
    {
      "epoch": 0.37,
      "learning_rate": 4.3788297534814205e-05,
      "loss": 1.0654,
      "step": 32500
    },
    {
      "epoch": 0.38,
      "learning_rate": 4.3692732881503655e-05,
      "loss": 1.0889,
      "step": 33000
    },
    {
      "epoch": 0.38,
      "learning_rate": 4.3597168228193104e-05,
      "loss": 1.0631,
      "step": 33500
    },
    {
      "epoch": 0.39,
      "learning_rate": 4.3501603574882554e-05,
      "loss": 1.079,
      "step": 34000
    },
    {
      "epoch": 0.4,
      "learning_rate": 4.3406038921572004e-05,
      "loss": 1.0663,
      "step": 34500
    },
    {
      "epoch": 0.4,
      "learning_rate": 4.3310474268261454e-05,
      "loss": 1.0479,
      "step": 35000
    },
    {
      "epoch": 0.41,
      "learning_rate": 4.32149096149509e-05,
      "loss": 1.0503,
      "step": 35500
    },
    {
      "epoch": 0.41,
      "learning_rate": 4.311934496164035e-05,
      "loss": 1.0586,
      "step": 36000
    },
    {
      "epoch": 0.42,
      "learning_rate": 4.3023780308329796e-05,
      "loss": 1.053,
      "step": 36500
    },
    {
      "epoch": 0.42,
      "learning_rate": 4.2928215655019246e-05,
      "loss": 1.0698,
      "step": 37000
    },
    {
      "epoch": 0.43,
      "learning_rate": 4.2832651001708696e-05,
      "loss": 1.0644,
      "step": 37500
    },
    {
      "epoch": 0.44,
      "learning_rate": 4.2737086348398146e-05,
      "loss": 1.0363,
      "step": 38000
    },
    {
      "epoch": 0.44,
      "learning_rate": 4.2641521695087595e-05,
      "loss": 1.0343,
      "step": 38500
    },
    {
      "epoch": 0.45,
      "learning_rate": 4.2545957041777045e-05,
      "loss": 1.0459,
      "step": 39000
    },
    {
      "epoch": 0.45,
      "learning_rate": 4.2450392388466495e-05,
      "loss": 1.0376,
      "step": 39500
    },
    {
      "epoch": 0.46,
      "learning_rate": 4.2354827735155945e-05,
      "loss": 1.0394,
      "step": 40000
    },
    {
      "epoch": 0.46,
      "learning_rate": 4.2259263081845395e-05,
      "loss": 1.0396,
      "step": 40500
    },
    {
      "epoch": 0.47,
      "learning_rate": 4.2163698428534844e-05,
      "loss": 1.042,
      "step": 41000
    },
    {
      "epoch": 0.48,
      "learning_rate": 4.2068133775224294e-05,
      "loss": 1.0336,
      "step": 41500
    },
    {
      "epoch": 0.48,
      "learning_rate": 4.1972569121913744e-05,
      "loss": 1.0184,
      "step": 42000
    },
    {
      "epoch": 0.49,
      "learning_rate": 4.1877004468603194e-05,
      "loss": 1.0268,
      "step": 42500
    },
    {
      "epoch": 0.49,
      "learning_rate": 4.178143981529264e-05,
      "loss": 1.0142,
      "step": 43000
    },
    {
      "epoch": 0.5,
      "learning_rate": 4.168587516198209e-05,
      "loss": 1.0199,
      "step": 43500
    },
    {
      "epoch": 0.5,
      "learning_rate": 4.1590310508671536e-05,
      "loss": 1.0097,
      "step": 44000
    },
    {
      "epoch": 0.51,
      "learning_rate": 4.1494745855360986e-05,
      "loss": 1.0123,
      "step": 44500
    },
    {
      "epoch": 0.52,
      "learning_rate": 4.1399181202050436e-05,
      "loss": 1.009,
      "step": 45000
    },
    {
      "epoch": 0.52,
      "learning_rate": 4.1303616548739886e-05,
      "loss": 1.0122,
      "step": 45500
    },
    {
      "epoch": 0.53,
      "learning_rate": 4.1208051895429335e-05,
      "loss": 0.9982,
      "step": 46000
    },
    {
      "epoch": 0.53,
      "learning_rate": 4.1112487242118785e-05,
      "loss": 1.0064,
      "step": 46500
    },
    {
      "epoch": 0.54,
      "learning_rate": 4.1016922588808235e-05,
      "loss": 0.9934,
      "step": 47000
    },
    {
      "epoch": 0.54,
      "learning_rate": 4.0921357935497685e-05,
      "loss": 1.0044,
      "step": 47500
    },
    {
      "epoch": 0.55,
      "learning_rate": 4.082579328218713e-05,
      "loss": 1.004,
      "step": 48000
    },
    {
      "epoch": 0.56,
      "learning_rate": 4.0730228628876584e-05,
      "loss": 0.9874,
      "step": 48500
    },
    {
      "epoch": 0.56,
      "learning_rate": 4.0634663975566034e-05,
      "loss": 1.0173,
      "step": 49000
    },
    {
      "epoch": 0.57,
      "learning_rate": 4.053909932225548e-05,
      "loss": 1.0207,
      "step": 49500
    },
    {
      "epoch": 0.57,
      "learning_rate": 4.0443534668944934e-05,
      "loss": 1.0013,
      "step": 50000
    },
    {
      "epoch": 0.58,
      "learning_rate": 4.034797001563438e-05,
      "loss": 1.0093,
      "step": 50500
    },
    {
      "epoch": 0.58,
      "learning_rate": 4.025240536232383e-05,
      "loss": 0.9874,
      "step": 51000
    },
    {
      "epoch": 0.59,
      "learning_rate": 4.0156840709013276e-05,
      "loss": 1.0028,
      "step": 51500
    },
    {
      "epoch": 0.6,
      "learning_rate": 4.0061276055702726e-05,
      "loss": 0.9882,
      "step": 52000
    },
    {
      "epoch": 0.6,
      "learning_rate": 3.9965711402392176e-05,
      "loss": 1.001,
      "step": 52500
    },
    {
      "epoch": 0.61,
      "learning_rate": 3.9870146749081626e-05,
      "loss": 0.998,
      "step": 53000
    },
    {
      "epoch": 0.61,
      "learning_rate": 3.9774582095771076e-05,
      "loss": 0.9912,
      "step": 53500
    },
    {
      "epoch": 0.62,
      "learning_rate": 3.9679017442460525e-05,
      "loss": 0.9872,
      "step": 54000
    },
    {
      "epoch": 0.62,
      "learning_rate": 3.9583452789149975e-05,
      "loss": 0.9958,
      "step": 54500
    },
    {
      "epoch": 0.63,
      "learning_rate": 3.9487888135839425e-05,
      "loss": 0.9797,
      "step": 55000
    },
    {
      "epoch": 0.64,
      "learning_rate": 3.939232348252887e-05,
      "loss": 0.9761,
      "step": 55500
    },
    {
      "epoch": 0.64,
      "learning_rate": 3.9296758829218324e-05,
      "loss": 0.9781,
      "step": 56000
    },
    {
      "epoch": 0.65,
      "learning_rate": 3.920119417590777e-05,
      "loss": 0.9645,
      "step": 56500
    },
    {
      "epoch": 0.65,
      "learning_rate": 3.910562952259722e-05,
      "loss": 0.986,
      "step": 57000
    },
    {
      "epoch": 0.66,
      "learning_rate": 3.901006486928667e-05,
      "loss": 0.9766,
      "step": 57500
    },
    {
      "epoch": 0.67,
      "learning_rate": 3.891450021597612e-05,
      "loss": 0.945,
      "step": 58000
    },
    {
      "epoch": 0.67,
      "learning_rate": 3.881893556266557e-05,
      "loss": 0.9671,
      "step": 58500
    },
    {
      "epoch": 0.68,
      "learning_rate": 3.8723370909355016e-05,
      "loss": 0.9774,
      "step": 59000
    },
    {
      "epoch": 0.68,
      "learning_rate": 3.8627806256044466e-05,
      "loss": 0.9646,
      "step": 59500
    },
    {
      "epoch": 0.69,
      "learning_rate": 3.8532241602733916e-05,
      "loss": 0.959,
      "step": 60000
    },
    {
      "epoch": 0.69,
      "learning_rate": 3.8436676949423366e-05,
      "loss": 0.9623,
      "step": 60500
    },
    {
      "epoch": 0.7,
      "learning_rate": 3.8341112296112816e-05,
      "loss": 0.9637,
      "step": 61000
    },
    {
      "epoch": 0.71,
      "learning_rate": 3.8245547642802265e-05,
      "loss": 0.9704,
      "step": 61500
    },
    {
      "epoch": 0.71,
      "learning_rate": 3.814998298949171e-05,
      "loss": 0.9557,
      "step": 62000
    },
    {
      "epoch": 0.72,
      "learning_rate": 3.8054418336181165e-05,
      "loss": 0.9609,
      "step": 62500
    },
    {
      "epoch": 0.72,
      "learning_rate": 3.795885368287061e-05,
      "loss": 0.9465,
      "step": 63000
    },
    {
      "epoch": 0.73,
      "learning_rate": 3.7863289029560065e-05,
      "loss": 0.9526,
      "step": 63500
    },
    {
      "epoch": 0.73,
      "learning_rate": 3.776772437624951e-05,
      "loss": 0.96,
      "step": 64000
    },
    {
      "epoch": 0.74,
      "learning_rate": 3.767215972293896e-05,
      "loss": 0.9513,
      "step": 64500
    },
    {
      "epoch": 0.75,
      "learning_rate": 3.757659506962841e-05,
      "loss": 0.959,
      "step": 65000
    },
    {
      "epoch": 0.75,
      "learning_rate": 3.748103041631786e-05,
      "loss": 0.9484,
      "step": 65500
    },
    {
      "epoch": 0.76,
      "learning_rate": 3.738546576300731e-05,
      "loss": 0.9443,
      "step": 66000
    },
    {
      "epoch": 0.76,
      "learning_rate": 3.7289901109696756e-05,
      "loss": 0.9556,
      "step": 66500
    },
    {
      "epoch": 0.77,
      "learning_rate": 3.7194336456386206e-05,
      "loss": 0.9579,
      "step": 67000
    },
    {
      "epoch": 0.77,
      "learning_rate": 3.7098771803075656e-05,
      "loss": 0.9432,
      "step": 67500
    },
    {
      "epoch": 0.78,
      "learning_rate": 3.70032071497651e-05,
      "loss": 0.9415,
      "step": 68000
    },
    {
      "epoch": 0.79,
      "learning_rate": 3.6907642496454556e-05,
      "loss": 0.9305,
      "step": 68500
    },
    {
      "epoch": 0.79,
      "learning_rate": 3.6812077843144005e-05,
      "loss": 0.9464,
      "step": 69000
    },
    {
      "epoch": 0.8,
      "learning_rate": 3.671651318983345e-05,
      "loss": 0.9551,
      "step": 69500
    },
    {
      "epoch": 0.8,
      "learning_rate": 3.6620948536522905e-05,
      "loss": 0.9435,
      "step": 70000
    },
    {
      "epoch": 0.81,
      "learning_rate": 3.652538388321235e-05,
      "loss": 0.9429,
      "step": 70500
    },
    {
      "epoch": 0.81,
      "learning_rate": 3.6429819229901805e-05,
      "loss": 0.9314,
      "step": 71000
    },
    {
      "epoch": 0.82,
      "learning_rate": 3.633425457659125e-05,
      "loss": 0.9499,
      "step": 71500
    },
    {
      "epoch": 0.83,
      "learning_rate": 3.62386899232807e-05,
      "loss": 0.9335,
      "step": 72000
    },
    {
      "epoch": 0.83,
      "learning_rate": 3.614312526997015e-05,
      "loss": 0.9423,
      "step": 72500
    },
    {
      "epoch": 0.84,
      "learning_rate": 3.60475606166596e-05,
      "loss": 0.924,
      "step": 73000
    },
    {
      "epoch": 0.84,
      "learning_rate": 3.595199596334905e-05,
      "loss": 0.9386,
      "step": 73500
    },
    {
      "epoch": 0.85,
      "learning_rate": 3.5856431310038497e-05,
      "loss": 0.923,
      "step": 74000
    },
    {
      "epoch": 0.85,
      "learning_rate": 3.576086665672794e-05,
      "loss": 0.9288,
      "step": 74500
    },
    {
      "epoch": 0.86,
      "learning_rate": 3.5665302003417396e-05,
      "loss": 0.9335,
      "step": 75000
    },
    {
      "epoch": 0.87,
      "learning_rate": 3.556973735010684e-05,
      "loss": 0.9173,
      "step": 75500
    },
    {
      "epoch": 0.87,
      "learning_rate": 3.5474172696796296e-05,
      "loss": 0.9339,
      "step": 76000
    },
    {
      "epoch": 0.88,
      "learning_rate": 3.537860804348574e-05,
      "loss": 0.9105,
      "step": 76500
    },
    {
      "epoch": 0.88,
      "learning_rate": 3.528304339017519e-05,
      "loss": 0.9377,
      "step": 77000
    },
    {
      "epoch": 0.89,
      "learning_rate": 3.518747873686464e-05,
      "loss": 0.9166,
      "step": 77500
    },
    {
      "epoch": 0.89,
      "learning_rate": 3.509191408355409e-05,
      "loss": 0.9168,
      "step": 78000
    },
    {
      "epoch": 0.9,
      "learning_rate": 3.4996349430243545e-05,
      "loss": 0.911,
      "step": 78500
    },
    {
      "epoch": 0.91,
      "learning_rate": 3.490078477693299e-05,
      "loss": 0.9174,
      "step": 79000
    },
    {
      "epoch": 0.91,
      "learning_rate": 3.480522012362244e-05,
      "loss": 0.9292,
      "step": 79500
    },
    {
      "epoch": 0.92,
      "learning_rate": 3.470965547031189e-05,
      "loss": 0.9146,
      "step": 80000
    },
    {
      "epoch": 0.92,
      "learning_rate": 3.461409081700134e-05,
      "loss": 0.9122,
      "step": 80500
    },
    {
      "epoch": 0.93,
      "learning_rate": 3.451852616369079e-05,
      "loss": 0.9186,
      "step": 81000
    },
    {
      "epoch": 0.93,
      "learning_rate": 3.4422961510380237e-05,
      "loss": 0.919,
      "step": 81500
    },
    {
      "epoch": 0.94,
      "learning_rate": 3.432739685706968e-05,
      "loss": 0.9108,
      "step": 82000
    },
    {
      "epoch": 0.95,
      "learning_rate": 3.4231832203759136e-05,
      "loss": 0.8935,
      "step": 82500
    },
    {
      "epoch": 0.95,
      "learning_rate": 3.413626755044858e-05,
      "loss": 0.903,
      "step": 83000
    },
    {
      "epoch": 0.96,
      "learning_rate": 3.4040702897138036e-05,
      "loss": 0.92,
      "step": 83500
    },
    {
      "epoch": 0.96,
      "learning_rate": 3.394513824382748e-05,
      "loss": 0.9104,
      "step": 84000
    },
    {
      "epoch": 0.97,
      "learning_rate": 3.384957359051693e-05,
      "loss": 0.9074,
      "step": 84500
    },
    {
      "epoch": 0.97,
      "learning_rate": 3.375400893720638e-05,
      "loss": 0.9063,
      "step": 85000
    },
    {
      "epoch": 0.98,
      "learning_rate": 3.365844428389583e-05,
      "loss": 0.905,
      "step": 85500
    },
    {
      "epoch": 0.99,
      "learning_rate": 3.356287963058528e-05,
      "loss": 0.8945,
      "step": 86000
    },
    {
      "epoch": 0.99,
      "learning_rate": 3.346731497727473e-05,
      "loss": 0.9077,
      "step": 86500
    },
    {
      "epoch": 1.0,
      "learning_rate": 3.337175032396417e-05,
      "loss": 0.8878,
      "step": 87000
    },
    {
      "epoch": 1.0,
      "learning_rate": 3.327618567065363e-05,
      "loss": 0.8388,
      "step": 87500
    },
    {
      "epoch": 1.01,
      "learning_rate": 3.318062101734308e-05,
      "loss": 0.8148,
      "step": 88000
    },
    {
      "epoch": 1.01,
      "learning_rate": 3.308505636403253e-05,
      "loss": 0.8024,
      "step": 88500
    },
    {
      "epoch": 1.02,
      "learning_rate": 3.298949171072198e-05,
      "loss": 0.8132,
      "step": 89000
    },
    {
      "epoch": 1.03,
      "learning_rate": 3.289392705741142e-05,
      "loss": 0.8028,
      "step": 89500
    },
    {
      "epoch": 1.03,
      "learning_rate": 3.2798362404100876e-05,
      "loss": 0.8078,
      "step": 90000
    },
    {
      "epoch": 1.04,
      "learning_rate": 3.270279775079032e-05,
      "loss": 0.8252,
      "step": 90500
    },
    {
      "epoch": 1.04,
      "learning_rate": 3.2607233097479776e-05,
      "loss": 0.7964,
      "step": 91000
    },
    {
      "epoch": 1.05,
      "learning_rate": 3.251166844416922e-05,
      "loss": 0.813,
      "step": 91500
    },
    {
      "epoch": 1.06,
      "learning_rate": 3.241610379085867e-05,
      "loss": 0.798,
      "step": 92000
    },
    {
      "epoch": 1.06,
      "learning_rate": 3.232053913754812e-05,
      "loss": 0.8003,
      "step": 92500
    },
    {
      "epoch": 1.07,
      "learning_rate": 3.222497448423757e-05,
      "loss": 0.8108,
      "step": 93000
    },
    {
      "epoch": 1.07,
      "learning_rate": 3.212940983092702e-05,
      "loss": 0.8256,
      "step": 93500
    },
    {
      "epoch": 1.08,
      "learning_rate": 3.203384517761647e-05,
      "loss": 0.8262,
      "step": 94000
    },
    {
      "epoch": 1.08,
      "learning_rate": 3.193828052430591e-05,
      "loss": 0.8144,
      "step": 94500
    },
    {
      "epoch": 1.09,
      "learning_rate": 3.184271587099537e-05,
      "loss": 0.8183,
      "step": 95000
    },
    {
      "epoch": 1.1,
      "learning_rate": 3.174715121768481e-05,
      "loss": 0.8124,
      "step": 95500
    },
    {
      "epoch": 1.1,
      "learning_rate": 3.165158656437427e-05,
      "loss": 0.8143,
      "step": 96000
    },
    {
      "epoch": 1.11,
      "learning_rate": 3.155602191106371e-05,
      "loss": 0.8122,
      "step": 96500
    },
    {
      "epoch": 1.11,
      "learning_rate": 3.146045725775316e-05,
      "loss": 0.815,
      "step": 97000
    },
    {
      "epoch": 1.12,
      "learning_rate": 3.136489260444261e-05,
      "loss": 0.8069,
      "step": 97500
    },
    {
      "epoch": 1.12,
      "learning_rate": 3.126932795113206e-05,
      "loss": 0.8088,
      "step": 98000
    },
    {
      "epoch": 1.13,
      "learning_rate": 3.117376329782151e-05,
      "loss": 0.8184,
      "step": 98500
    },
    {
      "epoch": 1.14,
      "learning_rate": 3.107819864451096e-05,
      "loss": 0.8036,
      "step": 99000
    },
    {
      "epoch": 1.14,
      "learning_rate": 3.098263399120041e-05,
      "loss": 0.8372,
      "step": 99500
    },
    {
      "epoch": 1.15,
      "learning_rate": 3.088706933788986e-05,
      "loss": 0.8116,
      "step": 100000
    },
    {
      "epoch": 1.15,
      "learning_rate": 3.079150468457931e-05,
      "loss": 0.8077,
      "step": 100500
    },
    {
      "epoch": 1.16,
      "learning_rate": 3.069594003126876e-05,
      "loss": 0.81,
      "step": 101000
    },
    {
      "epoch": 1.16,
      "learning_rate": 3.060037537795821e-05,
      "loss": 0.7957,
      "step": 101500
    },
    {
      "epoch": 1.17,
      "learning_rate": 3.0504810724647654e-05,
      "loss": 0.818,
      "step": 102000
    },
    {
      "epoch": 1.18,
      "learning_rate": 3.0409246071337104e-05,
      "loss": 0.8097,
      "step": 102500
    },
    {
      "epoch": 1.18,
      "learning_rate": 3.031368141802655e-05,
      "loss": 0.8113,
      "step": 103000
    },
    {
      "epoch": 1.19,
      "learning_rate": 3.0218116764716004e-05,
      "loss": 0.7916,
      "step": 103500
    },
    {
      "epoch": 1.19,
      "learning_rate": 3.012255211140545e-05,
      "loss": 0.8194,
      "step": 104000
    },
    {
      "epoch": 1.2,
      "learning_rate": 3.0026987458094903e-05,
      "loss": 0.8172,
      "step": 104500
    },
    {
      "epoch": 1.2,
      "learning_rate": 2.993142280478435e-05,
      "loss": 0.8141,
      "step": 105000
    },
    {
      "epoch": 1.21,
      "learning_rate": 2.98358581514738e-05,
      "loss": 0.8155,
      "step": 105500
    },
    {
      "epoch": 1.22,
      "learning_rate": 2.9740293498163246e-05,
      "loss": 0.8197,
      "step": 106000
    },
    {
      "epoch": 1.22,
      "learning_rate": 2.96447288448527e-05,
      "loss": 0.8015,
      "step": 106500
    },
    {
      "epoch": 1.23,
      "learning_rate": 2.9549164191542145e-05,
      "loss": 0.7978,
      "step": 107000
    },
    {
      "epoch": 1.23,
      "learning_rate": 2.9453599538231595e-05,
      "loss": 0.8057,
      "step": 107500
    },
    {
      "epoch": 1.24,
      "learning_rate": 2.9358034884921048e-05,
      "loss": 0.8102,
      "step": 108000
    },
    {
      "epoch": 1.24,
      "learning_rate": 2.9262470231610495e-05,
      "loss": 0.7861,
      "step": 108500
    },
    {
      "epoch": 1.25,
      "learning_rate": 2.9166905578299948e-05,
      "loss": 0.8137,
      "step": 109000
    },
    {
      "epoch": 1.26,
      "learning_rate": 2.9071340924989394e-05,
      "loss": 0.8057,
      "step": 109500
    },
    {
      "epoch": 1.26,
      "learning_rate": 2.8975776271678844e-05,
      "loss": 0.8239,
      "step": 110000
    },
    {
      "epoch": 1.27,
      "learning_rate": 2.888021161836829e-05,
      "loss": 0.7989,
      "step": 110500
    },
    {
      "epoch": 1.27,
      "learning_rate": 2.8784646965057744e-05,
      "loss": 0.8048,
      "step": 111000
    },
    {
      "epoch": 1.28,
      "learning_rate": 2.868908231174719e-05,
      "loss": 0.8038,
      "step": 111500
    },
    {
      "epoch": 1.28,
      "learning_rate": 2.8593517658436643e-05,
      "loss": 0.7906,
      "step": 112000
    },
    {
      "epoch": 1.29,
      "learning_rate": 2.849795300512609e-05,
      "loss": 0.8044,
      "step": 112500
    },
    {
      "epoch": 1.3,
      "learning_rate": 2.840238835181554e-05,
      "loss": 0.8114,
      "step": 113000
    },
    {
      "epoch": 1.3,
      "learning_rate": 2.8306823698504986e-05,
      "loss": 0.7954,
      "step": 113500
    },
    {
      "epoch": 1.31,
      "learning_rate": 2.821125904519444e-05,
      "loss": 0.7984,
      "step": 114000
    },
    {
      "epoch": 1.31,
      "learning_rate": 2.8115694391883885e-05,
      "loss": 0.8192,
      "step": 114500
    },
    {
      "epoch": 1.32,
      "learning_rate": 2.8020129738573335e-05,
      "loss": 0.8027,
      "step": 115000
    },
    {
      "epoch": 1.32,
      "learning_rate": 2.792456508526278e-05,
      "loss": 0.8054,
      "step": 115500
    },
    {
      "epoch": 1.33,
      "learning_rate": 2.7829000431952235e-05,
      "loss": 0.8059,
      "step": 116000
    },
    {
      "epoch": 1.34,
      "learning_rate": 2.773343577864168e-05,
      "loss": 0.8027,
      "step": 116500
    },
    {
      "epoch": 1.34,
      "learning_rate": 2.7637871125331134e-05,
      "loss": 0.8053,
      "step": 117000
    },
    {
      "epoch": 1.35,
      "learning_rate": 2.7542306472020584e-05,
      "loss": 0.796,
      "step": 117500
    },
    {
      "epoch": 1.35,
      "learning_rate": 2.744674181871003e-05,
      "loss": 0.7983,
      "step": 118000
    },
    {
      "epoch": 1.36,
      "learning_rate": 2.7351177165399484e-05,
      "loss": 0.7908,
      "step": 118500
    },
    {
      "epoch": 1.36,
      "learning_rate": 2.725561251208893e-05,
      "loss": 0.7987,
      "step": 119000
    },
    {
      "epoch": 1.37,
      "learning_rate": 2.716004785877838e-05,
      "loss": 0.7959,
      "step": 119500
    },
    {
      "epoch": 1.38,
      "learning_rate": 2.7064483205467826e-05,
      "loss": 0.8101,
      "step": 120000
    },
    {
      "epoch": 1.38,
      "learning_rate": 2.696891855215728e-05,
      "loss": 0.7921,
      "step": 120500
    },
    {
      "epoch": 1.39,
      "learning_rate": 2.6873353898846726e-05,
      "loss": 0.7842,
      "step": 121000
    },
    {
      "epoch": 1.39,
      "learning_rate": 2.677778924553618e-05,
      "loss": 0.7932,
      "step": 121500
    },
    {
      "epoch": 1.4,
      "learning_rate": 2.6682224592225625e-05,
      "loss": 0.7845,
      "step": 122000
    },
    {
      "epoch": 1.4,
      "learning_rate": 2.6586659938915075e-05,
      "loss": 0.7833,
      "step": 122500
    },
    {
      "epoch": 1.41,
      "learning_rate": 2.649109528560452e-05,
      "loss": 0.7894,
      "step": 123000
    },
    {
      "epoch": 1.42,
      "learning_rate": 2.6395530632293975e-05,
      "loss": 0.7718,
      "step": 123500
    },
    {
      "epoch": 1.42,
      "learning_rate": 2.629996597898342e-05,
      "loss": 0.7878,
      "step": 124000
    },
    {
      "epoch": 1.43,
      "learning_rate": 2.6204401325672874e-05,
      "loss": 0.7986,
      "step": 124500
    },
    {
      "epoch": 1.43,
      "learning_rate": 2.6108836672362317e-05,
      "loss": 0.797,
      "step": 125000
    },
    {
      "epoch": 1.44,
      "learning_rate": 2.601327201905177e-05,
      "loss": 0.7918,
      "step": 125500
    },
    {
      "epoch": 1.44,
      "learning_rate": 2.5917707365741217e-05,
      "loss": 0.7852,
      "step": 126000
    },
    {
      "epoch": 1.45,
      "learning_rate": 2.582214271243067e-05,
      "loss": 0.7873,
      "step": 126500
    },
    {
      "epoch": 1.46,
      "learning_rate": 2.572657805912012e-05,
      "loss": 0.7999,
      "step": 127000
    },
    {
      "epoch": 1.46,
      "learning_rate": 2.5631013405809566e-05,
      "loss": 0.7919,
      "step": 127500
    },
    {
      "epoch": 1.47,
      "learning_rate": 2.553544875249902e-05,
      "loss": 0.7941,
      "step": 128000
    },
    {
      "epoch": 1.47,
      "learning_rate": 2.5439884099188466e-05,
      "loss": 0.7808,
      "step": 128500
    },
    {
      "epoch": 1.48,
      "learning_rate": 2.534431944587792e-05,
      "loss": 0.7806,
      "step": 129000
    },
    {
      "epoch": 1.49,
      "learning_rate": 2.5248754792567365e-05,
      "loss": 0.78,
      "step": 129500
    },
    {
      "epoch": 1.49,
      "learning_rate": 2.5153190139256815e-05,
      "loss": 0.7908,
      "step": 130000
    },
    {
      "epoch": 1.5,
      "learning_rate": 2.505762548594626e-05,
      "loss": 0.7819,
      "step": 130500
    },
    {
      "epoch": 1.5,
      "learning_rate": 2.496206083263571e-05,
      "loss": 0.7863,
      "step": 131000
    },
    {
      "epoch": 1.51,
      "learning_rate": 2.486649617932516e-05,
      "loss": 0.7927,
      "step": 131500
    },
    {
      "epoch": 1.51,
      "learning_rate": 2.477093152601461e-05,
      "loss": 0.7974,
      "step": 132000
    },
    {
      "epoch": 1.52,
      "learning_rate": 2.467536687270406e-05,
      "loss": 0.7836,
      "step": 132500
    },
    {
      "epoch": 1.53,
      "learning_rate": 2.457980221939351e-05,
      "loss": 0.7831,
      "step": 133000
    },
    {
      "epoch": 1.53,
      "learning_rate": 2.448423756608296e-05,
      "loss": 0.7866,
      "step": 133500
    },
    {
      "epoch": 1.54,
      "learning_rate": 2.438867291277241e-05,
      "loss": 0.787,
      "step": 134000
    },
    {
      "epoch": 1.54,
      "learning_rate": 2.4293108259461857e-05,
      "loss": 0.7782,
      "step": 134500
    },
    {
      "epoch": 1.55,
      "learning_rate": 2.4197543606151306e-05,
      "loss": 0.7848,
      "step": 135000
    },
    {
      "epoch": 1.55,
      "learning_rate": 2.4101978952840756e-05,
      "loss": 0.7858,
      "step": 135500
    },
    {
      "epoch": 1.56,
      "learning_rate": 2.4006414299530206e-05,
      "loss": 0.7909,
      "step": 136000
    },
    {
      "epoch": 1.57,
      "learning_rate": 2.3910849646219656e-05,
      "loss": 0.7842,
      "step": 136500
    },
    {
      "epoch": 1.57,
      "learning_rate": 2.3815284992909102e-05,
      "loss": 0.7716,
      "step": 137000
    },
    {
      "epoch": 1.58,
      "learning_rate": 2.3719720339598552e-05,
      "loss": 0.7858,
      "step": 137500
    },
    {
      "epoch": 1.58,
      "learning_rate": 2.3624155686288e-05,
      "loss": 0.7856,
      "step": 138000
    },
    {
      "epoch": 1.59,
      "learning_rate": 2.352859103297745e-05,
      "loss": 0.7797,
      "step": 138500
    },
    {
      "epoch": 1.59,
      "learning_rate": 2.34330263796669e-05,
      "loss": 0.7619,
      "step": 139000
    },
    {
      "epoch": 1.6,
      "learning_rate": 2.333746172635635e-05,
      "loss": 0.7902,
      "step": 139500
    },
    {
      "epoch": 1.61,
      "learning_rate": 2.3241897073045797e-05,
      "loss": 0.7639,
      "step": 140000
    },
    {
      "epoch": 1.61,
      "learning_rate": 2.3146332419735247e-05,
      "loss": 0.7784,
      "step": 140500
    },
    {
      "epoch": 1.62,
      "learning_rate": 2.3050767766424697e-05,
      "loss": 0.7805,
      "step": 141000
    },
    {
      "epoch": 1.62,
      "learning_rate": 2.295520311311415e-05,
      "loss": 0.7716,
      "step": 141500
    },
    {
      "epoch": 1.63,
      "learning_rate": 2.2859638459803597e-05,
      "loss": 0.7781,
      "step": 142000
    },
    {
      "epoch": 1.63,
      "learning_rate": 2.2764073806493046e-05,
      "loss": 0.7929,
      "step": 142500
    },
    {
      "epoch": 1.64,
      "learning_rate": 2.2668509153182496e-05,
      "loss": 0.7791,
      "step": 143000
    },
    {
      "epoch": 1.65,
      "learning_rate": 2.2572944499871946e-05,
      "loss": 0.763,
      "step": 143500
    },
    {
      "epoch": 1.65,
      "learning_rate": 2.2477379846561396e-05,
      "loss": 0.7859,
      "step": 144000
    },
    {
      "epoch": 1.66,
      "learning_rate": 2.2381815193250842e-05,
      "loss": 0.7727,
      "step": 144500
    },
    {
      "epoch": 1.66,
      "learning_rate": 2.2286250539940292e-05,
      "loss": 0.7804,
      "step": 145000
    },
    {
      "epoch": 1.67,
      "learning_rate": 2.2190685886629742e-05,
      "loss": 0.7923,
      "step": 145500
    },
    {
      "epoch": 1.67,
      "learning_rate": 2.209512123331919e-05,
      "loss": 0.7538,
      "step": 146000
    },
    {
      "epoch": 1.68,
      "learning_rate": 2.199955658000864e-05,
      "loss": 0.7815,
      "step": 146500
    },
    {
      "epoch": 1.69,
      "learning_rate": 2.1903991926698088e-05,
      "loss": 0.7655,
      "step": 147000
    },
    {
      "epoch": 1.69,
      "learning_rate": 2.1808427273387537e-05,
      "loss": 0.7767,
      "step": 147500
    },
    {
      "epoch": 1.7,
      "learning_rate": 2.1712862620076987e-05,
      "loss": 0.7731,
      "step": 148000
    },
    {
      "epoch": 1.7,
      "learning_rate": 2.1617297966766437e-05,
      "loss": 0.7742,
      "step": 148500
    },
    {
      "epoch": 1.71,
      "learning_rate": 2.1521733313455887e-05,
      "loss": 0.755,
      "step": 149000
    },
    {
      "epoch": 1.71,
      "learning_rate": 2.1426168660145333e-05,
      "loss": 0.7631,
      "step": 149500
    },
    {
      "epoch": 1.72,
      "learning_rate": 2.1330604006834783e-05,
      "loss": 0.7636,
      "step": 150000
    },
    {
      "epoch": 1.73,
      "learning_rate": 2.1235039353524233e-05,
      "loss": 0.7712,
      "step": 150500
    },
    {
      "epoch": 1.73,
      "learning_rate": 2.1139474700213683e-05,
      "loss": 0.7581,
      "step": 151000
    },
    {
      "epoch": 1.74,
      "learning_rate": 2.1043910046903136e-05,
      "loss": 0.7654,
      "step": 151500
    },
    {
      "epoch": 1.74,
      "learning_rate": 2.0948345393592582e-05,
      "loss": 0.7581,
      "step": 152000
    },
    {
      "epoch": 1.75,
      "learning_rate": 2.0852780740282032e-05,
      "loss": 0.7595,
      "step": 152500
    },
    {
      "epoch": 1.75,
      "learning_rate": 2.0757216086971482e-05,
      "loss": 0.7693,
      "step": 153000
    },
    {
      "epoch": 1.76,
      "learning_rate": 2.066165143366093e-05,
      "loss": 0.7599,
      "step": 153500
    },
    {
      "epoch": 1.77,
      "learning_rate": 2.056608678035038e-05,
      "loss": 0.7563,
      "step": 154000
    },
    {
      "epoch": 1.77,
      "learning_rate": 2.0470522127039828e-05,
      "loss": 0.7644,
      "step": 154500
    },
    {
      "epoch": 1.78,
      "learning_rate": 2.0374957473729278e-05,
      "loss": 0.7669,
      "step": 155000
    },
    {
      "epoch": 1.78,
      "learning_rate": 2.0279392820418727e-05,
      "loss": 0.7559,
      "step": 155500
    },
    {
      "epoch": 1.79,
      "learning_rate": 2.0183828167108177e-05,
      "loss": 0.7646,
      "step": 156000
    },
    {
      "epoch": 1.79,
      "learning_rate": 2.0088263513797627e-05,
      "loss": 0.7602,
      "step": 156500
    },
    {
      "epoch": 1.8,
      "learning_rate": 1.9992698860487073e-05,
      "loss": 0.7594,
      "step": 157000
    },
    {
      "epoch": 1.81,
      "learning_rate": 1.9897134207176523e-05,
      "loss": 0.7593,
      "step": 157500
    },
    {
      "epoch": 1.81,
      "learning_rate": 1.9801569553865973e-05,
      "loss": 0.7578,
      "step": 158000
    },
    {
      "epoch": 1.82,
      "learning_rate": 1.9706004900555423e-05,
      "loss": 0.7612,
      "step": 158500
    },
    {
      "epoch": 1.82,
      "learning_rate": 1.9610440247244872e-05,
      "loss": 0.7553,
      "step": 159000
    },
    {
      "epoch": 1.83,
      "learning_rate": 1.951487559393432e-05,
      "loss": 0.7599,
      "step": 159500
    },
    {
      "epoch": 1.83,
      "learning_rate": 1.941931094062377e-05,
      "loss": 0.7387,
      "step": 160000
    },
    {
      "epoch": 1.84,
      "learning_rate": 1.932374628731322e-05,
      "loss": 0.7558,
      "step": 160500
    },
    {
      "epoch": 1.85,
      "learning_rate": 1.9228181634002668e-05,
      "loss": 0.7482,
      "step": 161000
    },
    {
      "epoch": 1.85,
      "learning_rate": 1.9132616980692118e-05,
      "loss": 0.7703,
      "step": 161500
    },
    {
      "epoch": 1.86,
      "learning_rate": 1.9037052327381568e-05,
      "loss": 0.7375,
      "step": 162000
    },
    {
      "epoch": 1.86,
      "learning_rate": 1.8941487674071018e-05,
      "loss": 0.7565,
      "step": 162500
    },
    {
      "epoch": 1.87,
      "learning_rate": 1.8845923020760467e-05,
      "loss": 0.7591,
      "step": 163000
    },
    {
      "epoch": 1.87,
      "learning_rate": 1.8750358367449917e-05,
      "loss": 0.7639,
      "step": 163500
    },
    {
      "epoch": 1.88,
      "learning_rate": 1.8654793714139367e-05,
      "loss": 0.7493,
      "step": 164000
    },
    {
      "epoch": 1.89,
      "learning_rate": 1.8559229060828813e-05,
      "loss": 0.7435,
      "step": 164500
    },
    {
      "epoch": 1.89,
      "learning_rate": 1.8463664407518263e-05,
      "loss": 0.7492,
      "step": 165000
    },
    {
      "epoch": 1.9,
      "learning_rate": 1.8368099754207713e-05,
      "loss": 0.745,
      "step": 165500
    },
    {
      "epoch": 1.9,
      "learning_rate": 1.8272535100897163e-05,
      "loss": 0.7529,
      "step": 166000
    },
    {
      "epoch": 1.91,
      "learning_rate": 1.8176970447586612e-05,
      "loss": 0.7599,
      "step": 166500
    },
    {
      "epoch": 1.92,
      "learning_rate": 1.808140579427606e-05,
      "loss": 0.7546,
      "step": 167000
    },
    {
      "epoch": 1.92,
      "learning_rate": 1.798584114096551e-05,
      "loss": 0.7485,
      "step": 167500
    },
    {
      "epoch": 1.93,
      "learning_rate": 1.789027648765496e-05,
      "loss": 0.7591,
      "step": 168000
    },
    {
      "epoch": 1.93,
      "learning_rate": 1.7794711834344408e-05,
      "loss": 0.7466,
      "step": 168500
    },
    {
      "epoch": 1.94,
      "learning_rate": 1.7699147181033858e-05,
      "loss": 0.7399,
      "step": 169000
    },
    {
      "epoch": 1.94,
      "learning_rate": 1.7603582527723304e-05,
      "loss": 0.7547,
      "step": 169500
    },
    {
      "epoch": 1.95,
      "learning_rate": 1.7508017874412754e-05,
      "loss": 0.7459,
      "step": 170000
    },
    {
      "epoch": 1.96,
      "learning_rate": 1.7412453221102204e-05,
      "loss": 0.7503,
      "step": 170500
    },
    {
      "epoch": 1.96,
      "learning_rate": 1.7316888567791657e-05,
      "loss": 0.7451,
      "step": 171000
    },
    {
      "epoch": 1.97,
      "learning_rate": 1.7221323914481104e-05,
      "loss": 0.7369,
      "step": 171500
    },
    {
      "epoch": 1.97,
      "learning_rate": 1.7125759261170553e-05,
      "loss": 0.7363,
      "step": 172000
    },
    {
      "epoch": 1.98,
      "learning_rate": 1.7030194607860003e-05,
      "loss": 0.7457,
      "step": 172500
    },
    {
      "epoch": 1.98,
      "learning_rate": 1.6934629954549453e-05,
      "loss": 0.7473,
      "step": 173000
    },
    {
      "epoch": 1.99,
      "learning_rate": 1.6839065301238903e-05,
      "loss": 0.7508,
      "step": 173500
    },
    {
      "epoch": 2.0,
      "learning_rate": 1.674350064792835e-05,
      "loss": 0.7477,
      "step": 174000
    },
    {
      "epoch": 2.0,
      "learning_rate": 1.66479359946178e-05,
      "loss": 0.7332,
      "step": 174500
    },
    {
      "epoch": 2.01,
      "learning_rate": 1.655237134130725e-05,
      "loss": 0.6564,
      "step": 175000
    },
    {
      "epoch": 2.01,
      "learning_rate": 1.64568066879967e-05,
      "loss": 0.66,
      "step": 175500
    },
    {
      "epoch": 2.02,
      "learning_rate": 1.636124203468615e-05,
      "loss": 0.6528,
      "step": 176000
    },
    {
      "epoch": 2.02,
      "learning_rate": 1.6265677381375595e-05,
      "loss": 0.6626,
      "step": 176500
    },
    {
      "epoch": 2.03,
      "learning_rate": 1.6170112728065044e-05,
      "loss": 0.6507,
      "step": 177000
    },
    {
      "epoch": 2.04,
      "learning_rate": 1.6074548074754494e-05,
      "loss": 0.6543,
      "step": 177500
    },
    {
      "epoch": 2.04,
      "learning_rate": 1.5978983421443944e-05,
      "loss": 0.6613,
      "step": 178000
    },
    {
      "epoch": 2.05,
      "learning_rate": 1.5883418768133394e-05,
      "loss": 0.6535,
      "step": 178500
    },
    {
      "epoch": 2.05,
      "learning_rate": 1.5787854114822844e-05,
      "loss": 0.6544,
      "step": 179000
    },
    {
      "epoch": 2.06,
      "learning_rate": 1.569228946151229e-05,
      "loss": 0.6553,
      "step": 179500
    },
    {
      "epoch": 2.06,
      "learning_rate": 1.559672480820174e-05,
      "loss": 0.6576,
      "step": 180000
    },
    {
      "epoch": 2.07,
      "learning_rate": 1.550116015489119e-05,
      "loss": 0.6497,
      "step": 180500
    },
    {
      "epoch": 2.08,
      "learning_rate": 1.5405595501580643e-05,
      "loss": 0.6618,
      "step": 181000
    },
    {
      "epoch": 2.08,
      "learning_rate": 1.531003084827009e-05,
      "loss": 0.646,
      "step": 181500
    },
    {
      "epoch": 2.09,
      "learning_rate": 1.5214466194959539e-05,
      "loss": 0.6451,
      "step": 182000
    },
    {
      "epoch": 2.09,
      "learning_rate": 1.5118901541648989e-05,
      "loss": 0.6566,
      "step": 182500
    },
    {
      "epoch": 2.1,
      "learning_rate": 1.5023336888338439e-05,
      "loss": 0.6641,
      "step": 183000
    },
    {
      "epoch": 2.1,
      "learning_rate": 1.4927772235027887e-05,
      "loss": 0.6587,
      "step": 183500
    },
    {
      "epoch": 2.11,
      "learning_rate": 1.4832207581717336e-05,
      "loss": 0.655,
      "step": 184000
    },
    {
      "epoch": 2.12,
      "learning_rate": 1.4736642928406785e-05,
      "loss": 0.6515,
      "step": 184500
    },
    {
      "epoch": 2.12,
      "learning_rate": 1.4641078275096234e-05,
      "loss": 0.6415,
      "step": 185000
    },
    {
      "epoch": 2.13,
      "learning_rate": 1.4545513621785684e-05,
      "loss": 0.642,
      "step": 185500
    },
    {
      "epoch": 2.13,
      "learning_rate": 1.4449948968475132e-05,
      "loss": 0.6513,
      "step": 186000
    },
    {
      "epoch": 2.14,
      "learning_rate": 1.4354384315164582e-05,
      "loss": 0.6538,
      "step": 186500
    },
    {
      "epoch": 2.14,
      "learning_rate": 1.4258819661854032e-05,
      "loss": 0.6543,
      "step": 187000
    },
    {
      "epoch": 2.15,
      "learning_rate": 1.416325500854348e-05,
      "loss": 0.6587,
      "step": 187500
    },
    {
      "epoch": 2.16,
      "learning_rate": 1.406769035523293e-05,
      "loss": 0.6544,
      "step": 188000
    },
    {
      "epoch": 2.16,
      "learning_rate": 1.3972125701922378e-05,
      "loss": 0.6556,
      "step": 188500
    },
    {
      "epoch": 2.17,
      "learning_rate": 1.3876561048611828e-05,
      "loss": 0.6509,
      "step": 189000
    },
    {
      "epoch": 2.17,
      "learning_rate": 1.3780996395301277e-05,
      "loss": 0.6514,
      "step": 189500
    },
    {
      "epoch": 2.18,
      "learning_rate": 1.3685431741990725e-05,
      "loss": 0.6606,
      "step": 190000
    },
    {
      "epoch": 2.18,
      "learning_rate": 1.3589867088680177e-05,
      "loss": 0.654,
      "step": 190500
    },
    {
      "epoch": 2.19,
      "learning_rate": 1.3494302435369627e-05,
      "loss": 0.6494,
      "step": 191000
    },
    {
      "epoch": 2.2,
      "learning_rate": 1.3398737782059077e-05,
      "loss": 0.6482,
      "step": 191500
    },
    {
      "epoch": 2.2,
      "learning_rate": 1.3303173128748525e-05,
      "loss": 0.6615,
      "step": 192000
    },
    {
      "epoch": 2.21,
      "learning_rate": 1.3207608475437974e-05,
      "loss": 0.6517,
      "step": 192500
    },
    {
      "epoch": 2.21,
      "learning_rate": 1.3112043822127424e-05,
      "loss": 0.6415,
      "step": 193000
    },
    {
      "epoch": 2.22,
      "learning_rate": 1.3016479168816872e-05,
      "loss": 0.6388,
      "step": 193500
    },
    {
      "epoch": 2.22,
      "learning_rate": 1.2920914515506322e-05,
      "loss": 0.6472,
      "step": 194000
    },
    {
      "epoch": 2.23,
      "learning_rate": 1.282534986219577e-05,
      "loss": 0.6396,
      "step": 194500
    },
    {
      "epoch": 2.24,
      "learning_rate": 1.272978520888522e-05,
      "loss": 0.6549,
      "step": 195000
    },
    {
      "epoch": 2.24,
      "learning_rate": 1.263422055557467e-05,
      "loss": 0.6471,
      "step": 195500
    },
    {
      "epoch": 2.25,
      "learning_rate": 1.2538655902264118e-05,
      "loss": 0.6502,
      "step": 196000
    },
    {
      "epoch": 2.25,
      "learning_rate": 1.2443091248953568e-05,
      "loss": 0.6448,
      "step": 196500
    },
    {
      "epoch": 2.26,
      "learning_rate": 1.2347526595643016e-05,
      "loss": 0.6471,
      "step": 197000
    },
    {
      "epoch": 2.26,
      "learning_rate": 1.2251961942332465e-05,
      "loss": 0.639,
      "step": 197500
    },
    {
      "epoch": 2.27,
      "learning_rate": 1.2156397289021915e-05,
      "loss": 0.6507,
      "step": 198000
    },
    {
      "epoch": 2.28,
      "learning_rate": 1.2060832635711365e-05,
      "loss": 0.6513,
      "step": 198500
    },
    {
      "epoch": 2.28,
      "learning_rate": 1.1965267982400815e-05,
      "loss": 0.642,
      "step": 199000
    },
    {
      "epoch": 2.29,
      "learning_rate": 1.1869703329090263e-05,
      "loss": 0.6459,
      "step": 199500
    },
    {
      "epoch": 2.29,
      "learning_rate": 1.1774138675779713e-05,
      "loss": 0.6431,
      "step": 200000
    },
    {
      "epoch": 2.3,
      "learning_rate": 1.1678574022469163e-05,
      "loss": 0.6497,
      "step": 200500
    },
    {
      "epoch": 2.31,
      "learning_rate": 1.158300936915861e-05,
      "loss": 0.6513,
      "step": 201000
    },
    {
      "epoch": 2.31,
      "learning_rate": 1.148744471584806e-05,
      "loss": 0.6426,
      "step": 201500
    },
    {
      "epoch": 2.32,
      "learning_rate": 1.1391880062537509e-05,
      "loss": 0.6454,
      "step": 202000
    },
    {
      "epoch": 2.32,
      "learning_rate": 1.129631540922696e-05,
      "loss": 0.6461,
      "step": 202500
    },
    {
      "epoch": 2.33,
      "learning_rate": 1.1200750755916408e-05,
      "loss": 0.6513,
      "step": 203000
    },
    {
      "epoch": 2.33,
      "learning_rate": 1.1105186102605858e-05,
      "loss": 0.6426,
      "step": 203500
    },
    {
      "epoch": 2.34,
      "learning_rate": 1.1009621449295308e-05,
      "loss": 0.6445,
      "step": 204000
    },
    {
      "epoch": 2.35,
      "learning_rate": 1.0914056795984756e-05,
      "loss": 0.6455,
      "step": 204500
    },
    {
      "epoch": 2.35,
      "learning_rate": 1.0818492142674206e-05,
      "loss": 0.6621,
      "step": 205000
    },
    {
      "epoch": 2.36,
      "learning_rate": 1.0722927489363654e-05,
      "loss": 0.6403,
      "step": 205500
    },
    {
      "epoch": 2.36,
      "learning_rate": 1.0627362836053103e-05,
      "loss": 0.6339,
      "step": 206000
    },
    {
      "epoch": 2.37,
      "learning_rate": 1.0531798182742553e-05,
      "loss": 0.6407,
      "step": 206500
    },
    {
      "epoch": 2.37,
      "learning_rate": 1.0436233529432001e-05,
      "loss": 0.6573,
      "step": 207000
    },
    {
      "epoch": 2.38,
      "learning_rate": 1.0340668876121453e-05,
      "loss": 0.6348,
      "step": 207500
    },
    {
      "epoch": 2.39,
      "learning_rate": 1.0245104222810901e-05,
      "loss": 0.6425,
      "step": 208000
    },
    {
      "epoch": 2.39,
      "learning_rate": 1.014953956950035e-05,
      "loss": 0.6475,
      "step": 208500
    },
    {
      "epoch": 2.4,
      "learning_rate": 1.00539749161898e-05,
      "loss": 0.6416,
      "step": 209000
    },
    {
      "epoch": 2.4,
      "learning_rate": 9.958410262879249e-06,
      "loss": 0.6496,
      "step": 209500
    },
    {
      "epoch": 2.41,
      "learning_rate": 9.862845609568698e-06,
      "loss": 0.6585,
      "step": 210000
    },
    {
      "epoch": 2.41,
      "learning_rate": 9.767280956258146e-06,
      "loss": 0.636,
      "step": 210500
    },
    {
      "epoch": 2.42,
      "learning_rate": 9.671716302947596e-06,
      "loss": 0.6479,
      "step": 211000
    },
    {
      "epoch": 2.43,
      "learning_rate": 9.576151649637046e-06,
      "loss": 0.6557,
      "step": 211500
    },
    {
      "epoch": 2.43,
      "learning_rate": 9.480586996326494e-06,
      "loss": 0.6438,
      "step": 212000
    },
    {
      "epoch": 2.44,
      "learning_rate": 9.385022343015946e-06,
      "loss": 0.6371,
      "step": 212500
    },
    {
      "epoch": 2.44,
      "learning_rate": 9.289457689705394e-06,
      "loss": 0.635,
      "step": 213000
    },
    {
      "epoch": 2.45,
      "learning_rate": 9.193893036394843e-06,
      "loss": 0.6456,
      "step": 213500
    },
    {
      "epoch": 2.45,
      "learning_rate": 9.098328383084293e-06,
      "loss": 0.6419,
      "step": 214000
    },
    {
      "epoch": 2.46,
      "learning_rate": 9.002763729773741e-06,
      "loss": 0.6355,
      "step": 214500
    },
    {
      "epoch": 2.47,
      "learning_rate": 8.907199076463191e-06,
      "loss": 0.6464,
      "step": 215000
    },
    {
      "epoch": 2.47,
      "learning_rate": 8.81163442315264e-06,
      "loss": 0.6468,
      "step": 215500
    },
    {
      "epoch": 2.48,
      "learning_rate": 8.716069769842089e-06,
      "loss": 0.6324,
      "step": 216000
    },
    {
      "epoch": 2.48,
      "learning_rate": 8.620505116531539e-06,
      "loss": 0.6495,
      "step": 216500
    },
    {
      "epoch": 2.49,
      "learning_rate": 8.524940463220987e-06,
      "loss": 0.6412,
      "step": 217000
    },
    {
      "epoch": 2.49,
      "learning_rate": 8.429375809910438e-06,
      "loss": 0.6302,
      "step": 217500
    },
    {
      "epoch": 2.5,
      "learning_rate": 8.333811156599886e-06,
      "loss": 0.6464,
      "step": 218000
    },
    {
      "epoch": 2.51,
      "learning_rate": 8.238246503289336e-06,
      "loss": 0.6353,
      "step": 218500
    },
    {
      "epoch": 2.51,
      "learning_rate": 8.142681849978784e-06,
      "loss": 0.632,
      "step": 219000
    },
    {
      "epoch": 2.52,
      "learning_rate": 8.047117196668234e-06,
      "loss": 0.6438,
      "step": 219500
    },
    {
      "epoch": 2.52,
      "learning_rate": 7.951552543357684e-06,
      "loss": 0.6522,
      "step": 220000
    },
    {
      "epoch": 2.53,
      "learning_rate": 7.855987890047132e-06,
      "loss": 0.633,
      "step": 220500
    },
    {
      "epoch": 2.53,
      "learning_rate": 7.760423236736582e-06,
      "loss": 0.6465,
      "step": 221000
    },
    {
      "epoch": 2.54,
      "learning_rate": 7.664858583426032e-06,
      "loss": 0.6442,
      "step": 221500
    },
    {
      "epoch": 2.55,
      "learning_rate": 7.569293930115481e-06,
      "loss": 0.6342,
      "step": 222000
    },
    {
      "epoch": 2.55,
      "learning_rate": 7.47372927680493e-06,
      "loss": 0.6526,
      "step": 222500
    },
    {
      "epoch": 2.56,
      "learning_rate": 7.378164623494379e-06,
      "loss": 0.6327,
      "step": 223000
    },
    {
      "epoch": 2.56,
      "learning_rate": 7.282599970183829e-06,
      "loss": 0.6281,
      "step": 223500
    },
    {
      "epoch": 2.57,
      "learning_rate": 7.187035316873278e-06,
      "loss": 0.6431,
      "step": 224000
    },
    {
      "epoch": 2.57,
      "learning_rate": 7.091470663562727e-06,
      "loss": 0.6428,
      "step": 224500
    },
    {
      "epoch": 2.58,
      "learning_rate": 6.995906010252176e-06,
      "loss": 0.64,
      "step": 225000
    },
    {
      "epoch": 2.59,
      "learning_rate": 6.900341356941626e-06,
      "loss": 0.6345,
      "step": 225500
    },
    {
      "epoch": 2.59,
      "learning_rate": 6.804776703631075e-06,
      "loss": 0.6446,
      "step": 226000
    },
    {
      "epoch": 2.6,
      "learning_rate": 6.7092120503205236e-06,
      "loss": 0.6405,
      "step": 226500
    },
    {
      "epoch": 2.6,
      "learning_rate": 6.613647397009974e-06,
      "loss": 0.6269,
      "step": 227000
    },
    {
      "epoch": 2.61,
      "learning_rate": 6.518082743699423e-06,
      "loss": 0.6423,
      "step": 227500
    },
    {
      "epoch": 2.61,
      "learning_rate": 6.422518090388872e-06,
      "loss": 0.6197,
      "step": 228000
    },
    {
      "epoch": 2.62,
      "learning_rate": 6.326953437078322e-06,
      "loss": 0.6273,
      "step": 228500
    },
    {
      "epoch": 2.63,
      "learning_rate": 6.231388783767771e-06,
      "loss": 0.6245,
      "step": 229000
    },
    {
      "epoch": 2.63,
      "learning_rate": 6.13582413045722e-06,
      "loss": 0.6333,
      "step": 229500
    },
    {
      "epoch": 2.64,
      "learning_rate": 6.040259477146669e-06,
      "loss": 0.6312,
      "step": 230000
    },
    {
      "epoch": 2.64,
      "learning_rate": 5.944694823836118e-06,
      "loss": 0.653,
      "step": 230500
    },
    {
      "epoch": 2.65,
      "learning_rate": 5.849130170525568e-06,
      "loss": 0.6325,
      "step": 231000
    },
    {
      "epoch": 2.65,
      "learning_rate": 5.753565517215017e-06,
      "loss": 0.6387,
      "step": 231500
    },
    {
      "epoch": 2.66,
      "learning_rate": 5.658000863904466e-06,
      "loss": 0.6298,
      "step": 232000
    },
    {
      "epoch": 2.67,
      "learning_rate": 5.562436210593915e-06,
      "loss": 0.6368,
      "step": 232500
    },
    {
      "epoch": 2.67,
      "learning_rate": 5.466871557283365e-06,
      "loss": 0.6298,
      "step": 233000
    },
    {
      "epoch": 2.68,
      "learning_rate": 5.371306903972814e-06,
      "loss": 0.6233,
      "step": 233500
    },
    {
      "epoch": 2.68,
      "learning_rate": 5.275742250662264e-06,
      "loss": 0.6265,
      "step": 234000
    },
    {
      "epoch": 2.69,
      "learning_rate": 5.1801775973517126e-06,
      "loss": 0.6283,
      "step": 234500
    },
    {
      "epoch": 2.69,
      "learning_rate": 5.0846129440411615e-06,
      "loss": 0.6316,
      "step": 235000
    },
    {
      "epoch": 2.7,
      "learning_rate": 4.989048290730611e-06,
      "loss": 0.617,
      "step": 235500
    },
    {
      "epoch": 2.71,
      "learning_rate": 4.89348363742006e-06,
      "loss": 0.6298,
      "step": 236000
    },
    {
      "epoch": 2.71,
      "learning_rate": 4.79791898410951e-06,
      "loss": 0.6439,
      "step": 236500
    },
    {
      "epoch": 2.72,
      "learning_rate": 4.702354330798959e-06,
      "loss": 0.6437,
      "step": 237000
    },
    {
      "epoch": 2.72,
      "learning_rate": 4.606789677488408e-06,
      "loss": 0.6245,
      "step": 237500
    },
    {
      "epoch": 2.73,
      "learning_rate": 4.511225024177858e-06,
      "loss": 0.6295,
      "step": 238000
    },
    {
      "epoch": 2.74,
      "learning_rate": 4.415660370867307e-06,
      "loss": 0.6264,
      "step": 238500
    },
    {
      "epoch": 2.74,
      "learning_rate": 4.320095717556756e-06,
      "loss": 0.6121,
      "step": 239000
    },
    {
      "epoch": 2.75,
      "learning_rate": 4.224531064246205e-06,
      "loss": 0.6365,
      "step": 239500
    },
    {
      "epoch": 2.75,
      "learning_rate": 4.128966410935654e-06,
      "loss": 0.6243,
      "step": 240000
    },
    {
      "epoch": 2.76,
      "learning_rate": 4.033401757625104e-06,
      "loss": 0.6247,
      "step": 240500
    },
    {
      "epoch": 2.76,
      "learning_rate": 3.937837104314553e-06,
      "loss": 0.6258,
      "step": 241000
    },
    {
      "epoch": 2.77,
      "learning_rate": 3.842272451004003e-06,
      "loss": 0.6272,
      "step": 241500
    },
    {
      "epoch": 2.78,
      "learning_rate": 3.7467077976934518e-06,
      "loss": 0.6284,
      "step": 242000
    },
    {
      "epoch": 2.78,
      "learning_rate": 3.6511431443829007e-06,
      "loss": 0.6213,
      "step": 242500
    },
    {
      "epoch": 2.79,
      "learning_rate": 3.5555784910723505e-06,
      "loss": 0.6263,
      "step": 243000
    },
    {
      "epoch": 2.79,
      "learning_rate": 3.4600138377618e-06,
      "loss": 0.6336,
      "step": 243500
    },
    {
      "epoch": 2.8,
      "learning_rate": 3.364449184451249e-06,
      "loss": 0.6324,
      "step": 244000
    },
    {
      "epoch": 2.8,
      "learning_rate": 3.2688845311406977e-06,
      "loss": 0.6398,
      "step": 244500
    },
    {
      "epoch": 2.81,
      "learning_rate": 3.173319877830147e-06,
      "loss": 0.6373,
      "step": 245000
    },
    {
      "epoch": 2.82,
      "learning_rate": 3.0777552245195965e-06,
      "loss": 0.6203,
      "step": 245500
    },
    {
      "epoch": 2.82,
      "learning_rate": 2.982190571209046e-06,
      "loss": 0.6392,
      "step": 246000
    },
    {
      "epoch": 2.83,
      "learning_rate": 2.886625917898495e-06,
      "loss": 0.6196,
      "step": 246500
    },
    {
      "epoch": 2.83,
      "learning_rate": 2.7910612645879446e-06,
      "loss": 0.629,
      "step": 247000
    },
    {
      "epoch": 2.84,
      "learning_rate": 2.695496611277394e-06,
      "loss": 0.6216,
      "step": 247500
    },
    {
      "epoch": 2.84,
      "learning_rate": 2.599931957966843e-06,
      "loss": 0.6272,
      "step": 248000
    },
    {
      "epoch": 2.85,
      "learning_rate": 2.5043673046562922e-06,
      "loss": 0.6156,
      "step": 248500
    },
    {
      "epoch": 2.86,
      "learning_rate": 2.4088026513457416e-06,
      "loss": 0.6135,
      "step": 249000
    },
    {
      "epoch": 2.86,
      "learning_rate": 2.313237998035191e-06,
      "loss": 0.6118,
      "step": 249500
    },
    {
      "epoch": 2.87,
      "learning_rate": 2.2176733447246403e-06,
      "loss": 0.6151,
      "step": 250000
    },
    {
      "epoch": 2.87,
      "learning_rate": 2.1221086914140893e-06,
      "loss": 0.6281,
      "step": 250500
    },
    {
      "epoch": 2.88,
      "learning_rate": 2.0265440381035386e-06,
      "loss": 0.6241,
      "step": 251000
    },
    {
      "epoch": 2.88,
      "learning_rate": 1.930979384792988e-06,
      "loss": 0.6334,
      "step": 251500
    },
    {
      "epoch": 2.89,
      "learning_rate": 1.8354147314824374e-06,
      "loss": 0.6143,
      "step": 252000
    },
    {
      "epoch": 2.9,
      "learning_rate": 1.7398500781718865e-06,
      "loss": 0.631,
      "step": 252500
    },
    {
      "epoch": 2.9,
      "learning_rate": 1.6442854248613357e-06,
      "loss": 0.622,
      "step": 253000
    },
    {
      "epoch": 2.91,
      "learning_rate": 1.548720771550785e-06,
      "loss": 0.6228,
      "step": 253500
    },
    {
      "epoch": 2.91,
      "learning_rate": 1.4531561182402342e-06,
      "loss": 0.6224,
      "step": 254000
    },
    {
      "epoch": 2.92,
      "learning_rate": 1.3575914649296835e-06,
      "loss": 0.6253,
      "step": 254500
    },
    {
      "epoch": 2.92,
      "learning_rate": 1.262026811619133e-06,
      "loss": 0.639,
      "step": 255000
    },
    {
      "epoch": 2.93,
      "learning_rate": 1.166462158308582e-06,
      "loss": 0.6161,
      "step": 255500
    },
    {
      "epoch": 2.94,
      "learning_rate": 1.0708975049980314e-06,
      "loss": 0.618,
      "step": 256000
    },
    {
      "epoch": 2.94,
      "learning_rate": 9.753328516874808e-07,
      "loss": 0.6155,
      "step": 256500
    },
    {
      "epoch": 2.95,
      "learning_rate": 8.7976819837693e-07,
      "loss": 0.6096,
      "step": 257000
    },
    {
      "epoch": 2.95,
      "learning_rate": 7.842035450663792e-07,
      "loss": 0.6226,
      "step": 257500
    },
    {
      "epoch": 2.96,
      "learning_rate": 6.886388917558285e-07,
      "loss": 0.618,
      "step": 258000
    },
    {
      "epoch": 2.96,
      "learning_rate": 5.930742384452778e-07,
      "loss": 0.6241,
      "step": 258500
    },
    {
      "epoch": 2.97,
      "learning_rate": 4.975095851347271e-07,
      "loss": 0.6303,
      "step": 259000
    },
    {
      "epoch": 2.98,
      "learning_rate": 4.0194493182417635e-07,
      "loss": 0.626,
      "step": 259500
    },
    {
      "epoch": 2.98,
      "learning_rate": 3.063802785136256e-07,
      "loss": 0.6324,
      "step": 260000
    },
    {
      "epoch": 2.99,
      "learning_rate": 2.1081562520307492e-07,
      "loss": 0.619,
      "step": 260500
    },
    {
      "epoch": 2.99,
      "learning_rate": 1.1525097189252418e-07,
      "loss": 0.609,
      "step": 261000
    },
    {
      "epoch": 3.0,
      "learning_rate": 1.968631858197345e-08,
      "loss": 0.6266,
      "step": 261500
    },
    {
      "epoch": 3.0,
      "step": 261603,
      "total_flos": 2.70854049202176e+17,
      "train_loss": 0.8388812589635725,
      "train_runtime": 66410.1599,
      "train_samples_per_second": 39.392,
      "train_steps_per_second": 3.939
    }
  ],
  "max_steps": 261603,
  "num_train_epochs": 3,
  "total_flos": 2.70854049202176e+17,
  "trial_name": null,
  "trial_params": null
}