{
  "best_metric": 0.7774538386783285,
  "best_model_checkpoint": "swin-tiny-patch4-window7-224-img_orientation/checkpoint-1518",
  "epoch": 2.9970384995064165,
  "eval_steps": 500,
  "global_step": 1518,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 1.6447368421052632e-06, |
|
"loss": 1.4583, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.2894736842105265e-06, |
|
"loss": 1.4742, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 4.9342105263157895e-06, |
|
"loss": 1.4362, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 6.578947368421053e-06, |
|
"loss": 1.458, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 8.223684210526317e-06, |
|
"loss": 1.3734, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 9.868421052631579e-06, |
|
"loss": 1.3655, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.1513157894736843e-05, |
|
"loss": 1.3442, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.3157894736842106e-05, |
|
"loss": 1.3333, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.4802631578947368e-05, |
|
"loss": 1.2669, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.6447368421052635e-05, |
|
"loss": 1.2279, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.8092105263157896e-05, |
|
"loss": 1.176, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1.9736842105263158e-05, |
|
"loss": 1.1198, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 2.1381578947368423e-05, |
|
"loss": 1.0456, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 2.3026315789473685e-05, |
|
"loss": 0.9941, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 2.4671052631578947e-05, |
|
"loss": 0.991, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 2.6315789473684212e-05, |
|
"loss": 0.9243, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 2.7960526315789477e-05, |
|
"loss": 0.9057, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 2.9605263157894735e-05, |
|
"loss": 0.8941, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 3.125e-05, |
|
"loss": 0.8725, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 3.289473684210527e-05, |
|
"loss": 0.8545, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 3.4539473684210524e-05, |
|
"loss": 0.8274, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 3.618421052631579e-05, |
|
"loss": 0.818, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 3.7828947368421054e-05, |
|
"loss": 0.8329, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 3.9473684210526316e-05, |
|
"loss": 0.8005, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 4.111842105263158e-05, |
|
"loss": 0.7636, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 4.2763157894736847e-05, |
|
"loss": 0.8036, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 4.440789473684211e-05, |
|
"loss": 0.7663, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 4.605263157894737e-05, |
|
"loss": 0.7535, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 4.769736842105263e-05, |
|
"loss": 0.7418, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 4.9342105263157894e-05, |
|
"loss": 0.7471, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 4.9890190336749635e-05, |
|
"loss": 0.7603, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 4.970717423133236e-05, |
|
"loss": 0.7101, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 4.952415812591508e-05, |
|
"loss": 0.6832, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 4.934114202049781e-05, |
|
"loss": 0.6959, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 4.9158125915080526e-05, |
|
"loss": 0.7345, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 4.897510980966325e-05, |
|
"loss": 0.7131, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 4.879209370424598e-05, |
|
"loss": 0.7713, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 4.86090775988287e-05, |
|
"loss": 0.6945, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 4.8426061493411424e-05, |
|
"loss": 0.6644, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 4.824304538799414e-05, |
|
"loss": 0.679, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 4.806002928257687e-05, |
|
"loss": 0.6657, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 4.787701317715959e-05, |
|
"loss": 0.685, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 4.7693997071742315e-05, |
|
"loss": 0.6931, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 4.751098096632504e-05, |
|
"loss": 0.6803, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 4.732796486090777e-05, |
|
"loss": 0.6415, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 4.7144948755490486e-05, |
|
"loss": 0.6753, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 4.696193265007321e-05, |
|
"loss": 0.6676, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 4.677891654465593e-05, |
|
"loss": 0.6445, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 4.659590043923866e-05, |
|
"loss": 0.7077, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 4.641288433382138e-05, |
|
"loss": 0.6417, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 4.6229868228404096e-05, |
|
"loss": 0.6383, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 4.604685212298683e-05, |
|
"loss": 0.6662, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 4.586383601756955e-05, |
|
"loss": 0.6741, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 4.5680819912152275e-05, |
|
"loss": 0.6429, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 4.5497803806734994e-05, |
|
"loss": 0.6361, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 4.531478770131772e-05, |
|
"loss": 0.6221, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 4.513177159590044e-05, |
|
"loss": 0.64, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 4.4948755490483165e-05, |
|
"loss": 0.6432, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 4.4765739385065885e-05, |
|
"loss": 0.6621, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 4.458272327964861e-05, |
|
"loss": 0.5987, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 4.439970717423134e-05, |
|
"loss": 0.6368, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 4.4216691068814056e-05, |
|
"loss": 0.6285, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 4.403367496339678e-05, |
|
"loss": 0.645, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 4.38506588579795e-05, |
|
"loss": 0.6416, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 4.366764275256223e-05, |
|
"loss": 0.6513, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 4.348462664714495e-05, |
|
"loss": 0.6353, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 4.330161054172767e-05, |
|
"loss": 0.6323, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 4.31185944363104e-05, |
|
"loss": 0.6454, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.2935578330893125e-05, |
|
"loss": 0.6282, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.2752562225475845e-05, |
|
"loss": 0.6029, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.256954612005857e-05, |
|
"loss": 0.6069, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.238653001464129e-05, |
|
"loss": 0.5707, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 4.2203513909224016e-05, |
|
"loss": 0.6161, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 4.2020497803806735e-05, |
|
"loss": 0.6118, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 4.1837481698389455e-05, |
|
"loss": 0.613, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 4.165446559297219e-05, |
|
"loss": 0.6044, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 4.147144948755491e-05, |
|
"loss": 0.5668, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 4.128843338213763e-05, |
|
"loss": 0.6372, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 4.110541727672035e-05, |
|
"loss": 0.5783, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 4.092240117130308e-05, |
|
"loss": 0.6007, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 4.07393850658858e-05, |
|
"loss": 0.5667, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 4.0556368960468524e-05, |
|
"loss": 0.5819, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 4.037335285505124e-05, |
|
"loss": 0.5884, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 4.019033674963397e-05, |
|
"loss": 0.6358, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 4.0007320644216695e-05, |
|
"loss": 0.6365, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 3.9824304538799415e-05, |
|
"loss": 0.569, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 3.964128843338214e-05, |
|
"loss": 0.6281, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 3.945827232796486e-05, |
|
"loss": 0.5812, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 3.9275256222547586e-05, |
|
"loss": 0.604, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 3.9092240117130305e-05, |
|
"loss": 0.5833, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 3.890922401171303e-05, |
|
"loss": 0.5247, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 3.872620790629576e-05, |
|
"loss": 0.5513, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.8543191800878484e-05, |
|
"loss": 0.5885, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 3.83601756954612e-05, |
|
"loss": 0.5995, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 3.817715959004393e-05, |
|
"loss": 0.591, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 3.799414348462665e-05, |
|
"loss": 0.6269, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 3.7811127379209375e-05, |
|
"loss": 0.5496, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 3.7628111273792094e-05, |
|
"loss": 0.6007, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 3.744509516837481e-05, |
|
"loss": 0.5894, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 3.7262079062957546e-05, |
|
"loss": 0.6417, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.7079062957540265e-05, |
|
"loss": 0.5654, |
|
"step": 505 |
|
}, |
|
    {
      "epoch": 1.0,
      "eval_accuracy": 0.7356656948493683,
      "eval_loss": 0.46419134736061096,
      "eval_runtime": 54.4631,
      "eval_samples_per_second": 132.255,
      "eval_steps_per_second": 4.15,
      "step": 506
    },
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.689604685212299e-05, |
|
"loss": 0.5932, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.671303074670571e-05, |
|
"loss": 0.5732, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 3.653001464128844e-05, |
|
"loss": 0.5225, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 3.6346998535871156e-05, |
|
"loss": 0.57, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 3.616398243045388e-05, |
|
"loss": 0.5341, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 3.59809663250366e-05, |
|
"loss": 0.5654, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 3.579795021961933e-05, |
|
"loss": 0.5749, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 3.5614934114202054e-05, |
|
"loss": 0.5546, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 3.543191800878477e-05, |
|
"loss": 0.5608, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 3.52489019033675e-05, |
|
"loss": 0.5155, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 3.506588579795022e-05, |
|
"loss": 0.541, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 3.4882869692532945e-05, |
|
"loss": 0.5515, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 3.4699853587115664e-05, |
|
"loss": 0.5686, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 3.451683748169839e-05, |
|
"loss": 0.566, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 3.4333821376281116e-05, |
|
"loss": 0.5548, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 3.415080527086384e-05, |
|
"loss": 0.5808, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 3.396778916544656e-05, |
|
"loss": 0.5683, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 3.378477306002929e-05, |
|
"loss": 0.5428, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 3.360175695461201e-05, |
|
"loss": 0.541, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 3.3418740849194726e-05, |
|
"loss": 0.5082, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 3.323572474377745e-05, |
|
"loss": 0.5672, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 3.305270863836017e-05, |
|
"loss": 0.5593, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 3.2869692532942905e-05, |
|
"loss": 0.5492, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 3.2686676427525624e-05, |
|
"loss": 0.553, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 3.250366032210835e-05, |
|
"loss": 0.5812, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 3.232064421669107e-05, |
|
"loss": 0.5467, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 3.2137628111273795e-05, |
|
"loss": 0.5604, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 3.1954612005856515e-05, |
|
"loss": 0.5613, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 3.177159590043924e-05, |
|
"loss": 0.5247, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 3.158857979502196e-05, |
|
"loss": 0.5596, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 3.1405563689604686e-05, |
|
"loss": 0.5211, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 3.122254758418741e-05, |
|
"loss": 0.5338, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 3.103953147877013e-05, |
|
"loss": 0.5422, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 3.085651537335286e-05, |
|
"loss": 0.5502, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 3.067349926793558e-05, |
|
"loss": 0.5305, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 3.0490483162518303e-05, |
|
"loss": 0.5415, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 3.0307467057101026e-05, |
|
"loss": 0.5065, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 3.012445095168375e-05, |
|
"loss": 0.551, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 2.9941434846266475e-05, |
|
"loss": 0.5345, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 2.9758418740849197e-05, |
|
"loss": 0.514, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 2.957540263543192e-05, |
|
"loss": 0.5535, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 2.9392386530014643e-05, |
|
"loss": 0.5537, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 2.9209370424597365e-05, |
|
"loss": 0.5305, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 2.9026354319180088e-05, |
|
"loss": 0.5551, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 2.884333821376281e-05, |
|
"loss": 0.5723, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 2.8660322108345534e-05, |
|
"loss": 0.5722, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 2.847730600292826e-05, |
|
"loss": 0.5376, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 2.8294289897510982e-05, |
|
"loss": 0.577, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 2.8111273792093705e-05, |
|
"loss": 0.5322, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 2.7928257686676428e-05, |
|
"loss": 0.5505, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 2.774524158125915e-05, |
|
"loss": 0.4995, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 2.7562225475841873e-05, |
|
"loss": 0.5371, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 2.7379209370424596e-05, |
|
"loss": 0.5526, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 2.719619326500732e-05, |
|
"loss": 0.5275, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 2.7013177159590048e-05, |
|
"loss": 0.5073, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 2.683016105417277e-05, |
|
"loss": 0.5645, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 2.6647144948755493e-05, |
|
"loss": 0.5253, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 2.6464128843338216e-05, |
|
"loss": 0.5327, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 2.628111273792094e-05, |
|
"loss": 0.5219, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 2.609809663250366e-05, |
|
"loss": 0.5068, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 2.5915080527086384e-05, |
|
"loss": 0.5378, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 2.5732064421669107e-05, |
|
"loss": 0.5464, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 2.5549048316251833e-05, |
|
"loss": 0.5195, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 2.5366032210834556e-05, |
|
"loss": 0.5481, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 2.518301610541728e-05, |
|
"loss": 0.512, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 2.5e-05, |
|
"loss": 0.5342, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 2.4816983894582724e-05, |
|
"loss": 0.5444, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 2.463396778916545e-05, |
|
"loss": 0.533, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 2.4450951683748173e-05, |
|
"loss": 0.5249, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 2.4267935578330895e-05, |
|
"loss": 0.5867, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 2.4084919472913618e-05, |
|
"loss": 0.4845, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 2.390190336749634e-05, |
|
"loss": 0.5419, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 2.3718887262079064e-05, |
|
"loss": 0.5173, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 2.3535871156661786e-05, |
|
"loss": 0.5138, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 2.335285505124451e-05, |
|
"loss": 0.5291, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 2.3169838945827235e-05, |
|
"loss": 0.5299, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 2.2986822840409958e-05, |
|
"loss": 0.5249, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 2.280380673499268e-05, |
|
"loss": 0.4676, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 2.2620790629575403e-05, |
|
"loss": 0.5323, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 2.243777452415813e-05, |
|
"loss": 0.4845, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 2.2254758418740852e-05, |
|
"loss": 0.5559, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 2.2071742313323575e-05, |
|
"loss": 0.5064, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 2.1888726207906297e-05, |
|
"loss": 0.5281, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 2.170571010248902e-05, |
|
"loss": 0.5471, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 2.1522693997071743e-05, |
|
"loss": 0.4968, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 2.1339677891654465e-05, |
|
"loss": 0.481, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 2.1156661786237188e-05, |
|
"loss": 0.5199, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 2.0973645680819914e-05, |
|
"loss": 0.4974, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 2.0790629575402637e-05, |
|
"loss": 0.5176, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 2.060761346998536e-05, |
|
"loss": 0.5054, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 2.0424597364568082e-05, |
|
"loss": 0.5211, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 2.024158125915081e-05, |
|
"loss": 0.4668, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 2.005856515373353e-05, |
|
"loss": 0.4834, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 1.9875549048316254e-05, |
|
"loss": 0.5393, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 1.9692532942898977e-05, |
|
"loss": 0.5269, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 1.95095168374817e-05, |
|
"loss": 0.5445, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 1.9326500732064422e-05, |
|
"loss": 0.5473, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 1.9143484626647145e-05, |
|
"loss": 0.5002, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 1.8960468521229867e-05, |
|
"loss": 0.4839, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 1.8777452415812594e-05, |
|
"loss": 0.5268, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 1.8594436310395316e-05, |
|
"loss": 0.5194, |
|
"step": 1010 |
|
}, |
|
    {
      "epoch": 2.0,
      "eval_accuracy": 0.7675968346522283,
      "eval_loss": 0.4097585380077362,
      "eval_runtime": 51.2327,
      "eval_samples_per_second": 140.594,
      "eval_steps_per_second": 4.411,
      "step": 1013
    },
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 1.841142020497804e-05, |
|
"loss": 0.4812, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 1.822840409956076e-05, |
|
"loss": 0.5085, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 1.8045387994143488e-05, |
|
"loss": 0.461, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 1.786237188872621e-05, |
|
"loss": 0.5006, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 1.7679355783308933e-05, |
|
"loss": 0.4821, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 1.7496339677891656e-05, |
|
"loss": 0.4414, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 1.731332357247438e-05, |
|
"loss": 0.4986, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 1.71303074670571e-05, |
|
"loss": 0.4747, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 1.6947291361639824e-05, |
|
"loss": 0.5198, |
|
"step": 1055 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 1.6764275256222547e-05, |
|
"loss": 0.4425, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 1.6581259150805273e-05, |
|
"loss": 0.4593, |
|
"step": 1065 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 1.6398243045387995e-05, |
|
"loss": 0.5233, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 1.6215226939970718e-05, |
|
"loss": 0.4698, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 1.603221083455344e-05, |
|
"loss": 0.5108, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 1.5849194729136167e-05, |
|
"loss": 0.5274, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 1.566617862371889e-05, |
|
"loss": 0.4443, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 1.5483162518301612e-05, |
|
"loss": 0.4591, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 1.5300146412884335e-05, |
|
"loss": 0.5101, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 1.511713030746706e-05, |
|
"loss": 0.4938, |
|
"step": 1105 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 1.4934114202049782e-05, |
|
"loss": 0.5465, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 1.4751098096632505e-05, |
|
"loss": 0.4086, |
|
"step": 1115 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 1.4568081991215226e-05, |
|
"loss": 0.4771, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 1.4385065885797952e-05, |
|
"loss": 0.5017, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 1.4202049780380675e-05, |
|
"loss": 0.4975, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 1.4019033674963397e-05, |
|
"loss": 0.573, |
|
"step": 1135 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 1.383601756954612e-05, |
|
"loss": 0.4946, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 1.3653001464128845e-05, |
|
"loss": 0.4861, |
|
"step": 1145 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 1.3469985358711567e-05, |
|
"loss": 0.4579, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 1.328696925329429e-05, |
|
"loss": 0.4574, |
|
"step": 1155 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 1.3103953147877013e-05, |
|
"loss": 0.5262, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 1.2920937042459739e-05, |
|
"loss": 0.4991, |
|
"step": 1165 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 1.2737920937042461e-05, |
|
"loss": 0.5082, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 1.2554904831625182e-05, |
|
"loss": 0.4557, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 1.2371888726207907e-05, |
|
"loss": 0.5222, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 1.218887262079063e-05, |
|
"loss": 0.4846, |
|
"step": 1185 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 1.2005856515373354e-05, |
|
"loss": 0.4616, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 1.1822840409956077e-05, |
|
"loss": 0.4609, |
|
"step": 1195 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 1.1639824304538801e-05, |
|
"loss": 0.501, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 1.1456808199121522e-05, |
|
"loss": 0.4591, |
|
"step": 1205 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 1.1273792093704246e-05, |
|
"loss": 0.4996, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 1.109077598828697e-05, |
|
"loss": 0.4629, |
|
"step": 1215 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 1.0907759882869694e-05, |
|
"loss": 0.4559, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 1.0724743777452416e-05, |
|
"loss": 0.4881, |
|
"step": 1225 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 1.054172767203514e-05, |
|
"loss": 0.4593, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 1.0358711566617862e-05, |
|
"loss": 0.4847, |
|
"step": 1235 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 1.0175695461200586e-05, |
|
"loss": 0.4676, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 9.992679355783309e-06, |
|
"loss": 0.4893, |
|
"step": 1245 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 9.809663250366033e-06, |
|
"loss": 0.4845, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 9.626647144948756e-06, |
|
"loss": 0.4829, |
|
"step": 1255 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 9.44363103953148e-06, |
|
"loss": 0.5043, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 9.260614934114201e-06, |
|
"loss": 0.497, |
|
"step": 1265 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 9.077598828696926e-06, |
|
"loss": 0.5003, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 8.894582723279648e-06, |
|
"loss": 0.4462, |
|
"step": 1275 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 8.711566617862373e-06, |
|
"loss": 0.5062, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 8.528550512445096e-06, |
|
"loss": 0.4795, |
|
"step": 1285 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 8.34553440702782e-06, |
|
"loss": 0.4773, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 8.162518301610541e-06, |
|
"loss": 0.4667, |
|
"step": 1295 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 7.979502196193265e-06, |
|
"loss": 0.5078, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 7.796486090775988e-06, |
|
"loss": 0.4586, |
|
"step": 1305 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 7.613469985358712e-06, |
|
"loss": 0.4328, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 7.430453879941435e-06, |
|
"loss": 0.443, |
|
"step": 1315 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 7.247437774524159e-06, |
|
"loss": 0.4597, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 7.064421669106881e-06, |
|
"loss": 0.5333, |
|
"step": 1325 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 6.881405563689605e-06, |
|
"loss": 0.4654, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 6.698389458272328e-06, |
|
"loss": 0.4383, |
|
"step": 1335 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 6.515373352855052e-06, |
|
"loss": 0.4828, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 6.332357247437774e-06, |
|
"loss": 0.4849, |
|
"step": 1345 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 6.149341142020498e-06, |
|
"loss": 0.465, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 5.966325036603222e-06, |
|
"loss": 0.5227, |
|
"step": 1355 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 5.7833089311859446e-06, |
|
"loss": 0.5229, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 5.600292825768668e-06, |
|
"loss": 0.4592, |
|
"step": 1365 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 5.417276720351392e-06, |
|
"loss": 0.4769, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 5.234260614934114e-06, |
|
"loss": 0.4959, |
|
"step": 1375 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 5.051244509516838e-06, |
|
"loss": 0.4731, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 4.8682284040995615e-06, |
|
"loss": 0.4718, |
|
"step": 1385 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 4.685212298682284e-06, |
|
"loss": 0.5007, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 4.502196193265008e-06, |
|
"loss": 0.437, |
|
"step": 1395 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 4.319180087847731e-06, |
|
"loss": 0.4732, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 4.136163982430454e-06, |
|
"loss": 0.4582, |
|
"step": 1405 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 3.9531478770131775e-06, |
|
"loss": 0.4903, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 3.7701317715959007e-06, |
|
"loss": 0.5049, |
|
"step": 1415 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 3.587115666178624e-06, |
|
"loss": 0.457, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 3.4040995607613473e-06, |
|
"loss": 0.4516, |
|
"step": 1425 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 3.2210834553440705e-06, |
|
"loss": 0.5026, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 3.0380673499267936e-06, |
|
"loss": 0.5247, |
|
"step": 1435 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 2.855051244509517e-06, |
|
"loss": 0.4605, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 2.6720351390922403e-06, |
|
"loss": 0.4889, |
|
"step": 1445 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 2.4890190336749634e-06, |
|
"loss": 0.4645, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 2.306002928257687e-06, |
|
"loss": 0.4493, |
|
"step": 1455 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 2.12298682284041e-06, |
|
"loss": 0.492, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 1.9399707174231332e-06, |
|
"loss": 0.5066, |
|
"step": 1465 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 1.7569546120058566e-06, |
|
"loss": 0.4364, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 1.57393850658858e-06, |
|
"loss": 0.4823, |
|
"step": 1475 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 1.3909224011713032e-06, |
|
"loss": 0.5101, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 1.2079062957540264e-06, |
|
"loss": 0.5322, |
|
"step": 1485 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 1.0248901903367497e-06, |
|
"loss": 0.4745, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 8.41874084919473e-07, |
|
"loss": 0.4728, |
|
"step": 1495 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 6.588579795021963e-07, |
|
"loss": 0.5055, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 4.758418740849195e-07, |
|
"loss": 0.5092, |
|
"step": 1505 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 2.9282576866764276e-07, |
|
"loss": 0.4562, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 1.0980966325036604e-07, |
|
"loss": 0.4478, |
|
"step": 1515 |
|
}, |
|
    {
      "epoch": 3.0,
      "eval_accuracy": 0.7774538386783285,
      "eval_loss": 0.39165472984313965,
      "eval_runtime": 56.6277,
      "eval_samples_per_second": 127.199,
      "eval_steps_per_second": 3.991,
      "step": 1518
    },
    {
      "epoch": 3.0,
      "step": 1518,
      "total_flos": 4.829589697691566e+18,
      "train_loss": 0.5911478077470078,
      "train_runtime": 3253.5777,
      "train_samples_per_second": 59.774,
      "train_steps_per_second": 0.467
    }
  ],
  "logging_steps": 5,
  "max_steps": 1518,
  "num_train_epochs": 3,
  "save_steps": 500,
  "total_flos": 4.829589697691566e+18,
  "trial_name": null,
  "trial_params": null
}