{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.9999973749767029,
  "global_step": 190474,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.0000000000000002e-06, |
|
"loss": 3.5718, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 2.0000000000000003e-06, |
|
"loss": 3.5525, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 3e-06, |
|
"loss": 3.5047, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 4.000000000000001e-06, |
|
"loss": 3.4853, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 5e-06, |
|
"loss": 3.4635, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 6e-06, |
|
"loss": 3.4339, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 7e-06, |
|
"loss": 3.4143, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 8.000000000000001e-06, |
|
"loss": 3.3797, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 9e-06, |
|
"loss": 3.368, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 1e-05, |
|
"loss": 3.3519, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 7.944078947368422e-06, |
|
"loss": 3.3406, |
|
"step": 5500 |
|
}, |
|
{ |
|
"epoch": 3.23, |
|
"learning_rate": 5.888157894736842e-06, |
|
"loss": 3.2885, |
|
"step": 6000 |
|
}, |
|
{ |
|
"epoch": 3.5, |
|
"learning_rate": 3.832236842105263e-06, |
|
"loss": 3.262, |
|
"step": 6500 |
|
}, |
|
{ |
|
"epoch": 3.77, |
|
"learning_rate": 1.7763157894736844e-06, |
|
"loss": 3.2664, |
|
"step": 7000 |
|
}, |
|
{ |
|
"epoch": 4.0, |
|
"step": 7432, |
|
"total_flos": 32244153748534272, |
|
"train_runtime": 4035.7935, |
|
"train_samples_per_second": 1.842 |
|
}, |
|
{ |
|
"epoch": 4.04, |
|
"learning_rate": 1.36e-07, |
|
"loss": 3.2257, |
|
"step": 7500 |
|
}, |
|
{ |
|
"epoch": 4.31, |
|
"learning_rate": 1.1360000000000002e-06, |
|
"loss": 3.2579, |
|
"step": 8000 |
|
}, |
|
{ |
|
"epoch": 4.57, |
|
"learning_rate": 2.1360000000000004e-06, |
|
"loss": 3.2497, |
|
"step": 8500 |
|
}, |
|
{ |
|
"epoch": 4.84, |
|
"learning_rate": 3.136e-06, |
|
"loss": 3.2399, |
|
"step": 9000 |
|
}, |
|
{ |
|
"epoch": 5.11, |
|
"learning_rate": 4.136000000000001e-06, |
|
"loss": 3.2354, |
|
"step": 9500 |
|
}, |
|
{ |
|
"epoch": 5.38, |
|
"learning_rate": 5.136e-06, |
|
"loss": 3.2227, |
|
"step": 10000 |
|
}, |
|
{ |
|
"epoch": 5.65, |
|
"learning_rate": 6.136000000000001e-06, |
|
"loss": 3.2309, |
|
"step": 10500 |
|
}, |
|
{ |
|
"epoch": 5.92, |
|
"learning_rate": 7.136000000000001e-06, |
|
"loss": 3.237, |
|
"step": 11000 |
|
}, |
|
{ |
|
"epoch": 6.0, |
|
"step": 11148, |
|
"total_flos": 48368948381409792, |
|
"train_runtime": 2016.9377, |
|
"train_samples_per_second": 5.527 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 7.040000000000001e-07, |
|
"loss": 3.1385, |
|
"step": 11500 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 1.7040000000000001e-06, |
|
"loss": 3.1219, |
|
"step": 12000 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 2.704e-06, |
|
"loss": 3.0988, |
|
"step": 12500 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 3.7040000000000005e-06, |
|
"loss": 3.0695, |
|
"step": 13000 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 4.704e-06, |
|
"loss": 3.0795, |
|
"step": 13500 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 5.704000000000001e-06, |
|
"loss": 3.0666, |
|
"step": 14000 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 6.7040000000000005e-06, |
|
"loss": 3.0593, |
|
"step": 14500 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 7.704000000000001e-06, |
|
"loss": 3.0466, |
|
"step": 15000 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 8.704e-06, |
|
"loss": 3.0547, |
|
"step": 15500 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 9.704e-06, |
|
"loss": 3.0598, |
|
"step": 16000 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 9.981021598714645e-06, |
|
"loss": 3.0721, |
|
"step": 16500 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 9.954063642343403e-06, |
|
"loss": 3.0614, |
|
"step": 17000 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 9.927105685972159e-06, |
|
"loss": 3.0434, |
|
"step": 17500 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 9.900147729600915e-06, |
|
"loss": 3.0653, |
|
"step": 18000 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 9.873189773229673e-06, |
|
"loss": 3.0618, |
|
"step": 18500 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 9.846231816858427e-06, |
|
"loss": 3.0674, |
|
"step": 19000 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 9.819273860487185e-06, |
|
"loss": 3.0515, |
|
"step": 19500 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 9.792315904115941e-06, |
|
"loss": 3.0559, |
|
"step": 20000 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 9.765357947744698e-06, |
|
"loss": 3.0658, |
|
"step": 20500 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 9.738399991373455e-06, |
|
"loss": 3.0525, |
|
"step": 21000 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 9.711442035002212e-06, |
|
"loss": 3.0425, |
|
"step": 21500 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 9.684484078630968e-06, |
|
"loss": 3.0293, |
|
"step": 22000 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 9.657526122259726e-06, |
|
"loss": 3.05, |
|
"step": 22500 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 9.63056816588848e-06, |
|
"loss": 3.053, |
|
"step": 23000 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 9.603610209517238e-06, |
|
"loss": 3.0306, |
|
"step": 23500 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 9.576652253145994e-06, |
|
"loss": 3.0416, |
|
"step": 24000 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 9.54969429677475e-06, |
|
"loss": 3.0292, |
|
"step": 24500 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 9.522736340403508e-06, |
|
"loss": 3.0431, |
|
"step": 25000 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 9.495778384032264e-06, |
|
"loss": 3.0429, |
|
"step": 25500 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 9.46882042766102e-06, |
|
"loss": 3.0305, |
|
"step": 26000 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 9.441862471289779e-06, |
|
"loss": 3.0355, |
|
"step": 26500 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 9.414904514918533e-06, |
|
"loss": 3.0251, |
|
"step": 27000 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 9.387946558547291e-06, |
|
"loss": 3.0323, |
|
"step": 27500 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 9.360988602176047e-06, |
|
"loss": 3.0118, |
|
"step": 28000 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 9.334030645804803e-06, |
|
"loss": 3.0138, |
|
"step": 28500 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 9.307072689433561e-06, |
|
"loss": 3.0217, |
|
"step": 29000 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 9.280114733062316e-06, |
|
"loss": 3.0138, |
|
"step": 29500 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 9.253156776691074e-06, |
|
"loss": 3.0402, |
|
"step": 30000 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 9.22619882031983e-06, |
|
"loss": 3.0297, |
|
"step": 30500 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 9.199240863948586e-06, |
|
"loss": 3.0147, |
|
"step": 31000 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 9.172282907577344e-06, |
|
"loss": 3.0317, |
|
"step": 31500 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 9.1453249512061e-06, |
|
"loss": 3.021, |
|
"step": 32000 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 9.118366994834856e-06, |
|
"loss": 3.0284, |
|
"step": 32500 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 9.091409038463614e-06, |
|
"loss": 3.0274, |
|
"step": 33000 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 9.064451082092369e-06, |
|
"loss": 3.0163, |
|
"step": 33500 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 9.037493125721126e-06, |
|
"loss": 3.0173, |
|
"step": 34000 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 9.010535169349883e-06, |
|
"loss": 3.033, |
|
"step": 34500 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 8.983577212978639e-06, |
|
"loss": 3.0174, |
|
"step": 35000 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 8.956619256607397e-06, |
|
"loss": 3.0274, |
|
"step": 35500 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 8.929661300236153e-06, |
|
"loss": 3.0003, |
|
"step": 36000 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 8.902703343864909e-06, |
|
"loss": 3.0088, |
|
"step": 36500 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 8.875745387493667e-06, |
|
"loss": 3.0126, |
|
"step": 37000 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 8.848787431122421e-06, |
|
"loss": 2.9996, |
|
"step": 37500 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 8.82182947475118e-06, |
|
"loss": 3.0292, |
|
"step": 38000 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 8.794871518379935e-06, |
|
"loss": 3.008, |
|
"step": 38500 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 8.767913562008692e-06, |
|
"loss": 3.0032, |
|
"step": 39000 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 8.74095560563745e-06, |
|
"loss": 2.9903, |
|
"step": 39500 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 8.713997649266204e-06, |
|
"loss": 3.0021, |
|
"step": 40000 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 8.687039692894962e-06, |
|
"loss": 3.0082, |
|
"step": 40500 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 8.660081736523718e-06, |
|
"loss": 2.9998, |
|
"step": 41000 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 8.633123780152474e-06, |
|
"loss": 2.9956, |
|
"step": 41500 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 8.606165823781232e-06, |
|
"loss": 3.0029, |
|
"step": 42000 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 8.579207867409988e-06, |
|
"loss": 2.9969, |
|
"step": 42500 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 8.552249911038744e-06, |
|
"loss": 2.9917, |
|
"step": 43000 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 8.525291954667502e-06, |
|
"loss": 2.9977, |
|
"step": 43500 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 8.498333998296257e-06, |
|
"loss": 3.0194, |
|
"step": 44000 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 8.471376041925015e-06, |
|
"loss": 2.9922, |
|
"step": 44500 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 8.444418085553771e-06, |
|
"loss": 3.0042, |
|
"step": 45000 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 8.417460129182527e-06, |
|
"loss": 3.0048, |
|
"step": 45500 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 8.390502172811285e-06, |
|
"loss": 2.9955, |
|
"step": 46000 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 8.363544216440041e-06, |
|
"loss": 3.0094, |
|
"step": 46500 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 8.336586260068797e-06, |
|
"loss": 2.9846, |
|
"step": 47000 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 8.309628303697555e-06, |
|
"loss": 3.0013, |
|
"step": 47500 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 8.28267034732631e-06, |
|
"loss": 2.9834, |
|
"step": 48000 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 8.255712390955068e-06, |
|
"loss": 3.0037, |
|
"step": 48500 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 8.228754434583824e-06, |
|
"loss": 2.9997, |
|
"step": 49000 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 8.20179647821258e-06, |
|
"loss": 2.9993, |
|
"step": 49500 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 8.174838521841338e-06, |
|
"loss": 2.979, |
|
"step": 50000 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 8.147880565470094e-06, |
|
"loss": 2.9908, |
|
"step": 50500 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 8.12092260909885e-06, |
|
"loss": 2.9872, |
|
"step": 51000 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 8.093964652727606e-06, |
|
"loss": 2.9869, |
|
"step": 51500 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 8.067006696356363e-06, |
|
"loss": 2.9975, |
|
"step": 52000 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 8.04004873998512e-06, |
|
"loss": 2.9916, |
|
"step": 52500 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 8.013090783613877e-06, |
|
"loss": 2.9964, |
|
"step": 53000 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 7.986132827242633e-06, |
|
"loss": 2.9879, |
|
"step": 53500 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 7.95917487087139e-06, |
|
"loss": 2.9832, |
|
"step": 54000 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 7.932216914500145e-06, |
|
"loss": 2.9927, |
|
"step": 54500 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 7.905258958128903e-06, |
|
"loss": 2.981, |
|
"step": 55000 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 7.87830100175766e-06, |
|
"loss": 2.9894, |
|
"step": 55500 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 7.851343045386415e-06, |
|
"loss": 2.987, |
|
"step": 56000 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 7.824385089015173e-06, |
|
"loss": 2.9632, |
|
"step": 56500 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 7.79742713264393e-06, |
|
"loss": 2.9863, |
|
"step": 57000 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 7.770469176272686e-06, |
|
"loss": 2.9848, |
|
"step": 57500 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 7.743511219901444e-06, |
|
"loss": 2.987, |
|
"step": 58000 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 7.716553263530198e-06, |
|
"loss": 2.9731, |
|
"step": 58500 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 7.689595307158956e-06, |
|
"loss": 2.9886, |
|
"step": 59000 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 7.662637350787712e-06, |
|
"loss": 2.9826, |
|
"step": 59500 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 7.635679394416468e-06, |
|
"loss": 2.9773, |
|
"step": 60000 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 7.608721438045225e-06, |
|
"loss": 2.958, |
|
"step": 60500 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 7.5817634816739815e-06, |
|
"loss": 2.9887, |
|
"step": 61000 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 7.5548055253027386e-06, |
|
"loss": 2.9685, |
|
"step": 61500 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 7.527847568931496e-06, |
|
"loss": 2.9632, |
|
"step": 62000 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 7.500889612560252e-06, |
|
"loss": 2.9816, |
|
"step": 62500 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 7.473931656189009e-06, |
|
"loss": 2.9726, |
|
"step": 63000 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 7.446973699817764e-06, |
|
"loss": 2.9753, |
|
"step": 63500 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 7.420015743446521e-06, |
|
"loss": 2.9785, |
|
"step": 64000 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 7.393057787075278e-06, |
|
"loss": 2.9749, |
|
"step": 64500 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 7.366099830704034e-06, |
|
"loss": 2.9722, |
|
"step": 65000 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 7.3391418743327914e-06, |
|
"loss": 2.9636, |
|
"step": 65500 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 7.312183917961548e-06, |
|
"loss": 2.9849, |
|
"step": 66000 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 7.285225961590305e-06, |
|
"loss": 2.9696, |
|
"step": 66500 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 7.258268005219062e-06, |
|
"loss": 2.9667, |
|
"step": 67000 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 7.231310048847817e-06, |
|
"loss": 2.9635, |
|
"step": 67500 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 7.204352092476574e-06, |
|
"loss": 2.9669, |
|
"step": 68000 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 7.177394136105331e-06, |
|
"loss": 2.9778, |
|
"step": 68500 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 7.150436179734087e-06, |
|
"loss": 2.9681, |
|
"step": 69000 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 7.123478223362844e-06, |
|
"loss": 2.9739, |
|
"step": 69500 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 7.0965202669916e-06, |
|
"loss": 2.9737, |
|
"step": 70000 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 7.069562310620357e-06, |
|
"loss": 2.9659, |
|
"step": 70500 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 7.042604354249114e-06, |
|
"loss": 2.9698, |
|
"step": 71000 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 7.01564639787787e-06, |
|
"loss": 2.9675, |
|
"step": 71500 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 6.988688441506627e-06, |
|
"loss": 2.9718, |
|
"step": 72000 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 6.961730485135384e-06, |
|
"loss": 2.9665, |
|
"step": 72500 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 6.93477252876414e-06, |
|
"loss": 2.9883, |
|
"step": 73000 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 6.907814572392897e-06, |
|
"loss": 2.9842, |
|
"step": 73500 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 6.8808566160216525e-06, |
|
"loss": 2.9607, |
|
"step": 74000 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 6.8538986596504095e-06, |
|
"loss": 2.9776, |
|
"step": 74500 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 6.8269407032791666e-06, |
|
"loss": 2.963, |
|
"step": 75000 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 6.799982746907923e-06, |
|
"loss": 2.9571, |
|
"step": 75500 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 6.77302479053668e-06, |
|
"loss": 2.9495, |
|
"step": 76000 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 6.746066834165436e-06, |
|
"loss": 2.9601, |
|
"step": 76500 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 6.719108877794193e-06, |
|
"loss": 2.9645, |
|
"step": 77000 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 6.69215092142295e-06, |
|
"loss": 2.9559, |
|
"step": 77500 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 6.665192965051705e-06, |
|
"loss": 2.9583, |
|
"step": 78000 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 6.638235008680462e-06, |
|
"loss": 2.9543, |
|
"step": 78500 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 6.6112770523092194e-06, |
|
"loss": 2.9585, |
|
"step": 79000 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 6.584319095937976e-06, |
|
"loss": 2.9437, |
|
"step": 79500 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 6.557361139566733e-06, |
|
"loss": 2.9573, |
|
"step": 80000 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 6.530403183195488e-06, |
|
"loss": 2.9595, |
|
"step": 80500 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 6.503445226824245e-06, |
|
"loss": 2.9693, |
|
"step": 81000 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 6.476487270453002e-06, |
|
"loss": 2.956, |
|
"step": 81500 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 6.449529314081758e-06, |
|
"loss": 2.9568, |
|
"step": 82000 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 6.422571357710515e-06, |
|
"loss": 2.9555, |
|
"step": 82500 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 6.395613401339272e-06, |
|
"loss": 2.9467, |
|
"step": 83000 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 6.3686554449680285e-06, |
|
"loss": 2.9605, |
|
"step": 83500 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 6.3416974885967855e-06, |
|
"loss": 2.9549, |
|
"step": 84000 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 6.314739532225541e-06, |
|
"loss": 2.9449, |
|
"step": 84500 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 6.287781575854298e-06, |
|
"loss": 2.9564, |
|
"step": 85000 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 6.260823619483055e-06, |
|
"loss": 2.9448, |
|
"step": 85500 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 6.233865663111811e-06, |
|
"loss": 2.9518, |
|
"step": 86000 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 6.206907706740568e-06, |
|
"loss": 2.9597, |
|
"step": 86500 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 6.179949750369324e-06, |
|
"loss": 2.9664, |
|
"step": 87000 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 6.152991793998081e-06, |
|
"loss": 2.9503, |
|
"step": 87500 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 6.126033837626838e-06, |
|
"loss": 2.931, |
|
"step": 88000 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 6.099075881255594e-06, |
|
"loss": 2.9675, |
|
"step": 88500 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 6.072117924884351e-06, |
|
"loss": 2.9647, |
|
"step": 89000 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 6.045159968513108e-06, |
|
"loss": 2.9345, |
|
"step": 89500 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 6.018202012141864e-06, |
|
"loss": 2.9479, |
|
"step": 90000 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 5.991244055770621e-06, |
|
"loss": 2.947, |
|
"step": 90500 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 5.964286099399376e-06, |
|
"loss": 2.9549, |
|
"step": 91000 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 5.937328143028133e-06, |
|
"loss": 2.9594, |
|
"step": 91500 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 5.91037018665689e-06, |
|
"loss": 2.9543, |
|
"step": 92000 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 5.883412230285647e-06, |
|
"loss": 2.9481, |
|
"step": 92500 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 5.856454273914404e-06, |
|
"loss": 2.9593, |
|
"step": 93000 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 5.829496317543161e-06, |
|
"loss": 2.94, |
|
"step": 93500 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 5.802538361171917e-06, |
|
"loss": 2.9484, |
|
"step": 94000 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 5.775580404800674e-06, |
|
"loss": 2.9431, |
|
"step": 94500 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 5.748622448429429e-06, |
|
"loss": 2.9449, |
|
"step": 95000 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 5.721664492058186e-06, |
|
"loss": 2.9369, |
|
"step": 95500 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 5.694706535686943e-06, |
|
"loss": 2.9542, |
|
"step": 96000 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 5.6677485793156994e-06, |
|
"loss": 2.9468, |
|
"step": 96500 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 5.6407906229444565e-06, |
|
"loss": 2.9551, |
|
"step": 97000 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 5.613832666573213e-06, |
|
"loss": 2.9282, |
|
"step": 97500 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 5.58687471020197e-06, |
|
"loss": 2.9475, |
|
"step": 98000 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 5.559916753830727e-06, |
|
"loss": 2.9546, |
|
"step": 98500 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 5.532958797459482e-06, |
|
"loss": 2.9335, |
|
"step": 99000 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 5.506000841088239e-06, |
|
"loss": 2.9486, |
|
"step": 99500 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 5.479042884716996e-06, |
|
"loss": 2.9595, |
|
"step": 100000 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 5.452084928345752e-06, |
|
"loss": 2.9223, |
|
"step": 100500 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 5.425126971974509e-06, |
|
"loss": 2.9461, |
|
"step": 101000 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 5.3981690156032655e-06, |
|
"loss": 2.9471, |
|
"step": 101500 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 5.371211059232022e-06, |
|
"loss": 2.9323, |
|
"step": 102000 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 5.344253102860779e-06, |
|
"loss": 2.9288, |
|
"step": 102500 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 5.317295146489535e-06, |
|
"loss": 2.9378, |
|
"step": 103000 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 5.290337190118292e-06, |
|
"loss": 2.9221, |
|
"step": 103500 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 5.263379233747049e-06, |
|
"loss": 2.9339, |
|
"step": 104000 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 5.236421277375805e-06, |
|
"loss": 2.9353, |
|
"step": 104500 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 5.209463321004562e-06, |
|
"loss": 2.9405, |
|
"step": 105000 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 5.1825053646333175e-06, |
|
"loss": 2.9514, |
|
"step": 105500 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 5.155547408262075e-06, |
|
"loss": 2.9346, |
|
"step": 106000 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 5.128589451890832e-06, |
|
"loss": 2.9421, |
|
"step": 106500 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 5.101631495519588e-06, |
|
"loss": 2.9353, |
|
"step": 107000 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 5.074673539148345e-06, |
|
"loss": 2.933, |
|
"step": 107500 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 5.047715582777101e-06, |
|
"loss": 2.9221, |
|
"step": 108000 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 5.020757626405858e-06, |
|
"loss": 2.9621, |
|
"step": 108500 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 4.993799670034614e-06, |
|
"loss": 2.9304, |
|
"step": 109000 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 4.966841713663371e-06, |
|
"loss": 2.9428, |
|
"step": 109500 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 4.9398837572921274e-06, |
|
"loss": 2.9215, |
|
"step": 110000 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 4.9129258009208845e-06, |
|
"loss": 2.948, |
|
"step": 110500 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 4.885967844549641e-06, |
|
"loss": 2.9427, |
|
"step": 111000 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 4.859009888178398e-06, |
|
"loss": 2.9462, |
|
"step": 111500 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 4.832051931807154e-06, |
|
"loss": 2.9374, |
|
"step": 112000 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 4.805093975435911e-06, |
|
"loss": 2.9145, |
|
"step": 112500 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 4.778136019064667e-06, |
|
"loss": 2.9384, |
|
"step": 113000 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 4.751178062693424e-06, |
|
"loss": 2.9349, |
|
"step": 113500 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 4.72422010632218e-06, |
|
"loss": 2.9303, |
|
"step": 114000 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 4.6972621499509365e-06, |
|
"loss": 2.9306, |
|
"step": 114500 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 4.6703041935796935e-06, |
|
"loss": 2.921, |
|
"step": 115000 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 4.6433462372084506e-06, |
|
"loss": 2.928, |
|
"step": 115500 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 4.616388280837207e-06, |
|
"loss": 2.9289, |
|
"step": 116000 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 4.589430324465963e-06, |
|
"loss": 2.9219, |
|
"step": 116500 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 4.56247236809472e-06, |
|
"loss": 2.914, |
|
"step": 117000 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 4.535514411723476e-06, |
|
"loss": 2.9339, |
|
"step": 117500 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 4.508556455352233e-06, |
|
"loss": 2.9264, |
|
"step": 118000 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 4.481598498980989e-06, |
|
"loss": 2.9312, |
|
"step": 118500 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 4.454640542609746e-06, |
|
"loss": 2.9132, |
|
"step": 119000 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 4.427682586238503e-06, |
|
"loss": 2.9201, |
|
"step": 119500 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 4.40072462986726e-06, |
|
"loss": 2.9214, |
|
"step": 120000 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 4.373766673496016e-06, |
|
"loss": 2.9197, |
|
"step": 120500 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 4.346808717124773e-06, |
|
"loss": 2.9238, |
|
"step": 121000 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 4.319850760753529e-06, |
|
"loss": 2.9114, |
|
"step": 121500 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 4.292892804382286e-06, |
|
"loss": 2.9142, |
|
"step": 122000 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 4.265934848011042e-06, |
|
"loss": 2.9223, |
|
"step": 122500 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 4.238976891639799e-06, |
|
"loss": 2.9169, |
|
"step": 123000 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 4.2120189352685554e-06, |
|
"loss": 2.9305, |
|
"step": 123500 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 4.1850609788973125e-06, |
|
"loss": 2.9298, |
|
"step": 124000 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 4.158103022526069e-06, |
|
"loss": 2.9412, |
|
"step": 124500 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 4.131145066154825e-06, |
|
"loss": 2.9149, |
|
"step": 125000 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 4.104187109783582e-06, |
|
"loss": 2.9191, |
|
"step": 125500 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 4.077229153412339e-06, |
|
"loss": 2.9188, |
|
"step": 126000 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 4.050271197041095e-06, |
|
"loss": 2.9163, |
|
"step": 126500 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 4.023313240669851e-06, |
|
"loss": 2.9082, |
|
"step": 127000 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 3.996355284298608e-06, |
|
"loss": 2.9169, |
|
"step": 127500 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 3.9693973279273645e-06, |
|
"loss": 2.9091, |
|
"step": 128000 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 3.9424393715561215e-06, |
|
"loss": 2.9196, |
|
"step": 128500 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 3.915481415184878e-06, |
|
"loss": 2.9287, |
|
"step": 129000 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 3.888523458813635e-06, |
|
"loss": 2.8979, |
|
"step": 129500 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 3.861565502442391e-06, |
|
"loss": 2.9169, |
|
"step": 130000 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 3.834607546071148e-06, |
|
"loss": 2.9144, |
|
"step": 130500 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 3.8076495896999046e-06, |
|
"loss": 2.9094, |
|
"step": 131000 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 3.7806916333286608e-06, |
|
"loss": 2.9113, |
|
"step": 131500 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 3.7537336769574174e-06, |
|
"loss": 2.9179, |
|
"step": 132000 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 3.7267757205861744e-06, |
|
"loss": 2.9321, |
|
"step": 132500 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 3.6998177642149306e-06, |
|
"loss": 2.9284, |
|
"step": 133000 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 3.672859807843687e-06, |
|
"loss": 2.9231, |
|
"step": 133500 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 3.645901851472444e-06, |
|
"loss": 2.9114, |
|
"step": 134000 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 3.618943895101201e-06, |
|
"loss": 2.9173, |
|
"step": 134500 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 3.591985938729957e-06, |
|
"loss": 2.9145, |
|
"step": 135000 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 3.5650279823587136e-06, |
|
"loss": 2.9127, |
|
"step": 135500 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 3.5380700259874702e-06, |
|
"loss": 2.9149, |
|
"step": 136000 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.5111120696162273e-06, |
|
"loss": 2.926, |
|
"step": 136500 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.4841541132449834e-06, |
|
"loss": 2.9061, |
|
"step": 137000 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.45719615687374e-06, |
|
"loss": 2.9266, |
|
"step": 137500 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.4302382005024967e-06, |
|
"loss": 2.9092, |
|
"step": 138000 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.403280244131253e-06, |
|
"loss": 2.9268, |
|
"step": 138500 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.37632228776001e-06, |
|
"loss": 2.9088, |
|
"step": 139000 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.3493643313887665e-06, |
|
"loss": 2.8918, |
|
"step": 139500 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.3224063750175227e-06, |
|
"loss": 2.9174, |
|
"step": 140000 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.2954484186462793e-06, |
|
"loss": 2.9104, |
|
"step": 140500 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.2684904622750363e-06, |
|
"loss": 2.9042, |
|
"step": 141000 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.241532505903793e-06, |
|
"loss": 2.9035, |
|
"step": 141500 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.214574549532549e-06, |
|
"loss": 2.8953, |
|
"step": 142000 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.1876165931613057e-06, |
|
"loss": 2.9143, |
|
"step": 142500 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.1606586367900627e-06, |
|
"loss": 2.9151, |
|
"step": 143000 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.1337006804188193e-06, |
|
"loss": 2.9104, |
|
"step": 143500 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 3.1067427240475755e-06, |
|
"loss": 2.9164, |
|
"step": 144000 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 3.079784767676332e-06, |
|
"loss": 2.9027, |
|
"step": 144500 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 3.052826811305089e-06, |
|
"loss": 2.9007, |
|
"step": 145000 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 3.0258688549338454e-06, |
|
"loss": 2.9028, |
|
"step": 145500 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 2.998910898562602e-06, |
|
"loss": 2.9095, |
|
"step": 146000 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 2.9719529421913586e-06, |
|
"loss": 2.8995, |
|
"step": 146500 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 2.9449949858201156e-06, |
|
"loss": 2.9016, |
|
"step": 147000 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 2.9180370294488718e-06, |
|
"loss": 2.9078, |
|
"step": 147500 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 2.8910790730776284e-06, |
|
"loss": 2.9225, |
|
"step": 148000 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 2.864121116706385e-06, |
|
"loss": 2.8951, |
|
"step": 148500 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 2.837163160335141e-06, |
|
"loss": 2.8937, |
|
"step": 149000 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 2.8102052039638982e-06, |
|
"loss": 2.9105, |
|
"step": 149500 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 2.783247247592655e-06, |
|
"loss": 2.9172, |
|
"step": 150000 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 2.756289291221411e-06, |
|
"loss": 2.8935, |
|
"step": 150500 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 2.7293313348501676e-06, |
|
"loss": 2.8989, |
|
"step": 151000 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 2.7023733784789247e-06, |
|
"loss": 2.9197, |
|
"step": 151500 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 2.6754154221076813e-06, |
|
"loss": 2.9014, |
|
"step": 152000 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 2.6484574657364374e-06, |
|
"loss": 2.917, |
|
"step": 152500 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 2.621499509365194e-06, |
|
"loss": 2.9114, |
|
"step": 153000 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 2.594541552993951e-06, |
|
"loss": 2.8914, |
|
"step": 153500 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 2.5675835966227077e-06, |
|
"loss": 2.8933, |
|
"step": 154000 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 2.540625640251464e-06, |
|
"loss": 2.9082, |
|
"step": 154500 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 2.5136676838802205e-06, |
|
"loss": 2.9024, |
|
"step": 155000 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 2.486709727508977e-06, |
|
"loss": 2.8803, |
|
"step": 155500 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 2.4597517711377337e-06, |
|
"loss": 2.9039, |
|
"step": 156000 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 2.4327938147664903e-06, |
|
"loss": 2.9101, |
|
"step": 156500 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 2.405835858395247e-06, |
|
"loss": 2.8997, |
|
"step": 157000 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 2.3788779020240035e-06, |
|
"loss": 2.9082, |
|
"step": 157500 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 2.35191994565276e-06, |
|
"loss": 2.8857, |
|
"step": 158000 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 2.3249619892815167e-06, |
|
"loss": 2.9077, |
|
"step": 158500 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 2.2980040329102734e-06, |
|
"loss": 2.8974, |
|
"step": 159000 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 2.27104607653903e-06, |
|
"loss": 2.8819, |
|
"step": 159500 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 2.2440881201677866e-06, |
|
"loss": 2.9061, |
|
"step": 160000 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 2.217130163796543e-06, |
|
"loss": 2.9103, |
|
"step": 160500 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 2.1901722074252994e-06, |
|
"loss": 2.9046, |
|
"step": 161000 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 2.1632142510540564e-06, |
|
"loss": 2.8976, |
|
"step": 161500 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 2.1362562946828126e-06, |
|
"loss": 2.8751, |
|
"step": 162000 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 2.1092983383115696e-06, |
|
"loss": 2.9034, |
|
"step": 162500 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 2.082340381940326e-06, |
|
"loss": 2.9018, |
|
"step": 163000 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 2.055382425569083e-06, |
|
"loss": 2.8938, |
|
"step": 163500 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 2.028424469197839e-06, |
|
"loss": 2.9216, |
|
"step": 164000 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 2.001466512826596e-06, |
|
"loss": 2.8841, |
|
"step": 164500 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.9745085564553522e-06, |
|
"loss": 2.893, |
|
"step": 165000 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.9475506000841093e-06, |
|
"loss": 2.9049, |
|
"step": 165500 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.9205926437128654e-06, |
|
"loss": 2.8978, |
|
"step": 166000 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.8936346873416223e-06, |
|
"loss": 2.8871, |
|
"step": 166500 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.8666767309703787e-06, |
|
"loss": 2.8934, |
|
"step": 167000 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.8397187745991355e-06, |
|
"loss": 2.9071, |
|
"step": 167500 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.8127608182278919e-06, |
|
"loss": 2.8993, |
|
"step": 168000 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.7858028618566487e-06, |
|
"loss": 2.8957, |
|
"step": 168500 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.758844905485405e-06, |
|
"loss": 2.9015, |
|
"step": 169000 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.731886949114162e-06, |
|
"loss": 2.8916, |
|
"step": 169500 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.7049289927429183e-06, |
|
"loss": 2.9013, |
|
"step": 170000 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.6779710363716747e-06, |
|
"loss": 2.8928, |
|
"step": 170500 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.6510130800004315e-06, |
|
"loss": 2.8813, |
|
"step": 171000 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.624055123629188e-06, |
|
"loss": 2.8782, |
|
"step": 171500 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.5970971672579447e-06, |
|
"loss": 2.8838, |
|
"step": 172000 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.5701392108867011e-06, |
|
"loss": 2.8859, |
|
"step": 172500 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.543181254515458e-06, |
|
"loss": 2.8822, |
|
"step": 173000 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.5162232981442144e-06, |
|
"loss": 2.8794, |
|
"step": 173500 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.489265341772971e-06, |
|
"loss": 2.894, |
|
"step": 174000 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.4623073854017276e-06, |
|
"loss": 2.8849, |
|
"step": 174500 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.4353494290304842e-06, |
|
"loss": 2.9072, |
|
"step": 175000 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.4083914726592408e-06, |
|
"loss": 2.8869, |
|
"step": 175500 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.3814335162879974e-06, |
|
"loss": 2.8873, |
|
"step": 176000 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.3544755599167538e-06, |
|
"loss": 2.9072, |
|
"step": 176500 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.3275176035455106e-06, |
|
"loss": 2.889, |
|
"step": 177000 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.300559647174267e-06, |
|
"loss": 2.8639, |
|
"step": 177500 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.2736016908030238e-06, |
|
"loss": 2.8928, |
|
"step": 178000 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.2466437344317802e-06, |
|
"loss": 2.8948, |
|
"step": 178500 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.2196857780605368e-06, |
|
"loss": 2.8877, |
|
"step": 179000 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.1927278216892934e-06, |
|
"loss": 2.8897, |
|
"step": 179500 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.16576986531805e-06, |
|
"loss": 2.8959, |
|
"step": 180000 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.1388119089468067e-06, |
|
"loss": 2.8988, |
|
"step": 180500 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.1118539525755633e-06, |
|
"loss": 2.8774, |
|
"step": 181000 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.0848959962043199e-06, |
|
"loss": 2.8977, |
|
"step": 181500 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.0579380398330765e-06, |
|
"loss": 2.8776, |
|
"step": 182000 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.030980083461833e-06, |
|
"loss": 2.889, |
|
"step": 182500 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.0040221270905897e-06, |
|
"loss": 2.8759, |
|
"step": 183000 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 9.770641707193463e-07, |
|
"loss": 2.8768, |
|
"step": 183500 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 9.501062143481028e-07, |
|
"loss": 2.8877, |
|
"step": 184000 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 9.231482579768594e-07, |
|
"loss": 2.8839, |
|
"step": 184500 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 8.96190301605616e-07, |
|
"loss": 2.8889, |
|
"step": 185000 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 8.692323452343726e-07, |
|
"loss": 2.8838, |
|
"step": 185500 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 8.42274388863129e-07, |
|
"loss": 2.8698, |
|
"step": 186000 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 8.153164324918856e-07, |
|
"loss": 2.8761, |
|
"step": 186500 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 7.883584761206422e-07, |
|
"loss": 2.8769, |
|
"step": 187000 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 7.614005197493989e-07, |
|
"loss": 2.8713, |
|
"step": 187500 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 7.344425633781555e-07, |
|
"loss": 2.8764, |
|
"step": 188000 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 7.074846070069121e-07, |
|
"loss": 2.8854, |
|
"step": 188500 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 6.805266506356687e-07, |
|
"loss": 2.8777, |
|
"step": 189000 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 6.535686942644253e-07, |
|
"loss": 2.8941, |
|
"step": 189500 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 6.266107378931818e-07, |
|
"loss": 2.8917, |
|
"step": 190000 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"step": 190474, |
|
"total_flos": 812201236873720320, |
|
"train_runtime": 31399.0337, |
|
"train_samples_per_second": 6.066 |
|
} |
|
  ],
  "max_steps": 190474,
  "num_train_epochs": 1,
  "total_flos": 812201236873720320,
  "trial_name": null,
  "trial_params": null
}