{
  "best_metric": 0.9225346534653466,
  "best_model_checkpoint": "swin-finetuned-food101/checkpoint-3549",
  "epoch": 2.9993664202745514,
  "global_step": 3549,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01,
      "learning_rate": 1.4084507042253521e-06,
      "loss": 4.6907,
      "step": 10
    },
    {
      "epoch": 0.02,
      "learning_rate": 2.8169014084507042e-06,
      "loss": 4.679,
      "step": 20
    },
    {
      "epoch": 0.03,
      "learning_rate": 4.225352112676056e-06,
      "loss": 4.6485,
      "step": 30
    },
    {
      "epoch": 0.03,
      "learning_rate": 5.6338028169014084e-06,
      "loss": 4.6353,
      "step": 40
    },
    {
      "epoch": 0.04,
      "learning_rate": 7.042253521126762e-06,
      "loss": 4.6212,
      "step": 50
    },
    {
      "epoch": 0.05,
      "learning_rate": 8.450704225352112e-06,
      "loss": 4.5806,
      "step": 60
    },
    {
      "epoch": 0.06,
      "learning_rate": 9.859154929577465e-06,
      "loss": 4.5613,
      "step": 70
    },
    {
      "epoch": 0.07,
      "learning_rate": 1.1267605633802817e-05,
      "loss": 4.4989,
      "step": 80
    },
    {
      "epoch": 0.08,
      "learning_rate": 1.267605633802817e-05,
      "loss": 4.4342,
      "step": 90
    },
    {
      "epoch": 0.08,
      "learning_rate": 1.4084507042253523e-05,
      "loss": 4.3511,
      "step": 100
    },
    {
      "epoch": 0.09,
      "learning_rate": 1.5492957746478872e-05,
      "loss": 4.2613,
      "step": 110
    },
    {
      "epoch": 0.1,
      "learning_rate": 1.6901408450704224e-05,
      "loss": 4.0647,
      "step": 120
    },
    {
      "epoch": 0.11,
      "learning_rate": 1.830985915492958e-05,
      "loss": 3.8823,
      "step": 130
    },
    {
      "epoch": 0.12,
      "learning_rate": 1.971830985915493e-05,
      "loss": 3.6052,
      "step": 140
    },
    {
      "epoch": 0.13,
      "learning_rate": 2.112676056338028e-05,
      "loss": 3.236,
      "step": 150
    },
    {
      "epoch": 0.14,
      "learning_rate": 2.2535211267605634e-05,
      "loss": 2.8905,
      "step": 160
    },
    {
      "epoch": 0.14,
      "learning_rate": 2.3943661971830986e-05,
      "loss": 2.6692,
      "step": 170
    },
    {
      "epoch": 0.15,
      "learning_rate": 2.535211267605634e-05,
      "loss": 2.4274,
      "step": 180
    },
    {
      "epoch": 0.16,
      "learning_rate": 2.676056338028169e-05,
      "loss": 2.2282,
      "step": 190
    },
    {
      "epoch": 0.17,
      "learning_rate": 2.8169014084507046e-05,
      "loss": 2.1145,
      "step": 200
    },
    {
      "epoch": 0.18,
      "learning_rate": 2.9577464788732395e-05,
      "loss": 1.9002,
      "step": 210
    },
    {
      "epoch": 0.19,
      "learning_rate": 3.0985915492957744e-05,
      "loss": 1.7071,
      "step": 220
    },
    {
      "epoch": 0.19,
      "learning_rate": 3.23943661971831e-05,
      "loss": 1.4122,
      "step": 230
    },
    {
      "epoch": 0.2,
      "learning_rate": 3.380281690140845e-05,
      "loss": 1.5119,
      "step": 240
    },
    {
      "epoch": 0.21,
      "learning_rate": 3.5211267605633805e-05,
      "loss": 1.3445,
      "step": 250
    },
    {
      "epoch": 0.22,
      "learning_rate": 3.661971830985916e-05,
      "loss": 1.2829,
      "step": 260
    },
    {
      "epoch": 0.23,
      "learning_rate": 3.802816901408451e-05,
      "loss": 1.2674,
      "step": 270
    },
    {
      "epoch": 0.24,
      "learning_rate": 3.943661971830986e-05,
      "loss": 1.2246,
      "step": 280
    },
    {
      "epoch": 0.24,
      "learning_rate": 4.0845070422535214e-05,
      "loss": 1.1809,
      "step": 290
    },
    {
      "epoch": 0.25,
      "learning_rate": 4.225352112676056e-05,
      "loss": 1.1334,
      "step": 300
    },
    {
      "epoch": 0.26,
      "learning_rate": 4.366197183098591e-05,
      "loss": 0.9901,
      "step": 310
    },
    {
      "epoch": 0.27,
      "learning_rate": 4.507042253521127e-05,
      "loss": 1.0773,
      "step": 320
    },
    {
      "epoch": 0.28,
      "learning_rate": 4.647887323943662e-05,
      "loss": 1.1217,
      "step": 330
    },
    {
      "epoch": 0.29,
      "learning_rate": 4.788732394366197e-05,
      "loss": 0.9982,
      "step": 340
    },
    {
      "epoch": 0.3,
      "learning_rate": 4.929577464788733e-05,
      "loss": 1.0588,
      "step": 350
    },
    {
      "epoch": 0.3,
      "learning_rate": 4.992172824045085e-05,
      "loss": 1.0729,
      "step": 360
    },
    {
      "epoch": 0.31,
      "learning_rate": 4.9765184721352535e-05,
      "loss": 1.0962,
      "step": 370
    },
    {
      "epoch": 0.32,
      "learning_rate": 4.960864120225423e-05,
      "loss": 1.0493,
      "step": 380
    },
    {
      "epoch": 0.33,
      "learning_rate": 4.9452097683155916e-05,
      "loss": 0.928,
      "step": 390
    },
    {
      "epoch": 0.34,
      "learning_rate": 4.9295554164057614e-05,
      "loss": 0.907,
      "step": 400
    },
    {
      "epoch": 0.35,
      "learning_rate": 4.91390106449593e-05,
      "loss": 0.7965,
      "step": 410
    },
    {
      "epoch": 0.35,
      "learning_rate": 4.8982467125860995e-05,
      "loss": 0.9437,
      "step": 420
    },
    {
      "epoch": 0.36,
      "learning_rate": 4.882592360676268e-05,
      "loss": 0.9301,
      "step": 430
    },
    {
      "epoch": 0.37,
      "learning_rate": 4.866938008766438e-05,
      "loss": 0.9389,
      "step": 440
    },
    {
      "epoch": 0.38,
      "learning_rate": 4.851283656856606e-05,
      "loss": 0.8393,
      "step": 450
    },
    {
      "epoch": 0.39,
      "learning_rate": 4.835629304946775e-05,
      "loss": 0.83,
      "step": 460
    },
    {
      "epoch": 0.4,
      "learning_rate": 4.819974953036944e-05,
      "loss": 0.8122,
      "step": 470
    },
    {
      "epoch": 0.41,
      "learning_rate": 4.804320601127113e-05,
      "loss": 0.8234,
      "step": 480
    },
    {
      "epoch": 0.41,
      "learning_rate": 4.788666249217283e-05,
      "loss": 0.8109,
      "step": 490
    },
    {
      "epoch": 0.42,
      "learning_rate": 4.7730118973074514e-05,
      "loss": 0.7209,
      "step": 500
    },
    {
      "epoch": 0.43,
      "learning_rate": 4.757357545397621e-05,
      "loss": 0.9059,
      "step": 510
    },
    {
      "epoch": 0.44,
      "learning_rate": 4.7417031934877896e-05,
      "loss": 0.8376,
      "step": 520
    },
    {
      "epoch": 0.45,
      "learning_rate": 4.726048841577959e-05,
      "loss": 0.826,
      "step": 530
    },
    {
      "epoch": 0.46,
      "learning_rate": 4.710394489668128e-05,
      "loss": 0.8173,
      "step": 540
    },
    {
      "epoch": 0.46,
      "learning_rate": 4.6947401377582975e-05,
      "loss": 0.8283,
      "step": 550
    },
    {
      "epoch": 0.47,
      "learning_rate": 4.679085785848466e-05,
      "loss": 0.7782,
      "step": 560
    },
    {
      "epoch": 0.48,
      "learning_rate": 4.6634314339386356e-05,
      "loss": 0.8335,
      "step": 570
    },
    {
      "epoch": 0.49,
      "learning_rate": 4.647777082028804e-05,
      "loss": 0.9126,
      "step": 580
    },
    {
      "epoch": 0.5,
      "learning_rate": 4.632122730118974e-05,
      "loss": 0.7519,
      "step": 590
    },
    {
      "epoch": 0.51,
      "learning_rate": 4.616468378209142e-05,
      "loss": 0.7175,
      "step": 600
    },
    {
      "epoch": 0.52,
      "learning_rate": 4.600814026299312e-05,
      "loss": 0.7496,
      "step": 610
    },
    {
      "epoch": 0.52,
      "learning_rate": 4.58515967438948e-05,
      "loss": 0.7107,
      "step": 620
    },
    {
      "epoch": 0.53,
      "learning_rate": 4.56950532247965e-05,
      "loss": 0.7209,
      "step": 630
    },
    {
      "epoch": 0.54,
      "learning_rate": 4.5538509705698184e-05,
      "loss": 0.7283,
      "step": 640
    },
    {
      "epoch": 0.55,
      "learning_rate": 4.5381966186599875e-05,
      "loss": 0.7179,
      "step": 650
    },
    {
      "epoch": 0.56,
      "learning_rate": 4.5225422667501566e-05,
      "loss": 0.7398,
      "step": 660
    },
    {
      "epoch": 0.57,
      "learning_rate": 4.5068879148403256e-05,
      "loss": 0.6623,
      "step": 670
    },
    {
      "epoch": 0.57,
      "learning_rate": 4.491233562930495e-05,
      "loss": 0.6737,
      "step": 680
    },
    {
      "epoch": 0.58,
      "learning_rate": 4.475579211020664e-05,
      "loss": 0.694,
      "step": 690
    },
    {
      "epoch": 0.59,
      "learning_rate": 4.459924859110833e-05,
      "loss": 0.7095,
      "step": 700
    },
    {
      "epoch": 0.6,
      "learning_rate": 4.444270507201002e-05,
      "loss": 0.6975,
      "step": 710
    },
    {
      "epoch": 0.61,
      "learning_rate": 4.428616155291171e-05,
      "loss": 0.6883,
      "step": 720
    },
    {
      "epoch": 0.62,
      "learning_rate": 4.41296180338134e-05,
      "loss": 0.6585,
      "step": 730
    },
    {
      "epoch": 0.63,
      "learning_rate": 4.397307451471509e-05,
      "loss": 0.6692,
      "step": 740
    },
    {
      "epoch": 0.63,
      "learning_rate": 4.381653099561678e-05,
      "loss": 0.7412,
      "step": 750
    },
    {
      "epoch": 0.64,
      "learning_rate": 4.365998747651848e-05,
      "loss": 0.6666,
      "step": 760
    },
    {
      "epoch": 0.65,
      "learning_rate": 4.350344395742016e-05,
      "loss": 0.6931,
      "step": 770
    },
    {
      "epoch": 0.66,
      "learning_rate": 4.334690043832186e-05,
      "loss": 0.6701,
      "step": 780
    },
    {
      "epoch": 0.67,
      "learning_rate": 4.3190356919223545e-05,
      "loss": 0.728,
      "step": 790
    },
    {
      "epoch": 0.68,
      "learning_rate": 4.303381340012524e-05,
      "loss": 0.6723,
      "step": 800
    },
    {
      "epoch": 0.68,
      "learning_rate": 4.2877269881026926e-05,
      "loss": 0.6367,
      "step": 810
    },
    {
      "epoch": 0.69,
      "learning_rate": 4.2720726361928624e-05,
      "loss": 0.7133,
      "step": 820
    },
    {
      "epoch": 0.7,
      "learning_rate": 4.256418284283031e-05,
      "loss": 0.6657,
      "step": 830
    },
    {
      "epoch": 0.71,
      "learning_rate": 4.2407639323732e-05,
      "loss": 0.699,
      "step": 840
    },
    {
      "epoch": 0.72,
      "learning_rate": 4.225109580463369e-05,
      "loss": 0.6833,
      "step": 850
    },
    {
      "epoch": 0.73,
      "learning_rate": 4.209455228553538e-05,
      "loss": 0.6456,
      "step": 860
    },
    {
      "epoch": 0.73,
      "learning_rate": 4.193800876643707e-05,
      "loss": 0.6006,
      "step": 870
    },
    {
      "epoch": 0.74,
      "learning_rate": 4.178146524733876e-05,
      "loss": 0.6458,
      "step": 880
    },
    {
      "epoch": 0.75,
      "learning_rate": 4.162492172824045e-05,
      "loss": 0.6764,
      "step": 890
    },
    {
      "epoch": 0.76,
      "learning_rate": 4.146837820914214e-05,
      "loss": 0.5941,
      "step": 900
    },
    {
      "epoch": 0.77,
      "learning_rate": 4.131183469004383e-05,
      "loss": 0.6831,
      "step": 910
    },
    {
      "epoch": 0.78,
      "learning_rate": 4.1155291170945524e-05,
      "loss": 0.7029,
      "step": 920
    },
    {
      "epoch": 0.79,
      "learning_rate": 4.0998747651847215e-05,
      "loss": 0.706,
      "step": 930
    },
    {
      "epoch": 0.79,
      "learning_rate": 4.0842204132748905e-05,
      "loss": 0.5995,
      "step": 940
    },
    {
      "epoch": 0.8,
      "learning_rate": 4.0685660613650596e-05,
      "loss": 0.6364,
      "step": 950
    },
    {
      "epoch": 0.81,
      "learning_rate": 4.052911709455229e-05,
      "loss": 0.5712,
      "step": 960
    },
    {
      "epoch": 0.82,
      "learning_rate": 4.037257357545398e-05,
      "loss": 0.776,
      "step": 970
    },
    {
      "epoch": 0.83,
      "learning_rate": 4.021603005635567e-05,
      "loss": 0.5841,
      "step": 980
    },
    {
      "epoch": 0.84,
      "learning_rate": 4.005948653725736e-05,
      "loss": 0.6662,
      "step": 990
    },
    {
      "epoch": 0.84,
      "learning_rate": 3.990294301815905e-05,
      "loss": 0.6862,
      "step": 1000
    },
    {
      "epoch": 0.85,
      "learning_rate": 3.974639949906074e-05,
      "loss": 0.6664,
      "step": 1010
    },
    {
      "epoch": 0.86,
      "learning_rate": 3.958985597996243e-05,
      "loss": 0.6962,
      "step": 1020
    },
    {
      "epoch": 0.87,
      "learning_rate": 3.943331246086412e-05,
      "loss": 0.5786,
      "step": 1030
    },
    {
      "epoch": 0.88,
      "learning_rate": 3.927676894176581e-05,
      "loss": 0.6521,
      "step": 1040
    },
    {
      "epoch": 0.89,
      "learning_rate": 3.91202254226675e-05,
      "loss": 0.6417,
      "step": 1050
    },
    {
      "epoch": 0.9,
      "learning_rate": 3.8963681903569194e-05,
      "loss": 0.6313,
      "step": 1060
    },
    {
      "epoch": 0.9,
      "learning_rate": 3.8807138384470884e-05,
      "loss": 0.5653,
      "step": 1070
    },
    {
      "epoch": 0.91,
      "learning_rate": 3.8650594865372575e-05,
      "loss": 0.6452,
      "step": 1080
    },
    {
      "epoch": 0.92,
      "learning_rate": 3.8494051346274266e-05,
      "loss": 0.6581,
      "step": 1090
    },
    {
      "epoch": 0.93,
      "learning_rate": 3.8337507827175957e-05,
      "loss": 0.5864,
      "step": 1100
    },
    {
      "epoch": 0.94,
      "learning_rate": 3.818096430807765e-05,
      "loss": 0.6943,
      "step": 1110
    },
    {
      "epoch": 0.95,
      "learning_rate": 3.802442078897934e-05,
      "loss": 0.5743,
      "step": 1120
    },
    {
      "epoch": 0.95,
      "learning_rate": 3.786787726988103e-05,
      "loss": 0.6748,
      "step": 1130
    },
    {
      "epoch": 0.96,
      "learning_rate": 3.771133375078272e-05,
      "loss": 0.6692,
      "step": 1140
    },
    {
      "epoch": 0.97,
      "learning_rate": 3.755479023168441e-05,
      "loss": 0.5344,
      "step": 1150
    },
    {
      "epoch": 0.98,
      "learning_rate": 3.73982467125861e-05,
      "loss": 0.5794,
      "step": 1160
    },
    {
      "epoch": 0.99,
      "learning_rate": 3.724170319348779e-05,
      "loss": 0.6004,
      "step": 1170
    },
    {
      "epoch": 1.0,
      "learning_rate": 3.708515967438948e-05,
      "loss": 0.5704,
      "step": 1180
    },
    {
      "epoch": 1.0,
      "eval_accuracy": 0.8905346534653465,
      "eval_loss": 0.3809664249420166,
      "eval_runtime": 524.3821,
      "eval_samples_per_second": 48.152,
      "eval_steps_per_second": 3.011,
      "step": 1183
    },
    {
      "epoch": 1.01,
      "learning_rate": 3.692861615529117e-05,
      "loss": 0.4233,
      "step": 1190
    },
    {
      "epoch": 1.01,
      "learning_rate": 3.6772072636192864e-05,
      "loss": 0.5106,
      "step": 1200
    },
    {
      "epoch": 1.02,
      "learning_rate": 3.6615529117094554e-05,
      "loss": 0.4044,
      "step": 1210
    },
    {
      "epoch": 1.03,
      "learning_rate": 3.6458985597996245e-05,
      "loss": 0.4241,
      "step": 1220
    },
    {
      "epoch": 1.04,
      "learning_rate": 3.6302442078897936e-05,
      "loss": 0.5049,
      "step": 1230
    },
    {
      "epoch": 1.05,
      "learning_rate": 3.6145898559799626e-05,
      "loss": 0.3672,
      "step": 1240
    },
    {
      "epoch": 1.06,
      "learning_rate": 3.598935504070132e-05,
      "loss": 0.3603,
      "step": 1250
    },
    {
      "epoch": 1.07,
      "learning_rate": 3.583281152160301e-05,
      "loss": 0.422,
      "step": 1260
    },
    {
      "epoch": 1.07,
      "learning_rate": 3.56762680025047e-05,
      "loss": 0.369,
      "step": 1270
    },
    {
      "epoch": 1.08,
      "learning_rate": 3.551972448340639e-05,
      "loss": 0.4055,
      "step": 1280
    },
    {
      "epoch": 1.09,
      "learning_rate": 3.536318096430808e-05,
      "loss": 0.3909,
      "step": 1290
    },
    {
      "epoch": 1.1,
      "learning_rate": 3.520663744520977e-05,
      "loss": 0.4086,
      "step": 1300
    },
    {
      "epoch": 1.11,
      "learning_rate": 3.505009392611146e-05,
      "loss": 0.3994,
      "step": 1310
    },
    {
      "epoch": 1.12,
      "learning_rate": 3.489355040701315e-05,
      "loss": 0.4539,
      "step": 1320
    },
    {
      "epoch": 1.12,
      "learning_rate": 3.473700688791484e-05,
      "loss": 0.3762,
      "step": 1330
    },
    {
      "epoch": 1.13,
      "learning_rate": 3.4580463368816533e-05,
      "loss": 0.3847,
      "step": 1340
    },
    {
      "epoch": 1.14,
      "learning_rate": 3.4423919849718224e-05,
      "loss": 0.3887,
      "step": 1350
    },
    {
      "epoch": 1.15,
      "learning_rate": 3.4267376330619915e-05,
      "loss": 0.4879,
      "step": 1360
    },
    {
      "epoch": 1.16,
      "learning_rate": 3.4110832811521606e-05,
      "loss": 0.3502,
      "step": 1370
    },
    {
      "epoch": 1.17,
      "learning_rate": 3.3954289292423296e-05,
      "loss": 0.3826,
      "step": 1380
    },
    {
      "epoch": 1.17,
      "learning_rate": 3.379774577332499e-05,
      "loss": 0.4537,
      "step": 1390
    },
    {
      "epoch": 1.18,
      "learning_rate": 3.364120225422668e-05,
      "loss": 0.4367,
      "step": 1400
    },
    {
      "epoch": 1.19,
      "learning_rate": 3.348465873512837e-05,
      "loss": 0.3518,
      "step": 1410
    },
    {
      "epoch": 1.2,
      "learning_rate": 3.332811521603006e-05,
      "loss": 0.3996,
      "step": 1420
    },
    {
      "epoch": 1.21,
      "learning_rate": 3.317157169693175e-05,
      "loss": 0.2905,
      "step": 1430
    },
    {
      "epoch": 1.22,
      "learning_rate": 3.301502817783344e-05,
      "loss": 0.3632,
      "step": 1440
    },
    {
      "epoch": 1.23,
      "learning_rate": 3.285848465873513e-05,
      "loss": 0.3949,
      "step": 1450
    },
    {
      "epoch": 1.23,
      "learning_rate": 3.270194113963682e-05,
      "loss": 0.3714,
      "step": 1460
    },
    {
      "epoch": 1.24,
      "learning_rate": 3.2545397620538506e-05,
      "loss": 0.3842,
      "step": 1470
    },
    {
      "epoch": 1.25,
      "learning_rate": 3.23888541014402e-05,
      "loss": 0.371,
      "step": 1480
    },
    {
      "epoch": 1.26,
      "learning_rate": 3.223231058234189e-05,
      "loss": 0.4382,
      "step": 1490
    },
    {
      "epoch": 1.27,
      "learning_rate": 3.2075767063243585e-05,
      "loss": 0.3941,
      "step": 1500
    },
    {
      "epoch": 1.28,
      "learning_rate": 3.191922354414527e-05,
      "loss": 0.4186,
      "step": 1510
    },
    {
      "epoch": 1.28,
      "learning_rate": 3.1762680025046966e-05,
      "loss": 0.3973,
      "step": 1520
    },
    {
      "epoch": 1.29,
      "learning_rate": 3.160613650594865e-05,
      "loss": 0.3901,
      "step": 1530
    },
    {
      "epoch": 1.3,
      "learning_rate": 3.144959298685035e-05,
      "loss": 0.4067,
      "step": 1540
    },
    {
      "epoch": 1.31,
      "learning_rate": 3.129304946775204e-05,
      "loss": 0.4196,
      "step": 1550
    },
    {
      "epoch": 1.32,
      "learning_rate": 3.113650594865373e-05,
      "loss": 0.3724,
      "step": 1560
    },
    {
      "epoch": 1.33,
      "learning_rate": 3.097996242955542e-05,
      "loss": 0.4579,
      "step": 1570
    },
    {
      "epoch": 1.34,
      "learning_rate": 3.082341891045711e-05,
      "loss": 0.4353,
      "step": 1580
    },
    {
      "epoch": 1.34,
      "learning_rate": 3.06668753913588e-05,
      "loss": 0.4122,
      "step": 1590
    },
    {
      "epoch": 1.35,
      "learning_rate": 3.051033187226049e-05,
      "loss": 0.364,
      "step": 1600
    },
    {
      "epoch": 1.36,
      "learning_rate": 3.0353788353162182e-05,
      "loss": 0.4484,
      "step": 1610
    },
    {
      "epoch": 1.37,
      "learning_rate": 3.019724483406387e-05,
      "loss": 0.3375,
      "step": 1620
    },
    {
      "epoch": 1.38,
      "learning_rate": 3.0040701314965564e-05,
      "loss": 0.5085,
      "step": 1630
    },
    {
      "epoch": 1.39,
      "learning_rate": 2.988415779586725e-05,
      "loss": 0.3881,
      "step": 1640
    },
    {
      "epoch": 1.39,
      "learning_rate": 2.9727614276768945e-05,
      "loss": 0.4138,
      "step": 1650
    },
    {
      "epoch": 1.4,
      "learning_rate": 2.9571070757670633e-05,
      "loss": 0.3907,
      "step": 1660
    },
    {
      "epoch": 1.41,
      "learning_rate": 2.9414527238572327e-05,
      "loss": 0.4817,
      "step": 1670
    },
    {
      "epoch": 1.42,
      "learning_rate": 2.9257983719474014e-05,
      "loss": 0.4401,
      "step": 1680
    },
    {
      "epoch": 1.43,
      "learning_rate": 2.9101440200375708e-05,
      "loss": 0.3135,
      "step": 1690
    },
    {
      "epoch": 1.44,
      "learning_rate": 2.8944896681277395e-05,
      "loss": 0.4107,
      "step": 1700
    },
    {
      "epoch": 1.45,
      "learning_rate": 2.878835316217909e-05,
      "loss": 0.3546,
      "step": 1710
    },
    {
      "epoch": 1.45,
      "learning_rate": 2.8631809643080777e-05,
      "loss": 0.4396,
      "step": 1720
    },
    {
      "epoch": 1.46,
      "learning_rate": 2.8475266123982468e-05,
      "loss": 0.3604,
      "step": 1730
    },
    {
      "epoch": 1.47,
      "learning_rate": 2.8318722604884158e-05,
      "loss": 0.4266,
      "step": 1740
    },
    {
      "epoch": 1.48,
      "learning_rate": 2.816217908578585e-05,
      "loss": 0.4222,
      "step": 1750
    },
    {
      "epoch": 1.49,
      "learning_rate": 2.800563556668754e-05,
      "loss": 0.4185,
      "step": 1760
    },
    {
      "epoch": 1.5,
      "learning_rate": 2.784909204758923e-05,
      "loss": 0.3434,
      "step": 1770
    },
    {
      "epoch": 1.5,
      "learning_rate": 2.7692548528490918e-05,
      "loss": 0.3645,
      "step": 1780
    },
    {
      "epoch": 1.51,
      "learning_rate": 2.7536005009392612e-05,
      "loss": 0.4132,
      "step": 1790
    },
    {
      "epoch": 1.52,
      "learning_rate": 2.7379461490294306e-05,
      "loss": 0.3973,
      "step": 1800
    },
    {
      "epoch": 1.53,
      "learning_rate": 2.7222917971195993e-05,
      "loss": 0.3852,
      "step": 1810
    },
    {
      "epoch": 1.54,
      "learning_rate": 2.7066374452097687e-05,
      "loss": 0.4473,
      "step": 1820
    },
    {
      "epoch": 1.55,
      "learning_rate": 2.6909830932999375e-05,
      "loss": 0.3972,
      "step": 1830
    },
    {
      "epoch": 1.56,
      "learning_rate": 2.675328741390107e-05,
      "loss": 0.293,
      "step": 1840
    },
    {
      "epoch": 1.56,
      "learning_rate": 2.6596743894802756e-05,
      "loss": 0.3879,
      "step": 1850
    },
    {
      "epoch": 1.57,
      "learning_rate": 2.644020037570445e-05,
      "loss": 0.3674,
      "step": 1860
    },
    {
      "epoch": 1.58,
      "learning_rate": 2.6283656856606137e-05,
      "loss": 0.3288,
      "step": 1870
    },
    {
      "epoch": 1.59,
      "learning_rate": 2.612711333750783e-05,
      "loss": 0.4182,
      "step": 1880
    },
    {
      "epoch": 1.6,
      "learning_rate": 2.597056981840952e-05,
      "loss": 0.3469,
      "step": 1890
    },
    {
      "epoch": 1.61,
      "learning_rate": 2.5814026299311213e-05,
      "loss": 0.3413,
      "step": 1900
    },
    {
      "epoch": 1.61,
      "learning_rate": 2.56574827802129e-05,
      "loss": 0.3579,
      "step": 1910
    },
    {
      "epoch": 1.62,
      "learning_rate": 2.550093926111459e-05,
      "loss": 0.3964,
      "step": 1920
    },
    {
      "epoch": 1.63,
      "learning_rate": 2.534439574201628e-05,
      "loss": 0.4302,
      "step": 1930
    },
    {
      "epoch": 1.64,
      "learning_rate": 2.5187852222917972e-05,
      "loss": 0.3402,
      "step": 1940
    },
    {
      "epoch": 1.65,
      "learning_rate": 2.5031308703819663e-05,
      "loss": 0.4623,
      "step": 1950
    },
    {
      "epoch": 1.66,
      "learning_rate": 2.4874765184721354e-05,
      "loss": 0.3985,
      "step": 1960
    },
    {
      "epoch": 1.66,
      "learning_rate": 2.4718221665623044e-05,
      "loss": 0.297,
      "step": 1970
    },
    {
      "epoch": 1.67,
      "learning_rate": 2.4561678146524735e-05,
      "loss": 0.4284,
      "step": 1980
    },
    {
      "epoch": 1.68,
      "learning_rate": 2.4405134627426426e-05,
      "loss": 0.346,
      "step": 1990
    },
    {
      "epoch": 1.69,
      "learning_rate": 2.4248591108328117e-05,
      "loss": 0.3914,
      "step": 2000
    },
    {
      "epoch": 1.7,
      "learning_rate": 2.4092047589229807e-05,
      "loss": 0.3653,
      "step": 2010
    },
    {
      "epoch": 1.71,
      "learning_rate": 2.3935504070131498e-05,
      "loss": 0.3716,
      "step": 2020
    },
    {
      "epoch": 1.72,
      "learning_rate": 2.377896055103319e-05,
      "loss": 0.3226,
      "step": 2030
    },
    {
      "epoch": 1.72,
      "learning_rate": 2.362241703193488e-05,
      "loss": 0.3124,
      "step": 2040
    },
    {
      "epoch": 1.73,
      "learning_rate": 2.346587351283657e-05,
      "loss": 0.3761,
      "step": 2050
    },
    {
      "epoch": 1.74,
      "learning_rate": 2.330932999373826e-05,
      "loss": 0.3992,
      "step": 2060
    },
    {
      "epoch": 1.75,
      "learning_rate": 2.315278647463995e-05,
      "loss": 0.3198,
      "step": 2070
    },
    {
      "epoch": 1.76,
      "learning_rate": 2.2996242955541642e-05,
      "loss": 0.3836,
      "step": 2080
    },
    {
      "epoch": 1.77,
      "learning_rate": 2.2839699436443333e-05,
      "loss": 0.3684,
      "step": 2090
    },
    {
      "epoch": 1.77,
      "learning_rate": 2.2683155917345024e-05,
      "loss": 0.4109,
      "step": 2100
    },
    {
      "epoch": 1.78,
      "learning_rate": 2.2526612398246714e-05,
      "loss": 0.3248,
      "step": 2110
    },
    {
      "epoch": 1.79,
      "learning_rate": 2.2370068879148405e-05,
      "loss": 0.3979,
      "step": 2120
    },
    {
      "epoch": 1.8,
      "learning_rate": 2.2213525360050096e-05,
      "loss": 0.3678,
      "step": 2130
    },
    {
      "epoch": 1.81,
      "learning_rate": 2.2056981840951786e-05,
      "loss": 0.4149,
      "step": 2140
    },
    {
      "epoch": 1.82,
      "learning_rate": 2.1900438321853474e-05,
      "loss": 0.3519,
      "step": 2150
    },
    {
      "epoch": 1.83,
      "learning_rate": 2.1743894802755164e-05,
      "loss": 0.4255,
      "step": 2160
    },
    {
      "epoch": 1.83,
      "learning_rate": 2.1587351283656855e-05,
      "loss": 0.3771,
      "step": 2170
    },
    {
      "epoch": 1.84,
      "learning_rate": 2.1430807764558546e-05,
      "loss": 0.4284,
      "step": 2180
    },
    {
      "epoch": 1.85,
      "learning_rate": 2.127426424546024e-05,
      "loss": 0.3565,
      "step": 2190
    },
    {
      "epoch": 1.86,
      "learning_rate": 2.111772072636193e-05,
      "loss": 0.3508,
      "step": 2200
    },
    {
      "epoch": 1.87,
      "learning_rate": 2.096117720726362e-05,
      "loss": 0.4182,
      "step": 2210
    },
    {
      "epoch": 1.88,
      "learning_rate": 2.0804633688165312e-05,
      "loss": 0.3894,
      "step": 2220
    },
    {
      "epoch": 1.88,
      "learning_rate": 2.0648090169067003e-05,
      "loss": 0.3566,
      "step": 2230
    },
    {
      "epoch": 1.89,
      "learning_rate": 2.0491546649968694e-05,
      "loss": 0.3442,
      "step": 2240
    },
    {
      "epoch": 1.9,
      "learning_rate": 2.0335003130870384e-05,
      "loss": 0.3525,
      "step": 2250
    },
    {
      "epoch": 1.91,
      "learning_rate": 2.0178459611772075e-05,
      "loss": 0.3184,
      "step": 2260
    },
    {
      "epoch": 1.92,
      "learning_rate": 2.0021916092673766e-05,
      "loss": 0.344,
      "step": 2270
    },
    {
      "epoch": 1.93,
      "learning_rate": 1.9865372573575456e-05,
      "loss": 0.4054,
      "step": 2280
    },
    {
      "epoch": 1.94,
      "learning_rate": 1.9708829054477147e-05,
      "loss": 0.313,
      "step": 2290
    },
    {
      "epoch": 1.94,
      "learning_rate": 1.9552285535378838e-05,
      "loss": 0.3411,
      "step": 2300
    },
    {
      "epoch": 1.95,
      "learning_rate": 1.939574201628053e-05,
      "loss": 0.3272,
      "step": 2310
    },
    {
      "epoch": 1.96,
      "learning_rate": 1.923919849718222e-05,
      "loss": 0.3692,
      "step": 2320
    },
    {
      "epoch": 1.97,
      "learning_rate": 1.9082654978083906e-05,
      "loss": 0.4316,
      "step": 2330
    },
    {
      "epoch": 1.98,
      "learning_rate": 1.8926111458985597e-05,
      "loss": 0.3872,
      "step": 2340
    },
    {
      "epoch": 1.99,
      "learning_rate": 1.8769567939887288e-05,
      "loss": 0.2391,
      "step": 2350
    },
    {
      "epoch": 1.99,
      "learning_rate": 1.861302442078898e-05,
      "loss": 0.307,
      "step": 2360
    },
    {
      "epoch": 2.0,
      "eval_accuracy": 0.9148514851485149,
      "eval_loss": 0.29710039496421814,
      "eval_runtime": 522.4602,
      "eval_samples_per_second": 48.329,
      "eval_steps_per_second": 3.022,
      "step": 2366
    },
    {
      "epoch": 2.0,
      "learning_rate": 1.845648090169067e-05,
      "loss": 0.3321,
      "step": 2370
    },
    {
      "epoch": 2.01,
      "learning_rate": 1.829993738259236e-05,
      "loss": 0.2162,
      "step": 2380
    },
    {
      "epoch": 2.02,
      "learning_rate": 1.814339386349405e-05,
      "loss": 0.1984,
      "step": 2390
    },
    {
      "epoch": 2.03,
      "learning_rate": 1.798685034439574e-05,
      "loss": 0.1896,
      "step": 2400
    },
    {
      "epoch": 2.04,
      "learning_rate": 1.7830306825297432e-05,
      "loss": 0.1757,
      "step": 2410
    },
    {
      "epoch": 2.05,
      "learning_rate": 1.7673763306199123e-05,
      "loss": 0.1396,
      "step": 2420
    },
    {
      "epoch": 2.05,
      "learning_rate": 1.7517219787100814e-05,
      "loss": 0.1653,
      "step": 2430
    },
    {
      "epoch": 2.06,
      "learning_rate": 1.7360676268002504e-05,
      "loss": 0.1857,
      "step": 2440
    },
    {
      "epoch": 2.07,
      "learning_rate": 1.7204132748904198e-05,
      "loss": 0.2153,
      "step": 2450
    },
    {
      "epoch": 2.08,
      "learning_rate": 1.704758922980589e-05,
      "loss": 0.1996,
      "step": 2460
    },
    {
      "epoch": 2.09,
      "learning_rate": 1.689104571070758e-05,
      "loss": 0.2101,
      "step": 2470
    },
    {
      "epoch": 2.1,
      "learning_rate": 1.673450219160927e-05,
      "loss": 0.1711,
      "step": 2480
    },
    {
      "epoch": 2.1,
      "learning_rate": 1.657795867251096e-05,
      "loss": 0.2192,
      "step": 2490
    },
    {
      "epoch": 2.11,
      "learning_rate": 1.6421415153412652e-05,
      "loss": 0.2145,
      "step": 2500
    },
    {
      "epoch": 2.12,
      "learning_rate": 1.6264871634314343e-05,
      "loss": 0.1849,
      "step": 2510
    },
    {
      "epoch": 2.13,
      "learning_rate": 1.610832811521603e-05,
      "loss": 0.1968,
      "step": 2520
    },
    {
      "epoch": 2.14,
      "learning_rate": 1.595178459611772e-05,
      "loss": 0.2043,
      "step": 2530
    },
    {
      "epoch": 2.15,
      "learning_rate": 1.579524107701941e-05,
      "loss": 0.1686,
      "step": 2540
    },
    {
      "epoch": 2.16,
      "learning_rate": 1.5638697557921102e-05,
      "loss": 0.2046,
      "step": 2550
    },
    {
      "epoch": 2.16,
      "learning_rate": 1.5482154038822793e-05,
      "loss": 0.2238,
      "step": 2560
    },
    {
      "epoch": 2.17,
      "learning_rate": 1.5325610519724483e-05,
      "loss": 0.2334,
      "step": 2570
    },
    {
      "epoch": 2.18,
      "learning_rate": 1.5169067000626174e-05,
      "loss": 0.2326,
      "step": 2580
    },
    {
      "epoch": 2.19,
      "learning_rate": 1.5012523481527865e-05,
      "loss": 0.2155,
      "step": 2590
    },
    {
      "epoch": 2.2,
      "learning_rate": 1.4855979962429555e-05,
      "loss": 0.1822,
      "step": 2600
    },
    {
      "epoch": 2.21,
      "learning_rate": 1.4699436443331246e-05,
      "loss": 0.1817,
      "step": 2610
    },
    {
      "epoch": 2.21,
      "learning_rate": 1.4542892924232937e-05,
      "loss": 0.2352,
      "step": 2620
    },
    {
      "epoch": 2.22,
      "learning_rate": 1.4386349405134628e-05,
      "loss": 0.1988,
      "step": 2630
    },
    {
      "epoch": 2.23,
      "learning_rate": 1.4229805886036318e-05,
      "loss": 0.2568,
      "step": 2640
    },
    {
      "epoch": 2.24,
      "learning_rate": 1.4073262366938009e-05,
      "loss": 0.1821,
      "step": 2650
    },
    {
      "epoch": 2.25,
      "learning_rate": 1.39167188478397e-05,
      "loss": 0.1966,
      "step": 2660
    },
    {
      "epoch": 2.26,
      "learning_rate": 1.376017532874139e-05,
      "loss": 0.1798,
      "step": 2670
    },
    {
      "epoch": 2.27,
      "learning_rate": 1.3603631809643081e-05,
      "loss": 0.2116,
      "step": 2680
    },
    {
      "epoch": 2.27,
      "learning_rate": 1.344708829054477e-05,
      "loss": 0.2097,
      "step": 2690
    },
    {
      "epoch": 2.28,
      "learning_rate": 1.329054477144646e-05,
      "loss": 0.2196,
      "step": 2700
    },
    {
      "epoch": 2.29,
      "learning_rate": 1.3134001252348152e-05,
      "loss": 0.2156,
      "step": 2710
    },
    {
      "epoch": 2.3,
      "learning_rate": 1.2977457733249846e-05,
      "loss": 0.2004,
      "step": 2720
    },
    {
      "epoch": 2.31,
      "learning_rate": 1.2820914214151536e-05,
      "loss": 0.1982,
      "step": 2730
    },
    {
      "epoch": 2.32,
      "learning_rate": 1.2664370695053227e-05,
      "loss": 0.2343,
      "step": 2740
    },
    {
      "epoch": 2.32,
      "learning_rate": 1.2507827175954918e-05,
      "loss": 0.1886,
      "step": 2750
    },
    {
      "epoch": 2.33,
      "learning_rate": 1.2351283656856607e-05,
      "loss": 0.1739,
      "step": 2760
    },
    {
      "epoch": 2.34,
      "learning_rate": 1.2194740137758297e-05,
      "loss": 0.1748,
      "step": 2770
    },
    {
      "epoch": 2.35,
      "learning_rate": 1.2038196618659988e-05,
      "loss": 0.1591,
      "step": 2780
    },
    {
      "epoch": 2.36,
      "learning_rate": 1.1881653099561679e-05,
      "loss": 0.1822,
      "step": 2790
    },
    {
      "epoch": 2.37,
      "learning_rate": 1.172510958046337e-05,
      "loss": 0.2175,
      "step": 2800
    },
    {
      "epoch": 2.38,
      "learning_rate": 1.156856606136506e-05,
      "loss": 0.1847,
      "step": 2810
    },
    {
      "epoch": 2.38,
      "learning_rate": 1.1412022542266751e-05,
      "loss": 0.2193,
      "step": 2820
    },
    {
      "epoch": 2.39,
      "learning_rate": 1.1255479023168442e-05,
      "loss": 0.2007,
      "step": 2830
    },
    {
      "epoch": 2.4,
      "learning_rate": 1.1098935504070132e-05,
      "loss": 0.1632,
      "step": 2840
    },
    {
      "epoch": 2.41,
      "learning_rate": 1.0942391984971823e-05,
      "loss": 0.2413,
      "step": 2850
    },
    {
      "epoch": 2.42,
      "learning_rate": 1.0785848465873514e-05,
      "loss": 0.1942,
      "step": 2860
    },
    {
      "epoch": 2.43,
      "learning_rate": 1.0629304946775205e-05,
      "loss": 0.172,
      "step": 2870
    },
    {
      "epoch": 2.43,
      "learning_rate": 1.0472761427676894e-05,
      "loss": 0.1765,
      "step": 2880
    },
    {
      "epoch": 2.44,
      "learning_rate": 1.0316217908578584e-05,
      "loss": 0.1792,
      "step": 2890
    },
    {
      "epoch": 2.45,
      "learning_rate": 1.0159674389480275e-05,
      "loss": 0.1938,
      "step": 2900
    },
    {
      "epoch": 2.46,
      "learning_rate": 1.0003130870381967e-05,
      "loss": 0.1553,
      "step": 2910
    },
    {
      "epoch": 2.47,
      "learning_rate": 9.846587351283658e-06,
      "loss": 0.1649,
      "step": 2920
    },
    {
      "epoch": 2.48,
      "learning_rate": 9.690043832185349e-06,
      "loss": 0.187,
      "step": 2930
    },
    {
      "epoch": 2.48,
      "learning_rate": 9.53350031308704e-06,
      "loss": 0.2572,
      "step": 2940
    },
    {
      "epoch": 2.49,
      "learning_rate": 9.37695679398873e-06,
      "loss": 0.1554,
      "step": 2950
    },
    {
      "epoch": 2.5,
      "learning_rate": 9.220413274890421e-06,
      "loss": 0.1555,
      "step": 2960
    },
    {
      "epoch": 2.51,
      "learning_rate": 9.06386975579211e-06,
      "loss": 0.1896,
      "step": 2970
    },
    {
      "epoch": 2.52,
      "learning_rate": 8.9073262366938e-06,
      "loss": 0.2001,
      "step": 2980
    },
    {
      "epoch": 2.53,
      "learning_rate": 8.750782717595491e-06,
      "loss": 0.1689,
      "step": 2990
    },
    {
      "epoch": 2.54,
      "learning_rate": 8.594239198497182e-06,
      "loss": 0.2367,
      "step": 3000
    },
    {
      "epoch": 2.54,
      "learning_rate": 8.437695679398873e-06,
      "loss": 0.22,
      "step": 3010
    },
    {
      "epoch": 2.55,
      "learning_rate": 8.281152160300563e-06,
      "loss": 0.1816,
      "step": 3020
    },
    {
      "epoch": 2.56,
      "learning_rate": 8.124608641202254e-06,
      "loss": 0.1556,
      "step": 3030
    },
    {
      "epoch": 2.57,
      "learning_rate": 7.968065122103947e-06,
      "loss": 0.1811,
      "step": 3040
    },
    {
      "epoch": 2.58,
      "learning_rate": 7.811521603005637e-06,
      "loss": 0.1788,
      "step": 3050
    },
    {
      "epoch": 2.59,
      "learning_rate": 7.654978083907326e-06,
      "loss": 0.1935,
      "step": 3060
    },
    {
      "epoch": 2.59,
      "learning_rate": 7.498434564809018e-06,
      "loss": 0.1686,
      "step": 3070
    },
    {
      "epoch": 2.6,
      "learning_rate": 7.3418910457107085e-06,
      "loss": 0.204,
      "step": 3080
    },
    {
      "epoch": 2.61,
      "learning_rate": 7.185347526612398e-06,
      "loss": 0.1357,
      "step": 3090
    },
    {
      "epoch": 2.62,
      "learning_rate": 7.028804007514089e-06,
      "loss": 0.1574,
      "step": 3100
    },
    {
      "epoch": 2.63,
      "learning_rate": 6.87226048841578e-06,
      "loss": 0.1891,
      "step": 3110
    },
    {
      "epoch": 2.64,
      "learning_rate": 6.7157169693174705e-06,
      "loss": 0.1924,
      "step": 3120
    },
    {
      "epoch": 2.65,
      "learning_rate": 6.559173450219161e-06,
      "loss": 0.1744,
      "step": 3130
    },
    {
      "epoch": 2.65,
      "learning_rate": 6.402629931120852e-06,
      "loss": 0.1921,
      "step": 3140
    },
    {
      "epoch": 2.66,
      "learning_rate": 6.2460864120225426e-06,
      "loss": 0.1213,
      "step": 3150
    },
    {
      "epoch": 2.67,
      "learning_rate": 6.089542892924233e-06,
      "loss": 0.1767,
      "step": 3160
    },
    {
      "epoch": 2.68,
      "learning_rate": 5.932999373825924e-06,
      "loss": 0.2516,
      "step": 3170
    },
    {
      "epoch": 2.69,
      "learning_rate": 5.776455854727615e-06,
      "loss": 0.1785,
      "step": 3180
    },
    {
      "epoch": 2.7,
      "learning_rate": 5.619912335629305e-06,
      "loss": 0.1522,
      "step": 3190
    },
    {
      "epoch": 2.7,
      "learning_rate": 5.463368816530995e-06,
      "loss": 0.1759,
      "step": 3200
    },
    {
      "epoch": 2.71,
      "learning_rate": 5.306825297432687e-06,
      "loss": 0.226,
      "step": 3210
    },
    {
      "epoch": 2.72,
      "learning_rate": 5.1502817783343775e-06,
      "loss": 0.1422,
      "step": 3220
    },
    {
      "epoch": 2.73,
      "learning_rate": 4.993738259236068e-06,
      "loss": 0.1358,
      "step": 3230
    },
    {
      "epoch": 2.74,
      "learning_rate": 4.837194740137758e-06,
      "loss": 0.2554,
      "step": 3240
    },
    {
      "epoch": 2.75,
      "learning_rate": 4.680651221039449e-06,
      "loss": 0.1617,
      "step": 3250
    },
    {
      "epoch": 2.76,
      "learning_rate": 4.5241077019411395e-06,
      "loss": 0.1635,
      "step": 3260
    },
    {
      "epoch": 2.76,
      "learning_rate": 4.367564182842831e-06,
      "loss": 0.194,
      "step": 3270
    },
    {
      "epoch": 2.77,
      "learning_rate": 4.211020663744522e-06,
      "loss": 0.1859,
      "step": 3280
    },
    {
      "epoch": 2.78,
      "learning_rate": 4.054477144646212e-06,
      "loss": 0.2416,
      "step": 3290
    },
    {
      "epoch": 2.79,
      "learning_rate": 3.897933625547902e-06,
      "loss": 0.1558,
      "step": 3300
    },
    {
      "epoch": 2.8,
      "learning_rate": 3.741390106449593e-06,
      "loss": 0.1236,
      "step": 3310
    },
    {
      "epoch": 2.81,
      "learning_rate": 3.5848465873512837e-06,
      "loss": 0.1467,
      "step": 3320
    },
    {
      "epoch": 2.81,
      "learning_rate": 3.428303068252974e-06,
      "loss": 0.2163,
      "step": 3330
    },
    {
      "epoch": 2.82,
      "learning_rate": 3.2717595491546655e-06,
      "loss": 0.2455,
      "step": 3340
    },
    {
      "epoch": 2.83,
      "learning_rate": 3.115216030056356e-06,
      "loss": 0.1651,
      "step": 3350
    },
    {
      "epoch": 2.84,
      "learning_rate": 2.9586725109580465e-06,
      "loss": 0.164,
      "step": 3360
    },
    {
      "epoch": 2.85,
      "learning_rate": 2.8021289918597372e-06,
      "loss": 0.1701,
      "step": 3370
    },
    {
      "epoch": 2.86,
      "learning_rate": 2.6455854727614275e-06,
      "loss": 0.1292,
      "step": 3380
    },
    {
      "epoch": 2.87,
      "learning_rate": 2.4890419536631186e-06,
      "loss": 0.1596,
      "step": 3390
    },
    {
      "epoch": 2.87,
      "learning_rate": 2.332498434564809e-06,
      "loss": 0.2095,
      "step": 3400
    },
    {
      "epoch": 2.88,
      "learning_rate": 2.1759549154664996e-06,
      "loss": 0.1198,
      "step": 3410
    },
    {
      "epoch": 2.89,
      "learning_rate": 2.0194113963681908e-06,
      "loss": 0.1309,
      "step": 3420
    },
    {
      "epoch": 2.9,
      "learning_rate": 1.862867877269881e-06,
      "loss": 0.224,
      "step": 3430
    },
    {
      "epoch": 2.91,
      "learning_rate": 1.7063243581715717e-06,
      "loss": 0.2464,
      "step": 3440
    },
    {
      "epoch": 2.92,
      "learning_rate": 1.5497808390732624e-06,
      "loss": 0.1782,
      "step": 3450
    },
    {
      "epoch": 2.92,
      "learning_rate": 1.3932373199749531e-06,
      "loss": 0.1605,
      "step": 3460
    },
    {
      "epoch": 2.93,
      "learning_rate": 1.2366938008766436e-06,
      "loss": 0.1797,
      "step": 3470
    },
    {
      "epoch": 2.94,
      "learning_rate": 1.0801502817783343e-06,
      "loss": 0.1734,
      "step": 3480
    },
    {
      "epoch": 2.95,
      "learning_rate": 9.236067626800252e-07,
      "loss": 0.1678,
      "step": 3490
    },
    {
      "epoch": 2.96,
      "learning_rate": 7.670632435817158e-07,
      "loss": 0.1809,
      "step": 3500
    },
    {
      "epoch": 2.97,
      "learning_rate": 6.105197244834065e-07,
      "loss": 0.1637,
      "step": 3510
    },
    {
      "epoch": 2.97,
      "learning_rate": 4.539762053850971e-07,
      "loss": 0.1766,
      "step": 3520
    },
    {
      "epoch": 2.98,
      "learning_rate": 2.974326862867877e-07,
      "loss": 0.1145,
      "step": 3530
    },
    {
      "epoch": 2.99,
      "learning_rate": 1.408891671884784e-07,
      "loss": 0.1213,
      "step": 3540
    },
    {
      "epoch": 3.0,
      "eval_accuracy": 0.9225346534653466,
      "eval_loss": 0.2749262750148773,
      "eval_runtime": 521.2728,
      "eval_samples_per_second": 48.439,
      "eval_steps_per_second": 3.029,
      "step": 3549
    },
    {
      "epoch": 3.0,
      "step": 3549,
      "total_flos": 1.782187413176424e+19,
      "train_loss": 0.6323049043930561,
      "train_runtime": 11444.2247,
      "train_samples_per_second": 19.857,
      "train_steps_per_second": 0.31
    }
  ],
  "max_steps": 3549,
  "num_train_epochs": 3,
  "total_flos": 1.782187413176424e+19,
  "trial_name": null,
  "trial_params": null
}