|
{
  "best_metric": 0.9866,
  "best_model_checkpoint": "vit-small-patch16-224-finetuned-Cifar10/checkpoint-703",
  "epoch": 2.9936034115138592,
  "eval_steps": 500,
  "global_step": 1053,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.03,
      "grad_norm": 11.993635177612305,
      "learning_rate": 4.716981132075472e-06,
      "loss": 2.7466,
      "step": 10
    },
    {
      "epoch": 0.06,
      "grad_norm": 9.214780807495117,
      "learning_rate": 9.433962264150944e-06,
      "loss": 2.4372,
      "step": 20
    },
    {
      "epoch": 0.09,
      "grad_norm": 8.453407287597656,
      "learning_rate": 1.4150943396226415e-05,
      "loss": 2.0196,
      "step": 30
    },
    {
      "epoch": 0.11,
      "grad_norm": 7.144883155822754,
      "learning_rate": 1.8867924528301888e-05,
      "loss": 1.5377,
      "step": 40
    },
    {
      "epoch": 0.14,
      "grad_norm": 5.839055061340332,
      "learning_rate": 2.358490566037736e-05,
      "loss": 0.9774,
      "step": 50
    },
    {
      "epoch": 0.17,
      "grad_norm": 5.836381912231445,
      "learning_rate": 2.830188679245283e-05,
      "loss": 0.6557,
      "step": 60
    },
    {
      "epoch": 0.2,
      "grad_norm": 7.173834323883057,
      "learning_rate": 3.30188679245283e-05,
      "loss": 0.5304,
      "step": 70
    },
    {
      "epoch": 0.23,
      "grad_norm": 5.7883830070495605,
      "learning_rate": 3.7735849056603776e-05,
      "loss": 0.4689,
      "step": 80
    },
    {
      "epoch": 0.26,
      "grad_norm": 7.019888401031494,
      "learning_rate": 4.245283018867925e-05,
      "loss": 0.4659,
      "step": 90
    },
    {
      "epoch": 0.28,
      "grad_norm": 6.123730182647705,
      "learning_rate": 4.716981132075472e-05,
      "loss": 0.4127,
      "step": 100
    },
    {
      "epoch": 0.31,
      "grad_norm": 6.988221645355225,
      "learning_rate": 4.978880675818374e-05,
      "loss": 0.4155,
      "step": 110
    },
    {
      "epoch": 0.34,
      "grad_norm": 6.683491230010986,
      "learning_rate": 4.9260823653643085e-05,
      "loss": 0.3546,
      "step": 120
    },
    {
      "epoch": 0.37,
      "grad_norm": 7.395442962646484,
      "learning_rate": 4.8732840549102435e-05,
      "loss": 0.3745,
      "step": 130
    },
    {
      "epoch": 0.4,
      "grad_norm": 4.838753700256348,
      "learning_rate": 4.820485744456177e-05,
      "loss": 0.3529,
      "step": 140
    },
    {
      "epoch": 0.43,
      "grad_norm": 4.989682674407959,
      "learning_rate": 4.767687434002112e-05,
      "loss": 0.3487,
      "step": 150
    },
    {
      "epoch": 0.45,
      "grad_norm": 6.224100589752197,
      "learning_rate": 4.7148891235480466e-05,
      "loss": 0.3452,
      "step": 160
    },
    {
      "epoch": 0.48,
      "grad_norm": 6.418895721435547,
      "learning_rate": 4.662090813093981e-05,
      "loss": 0.3545,
      "step": 170
    },
    {
      "epoch": 0.51,
      "grad_norm": 5.342619895935059,
      "learning_rate": 4.609292502639916e-05,
      "loss": 0.3171,
      "step": 180
    },
    {
      "epoch": 0.54,
      "grad_norm": 4.573193550109863,
      "learning_rate": 4.55649419218585e-05,
      "loss": 0.3242,
      "step": 190
    },
    {
      "epoch": 0.57,
      "grad_norm": 5.019192218780518,
      "learning_rate": 4.503695881731785e-05,
      "loss": 0.3241,
      "step": 200
    },
    {
      "epoch": 0.6,
      "grad_norm": 5.130370140075684,
      "learning_rate": 4.45089757127772e-05,
      "loss": 0.3316,
      "step": 210
    },
    {
      "epoch": 0.63,
      "grad_norm": 6.118399620056152,
      "learning_rate": 4.398099260823654e-05,
      "loss": 0.331,
      "step": 220
    },
    {
      "epoch": 0.65,
      "grad_norm": 4.279905796051025,
      "learning_rate": 4.3453009503695884e-05,
      "loss": 0.3276,
      "step": 230
    },
    {
      "epoch": 0.68,
      "grad_norm": 3.9065074920654297,
      "learning_rate": 4.292502639915523e-05,
      "loss": 0.2719,
      "step": 240
    },
    {
      "epoch": 0.71,
      "grad_norm": 5.098165988922119,
      "learning_rate": 4.239704329461457e-05,
      "loss": 0.3026,
      "step": 250
    },
    {
      "epoch": 0.74,
      "grad_norm": 5.796530723571777,
      "learning_rate": 4.186906019007392e-05,
      "loss": 0.2795,
      "step": 260
    },
    {
      "epoch": 0.77,
      "grad_norm": 5.165294647216797,
      "learning_rate": 4.1341077085533265e-05,
      "loss": 0.3337,
      "step": 270
    },
    {
      "epoch": 0.8,
      "grad_norm": 4.210253715515137,
      "learning_rate": 4.081309398099261e-05,
      "loss": 0.2845,
      "step": 280
    },
    {
      "epoch": 0.82,
      "grad_norm": 6.663649559020996,
      "learning_rate": 4.028511087645195e-05,
      "loss": 0.3021,
      "step": 290
    },
    {
      "epoch": 0.85,
      "grad_norm": 7.552988529205322,
      "learning_rate": 3.97571277719113e-05,
      "loss": 0.3157,
      "step": 300
    },
    {
      "epoch": 0.88,
      "grad_norm": 4.279345989227295,
      "learning_rate": 3.9229144667370646e-05,
      "loss": 0.2724,
      "step": 310
    },
    {
      "epoch": 0.91,
      "grad_norm": 6.712580680847168,
      "learning_rate": 3.870116156282999e-05,
      "loss": 0.3492,
      "step": 320
    },
    {
      "epoch": 0.94,
      "grad_norm": 5.240344524383545,
      "learning_rate": 3.817317845828934e-05,
      "loss": 0.2719,
      "step": 330
    },
    {
      "epoch": 0.97,
      "grad_norm": 6.162644386291504,
      "learning_rate": 3.764519535374868e-05,
      "loss": 0.2594,
      "step": 340
    },
    {
      "epoch": 1.0,
      "grad_norm": 5.918422222137451,
      "learning_rate": 3.711721224920803e-05,
      "loss": 0.3253,
      "step": 350
    },
    {
      "epoch": 1.0,
      "eval_accuracy": 0.9784,
      "eval_loss": 0.06568869948387146,
      "eval_runtime": 49.9126,
      "eval_samples_per_second": 100.175,
      "eval_steps_per_second": 3.145,
      "step": 351
    },
    {
      "epoch": 1.02,
      "grad_norm": 5.946422576904297,
      "learning_rate": 3.658922914466738e-05,
      "loss": 0.2393,
      "step": 360
    },
    {
      "epoch": 1.05,
      "grad_norm": 4.802185535430908,
      "learning_rate": 3.6061246040126714e-05,
      "loss": 0.3089,
      "step": 370
    },
    {
      "epoch": 1.08,
      "grad_norm": 6.8064351081848145,
      "learning_rate": 3.5533262935586064e-05,
      "loss": 0.3038,
      "step": 380
    },
    {
      "epoch": 1.11,
      "grad_norm": 4.575678825378418,
      "learning_rate": 3.500527983104541e-05,
      "loss": 0.267,
      "step": 390
    },
    {
      "epoch": 1.14,
      "grad_norm": 4.146677494049072,
      "learning_rate": 3.447729672650475e-05,
      "loss": 0.2544,
      "step": 400
    },
    {
      "epoch": 1.17,
      "grad_norm": 5.48955774307251,
      "learning_rate": 3.3949313621964095e-05,
      "loss": 0.2901,
      "step": 410
    },
    {
      "epoch": 1.19,
      "grad_norm": 5.2598490715026855,
      "learning_rate": 3.3421330517423445e-05,
      "loss": 0.2599,
      "step": 420
    },
    {
      "epoch": 1.22,
      "grad_norm": 3.1469264030456543,
      "learning_rate": 3.289334741288279e-05,
      "loss": 0.2696,
      "step": 430
    },
    {
      "epoch": 1.25,
      "grad_norm": 4.3553266525268555,
      "learning_rate": 3.236536430834213e-05,
      "loss": 0.2813,
      "step": 440
    },
    {
      "epoch": 1.28,
      "grad_norm": 5.741024017333984,
      "learning_rate": 3.183738120380148e-05,
      "loss": 0.2886,
      "step": 450
    },
    {
      "epoch": 1.31,
      "grad_norm": 5.031249046325684,
      "learning_rate": 3.130939809926082e-05,
      "loss": 0.2721,
      "step": 460
    },
    {
      "epoch": 1.34,
      "grad_norm": 4.464756488800049,
      "learning_rate": 3.078141499472017e-05,
      "loss": 0.2496,
      "step": 470
    },
    {
      "epoch": 1.36,
      "grad_norm": 3.4022979736328125,
      "learning_rate": 3.0253431890179517e-05,
      "loss": 0.245,
      "step": 480
    },
    {
      "epoch": 1.39,
      "grad_norm": 5.276814937591553,
      "learning_rate": 2.972544878563886e-05,
      "loss": 0.2651,
      "step": 490
    },
    {
      "epoch": 1.42,
      "grad_norm": 3.5680410861968994,
      "learning_rate": 2.9197465681098207e-05,
      "loss": 0.2351,
      "step": 500
    },
    {
      "epoch": 1.45,
      "grad_norm": 3.921807050704956,
      "learning_rate": 2.8669482576557548e-05,
      "loss": 0.2667,
      "step": 510
    },
    {
      "epoch": 1.48,
      "grad_norm": 3.4071061611175537,
      "learning_rate": 2.8141499472016898e-05,
      "loss": 0.252,
      "step": 520
    },
    {
      "epoch": 1.51,
      "grad_norm": 4.332042694091797,
      "learning_rate": 2.7613516367476245e-05,
      "loss": 0.2868,
      "step": 530
    },
    {
      "epoch": 1.54,
      "grad_norm": 4.950148105621338,
      "learning_rate": 2.7085533262935585e-05,
      "loss": 0.2966,
      "step": 540
    },
    {
      "epoch": 1.56,
      "grad_norm": 4.6252312660217285,
      "learning_rate": 2.6557550158394935e-05,
      "loss": 0.2718,
      "step": 550
    },
    {
      "epoch": 1.59,
      "grad_norm": 3.9921112060546875,
      "learning_rate": 2.6029567053854276e-05,
      "loss": 0.2457,
      "step": 560
    },
    {
      "epoch": 1.62,
      "grad_norm": 4.575648784637451,
      "learning_rate": 2.5501583949313622e-05,
      "loss": 0.2488,
      "step": 570
    },
    {
      "epoch": 1.65,
      "grad_norm": 5.220016002655029,
      "learning_rate": 2.497360084477297e-05,
      "loss": 0.2482,
      "step": 580
    },
    {
      "epoch": 1.68,
      "grad_norm": 4.158516883850098,
      "learning_rate": 2.4445617740232313e-05,
      "loss": 0.2519,
      "step": 590
    },
    {
      "epoch": 1.71,
      "grad_norm": 4.686926364898682,
      "learning_rate": 2.391763463569166e-05,
      "loss": 0.2796,
      "step": 600
    },
    {
      "epoch": 1.73,
      "grad_norm": 5.014163017272949,
      "learning_rate": 2.3389651531151003e-05,
      "loss": 0.2195,
      "step": 610
    },
    {
      "epoch": 1.76,
      "grad_norm": 5.498648643493652,
      "learning_rate": 2.286166842661035e-05,
      "loss": 0.2881,
      "step": 620
    },
    {
      "epoch": 1.79,
      "grad_norm": 5.010299205780029,
      "learning_rate": 2.2333685322069694e-05,
      "loss": 0.2537,
      "step": 630
    },
    {
      "epoch": 1.82,
      "grad_norm": 4.682175636291504,
      "learning_rate": 2.180570221752904e-05,
      "loss": 0.2691,
      "step": 640
    },
    {
      "epoch": 1.85,
      "grad_norm": 4.073044776916504,
      "learning_rate": 2.1277719112988384e-05,
      "loss": 0.2358,
      "step": 650
    },
    {
      "epoch": 1.88,
      "grad_norm": 5.119712829589844,
      "learning_rate": 2.074973600844773e-05,
      "loss": 0.2496,
      "step": 660
    },
    {
      "epoch": 1.9,
      "grad_norm": 4.570176124572754,
      "learning_rate": 2.0221752903907075e-05,
      "loss": 0.2327,
      "step": 670
    },
    {
      "epoch": 1.93,
      "grad_norm": 4.677672386169434,
      "learning_rate": 1.9693769799366422e-05,
      "loss": 0.2265,
      "step": 680
    },
    {
      "epoch": 1.96,
      "grad_norm": 4.625946521759033,
      "learning_rate": 1.9165786694825765e-05,
      "loss": 0.2248,
      "step": 690
    },
    {
      "epoch": 1.99,
      "grad_norm": 4.514760971069336,
      "learning_rate": 1.863780359028511e-05,
      "loss": 0.266,
      "step": 700
    },
    {
      "epoch": 2.0,
      "eval_accuracy": 0.9866,
      "eval_loss": 0.046029962599277496,
      "eval_runtime": 49.803,
      "eval_samples_per_second": 100.396,
      "eval_steps_per_second": 3.152,
      "step": 703
    },
    {
      "epoch": 2.02,
      "grad_norm": 3.862548589706421,
      "learning_rate": 1.810982048574446e-05,
      "loss": 0.2284,
      "step": 710
    },
    {
      "epoch": 2.05,
      "grad_norm": 4.738348007202148,
      "learning_rate": 1.7581837381203803e-05,
      "loss": 0.2416,
      "step": 720
    },
    {
      "epoch": 2.08,
      "grad_norm": 4.671838760375977,
      "learning_rate": 1.7053854276663146e-05,
      "loss": 0.2409,
      "step": 730
    },
    {
      "epoch": 2.1,
      "grad_norm": 3.9875543117523193,
      "learning_rate": 1.6525871172122493e-05,
      "loss": 0.2188,
      "step": 740
    },
    {
      "epoch": 2.13,
      "grad_norm": 3.695639133453369,
      "learning_rate": 1.5997888067581837e-05,
      "loss": 0.2498,
      "step": 750
    },
    {
      "epoch": 2.16,
      "grad_norm": 3.4530131816864014,
      "learning_rate": 1.5469904963041184e-05,
      "loss": 0.2152,
      "step": 760
    },
    {
      "epoch": 2.19,
      "grad_norm": 4.31766414642334,
      "learning_rate": 1.4941921858500529e-05,
      "loss": 0.2374,
      "step": 770
    },
    {
      "epoch": 2.22,
      "grad_norm": 4.569504261016846,
      "learning_rate": 1.4413938753959874e-05,
      "loss": 0.2397,
      "step": 780
    },
    {
      "epoch": 2.25,
      "grad_norm": 3.1321048736572266,
      "learning_rate": 1.388595564941922e-05,
      "loss": 0.2367,
      "step": 790
    },
    {
      "epoch": 2.27,
      "grad_norm": 4.778504371643066,
      "learning_rate": 1.3357972544878563e-05,
      "loss": 0.1993,
      "step": 800
    },
    {
      "epoch": 2.3,
      "grad_norm": 4.513803482055664,
      "learning_rate": 1.2829989440337912e-05,
      "loss": 0.2197,
      "step": 810
    },
    {
      "epoch": 2.33,
      "grad_norm": 5.787839412689209,
      "learning_rate": 1.2302006335797255e-05,
      "loss": 0.2117,
      "step": 820
    },
    {
      "epoch": 2.36,
      "grad_norm": 4.725786209106445,
      "learning_rate": 1.17740232312566e-05,
      "loss": 0.2326,
      "step": 830
    },
    {
      "epoch": 2.39,
      "grad_norm": 4.849221706390381,
      "learning_rate": 1.1246040126715946e-05,
      "loss": 0.1853,
      "step": 840
    },
    {
      "epoch": 2.42,
      "grad_norm": 4.66175651550293,
      "learning_rate": 1.0718057022175291e-05,
      "loss": 0.2144,
      "step": 850
    },
    {
      "epoch": 2.44,
      "grad_norm": 3.1749322414398193,
      "learning_rate": 1.0190073917634636e-05,
      "loss": 0.2448,
      "step": 860
    },
    {
      "epoch": 2.47,
      "grad_norm": 5.343231678009033,
      "learning_rate": 9.662090813093982e-06,
      "loss": 0.1796,
      "step": 870
    },
    {
      "epoch": 2.5,
      "grad_norm": 4.883101463317871,
      "learning_rate": 9.134107708553327e-06,
      "loss": 0.1833,
      "step": 880
    },
    {
      "epoch": 2.53,
      "grad_norm": 4.559369087219238,
      "learning_rate": 8.606124604012672e-06,
      "loss": 0.2449,
      "step": 890
    },
    {
      "epoch": 2.56,
      "grad_norm": 6.445333480834961,
      "learning_rate": 8.078141499472017e-06,
      "loss": 0.2525,
      "step": 900
    },
    {
      "epoch": 2.59,
      "grad_norm": 3.946509599685669,
      "learning_rate": 7.5501583949313625e-06,
      "loss": 0.192,
      "step": 910
    },
    {
      "epoch": 2.62,
      "grad_norm": 3.9938108921051025,
      "learning_rate": 7.022175290390708e-06,
      "loss": 0.1893,
      "step": 920
    },
    {
      "epoch": 2.64,
      "grad_norm": 3.878434419631958,
      "learning_rate": 6.494192185850054e-06,
      "loss": 0.1843,
      "step": 930
    },
    {
      "epoch": 2.67,
      "grad_norm": 3.8179821968078613,
      "learning_rate": 5.966209081309398e-06,
      "loss": 0.2282,
      "step": 940
    },
    {
      "epoch": 2.7,
      "grad_norm": 5.556141376495361,
      "learning_rate": 5.438225976768744e-06,
      "loss": 0.18,
      "step": 950
    },
    {
      "epoch": 2.73,
      "grad_norm": 3.250397205352783,
      "learning_rate": 4.910242872228089e-06,
      "loss": 0.1947,
      "step": 960
    },
    {
      "epoch": 2.76,
      "grad_norm": 2.9559216499328613,
      "learning_rate": 4.382259767687434e-06,
      "loss": 0.1889,
      "step": 970
    },
    {
      "epoch": 2.79,
      "grad_norm": 4.601840972900391,
      "learning_rate": 3.854276663146779e-06,
      "loss": 0.1805,
      "step": 980
    },
    {
      "epoch": 2.81,
      "grad_norm": 4.955376148223877,
      "learning_rate": 3.326293558606125e-06,
      "loss": 0.2462,
      "step": 990
    },
    {
      "epoch": 2.84,
      "grad_norm": 3.84184193611145,
      "learning_rate": 2.79831045406547e-06,
      "loss": 0.194,
      "step": 1000
    },
    {
      "epoch": 2.87,
      "grad_norm": 3.8135769367218018,
      "learning_rate": 2.2703273495248154e-06,
      "loss": 0.2011,
      "step": 1010
    },
    {
      "epoch": 2.9,
      "grad_norm": 4.607426166534424,
      "learning_rate": 1.7423442449841606e-06,
      "loss": 0.1948,
      "step": 1020
    },
    {
      "epoch": 2.93,
      "grad_norm": 4.66074275970459,
      "learning_rate": 1.2143611404435059e-06,
      "loss": 0.1996,
      "step": 1030
    },
    {
      "epoch": 2.96,
      "grad_norm": 3.2419092655181885,
      "learning_rate": 6.863780359028511e-07,
      "loss": 0.1981,
      "step": 1040
    },
    {
      "epoch": 2.99,
      "grad_norm": 3.586371660232544,
      "learning_rate": 1.5839493136219642e-07,
      "loss": 0.2298,
      "step": 1050
    },
    {
      "epoch": 2.99,
      "eval_accuracy": 0.9862,
      "eval_loss": 0.04177713394165039,
      "eval_runtime": 49.9847,
      "eval_samples_per_second": 100.031,
      "eval_steps_per_second": 3.141,
      "step": 1053
    },
    {
      "epoch": 2.99,
      "step": 1053,
      "total_flos": 2.636946768879747e+18,
      "train_loss": 0.3514244137552955,
      "train_runtime": 1744.8692,
      "train_samples_per_second": 77.37,
      "train_steps_per_second": 0.603
    }
  ],
  "logging_steps": 10,
  "max_steps": 1053,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "total_flos": 2.636946768879747e+18,
  "train_batch_size": 32,
  "trial_name": null,
  "trial_params": null
}
|
|