{
  "best_metric": 0.9876,
  "best_model_checkpoint": "vit-small-patch16-224-finetuned-Cifar10/checkpoint-1053",
  "epoch": 2.9936034115138592,
  "eval_steps": 500,
  "global_step": 1053,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.03,
      "grad_norm": 10.314929962158203,
      "learning_rate": 4.716981132075472e-06,
      "loss": 2.6416,
      "step": 10
    },
    {
      "epoch": 0.06,
      "grad_norm": 8.337972640991211,
      "learning_rate": 9.433962264150944e-06,
      "loss": 2.3876,
      "step": 20
    },
    {
      "epoch": 0.09,
      "grad_norm": 8.081725120544434,
      "learning_rate": 1.4150943396226415e-05,
      "loss": 1.9616,
      "step": 30
    },
    {
      "epoch": 0.11,
      "grad_norm": 6.572023868560791,
      "learning_rate": 1.8867924528301888e-05,
      "loss": 1.4868,
      "step": 40
    },
    {
      "epoch": 0.14,
      "grad_norm": 6.461695194244385,
      "learning_rate": 2.358490566037736e-05,
      "loss": 0.9903,
      "step": 50
    },
    {
      "epoch": 0.17,
      "grad_norm": 5.515841484069824,
      "learning_rate": 2.830188679245283e-05,
      "loss": 0.7169,
      "step": 60
    },
    {
      "epoch": 0.2,
      "grad_norm": 7.923908710479736,
      "learning_rate": 3.30188679245283e-05,
      "loss": 0.5633,
      "step": 70
    },
    {
      "epoch": 0.23,
      "grad_norm": 5.821664333343506,
      "learning_rate": 3.7735849056603776e-05,
      "loss": 0.4791,
      "step": 80
    },
    {
      "epoch": 0.26,
      "grad_norm": 5.747777938842773,
      "learning_rate": 4.245283018867925e-05,
      "loss": 0.438,
      "step": 90
    },
    {
      "epoch": 0.28,
      "grad_norm": 9.386958122253418,
      "learning_rate": 4.716981132075472e-05,
      "loss": 0.4276,
      "step": 100
    },
    {
      "epoch": 0.31,
      "grad_norm": 5.98386812210083,
      "learning_rate": 4.978880675818374e-05,
      "loss": 0.4085,
      "step": 110
    },
    {
      "epoch": 0.34,
      "grad_norm": 5.977865219116211,
      "learning_rate": 4.9260823653643085e-05,
      "loss": 0.3609,
      "step": 120
    },
    {
      "epoch": 0.37,
      "grad_norm": 5.478827476501465,
      "learning_rate": 4.8732840549102435e-05,
      "loss": 0.346,
      "step": 130
    },
    {
      "epoch": 0.4,
      "grad_norm": 5.904544353485107,
      "learning_rate": 4.820485744456177e-05,
      "loss": 0.4145,
      "step": 140
    },
    {
      "epoch": 0.43,
      "grad_norm": 5.136106967926025,
      "learning_rate": 4.767687434002112e-05,
      "loss": 0.3622,
      "step": 150
    },
    {
      "epoch": 0.45,
      "grad_norm": 6.393275260925293,
      "learning_rate": 4.7148891235480466e-05,
      "loss": 0.3613,
      "step": 160
    },
    {
      "epoch": 0.48,
      "grad_norm": 4.859918117523193,
      "learning_rate": 4.662090813093981e-05,
      "loss": 0.3733,
      "step": 170
    },
    {
      "epoch": 0.51,
      "grad_norm": 4.1030426025390625,
      "learning_rate": 4.609292502639916e-05,
      "loss": 0.3029,
      "step": 180
    },
    {
      "epoch": 0.54,
      "grad_norm": 7.502547264099121,
      "learning_rate": 4.55649419218585e-05,
      "loss": 0.3357,
      "step": 190
    },
    {
      "epoch": 0.57,
      "grad_norm": 5.060296535491943,
      "learning_rate": 4.503695881731785e-05,
      "loss": 0.3161,
      "step": 200
    },
    {
      "epoch": 0.6,
      "grad_norm": 4.308870792388916,
      "learning_rate": 4.45089757127772e-05,
      "loss": 0.3041,
      "step": 210
    },
    {
      "epoch": 0.63,
      "grad_norm": 6.026098251342773,
      "learning_rate": 4.398099260823654e-05,
      "loss": 0.301,
      "step": 220
    },
    {
      "epoch": 0.65,
      "grad_norm": 5.656477928161621,
      "learning_rate": 4.3453009503695884e-05,
      "loss": 0.3717,
      "step": 230
    },
    {
      "epoch": 0.68,
      "grad_norm": 3.4833385944366455,
      "learning_rate": 4.292502639915523e-05,
      "loss": 0.3265,
      "step": 240
    },
    {
      "epoch": 0.71,
      "grad_norm": 5.039575099945068,
      "learning_rate": 4.239704329461457e-05,
      "loss": 0.314,
      "step": 250
    },
    {
      "epoch": 0.74,
      "grad_norm": 6.7204155921936035,
      "learning_rate": 4.186906019007392e-05,
      "loss": 0.3216,
      "step": 260
    },
    {
      "epoch": 0.77,
      "grad_norm": 4.30304479598999,
      "learning_rate": 4.1341077085533265e-05,
      "loss": 0.3166,
      "step": 270
    },
    {
      "epoch": 0.8,
      "grad_norm": 6.109108924865723,
      "learning_rate": 4.081309398099261e-05,
      "loss": 0.3528,
      "step": 280
    },
    {
      "epoch": 0.82,
      "grad_norm": 4.084746837615967,
      "learning_rate": 4.028511087645195e-05,
      "loss": 0.2744,
      "step": 290
    },
    {
      "epoch": 0.85,
      "grad_norm": 5.485677719116211,
      "learning_rate": 3.97571277719113e-05,
      "loss": 0.3019,
      "step": 300
    },
    {
      "epoch": 0.88,
      "grad_norm": 7.279469013214111,
      "learning_rate": 3.9229144667370646e-05,
      "loss": 0.3113,
      "step": 310
    },
    {
      "epoch": 0.91,
      "grad_norm": 6.751004695892334,
      "learning_rate": 3.870116156282999e-05,
      "loss": 0.3205,
      "step": 320
    },
    {
      "epoch": 0.94,
      "grad_norm": 5.167363166809082,
      "learning_rate": 3.817317845828934e-05,
      "loss": 0.2882,
      "step": 330
    },
    {
      "epoch": 0.97,
      "grad_norm": 4.599797248840332,
      "learning_rate": 3.764519535374868e-05,
      "loss": 0.3278,
      "step": 340
    },
    {
      "epoch": 1.0,
      "grad_norm": 6.141386985778809,
      "learning_rate": 3.711721224920803e-05,
      "loss": 0.3129,
      "step": 350
    },
    {
      "epoch": 1.0,
      "eval_accuracy": 0.9818,
      "eval_loss": 0.05345360189676285,
      "eval_runtime": 46.7391,
      "eval_samples_per_second": 106.977,
      "eval_steps_per_second": 3.359,
      "step": 351
    },
    {
      "epoch": 1.02,
      "grad_norm": 2.8357040882110596,
      "learning_rate": 3.658922914466738e-05,
      "loss": 0.2452,
      "step": 360
    },
    {
      "epoch": 1.05,
      "grad_norm": 4.519667148590088,
      "learning_rate": 3.6061246040126714e-05,
      "loss": 0.2713,
      "step": 370
    },
    {
      "epoch": 1.08,
      "grad_norm": 6.6121063232421875,
      "learning_rate": 3.5533262935586064e-05,
      "loss": 0.273,
      "step": 380
    },
    {
      "epoch": 1.11,
      "grad_norm": 4.9773125648498535,
      "learning_rate": 3.500527983104541e-05,
      "loss": 0.2632,
      "step": 390
    },
    {
      "epoch": 1.14,
      "grad_norm": 4.788541316986084,
      "learning_rate": 3.447729672650475e-05,
      "loss": 0.2891,
      "step": 400
    },
    {
      "epoch": 1.17,
      "grad_norm": 3.6516640186309814,
      "learning_rate": 3.3949313621964095e-05,
      "loss": 0.2641,
      "step": 410
    },
    {
      "epoch": 1.19,
      "grad_norm": 4.8205695152282715,
      "learning_rate": 3.3421330517423445e-05,
      "loss": 0.2726,
      "step": 420
    },
    {
      "epoch": 1.22,
      "grad_norm": 4.041383743286133,
      "learning_rate": 3.289334741288279e-05,
      "loss": 0.2792,
      "step": 430
    },
    {
      "epoch": 1.25,
      "grad_norm": 5.766094207763672,
      "learning_rate": 3.236536430834213e-05,
      "loss": 0.2815,
      "step": 440
    },
    {
      "epoch": 1.28,
      "grad_norm": 6.516313076019287,
      "learning_rate": 3.183738120380148e-05,
      "loss": 0.2462,
      "step": 450
    },
    {
      "epoch": 1.31,
      "grad_norm": 5.090723037719727,
      "learning_rate": 3.130939809926082e-05,
      "loss": 0.2547,
      "step": 460
    },
    {
      "epoch": 1.34,
      "grad_norm": 3.82003116607666,
      "learning_rate": 3.078141499472017e-05,
      "loss": 0.2593,
      "step": 470
    },
    {
      "epoch": 1.36,
      "grad_norm": 3.9347455501556396,
      "learning_rate": 3.0253431890179517e-05,
      "loss": 0.2534,
      "step": 480
    },
    {
      "epoch": 1.39,
      "grad_norm": 6.01845121383667,
      "learning_rate": 2.972544878563886e-05,
      "loss": 0.2836,
      "step": 490
    },
    {
      "epoch": 1.42,
      "grad_norm": 4.622760772705078,
      "learning_rate": 2.9197465681098207e-05,
      "loss": 0.2673,
      "step": 500
    },
    {
      "epoch": 1.45,
      "grad_norm": 4.938364505767822,
      "learning_rate": 2.8669482576557548e-05,
      "loss": 0.2527,
      "step": 510
    },
    {
      "epoch": 1.48,
      "grad_norm": 5.388880729675293,
      "learning_rate": 2.8141499472016898e-05,
      "loss": 0.2472,
      "step": 520
    },
    {
      "epoch": 1.51,
      "grad_norm": 4.989612102508545,
      "learning_rate": 2.7613516367476245e-05,
      "loss": 0.2325,
      "step": 530
    },
    {
      "epoch": 1.54,
      "grad_norm": 4.6943254470825195,
      "learning_rate": 2.7085533262935585e-05,
      "loss": 0.2796,
      "step": 540
    },
    {
      "epoch": 1.56,
      "grad_norm": 5.073199272155762,
      "learning_rate": 2.6557550158394935e-05,
      "loss": 0.2162,
      "step": 550
    },
    {
      "epoch": 1.59,
      "grad_norm": 3.7621564865112305,
      "learning_rate": 2.6029567053854276e-05,
      "loss": 0.232,
      "step": 560
    },
    {
      "epoch": 1.62,
      "grad_norm": 3.057474136352539,
      "learning_rate": 2.5501583949313622e-05,
      "loss": 0.2246,
      "step": 570
    },
    {
      "epoch": 1.65,
      "grad_norm": 5.149780750274658,
      "learning_rate": 2.497360084477297e-05,
      "loss": 0.2469,
      "step": 580
    },
    {
      "epoch": 1.68,
      "grad_norm": 6.816042900085449,
      "learning_rate": 2.4445617740232313e-05,
      "loss": 0.2461,
      "step": 590
    },
    {
      "epoch": 1.71,
      "grad_norm": 4.830414772033691,
      "learning_rate": 2.391763463569166e-05,
      "loss": 0.2551,
      "step": 600
    },
    {
      "epoch": 1.73,
      "grad_norm": 4.188309669494629,
      "learning_rate": 2.3389651531151003e-05,
      "loss": 0.2388,
      "step": 610
    },
    {
      "epoch": 1.76,
      "grad_norm": 7.241832733154297,
      "learning_rate": 2.286166842661035e-05,
      "loss": 0.2674,
      "step": 620
    },
    {
      "epoch": 1.79,
      "grad_norm": 4.772040843963623,
      "learning_rate": 2.2333685322069694e-05,
      "loss": 0.2537,
      "step": 630
    },
    {
      "epoch": 1.82,
      "grad_norm": 4.274710655212402,
      "learning_rate": 2.180570221752904e-05,
      "loss": 0.231,
      "step": 640
    },
    {
      "epoch": 1.85,
      "grad_norm": 4.281772613525391,
      "learning_rate": 2.1277719112988384e-05,
      "loss": 0.3039,
      "step": 650
    },
    {
      "epoch": 1.88,
      "grad_norm": 5.059981822967529,
      "learning_rate": 2.074973600844773e-05,
      "loss": 0.2894,
      "step": 660
    },
    {
      "epoch": 1.9,
      "grad_norm": 3.4362783432006836,
      "learning_rate": 2.0221752903907075e-05,
      "loss": 0.2272,
      "step": 670
    },
    {
      "epoch": 1.93,
      "grad_norm": 4.354649066925049,
      "learning_rate": 1.9693769799366422e-05,
      "loss": 0.2172,
      "step": 680
    },
    {
      "epoch": 1.96,
      "grad_norm": 3.579586982727051,
      "learning_rate": 1.9165786694825765e-05,
      "loss": 0.2248,
      "step": 690
    },
    {
      "epoch": 1.99,
      "grad_norm": 4.853511333465576,
      "learning_rate": 1.863780359028511e-05,
      "loss": 0.2333,
      "step": 700
    },
    {
      "epoch": 2.0,
      "eval_accuracy": 0.9836,
      "eval_loss": 0.04917434602975845,
      "eval_runtime": 46.8032,
      "eval_samples_per_second": 106.83,
      "eval_steps_per_second": 3.354,
      "step": 703
    },
    {
      "epoch": 2.02,
      "grad_norm": 5.036158084869385,
      "learning_rate": 1.810982048574446e-05,
      "loss": 0.2369,
      "step": 710
    },
    {
      "epoch": 2.05,
      "grad_norm": 4.580857753753662,
      "learning_rate": 1.7581837381203803e-05,
      "loss": 0.2235,
      "step": 720
    },
    {
      "epoch": 2.08,
      "grad_norm": 3.7654311656951904,
      "learning_rate": 1.7053854276663146e-05,
      "loss": 0.2304,
      "step": 730
    },
    {
      "epoch": 2.1,
      "grad_norm": 4.297289848327637,
      "learning_rate": 1.6525871172122493e-05,
      "loss": 0.2179,
      "step": 740
    },
    {
      "epoch": 2.13,
      "grad_norm": 5.170669078826904,
      "learning_rate": 1.5997888067581837e-05,
      "loss": 0.213,
      "step": 750
    },
    {
      "epoch": 2.16,
      "grad_norm": 4.770534038543701,
      "learning_rate": 1.5469904963041184e-05,
      "loss": 0.2245,
      "step": 760
    },
    {
      "epoch": 2.19,
      "grad_norm": 2.8574471473693848,
      "learning_rate": 1.4941921858500529e-05,
      "loss": 0.1713,
      "step": 770
    },
    {
      "epoch": 2.22,
      "grad_norm": 3.895998954772949,
      "learning_rate": 1.4413938753959874e-05,
      "loss": 0.205,
      "step": 780
    },
    {
      "epoch": 2.25,
      "grad_norm": 2.9967050552368164,
      "learning_rate": 1.388595564941922e-05,
      "loss": 0.2204,
      "step": 790
    },
    {
      "epoch": 2.27,
      "grad_norm": 4.339881420135498,
      "learning_rate": 1.3357972544878563e-05,
      "loss": 0.1817,
      "step": 800
    },
    {
      "epoch": 2.3,
      "grad_norm": 5.125692844390869,
      "learning_rate": 1.2829989440337912e-05,
      "loss": 0.2181,
      "step": 810
    },
    {
      "epoch": 2.33,
      "grad_norm": 6.453378200531006,
      "learning_rate": 1.2302006335797255e-05,
      "loss": 0.2058,
      "step": 820
    },
    {
      "epoch": 2.36,
      "grad_norm": 4.746486663818359,
      "learning_rate": 1.17740232312566e-05,
      "loss": 0.213,
      "step": 830
    },
    {
      "epoch": 2.39,
      "grad_norm": 5.431849956512451,
      "learning_rate": 1.1246040126715946e-05,
      "loss": 0.2257,
      "step": 840
    },
    {
      "epoch": 2.42,
      "grad_norm": 6.360583305358887,
      "learning_rate": 1.0718057022175291e-05,
      "loss": 0.224,
      "step": 850
    },
    {
      "epoch": 2.44,
      "grad_norm": 2.9380533695220947,
      "learning_rate": 1.0190073917634636e-05,
      "loss": 0.2245,
      "step": 860
    },
    {
      "epoch": 2.47,
      "grad_norm": 5.245874881744385,
      "learning_rate": 9.662090813093982e-06,
      "loss": 0.1948,
      "step": 870
    },
    {
      "epoch": 2.5,
      "grad_norm": 4.233717918395996,
      "learning_rate": 9.134107708553327e-06,
      "loss": 0.2169,
      "step": 880
    },
    {
      "epoch": 2.53,
      "grad_norm": 4.133506774902344,
      "learning_rate": 8.606124604012672e-06,
      "loss": 0.1894,
      "step": 890
    },
    {
      "epoch": 2.56,
      "grad_norm": 3.6981563568115234,
      "learning_rate": 8.078141499472017e-06,
      "loss": 0.2162,
      "step": 900
    },
    {
      "epoch": 2.59,
      "grad_norm": 5.028733730316162,
      "learning_rate": 7.5501583949313625e-06,
      "loss": 0.2192,
      "step": 910
    },
    {
      "epoch": 2.62,
      "grad_norm": 3.4768264293670654,
      "learning_rate": 7.022175290390708e-06,
      "loss": 0.1761,
      "step": 920
    },
    {
      "epoch": 2.64,
      "grad_norm": 5.03364896774292,
      "learning_rate": 6.494192185850054e-06,
      "loss": 0.1679,
      "step": 930
    },
    {
      "epoch": 2.67,
      "grad_norm": 4.1295671463012695,
      "learning_rate": 5.966209081309398e-06,
      "loss": 0.2324,
      "step": 940
    },
    {
      "epoch": 2.7,
      "grad_norm": 4.6029744148254395,
      "learning_rate": 5.438225976768744e-06,
      "loss": 0.2357,
      "step": 950
    },
    {
      "epoch": 2.73,
      "grad_norm": 4.893083572387695,
      "learning_rate": 4.910242872228089e-06,
      "loss": 0.1903,
      "step": 960
    },
    {
      "epoch": 2.76,
      "grad_norm": 4.711769104003906,
      "learning_rate": 4.382259767687434e-06,
      "loss": 0.2094,
      "step": 970
    },
    {
      "epoch": 2.79,
      "grad_norm": 4.935668468475342,
      "learning_rate": 3.854276663146779e-06,
      "loss": 0.2066,
      "step": 980
    },
    {
      "epoch": 2.81,
      "grad_norm": 4.085482597351074,
      "learning_rate": 3.326293558606125e-06,
      "loss": 0.2225,
      "step": 990
    },
    {
      "epoch": 2.84,
      "grad_norm": 4.599144458770752,
      "learning_rate": 2.79831045406547e-06,
      "loss": 0.1858,
      "step": 1000
    },
    {
      "epoch": 2.87,
      "grad_norm": 3.7395975589752197,
      "learning_rate": 2.2703273495248154e-06,
      "loss": 0.1868,
      "step": 1010
    },
    {
      "epoch": 2.9,
      "grad_norm": 3.5131924152374268,
      "learning_rate": 1.7423442449841606e-06,
      "loss": 0.2092,
      "step": 1020
    },
    {
      "epoch": 2.93,
      "grad_norm": 3.5327999591827393,
      "learning_rate": 1.2143611404435059e-06,
      "loss": 0.2194,
      "step": 1030
    },
    {
      "epoch": 2.96,
      "grad_norm": 4.314801216125488,
      "learning_rate": 6.863780359028511e-07,
      "loss": 0.2106,
      "step": 1040
    },
    {
      "epoch": 2.99,
      "grad_norm": 2.447665214538574,
      "learning_rate": 1.5839493136219642e-07,
      "loss": 0.1991,
      "step": 1050
    },
    {
      "epoch": 2.99,
      "eval_accuracy": 0.9876,
      "eval_loss": 0.037825874984264374,
      "eval_runtime": 46.7089,
      "eval_samples_per_second": 107.046,
      "eval_steps_per_second": 3.361,
      "step": 1053
    },
    {
      "epoch": 2.99,
      "step": 1053,
      "total_flos": 2.636946768879747e+18,
      "train_loss": 0.3487827978242836,
      "train_runtime": 1772.9517,
      "train_samples_per_second": 76.144,
      "train_steps_per_second": 0.594
    }
  ],
  "logging_steps": 10,
  "max_steps": 1053,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "total_flos": 2.636946768879747e+18,
  "train_batch_size": 32,
  "trial_name": null,
  "trial_params": null
}