{
  "best_metric": 0.9992091735863978,
  "best_model_checkpoint": "vit-base-patch16-224-clothes-filter/checkpoint-1265",
  "epoch": 4.996047430830039,
  "eval_steps": 500,
  "global_step": 1580,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.03,
      "grad_norm": 4.780308246612549,
      "learning_rate": 3.1645569620253167e-06,
      "loss": 0.6282,
      "step": 10
    },
    {
      "epoch": 0.06,
      "grad_norm": 4.528079986572266,
      "learning_rate": 6.329113924050633e-06,
      "loss": 0.5794,
      "step": 20
    },
    {
      "epoch": 0.09,
      "grad_norm": 3.32427978515625,
      "learning_rate": 9.49367088607595e-06,
      "loss": 0.4682,
      "step": 30
    },
    {
      "epoch": 0.13,
      "grad_norm": 3.10469388961792,
      "learning_rate": 1.2658227848101267e-05,
      "loss": 0.3436,
      "step": 40
    },
    {
      "epoch": 0.16,
      "grad_norm": 1.8978112936019897,
      "learning_rate": 1.5822784810126583e-05,
      "loss": 0.2182,
      "step": 50
    },
    {
      "epoch": 0.19,
      "grad_norm": 1.4250308275222778,
      "learning_rate": 1.89873417721519e-05,
      "loss": 0.1309,
      "step": 60
    },
    {
      "epoch": 0.22,
      "grad_norm": 1.0968660116195679,
      "learning_rate": 2.2151898734177217e-05,
      "loss": 0.0826,
      "step": 70
    },
    {
      "epoch": 0.25,
      "grad_norm": 2.002328872680664,
      "learning_rate": 2.5316455696202533e-05,
      "loss": 0.0477,
      "step": 80
    },
    {
      "epoch": 0.28,
      "grad_norm": 0.20249253511428833,
      "learning_rate": 2.848101265822785e-05,
      "loss": 0.0207,
      "step": 90
    },
    {
      "epoch": 0.32,
      "grad_norm": 0.15333689749240875,
      "learning_rate": 3.1645569620253167e-05,
      "loss": 0.0164,
      "step": 100
    },
    {
      "epoch": 0.35,
      "grad_norm": 0.08237770944833755,
      "learning_rate": 3.4810126582278487e-05,
      "loss": 0.0419,
      "step": 110
    },
    {
      "epoch": 0.38,
      "grad_norm": 0.6087208986282349,
      "learning_rate": 3.79746835443038e-05,
      "loss": 0.0081,
      "step": 120
    },
    {
      "epoch": 0.41,
      "grad_norm": 0.059435680508613586,
      "learning_rate": 4.113924050632912e-05,
      "loss": 0.007,
      "step": 130
    },
    {
      "epoch": 0.44,
      "grad_norm": 0.08203254640102386,
      "learning_rate": 4.430379746835443e-05,
      "loss": 0.022,
      "step": 140
    },
    {
      "epoch": 0.47,
      "grad_norm": 0.059913892298936844,
      "learning_rate": 4.7468354430379746e-05,
      "loss": 0.02,
      "step": 150
    },
    {
      "epoch": 0.51,
      "grad_norm": 0.04219400882720947,
      "learning_rate": 4.9929676511955e-05,
      "loss": 0.0241,
      "step": 160
    },
    {
      "epoch": 0.54,
      "grad_norm": 0.4119158089160919,
      "learning_rate": 4.957805907172996e-05,
      "loss": 0.011,
      "step": 170
    },
    {
      "epoch": 0.57,
      "grad_norm": 0.08845602720975876,
      "learning_rate": 4.9226441631504925e-05,
      "loss": 0.012,
      "step": 180
    },
    {
      "epoch": 0.6,
      "grad_norm": 0.022793015465140343,
      "learning_rate": 4.887482419127989e-05,
      "loss": 0.0044,
      "step": 190
    },
    {
      "epoch": 0.63,
      "grad_norm": 0.1266743242740631,
      "learning_rate": 4.852320675105486e-05,
      "loss": 0.0026,
      "step": 200
    },
    {
      "epoch": 0.66,
      "grad_norm": 0.2308553010225296,
      "learning_rate": 4.817158931082982e-05,
      "loss": 0.0088,
      "step": 210
    },
    {
      "epoch": 0.7,
      "grad_norm": 0.2685852348804474,
      "learning_rate": 4.7819971870604783e-05,
      "loss": 0.0191,
      "step": 220
    },
    {
      "epoch": 0.73,
      "grad_norm": 0.012726900167763233,
      "learning_rate": 4.7468354430379746e-05,
      "loss": 0.0084,
      "step": 230
    },
    {
      "epoch": 0.76,
      "grad_norm": 0.30946388840675354,
      "learning_rate": 4.7116736990154716e-05,
      "loss": 0.0242,
      "step": 240
    },
    {
      "epoch": 0.79,
      "grad_norm": 0.03251117840409279,
      "learning_rate": 4.676511954992968e-05,
      "loss": 0.0028,
      "step": 250
    },
    {
      "epoch": 0.82,
      "grad_norm": 0.622480034828186,
      "learning_rate": 4.641350210970464e-05,
      "loss": 0.0123,
      "step": 260
    },
    {
      "epoch": 0.85,
      "grad_norm": 0.013131936080753803,
      "learning_rate": 4.606188466947961e-05,
      "loss": 0.0013,
      "step": 270
    },
    {
      "epoch": 0.89,
      "grad_norm": 0.042751457542181015,
      "learning_rate": 4.5710267229254575e-05,
      "loss": 0.0403,
      "step": 280
    },
    {
      "epoch": 0.92,
      "grad_norm": 0.023626720532774925,
      "learning_rate": 4.535864978902954e-05,
      "loss": 0.0376,
      "step": 290
    },
    {
      "epoch": 0.95,
      "grad_norm": 0.03286024183034897,
      "learning_rate": 4.50070323488045e-05,
      "loss": 0.0095,
      "step": 300
    },
    {
      "epoch": 0.98,
      "grad_norm": 0.19246149063110352,
      "learning_rate": 4.465541490857947e-05,
      "loss": 0.0462,
      "step": 310
    },
    {
      "epoch": 1.0,
      "eval_accuracy": 0.9936733886911823,
      "eval_loss": 0.016152312979102135,
      "eval_runtime": 16.8828,
      "eval_samples_per_second": 149.798,
      "eval_steps_per_second": 18.777,
      "step": 316
    },
    {
      "epoch": 1.01,
      "grad_norm": 4.946556568145752,
      "learning_rate": 4.430379746835443e-05,
      "loss": 0.02,
      "step": 320
    },
    {
      "epoch": 1.04,
      "grad_norm": 1.085129737854004,
      "learning_rate": 4.3952180028129396e-05,
      "loss": 0.0067,
      "step": 330
    },
    {
      "epoch": 1.08,
      "grad_norm": 0.026508348062634468,
      "learning_rate": 4.3600562587904366e-05,
      "loss": 0.0151,
      "step": 340
    },
    {
      "epoch": 1.11,
      "grad_norm": 0.13778889179229736,
      "learning_rate": 4.324894514767933e-05,
      "loss": 0.007,
      "step": 350
    },
    {
      "epoch": 1.14,
      "grad_norm": 3.9214696884155273,
      "learning_rate": 4.289732770745429e-05,
      "loss": 0.0384,
      "step": 360
    },
    {
      "epoch": 1.17,
      "grad_norm": 0.047678280621767044,
      "learning_rate": 4.2545710267229255e-05,
      "loss": 0.001,
      "step": 370
    },
    {
      "epoch": 1.2,
      "grad_norm": 1.579052448272705,
      "learning_rate": 4.2194092827004224e-05,
      "loss": 0.0254,
      "step": 380
    },
    {
      "epoch": 1.23,
      "grad_norm": 0.04058700054883957,
      "learning_rate": 4.184247538677919e-05,
      "loss": 0.0037,
      "step": 390
    },
    {
      "epoch": 1.26,
      "grad_norm": 0.02530195750296116,
      "learning_rate": 4.149085794655415e-05,
      "loss": 0.0018,
      "step": 400
    },
    {
      "epoch": 1.3,
      "grad_norm": 2.1537139415740967,
      "learning_rate": 4.113924050632912e-05,
      "loss": 0.0046,
      "step": 410
    },
    {
      "epoch": 1.33,
      "grad_norm": 0.4535873234272003,
      "learning_rate": 4.078762306610408e-05,
      "loss": 0.006,
      "step": 420
    },
    {
      "epoch": 1.36,
      "grad_norm": 0.050164151936769485,
      "learning_rate": 4.0436005625879046e-05,
      "loss": 0.0055,
      "step": 430
    },
    {
      "epoch": 1.39,
      "grad_norm": 0.015650559216737747,
      "learning_rate": 4.008438818565401e-05,
      "loss": 0.0159,
      "step": 440
    },
    {
      "epoch": 1.42,
      "grad_norm": 0.02410263940691948,
      "learning_rate": 3.973277074542898e-05,
      "loss": 0.0008,
      "step": 450
    },
    {
      "epoch": 1.45,
      "grad_norm": 0.005263881757855415,
      "learning_rate": 3.938115330520394e-05,
      "loss": 0.004,
      "step": 460
    },
    {
      "epoch": 1.49,
      "grad_norm": 0.005594250746071339,
      "learning_rate": 3.9029535864978904e-05,
      "loss": 0.0004,
      "step": 470
    },
    {
      "epoch": 1.52,
      "grad_norm": 0.008686939254403114,
      "learning_rate": 3.867791842475387e-05,
      "loss": 0.0014,
      "step": 480
    },
    {
      "epoch": 1.55,
      "grad_norm": 0.00374673493206501,
      "learning_rate": 3.832630098452884e-05,
      "loss": 0.0086,
      "step": 490
    },
    {
      "epoch": 1.58,
      "grad_norm": 0.004833437968045473,
      "learning_rate": 3.79746835443038e-05,
      "loss": 0.0136,
      "step": 500
    },
    {
      "epoch": 1.61,
      "grad_norm": 2.200552463531494,
      "learning_rate": 3.762306610407876e-05,
      "loss": 0.0156,
      "step": 510
    },
    {
      "epoch": 1.64,
      "grad_norm": 0.1714821308851242,
      "learning_rate": 3.727144866385373e-05,
      "loss": 0.0006,
      "step": 520
    },
    {
      "epoch": 1.68,
      "grad_norm": 0.009503379464149475,
      "learning_rate": 3.6919831223628695e-05,
      "loss": 0.0142,
      "step": 530
    },
    {
      "epoch": 1.71,
      "grad_norm": 0.004495191853493452,
      "learning_rate": 3.656821378340366e-05,
      "loss": 0.0119,
      "step": 540
    },
    {
      "epoch": 1.74,
      "grad_norm": 0.2889450192451477,
      "learning_rate": 3.621659634317862e-05,
      "loss": 0.0021,
      "step": 550
    },
    {
      "epoch": 1.77,
      "grad_norm": 0.04732218757271767,
      "learning_rate": 3.586497890295359e-05,
      "loss": 0.005,
      "step": 560
    },
    {
      "epoch": 1.8,
      "grad_norm": 0.710472047328949,
      "learning_rate": 3.551336146272855e-05,
      "loss": 0.0009,
      "step": 570
    },
    {
      "epoch": 1.83,
      "grad_norm": 0.025769829750061035,
      "learning_rate": 3.516174402250352e-05,
      "loss": 0.0013,
      "step": 580
    },
    {
      "epoch": 1.87,
      "grad_norm": 5.155148983001709,
      "learning_rate": 3.4810126582278487e-05,
      "loss": 0.016,
      "step": 590
    },
    {
      "epoch": 1.9,
      "grad_norm": 0.009949293918907642,
      "learning_rate": 3.445850914205345e-05,
      "loss": 0.0005,
      "step": 600
    },
    {
      "epoch": 1.93,
      "grad_norm": 0.004877807106822729,
      "learning_rate": 3.410689170182841e-05,
      "loss": 0.006,
      "step": 610
    },
    {
      "epoch": 1.96,
      "grad_norm": 3.116375684738159,
      "learning_rate": 3.3755274261603375e-05,
      "loss": 0.0076,
      "step": 620
    },
    {
      "epoch": 1.99,
      "grad_norm": 0.0024900627322494984,
      "learning_rate": 3.3403656821378345e-05,
      "loss": 0.0108,
      "step": 630
    },
    {
      "epoch": 2.0,
      "eval_accuracy": 0.9984183471727955,
      "eval_loss": 0.004202807787805796,
      "eval_runtime": 17.1708,
      "eval_samples_per_second": 147.285,
      "eval_steps_per_second": 18.462,
      "step": 632
    },
    {
      "epoch": 2.02,
      "grad_norm": 0.027852624654769897,
      "learning_rate": 3.305203938115331e-05,
      "loss": 0.0126,
      "step": 640
    },
    {
      "epoch": 2.06,
      "grad_norm": 0.00895751640200615,
      "learning_rate": 3.270042194092827e-05,
      "loss": 0.0247,
      "step": 650
    },
    {
      "epoch": 2.09,
      "grad_norm": 0.005895175039768219,
      "learning_rate": 3.234880450070324e-05,
      "loss": 0.0031,
      "step": 660
    },
    {
      "epoch": 2.12,
      "grad_norm": 0.0067118131555616856,
      "learning_rate": 3.1997187060478204e-05,
      "loss": 0.0123,
      "step": 670
    },
    {
      "epoch": 2.15,
      "grad_norm": 3.036545991897583,
      "learning_rate": 3.1645569620253167e-05,
      "loss": 0.0139,
      "step": 680
    },
    {
      "epoch": 2.18,
      "grad_norm": 0.013006511144340038,
      "learning_rate": 3.129395218002813e-05,
      "loss": 0.0007,
      "step": 690
    },
    {
      "epoch": 2.21,
      "grad_norm": 0.011808563023805618,
      "learning_rate": 3.09423347398031e-05,
      "loss": 0.001,
      "step": 700
    },
    {
      "epoch": 2.25,
      "grad_norm": 0.0029538481030613184,
      "learning_rate": 3.059071729957806e-05,
      "loss": 0.004,
      "step": 710
    },
    {
      "epoch": 2.28,
      "grad_norm": 0.0031332324724644423,
      "learning_rate": 3.0239099859353025e-05,
      "loss": 0.0006,
      "step": 720
    },
    {
      "epoch": 2.31,
      "grad_norm": 0.07193127274513245,
      "learning_rate": 2.9887482419127988e-05,
      "loss": 0.0134,
      "step": 730
    },
    {
      "epoch": 2.34,
      "grad_norm": 0.009125332348048687,
      "learning_rate": 2.9535864978902954e-05,
      "loss": 0.0009,
      "step": 740
    },
    {
      "epoch": 2.37,
      "grad_norm": 0.03563140332698822,
      "learning_rate": 2.9184247538677924e-05,
      "loss": 0.008,
      "step": 750
    },
    {
      "epoch": 2.4,
      "grad_norm": 0.4033479690551758,
      "learning_rate": 2.8832630098452884e-05,
      "loss": 0.0006,
      "step": 760
    },
    {
      "epoch": 2.43,
      "grad_norm": 0.005182890221476555,
      "learning_rate": 2.848101265822785e-05,
      "loss": 0.0003,
      "step": 770
    },
    {
      "epoch": 2.47,
      "grad_norm": 0.06785815209150314,
      "learning_rate": 2.8129395218002813e-05,
      "loss": 0.0005,
      "step": 780
    },
    {
      "epoch": 2.5,
      "grad_norm": 0.00515472237020731,
      "learning_rate": 2.777777777777778e-05,
      "loss": 0.0116,
      "step": 790
    },
    {
      "epoch": 2.53,
      "grad_norm": 0.06535543501377106,
      "learning_rate": 2.7426160337552742e-05,
      "loss": 0.0002,
      "step": 800
    },
    {
      "epoch": 2.56,
      "grad_norm": 0.2995426654815674,
      "learning_rate": 2.707454289732771e-05,
      "loss": 0.014,
      "step": 810
    },
    {
      "epoch": 2.59,
      "grad_norm": 0.011195150204002857,
      "learning_rate": 2.672292545710267e-05,
      "loss": 0.0257,
      "step": 820
    },
    {
      "epoch": 2.62,
      "grad_norm": 0.00852285698056221,
      "learning_rate": 2.6371308016877638e-05,
      "loss": 0.0249,
      "step": 830
    },
    {
      "epoch": 2.66,
      "grad_norm": 0.3155508041381836,
      "learning_rate": 2.6019690576652604e-05,
      "loss": 0.0008,
      "step": 840
    },
    {
      "epoch": 2.69,
      "grad_norm": 0.00613983441144228,
      "learning_rate": 2.5668073136427567e-05,
      "loss": 0.0024,
      "step": 850
    },
    {
      "epoch": 2.72,
      "grad_norm": 0.18053768575191498,
      "learning_rate": 2.5316455696202533e-05,
      "loss": 0.0021,
      "step": 860
    },
    {
      "epoch": 2.75,
      "grad_norm": 0.3204176127910614,
      "learning_rate": 2.49648382559775e-05,
      "loss": 0.0087,
      "step": 870
    },
    {
      "epoch": 2.78,
      "grad_norm": 0.019261712208390236,
      "learning_rate": 2.4613220815752462e-05,
      "loss": 0.0004,
      "step": 880
    },
    {
      "epoch": 2.81,
      "grad_norm": 0.04258158802986145,
      "learning_rate": 2.426160337552743e-05,
      "loss": 0.0157,
      "step": 890
    },
    {
      "epoch": 2.85,
      "grad_norm": 0.8219675421714783,
      "learning_rate": 2.3909985935302392e-05,
      "loss": 0.0018,
      "step": 900
    },
    {
      "epoch": 2.88,
      "grad_norm": 0.007423636503517628,
      "learning_rate": 2.3558368495077358e-05,
      "loss": 0.0004,
      "step": 910
    },
    {
      "epoch": 2.91,
      "grad_norm": 0.039796918630599976,
      "learning_rate": 2.320675105485232e-05,
      "loss": 0.0005,
      "step": 920
    },
    {
      "epoch": 2.94,
      "grad_norm": 0.007585118990391493,
      "learning_rate": 2.2855133614627287e-05,
      "loss": 0.0408,
      "step": 930
    },
    {
      "epoch": 2.97,
      "grad_norm": 0.004360842052847147,
      "learning_rate": 2.250351617440225e-05,
      "loss": 0.0051,
      "step": 940
    },
    {
      "epoch": 3.0,
      "eval_accuracy": 0.9972321075523922,
      "eval_loss": 0.0071137435734272,
      "eval_runtime": 16.9966,
      "eval_samples_per_second": 148.794,
      "eval_steps_per_second": 18.651,
      "step": 948
    },
    {
      "epoch": 3.0,
      "grad_norm": 0.012101028114557266,
      "learning_rate": 2.2151898734177217e-05,
      "loss": 0.0003,
      "step": 950
    },
    {
      "epoch": 3.04,
      "grad_norm": 0.021550431847572327,
      "learning_rate": 2.1800281293952183e-05,
      "loss": 0.0036,
      "step": 960
    },
    {
      "epoch": 3.07,
      "grad_norm": 0.003491973737254739,
      "learning_rate": 2.1448663853727146e-05,
      "loss": 0.0002,
      "step": 970
    },
    {
      "epoch": 3.1,
      "grad_norm": 0.0050508989952504635,
      "learning_rate": 2.1097046413502112e-05,
      "loss": 0.0007,
      "step": 980
    },
    {
      "epoch": 3.13,
      "grad_norm": 0.004054068587720394,
      "learning_rate": 2.0745428973277075e-05,
      "loss": 0.0003,
      "step": 990
    },
    {
      "epoch": 3.16,
      "grad_norm": 0.0022561594378203154,
      "learning_rate": 2.039381153305204e-05,
      "loss": 0.0007,
      "step": 1000
    },
    {
      "epoch": 3.19,
      "grad_norm": 0.016699934378266335,
      "learning_rate": 2.0042194092827004e-05,
      "loss": 0.0052,
      "step": 1010
    },
    {
      "epoch": 3.23,
      "grad_norm": 0.016947178170084953,
      "learning_rate": 1.969057665260197e-05,
      "loss": 0.0027,
      "step": 1020
    },
    {
      "epoch": 3.26,
      "grad_norm": 0.0021668283734470606,
      "learning_rate": 1.9338959212376934e-05,
      "loss": 0.0085,
      "step": 1030
    },
    {
      "epoch": 3.29,
      "grad_norm": 0.0019585881382226944,
      "learning_rate": 1.89873417721519e-05,
      "loss": 0.0003,
      "step": 1040
    },
    {
      "epoch": 3.32,
      "grad_norm": 0.02020511031150818,
      "learning_rate": 1.8635724331926866e-05,
      "loss": 0.0084,
      "step": 1050
    },
    {
      "epoch": 3.35,
      "grad_norm": 0.002657042583450675,
      "learning_rate": 1.828410689170183e-05,
      "loss": 0.0003,
      "step": 1060
    },
    {
      "epoch": 3.38,
      "grad_norm": 0.002752606989815831,
      "learning_rate": 1.7932489451476795e-05,
      "loss": 0.0278,
      "step": 1070
    },
    {
      "epoch": 3.42,
      "grad_norm": 0.4717991352081299,
      "learning_rate": 1.758087201125176e-05,
      "loss": 0.0004,
      "step": 1080
    },
    {
      "epoch": 3.45,
      "grad_norm": 0.01203097589313984,
      "learning_rate": 1.7229254571026725e-05,
      "loss": 0.0271,
      "step": 1090
    },
    {
      "epoch": 3.48,
      "grad_norm": 0.001858622650615871,
      "learning_rate": 1.6877637130801688e-05,
      "loss": 0.0341,
      "step": 1100
    },
    {
      "epoch": 3.51,
      "grad_norm": 0.021344369277358055,
      "learning_rate": 1.6526019690576654e-05,
      "loss": 0.0198,
      "step": 1110
    },
    {
      "epoch": 3.54,
      "grad_norm": 0.05389473959803581,
      "learning_rate": 1.617440225035162e-05,
      "loss": 0.0022,
      "step": 1120
    },
    {
      "epoch": 3.57,
      "grad_norm": 0.0023719954770058393,
      "learning_rate": 1.5822784810126583e-05,
      "loss": 0.0069,
      "step": 1130
    },
    {
      "epoch": 3.6,
      "grad_norm": 0.0039686476811766624,
      "learning_rate": 1.547116736990155e-05,
      "loss": 0.0007,
      "step": 1140
    },
    {
      "epoch": 3.64,
      "grad_norm": 0.004471825901418924,
      "learning_rate": 1.5119549929676513e-05,
      "loss": 0.0003,
      "step": 1150
    },
    {
      "epoch": 3.67,
      "grad_norm": 0.0025274273939430714,
      "learning_rate": 1.4767932489451477e-05,
      "loss": 0.0004,
      "step": 1160
    },
    {
      "epoch": 3.7,
      "grad_norm": 0.005175269674509764,
      "learning_rate": 1.4416315049226442e-05,
      "loss": 0.0037,
      "step": 1170
    },
    {
      "epoch": 3.73,
      "grad_norm": 0.4415951073169708,
      "learning_rate": 1.4064697609001406e-05,
      "loss": 0.0022,
      "step": 1180
    },
    {
      "epoch": 3.76,
      "grad_norm": 0.02205347828567028,
      "learning_rate": 1.3713080168776371e-05,
      "loss": 0.0049,
      "step": 1190
    },
    {
      "epoch": 3.79,
      "grad_norm": 0.0073618339374661446,
      "learning_rate": 1.3361462728551336e-05,
      "loss": 0.0005,
      "step": 1200
    },
    {
      "epoch": 3.83,
      "grad_norm": 0.01835988275706768,
      "learning_rate": 1.3009845288326302e-05,
      "loss": 0.0002,
      "step": 1210
    },
    {
      "epoch": 3.86,
      "grad_norm": 0.006863409653306007,
      "learning_rate": 1.2658227848101267e-05,
      "loss": 0.0012,
      "step": 1220
    },
    {
      "epoch": 3.89,
      "grad_norm": 0.009447610937058926,
      "learning_rate": 1.2306610407876231e-05,
      "loss": 0.0008,
      "step": 1230
    },
    {
      "epoch": 3.92,
      "grad_norm": 0.0040683564729988575,
      "learning_rate": 1.1954992967651196e-05,
      "loss": 0.0003,
      "step": 1240
    },
    {
      "epoch": 3.95,
      "grad_norm": 0.0015517990104854107,
      "learning_rate": 1.160337552742616e-05,
      "loss": 0.0004,
      "step": 1250
    },
    {
      "epoch": 3.98,
      "grad_norm": 0.0038061882369220257,
      "learning_rate": 1.1251758087201125e-05,
      "loss": 0.0002,
      "step": 1260
    },
    {
      "epoch": 4.0,
      "eval_accuracy": 0.9992091735863978,
      "eval_loss": 0.0019393304828554392,
      "eval_runtime": 17.2093,
      "eval_samples_per_second": 146.955,
      "eval_steps_per_second": 18.42,
      "step": 1265
    },
    {
      "epoch": 4.02,
      "grad_norm": 0.021453727036714554,
      "learning_rate": 1.0900140646976091e-05,
      "loss": 0.0005,
      "step": 1270
    },
    {
      "epoch": 4.05,
      "grad_norm": 0.003984913229942322,
      "learning_rate": 1.0548523206751056e-05,
      "loss": 0.0005,
      "step": 1280
    },
    {
      "epoch": 4.08,
      "grad_norm": 0.00517477560788393,
      "learning_rate": 1.019690576652602e-05,
      "loss": 0.001,
      "step": 1290
    },
    {
      "epoch": 4.11,
      "grad_norm": 0.002068225760012865,
      "learning_rate": 9.845288326300985e-06,
      "loss": 0.0002,
      "step": 1300
    },
    {
      "epoch": 4.14,
      "grad_norm": 0.00740728760138154,
      "learning_rate": 9.49367088607595e-06,
      "loss": 0.0002,
      "step": 1310
    },
    {
      "epoch": 4.17,
      "grad_norm": 0.0014793493319302797,
      "learning_rate": 9.142053445850915e-06,
      "loss": 0.0017,
      "step": 1320
    },
    {
      "epoch": 4.21,
      "grad_norm": 0.0028481709305197,
      "learning_rate": 8.79043600562588e-06,
      "loss": 0.0011,
      "step": 1330
    },
    {
      "epoch": 4.24,
      "grad_norm": 0.06655070930719376,
      "learning_rate": 8.438818565400844e-06,
      "loss": 0.0009,
      "step": 1340
    },
    {
      "epoch": 4.27,
      "grad_norm": 0.012430869042873383,
      "learning_rate": 8.08720112517581e-06,
      "loss": 0.0014,
      "step": 1350
    },
    {
      "epoch": 4.3,
      "grad_norm": 0.0027373009361326694,
      "learning_rate": 7.735583684950775e-06,
      "loss": 0.0067,
      "step": 1360
    },
    {
      "epoch": 4.33,
      "grad_norm": 0.0010446230880916119,
      "learning_rate": 7.3839662447257386e-06,
      "loss": 0.0001,
      "step": 1370
    },
    {
      "epoch": 4.36,
      "grad_norm": 0.0015856565441936255,
      "learning_rate": 7.032348804500703e-06,
      "loss": 0.0002,
      "step": 1380
    },
    {
      "epoch": 4.4,
      "grad_norm": 0.0012825512094423175,
      "learning_rate": 6.680731364275668e-06,
      "loss": 0.0003,
      "step": 1390
    },
    {
      "epoch": 4.43,
      "grad_norm": 0.0009867001790553331,
      "learning_rate": 6.329113924050633e-06,
      "loss": 0.0002,
      "step": 1400
    },
    {
      "epoch": 4.46,
      "grad_norm": 0.0017112062778323889,
      "learning_rate": 5.977496483825598e-06,
      "loss": 0.0003,
      "step": 1410
    },
    {
      "epoch": 4.49,
      "grad_norm": 0.0032221453730016947,
      "learning_rate": 5.6258790436005626e-06,
      "loss": 0.0002,
      "step": 1420
    },
    {
      "epoch": 4.52,
      "grad_norm": 0.003395942272618413,
      "learning_rate": 5.274261603375528e-06,
      "loss": 0.0001,
      "step": 1430
    },
    {
      "epoch": 4.55,
      "grad_norm": 0.0007977127097547054,
      "learning_rate": 4.922644163150493e-06,
      "loss": 0.0132,
      "step": 1440
    },
    {
      "epoch": 4.58,
      "grad_norm": 0.0035839290358126163,
      "learning_rate": 4.571026722925457e-06,
      "loss": 0.0001,
      "step": 1450
    },
    {
      "epoch": 4.62,
      "grad_norm": 2.0375587940216064,
      "learning_rate": 4.219409282700422e-06,
      "loss": 0.0166,
      "step": 1460
    },
    {
      "epoch": 4.65,
      "grad_norm": 0.005914665758609772,
      "learning_rate": 3.867791842475387e-06,
      "loss": 0.0001,
      "step": 1470
    },
    {
      "epoch": 4.68,
      "grad_norm": 0.6946305632591248,
      "learning_rate": 3.5161744022503516e-06,
      "loss": 0.0011,
      "step": 1480
    },
    {
      "epoch": 4.71,
      "grad_norm": 0.003537875832989812,
      "learning_rate": 3.1645569620253167e-06,
      "loss": 0.0129,
      "step": 1490
    },
    {
      "epoch": 4.74,
      "grad_norm": 0.01320402417331934,
      "learning_rate": 2.8129395218002813e-06,
      "loss": 0.0092,
      "step": 1500
    },
    {
      "epoch": 4.77,
      "grad_norm": 0.10570605099201202,
      "learning_rate": 2.4613220815752463e-06,
      "loss": 0.0008,
      "step": 1510
    },
    {
      "epoch": 4.81,
      "grad_norm": 0.0033983599860221148,
      "learning_rate": 2.109704641350211e-06,
      "loss": 0.0002,
      "step": 1520
    },
    {
      "epoch": 4.84,
      "grad_norm": 0.0025147399865090847,
      "learning_rate": 1.7580872011251758e-06,
      "loss": 0.0001,
      "step": 1530
    },
    {
      "epoch": 4.87,
      "grad_norm": 0.004903899040073156,
      "learning_rate": 1.4064697609001406e-06,
      "loss": 0.016,
      "step": 1540
    },
    {
      "epoch": 4.9,
      "grad_norm": 0.002249601762741804,
      "learning_rate": 1.0548523206751055e-06,
      "loss": 0.0024,
      "step": 1550
    },
    {
      "epoch": 4.93,
      "grad_norm": 0.005820867605507374,
      "learning_rate": 7.032348804500703e-07,
      "loss": 0.0003,
      "step": 1560
    },
    {
      "epoch": 4.96,
      "grad_norm": 0.46763402223587036,
      "learning_rate": 3.5161744022503516e-07,
      "loss": 0.0006,
      "step": 1570
    },
    {
      "epoch": 5.0,
      "grad_norm": 1.628798246383667,
      "learning_rate": 0.0,
      "loss": 0.0019,
      "step": 1580
    },
    {
      "epoch": 5.0,
      "eval_accuracy": 0.9992091735863978,
      "eval_loss": 0.0018690477591007948,
      "eval_runtime": 17.1455,
      "eval_samples_per_second": 147.502,
      "eval_steps_per_second": 18.489,
      "step": 1580
    },
    {
      "epoch": 5.0,
      "step": 1580,
      "total_flos": 3.915825219204563e+18,
      "train_loss": 0.0232889701421461,
      "train_runtime": 841.0378,
      "train_samples_per_second": 60.122,
      "train_steps_per_second": 1.879
    }
  ],
  "logging_steps": 10,
  "max_steps": 1580,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 5,
  "save_steps": 500,
  "total_flos": 3.915825219204563e+18,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}