{ |
|
"best_metric": 0.18490658700466156, |
|
"best_model_checkpoint": "vit-base-patch16-224-in21k-crack-detectorVITmain50epochs\\checkpoint-3344", |
|
"epoch": 2.999775734469612, |
|
"eval_steps": 500, |
|
"global_step": 3344, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 8.976660682226213e-08, |
|
"loss": 1.3961, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 1.7953321364452426e-07, |
|
"loss": 1.395, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 2.692998204667864e-07, |
|
"loss": 1.3978, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 3.590664272890485e-07, |
|
"loss": 1.3963, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 4.4883303411131064e-07, |
|
"loss": 1.3907, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 5.385996409335728e-07, |
|
"loss": 1.396, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 6.283662477558349e-07, |
|
"loss": 1.3885, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 7.18132854578097e-07, |
|
"loss": 1.387, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 8.078994614003591e-07, |
|
"loss": 1.3894, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 8.976660682226213e-07, |
|
"loss": 1.3806, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 9.874326750448833e-07, |
|
"loss": 1.383, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.0771992818671456e-06, |
|
"loss": 1.3764, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1.1669658886894075e-06, |
|
"loss": 1.3688, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 1.2567324955116697e-06, |
|
"loss": 1.3708, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 1.3464991023339318e-06, |
|
"loss": 1.366, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 1.436265709156194e-06, |
|
"loss": 1.371, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 1.5260323159784561e-06, |
|
"loss": 1.3558, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 1.6157989228007182e-06, |
|
"loss": 1.3584, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 1.7055655296229805e-06, |
|
"loss": 1.3409, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 1.7953321364452425e-06, |
|
"loss": 1.346, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 1.8850987432675046e-06, |
|
"loss": 1.3355, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 1.9748653500897667e-06, |
|
"loss": 1.3323, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 2.064631956912029e-06, |
|
"loss": 1.3231, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 2.1543985637342912e-06, |
|
"loss": 1.3129, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 2.244165170556553e-06, |
|
"loss": 1.3178, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 2.333931777378815e-06, |
|
"loss": 1.2953, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 2.423698384201077e-06, |
|
"loss": 1.2938, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 2.5134649910233395e-06, |
|
"loss": 1.2849, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 2.6032315978456015e-06, |
|
"loss": 1.2813, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 2.6929982046678636e-06, |
|
"loss": 1.2811, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 2.7827648114901257e-06, |
|
"loss": 1.2526, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 2.872531418312388e-06, |
|
"loss": 1.2438, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 2.9622980251346502e-06, |
|
"loss": 1.2145, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 3.0520646319569123e-06, |
|
"loss": 1.2146, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 3.1418312387791743e-06, |
|
"loss": 1.1883, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 3.2315978456014364e-06, |
|
"loss": 1.1891, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 3.321364452423698e-06, |
|
"loss": 1.1723, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 3.411131059245961e-06, |
|
"loss": 1.1417, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 3.500897666068223e-06, |
|
"loss": 1.1338, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 3.590664272890485e-06, |
|
"loss": 1.1004, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 3.680430879712747e-06, |
|
"loss": 1.0873, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 3.770197486535009e-06, |
|
"loss": 1.0358, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 3.859964093357271e-06, |
|
"loss": 1.0517, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 3.949730700179533e-06, |
|
"loss": 1.0139, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 4.039497307001795e-06, |
|
"loss": 1.0294, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 4.129263913824058e-06, |
|
"loss": 1.0171, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 4.21903052064632e-06, |
|
"loss": 0.9827, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 4.3087971274685824e-06, |
|
"loss": 0.9382, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 4.3985637342908445e-06, |
|
"loss": 0.9145, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 4.488330341113106e-06, |
|
"loss": 0.9276, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 4.578096947935368e-06, |
|
"loss": 0.8801, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 4.66786355475763e-06, |
|
"loss": 0.8906, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 4.757630161579892e-06, |
|
"loss": 0.9025, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 4.847396768402154e-06, |
|
"loss": 0.8621, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 4.937163375224417e-06, |
|
"loss": 0.8653, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 5.026929982046679e-06, |
|
"loss": 0.8292, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 5.116696588868941e-06, |
|
"loss": 0.8195, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 5.206463195691203e-06, |
|
"loss": 0.8026, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 5.296229802513465e-06, |
|
"loss": 0.841, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 5.385996409335727e-06, |
|
"loss": 0.778, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 5.475763016157989e-06, |
|
"loss": 0.7706, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 5.565529622980251e-06, |
|
"loss": 0.733, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 5.655296229802514e-06, |
|
"loss": 0.7437, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 5.745062836624776e-06, |
|
"loss": 0.7644, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 5.834829443447038e-06, |
|
"loss": 0.7125, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 5.9245960502693004e-06, |
|
"loss": 0.7146, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 6.0143626570915625e-06, |
|
"loss": 0.7201, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 6.1041292639138246e-06, |
|
"loss": 0.6851, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 6.193895870736087e-06, |
|
"loss": 0.6417, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 6.283662477558349e-06, |
|
"loss": 0.6656, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 6.373429084380611e-06, |
|
"loss": 0.6122, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 6.463195691202873e-06, |
|
"loss": 0.6536, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 6.552962298025135e-06, |
|
"loss": 0.6315, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 6.642728904847396e-06, |
|
"loss": 0.6618, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 6.732495511669659e-06, |
|
"loss": 0.6346, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 6.822262118491922e-06, |
|
"loss": 0.6866, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 6.912028725314183e-06, |
|
"loss": 0.6093, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 7.001795332136446e-06, |
|
"loss": 0.6116, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 7.091561938958707e-06, |
|
"loss": 0.602, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 7.18132854578097e-06, |
|
"loss": 0.6266, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 7.271095152603231e-06, |
|
"loss": 0.6082, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 7.360861759425494e-06, |
|
"loss": 0.6194, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 7.4506283662477555e-06, |
|
"loss": 0.6277, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 7.540394973070018e-06, |
|
"loss": 0.609, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 7.630161579892281e-06, |
|
"loss": 0.6252, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 7.719928186714543e-06, |
|
"loss": 0.5813, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 7.809694793536805e-06, |
|
"loss": 0.5799, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 7.899461400359067e-06, |
|
"loss": 0.6398, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 7.98922800718133e-06, |
|
"loss": 0.5549, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 8.07899461400359e-06, |
|
"loss": 0.6023, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 8.168761220825854e-06, |
|
"loss": 0.582, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 8.258527827648117e-06, |
|
"loss": 0.544, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 8.348294434470378e-06, |
|
"loss": 0.5912, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 8.43806104129264e-06, |
|
"loss": 0.5461, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 8.527827648114902e-06, |
|
"loss": 0.5238, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 8.617594254937165e-06, |
|
"loss": 0.5558, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 8.707360861759426e-06, |
|
"loss": 0.5371, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 8.797127468581689e-06, |
|
"loss": 0.5529, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 8.88689407540395e-06, |
|
"loss": 0.5691, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 8.976660682226211e-06, |
|
"loss": 0.5567, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 9.066427289048474e-06, |
|
"loss": 0.5135, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 9.156193895870736e-06, |
|
"loss": 0.5099, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 9.245960502692998e-06, |
|
"loss": 0.5422, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 9.33572710951526e-06, |
|
"loss": 0.5106, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 9.425493716337523e-06, |
|
"loss": 0.4892, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 9.515260323159784e-06, |
|
"loss": 0.4541, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 9.605026929982047e-06, |
|
"loss": 0.4675, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 9.694793536804308e-06, |
|
"loss": 0.5123, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 9.784560143626571e-06, |
|
"loss": 0.5089, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 9.874326750448834e-06, |
|
"loss": 0.5233, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 9.964093357271095e-06, |
|
"loss": 0.5622, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"eval_accuracy": { |
|
"accuracy": 0.8736263736263736 |
|
}, |
|
"eval_f1": { |
|
"f1": 0.8653360894930927 |
|
}, |
|
"eval_loss": 0.4108576774597168, |
|
"eval_precision": { |
|
"precision": 0.8669392826942903 |
|
}, |
|
"eval_recall": { |
|
"recall": 0.8641606609285752 |
|
}, |
|
"eval_runtime": 236.9507, |
|
"eval_samples_per_second": 75.273, |
|
"eval_steps_per_second": 4.706, |
|
"step": 1114 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.0053859964093358e-05, |
|
"loss": 0.4957, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 1.0143626570915619e-05, |
|
"loss": 0.5729, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 1.0233393177737882e-05, |
|
"loss": 0.4884, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 1.0323159784560143e-05, |
|
"loss": 0.4744, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 1.0412926391382406e-05, |
|
"loss": 0.4683, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 1.0502692998204669e-05, |
|
"loss": 0.5, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 1.059245960502693e-05, |
|
"loss": 0.4649, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 1.0682226211849193e-05, |
|
"loss": 0.508, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 1.0771992818671454e-05, |
|
"loss": 0.4898, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 1.0861759425493717e-05, |
|
"loss": 0.4722, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 1.0951526032315979e-05, |
|
"loss": 0.4814, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 1.1041292639138241e-05, |
|
"loss": 0.4505, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 1.1131059245960503e-05, |
|
"loss": 0.4505, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 1.1220825852782766e-05, |
|
"loss": 0.4187, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 1.1310592459605028e-05, |
|
"loss": 0.4603, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 1.140035906642729e-05, |
|
"loss": 0.4721, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 1.1490125673249553e-05, |
|
"loss": 0.4368, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 1.1579892280071814e-05, |
|
"loss": 0.4722, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 1.1669658886894077e-05, |
|
"loss": 0.4718, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 1.1759425493716338e-05, |
|
"loss": 0.4899, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 1.1849192100538601e-05, |
|
"loss": 0.4758, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 1.1938958707360862e-05, |
|
"loss": 0.4523, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 1.2028725314183125e-05, |
|
"loss": 0.485, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 1.2118491921005388e-05, |
|
"loss": 0.4977, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 1.2208258527827649e-05, |
|
"loss": 0.4305, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 1.2298025134649912e-05, |
|
"loss": 0.4221, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 1.2387791741472173e-05, |
|
"loss": 0.4975, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 1.2477558348294434e-05, |
|
"loss": 0.4506, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 1.2567324955116697e-05, |
|
"loss": 0.4326, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 1.2657091561938959e-05, |
|
"loss": 0.4479, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 1.2746858168761221e-05, |
|
"loss": 0.4468, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 1.2836624775583483e-05, |
|
"loss": 0.402, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 1.2926391382405746e-05, |
|
"loss": 0.4711, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 1.3016157989228009e-05, |
|
"loss": 0.4686, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 1.310592459605027e-05, |
|
"loss": 0.4639, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 1.3195691202872531e-05, |
|
"loss": 0.435, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 1.3285457809694792e-05, |
|
"loss": 0.3868, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 1.3375224416517057e-05, |
|
"loss": 0.4303, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 1.3464991023339318e-05, |
|
"loss": 0.432, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 1.355475763016158e-05, |
|
"loss": 0.4206, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 1.3644524236983844e-05, |
|
"loss": 0.4282, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 1.3734290843806105e-05, |
|
"loss": 0.4284, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 1.3824057450628366e-05, |
|
"loss": 0.4096, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 1.3913824057450627e-05, |
|
"loss": 0.4259, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 1.4003590664272892e-05, |
|
"loss": 0.4436, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 1.4093357271095153e-05, |
|
"loss": 0.4453, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 1.4183123877917415e-05, |
|
"loss": 0.4065, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 1.4272890484739679e-05, |
|
"loss": 0.4011, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 1.436265709156194e-05, |
|
"loss": 0.4488, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 1.4452423698384202e-05, |
|
"loss": 0.4122, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 1.4542190305206463e-05, |
|
"loss": 0.429, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 1.4631956912028727e-05, |
|
"loss": 0.4377, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 1.4721723518850989e-05, |
|
"loss": 0.4186, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 1.481149012567325e-05, |
|
"loss": 0.4063, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 1.4901256732495511e-05, |
|
"loss": 0.4055, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 1.4991023339317776e-05, |
|
"loss": 0.4173, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 1.5080789946140037e-05, |
|
"loss": 0.4361, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 1.5170556552962298e-05, |
|
"loss": 0.4528, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 1.5260323159784563e-05, |
|
"loss": 0.3665, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 1.5350089766606824e-05, |
|
"loss": 0.3913, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 1.5439856373429085e-05, |
|
"loss": 0.4002, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 1.5529622980251346e-05, |
|
"loss": 0.4004, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 1.561938958707361e-05, |
|
"loss": 0.4432, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 1.5709156193895872e-05, |
|
"loss": 0.3743, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 1.5798922800718133e-05, |
|
"loss": 0.3972, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 1.5888689407540398e-05, |
|
"loss": 0.368, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 1.597845601436266e-05, |
|
"loss": 0.4525, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 1.606822262118492e-05, |
|
"loss": 0.3962, |
|
"step": 1790 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 1.615798922800718e-05, |
|
"loss": 0.3888, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 1.6247755834829446e-05, |
|
"loss": 0.4111, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 1.6337522441651707e-05, |
|
"loss": 0.4084, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 1.642728904847397e-05, |
|
"loss": 0.4181, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 1.6517055655296233e-05, |
|
"loss": 0.4022, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 1.6606822262118494e-05, |
|
"loss": 0.3886, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 1.6696588868940756e-05, |
|
"loss": 0.4092, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 1.6786355475763017e-05, |
|
"loss": 0.3843, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 1.687612208258528e-05, |
|
"loss": 0.4237, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 1.6965888689407543e-05, |
|
"loss": 0.3801, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 1.7055655296229804e-05, |
|
"loss": 0.385, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 1.7145421903052065e-05, |
|
"loss": 0.4452, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 1.723518850987433e-05, |
|
"loss": 0.5147, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 1.732495511669659e-05, |
|
"loss": 0.353, |
|
"step": 1930 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 1.7414721723518852e-05, |
|
"loss": 0.3816, |
|
"step": 1940 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 1.7504488330341113e-05, |
|
"loss": 0.361, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 1.7594254937163378e-05, |
|
"loss": 0.3884, |
|
"step": 1960 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 1.768402154398564e-05, |
|
"loss": 0.3391, |
|
"step": 1970 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 1.77737881508079e-05, |
|
"loss": 0.4211, |
|
"step": 1980 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 1.786355475763016e-05, |
|
"loss": 0.3796, |
|
"step": 1990 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 1.7953321364452423e-05, |
|
"loss": 0.3881, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 1.8043087971274687e-05, |
|
"loss": 0.3904, |
|
"step": 2010 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 1.813285457809695e-05, |
|
"loss": 0.3845, |
|
"step": 2020 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 1.822262118491921e-05, |
|
"loss": 0.3721, |
|
"step": 2030 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 1.831238779174147e-05, |
|
"loss": 0.4141, |
|
"step": 2040 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 1.8402154398563732e-05, |
|
"loss": 0.3682, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 1.8491921005385997e-05, |
|
"loss": 0.3826, |
|
"step": 2060 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 1.8581687612208258e-05, |
|
"loss": 0.3588, |
|
"step": 2070 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 1.867145421903052e-05, |
|
"loss": 0.3663, |
|
"step": 2080 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 1.8761220825852784e-05, |
|
"loss": 0.4186, |
|
"step": 2090 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 1.8850987432675045e-05, |
|
"loss": 0.3756, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 1.8940754039497306e-05, |
|
"loss": 0.3655, |
|
"step": 2110 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 1.9030520646319568e-05, |
|
"loss": 0.4158, |
|
"step": 2120 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 1.9120287253141832e-05, |
|
"loss": 0.3985, |
|
"step": 2130 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 1.9210053859964093e-05, |
|
"loss": 0.3955, |
|
"step": 2140 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 1.9299820466786355e-05, |
|
"loss": 0.3757, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 1.9389587073608616e-05, |
|
"loss": 0.3712, |
|
"step": 2160 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 1.947935368043088e-05, |
|
"loss": 0.3723, |
|
"step": 2170 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 1.9569120287253142e-05, |
|
"loss": 0.367, |
|
"step": 2180 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 1.9658886894075403e-05, |
|
"loss": 0.3659, |
|
"step": 2190 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 1.9748653500897668e-05, |
|
"loss": 0.3752, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 1.983842010771993e-05, |
|
"loss": 0.318, |
|
"step": 2210 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 1.992818671454219e-05, |
|
"loss": 0.3696, |
|
"step": 2220 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"eval_accuracy": { |
|
"accuracy": 0.9249271137026239 |
|
}, |
|
"eval_f1": { |
|
"f1": 0.9207602119769538 |
|
}, |
|
"eval_loss": 0.2492757886648178, |
|
"eval_precision": { |
|
"precision": 0.9199949527238288 |
|
}, |
|
"eval_recall": { |
|
"recall": 0.9219224853720074 |
|
}, |
|
"eval_runtime": 166.1808, |
|
"eval_samples_per_second": 107.329, |
|
"eval_steps_per_second": 6.71, |
|
"step": 2229 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 2.001795332136445e-05, |
|
"loss": 0.3681, |
|
"step": 2230 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 2.0107719928186716e-05, |
|
"loss": 0.321, |
|
"step": 2240 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 2.0197486535008977e-05, |
|
"loss": 0.332, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 2.0287253141831238e-05, |
|
"loss": 0.3343, |
|
"step": 2260 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 2.0377019748653503e-05, |
|
"loss": 0.3341, |
|
"step": 2270 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 2.0466786355475764e-05, |
|
"loss": 0.3473, |
|
"step": 2280 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 2.0556552962298025e-05, |
|
"loss": 0.3479, |
|
"step": 2290 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 2.0646319569120286e-05, |
|
"loss": 0.3235, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 2.073608617594255e-05, |
|
"loss": 0.3455, |
|
"step": 2310 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 2.0825852782764812e-05, |
|
"loss": 0.3675, |
|
"step": 2320 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 2.0915619389587073e-05, |
|
"loss": 0.3267, |
|
"step": 2330 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 2.1005385996409338e-05, |
|
"loss": 0.3588, |
|
"step": 2340 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 2.10951526032316e-05, |
|
"loss": 0.3762, |
|
"step": 2350 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 2.118491921005386e-05, |
|
"loss": 0.3474, |
|
"step": 2360 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 2.1274685816876122e-05, |
|
"loss": 0.3614, |
|
"step": 2370 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 2.1364452423698386e-05, |
|
"loss": 0.3611, |
|
"step": 2380 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 2.1454219030520648e-05, |
|
"loss": 0.4051, |
|
"step": 2390 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 2.154398563734291e-05, |
|
"loss": 0.3314, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 2.163375224416517e-05, |
|
"loss": 0.3427, |
|
"step": 2410 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 2.1723518850987435e-05, |
|
"loss": 0.2886, |
|
"step": 2420 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 2.1813285457809696e-05, |
|
"loss": 0.3262, |
|
"step": 2430 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 2.1903052064631957e-05, |
|
"loss": 0.3516, |
|
"step": 2440 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 2.199281867145422e-05, |
|
"loss": 0.3744, |
|
"step": 2450 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 2.2082585278276483e-05, |
|
"loss": 0.3545, |
|
"step": 2460 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 2.2172351885098744e-05, |
|
"loss": 0.2993, |
|
"step": 2470 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 2.2262118491921005e-05, |
|
"loss": 0.3412, |
|
"step": 2480 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 2.235188509874327e-05, |
|
"loss": 0.3823, |
|
"step": 2490 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 2.244165170556553e-05, |
|
"loss": 0.3206, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 2.2531418312387792e-05, |
|
"loss": 0.2969, |
|
"step": 2510 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 2.2621184919210057e-05, |
|
"loss": 0.3306, |
|
"step": 2520 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 2.2710951526032318e-05, |
|
"loss": 0.3641, |
|
"step": 2530 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 2.280071813285458e-05, |
|
"loss": 0.3588, |
|
"step": 2540 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 2.289048473967684e-05, |
|
"loss": 0.3632, |
|
"step": 2550 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 2.2980251346499105e-05, |
|
"loss": 0.3072, |
|
"step": 2560 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 2.3070017953321366e-05, |
|
"loss": 0.3606, |
|
"step": 2570 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 2.3159784560143628e-05, |
|
"loss": 0.4114, |
|
"step": 2580 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 2.3249551166965892e-05, |
|
"loss": 0.282, |
|
"step": 2590 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 2.3339317773788153e-05, |
|
"loss": 0.3336, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 2.3429084380610415e-05, |
|
"loss": 0.3406, |
|
"step": 2610 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 2.3518850987432676e-05, |
|
"loss": 0.3433, |
|
"step": 2620 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 2.360861759425494e-05, |
|
"loss": 0.3169, |
|
"step": 2630 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 2.3698384201077202e-05, |
|
"loss": 0.3855, |
|
"step": 2640 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 2.3788150807899463e-05, |
|
"loss": 0.3036, |
|
"step": 2650 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 2.3877917414721724e-05, |
|
"loss": 0.3076, |
|
"step": 2660 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 2.396768402154399e-05, |
|
"loss": 0.3052, |
|
"step": 2670 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 2.405745062836625e-05, |
|
"loss": 0.3253, |
|
"step": 2680 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 2.414721723518851e-05, |
|
"loss": 0.3235, |
|
"step": 2690 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 2.4236983842010776e-05, |
|
"loss": 0.2835, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 2.4326750448833037e-05, |
|
"loss": 0.2824, |
|
"step": 2710 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 2.4416517055655298e-05, |
|
"loss": 0.3183, |
|
"step": 2720 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 2.450628366247756e-05, |
|
"loss": 0.3281, |
|
"step": 2730 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 2.4596050269299824e-05, |
|
"loss": 0.2939, |
|
"step": 2740 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 2.4685816876122085e-05, |
|
"loss": 0.3679, |
|
"step": 2750 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 2.4775583482944346e-05, |
|
"loss": 0.3165, |
|
"step": 2760 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 2.4865350089766608e-05, |
|
"loss": 0.3146, |
|
"step": 2770 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 2.495511669658887e-05, |
|
"loss": 0.2981, |
|
"step": 2780 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 2.5044883303411134e-05, |
|
"loss": 0.3099, |
|
"step": 2790 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 2.5134649910233395e-05, |
|
"loss": 0.267, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 2.5224416517055656e-05, |
|
"loss": 0.3317, |
|
"step": 2810 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 2.5314183123877917e-05, |
|
"loss": 0.3241, |
|
"step": 2820 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 2.5403949730700182e-05, |
|
"loss": 0.371, |
|
"step": 2830 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 2.5493716337522443e-05, |
|
"loss": 0.3431, |
|
"step": 2840 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 2.5583482944344704e-05, |
|
"loss": 0.3218, |
|
"step": 2850 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 2.5673249551166965e-05, |
|
"loss": 0.3222, |
|
"step": 2860 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 2.5763016157989227e-05, |
|
"loss": 0.3176, |
|
"step": 2870 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 2.585278276481149e-05, |
|
"loss": 0.3256, |
|
"step": 2880 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 2.5942549371633752e-05, |
|
"loss": 0.2971, |
|
"step": 2890 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 2.6032315978456017e-05, |
|
"loss": 0.253, |
|
"step": 2900 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 2.6122082585278278e-05, |
|
"loss": 0.3261, |
|
"step": 2910 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 2.621184919210054e-05, |
|
"loss": 0.3325, |
|
"step": 2920 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 2.63016157989228e-05, |
|
"loss": 0.3005, |
|
"step": 2930 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 2.6391382405745062e-05, |
|
"loss": 0.2897, |
|
"step": 2940 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 2.6481149012567323e-05, |
|
"loss": 0.3249, |
|
"step": 2950 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 2.6570915619389584e-05, |
|
"loss": 0.3647, |
|
"step": 2960 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 2.6660682226211852e-05, |
|
"loss": 0.3034, |
|
"step": 2970 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 2.6750448833034114e-05, |
|
"loss": 0.2735, |
|
"step": 2980 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 2.6840215439856375e-05, |
|
"loss": 0.3122, |
|
"step": 2990 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 2.6929982046678636e-05, |
|
"loss": 0.2622, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 2.7019748653500897e-05, |
|
"loss": 0.322, |
|
"step": 3010 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 2.710951526032316e-05, |
|
"loss": 0.2671, |
|
"step": 3020 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 2.719928186714542e-05, |
|
"loss": 0.3392, |
|
"step": 3030 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 2.7289048473967688e-05, |
|
"loss": 0.2507, |
|
"step": 3040 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 2.737881508078995e-05, |
|
"loss": 0.294, |
|
"step": 3050 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 2.746858168761221e-05, |
|
"loss": 0.3119, |
|
"step": 3060 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 2.755834829443447e-05, |
|
"loss": 0.2956, |
|
"step": 3070 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 2.7648114901256732e-05, |
|
"loss": 0.2744, |
|
"step": 3080 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 2.7737881508078994e-05, |
|
"loss": 0.279, |
|
"step": 3090 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 2.7827648114901255e-05, |
|
"loss": 0.379, |
|
"step": 3100 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 2.7917414721723523e-05, |
|
"loss": 0.2667, |
|
"step": 3110 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 2.8007181328545784e-05, |
|
"loss": 0.2732, |
|
"step": 3120 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 2.8096947935368045e-05, |
|
"loss": 0.311, |
|
"step": 3130 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 2.8186714542190307e-05, |
|
"loss": 0.3404, |
|
"step": 3140 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 2.8276481149012568e-05, |
|
"loss": 0.2479, |
|
"step": 3150 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 2.836624775583483e-05, |
|
"loss": 0.3057, |
|
"step": 3160 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 2.845601436265709e-05, |
|
"loss": 0.3075, |
|
"step": 3170 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 2.8545780969479358e-05, |
|
"loss": 0.3008, |
|
"step": 3180 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 2.863554757630162e-05, |
|
"loss": 0.2372, |
|
"step": 3190 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 2.872531418312388e-05, |
|
"loss": 0.2617, |
|
"step": 3200 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 2.8815080789946142e-05, |
|
"loss": 0.3, |
|
"step": 3210 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 2.8904847396768403e-05, |
|
"loss": 0.2749, |
|
"step": 3220 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 2.8994614003590664e-05, |
|
"loss": 0.2742, |
|
"step": 3230 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 2.9084380610412926e-05, |
|
"loss": 0.2844, |
|
"step": 3240 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 2.9174147217235194e-05, |
|
"loss": 0.2909, |
|
"step": 3250 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 2.9263913824057455e-05, |
|
"loss": 0.3045, |
|
"step": 3260 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 2.9353680430879716e-05, |
|
"loss": 0.2875, |
|
"step": 3270 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 2.9443447037701977e-05, |
|
"loss": 0.2963, |
|
"step": 3280 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 2.953321364452424e-05, |
|
"loss": 0.314, |
|
"step": 3290 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 2.96229802513465e-05, |
|
"loss": 0.3005, |
|
"step": 3300 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 2.971274685816876e-05, |
|
"loss": 0.3127, |
|
"step": 3310 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 2.9802513464991022e-05, |
|
"loss": 0.3113, |
|
"step": 3320 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 2.989228007181329e-05, |
|
"loss": 0.2886, |
|
"step": 3330 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"learning_rate": 2.998204667863555e-05, |
|
"loss": 0.321, |
|
"step": 3340 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"eval_accuracy": { |
|
"accuracy": 0.9410742318905584 |
|
}, |
|
"eval_f1": { |
|
"f1": 0.9378351696354423 |
|
}, |
|
"eval_loss": 0.18490658700466156, |
|
"eval_precision": { |
|
"precision": 0.9376663800198108 |
|
}, |
|
"eval_recall": { |
|
"recall": 0.9381803360170522 |
|
}, |
|
"eval_runtime": 165.2584, |
|
"eval_samples_per_second": 107.928, |
|
"eval_steps_per_second": 6.747, |
|
"step": 3344 |
|
} |
|
], |
|
"logging_steps": 10, |
|
"max_steps": 55700, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 50, |
|
"save_steps": 500, |
|
"total_flos": 1.6585132906586604e+19, |
|
"train_batch_size": 16, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |