swin-tiny-patch4-window7-224-crack-detectorMAIN50epochsFINAL/tmp-checkpoint-18955/trainer_state.json
{
"best_metric": 0.06340872496366501,
"best_model_checkpoint": "swin-tiny-patch4-window7-224-crack-detectorMAIN50epochsFINAL\\checkpoint-16167",
"epoch": 34.0,
"eval_steps": 500,
"global_step": 18955,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{ | |
"epoch": 0.02, | |
"learning_rate": 1.7953321364452426e-07, | |
"loss": 1.4257, | |
"step": 10 | |
}, | |
{ | |
"epoch": 0.04, | |
"learning_rate": 3.590664272890485e-07, | |
"loss": 1.4227, | |
"step": 20 | |
}, | |
{ | |
"epoch": 0.05, | |
"learning_rate": 5.385996409335728e-07, | |
"loss": 1.4112, | |
"step": 30 | |
}, | |
{ | |
"epoch": 0.07, | |
"learning_rate": 7.18132854578097e-07, | |
"loss": 1.4052, | |
"step": 40 | |
}, | |
{ | |
"epoch": 0.09, | |
"learning_rate": 8.976660682226213e-07, | |
"loss": 1.3954, | |
"step": 50 | |
}, | |
{ | |
"epoch": 0.11, | |
"learning_rate": 1.0771992818671456e-06, | |
"loss": 1.3978, | |
"step": 60 | |
}, | |
{ | |
"epoch": 0.13, | |
"learning_rate": 1.2567324955116697e-06, | |
"loss": 1.3862, | |
"step": 70 | |
}, | |
{ | |
"epoch": 0.14, | |
"learning_rate": 1.436265709156194e-06, | |
"loss": 1.3651, | |
"step": 80 | |
}, | |
{ | |
"epoch": 0.16, | |
"learning_rate": 1.6157989228007182e-06, | |
"loss": 1.359, | |
"step": 90 | |
}, | |
{ | |
"epoch": 0.18, | |
"learning_rate": 1.7953321364452425e-06, | |
"loss": 1.3476, | |
"step": 100 | |
}, | |
{ | |
"epoch": 0.2, | |
"learning_rate": 1.9748653500897667e-06, | |
"loss": 1.3303, | |
"step": 110 | |
}, | |
{ | |
"epoch": 0.22, | |
"learning_rate": 2.1543985637342912e-06, | |
"loss": 1.3047, | |
"step": 120 | |
}, | |
{ | |
"epoch": 0.23, | |
"learning_rate": 2.333931777378815e-06, | |
"loss": 1.2944, | |
"step": 130 | |
}, | |
{ | |
"epoch": 0.25, | |
"learning_rate": 2.5134649910233395e-06, | |
"loss": 1.284, | |
"step": 140 | |
}, | |
{ | |
"epoch": 0.27, | |
"learning_rate": 2.6929982046678636e-06, | |
"loss": 1.23, | |
"step": 150 | |
}, | |
{ | |
"epoch": 0.29, | |
"learning_rate": 2.872531418312388e-06, | |
"loss": 1.2103, | |
"step": 160 | |
}, | |
{ | |
"epoch": 0.3, | |
"learning_rate": 3.0520646319569123e-06, | |
"loss": 1.187, | |
"step": 170 | |
}, | |
{ | |
"epoch": 0.32, | |
"learning_rate": 3.2315978456014364e-06, | |
"loss": 1.1591, | |
"step": 180 | |
}, | |
{ | |
"epoch": 0.34, | |
"learning_rate": 3.411131059245961e-06, | |
"loss": 1.144, | |
"step": 190 | |
}, | |
{ | |
"epoch": 0.36, | |
"learning_rate": 3.590664272890485e-06, | |
"loss": 1.0904, | |
"step": 200 | |
}, | |
{ | |
"epoch": 0.38, | |
"learning_rate": 3.770197486535009e-06, | |
"loss": 1.0401, | |
"step": 210 | |
}, | |
{ | |
"epoch": 0.39, | |
"learning_rate": 3.949730700179533e-06, | |
"loss": 1.0281, | |
"step": 220 | |
}, | |
{ | |
"epoch": 0.41, | |
"learning_rate": 4.129263913824058e-06, | |
"loss": 0.9577, | |
"step": 230 | |
}, | |
{ | |
"epoch": 0.43, | |
"learning_rate": 4.3087971274685824e-06, | |
"loss": 0.9251, | |
"step": 240 | |
}, | |
{ | |
"epoch": 0.45, | |
"learning_rate": 4.488330341113106e-06, | |
"loss": 0.8964, | |
"step": 250 | |
}, | |
{ | |
"epoch": 0.47, | |
"learning_rate": 4.66786355475763e-06, | |
"loss": 0.8897, | |
"step": 260 | |
}, | |
{ | |
"epoch": 0.48, | |
"learning_rate": 4.847396768402154e-06, | |
"loss": 0.8481, | |
"step": 270 | |
}, | |
{ | |
"epoch": 0.5, | |
"learning_rate": 5.026929982046679e-06, | |
"loss": 0.8048, | |
"step": 280 | |
}, | |
{ | |
"epoch": 0.52, | |
"learning_rate": 5.206463195691203e-06, | |
"loss": 0.8014, | |
"step": 290 | |
}, | |
{ | |
"epoch": 0.54, | |
"learning_rate": 5.385996409335727e-06, | |
"loss": 0.7511, | |
"step": 300 | |
}, | |
{ | |
"epoch": 0.56, | |
"learning_rate": 5.565529622980251e-06, | |
"loss": 0.741, | |
"step": 310 | |
}, | |
{ | |
"epoch": 0.57, | |
"learning_rate": 5.745062836624776e-06, | |
"loss": 0.7327, | |
"step": 320 | |
}, | |
{ | |
"epoch": 0.59, | |
"learning_rate": 5.9245960502693004e-06, | |
"loss": 0.7492, | |
"step": 330 | |
}, | |
{ | |
"epoch": 0.61, | |
"learning_rate": 6.1041292639138246e-06, | |
"loss": 0.7613, | |
"step": 340 | |
}, | |
{ | |
"epoch": 0.63, | |
"learning_rate": 6.283662477558349e-06, | |
"loss": 0.6902, | |
"step": 350 | |
}, | |
{ | |
"epoch": 0.65, | |
"learning_rate": 6.463195691202873e-06, | |
"loss": 0.6687, | |
"step": 360 | |
}, | |
{ | |
"epoch": 0.66, | |
"learning_rate": 6.642728904847396e-06, | |
"loss": 0.6623, | |
"step": 370 | |
}, | |
{ | |
"epoch": 0.68, | |
"learning_rate": 6.822262118491922e-06, | |
"loss": 0.6901, | |
"step": 380 | |
}, | |
{ | |
"epoch": 0.7, | |
"learning_rate": 7.001795332136446e-06, | |
"loss": 0.6638, | |
"step": 390 | |
}, | |
{ | |
"epoch": 0.72, | |
"learning_rate": 7.18132854578097e-06, | |
"loss": 0.6414, | |
"step": 400 | |
}, | |
{ | |
"epoch": 0.74, | |
"learning_rate": 7.360861759425494e-06, | |
"loss": 0.6877, | |
"step": 410 | |
}, | |
{ | |
"epoch": 0.75, | |
"learning_rate": 7.540394973070018e-06, | |
"loss": 0.6249, | |
"step": 420 | |
}, | |
{ | |
"epoch": 0.77, | |
"learning_rate": 7.719928186714543e-06, | |
"loss": 0.6099, | |
"step": 430 | |
}, | |
{ | |
"epoch": 0.79, | |
"learning_rate": 7.899461400359067e-06, | |
"loss": 0.6321, | |
"step": 440 | |
}, | |
{ | |
"epoch": 0.81, | |
"learning_rate": 8.07899461400359e-06, | |
"loss": 0.6431, | |
"step": 450 | |
}, | |
{ | |
"epoch": 0.83, | |
"learning_rate": 8.258527827648117e-06, | |
"loss": 0.6404, | |
"step": 460 | |
}, | |
{ | |
"epoch": 0.84, | |
"learning_rate": 8.43806104129264e-06, | |
"loss": 0.6148, | |
"step": 470 | |
}, | |
{ | |
"epoch": 0.86, | |
"learning_rate": 8.617594254937165e-06, | |
"loss": 0.5844, | |
"step": 480 | |
}, | |
{ | |
"epoch": 0.88, | |
"learning_rate": 8.797127468581689e-06, | |
"loss": 0.6316, | |
"step": 490 | |
}, | |
{ | |
"epoch": 0.9, | |
"learning_rate": 8.976660682226211e-06, | |
"loss": 0.6018, | |
"step": 500 | |
}, | |
{ | |
"epoch": 0.91, | |
"learning_rate": 9.156193895870736e-06, | |
"loss": 0.5843, | |
"step": 510 | |
}, | |
{ | |
"epoch": 0.93, | |
"learning_rate": 9.33572710951526e-06, | |
"loss": 0.5986, | |
"step": 520 | |
}, | |
{ | |
"epoch": 0.95, | |
"learning_rate": 9.515260323159784e-06, | |
"loss": 0.5718, | |
"step": 530 | |
}, | |
{ | |
"epoch": 0.97, | |
"learning_rate": 9.694793536804308e-06, | |
"loss": 0.5163, | |
"step": 540 | |
}, | |
{ | |
"epoch": 0.99, | |
"learning_rate": 9.874326750448834e-06, | |
"loss": 0.5448, | |
"step": 550 | |
}, | |
{ | |
"epoch": 1.0, | |
"eval_accuracy": { | |
"accuracy": 0.8387530836510428 | |
}, | |
"eval_f1": { | |
"f1": 0.8242528613153339 | |
}, | |
"eval_loss": 0.43474748730659485, | |
"eval_precision": { | |
"precision": 0.8238035334510331 | |
}, | |
"eval_recall": { | |
"recall": 0.8266051811271123 | |
}, | |
"eval_runtime": 171.551, | |
"eval_samples_per_second": 103.969, | |
"eval_steps_per_second": 3.253, | |
"step": 557 | |
}, | |
{ | |
"epoch": 1.0, | |
"learning_rate": 1.0053859964093358e-05, | |
"loss": 0.58, | |
"step": 560 | |
}, | |
{ | |
"epoch": 1.02, | |
"learning_rate": 1.0233393177737882e-05, | |
"loss": 0.5298, | |
"step": 570 | |
}, | |
{ | |
"epoch": 1.04, | |
"learning_rate": 1.0412926391382406e-05, | |
"loss": 0.5592, | |
"step": 580 | |
}, | |
{ | |
"epoch": 1.06, | |
"learning_rate": 1.059245960502693e-05, | |
"loss": 0.5352, | |
"step": 590 | |
}, | |
{ | |
"epoch": 1.08, | |
"learning_rate": 1.0771992818671454e-05, | |
"loss": 0.5683, | |
"step": 600 | |
}, | |
{ | |
"epoch": 1.09, | |
"learning_rate": 1.0951526032315979e-05, | |
"loss": 0.571, | |
"step": 610 | |
}, | |
{ | |
"epoch": 1.11, | |
"learning_rate": 1.1131059245960503e-05, | |
"loss": 0.5603, | |
"step": 620 | |
}, | |
{ | |
"epoch": 1.13, | |
"learning_rate": 1.1310592459605028e-05, | |
"loss": 0.5157, | |
"step": 630 | |
}, | |
{ | |
"epoch": 1.15, | |
"learning_rate": 1.1490125673249553e-05, | |
"loss": 0.4885, | |
"step": 640 | |
}, | |
{ | |
"epoch": 1.17, | |
"learning_rate": 1.1669658886894077e-05, | |
"loss": 0.5005, | |
"step": 650 | |
}, | |
{ | |
"epoch": 1.18, | |
"learning_rate": 1.1849192100538601e-05, | |
"loss": 0.4951, | |
"step": 660 | |
}, | |
{ | |
"epoch": 1.2, | |
"learning_rate": 1.2028725314183125e-05, | |
"loss": 0.5197, | |
"step": 670 | |
}, | |
{ | |
"epoch": 1.22, | |
"learning_rate": 1.2208258527827649e-05, | |
"loss": 0.4746, | |
"step": 680 | |
}, | |
{ | |
"epoch": 1.24, | |
"learning_rate": 1.2387791741472173e-05, | |
"loss": 0.486, | |
"step": 690 | |
}, | |
{ | |
"epoch": 1.26, | |
"learning_rate": 1.2567324955116697e-05, | |
"loss": 0.4932, | |
"step": 700 | |
}, | |
{ | |
"epoch": 1.27, | |
"learning_rate": 1.2746858168761221e-05, | |
"loss": 0.5222, | |
"step": 710 | |
}, | |
{ | |
"epoch": 1.29, | |
"learning_rate": 1.2926391382405746e-05, | |
"loss": 0.434, | |
"step": 720 | |
}, | |
{ | |
"epoch": 1.31, | |
"learning_rate": 1.310592459605027e-05, | |
"loss": 0.4817, | |
"step": 730 | |
}, | |
{ | |
"epoch": 1.33, | |
"learning_rate": 1.3285457809694792e-05, | |
"loss": 0.487, | |
"step": 740 | |
}, | |
{ | |
"epoch": 1.35, | |
"learning_rate": 1.3464991023339318e-05, | |
"loss": 0.4478, | |
"step": 750 | |
}, | |
{ | |
"epoch": 1.36, | |
"learning_rate": 1.3644524236983844e-05, | |
"loss": 0.4564, | |
"step": 760 | |
}, | |
{ | |
"epoch": 1.38, | |
"learning_rate": 1.3824057450628366e-05, | |
"loss": 0.4455, | |
"step": 770 | |
}, | |
{ | |
"epoch": 1.4, | |
"learning_rate": 1.4003590664272892e-05, | |
"loss": 0.4701, | |
"step": 780 | |
}, | |
{ | |
"epoch": 1.42, | |
"learning_rate": 1.4183123877917415e-05, | |
"loss": 0.4333, | |
"step": 790 | |
}, | |
{ | |
"epoch": 1.43, | |
"learning_rate": 1.436265709156194e-05, | |
"loss": 0.4409, | |
"step": 800 | |
}, | |
{ | |
"epoch": 1.45, | |
"learning_rate": 1.4542190305206463e-05, | |
"loss": 0.4639, | |
"step": 810 | |
}, | |
{ | |
"epoch": 1.47, | |
"learning_rate": 1.4721723518850989e-05, | |
"loss": 0.4552, | |
"step": 820 | |
}, | |
{ | |
"epoch": 1.49, | |
"learning_rate": 1.4901256732495511e-05, | |
"loss": 0.4759, | |
"step": 830 | |
}, | |
{ | |
"epoch": 1.51, | |
"learning_rate": 1.5080789946140037e-05, | |
"loss": 0.4584, | |
"step": 840 | |
}, | |
{ | |
"epoch": 1.52, | |
"learning_rate": 1.5260323159784563e-05, | |
"loss": 0.4502, | |
"step": 850 | |
}, | |
{ | |
"epoch": 1.54, | |
"learning_rate": 1.5439856373429085e-05, | |
"loss": 0.4519, | |
"step": 860 | |
}, | |
{ | |
"epoch": 1.56, | |
"learning_rate": 1.561938958707361e-05, | |
"loss": 0.4695, | |
"step": 870 | |
}, | |
{ | |
"epoch": 1.58, | |
"learning_rate": 1.5798922800718133e-05, | |
"loss": 0.4466, | |
"step": 880 | |
}, | |
{ | |
"epoch": 1.6, | |
"learning_rate": 1.597845601436266e-05, | |
"loss": 0.4531, | |
"step": 890 | |
}, | |
{ | |
"epoch": 1.61, | |
"learning_rate": 1.615798922800718e-05, | |
"loss": 0.4674, | |
"step": 900 | |
}, | |
{ | |
"epoch": 1.63, | |
"learning_rate": 1.6337522441651707e-05, | |
"loss": 0.4262, | |
"step": 910 | |
}, | |
{ | |
"epoch": 1.65, | |
"learning_rate": 1.6517055655296233e-05, | |
"loss": 0.3945, | |
"step": 920 | |
}, | |
{ | |
"epoch": 1.67, | |
"learning_rate": 1.6696588868940756e-05, | |
"loss": 0.4318, | |
"step": 930 | |
}, | |
{ | |
"epoch": 1.69, | |
"learning_rate": 1.687612208258528e-05, | |
"loss": 0.439, | |
"step": 940 | |
}, | |
{ | |
"epoch": 1.7, | |
"learning_rate": 1.7055655296229804e-05, | |
"loss": 0.4036, | |
"step": 950 | |
}, | |
{ | |
"epoch": 1.72, | |
"learning_rate": 1.723518850987433e-05, | |
"loss": 0.422, | |
"step": 960 | |
}, | |
{ | |
"epoch": 1.74, | |
"learning_rate": 1.7414721723518852e-05, | |
"loss": 0.4315, | |
"step": 970 | |
}, | |
{ | |
"epoch": 1.76, | |
"learning_rate": 1.7594254937163378e-05, | |
"loss": 0.4516, | |
"step": 980 | |
}, | |
{ | |
"epoch": 1.78, | |
"learning_rate": 1.77737881508079e-05, | |
"loss": 0.4092, | |
"step": 990 | |
}, | |
{ | |
"epoch": 1.79, | |
"learning_rate": 1.7953321364452423e-05, | |
"loss": 0.4367, | |
"step": 1000 | |
}, | |
{ | |
"epoch": 1.81, | |
"learning_rate": 1.813285457809695e-05, | |
"loss": 0.4321, | |
"step": 1010 | |
}, | |
{ | |
"epoch": 1.83, | |
"learning_rate": 1.831238779174147e-05, | |
"loss": 0.3859, | |
"step": 1020 | |
}, | |
{ | |
"epoch": 1.85, | |
"learning_rate": 1.8491921005385997e-05, | |
"loss": 0.415, | |
"step": 1030 | |
}, | |
{ | |
"epoch": 1.87, | |
"learning_rate": 1.867145421903052e-05, | |
"loss": 0.4388, | |
"step": 1040 | |
}, | |
{ | |
"epoch": 1.88, | |
"learning_rate": 1.8850987432675045e-05, | |
"loss": 0.3968, | |
"step": 1050 | |
}, | |
{ | |
"epoch": 1.9, | |
"learning_rate": 1.9030520646319568e-05, | |
"loss": 0.4172, | |
"step": 1060 | |
}, | |
{ | |
"epoch": 1.92, | |
"learning_rate": 1.9210053859964093e-05, | |
"loss": 0.4593, | |
"step": 1070 | |
}, | |
{ | |
"epoch": 1.94, | |
"learning_rate": 1.9389587073608616e-05, | |
"loss": 0.3924, | |
"step": 1080 | |
}, | |
{ | |
"epoch": 1.96, | |
"learning_rate": 1.9569120287253142e-05, | |
"loss": 0.4067, | |
"step": 1090 | |
}, | |
{ | |
"epoch": 1.97, | |
"learning_rate": 1.9748653500897668e-05, | |
"loss": 0.3906, | |
"step": 1100 | |
}, | |
{ | |
"epoch": 1.99, | |
"learning_rate": 1.992818671454219e-05, | |
"loss": 0.3915, | |
"step": 1110 | |
}, | |
{ | |
"epoch": 2.0, | |
"eval_accuracy": { | |
"accuracy": 0.8952679973088137 | |
}, | |
"eval_f1": { | |
"f1": 0.8895212539619177 | |
}, | |
"eval_loss": 0.2781514525413513, | |
"eval_precision": { | |
"precision": 0.8895590270096669 | |
}, | |
"eval_recall": { | |
"recall": 0.892156947982043 | |
}, | |
"eval_runtime": 99.0662, | |
"eval_samples_per_second": 180.041, | |
"eval_steps_per_second": 5.633, | |
"step": 1115 | |
}, | |
{ | |
"epoch": 2.01, | |
"learning_rate": 2.0107719928186716e-05, | |
"loss": 0.4099, | |
"step": 1120 | |
}, | |
{ | |
"epoch": 2.03, | |
"learning_rate": 2.0287253141831238e-05, | |
"loss": 0.4165, | |
"step": 1130 | |
}, | |
{ | |
"epoch": 2.04, | |
"learning_rate": 2.0466786355475764e-05, | |
"loss": 0.3958, | |
"step": 1140 | |
}, | |
{ | |
"epoch": 2.06, | |
"learning_rate": 2.0646319569120286e-05, | |
"loss": 0.4353, | |
"step": 1150 | |
}, | |
{ | |
"epoch": 2.08, | |
"learning_rate": 2.0825852782764812e-05, | |
"loss": 0.3832, | |
"step": 1160 | |
}, | |
{ | |
"epoch": 2.1, | |
"learning_rate": 2.1005385996409338e-05, | |
"loss": 0.3967, | |
"step": 1170 | |
}, | |
{ | |
"epoch": 2.12, | |
"learning_rate": 2.118491921005386e-05, | |
"loss": 0.3614, | |
"step": 1180 | |
}, | |
{ | |
"epoch": 2.13, | |
"learning_rate": 2.1364452423698386e-05, | |
"loss": 0.3665, | |
"step": 1190 | |
}, | |
{ | |
"epoch": 2.15, | |
"learning_rate": 2.154398563734291e-05, | |
"loss": 0.3871, | |
"step": 1200 | |
}, | |
{ | |
"epoch": 2.17, | |
"learning_rate": 2.1723518850987435e-05, | |
"loss": 0.3748, | |
"step": 1210 | |
}, | |
{ | |
"epoch": 2.19, | |
"learning_rate": 2.1903052064631957e-05, | |
"loss": 0.396, | |
"step": 1220 | |
}, | |
{ | |
"epoch": 2.21, | |
"learning_rate": 2.2082585278276483e-05, | |
"loss": 0.374, | |
"step": 1230 | |
}, | |
{ | |
"epoch": 2.22, | |
"learning_rate": 2.2262118491921005e-05, | |
"loss": 0.3735, | |
"step": 1240 | |
}, | |
{ | |
"epoch": 2.24, | |
"learning_rate": 2.244165170556553e-05, | |
"loss": 0.3894, | |
"step": 1250 | |
}, | |
{ | |
"epoch": 2.26, | |
"learning_rate": 2.2621184919210057e-05, | |
"loss": 0.3443, | |
"step": 1260 | |
}, | |
{ | |
"epoch": 2.28, | |
"learning_rate": 2.280071813285458e-05, | |
"loss": 0.3402, | |
"step": 1270 | |
}, | |
{ | |
"epoch": 2.3, | |
"learning_rate": 2.2980251346499105e-05, | |
"loss": 0.4164, | |
"step": 1280 | |
}, | |
{ | |
"epoch": 2.31, | |
"learning_rate": 2.3159784560143628e-05, | |
"loss": 0.3879, | |
"step": 1290 | |
}, | |
{ | |
"epoch": 2.33, | |
"learning_rate": 2.3339317773788153e-05, | |
"loss": 0.3719, | |
"step": 1300 | |
}, | |
{ | |
"epoch": 2.35, | |
"learning_rate": 2.3518850987432676e-05, | |
"loss": 0.3477, | |
"step": 1310 | |
}, | |
{ | |
"epoch": 2.37, | |
"learning_rate": 2.3698384201077202e-05, | |
"loss": 0.3744, | |
"step": 1320 | |
}, | |
{ | |
"epoch": 2.39, | |
"learning_rate": 2.3877917414721724e-05, | |
"loss": 0.3833, | |
"step": 1330 | |
}, | |
{ | |
"epoch": 2.4, | |
"learning_rate": 2.405745062836625e-05, | |
"loss": 0.398, | |
"step": 1340 | |
}, | |
{ | |
"epoch": 2.42, | |
"learning_rate": 2.4236983842010776e-05, | |
"loss": 0.3562, | |
"step": 1350 | |
}, | |
{ | |
"epoch": 2.44, | |
"learning_rate": 2.4416517055655298e-05, | |
"loss": 0.3614, | |
"step": 1360 | |
}, | |
{ | |
"epoch": 2.46, | |
"learning_rate": 2.4596050269299824e-05, | |
"loss": 0.315, | |
"step": 1370 | |
}, | |
{ | |
"epoch": 2.48, | |
"learning_rate": 2.4775583482944346e-05, | |
"loss": 0.3441, | |
"step": 1380 | |
}, | |
{ | |
"epoch": 2.49, | |
"learning_rate": 2.495511669658887e-05, | |
"loss": 0.3589, | |
"step": 1390 | |
}, | |
{ | |
"epoch": 2.51, | |
"learning_rate": 2.5134649910233395e-05, | |
"loss": 0.3418, | |
"step": 1400 | |
}, | |
{ | |
"epoch": 2.53, | |
"learning_rate": 2.5314183123877917e-05, | |
"loss": 0.3696, | |
"step": 1410 | |
}, | |
{ | |
"epoch": 2.55, | |
"learning_rate": 2.5493716337522443e-05, | |
"loss": 0.3606, | |
"step": 1420 | |
}, | |
{ | |
"epoch": 2.57, | |
"learning_rate": 2.5673249551166965e-05, | |
"loss": 0.3389, | |
"step": 1430 | |
}, | |
{ | |
"epoch": 2.58, | |
"learning_rate": 2.585278276481149e-05, | |
"loss": 0.3375, | |
"step": 1440 | |
}, | |
{ | |
"epoch": 2.6, | |
"learning_rate": 2.6032315978456017e-05, | |
"loss": 0.3193, | |
"step": 1450 | |
}, | |
{ | |
"epoch": 2.62, | |
"learning_rate": 2.621184919210054e-05, | |
"loss": 0.3683, | |
"step": 1460 | |
}, | |
{ | |
"epoch": 2.64, | |
"learning_rate": 2.6391382405745062e-05, | |
"loss": 0.3203, | |
"step": 1470 | |
}, | |
{ | |
"epoch": 2.65, | |
"learning_rate": 2.6570915619389584e-05, | |
"loss": 0.3792, | |
"step": 1480 | |
}, | |
{ | |
"epoch": 2.67, | |
"learning_rate": 2.6750448833034114e-05, | |
"loss": 0.3343, | |
"step": 1490 | |
}, | |
{ | |
"epoch": 2.69, | |
"learning_rate": 2.6929982046678636e-05, | |
"loss": 0.3671, | |
"step": 1500 | |
}, | |
{ | |
"epoch": 2.71, | |
"learning_rate": 2.710951526032316e-05, | |
"loss": 0.4002, | |
"step": 1510 | |
}, | |
{ | |
"epoch": 2.73, | |
"learning_rate": 2.7289048473967688e-05, | |
"loss": 0.3246, | |
"step": 1520 | |
}, | |
{ | |
"epoch": 2.74, | |
"learning_rate": 2.746858168761221e-05, | |
"loss": 0.3731, | |
"step": 1530 | |
}, | |
{ | |
"epoch": 2.76, | |
"learning_rate": 2.7648114901256732e-05, | |
"loss": 0.3652, | |
"step": 1540 | |
}, | |
{ | |
"epoch": 2.78, | |
"learning_rate": 2.7827648114901255e-05, | |
"loss": 0.3767, | |
"step": 1550 | |
}, | |
{ | |
"epoch": 2.8, | |
"learning_rate": 2.8007181328545784e-05, | |
"loss": 0.3135, | |
"step": 1560 | |
}, | |
{ | |
"epoch": 2.82, | |
"learning_rate": 2.8186714542190307e-05, | |
"loss": 0.3303, | |
"step": 1570 | |
}, | |
{ | |
"epoch": 2.83, | |
"learning_rate": 2.836624775583483e-05, | |
"loss": 0.3485, | |
"step": 1580 | |
}, | |
{ | |
"epoch": 2.85, | |
"learning_rate": 2.8545780969479358e-05, | |
"loss": 0.3528, | |
"step": 1590 | |
}, | |
{ | |
"epoch": 2.87, | |
"learning_rate": 2.872531418312388e-05, | |
"loss": 0.3205, | |
"step": 1600 | |
}, | |
{ | |
"epoch": 2.89, | |
"learning_rate": 2.8904847396768403e-05, | |
"loss": 0.3047, | |
"step": 1610 | |
}, | |
{ | |
"epoch": 2.91, | |
"learning_rate": 2.9084380610412926e-05, | |
"loss": 0.3656, | |
"step": 1620 | |
}, | |
{ | |
"epoch": 2.92, | |
"learning_rate": 2.9263913824057455e-05, | |
"loss": 0.3317, | |
"step": 1630 | |
}, | |
{ | |
"epoch": 2.94, | |
"learning_rate": 2.9443447037701977e-05, | |
"loss": 0.3318, | |
"step": 1640 | |
}, | |
{ | |
"epoch": 2.96, | |
"learning_rate": 2.96229802513465e-05, | |
"loss": 0.365, | |
"step": 1650 | |
}, | |
{ | |
"epoch": 2.98, | |
"learning_rate": 2.9802513464991022e-05, | |
"loss": 0.3273, | |
"step": 1660 | |
}, | |
{ | |
"epoch": 3.0, | |
"learning_rate": 2.998204667863555e-05, | |
"loss": 0.3246, | |
"step": 1670 | |
}, | |
{ | |
"epoch": 3.0, | |
"eval_accuracy": { | |
"accuracy": 0.9295245570755775 | |
}, | |
"eval_f1": { | |
"f1": 0.9243811536773696 | |
}, | |
"eval_loss": 0.19212597608566284, | |
"eval_precision": { | |
"precision": 0.9273415557649412 | |
}, | |
"eval_recall": { | |
"recall": 0.9219909309089409 | |
}, | |
"eval_runtime": 99.0388, | |
"eval_samples_per_second": 180.091, | |
"eval_steps_per_second": 5.634, | |
"step": 1672 | |
}, | |
{ | |
"epoch": 3.01, | |
"learning_rate": 3.0161579892280074e-05, | |
"loss": 0.3135, | |
"step": 1680 | |
}, | |
{ | |
"epoch": 3.03, | |
"learning_rate": 3.0341113105924596e-05, | |
"loss": 0.3145, | |
"step": 1690 | |
}, | |
{ | |
"epoch": 3.05, | |
"learning_rate": 3.0520646319569125e-05, | |
"loss": 0.3395, | |
"step": 1700 | |
}, | |
{ | |
"epoch": 3.07, | |
"learning_rate": 3.070017953321365e-05, | |
"loss": 0.2927, | |
"step": 1710 | |
}, | |
{ | |
"epoch": 3.09, | |
"learning_rate": 3.087971274685817e-05, | |
"loss": 0.3363, | |
"step": 1720 | |
}, | |
{ | |
"epoch": 3.1, | |
"learning_rate": 3.105924596050269e-05, | |
"loss": 0.3392, | |
"step": 1730 | |
}, | |
{ | |
"epoch": 3.12, | |
"learning_rate": 3.123877917414722e-05, | |
"loss": 0.3279, | |
"step": 1740 | |
}, | |
{ | |
"epoch": 3.14, | |
"learning_rate": 3.1418312387791744e-05, | |
"loss": 0.3194, | |
"step": 1750 | |
}, | |
{ | |
"epoch": 3.16, | |
"learning_rate": 3.159784560143627e-05, | |
"loss": 0.3206, | |
"step": 1760 | |
}, | |
{ | |
"epoch": 3.17, | |
"learning_rate": 3.1777378815080796e-05, | |
"loss": 0.3246, | |
"step": 1770 | |
}, | |
{ | |
"epoch": 3.19, | |
"learning_rate": 3.195691202872532e-05, | |
"loss": 0.3155, | |
"step": 1780 | |
}, | |
{ | |
"epoch": 3.21, | |
"learning_rate": 3.213644524236984e-05, | |
"loss": 0.301, | |
"step": 1790 | |
}, | |
{ | |
"epoch": 3.23, | |
"learning_rate": 3.231597845601436e-05, | |
"loss": 0.3287, | |
"step": 1800 | |
}, | |
{ | |
"epoch": 3.25, | |
"learning_rate": 3.249551166965889e-05, | |
"loss": 0.3282, | |
"step": 1810 | |
}, | |
{ | |
"epoch": 3.26, | |
"learning_rate": 3.2675044883303415e-05, | |
"loss": 0.2872, | |
"step": 1820 | |
}, | |
{ | |
"epoch": 3.28, | |
"learning_rate": 3.285457809694794e-05, | |
"loss": 0.3003, | |
"step": 1830 | |
}, | |
{ | |
"epoch": 3.3, | |
"learning_rate": 3.3034111310592466e-05, | |
"loss": 0.3009, | |
"step": 1840 | |
}, | |
{ | |
"epoch": 3.32, | |
"learning_rate": 3.321364452423699e-05, | |
"loss": 0.325, | |
"step": 1850 | |
}, | |
{ | |
"epoch": 3.34, | |
"learning_rate": 3.339317773788151e-05, | |
"loss": 0.3308, | |
"step": 1860 | |
}, | |
{ | |
"epoch": 3.35, | |
"learning_rate": 3.3572710951526034e-05, | |
"loss": 0.3175, | |
"step": 1870 | |
}, | |
{ | |
"epoch": 3.37, | |
"learning_rate": 3.375224416517056e-05, | |
"loss": 0.291, | |
"step": 1880 | |
}, | |
{ | |
"epoch": 3.39, | |
"learning_rate": 3.3931777378815085e-05, | |
"loss": 0.2936, | |
"step": 1890 | |
}, | |
{ | |
"epoch": 3.41, | |
"learning_rate": 3.411131059245961e-05, | |
"loss": 0.3304, | |
"step": 1900 | |
}, | |
{ | |
"epoch": 3.43, | |
"learning_rate": 3.429084380610413e-05, | |
"loss": 0.331, | |
"step": 1910 | |
}, | |
{ | |
"epoch": 3.44, | |
"learning_rate": 3.447037701974866e-05, | |
"loss": 0.3158, | |
"step": 1920 | |
}, | |
{ | |
"epoch": 3.46, | |
"learning_rate": 3.464991023339318e-05, | |
"loss": 0.3054, | |
"step": 1930 | |
}, | |
{ | |
"epoch": 3.48, | |
"learning_rate": 3.4829443447037704e-05, | |
"loss": 0.3063, | |
"step": 1940 | |
}, | |
{ | |
"epoch": 3.5, | |
"learning_rate": 3.500897666068223e-05, | |
"loss": 0.279, | |
"step": 1950 | |
}, | |
{ | |
"epoch": 3.52, | |
"learning_rate": 3.5188509874326756e-05, | |
"loss": 0.2986, | |
"step": 1960 | |
}, | |
{ | |
"epoch": 3.53, | |
"learning_rate": 3.536804308797128e-05, | |
"loss": 0.3184, | |
"step": 1970 | |
}, | |
{ | |
"epoch": 3.55, | |
"learning_rate": 3.55475763016158e-05, | |
"loss": 0.3623, | |
"step": 1980 | |
}, | |
{ | |
"epoch": 3.57, | |
"learning_rate": 3.572710951526032e-05, | |
"loss": 0.3178, | |
"step": 1990 | |
}, | |
{ | |
"epoch": 3.59, | |
"learning_rate": 3.5906642728904846e-05, | |
"loss": 0.2553, | |
"step": 2000 | |
}, | |
{ | |
"epoch": 3.61, | |
"learning_rate": 3.6086175942549375e-05, | |
"loss": 0.3116, | |
"step": 2010 | |
}, | |
{ | |
"epoch": 3.62, | |
"learning_rate": 3.62657091561939e-05, | |
"loss": 0.3278, | |
"step": 2020 | |
}, | |
{ | |
"epoch": 3.64, | |
"learning_rate": 3.644524236983842e-05, | |
"loss": 0.311, | |
"step": 2030 | |
}, | |
{ | |
"epoch": 3.66, | |
"learning_rate": 3.662477558348294e-05, | |
"loss": 0.3116, | |
"step": 2040 | |
}, | |
{ | |
"epoch": 3.68, | |
"learning_rate": 3.6804308797127465e-05, | |
"loss": 0.3087, | |
"step": 2050 | |
}, | |
{ | |
"epoch": 3.7, | |
"learning_rate": 3.6983842010771994e-05, | |
"loss": 0.3091, | |
"step": 2060 | |
}, | |
{ | |
"epoch": 3.71, | |
"learning_rate": 3.7163375224416516e-05, | |
"loss": 0.2855, | |
"step": 2070 | |
}, | |
{ | |
"epoch": 3.73, | |
"learning_rate": 3.734290843806104e-05, | |
"loss": 0.313, | |
"step": 2080 | |
}, | |
{ | |
"epoch": 3.75, | |
"learning_rate": 3.752244165170557e-05, | |
"loss": 0.2992, | |
"step": 2090 | |
}, | |
{ | |
"epoch": 3.77, | |
"learning_rate": 3.770197486535009e-05, | |
"loss": 0.2946, | |
"step": 2100 | |
}, | |
{ | |
"epoch": 3.78, | |
"learning_rate": 3.788150807899461e-05, | |
"loss": 0.3034, | |
"step": 2110 | |
}, | |
{ | |
"epoch": 3.8, | |
"learning_rate": 3.8061041292639135e-05, | |
"loss": 0.2868, | |
"step": 2120 | |
}, | |
{ | |
"epoch": 3.82, | |
"learning_rate": 3.8240574506283664e-05, | |
"loss": 0.2818, | |
"step": 2130 | |
}, | |
{ | |
"epoch": 3.84, | |
"learning_rate": 3.842010771992819e-05, | |
"loss": 0.2943, | |
"step": 2140 | |
}, | |
{ | |
"epoch": 3.86, | |
"learning_rate": 3.859964093357271e-05, | |
"loss": 0.3166, | |
"step": 2150 | |
}, | |
{ | |
"epoch": 3.87, | |
"learning_rate": 3.877917414721723e-05, | |
"loss": 0.2947, | |
"step": 2160 | |
}, | |
{ | |
"epoch": 3.89, | |
"learning_rate": 3.895870736086176e-05, | |
"loss": 0.2764, | |
"step": 2170 | |
}, | |
{ | |
"epoch": 3.91, | |
"learning_rate": 3.9138240574506283e-05, | |
"loss": 0.278, | |
"step": 2180 | |
}, | |
{ | |
"epoch": 3.93, | |
"learning_rate": 3.9317773788150806e-05, | |
"loss": 0.3091, | |
"step": 2190 | |
}, | |
{ | |
"epoch": 3.95, | |
"learning_rate": 3.9497307001795335e-05, | |
"loss": 0.3336, | |
"step": 2200 | |
}, | |
{ | |
"epoch": 3.96, | |
"learning_rate": 3.967684021543986e-05, | |
"loss": 0.3256, | |
"step": 2210 | |
}, | |
{ | |
"epoch": 3.98, | |
"learning_rate": 3.985637342908438e-05, | |
"loss": 0.2664, | |
"step": 2220 | |
}, | |
{ | |
"epoch": 4.0, | |
"learning_rate": 4.00359066427289e-05, | |
"loss": 0.2819, | |
"step": 2230 | |
}, | |
{ | |
"epoch": 4.0, | |
"eval_accuracy": { | |
"accuracy": 0.9389437093518727 | |
}, | |
"eval_f1": { | |
"f1": 0.934636026587255 | |
}, | |
"eval_loss": 0.16309776902198792, | |
"eval_precision": { | |
"precision": 0.9384931858190604 | |
}, | |
"eval_recall": { | |
"recall": 0.9317771824858079 | |
}, | |
"eval_runtime": 98.5849, | |
"eval_samples_per_second": 180.92, | |
"eval_steps_per_second": 5.66, | |
"step": 2230 | |
}, | |
{ | |
"epoch": 4.02, | |
"learning_rate": 4.021543985637343e-05, | |
"loss": 0.2725, | |
"step": 2240 | |
}, | |
{ | |
"epoch": 4.04, | |
"learning_rate": 4.0394973070017954e-05, | |
"loss": 0.3239, | |
"step": 2250 | |
}, | |
{ | |
"epoch": 4.05, | |
"learning_rate": 4.0574506283662476e-05, | |
"loss": 0.2761, | |
"step": 2260 | |
}, | |
{ | |
"epoch": 4.07, | |
"learning_rate": 4.0754039497307006e-05, | |
"loss": 0.275, | |
"step": 2270 | |
}, | |
{ | |
"epoch": 4.09, | |
"learning_rate": 4.093357271095153e-05, | |
"loss": 0.2789, | |
"step": 2280 | |
}, | |
{ | |
"epoch": 4.11, | |
"learning_rate": 4.111310592459605e-05, | |
"loss": 0.2702, | |
"step": 2290 | |
}, | |
{ | |
"epoch": 4.13, | |
"learning_rate": 4.129263913824057e-05, | |
"loss": 0.3244, | |
"step": 2300 | |
}, | |
{ | |
"epoch": 4.14, | |
"learning_rate": 4.14721723518851e-05, | |
"loss": 0.3254, | |
"step": 2310 | |
}, | |
{ | |
"epoch": 4.16, | |
"learning_rate": 4.1651705565529625e-05, | |
"loss": 0.2969, | |
"step": 2320 | |
}, | |
{ | |
"epoch": 4.18, | |
"learning_rate": 4.183123877917415e-05, | |
"loss": 0.2545, | |
"step": 2330 | |
}, | |
{ | |
"epoch": 4.2, | |
"learning_rate": 4.2010771992818676e-05, | |
"loss": 0.2307, | |
"step": 2340 | |
}, | |
{ | |
"epoch": 4.22, | |
"learning_rate": 4.21903052064632e-05, | |
"loss": 0.3132, | |
"step": 2350 | |
}, | |
{ | |
"epoch": 4.23, | |
"learning_rate": 4.236983842010772e-05, | |
"loss": 0.3017, | |
"step": 2360 | |
}, | |
{ | |
"epoch": 4.25, | |
"learning_rate": 4.2549371633752244e-05, | |
"loss": 0.2885, | |
"step": 2370 | |
}, | |
{ | |
"epoch": 4.27, | |
"learning_rate": 4.272890484739677e-05, | |
"loss": 0.3085, | |
"step": 2380 | |
}, | |
{ | |
"epoch": 4.29, | |
"learning_rate": 4.2908438061041295e-05, | |
"loss": 0.3079, | |
"step": 2390 | |
}, | |
{ | |
"epoch": 4.3, | |
"learning_rate": 4.308797127468582e-05, | |
"loss": 0.2749, | |
"step": 2400 | |
}, | |
{ | |
"epoch": 4.32, | |
"learning_rate": 4.326750448833034e-05, | |
"loss": 0.2863, | |
"step": 2410 | |
}, | |
{ | |
"epoch": 4.34, | |
"learning_rate": 4.344703770197487e-05, | |
"loss": 0.2523, | |
"step": 2420 | |
}, | |
{ | |
"epoch": 4.36, | |
"learning_rate": 4.362657091561939e-05, | |
"loss": 0.2441, | |
"step": 2430 | |
}, | |
{ | |
"epoch": 4.38, | |
"learning_rate": 4.3806104129263914e-05, | |
"loss": 0.3174, | |
"step": 2440 | |
}, | |
{ | |
"epoch": 4.39, | |
"learning_rate": 4.398563734290844e-05, | |
"loss": 0.2938, | |
"step": 2450 | |
}, | |
{ | |
"epoch": 4.41, | |
"learning_rate": 4.4165170556552966e-05, | |
"loss": 0.2632, | |
"step": 2460 | |
}, | |
{ | |
"epoch": 4.43, | |
"learning_rate": 4.434470377019749e-05, | |
"loss": 0.2924, | |
"step": 2470 | |
}, | |
{ | |
"epoch": 4.45, | |
"learning_rate": 4.452423698384201e-05, | |
"loss": 0.3018, | |
"step": 2480 | |
}, | |
{ | |
"epoch": 4.47, | |
"learning_rate": 4.470377019748654e-05, | |
"loss": 0.2488, | |
"step": 2490 | |
}, | |
{ | |
"epoch": 4.48, | |
"learning_rate": 4.488330341113106e-05, | |
"loss": 0.2659, | |
"step": 2500 | |
}, | |
{ | |
"epoch": 4.5, | |
"learning_rate": 4.5062836624775585e-05, | |
"loss": 0.2689, | |
"step": 2510 | |
}, | |
{ | |
"epoch": 4.52, | |
"learning_rate": 4.5242369838420114e-05, | |
"loss": 0.2702, | |
"step": 2520 | |
}, | |
{ | |
"epoch": 4.54, | |
"learning_rate": 4.5421903052064636e-05, | |
"loss": 0.2695, | |
"step": 2530 | |
}, | |
{ | |
"epoch": 4.56, | |
"learning_rate": 4.560143626570916e-05, | |
"loss": 0.2629, | |
"step": 2540 | |
}, | |
{ | |
"epoch": 4.57, | |
"learning_rate": 4.578096947935368e-05, | |
"loss": 0.2931, | |
"step": 2550 | |
}, | |
{ | |
"epoch": 4.59, | |
"learning_rate": 4.596050269299821e-05, | |
"loss": 0.2865, | |
"step": 2560 | |
}, | |
{ | |
"epoch": 4.61, | |
"learning_rate": 4.614003590664273e-05, | |
"loss": 0.2751, | |
"step": 2570 | |
}, | |
{ | |
"epoch": 4.63, | |
"learning_rate": 4.6319569120287255e-05, | |
"loss": 0.2565, | |
"step": 2580 | |
}, | |
{ | |
"epoch": 4.65, | |
"learning_rate": 4.6499102333931784e-05, | |
"loss": 0.2589, | |
"step": 2590 | |
}, | |
{ | |
"epoch": 4.66, | |
"learning_rate": 4.667863554757631e-05, | |
"loss": 0.2501, | |
"step": 2600 | |
}, | |
{ | |
"epoch": 4.68, | |
"learning_rate": 4.685816876122083e-05, | |
"loss": 0.2524, | |
"step": 2610 | |
}, | |
{ | |
"epoch": 4.7, | |
"learning_rate": 4.703770197486535e-05, | |
"loss": 0.2596, | |
"step": 2620 | |
}, | |
{ | |
"epoch": 4.72, | |
"learning_rate": 4.721723518850988e-05, | |
"loss": 0.2567, | |
"step": 2630 | |
}, | |
{ | |
"epoch": 4.74, | |
"learning_rate": 4.7396768402154403e-05, | |
"loss": 0.2886, | |
"step": 2640 | |
}, | |
{ | |
"epoch": 4.75, | |
"learning_rate": 4.7576301615798926e-05, | |
"loss": 0.2879, | |
"step": 2650 | |
}, | |
{ | |
"epoch": 4.77, | |
"learning_rate": 4.775583482944345e-05, | |
"loss": 0.2703, | |
"step": 2660 | |
}, | |
{ | |
"epoch": 4.79, | |
"learning_rate": 4.793536804308798e-05, | |
"loss": 0.3013, | |
"step": 2670 | |
}, | |
{ | |
"epoch": 4.81, | |
"learning_rate": 4.81149012567325e-05, | |
"loss": 0.2801, | |
"step": 2680 | |
}, | |
{ | |
"epoch": 4.83, | |
"learning_rate": 4.829443447037702e-05, | |
"loss": 0.2886, | |
"step": 2690 | |
}, | |
{ | |
"epoch": 4.84, | |
"learning_rate": 4.847396768402155e-05, | |
"loss": 0.3084, | |
"step": 2700 | |
}, | |
{ | |
"epoch": 4.86, | |
"learning_rate": 4.8653500897666074e-05, | |
"loss": 0.2886, | |
"step": 2710 | |
}, | |
{ | |
"epoch": 4.88, | |
"learning_rate": 4.8833034111310596e-05, | |
"loss": 0.2678, | |
"step": 2720 | |
}, | |
{ | |
"epoch": 4.9, | |
"learning_rate": 4.901256732495512e-05, | |
"loss": 0.2628, | |
"step": 2730 | |
}, | |
{ | |
"epoch": 4.91, | |
"learning_rate": 4.919210053859965e-05, | |
"loss": 0.2834, | |
"step": 2740 | |
}, | |
{ | |
"epoch": 4.93, | |
"learning_rate": 4.937163375224417e-05, | |
"loss": 0.2775, | |
"step": 2750 | |
}, | |
{ | |
"epoch": 4.95, | |
"learning_rate": 4.955116696588869e-05, | |
"loss": 0.2987, | |
"step": 2760 | |
}, | |
{ | |
"epoch": 4.97, | |
"learning_rate": 4.9730700179533215e-05, | |
"loss": 0.2409, | |
"step": 2770 | |
}, | |
{ | |
"epoch": 4.99, | |
"learning_rate": 4.991023339317774e-05, | |
"loss": 0.3022, | |
"step": 2780 | |
}, | |
{ | |
"epoch": 5.0, | |
"eval_accuracy": { | |
"accuracy": 0.9471294012110338 | |
}, | |
"eval_f1": { | |
"f1": 0.944197644580418 | |
}, | |
"eval_loss": 0.1480465531349182, | |
"eval_precision": { | |
"precision": 0.9443621962369058 | |
}, | |
"eval_recall": { | |
"recall": 0.9440973623586151 | |
}, | |
"eval_runtime": 99.3848, | |
"eval_samples_per_second": 179.464, | |
"eval_steps_per_second": 5.615, | |
"step": 2787 | |
}, | |
{ | |
"epoch": 5.0, | |
"learning_rate": 4.999002593257531e-05, | |
"loss": 0.2842, | |
"step": 2790 | |
}, | |
{ | |
"epoch": 5.02, | |
"learning_rate": 4.9970077797725914e-05, | |
"loss": 0.2321, | |
"step": 2800 | |
}, | |
{ | |
"epoch": 5.04, | |
"learning_rate": 4.995012966287652e-05, | |
"loss": 0.259, | |
"step": 2810 | |
}, | |
{ | |
"epoch": 5.06, | |
"learning_rate": 4.993018152802713e-05, | |
"loss": 0.2541, | |
"step": 2820 | |
}, | |
{ | |
"epoch": 5.08, | |
"learning_rate": 4.991023339317774e-05, | |
"loss": 0.2419, | |
"step": 2830 | |
}, | |
{ | |
"epoch": 5.09, | |
"learning_rate": 4.989028525832835e-05, | |
"loss": 0.2506, | |
"step": 2840 | |
}, | |
{ | |
"epoch": 5.11, | |
"learning_rate": 4.9870337123478956e-05, | |
"loss": 0.24, | |
"step": 2850 | |
}, | |
{ | |
"epoch": 5.13, | |
"learning_rate": 4.985038898862957e-05, | |
"loss": 0.2443, | |
"step": 2860 | |
}, | |
{ | |
"epoch": 5.15, | |
"learning_rate": 4.9830440853780174e-05, | |
"loss": 0.2645, | |
"step": 2870 | |
}, | |
{ | |
"epoch": 5.17, | |
"learning_rate": 4.9810492718930786e-05, | |
"loss": 0.2679, | |
"step": 2880 | |
}, | |
{ | |
"epoch": 5.18, | |
"learning_rate": 4.979054458408139e-05, | |
"loss": 0.2738, | |
"step": 2890 | |
}, | |
{ | |
"epoch": 5.2, | |
"learning_rate": 4.9770596449232e-05, | |
"loss": 0.2329, | |
"step": 2900 | |
}, | |
{ | |
"epoch": 5.22, | |
"learning_rate": 4.975064831438261e-05, | |
"loss": 0.2766, | |
"step": 2910 | |
}, | |
{ | |
"epoch": 5.24, | |
"learning_rate": 4.9730700179533215e-05, | |
"loss": 0.243, | |
"step": 2920 | |
}, | |
{ | |
"epoch": 5.26, | |
"learning_rate": 4.971075204468382e-05, | |
"loss": 0.2722, | |
"step": 2930 | |
}, | |
{ | |
"epoch": 5.27, | |
"learning_rate": 4.969080390983443e-05, | |
"loss": 0.2556, | |
"step": 2940 | |
}, | |
{ | |
"epoch": 5.29, | |
"learning_rate": 4.967085577498504e-05, | |
"loss": 0.2544, | |
"step": 2950 | |
}, | |
{ | |
"epoch": 5.31, | |
"learning_rate": 4.965090764013565e-05, | |
"loss": 0.2353, | |
"step": 2960 | |
}, | |
{ | |
"epoch": 5.33, | |
"learning_rate": 4.963095950528626e-05, | |
"loss": 0.252, | |
"step": 2970 | |
}, | |
{ | |
"epoch": 5.35, | |
"learning_rate": 4.961101137043687e-05, | |
"loss": 0.2303, | |
"step": 2980 | |
}, | |
{ | |
"epoch": 5.36, | |
"learning_rate": 4.9591063235587475e-05, | |
"loss": 0.2357, | |
"step": 2990 | |
}, | |
{ | |
"epoch": 5.38, | |
"learning_rate": 4.957111510073809e-05, | |
"loss": 0.2529, | |
"step": 3000 | |
}, | |
{ | |
"epoch": 5.4, | |
"learning_rate": 4.955116696588869e-05, | |
"loss": 0.2381, | |
"step": 3010 | |
}, | |
{ | |
"epoch": 5.42, | |
"learning_rate": 4.95312188310393e-05, | |
"loss": 0.2616, | |
"step": 3020 | |
}, | |
{ | |
"epoch": 5.43, | |
"learning_rate": 4.951127069618991e-05, | |
"loss": 0.2836, | |
"step": 3030 | |
}, | |
{ | |
"epoch": 5.45, | |
"learning_rate": 4.9491322561340517e-05, | |
"loss": 0.2726, | |
"step": 3040 | |
}, | |
{ | |
"epoch": 5.47, | |
"learning_rate": 4.947137442649112e-05, | |
"loss": 0.2487, | |
"step": 3050 | |
}, | |
{ | |
"epoch": 5.49, | |
"learning_rate": 4.9451426291641735e-05, | |
"loss": 0.2577, | |
"step": 3060 | |
}, | |
{ | |
"epoch": 5.51, | |
"learning_rate": 4.943147815679234e-05, | |
"loss": 0.2453, | |
"step": 3070 | |
}, | |
{ | |
"epoch": 5.52, | |
"learning_rate": 4.941153002194295e-05, | |
"loss": 0.2682, | |
"step": 3080 | |
}, | |
{ | |
"epoch": 5.54, | |
"learning_rate": 4.939158188709356e-05, | |
"loss": 0.2463, | |
"step": 3090 | |
}, | |
{ | |
"epoch": 5.56, | |
"learning_rate": 4.937163375224417e-05, | |
"loss": 0.254, | |
"step": 3100 | |
}, | |
{ | |
"epoch": 5.58, | |
"learning_rate": 4.9351685617394776e-05, | |
"loss": 0.2516, | |
"step": 3110 | |
}, | |
{ | |
"epoch": 5.6, | |
"learning_rate": 4.933173748254539e-05, | |
"loss": 0.2395, | |
"step": 3120 | |
}, | |
{ | |
"epoch": 5.61, | |
"learning_rate": 4.9311789347695994e-05, | |
"loss": 0.2375, | |
"step": 3130 | |
}, | |
{ | |
"epoch": 5.63, | |
"learning_rate": 4.92918412128466e-05, | |
"loss": 0.2551, | |
"step": 3140 | |
}, | |
{ | |
"epoch": 5.65, | |
"learning_rate": 4.927189307799721e-05, | |
"loss": 0.2391, | |
"step": 3150 | |
}, | |
{ | |
"epoch": 5.67, | |
"learning_rate": 4.925194494314782e-05, | |
"loss": 0.2501, | |
"step": 3160 | |
}, | |
{ | |
"epoch": 5.69, | |
"learning_rate": 4.923199680829842e-05, | |
"loss": 0.248, | |
"step": 3170 | |
}, | |
{ | |
"epoch": 5.7, | |
"learning_rate": 4.9212048673449036e-05, | |
"loss": 0.2357, | |
"step": 3180 | |
}, | |
{ | |
"epoch": 5.72, | |
"learning_rate": 4.919210053859965e-05, | |
"loss": 0.2454, | |
"step": 3190 | |
}, | |
{ | |
"epoch": 5.74, | |
"learning_rate": 4.917215240375025e-05, | |
"loss": 0.2728, | |
"step": 3200 | |
}, | |
{ | |
"epoch": 5.76, | |
"learning_rate": 4.915220426890086e-05, | |
"loss": 0.2789, | |
"step": 3210 | |
}, | |
{ | |
"epoch": 5.78, | |
"learning_rate": 4.913225613405147e-05, | |
"loss": 0.2559, | |
"step": 3220 | |
}, | |
{ | |
"epoch": 5.79, | |
"learning_rate": 4.911230799920208e-05, | |
"loss": 0.2631, | |
"step": 3230 | |
}, | |
{ | |
"epoch": 5.81, | |
"learning_rate": 4.909235986435268e-05, | |
"loss": 0.2292, | |
"step": 3240 | |
}, | |
{ | |
"epoch": 5.83, | |
"learning_rate": 4.9072411729503295e-05, | |
"loss": 0.2301, | |
"step": 3250 | |
}, | |
{ | |
"epoch": 5.85, | |
"learning_rate": 4.90524635946539e-05, | |
"loss": 0.2711, | |
"step": 3260 | |
}, | |
{ | |
"epoch": 5.87, | |
"learning_rate": 4.903251545980451e-05, | |
"loss": 0.2588, | |
"step": 3270 | |
}, | |
{ | |
"epoch": 5.88, | |
"learning_rate": 4.901256732495512e-05, | |
"loss": 0.2683, | |
"step": 3280 | |
}, | |
{ | |
"epoch": 5.9, | |
"learning_rate": 4.8992619190105724e-05, | |
"loss": 0.1896, | |
"step": 3290 | |
}, | |
{ | |
"epoch": 5.92, | |
"learning_rate": 4.897267105525634e-05, | |
"loss": 0.2639, | |
"step": 3300 | |
}, | |
{ | |
"epoch": 5.94, | |
"learning_rate": 4.895272292040695e-05, | |
"loss": 0.2606, | |
"step": 3310 | |
}, | |
{ | |
"epoch": 5.96, | |
"learning_rate": 4.893277478555755e-05, | |
"loss": 0.2751, | |
"step": 3320 | |
}, | |
{ | |
"epoch": 5.97, | |
"learning_rate": 4.891282665070816e-05, | |
"loss": 0.2423, | |
"step": 3330 | |
}, | |
{ | |
"epoch": 5.99, | |
"learning_rate": 4.889287851585877e-05, | |
"loss": 0.2283, | |
"step": 3340 | |
}, | |
{ | |
"epoch": 6.0, | |
"eval_accuracy": { | |
"accuracy": 0.9510540479928234 | |
}, | |
"eval_f1": { | |
"f1": 0.9482048430595941 | |
}, | |
"eval_loss": 0.13114869594573975, | |
"eval_precision": { | |
"precision": 0.9513484013297254 | |
}, | |
"eval_recall": { | |
"recall": 0.9456754807307988 | |
}, | |
"eval_runtime": 99.7361, | |
"eval_samples_per_second": 178.832, | |
"eval_steps_per_second": 5.595, | |
"step": 3345 | |
}, | |
{ | |
"epoch": 6.01, | |
"learning_rate": 4.887293038100938e-05, | |
"loss": 0.2306, | |
"step": 3350 | |
}, | |
{ | |
"epoch": 6.03, | |
"learning_rate": 4.8852982246159984e-05, | |
"loss": 0.2259, | |
"step": 3360 | |
}, | |
{ | |
"epoch": 6.04, | |
"learning_rate": 4.8833034111310596e-05, | |
"loss": 0.2353, | |
"step": 3370 | |
}, | |
{ | |
"epoch": 6.06, | |
"learning_rate": 4.88130859764612e-05, | |
"loss": 0.2264, | |
"step": 3380 | |
}, | |
{ | |
"epoch": 6.08, | |
"learning_rate": 4.8793137841611814e-05, | |
"loss": 0.2264, | |
"step": 3390 | |
}, | |
{ | |
"epoch": 6.1, | |
"learning_rate": 4.877318970676242e-05, | |
"loss": 0.2403, | |
"step": 3400 | |
}, | |
{ | |
"epoch": 6.12, | |
"learning_rate": 4.8753241571913026e-05, | |
"loss": 0.2476, | |
"step": 3410 | |
}, | |
{ | |
"epoch": 6.13, | |
"learning_rate": 4.873329343706364e-05, | |
"loss": 0.2415, | |
"step": 3420 | |
}, | |
{ | |
"epoch": 6.15, | |
"learning_rate": 4.871334530221425e-05, | |
"loss": 0.26, | |
"step": 3430 | |
}, | |
{ | |
"epoch": 6.17, | |
"learning_rate": 4.869339716736485e-05, | |
"loss": 0.2659, | |
"step": 3440 | |
}, | |
{ | |
"epoch": 6.19, | |
"learning_rate": 4.867344903251546e-05, | |
"loss": 0.2114, | |
"step": 3450 | |
}, | |
{ | |
"epoch": 6.21, | |
"learning_rate": 4.8653500897666074e-05, | |
"loss": 0.2485, | |
"step": 3460 | |
}, | |
{ | |
"epoch": 6.22, | |
"learning_rate": 4.863355276281668e-05, | |
"loss": 0.2247, | |
"step": 3470 | |
}, | |
{ | |
"epoch": 6.24, | |
"learning_rate": 4.8613604627967285e-05, | |
"loss": 0.2595, | |
"step": 3480 | |
}, | |
{ | |
"epoch": 6.26, | |
"learning_rate": 4.85936564931179e-05, | |
"loss": 0.2193, | |
"step": 3490 | |
}, | |
{ | |
"epoch": 6.28, | |
"learning_rate": 4.85737083582685e-05, | |
"loss": 0.2051, | |
"step": 3500 | |
}, | |
{ | |
"epoch": 6.3, | |
"learning_rate": 4.8553760223419116e-05, | |
"loss": 0.2105, | |
"step": 3510 | |
}, | |
{ | |
"epoch": 6.31, | |
"learning_rate": 4.853381208856972e-05, | |
"loss": 0.2104, | |
"step": 3520 | |
}, | |
{ | |
"epoch": 6.33, | |
"learning_rate": 4.851386395372033e-05, | |
"loss": 0.2595, | |
"step": 3530 | |
}, | |
{ | |
"epoch": 6.35, | |
"learning_rate": 4.849391581887094e-05, | |
"loss": 0.2199, | |
"step": 3540 | |
}, | |
{ | |
"epoch": 6.37, | |
"learning_rate": 4.847396768402155e-05, | |
"loss": 0.2463, | |
"step": 3550 | |
}, | |
{ | |
"epoch": 6.39, | |
"learning_rate": 4.845401954917215e-05, | |
"loss": 0.221, | |
"step": 3560 | |
}, | |
{ | |
"epoch": 6.4, | |
"learning_rate": 4.843407141432276e-05, | |
"loss": 0.2207, | |
"step": 3570 | |
}, | |
{ | |
"epoch": 6.42, | |
"learning_rate": 4.8414123279473375e-05, | |
"loss": 0.2319, | |
"step": 3580 | |
}, | |
{ | |
"epoch": 6.44, | |
"learning_rate": 4.839417514462398e-05, | |
"loss": 0.2333, | |
"step": 3590 | |
}, | |
{ | |
"epoch": 6.46, | |
"learning_rate": 4.8374227009774586e-05, | |
"loss": 0.237, | |
"step": 3600 | |
}, | |
{ | |
"epoch": 6.48, | |
"learning_rate": 4.83542788749252e-05, | |
"loss": 0.2096, | |
"step": 3610 | |
}, | |
{ | |
"epoch": 6.49, | |
"learning_rate": 4.8334330740075804e-05, | |
"loss": 0.236, | |
"step": 3620 | |
}, | |
{ | |
"epoch": 6.51, | |
"learning_rate": 4.831438260522642e-05, | |
"loss": 0.2376, | |
"step": 3630 | |
}, | |
{ | |
"epoch": 6.53, | |
"learning_rate": 4.829443447037702e-05, | |
"loss": 0.251, | |
"step": 3640 | |
}, | |
{ | |
"epoch": 6.55, | |
"learning_rate": 4.827448633552763e-05, | |
"loss": 0.1974, | |
"step": 3650 | |
}, | |
{ | |
"epoch": 6.57, | |
"learning_rate": 4.825453820067824e-05, | |
"loss": 0.2334, | |
"step": 3660 | |
}, | |
{ | |
"epoch": 6.58, | |
"learning_rate": 4.823459006582885e-05, | |
"loss": 0.2289, | |
"step": 3670 | |
}, | |
{ | |
"epoch": 6.6, | |
"learning_rate": 4.821464193097945e-05, | |
"loss": 0.2252, | |
"step": 3680 | |
}, | |
{ | |
"epoch": 6.62, | |
"learning_rate": 4.8194693796130064e-05, | |
"loss": 0.239, | |
"step": 3690 | |
}, | |
{ | |
"epoch": 6.64, | |
"learning_rate": 4.8174745661280676e-05, | |
"loss": 0.2166, | |
"step": 3700 | |
}, | |
{ | |
"epoch": 6.65, | |
"learning_rate": 4.815479752643128e-05, | |
"loss": 0.225, | |
"step": 3710 | |
}, | |
{ | |
"epoch": 6.67, | |
"learning_rate": 4.813484939158189e-05, | |
"loss": 0.2038, | |
"step": 3720 | |
}, | |
{ | |
"epoch": 6.69, | |
"learning_rate": 4.81149012567325e-05, | |
"loss": 0.214, | |
"step": 3730 | |
}, | |
{ | |
"epoch": 6.71, | |
"learning_rate": 4.8094953121883106e-05, | |
"loss": 0.1958, | |
"step": 3740 | |
}, | |
{ | |
"epoch": 6.73, | |
"learning_rate": 4.807500498703372e-05, | |
"loss": 0.2275, | |
"step": 3750 | |
}, | |
{ | |
"epoch": 6.74, | |
"learning_rate": 4.8055056852184324e-05, | |
"loss": 0.2324, | |
"step": 3760 | |
}, | |
{ | |
"epoch": 6.76, | |
"learning_rate": 4.803510871733493e-05, | |
"loss": 0.2202, | |
"step": 3770 | |
}, | |
{ | |
"epoch": 6.78, | |
"learning_rate": 4.801516058248554e-05, | |
"loss": 0.2285, | |
"step": 3780 | |
}, | |
{ | |
"epoch": 6.8, | |
"learning_rate": 4.7995212447636154e-05, | |
"loss": 0.2385, | |
"step": 3790 | |
}, | |
{ | |
"epoch": 6.82, | |
"learning_rate": 4.797526431278675e-05, | |
"loss": 0.2366, | |
"step": 3800 | |
}, | |
{ | |
"epoch": 6.83, | |
"learning_rate": 4.7955316177937365e-05, | |
"loss": 0.2221, | |
"step": 3810 | |
}, | |
{ | |
"epoch": 6.85, | |
"learning_rate": 4.793536804308798e-05, | |
"loss": 0.2349, | |
"step": 3820 | |
}, | |
{ | |
"epoch": 6.87, | |
"learning_rate": 4.791541990823858e-05, | |
"loss": 0.2749, | |
"step": 3830 | |
}, | |
{ | |
"epoch": 6.89, | |
"learning_rate": 4.789547177338919e-05, | |
"loss": 0.2168, | |
"step": 3840 | |
}, | |
{ | |
"epoch": 6.91, | |
"learning_rate": 4.78755236385398e-05, | |
"loss": 0.1872, | |
"step": 3850 | |
}, | |
{ | |
"epoch": 6.92, | |
"learning_rate": 4.785557550369041e-05, | |
"loss": 0.2502, | |
"step": 3860 | |
}, | |
{ | |
"epoch": 6.94, | |
"learning_rate": 4.783562736884102e-05, | |
"loss": 0.1964, | |
"step": 3870 | |
}, | |
{ | |
"epoch": 6.96, | |
"learning_rate": 4.7815679233991625e-05, | |
"loss": 0.2425, | |
"step": 3880 | |
}, | |
{ | |
"epoch": 6.98, | |
"learning_rate": 4.779573109914223e-05, | |
"loss": 0.2481, | |
"step": 3890 | |
}, | |
{ | |
"epoch": 7.0, | |
"learning_rate": 4.777578296429284e-05, | |
"loss": 0.2376, | |
"step": 3900 | |
}, | |
{ | |
"epoch": 7.0, | |
"eval_accuracy": { | |
"accuracy": 0.9595761381475667 | |
}, | |
"eval_f1": { | |
"f1": 0.95694652190804 | |
}, | |
"eval_loss": 0.10949217528104782, | |
"eval_precision": { | |
"precision": 0.9583449741949119 | |
}, | |
"eval_recall": { | |
"recall": 0.9557128312494094 | |
}, | |
"eval_runtime": 99.0631, | |
"eval_samples_per_second": 180.047, | |
"eval_steps_per_second": 5.633, | |
"step": 3902 | |
}, | |
{ | |
"epoch": 7.01, | |
"learning_rate": 4.775583482944345e-05, | |
"loss": 0.2048, | |
"step": 3910 | |
}, | |
{ | |
"epoch": 7.03, | |
"learning_rate": 4.7735886694594054e-05, | |
"loss": 0.2573, | |
"step": 3920 | |
}, | |
{ | |
"epoch": 7.05, | |
"learning_rate": 4.7715938559744666e-05, | |
"loss": 0.2261, | |
"step": 3930 | |
}, | |
{ | |
"epoch": 7.07, | |
"learning_rate": 4.769599042489528e-05, | |
"loss": 0.2094, | |
"step": 3940 | |
}, | |
{ | |
"epoch": 7.09, | |
"learning_rate": 4.767604229004588e-05, | |
"loss": 0.2148, | |
"step": 3950 | |
}, | |
{ | |
"epoch": 7.1, | |
"learning_rate": 4.765609415519649e-05, | |
"loss": 0.2023, | |
"step": 3960 | |
}, | |
{ | |
"epoch": 7.12, | |
"learning_rate": 4.76361460203471e-05, | |
"loss": 0.2031, | |
"step": 3970 | |
}, | |
{ | |
"epoch": 7.14, | |
"learning_rate": 4.761619788549771e-05, | |
"loss": 0.2051, | |
"step": 3980 | |
}, | |
{ | |
"epoch": 7.16, | |
"learning_rate": 4.7596249750648313e-05, | |
"loss": 0.1944, | |
"step": 3990 | |
}, | |
{ | |
"epoch": 7.17, | |
"learning_rate": 4.7576301615798926e-05, | |
"loss": 0.2319, | |
"step": 4000 | |
}, | |
{ | |
"epoch": 7.19, | |
"learning_rate": 4.755635348094953e-05, | |
"loss": 0.2164, | |
"step": 4010 | |
}, | |
{ | |
"epoch": 7.21, | |
"learning_rate": 4.7536405346100144e-05, | |
"loss": 0.1977, | |
"step": 4020 | |
}, | |
{ | |
"epoch": 7.23, | |
"learning_rate": 4.751645721125075e-05, | |
"loss": 0.2499, | |
"step": 4030 | |
}, | |
{ | |
"epoch": 7.25, | |
"learning_rate": 4.7496509076401355e-05, | |
"loss": 0.1717, | |
"step": 4040 | |
}, | |
{ | |
"epoch": 7.26, | |
"learning_rate": 4.747656094155197e-05, | |
"loss": 0.1886, | |
"step": 4050 | |
}, | |
{ | |
"epoch": 7.28, | |
"learning_rate": 4.745661280670258e-05, | |
"loss": 0.2132, | |
"step": 4060 | |
}, | |
{ | |
"epoch": 7.3, | |
"learning_rate": 4.743666467185318e-05, | |
"loss": 0.2238, | |
"step": 4070 | |
}, | |
{ | |
"epoch": 7.32, | |
"learning_rate": 4.741671653700379e-05, | |
"loss": 0.2213, | |
"step": 4080 | |
}, | |
{ | |
"epoch": 7.34, | |
"learning_rate": 4.7396768402154403e-05, | |
"loss": 0.1884, | |
"step": 4090 | |
}, | |
{ | |
"epoch": 7.35, | |
"learning_rate": 4.737682026730501e-05, | |
"loss": 0.2112, | |
"step": 4100 | |
}, | |
{ | |
"epoch": 7.37, | |
"learning_rate": 4.7356872132455615e-05, | |
"loss": 0.218, | |
"step": 4110 | |
}, | |
{ | |
"epoch": 7.39, | |
"learning_rate": 4.733692399760623e-05, | |
"loss": 0.1901, | |
"step": 4120 | |
}, | |
{ | |
"epoch": 7.41, | |
"learning_rate": 4.731697586275683e-05, | |
"loss": 0.2182, | |
"step": 4130 | |
}, | |
{ | |
"epoch": 7.43, | |
"learning_rate": 4.7297027727907445e-05, | |
"loss": 0.2061, | |
"step": 4140 | |
}, | |
{ | |
"epoch": 7.44, | |
"learning_rate": 4.727707959305805e-05, | |
"loss": 0.1767, | |
"step": 4150 | |
}, | |
{ | |
"epoch": 7.46, | |
"learning_rate": 4.7257131458208656e-05, | |
"loss": 0.2376, | |
"step": 4160 | |
}, | |
{ | |
"epoch": 7.48, | |
"learning_rate": 4.723718332335927e-05, | |
"loss": 0.203, | |
"step": 4170 | |
}, | |
{ | |
"epoch": 7.5, | |
"learning_rate": 4.721723518850988e-05, | |
"loss": 0.1879, | |
"step": 4180 | |
}, | |
{ | |
"epoch": 7.52, | |
"learning_rate": 4.719728705366048e-05, | |
"loss": 0.2355, | |
"step": 4190 | |
}, | |
{ | |
"epoch": 7.53, | |
"learning_rate": 4.717733891881109e-05, | |
"loss": 0.1916, | |
"step": 4200 | |
}, | |
{ | |
"epoch": 7.55, | |
"learning_rate": 4.7157390783961705e-05, | |
"loss": 0.2133, | |
"step": 4210 | |
}, | |
{ | |
"epoch": 7.57, | |
"learning_rate": 4.713744264911231e-05, | |
"loss": 0.208, | |
"step": 4220 | |
}, | |
{ | |
"epoch": 7.59, | |
"learning_rate": 4.7117494514262916e-05, | |
"loss": 0.2364, | |
"step": 4230 | |
}, | |
{ | |
"epoch": 7.61, | |
"learning_rate": 4.709754637941353e-05, | |
"loss": 0.2049, | |
"step": 4240 | |
}, | |
{ | |
"epoch": 7.62, | |
"learning_rate": 4.7077598244564134e-05, | |
"loss": 0.2296, | |
"step": 4250 | |
}, | |
{ | |
"epoch": 7.64, | |
"learning_rate": 4.7057650109714746e-05, | |
"loss": 0.221, | |
"step": 4260 | |
}, | |
{ | |
"epoch": 7.66, | |
"learning_rate": 4.703770197486535e-05, | |
"loss": 0.1917, | |
"step": 4270 | |
}, | |
{ | |
"epoch": 7.68, | |
"learning_rate": 4.701775384001596e-05, | |
"loss": 0.2183, | |
"step": 4280 | |
}, | |
{ | |
"epoch": 7.7, | |
"learning_rate": 4.699780570516657e-05, | |
"loss": 0.1869, | |
"step": 4290 | |
}, | |
{ | |
"epoch": 7.71, | |
"learning_rate": 4.697785757031718e-05, | |
"loss": 0.1975, | |
"step": 4300 | |
}, | |
{ | |
"epoch": 7.73, | |
"learning_rate": 4.695790943546778e-05, | |
"loss": 0.2274, | |
"step": 4310 | |
}, | |
{ | |
"epoch": 7.75, | |
"learning_rate": 4.693796130061839e-05, | |
"loss": 0.1973, | |
"step": 4320 | |
}, | |
{ | |
"epoch": 7.77, | |
"learning_rate": 4.6918013165769006e-05, | |
"loss": 0.2005, | |
"step": 4330 | |
}, | |
{ | |
"epoch": 7.78, | |
"learning_rate": 4.689806503091961e-05, | |
"loss": 0.1978, | |
"step": 4340 | |
}, | |
{ | |
"epoch": 7.8, | |
"learning_rate": 4.687811689607022e-05, | |
"loss": 0.1986, | |
"step": 4350 | |
}, | |
{ | |
"epoch": 7.82, | |
"learning_rate": 4.685816876122083e-05, | |
"loss": 0.2013, | |
"step": 4360 | |
}, | |
{ | |
"epoch": 7.84, | |
"learning_rate": 4.6838220626371435e-05, | |
"loss": 0.2218, | |
"step": 4370 | |
}, | |
{ | |
"epoch": 7.86, | |
"learning_rate": 4.681827249152205e-05, | |
"loss": 0.2168, | |
"step": 4380 | |
}, | |
{ | |
"epoch": 7.87, | |
"learning_rate": 4.679832435667265e-05, | |
"loss": 0.1709, | |
"step": 4390 | |
}, | |
{ | |
"epoch": 7.89, | |
"learning_rate": 4.677837622182326e-05, | |
"loss": 0.2249, | |
"step": 4400 | |
}, | |
{ | |
"epoch": 7.91, | |
"learning_rate": 4.675842808697387e-05, | |
"loss": 0.2246, | |
"step": 4410 | |
}, | |
{ | |
"epoch": 7.93, | |
"learning_rate": 4.673847995212448e-05, | |
"loss": 0.2163, | |
"step": 4420 | |
}, | |
{ | |
"epoch": 7.95, | |
"learning_rate": 4.671853181727508e-05, | |
"loss": 0.1922, | |
"step": 4430 | |
}, | |
{ | |
"epoch": 7.96, | |
"learning_rate": 4.6698583682425695e-05, | |
"loss": 0.2157, | |
"step": 4440 | |
}, | |
{ | |
"epoch": 7.98, | |
"learning_rate": 4.667863554757631e-05, | |
"loss": 0.2272, | |
"step": 4450 | |
}, | |
{ | |
"epoch": 8.0, | |
"learning_rate": 4.665868741272691e-05, | |
"loss": 0.2027, | |
"step": 4460 | |
}, | |
{ | |
"epoch": 8.0, | |
"eval_accuracy": { | |
"accuracy": 0.9548665620094191 | |
}, | |
"eval_f1": { | |
"f1": 0.951539693294557 | |
}, | |
"eval_loss": 0.12259286642074585, | |
"eval_precision": { | |
"precision": 0.9548733769718504 | |
}, | |
"eval_recall": { | |
"recall": 0.9494659029809656 | |
}, | |
"eval_runtime": 98.7615, | |
"eval_samples_per_second": 180.597, | |
"eval_steps_per_second": 5.65, | |
"step": 4460 | |
}, | |
{ | |
"epoch": 8.02, | |
"learning_rate": 4.663873927787752e-05, | |
"loss": 0.1895, | |
"step": 4470 | |
}, | |
{ | |
"epoch": 8.04, | |
"learning_rate": 4.661879114302813e-05, | |
"loss": 0.1628, | |
"step": 4480 | |
}, | |
{ | |
"epoch": 8.05, | |
"learning_rate": 4.6598843008178736e-05, | |
"loss": 0.1996, | |
"step": 4490 | |
}, | |
{ | |
"epoch": 8.07, | |
"learning_rate": 4.657889487332935e-05, | |
"loss": 0.2024, | |
"step": 4500 | |
}, | |
{ | |
"epoch": 8.09, | |
"learning_rate": 4.6558946738479954e-05, | |
"loss": 0.1887, | |
"step": 4510 | |
}, | |
{ | |
"epoch": 8.11, | |
"learning_rate": 4.653899860363056e-05, | |
"loss": 0.2224, | |
"step": 4520 | |
}, | |
{ | |
"epoch": 8.13, | |
"learning_rate": 4.651905046878117e-05, | |
"loss": 0.1992, | |
"step": 4530 | |
}, | |
{ | |
"epoch": 8.14, | |
"learning_rate": 4.6499102333931784e-05, | |
"loss": 0.2027, | |
"step": 4540 | |
}, | |
{ | |
"epoch": 8.16, | |
"learning_rate": 4.647915419908238e-05, | |
"loss": 0.1844, | |
"step": 4550 | |
}, | |
{ | |
"epoch": 8.18, | |
"learning_rate": 4.6459206064232996e-05, | |
"loss": 0.1838, | |
"step": 4560 | |
}, | |
{ | |
"epoch": 8.2, | |
"learning_rate": 4.643925792938361e-05, | |
"loss": 0.1897, | |
"step": 4570 | |
}, | |
{ | |
"epoch": 8.22, | |
"learning_rate": 4.6419309794534214e-05, | |
"loss": 0.1794, | |
"step": 4580 | |
}, | |
{ | |
"epoch": 8.23, | |
"learning_rate": 4.639936165968482e-05, | |
"loss": 0.2052, | |
"step": 4590 | |
}, | |
{ | |
"epoch": 8.25, | |
"learning_rate": 4.637941352483543e-05, | |
"loss": 0.225, | |
"step": 4600 | |
}, | |
{ | |
"epoch": 8.27, | |
"learning_rate": 4.635946538998604e-05, | |
"loss": 0.1905, | |
"step": 4610 | |
}, | |
{ | |
"epoch": 8.29, | |
"learning_rate": 4.633951725513665e-05, | |
"loss": 0.1933, | |
"step": 4620 | |
}, | |
{ | |
"epoch": 8.3, | |
"learning_rate": 4.6319569120287255e-05, | |
"loss": 0.2084, | |
"step": 4630 | |
}, | |
{ | |
"epoch": 8.32, | |
"learning_rate": 4.629962098543786e-05, | |
"loss": 0.1947, | |
"step": 4640 | |
}, | |
{ | |
"epoch": 8.34, | |
"learning_rate": 4.627967285058847e-05, | |
"loss": 0.1984, | |
"step": 4650 | |
}, | |
{ | |
"epoch": 8.36, | |
"learning_rate": 4.625972471573908e-05, | |
"loss": 0.21, | |
"step": 4660 | |
}, | |
{ | |
"epoch": 8.38, | |
"learning_rate": 4.6239776580889684e-05, | |
"loss": 0.2083, | |
"step": 4670 | |
}, | |
{ | |
"epoch": 8.39, | |
"learning_rate": 4.62198284460403e-05, | |
"loss": 0.2066, | |
"step": 4680 | |
}, | |
{ | |
"epoch": 8.41, | |
"learning_rate": 4.619988031119091e-05, | |
"loss": 0.1874, | |
"step": 4690 | |
}, | |
{ | |
"epoch": 8.43, | |
"learning_rate": 4.6179932176341515e-05, | |
"loss": 0.2062, | |
"step": 4700 | |
}, | |
{ | |
"epoch": 8.45, | |
"learning_rate": 4.615998404149212e-05, | |
"loss": 0.1714, | |
"step": 4710 | |
}, | |
{ | |
"epoch": 8.47, | |
"learning_rate": 4.614003590664273e-05, | |
"loss": 0.1864, | |
"step": 4720 | |
}, | |
{ | |
"epoch": 8.48, | |
"learning_rate": 4.612008777179334e-05, | |
"loss": 0.1997, | |
"step": 4730 | |
}, | |
{ | |
"epoch": 8.5, | |
"learning_rate": 4.6100139636943944e-05, | |
"loss": 0.2102, | |
"step": 4740 | |
}, | |
{ | |
"epoch": 8.52, | |
"learning_rate": 4.6080191502094556e-05, | |
"loss": 0.2008, | |
"step": 4750 | |
}, | |
{ | |
"epoch": 8.54, | |
"learning_rate": 4.606024336724516e-05, | |
"loss": 0.1874, | |
"step": 4760 | |
}, | |
{ | |
"epoch": 8.56, | |
"learning_rate": 4.6040295232395774e-05, | |
"loss": 0.1718, | |
"step": 4770 | |
}, | |
{ | |
"epoch": 8.57, | |
"learning_rate": 4.602034709754638e-05, | |
"loss": 0.1799, | |
"step": 4780 | |
}, | |
{ | |
"epoch": 8.59, | |
"learning_rate": 4.6000398962696986e-05, | |
"loss": 0.1741, | |
"step": 4790 | |
}, | |
{ | |
"epoch": 8.61, | |
"learning_rate": 4.59804508278476e-05, | |
"loss": 0.1786, | |
"step": 4800 | |
}, | |
{ | |
"epoch": 8.63, | |
"learning_rate": 4.596050269299821e-05, | |
"loss": 0.1853, | |
"step": 4810 | |
}, | |
{ | |
"epoch": 8.65, | |
"learning_rate": 4.5940554558148816e-05, | |
"loss": 0.1964, | |
"step": 4820 | |
}, | |
{ | |
"epoch": 8.66, | |
"learning_rate": 4.592060642329942e-05, | |
"loss": 0.2065, | |
"step": 4830 | |
}, | |
{ | |
"epoch": 8.68, | |
"learning_rate": 4.5900658288450034e-05, | |
"loss": 0.2021, | |
"step": 4840 | |
}, | |
{ | |
"epoch": 8.7, | |
"learning_rate": 4.588071015360064e-05, | |
"loss": 0.1887, | |
"step": 4850 | |
}, | |
{ | |
"epoch": 8.72, | |
"learning_rate": 4.5860762018751245e-05, | |
"loss": 0.2203, | |
"step": 4860 | |
}, | |
{ | |
"epoch": 8.74, | |
"learning_rate": 4.584081388390186e-05, | |
"loss": 0.1808, | |
"step": 4870 | |
}, | |
{ | |
"epoch": 8.75, | |
"learning_rate": 4.582086574905246e-05, | |
"loss": 0.2213, | |
"step": 4880 | |
}, | |
{ | |
"epoch": 8.77, | |
"learning_rate": 4.5800917614203076e-05, | |
"loss": 0.1934, | |
"step": 4890 | |
}, | |
{ | |
"epoch": 8.79, | |
"learning_rate": 4.578096947935368e-05, | |
"loss": 0.2048, | |
"step": 4900 | |
}, | |
{ | |
"epoch": 8.81, | |
"learning_rate": 4.576102134450429e-05, | |
"loss": 0.217, | |
"step": 4910 | |
}, | |
{ | |
"epoch": 8.83, | |
"learning_rate": 4.57410732096549e-05, | |
"loss": 0.2161, | |
"step": 4920 | |
}, | |
{ | |
"epoch": 8.84, | |
"learning_rate": 4.572112507480551e-05, | |
"loss": 0.2145, | |
"step": 4930 | |
}, | |
{ | |
"epoch": 8.86, | |
"learning_rate": 4.570117693995612e-05, | |
"loss": 0.1722, | |
"step": 4940 | |
}, | |
{ | |
"epoch": 8.88, | |
"learning_rate": 4.568122880510672e-05, | |
"loss": 0.1904, | |
"step": 4950 | |
}, | |
{ | |
"epoch": 8.9, | |
"learning_rate": 4.5661280670257335e-05, | |
"loss": 0.1888, | |
"step": 4960 | |
}, | |
{ | |
"epoch": 8.91, | |
"learning_rate": 4.564133253540794e-05, | |
"loss": 0.1931, | |
"step": 4970 | |
}, | |
{ | |
"epoch": 8.93, | |
"learning_rate": 4.5621384400558546e-05, | |
"loss": 0.1607, | |
"step": 4980 | |
}, | |
{ | |
"epoch": 8.95, | |
"learning_rate": 4.560143626570916e-05, | |
"loss": 0.2014, | |
"step": 4990 | |
}, | |
{ | |
"epoch": 8.97, | |
"learning_rate": 4.5581488130859764e-05, | |
"loss": 0.2043, | |
"step": 5000 | |
}, | |
{ | |
"epoch": 8.99, | |
"learning_rate": 4.556153999601038e-05, | |
"loss": 0.1807, | |
"step": 5010 | |
}, | |
{ | |
"epoch": 9.0, | |
"eval_accuracy": { | |
"accuracy": 0.9633886521641624 | |
}, | |
"eval_f1": { | |
"f1": 0.9611100614395742 | |
}, | |
"eval_loss": 0.10741779208183289, | |
"eval_precision": { | |
"precision": 0.9618083534790791 | |
}, | |
"eval_recall": { | |
"recall": 0.9605394028604899 | |
}, | |
"eval_runtime": 98.612, | |
"eval_samples_per_second": 180.87, | |
"eval_steps_per_second": 5.659, | |
"step": 5017 | |
}, | |
{ | |
"epoch": 9.0, | |
"learning_rate": 4.554159186116098e-05, | |
"loss": 0.1723, | |
"step": 5020 | |
}, | |
{ | |
"epoch": 9.02, | |
"learning_rate": 4.5521643726311595e-05, | |
"loss": 0.1929, | |
"step": 5030 | |
}, | |
{ | |
"epoch": 9.04, | |
"learning_rate": 4.55016955914622e-05, | |
"loss": 0.165, | |
"step": 5040 | |
}, | |
{ | |
"epoch": 9.06, | |
"learning_rate": 4.548174745661281e-05, | |
"loss": 0.202, | |
"step": 5050 | |
}, | |
{ | |
"epoch": 9.08, | |
"learning_rate": 4.546179932176342e-05, | |
"loss": 0.2107, | |
"step": 5060 | |
}, | |
{ | |
"epoch": 9.09, | |
"learning_rate": 4.5441851186914024e-05, | |
"loss": 0.1745, | |
"step": 5070 | |
}, | |
{ | |
"epoch": 9.11, | |
"learning_rate": 4.5421903052064636e-05, | |
"loss": 0.2109, | |
"step": 5080 | |
}, | |
{ | |
"epoch": 9.13, | |
"learning_rate": 4.540195491721524e-05, | |
"loss": 0.1916, | |
"step": 5090 | |
}, | |
{ | |
"epoch": 9.15, | |
"learning_rate": 4.538200678236585e-05, | |
"loss": 0.204, | |
"step": 5100 | |
}, | |
{ | |
"epoch": 9.17, | |
"learning_rate": 4.536205864751646e-05, | |
"loss": 0.1683, | |
"step": 5110 | |
}, | |
{ | |
"epoch": 9.18, | |
"learning_rate": 4.5342110512667066e-05, | |
"loss": 0.1848, | |
"step": 5120 | |
}, | |
{ | |
"epoch": 9.2, | |
"learning_rate": 4.532216237781768e-05, | |
"loss": 0.1638, | |
"step": 5130 | |
}, | |
{ | |
"epoch": 9.22, | |
"learning_rate": 4.5302214242968284e-05, | |
"loss": 0.1545, | |
"step": 5140 | |
}, | |
{ | |
"epoch": 9.24, | |
"learning_rate": 4.5282266108118896e-05, | |
"loss": 0.1886, | |
"step": 5150 | |
}, | |
{ | |
"epoch": 9.26, | |
"learning_rate": 4.52623179732695e-05, | |
"loss": 0.188, | |
"step": 5160 | |
}, | |
{ | |
"epoch": 9.27, | |
"learning_rate": 4.5242369838420114e-05, | |
"loss": 0.197, | |
"step": 5170 | |
}, | |
{ | |
"epoch": 9.29, | |
"learning_rate": 4.522242170357072e-05, | |
"loss": 0.1625, | |
"step": 5180 | |
}, | |
{ | |
"epoch": 9.31, | |
"learning_rate": 4.5202473568721325e-05, | |
"loss": 0.1942, | |
"step": 5190 | |
}, | |
{ | |
"epoch": 9.33, | |
"learning_rate": 4.518252543387194e-05, | |
"loss": 0.1781, | |
"step": 5200 | |
}, | |
{ | |
"epoch": 9.35, | |
"learning_rate": 4.516257729902254e-05, | |
"loss": 0.1661, | |
"step": 5210 | |
}, | |
{ | |
"epoch": 9.36, | |
"learning_rate": 4.514262916417315e-05, | |
"loss": 0.197, | |
"step": 5220 | |
}, | |
{ | |
"epoch": 9.38, | |
"learning_rate": 4.512268102932376e-05, | |
"loss": 0.193, | |
"step": 5230 | |
}, | |
{ | |
"epoch": 9.4, | |
"learning_rate": 4.510273289447437e-05, | |
"loss": 0.1884, | |
"step": 5240 | |
}, | |
{ | |
"epoch": 9.42, | |
"learning_rate": 4.508278475962498e-05, | |
"loss": 0.1829, | |
"step": 5250 | |
}, | |
{ | |
"epoch": 9.43, | |
"learning_rate": 4.5062836624775585e-05, | |
"loss": 0.1854, | |
"step": 5260 | |
}, | |
{ | |
"epoch": 9.45, | |
"learning_rate": 4.50428884899262e-05, | |
"loss": 0.2, | |
"step": 5270 | |
}, | |
{ | |
"epoch": 9.47, | |
"learning_rate": 4.50229403550768e-05, | |
"loss": 0.1769, | |
"step": 5280 | |
}, | |
{ | |
"epoch": 9.49, | |
"learning_rate": 4.5002992220227415e-05, | |
"loss": 0.1876, | |
"step": 5290 | |
}, | |
{ | |
"epoch": 9.51, | |
"learning_rate": 4.498304408537802e-05, | |
"loss": 0.1598, | |
"step": 5300 | |
}, | |
{ | |
"epoch": 9.52, | |
"learning_rate": 4.4963095950528626e-05, | |
"loss": 0.1857, | |
"step": 5310 | |
}, | |
{ | |
"epoch": 9.54, | |
"learning_rate": 4.494314781567924e-05, | |
"loss": 0.1982, | |
"step": 5320 | |
}, | |
{ | |
"epoch": 9.56, | |
"learning_rate": 4.4923199680829844e-05, | |
"loss": 0.1846, | |
"step": 5330 | |
}, | |
{ | |
"epoch": 9.58, | |
"learning_rate": 4.490325154598045e-05, | |
"loss": 0.1707, | |
"step": 5340 | |
}, | |
{ | |
"epoch": 9.6, | |
"learning_rate": 4.488330341113106e-05, | |
"loss": 0.1952, | |
"step": 5350 | |
}, | |
{ | |
"epoch": 9.61, | |
"learning_rate": 4.486335527628167e-05, | |
"loss": 0.1499, | |
"step": 5360 | |
}, | |
{ | |
"epoch": 9.63, | |
"learning_rate": 4.484340714143228e-05, | |
"loss": 0.1857, | |
"step": 5370 | |
}, | |
{ | |
"epoch": 9.65, | |
"learning_rate": 4.4823459006582886e-05, | |
"loss": 0.1557, | |
"step": 5380 | |
}, | |
{ | |
"epoch": 9.67, | |
"learning_rate": 4.48035108717335e-05, | |
"loss": 0.1716, | |
"step": 5390 | |
}, | |
{ | |
"epoch": 9.69, | |
"learning_rate": 4.4783562736884104e-05, | |
"loss": 0.1696, | |
"step": 5400 | |
}, | |
{ | |
"epoch": 9.7, | |
"learning_rate": 4.476361460203471e-05, | |
"loss": 0.1695, | |
"step": 5410 | |
}, | |
{ | |
"epoch": 9.72, | |
"learning_rate": 4.474366646718532e-05, | |
"loss": 0.2097, | |
"step": 5420 | |
}, | |
{ | |
"epoch": 9.74, | |
"learning_rate": 4.472371833233593e-05, | |
"loss": 0.1986, | |
"step": 5430 | |
}, | |
{ | |
"epoch": 9.76, | |
"learning_rate": 4.470377019748654e-05, | |
"loss": 0.1548, | |
"step": 5440 | |
}, | |
{ | |
"epoch": 9.78, | |
"learning_rate": 4.4683822062637145e-05, | |
"loss": 0.1907, | |
"step": 5450 | |
}, | |
{ | |
"epoch": 9.79, | |
"learning_rate": 4.466387392778775e-05, | |
"loss": 0.1706, | |
"step": 5460 | |
}, | |
{ | |
"epoch": 9.81, | |
"learning_rate": 4.4643925792938363e-05, | |
"loss": 0.1791, | |
"step": 5470 | |
}, | |
{ | |
"epoch": 9.83, | |
"learning_rate": 4.462397765808897e-05, | |
"loss": 0.1913, | |
"step": 5480 | |
}, | |
{ | |
"epoch": 9.85, | |
"learning_rate": 4.4604029523239575e-05, | |
"loss": 0.1779, | |
"step": 5490 | |
}, | |
{ | |
"epoch": 9.87, | |
"learning_rate": 4.458408138839019e-05, | |
"loss": 0.172, | |
"step": 5500 | |
}, | |
{ | |
"epoch": 9.88, | |
"learning_rate": 4.45641332535408e-05, | |
"loss": 0.1696, | |
"step": 5510 | |
}, | |
{ | |
"epoch": 9.9, | |
"learning_rate": 4.4544185118691405e-05, | |
"loss": 0.1666, | |
"step": 5520 | |
}, | |
{ | |
"epoch": 9.92, | |
"learning_rate": 4.452423698384201e-05, | |
"loss": 0.1601, | |
"step": 5530 | |
}, | |
{ | |
"epoch": 9.94, | |
"learning_rate": 4.450428884899262e-05, | |
"loss": 0.1778, | |
"step": 5540 | |
}, | |
{ | |
"epoch": 9.96, | |
"learning_rate": 4.448434071414323e-05, | |
"loss": 0.1732, | |
"step": 5550 | |
}, | |
{ | |
"epoch": 9.97, | |
"learning_rate": 4.446439257929384e-05, | |
"loss": 0.1886, | |
"step": 5560 | |
}, | |
{ | |
"epoch": 9.99, | |
"learning_rate": 4.4444444444444447e-05, | |
"loss": 0.2238, | |
"step": 5570 | |
}, | |
{ | |
"epoch": 10.0, | |
"eval_accuracy": { | |
"accuracy": 0.967425431711146 | |
}, | |
"eval_f1": { | |
"f1": 0.9654836432017294 | |
}, | |
"eval_loss": 0.09295742213726044, | |
"eval_precision": { | |
"precision": 0.9661647785647793 | |
}, | |
"eval_recall": { | |
"recall": 0.9648241815791808 | |
}, | |
"eval_runtime": 98.8994, | |
"eval_samples_per_second": 180.345, | |
"eval_steps_per_second": 5.642, | |
"step": 5575 | |
}, | |
{ | |
"epoch": 10.01, | |
"learning_rate": 4.442449630959505e-05, | |
"loss": 0.1974, | |
"step": 5580 | |
}, | |
{ | |
"epoch": 10.03, | |
"learning_rate": 4.4404548174745665e-05, | |
"loss": 0.1731, | |
"step": 5590 | |
}, | |
{ | |
"epoch": 10.04, | |
"learning_rate": 4.438460003989628e-05, | |
"loss": 0.1664, | |
"step": 5600 | |
}, | |
{ | |
"epoch": 10.06, | |
"learning_rate": 4.4364651905046876e-05, | |
"loss": 0.1904, | |
"step": 5610 | |
}, | |
{ | |
"epoch": 10.08, | |
"learning_rate": 4.434470377019749e-05, | |
"loss": 0.1393, | |
"step": 5620 | |
}, | |
{ | |
"epoch": 10.1, | |
"learning_rate": 4.43247556353481e-05, | |
"loss": 0.1697, | |
"step": 5630 | |
}, | |
{ | |
"epoch": 10.12, | |
"learning_rate": 4.4304807500498706e-05, | |
"loss": 0.1689, | |
"step": 5640 | |
}, | |
{ | |
"epoch": 10.13, | |
"learning_rate": 4.428485936564931e-05, | |
"loss": 0.1643, | |
"step": 5650 | |
}, | |
{ | |
"epoch": 10.15, | |
"learning_rate": 4.4264911230799924e-05, | |
"loss": 0.1638, | |
"step": 5660 | |
}, | |
{ | |
"epoch": 10.17, | |
"learning_rate": 4.424496309595053e-05, | |
"loss": 0.1539, | |
"step": 5670 | |
}, | |
{ | |
"epoch": 10.19, | |
"learning_rate": 4.422501496110114e-05, | |
"loss": 0.1588, | |
"step": 5680 | |
}, | |
{ | |
"epoch": 10.21, | |
"learning_rate": 4.420506682625175e-05, | |
"loss": 0.1461, | |
"step": 5690 | |
}, | |
{ | |
"epoch": 10.22, | |
"learning_rate": 4.418511869140235e-05, | |
"loss": 0.1629, | |
"step": 5700 | |
}, | |
{ | |
"epoch": 10.24, | |
"learning_rate": 4.4165170556552966e-05, | |
"loss": 0.1755, | |
"step": 5710 | |
}, | |
{ | |
"epoch": 10.26, | |
"learning_rate": 4.414522242170358e-05, | |
"loss": 0.1783, | |
"step": 5720 | |
}, | |
{ | |
"epoch": 10.28, | |
"learning_rate": 4.412527428685418e-05, | |
"loss": 0.1712, | |
"step": 5730 | |
}, | |
{ | |
"epoch": 10.3, | |
"learning_rate": 4.410532615200479e-05, | |
"loss": 0.1646, | |
"step": 5740 | |
}, | |
{ | |
"epoch": 10.31, | |
"learning_rate": 4.40853780171554e-05, | |
"loss": 0.1553, | |
"step": 5750 | |
}, | |
{ | |
"epoch": 10.33, | |
"learning_rate": 4.406542988230601e-05, | |
"loss": 0.1619, | |
"step": 5760 | |
}, | |
{ | |
"epoch": 10.35, | |
"learning_rate": 4.404548174745661e-05, | |
"loss": 0.1991, | |
"step": 5770 | |
}, | |
{ | |
"epoch": 10.37, | |
"learning_rate": 4.4025533612607225e-05, | |
"loss": 0.1806, | |
"step": 5780 | |
}, | |
{ | |
"epoch": 10.39, | |
"learning_rate": 4.400558547775783e-05, | |
"loss": 0.1616, | |
"step": 5790 | |
}, | |
{ | |
"epoch": 10.4, | |
"learning_rate": 4.398563734290844e-05, | |
"loss": 0.1912, | |
"step": 5800 | |
}, | |
{ | |
"epoch": 10.42, | |
"learning_rate": 4.396568920805905e-05, | |
"loss": 0.1764, | |
"step": 5810 | |
}, | |
{ | |
"epoch": 10.44, | |
"learning_rate": 4.3945741073209655e-05, | |
"loss": 0.1591, | |
"step": 5820 | |
}, | |
{ | |
"epoch": 10.46, | |
"learning_rate": 4.392579293836027e-05, | |
"loss": 0.1534, | |
"step": 5830 | |
}, | |
{ | |
"epoch": 10.48, | |
"learning_rate": 4.390584480351088e-05, | |
"loss": 0.1773, | |
"step": 5840 | |
}, | |
{ | |
"epoch": 10.49, | |
"learning_rate": 4.388589666866148e-05, | |
"loss": 0.1775, | |
"step": 5850 | |
}, | |
{ | |
"epoch": 10.51, | |
"learning_rate": 4.386594853381209e-05, | |
"loss": 0.1761, | |
"step": 5860 | |
}, | |
{ | |
"epoch": 10.53, | |
"learning_rate": 4.38460003989627e-05, | |
"loss": 0.1387, | |
"step": 5870 | |
}, | |
{ | |
"epoch": 10.55, | |
"learning_rate": 4.382605226411331e-05, | |
"loss": 0.2006, | |
"step": 5880 | |
}, | |
{ | |
"epoch": 10.57, | |
"learning_rate": 4.3806104129263914e-05, | |
"loss": 0.166, | |
"step": 5890 | |
}, | |
{ | |
"epoch": 10.58, | |
"learning_rate": 4.3786155994414526e-05, | |
"loss": 0.1741, | |
"step": 5900 | |
}, | |
{ | |
"epoch": 10.6, | |
"learning_rate": 4.376620785956513e-05, | |
"loss": 0.1627, | |
"step": 5910 | |
}, | |
{ | |
"epoch": 10.62, | |
"learning_rate": 4.3746259724715744e-05, | |
"loss": 0.1704, | |
"step": 5920 | |
}, | |
{ | |
"epoch": 10.64, | |
"learning_rate": 4.372631158986635e-05, | |
"loss": 0.1686, | |
"step": 5930 | |
}, | |
{ | |
"epoch": 10.65, | |
"learning_rate": 4.3706363455016956e-05, | |
"loss": 0.1717, | |
"step": 5940 | |
}, | |
{ | |
"epoch": 10.67, | |
"learning_rate": 4.368641532016757e-05, | |
"loss": 0.1795, | |
"step": 5950 | |
}, | |
{ | |
"epoch": 10.69, | |
"learning_rate": 4.366646718531818e-05, | |
"loss": 0.196, | |
"step": 5960 | |
}, | |
{ | |
"epoch": 10.71, | |
"learning_rate": 4.364651905046878e-05, | |
"loss": 0.1749, | |
"step": 5970 | |
}, | |
{ | |
"epoch": 10.73, | |
"learning_rate": 4.362657091561939e-05, | |
"loss": 0.1547, | |
"step": 5980 | |
}, | |
{ | |
"epoch": 10.74, | |
"learning_rate": 4.3606622780770004e-05, | |
"loss": 0.152, | |
"step": 5990 | |
}, | |
{ | |
"epoch": 10.76, | |
"learning_rate": 4.358667464592061e-05, | |
"loss": 0.1615, | |
"step": 6000 | |
}, | |
{ | |
"epoch": 10.78, | |
"learning_rate": 4.3566726511071215e-05, | |
"loss": 0.1519, | |
"step": 6010 | |
}, | |
{ | |
"epoch": 10.8, | |
"learning_rate": 4.354677837622183e-05, | |
"loss": 0.1844, | |
"step": 6020 | |
}, | |
{ | |
"epoch": 10.82, | |
"learning_rate": 4.352683024137243e-05, | |
"loss": 0.1562, | |
"step": 6030 | |
}, | |
{ | |
"epoch": 10.83, | |
"learning_rate": 4.3506882106523046e-05, | |
"loss": 0.1823, | |
"step": 6040 | |
}, | |
{ | |
"epoch": 10.85, | |
"learning_rate": 4.348693397167365e-05, | |
"loss": 0.1665, | |
"step": 6050 | |
}, | |
{ | |
"epoch": 10.87, | |
"learning_rate": 4.346698583682426e-05, | |
"loss": 0.1796, | |
"step": 6060 | |
}, | |
{ | |
"epoch": 10.89, | |
"learning_rate": 4.344703770197487e-05, | |
"loss": 0.1753, | |
"step": 6070 | |
}, | |
{ | |
"epoch": 10.91, | |
"learning_rate": 4.342708956712548e-05, | |
"loss": 0.151, | |
"step": 6080 | |
}, | |
{ | |
"epoch": 10.92, | |
"learning_rate": 4.340714143227608e-05, | |
"loss": 0.163, | |
"step": 6090 | |
}, | |
{ | |
"epoch": 10.94, | |
"learning_rate": 4.338719329742669e-05, | |
"loss": 0.1853, | |
"step": 6100 | |
}, | |
{ | |
"epoch": 10.96, | |
"learning_rate": 4.3367245162577305e-05, | |
"loss": 0.1569, | |
"step": 6110 | |
}, | |
{ | |
"epoch": 10.98, | |
"learning_rate": 4.334729702772791e-05, | |
"loss": 0.1639, | |
"step": 6120 | |
}, | |
{ | |
"epoch": 11.0, | |
"learning_rate": 4.3327348892878516e-05, | |
"loss": 0.1744, | |
"step": 6130 | |
}, | |
{ | |
"epoch": 11.0, | |
"eval_accuracy": { | |
"accuracy": 0.9663601704418031 | |
}, | |
"eval_f1": { | |
"f1": 0.9642377959067083 | |
}, | |
"eval_loss": 0.09443444013595581, | |
"eval_precision": { | |
"precision": 0.9653800416731579 | |
}, | |
"eval_recall": { | |
"recall": 0.963250134618939 | |
}, | |
"eval_runtime": 98.9219, | |
"eval_samples_per_second": 180.304, | |
"eval_steps_per_second": 5.641, | |
"step": 6132 | |
}, | |
{ | |
"epoch": 11.01, | |
"learning_rate": 4.330740075802913e-05, | |
"loss": 0.1373, | |
"step": 6140 | |
}, | |
{ | |
"epoch": 11.03, | |
"learning_rate": 4.3287452623179734e-05, | |
"loss": 0.1833, | |
"step": 6150 | |
}, | |
{ | |
"epoch": 11.05, | |
"learning_rate": 4.326750448833034e-05, | |
"loss": 0.1554, | |
"step": 6160 | |
}, | |
{ | |
"epoch": 11.07, | |
"learning_rate": 4.324755635348095e-05, | |
"loss": 0.1551, | |
"step": 6170 | |
}, | |
{ | |
"epoch": 11.09, | |
"learning_rate": 4.322760821863156e-05, | |
"loss": 0.1708, | |
"step": 6180 | |
}, | |
{ | |
"epoch": 11.1, | |
"learning_rate": 4.320766008378217e-05, | |
"loss": 0.1303, | |
"step": 6190 | |
}, | |
{ | |
"epoch": 11.12, | |
"learning_rate": 4.3187711948932776e-05, | |
"loss": 0.1771, | |
"step": 6200 | |
}, | |
{ | |
"epoch": 11.14, | |
"learning_rate": 4.316776381408338e-05, | |
"loss": 0.1407, | |
"step": 6210 | |
}, | |
{ | |
"epoch": 11.16, | |
"learning_rate": 4.3147815679233994e-05, | |
"loss": 0.1733, | |
"step": 6220 | |
}, | |
{ | |
"epoch": 11.17, | |
"learning_rate": 4.3127867544384606e-05, | |
"loss": 0.173, | |
"step": 6230 | |
}, | |
{ | |
"epoch": 11.19, | |
"learning_rate": 4.3107919409535205e-05, | |
"loss": 0.1517, | |
"step": 6240 | |
}, | |
{ | |
"epoch": 11.21, | |
"learning_rate": 4.308797127468582e-05, | |
"loss": 0.1671, | |
"step": 6250 | |
}, | |
{ | |
"epoch": 11.23, | |
"learning_rate": 4.306802313983643e-05, | |
"loss": 0.1602, | |
"step": 6260 | |
}, | |
{ | |
"epoch": 11.25, | |
"learning_rate": 4.3048075004987036e-05, | |
"loss": 0.1912, | |
"step": 6270 | |
}, | |
{ | |
"epoch": 11.26, | |
"learning_rate": 4.302812687013764e-05, | |
"loss": 0.1617, | |
"step": 6280 | |
}, | |
{ | |
"epoch": 11.28, | |
"learning_rate": 4.3008178735288254e-05, | |
"loss": 0.1535, | |
"step": 6290 | |
}, | |
{ | |
"epoch": 11.3, | |
"learning_rate": 4.298823060043886e-05, | |
"loss": 0.1574, | |
"step": 6300 | |
}, | |
{ | |
"epoch": 11.32, | |
"learning_rate": 4.296828246558947e-05, | |
"loss": 0.1587, | |
"step": 6310 | |
}, | |
{ | |
"epoch": 11.34, | |
"learning_rate": 4.294833433074008e-05, | |
"loss": 0.1509, | |
"step": 6320 | |
}, | |
{ | |
"epoch": 11.35, | |
"learning_rate": 4.292838619589068e-05, | |
"loss": 0.1851, | |
"step": 6330 | |
}, | |
{ | |
"epoch": 11.37, | |
"learning_rate": 4.2908438061041295e-05, | |
"loss": 0.167, | |
"step": 6340 | |
}, | |
{ | |
"epoch": 11.39, | |
"learning_rate": 4.288848992619191e-05, | |
"loss": 0.1669, | |
"step": 6350 | |
}, | |
{ | |
"epoch": 11.41, | |
"learning_rate": 4.2868541791342506e-05, | |
"loss": 0.1634, | |
"step": 6360 | |
}, | |
{ | |
"epoch": 11.43, | |
"learning_rate": 4.284859365649312e-05, | |
"loss": 0.1827, | |
"step": 6370 | |
}, | |
{ | |
"epoch": 11.44, | |
"learning_rate": 4.282864552164373e-05, | |
"loss": 0.1486, | |
"step": 6380 | |
}, | |
{ | |
"epoch": 11.46, | |
"learning_rate": 4.280869738679434e-05, | |
"loss": 0.1492, | |
"step": 6390 | |
}, | |
{ | |
"epoch": 11.48, | |
"learning_rate": 4.278874925194494e-05, | |
"loss": 0.1554, | |
"step": 6400 | |
}, | |
{ | |
"epoch": 11.5, | |
"learning_rate": 4.2768801117095555e-05, | |
"loss": 0.1812, | |
"step": 6410 | |
}, | |
{ | |
"epoch": 11.52, | |
"learning_rate": 4.274885298224616e-05, | |
"loss": 0.1765, | |
"step": 6420 | |
}, | |
{ | |
"epoch": 11.53, | |
"learning_rate": 4.272890484739677e-05, | |
"loss": 0.1653, | |
"step": 6430 | |
}, | |
{ | |
"epoch": 11.55, | |
"learning_rate": 4.270895671254738e-05, | |
"loss": 0.1772, | |
"step": 6440 | |
}, | |
{ | |
"epoch": 11.57, | |
"learning_rate": 4.2689008577697984e-05, | |
"loss": 0.1205, | |
"step": 6450 | |
}, | |
{ | |
"epoch": 11.59, | |
"learning_rate": 4.2669060442848596e-05, | |
"loss": 0.1429, | |
"step": 6460 | |
}, | |
{ | |
"epoch": 11.61, | |
"learning_rate": 4.264911230799921e-05, | |
"loss": 0.1508, | |
"step": 6470 | |
}, | |
{ | |
"epoch": 11.62, | |
"learning_rate": 4.262916417314981e-05, | |
"loss": 0.1559, | |
"step": 6480 | |
}, | |
{ | |
"epoch": 11.64, | |
"learning_rate": 4.260921603830042e-05, | |
"loss": 0.1448, | |
"step": 6490 | |
}, | |
{ | |
"epoch": 11.66, | |
"learning_rate": 4.258926790345103e-05, | |
"loss": 0.1579, | |
"step": 6500 | |
}, | |
{ | |
"epoch": 11.68, | |
"learning_rate": 4.256931976860164e-05, | |
"loss": 0.1582, | |
"step": 6510 | |
}, | |
{ | |
"epoch": 11.7, | |
"learning_rate": 4.2549371633752244e-05, | |
"loss": 0.1609, | |
"step": 6520 | |
}, | |
{ | |
"epoch": 11.71, | |
"learning_rate": 4.2529423498902856e-05, | |
"loss": 0.1715, | |
"step": 6530 | |
}, | |
{ | |
"epoch": 11.73, | |
"learning_rate": 4.250947536405346e-05, | |
"loss": 0.1444, | |
"step": 6540 | |
}, | |
{ | |
"epoch": 11.75, | |
"learning_rate": 4.2489527229204074e-05, | |
"loss": 0.148, | |
"step": 6550 | |
}, | |
{ | |
"epoch": 11.77, | |
"learning_rate": 4.246957909435468e-05, | |
"loss": 0.162, | |
"step": 6560 | |
}, | |
{ | |
"epoch": 11.78, | |
"learning_rate": 4.2449630959505285e-05, | |
"loss": 0.1787, | |
"step": 6570 | |
}, | |
{ | |
"epoch": 11.8, | |
"learning_rate": 4.24296828246559e-05, | |
"loss": 0.1457, | |
"step": 6580 | |
}, | |
{ | |
"epoch": 11.82, | |
"learning_rate": 4.240973468980651e-05, | |
"loss": 0.1628, | |
"step": 6590 | |
}, | |
{ | |
"epoch": 11.84, | |
"learning_rate": 4.238978655495711e-05, | |
"loss": 0.1668, | |
"step": 6600 | |
}, | |
{ | |
"epoch": 11.86, | |
"learning_rate": 4.236983842010772e-05, | |
"loss": 0.1542, | |
"step": 6610 | |
}, | |
{ | |
"epoch": 11.87, | |
"learning_rate": 4.2349890285258333e-05, | |
"loss": 0.1475, | |
"step": 6620 | |
}, | |
{ | |
"epoch": 11.89, | |
"learning_rate": 4.232994215040894e-05, | |
"loss": 0.1723, | |
"step": 6630 | |
}, | |
{ | |
"epoch": 11.91, | |
"learning_rate": 4.2309994015559545e-05, | |
"loss": 0.1707, | |
"step": 6640 | |
}, | |
{ | |
"epoch": 11.93, | |
"learning_rate": 4.229004588071016e-05, | |
"loss": 0.1834, | |
"step": 6650 | |
}, | |
{ | |
"epoch": 11.95, | |
"learning_rate": 4.227009774586076e-05, | |
"loss": 0.1773, | |
"step": 6660 | |
}, | |
{ | |
"epoch": 11.96, | |
"learning_rate": 4.2250149611011375e-05, | |
"loss": 0.1742, | |
"step": 6670 | |
}, | |
{ | |
"epoch": 11.98, | |
"learning_rate": 4.223020147616198e-05, | |
"loss": 0.1585, | |
"step": 6680 | |
}, | |
{ | |
"epoch": 12.0, | |
"learning_rate": 4.2210253341312586e-05, | |
"loss": 0.1482, | |
"step": 6690 | |
}, | |
{ | |
"epoch": 12.0, | |
"eval_accuracy": { | |
"accuracy": 0.9696120206324288 | |
}, | |
"eval_f1": { | |
"f1": 0.9678411020044558 | |
}, | |
"eval_loss": 0.08285478502511978, | |
"eval_precision": { | |
"precision": 0.9677791103198417 | |
}, | |
"eval_recall": { | |
"recall": 0.9679224853422947 | |
}, | |
"eval_runtime": 98.5998, | |
"eval_samples_per_second": 180.893, | |
"eval_steps_per_second": 5.659, | |
"step": 6690 | |
}, | |
{ | |
"epoch": 12.02, | |
"learning_rate": 4.21903052064632e-05, | |
"loss": 0.1509, | |
"step": 6700 | |
}, | |
{ | |
"epoch": 12.04, | |
"learning_rate": 4.217035707161381e-05, | |
"loss": 0.1539, | |
"step": 6710 | |
}, | |
{ | |
"epoch": 12.05, | |
"learning_rate": 4.215040893676441e-05, | |
"loss": 0.1514, | |
"step": 6720 | |
}, | |
{ | |
"epoch": 12.07, | |
"learning_rate": 4.213046080191502e-05, | |
"loss": 0.1503, | |
"step": 6730 | |
}, | |
{ | |
"epoch": 12.09, | |
"learning_rate": 4.2110512667065635e-05, | |
"loss": 0.1348, | |
"step": 6740 | |
}, | |
{ | |
"epoch": 12.11, | |
"learning_rate": 4.209056453221624e-05, | |
"loss": 0.1615, | |
"step": 6750 | |
}, | |
{ | |
"epoch": 12.13, | |
"learning_rate": 4.2070616397366846e-05, | |
"loss": 0.1485, | |
"step": 6760 | |
}, | |
{ | |
"epoch": 12.14, | |
"learning_rate": 4.205066826251746e-05, | |
"loss": 0.1467, | |
"step": 6770 | |
}, | |
{ | |
"epoch": 12.16, | |
"learning_rate": 4.2030720127668064e-05, | |
"loss": 0.1625, | |
"step": 6780 | |
}, | |
{ | |
"epoch": 12.18, | |
"learning_rate": 4.2010771992818676e-05, | |
"loss": 0.1385, | |
"step": 6790 | |
}, | |
{ | |
"epoch": 12.2, | |
"learning_rate": 4.199082385796928e-05, | |
"loss": 0.141, | |
"step": 6800 | |
}, | |
{ | |
"epoch": 12.22, | |
"learning_rate": 4.197087572311989e-05, | |
"loss": 0.146, | |
"step": 6810 | |
}, | |
{ | |
"epoch": 12.23, | |
"learning_rate": 4.19509275882705e-05, | |
"loss": 0.1705, | |
"step": 6820 | |
}, | |
{ | |
"epoch": 12.25, | |
"learning_rate": 4.193097945342111e-05, | |
"loss": 0.1681, | |
"step": 6830 | |
}, | |
{ | |
"epoch": 12.27, | |
"learning_rate": 4.191103131857171e-05, | |
"loss": 0.1825, | |
"step": 6840 | |
}, | |
{ | |
"epoch": 12.29, | |
"learning_rate": 4.1891083183722323e-05, | |
"loss": 0.1443, | |
"step": 6850 | |
}, | |
{ | |
"epoch": 12.3, | |
"learning_rate": 4.1871135048872936e-05, | |
"loss": 0.1545, | |
"step": 6860 | |
}, | |
{ | |
"epoch": 12.32, | |
"learning_rate": 4.185118691402354e-05, | |
"loss": 0.1628, | |
"step": 6870 | |
}, | |
{ | |
"epoch": 12.34, | |
"learning_rate": 4.183123877917415e-05, | |
"loss": 0.1562, | |
"step": 6880 | |
}, | |
{ | |
"epoch": 12.36, | |
"learning_rate": 4.181129064432476e-05, | |
"loss": 0.1655, | |
"step": 6890 | |
}, | |
{ | |
"epoch": 12.38, | |
"learning_rate": 4.1791342509475365e-05, | |
"loss": 0.1434, | |
"step": 6900 | |
}, | |
{ | |
"epoch": 12.39, | |
"learning_rate": 4.177139437462597e-05, | |
"loss": 0.1807, | |
"step": 6910 | |
}, | |
{ | |
"epoch": 12.41, | |
"learning_rate": 4.175144623977658e-05, | |
"loss": 0.1416, | |
"step": 6920 | |
}, | |
{ | |
"epoch": 12.43, | |
"learning_rate": 4.173149810492719e-05, | |
"loss": 0.1541, | |
"step": 6930 | |
}, | |
{ | |
"epoch": 12.45, | |
"learning_rate": 4.17115499700778e-05, | |
"loss": 0.1531, | |
"step": 6940 | |
}, | |
{ | |
"epoch": 12.47, | |
"learning_rate": 4.1691601835228407e-05, | |
"loss": 0.1595, | |
"step": 6950 | |
}, | |
{ | |
"epoch": 12.48, | |
"learning_rate": 4.167165370037901e-05, | |
"loss": 0.1723, | |
"step": 6960 | |
}, | |
{ | |
"epoch": 12.5, | |
"learning_rate": 4.1651705565529625e-05, | |
"loss": 0.1389, | |
"step": 6970 | |
}, | |
{ | |
"epoch": 12.52, | |
"learning_rate": 4.163175743068024e-05, | |
"loss": 0.1727, | |
"step": 6980 | |
}, | |
{ | |
"epoch": 12.54, | |
"learning_rate": 4.161180929583084e-05, | |
"loss": 0.1334, | |
"step": 6990 | |
}, | |
{ | |
"epoch": 12.56, | |
"learning_rate": 4.159186116098145e-05, | |
"loss": 0.1305, | |
"step": 7000 | |
}, | |
{ | |
"epoch": 12.57, | |
"learning_rate": 4.157191302613206e-05, | |
"loss": 0.1279, | |
"step": 7010 | |
}, | |
{ | |
"epoch": 12.59, | |
"learning_rate": 4.1551964891282666e-05, | |
"loss": 0.1575, | |
"step": 7020 | |
}, | |
{ | |
"epoch": 12.61, | |
"learning_rate": 4.153201675643327e-05, | |
"loss": 0.1509, | |
"step": 7030 | |
}, | |
{ | |
"epoch": 12.63, | |
"learning_rate": 4.1512068621583884e-05, | |
"loss": 0.1735, | |
"step": 7040 | |
}, | |
{ | |
"epoch": 12.65, | |
"learning_rate": 4.149212048673449e-05, | |
"loss": 0.1442, | |
"step": 7050 | |
}, | |
{ | |
"epoch": 12.66, | |
"learning_rate": 4.14721723518851e-05, | |
"loss": 0.1743, | |
"step": 7060 | |
}, | |
{ | |
"epoch": 12.68, | |
"learning_rate": 4.145222421703571e-05, | |
"loss": 0.1438, | |
"step": 7070 | |
}, | |
{ | |
"epoch": 12.7, | |
"learning_rate": 4.143227608218631e-05, | |
"loss": 0.1572, | |
"step": 7080 | |
}, | |
{ | |
"epoch": 12.72, | |
"learning_rate": 4.1412327947336926e-05, | |
"loss": 0.1466, | |
"step": 7090 | |
}, | |
{ | |
"epoch": 12.74, | |
"learning_rate": 4.139237981248754e-05, | |
"loss": 0.1528, | |
"step": 7100 | |
}, | |
{ | |
"epoch": 12.75, | |
"learning_rate": 4.1372431677638144e-05, | |
"loss": 0.1478, | |
"step": 7110 | |
}, | |
{ | |
"epoch": 12.77, | |
"learning_rate": 4.135248354278875e-05, | |
"loss": 0.1664, | |
"step": 7120 | |
}, | |
{ | |
"epoch": 12.79, | |
"learning_rate": 4.133253540793936e-05, | |
"loss": 0.1398, | |
"step": 7130 | |
}, | |
{ | |
"epoch": 12.81, | |
"learning_rate": 4.131258727308997e-05, | |
"loss": 0.1502, | |
"step": 7140 | |
}, | |
{ | |
"epoch": 12.83, | |
"learning_rate": 4.129263913824057e-05, | |
"loss": 0.1756, | |
"step": 7150 | |
}, | |
{ | |
"epoch": 12.84, | |
"learning_rate": 4.1272691003391185e-05, | |
"loss": 0.1391, | |
"step": 7160 | |
}, | |
{ | |
"epoch": 12.86, | |
"learning_rate": 4.125274286854179e-05, | |
"loss": 0.1591, | |
"step": 7170 | |
}, | |
{ | |
"epoch": 12.88, | |
"learning_rate": 4.12327947336924e-05, | |
"loss": 0.137, | |
"step": 7180 | |
}, | |
{ | |
"epoch": 12.9, | |
"learning_rate": 4.121284659884301e-05, | |
"loss": 0.1369, | |
"step": 7190 | |
}, | |
{ | |
"epoch": 12.91, | |
"learning_rate": 4.1192898463993615e-05, | |
"loss": 0.1359, | |
"step": 7200 | |
}, | |
{ | |
"epoch": 12.93, | |
"learning_rate": 4.117295032914423e-05, | |
"loss": 0.1587, | |
"step": 7210 | |
}, | |
{ | |
"epoch": 12.95, | |
"learning_rate": 4.115300219429484e-05, | |
"loss": 0.1315, | |
"step": 7220 | |
}, | |
{ | |
"epoch": 12.97, | |
"learning_rate": 4.1133054059445445e-05, | |
"loss": 0.1685, | |
"step": 7230 | |
}, | |
{ | |
"epoch": 12.99, | |
"learning_rate": 4.111310592459605e-05, | |
"loss": 0.1598, | |
"step": 7240 | |
}, | |
{ | |
"epoch": 13.0, | |
"eval_accuracy": { | |
"accuracy": 0.967761830006728 | |
}, | |
"eval_f1": { | |
"f1": 0.9657620776941498 | |
}, | |
"eval_loss": 0.09193319082260132, | |
"eval_precision": { | |
"precision": 0.9671382733917124 | |
}, | |
"eval_recall": { | |
"recall": 0.9645924834051133 | |
}, | |
"eval_runtime": 95.698, | |
"eval_samples_per_second": 186.378, | |
"eval_steps_per_second": 5.831, | |
"step": 7247 | |
}, | |
{ | |
"epoch": 13.0, | |
"learning_rate": 4.109315778974666e-05, | |
"loss": 0.1567, | |
"step": 7250 | |
}, | |
{ | |
"epoch": 13.02, | |
"learning_rate": 4.107320965489727e-05, | |
"loss": 0.1456, | |
"step": 7260 | |
}, | |
{ | |
"epoch": 13.04, | |
"learning_rate": 4.1053261520047874e-05, | |
"loss": 0.1383, | |
"step": 7270 | |
}, | |
{ | |
"epoch": 13.06, | |
"learning_rate": 4.1033313385198486e-05, | |
"loss": 0.1416, | |
"step": 7280 | |
}, | |
{ | |
"epoch": 13.08, | |
"learning_rate": 4.101336525034909e-05, | |
"loss": 0.1572, | |
"step": 7290 | |
}, | |
{ | |
"epoch": 13.09, | |
"learning_rate": 4.0993417115499704e-05, | |
"loss": 0.165, | |
"step": 7300 | |
}, | |
{ | |
"epoch": 13.11, | |
"learning_rate": 4.097346898065031e-05, | |
"loss": 0.1564, | |
"step": 7310 | |
}, | |
{ | |
"epoch": 13.13, | |
"learning_rate": 4.0953520845800916e-05, | |
"loss": 0.148, | |
"step": 7320 | |
}, | |
{ | |
"epoch": 13.15, | |
"learning_rate": 4.093357271095153e-05, | |
"loss": 0.1406, | |
"step": 7330 | |
}, | |
{ | |
"epoch": 13.17, | |
"learning_rate": 4.091362457610214e-05, | |
"loss": 0.1441, | |
"step": 7340 | |
}, | |
{ | |
"epoch": 13.18, | |
"learning_rate": 4.0893676441252746e-05, | |
"loss": 0.1341, | |
"step": 7350 | |
}, | |
{ | |
"epoch": 13.2, | |
"learning_rate": 4.087372830640335e-05, | |
"loss": 0.1735, | |
"step": 7360 | |
}, | |
{ | |
"epoch": 13.22, | |
"learning_rate": 4.0853780171553964e-05, | |
"loss": 0.1473, | |
"step": 7370 | |
}, | |
{ | |
"epoch": 13.24, | |
"learning_rate": 4.083383203670457e-05, | |
"loss": 0.1515, | |
"step": 7380 | |
}, | |
{ | |
"epoch": 13.26, | |
"learning_rate": 4.0813883901855175e-05, | |
"loss": 0.133, | |
"step": 7390 | |
}, | |
{ | |
"epoch": 13.27, | |
"learning_rate": 4.079393576700579e-05, | |
"loss": 0.1275, | |
"step": 7400 | |
}, | |
{ | |
"epoch": 13.29, | |
"learning_rate": 4.077398763215639e-05, | |
"loss": 0.1491, | |
"step": 7410 | |
}, | |
{ | |
"epoch": 13.31, | |
"learning_rate": 4.0754039497307006e-05, | |
"loss": 0.1358, | |
"step": 7420 | |
}, | |
{ | |
"epoch": 13.33, | |
"learning_rate": 4.073409136245761e-05, | |
"loss": 0.1438, | |
"step": 7430 | |
}, | |
{ | |
"epoch": 13.35, | |
"learning_rate": 4.071414322760822e-05, | |
"loss": 0.1366, | |
"step": 7440 | |
}, | |
{ | |
"epoch": 13.36, | |
"learning_rate": 4.069419509275883e-05, | |
"loss": 0.1528, | |
"step": 7450 | |
}, | |
{ | |
"epoch": 13.38, | |
"learning_rate": 4.067424695790944e-05, | |
"loss": 0.1153, | |
"step": 7460 | |
}, | |
{ | |
"epoch": 13.4, | |
"learning_rate": 4.065429882306005e-05, | |
"loss": 0.1614, | |
"step": 7470 | |
}, | |
{ | |
"epoch": 13.42, | |
"learning_rate": 4.063435068821065e-05, | |
"loss": 0.1493, | |
"step": 7480 | |
}, | |
{ | |
"epoch": 13.43, | |
"learning_rate": 4.0614402553361265e-05, | |
"loss": 0.1667, | |
"step": 7490 | |
}, | |
{ | |
"epoch": 13.45, | |
"learning_rate": 4.059445441851187e-05, | |
"loss": 0.1531, | |
"step": 7500 | |
}, | |
{ | |
"epoch": 13.47, | |
"learning_rate": 4.0574506283662476e-05, | |
"loss": 0.1608, | |
"step": 7510 | |
}, | |
{ | |
"epoch": 13.49, | |
"learning_rate": 4.055455814881309e-05, | |
"loss": 0.16, | |
"step": 7520 | |
}, | |
{ | |
"epoch": 13.51, | |
"learning_rate": 4.0534610013963694e-05, | |
"loss": 0.1384, | |
"step": 7530 | |
}, | |
{ | |
"epoch": 13.52, | |
"learning_rate": 4.051466187911431e-05, | |
"loss": 0.1393, | |
"step": 7540 | |
}, | |
{ | |
"epoch": 13.54, | |
"learning_rate": 4.049471374426491e-05, | |
"loss": 0.1421, | |
"step": 7550 | |
}, | |
{ | |
"epoch": 13.56, | |
"learning_rate": 4.0474765609415525e-05, | |
"loss": 0.1609, | |
"step": 7560 | |
}, | |
{ | |
"epoch": 13.58, | |
"learning_rate": 4.045481747456613e-05, | |
"loss": 0.145, | |
"step": 7570 | |
}, | |
{ | |
"epoch": 13.6, | |
"learning_rate": 4.043486933971674e-05, | |
"loss": 0.1579, | |
"step": 7580 | |
}, | |
{ | |
"epoch": 13.61, | |
"learning_rate": 4.041492120486735e-05, | |
"loss": 0.1266, | |
"step": 7590 | |
}, | |
{ | |
"epoch": 13.63, | |
"learning_rate": 4.0394973070017954e-05, | |
"loss": 0.1368, | |
"step": 7600 | |
}, | |
{ | |
"epoch": 13.65, | |
"learning_rate": 4.0375024935168566e-05, | |
"loss": 0.1229, | |
"step": 7610 | |
}, | |
{ | |
"epoch": 13.67, | |
"learning_rate": 4.035507680031917e-05, | |
"loss": 0.1703, | |
"step": 7620 | |
}, | |
{ | |
"epoch": 13.69, | |
"learning_rate": 4.033512866546978e-05, | |
"loss": 0.1543, | |
"step": 7630 | |
}, | |
{ | |
"epoch": 13.7, | |
"learning_rate": 4.031518053062039e-05, | |
"loss": 0.1762, | |
"step": 7640 | |
}, | |
{ | |
"epoch": 13.72, | |
"learning_rate": 4.0295232395770996e-05, | |
"loss": 0.1034, | |
"step": 7650 | |
}, | |
{ | |
"epoch": 13.74, | |
"learning_rate": 4.02752842609216e-05, | |
"loss": 0.1531, | |
"step": 7660 | |
}, | |
{ | |
"epoch": 13.76, | |
"learning_rate": 4.0255336126072214e-05, | |
"loss": 0.1451, | |
"step": 7670 | |
}, | |
{ | |
"epoch": 13.78, | |
"learning_rate": 4.0235387991222826e-05, | |
"loss": 0.146, | |
"step": 7680 | |
}, | |
{ | |
"epoch": 13.79, | |
"learning_rate": 4.021543985637343e-05, | |
"loss": 0.1406, | |
"step": 7690 | |
}, | |
{ | |
"epoch": 13.81, | |
"learning_rate": 4.019549172152404e-05, | |
"loss": 0.1423, | |
"step": 7700 | |
}, | |
{ | |
"epoch": 13.83, | |
"learning_rate": 4.017554358667465e-05, | |
"loss": 0.1547, | |
"step": 7710 | |
}, | |
{ | |
"epoch": 13.85, | |
"learning_rate": 4.0155595451825255e-05, | |
"loss": 0.1531, | |
"step": 7720 | |
}, | |
{ | |
"epoch": 13.87, | |
"learning_rate": 4.013564731697587e-05, | |
"loss": 0.1458, | |
"step": 7730 | |
}, | |
{ | |
"epoch": 13.88, | |
"learning_rate": 4.011569918212647e-05, | |
"loss": 0.1192, | |
"step": 7740 | |
}, | |
{ | |
"epoch": 13.9, | |
"learning_rate": 4.009575104727708e-05, | |
"loss": 0.1615, | |
"step": 7750 | |
}, | |
{ | |
"epoch": 13.92, | |
"learning_rate": 4.007580291242769e-05, | |
"loss": 0.1108, | |
"step": 7760 | |
}, | |
{ | |
"epoch": 13.94, | |
"learning_rate": 4.00558547775783e-05, | |
"loss": 0.1445, | |
"step": 7770 | |
}, | |
{ | |
"epoch": 13.96, | |
"learning_rate": 4.00359066427289e-05, | |
"loss": 0.1466, | |
"step": 7780 | |
}, | |
{ | |
"epoch": 13.97, | |
"learning_rate": 4.0015958507879515e-05, | |
"loss": 0.1465, | |
"step": 7790 | |
}, | |
{ | |
"epoch": 13.99, | |
"learning_rate": 3.999601037303013e-05, | |
"loss": 0.1614, | |
"step": 7800 | |
}, | |
{ | |
"epoch": 14.0, | |
"eval_accuracy": { | |
"accuracy": 0.9706772819017717 | |
}, | |
"eval_f1": { | |
"f1": 0.968670422404098 | |
}, | |
"eval_loss": 0.0847558081150055, | |
"eval_precision": { | |
"precision": 0.9697786748358946 | |
}, | |
"eval_recall": { | |
"recall": 0.9678101937075398 | |
}, | |
"eval_runtime": 95.5289, | |
"eval_samples_per_second": 186.708, | |
"eval_steps_per_second": 5.841, | |
"step": 7805 | |
}, | |
{ | |
"epoch": 14.01, | |
"learning_rate": 3.997606223818073e-05, | |
"loss": 0.1499, | |
"step": 7810 | |
}, | |
{ | |
"epoch": 14.03, | |
"learning_rate": 3.995611410333134e-05, | |
"loss": 0.1341, | |
"step": 7820 | |
}, | |
{ | |
"epoch": 14.04, | |
"learning_rate": 3.993616596848195e-05, | |
"loss": 0.1476, | |
"step": 7830 | |
}, | |
{ | |
"epoch": 14.06, | |
"learning_rate": 3.9916217833632556e-05, | |
"loss": 0.1378, | |
"step": 7840 | |
}, | |
{ | |
"epoch": 14.08, | |
"learning_rate": 3.989626969878317e-05, | |
"loss": 0.1444, | |
"step": 7850 | |
}, | |
{ | |
"epoch": 14.1, | |
"learning_rate": 3.9876321563933774e-05, | |
"loss": 0.1529, | |
"step": 7860 | |
}, | |
{ | |
"epoch": 14.12, | |
"learning_rate": 3.985637342908438e-05, | |
"loss": 0.1619, | |
"step": 7870 | |
}, | |
{ | |
"epoch": 14.13, | |
"learning_rate": 3.983642529423499e-05, | |
"loss": 0.1228, | |
"step": 7880 | |
}, | |
{ | |
"epoch": 14.15, | |
"learning_rate": 3.98164771593856e-05, | |
"loss": 0.1469, | |
"step": 7890 | |
}, | |
{ | |
"epoch": 14.17, | |
"learning_rate": 3.9796529024536204e-05, | |
"loss": 0.1302, | |
"step": 7900 | |
}, | |
{ | |
"epoch": 14.19, | |
"learning_rate": 3.9776580889686816e-05, | |
"loss": 0.1513, | |
"step": 7910 | |
}, | |
{ | |
"epoch": 14.21, | |
"learning_rate": 3.975663275483743e-05, | |
"loss": 0.1471, | |
"step": 7920 | |
}, | |
{ | |
"epoch": 14.22, | |
"learning_rate": 3.9736684619988034e-05, | |
"loss": 0.1152, | |
"step": 7930 | |
}, | |
{ | |
"epoch": 14.24, | |
"learning_rate": 3.971673648513864e-05, | |
"loss": 0.1431, | |
"step": 7940 | |
}, | |
{ | |
"epoch": 14.26, | |
"learning_rate": 3.969678835028925e-05, | |
"loss": 0.1416, | |
"step": 7950 | |
}, | |
{ | |
"epoch": 14.28, | |
"learning_rate": 3.967684021543986e-05, | |
"loss": 0.1271, | |
"step": 7960 | |
}, | |
{ | |
"epoch": 14.3, | |
"learning_rate": 3.965689208059047e-05, | |
"loss": 0.16, | |
"step": 7970 | |
}, | |
{ | |
"epoch": 14.31, | |
"learning_rate": 3.9636943945741075e-05, | |
"loss": 0.1446, | |
"step": 7980 | |
}, | |
{ | |
"epoch": 14.33, | |
"learning_rate": 3.961699581089168e-05, | |
"loss": 0.1255, | |
"step": 7990 | |
}, | |
{ | |
"epoch": 14.35, | |
"learning_rate": 3.9597047676042293e-05, | |
"loss": 0.1516, | |
"step": 8000 | |
}, | |
{ | |
"epoch": 14.37, | |
"learning_rate": 3.95770995411929e-05, | |
"loss": 0.1346, | |
"step": 8010 | |
}, | |
{ | |
"epoch": 14.39, | |
"learning_rate": 3.9557151406343505e-05, | |
"loss": 0.1471, | |
"step": 8020 | |
}, | |
{ | |
"epoch": 14.4, | |
"learning_rate": 3.953720327149412e-05, | |
"loss": 0.1271, | |
"step": 8030 | |
}, | |
{ | |
"epoch": 14.42, | |
"learning_rate": 3.951725513664473e-05, | |
"loss": 0.1377, | |
"step": 8040 | |
}, | |
{ | |
"epoch": 14.44, | |
"learning_rate": 3.9497307001795335e-05, | |
"loss": 0.1511, | |
"step": 8050 | |
}, | |
{ | |
"epoch": 14.46, | |
"learning_rate": 3.947735886694594e-05, | |
"loss": 0.1446, | |
"step": 8060 | |
}, | |
{ | |
"epoch": 14.48, | |
"learning_rate": 3.945741073209655e-05, | |
"loss": 0.1224, | |
"step": 8070 | |
}, | |
{ | |
"epoch": 14.49, | |
"learning_rate": 3.943746259724716e-05, | |
"loss": 0.1333, | |
"step": 8080 | |
}, | |
{ | |
"epoch": 14.51, | |
"learning_rate": 3.941751446239777e-05, | |
"loss": 0.1261, | |
"step": 8090 | |
}, | |
{ | |
"epoch": 14.53, | |
"learning_rate": 3.939756632754838e-05, | |
"loss": 0.1218, | |
"step": 8100 | |
}, | |
{ | |
"epoch": 14.55, | |
"learning_rate": 3.937761819269898e-05, | |
"loss": 0.1288, | |
"step": 8110 | |
}, | |
{ | |
"epoch": 14.57, | |
"learning_rate": 3.9357670057849595e-05, | |
"loss": 0.1381, | |
"step": 8120 | |
}, | |
{ | |
"epoch": 14.58, | |
"learning_rate": 3.933772192300021e-05, | |
"loss": 0.1675, | |
"step": 8130 | |
}, | |
{ | |
"epoch": 14.6, | |
"learning_rate": 3.9317773788150806e-05, | |
"loss": 0.1316, | |
"step": 8140 | |
}, | |
{ | |
"epoch": 14.62, | |
"learning_rate": 3.929782565330142e-05, | |
"loss": 0.141, | |
"step": 8150 | |
}, | |
{ | |
"epoch": 14.64, | |
"learning_rate": 3.927787751845203e-05, | |
"loss": 0.1449, | |
"step": 8160 | |
}, | |
{ | |
"epoch": 14.65, | |
"learning_rate": 3.9257929383602636e-05, | |
"loss": 0.1452, | |
"step": 8170 | |
}, | |
{ | |
"epoch": 14.67, | |
"learning_rate": 3.923798124875324e-05, | |
"loss": 0.1182, | |
"step": 8180 | |
}, | |
{ | |
"epoch": 14.69, | |
"learning_rate": 3.9218033113903854e-05, | |
"loss": 0.113, | |
"step": 8190 | |
}, | |
{ | |
"epoch": 14.71, | |
"learning_rate": 3.919808497905446e-05, | |
"loss": 0.1608, | |
"step": 8200 | |
}, | |
{ | |
"epoch": 14.73, | |
"learning_rate": 3.917813684420507e-05, | |
"loss": 0.1355, | |
"step": 8210 | |
}, | |
{ | |
"epoch": 14.74, | |
"learning_rate": 3.915818870935568e-05, | |
"loss": 0.1525, | |
"step": 8220 | |
}, | |
{ | |
"epoch": 14.76, | |
"learning_rate": 3.9138240574506283e-05, | |
"loss": 0.1312, | |
"step": 8230 | |
}, | |
{ | |
"epoch": 14.78, | |
"learning_rate": 3.9118292439656896e-05, | |
"loss": 0.1354, | |
"step": 8240 | |
}, | |
{ | |
"epoch": 14.8, | |
"learning_rate": 3.909834430480751e-05, | |
"loss": 0.1372, | |
"step": 8250 | |
}, | |
{ | |
"epoch": 14.82, | |
"learning_rate": 3.907839616995811e-05, | |
"loss": 0.1357, | |
"step": 8260 | |
}, | |
{ | |
"epoch": 14.83, | |
"learning_rate": 3.905844803510872e-05, | |
"loss": 0.1548, | |
"step": 8270 | |
}, | |
{ | |
"epoch": 14.85, | |
"learning_rate": 3.903849990025933e-05, | |
"loss": 0.1533, | |
"step": 8280 | |
}, | |
{ | |
"epoch": 14.87, | |
"learning_rate": 3.901855176540994e-05, | |
"loss": 0.1368, | |
"step": 8290 | |
}, | |
{ | |
"epoch": 14.89, | |
"learning_rate": 3.899860363056054e-05, | |
"loss": 0.1251, | |
"step": 8300 | |
}, | |
{ | |
"epoch": 14.91, | |
"learning_rate": 3.8978655495711155e-05, | |
"loss": 0.1482, | |
"step": 8310 | |
}, | |
{ | |
"epoch": 14.92, | |
"learning_rate": 3.895870736086176e-05, | |
"loss": 0.1466, | |
"step": 8320 | |
}, | |
{ | |
"epoch": 14.94, | |
"learning_rate": 3.893875922601237e-05, | |
"loss": 0.1545, | |
"step": 8330 | |
}, | |
{ | |
"epoch": 14.96, | |
"learning_rate": 3.891881109116298e-05, | |
"loss": 0.1422, | |
"step": 8340 | |
}, | |
{ | |
"epoch": 14.98, | |
"learning_rate": 3.8898862956313585e-05, | |
"loss": 0.1395, | |
"step": 8350 | |
}, | |
{ | |
"epoch": 15.0, | |
"learning_rate": 3.88789148214642e-05, | |
"loss": 0.1292, | |
"step": 8360 | |
}, | |
{ | |
"epoch": 15.0, | |
"eval_accuracy": { | |
"accuracy": 0.967873962771922 | |
}, | |
"eval_f1": { | |
"f1": 0.9658569332167405 | |
}, | |
"eval_loss": 0.09215713292360306, | |
"eval_precision": { | |
"precision": 0.9670221999602495 | |
}, | |
"eval_recall": { | |
"recall": 0.9648394517708 | |
}, | |
"eval_runtime": 95.701, | |
"eval_samples_per_second": 186.372, | |
"eval_steps_per_second": 5.831, | |
"step": 8362 | |
}, | |
{ | |
"epoch": 15.01, | |
"learning_rate": 3.885896668661481e-05, | |
"loss": 0.1233, | |
"step": 8370 | |
}, | |
{ | |
"epoch": 15.03, | |
"learning_rate": 3.883901855176541e-05, | |
"loss": 0.1377, | |
"step": 8380 | |
}, | |
{ | |
"epoch": 15.05, | |
"learning_rate": 3.881907041691602e-05, | |
"loss": 0.1423, | |
"step": 8390 | |
}, | |
{ | |
"epoch": 15.07, | |
"learning_rate": 3.879912228206663e-05, | |
"loss": 0.1214, | |
"step": 8400 | |
}, | |
{ | |
"epoch": 15.09, | |
"learning_rate": 3.877917414721723e-05, | |
"loss": 0.1356, | |
"step": 8410 | |
}, | |
{ | |
"epoch": 15.1, | |
"learning_rate": 3.8759226012367844e-05, | |
"loss": 0.1294, | |
"step": 8420 | |
}, | |
{ | |
"epoch": 15.12, | |
"learning_rate": 3.8739277877518457e-05, | |
"loss": 0.1338, | |
"step": 8430 | |
}, | |
{ | |
"epoch": 15.14, | |
"learning_rate": 3.871932974266906e-05, | |
"loss": 0.1172, | |
"step": 8440 | |
}, | |
{ | |
"epoch": 15.16, | |
"learning_rate": 3.869938160781967e-05, | |
"loss": 0.1272, | |
"step": 8450 | |
}, | |
{ | |
"epoch": 15.17, | |
"learning_rate": 3.867943347297028e-05, | |
"loss": 0.1348, | |
"step": 8460 | |
}, | |
{ | |
"epoch": 15.19, | |
"learning_rate": 3.8659485338120886e-05, | |
"loss": 0.1335, | |
"step": 8470 | |
}, | |
{ | |
"epoch": 15.21, | |
"learning_rate": 3.86395372032715e-05, | |
"loss": 0.1382, | |
"step": 8480 | |
}, | |
{ | |
"epoch": 15.23, | |
"learning_rate": 3.8619589068422104e-05, | |
"loss": 0.1323, | |
"step": 8490 | |
}, | |
{ | |
"epoch": 15.25, | |
"learning_rate": 3.859964093357271e-05, | |
"loss": 0.145, | |
"step": 8500 | |
}, | |
{ | |
"epoch": 15.26, | |
"learning_rate": 3.857969279872332e-05, | |
"loss": 0.1432, | |
"step": 8510 | |
}, | |
{ | |
"epoch": 15.28, | |
"learning_rate": 3.8559744663873934e-05, | |
"loss": 0.1568, | |
"step": 8520 | |
}, | |
{ | |
"epoch": 15.3, | |
"learning_rate": 3.853979652902453e-05, | |
"loss": 0.1231, | |
"step": 8530 | |
}, | |
{ | |
"epoch": 15.32, | |
"learning_rate": 3.8519848394175145e-05, | |
"loss": 0.146, | |
"step": 8540 | |
}, | |
{ | |
"epoch": 15.34, | |
"learning_rate": 3.849990025932576e-05, | |
"loss": 0.1114, | |
"step": 8550 | |
}, | |
{ | |
"epoch": 15.35, | |
"learning_rate": 3.847995212447636e-05, | |
"loss": 0.0976, | |
"step": 8560 | |
}, | |
{ | |
"epoch": 15.37, | |
"learning_rate": 3.846000398962697e-05, | |
"loss": 0.1269, | |
"step": 8570 | |
}, | |
{ | |
"epoch": 15.39, | |
"learning_rate": 3.844005585477758e-05, | |
"loss": 0.1703, | |
"step": 8580 | |
}, | |
{ | |
"epoch": 15.41, | |
"learning_rate": 3.842010771992819e-05, | |
"loss": 0.1444, | |
"step": 8590 | |
}, | |
{ | |
"epoch": 15.43, | |
"learning_rate": 3.84001595850788e-05, | |
"loss": 0.1194, | |
"step": 8600 | |
}, | |
{ | |
"epoch": 15.44, | |
"learning_rate": 3.8380211450229405e-05, | |
"loss": 0.1645, | |
"step": 8610 | |
}, | |
{ | |
"epoch": 15.46, | |
"learning_rate": 3.836026331538001e-05, | |
"loss": 0.1235, | |
"step": 8620 | |
}, | |
{ | |
"epoch": 15.48, | |
"learning_rate": 3.834031518053062e-05, | |
"loss": 0.1592, | |
"step": 8630 | |
}, | |
{ | |
"epoch": 15.5, | |
"learning_rate": 3.8320367045681235e-05, | |
"loss": 0.1244, | |
"step": 8640 | |
}, | |
{ | |
"epoch": 15.52, | |
"learning_rate": 3.8300418910831834e-05, | |
"loss": 0.1208, | |
"step": 8650 | |
}, | |
{ | |
"epoch": 15.53, | |
"learning_rate": 3.8280470775982446e-05, | |
"loss": 0.159, | |
"step": 8660 | |
}, | |
{ | |
"epoch": 15.55, | |
"learning_rate": 3.826052264113306e-05, | |
"loss": 0.1353, | |
"step": 8670 | |
}, | |
{ | |
"epoch": 15.57, | |
"learning_rate": 3.8240574506283664e-05, | |
"loss": 0.1368, | |
"step": 8680 | |
}, | |
{ | |
"epoch": 15.59, | |
"learning_rate": 3.822062637143427e-05, | |
"loss": 0.133, | |
"step": 8690 | |
}, | |
{ | |
"epoch": 15.61, | |
"learning_rate": 3.820067823658488e-05, | |
"loss": 0.133, | |
"step": 8700 | |
}, | |
{ | |
"epoch": 15.62, | |
"learning_rate": 3.818073010173549e-05, | |
"loss": 0.1055, | |
"step": 8710 | |
}, | |
{ | |
"epoch": 15.64, | |
"learning_rate": 3.81607819668861e-05, | |
"loss": 0.1274, | |
"step": 8720 | |
}, | |
{ | |
"epoch": 15.66, | |
"learning_rate": 3.8140833832036706e-05, | |
"loss": 0.1398, | |
"step": 8730 | |
}, | |
{ | |
"epoch": 15.68, | |
"learning_rate": 3.812088569718731e-05, | |
"loss": 0.1407, | |
"step": 8740 | |
}, | |
{ | |
"epoch": 15.7, | |
"learning_rate": 3.8100937562337924e-05, | |
"loss": 0.1103, | |
"step": 8750 | |
}, | |
{ | |
"epoch": 15.71, | |
"learning_rate": 3.8080989427488536e-05, | |
"loss": 0.1391, | |
"step": 8760 | |
}, | |
{ | |
"epoch": 15.73, | |
"learning_rate": 3.8061041292639135e-05, | |
"loss": 0.1307, | |
"step": 8770 | |
}, | |
{ | |
"epoch": 15.75, | |
"learning_rate": 3.804109315778975e-05, | |
"loss": 0.1219, | |
"step": 8780 | |
}, | |
{ | |
"epoch": 15.77, | |
"learning_rate": 3.802114502294036e-05, | |
"loss": 0.1603, | |
"step": 8790 | |
}, | |
{ | |
"epoch": 15.78, | |
"learning_rate": 3.8001196888090966e-05, | |
"loss": 0.1483, | |
"step": 8800 | |
}, | |
{ | |
"epoch": 15.8, | |
"learning_rate": 3.798124875324157e-05, | |
"loss": 0.1545, | |
"step": 8810 | |
}, | |
{ | |
"epoch": 15.82, | |
"learning_rate": 3.7961300618392184e-05, | |
"loss": 0.138, | |
"step": 8820 | |
}, | |
{ | |
"epoch": 15.84, | |
"learning_rate": 3.794135248354279e-05, | |
"loss": 0.1218, | |
"step": 8830 | |
}, | |
{ | |
"epoch": 15.86, | |
"learning_rate": 3.79214043486934e-05, | |
"loss": 0.1481, | |
"step": 8840 | |
}, | |
{ | |
"epoch": 15.87, | |
"learning_rate": 3.790145621384401e-05, | |
"loss": 0.1387, | |
"step": 8850 | |
}, | |
{ | |
"epoch": 15.89, | |
"learning_rate": 3.788150807899461e-05, | |
"loss": 0.1423, | |
"step": 8860 | |
}, | |
{ | |
"epoch": 15.91, | |
"learning_rate": 3.7861559944145225e-05, | |
"loss": 0.1164, | |
"step": 8870 | |
}, | |
{ | |
"epoch": 15.93, | |
"learning_rate": 3.784161180929584e-05, | |
"loss": 0.1513, | |
"step": 8880 | |
}, | |
{ | |
"epoch": 15.95, | |
"learning_rate": 3.7821663674446436e-05, | |
"loss": 0.1585, | |
"step": 8890 | |
}, | |
{ | |
"epoch": 15.96, | |
"learning_rate": 3.780171553959705e-05, | |
"loss": 0.1151, | |
"step": 8900 | |
}, | |
{ | |
"epoch": 15.98, | |
"learning_rate": 3.778176740474766e-05, | |
"loss": 0.1272, | |
"step": 8910 | |
}, | |
{ | |
"epoch": 16.0, | |
"learning_rate": 3.776181926989827e-05, | |
"loss": 0.0988, | |
"step": 8920 | |
}, | |
{ | |
"epoch": 16.0, | |
"eval_accuracy": { | |
"accuracy": 0.9703408836061898 | |
}, | |
"eval_f1": { | |
"f1": 0.9688037947343187 | |
}, | |
"eval_loss": 0.08405764400959015, | |
"eval_precision": { | |
"precision": 0.9683340964462768 | |
}, | |
"eval_recall": { | |
"recall": 0.9693524400362661 | |
}, | |
"eval_runtime": 96.499, | |
"eval_samples_per_second": 184.831, | |
"eval_steps_per_second": 5.782, | |
"step": 8920 | |
}, | |
{ | |
"epoch": 16.02, | |
"learning_rate": 3.774187113504887e-05, | |
"loss": 0.0976, | |
"step": 8930 | |
}, | |
{ | |
"epoch": 16.04, | |
"learning_rate": 3.7721923000199485e-05, | |
"loss": 0.163, | |
"step": 8940 | |
}, | |
{ | |
"epoch": 16.05, | |
"learning_rate": 3.770197486535009e-05, | |
"loss": 0.1468, | |
"step": 8950 | |
}, | |
{ | |
"epoch": 16.07, | |
"learning_rate": 3.76820267305007e-05, | |
"loss": 0.1222, | |
"step": 8960 | |
}, | |
{ | |
"epoch": 16.09, | |
"learning_rate": 3.766207859565131e-05, | |
"loss": 0.1322, | |
"step": 8970 | |
}, | |
{ | |
"epoch": 16.11, | |
"learning_rate": 3.7642130460801914e-05, | |
"loss": 0.151, | |
"step": 8980 | |
}, | |
{ | |
"epoch": 16.13, | |
"learning_rate": 3.7622182325952526e-05, | |
"loss": 0.1287, | |
"step": 8990 | |
}, | |
{ | |
"epoch": 16.14, | |
"learning_rate": 3.760223419110314e-05, | |
"loss": 0.1319, | |
"step": 9000 | |
}, | |
{ | |
"epoch": 16.16, | |
"learning_rate": 3.758228605625374e-05, | |
"loss": 0.1231, | |
"step": 9010 | |
}, | |
{ | |
"epoch": 16.18, | |
"learning_rate": 3.756233792140435e-05, | |
"loss": 0.1463, | |
"step": 9020 | |
}, | |
{ | |
"epoch": 16.2, | |
"learning_rate": 3.754238978655496e-05, | |
"loss": 0.1263, | |
"step": 9030 | |
}, | |
{ | |
"epoch": 16.22, | |
"learning_rate": 3.752244165170557e-05, | |
"loss": 0.138, | |
"step": 9040 | |
}, | |
{ | |
"epoch": 16.23, | |
"learning_rate": 3.7502493516856174e-05, | |
"loss": 0.1352, | |
"step": 9050 | |
}, | |
{ | |
"epoch": 16.25, | |
"learning_rate": 3.7482545382006786e-05, | |
"loss": 0.1268, | |
"step": 9060 | |
}, | |
{ | |
"epoch": 16.27, | |
"learning_rate": 3.746259724715739e-05, | |
"loss": 0.1274, | |
"step": 9070 | |
}, | |
{ | |
"epoch": 16.29, | |
"learning_rate": 3.7442649112308004e-05, | |
"loss": 0.1289, | |
"step": 9080 | |
}, | |
{ | |
"epoch": 16.3, | |
"learning_rate": 3.742270097745861e-05, | |
"loss": 0.1263, | |
"step": 9090 | |
}, | |
{ | |
"epoch": 16.32, | |
"learning_rate": 3.7402752842609215e-05, | |
"loss": 0.139, | |
"step": 9100 | |
}, | |
{ | |
"epoch": 16.34, | |
"learning_rate": 3.738280470775983e-05, | |
"loss": 0.1366, | |
"step": 9110 | |
}, | |
{ | |
"epoch": 16.36, | |
"learning_rate": 3.736285657291044e-05, | |
"loss": 0.1141, | |
"step": 9120 | |
}, | |
{ | |
"epoch": 16.38, | |
"learning_rate": 3.734290843806104e-05, | |
"loss": 0.151, | |
"step": 9130 | |
}, | |
{ | |
"epoch": 16.39, | |
"learning_rate": 3.732296030321165e-05, | |
"loss": 0.1181, | |
"step": 9140 | |
}, | |
{ | |
"epoch": 16.41, | |
"learning_rate": 3.7303012168362264e-05, | |
"loss": 0.1182, | |
"step": 9150 | |
}, | |
{ | |
"epoch": 16.43, | |
"learning_rate": 3.728306403351286e-05, | |
"loss": 0.0934, | |
"step": 9160 | |
}, | |
{ | |
"epoch": 16.45, | |
"learning_rate": 3.7263115898663475e-05, | |
"loss": 0.1123, | |
"step": 9170 | |
}, | |
{ | |
"epoch": 16.47, | |
"learning_rate": 3.724316776381409e-05, | |
"loss": 0.1232, | |
"step": 9180 | |
}, | |
{ | |
"epoch": 16.48, | |
"learning_rate": 3.722321962896469e-05, | |
"loss": 0.1465, | |
"step": 9190 | |
}, | |
{ | |
"epoch": 16.5, | |
"learning_rate": 3.72032714941153e-05, | |
"loss": 0.1241, | |
"step": 9200 | |
}, | |
{ | |
"epoch": 16.52, | |
"learning_rate": 3.718332335926591e-05, | |
"loss": 0.1229, | |
"step": 9210 | |
}, | |
{ | |
"epoch": 16.54, | |
"learning_rate": 3.7163375224416516e-05, | |
"loss": 0.1311, | |
"step": 9220 | |
}, | |
{ | |
"epoch": 16.56, | |
"learning_rate": 3.714342708956713e-05, | |
"loss": 0.1056, | |
"step": 9230 | |
}, | |
{ | |
"epoch": 16.57, | |
"learning_rate": 3.7123478954717734e-05, | |
"loss": 0.1462, | |
"step": 9240 | |
}, | |
{ | |
"epoch": 16.59, | |
"learning_rate": 3.710353081986834e-05, | |
"loss": 0.1137, | |
"step": 9250 | |
}, | |
{ | |
"epoch": 16.61, | |
"learning_rate": 3.708358268501895e-05, | |
"loss": 0.1463, | |
"step": 9260 | |
}, | |
{ | |
"epoch": 16.63, | |
"learning_rate": 3.7063634550169565e-05, | |
"loss": 0.1163, | |
"step": 9270 | |
}, | |
{ | |
"epoch": 16.65, | |
"learning_rate": 3.7043686415320164e-05, | |
"loss": 0.1181, | |
"step": 9280 | |
}, | |
{ | |
"epoch": 16.66, | |
"learning_rate": 3.7023738280470776e-05, | |
"loss": 0.1237, | |
"step": 9290 | |
}, | |
{ | |
"epoch": 16.68, | |
"learning_rate": 3.700379014562139e-05, | |
"loss": 0.1316, | |
"step": 9300 | |
}, | |
{ | |
"epoch": 16.7, | |
"learning_rate": 3.6983842010771994e-05, | |
"loss": 0.1359, | |
"step": 9310 | |
}, | |
{ | |
"epoch": 16.72, | |
"learning_rate": 3.69638938759226e-05, | |
"loss": 0.1258, | |
"step": 9320 | |
}, | |
{ | |
"epoch": 16.74, | |
"learning_rate": 3.694394574107321e-05, | |
"loss": 0.1433, | |
"step": 9330 | |
}, | |
{ | |
"epoch": 16.75, | |
"learning_rate": 3.692399760622382e-05, | |
"loss": 0.1082, | |
"step": 9340 | |
}, | |
{ | |
"epoch": 16.77, | |
"learning_rate": 3.690404947137443e-05, | |
"loss": 0.1608, | |
"step": 9350 | |
}, | |
{ | |
"epoch": 16.79, | |
"learning_rate": 3.6884101336525035e-05, | |
"loss": 0.1269, | |
"step": 9360 | |
}, | |
{ | |
"epoch": 16.81, | |
"learning_rate": 3.686415320167564e-05, | |
"loss": 0.1603, | |
"step": 9370 | |
}, | |
{ | |
"epoch": 16.83, | |
"learning_rate": 3.6844205066826253e-05, | |
"loss": 0.1275, | |
"step": 9380 | |
}, | |
{ | |
"epoch": 16.84, | |
"learning_rate": 3.6824256931976866e-05, | |
"loss": 0.1168, | |
"step": 9390 | |
}, | |
{ | |
"epoch": 16.86, | |
"learning_rate": 3.6804308797127465e-05, | |
"loss": 0.1231, | |
"step": 9400 | |
}, | |
{ | |
"epoch": 16.88, | |
"learning_rate": 3.678436066227808e-05, | |
"loss": 0.1384, | |
"step": 9410 | |
}, | |
{ | |
"epoch": 16.9, | |
"learning_rate": 3.676441252742869e-05, | |
"loss": 0.1272, | |
"step": 9420 | |
}, | |
{ | |
"epoch": 16.91, | |
"learning_rate": 3.6744464392579295e-05, | |
"loss": 0.1292, | |
"step": 9430 | |
}, | |
{ | |
"epoch": 16.93, | |
"learning_rate": 3.67245162577299e-05, | |
"loss": 0.1193, | |
"step": 9440 | |
}, | |
{ | |
"epoch": 16.95, | |
"learning_rate": 3.670456812288051e-05, | |
"loss": 0.1218, | |
"step": 9450 | |
}, | |
{ | |
"epoch": 16.97, | |
"learning_rate": 3.668461998803112e-05, | |
"loss": 0.1277, | |
"step": 9460 | |
}, | |
{ | |
"epoch": 16.99, | |
"learning_rate": 3.666467185318173e-05, | |
"loss": 0.1062, | |
"step": 9470 | |
}, | |
{ | |
"epoch": 17.0, | |
"eval_accuracy": { | |
"accuracy": 0.9719107423189056 | |
}, | |
"eval_f1": { | |
"f1": 0.970393669051259 | |
}, | |
"eval_loss": 0.08178497105836868, | |
"eval_precision": { | |
"precision": 0.9706603351073347 | |
}, | |
"eval_recall": { | |
"recall": 0.970195220462345 | |
}, | |
"eval_runtime": 95.8143, | |
"eval_samples_per_second": 186.152, | |
"eval_steps_per_second": 5.824, | |
"step": 9477 | |
}, | |
{ | |
"epoch": 17.0, | |
"learning_rate": 3.664472371833234e-05, | |
"loss": 0.1213, | |
"step": 9480 | |
}, | |
{ | |
"epoch": 17.02, | |
"learning_rate": 3.662477558348294e-05, | |
"loss": 0.1373, | |
"step": 9490 | |
}, | |
{ | |
"epoch": 17.04, | |
"learning_rate": 3.6604827448633555e-05, | |
"loss": 0.1706, | |
"step": 9500 | |
}, | |
{ | |
"epoch": 17.06, | |
"learning_rate": 3.658487931378417e-05, | |
"loss": 0.1133, | |
"step": 9510 | |
}, | |
{ | |
"epoch": 17.08, | |
"learning_rate": 3.656493117893477e-05, | |
"loss": 0.0913, | |
"step": 9520 | |
}, | |
{ | |
"epoch": 17.09, | |
"learning_rate": 3.654498304408538e-05, | |
"loss": 0.1227, | |
"step": 9530 | |
}, | |
{ | |
"epoch": 17.11, | |
"learning_rate": 3.652503490923599e-05, | |
"loss": 0.1178, | |
"step": 9540 | |
}, | |
{ | |
"epoch": 17.13, | |
"learning_rate": 3.6505086774386596e-05, | |
"loss": 0.1212, | |
"step": 9550 | |
}, | |
{ | |
"epoch": 17.15, | |
"learning_rate": 3.64851386395372e-05, | |
"loss": 0.1015, | |
"step": 9560 | |
}, | |
{ | |
"epoch": 17.17, | |
"learning_rate": 3.6465190504687814e-05, | |
"loss": 0.1223, | |
"step": 9570 | |
}, | |
{ | |
"epoch": 17.18, | |
"learning_rate": 3.644524236983842e-05, | |
"loss": 0.1283, | |
"step": 9580 | |
}, | |
{ | |
"epoch": 17.2, | |
"learning_rate": 3.642529423498903e-05, | |
"loss": 0.1264, | |
"step": 9590 | |
}, | |
{ | |
"epoch": 17.22, | |
"learning_rate": 3.640534610013964e-05, | |
"loss": 0.14, | |
"step": 9600 | |
}, | |
{ | |
"epoch": 17.24, | |
"learning_rate": 3.6385397965290243e-05, | |
"loss": 0.1286, | |
"step": 9610 | |
}, | |
{ | |
"epoch": 17.26, | |
"learning_rate": 3.6365449830440856e-05, | |
"loss": 0.1219, | |
"step": 9620 | |
}, | |
{ | |
"epoch": 17.27, | |
"learning_rate": 3.634550169559147e-05, | |
"loss": 0.1309, | |
"step": 9630 | |
}, | |
{ | |
"epoch": 17.29, | |
"learning_rate": 3.6325553560742074e-05, | |
"loss": 0.124, | |
"step": 9640 | |
}, | |
{ | |
"epoch": 17.31, | |
"learning_rate": 3.630560542589268e-05, | |
"loss": 0.1238, | |
"step": 9650 | |
}, | |
{ | |
"epoch": 17.33, | |
"learning_rate": 3.628565729104329e-05, | |
"loss": 0.1159, | |
"step": 9660 | |
}, | |
{ | |
"epoch": 17.35, | |
"learning_rate": 3.62657091561939e-05, | |
"loss": 0.1226, | |
"step": 9670 | |
}, | |
{ | |
"epoch": 17.36, | |
"learning_rate": 3.62457610213445e-05, | |
"loss": 0.148, | |
"step": 9680 | |
}, | |
{ | |
"epoch": 17.38, | |
"learning_rate": 3.6225812886495115e-05, | |
"loss": 0.1352, | |
"step": 9690 | |
}, | |
{ | |
"epoch": 17.4, | |
"learning_rate": 3.620586475164572e-05, | |
"loss": 0.1102, | |
"step": 9700 | |
}, | |
{ | |
"epoch": 17.42, | |
"learning_rate": 3.618591661679633e-05, | |
"loss": 0.1201, | |
"step": 9710 | |
}, | |
{ | |
"epoch": 17.43, | |
"learning_rate": 3.616596848194694e-05, | |
"loss": 0.131, | |
"step": 9720 | |
}, | |
{ | |
"epoch": 17.45, | |
"learning_rate": 3.6146020347097545e-05, | |
"loss": 0.118, | |
"step": 9730 | |
}, | |
{ | |
"epoch": 17.47, | |
"learning_rate": 3.612607221224816e-05, | |
"loss": 0.1221, | |
"step": 9740 | |
}, | |
{ | |
"epoch": 17.49, | |
"learning_rate": 3.610612407739877e-05, | |
"loss": 0.126, | |
"step": 9750 | |
}, | |
{ | |
"epoch": 17.51, | |
"learning_rate": 3.6086175942549375e-05, | |
"loss": 0.1265, | |
"step": 9760 | |
}, | |
{ | |
"epoch": 17.52, | |
"learning_rate": 3.606622780769998e-05, | |
"loss": 0.1301, | |
"step": 9770 | |
}, | |
{ | |
"epoch": 17.54, | |
"learning_rate": 3.604627967285059e-05, | |
"loss": 0.1212, | |
"step": 9780 | |
}, | |
{ | |
"epoch": 17.56, | |
"learning_rate": 3.60263315380012e-05, | |
"loss": 0.1337, | |
"step": 9790 | |
}, | |
{ | |
"epoch": 17.58, | |
"learning_rate": 3.6006383403151804e-05, | |
"loss": 0.1325, | |
"step": 9800 | |
}, | |
{ | |
"epoch": 17.6, | |
"learning_rate": 3.5986435268302417e-05, | |
"loss": 0.1252, | |
"step": 9810 | |
}, | |
{ | |
"epoch": 17.61, | |
"learning_rate": 3.596648713345302e-05, | |
"loss": 0.1428, | |
"step": 9820 | |
}, | |
{ | |
"epoch": 17.63, | |
"learning_rate": 3.5946538998603635e-05, | |
"loss": 0.1182, | |
"step": 9830 | |
}, | |
{ | |
"epoch": 17.65, | |
"learning_rate": 3.592659086375424e-05, | |
"loss": 0.1193, | |
"step": 9840 | |
}, | |
{ | |
"epoch": 17.67, | |
"learning_rate": 3.5906642728904846e-05, | |
"loss": 0.1111, | |
"step": 9850 | |
}, | |
{ | |
"epoch": 17.69, | |
"learning_rate": 3.588669459405546e-05, | |
"loss": 0.1123, | |
"step": 9860 | |
}, | |
{ | |
"epoch": 17.7, | |
"learning_rate": 3.586674645920607e-05, | |
"loss": 0.1246, | |
"step": 9870 | |
}, | |
{ | |
"epoch": 17.72, | |
"learning_rate": 3.5846798324356676e-05, | |
"loss": 0.1229, | |
"step": 9880 | |
}, | |
{ | |
"epoch": 17.74, | |
"learning_rate": 3.582685018950728e-05, | |
"loss": 0.1363, | |
"step": 9890 | |
}, | |
{ | |
"epoch": 17.76, | |
"learning_rate": 3.5806902054657894e-05, | |
"loss": 0.1259, | |
"step": 9900 | |
}, | |
{ | |
"epoch": 17.78, | |
"learning_rate": 3.57869539198085e-05, | |
"loss": 0.0848, | |
"step": 9910 | |
}, | |
{ | |
"epoch": 17.79, | |
"learning_rate": 3.5767005784959105e-05, | |
"loss": 0.0918, | |
"step": 9920 | |
}, | |
{ | |
"epoch": 17.81, | |
"learning_rate": 3.574705765010972e-05, | |
"loss": 0.1319, | |
"step": 9930 | |
}, | |
{ | |
"epoch": 17.83, | |
"learning_rate": 3.572710951526032e-05, | |
"loss": 0.1291, | |
"step": 9940 | |
}, | |
{ | |
"epoch": 17.85, | |
"learning_rate": 3.570716138041093e-05, | |
"loss": 0.1178, | |
"step": 9950 | |
}, | |
{ | |
"epoch": 17.87, | |
"learning_rate": 3.568721324556154e-05, | |
"loss": 0.1135, | |
"step": 9960 | |
}, | |
{ | |
"epoch": 17.88, | |
"learning_rate": 3.566726511071215e-05, | |
"loss": 0.1298, | |
"step": 9970 | |
}, | |
{ | |
"epoch": 17.9, | |
"learning_rate": 3.564731697586276e-05, | |
"loss": 0.1109, | |
"step": 9980 | |
}, | |
{ | |
"epoch": 17.92, | |
"learning_rate": 3.5627368841013365e-05, | |
"loss": 0.1326, | |
"step": 9990 | |
}, | |
{ | |
"epoch": 17.94, | |
"learning_rate": 3.560742070616398e-05, | |
"loss": 0.1062, | |
"step": 10000 | |
}, | |
{ | |
"epoch": 17.96, | |
"learning_rate": 3.558747257131458e-05, | |
"loss": 0.1356, | |
"step": 10010 | |
}, | |
{ | |
"epoch": 17.97, | |
"learning_rate": 3.5567524436465195e-05, | |
"loss": 0.1279, | |
"step": 10020 | |
}, | |
{ | |
"epoch": 17.99, | |
"learning_rate": 3.55475763016158e-05, | |
"loss": 0.1291, | |
"step": 10030 | |
}, | |
{ | |
"epoch": 18.0, | |
"eval_accuracy": { | |
"accuracy": 0.9703969499887867 | |
}, | |
"eval_f1": { | |
"f1": 0.9687676965848208 | |
}, | |
"eval_loss": 0.08788565546274185, | |
"eval_precision": { | |
"precision": 0.9686849139922615 | |
}, | |
"eval_recall": { | |
"recall": 0.9690100916019242 | |
}, | |
"eval_runtime": 96.3636, | |
"eval_samples_per_second": 185.091, | |
"eval_steps_per_second": 5.791, | |
"step": 10035 | |
}, | |
{ | |
"epoch": 18.01, | |
"learning_rate": 3.5527628166766406e-05, | |
"loss": 0.1297, | |
"step": 10040 | |
}, | |
{ | |
"epoch": 18.03, | |
"learning_rate": 3.550768003191702e-05, | |
"loss": 0.1155, | |
"step": 10050 | |
}, | |
{ | |
"epoch": 18.04, | |
"learning_rate": 3.5487731897067624e-05, | |
"loss": 0.0892, | |
"step": 10060 | |
}, | |
{ | |
"epoch": 18.06, | |
"learning_rate": 3.546778376221823e-05, | |
"loss": 0.1048, | |
"step": 10070 | |
}, | |
{ | |
"epoch": 18.08, | |
"learning_rate": 3.544783562736884e-05, | |
"loss": 0.1328, | |
"step": 10080 | |
}, | |
{ | |
"epoch": 18.1, | |
"learning_rate": 3.5427887492519455e-05, | |
"loss": 0.1087, | |
"step": 10090 | |
}, | |
{ | |
"epoch": 18.12, | |
"learning_rate": 3.540793935767006e-05, | |
"loss": 0.0989, | |
"step": 10100 | |
}, | |
{ | |
"epoch": 18.13, | |
"learning_rate": 3.5387991222820666e-05, | |
"loss": 0.1456, | |
"step": 10110 | |
}, | |
{ | |
"epoch": 18.15, | |
"learning_rate": 3.536804308797128e-05, | |
"loss": 0.1244, | |
"step": 10120 | |
}, | |
{ | |
"epoch": 18.17, | |
"learning_rate": 3.5348094953121884e-05, | |
"loss": 0.1182, | |
"step": 10130 | |
}, | |
{ | |
"epoch": 18.19, | |
"learning_rate": 3.5328146818272496e-05, | |
"loss": 0.126, | |
"step": 10140 | |
}, | |
{ | |
"epoch": 18.21, | |
"learning_rate": 3.53081986834231e-05, | |
"loss": 0.105, | |
"step": 10150 | |
}, | |
{ | |
"epoch": 18.22, | |
"learning_rate": 3.528825054857371e-05, | |
"loss": 0.1367, | |
"step": 10160 | |
}, | |
{ | |
"epoch": 18.24, | |
"learning_rate": 3.526830241372432e-05, | |
"loss": 0.1155, | |
"step": 10170 | |
}, | |
{ | |
"epoch": 18.26, | |
"learning_rate": 3.5248354278874926e-05, | |
"loss": 0.1011, | |
"step": 10180 | |
}, | |
{ | |
"epoch": 18.28, | |
"learning_rate": 3.522840614402553e-05, | |
"loss": 0.1189, | |
"step": 10190 | |
}, | |
{ | |
"epoch": 18.3, | |
"learning_rate": 3.5208458009176144e-05, | |
"loss": 0.1124, | |
"step": 10200 | |
}, | |
{ | |
"epoch": 18.31, | |
"learning_rate": 3.5188509874326756e-05, | |
"loss": 0.105, | |
"step": 10210 | |
}, | |
{ | |
"epoch": 18.33, | |
"learning_rate": 3.516856173947736e-05, | |
"loss": 0.1166, | |
"step": 10220 | |
}, | |
{ | |
"epoch": 18.35, | |
"learning_rate": 3.514861360462797e-05, | |
"loss": 0.1119, | |
"step": 10230 | |
}, | |
{ | |
"epoch": 18.37, | |
"learning_rate": 3.512866546977858e-05, | |
"loss": 0.1337, | |
"step": 10240 | |
}, | |
{ | |
"epoch": 18.39, | |
"learning_rate": 3.5108717334929185e-05, | |
"loss": 0.1168, | |
"step": 10250 | |
}, | |
{ | |
"epoch": 18.4, | |
"learning_rate": 3.50887692000798e-05, | |
"loss": 0.1152, | |
"step": 10260 | |
}, | |
{ | |
"epoch": 18.42, | |
"learning_rate": 3.50688210652304e-05, | |
"loss": 0.1319, | |
"step": 10270 | |
}, | |
{ | |
"epoch": 18.44, | |
"learning_rate": 3.504887293038101e-05, | |
"loss": 0.1266, | |
"step": 10280 | |
}, | |
{ | |
"epoch": 18.46, | |
"learning_rate": 3.502892479553162e-05, | |
"loss": 0.1214, | |
"step": 10290 | |
}, | |
{ | |
"epoch": 18.48, | |
"learning_rate": 3.500897666068223e-05, | |
"loss": 0.1489, | |
"step": 10300 | |
}, | |
{ | |
"epoch": 18.49, | |
"learning_rate": 3.498902852583283e-05, | |
"loss": 0.1094, | |
"step": 10310 | |
}, | |
{ | |
"epoch": 18.51, | |
"learning_rate": 3.4969080390983445e-05, | |
"loss": 0.102, | |
"step": 10320 | |
}, | |
{ | |
"epoch": 18.53, | |
"learning_rate": 3.494913225613406e-05, | |
"loss": 0.1277, | |
"step": 10330 | |
}, | |
{ | |
"epoch": 18.55, | |
"learning_rate": 3.492918412128466e-05, | |
"loss": 0.1265, | |
"step": 10340 | |
}, | |
{ | |
"epoch": 18.57, | |
"learning_rate": 3.490923598643527e-05, | |
"loss": 0.1156, | |
"step": 10350 | |
}, | |
{ | |
"epoch": 18.58, | |
"learning_rate": 3.488928785158588e-05, | |
"loss": 0.1349, | |
"step": 10360 | |
}, | |
{ | |
"epoch": 18.6, | |
"learning_rate": 3.4869339716736486e-05, | |
"loss": 0.1305, | |
"step": 10370 | |
}, | |
{ | |
"epoch": 18.62, | |
"learning_rate": 3.48493915818871e-05, | |
"loss": 0.1159, | |
"step": 10380 | |
}, | |
{ | |
"epoch": 18.64, | |
"learning_rate": 3.4829443447037704e-05, | |
"loss": 0.1168, | |
"step": 10390 | |
}, | |
{ | |
"epoch": 18.65, | |
"learning_rate": 3.480949531218831e-05, | |
"loss": 0.1233, | |
"step": 10400 | |
}, | |
{ | |
"epoch": 18.67, | |
"learning_rate": 3.478954717733892e-05, | |
"loss": 0.1179, | |
"step": 10410 | |
}, | |
{ | |
"epoch": 18.69, | |
"learning_rate": 3.476959904248953e-05, | |
"loss": 0.1384, | |
"step": 10420 | |
}, | |
{ | |
"epoch": 18.71, | |
"learning_rate": 3.4749650907640134e-05, | |
"loss": 0.1282, | |
"step": 10430 | |
}, | |
{ | |
"epoch": 18.73, | |
"learning_rate": 3.4729702772790746e-05, | |
"loss": 0.1296, | |
"step": 10440 | |
}, | |
{ | |
"epoch": 18.74, | |
"learning_rate": 3.470975463794136e-05, | |
"loss": 0.1343, | |
"step": 10450 | |
}, | |
{ | |
"epoch": 18.76, | |
"learning_rate": 3.4689806503091964e-05, | |
"loss": 0.1446, | |
"step": 10460 | |
}, | |
{ | |
"epoch": 18.78, | |
"learning_rate": 3.466985836824257e-05, | |
"loss": 0.1107, | |
"step": 10470 | |
}, | |
{ | |
"epoch": 18.8, | |
"learning_rate": 3.464991023339318e-05, | |
"loss": 0.1009, | |
"step": 10480 | |
}, | |
{ | |
"epoch": 18.82, | |
"learning_rate": 3.462996209854379e-05, | |
"loss": 0.1396, | |
"step": 10490 | |
}, | |
{ | |
"epoch": 18.83, | |
"learning_rate": 3.46100139636944e-05, | |
"loss": 0.1205, | |
"step": 10500 | |
}, | |
{ | |
"epoch": 18.85, | |
"learning_rate": 3.4590065828845006e-05, | |
"loss": 0.1426, | |
"step": 10510 | |
}, | |
{ | |
"epoch": 18.87, | |
"learning_rate": 3.457011769399561e-05, | |
"loss": 0.1231, | |
"step": 10520 | |
}, | |
{ | |
"epoch": 18.89, | |
"learning_rate": 3.4550169559146224e-05, | |
"loss": 0.1107, | |
"step": 10530 | |
}, | |
{ | |
"epoch": 18.91, | |
"learning_rate": 3.453022142429683e-05, | |
"loss": 0.1136, | |
"step": 10540 | |
}, | |
{ | |
"epoch": 18.92, | |
"learning_rate": 3.4510273289447435e-05, | |
"loss": 0.1152, | |
"step": 10550 | |
}, | |
{ | |
"epoch": 18.94, | |
"learning_rate": 3.449032515459805e-05, | |
"loss": 0.1121, | |
"step": 10560 | |
}, | |
{ | |
"epoch": 18.96, | |
"learning_rate": 3.447037701974866e-05, | |
"loss": 0.103, | |
"step": 10570 | |
}, | |
{ | |
"epoch": 18.98, | |
"learning_rate": 3.4450428884899265e-05, | |
"loss": 0.1077, | |
"step": 10580 | |
}, | |
{ | |
"epoch": 19.0, | |
"learning_rate": 3.443048075004987e-05, | |
"loss": 0.1253, | |
"step": 10590 | |
}, | |
{ | |
"epoch": 19.0, | |
"eval_accuracy": { | |
"accuracy": 0.9731442027360395 | |
}, | |
"eval_f1": { | |
"f1": 0.9716105864810595 | |
}, | |
"eval_loss": 0.07871522009372711, | |
"eval_precision": { | |
"precision": 0.9721612839062381 | |
}, | |
"eval_recall": { | |
"recall": 0.9711479618814062 | |
}, | |
"eval_runtime": 97.4964, | |
"eval_samples_per_second": 182.94, | |
"eval_steps_per_second": 5.723, | |
"step": 10592 | |
}, | |
{ | |
"epoch": 19.01, | |
"learning_rate": 3.441053261520048e-05, | |
"loss": 0.1217, | |
"step": 10600 | |
}, | |
{ | |
"epoch": 19.03, | |
"learning_rate": 3.439058448035109e-05, | |
"loss": 0.118, | |
"step": 10610 | |
}, | |
{ | |
"epoch": 19.05, | |
"learning_rate": 3.43706363455017e-05, | |
"loss": 0.1315, | |
"step": 10620 | |
}, | |
{ | |
"epoch": 19.07, | |
"learning_rate": 3.435068821065231e-05, | |
"loss": 0.1029, | |
"step": 10630 | |
}, | |
{ | |
"epoch": 19.09, | |
"learning_rate": 3.433074007580291e-05, | |
"loss": 0.1138, | |
"step": 10640 | |
}, | |
{ | |
"epoch": 19.1, | |
"learning_rate": 3.4310791940953525e-05, | |
"loss": 0.1146, | |
"step": 10650 | |
}, | |
{ | |
"epoch": 19.12, | |
"learning_rate": 3.429084380610413e-05, | |
"loss": 0.1324, | |
"step": 10660 | |
}, | |
{ | |
"epoch": 19.14, | |
"learning_rate": 3.4270895671254736e-05, | |
"loss": 0.114, | |
"step": 10670 | |
}, | |
{ | |
"epoch": 19.16, | |
"learning_rate": 3.425094753640535e-05, | |
"loss": 0.1205, | |
"step": 10680 | |
}, | |
{ | |
"epoch": 19.17, | |
"learning_rate": 3.423099940155596e-05, | |
"loss": 0.1122, | |
"step": 10690 | |
}, | |
{ | |
"epoch": 19.19, | |
"learning_rate": 3.421105126670656e-05, | |
"loss": 0.1213, | |
"step": 10700 | |
}, | |
{ | |
"epoch": 19.21, | |
"learning_rate": 3.419110313185717e-05, | |
"loss": 0.1276, | |
"step": 10710 | |
}, | |
{ | |
"epoch": 19.23, | |
"learning_rate": 3.4171154997007784e-05, | |
"loss": 0.0995, | |
"step": 10720 | |
}, | |
{ | |
"epoch": 19.25, | |
"learning_rate": 3.415120686215839e-05, | |
"loss": 0.1114, | |
"step": 10730 | |
}, | |
{ | |
"epoch": 19.26, | |
"learning_rate": 3.4131258727308995e-05, | |
"loss": 0.1356, | |
"step": 10740 | |
}, | |
{ | |
"epoch": 19.28, | |
"learning_rate": 3.411131059245961e-05, | |
"loss": 0.133, | |
"step": 10750 | |
}, | |
{ | |
"epoch": 19.3, | |
"learning_rate": 3.4091362457610213e-05, | |
"loss": 0.1009, | |
"step": 10760 | |
}, | |
{ | |
"epoch": 19.32, | |
"learning_rate": 3.4071414322760826e-05, | |
"loss": 0.136, | |
"step": 10770 | |
}, | |
{ | |
"epoch": 19.34, | |
"learning_rate": 3.405146618791143e-05, | |
"loss": 0.1111, | |
"step": 10780 | |
}, | |
{ | |
"epoch": 19.35, | |
"learning_rate": 3.403151805306204e-05, | |
"loss": 0.1176, | |
"step": 10790 | |
}, | |
{ | |
"epoch": 19.37, | |
"learning_rate": 3.401156991821265e-05, | |
"loss": 0.1128, | |
"step": 10800 | |
}, | |
{ | |
"epoch": 19.39, | |
"learning_rate": 3.399162178336326e-05, | |
"loss": 0.0974, | |
"step": 10810 | |
}, | |
{ | |
"epoch": 19.41, | |
"learning_rate": 3.397167364851386e-05, | |
"loss": 0.1043, | |
"step": 10820 | |
}, | |
{ | |
"epoch": 19.43, | |
"learning_rate": 3.395172551366447e-05, | |
"loss": 0.128, | |
"step": 10830 | |
}, | |
{ | |
"epoch": 19.44, | |
"learning_rate": 3.3931777378815085e-05, | |
"loss": 0.1245, | |
"step": 10840 | |
}, | |
{ | |
"epoch": 19.46, | |
"learning_rate": 3.391182924396569e-05, | |
"loss": 0.1144, | |
"step": 10850 | |
}, | |
{ | |
"epoch": 19.48, | |
"learning_rate": 3.38918811091163e-05, | |
"loss": 0.1044, | |
"step": 10860 | |
}, | |
{ | |
"epoch": 19.5, | |
"learning_rate": 3.387193297426691e-05, | |
"loss": 0.1005, | |
"step": 10870 | |
}, | |
{ | |
"epoch": 19.52, | |
"learning_rate": 3.3851984839417515e-05, | |
"loss": 0.1083, | |
"step": 10880 | |
}, | |
{ | |
"epoch": 19.53, | |
"learning_rate": 3.383203670456813e-05, | |
"loss": 0.129, | |
"step": 10890 | |
}, | |
{ | |
"epoch": 19.55, | |
"learning_rate": 3.381208856971873e-05, | |
"loss": 0.1093, | |
"step": 10900 | |
}, | |
{ | |
"epoch": 19.57, | |
"learning_rate": 3.379214043486934e-05, | |
"loss": 0.1305, | |
"step": 10910 | |
}, | |
{ | |
"epoch": 19.59, | |
"learning_rate": 3.377219230001995e-05, | |
"loss": 0.1071, | |
"step": 10920 | |
}, | |
{ | |
"epoch": 19.61, | |
"learning_rate": 3.375224416517056e-05, | |
"loss": 0.0956, | |
"step": 10930 | |
}, | |
{ | |
"epoch": 19.62, | |
"learning_rate": 3.373229603032116e-05, | |
"loss": 0.1515, | |
"step": 10940 | |
}, | |
{ | |
"epoch": 19.64, | |
"learning_rate": 3.3712347895471774e-05, | |
"loss": 0.1128, | |
"step": 10950 | |
}, | |
{ | |
"epoch": 19.66, | |
"learning_rate": 3.3692399760622387e-05, | |
"loss": 0.1096, | |
"step": 10960 | |
}, | |
{ | |
"epoch": 19.68, | |
"learning_rate": 3.367245162577299e-05, | |
"loss": 0.1066, | |
"step": 10970 | |
}, | |
{ | |
"epoch": 19.7, | |
"learning_rate": 3.36525034909236e-05, | |
"loss": 0.1129, | |
"step": 10980 | |
}, | |
{ | |
"epoch": 19.71, | |
"learning_rate": 3.363255535607421e-05, | |
"loss": 0.1279, | |
"step": 10990 | |
}, | |
{ | |
"epoch": 19.73, | |
"learning_rate": 3.3612607221224816e-05, | |
"loss": 0.1392, | |
"step": 11000 | |
}, | |
{ | |
"epoch": 19.75, | |
"learning_rate": 3.359265908637543e-05, | |
"loss": 0.1123, | |
"step": 11010 | |
}, | |
{ | |
"epoch": 19.77, | |
"learning_rate": 3.3572710951526034e-05, | |
"loss": 0.1239, | |
"step": 11020 | |
}, | |
{ | |
"epoch": 19.78, | |
"learning_rate": 3.355276281667664e-05, | |
"loss": 0.1262, | |
"step": 11030 | |
}, | |
{ | |
"epoch": 19.8, | |
"learning_rate": 3.353281468182725e-05, | |
"loss": 0.0987, | |
"step": 11040 | |
}, | |
{ | |
"epoch": 19.82, | |
"learning_rate": 3.3512866546977864e-05, | |
"loss": 0.1146, | |
"step": 11050 | |
}, | |
{ | |
"epoch": 19.84, | |
"learning_rate": 3.349291841212846e-05, | |
"loss": 0.1094, | |
"step": 11060 | |
}, | |
{ | |
"epoch": 19.86, | |
"learning_rate": 3.3472970277279075e-05, | |
"loss": 0.118, | |
"step": 11070 | |
}, | |
{ | |
"epoch": 19.87, | |
"learning_rate": 3.345302214242969e-05, | |
"loss": 0.1014, | |
"step": 11080 | |
}, | |
{ | |
"epoch": 19.89, | |
"learning_rate": 3.343307400758029e-05, | |
"loss": 0.1186, | |
"step": 11090 | |
}, | |
{ | |
"epoch": 19.91, | |
"learning_rate": 3.34131258727309e-05, | |
"loss": 0.1255, | |
"step": 11100 | |
}, | |
{ | |
"epoch": 19.93, | |
"learning_rate": 3.339317773788151e-05, | |
"loss": 0.1233, | |
"step": 11110 | |
}, | |
{ | |
"epoch": 19.95, | |
"learning_rate": 3.337322960303212e-05, | |
"loss": 0.105, | |
"step": 11120 | |
}, | |
{ | |
"epoch": 19.96, | |
"learning_rate": 3.335328146818273e-05, | |
"loss": 0.127, | |
"step": 11130 | |
}, | |
{ | |
"epoch": 19.98, | |
"learning_rate": 3.3333333333333335e-05, | |
"loss": 0.1007, | |
"step": 11140 | |
}, | |
{ | |
"epoch": 20.0, | |
"learning_rate": 3.331338519848394e-05, | |
"loss": 0.1473, | |
"step": 11150 | |
}, | |
{ | |
"epoch": 20.0, | |
"eval_accuracy": { | |
"accuracy": 0.9723592733796815 | |
}, | |
"eval_f1": { | |
"f1": 0.9711380216113251 | |
}, | |
"eval_loss": 0.08163481950759888, | |
"eval_precision": { | |
"precision": 0.9709134522828391 | |
}, | |
"eval_recall": { | |
"recall": 0.9715130547673007 | |
}, | |
"eval_runtime": 163.6284, | |
"eval_samples_per_second": 109.003, | |
"eval_steps_per_second": 3.41, | |
"step": 11150 | |
}, | |
{ | |
"epoch": 20.02, | |
"learning_rate": 3.329343706363455e-05, | |
"loss": 0.104, | |
"step": 11160 | |
}, | |
{ | |
"epoch": 20.04, | |
"learning_rate": 3.3273488928785165e-05, | |
"loss": 0.1147, | |
"step": 11170 | |
}, | |
{ | |
"epoch": 20.05, | |
"learning_rate": 3.3253540793935764e-05, | |
"loss": 0.1285, | |
"step": 11180 | |
}, | |
{ | |
"epoch": 20.07, | |
"learning_rate": 3.3233592659086377e-05, | |
"loss": 0.1098, | |
"step": 11190 | |
}, | |
{ | |
"epoch": 20.09, | |
"learning_rate": 3.321364452423699e-05, | |
"loss": 0.1277, | |
"step": 11200 | |
}, | |
{ | |
"epoch": 20.11, | |
"learning_rate": 3.3193696389387595e-05, | |
"loss": 0.132, | |
"step": 11210 | |
}, | |
{ | |
"epoch": 20.13, | |
"learning_rate": 3.31737482545382e-05, | |
"loss": 0.1003, | |
"step": 11220 | |
}, | |
{ | |
"epoch": 20.14, | |
"learning_rate": 3.315380011968881e-05, | |
"loss": 0.0991, | |
"step": 11230 | |
}, | |
{ | |
"epoch": 20.16, | |
"learning_rate": 3.313385198483942e-05, | |
"loss": 0.1148, | |
"step": 11240 | |
}, | |
{ | |
"epoch": 20.18, | |
"learning_rate": 3.311390384999003e-05, | |
"loss": 0.1156, | |
"step": 11250 | |
}, | |
{ | |
"epoch": 20.2, | |
"learning_rate": 3.3093955715140636e-05, | |
"loss": 0.1136, | |
"step": 11260 | |
}, | |
{ | |
"epoch": 20.22, | |
"learning_rate": 3.307400758029124e-05, | |
"loss": 0.1101, | |
"step": 11270 | |
}, | |
{ | |
"epoch": 20.23, | |
"learning_rate": 3.3054059445441854e-05, | |
"loss": 0.112, | |
"step": 11280 | |
}, | |
{ | |
"epoch": 20.25, | |
"learning_rate": 3.3034111310592466e-05, | |
"loss": 0.1289, | |
"step": 11290 | |
}, | |
{ | |
"epoch": 20.27, | |
"learning_rate": 3.3014163175743065e-05, | |
"loss": 0.122, | |
"step": 11300 | |
}, | |
{ | |
"epoch": 20.29, | |
"learning_rate": 3.299421504089368e-05, | |
"loss": 0.1383, | |
"step": 11310 | |
}, | |
{ | |
"epoch": 20.3, | |
"learning_rate": 3.297426690604429e-05, | |
"loss": 0.1367, | |
"step": 11320 | |
}, | |
{ | |
"epoch": 20.32, | |
"learning_rate": 3.2954318771194896e-05, | |
"loss": 0.1206, | |
"step": 11330 | |
}, | |
{ | |
"epoch": 20.34, | |
"learning_rate": 3.29343706363455e-05, | |
"loss": 0.11, | |
"step": 11340 | |
}, | |
{ | |
"epoch": 20.36, | |
"learning_rate": 3.2914422501496114e-05, | |
"loss": 0.1052, | |
"step": 11350 | |
}, | |
{ | |
"epoch": 20.38, | |
"learning_rate": 3.289447436664672e-05, | |
"loss": 0.1011, | |
"step": 11360 | |
}, | |
{ | |
"epoch": 20.39, | |
"learning_rate": 3.287452623179733e-05, | |
"loss": 0.108, | |
"step": 11370 | |
}, | |
{ | |
"epoch": 20.41, | |
"learning_rate": 3.285457809694794e-05, | |
"loss": 0.1126, | |
"step": 11380 | |
}, | |
{ | |
"epoch": 20.43, | |
"learning_rate": 3.283462996209854e-05, | |
"loss": 0.0932, | |
"step": 11390 | |
}, | |
{ | |
"epoch": 20.45, | |
"learning_rate": 3.2814681827249155e-05, | |
"loss": 0.108, | |
"step": 11400 | |
}, | |
{ | |
"epoch": 20.47, | |
"learning_rate": 3.279473369239976e-05, | |
"loss": 0.1364, | |
"step": 11410 | |
}, | |
{ | |
"epoch": 20.48, | |
"learning_rate": 3.2774785557550366e-05, | |
"loss": 0.1183, | |
"step": 11420 | |
}, | |
{ | |
"epoch": 20.5, | |
"learning_rate": 3.275483742270098e-05, | |
"loss": 0.1112, | |
"step": 11430 | |
}, | |
{ | |
"epoch": 20.52, | |
"learning_rate": 3.273488928785159e-05, | |
"loss": 0.1126, | |
"step": 11440 | |
}, | |
{ | |
"epoch": 20.54, | |
"learning_rate": 3.271494115300219e-05, | |
"loss": 0.0929, | |
"step": 11450 | |
}, | |
{ | |
"epoch": 20.56, | |
"learning_rate": 3.26949930181528e-05, | |
"loss": 0.1057, | |
"step": 11460 | |
}, | |
{ | |
"epoch": 20.57, | |
"learning_rate": 3.2675044883303415e-05, | |
"loss": 0.1167, | |
"step": 11470 | |
}, | |
{ | |
"epoch": 20.59, | |
"learning_rate": 3.265509674845402e-05, | |
"loss": 0.1217, | |
"step": 11480 | |
}, | |
{ | |
"epoch": 20.61, | |
"learning_rate": 3.2635148613604626e-05, | |
"loss": 0.1101, | |
"step": 11490 | |
}, | |
{ | |
"epoch": 20.63, | |
"learning_rate": 3.261520047875524e-05, | |
"loss": 0.0885, | |
"step": 11500 | |
}, | |
{ | |
"epoch": 20.65, | |
"learning_rate": 3.2595252343905844e-05, | |
"loss": 0.1036, | |
"step": 11510 | |
}, | |
{ | |
"epoch": 20.66, | |
"learning_rate": 3.2575304209056456e-05, | |
"loss": 0.1268, | |
"step": 11520 | |
}, | |
{ | |
"epoch": 20.68, | |
"learning_rate": 3.255535607420706e-05, | |
"loss": 0.1195, | |
"step": 11530 | |
}, | |
{ | |
"epoch": 20.7, | |
"learning_rate": 3.253540793935767e-05, | |
"loss": 0.1039, | |
"step": 11540 | |
}, | |
{ | |
"epoch": 20.72, | |
"learning_rate": 3.251545980450828e-05, | |
"loss": 0.0855, | |
"step": 11550 | |
}, | |
{ | |
"epoch": 20.74, | |
"learning_rate": 3.249551166965889e-05, | |
"loss": 0.1096, | |
"step": 11560 | |
}, | |
{ | |
"epoch": 20.75, | |
"learning_rate": 3.247556353480949e-05, | |
"loss": 0.1106, | |
"step": 11570 | |
}, | |
{ | |
"epoch": 20.77, | |
"learning_rate": 3.2455615399960104e-05, | |
"loss": 0.1085, | |
"step": 11580 | |
}, | |
{ | |
"epoch": 20.79, | |
"learning_rate": 3.2435667265110716e-05, | |
"loss": 0.126, | |
"step": 11590 | |
}, | |
{ | |
"epoch": 20.81, | |
"learning_rate": 3.241571913026132e-05, | |
"loss": 0.0959, | |
"step": 11600 | |
}, | |
{ | |
"epoch": 20.83, | |
"learning_rate": 3.239577099541193e-05, | |
"loss": 0.1152, | |
"step": 11610 | |
}, | |
{ | |
"epoch": 20.84, | |
"learning_rate": 3.237582286056254e-05, | |
"loss": 0.1252, | |
"step": 11620 | |
}, | |
{ | |
"epoch": 20.86, | |
"learning_rate": 3.2355874725713145e-05, | |
"loss": 0.1151, | |
"step": 11630 | |
}, | |
{ | |
"epoch": 20.88, | |
"learning_rate": 3.233592659086376e-05, | |
"loss": 0.1083, | |
"step": 11640 | |
}, | |
{ | |
"epoch": 20.9, | |
"learning_rate": 3.231597845601436e-05, | |
"loss": 0.1292, | |
"step": 11650 | |
}, | |
{ | |
"epoch": 20.91, | |
"learning_rate": 3.229603032116497e-05, | |
"loss": 0.1201, | |
"step": 11660 | |
}, | |
{ | |
"epoch": 20.93, | |
"learning_rate": 3.227608218631558e-05, | |
"loss": 0.1006, | |
"step": 11670 | |
}, | |
{ | |
"epoch": 20.95, | |
"learning_rate": 3.2256134051466194e-05, | |
"loss": 0.1131, | |
"step": 11680 | |
}, | |
{ | |
"epoch": 20.97, | |
"learning_rate": 3.223618591661679e-05, | |
"loss": 0.1206, | |
"step": 11690 | |
}, | |
{ | |
"epoch": 20.99, | |
"learning_rate": 3.2216237781767405e-05, | |
"loss": 0.0912, | |
"step": 11700 | |
}, | |
{ | |
"epoch": 21.0, | |
"eval_accuracy": { | |
"accuracy": 0.9757793227180982 | |
}, | |
"eval_f1": { | |
"f1": 0.9743545836218206 | |
}, | |
"eval_loss": 0.07501205056905746, | |
"eval_precision": { | |
"precision": 0.9744013683930541 | |
}, | |
"eval_recall": { | |
"recall": 0.9743462910019547 | |
}, | |
"eval_runtime": 96.9661, | |
"eval_samples_per_second": 183.941, | |
"eval_steps_per_second": 5.755, | |
"step": 11707 | |
}, | |
{ | |
"epoch": 21.0, | |
"learning_rate": 3.219628964691802e-05, | |
"loss": 0.1059, | |
"step": 11710 | |
}, | |
{ | |
"epoch": 21.02, | |
"learning_rate": 3.217634151206862e-05, | |
"loss": 0.1021, | |
"step": 11720 | |
}, | |
{ | |
"epoch": 21.04, | |
"learning_rate": 3.215639337721923e-05, | |
"loss": 0.1044, | |
"step": 11730 | |
}, | |
{ | |
"epoch": 21.06, | |
"learning_rate": 3.213644524236984e-05, | |
"loss": 0.098, | |
"step": 11740 | |
}, | |
{ | |
"epoch": 21.08, | |
"learning_rate": 3.2116497107520446e-05, | |
"loss": 0.1192, | |
"step": 11750 | |
}, | |
{ | |
"epoch": 21.09, | |
"learning_rate": 3.209654897267106e-05, | |
"loss": 0.1098, | |
"step": 11760 | |
}, | |
{ | |
"epoch": 21.11, | |
"learning_rate": 3.2076600837821664e-05, | |
"loss": 0.1244, | |
"step": 11770 | |
}, | |
{ | |
"epoch": 21.13, | |
"learning_rate": 3.205665270297227e-05, | |
"loss": 0.1163, | |
"step": 11780 | |
}, | |
{ | |
"epoch": 21.15, | |
"learning_rate": 3.203670456812288e-05, | |
"loss": 0.1267, | |
"step": 11790 | |
}, | |
{ | |
"epoch": 21.17, | |
"learning_rate": 3.2016756433273495e-05, | |
"loss": 0.0953, | |
"step": 11800 | |
}, | |
{ | |
"epoch": 21.18, | |
"learning_rate": 3.1996808298424094e-05, | |
"loss": 0.1165, | |
"step": 11810 | |
}, | |
{ | |
"epoch": 21.2, | |
"learning_rate": 3.1976860163574706e-05, | |
"loss": 0.0978, | |
"step": 11820 | |
}, | |
{ | |
"epoch": 21.22, | |
"learning_rate": 3.195691202872532e-05, | |
"loss": 0.1034, | |
"step": 11830 | |
}, | |
{ | |
"epoch": 21.24, | |
"learning_rate": 3.1936963893875924e-05, | |
"loss": 0.1029, | |
"step": 11840 | |
}, | |
{ | |
"epoch": 21.26, | |
"learning_rate": 3.191701575902653e-05, | |
"loss": 0.1196, | |
"step": 11850 | |
}, | |
{ | |
"epoch": 21.27, | |
"learning_rate": 3.189706762417714e-05, | |
"loss": 0.1041, | |
"step": 11860 | |
}, | |
{ | |
"epoch": 21.29, | |
"learning_rate": 3.187711948932775e-05, | |
"loss": 0.1038, | |
"step": 11870 | |
}, | |
{ | |
"epoch": 21.31, | |
"learning_rate": 3.185717135447836e-05, | |
"loss": 0.0942, | |
"step": 11880 | |
}, | |
{ | |
"epoch": 21.33, | |
"learning_rate": 3.1837223219628966e-05, | |
"loss": 0.1111, | |
"step": 11890 | |
}, | |
{ | |
"epoch": 21.35, | |
"learning_rate": 3.181727508477957e-05, | |
"loss": 0.0972, | |
"step": 11900 | |
}, | |
{ | |
"epoch": 21.36, | |
"learning_rate": 3.1797326949930184e-05, | |
"loss": 0.1149, | |
"step": 11910 | |
}, | |
{ | |
"epoch": 21.38, | |
"learning_rate": 3.1777378815080796e-05, | |
"loss": 0.1141, | |
"step": 11920 | |
}, | |
{ | |
"epoch": 21.4, | |
"learning_rate": 3.17574306802314e-05, | |
"loss": 0.1246, | |
"step": 11930 | |
}, | |
{ | |
"epoch": 21.42, | |
"learning_rate": 3.173748254538201e-05, | |
"loss": 0.1008, | |
"step": 11940 | |
}, | |
{ | |
"epoch": 21.43, | |
"learning_rate": 3.171753441053262e-05, | |
"loss": 0.1053, | |
"step": 11950 | |
}, | |
{ | |
"epoch": 21.45, | |
"learning_rate": 3.1697586275683225e-05, | |
"loss": 0.1262, | |
"step": 11960 | |
}, | |
{ | |
"epoch": 21.47, | |
"learning_rate": 3.167763814083383e-05, | |
"loss": 0.0978, | |
"step": 11970 | |
}, | |
{ | |
"epoch": 21.49, | |
"learning_rate": 3.165769000598444e-05, | |
"loss": 0.084, | |
"step": 11980 | |
}, | |
{ | |
"epoch": 21.51, | |
"learning_rate": 3.163774187113505e-05, | |
"loss": 0.1137, | |
"step": 11990 | |
}, | |
{ | |
"epoch": 21.52, | |
"learning_rate": 3.161779373628566e-05, | |
"loss": 0.1137, | |
"step": 12000 | |
}, | |
{ | |
"epoch": 21.54, | |
"learning_rate": 3.159784560143627e-05, | |
"loss": 0.1474, | |
"step": 12010 | |
}, | |
{ | |
"epoch": 21.56, | |
"learning_rate": 3.157789746658687e-05, | |
"loss": 0.116, | |
"step": 12020 | |
}, | |
{ | |
"epoch": 21.58, | |
"learning_rate": 3.1557949331737485e-05, | |
"loss": 0.1145, | |
"step": 12030 | |
}, | |
{ | |
"epoch": 21.6, | |
"learning_rate": 3.15380011968881e-05, | |
"loss": 0.1184, | |
"step": 12040 | |
}, | |
{ | |
"epoch": 21.61, | |
"learning_rate": 3.15180530620387e-05, | |
"loss": 0.1016, | |
"step": 12050 | |
}, | |
{ | |
"epoch": 21.63, | |
"learning_rate": 3.149810492718931e-05, | |
"loss": 0.1183, | |
"step": 12060 | |
}, | |
{ | |
"epoch": 21.65, | |
"learning_rate": 3.147815679233992e-05, | |
"loss": 0.1202, | |
"step": 12070 | |
}, | |
{ | |
"epoch": 21.67, | |
"learning_rate": 3.1458208657490526e-05, | |
"loss": 0.1014, | |
"step": 12080 | |
}, | |
{ | |
"epoch": 21.69, | |
"learning_rate": 3.143826052264113e-05, | |
"loss": 0.0979, | |
"step": 12090 | |
}, | |
{ | |
"epoch": 21.7, | |
"learning_rate": 3.1418312387791744e-05, | |
"loss": 0.0934, | |
"step": 12100 | |
}, | |
{ | |
"epoch": 21.72, | |
"learning_rate": 3.139836425294235e-05, | |
"loss": 0.0867, | |
"step": 12110 | |
}, | |
{ | |
"epoch": 21.74, | |
"learning_rate": 3.137841611809296e-05, | |
"loss": 0.1156, | |
"step": 12120 | |
}, | |
{ | |
"epoch": 21.76, | |
"learning_rate": 3.135846798324357e-05, | |
"loss": 0.11, | |
"step": 12130 | |
}, | |
{ | |
"epoch": 21.78, | |
"learning_rate": 3.1338519848394173e-05, | |
"loss": 0.0927, | |
"step": 12140 | |
}, | |
{ | |
"epoch": 21.79, | |
"learning_rate": 3.1318571713544786e-05, | |
"loss": 0.0885, | |
"step": 12150 | |
}, | |
{ | |
"epoch": 21.81, | |
"learning_rate": 3.129862357869539e-05, | |
"loss": 0.1108, | |
"step": 12160 | |
}, | |
{ | |
"epoch": 21.83, | |
"learning_rate": 3.1278675443846004e-05, | |
"loss": 0.0972, | |
"step": 12170 | |
}, | |
{ | |
"epoch": 21.85, | |
"learning_rate": 3.125872730899661e-05, | |
"loss": 0.1263, | |
"step": 12180 | |
}, | |
{ | |
"epoch": 21.87, | |
"learning_rate": 3.123877917414722e-05, | |
"loss": 0.1119, | |
"step": 12190 | |
}, | |
{ | |
"epoch": 21.88, | |
"learning_rate": 3.121883103929783e-05, | |
"loss": 0.1089, | |
"step": 12200 | |
}, | |
{ | |
"epoch": 21.9, | |
"learning_rate": 3.119888290444843e-05, | |
"loss": 0.1389, | |
"step": 12210 | |
}, | |
{ | |
"epoch": 21.92, | |
"learning_rate": 3.1178934769599045e-05, | |
"loss": 0.1115, | |
"step": 12220 | |
}, | |
{ | |
"epoch": 21.94, | |
"learning_rate": 3.115898663474965e-05, | |
"loss": 0.0977, | |
"step": 12230 | |
}, | |
{ | |
"epoch": 21.96, | |
"learning_rate": 3.113903849990026e-05, | |
"loss": 0.1208, | |
"step": 12240 | |
}, | |
{ | |
"epoch": 21.97, | |
"learning_rate": 3.111909036505087e-05, | |
"loss": 0.1062, | |
"step": 12250 | |
}, | |
{ | |
"epoch": 21.99, | |
"learning_rate": 3.1099142230201475e-05, | |
"loss": 0.1014, | |
"step": 12260 | |
}, | |
{ | |
"epoch": 22.0, | |
"eval_accuracy": { | |
"accuracy": 0.9767324512222472 | |
}, | |
"eval_f1": { | |
"f1": 0.9756321207375294 | |
}, | |
"eval_loss": 0.06683723628520966, | |
"eval_precision": { | |
"precision": 0.9757768485441531 | |
}, | |
"eval_recall": { | |
"recall": 0.9755062975650303 | |
}, | |
"eval_runtime": 97.9323, | |
"eval_samples_per_second": 182.126, | |
"eval_steps_per_second": 5.698, | |
"step": 12265 | |
}, | |
{ | |
"epoch": 22.01, | |
"learning_rate": 3.107919409535209e-05, | |
"loss": 0.1262, | |
"step": 12270 | |
}, | |
{ | |
"epoch": 22.03, | |
"learning_rate": 3.105924596050269e-05, | |
"loss": 0.0876, | |
"step": 12280 | |
}, | |
{ | |
"epoch": 22.04, | |
"learning_rate": 3.1039297825653305e-05, | |
"loss": 0.1011, | |
"step": 12290 | |
}, | |
{ | |
"epoch": 22.06, | |
"learning_rate": 3.101934969080391e-05, | |
"loss": 0.1049, | |
"step": 12300 | |
}, | |
{ | |
"epoch": 22.08, | |
"learning_rate": 3.099940155595452e-05, | |
"loss": 0.1151, | |
"step": 12310 | |
}, | |
{ | |
"epoch": 22.1, | |
"learning_rate": 3.097945342110513e-05, | |
"loss": 0.093, | |
"step": 12320 | |
}, | |
{ | |
"epoch": 22.12, | |
"learning_rate": 3.0959505286255734e-05, | |
"loss": 0.0962, | |
"step": 12330 | |
}, | |
{ | |
"epoch": 22.13, | |
"learning_rate": 3.0939557151406347e-05, | |
"loss": 0.1027, | |
"step": 12340 | |
}, | |
{ | |
"epoch": 22.15, | |
"learning_rate": 3.091960901655695e-05, | |
"loss": 0.1102, | |
"step": 12350 | |
}, | |
{ | |
"epoch": 22.17, | |
"learning_rate": 3.089966088170756e-05, | |
"loss": 0.1039, | |
"step": 12360 | |
}, | |
{ | |
"epoch": 22.19, | |
"learning_rate": 3.087971274685817e-05, | |
"loss": 0.1156, | |
"step": 12370 | |
}, | |
{ | |
"epoch": 22.21, | |
"learning_rate": 3.0859764612008776e-05, | |
"loss": 0.1374, | |
"step": 12380 | |
}, | |
{ | |
"epoch": 22.22, | |
"learning_rate": 3.083981647715939e-05, | |
"loss": 0.131, | |
"step": 12390 | |
}, | |
{ | |
"epoch": 22.24, | |
"learning_rate": 3.0819868342309994e-05, | |
"loss": 0.1005, | |
"step": 12400 | |
}, | |
{ | |
"epoch": 22.26, | |
"learning_rate": 3.0799920207460606e-05, | |
"loss": 0.1083, | |
"step": 12410 | |
}, | |
{ | |
"epoch": 22.28, | |
"learning_rate": 3.077997207261121e-05, | |
"loss": 0.1206, | |
"step": 12420 | |
}, | |
{ | |
"epoch": 22.3, | |
"learning_rate": 3.0760023937761824e-05, | |
"loss": 0.0828, | |
"step": 12430 | |
}, | |
{ | |
"epoch": 22.31, | |
"learning_rate": 3.074007580291243e-05, | |
"loss": 0.0876, | |
"step": 12440 | |
}, | |
{ | |
"epoch": 22.33, | |
"learning_rate": 3.0720127668063035e-05, | |
"loss": 0.0853, | |
"step": 12450 | |
}, | |
{ | |
"epoch": 22.35, | |
"learning_rate": 3.070017953321365e-05, | |
"loss": 0.0978, | |
"step": 12460 | |
}, | |
{ | |
"epoch": 22.37, | |
"learning_rate": 3.068023139836425e-05, | |
"loss": 0.0838, | |
"step": 12470 | |
}, | |
{ | |
"epoch": 22.39, | |
"learning_rate": 3.066028326351486e-05, | |
"loss": 0.11, | |
"step": 12480 | |
}, | |
{ | |
"epoch": 22.4, | |
"learning_rate": 3.064033512866547e-05, | |
"loss": 0.0932, | |
"step": 12490 | |
}, | |
{ | |
"epoch": 22.42, | |
"learning_rate": 3.0620386993816084e-05, | |
"loss": 0.1047, | |
"step": 12500 | |
}, | |
{ | |
"epoch": 22.44, | |
"learning_rate": 3.060043885896669e-05, | |
"loss": 0.1136, | |
"step": 12510 | |
}, | |
{ | |
"epoch": 22.46, | |
"learning_rate": 3.0580490724117295e-05, | |
"loss": 0.1309, | |
"step": 12520 | |
}, | |
{ | |
"epoch": 22.48, | |
"learning_rate": 3.056054258926791e-05, | |
"loss": 0.1196, | |
"step": 12530 | |
}, | |
{ | |
"epoch": 22.49, | |
"learning_rate": 3.054059445441851e-05, | |
"loss": 0.1098, | |
"step": 12540 | |
}, | |
{ | |
"epoch": 22.51, | |
"learning_rate": 3.0520646319569125e-05, | |
"loss": 0.122, | |
"step": 12550 | |
}, | |
{ | |
"epoch": 22.53, | |
"learning_rate": 3.0500698184719728e-05, | |
"loss": 0.1157, | |
"step": 12560 | |
}, | |
{ | |
"epoch": 22.55, | |
"learning_rate": 3.0480750049870337e-05, | |
"loss": 0.1006, | |
"step": 12570 | |
}, | |
{ | |
"epoch": 22.57, | |
"learning_rate": 3.046080191502095e-05, | |
"loss": 0.106, | |
"step": 12580 | |
}, | |
{ | |
"epoch": 22.58, | |
"learning_rate": 3.0440853780171558e-05, | |
"loss": 0.1071, | |
"step": 12590 | |
}, | |
{ | |
"epoch": 22.6, | |
"learning_rate": 3.0420905645322164e-05, | |
"loss": 0.1078, | |
"step": 12600 | |
}, | |
{ | |
"epoch": 22.62, | |
"learning_rate": 3.0400957510472772e-05, | |
"loss": 0.0983, | |
"step": 12610 | |
}, | |
{ | |
"epoch": 22.64, | |
"learning_rate": 3.038100937562338e-05, | |
"loss": 0.0951, | |
"step": 12620 | |
}, | |
{ | |
"epoch": 22.65, | |
"learning_rate": 3.036106124077399e-05, | |
"loss": 0.1093, | |
"step": 12630 | |
}, | |
{ | |
"epoch": 22.67, | |
"learning_rate": 3.0341113105924596e-05, | |
"loss": 0.1088, | |
"step": 12640 | |
}, | |
{ | |
"epoch": 22.69, | |
"learning_rate": 3.0321164971075205e-05, | |
"loss": 0.1178, | |
"step": 12650 | |
}, | |
{ | |
"epoch": 22.71, | |
"learning_rate": 3.0301216836225814e-05, | |
"loss": 0.105, | |
"step": 12660 | |
}, | |
{ | |
"epoch": 22.73, | |
"learning_rate": 3.0281268701376426e-05, | |
"loss": 0.1285, | |
"step": 12670 | |
}, | |
{ | |
"epoch": 22.74, | |
"learning_rate": 3.026132056652703e-05, | |
"loss": 0.1001, | |
"step": 12680 | |
}, | |
{ | |
"epoch": 22.76, | |
"learning_rate": 3.0241372431677638e-05, | |
"loss": 0.1059, | |
"step": 12690 | |
}, | |
{ | |
"epoch": 22.78, | |
"learning_rate": 3.022142429682825e-05, | |
"loss": 0.11, | |
"step": 12700 | |
}, | |
{ | |
"epoch": 22.8, | |
"learning_rate": 3.020147616197886e-05, | |
"loss": 0.1007, | |
"step": 12710 | |
}, | |
{ | |
"epoch": 22.82, | |
"learning_rate": 3.0181528027129465e-05, | |
"loss": 0.0924, | |
"step": 12720 | |
}, | |
{ | |
"epoch": 22.83, | |
"learning_rate": 3.0161579892280074e-05, | |
"loss": 0.0947, | |
"step": 12730 | |
}, | |
{ | |
"epoch": 22.85, | |
"learning_rate": 3.0141631757430683e-05, | |
"loss": 0.1145, | |
"step": 12740 | |
}, | |
{ | |
"epoch": 22.87, | |
"learning_rate": 3.012168362258129e-05, | |
"loss": 0.1293, | |
"step": 12750 | |
}, | |
{ | |
"epoch": 22.89, | |
"learning_rate": 3.0101735487731897e-05, | |
"loss": 0.0884, | |
"step": 12760 | |
}, | |
{ | |
"epoch": 22.91, | |
"learning_rate": 3.0081787352882506e-05, | |
"loss": 0.0808, | |
"step": 12770 | |
}, | |
{ | |
"epoch": 22.92, | |
"learning_rate": 3.0061839218033115e-05, | |
"loss": 0.0872, | |
"step": 12780 | |
}, | |
{ | |
"epoch": 22.94, | |
"learning_rate": 3.0041891083183728e-05, | |
"loss": 0.1114, | |
"step": 12790 | |
}, | |
{ | |
"epoch": 22.96, | |
"learning_rate": 3.002194294833433e-05, | |
"loss": 0.1115, | |
"step": 12800 | |
}, | |
{ | |
"epoch": 22.98, | |
"learning_rate": 3.000199481348494e-05, | |
"loss": 0.1161, | |
"step": 12810 | |
}, | |
{ | |
"epoch": 23.0, | |
"learning_rate": 2.998204667863555e-05, | |
"loss": 0.1287, | |
"step": 12820 | |
}, | |
{ | |
"epoch": 23.0, | |
"eval_accuracy": { | |
"accuracy": 0.9748261942139493 | |
}, | |
"eval_f1": { | |
"f1": 0.9735162798230279 | |
}, | |
"eval_loss": 0.07296622544527054, | |
"eval_precision": { | |
"precision": 0.9733253666235441 | |
}, | |
"eval_recall": { | |
"recall": 0.9737872383822983 | |
}, | |
"eval_runtime": 98.3574, | |
"eval_samples_per_second": 181.339, | |
"eval_steps_per_second": 5.673, | |
"step": 12822 | |
}, | |
{ | |
"epoch": 23.01, | |
"learning_rate": 2.996209854378616e-05, | |
"loss": 0.0985, | |
"step": 12830 | |
}, | |
{ | |
"epoch": 23.03, | |
"learning_rate": 2.9942150408936766e-05, | |
"loss": 0.111, | |
"step": 12840 | |
}, | |
{ | |
"epoch": 23.05, | |
"learning_rate": 2.9922202274087375e-05, | |
"loss": 0.1048, | |
"step": 12850 | |
}, | |
{ | |
"epoch": 23.07, | |
"learning_rate": 2.9902254139237984e-05, | |
"loss": 0.1375, | |
"step": 12860 | |
}, | |
{ | |
"epoch": 23.09, | |
"learning_rate": 2.9882306004388593e-05, | |
"loss": 0.0984, | |
"step": 12870 | |
}, | |
{ | |
"epoch": 23.1, | |
"learning_rate": 2.98623578695392e-05, | |
"loss": 0.1094, | |
"step": 12880 | |
}, | |
{ | |
"epoch": 23.12, | |
"learning_rate": 2.9842409734689807e-05, | |
"loss": 0.0999, | |
"step": 12890 | |
}, | |
{ | |
"epoch": 23.14, | |
"learning_rate": 2.9822461599840416e-05, | |
"loss": 0.1106, | |
"step": 12900 | |
}, | |
{ | |
"epoch": 23.16, | |
"learning_rate": 2.9802513464991022e-05, | |
"loss": 0.0916, | |
"step": 12910 | |
}, | |
{ | |
"epoch": 23.17, | |
"learning_rate": 2.978256533014163e-05, | |
"loss": 0.0806, | |
"step": 12920 | |
}, | |
{ | |
"epoch": 23.19, | |
"learning_rate": 2.976261719529224e-05, | |
"loss": 0.1111, | |
"step": 12930 | |
}, | |
{ | |
"epoch": 23.21, | |
"learning_rate": 2.9742669060442852e-05, | |
"loss": 0.1042, | |
"step": 12940 | |
}, | |
{ | |
"epoch": 23.23, | |
"learning_rate": 2.9722720925593455e-05, | |
"loss": 0.108, | |
"step": 12950 | |
}, | |
{ | |
"epoch": 23.25, | |
"learning_rate": 2.9702772790744067e-05, | |
"loss": 0.0858, | |
"step": 12960 | |
}, | |
{ | |
"epoch": 23.26, | |
"learning_rate": 2.9682824655894676e-05, | |
"loss": 0.1188, | |
"step": 12970 | |
}, | |
{ | |
"epoch": 23.28, | |
"learning_rate": 2.9662876521045285e-05, | |
"loss": 0.1247, | |
"step": 12980 | |
}, | |
{ | |
"epoch": 23.3, | |
"learning_rate": 2.964292838619589e-05, | |
"loss": 0.1047, | |
"step": 12990 | |
}, | |
{ | |
"epoch": 23.32, | |
"learning_rate": 2.96229802513465e-05, | |
"loss": 0.0968, | |
"step": 13000 | |
}, | |
{ | |
"epoch": 23.34, | |
"learning_rate": 2.960303211649711e-05, | |
"loss": 0.0997, | |
"step": 13010 | |
}, | |
{ | |
"epoch": 23.35, | |
"learning_rate": 2.9583083981647718e-05, | |
"loss": 0.1163, | |
"step": 13020 | |
}, | |
{ | |
"epoch": 23.37, | |
"learning_rate": 2.9563135846798323e-05, | |
"loss": 0.1015, | |
"step": 13030 | |
}, | |
{ | |
"epoch": 23.39, | |
"learning_rate": 2.9543187711948932e-05, | |
"loss": 0.0948, | |
"step": 13040 | |
}, | |
{ | |
"epoch": 23.41, | |
"learning_rate": 2.9523239577099545e-05, | |
"loss": 0.0792, | |
"step": 13050 | |
}, | |
{ | |
"epoch": 23.43, | |
"learning_rate": 2.9503291442250154e-05, | |
"loss": 0.098, | |
"step": 13060 | |
}, | |
{ | |
"epoch": 23.44, | |
"learning_rate": 2.9483343307400756e-05, | |
"loss": 0.0987, | |
"step": 13070 | |
}, | |
{ | |
"epoch": 23.46, | |
"learning_rate": 2.9463395172551368e-05, | |
"loss": 0.092, | |
"step": 13080 | |
}, | |
{ | |
"epoch": 23.48, | |
"learning_rate": 2.9443447037701977e-05, | |
"loss": 0.0957, | |
"step": 13090 | |
}, | |
{ | |
"epoch": 23.5, | |
"learning_rate": 2.9423498902852586e-05, | |
"loss": 0.1069, | |
"step": 13100 | |
}, | |
{ | |
"epoch": 23.52, | |
"learning_rate": 2.9403550768003192e-05, | |
"loss": 0.096, | |
"step": 13110 | |
}, | |
{ | |
"epoch": 23.53, | |
"learning_rate": 2.93836026331538e-05, | |
"loss": 0.123, | |
"step": 13120 | |
}, | |
{ | |
"epoch": 23.55, | |
"learning_rate": 2.936365449830441e-05, | |
"loss": 0.109, | |
"step": 13130 | |
}, | |
{ | |
"epoch": 23.57, | |
"learning_rate": 2.934370636345502e-05, | |
"loss": 0.1242, | |
"step": 13140 | |
}, | |
{ | |
"epoch": 23.59, | |
"learning_rate": 2.9323758228605624e-05, | |
"loss": 0.0954, | |
"step": 13150 | |
}, | |
{ | |
"epoch": 23.61, | |
"learning_rate": 2.9303810093756233e-05, | |
"loss": 0.1054, | |
"step": 13160 | |
}, | |
{ | |
"epoch": 23.62, | |
"learning_rate": 2.9283861958906846e-05, | |
"loss": 0.0903, | |
"step": 13170 | |
}, | |
{ | |
"epoch": 23.64, | |
"learning_rate": 2.9263913824057455e-05, | |
"loss": 0.1071, | |
"step": 13180 | |
}, | |
{ | |
"epoch": 23.66, | |
"learning_rate": 2.9243965689208057e-05, | |
"loss": 0.1002, | |
"step": 13190 | |
}, | |
{ | |
"epoch": 23.68, | |
"learning_rate": 2.922401755435867e-05, | |
"loss": 0.0843, | |
"step": 13200 | |
}, | |
{ | |
"epoch": 23.7, | |
"learning_rate": 2.920406941950928e-05, | |
"loss": 0.1035, | |
"step": 13210 | |
}, | |
{ | |
"epoch": 23.71, | |
"learning_rate": 2.9184121284659887e-05, | |
"loss": 0.0881, | |
"step": 13220 | |
}, | |
{ | |
"epoch": 23.73, | |
"learning_rate": 2.9164173149810493e-05, | |
"loss": 0.1047, | |
"step": 13230 | |
}, | |
{ | |
"epoch": 23.75, | |
"learning_rate": 2.9144225014961102e-05, | |
"loss": 0.1076, | |
"step": 13240 | |
}, | |
{ | |
"epoch": 23.77, | |
"learning_rate": 2.912427688011171e-05, | |
"loss": 0.0935, | |
"step": 13250 | |
}, | |
{ | |
"epoch": 23.78, | |
"learning_rate": 2.910432874526232e-05, | |
"loss": 0.1102, | |
"step": 13260 | |
}, | |
{ | |
"epoch": 23.8, | |
"learning_rate": 2.9084380610412926e-05, | |
"loss": 0.1055, | |
"step": 13270 | |
}, | |
{ | |
"epoch": 23.82, | |
"learning_rate": 2.9064432475563535e-05, | |
"loss": 0.1142, | |
"step": 13280 | |
}, | |
{ | |
"epoch": 23.84, | |
"learning_rate": 2.9044484340714147e-05, | |
"loss": 0.1072, | |
"step": 13290 | |
}, | |
{ | |
"epoch": 23.86, | |
"learning_rate": 2.9024536205864756e-05, | |
"loss": 0.0945, | |
"step": 13300 | |
}, | |
{ | |
"epoch": 23.87, | |
"learning_rate": 2.9004588071015358e-05, | |
"loss": 0.1009, | |
"step": 13310 | |
}, | |
{ | |
"epoch": 23.89, | |
"learning_rate": 2.898463993616597e-05, | |
"loss": 0.0981, | |
"step": 13320 | |
}, | |
{ | |
"epoch": 23.91, | |
"learning_rate": 2.896469180131658e-05, | |
"loss": 0.1008, | |
"step": 13330 | |
}, | |
{ | |
"epoch": 23.93, | |
"learning_rate": 2.894474366646719e-05, | |
"loss": 0.0916, | |
"step": 13340 | |
}, | |
{ | |
"epoch": 23.95, | |
"learning_rate": 2.8924795531617794e-05, | |
"loss": 0.1012, | |
"step": 13350 | |
}, | |
{ | |
"epoch": 23.96, | |
"learning_rate": 2.8904847396768403e-05, | |
"loss": 0.1195, | |
"step": 13360 | |
}, | |
{ | |
"epoch": 23.98, | |
"learning_rate": 2.8884899261919012e-05, | |
"loss": 0.1071, | |
"step": 13370 | |
}, | |
{ | |
"epoch": 24.0, | |
"learning_rate": 2.886495112706962e-05, | |
"loss": 0.1021, | |
"step": 13380 | |
}, | |
{ | |
"epoch": 24.0, | |
"eval_accuracy": { | |
"accuracy": 0.9749383269791433 | |
}, | |
"eval_f1": { | |
"f1": 0.9738176491846292 | |
}, | |
"eval_loss": 0.07623546570539474, | |
"eval_precision": { | |
"precision": 0.9736032803908243 | |
}, | |
"eval_recall": { | |
"recall": 0.9741172314123937 | |
}, | |
"eval_runtime": 97.7552, | |
"eval_samples_per_second": 182.456, | |
"eval_steps_per_second": 5.708, | |
"step": 13380 | |
}, | |
{ | |
"epoch": 24.02, | |
"learning_rate": 2.8845002992220227e-05, | |
"loss": 0.0898, | |
"step": 13390 | |
}, | |
{ | |
"epoch": 24.04, | |
"learning_rate": 2.8825054857370836e-05, | |
"loss": 0.1146, | |
"step": 13400 | |
}, | |
{ | |
"epoch": 24.05, | |
"learning_rate": 2.8805106722521448e-05, | |
"loss": 0.0964, | |
"step": 13410 | |
}, | |
{ | |
"epoch": 24.07, | |
"learning_rate": 2.8785158587672057e-05, | |
"loss": 0.0898, | |
"step": 13420 | |
}, | |
{ | |
"epoch": 24.09, | |
"learning_rate": 2.876521045282266e-05, | |
"loss": 0.1009, | |
"step": 13430 | |
}, | |
{ | |
"epoch": 24.11, | |
"learning_rate": 2.874526231797327e-05, | |
"loss": 0.1012, | |
"step": 13440 | |
}, | |
{ | |
"epoch": 24.13, | |
"learning_rate": 2.872531418312388e-05, | |
"loss": 0.1084, | |
"step": 13450 | |
}, | |
{ | |
"epoch": 24.14, | |
"learning_rate": 2.870536604827449e-05, | |
"loss": 0.0947, | |
"step": 13460 | |
}, | |
{ | |
"epoch": 24.16, | |
"learning_rate": 2.8685417913425095e-05, | |
"loss": 0.0824, | |
"step": 13470 | |
}, | |
{ | |
"epoch": 24.18, | |
"learning_rate": 2.8665469778575704e-05, | |
"loss": 0.0837, | |
"step": 13480 | |
}, | |
{ | |
"epoch": 24.2, | |
"learning_rate": 2.8645521643726313e-05, | |
"loss": 0.1033, | |
"step": 13490 | |
}, | |
{ | |
"epoch": 24.22, | |
"learning_rate": 2.8625573508876922e-05, | |
"loss": 0.103, | |
"step": 13500 | |
}, | |
{ | |
"epoch": 24.23, | |
"learning_rate": 2.8605625374027528e-05, | |
"loss": 0.0979, | |
"step": 13510 | |
}, | |
{ | |
"epoch": 24.25, | |
"learning_rate": 2.8585677239178137e-05, | |
"loss": 0.0768, | |
"step": 13520 | |
}, | |
{ | |
"epoch": 24.27, | |
"learning_rate": 2.856572910432875e-05, | |
"loss": 0.0893, | |
"step": 13530 | |
}, | |
{ | |
"epoch": 24.29, | |
"learning_rate": 2.8545780969479358e-05, | |
"loss": 0.1119, | |
"step": 13540 | |
}, | |
{ | |
"epoch": 24.3, | |
"learning_rate": 2.852583283462996e-05, | |
"loss": 0.0883, | |
"step": 13550 | |
}, | |
{ | |
"epoch": 24.32, | |
"learning_rate": 2.8505884699780573e-05, | |
"loss": 0.0971, | |
"step": 13560 | |
}, | |
{ | |
"epoch": 24.34, | |
"learning_rate": 2.8485936564931182e-05, | |
"loss": 0.1084, | |
"step": 13570 | |
}, | |
{ | |
"epoch": 24.36, | |
"learning_rate": 2.846598843008179e-05, | |
"loss": 0.093, | |
"step": 13580 | |
}, | |
{ | |
"epoch": 24.38, | |
"learning_rate": 2.8446040295232396e-05, | |
"loss": 0.1002, | |
"step": 13590 | |
}, | |
{ | |
"epoch": 24.39, | |
"learning_rate": 2.8426092160383005e-05, | |
"loss": 0.0781, | |
"step": 13600 | |
}, | |
{ | |
"epoch": 24.41, | |
"learning_rate": 2.8406144025533614e-05, | |
"loss": 0.0956, | |
"step": 13610 | |
}, | |
{ | |
"epoch": 24.43, | |
"learning_rate": 2.8386195890684227e-05, | |
"loss": 0.1295, | |
"step": 13620 | |
}, | |
{ | |
"epoch": 24.45, | |
"learning_rate": 2.836624775583483e-05, | |
"loss": 0.1102, | |
"step": 13630 | |
}, | |
{ | |
"epoch": 24.47, | |
"learning_rate": 2.8346299620985438e-05, | |
"loss": 0.0993, | |
"step": 13640 | |
}, | |
{ | |
"epoch": 24.48, | |
"learning_rate": 2.832635148613605e-05, | |
"loss": 0.1135, | |
"step": 13650 | |
}, | |
{ | |
"epoch": 24.5, | |
"learning_rate": 2.8306403351286653e-05, | |
"loss": 0.0908, | |
"step": 13660 | |
}, | |
{ | |
"epoch": 24.52, | |
"learning_rate": 2.828645521643726e-05, | |
"loss": 0.094, | |
"step": 13670 | |
}, | |
{ | |
"epoch": 24.54, | |
"learning_rate": 2.8266507081587874e-05, | |
"loss": 0.0983, | |
"step": 13680 | |
}, | |
{ | |
"epoch": 24.56, | |
"learning_rate": 2.8246558946738483e-05, | |
"loss": 0.095, | |
"step": 13690 | |
}, | |
{ | |
"epoch": 24.57, | |
"learning_rate": 2.822661081188909e-05, | |
"loss": 0.0878, | |
"step": 13700 | |
}, | |
{ | |
"epoch": 24.59, | |
"learning_rate": 2.8206662677039698e-05, | |
"loss": 0.0992, | |
"step": 13710 | |
}, | |
{ | |
"epoch": 24.61, | |
"learning_rate": 2.8186714542190307e-05, | |
"loss": 0.1033, | |
"step": 13720 | |
}, | |
{ | |
"epoch": 24.63, | |
"learning_rate": 2.8166766407340916e-05, | |
"loss": 0.0963, | |
"step": 13730 | |
}, | |
{ | |
"epoch": 24.65, | |
"learning_rate": 2.814681827249152e-05, | |
"loss": 0.0993, | |
"step": 13740 | |
}, | |
{ | |
"epoch": 24.66, | |
"learning_rate": 2.812687013764213e-05, | |
"loss": 0.0859, | |
"step": 13750 | |
}, | |
{ | |
"epoch": 24.68, | |
"learning_rate": 2.810692200279274e-05, | |
"loss": 0.1133, | |
"step": 13760 | |
}, | |
{ | |
"epoch": 24.7, | |
"learning_rate": 2.808697386794335e-05, | |
"loss": 0.0895, | |
"step": 13770 | |
}, | |
{ | |
"epoch": 24.72, | |
"learning_rate": 2.8067025733093954e-05, | |
"loss": 0.0999, | |
"step": 13780 | |
}, | |
{ | |
"epoch": 24.74, | |
"learning_rate": 2.8047077598244563e-05, | |
"loss": 0.0897, | |
"step": 13790 | |
}, | |
{ | |
"epoch": 24.75, | |
"learning_rate": 2.8027129463395175e-05, | |
"loss": 0.0843, | |
"step": 13800 | |
}, | |
{ | |
"epoch": 24.77, | |
"learning_rate": 2.8007181328545784e-05, | |
"loss": 0.113, | |
"step": 13810 | |
}, | |
{ | |
"epoch": 24.79, | |
"learning_rate": 2.798723319369639e-05, | |
"loss": 0.1002, | |
"step": 13820 | |
}, | |
{ | |
"epoch": 24.81, | |
"learning_rate": 2.7967285058847e-05, | |
"loss": 0.0852, | |
"step": 13830 | |
}, | |
{ | |
"epoch": 24.83, | |
"learning_rate": 2.7947336923997608e-05, | |
"loss": 0.0815, | |
"step": 13840 | |
}, | |
{ | |
"epoch": 24.84, | |
"learning_rate": 2.7927388789148217e-05, | |
"loss": 0.0755, | |
"step": 13850 | |
}, | |
{ | |
"epoch": 24.86, | |
"learning_rate": 2.7907440654298822e-05, | |
"loss": 0.0994, | |
"step": 13860 | |
}, | |
{ | |
"epoch": 24.88, | |
"learning_rate": 2.788749251944943e-05, | |
"loss": 0.1107, | |
"step": 13870 | |
}, | |
{ | |
"epoch": 24.9, | |
"learning_rate": 2.786754438460004e-05, | |
"loss": 0.1049, | |
"step": 13880 | |
}, | |
{ | |
"epoch": 24.91, | |
"learning_rate": 2.7847596249750653e-05, | |
"loss": 0.0817, | |
"step": 13890 | |
}, | |
{ | |
"epoch": 24.93, | |
"learning_rate": 2.7827648114901255e-05, | |
"loss": 0.0871, | |
"step": 13900 | |
}, | |
{ | |
"epoch": 24.95, | |
"learning_rate": 2.7807699980051864e-05, | |
"loss": 0.0801, | |
"step": 13910 | |
}, | |
{ | |
"epoch": 24.97, | |
"learning_rate": 2.7787751845202476e-05, | |
"loss": 0.1207, | |
"step": 13920 | |
}, | |
{ | |
"epoch": 24.99, | |
"learning_rate": 2.7767803710353085e-05, | |
"loss": 0.0998, | |
"step": 13930 | |
}, | |
{ | |
"epoch": 25.0, | |
"eval_accuracy": { | |
"accuracy": 0.9769567167526351 | |
}, | |
"eval_f1": { | |
"f1": 0.9759374545265928 | |
}, | |
"eval_loss": 0.07147885859012604, | |
"eval_precision": { | |
"precision": 0.9758764895384466 | |
}, | |
"eval_recall": { | |
"recall": 0.9760207637707055 | |
}, | |
"eval_runtime": 98.6071, | |
"eval_samples_per_second": 180.879, | |
"eval_steps_per_second": 5.659, | |
"step": 13937 | |
}, | |
{ | |
"epoch": 25.0, | |
"learning_rate": 2.774785557550369e-05, | |
"loss": 0.0995, | |
"step": 13940 | |
}, | |
{ | |
"epoch": 25.02, | |
"learning_rate": 2.77279074406543e-05, | |
"loss": 0.0672, | |
"step": 13950 | |
}, | |
{ | |
"epoch": 25.04, | |
"learning_rate": 2.770795930580491e-05, | |
"loss": 0.0844, | |
"step": 13960 | |
}, | |
{ | |
"epoch": 25.06, | |
"learning_rate": 2.7688011170955518e-05, | |
"loss": 0.0962, | |
"step": 13970 | |
}, | |
{ | |
"epoch": 25.08, | |
"learning_rate": 2.7668063036106123e-05, | |
"loss": 0.1012, | |
"step": 13980 | |
}, | |
{ | |
"epoch": 25.09, | |
"learning_rate": 2.7648114901256732e-05, | |
"loss": 0.1004, | |
"step": 13990 | |
}, | |
{ | |
"epoch": 25.11, | |
"learning_rate": 2.762816676640734e-05, | |
"loss": 0.1145, | |
"step": 14000 | |
}, | |
{ | |
"epoch": 25.13, | |
"learning_rate": 2.7608218631557954e-05, | |
"loss": 0.079, | |
"step": 14010 | |
}, | |
{ | |
"epoch": 25.15, | |
"learning_rate": 2.7588270496708556e-05, | |
"loss": 0.0792, | |
"step": 14020 | |
}, | |
{ | |
"epoch": 25.17, | |
"learning_rate": 2.756832236185917e-05, | |
"loss": 0.0866, | |
"step": 14030 | |
}, | |
{ | |
"epoch": 25.18, | |
"learning_rate": 2.7548374227009777e-05, | |
"loss": 0.0874, | |
"step": 14040 | |
}, | |
{ | |
"epoch": 25.2, | |
"learning_rate": 2.7528426092160386e-05, | |
"loss": 0.1009, | |
"step": 14050 | |
}, | |
{ | |
"epoch": 25.22, | |
"learning_rate": 2.7508477957310992e-05, | |
"loss": 0.1023, | |
"step": 14060 | |
}, | |
{ | |
"epoch": 25.24, | |
"learning_rate": 2.74885298224616e-05, | |
"loss": 0.1032, | |
"step": 14070 | |
}, | |
{ | |
"epoch": 25.26, | |
"learning_rate": 2.746858168761221e-05, | |
"loss": 0.0868, | |
"step": 14080 | |
}, | |
{ | |
"epoch": 25.27, | |
"learning_rate": 2.744863355276282e-05, | |
"loss": 0.1046, | |
"step": 14090 | |
}, | |
{ | |
"epoch": 25.29, | |
"learning_rate": 2.7428685417913425e-05, | |
"loss": 0.1076, | |
"step": 14100 | |
}, | |
{ | |
"epoch": 25.31, | |
"learning_rate": 2.7408737283064034e-05, | |
"loss": 0.0999, | |
"step": 14110 | |
}, | |
{ | |
"epoch": 25.33, | |
"learning_rate": 2.7388789148214643e-05, | |
"loss": 0.1028, | |
"step": 14120 | |
}, | |
{ | |
"epoch": 25.35, | |
"learning_rate": 2.7368841013365255e-05, | |
"loss": 0.1026, | |
"step": 14130 | |
}, | |
{ | |
"epoch": 25.36, | |
"learning_rate": 2.7348892878515857e-05, | |
"loss": 0.1028, | |
"step": 14140 | |
}, | |
{ | |
"epoch": 25.38, | |
"learning_rate": 2.732894474366647e-05, | |
"loss": 0.0952, | |
"step": 14150 | |
}, | |
{ | |
"epoch": 25.4, | |
"learning_rate": 2.730899660881708e-05, | |
"loss": 0.0695, | |
"step": 14160 | |
}, | |
{ | |
"epoch": 25.42, | |
"learning_rate": 2.7289048473967688e-05, | |
"loss": 0.1123, | |
"step": 14170 | |
}, | |
{ | |
"epoch": 25.43, | |
"learning_rate": 2.7269100339118293e-05, | |
"loss": 0.0989, | |
"step": 14180 | |
}, | |
{ | |
"epoch": 25.45, | |
"learning_rate": 2.7249152204268902e-05, | |
"loss": 0.0936, | |
"step": 14190 | |
}, | |
{ | |
"epoch": 25.47, | |
"learning_rate": 2.722920406941951e-05, | |
"loss": 0.1003, | |
"step": 14200 | |
}, | |
{ | |
"epoch": 25.49, | |
"learning_rate": 2.720925593457012e-05, | |
"loss": 0.0969, | |
"step": 14210 | |
}, | |
{ | |
"epoch": 25.51, | |
"learning_rate": 2.7189307799720726e-05, | |
"loss": 0.0992, | |
"step": 14220 | |
}, | |
{ | |
"epoch": 25.52, | |
"learning_rate": 2.7169359664871335e-05, | |
"loss": 0.1013, | |
"step": 14230 | |
}, | |
{ | |
"epoch": 25.54, | |
"learning_rate": 2.7149411530021944e-05, | |
"loss": 0.1018, | |
"step": 14240 | |
}, | |
{ | |
"epoch": 25.56, | |
"learning_rate": 2.7129463395172556e-05, | |
"loss": 0.0929, | |
"step": 14250 | |
}, | |
{ | |
"epoch": 25.58, | |
"learning_rate": 2.710951526032316e-05, | |
"loss": 0.0966, | |
"step": 14260 | |
}, | |
{ | |
"epoch": 25.6, | |
"learning_rate": 2.708956712547377e-05, | |
"loss": 0.0824, | |
"step": 14270 | |
}, | |
{ | |
"epoch": 25.61, | |
"learning_rate": 2.706961899062438e-05, | |
"loss": 0.1119, | |
"step": 14280 | |
}, | |
{ | |
"epoch": 25.63, | |
"learning_rate": 2.704967085577499e-05, | |
"loss": 0.0759, | |
"step": 14290 | |
}, | |
{ | |
"epoch": 25.65, | |
"learning_rate": 2.7029722720925594e-05, | |
"loss": 0.1329, | |
"step": 14300 | |
}, | |
{ | |
"epoch": 25.67, | |
"learning_rate": 2.7009774586076203e-05, | |
"loss": 0.0811, | |
"step": 14310 | |
}, | |
{ | |
"epoch": 25.69, | |
"learning_rate": 2.6989826451226812e-05, | |
"loss": 0.0923, | |
"step": 14320 | |
}, | |
{ | |
"epoch": 25.7, | |
"learning_rate": 2.696987831637742e-05, | |
"loss": 0.0965, | |
"step": 14330 | |
}, | |
{ | |
"epoch": 25.72, | |
"learning_rate": 2.6949930181528027e-05, | |
"loss": 0.0874, | |
"step": 14340 | |
}, | |
{ | |
"epoch": 25.74, | |
"learning_rate": 2.6929982046678636e-05, | |
"loss": 0.1088, | |
"step": 14350 | |
}, | |
{ | |
"epoch": 25.76, | |
"learning_rate": 2.6910033911829245e-05, | |
"loss": 0.1008, | |
"step": 14360 | |
}, | |
{ | |
"epoch": 25.78, | |
"learning_rate": 2.6890085776979857e-05, | |
"loss": 0.0959, | |
"step": 14370 | |
}, | |
{ | |
"epoch": 25.79, | |
"learning_rate": 2.687013764213046e-05, | |
"loss": 0.0933, | |
"step": 14380 | |
}, | |
{ | |
"epoch": 25.81, | |
"learning_rate": 2.6850189507281072e-05, | |
"loss": 0.1216, | |
"step": 14390 | |
}, | |
{ | |
"epoch": 25.83, | |
"learning_rate": 2.683024137243168e-05, | |
"loss": 0.0928, | |
"step": 14400 | |
}, | |
{ | |
"epoch": 25.85, | |
"learning_rate": 2.6810293237582283e-05, | |
"loss": 0.0909, | |
"step": 14410 | |
}, | |
{ | |
"epoch": 25.87, | |
"learning_rate": 2.6790345102732896e-05, | |
"loss": 0.1061, | |
"step": 14420 | |
}, | |
{ | |
"epoch": 25.88, | |
"learning_rate": 2.6770396967883505e-05, | |
"loss": 0.0852, | |
"step": 14430 | |
}, | |
{ | |
"epoch": 25.9, | |
"learning_rate": 2.6750448833034114e-05, | |
"loss": 0.1285, | |
"step": 14440 | |
}, | |
{ | |
"epoch": 25.92, | |
"learning_rate": 2.673050069818472e-05, | |
"loss": 0.0766, | |
"step": 14450 | |
}, | |
{ | |
"epoch": 25.94, | |
"learning_rate": 2.6710552563335328e-05, | |
"loss": 0.0937, | |
"step": 14460 | |
}, | |
{ | |
"epoch": 25.96, | |
"learning_rate": 2.6690604428485937e-05, | |
"loss": 0.1024, | |
"step": 14470 | |
}, | |
{ | |
"epoch": 25.97, | |
"learning_rate": 2.6670656293636546e-05, | |
"loss": 0.0833, | |
"step": 14480 | |
}, | |
{ | |
"epoch": 25.99, | |
"learning_rate": 2.6650708158787152e-05, | |
"loss": 0.1165, | |
"step": 14490 | |
}, | |
{ | |
"epoch": 26.0, | |
"eval_accuracy": { | |
"accuracy": 0.9753868580399193 | |
}, | |
"eval_f1": { | |
"f1": 0.9741465330684267 | |
}, | |
"eval_loss": 0.0761144682765007, | |
"eval_precision": { | |
"precision": 0.9742342246117814 | |
}, | |
"eval_recall": { | |
"recall": 0.9741434756629215 | |
}, | |
"eval_runtime": 98.5676, | |
"eval_samples_per_second": 180.952, | |
"eval_steps_per_second": 5.661, | |
"step": 14495 | |
}, | |
{ | |
"epoch": 26.01, | |
"learning_rate": 2.663076002393776e-05, | |
"loss": 0.0919, | |
"step": 14500 | |
}, | |
{ | |
"epoch": 26.03, | |
"learning_rate": 2.6610811889088373e-05, | |
"loss": 0.1011, | |
"step": 14510 | |
}, | |
{ | |
"epoch": 26.04, | |
"learning_rate": 2.6590863754238982e-05, | |
"loss": 0.1092, | |
"step": 14520 | |
}, | |
{ | |
"epoch": 26.06, | |
"learning_rate": 2.6570915619389584e-05, | |
"loss": 0.0989, | |
"step": 14530 | |
}, | |
{ | |
"epoch": 26.08, | |
"learning_rate": 2.6550967484540197e-05, | |
"loss": 0.0923, | |
"step": 14540 | |
}, | |
{ | |
"epoch": 26.1, | |
"learning_rate": 2.6531019349690806e-05, | |
"loss": 0.092, | |
"step": 14550 | |
}, | |
{ | |
"epoch": 26.12, | |
"learning_rate": 2.6511071214841415e-05, | |
"loss": 0.0936, | |
"step": 14560 | |
}, | |
{ | |
"epoch": 26.13, | |
"learning_rate": 2.649112307999202e-05, | |
"loss": 0.0789, | |
"step": 14570 | |
}, | |
{ | |
"epoch": 26.15, | |
"learning_rate": 2.647117494514263e-05, | |
"loss": 0.0479, | |
"step": 14580 | |
}, | |
{ | |
"epoch": 26.17, | |
"learning_rate": 2.645122681029324e-05, | |
"loss": 0.0745, | |
"step": 14590 | |
}, | |
{ | |
"epoch": 26.19, | |
"learning_rate": 2.643127867544385e-05, | |
"loss": 0.0896, | |
"step": 14600 | |
}, | |
{ | |
"epoch": 26.21, | |
"learning_rate": 2.6411330540594453e-05, | |
"loss": 0.1001, | |
"step": 14610 | |
}, | |
{ | |
"epoch": 26.22, | |
"learning_rate": 2.6391382405745062e-05, | |
"loss": 0.0846, | |
"step": 14620 | |
}, | |
{ | |
"epoch": 26.24, | |
"learning_rate": 2.6371434270895674e-05, | |
"loss": 0.0899, | |
"step": 14630 | |
}, | |
{ | |
"epoch": 26.26, | |
"learning_rate": 2.6351486136046283e-05, | |
"loss": 0.0929, | |
"step": 14640 | |
}, | |
{ | |
"epoch": 26.28, | |
"learning_rate": 2.6331538001196886e-05, | |
"loss": 0.0922, | |
"step": 14650 | |
}, | |
{ | |
"epoch": 26.3, | |
"learning_rate": 2.6311589866347498e-05, | |
"loss": 0.0851, | |
"step": 14660 | |
}, | |
{ | |
"epoch": 26.31, | |
"learning_rate": 2.6291641731498107e-05, | |
"loss": 0.0959, | |
"step": 14670 | |
}, | |
{ | |
"epoch": 26.33, | |
"learning_rate": 2.6271693596648716e-05, | |
"loss": 0.0914, | |
"step": 14680 | |
}, | |
{ | |
"epoch": 26.35, | |
"learning_rate": 2.625174546179932e-05, | |
"loss": 0.0712, | |
"step": 14690 | |
}, | |
{ | |
"epoch": 26.37, | |
"learning_rate": 2.623179732694993e-05, | |
"loss": 0.0908, | |
"step": 14700 | |
}, | |
{ | |
"epoch": 26.39, | |
"learning_rate": 2.621184919210054e-05, | |
"loss": 0.1077, | |
"step": 14710 | |
}, | |
{ | |
"epoch": 26.4, | |
"learning_rate": 2.6191901057251152e-05, | |
"loss": 0.0941, | |
"step": 14720 | |
}, | |
{ | |
"epoch": 26.42, | |
"learning_rate": 2.6171952922401754e-05, | |
"loss": 0.0962, | |
"step": 14730 | |
}, | |
{ | |
"epoch": 26.44, | |
"learning_rate": 2.6152004787552363e-05, | |
"loss": 0.09, | |
"step": 14740 | |
}, | |
{ | |
"epoch": 26.46, | |
"learning_rate": 2.6132056652702975e-05, | |
"loss": 0.0985, | |
"step": 14750 | |
}, | |
{ | |
"epoch": 26.48, | |
"learning_rate": 2.6112108517853584e-05, | |
"loss": 0.0739, | |
"step": 14760 | |
}, | |
{ | |
"epoch": 26.49, | |
"learning_rate": 2.6092160383004187e-05, | |
"loss": 0.0941, | |
"step": 14770 | |
}, | |
{ | |
"epoch": 26.51, | |
"learning_rate": 2.60722122481548e-05, | |
"loss": 0.084, | |
"step": 14780 | |
}, | |
{ | |
"epoch": 26.53, | |
"learning_rate": 2.6052264113305408e-05, | |
"loss": 0.0976, | |
"step": 14790 | |
}, | |
{ | |
"epoch": 26.55, | |
"learning_rate": 2.6032315978456017e-05, | |
"loss": 0.118, | |
"step": 14800 | |
}, | |
{ | |
"epoch": 26.57, | |
"learning_rate": 2.6012367843606623e-05, | |
"loss": 0.0885, | |
"step": 14810 | |
}, | |
{ | |
"epoch": 26.58, | |
"learning_rate": 2.599241970875723e-05, | |
"loss": 0.0868, | |
"step": 14820 | |
}, | |
{ | |
"epoch": 26.6, | |
"learning_rate": 2.597247157390784e-05, | |
"loss": 0.0727, | |
"step": 14830 | |
}, | |
{ | |
"epoch": 26.62, | |
"learning_rate": 2.5952523439058453e-05, | |
"loss": 0.0826, | |
"step": 14840 | |
}, | |
{ | |
"epoch": 26.64, | |
"learning_rate": 2.5932575304209055e-05, | |
"loss": 0.0943, | |
"step": 14850 | |
}, | |
{ | |
"epoch": 26.65, | |
"learning_rate": 2.5912627169359664e-05, | |
"loss": 0.1172, | |
"step": 14860 | |
}, | |
{ | |
"epoch": 26.67, | |
"learning_rate": 2.5892679034510277e-05, | |
"loss": 0.1022, | |
"step": 14870 | |
}, | |
{ | |
"epoch": 26.69, | |
"learning_rate": 2.5872730899660886e-05, | |
"loss": 0.0951, | |
"step": 14880 | |
}, | |
{ | |
"epoch": 26.71, | |
"learning_rate": 2.585278276481149e-05, | |
"loss": 0.0948, | |
"step": 14890 | |
}, | |
{ | |
"epoch": 26.73, | |
"learning_rate": 2.58328346299621e-05, | |
"loss": 0.1103, | |
"step": 14900 | |
}, | |
{ | |
"epoch": 26.74, | |
"learning_rate": 2.581288649511271e-05, | |
"loss": 0.0957, | |
"step": 14910 | |
}, | |
{ | |
"epoch": 26.76, | |
"learning_rate": 2.5792938360263318e-05, | |
"loss": 0.0987, | |
"step": 14920 | |
}, | |
{ | |
"epoch": 26.78, | |
"learning_rate": 2.5772990225413924e-05, | |
"loss": 0.0889, | |
"step": 14930 | |
}, | |
{ | |
"epoch": 26.8, | |
"learning_rate": 2.5753042090564533e-05, | |
"loss": 0.0947, | |
"step": 14940 | |
}, | |
{ | |
"epoch": 26.82, | |
"learning_rate": 2.5733093955715142e-05, | |
"loss": 0.09, | |
"step": 14950 | |
}, | |
{ | |
"epoch": 26.83, | |
"learning_rate": 2.5713145820865754e-05, | |
"loss": 0.0759, | |
"step": 14960 | |
}, | |
{ | |
"epoch": 26.85, | |
"learning_rate": 2.5693197686016356e-05, | |
"loss": 0.0793, | |
"step": 14970 | |
}, | |
{ | |
"epoch": 26.87, | |
"learning_rate": 2.5673249551166965e-05, | |
"loss": 0.0858, | |
"step": 14980 | |
}, | |
{ | |
"epoch": 26.89, | |
"learning_rate": 2.5653301416317578e-05, | |
"loss": 0.0911, | |
"step": 14990 | |
}, | |
{ | |
"epoch": 26.91, | |
"learning_rate": 2.5633353281468187e-05, | |
"loss": 0.0943, | |
"step": 15000 | |
}, | |
{ | |
"epoch": 26.92, | |
"learning_rate": 2.5613405146618792e-05, | |
"loss": 0.0891, | |
"step": 15010 | |
}, | |
{ | |
"epoch": 26.94, | |
"learning_rate": 2.55934570117694e-05, | |
"loss": 0.1016, | |
"step": 15020 | |
}, | |
{ | |
"epoch": 26.96, | |
"learning_rate": 2.557350887692001e-05, | |
"loss": 0.0933, | |
"step": 15030 | |
}, | |
{ | |
"epoch": 26.98, | |
"learning_rate": 2.555356074207062e-05, | |
"loss": 0.0901, | |
"step": 15040 | |
}, | |
{ | |
"epoch": 27.0, | |
"learning_rate": 2.5533612607221225e-05, | |
"loss": 0.0893, | |
"step": 15050 | |
}, | |
{ | |
"epoch": 27.0, | |
"eval_accuracy": { | |
"accuracy": 0.9747701278313523 | |
}, | |
"eval_f1": { | |
"f1": 0.9732384371698068 | |
}, | |
"eval_loss": 0.07595139741897583, | |
"eval_precision": { | |
"precision": 0.973486192026186 | |
}, | |
"eval_recall": { | |
"recall": 0.9730468507730479 | |
}, | |
"eval_runtime": 97.4662, | |
"eval_samples_per_second": 182.997, | |
"eval_steps_per_second": 5.725, | |
"step": 15052 | |
}, | |
{ | |
"epoch": 27.01, | |
"learning_rate": 2.5513664472371834e-05, | |
"loss": 0.0914, | |
"step": 15060 | |
}, | |
{ | |
"epoch": 27.03, | |
"learning_rate": 2.5493716337522443e-05, | |
"loss": 0.1063, | |
"step": 15070 | |
}, | |
{ | |
"epoch": 27.05, | |
"learning_rate": 2.5473768202673055e-05, | |
"loss": 0.0782, | |
"step": 15080 | |
}, | |
{ | |
"epoch": 27.07, | |
"learning_rate": 2.5453820067823658e-05, | |
"loss": 0.0757, | |
"step": 15090 | |
}, | |
{ | |
"epoch": 27.09, | |
"learning_rate": 2.5433871932974267e-05, | |
"loss": 0.0816, | |
"step": 15100 | |
}, | |
{ | |
"epoch": 27.1, | |
"learning_rate": 2.541392379812488e-05, | |
"loss": 0.0967, | |
"step": 15110 | |
}, | |
{ | |
"epoch": 27.12, | |
"learning_rate": 2.5393975663275488e-05, | |
"loss": 0.085, | |
"step": 15120 | |
}, | |
{ | |
"epoch": 27.14, | |
"learning_rate": 2.5374027528426094e-05, | |
"loss": 0.0824, | |
"step": 15130 | |
}, | |
{ | |
"epoch": 27.16, | |
"learning_rate": 2.5354079393576703e-05, | |
"loss": 0.0966, | |
"step": 15140 | |
}, | |
{ | |
"epoch": 27.17, | |
"learning_rate": 2.533413125872731e-05, | |
"loss": 0.1064, | |
"step": 15150 | |
}, | |
{ | |
"epoch": 27.19, | |
"learning_rate": 2.5314183123877917e-05, | |
"loss": 0.0805, | |
"step": 15160 | |
}, | |
{ | |
"epoch": 27.21, | |
"learning_rate": 2.5294234989028526e-05, | |
"loss": 0.1041, | |
"step": 15170 | |
}, | |
{ | |
"epoch": 27.23, | |
"learning_rate": 2.5274286854179135e-05, | |
"loss": 0.0864, | |
"step": 15180 | |
}, | |
{ | |
"epoch": 27.25, | |
"learning_rate": 2.5254338719329744e-05, | |
"loss": 0.0791, | |
"step": 15190 | |
}, | |
{ | |
"epoch": 27.26, | |
"learning_rate": 2.523439058448035e-05, | |
"loss": 0.0682, | |
"step": 15200 | |
}, | |
{ | |
"epoch": 27.28, | |
"learning_rate": 2.521444244963096e-05, | |
"loss": 0.0754, | |
"step": 15210 | |
}, | |
{ | |
"epoch": 27.3, | |
"learning_rate": 2.5194494314781568e-05, | |
"loss": 0.0915, | |
"step": 15220 | |
}, | |
{ | |
"epoch": 27.32, | |
"learning_rate": 2.517454617993218e-05, | |
"loss": 0.0904, | |
"step": 15230 | |
}, | |
{ | |
"epoch": 27.34, | |
"learning_rate": 2.5154598045082782e-05, | |
"loss": 0.0879, | |
"step": 15240 | |
}, | |
{ | |
"epoch": 27.35, | |
"learning_rate": 2.5134649910233395e-05, | |
"loss": 0.0987, | |
"step": 15250 | |
}, | |
{ | |
"epoch": 27.37, | |
"learning_rate": 2.5114701775384004e-05, | |
"loss": 0.0882, | |
"step": 15260 | |
}, | |
{ | |
"epoch": 27.39, | |
"learning_rate": 2.5094753640534613e-05, | |
"loss": 0.0916, | |
"step": 15270 | |
}, | |
{ | |
"epoch": 27.41, | |
"learning_rate": 2.507480550568522e-05, | |
"loss": 0.0761, | |
"step": 15280 | |
}, | |
{ | |
"epoch": 27.43, | |
"learning_rate": 2.5054857370835827e-05, | |
"loss": 0.0926, | |
"step": 15290 | |
}, | |
{ | |
"epoch": 27.44, | |
"learning_rate": 2.5034909235986436e-05, | |
"loss": 0.079, | |
"step": 15300 | |
}, | |
{ | |
"epoch": 27.46, | |
"learning_rate": 2.5014961101137045e-05, | |
"loss": 0.0784, | |
"step": 15310 | |
}, | |
{ | |
"epoch": 27.48, | |
"learning_rate": 2.4995012966287654e-05, | |
"loss": 0.0922, | |
"step": 15320 | |
}, | |
{ | |
"epoch": 27.5, | |
"learning_rate": 2.497506483143826e-05, | |
"loss": 0.0772, | |
"step": 15330 | |
}, | |
{ | |
"epoch": 27.52, | |
"learning_rate": 2.495511669658887e-05, | |
"loss": 0.0697, | |
"step": 15340 | |
}, | |
{ | |
"epoch": 27.53, | |
"learning_rate": 2.4935168561739478e-05, | |
"loss": 0.098, | |
"step": 15350 | |
}, | |
{ | |
"epoch": 27.55, | |
"learning_rate": 2.4915220426890087e-05, | |
"loss": 0.0817, | |
"step": 15360 | |
}, | |
{ | |
"epoch": 27.57, | |
"learning_rate": 2.4895272292040696e-05, | |
"loss": 0.109, | |
"step": 15370 | |
}, | |
{ | |
"epoch": 27.59, | |
"learning_rate": 2.4875324157191305e-05, | |
"loss": 0.0864, | |
"step": 15380 | |
}, | |
{ | |
"epoch": 27.61, | |
"learning_rate": 2.485537602234191e-05, | |
"loss": 0.0856, | |
"step": 15390 | |
}, | |
{ | |
"epoch": 27.62, | |
"learning_rate": 2.483542788749252e-05, | |
"loss": 0.0934, | |
"step": 15400 | |
}, | |
{ | |
"epoch": 27.64, | |
"learning_rate": 2.481547975264313e-05, | |
"loss": 0.0686, | |
"step": 15410 | |
}, | |
{ | |
"epoch": 27.66, | |
"learning_rate": 2.4795531617793737e-05, | |
"loss": 0.0963, | |
"step": 15420 | |
}, | |
{ | |
"epoch": 27.68, | |
"learning_rate": 2.4775583482944346e-05, | |
"loss": 0.1084, | |
"step": 15430 | |
}, | |
{ | |
"epoch": 27.7, | |
"learning_rate": 2.4755635348094955e-05, | |
"loss": 0.0975, | |
"step": 15440 | |
}, | |
{ | |
"epoch": 27.71, | |
"learning_rate": 2.473568721324556e-05, | |
"loss": 0.0881, | |
"step": 15450 | |
}, | |
{ | |
"epoch": 27.73, | |
"learning_rate": 2.471573907839617e-05, | |
"loss": 0.0915, | |
"step": 15460 | |
}, | |
{ | |
"epoch": 27.75, | |
"learning_rate": 2.469579094354678e-05, | |
"loss": 0.0925, | |
"step": 15470 | |
}, | |
{ | |
"epoch": 27.77, | |
"learning_rate": 2.4675842808697388e-05, | |
"loss": 0.0907, | |
"step": 15480 | |
}, | |
{ | |
"epoch": 27.78, | |
"learning_rate": 2.4655894673847997e-05, | |
"loss": 0.0894, | |
"step": 15490 | |
}, | |
{ | |
"epoch": 27.8, | |
"learning_rate": 2.4635946538998606e-05, | |
"loss": 0.105, | |
"step": 15500 | |
}, | |
{ | |
"epoch": 27.82, | |
"learning_rate": 2.461599840414921e-05, | |
"loss": 0.0961, | |
"step": 15510 | |
}, | |
{ | |
"epoch": 27.84, | |
"learning_rate": 2.4596050269299824e-05, | |
"loss": 0.0766, | |
"step": 15520 | |
}, | |
{ | |
"epoch": 27.86, | |
"learning_rate": 2.457610213445043e-05, | |
"loss": 0.0906, | |
"step": 15530 | |
}, | |
{ | |
"epoch": 27.87, | |
"learning_rate": 2.455615399960104e-05, | |
"loss": 0.0943, | |
"step": 15540 | |
}, | |
{ | |
"epoch": 27.89, | |
"learning_rate": 2.4536205864751648e-05, | |
"loss": 0.1001, | |
"step": 15550 | |
}, | |
{ | |
"epoch": 27.91, | |
"learning_rate": 2.4516257729902257e-05, | |
"loss": 0.1162, | |
"step": 15560 | |
}, | |
{ | |
"epoch": 27.93, | |
"learning_rate": 2.4496309595052862e-05, | |
"loss": 0.0848, | |
"step": 15570 | |
}, | |
{ | |
"epoch": 27.95, | |
"learning_rate": 2.4476361460203475e-05, | |
"loss": 0.0888, | |
"step": 15580 | |
}, | |
{ | |
"epoch": 27.96, | |
"learning_rate": 2.445641332535408e-05, | |
"loss": 0.1014, | |
"step": 15590 | |
}, | |
{ | |
"epoch": 27.98, | |
"learning_rate": 2.443646519050469e-05, | |
"loss": 0.0829, | |
"step": 15600 | |
}, | |
{ | |
"epoch": 28.0, | |
"learning_rate": 2.4416517055655298e-05, | |
"loss": 0.0923, | |
"step": 15610 | |
}, | |
{ | |
"epoch": 28.0, | |
"eval_accuracy": { | |
"accuracy": 0.9749943933617403 | |
}, | |
"eval_f1": { | |
"f1": 0.9736564567883176 | |
}, | |
"eval_loss": 0.07191977649927139, | |
"eval_precision": { | |
"precision": 0.9741444996510955 | |
}, | |
"eval_recall": { | |
"recall": 0.9733229541675914 | |
}, | |
"eval_runtime": 98.7577, | |
"eval_samples_per_second": 180.604, | |
"eval_steps_per_second": 5.65, | |
"step": 15610 | |
}, | |
{ | |
"epoch": 28.02, | |
"learning_rate": 2.4396568920805907e-05, | |
"loss": 0.0891, | |
"step": 15620 | |
}, | |
{ | |
"epoch": 28.04, | |
"learning_rate": 2.4376620785956513e-05, | |
"loss": 0.0856, | |
"step": 15630 | |
}, | |
{ | |
"epoch": 28.05, | |
"learning_rate": 2.4356672651107125e-05, | |
"loss": 0.0982, | |
"step": 15640 | |
}, | |
{ | |
"epoch": 28.07, | |
"learning_rate": 2.433672451625773e-05, | |
"loss": 0.0972, | |
"step": 15650 | |
}, | |
{ | |
"epoch": 28.09, | |
"learning_rate": 2.431677638140834e-05, | |
"loss": 0.0742, | |
"step": 15660 | |
}, | |
{ | |
"epoch": 28.11, | |
"learning_rate": 2.429682824655895e-05, | |
"loss": 0.0762, | |
"step": 15670 | |
}, | |
{ | |
"epoch": 28.13, | |
"learning_rate": 2.4276880111709558e-05, | |
"loss": 0.0852, | |
"step": 15680 | |
}, | |
{ | |
"epoch": 28.14, | |
"learning_rate": 2.4256931976860163e-05, | |
"loss": 0.0862, | |
"step": 15690 | |
}, | |
{ | |
"epoch": 28.16, | |
"learning_rate": 2.4236983842010776e-05, | |
"loss": 0.0925, | |
"step": 15700 | |
}, | |
{ | |
"epoch": 28.18, | |
"learning_rate": 2.421703570716138e-05, | |
"loss": 0.0851, | |
"step": 15710 | |
}, | |
{ | |
"epoch": 28.2, | |
"learning_rate": 2.419708757231199e-05, | |
"loss": 0.1033, | |
"step": 15720 | |
}, | |
{ | |
"epoch": 28.22, | |
"learning_rate": 2.41771394374626e-05, | |
"loss": 0.0702, | |
"step": 15730 | |
}, | |
{ | |
"epoch": 28.23, | |
"learning_rate": 2.415719130261321e-05, | |
"loss": 0.1027, | |
"step": 15740 | |
}, | |
{ | |
"epoch": 28.25, | |
"learning_rate": 2.4137243167763814e-05, | |
"loss": 0.079, | |
"step": 15750 | |
}, | |
{ | |
"epoch": 28.27, | |
"learning_rate": 2.4117295032914426e-05, | |
"loss": 0.0696, | |
"step": 15760 | |
}, | |
{ | |
"epoch": 28.29, | |
"learning_rate": 2.4097346898065032e-05, | |
"loss": 0.0768, | |
"step": 15770 | |
}, | |
{ | |
"epoch": 28.3, | |
"learning_rate": 2.407739876321564e-05, | |
"loss": 0.0764, | |
"step": 15780 | |
}, | |
{ | |
"epoch": 28.32, | |
"learning_rate": 2.405745062836625e-05, | |
"loss": 0.0839, | |
"step": 15790 | |
}, | |
{ | |
"epoch": 28.34, | |
"learning_rate": 2.403750249351686e-05, | |
"loss": 0.0998, | |
"step": 15800 | |
}, | |
{ | |
"epoch": 28.36, | |
"learning_rate": 2.4017554358667465e-05, | |
"loss": 0.077, | |
"step": 15810 | |
}, | |
{ | |
"epoch": 28.38, | |
"learning_rate": 2.3997606223818077e-05, | |
"loss": 0.0761, | |
"step": 15820 | |
}, | |
{ | |
"epoch": 28.39, | |
"learning_rate": 2.3977658088968683e-05, | |
"loss": 0.1013, | |
"step": 15830 | |
}, | |
{ | |
"epoch": 28.41, | |
"learning_rate": 2.395770995411929e-05, | |
"loss": 0.0912, | |
"step": 15840 | |
}, | |
{ | |
"epoch": 28.43, | |
"learning_rate": 2.39377618192699e-05, | |
"loss": 0.0708, | |
"step": 15850 | |
}, | |
{ | |
"epoch": 28.45, | |
"learning_rate": 2.391781368442051e-05, | |
"loss": 0.1065, | |
"step": 15860 | |
}, | |
{ | |
"epoch": 28.47, | |
"learning_rate": 2.3897865549571115e-05, | |
"loss": 0.1141, | |
"step": 15870 | |
}, | |
{ | |
"epoch": 28.48, | |
"learning_rate": 2.3877917414721724e-05, | |
"loss": 0.0713, | |
"step": 15880 | |
}, | |
{ | |
"epoch": 28.5, | |
"learning_rate": 2.3857969279872333e-05, | |
"loss": 0.0758, | |
"step": 15890 | |
}, | |
{ | |
"epoch": 28.52, | |
"learning_rate": 2.383802114502294e-05, | |
"loss": 0.1108, | |
"step": 15900 | |
}, | |
{ | |
"epoch": 28.54, | |
"learning_rate": 2.381807301017355e-05, | |
"loss": 0.0968, | |
"step": 15910 | |
}, | |
{ | |
"epoch": 28.56, | |
"learning_rate": 2.3798124875324157e-05, | |
"loss": 0.0918, | |
"step": 15920 | |
}, | |
{ | |
"epoch": 28.57, | |
"learning_rate": 2.3778176740474766e-05, | |
"loss": 0.0972, | |
"step": 15930 | |
}, | |
{ | |
"epoch": 28.59, | |
"learning_rate": 2.3758228605625375e-05, | |
"loss": 0.0918, | |
"step": 15940 | |
}, | |
{ | |
"epoch": 28.61, | |
"learning_rate": 2.3738280470775984e-05, | |
"loss": 0.1135, | |
"step": 15950 | |
}, | |
{ | |
"epoch": 28.63, | |
"learning_rate": 2.371833233592659e-05, | |
"loss": 0.0937, | |
"step": 15960 | |
}, | |
{ | |
"epoch": 28.65, | |
"learning_rate": 2.3698384201077202e-05, | |
"loss": 0.0715, | |
"step": 15970 | |
}, | |
{ | |
"epoch": 28.66, | |
"learning_rate": 2.3678436066227807e-05, | |
"loss": 0.0842, | |
"step": 15980 | |
}, | |
{ | |
"epoch": 28.68, | |
"learning_rate": 2.3658487931378416e-05, | |
"loss": 0.0869, | |
"step": 15990 | |
}, | |
{ | |
"epoch": 28.7, | |
"learning_rate": 2.3638539796529025e-05, | |
"loss": 0.0811, | |
"step": 16000 | |
}, | |
{ | |
"epoch": 28.72, | |
"learning_rate": 2.3618591661679634e-05, | |
"loss": 0.0934, | |
"step": 16010 | |
}, | |
{ | |
"epoch": 28.74, | |
"learning_rate": 2.359864352683024e-05, | |
"loss": 0.0791, | |
"step": 16020 | |
}, | |
{ | |
"epoch": 28.75, | |
"learning_rate": 2.3578695391980852e-05, | |
"loss": 0.0938, | |
"step": 16030 | |
}, | |
{ | |
"epoch": 28.77, | |
"learning_rate": 2.3558747257131458e-05, | |
"loss": 0.0813, | |
"step": 16040 | |
}, | |
{ | |
"epoch": 28.79, | |
"learning_rate": 2.3538799122282067e-05, | |
"loss": 0.0902, | |
"step": 16050 | |
}, | |
{ | |
"epoch": 28.81, | |
"learning_rate": 2.3518850987432676e-05, | |
"loss": 0.0712, | |
"step": 16060 | |
}, | |
{ | |
"epoch": 28.83, | |
"learning_rate": 2.3498902852583285e-05, | |
"loss": 0.0964, | |
"step": 16070 | |
}, | |
{ | |
"epoch": 28.84, | |
"learning_rate": 2.347895471773389e-05, | |
"loss": 0.0782, | |
"step": 16080 | |
}, | |
{ | |
"epoch": 28.86, | |
"learning_rate": 2.3459006582884503e-05, | |
"loss": 0.0876, | |
"step": 16090 | |
}, | |
{ | |
"epoch": 28.88, | |
"learning_rate": 2.343905844803511e-05, | |
"loss": 0.0926, | |
"step": 16100 | |
}, | |
{ | |
"epoch": 28.9, | |
"learning_rate": 2.3419110313185717e-05, | |
"loss": 0.0797, | |
"step": 16110 | |
}, | |
{ | |
"epoch": 28.91, | |
"learning_rate": 2.3399162178336326e-05, | |
"loss": 0.0912, | |
"step": 16120 | |
}, | |
{ | |
"epoch": 28.93, | |
"learning_rate": 2.3379214043486935e-05, | |
"loss": 0.0835, | |
"step": 16130 | |
}, | |
{ | |
"epoch": 28.95, | |
"learning_rate": 2.335926590863754e-05, | |
"loss": 0.0671, | |
"step": 16140 | |
}, | |
{ | |
"epoch": 28.97, | |
"learning_rate": 2.3339317773788153e-05, | |
"loss": 0.083, | |
"step": 16150 | |
}, | |
{ | |
"epoch": 28.99, | |
"learning_rate": 2.331936963893876e-05, | |
"loss": 0.0768, | |
"step": 16160 | |
}, | |
{ | |
"epoch": 29.0, | |
"eval_accuracy": { | |
"accuracy": 0.9791433056739179 | |
}, | |
"eval_f1": { | |
"f1": 0.9780302300119661 | |
}, | |
"eval_loss": 0.06340872496366501, | |
"eval_precision": { | |
"precision": 0.978505403264897 | |
}, | |
"eval_recall": { | |
"recall": 0.9775669866728356 | |
}, | |
"eval_runtime": 99.5076, | |
"eval_samples_per_second": 179.243, | |
"eval_steps_per_second": 5.608, | |
"step": 16167 | |
}, | |
{ | |
"epoch": 29.0, | |
"learning_rate": 2.3299421504089368e-05, | |
"loss": 0.0781, | |
"step": 16170 | |
}, | |
{ | |
"epoch": 29.02, | |
"learning_rate": 2.3279473369239977e-05, | |
"loss": 0.0743, | |
"step": 16180 | |
}, | |
{ | |
"epoch": 29.04, | |
"learning_rate": 2.3259525234390586e-05, | |
"loss": 0.0736, | |
"step": 16190 | |
}, | |
{ | |
"epoch": 29.06, | |
"learning_rate": 2.323957709954119e-05, | |
"loss": 0.0937, | |
"step": 16200 | |
}, | |
{ | |
"epoch": 29.08, | |
"learning_rate": 2.3219628964691804e-05, | |
"loss": 0.0962, | |
"step": 16210 | |
}, | |
{ | |
"epoch": 29.09, | |
"learning_rate": 2.319968082984241e-05, | |
"loss": 0.0963, | |
"step": 16220 | |
}, | |
{ | |
"epoch": 29.11, | |
"learning_rate": 2.317973269499302e-05, | |
"loss": 0.0827, | |
"step": 16230 | |
}, | |
{ | |
"epoch": 29.13, | |
"learning_rate": 2.3159784560143628e-05, | |
"loss": 0.0973, | |
"step": 16240 | |
}, | |
{ | |
"epoch": 29.15, | |
"learning_rate": 2.3139836425294237e-05, | |
"loss": 0.077, | |
"step": 16250 | |
}, | |
{ | |
"epoch": 29.17, | |
"learning_rate": 2.3119888290444842e-05, | |
"loss": 0.0793, | |
"step": 16260 | |
}, | |
{ | |
"epoch": 29.18, | |
"learning_rate": 2.3099940155595455e-05, | |
"loss": 0.0796, | |
"step": 16270 | |
}, | |
{ | |
"epoch": 29.2, | |
"learning_rate": 2.307999202074606e-05, | |
"loss": 0.0982, | |
"step": 16280 | |
}, | |
{ | |
"epoch": 29.22, | |
"learning_rate": 2.306004388589667e-05, | |
"loss": 0.0907, | |
"step": 16290 | |
}, | |
{ | |
"epoch": 29.24, | |
"learning_rate": 2.3040095751047278e-05, | |
"loss": 0.0757, | |
"step": 16300 | |
}, | |
{ | |
"epoch": 29.26, | |
"learning_rate": 2.3020147616197887e-05, | |
"loss": 0.0861, | |
"step": 16310 | |
}, | |
{ | |
"epoch": 29.27, | |
"learning_rate": 2.3000199481348493e-05, | |
"loss": 0.0804, | |
"step": 16320 | |
}, | |
{ | |
"epoch": 29.29, | |
"learning_rate": 2.2980251346499105e-05, | |
"loss": 0.075, | |
"step": 16330 | |
}, | |
{ | |
"epoch": 29.31, | |
"learning_rate": 2.296030321164971e-05, | |
"loss": 0.0789, | |
"step": 16340 | |
}, | |
{ | |
"epoch": 29.33, | |
"learning_rate": 2.294035507680032e-05, | |
"loss": 0.0978, | |
"step": 16350 | |
}, | |
{ | |
"epoch": 29.35, | |
"learning_rate": 2.292040694195093e-05, | |
"loss": 0.0877, | |
"step": 16360 | |
}, | |
{ | |
"epoch": 29.36, | |
"learning_rate": 2.2900458807101538e-05, | |
"loss": 0.0733, | |
"step": 16370 | |
}, | |
{ | |
"epoch": 29.38, | |
"learning_rate": 2.2880510672252143e-05, | |
"loss": 0.0758, | |
"step": 16380 | |
}, | |
{ | |
"epoch": 29.4, | |
"learning_rate": 2.2860562537402756e-05, | |
"loss": 0.0865, | |
"step": 16390 | |
}, | |
{ | |
"epoch": 29.42, | |
"learning_rate": 2.284061440255336e-05, | |
"loss": 0.0959, | |
"step": 16400 | |
}, | |
{ | |
"epoch": 29.43, | |
"learning_rate": 2.282066626770397e-05, | |
"loss": 0.1044, | |
"step": 16410 | |
}, | |
{ | |
"epoch": 29.45, | |
"learning_rate": 2.280071813285458e-05, | |
"loss": 0.0915, | |
"step": 16420 | |
}, | |
{ | |
"epoch": 29.47, | |
"learning_rate": 2.278076999800519e-05, | |
"loss": 0.0896, | |
"step": 16430 | |
}, | |
{ | |
"epoch": 29.49, | |
"learning_rate": 2.2760821863155797e-05, | |
"loss": 0.0659, | |
"step": 16440 | |
}, | |
{ | |
"epoch": 29.51, | |
"learning_rate": 2.2740873728306406e-05, | |
"loss": 0.0824, | |
"step": 16450 | |
}, | |
{ | |
"epoch": 29.52, | |
"learning_rate": 2.2720925593457012e-05, | |
"loss": 0.0822, | |
"step": 16460 | |
}, | |
{ | |
"epoch": 29.54, | |
"learning_rate": 2.270097745860762e-05, | |
"loss": 0.0931, | |
"step": 16470 | |
}, | |
{ | |
"epoch": 29.56, | |
"learning_rate": 2.268102932375823e-05, | |
"loss": 0.0978, | |
"step": 16480 | |
}, | |
{ | |
"epoch": 29.58, | |
"learning_rate": 2.266108118890884e-05, | |
"loss": 0.0895, | |
"step": 16490 | |
}, | |
{ | |
"epoch": 29.6, | |
"learning_rate": 2.2641133054059448e-05, | |
"loss": 0.0843, | |
"step": 16500 | |
}, | |
{ | |
"epoch": 29.61, | |
"learning_rate": 2.2621184919210057e-05, | |
"loss": 0.0757, | |
"step": 16510 | |
}, | |
{ | |
"epoch": 29.63, | |
"learning_rate": 2.2601236784360663e-05, | |
"loss": 0.0691, | |
"step": 16520 | |
}, | |
{ | |
"epoch": 29.65, | |
"learning_rate": 2.258128864951127e-05, | |
"loss": 0.0831, | |
"step": 16530 | |
}, | |
{ | |
"epoch": 29.67, | |
"learning_rate": 2.256134051466188e-05, | |
"loss": 0.0864, | |
"step": 16540 | |
}, | |
{ | |
"epoch": 29.69, | |
"learning_rate": 2.254139237981249e-05, | |
"loss": 0.0858, | |
"step": 16550 | |
}, | |
{ | |
"epoch": 29.7, | |
"learning_rate": 2.25214442449631e-05, | |
"loss": 0.085, | |
"step": 16560 | |
}, | |
{ | |
"epoch": 29.72, | |
"learning_rate": 2.2501496110113708e-05, | |
"loss": 0.0811, | |
"step": 16570 | |
}, | |
{ | |
"epoch": 29.74, | |
"learning_rate": 2.2481547975264313e-05, | |
"loss": 0.0764, | |
"step": 16580 | |
}, | |
{ | |
"epoch": 29.76, | |
"learning_rate": 2.2461599840414922e-05, | |
"loss": 0.0872, | |
"step": 16590 | |
}, | |
{ | |
"epoch": 29.78, | |
"learning_rate": 2.244165170556553e-05, | |
"loss": 0.0824, | |
"step": 16600 | |
}, | |
{ | |
"epoch": 29.79, | |
"learning_rate": 2.242170357071614e-05, | |
"loss": 0.0896, | |
"step": 16610 | |
}, | |
{ | |
"epoch": 29.81, | |
"learning_rate": 2.240175543586675e-05, | |
"loss": 0.0951, | |
"step": 16620 | |
}, | |
{ | |
"epoch": 29.83, | |
"learning_rate": 2.2381807301017355e-05, | |
"loss": 0.0897, | |
"step": 16630 | |
}, | |
{ | |
"epoch": 29.85, | |
"learning_rate": 2.2361859166167964e-05, | |
"loss": 0.0982, | |
"step": 16640 | |
}, | |
{ | |
"epoch": 29.87, | |
"learning_rate": 2.2341911031318573e-05, | |
"loss": 0.0676, | |
"step": 16650 | |
}, | |
{ | |
"epoch": 29.88, | |
"learning_rate": 2.2321962896469182e-05, | |
"loss": 0.0936, | |
"step": 16660 | |
}, | |
{ | |
"epoch": 29.9, | |
"learning_rate": 2.2302014761619787e-05, | |
"loss": 0.0994, | |
"step": 16670 | |
}, | |
{ | |
"epoch": 29.92, | |
"learning_rate": 2.22820666267704e-05, | |
"loss": 0.0846, | |
"step": 16680 | |
}, | |
{ | |
"epoch": 29.94, | |
"learning_rate": 2.2262118491921005e-05, | |
"loss": 0.0927, | |
"step": 16690 | |
}, | |
{ | |
"epoch": 29.96, | |
"learning_rate": 2.2242170357071614e-05, | |
"loss": 0.0808, | |
"step": 16700 | |
}, | |
{ | |
"epoch": 29.97, | |
"learning_rate": 2.2222222222222223e-05, | |
"loss": 0.0844, | |
"step": 16710 | |
}, | |
{ | |
"epoch": 29.99, | |
"learning_rate": 2.2202274087372832e-05, | |
"loss": 0.088, | |
"step": 16720 | |
}, | |
{ | |
"epoch": 30.0, | |
"eval_accuracy": { | |
"accuracy": 0.9776295133437991 | |
}, | |
"eval_f1": { | |
"f1": 0.976554836667848 | |
}, | |
"eval_loss": 0.06922875344753265, | |
"eval_precision": { | |
"precision": 0.9771869164236219 | |
}, | |
"eval_recall": { | |
"recall": 0.9759720463645606 | |
}, | |
"eval_runtime": 100.0445, | |
"eval_samples_per_second": 178.281, | |
"eval_steps_per_second": 5.578, | |
"step": 16725 | |
}, | |
{ | |
"epoch": 30.01, | |
"learning_rate": 2.2182325952523438e-05, | |
"loss": 0.0917, | |
"step": 16730 | |
}, | |
{ | |
"epoch": 30.03, | |
"learning_rate": 2.216237781767405e-05, | |
"loss": 0.0943, | |
"step": 16740 | |
}, | |
{ | |
"epoch": 30.04, | |
"learning_rate": 2.2142429682824656e-05, | |
"loss": 0.0827, | |
"step": 16750 | |
}, | |
{ | |
"epoch": 30.06, | |
"learning_rate": 2.2122481547975265e-05, | |
"loss": 0.0722, | |
"step": 16760 | |
}, | |
{ | |
"epoch": 30.08, | |
"learning_rate": 2.2102533413125874e-05, | |
"loss": 0.086, | |
"step": 16770 | |
}, | |
{ | |
"epoch": 30.1, | |
"learning_rate": 2.2082585278276483e-05, | |
"loss": 0.087, | |
"step": 16780 | |
}, | |
{ | |
"epoch": 30.12, | |
"learning_rate": 2.206263714342709e-05, | |
"loss": 0.0975, | |
"step": 16790 | |
}, | |
{ | |
"epoch": 30.13, | |
"learning_rate": 2.20426890085777e-05, | |
"loss": 0.062, | |
"step": 16800 | |
}, | |
{ | |
"epoch": 30.15, | |
"learning_rate": 2.2022740873728306e-05, | |
"loss": 0.0903, | |
"step": 16810 | |
}, | |
{ | |
"epoch": 30.17, | |
"learning_rate": 2.2002792738878915e-05, | |
"loss": 0.0657, | |
"step": 16820 | |
}, | |
{ | |
"epoch": 30.19, | |
"learning_rate": 2.1982844604029524e-05, | |
"loss": 0.0658, | |
"step": 16830 | |
}, | |
{ | |
"epoch": 30.21, | |
"learning_rate": 2.1962896469180133e-05, | |
"loss": 0.0765, | |
"step": 16840 | |
}, | |
{ | |
"epoch": 30.22, | |
"learning_rate": 2.194294833433074e-05, | |
"loss": 0.0869, | |
"step": 16850 | |
}, | |
{ | |
"epoch": 30.24, | |
"learning_rate": 2.192300019948135e-05, | |
"loss": 0.0914, | |
"step": 16860 | |
}, | |
{ | |
"epoch": 30.26, | |
"learning_rate": 2.1903052064631957e-05, | |
"loss": 0.0908, | |
"step": 16870 | |
}, | |
{ | |
"epoch": 30.28, | |
"learning_rate": 2.1883103929782566e-05, | |
"loss": 0.078, | |
"step": 16880 | |
}, | |
{ | |
"epoch": 30.3, | |
"learning_rate": 2.1863155794933175e-05, | |
"loss": 0.0879, | |
"step": 16890 | |
}, | |
{ | |
"epoch": 30.31, | |
"learning_rate": 2.1843207660083784e-05, | |
"loss": 0.0764, | |
"step": 16900 | |
}, | |
{ | |
"epoch": 30.33, | |
"learning_rate": 2.182325952523439e-05, | |
"loss": 0.0737, | |
"step": 16910 | |
}, | |
{ | |
"epoch": 30.35, | |
"learning_rate": 2.1803311390385002e-05, | |
"loss": 0.0997, | |
"step": 16920 | |
}, | |
{ | |
"epoch": 30.37, | |
"learning_rate": 2.1783363255535608e-05, | |
"loss": 0.0782, | |
"step": 16930 | |
}, | |
{ | |
"epoch": 30.39, | |
"learning_rate": 2.1763415120686217e-05, | |
"loss": 0.0913, | |
"step": 16940 | |
}, | |
{ | |
"epoch": 30.4, | |
"learning_rate": 2.1743466985836826e-05, | |
"loss": 0.0817, | |
"step": 16950 | |
}, | |
{ | |
"epoch": 30.42, | |
"learning_rate": 2.1723518850987435e-05, | |
"loss": 0.0695, | |
"step": 16960 | |
}, | |
{ | |
"epoch": 30.44, | |
"learning_rate": 2.170357071613804e-05, | |
"loss": 0.0968, | |
"step": 16970 | |
}, | |
{ | |
"epoch": 30.46, | |
"learning_rate": 2.1683622581288653e-05, | |
"loss": 0.0878, | |
"step": 16980 | |
}, | |
{ | |
"epoch": 30.48, | |
"learning_rate": 2.1663674446439258e-05, | |
"loss": 0.0736, | |
"step": 16990 | |
}, | |
{ | |
"epoch": 30.49, | |
"learning_rate": 2.1643726311589867e-05, | |
"loss": 0.0767, | |
"step": 17000 | |
}, | |
{ | |
"epoch": 30.51, | |
"learning_rate": 2.1623778176740476e-05, | |
"loss": 0.0908, | |
"step": 17010 | |
}, | |
{ | |
"epoch": 30.53, | |
"learning_rate": 2.1603830041891085e-05, | |
"loss": 0.0933, | |
"step": 17020 | |
}, | |
{ | |
"epoch": 30.55, | |
"learning_rate": 2.158388190704169e-05, | |
"loss": 0.0987, | |
"step": 17030 | |
}, | |
{ | |
"epoch": 30.57, | |
"learning_rate": 2.1563933772192303e-05, | |
"loss": 0.0974, | |
"step": 17040 | |
}, | |
{ | |
"epoch": 30.58, | |
"learning_rate": 2.154398563734291e-05, | |
"loss": 0.0858, | |
"step": 17050 | |
}, | |
{ | |
"epoch": 30.6, | |
"learning_rate": 2.1524037502493518e-05, | |
"loss": 0.0724, | |
"step": 17060 | |
}, | |
{ | |
"epoch": 30.62, | |
"learning_rate": 2.1504089367644127e-05, | |
"loss": 0.0712, | |
"step": 17070 | |
}, | |
{ | |
"epoch": 30.64, | |
"learning_rate": 2.1484141232794736e-05, | |
"loss": 0.0774, | |
"step": 17080 | |
}, | |
{ | |
"epoch": 30.65, | |
"learning_rate": 2.146419309794534e-05, | |
"loss": 0.0625, | |
"step": 17090 | |
}, | |
{ | |
"epoch": 30.67, | |
"learning_rate": 2.1444244963095954e-05, | |
"loss": 0.0838, | |
"step": 17100 | |
}, | |
{ | |
"epoch": 30.69, | |
"learning_rate": 2.142429682824656e-05, | |
"loss": 0.0825, | |
"step": 17110 | |
}, | |
{ | |
"epoch": 30.71, | |
"learning_rate": 2.140434869339717e-05, | |
"loss": 0.0956, | |
"step": 17120 | |
}, | |
{ | |
"epoch": 30.73, | |
"learning_rate": 2.1384400558547777e-05, | |
"loss": 0.0808, | |
"step": 17130 | |
}, | |
{ | |
"epoch": 30.74, | |
"learning_rate": 2.1364452423698386e-05, | |
"loss": 0.0755, | |
"step": 17140 | |
}, | |
{ | |
"epoch": 30.76, | |
"learning_rate": 2.1344504288848992e-05, | |
"loss": 0.0945, | |
"step": 17150 | |
}, | |
{ | |
"epoch": 30.78, | |
"learning_rate": 2.1324556153999604e-05, | |
"loss": 0.0858, | |
"step": 17160 | |
}, | |
{ | |
"epoch": 30.8, | |
"learning_rate": 2.130460801915021e-05, | |
"loss": 0.0654, | |
"step": 17170 | |
}, | |
{ | |
"epoch": 30.82, | |
"learning_rate": 2.128465988430082e-05, | |
"loss": 0.0781, | |
"step": 17180 | |
}, | |
{ | |
"epoch": 30.83, | |
"learning_rate": 2.1264711749451428e-05, | |
"loss": 0.0863, | |
"step": 17190 | |
}, | |
{ | |
"epoch": 30.85, | |
"learning_rate": 2.1244763614602037e-05, | |
"loss": 0.0885, | |
"step": 17200 | |
}, | |
{ | |
"epoch": 30.87, | |
"learning_rate": 2.1224815479752643e-05, | |
"loss": 0.0987, | |
"step": 17210 | |
}, | |
{ | |
"epoch": 30.89, | |
"learning_rate": 2.1204867344903255e-05, | |
"loss": 0.0896, | |
"step": 17220 | |
}, | |
{ | |
"epoch": 30.91, | |
"learning_rate": 2.118491921005386e-05, | |
"loss": 0.0724, | |
"step": 17230 | |
}, | |
{ | |
"epoch": 30.92, | |
"learning_rate": 2.116497107520447e-05, | |
"loss": 0.0677, | |
"step": 17240 | |
}, | |
{ | |
"epoch": 30.94, | |
"learning_rate": 2.114502294035508e-05, | |
"loss": 0.1029, | |
"step": 17250 | |
}, | |
{ | |
"epoch": 30.96, | |
"learning_rate": 2.1125074805505688e-05, | |
"loss": 0.0757, | |
"step": 17260 | |
}, | |
{ | |
"epoch": 30.98, | |
"learning_rate": 2.1105126670656293e-05, | |
"loss": 0.0761, | |
"step": 17270 | |
}, | |
{ | |
"epoch": 31.0, | |
"learning_rate": 2.1085178535806906e-05, | |
"loss": 0.0935, | |
"step": 17280 | |
}, | |
{ | |
"epoch": 31.0, | |
"eval_accuracy": { | |
"accuracy": 0.977517380578605 | |
}, | |
"eval_f1": { | |
"f1": 0.9763918699196389 | |
}, | |
"eval_loss": 0.07163029164075851, | |
"eval_precision": { | |
"precision": 0.9768201951102174 | |
}, | |
"eval_recall": { | |
"recall": 0.976012656434153 | |
}, | |
"eval_runtime": 100.1681, | |
"eval_samples_per_second": 178.061, | |
"eval_steps_per_second": 5.571, | |
"step": 17282 | |
}, | |
{ | |
"epoch": 31.01, | |
"learning_rate": 2.106523040095751e-05, | |
"loss": 0.0866, | |
"step": 17290 | |
}, | |
{ | |
"epoch": 31.03, | |
"learning_rate": 2.104528226610812e-05, | |
"loss": 0.0566, | |
"step": 17300 | |
}, | |
{ | |
"epoch": 31.05, | |
"learning_rate": 2.102533413125873e-05, | |
"loss": 0.065, | |
"step": 17310 | |
}, | |
{ | |
"epoch": 31.07, | |
"learning_rate": 2.1005385996409338e-05, | |
"loss": 0.0791, | |
"step": 17320 | |
}, | |
{ | |
"epoch": 31.09, | |
"learning_rate": 2.0985437861559944e-05, | |
"loss": 0.0944, | |
"step": 17330 | |
}, | |
{ | |
"epoch": 31.1, | |
"learning_rate": 2.0965489726710556e-05, | |
"loss": 0.0548, | |
"step": 17340 | |
}, | |
{ | |
"epoch": 31.12, | |
"learning_rate": 2.0945541591861162e-05, | |
"loss": 0.0797, | |
"step": 17350 | |
}, | |
{ | |
"epoch": 31.14, | |
"learning_rate": 2.092559345701177e-05, | |
"loss": 0.0795, | |
"step": 17360 | |
}, | |
{ | |
"epoch": 31.16, | |
"learning_rate": 2.090564532216238e-05, | |
"loss": 0.0815, | |
"step": 17370 | |
}, | |
{ | |
"epoch": 31.17, | |
"learning_rate": 2.0885697187312985e-05, | |
"loss": 0.0939, | |
"step": 17380 | |
}, | |
{ | |
"epoch": 31.19, | |
"learning_rate": 2.0865749052463594e-05, | |
"loss": 0.0697, | |
"step": 17390 | |
}, | |
{ | |
"epoch": 31.21, | |
"learning_rate": 2.0845800917614203e-05, | |
"loss": 0.0908, | |
"step": 17400 | |
}, | |
{ | |
"epoch": 31.23, | |
"learning_rate": 2.0825852782764812e-05, | |
"loss": 0.0741, | |
"step": 17410 | |
}, | |
{ | |
"epoch": 31.25, | |
"learning_rate": 2.080590464791542e-05, | |
"loss": 0.0641, | |
"step": 17420 | |
}, | |
{ | |
"epoch": 31.26, | |
"learning_rate": 2.078595651306603e-05, | |
"loss": 0.0605, | |
"step": 17430 | |
}, | |
{ | |
"epoch": 31.28, | |
"learning_rate": 2.0766008378216636e-05, | |
"loss": 0.0607, | |
"step": 17440 | |
}, | |
{ | |
"epoch": 31.3, | |
"learning_rate": 2.0746060243367245e-05, | |
"loss": 0.0786, | |
"step": 17450 | |
}, | |
{ | |
"epoch": 31.32, | |
"learning_rate": 2.0726112108517854e-05, | |
"loss": 0.0845, | |
"step": 17460 | |
}, | |
{ | |
"epoch": 31.34, | |
"learning_rate": 2.0706163973668463e-05, | |
"loss": 0.0793, | |
"step": 17470 | |
}, | |
{ | |
"epoch": 31.35, | |
"learning_rate": 2.0686215838819072e-05, | |
"loss": 0.0839, | |
"step": 17480 | |
}, | |
{ | |
"epoch": 31.37, | |
"learning_rate": 2.066626770396968e-05, | |
"loss": 0.081, | |
"step": 17490 | |
}, | |
{ | |
"epoch": 31.39, | |
"learning_rate": 2.0646319569120286e-05, | |
"loss": 0.0746, | |
"step": 17500 | |
}, | |
{ | |
"epoch": 31.41, | |
"learning_rate": 2.0626371434270895e-05, | |
"loss": 0.0914, | |
"step": 17510 | |
}, | |
{ | |
"epoch": 31.43, | |
"learning_rate": 2.0606423299421504e-05, | |
"loss": 0.1103, | |
"step": 17520 | |
}, | |
{ | |
"epoch": 31.44, | |
"learning_rate": 2.0586475164572113e-05, | |
"loss": 0.0793, | |
"step": 17530 | |
}, | |
{ | |
"epoch": 31.46, | |
"learning_rate": 2.0566527029722722e-05, | |
"loss": 0.1049, | |
"step": 17540 | |
}, | |
{ | |
"epoch": 31.48, | |
"learning_rate": 2.054657889487333e-05, | |
"loss": 0.0953, | |
"step": 17550 | |
}, | |
{ | |
"epoch": 31.5, | |
"learning_rate": 2.0526630760023937e-05, | |
"loss": 0.0792, | |
"step": 17560 | |
}, | |
{ | |
"epoch": 31.52, | |
"learning_rate": 2.0506682625174546e-05, | |
"loss": 0.0732, | |
"step": 17570 | |
}, | |
{ | |
"epoch": 31.53, | |
"learning_rate": 2.0486734490325155e-05, | |
"loss": 0.0775, | |
"step": 17580 | |
}, | |
{ | |
"epoch": 31.55, | |
"learning_rate": 2.0466786355475764e-05, | |
"loss": 0.0892, | |
"step": 17590 | |
}, | |
{ | |
"epoch": 31.57, | |
"learning_rate": 2.0446838220626373e-05, | |
"loss": 0.0863, | |
"step": 17600 | |
}, | |
{ | |
"epoch": 31.59, | |
"learning_rate": 2.0426890085776982e-05, | |
"loss": 0.0891, | |
"step": 17610 | |
}, | |
{ | |
"epoch": 31.61, | |
"learning_rate": 2.0406941950927588e-05, | |
"loss": 0.0874, | |
"step": 17620 | |
}, | |
{ | |
"epoch": 31.62, | |
"learning_rate": 2.0386993816078197e-05, | |
"loss": 0.0829, | |
"step": 17630 | |
}, | |
{ | |
"epoch": 31.64, | |
"learning_rate": 2.0367045681228806e-05, | |
"loss": 0.0749, | |
"step": 17640 | |
}, | |
{ | |
"epoch": 31.66, | |
"learning_rate": 2.0347097546379415e-05, | |
"loss": 0.0601, | |
"step": 17650 | |
}, | |
{ | |
"epoch": 31.68, | |
"learning_rate": 2.0327149411530024e-05, | |
"loss": 0.0816, | |
"step": 17660 | |
}, | |
{ | |
"epoch": 31.7, | |
"learning_rate": 2.0307201276680633e-05, | |
"loss": 0.0826, | |
"step": 17670 | |
}, | |
{ | |
"epoch": 31.71, | |
"learning_rate": 2.0287253141831238e-05, | |
"loss": 0.093, | |
"step": 17680 | |
}, | |
{ | |
"epoch": 31.73, | |
"learning_rate": 2.0267305006981847e-05, | |
"loss": 0.1012, | |
"step": 17690 | |
}, | |
{ | |
"epoch": 31.75, | |
"learning_rate": 2.0247356872132456e-05, | |
"loss": 0.1079, | |
"step": 17700 | |
}, | |
{ | |
"epoch": 31.77, | |
"learning_rate": 2.0227408737283065e-05, | |
"loss": 0.0718, | |
"step": 17710 | |
}, | |
{ | |
"epoch": 31.78, | |
"learning_rate": 2.0207460602433674e-05, | |
"loss": 0.074, | |
"step": 17720 | |
}, | |
{ | |
"epoch": 31.8, | |
"learning_rate": 2.0187512467584283e-05, | |
"loss": 0.0854, | |
"step": 17730 | |
}, | |
{ | |
"epoch": 31.82, | |
"learning_rate": 2.016756433273489e-05, | |
"loss": 0.0536, | |
"step": 17740 | |
}, | |
{ | |
"epoch": 31.84, | |
"learning_rate": 2.0147616197885498e-05, | |
"loss": 0.0647, | |
"step": 17750 | |
}, | |
{ | |
"epoch": 31.86, | |
"learning_rate": 2.0127668063036107e-05, | |
"loss": 0.0685, | |
"step": 17760 | |
}, | |
{ | |
"epoch": 31.87, | |
"learning_rate": 2.0107719928186716e-05, | |
"loss": 0.0635, | |
"step": 17770 | |
}, | |
{ | |
"epoch": 31.89, | |
"learning_rate": 2.0087771793337325e-05, | |
"loss": 0.0562, | |
"step": 17780 | |
}, | |
{ | |
"epoch": 31.91, | |
"learning_rate": 2.0067823658487934e-05, | |
"loss": 0.0766, | |
"step": 17790 | |
}, | |
{ | |
"epoch": 31.93, | |
"learning_rate": 2.004787552363854e-05, | |
"loss": 0.0658, | |
"step": 17800 | |
}, | |
{ | |
"epoch": 31.95, | |
"learning_rate": 2.002792738878915e-05, | |
"loss": 0.0896, | |
"step": 17810 | |
}, | |
{ | |
"epoch": 31.96, | |
"learning_rate": 2.0007979253939757e-05, | |
"loss": 0.0783, | |
"step": 17820 | |
}, | |
{ | |
"epoch": 31.98, | |
"learning_rate": 1.9988031119090366e-05, | |
"loss": 0.0763, | |
"step": 17830 | |
}, | |
{ | |
"epoch": 32.0, | |
"learning_rate": 1.9968082984240975e-05, | |
"loss": 0.0784, | |
"step": 17840 | |
}, | |
{ | |
"epoch": 32.0, | |
"eval_accuracy": { | |
"accuracy": 0.9769567167526351 | |
}, | |
"eval_f1": { | |
"f1": 0.9757421921153259 | |
}, | |
"eval_loss": 0.07629601657390594, | |
"eval_precision": { | |
"precision": 0.9761298359346052 | |
}, | |
"eval_recall": { | |
"recall": 0.975422207744439 | |
}, | |
"eval_runtime": 99.8719, | |
"eval_samples_per_second": 178.589, | |
"eval_steps_per_second": 5.587, | |
"step": 17840 | |
}, | |
{ | |
"epoch": 32.02, | |
"learning_rate": 1.9948134849391584e-05, | |
"loss": 0.0733, | |
"step": 17850 | |
}, | |
{ | |
"epoch": 32.04, | |
"learning_rate": 1.992818671454219e-05, | |
"loss": 0.0699, | |
"step": 17860 | |
}, | |
{ | |
"epoch": 32.05, | |
"learning_rate": 1.99082385796928e-05, | |
"loss": 0.0934, | |
"step": 17870 | |
}, | |
{ | |
"epoch": 32.07, | |
"learning_rate": 1.9888290444843408e-05, | |
"loss": 0.0835, | |
"step": 17880 | |
}, | |
{ | |
"epoch": 32.09, | |
"learning_rate": 1.9868342309994017e-05, | |
"loss": 0.0678, | |
"step": 17890 | |
}, | |
{ | |
"epoch": 32.11, | |
"learning_rate": 1.9848394175144626e-05, | |
"loss": 0.0797, | |
"step": 17900 | |
}, | |
{ | |
"epoch": 32.13, | |
"learning_rate": 1.9828446040295235e-05, | |
"loss": 0.0775, | |
"step": 17910 | |
}, | |
{ | |
"epoch": 32.14, | |
"learning_rate": 1.980849790544584e-05, | |
"loss": 0.0881, | |
"step": 17920 | |
}, | |
{ | |
"epoch": 32.16, | |
"learning_rate": 1.978854977059645e-05, | |
"loss": 0.0732, | |
"step": 17930 | |
}, | |
{ | |
"epoch": 32.18, | |
"learning_rate": 1.976860163574706e-05, | |
"loss": 0.0727, | |
"step": 17940 | |
}, | |
{ | |
"epoch": 32.2, | |
"learning_rate": 1.9748653500897668e-05, | |
"loss": 0.0823, | |
"step": 17950 | |
}, | |
{ | |
"epoch": 32.22, | |
"learning_rate": 1.9728705366048277e-05, | |
"loss": 0.0665, | |
"step": 17960 | |
}, | |
{ | |
"epoch": 32.23, | |
"learning_rate": 1.9708757231198886e-05, | |
"loss": 0.0827, | |
"step": 17970 | |
}, | |
{ | |
"epoch": 32.25, | |
"learning_rate": 1.968880909634949e-05, | |
"loss": 0.0624, | |
"step": 17980 | |
}, | |
{ | |
"epoch": 32.27, | |
"learning_rate": 1.9668860961500103e-05, | |
"loss": 0.0681, | |
"step": 17990 | |
}, | |
{ | |
"epoch": 32.29, | |
"learning_rate": 1.964891282665071e-05, | |
"loss": 0.081, | |
"step": 18000 | |
}, | |
{ | |
"epoch": 32.3, | |
"learning_rate": 1.9628964691801318e-05, | |
"loss": 0.0745, | |
"step": 18010 | |
}, | |
{ | |
"epoch": 32.32, | |
"learning_rate": 1.9609016556951927e-05, | |
"loss": 0.0888, | |
"step": 18020 | |
}, | |
{ | |
"epoch": 32.34, | |
"learning_rate": 1.9589068422102536e-05, | |
"loss": 0.098, | |
"step": 18030 | |
}, | |
{ | |
"epoch": 32.36, | |
"learning_rate": 1.9569120287253142e-05, | |
"loss": 0.0724, | |
"step": 18040 | |
}, | |
{ | |
"epoch": 32.38, | |
"learning_rate": 1.9549172152403754e-05, | |
"loss": 0.0732, | |
"step": 18050 | |
}, | |
{ | |
"epoch": 32.39, | |
"learning_rate": 1.952922401755436e-05, | |
"loss": 0.0799, | |
"step": 18060 | |
}, | |
{ | |
"epoch": 32.41, | |
"learning_rate": 1.950927588270497e-05, | |
"loss": 0.0829, | |
"step": 18070 | |
}, | |
{ | |
"epoch": 32.43, | |
"learning_rate": 1.9489327747855578e-05, | |
"loss": 0.0704, | |
"step": 18080 | |
}, | |
{ | |
"epoch": 32.45, | |
"learning_rate": 1.9469379613006187e-05, | |
"loss": 0.0561, | |
"step": 18090 | |
}, | |
{ | |
"epoch": 32.47, | |
"learning_rate": 1.9449431478156792e-05, | |
"loss": 0.0731, | |
"step": 18100 | |
}, | |
{ | |
"epoch": 32.48, | |
"learning_rate": 1.9429483343307405e-05, | |
"loss": 0.0839, | |
"step": 18110 | |
}, | |
{ | |
"epoch": 32.5, | |
"learning_rate": 1.940953520845801e-05, | |
"loss": 0.0845, | |
"step": 18120 | |
}, | |
{ | |
"epoch": 32.52, | |
"learning_rate": 1.9389587073608616e-05, | |
"loss": 0.0788, | |
"step": 18130 | |
}, | |
{ | |
"epoch": 32.54, | |
"learning_rate": 1.9369638938759228e-05, | |
"loss": 0.0643, | |
"step": 18140 | |
}, | |
{ | |
"epoch": 32.56, | |
"learning_rate": 1.9349690803909834e-05, | |
"loss": 0.0782, | |
"step": 18150 | |
}, | |
{ | |
"epoch": 32.57, | |
"learning_rate": 1.9329742669060443e-05, | |
"loss": 0.0805, | |
"step": 18160 | |
}, | |
{ | |
"epoch": 32.59, | |
"learning_rate": 1.9309794534211052e-05, | |
"loss": 0.0923, | |
"step": 18170 | |
}, | |
{ | |
"epoch": 32.61, | |
"learning_rate": 1.928984639936166e-05, | |
"loss": 0.0856, | |
"step": 18180 | |
}, | |
{ | |
"epoch": 32.63, | |
"learning_rate": 1.9269898264512266e-05, | |
"loss": 0.0838, | |
"step": 18190 | |
}, | |
{ | |
"epoch": 32.65, | |
"learning_rate": 1.924995012966288e-05, | |
"loss": 0.0713, | |
"step": 18200 | |
}, | |
{ | |
"epoch": 32.66, | |
"learning_rate": 1.9230001994813484e-05, | |
"loss": 0.0714, | |
"step": 18210 | |
}, | |
{ | |
"epoch": 32.68, | |
"learning_rate": 1.9210053859964093e-05, | |
"loss": 0.0761, | |
"step": 18220 | |
}, | |
{ | |
"epoch": 32.7, | |
"learning_rate": 1.9190105725114702e-05, | |
"loss": 0.0733, | |
"step": 18230 | |
}, | |
{ | |
"epoch": 32.72, | |
"learning_rate": 1.917015759026531e-05, | |
"loss": 0.0992, | |
"step": 18240 | |
}, | |
{ | |
"epoch": 32.74, | |
"learning_rate": 1.9150209455415917e-05, | |
"loss": 0.0848, | |
"step": 18250 | |
}, | |
{ | |
"epoch": 32.75, | |
"learning_rate": 1.913026132056653e-05, | |
"loss": 0.0768, | |
"step": 18260 | |
}, | |
{ | |
"epoch": 32.77, | |
"learning_rate": 1.9110313185717135e-05, | |
"loss": 0.0899, | |
"step": 18270 | |
}, | |
{ | |
"epoch": 32.79, | |
"learning_rate": 1.9090365050867744e-05, | |
"loss": 0.0834, | |
"step": 18280 | |
}, | |
{ | |
"epoch": 32.81, | |
"learning_rate": 1.9070416916018353e-05, | |
"loss": 0.0669, | |
"step": 18290 | |
}, | |
{ | |
"epoch": 32.83, | |
"learning_rate": 1.9050468781168962e-05, | |
"loss": 0.0896, | |
"step": 18300 | |
}, | |
{ | |
"epoch": 32.84, | |
"learning_rate": 1.9030520646319568e-05, | |
"loss": 0.076, | |
"step": 18310 | |
}, | |
{ | |
"epoch": 32.86, | |
"learning_rate": 1.901057251147018e-05, | |
"loss": 0.068, | |
"step": 18320 | |
}, | |
{ | |
"epoch": 32.88, | |
"learning_rate": 1.8990624376620786e-05, | |
"loss": 0.0713, | |
"step": 18330 | |
}, | |
{ | |
"epoch": 32.9, | |
"learning_rate": 1.8970676241771395e-05, | |
"loss": 0.0812, | |
"step": 18340 | |
},
{
"epoch": 32.91,
"learning_rate": 1.8950728106922004e-05,
"loss": 0.0709,
"step": 18350
},
{
"epoch": 32.93,
"learning_rate": 1.8930779972072613e-05,
"loss": 0.0813,
"step": 18360
},
{
"epoch": 32.95,
"learning_rate": 1.8910831837223218e-05,
"loss": 0.0794,
"step": 18370
},
{
"epoch": 32.97,
"learning_rate": 1.889088370237383e-05,
"loss": 0.0941,
"step": 18380
},
{
"epoch": 32.99,
"learning_rate": 1.8870935567524436e-05,
"loss": 0.0856,
"step": 18390
},
{
"epoch": 33.0,
"eval_accuracy": {
"accuracy": 0.9769567167526351
},
"eval_f1": {
"f1": 0.9758387045693412
},
"eval_loss": 0.07306946069002151,
"eval_precision": {
"precision": 0.9756844886392796
},
"eval_recall": {
"recall": 0.9760671495545676
},
"eval_runtime": 98.5743,
"eval_samples_per_second": 180.94,
"eval_steps_per_second": 5.661,
"step": 18397
},
{
"epoch": 33.0,
"learning_rate": 1.8850987432675045e-05,
"loss": 0.0776,
"step": 18400
},
{
"epoch": 33.02,
"learning_rate": 1.8831039297825654e-05,
"loss": 0.0744,
"step": 18410
},
{
"epoch": 33.04,
"learning_rate": 1.8811091162976263e-05,
"loss": 0.0688,
"step": 18420
},
{
"epoch": 33.06,
"learning_rate": 1.879114302812687e-05,
"loss": 0.0787,
"step": 18430
},
{
"epoch": 33.08,
"learning_rate": 1.877119489327748e-05,
"loss": 0.0748,
"step": 18440
},
{
"epoch": 33.09,
"learning_rate": 1.8751246758428087e-05,
"loss": 0.0716,
"step": 18450
},
{
"epoch": 33.11,
"learning_rate": 1.8731298623578696e-05,
"loss": 0.0729,
"step": 18460
},
{
"epoch": 33.13,
"learning_rate": 1.8711350488729305e-05,
"loss": 0.0878,
"step": 18470
},
{
"epoch": 33.15,
"learning_rate": 1.8691402353879914e-05,
"loss": 0.0642,
"step": 18480
},
{
"epoch": 33.17,
"learning_rate": 1.867145421903052e-05,
"loss": 0.0754,
"step": 18490
},
{
"epoch": 33.18,
"learning_rate": 1.8651506084181132e-05,
"loss": 0.0685,
"step": 18500
},
{
"epoch": 33.2,
"learning_rate": 1.8631557949331737e-05,
"loss": 0.0922,
"step": 18510
},
{
"epoch": 33.22,
"learning_rate": 1.8611609814482346e-05,
"loss": 0.0915,
"step": 18520
},
{
"epoch": 33.24,
"learning_rate": 1.8591661679632955e-05,
"loss": 0.0843,
"step": 18530
},
{
"epoch": 33.26,
"learning_rate": 1.8571713544783564e-05,
"loss": 0.0818,
"step": 18540
},
{
"epoch": 33.27,
"learning_rate": 1.855176540993417e-05,
"loss": 0.0774,
"step": 18550
},
{
"epoch": 33.29,
"learning_rate": 1.8531817275084782e-05,
"loss": 0.08,
"step": 18560
},
{
"epoch": 33.31,
"learning_rate": 1.8511869140235388e-05,
"loss": 0.0901,
"step": 18570
},
{
"epoch": 33.33,
"learning_rate": 1.8491921005385997e-05,
"loss": 0.0906,
"step": 18580
},
{
"epoch": 33.35,
"learning_rate": 1.8471972870536606e-05,
"loss": 0.0792,
"step": 18590
},
{
"epoch": 33.36,
"learning_rate": 1.8452024735687215e-05,
"loss": 0.0817,
"step": 18600
},
{
"epoch": 33.38,
"learning_rate": 1.843207660083782e-05,
"loss": 0.0774,
"step": 18610
},
{
"epoch": 33.4,
"learning_rate": 1.8412128465988433e-05,
"loss": 0.069,
"step": 18620
},
{
"epoch": 33.42,
"learning_rate": 1.839218033113904e-05,
"loss": 0.0975,
"step": 18630
},
{
"epoch": 33.43,
"learning_rate": 1.8372232196289648e-05,
"loss": 0.064,
"step": 18640
},
{
"epoch": 33.45,
"learning_rate": 1.8352284061440257e-05,
"loss": 0.0691,
"step": 18650
},
{
"epoch": 33.47,
"learning_rate": 1.8332335926590866e-05,
"loss": 0.0696,
"step": 18660
},
{
"epoch": 33.49,
"learning_rate": 1.831238779174147e-05,
"loss": 0.0608,
"step": 18670
},
{
"epoch": 33.51,
"learning_rate": 1.8292439656892083e-05,
"loss": 0.0775,
"step": 18680
},
{
"epoch": 33.52,
"learning_rate": 1.827249152204269e-05,
"loss": 0.0877,
"step": 18690
},
{
"epoch": 33.54,
"learning_rate": 1.8252543387193298e-05,
"loss": 0.0732,
"step": 18700
},
{
"epoch": 33.56,
"learning_rate": 1.8232595252343907e-05,
"loss": 0.0614,
"step": 18710
},
{
"epoch": 33.58,
"learning_rate": 1.8212647117494516e-05,
"loss": 0.0753,
"step": 18720
},
{
"epoch": 33.6,
"learning_rate": 1.8192698982645122e-05,
"loss": 0.0845,
"step": 18730
},
{
"epoch": 33.61,
"learning_rate": 1.8172750847795734e-05,
"loss": 0.0629,
"step": 18740
},
{
"epoch": 33.63,
"learning_rate": 1.815280271294634e-05,
"loss": 0.0544,
"step": 18750
},
{
"epoch": 33.65,
"learning_rate": 1.813285457809695e-05,
"loss": 0.0743,
"step": 18760
},
{
"epoch": 33.67,
"learning_rate": 1.8112906443247558e-05,
"loss": 0.0913,
"step": 18770
},
{
"epoch": 33.69,
"learning_rate": 1.8092958308398167e-05,
"loss": 0.0796,
"step": 18780
},
{
"epoch": 33.7,
"learning_rate": 1.8073010173548772e-05,
"loss": 0.0996,
"step": 18790
},
{
"epoch": 33.72,
"learning_rate": 1.8053062038699385e-05,
"loss": 0.0842,
"step": 18800
},
{
"epoch": 33.74,
"learning_rate": 1.803311390384999e-05,
"loss": 0.0819,
"step": 18810
},
{
"epoch": 33.76,
"learning_rate": 1.80131657690006e-05,
"loss": 0.0724,
"step": 18820
},
{
"epoch": 33.78,
"learning_rate": 1.7993217634151208e-05,
"loss": 0.0589,
"step": 18830
},
{
"epoch": 33.79,
"learning_rate": 1.7973269499301817e-05,
"loss": 0.075,
"step": 18840
},
{
"epoch": 33.81,
"learning_rate": 1.7953321364452423e-05,
"loss": 0.0687,
"step": 18850
},
{
"epoch": 33.83,
"learning_rate": 1.7933373229603035e-05,
"loss": 0.0803,
"step": 18860
},
{
"epoch": 33.85,
"learning_rate": 1.791342509475364e-05,
"loss": 0.0623,
"step": 18870
},
{
"epoch": 33.87,
"learning_rate": 1.789347695990425e-05,
"loss": 0.0707,
"step": 18880
},
{
"epoch": 33.88,
"learning_rate": 1.787352882505486e-05,
"loss": 0.0699,
"step": 18890
},
{
"epoch": 33.9,
"learning_rate": 1.7853580690205464e-05,
"loss": 0.059,
"step": 18900
},
{
"epoch": 33.92,
"learning_rate": 1.7833632555356073e-05,
"loss": 0.0797,
"step": 18910
},
{
"epoch": 33.94,
"learning_rate": 1.7813684420506682e-05,
"loss": 0.0796,
"step": 18920
},
{
"epoch": 33.96,
"learning_rate": 1.779373628565729e-05,
"loss": 0.0596,
"step": 18930
},
{
"epoch": 33.97,
"learning_rate": 1.77737881508079e-05,
"loss": 0.0684,
"step": 18940
},
{
"epoch": 33.99,
"learning_rate": 1.775384001595851e-05,
"loss": 0.0841,
"step": 18950
},
{
"epoch": 34.0,
"eval_accuracy": {
"accuracy": 0.978806907378336
},
"eval_f1": {
"f1": 0.9778357381939384
},
"eval_loss": 0.06932555884122849,
"eval_precision": {
"precision": 0.9778894355053215
},
"eval_recall": {
"recall": 0.9778245851611548
},
"eval_runtime": 98.2297,
"eval_samples_per_second": 181.574,
"eval_steps_per_second": 5.681,
"step": 18955
}
],
"logging_steps": 10,
"max_steps": 27850,
"num_input_tokens_seen": 0,
"num_train_epochs": 50,
"save_steps": 500,
"total_flos": 6.0293061922944614e+19,
"train_batch_size": 32,
"trial_name": null,
"trial_params": null
}