{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.8828560392870938,
  "eval_steps": 500,
  "global_step": 12000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0007357133660725781,
      "grad_norm": 223.60693359375,
      "learning_rate": 1.0000000000000002e-06,
      "loss": 20.4521,
      "step": 10
    },
    {
      "epoch": 0.0014714267321451562,
      "grad_norm": 115.68708038330078,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 17.0724,
      "step": 20
    },
    {
      "epoch": 0.0022071400982177345,
      "grad_norm": 9.73855972290039,
      "learning_rate": 3e-06,
      "loss": 11.1765,
      "step": 30
    },
    {
      "epoch": 0.0029428534642903124,
      "grad_norm": 4.239206314086914,
      "learning_rate": 4.000000000000001e-06,
      "loss": 9.9334,
      "step": 40
    },
    {
      "epoch": 0.0036785668303628907,
      "grad_norm": 5.939801216125488,
      "learning_rate": 5e-06,
      "loss": 9.6538,
      "step": 50
    },
    {
      "epoch": 0.004414280196435469,
      "grad_norm": 7.083678245544434,
      "learning_rate": 6e-06,
      "loss": 9.4877,
      "step": 60
    },
    {
      "epoch": 0.0051499935625080465,
      "grad_norm": 4.155125617980957,
      "learning_rate": 7.000000000000001e-06,
      "loss": 9.2998,
      "step": 70
    },
    {
      "epoch": 0.005885706928580625,
      "grad_norm": 8.080842018127441,
      "learning_rate": 8.000000000000001e-06,
      "loss": 9.1653,
      "step": 80
    },
    {
      "epoch": 0.006621420294653203,
      "grad_norm": 5.2444353103637695,
      "learning_rate": 9e-06,
      "loss": 9.0503,
      "step": 90
    },
    {
      "epoch": 0.0073571336607257815,
      "grad_norm": 5.591372966766357,
      "learning_rate": 1e-05,
      "loss": 8.9182,
      "step": 100
    },
    {
      "epoch": 0.008092847026798359,
      "grad_norm": 6.641251564025879,
      "learning_rate": 1.1000000000000001e-05,
      "loss": 8.7308,
      "step": 110
    },
    {
      "epoch": 0.008828560392870938,
      "grad_norm": 8.2617769241333,
      "learning_rate": 1.2e-05,
      "loss": 8.4667,
      "step": 120
    },
    {
      "epoch": 0.009564273758943516,
      "grad_norm": 8.181135177612305,
      "learning_rate": 1.3000000000000001e-05,
      "loss": 8.1982,
      "step": 130
    },
    {
      "epoch": 0.010299987125016093,
      "grad_norm": 5.930114269256592,
      "learning_rate": 1.4000000000000001e-05,
      "loss": 7.8692,
      "step": 140
    },
    {
      "epoch": 0.011035700491088672,
      "grad_norm": 4.546728610992432,
      "learning_rate": 1.5e-05,
      "loss": 7.5773,
      "step": 150
    },
    {
      "epoch": 0.01177141385716125,
      "grad_norm": 4.127258777618408,
      "learning_rate": 1.6000000000000003e-05,
      "loss": 7.3816,
      "step": 160
    },
    {
      "epoch": 0.012507127223233829,
      "grad_norm": 4.3951849937438965,
      "learning_rate": 1.7000000000000003e-05,
      "loss": 7.2603,
      "step": 170
    },
    {
      "epoch": 0.013242840589306406,
      "grad_norm": 2.085538625717163,
      "learning_rate": 1.8e-05,
      "loss": 7.16,
      "step": 180
    },
    {
      "epoch": 0.013978553955378984,
      "grad_norm": 3.098052501678467,
      "learning_rate": 1.9e-05,
      "loss": 7.0236,
      "step": 190
    },
    {
      "epoch": 0.014714267321451563,
      "grad_norm": 4.054460525512695,
      "learning_rate": 2e-05,
      "loss": 7.0169,
      "step": 200
    },
    {
      "epoch": 0.01544998068752414,
      "grad_norm": 1.6470402479171753,
      "learning_rate": 2.1e-05,
      "loss": 6.9933,
      "step": 210
    },
    {
      "epoch": 0.016185694053596718,
      "grad_norm": 1.3456400632858276,
      "learning_rate": 2.2000000000000003e-05,
      "loss": 6.9151,
      "step": 220
    },
    {
      "epoch": 0.016921407419669295,
      "grad_norm": 1.3457653522491455,
      "learning_rate": 2.3000000000000003e-05,
      "loss": 6.9246,
      "step": 230
    },
    {
      "epoch": 0.017657120785741876,
      "grad_norm": 2.5111804008483887,
      "learning_rate": 2.4e-05,
      "loss": 6.8615,
      "step": 240
    },
    {
      "epoch": 0.018392834151814454,
      "grad_norm": 2.082517147064209,
      "learning_rate": 2.5e-05,
      "loss": 6.8109,
      "step": 250
    },
    {
      "epoch": 0.01912854751788703,
      "grad_norm": 2.9603984355926514,
      "learning_rate": 2.6000000000000002e-05,
      "loss": 6.7036,
      "step": 260
    },
    {
      "epoch": 0.01986426088395961,
      "grad_norm": 1.7493945360183716,
      "learning_rate": 2.7000000000000002e-05,
      "loss": 6.6403,
      "step": 270
    },
    {
      "epoch": 0.020599974250032186,
      "grad_norm": 3.83150315284729,
      "learning_rate": 2.8000000000000003e-05,
      "loss": 6.6216,
      "step": 280
    },
    {
      "epoch": 0.021335687616104767,
      "grad_norm": 2.9999842643737793,
      "learning_rate": 2.9e-05,
      "loss": 6.6013,
      "step": 290
    },
    {
      "epoch": 0.022071400982177344,
      "grad_norm": 2.799868583679199,
      "learning_rate": 3e-05,
      "loss": 6.4751,
      "step": 300
    },
    {
      "epoch": 0.022807114348249922,
      "grad_norm": 3.548393487930298,
      "learning_rate": 3.1e-05,
      "loss": 6.5082,
      "step": 310
    },
    {
      "epoch": 0.0235428277143225,
      "grad_norm": 3.523336410522461,
      "learning_rate": 3.2000000000000005e-05,
      "loss": 6.4566,
      "step": 320
    },
    {
      "epoch": 0.024278541080395077,
      "grad_norm": 2.0755739212036133,
      "learning_rate": 3.3e-05,
      "loss": 6.3776,
      "step": 330
    },
    {
      "epoch": 0.025014254446467658,
      "grad_norm": 3.288764238357544,
      "learning_rate": 3.4000000000000007e-05,
      "loss": 6.3559,
      "step": 340
    },
    {
      "epoch": 0.025749967812540235,
      "grad_norm": 3.3932132720947266,
      "learning_rate": 3.5e-05,
      "loss": 6.3234,
      "step": 350
    },
    {
      "epoch": 0.026485681178612813,
      "grad_norm": 2.7775251865386963,
      "learning_rate": 3.6e-05,
      "loss": 6.2551,
      "step": 360
    },
    {
      "epoch": 0.02722139454468539,
      "grad_norm": 2.6773834228515625,
      "learning_rate": 3.7e-05,
      "loss": 6.206,
      "step": 370
    },
    {
      "epoch": 0.027957107910757967,
      "grad_norm": 2.556666612625122,
      "learning_rate": 3.8e-05,
      "loss": 6.1396,
      "step": 380
    },
    {
      "epoch": 0.02869282127683055,
      "grad_norm": 3.2724132537841797,
      "learning_rate": 3.9000000000000006e-05,
      "loss": 6.106,
      "step": 390
    },
    {
      "epoch": 0.029428534642903126,
      "grad_norm": 2.354327440261841,
      "learning_rate": 4e-05,
      "loss": 6.1258,
      "step": 400
    },
    {
      "epoch": 0.030164248008975703,
      "grad_norm": 1.804317593574524,
      "learning_rate": 4.1e-05,
      "loss": 6.0516,
      "step": 410
    },
    {
      "epoch": 0.03089996137504828,
      "grad_norm": 1.7134747505187988,
      "learning_rate": 4.2e-05,
      "loss": 5.9582,
      "step": 420
    },
    {
      "epoch": 0.03163567474112086,
      "grad_norm": 1.4939428567886353,
      "learning_rate": 4.3e-05,
      "loss": 5.9328,
      "step": 430
    },
    {
      "epoch": 0.032371388107193436,
      "grad_norm": 1.9662060737609863,
      "learning_rate": 4.4000000000000006e-05,
      "loss": 5.8578,
      "step": 440
    },
    {
      "epoch": 0.03310710147326602,
      "grad_norm": 1.748353123664856,
      "learning_rate": 4.5e-05,
      "loss": 5.7874,
      "step": 450
    },
    {
      "epoch": 0.03384281483933859,
      "grad_norm": 2.1945955753326416,
      "learning_rate": 4.600000000000001e-05,
      "loss": 5.7787,
      "step": 460
    },
    {
      "epoch": 0.03457852820541117,
      "grad_norm": 2.1684770584106445,
      "learning_rate": 4.7e-05,
      "loss": 5.7138,
      "step": 470
    },
    {
      "epoch": 0.03531424157148375,
      "grad_norm": 1.728945255279541,
      "learning_rate": 4.8e-05,
      "loss": 5.6774,
      "step": 480
    },
    {
      "epoch": 0.036049954937556326,
      "grad_norm": 1.8206098079681396,
      "learning_rate": 4.9e-05,
      "loss": 5.6214,
      "step": 490
    },
    {
      "epoch": 0.03678566830362891,
      "grad_norm": 1.5937659740447998,
      "learning_rate": 5e-05,
      "loss": 5.5198,
      "step": 500
    },
    {
      "epoch": 0.03752138166970148,
      "grad_norm": 1.617659568786621,
      "learning_rate": 5.1000000000000006e-05,
      "loss": 5.4818,
      "step": 510
    },
    {
      "epoch": 0.03825709503577406,
      "grad_norm": 1.7942918539047241,
      "learning_rate": 5.2000000000000004e-05,
      "loss": 5.5156,
      "step": 520
    },
    {
      "epoch": 0.03899280840184664,
      "grad_norm": 1.830945611000061,
      "learning_rate": 5.300000000000001e-05,
      "loss": 5.3836,
      "step": 530
    },
    {
      "epoch": 0.03972852176791922,
      "grad_norm": 1.5295898914337158,
      "learning_rate": 5.4000000000000005e-05,
      "loss": 5.3989,
      "step": 540
    },
    {
      "epoch": 0.0404642351339918,
      "grad_norm": 1.527603268623352,
      "learning_rate": 5.500000000000001e-05,
      "loss": 5.3724,
      "step": 550
    },
    {
      "epoch": 0.04119994850006437,
      "grad_norm": 1.6225807666778564,
      "learning_rate": 5.6000000000000006e-05,
      "loss": 5.2788,
      "step": 560
    },
    {
      "epoch": 0.04193566186613695,
      "grad_norm": 2.0076022148132324,
      "learning_rate": 5.6999999999999996e-05,
      "loss": 5.2227,
      "step": 570
    },
    {
      "epoch": 0.042671375232209534,
      "grad_norm": 1.5891458988189697,
      "learning_rate": 5.8e-05,
      "loss": 5.2265,
      "step": 580
    },
    {
      "epoch": 0.04340708859828211,
      "grad_norm": 1.5814419984817505,
      "learning_rate": 5.9e-05,
      "loss": 5.1948,
      "step": 590
    },
    {
      "epoch": 0.04414280196435469,
      "grad_norm": 1.7139537334442139,
      "learning_rate": 6e-05,
      "loss": 5.1036,
      "step": 600
    },
    {
      "epoch": 0.04487851533042726,
      "grad_norm": 1.2224972248077393,
      "learning_rate": 6.1e-05,
      "loss": 5.1932,
      "step": 610
    },
    {
      "epoch": 0.045614228696499844,
      "grad_norm": 1.6268278360366821,
      "learning_rate": 6.2e-05,
      "loss": 5.068,
      "step": 620
    },
    {
      "epoch": 0.046349942062572425,
      "grad_norm": 1.4944841861724854,
      "learning_rate": 6.3e-05,
      "loss": 5.032,
      "step": 630
    },
    {
      "epoch": 0.047085655428645,
      "grad_norm": 1.442824363708496,
      "learning_rate": 6.400000000000001e-05,
      "loss": 5.0077,
      "step": 640
    },
    {
      "epoch": 0.04782136879471758,
      "grad_norm": 1.6031944751739502,
      "learning_rate": 6.500000000000001e-05,
      "loss": 4.9808,
      "step": 650
    },
    {
      "epoch": 0.04855708216079015,
      "grad_norm": 1.6006144285202026,
      "learning_rate": 6.6e-05,
      "loss": 4.9239,
      "step": 660
    },
    {
      "epoch": 0.049292795526862734,
      "grad_norm": 1.3961182832717896,
      "learning_rate": 6.7e-05,
      "loss": 4.9158,
      "step": 670
    },
    {
      "epoch": 0.050028508892935315,
      "grad_norm": 1.4314249753952026,
      "learning_rate": 6.800000000000001e-05,
      "loss": 4.8126,
      "step": 680
    },
    {
      "epoch": 0.05076422225900789,
      "grad_norm": 1.4509848356246948,
      "learning_rate": 6.9e-05,
      "loss": 4.8713,
      "step": 690
    },
    {
      "epoch": 0.05149993562508047,
      "grad_norm": 1.5618196725845337,
      "learning_rate": 7e-05,
      "loss": 4.7814,
      "step": 700
    },
    {
      "epoch": 0.052235648991153044,
      "grad_norm": 1.5423110723495483,
      "learning_rate": 7.1e-05,
      "loss": 4.7409,
      "step": 710
    },
    {
      "epoch": 0.052971362357225625,
      "grad_norm": 1.2559527158737183,
      "learning_rate": 7.2e-05,
      "loss": 4.73,
      "step": 720
    },
    {
      "epoch": 0.053707075723298206,
      "grad_norm": 1.1119533777236938,
      "learning_rate": 7.3e-05,
      "loss": 4.7055,
      "step": 730
    },
    {
      "epoch": 0.05444278908937078,
      "grad_norm": 1.2400840520858765,
      "learning_rate": 7.4e-05,
      "loss": 4.6501,
      "step": 740
    },
    {
      "epoch": 0.05517850245544336,
      "grad_norm": 1.2191784381866455,
      "learning_rate": 7.500000000000001e-05,
      "loss": 4.6239,
      "step": 750
    },
    {
      "epoch": 0.055914215821515935,
      "grad_norm": 1.269572377204895,
      "learning_rate": 7.6e-05,
      "loss": 4.6498,
      "step": 760
    },
    {
      "epoch": 0.056649929187588516,
      "grad_norm": 1.275608777999878,
      "learning_rate": 7.7e-05,
      "loss": 4.6326,
      "step": 770
    },
    {
      "epoch": 0.0573856425536611,
      "grad_norm": 1.3013856410980225,
      "learning_rate": 7.800000000000001e-05,
      "loss": 4.5869,
      "step": 780
    },
    {
      "epoch": 0.05812135591973367,
      "grad_norm": 1.37381112575531,
      "learning_rate": 7.900000000000001e-05,
      "loss": 4.5785,
      "step": 790
    },
    {
      "epoch": 0.05885706928580625,
      "grad_norm": 1.2166635990142822,
      "learning_rate": 8e-05,
      "loss": 4.5126,
      "step": 800
    },
    {
      "epoch": 0.059592782651878826,
      "grad_norm": 1.4644134044647217,
      "learning_rate": 8.1e-05,
      "loss": 4.5228,
      "step": 810
    },
    {
      "epoch": 0.06032849601795141,
      "grad_norm": 1.206525206565857,
      "learning_rate": 8.2e-05,
      "loss": 4.4874,
      "step": 820
    },
    {
      "epoch": 0.06106420938402399,
      "grad_norm": 1.1188849210739136,
      "learning_rate": 8.3e-05,
      "loss": 4.4619,
      "step": 830
    },
    {
      "epoch": 0.06179992275009656,
      "grad_norm": 1.2260874509811401,
      "learning_rate": 8.4e-05,
      "loss": 4.4118,
      "step": 840
    },
    {
      "epoch": 0.06253563611616914,
      "grad_norm": 1.19864022731781,
      "learning_rate": 8.5e-05,
      "loss": 4.3522,
      "step": 850
    },
    {
      "epoch": 0.06327134948224172,
      "grad_norm": 1.1710054874420166,
      "learning_rate": 8.6e-05,
      "loss": 4.4048,
      "step": 860
    },
    {
      "epoch": 0.0640070628483143,
      "grad_norm": 0.8916666507720947,
      "learning_rate": 8.7e-05,
      "loss": 4.3865,
      "step": 870
    },
    {
      "epoch": 0.06474277621438687,
      "grad_norm": 1.135651707649231,
      "learning_rate": 8.800000000000001e-05,
      "loss": 4.3554,
      "step": 880
    },
    {
      "epoch": 0.06547848958045946,
      "grad_norm": 0.9037818312644958,
      "learning_rate": 8.900000000000001e-05,
      "loss": 4.4055,
      "step": 890
    },
    {
      "epoch": 0.06621420294653203,
      "grad_norm": 1.1230660676956177,
      "learning_rate": 9e-05,
      "loss": 4.3131,
      "step": 900
    },
    {
      "epoch": 0.06694991631260461,
      "grad_norm": 1.149318814277649,
      "learning_rate": 9.1e-05,
      "loss": 4.2923,
      "step": 910
    },
    {
      "epoch": 0.06768562967867718,
      "grad_norm": 1.1154922246932983,
      "learning_rate": 9.200000000000001e-05,
      "loss": 4.2774,
      "step": 920
    },
    {
      "epoch": 0.06842134304474977,
      "grad_norm": 1.0399177074432373,
      "learning_rate": 9.300000000000001e-05,
      "loss": 4.2766,
      "step": 930
    },
    {
      "epoch": 0.06915705641082234,
      "grad_norm": 1.0165122747421265,
      "learning_rate": 9.4e-05,
      "loss": 4.2479,
      "step": 940
    },
    {
      "epoch": 0.06989276977689492,
      "grad_norm": 1.3305314779281616,
      "learning_rate": 9.5e-05,
      "loss": 4.2443,
      "step": 950
    },
    {
      "epoch": 0.0706284831429675,
      "grad_norm": 0.8728047609329224,
      "learning_rate": 9.6e-05,
      "loss": 4.3267,
      "step": 960
    },
    {
      "epoch": 0.07136419650904008,
      "grad_norm": 1.117966890335083,
      "learning_rate": 9.7e-05,
      "loss": 4.2435,
      "step": 970
    },
    {
      "epoch": 0.07209990987511265,
      "grad_norm": 0.934165894985199,
      "learning_rate": 9.8e-05,
      "loss": 4.2125,
      "step": 980
    },
    {
      "epoch": 0.07283562324118524,
      "grad_norm": 1.0023953914642334,
      "learning_rate": 9.900000000000001e-05,
      "loss": 4.1705,
      "step": 990
    },
    {
      "epoch": 0.07357133660725781,
      "grad_norm": 0.9707255959510803,
      "learning_rate": 0.0001,
      "loss": 4.202,
      "step": 1000
    },
    {
      "epoch": 0.07430704997333039,
      "grad_norm": 0.988510012626648,
      "learning_rate": 9.999998440456413e-05,
      "loss": 4.1781,
      "step": 1010
    },
    {
      "epoch": 0.07504276333940296,
      "grad_norm": 1.0348843336105347,
      "learning_rate": 9.99999376182662e-05,
      "loss": 4.1589,
      "step": 1020
    },
    {
      "epoch": 0.07577847670547555,
      "grad_norm": 1.0255194902420044,
      "learning_rate": 9.999985964113542e-05,
      "loss": 4.1653,
      "step": 1030
    },
    {
      "epoch": 0.07651419007154812,
      "grad_norm": 0.831902801990509,
      "learning_rate": 9.999975047322044e-05,
      "loss": 4.1595,
      "step": 1040
    },
    {
      "epoch": 0.0772499034376207,
      "grad_norm": 0.964509904384613,
      "learning_rate": 9.999961011458933e-05,
      "loss": 4.1414,
      "step": 1050
    },
    {
      "epoch": 0.07798561680369329,
      "grad_norm": 0.796290397644043,
      "learning_rate": 9.99994385653297e-05,
      "loss": 4.0971,
      "step": 1060
    },
    {
      "epoch": 0.07872133016976586,
      "grad_norm": 0.8830526471138,
      "learning_rate": 9.99992358255485e-05,
      "loss": 4.1182,
      "step": 1070
    },
    {
      "epoch": 0.07945704353583843,
      "grad_norm": 1.0227144956588745,
      "learning_rate": 9.999900189537226e-05,
      "loss": 4.0917,
      "step": 1080
    },
    {
      "epoch": 0.08019275690191102,
      "grad_norm": 1.0185173749923706,
      "learning_rate": 9.999873677494689e-05,
      "loss": 4.1066,
      "step": 1090
    },
    {
      "epoch": 0.0809284702679836,
      "grad_norm": 0.8834981322288513,
      "learning_rate": 9.999844046443776e-05,
      "loss": 4.0413,
      "step": 1100
    },
    {
      "epoch": 0.08166418363405617,
      "grad_norm": 0.9037182331085205,
      "learning_rate": 9.999811296402975e-05,
      "loss": 4.1012,
      "step": 1110
    },
    {
      "epoch": 0.08239989700012874,
      "grad_norm": 0.9970824122428894,
      "learning_rate": 9.99977542739271e-05,
      "loss": 4.0548,
      "step": 1120
    },
    {
      "epoch": 0.08313561036620133,
      "grad_norm": 0.9786319136619568,
      "learning_rate": 9.999736439435364e-05,
      "loss": 4.0613,
      "step": 1130
    },
    {
      "epoch": 0.0838713237322739,
      "grad_norm": 0.8991515636444092,
      "learning_rate": 9.999694332555253e-05,
      "loss": 4.07,
      "step": 1140
    },
    {
      "epoch": 0.08460703709834648,
      "grad_norm": 0.8191851377487183,
      "learning_rate": 9.999649106778646e-05,
      "loss": 4.0169,
      "step": 1150
    },
    {
      "epoch": 0.08534275046441907,
      "grad_norm": 0.7964187860488892,
      "learning_rate": 9.999600762133756e-05,
      "loss": 4.0185,
      "step": 1160
    },
    {
      "epoch": 0.08607846383049164,
      "grad_norm": 0.8330606818199158,
      "learning_rate": 9.99954929865074e-05,
      "loss": 4.0695,
      "step": 1170
    },
    {
      "epoch": 0.08681417719656422,
      "grad_norm": 0.8456650376319885,
      "learning_rate": 9.999494716361703e-05,
      "loss": 3.971,
      "step": 1180
    },
    {
      "epoch": 0.0875498905626368,
      "grad_norm": 0.7506271600723267,
      "learning_rate": 9.999437015300694e-05,
      "loss": 4.0053,
      "step": 1190
    },
    {
      "epoch": 0.08828560392870938,
      "grad_norm": 0.7525317668914795,
      "learning_rate": 9.999376195503709e-05,
      "loss": 3.9699,
      "step": 1200
    },
    {
      "epoch": 0.08902131729478195,
      "grad_norm": 0.7871759533882141,
      "learning_rate": 9.999312257008685e-05,
      "loss": 3.9137,
      "step": 1210
    },
    {
      "epoch": 0.08975703066085453,
      "grad_norm": 0.8398749828338623,
      "learning_rate": 9.999245199855512e-05,
      "loss": 3.9667,
      "step": 1220
    },
    {
      "epoch": 0.09049274402692711,
      "grad_norm": 0.7666105031967163,
      "learning_rate": 9.99917502408602e-05,
      "loss": 3.9273,
      "step": 1230
    },
    {
      "epoch": 0.09122845739299969,
      "grad_norm": 0.8470506072044373,
      "learning_rate": 9.999101729743985e-05,
      "loss": 3.9694,
      "step": 1240
    },
    {
      "epoch": 0.09196417075907226,
      "grad_norm": 0.7060391306877136,
      "learning_rate": 9.999025316875129e-05,
      "loss": 3.9618,
      "step": 1250
    },
    {
      "epoch": 0.09269988412514485,
      "grad_norm": 0.7745850682258606,
      "learning_rate": 9.99894578552712e-05,
      "loss": 3.9252,
      "step": 1260
    },
    {
      "epoch": 0.09343559749121742,
      "grad_norm": 0.8749181032180786,
      "learning_rate": 9.998863135749575e-05,
      "loss": 3.9104,
      "step": 1270
    },
    {
      "epoch": 0.09417131085729,
      "grad_norm": 0.906620979309082,
      "learning_rate": 9.998777367594046e-05,
      "loss": 3.9174,
      "step": 1280
    },
    {
      "epoch": 0.09490702422336257,
      "grad_norm": 0.7657135725021362,
      "learning_rate": 9.998688481114039e-05,
      "loss": 3.9238,
      "step": 1290
    },
    {
      "epoch": 0.09564273758943516,
      "grad_norm": 0.6726455688476562,
      "learning_rate": 9.998596476365006e-05,
      "loss": 3.8952,
      "step": 1300
    },
    {
      "epoch": 0.09637845095550773,
      "grad_norm": 0.7561966776847839,
      "learning_rate": 9.998501353404336e-05,
      "loss": 3.9123,
      "step": 1310
    },
    {
      "epoch": 0.0971141643215803,
      "grad_norm": 0.8008501529693604,
      "learning_rate": 9.998403112291372e-05,
      "loss": 3.8617,
      "step": 1320
    },
    {
      "epoch": 0.0978498776876529,
      "grad_norm": 0.7718631029129028,
      "learning_rate": 9.998301753087398e-05,
      "loss": 3.8859,
      "step": 1330
    },
    {
      "epoch": 0.09858559105372547,
      "grad_norm": 0.8178690671920776,
      "learning_rate": 9.998197275855644e-05,
      "loss": 3.8473,
      "step": 1340
    },
    {
      "epoch": 0.09932130441979804,
      "grad_norm": 0.7664726376533508,
      "learning_rate": 9.998089680661282e-05,
      "loss": 3.8305,
      "step": 1350
    },
    {
      "epoch": 0.10005701778587063,
      "grad_norm": 0.8279010653495789,
      "learning_rate": 9.997978967571436e-05,
      "loss": 3.8802,
      "step": 1360
    },
    {
      "epoch": 0.1007927311519432,
      "grad_norm": 0.7610680460929871,
      "learning_rate": 9.997865136655166e-05,
      "loss": 3.8869,
      "step": 1370
    },
    {
      "epoch": 0.10152844451801578,
      "grad_norm": 0.7211320400238037,
      "learning_rate": 9.997748187983487e-05,
      "loss": 3.9122,
      "step": 1380
    },
    {
      "epoch": 0.10226415788408835,
      "grad_norm": 0.7668148279190063,
      "learning_rate": 9.997628121629349e-05,
      "loss": 3.8238,
      "step": 1390
    },
    {
      "epoch": 0.10299987125016094,
      "grad_norm": 0.6940723061561584,
      "learning_rate": 9.997504937667654e-05,
      "loss": 3.8464,
      "step": 1400
    },
    {
      "epoch": 0.10373558461623351,
      "grad_norm": 0.681667149066925,
      "learning_rate": 9.997378636175245e-05,
      "loss": 3.8182,
      "step": 1410
    },
    {
      "epoch": 0.10447129798230609,
      "grad_norm": 0.6430841684341431,
      "learning_rate": 9.99724921723091e-05,
      "loss": 3.8379,
      "step": 1420
    },
    {
      "epoch": 0.10520701134837868,
      "grad_norm": 0.6482489705085754,
      "learning_rate": 9.997116680915389e-05,
      "loss": 3.8117,
      "step": 1430
    },
    {
      "epoch": 0.10594272471445125,
      "grad_norm": 0.6649356484413147,
      "learning_rate": 9.996981027311352e-05,
      "loss": 3.8452,
      "step": 1440
    },
    {
      "epoch": 0.10667843808052382,
      "grad_norm": 0.757146954536438,
      "learning_rate": 9.996842256503428e-05,
      "loss": 3.7886,
      "step": 1450
    },
    {
      "epoch": 0.10741415144659641,
      "grad_norm": 0.6373863816261292,
      "learning_rate": 9.99670036857818e-05,
      "loss": 3.7975,
      "step": 1460
    },
    {
      "epoch": 0.10814986481266899,
      "grad_norm": 0.6672409176826477,
      "learning_rate": 9.996555363624124e-05,
      "loss": 3.843,
      "step": 1470
    },
    {
      "epoch": 0.10888557817874156,
      "grad_norm": 0.6595909595489502,
      "learning_rate": 9.996407241731717e-05,
      "loss": 3.8397,
      "step": 1480
    },
    {
      "epoch": 0.10962129154481413,
      "grad_norm": 0.725779116153717,
      "learning_rate": 9.996256002993358e-05,
      "loss": 3.8264,
      "step": 1490
    },
    {
      "epoch": 0.11035700491088672,
      "grad_norm": 0.6590442061424255,
      "learning_rate": 9.99610164750339e-05,
      "loss": 3.8375,
      "step": 1500
    },
    {
      "epoch": 0.1110927182769593,
      "grad_norm": 0.707144558429718,
      "learning_rate": 9.99594417535811e-05,
      "loss": 3.7805,
      "step": 1510
    },
    {
      "epoch": 0.11182843164303187,
      "grad_norm": 0.7135934233665466,
      "learning_rate": 9.995783586655745e-05,
      "loss": 3.7696,
      "step": 1520
    },
    {
      "epoch": 0.11256414500910446,
      "grad_norm": 0.7143461108207703,
      "learning_rate": 9.995619881496474e-05,
      "loss": 3.7558,
      "step": 1530
    },
    {
      "epoch": 0.11329985837517703,
      "grad_norm": 0.6768718957901001,
      "learning_rate": 9.995453059982422e-05,
      "loss": 3.8221,
      "step": 1540
    },
    {
      "epoch": 0.1140355717412496,
      "grad_norm": 0.6788365244865417,
      "learning_rate": 9.995283122217653e-05,
      "loss": 3.7792,
      "step": 1550
    },
    {
      "epoch": 0.1147712851073222,
      "grad_norm": 0.6880599856376648,
      "learning_rate": 9.995110068308179e-05,
      "loss": 3.7191,
      "step": 1560
    },
    {
      "epoch": 0.11550699847339477,
      "grad_norm": 0.6511822938919067,
      "learning_rate": 9.994933898361951e-05,
      "loss": 3.7585,
      "step": 1570
    },
    {
      "epoch": 0.11624271183946734,
      "grad_norm": 0.6109036803245544,
      "learning_rate": 9.99475461248887e-05,
      "loss": 3.7555,
      "step": 1580
    },
    {
      "epoch": 0.11697842520553992,
      "grad_norm": 0.6387907862663269,
      "learning_rate": 9.994572210800777e-05,
      "loss": 3.7283,
      "step": 1590
    },
    {
      "epoch": 0.1177141385716125,
      "grad_norm": 0.6535074710845947,
      "learning_rate": 9.994386693411455e-05,
      "loss": 3.7781,
      "step": 1600
    },
    {
      "epoch": 0.11844985193768508,
      "grad_norm": 0.6718204617500305,
      "learning_rate": 9.994198060436637e-05,
      "loss": 3.7622,
      "step": 1610
    },
    {
      "epoch": 0.11918556530375765,
      "grad_norm": 0.75942063331604,
      "learning_rate": 9.994006311993991e-05,
      "loss": 3.6595,
      "step": 1620
    },
    {
      "epoch": 0.11992127866983024,
      "grad_norm": 0.6169456243515015,
      "learning_rate": 9.993811448203136e-05,
      "loss": 3.6921,
      "step": 1630
    },
    {
      "epoch": 0.12065699203590281,
      "grad_norm": 0.6598173379898071,
      "learning_rate": 9.993613469185631e-05,
      "loss": 3.6957,
      "step": 1640
    },
    {
      "epoch": 0.12139270540197539,
      "grad_norm": 0.6640283465385437,
      "learning_rate": 9.993412375064979e-05,
      "loss": 3.7507,
      "step": 1650
    },
    {
      "epoch": 0.12212841876804797,
      "grad_norm": 0.720782458782196,
      "learning_rate": 9.993208165966623e-05,
      "loss": 3.741,
      "step": 1660
    },
    {
      "epoch": 0.12286413213412055,
      "grad_norm": 0.6343806385993958,
      "learning_rate": 9.993000842017957e-05,
      "loss": 3.7112,
      "step": 1670
    },
    {
      "epoch": 0.12359984550019312,
      "grad_norm": 0.72925865650177,
      "learning_rate": 9.99279040334831e-05,
      "loss": 3.7455,
      "step": 1680
    },
    {
      "epoch": 0.1243355588662657,
      "grad_norm": 0.7056775093078613,
      "learning_rate": 9.99257685008896e-05,
      "loss": 3.7007,
      "step": 1690
    },
    {
      "epoch": 0.12507127223233827,
      "grad_norm": 0.6219266057014465,
      "learning_rate": 9.992360182373122e-05,
      "loss": 3.6704,
      "step": 1700
    },
    {
      "epoch": 0.12580698559841086,
      "grad_norm": 0.8017805814743042,
      "learning_rate": 9.992140400335958e-05,
      "loss": 3.7098,
      "step": 1710
    },
    {
      "epoch": 0.12654269896448345,
      "grad_norm": 0.6517687439918518,
      "learning_rate": 9.991917504114574e-05,
      "loss": 3.7145,
      "step": 1720
    },
    {
      "epoch": 0.127278412330556,
      "grad_norm": 0.6560338735580444,
      "learning_rate": 9.991691493848015e-05,
      "loss": 3.7046,
      "step": 1730
    },
    {
      "epoch": 0.1280141256966286,
      "grad_norm": 0.7055456638336182,
      "learning_rate": 9.991462369677267e-05,
      "loss": 3.6855,
      "step": 1740
    },
    {
      "epoch": 0.12874983906270118,
      "grad_norm": 0.6441034078598022,
      "learning_rate": 9.991230131745268e-05,
      "loss": 3.7338,
      "step": 1750
    },
    {
      "epoch": 0.12948555242877374,
      "grad_norm": 0.6746709942817688,
      "learning_rate": 9.990994780196889e-05,
      "loss": 3.7237,
      "step": 1760
    },
    {
      "epoch": 0.13022126579484633,
      "grad_norm": 0.6609126925468445,
      "learning_rate": 9.990756315178945e-05,
      "loss": 3.7393,
      "step": 1770
    },
    {
      "epoch": 0.13095697916091892,
      "grad_norm": 0.648073673248291,
      "learning_rate": 9.990514736840197e-05,
      "loss": 3.7057,
      "step": 1780
    },
    {
      "epoch": 0.13169269252699148,
      "grad_norm": 0.6735105514526367,
      "learning_rate": 9.990270045331344e-05,
      "loss": 3.7574,
      "step": 1790
    },
    {
      "epoch": 0.13242840589306407,
      "grad_norm": 0.6125868558883667,
      "learning_rate": 9.99002224080503e-05,
      "loss": 3.6548,
      "step": 1800
    },
    {
      "epoch": 0.13316411925913665,
      "grad_norm": 0.5899696946144104,
      "learning_rate": 9.98977132341584e-05,
      "loss": 3.624,
      "step": 1810
    },
    {
      "epoch": 0.13389983262520921,
      "grad_norm": 0.6936771273612976,
      "learning_rate": 9.989517293320299e-05,
      "loss": 3.7022,
      "step": 1820
    },
    {
      "epoch": 0.1346355459912818,
      "grad_norm": 0.6176466345787048,
      "learning_rate": 9.989260150676876e-05,
      "loss": 3.697,
      "step": 1830
    },
    {
      "epoch": 0.13537125935735436,
      "grad_norm": 0.6583747863769531,
      "learning_rate": 9.988999895645981e-05,
      "loss": 3.7048,
      "step": 1840
    },
    {
      "epoch": 0.13610697272342695,
      "grad_norm": 0.6942030191421509,
      "learning_rate": 9.988736528389969e-05,
      "loss": 3.6784,
      "step": 1850
    },
    {
      "epoch": 0.13684268608949954,
      "grad_norm": 0.6578043103218079,
      "learning_rate": 9.988470049073126e-05,
      "loss": 3.6411,
      "step": 1860
    },
    {
      "epoch": 0.1375783994555721,
      "grad_norm": 0.648202657699585,
      "learning_rate": 9.988200457861694e-05,
      "loss": 3.6472,
      "step": 1870
    },
    {
      "epoch": 0.13831411282164469,
      "grad_norm": 0.6567854285240173,
      "learning_rate": 9.987927754923843e-05,
      "loss": 3.6734,
      "step": 1880
    },
    {
      "epoch": 0.13904982618771727,
      "grad_norm": 0.6515029668807983,
      "learning_rate": 9.987651940429695e-05,
      "loss": 3.6463,
      "step": 1890
    },
    {
      "epoch": 0.13978553955378983,
      "grad_norm": 0.597316324710846,
      "learning_rate": 9.987373014551302e-05,
      "loss": 3.5951,
      "step": 1900
    },
    {
      "epoch": 0.14052125291986242,
      "grad_norm": 0.6288499236106873,
      "learning_rate": 9.987090977462668e-05,
      "loss": 3.675,
      "step": 1910
    },
    {
      "epoch": 0.141256966285935,
      "grad_norm": 0.6451112627983093,
      "learning_rate": 9.986805829339729e-05,
      "loss": 3.6552,
      "step": 1920
    },
    {
      "epoch": 0.14199267965200757,
      "grad_norm": 0.6517444252967834,
      "learning_rate": 9.986517570360368e-05,
      "loss": 3.6206,
      "step": 1930
    },
    {
      "epoch": 0.14272839301808016,
      "grad_norm": 0.6413518786430359,
      "learning_rate": 9.986226200704404e-05,
      "loss": 3.6848,
      "step": 1940
    },
    {
      "epoch": 0.14346410638415275,
      "grad_norm": 0.6431183815002441,
      "learning_rate": 9.9859317205536e-05,
      "loss": 3.6339,
      "step": 1950
    },
    {
      "epoch": 0.1441998197502253,
      "grad_norm": 0.7160407900810242,
      "learning_rate": 9.985634130091657e-05,
      "loss": 3.6306,
      "step": 1960
    },
    {
      "epoch": 0.1449355331162979,
      "grad_norm": 0.602085292339325,
      "learning_rate": 9.985333429504216e-05,
      "loss": 3.6556,
      "step": 1970
    },
    {
      "epoch": 0.14567124648237048,
      "grad_norm": 0.6392213702201843,
      "learning_rate": 9.985029618978863e-05,
      "loss": 3.6195,
      "step": 1980
    },
    {
      "epoch": 0.14640695984844304,
      "grad_norm": 0.5774185657501221,
      "learning_rate": 9.984722698705115e-05,
      "loss": 3.6463,
      "step": 1990
    },
    {
      "epoch": 0.14714267321451563,
      "grad_norm": 0.6097477674484253,
      "learning_rate": 9.984412668874442e-05,
      "loss": 3.6759,
      "step": 2000
    },
    {
      "epoch": 0.1478783865805882,
      "grad_norm": 0.5511993765830994,
      "learning_rate": 9.984099529680237e-05,
      "loss": 3.6254,
      "step": 2010
    },
    {
      "epoch": 0.14861409994666078,
      "grad_norm": 0.6293554306030273,
      "learning_rate": 9.983783281317847e-05,
      "loss": 3.6285,
      "step": 2020
    },
    {
      "epoch": 0.14934981331273336,
      "grad_norm": 0.5784502625465393,
      "learning_rate": 9.983463923984551e-05,
      "loss": 3.5952,
      "step": 2030
    },
    {
      "epoch": 0.15008552667880593,
      "grad_norm": 0.5587512254714966,
      "learning_rate": 9.983141457879572e-05,
      "loss": 3.5877,
      "step": 2040
    },
    {
      "epoch": 0.1508212400448785,
      "grad_norm": 0.5835023522377014,
      "learning_rate": 9.982815883204068e-05,
      "loss": 3.6139,
      "step": 2050
    },
    {
      "epoch": 0.1515569534109511,
      "grad_norm": 0.6096065044403076,
      "learning_rate": 9.982487200161139e-05,
      "loss": 3.6261,
      "step": 2060
    },
    {
      "epoch": 0.15229266677702366,
      "grad_norm": 0.6385859847068787,
      "learning_rate": 9.982155408955822e-05,
      "loss": 3.6228,
      "step": 2070
    },
    {
      "epoch": 0.15302838014309625,
      "grad_norm": 0.6276549100875854,
      "learning_rate": 9.981820509795096e-05,
      "loss": 3.6525,
      "step": 2080
    },
    {
      "epoch": 0.15376409350916884,
      "grad_norm": 0.5835126638412476,
      "learning_rate": 9.981482502887878e-05,
      "loss": 3.6081,
      "step": 2090
    },
    {
      "epoch": 0.1544998068752414,
      "grad_norm": 0.6056969165802002,
      "learning_rate": 9.981141388445019e-05,
      "loss": 3.6241,
      "step": 2100
    },
    {
      "epoch": 0.15523552024131398,
      "grad_norm": 0.6270286440849304,
      "learning_rate": 9.980797166679314e-05,
      "loss": 3.6231,
      "step": 2110
    },
    {
      "epoch": 0.15597123360738657,
      "grad_norm": 0.6744200587272644,
      "learning_rate": 9.980449837805494e-05,
      "loss": 3.5789,
      "step": 2120
    },
    {
      "epoch": 0.15670694697345913,
      "grad_norm": 0.5492793321609497,
      "learning_rate": 9.98009940204023e-05,
      "loss": 3.58,
      "step": 2130
    },
    {
      "epoch": 0.15744266033953172,
      "grad_norm": 0.601390540599823,
      "learning_rate": 9.979745859602129e-05,
      "loss": 3.6059,
      "step": 2140
    },
    {
      "epoch": 0.1581783737056043,
      "grad_norm": 0.6107606887817383,
      "learning_rate": 9.979389210711737e-05,
      "loss": 3.5928,
      "step": 2150
    },
    {
      "epoch": 0.15891408707167687,
      "grad_norm": 0.6118071675300598,
      "learning_rate": 9.979029455591538e-05,
      "loss": 3.5771,
      "step": 2160
    },
    {
      "epoch": 0.15964980043774946,
      "grad_norm": 0.6007488369941711,
      "learning_rate": 9.978666594465953e-05,
      "loss": 3.5729,
      "step": 2170
    },
    {
      "epoch": 0.16038551380382204,
      "grad_norm": 0.5910535454750061,
      "learning_rate": 9.978300627561343e-05,
      "loss": 3.5953,
      "step": 2180
    },
    {
      "epoch": 0.1611212271698946,
      "grad_norm": 0.5529370903968811,
      "learning_rate": 9.977931555106002e-05,
      "loss": 3.6167,
      "step": 2190
    },
    {
      "epoch": 0.1618569405359672,
      "grad_norm": 0.6961104273796082,
      "learning_rate": 9.977559377330163e-05,
      "loss": 3.5541,
      "step": 2200
    },
    {
      "epoch": 0.16259265390203975,
      "grad_norm": 0.5334804654121399,
      "learning_rate": 9.977184094466001e-05,
      "loss": 3.6368,
      "step": 2210
    },
    {
      "epoch": 0.16332836726811234,
      "grad_norm": 0.6472899913787842,
      "learning_rate": 9.976805706747622e-05,
      "loss": 3.5896,
      "step": 2220
    },
    {
      "epoch": 0.16406408063418493,
      "grad_norm": 0.6276665329933167,
      "learning_rate": 9.976424214411071e-05,
      "loss": 3.5907,
      "step": 2230
    },
    {
      "epoch": 0.1647997940002575,
      "grad_norm": 0.5503706932067871,
      "learning_rate": 9.976039617694328e-05,
      "loss": 3.6037,
      "step": 2240
    },
    {
      "epoch": 0.16553550736633008,
      "grad_norm": 0.5655608773231506,
      "learning_rate": 9.975651916837313e-05,
      "loss": 3.6157,
      "step": 2250
    },
    {
      "epoch": 0.16627122073240266,
      "grad_norm": 0.5357344746589661,
      "learning_rate": 9.975261112081879e-05,
      "loss": 3.583,
      "step": 2260
    },
    {
      "epoch": 0.16700693409847522,
      "grad_norm": 0.5251188278198242,
      "learning_rate": 9.974867203671819e-05,
      "loss": 3.5705,
      "step": 2270
    },
    {
      "epoch": 0.1677426474645478,
      "grad_norm": 0.5895013809204102,
      "learning_rate": 9.974470191852858e-05,
      "loss": 3.5648,
      "step": 2280
    },
    {
      "epoch": 0.1684783608306204,
      "grad_norm": 0.5837428569793701,
      "learning_rate": 9.974070076872662e-05,
      "loss": 3.5607,
      "step": 2290
    },
    {
      "epoch": 0.16921407419669296,
      "grad_norm": 0.5866957306861877,
      "learning_rate": 9.973666858980824e-05,
      "loss": 3.5689,
      "step": 2300
    },
    {
      "epoch": 0.16994978756276555,
      "grad_norm": 0.5927156209945679,
      "learning_rate": 9.973260538428884e-05,
      "loss": 3.5806,
      "step": 2310
    },
    {
      "epoch": 0.17068550092883814,
      "grad_norm": 0.5668119788169861,
      "learning_rate": 9.972851115470307e-05,
      "loss": 3.5247,
      "step": 2320
    },
    {
      "epoch": 0.1714212142949107,
      "grad_norm": 0.5907779335975647,
      "learning_rate": 9.972438590360503e-05,
      "loss": 3.5757,
      "step": 2330
    },
    {
      "epoch": 0.17215692766098328,
      "grad_norm": 0.5702281594276428,
      "learning_rate": 9.97202296335681e-05,
      "loss": 3.5698,
      "step": 2340
    },
    {
      "epoch": 0.17289264102705587,
      "grad_norm": 0.5785143375396729,
      "learning_rate": 9.971604234718504e-05,
      "loss": 3.5547,
      "step": 2350
    },
    {
      "epoch": 0.17362835439312843,
      "grad_norm": 0.5959532856941223,
      "learning_rate": 9.971182404706792e-05,
      "loss": 3.4787,
      "step": 2360
    },
    {
      "epoch": 0.17436406775920102,
      "grad_norm": 0.5923709869384766,
      "learning_rate": 9.970757473584824e-05,
      "loss": 3.5127,
      "step": 2370
    },
    {
      "epoch": 0.1750997811252736,
      "grad_norm": 0.6314843893051147,
      "learning_rate": 9.970329441617676e-05,
      "loss": 3.5123,
      "step": 2380
    },
    {
      "epoch": 0.17583549449134617,
      "grad_norm": 0.6579912304878235,
      "learning_rate": 9.969898309072363e-05,
      "loss": 3.5295,
      "step": 2390
    },
    {
      "epoch": 0.17657120785741875,
      "grad_norm": 0.5852422118186951,
      "learning_rate": 9.969464076217831e-05,
      "loss": 3.5761,
      "step": 2400
    },
    {
      "epoch": 0.17730692122349132,
      "grad_norm": 0.5590935945510864,
      "learning_rate": 9.969026743324967e-05,
      "loss": 3.5697,
      "step": 2410
    },
    {
      "epoch": 0.1780426345895639,
      "grad_norm": 0.5512566566467285,
      "learning_rate": 9.968586310666583e-05,
      "loss": 3.5069,
      "step": 2420
    },
    {
      "epoch": 0.1787783479556365,
      "grad_norm": 0.6014286279678345,
      "learning_rate": 9.968142778517429e-05,
      "loss": 3.5985,
      "step": 2430
    },
    {
      "epoch": 0.17951406132170905,
      "grad_norm": 0.5852993726730347,
      "learning_rate": 9.967696147154187e-05,
      "loss": 3.4978,
      "step": 2440
    },
    {
      "epoch": 0.18024977468778164,
      "grad_norm": 0.5915689468383789,
      "learning_rate": 9.967246416855475e-05,
      "loss": 3.5926,
      "step": 2450
    },
    {
      "epoch": 0.18098548805385423,
      "grad_norm": 0.5336611866950989,
      "learning_rate": 9.966793587901844e-05,
      "loss": 3.5764,
      "step": 2460
    },
    {
      "epoch": 0.1817212014199268,
      "grad_norm": 0.6106781959533691,
      "learning_rate": 9.966337660575775e-05,
      "loss": 3.5095,
      "step": 2470
    },
    {
      "epoch": 0.18245691478599937,
      "grad_norm": 0.5323026180267334,
      "learning_rate": 9.965878635161682e-05,
      "loss": 3.4732,
      "step": 2480
    },
    {
      "epoch": 0.18319262815207196,
      "grad_norm": 0.5882947444915771,
      "learning_rate": 9.965416511945915e-05,
      "loss": 3.56,
      "step": 2490
    },
    {
      "epoch": 0.18392834151814452,
      "grad_norm": 0.5978416800498962,
      "learning_rate": 9.964951291216755e-05,
      "loss": 3.4982,
      "step": 2500
    },
    {
      "epoch": 0.1846640548842171,
      "grad_norm": 0.5574873089790344,
      "learning_rate": 9.964482973264413e-05,
      "loss": 3.5024,
      "step": 2510
    },
    {
      "epoch": 0.1853997682502897,
      "grad_norm": 0.6445927023887634,
      "learning_rate": 9.964011558381035e-05,
      "loss": 3.4935,
      "step": 2520
    },
    {
      "epoch": 0.18613548161636226,
      "grad_norm": 0.5893427729606628,
      "learning_rate": 9.963537046860697e-05,
      "loss": 3.5344,
      "step": 2530
    },
    {
      "epoch": 0.18687119498243485,
      "grad_norm": 0.5860798954963684,
      "learning_rate": 9.963059438999409e-05,
      "loss": 3.5498,
      "step": 2540
    },
    {
      "epoch": 0.18760690834850743,
      "grad_norm": 0.5362831950187683,
      "learning_rate": 9.962578735095109e-05,
      "loss": 3.4655,
      "step": 2550
    },
    {
      "epoch": 0.18834262171458,
      "grad_norm": 0.5293241143226624,
      "learning_rate": 9.96209493544767e-05,
      "loss": 3.5236,
      "step": 2560
    },
    {
      "epoch": 0.18907833508065258,
      "grad_norm": 0.6069090366363525,
      "learning_rate": 9.961608040358894e-05,
      "loss": 3.5332,
      "step": 2570
    },
    {
      "epoch": 0.18981404844672514,
      "grad_norm": 0.5620616674423218,
      "learning_rate": 9.961118050132517e-05,
      "loss": 3.4838,
      "step": 2580
    },
    {
      "epoch": 0.19054976181279773,
      "grad_norm": 0.5729257464408875,
      "learning_rate": 9.9606249650742e-05,
      "loss": 3.4809,
      "step": 2590
    },
    {
      "epoch": 0.19128547517887032,
      "grad_norm": 0.5876139998435974,
      "learning_rate": 9.96012878549154e-05,
      "loss": 3.4935,
      "step": 2600
    },
    {
      "epoch": 0.19202118854494288,
      "grad_norm": 0.5324752330780029,
      "learning_rate": 9.959629511694061e-05,
      "loss": 3.5621,
      "step": 2610
    },
    {
      "epoch": 0.19275690191101547,
      "grad_norm": 0.5787373781204224,
      "learning_rate": 9.959127143993219e-05,
      "loss": 3.4896,
      "step": 2620
    },
    {
      "epoch": 0.19349261527708805,
      "grad_norm": 0.5609232783317566,
      "learning_rate": 9.958621682702403e-05,
      "loss": 3.4973,
      "step": 2630
    },
    {
      "epoch": 0.1942283286431606,
      "grad_norm": 0.5869426131248474,
      "learning_rate": 9.958113128136923e-05,
      "loss": 3.5203,
      "step": 2640
    },
    {
      "epoch": 0.1949640420092332,
      "grad_norm": 0.609747052192688,
      "learning_rate": 9.957601480614029e-05,
      "loss": 3.5297,
      "step": 2650
    },
    {
      "epoch": 0.1956997553753058,
      "grad_norm": 0.5092261433601379,
      "learning_rate": 9.957086740452894e-05,
      "loss": 3.4951,
      "step": 2660
    },
    {
      "epoch": 0.19643546874137835,
      "grad_norm": 0.5409265756607056,
      "learning_rate": 9.956568907974621e-05,
      "loss": 3.528,
      "step": 2670
    },
    {
      "epoch": 0.19717118210745094,
      "grad_norm": 0.5979306101799011,
      "learning_rate": 9.956047983502245e-05,
      "loss": 3.5356,
      "step": 2680
    },
    {
      "epoch": 0.19790689547352353,
      "grad_norm": 0.5668174624443054,
      "learning_rate": 9.955523967360725e-05,
      "loss": 3.5797,
      "step": 2690
    },
    {
      "epoch": 0.19864260883959609,
      "grad_norm": 0.5992650389671326,
      "learning_rate": 9.954996859876953e-05,
      "loss": 3.5107,
      "step": 2700
    },
    {
      "epoch": 0.19937832220566867,
      "grad_norm": 0.5725685358047485,
      "learning_rate": 9.954466661379748e-05,
      "loss": 3.5161,
      "step": 2710
    },
    {
      "epoch": 0.20011403557174126,
      "grad_norm": 0.5398104786872864,
      "learning_rate": 9.953933372199855e-05,
      "loss": 3.5191,
      "step": 2720
    },
    {
      "epoch": 0.20084974893781382,
      "grad_norm": 0.5120781064033508,
      "learning_rate": 9.953396992669953e-05,
      "loss": 3.4936,
      "step": 2730
    },
    {
      "epoch": 0.2015854623038864,
      "grad_norm": 0.5185467004776001,
      "learning_rate": 9.952857523124641e-05,
      "loss": 3.4757,
      "step": 2740
    },
    {
      "epoch": 0.202321175669959,
      "grad_norm": 0.6107959747314453,
      "learning_rate": 9.952314963900453e-05,
      "loss": 3.4972,
      "step": 2750
    },
    {
      "epoch": 0.20305688903603156,
      "grad_norm": 0.5769765377044678,
      "learning_rate": 9.951769315335844e-05,
      "loss": 3.4983,
      "step": 2760
    },
    {
      "epoch": 0.20379260240210414,
      "grad_norm": 0.5694507956504822,
      "learning_rate": 9.951220577771201e-05,
      "loss": 3.5063,
      "step": 2770
    },
    {
      "epoch": 0.2045283157681767,
      "grad_norm": 0.5544769763946533,
      "learning_rate": 9.950668751548835e-05,
      "loss": 3.507,
      "step": 2780
    },
    {
      "epoch": 0.2052640291342493,
      "grad_norm": 0.5395680665969849,
      "learning_rate": 9.950113837012984e-05,
      "loss": 3.5315,
      "step": 2790
    },
    {
      "epoch": 0.20599974250032188,
      "grad_norm": 0.575812816619873,
      "learning_rate": 9.949555834509817e-05,
      "loss": 3.4873,
      "step": 2800
    },
    {
      "epoch": 0.20673545586639444,
      "grad_norm": 0.5248579978942871,
      "learning_rate": 9.948994744387419e-05,
      "loss": 3.4849,
      "step": 2810
    },
    {
      "epoch": 0.20747116923246703,
      "grad_norm": 0.5287348031997681,
      "learning_rate": 9.948430566995816e-05,
      "loss": 3.5028,
      "step": 2820
    },
    {
      "epoch": 0.20820688259853962,
      "grad_norm": 0.5448064804077148,
      "learning_rate": 9.947863302686945e-05,
      "loss": 3.4801,
      "step": 2830
    },
    {
      "epoch": 0.20894259596461218,
      "grad_norm": 0.583583652973175,
      "learning_rate": 9.947292951814679e-05,
      "loss": 3.4899,
      "step": 2840
    },
    {
      "epoch": 0.20967830933068476,
      "grad_norm": 0.5897386074066162,
      "learning_rate": 9.946719514734813e-05,
      "loss": 3.5659,
      "step": 2850
    },
    {
      "epoch": 0.21041402269675735,
      "grad_norm": 0.5261275172233582,
      "learning_rate": 9.946142991805062e-05,
      "loss": 3.4662,
      "step": 2860
    },
    {
      "epoch": 0.2111497360628299,
      "grad_norm": 0.5870264768600464,
      "learning_rate": 9.945563383385079e-05,
      "loss": 3.5123,
      "step": 2870
    },
    {
      "epoch": 0.2118854494289025,
      "grad_norm": 0.5746486186981201,
      "learning_rate": 9.944980689836429e-05,
      "loss": 3.4351,
      "step": 2880
    },
    {
      "epoch": 0.2126211627949751,
      "grad_norm": 0.6177893877029419,
      "learning_rate": 9.944394911522606e-05,
      "loss": 3.5206,
      "step": 2890
    },
    {
      "epoch": 0.21335687616104765,
      "grad_norm": 0.5619845390319824,
      "learning_rate": 9.94380604880903e-05,
      "loss": 3.4801,
      "step": 2900
    },
    {
      "epoch": 0.21409258952712024,
      "grad_norm": 0.5628277063369751,
      "learning_rate": 9.943214102063043e-05,
      "loss": 3.5564,
      "step": 2910
    },
    {
      "epoch": 0.21482830289319282,
      "grad_norm": 0.5306517481803894,
      "learning_rate": 9.942619071653914e-05,
      "loss": 3.5094,
      "step": 2920
    },
    {
      "epoch": 0.21556401625926538,
      "grad_norm": 0.6212943196296692,
      "learning_rate": 9.942020957952831e-05,
      "loss": 3.5182,
      "step": 2930
    },
    {
      "epoch": 0.21629972962533797,
      "grad_norm": 0.5883834362030029,
      "learning_rate": 9.941419761332908e-05,
      "loss": 3.4715,
      "step": 2940
    },
    {
      "epoch": 0.21703544299141056,
      "grad_norm": 0.5104158520698547,
      "learning_rate": 9.940815482169184e-05,
      "loss": 3.4712,
      "step": 2950
    },
    {
      "epoch": 0.21777115635748312,
      "grad_norm": 0.5349141955375671,
      "learning_rate": 9.940208120838616e-05,
      "loss": 3.4517,
      "step": 2960
    },
    {
      "epoch": 0.2185068697235557,
      "grad_norm": 0.555456280708313,
      "learning_rate": 9.939597677720089e-05,
      "loss": 3.4984,
      "step": 2970
    },
    {
      "epoch": 0.21924258308962827,
      "grad_norm": 0.5867215991020203,
      "learning_rate": 9.938984153194406e-05,
      "loss": 3.4821,
      "step": 2980
    },
    {
      "epoch": 0.21997829645570086,
      "grad_norm": 0.5781378149986267,
      "learning_rate": 9.938367547644296e-05,
      "loss": 3.4437,
      "step": 2990
    },
    {
      "epoch": 0.22071400982177344,
      "grad_norm": 0.5571138858795166,
      "learning_rate": 9.937747861454407e-05,
      "loss": 3.4935,
      "step": 3000
    },
    {
      "epoch": 0.221449723187846,
      "grad_norm": 0.47858932614326477,
      "learning_rate": 9.93712509501131e-05,
      "loss": 3.499,
      "step": 3010
    },
    {
      "epoch": 0.2221854365539186,
      "grad_norm": 0.500126838684082,
      "learning_rate": 9.936499248703499e-05,
      "loss": 3.4822,
      "step": 3020
    },
    {
      "epoch": 0.22292114991999118,
      "grad_norm": 0.5339503288269043,
      "learning_rate": 9.935870322921387e-05,
      "loss": 3.5028,
      "step": 3030
    },
    {
      "epoch": 0.22365686328606374,
      "grad_norm": 0.5465017557144165,
      "learning_rate": 9.93523831805731e-05,
      "loss": 3.4338,
      "step": 3040
    },
    {
      "epoch": 0.22439257665213633,
      "grad_norm": 0.5432729125022888,
      "learning_rate": 9.934603234505519e-05,
      "loss": 3.4831,
      "step": 3050
    },
    {
      "epoch": 0.22512829001820892,
      "grad_norm": 0.5051527619361877,
      "learning_rate": 9.933965072662197e-05,
      "loss": 3.4473,
      "step": 3060
    },
    {
      "epoch": 0.22586400338428148,
      "grad_norm": 0.5150954127311707,
      "learning_rate": 9.933323832925437e-05,
      "loss": 3.4755,
      "step": 3070
    },
    {
      "epoch": 0.22659971675035406,
      "grad_norm": 0.5967200398445129,
      "learning_rate": 9.932679515695253e-05,
      "loss": 3.4938,
      "step": 3080
    },
    {
      "epoch": 0.22733543011642665,
      "grad_norm": 0.5499495267868042,
      "learning_rate": 9.932032121373587e-05,
      "loss": 3.5033,
      "step": 3090
    },
    {
      "epoch": 0.2280711434824992,
      "grad_norm": 0.502854585647583,
      "learning_rate": 9.931381650364291e-05,
      "loss": 3.3716,
      "step": 3100
    },
    {
      "epoch": 0.2288068568485718,
      "grad_norm": 0.5126801133155823,
      "learning_rate": 9.93072810307314e-05,
      "loss": 3.4727,
      "step": 3110
    },
    {
      "epoch": 0.2295425702146444,
      "grad_norm": 0.48045796155929565,
      "learning_rate": 9.930071479907831e-05,
      "loss": 3.4312,
      "step": 3120
    },
    {
      "epoch": 0.23027828358071695,
      "grad_norm": 0.5664313435554504,
      "learning_rate": 9.929411781277974e-05,
      "loss": 3.4225,
      "step": 3130
    },
    {
      "epoch": 0.23101399694678953,
      "grad_norm": 0.5236510634422302,
      "learning_rate": 9.928749007595101e-05,
      "loss": 3.4255,
      "step": 3140
    },
    {
      "epoch": 0.23174971031286212,
      "grad_norm": 0.5875115990638733,
      "learning_rate": 9.928083159272666e-05,
      "loss": 3.4699,
      "step": 3150
    },
    {
      "epoch": 0.23248542367893468,
      "grad_norm": 0.5420896410942078,
      "learning_rate": 9.92741423672603e-05,
      "loss": 3.3887,
      "step": 3160
    },
    {
      "epoch": 0.23322113704500727,
      "grad_norm": 0.5325738191604614,
      "learning_rate": 9.926742240372483e-05,
      "loss": 3.4807,
      "step": 3170
    },
    {
      "epoch": 0.23395685041107983,
      "grad_norm": 0.5513830780982971,
      "learning_rate": 9.926067170631227e-05,
      "loss": 3.4647,
      "step": 3180
    },
    {
      "epoch": 0.23469256377715242,
      "grad_norm": 0.5124702453613281,
      "learning_rate": 9.925389027923382e-05,
      "loss": 3.4392,
      "step": 3190
    },
    {
      "epoch": 0.235428277143225,
      "grad_norm": 0.532706081867218,
      "learning_rate": 9.924707812671985e-05,
      "loss": 3.4456,
      "step": 3200
    },
    {
      "epoch": 0.23616399050929757,
      "grad_norm": 0.5108603835105896,
      "learning_rate": 9.924023525301991e-05,
      "loss": 3.4621,
      "step": 3210
    },
    {
      "epoch": 0.23689970387537015,
      "grad_norm": 0.5478849411010742,
      "learning_rate": 9.92333616624027e-05,
      "loss": 3.4602,
      "step": 3220
    },
    {
      "epoch": 0.23763541724144274,
      "grad_norm": 0.481143057346344,
      "learning_rate": 9.922645735915608e-05,
      "loss": 3.4647,
      "step": 3230
    },
    {
      "epoch": 0.2383711306075153,
      "grad_norm": 0.5544288754463196,
      "learning_rate": 9.921952234758709e-05,
      "loss": 3.4312,
      "step": 3240
    },
    {
      "epoch": 0.2391068439735879,
      "grad_norm": 0.5568479299545288,
      "learning_rate": 9.921255663202189e-05,
      "loss": 3.4306,
      "step": 3250
    },
    {
      "epoch": 0.23984255733966048,
      "grad_norm": 0.5197471380233765,
      "learning_rate": 9.92055602168058e-05,
      "loss": 3.4488,
      "step": 3260
    },
    {
      "epoch": 0.24057827070573304,
      "grad_norm": 0.5361900329589844,
      "learning_rate": 9.919853310630336e-05,
      "loss": 3.395,
      "step": 3270
    },
    {
      "epoch": 0.24131398407180563,
      "grad_norm": 0.5017499327659607,
      "learning_rate": 9.919147530489816e-05,
      "loss": 3.4654,
      "step": 3280
    },
    {
      "epoch": 0.24204969743787821,
      "grad_norm": 0.5076097846031189,
      "learning_rate": 9.9184386816993e-05,
      "loss": 3.3991,
      "step": 3290
    },
    {
      "epoch": 0.24278541080395077,
      "grad_norm": 0.5834246873855591,
      "learning_rate": 9.917726764700981e-05,
      "loss": 3.4058,
      "step": 3300
    },
    {
      "epoch": 0.24352112417002336,
      "grad_norm": 0.5632571578025818,
      "learning_rate": 9.917011779938961e-05,
      "loss": 3.4442,
      "step": 3310
    },
    {
      "epoch": 0.24425683753609595,
      "grad_norm": 0.5473082065582275,
      "learning_rate": 9.916293727859265e-05,
      "loss": 3.4536,
      "step": 3320
    },
    {
      "epoch": 0.2449925509021685,
      "grad_norm": 0.5029418468475342,
      "learning_rate": 9.915572608909824e-05,
      "loss": 3.4263,
      "step": 3330
    },
    {
      "epoch": 0.2457282642682411,
      "grad_norm": 0.5429378151893616,
      "learning_rate": 9.914848423540483e-05,
      "loss": 3.4652,
      "step": 3340
    },
    {
      "epoch": 0.24646397763431366,
      "grad_norm": 0.5347001552581787,
      "learning_rate": 9.914121172203005e-05,
      "loss": 3.4009,
      "step": 3350
    },
    {
      "epoch": 0.24719969100038625,
      "grad_norm": 0.538618266582489,
      "learning_rate": 9.913390855351058e-05,
      "loss": 3.457,
      "step": 3360
    },
    {
      "epoch": 0.24793540436645883,
      "grad_norm": 0.5618345141410828,
      "learning_rate": 9.912657473440232e-05,
      "loss": 3.4588,
      "step": 3370
    },
    {
      "epoch": 0.2486711177325314,
      "grad_norm": 0.5444225072860718,
      "learning_rate": 9.911921026928019e-05,
      "loss": 3.4401,
      "step": 3380
    },
    {
      "epoch": 0.24940683109860398,
      "grad_norm": 0.5804809927940369,
      "learning_rate": 9.911181516273826e-05,
      "loss": 3.3904,
      "step": 3390
    },
    {
      "epoch": 0.25014254446467654,
      "grad_norm": 0.5047810673713684,
      "learning_rate": 9.910438941938978e-05,
      "loss": 3.42,
      "step": 3400
    },
    {
      "epoch": 0.25087825783074913,
      "grad_norm": 0.5213153958320618,
      "learning_rate": 9.9096933043867e-05,
      "loss": 3.4572,
      "step": 3410
    },
    {
      "epoch": 0.2516139711968217,
      "grad_norm": 0.49462226033210754,
      "learning_rate": 9.908944604082138e-05,
      "loss": 3.432,
      "step": 3420
    },
    {
      "epoch": 0.2523496845628943,
      "grad_norm": 0.5353688597679138,
      "learning_rate": 9.908192841492343e-05,
      "loss": 3.4174,
      "step": 3430
    },
    {
      "epoch": 0.2530853979289669,
      "grad_norm": 0.4931166470050812,
      "learning_rate": 9.907438017086277e-05,
      "loss": 3.4175,
      "step": 3440
    },
    {
      "epoch": 0.2538211112950394,
      "grad_norm": 0.5079531669616699,
      "learning_rate": 9.906680131334813e-05,
      "loss": 3.478,
      "step": 3450
    },
    {
      "epoch": 0.254556824661112,
      "grad_norm": 0.5377930402755737,
      "learning_rate": 9.905919184710733e-05,
      "loss": 3.4353,
      "step": 3460
    },
    {
      "epoch": 0.2552925380271846,
      "grad_norm": 0.5525156855583191,
      "learning_rate": 9.90515517768873e-05,
      "loss": 3.4084,
      "step": 3470
    },
    {
      "epoch": 0.2560282513932572,
      "grad_norm": 0.5057874321937561,
      "learning_rate": 9.904388110745403e-05,
      "loss": 3.4503,
      "step": 3480
    },
    {
      "epoch": 0.2567639647593298,
      "grad_norm": 0.5415982007980347,
      "learning_rate": 9.903617984359263e-05,
      "loss": 3.412,
      "step": 3490
    },
    {
      "epoch": 0.25749967812540236,
      "grad_norm": 0.5116530656814575,
      "learning_rate": 9.902844799010729e-05,
      "loss": 3.3947,
      "step": 3500
    },
    {
      "epoch": 0.2582353914914749,
      "grad_norm": 0.49354326725006104,
      "learning_rate": 9.902068555182124e-05,
      "loss": 3.4183,
      "step": 3510
    },
    {
      "epoch": 0.2589711048575475,
      "grad_norm": 0.5207719206809998,
      "learning_rate": 9.901289253357688e-05,
      "loss": 3.4225,
      "step": 3520
    },
    {
      "epoch": 0.2597068182236201,
      "grad_norm": 0.5140487551689148,
      "learning_rate": 9.900506894023558e-05,
      "loss": 3.4375,
      "step": 3530
    },
    {
      "epoch": 0.26044253158969266,
      "grad_norm": 0.5068781971931458,
      "learning_rate": 9.899721477667785e-05,
      "loss": 3.3917,
      "step": 3540
    },
    {
      "epoch": 0.26117824495576525,
      "grad_norm": 0.5903786420822144,
      "learning_rate": 9.898933004780328e-05,
      "loss": 3.4376,
      "step": 3550
    },
    {
      "epoch": 0.26191395832183784,
      "grad_norm": 0.5345304012298584,
      "learning_rate": 9.898141475853046e-05,
      "loss": 3.4211,
      "step": 3560
    },
    {
      "epoch": 0.26264967168791037,
      "grad_norm": 0.5153301954269409,
      "learning_rate": 9.89734689137971e-05,
      "loss": 3.4378,
      "step": 3570
    },
    {
      "epoch": 0.26338538505398296,
      "grad_norm": 0.48121339082717896,
      "learning_rate": 9.896549251855998e-05,
      "loss": 3.4369,
      "step": 3580
    },
    {
      "epoch": 0.26412109842005554,
      "grad_norm": 0.49092957377433777,
      "learning_rate": 9.89574855777949e-05,
      "loss": 3.3808,
      "step": 3590
    },
    {
      "epoch": 0.26485681178612813,
      "grad_norm": 0.49230051040649414,
      "learning_rate": 9.894944809649671e-05,
      "loss": 3.4153,
      "step": 3600
    },
    {
      "epoch": 0.2655925251522007,
      "grad_norm": 0.542644739151001,
      "learning_rate": 9.894138007967935e-05,
      "loss": 3.4127,
      "step": 3610
    },
    {
      "epoch": 0.2663282385182733,
      "grad_norm": 0.49647256731987,
      "learning_rate": 9.893328153237578e-05,
      "loss": 3.3944,
      "step": 3620
    },
    {
      "epoch": 0.26706395188434584,
      "grad_norm": 0.4804913103580475,
      "learning_rate": 9.892515245963803e-05,
      "loss": 3.4299,
      "step": 3630
    },
    {
      "epoch": 0.26779966525041843,
      "grad_norm": 0.5026209354400635,
      "learning_rate": 9.891699286653714e-05,
      "loss": 3.4199,
      "step": 3640
    },
    {
      "epoch": 0.268535378616491,
      "grad_norm": 0.5007442235946655,
      "learning_rate": 9.890880275816322e-05,
      "loss": 3.4058,
      "step": 3650
    },
    {
      "epoch": 0.2692710919825636,
      "grad_norm": 0.49342918395996094,
      "learning_rate": 9.890058213962538e-05,
      "loss": 3.408,
      "step": 3660
    },
    {
      "epoch": 0.2700068053486362,
      "grad_norm": 0.48717033863067627,
      "learning_rate": 9.889233101605184e-05,
      "loss": 3.3731,
      "step": 3670
    },
    {
      "epoch": 0.2707425187147087,
      "grad_norm": 0.5098682045936584,
      "learning_rate": 9.888404939258973e-05,
      "loss": 3.3768,
      "step": 3680
    },
    {
      "epoch": 0.2714782320807813,
      "grad_norm": 0.48432040214538574,
      "learning_rate": 9.88757372744053e-05,
      "loss": 3.4547,
      "step": 3690
    },
    {
      "epoch": 0.2722139454468539,
      "grad_norm": 0.487289160490036,
      "learning_rate": 9.886739466668379e-05,
      "loss": 3.4094,
      "step": 3700
    },
    {
      "epoch": 0.2729496588129265,
      "grad_norm": 0.5253422260284424,
      "learning_rate": 9.885902157462948e-05,
      "loss": 3.445,
      "step": 3710
    },
    {
      "epoch": 0.2736853721789991,
      "grad_norm": 0.5656958222389221,
      "learning_rate": 9.885061800346563e-05,
      "loss": 3.3925,
      "step": 3720
|
}, |
|
{ |
|
"epoch": 0.27442108554507166, |
|
"grad_norm": 0.5607571601867676, |
|
"learning_rate": 9.884218395843452e-05, |
|
"loss": 3.386, |
|
"step": 3730 |
|
}, |
|
{ |
|
"epoch": 0.2751567989111442, |
|
"grad_norm": 0.48992305994033813, |
|
"learning_rate": 9.883371944479749e-05, |
|
"loss": 3.4261, |
|
"step": 3740 |
|
}, |
|
{ |
|
"epoch": 0.2758925122772168, |
|
"grad_norm": 0.5179588794708252, |
|
"learning_rate": 9.882522446783484e-05, |
|
"loss": 3.3555, |
|
"step": 3750 |
|
}, |
|
{ |
|
"epoch": 0.27662822564328937, |
|
"grad_norm": 0.5124242305755615, |
|
"learning_rate": 9.88166990328459e-05, |
|
"loss": 3.4095, |
|
"step": 3760 |
|
}, |
|
{ |
|
"epoch": 0.27736393900936196, |
|
"grad_norm": 0.5088645815849304, |
|
"learning_rate": 9.880814314514894e-05, |
|
"loss": 3.4152, |
|
"step": 3770 |
|
}, |
|
{ |
|
"epoch": 0.27809965237543455, |
|
"grad_norm": 0.5194174647331238, |
|
"learning_rate": 9.87995568100813e-05, |
|
"loss": 3.3998, |
|
"step": 3780 |
|
}, |
|
{ |
|
"epoch": 0.27883536574150714, |
|
"grad_norm": 0.5403995513916016, |
|
"learning_rate": 9.879094003299928e-05, |
|
"loss": 3.3553, |
|
"step": 3790 |
|
}, |
|
{ |
|
"epoch": 0.27957107910757967, |
|
"grad_norm": 0.5247926712036133, |
|
"learning_rate": 9.87822928192782e-05, |
|
"loss": 3.4321, |
|
"step": 3800 |
|
}, |
|
{ |
|
"epoch": 0.28030679247365226, |
|
"grad_norm": 0.5156255960464478, |
|
"learning_rate": 9.877361517431231e-05, |
|
"loss": 3.3851, |
|
"step": 3810 |
|
}, |
|
{ |
|
"epoch": 0.28104250583972484, |
|
"grad_norm": 0.4857509732246399, |
|
"learning_rate": 9.876490710351489e-05, |
|
"loss": 3.3663, |
|
"step": 3820 |
|
}, |
|
{ |
|
"epoch": 0.28177821920579743, |
|
"grad_norm": 0.5125151872634888, |
|
"learning_rate": 9.875616861231819e-05, |
|
"loss": 3.3637, |
|
"step": 3830 |
|
}, |
|
{ |
|
"epoch": 0.28251393257187, |
|
"grad_norm": 0.47020360827445984, |
|
"learning_rate": 9.874739970617341e-05, |
|
"loss": 3.4038, |
|
"step": 3840 |
|
}, |
|
{ |
|
"epoch": 0.28324964593794255, |
|
"grad_norm": 0.48255953192710876, |
|
"learning_rate": 9.87386003905508e-05, |
|
"loss": 3.3459, |
|
"step": 3850 |
|
}, |
|
{ |
|
"epoch": 0.28398535930401514, |
|
"grad_norm": 0.47846198081970215, |
|
"learning_rate": 9.872977067093947e-05, |
|
"loss": 3.376, |
|
"step": 3860 |
|
}, |
|
{ |
|
"epoch": 0.2847210726700877, |
|
"grad_norm": 0.5100752115249634, |
|
"learning_rate": 9.872091055284756e-05, |
|
"loss": 3.3609, |
|
"step": 3870 |
|
}, |
|
{ |
|
"epoch": 0.2854567860361603, |
|
"grad_norm": 0.5000672936439514, |
|
"learning_rate": 9.87120200418022e-05, |
|
"loss": 3.4512, |
|
"step": 3880 |
|
}, |
|
{ |
|
"epoch": 0.2861924994022329, |
|
"grad_norm": 0.5057271718978882, |
|
"learning_rate": 9.870309914334942e-05, |
|
"loss": 3.3253, |
|
"step": 3890 |
|
}, |
|
{ |
|
"epoch": 0.2869282127683055, |
|
"grad_norm": 0.5031580328941345, |
|
"learning_rate": 9.869414786305424e-05, |
|
"loss": 3.3809, |
|
"step": 3900 |
|
}, |
|
{ |
|
"epoch": 0.287663926134378, |
|
"grad_norm": 0.49000057578086853, |
|
"learning_rate": 9.868516620650062e-05, |
|
"loss": 3.4302, |
|
"step": 3910 |
|
}, |
|
{ |
|
"epoch": 0.2883996395004506, |
|
"grad_norm": 0.4677979052066803, |
|
"learning_rate": 9.867615417929147e-05, |
|
"loss": 3.3782, |
|
"step": 3920 |
|
}, |
|
{ |
|
"epoch": 0.2891353528665232, |
|
"grad_norm": 0.4826011061668396, |
|
"learning_rate": 9.866711178704869e-05, |
|
"loss": 3.3509, |
|
"step": 3930 |
|
}, |
|
{ |
|
"epoch": 0.2898710662325958, |
|
"grad_norm": 0.5133295655250549, |
|
"learning_rate": 9.865803903541301e-05, |
|
"loss": 3.4039, |
|
"step": 3940 |
|
}, |
|
{ |
|
"epoch": 0.2906067795986684, |
|
"grad_norm": 0.5011733174324036, |
|
"learning_rate": 9.864893593004422e-05, |
|
"loss": 3.3433, |
|
"step": 3950 |
|
}, |
|
{ |
|
"epoch": 0.29134249296474096, |
|
"grad_norm": 0.49504631757736206, |
|
"learning_rate": 9.863980247662099e-05, |
|
"loss": 3.3795, |
|
"step": 3960 |
|
}, |
|
{ |
|
"epoch": 0.2920782063308135, |
|
"grad_norm": 0.5216391682624817, |
|
"learning_rate": 9.863063868084093e-05, |
|
"loss": 3.4249, |
|
"step": 3970 |
|
}, |
|
{ |
|
"epoch": 0.2928139196968861, |
|
"grad_norm": 0.5909170508384705, |
|
"learning_rate": 9.862144454842055e-05, |
|
"loss": 3.341, |
|
"step": 3980 |
|
}, |
|
{ |
|
"epoch": 0.29354963306295867, |
|
"grad_norm": 0.5585739016532898, |
|
"learning_rate": 9.861222008509534e-05, |
|
"loss": 3.4334, |
|
"step": 3990 |
|
}, |
|
{ |
|
"epoch": 0.29428534642903126, |
|
"grad_norm": 0.5350683927536011, |
|
"learning_rate": 9.860296529661966e-05, |
|
"loss": 3.4152, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 0.29502105979510385, |
|
"grad_norm": 0.4655880630016327, |
|
"learning_rate": 9.859368018876682e-05, |
|
"loss": 3.4511, |
|
"step": 4010 |
|
}, |
|
{ |
|
"epoch": 0.2957567731611764, |
|
"grad_norm": 0.483699232339859, |
|
"learning_rate": 9.858436476732904e-05, |
|
"loss": 3.3366, |
|
"step": 4020 |
|
}, |
|
{ |
|
"epoch": 0.29649248652724897, |
|
"grad_norm": 0.49257728457450867, |
|
"learning_rate": 9.857501903811742e-05, |
|
"loss": 3.3481, |
|
"step": 4030 |
|
}, |
|
{ |
|
"epoch": 0.29722819989332155, |
|
"grad_norm": 0.46038171648979187, |
|
"learning_rate": 9.856564300696201e-05, |
|
"loss": 3.4007, |
|
"step": 4040 |
|
}, |
|
{ |
|
"epoch": 0.29796391325939414, |
|
"grad_norm": 0.5034530758857727, |
|
"learning_rate": 9.855623667971173e-05, |
|
"loss": 3.3658, |
|
"step": 4050 |
|
}, |
|
{ |
|
"epoch": 0.29869962662546673, |
|
"grad_norm": 0.466950386762619, |
|
"learning_rate": 9.854680006223441e-05, |
|
"loss": 3.397, |
|
"step": 4060 |
|
}, |
|
{ |
|
"epoch": 0.2994353399915393, |
|
"grad_norm": 0.5001063346862793, |
|
"learning_rate": 9.853733316041678e-05, |
|
"loss": 3.3742, |
|
"step": 4070 |
|
}, |
|
{ |
|
"epoch": 0.30017105335761185, |
|
"grad_norm": 0.48969554901123047, |
|
"learning_rate": 9.852783598016446e-05, |
|
"loss": 3.3504, |
|
"step": 4080 |
|
}, |
|
{ |
|
"epoch": 0.30090676672368444, |
|
"grad_norm": 0.4851774573326111, |
|
"learning_rate": 9.851830852740196e-05, |
|
"loss": 3.3486, |
|
"step": 4090 |
|
}, |
|
{ |
|
"epoch": 0.301642480089757, |
|
"grad_norm": 0.5046120882034302, |
|
"learning_rate": 9.850875080807267e-05, |
|
"loss": 3.3826, |
|
"step": 4100 |
|
}, |
|
{ |
|
"epoch": 0.3023781934558296, |
|
"grad_norm": 0.5258597135543823, |
|
"learning_rate": 9.849916282813885e-05, |
|
"loss": 3.4172, |
|
"step": 4110 |
|
}, |
|
{ |
|
"epoch": 0.3031139068219022, |
|
"grad_norm": 0.5025837421417236, |
|
"learning_rate": 9.848954459358167e-05, |
|
"loss": 3.3501, |
|
"step": 4120 |
|
}, |
|
{ |
|
"epoch": 0.3038496201879748, |
|
"grad_norm": 0.47234296798706055, |
|
"learning_rate": 9.847989611040111e-05, |
|
"loss": 3.3812, |
|
"step": 4130 |
|
}, |
|
{ |
|
"epoch": 0.3045853335540473, |
|
"grad_norm": 0.46150118112564087, |
|
"learning_rate": 9.847021738461611e-05, |
|
"loss": 3.3761, |
|
"step": 4140 |
|
}, |
|
{ |
|
"epoch": 0.3053210469201199, |
|
"grad_norm": 0.4842502474784851, |
|
"learning_rate": 9.846050842226442e-05, |
|
"loss": 3.3183, |
|
"step": 4150 |
|
}, |
|
{ |
|
"epoch": 0.3060567602861925, |
|
"grad_norm": 0.48497274518013, |
|
"learning_rate": 9.845076922940264e-05, |
|
"loss": 3.3959, |
|
"step": 4160 |
|
}, |
|
{ |
|
"epoch": 0.3067924736522651, |
|
"grad_norm": 0.4537655711174011, |
|
"learning_rate": 9.844099981210625e-05, |
|
"loss": 3.3764, |
|
"step": 4170 |
|
}, |
|
{ |
|
"epoch": 0.3075281870183377, |
|
"grad_norm": 0.46067747473716736, |
|
"learning_rate": 9.843120017646959e-05, |
|
"loss": 3.3569, |
|
"step": 4180 |
|
}, |
|
{ |
|
"epoch": 0.30826390038441026, |
|
"grad_norm": 0.4854758083820343, |
|
"learning_rate": 9.842137032860586e-05, |
|
"loss": 3.4087, |
|
"step": 4190 |
|
}, |
|
{ |
|
"epoch": 0.3089996137504828, |
|
"grad_norm": 0.4684829115867615, |
|
"learning_rate": 9.841151027464707e-05, |
|
"loss": 3.4011, |
|
"step": 4200 |
|
}, |
|
{ |
|
"epoch": 0.3097353271165554, |
|
"grad_norm": 0.46631598472595215, |
|
"learning_rate": 9.840162002074409e-05, |
|
"loss": 3.4003, |
|
"step": 4210 |
|
}, |
|
{ |
|
"epoch": 0.31047104048262797, |
|
"grad_norm": 0.4732069671154022, |
|
"learning_rate": 9.839169957306664e-05, |
|
"loss": 3.3787, |
|
"step": 4220 |
|
}, |
|
{ |
|
"epoch": 0.31120675384870056, |
|
"grad_norm": 0.5108915567398071, |
|
"learning_rate": 9.838174893780329e-05, |
|
"loss": 3.3448, |
|
"step": 4230 |
|
}, |
|
{ |
|
"epoch": 0.31194246721477314, |
|
"grad_norm": 0.48219287395477295, |
|
"learning_rate": 9.837176812116137e-05, |
|
"loss": 3.4146, |
|
"step": 4240 |
|
}, |
|
{ |
|
"epoch": 0.3126781805808457, |
|
"grad_norm": 0.48349088430404663, |
|
"learning_rate": 9.836175712936715e-05, |
|
"loss": 3.3261, |
|
"step": 4250 |
|
}, |
|
{ |
|
"epoch": 0.31341389394691826, |
|
"grad_norm": 0.46656161546707153, |
|
"learning_rate": 9.83517159686656e-05, |
|
"loss": 3.3588, |
|
"step": 4260 |
|
}, |
|
{ |
|
"epoch": 0.31414960731299085, |
|
"grad_norm": 0.5095760822296143, |
|
"learning_rate": 9.83416446453206e-05, |
|
"loss": 3.3572, |
|
"step": 4270 |
|
}, |
|
{ |
|
"epoch": 0.31488532067906344, |
|
"grad_norm": 0.4995076358318329, |
|
"learning_rate": 9.833154316561484e-05, |
|
"loss": 3.3413, |
|
"step": 4280 |
|
}, |
|
{ |
|
"epoch": 0.31562103404513603, |
|
"grad_norm": 0.5134519338607788, |
|
"learning_rate": 9.832141153584976e-05, |
|
"loss": 3.3933, |
|
"step": 4290 |
|
}, |
|
{ |
|
"epoch": 0.3163567474112086, |
|
"grad_norm": 0.49631309509277344, |
|
"learning_rate": 9.831124976234567e-05, |
|
"loss": 3.391, |
|
"step": 4300 |
|
}, |
|
{ |
|
"epoch": 0.31709246077728115, |
|
"grad_norm": 0.5069088339805603, |
|
"learning_rate": 9.830105785144163e-05, |
|
"loss": 3.4392, |
|
"step": 4310 |
|
}, |
|
{ |
|
"epoch": 0.31782817414335374, |
|
"grad_norm": 0.46999242901802063, |
|
"learning_rate": 9.829083580949558e-05, |
|
"loss": 3.3529, |
|
"step": 4320 |
|
}, |
|
{ |
|
"epoch": 0.3185638875094263, |
|
"grad_norm": 0.5165208578109741, |
|
"learning_rate": 9.828058364288419e-05, |
|
"loss": 3.3442, |
|
"step": 4330 |
|
}, |
|
{ |
|
"epoch": 0.3192996008754989, |
|
"grad_norm": 0.5024532675743103, |
|
"learning_rate": 9.827030135800292e-05, |
|
"loss": 3.3333, |
|
"step": 4340 |
|
}, |
|
{ |
|
"epoch": 0.3200353142415715, |
|
"grad_norm": 0.4881153106689453, |
|
"learning_rate": 9.825998896126605e-05, |
|
"loss": 3.3771, |
|
"step": 4350 |
|
}, |
|
{ |
|
"epoch": 0.3207710276076441, |
|
"grad_norm": 0.4938250780105591, |
|
"learning_rate": 9.824964645910664e-05, |
|
"loss": 3.3848, |
|
"step": 4360 |
|
}, |
|
{ |
|
"epoch": 0.3215067409737166, |
|
"grad_norm": 0.512721836566925, |
|
"learning_rate": 9.82392738579765e-05, |
|
"loss": 3.3882, |
|
"step": 4370 |
|
}, |
|
{ |
|
"epoch": 0.3222424543397892, |
|
"grad_norm": 0.46891894936561584, |
|
"learning_rate": 9.822887116434629e-05, |
|
"loss": 3.4072, |
|
"step": 4380 |
|
}, |
|
{ |
|
"epoch": 0.3229781677058618, |
|
"grad_norm": 0.5028683543205261, |
|
"learning_rate": 9.821843838470534e-05, |
|
"loss": 3.3296, |
|
"step": 4390 |
|
}, |
|
{ |
|
"epoch": 0.3237138810719344, |
|
"grad_norm": 0.5311598777770996, |
|
"learning_rate": 9.820797552556184e-05, |
|
"loss": 3.3729, |
|
"step": 4400 |
|
}, |
|
{ |
|
"epoch": 0.32444959443800697, |
|
"grad_norm": 0.4591037631034851, |
|
"learning_rate": 9.819748259344266e-05, |
|
"loss": 3.3593, |
|
"step": 4410 |
|
}, |
|
{ |
|
"epoch": 0.3251853078040795, |
|
"grad_norm": 0.46094292402267456, |
|
"learning_rate": 9.818695959489351e-05, |
|
"loss": 3.3822, |
|
"step": 4420 |
|
}, |
|
{ |
|
"epoch": 0.3259210211701521, |
|
"grad_norm": 0.5148822665214539, |
|
"learning_rate": 9.81764065364788e-05, |
|
"loss": 3.3965, |
|
"step": 4430 |
|
}, |
|
{ |
|
"epoch": 0.3266567345362247, |
|
"grad_norm": 0.5029377937316895, |
|
"learning_rate": 9.816582342478173e-05, |
|
"loss": 3.3036, |
|
"step": 4440 |
|
}, |
|
{ |
|
"epoch": 0.32739244790229727, |
|
"grad_norm": 0.46696317195892334, |
|
"learning_rate": 9.815521026640422e-05, |
|
"loss": 3.3496, |
|
"step": 4450 |
|
}, |
|
{ |
|
"epoch": 0.32812816126836986, |
|
"grad_norm": 0.49031320214271545, |
|
"learning_rate": 9.814456706796693e-05, |
|
"loss": 3.4073, |
|
"step": 4460 |
|
}, |
|
{ |
|
"epoch": 0.32886387463444244, |
|
"grad_norm": 0.49924179911613464, |
|
"learning_rate": 9.81338938361093e-05, |
|
"loss": 3.3481, |
|
"step": 4470 |
|
}, |
|
{ |
|
"epoch": 0.329599588000515, |
|
"grad_norm": 0.505862295627594, |
|
"learning_rate": 9.812319057748944e-05, |
|
"loss": 3.3617, |
|
"step": 4480 |
|
}, |
|
{ |
|
"epoch": 0.33033530136658756, |
|
"grad_norm": 0.5051801204681396, |
|
"learning_rate": 9.811245729878426e-05, |
|
"loss": 3.3493, |
|
"step": 4490 |
|
}, |
|
{ |
|
"epoch": 0.33107101473266015, |
|
"grad_norm": 0.4463536739349365, |
|
"learning_rate": 9.810169400668938e-05, |
|
"loss": 3.3342, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 0.33180672809873274, |
|
"grad_norm": 0.4612107276916504, |
|
"learning_rate": 9.809090070791909e-05, |
|
"loss": 3.391, |
|
"step": 4510 |
|
}, |
|
{ |
|
"epoch": 0.3325424414648053, |
|
"grad_norm": 0.48259028792381287, |
|
"learning_rate": 9.808007740920646e-05, |
|
"loss": 3.3383, |
|
"step": 4520 |
|
}, |
|
{ |
|
"epoch": 0.3332781548308779, |
|
"grad_norm": 0.4375728666782379, |
|
"learning_rate": 9.806922411730323e-05, |
|
"loss": 3.361, |
|
"step": 4530 |
|
}, |
|
{ |
|
"epoch": 0.33401386819695045, |
|
"grad_norm": 0.5056487917900085, |
|
"learning_rate": 9.805834083897993e-05, |
|
"loss": 3.3882, |
|
"step": 4540 |
|
}, |
|
{ |
|
"epoch": 0.33474958156302304, |
|
"grad_norm": 0.5022489428520203, |
|
"learning_rate": 9.804742758102567e-05, |
|
"loss": 3.3858, |
|
"step": 4550 |
|
}, |
|
{ |
|
"epoch": 0.3354852949290956, |
|
"grad_norm": 0.5145370364189148, |
|
"learning_rate": 9.803648435024837e-05, |
|
"loss": 3.3201, |
|
"step": 4560 |
|
}, |
|
{ |
|
"epoch": 0.3362210082951682, |
|
"grad_norm": 0.905788004398346, |
|
"learning_rate": 9.80255111534746e-05, |
|
"loss": 3.3431, |
|
"step": 4570 |
|
}, |
|
{ |
|
"epoch": 0.3369567216612408, |
|
"grad_norm": 0.456967294216156, |
|
"learning_rate": 9.801450799754963e-05, |
|
"loss": 3.3356, |
|
"step": 4580 |
|
}, |
|
{ |
|
"epoch": 0.33769243502731333, |
|
"grad_norm": 0.47754788398742676, |
|
"learning_rate": 9.800347488933743e-05, |
|
"loss": 3.3502, |
|
"step": 4590 |
|
}, |
|
{ |
|
"epoch": 0.3384281483933859, |
|
"grad_norm": 0.474417507648468, |
|
"learning_rate": 9.799241183572062e-05, |
|
"loss": 3.38, |
|
"step": 4600 |
|
}, |
|
{ |
|
"epoch": 0.3391638617594585, |
|
"grad_norm": 0.453109472990036, |
|
"learning_rate": 9.798131884360054e-05, |
|
"loss": 3.3487, |
|
"step": 4610 |
|
}, |
|
{ |
|
"epoch": 0.3398995751255311, |
|
"grad_norm": 0.4781600832939148, |
|
"learning_rate": 9.797019591989721e-05, |
|
"loss": 3.3442, |
|
"step": 4620 |
|
}, |
|
{ |
|
"epoch": 0.3406352884916037, |
|
"grad_norm": 0.4538155794143677, |
|
"learning_rate": 9.795904307154927e-05, |
|
"loss": 3.3541, |
|
"step": 4630 |
|
}, |
|
{ |
|
"epoch": 0.34137100185767627, |
|
"grad_norm": 0.45867714285850525, |
|
"learning_rate": 9.794786030551409e-05, |
|
"loss": 3.3293, |
|
"step": 4640 |
|
}, |
|
{ |
|
"epoch": 0.3421067152237488, |
|
"grad_norm": 0.455285906791687, |
|
"learning_rate": 9.793664762876766e-05, |
|
"loss": 3.3302, |
|
"step": 4650 |
|
}, |
|
{ |
|
"epoch": 0.3428424285898214, |
|
"grad_norm": 0.4696787893772125, |
|
"learning_rate": 9.792540504830465e-05, |
|
"loss": 3.337, |
|
"step": 4660 |
|
}, |
|
{ |
|
"epoch": 0.343578141955894, |
|
"grad_norm": 0.4765075147151947, |
|
"learning_rate": 9.791413257113838e-05, |
|
"loss": 3.3768, |
|
"step": 4670 |
|
}, |
|
{ |
|
"epoch": 0.34431385532196657, |
|
"grad_norm": 0.4819774031639099, |
|
"learning_rate": 9.790283020430079e-05, |
|
"loss": 3.3431, |
|
"step": 4680 |
|
}, |
|
{ |
|
"epoch": 0.34504956868803915, |
|
"grad_norm": 0.47870171070098877, |
|
"learning_rate": 9.789149795484252e-05, |
|
"loss": 3.34, |
|
"step": 4690 |
|
}, |
|
{ |
|
"epoch": 0.34578528205411174, |
|
"grad_norm": 0.4422255754470825, |
|
"learning_rate": 9.788013582983285e-05, |
|
"loss": 3.317, |
|
"step": 4700 |
|
}, |
|
{ |
|
"epoch": 0.3465209954201843, |
|
"grad_norm": 0.4485225975513458, |
|
"learning_rate": 9.78687438363596e-05, |
|
"loss": 3.3314, |
|
"step": 4710 |
|
}, |
|
{ |
|
"epoch": 0.34725670878625686, |
|
"grad_norm": 0.4411897659301758, |
|
"learning_rate": 9.785732198152936e-05, |
|
"loss": 3.3204, |
|
"step": 4720 |
|
}, |
|
{ |
|
"epoch": 0.34799242215232945, |
|
"grad_norm": 0.49034199118614197, |
|
"learning_rate": 9.784587027246726e-05, |
|
"loss": 3.3287, |
|
"step": 4730 |
|
}, |
|
{ |
|
"epoch": 0.34872813551840204, |
|
"grad_norm": 0.4629896581172943, |
|
"learning_rate": 9.783438871631707e-05, |
|
"loss": 3.3663, |
|
"step": 4740 |
|
}, |
|
{ |
|
"epoch": 0.3494638488844746, |
|
"grad_norm": 0.4768849313259125, |
|
"learning_rate": 9.782287732024118e-05, |
|
"loss": 3.2822, |
|
"step": 4750 |
|
}, |
|
{ |
|
"epoch": 0.3501995622505472, |
|
"grad_norm": 0.45183610916137695, |
|
"learning_rate": 9.781133609142062e-05, |
|
"loss": 3.3665, |
|
"step": 4760 |
|
}, |
|
{ |
|
"epoch": 0.35093527561661975, |
|
"grad_norm": 0.46104103326797485, |
|
"learning_rate": 9.779976503705499e-05, |
|
"loss": 3.3192, |
|
"step": 4770 |
|
}, |
|
{ |
|
"epoch": 0.35167098898269233, |
|
"grad_norm": 0.47722548246383667, |
|
"learning_rate": 9.778816416436253e-05, |
|
"loss": 3.3519, |
|
"step": 4780 |
|
}, |
|
{ |
|
"epoch": 0.3524067023487649, |
|
"grad_norm": 0.45925357937812805, |
|
"learning_rate": 9.777653348058004e-05, |
|
"loss": 3.3195, |
|
"step": 4790 |
|
}, |
|
{ |
|
"epoch": 0.3531424157148375, |
|
"grad_norm": 0.46248316764831543, |
|
"learning_rate": 9.7764872992963e-05, |
|
"loss": 3.3221, |
|
"step": 4800 |
|
}, |
|
{ |
|
"epoch": 0.3538781290809101, |
|
"grad_norm": 0.45539459586143494, |
|
"learning_rate": 9.775318270878537e-05, |
|
"loss": 3.3492, |
|
"step": 4810 |
|
}, |
|
{ |
|
"epoch": 0.35461384244698263, |
|
"grad_norm": 0.47933024168014526, |
|
"learning_rate": 9.774146263533976e-05, |
|
"loss": 3.3284, |
|
"step": 4820 |
|
}, |
|
{ |
|
"epoch": 0.3553495558130552, |
|
"grad_norm": 0.5591832399368286, |
|
"learning_rate": 9.77297127799374e-05, |
|
"loss": 3.3435, |
|
"step": 4830 |
|
}, |
|
{ |
|
"epoch": 0.3560852691791278, |
|
"grad_norm": 0.48164159059524536, |
|
"learning_rate": 9.7717933149908e-05, |
|
"loss": 3.3433, |
|
"step": 4840 |
|
}, |
|
{ |
|
"epoch": 0.3568209825452004, |
|
"grad_norm": 0.48207688331604004, |
|
"learning_rate": 9.770612375259991e-05, |
|
"loss": 3.3348, |
|
"step": 4850 |
|
}, |
|
{ |
|
"epoch": 0.357556695911273, |
|
"grad_norm": 0.49394533038139343, |
|
"learning_rate": 9.769428459538008e-05, |
|
"loss": 3.3, |
|
"step": 4860 |
|
}, |
|
{ |
|
"epoch": 0.35829240927734557, |
|
"grad_norm": 0.45821693539619446, |
|
"learning_rate": 9.768241568563392e-05, |
|
"loss": 3.3498, |
|
"step": 4870 |
|
}, |
|
{ |
|
"epoch": 0.3590281226434181, |
|
"grad_norm": 0.4812382459640503, |
|
"learning_rate": 9.767051703076552e-05, |
|
"loss": 3.2435, |
|
"step": 4880 |
|
}, |
|
{ |
|
"epoch": 0.3597638360094907, |
|
"grad_norm": 0.44694334268569946, |
|
"learning_rate": 9.765858863819744e-05, |
|
"loss": 3.3879, |
|
"step": 4890 |
|
}, |
|
{ |
|
"epoch": 0.3604995493755633, |
|
"grad_norm": 0.4820118546485901, |
|
"learning_rate": 9.764663051537082e-05, |
|
"loss": 3.3189, |
|
"step": 4900 |
|
}, |
|
{ |
|
"epoch": 0.36123526274163587, |
|
"grad_norm": 0.464145302772522, |
|
"learning_rate": 9.763464266974534e-05, |
|
"loss": 3.3726, |
|
"step": 4910 |
|
}, |
|
{ |
|
"epoch": 0.36197097610770845, |
|
"grad_norm": 0.4518837630748749, |
|
"learning_rate": 9.762262510879925e-05, |
|
"loss": 3.3453, |
|
"step": 4920 |
|
}, |
|
{ |
|
"epoch": 0.36270668947378104, |
|
"grad_norm": 0.4643986225128174, |
|
"learning_rate": 9.761057784002929e-05, |
|
"loss": 3.3463, |
|
"step": 4930 |
|
}, |
|
{ |
|
"epoch": 0.3634424028398536, |
|
"grad_norm": 0.4880530834197998, |
|
"learning_rate": 9.759850087095076e-05, |
|
"loss": 3.3116, |
|
"step": 4940 |
|
}, |
|
{ |
|
"epoch": 0.36417811620592616, |
|
"grad_norm": 0.4663744270801544, |
|
"learning_rate": 9.75863942090975e-05, |
|
"loss": 3.2909, |
|
"step": 4950 |
|
}, |
|
{ |
|
"epoch": 0.36491382957199875, |
|
"grad_norm": 0.5150174498558044, |
|
"learning_rate": 9.757425786202184e-05, |
|
"loss": 3.2635, |
|
"step": 4960 |
|
}, |
|
{ |
|
"epoch": 0.36564954293807134, |
|
"grad_norm": 0.5246181488037109, |
|
"learning_rate": 9.756209183729466e-05, |
|
"loss": 3.2857, |
|
"step": 4970 |
|
}, |
|
{ |
|
"epoch": 0.3663852563041439, |
|
"grad_norm": 0.48053282499313354, |
|
"learning_rate": 9.754989614250532e-05, |
|
"loss": 3.3379, |
|
"step": 4980 |
|
}, |
|
{ |
|
"epoch": 0.36712096967021646, |
|
"grad_norm": 0.44896063208580017, |
|
"learning_rate": 9.753767078526173e-05, |
|
"loss": 3.2669, |
|
"step": 4990 |
|
}, |
|
{ |
|
"epoch": 0.36785668303628904, |
|
"grad_norm": 0.47481274604797363, |
|
"learning_rate": 9.752541577319025e-05, |
|
"loss": 3.3064, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 0.36859239640236163, |
|
"grad_norm": 0.4941543638706207, |
|
"learning_rate": 9.75131311139358e-05, |
|
"loss": 3.3203, |
|
"step": 5010 |
|
}, |
|
{ |
|
"epoch": 0.3693281097684342, |
|
"grad_norm": 0.41711509227752686, |
|
"learning_rate": 9.750081681516174e-05, |
|
"loss": 3.2739, |
|
"step": 5020 |
|
}, |
|
{ |
|
"epoch": 0.3700638231345068, |
|
"grad_norm": 0.48057687282562256, |
|
"learning_rate": 9.748847288454996e-05, |
|
"loss": 3.3552, |
|
"step": 5030 |
|
}, |
|
{ |
|
"epoch": 0.3707995365005794, |
|
"grad_norm": 0.454571396112442, |
|
"learning_rate": 9.747609932980083e-05, |
|
"loss": 3.3066, |
|
"step": 5040 |
|
}, |
|
{ |
|
"epoch": 0.37153524986665193, |
|
"grad_norm": 0.4346379041671753, |
|
"learning_rate": 9.746369615863317e-05, |
|
"loss": 3.3061, |
|
"step": 5050 |
|
}, |
|
{ |
|
"epoch": 0.3722709632327245, |
|
"grad_norm": 0.42454394698143005, |
|
"learning_rate": 9.74512633787843e-05, |
|
"loss": 3.2971, |
|
"step": 5060 |
|
}, |
|
{ |
|
"epoch": 0.3730066765987971, |
|
"grad_norm": 0.4378200173377991, |
|
"learning_rate": 9.743880099801e-05, |
|
"loss": 3.3169, |
|
"step": 5070 |
|
}, |
|
{ |
|
"epoch": 0.3737423899648697, |
|
"grad_norm": 0.47664156556129456, |
|
"learning_rate": 9.742630902408452e-05, |
|
"loss": 3.3459, |
|
"step": 5080 |
|
}, |
|
{ |
|
"epoch": 0.3744781033309423, |
|
"grad_norm": 0.48526403307914734, |
|
"learning_rate": 9.741378746480057e-05, |
|
"loss": 3.3237, |
|
"step": 5090 |
|
}, |
|
{ |
|
"epoch": 0.37521381669701487, |
|
"grad_norm": 0.48349133133888245, |
|
"learning_rate": 9.740123632796933e-05, |
|
"loss": 3.3074, |
|
"step": 5100 |
|
}, |
|
{ |
|
"epoch": 0.3759495300630874, |
|
"grad_norm": 0.46515288949012756, |
|
"learning_rate": 9.738865562142043e-05, |
|
"loss": 3.286, |
|
"step": 5110 |
|
}, |
|
{ |
|
"epoch": 0.37668524342916, |
|
"grad_norm": 0.45699048042297363, |
|
"learning_rate": 9.737604535300188e-05, |
|
"loss": 3.3676, |
|
"step": 5120 |
|
}, |
|
{ |
|
"epoch": 0.3774209567952326, |
|
"grad_norm": 0.48287123441696167, |
|
"learning_rate": 9.736340553058025e-05, |
|
"loss": 3.2953, |
|
"step": 5130 |
|
}, |
|
{ |
|
"epoch": 0.37815667016130516, |
|
"grad_norm": 0.4803648293018341, |
|
"learning_rate": 9.735073616204045e-05, |
|
"loss": 3.3047, |
|
"step": 5140 |
|
}, |
|
{ |
|
"epoch": 0.37889238352737775, |
|
"grad_norm": 0.4733999967575073, |
|
"learning_rate": 9.733803725528585e-05, |
|
"loss": 3.3511, |
|
"step": 5150 |
|
}, |
|
{ |
|
"epoch": 0.3796280968934503, |
|
"grad_norm": 0.48089954257011414, |
|
"learning_rate": 9.732530881823824e-05, |
|
"loss": 3.3259, |
|
"step": 5160 |
|
}, |
|
{ |
|
"epoch": 0.38036381025952287, |
|
"grad_norm": 0.47004076838493347, |
|
"learning_rate": 9.731255085883787e-05, |
|
"loss": 3.3201, |
|
"step": 5170 |
|
}, |
|
{ |
|
"epoch": 0.38109952362559546, |
|
"grad_norm": 0.8502194881439209, |
|
"learning_rate": 9.729976338504336e-05, |
|
"loss": 3.3501, |
|
"step": 5180 |
|
}, |
|
{ |
|
"epoch": 0.38183523699166805, |
|
"grad_norm": 0.48328205943107605, |
|
"learning_rate": 9.728694640483176e-05, |
|
"loss": 3.2916, |
|
"step": 5190 |
|
}, |
|
{ |
|
"epoch": 0.38257095035774064, |
|
"grad_norm": 0.43932202458381653, |
|
"learning_rate": 9.727409992619853e-05, |
|
"loss": 3.3287, |
|
"step": 5200 |
|
}, |
|
{ |
|
"epoch": 0.3833066637238132, |
|
"grad_norm": 0.4448724091053009, |
|
"learning_rate": 9.726122395715751e-05, |
|
"loss": 3.3104, |
|
"step": 5210 |
|
}, |
|
{ |
|
"epoch": 0.38404237708988576, |
|
"grad_norm": 0.454637736082077, |
|
"learning_rate": 9.724831850574099e-05, |
|
"loss": 3.2872, |
|
"step": 5220 |
|
}, |
|
{ |
|
"epoch": 0.38477809045595834, |
|
"grad_norm": 0.5293341875076294, |
|
"learning_rate": 9.723538357999958e-05, |
|
"loss": 3.369, |
|
"step": 5230 |
|
}, |
|
{ |
|
"epoch": 0.38551380382203093, |
|
"grad_norm": 0.46058687567710876, |
|
"learning_rate": 9.722241918800234e-05, |
|
"loss": 3.3044, |
|
"step": 5240 |
|
}, |
|
{ |
|
"epoch": 0.3862495171881035, |
|
"grad_norm": 0.45656171441078186, |
|
"learning_rate": 9.720942533783666e-05, |
|
"loss": 3.329, |
|
"step": 5250 |
|
}, |
|
{ |
|
"epoch": 0.3869852305541761, |
|
"grad_norm": 0.4675185978412628, |
|
"learning_rate": 9.719640203760835e-05, |
|
"loss": 3.2812, |
|
"step": 5260 |
|
}, |
|
{ |
|
"epoch": 0.3877209439202487, |
|
"grad_norm": 0.47009697556495667, |
|
"learning_rate": 9.718334929544155e-05, |
|
"loss": 3.3665, |
|
"step": 5270 |
|
}, |
|
{ |
|
"epoch": 0.3884566572863212, |
|
"grad_norm": 0.4447770118713379, |
|
"learning_rate": 9.71702671194788e-05, |
|
"loss": 3.3148, |
|
"step": 5280 |
|
}, |
|
{ |
|
"epoch": 0.3891923706523938, |
|
"grad_norm": 0.4634477198123932, |
|
"learning_rate": 9.715715551788101e-05, |
|
"loss": 3.3076, |
|
"step": 5290 |
|
}, |
|
{ |
|
"epoch": 0.3899280840184664, |
|
"grad_norm": 0.4768572151660919, |
|
"learning_rate": 9.71440144988274e-05, |
|
"loss": 3.283, |
|
"step": 5300 |
|
}, |
|
{ |
|
"epoch": 0.390663797384539, |
|
"grad_norm": 0.46009397506713867, |
|
"learning_rate": 9.713084407051556e-05, |
|
"loss": 3.3147, |
|
"step": 5310 |
|
}, |
|
{ |
|
"epoch": 0.3913995107506116, |
|
"grad_norm": 0.48648467659950256, |
|
"learning_rate": 9.711764424116145e-05, |
|
"loss": 3.2939, |
|
"step": 5320 |
|
}, |
|
{ |
|
"epoch": 0.39213522411668417, |
|
"grad_norm": 0.49335330724716187, |
|
"learning_rate": 9.710441501899936e-05, |
|
"loss": 3.3232, |
|
"step": 5330 |
|
}, |
|
{ |
|
"epoch": 0.3928709374827567, |
|
"grad_norm": 0.4541822373867035, |
|
"learning_rate": 9.709115641228189e-05, |
|
"loss": 3.3027, |
|
"step": 5340 |
|
}, |
|
{ |
|
"epoch": 0.3936066508488293, |
|
"grad_norm": 0.4860965311527252, |
|
"learning_rate": 9.707786842927999e-05, |
|
"loss": 3.3016, |
|
"step": 5350 |
|
}, |
|
{ |
|
"epoch": 0.3943423642149019, |
|
"grad_norm": 0.46567508578300476, |
|
"learning_rate": 9.706455107828296e-05, |
|
"loss": 3.3336, |
|
"step": 5360 |
|
}, |
|
{ |
|
"epoch": 0.39507807758097446, |
|
"grad_norm": 0.5198284387588501, |
|
"learning_rate": 9.705120436759837e-05, |
|
"loss": 3.3211, |
|
"step": 5370 |
|
}, |
|
{ |
|
"epoch": 0.39581379094704705, |
|
"grad_norm": 0.44841471314430237, |
|
"learning_rate": 9.703782830555214e-05, |
|
"loss": 3.2929, |
|
"step": 5380 |
|
}, |
|
{ |
|
"epoch": 0.3965495043131196, |
|
"grad_norm": 0.46957024931907654, |
|
"learning_rate": 9.70244229004885e-05, |
|
"loss": 3.32, |
|
"step": 5390 |
|
}, |
|
{ |
|
"epoch": 0.39728521767919217, |
|
"grad_norm": 0.43909287452697754, |
|
"learning_rate": 9.701098816076996e-05, |
|
"loss": 3.2571, |
|
"step": 5400 |
|
}, |
|
{ |
|
"epoch": 0.39802093104526476, |
|
"grad_norm": 0.4354236125946045, |
|
"learning_rate": 9.699752409477735e-05, |
|
"loss": 3.2885, |
|
"step": 5410 |
|
}, |
|
{ |
|
"epoch": 0.39875664441133735, |
|
"grad_norm": 0.4583248496055603, |
|
"learning_rate": 9.69840307109098e-05, |
|
"loss": 3.2627, |
|
"step": 5420 |
|
}, |
|
{ |
|
"epoch": 0.39949235777740993, |
|
"grad_norm": 0.4307055175304413, |
|
"learning_rate": 9.697050801758469e-05, |
|
"loss": 3.2989, |
|
"step": 5430 |
|
}, |
|
{ |
|
"epoch": 0.4002280711434825, |
|
"grad_norm": 0.47710663080215454, |
|
"learning_rate": 9.695695602323776e-05, |
|
"loss": 3.2879, |
|
"step": 5440 |
|
}, |
|
{ |
|
"epoch": 0.40096378450955505, |
|
"grad_norm": 0.4449078142642975, |
|
"learning_rate": 9.694337473632293e-05, |
|
"loss": 3.2946, |
|
"step": 5450 |
|
}, |
|
{ |
|
"epoch": 0.40169949787562764, |
|
"grad_norm": 0.4442616403102875, |
|
"learning_rate": 9.692976416531247e-05, |
|
"loss": 3.3026, |
|
"step": 5460 |
|
}, |
|
{ |
|
"epoch": 0.40243521124170023, |
|
"grad_norm": 0.4815976321697235, |
|
"learning_rate": 9.691612431869688e-05, |
|
"loss": 3.3186, |
|
"step": 5470 |
|
}, |
|
{ |
|
"epoch": 0.4031709246077728, |
|
"grad_norm": 0.4745250344276428, |
|
"learning_rate": 9.690245520498495e-05, |
|
"loss": 3.2948, |
|
"step": 5480 |
|
}, |
|
{ |
|
"epoch": 0.4039066379738454, |
|
"grad_norm": 0.4966549277305603, |
|
"learning_rate": 9.68887568327037e-05, |
|
"loss": 3.2993, |
|
"step": 5490 |
|
}, |
|
{ |
|
"epoch": 0.404642351339918, |
|
"grad_norm": 0.43987634778022766, |
|
"learning_rate": 9.687502921039842e-05, |
|
"loss": 3.3187, |
|
"step": 5500 |
|
}, |
|
{ |
|
"epoch": 0.4053780647059905, |
|
"grad_norm": 0.4542040228843689, |
|
"learning_rate": 9.686127234663261e-05, |
|
"loss": 3.2944, |
|
"step": 5510 |
|
}, |
|
{ |
|
"epoch": 0.4061137780720631, |
|
"grad_norm": 0.44791513681411743, |
|
"learning_rate": 9.68474862499881e-05, |
|
"loss": 3.3427, |
|
"step": 5520 |
|
}, |
|
{ |
|
"epoch": 0.4068494914381357, |
|
"grad_norm": 0.44339093565940857, |
|
"learning_rate": 9.683367092906484e-05, |
|
"loss": 3.3479, |
|
"step": 5530 |
|
}, |
|
{ |
|
"epoch": 0.4075852048042083, |
|
"grad_norm": 0.41371455788612366, |
|
"learning_rate": 9.681982639248111e-05, |
|
"loss": 3.2453, |
|
"step": 5540 |
|
}, |
|
{ |
|
"epoch": 0.4083209181702809, |
|
"grad_norm": 0.4320749342441559, |
|
"learning_rate": 9.680595264887334e-05, |
|
"loss": 3.335, |
|
"step": 5550 |
|
}, |
|
{ |
|
"epoch": 0.4090566315363534, |
|
"grad_norm": 0.45809799432754517, |
|
"learning_rate": 9.679204970689624e-05, |
|
"loss": 3.3114, |
|
"step": 5560 |
|
}, |
|
{ |
|
"epoch": 0.409792344902426, |
|
"grad_norm": 0.43507814407348633, |
|
"learning_rate": 9.67781175752227e-05, |
|
"loss": 3.2928, |
|
"step": 5570 |
|
}, |
|
{ |
|
"epoch": 0.4105280582684986, |
|
"grad_norm": 0.474032998085022, |
|
"learning_rate": 9.676415626254381e-05, |
|
"loss": 3.3059, |
|
"step": 5580 |
|
}, |
|
{ |
|
"epoch": 0.4112637716345712, |
|
"grad_norm": 0.4753536581993103, |
|
"learning_rate": 9.67501657775689e-05, |
|
"loss": 3.2696, |
|
"step": 5590 |
|
}, |
|
{ |
|
"epoch": 0.41199948500064376, |
|
"grad_norm": 0.4632064700126648, |
|
"learning_rate": 9.673614612902547e-05, |
|
"loss": 3.2998, |
|
"step": 5600 |
|
}, |
|
{ |
|
"epoch": 0.41273519836671635, |
|
"grad_norm": 0.46428951621055603, |
|
"learning_rate": 9.672209732565922e-05, |
|
"loss": 3.2535, |
|
"step": 5610 |
|
}, |
|
{ |
|
"epoch": 0.4134709117327889, |
|
"grad_norm": 0.44999969005584717, |
|
"learning_rate": 9.670801937623405e-05, |
|
"loss": 3.2638, |
|
"step": 5620 |
|
}, |
|
{ |
|
"epoch": 0.41420662509886147, |
|
"grad_norm": 0.4350806176662445, |
|
"learning_rate": 9.669391228953202e-05, |
|
"loss": 3.3014, |
|
"step": 5630 |
|
}, |
|
{ |
|
"epoch": 0.41494233846493406, |
|
"grad_norm": 0.45323678851127625, |
|
"learning_rate": 9.667977607435337e-05, |
|
"loss": 3.2623, |
|
"step": 5640 |
|
}, |
|
{ |
|
"epoch": 0.41567805183100665, |
|
"grad_norm": 0.47555407881736755, |
|
"learning_rate": 9.666561073951655e-05, |
|
"loss": 3.3212, |
|
"step": 5650 |
|
}, |
|
{ |
|
"epoch": 0.41641376519707923, |
|
"grad_norm": 0.46264752745628357, |
|
"learning_rate": 9.665141629385809e-05, |
|
"loss": 3.3034, |
|
"step": 5660 |
|
}, |
|
{ |
|
"epoch": 0.4171494785631518, |
|
"grad_norm": 0.45676395297050476, |
|
"learning_rate": 9.663719274623277e-05, |
|
"loss": 3.323, |
|
"step": 5670 |
|
}, |
|
{ |
|
"epoch": 0.41788519192922435, |
|
"grad_norm": 0.4800090789794922, |
|
"learning_rate": 9.662294010551348e-05, |
|
"loss": 3.2806, |
|
"step": 5680 |
|
}, |
|
{ |
|
"epoch": 0.41862090529529694, |
|
"grad_norm": 0.4712800085544586, |
|
"learning_rate": 9.660865838059128e-05, |
|
"loss": 3.3134, |
|
"step": 5690 |
|
}, |
|
{ |
|
"epoch": 0.41935661866136953, |
|
"grad_norm": 0.4544120132923126, |
|
"learning_rate": 9.659434758037532e-05, |
|
"loss": 3.2902, |
|
"step": 5700 |
|
}, |
|
{ |
|
"epoch": 0.4200923320274421, |
|
"grad_norm": 0.4356352388858795, |
|
"learning_rate": 9.658000771379297e-05, |
|
"loss": 3.3005, |
|
"step": 5710 |
|
}, |
|
{ |
|
"epoch": 0.4208280453935147, |
|
"grad_norm": 0.46152356266975403, |
|
"learning_rate": 9.656563878978965e-05, |
|
"loss": 3.3022, |
|
"step": 5720 |
|
}, |
|
{ |
|
"epoch": 0.42156375875958724, |
|
"grad_norm": 0.44487205147743225, |
|
"learning_rate": 9.655124081732897e-05, |
|
"loss": 3.253, |
|
"step": 5730 |
|
}, |
|
{ |
|
"epoch": 0.4222994721256598, |
|
"grad_norm": 0.4438563287258148, |
|
"learning_rate": 9.653681380539263e-05, |
|
"loss": 3.2019, |
|
"step": 5740 |
|
}, |
|
{ |
|
"epoch": 0.4230351854917324, |
|
"grad_norm": 0.44498592615127563, |
|
"learning_rate": 9.652235776298046e-05, |
|
"loss": 3.3301, |
|
"step": 5750 |
|
}, |
|
{ |
|
"epoch": 0.423770898857805, |
|
"grad_norm": 0.4550943672657013, |
|
"learning_rate": 9.650787269911037e-05, |
|
"loss": 3.2636, |
|
"step": 5760 |
|
}, |
|
{ |
|
"epoch": 0.4245066122238776, |
|
"grad_norm": 0.44825536012649536, |
|
"learning_rate": 9.649335862281843e-05, |
|
"loss": 3.2851, |
|
"step": 5770 |
|
}, |
|
{ |
|
"epoch": 0.4252423255899502, |
|
"grad_norm": 0.43271371722221375, |
|
"learning_rate": 9.647881554315873e-05, |
|
"loss": 3.2943, |
|
"step": 5780 |
|
}, |
|
{ |
|
"epoch": 0.4259780389560227, |
|
"grad_norm": 0.46533486247062683, |
|
"learning_rate": 9.646424346920353e-05, |
|
"loss": 3.276, |
|
"step": 5790 |
|
}, |
|
{ |
|
"epoch": 0.4267137523220953, |
|
"grad_norm": 0.42707332968711853, |
|
"learning_rate": 9.644964241004313e-05, |
|
"loss": 3.3076, |
|
"step": 5800 |
|
}, |
|
{ |
|
"epoch": 0.4274494656881679, |
|
"grad_norm": 0.43310198187828064, |
|
"learning_rate": 9.643501237478592e-05, |
|
"loss": 3.3004, |
|
"step": 5810 |
|
}, |
|
{ |
|
"epoch": 0.4281851790542405, |
|
"grad_norm": 0.4489288926124573, |
|
"learning_rate": 9.642035337255839e-05, |
|
"loss": 3.3036, |
|
"step": 5820 |
|
}, |
|
{ |
|
"epoch": 0.42892089242031306, |
|
"grad_norm": 0.43780744075775146, |
|
"learning_rate": 9.640566541250508e-05, |
|
"loss": 3.2972, |
|
"step": 5830 |
|
}, |
|
{ |
|
"epoch": 0.42965660578638565, |
|
"grad_norm": 0.4519082307815552, |
|
"learning_rate": 9.639094850378858e-05, |
|
"loss": 3.3251, |
|
"step": 5840 |
|
}, |
|
{ |
|
"epoch": 0.4303923191524582, |
|
"grad_norm": 0.46353888511657715, |
|
"learning_rate": 9.637620265558955e-05, |
|
"loss": 3.2342, |
|
"step": 5850 |
|
}, |
|
{ |
|
"epoch": 0.43112803251853077, |
|
"grad_norm": 0.45390161871910095, |
|
"learning_rate": 9.636142787710672e-05, |
|
"loss": 3.2985, |
|
"step": 5860 |
|
}, |
|
{ |
|
"epoch": 0.43186374588460336, |
|
"grad_norm": 0.43484824895858765, |
|
"learning_rate": 9.634662417755685e-05, |
|
"loss": 3.2721, |
|
"step": 5870 |
|
}, |
|
{ |
|
"epoch": 0.43259945925067594, |
|
"grad_norm": 0.48000025749206543, |
|
"learning_rate": 9.633179156617476e-05, |
|
"loss": 3.2685, |
|
"step": 5880 |
|
}, |
|
{ |
|
"epoch": 0.43333517261674853, |
|
"grad_norm": 0.4266999065876007, |
|
"learning_rate": 9.631693005221327e-05, |
|
"loss": 3.2389, |
|
"step": 5890 |
|
}, |
|
{ |
|
"epoch": 0.4340708859828211, |
|
"grad_norm": 0.4476128816604614, |
|
"learning_rate": 9.630203964494327e-05, |
|
"loss": 3.2802, |
|
"step": 5900 |
|
}, |
|
{ |
|
"epoch": 0.43480659934889365, |
|
"grad_norm": 0.436440646648407, |
|
"learning_rate": 9.628712035365363e-05, |
|
"loss": 3.2765, |
|
"step": 5910 |
|
}, |
|
{ |
|
"epoch": 0.43554231271496624, |
|
"grad_norm": 0.4390304386615753, |
|
"learning_rate": 9.62721721876513e-05, |
|
"loss": 3.2969, |
|
"step": 5920 |
|
}, |
|
{ |
|
"epoch": 0.4362780260810388, |
|
"grad_norm": 0.45296919345855713, |
|
"learning_rate": 9.625719515626117e-05, |
|
"loss": 3.2646, |
|
"step": 5930 |
|
}, |
|
{ |
|
"epoch": 0.4370137394471114, |
|
"grad_norm": 0.4439026117324829, |
|
"learning_rate": 9.62421892688262e-05, |
|
"loss": 3.3088, |
|
"step": 5940 |
|
}, |
|
{ |
|
"epoch": 0.437749452813184, |
|
"grad_norm": 0.43667683005332947, |
|
"learning_rate": 9.622715453470732e-05, |
|
"loss": 3.285, |
|
"step": 5950 |
|
}, |
|
{ |
|
"epoch": 0.43848516617925654, |
|
"grad_norm": 0.4610852599143982, |
|
"learning_rate": 9.621209096328343e-05, |
|
"loss": 3.2502, |
|
"step": 5960 |
|
}, |
|
{ |
|
"epoch": 0.4392208795453291, |
|
"grad_norm": 0.43769463896751404, |
|
"learning_rate": 9.61969985639515e-05, |
|
"loss": 3.3171, |
|
"step": 5970 |
|
}, |
|
{ |
|
"epoch": 0.4399565929114017, |
|
"grad_norm": 0.4450635313987732, |
|
"learning_rate": 9.61818773461264e-05, |
|
"loss": 3.2985, |
|
"step": 5980 |
|
}, |
|
{ |
|
"epoch": 0.4406923062774743, |
|
"grad_norm": 0.423850417137146, |
|
"learning_rate": 9.616672731924101e-05, |
|
"loss": 3.3041, |
|
"step": 5990 |
|
}, |
|
{ |
|
"epoch": 0.4414280196435469, |
|
"grad_norm": 0.4177245497703552, |
|
"learning_rate": 9.615154849274618e-05, |
|
"loss": 3.2825, |
|
"step": 6000 |
|
}, |
|
{ |
|
"epoch": 0.4421637330096195, |
|
"grad_norm": 0.44956111907958984, |
|
"learning_rate": 9.613634087611073e-05, |
|
"loss": 3.2986, |
|
"step": 6010 |
|
}, |
|
{ |
|
"epoch": 0.442899446375692, |
|
"grad_norm": 0.4504786431789398, |
|
"learning_rate": 9.612110447882144e-05, |
|
"loss": 3.3064, |
|
"step": 6020 |
|
}, |
|
{ |
|
"epoch": 0.4436351597417646, |
|
"grad_norm": 0.4433842897415161, |
|
"learning_rate": 9.610583931038304e-05, |
|
"loss": 3.2896, |
|
"step": 6030 |
|
}, |
|
{ |
|
"epoch": 0.4443708731078372, |
|
"grad_norm": 0.4560666084289551, |
|
"learning_rate": 9.609054538031821e-05, |
|
"loss": 3.2007, |
|
"step": 6040 |
|
}, |
|
{ |
|
"epoch": 0.44510658647390977, |
|
"grad_norm": 0.4344310760498047, |
|
"learning_rate": 9.607522269816756e-05, |
|
"loss": 3.2874, |
|
"step": 6050 |
|
}, |
|
{ |
|
"epoch": 0.44584229983998236, |
|
"grad_norm": 0.4632498025894165, |
|
"learning_rate": 9.605987127348966e-05, |
|
"loss": 3.2885, |
|
"step": 6060 |
|
}, |
|
{ |
|
"epoch": 0.44657801320605495, |
|
"grad_norm": 0.4609225392341614, |
|
"learning_rate": 9.604449111586098e-05, |
|
"loss": 3.2744, |
|
"step": 6070 |
|
}, |
|
{ |
|
"epoch": 0.4473137265721275, |
|
"grad_norm": 0.43838661909103394, |
|
"learning_rate": 9.602908223487594e-05, |
|
"loss": 3.2586, |
|
"step": 6080 |
|
}, |
|
{ |
|
"epoch": 0.44804943993820007, |
|
"grad_norm": 0.4453481435775757, |
|
"learning_rate": 9.601364464014688e-05, |
|
"loss": 3.2933, |
|
"step": 6090 |
|
}, |
|
{ |
|
"epoch": 0.44878515330427265, |
|
"grad_norm": 0.4828120172023773, |
|
"learning_rate": 9.599817834130402e-05, |
|
"loss": 3.2679, |
|
"step": 6100 |
|
}, |
|
{ |
|
"epoch": 0.44952086667034524, |
|
"grad_norm": 0.5243907570838928, |
|
"learning_rate": 9.598268334799552e-05, |
|
"loss": 3.2942, |
|
"step": 6110 |
|
}, |
|
{ |
|
"epoch": 0.45025658003641783, |
|
"grad_norm": 0.45438796281814575, |
|
"learning_rate": 9.59671596698874e-05, |
|
"loss": 3.2831, |
|
"step": 6120 |
|
}, |
|
{ |
|
"epoch": 0.45099229340249036, |
|
"grad_norm": 0.4482550323009491, |
|
"learning_rate": 9.595160731666366e-05, |
|
"loss": 3.2718, |
|
"step": 6130 |
|
}, |
|
{ |
|
"epoch": 0.45172800676856295, |
|
"grad_norm": 0.43046483397483826, |
|
"learning_rate": 9.593602629802608e-05, |
|
"loss": 3.2916, |
|
"step": 6140 |
|
}, |
|
{ |
|
"epoch": 0.45246372013463554, |
|
"grad_norm": 0.499222993850708, |
|
"learning_rate": 9.592041662369438e-05, |
|
"loss": 3.2908, |
|
"step": 6150 |
|
}, |
|
{ |
|
"epoch": 0.4531994335007081, |
|
"grad_norm": 0.429533988237381, |
|
"learning_rate": 9.590477830340613e-05, |
|
"loss": 3.2711, |
|
"step": 6160 |
|
}, |
|
{ |
|
"epoch": 0.4539351468667807, |
|
"grad_norm": 0.47045251727104187, |
|
"learning_rate": 9.588911134691684e-05, |
|
"loss": 3.293, |
|
"step": 6170 |
|
}, |
|
{ |
|
"epoch": 0.4546708602328533, |
|
"grad_norm": 0.42130205035209656, |
|
"learning_rate": 9.587341576399979e-05, |
|
"loss": 3.26, |
|
"step": 6180 |
|
}, |
|
{ |
|
"epoch": 0.45540657359892583, |
|
"grad_norm": 0.4418286681175232, |
|
"learning_rate": 9.585769156444614e-05, |
|
"loss": 3.2765, |
|
"step": 6190 |
|
}, |
|
{ |
|
"epoch": 0.4561422869649984, |
|
"grad_norm": 0.43507617712020874, |
|
"learning_rate": 9.584193875806498e-05, |
|
"loss": 3.3047, |
|
"step": 6200 |
|
}, |
|
{ |
|
"epoch": 0.456878000331071, |
|
"grad_norm": 0.42862462997436523, |
|
"learning_rate": 9.582615735468314e-05, |
|
"loss": 3.2935, |
|
"step": 6210 |
|
}, |
|
{ |
|
"epoch": 0.4576137136971436, |
|
"grad_norm": 0.44380027055740356, |
|
"learning_rate": 9.581034736414536e-05, |
|
"loss": 3.2585, |
|
"step": 6220 |
|
}, |
|
{ |
|
"epoch": 0.4583494270632162, |
|
"grad_norm": 0.42932000756263733, |
|
"learning_rate": 9.579450879631416e-05, |
|
"loss": 3.2444, |
|
"step": 6230 |
|
}, |
|
{ |
|
"epoch": 0.4590851404292888, |
|
"grad_norm": 0.4627602696418762, |
|
"learning_rate": 9.577864166106993e-05, |
|
"loss": 3.2885, |
|
"step": 6240 |
|
}, |
|
{ |
|
"epoch": 0.4598208537953613, |
|
"grad_norm": 0.4066958427429199, |
|
"learning_rate": 9.576274596831087e-05, |
|
"loss": 3.3016, |
|
"step": 6250 |
|
}, |
|
{ |
|
"epoch": 0.4605565671614339, |
|
"grad_norm": 0.4360782504081726, |
|
"learning_rate": 9.574682172795299e-05, |
|
"loss": 3.2607, |
|
"step": 6260 |
|
}, |
|
{ |
|
"epoch": 0.4612922805275065, |
|
"grad_norm": 0.44138815999031067, |
|
"learning_rate": 9.573086894993008e-05, |
|
"loss": 3.288, |
|
"step": 6270 |
|
}, |
|
{ |
|
"epoch": 0.46202799389357907, |
|
"grad_norm": 0.4372709095478058, |
|
"learning_rate": 9.571488764419381e-05, |
|
"loss": 3.2366, |
|
"step": 6280 |
|
}, |
|
{ |
|
"epoch": 0.46276370725965166, |
|
"grad_norm": 0.4353005886077881, |
|
"learning_rate": 9.569887782071356e-05, |
|
"loss": 3.3092, |
|
"step": 6290 |
|
}, |
|
{ |
|
"epoch": 0.46349942062572425, |
|
"grad_norm": 0.4514767527580261, |
|
"learning_rate": 9.568283948947655e-05, |
|
"loss": 3.3014, |
|
"step": 6300 |
|
}, |
|
{ |
|
"epoch": 0.4642351339917968, |
|
"grad_norm": 0.4290522336959839, |
|
"learning_rate": 9.566677266048778e-05, |
|
"loss": 3.2763, |
|
"step": 6310 |
|
}, |
|
{ |
|
"epoch": 0.46497084735786937, |
|
"grad_norm": 0.43439796566963196, |
|
"learning_rate": 9.565067734376999e-05, |
|
"loss": 3.2469, |
|
"step": 6320 |
|
}, |
|
{ |
|
"epoch": 0.46570656072394195, |
|
"grad_norm": 0.47817927598953247, |
|
"learning_rate": 9.563455354936375e-05, |
|
"loss": 3.2636, |
|
"step": 6330 |
|
}, |
|
{ |
|
"epoch": 0.46644227409001454, |
|
"grad_norm": 0.44186869263648987, |
|
"learning_rate": 9.561840128732735e-05, |
|
"loss": 3.2496, |
|
"step": 6340 |
|
}, |
|
{ |
|
"epoch": 0.46717798745608713, |
|
"grad_norm": 0.4209322929382324, |
|
"learning_rate": 9.560222056773684e-05, |
|
"loss": 3.2629, |
|
"step": 6350 |
|
}, |
|
{ |
|
"epoch": 0.46791370082215966, |
|
"grad_norm": 0.42116010189056396, |
|
"learning_rate": 9.558601140068606e-05, |
|
"loss": 3.272, |
|
"step": 6360 |
|
}, |
|
{ |
|
"epoch": 0.46864941418823225, |
|
"grad_norm": 0.4619746804237366, |
|
"learning_rate": 9.556977379628656e-05, |
|
"loss": 3.2602, |
|
"step": 6370 |
|
}, |
|
{ |
|
"epoch": 0.46938512755430484, |
|
"grad_norm": 0.44723060727119446, |
|
"learning_rate": 9.555350776466763e-05, |
|
"loss": 3.2318, |
|
"step": 6380 |
|
}, |
|
{ |
|
"epoch": 0.4701208409203774, |
|
"grad_norm": 0.4265129864215851, |
|
"learning_rate": 9.553721331597632e-05, |
|
"loss": 3.2932, |
|
"step": 6390 |
|
}, |
|
{ |
|
"epoch": 0.47085655428645, |
|
"grad_norm": 0.43263575434684753, |
|
"learning_rate": 9.55208904603774e-05, |
|
"loss": 3.2246, |
|
"step": 6400 |
|
}, |
|
{ |
|
"epoch": 0.4715922676525226, |
|
"grad_norm": 0.4542834460735321, |
|
"learning_rate": 9.550453920805333e-05, |
|
"loss": 3.2558, |
|
"step": 6410 |
|
}, |
|
{ |
|
"epoch": 0.47232798101859513, |
|
"grad_norm": 0.4443358778953552, |
|
"learning_rate": 9.548815956920429e-05, |
|
"loss": 3.2722, |
|
"step": 6420 |
|
}, |
|
{ |
|
"epoch": 0.4730636943846677, |
|
"grad_norm": 0.5469712018966675, |
|
"learning_rate": 9.547175155404824e-05, |
|
"loss": 3.2956, |
|
"step": 6430 |
|
}, |
|
{ |
|
"epoch": 0.4737994077507403, |
|
"grad_norm": 0.45458346605300903, |
|
"learning_rate": 9.545531517282073e-05, |
|
"loss": 3.3005, |
|
"step": 6440 |
|
}, |
|
{ |
|
"epoch": 0.4745351211168129, |
|
"grad_norm": 0.4310155212879181, |
|
"learning_rate": 9.543885043577511e-05, |
|
"loss": 3.301, |
|
"step": 6450 |
|
}, |
|
{ |
|
"epoch": 0.4752708344828855, |
|
"grad_norm": 0.44873273372650146, |
|
"learning_rate": 9.542235735318232e-05, |
|
"loss": 3.244, |
|
"step": 6460 |
|
}, |
|
{ |
|
"epoch": 0.4760065478489581, |
|
"grad_norm": 0.42980292439460754, |
|
"learning_rate": 9.540583593533107e-05, |
|
"loss": 3.2755, |
|
"step": 6470 |
|
}, |
|
{ |
|
"epoch": 0.4767422612150306, |
|
"grad_norm": 0.41110992431640625, |
|
"learning_rate": 9.538928619252772e-05, |
|
"loss": 3.2582, |
|
"step": 6480 |
|
}, |
|
{ |
|
"epoch": 0.4774779745811032, |
|
"grad_norm": 0.4515661597251892, |
|
"learning_rate": 9.537270813509622e-05, |
|
"loss": 3.2538, |
|
"step": 6490 |
|
}, |
|
{ |
|
"epoch": 0.4782136879471758, |
|
"grad_norm": 0.4295632541179657, |
|
"learning_rate": 9.535610177337833e-05, |
|
"loss": 3.1912, |
|
"step": 6500 |
|
}, |
|
{ |
|
"epoch": 0.47894940131324837, |
|
"grad_norm": 0.43053045868873596, |
|
"learning_rate": 9.533946711773335e-05, |
|
"loss": 3.2977, |
|
"step": 6510 |
|
}, |
|
{ |
|
"epoch": 0.47968511467932096, |
|
"grad_norm": 0.4485345482826233, |
|
"learning_rate": 9.532280417853825e-05, |
|
"loss": 3.3098, |
|
"step": 6520 |
|
}, |
|
{ |
|
"epoch": 0.4804208280453935, |
|
"grad_norm": 0.45671671628952026, |
|
"learning_rate": 9.53061129661877e-05, |
|
"loss": 3.1899, |
|
"step": 6530 |
|
}, |
|
{ |
|
"epoch": 0.4811565414114661, |
|
"grad_norm": 0.45602527260780334, |
|
"learning_rate": 9.528939349109393e-05, |
|
"loss": 3.2406, |
|
"step": 6540 |
|
}, |
|
{ |
|
"epoch": 0.48189225477753866, |
|
"grad_norm": 0.43597233295440674, |
|
"learning_rate": 9.52726457636869e-05, |
|
"loss": 3.2476, |
|
"step": 6550 |
|
}, |
|
{ |
|
"epoch": 0.48262796814361125, |
|
"grad_norm": 0.4405997097492218, |
|
"learning_rate": 9.525586979441407e-05, |
|
"loss": 3.2843, |
|
"step": 6560 |
|
}, |
|
{ |
|
"epoch": 0.48336368150968384, |
|
"grad_norm": 0.4386025071144104, |
|
"learning_rate": 9.523906559374063e-05, |
|
"loss": 3.2067, |
|
"step": 6570 |
|
}, |
|
{ |
|
"epoch": 0.48409939487575643, |
|
"grad_norm": 0.45547547936439514, |
|
"learning_rate": 9.52222331721493e-05, |
|
"loss": 3.3049, |
|
"step": 6580 |
|
}, |
|
{ |
|
"epoch": 0.48483510824182896, |
|
"grad_norm": 0.4486294090747833, |
|
"learning_rate": 9.520537254014045e-05, |
|
"loss": 3.2632, |
|
"step": 6590 |
|
}, |
|
{ |
|
"epoch": 0.48557082160790155, |
|
"grad_norm": 0.47906458377838135, |
|
"learning_rate": 9.518848370823205e-05, |
|
"loss": 3.2809, |
|
"step": 6600 |
|
}, |
|
{ |
|
"epoch": 0.48630653497397414, |
|
"grad_norm": 0.4234841763973236, |
|
"learning_rate": 9.517156668695962e-05, |
|
"loss": 3.2427, |
|
"step": 6610 |
|
}, |
|
{ |
|
"epoch": 0.4870422483400467, |
|
"grad_norm": 0.44342437386512756, |
|
"learning_rate": 9.515462148687633e-05, |
|
"loss": 3.246, |
|
"step": 6620 |
|
}, |
|
{ |
|
"epoch": 0.4877779617061193, |
|
"grad_norm": 0.4205591082572937, |
|
"learning_rate": 9.513764811855284e-05, |
|
"loss": 3.2596, |
|
"step": 6630 |
|
}, |
|
{ |
|
"epoch": 0.4885136750721919, |
|
"grad_norm": 0.4499843418598175, |
|
"learning_rate": 9.512064659257749e-05, |
|
"loss": 3.2535, |
|
"step": 6640 |
|
}, |
|
{ |
|
"epoch": 0.48924938843826443, |
|
"grad_norm": 0.4281398355960846, |
|
"learning_rate": 9.510361691955608e-05, |
|
"loss": 3.2301, |
|
"step": 6650 |
|
}, |
|
{ |
|
"epoch": 0.489985101804337, |
|
"grad_norm": 0.44669631123542786, |
|
"learning_rate": 9.508655911011204e-05, |
|
"loss": 3.2901, |
|
"step": 6660 |
|
}, |
|
{ |
|
"epoch": 0.4907208151704096, |
|
"grad_norm": 0.4267096221446991, |
|
"learning_rate": 9.506947317488634e-05, |
|
"loss": 3.2702, |
|
"step": 6670 |
|
}, |
|
{ |
|
"epoch": 0.4914565285364822, |
|
"grad_norm": 0.4454759657382965, |
|
"learning_rate": 9.505235912453746e-05, |
|
"loss": 3.2507, |
|
"step": 6680 |
|
}, |
|
{ |
|
"epoch": 0.4921922419025548, |
|
"grad_norm": 0.42352691292762756, |
|
"learning_rate": 9.503521696974146e-05, |
|
"loss": 3.2663, |
|
"step": 6690 |
|
}, |
|
{ |
|
"epoch": 0.4929279552686273, |
|
"grad_norm": 0.42943668365478516, |
|
"learning_rate": 9.50180467211919e-05, |
|
"loss": 3.2458, |
|
"step": 6700 |
|
}, |
|
{ |
|
"epoch": 0.4936636686346999, |
|
"grad_norm": 0.4593370854854584, |
|
"learning_rate": 9.50008483895999e-05, |
|
"loss": 3.2092, |
|
"step": 6710 |
|
}, |
|
{ |
|
"epoch": 0.4943993820007725, |
|
"grad_norm": 0.45478084683418274, |
|
"learning_rate": 9.498362198569406e-05, |
|
"loss": 3.3157, |
|
"step": 6720 |
|
}, |
|
{ |
|
"epoch": 0.4951350953668451, |
|
"grad_norm": 0.4786463975906372, |
|
"learning_rate": 9.496636752022053e-05, |
|
"loss": 3.274, |
|
"step": 6730 |
|
}, |
|
{ |
|
"epoch": 0.49587080873291767, |
|
"grad_norm": 0.4654693901538849, |
|
"learning_rate": 9.494908500394293e-05, |
|
"loss": 3.2673, |
|
"step": 6740 |
|
}, |
|
{ |
|
"epoch": 0.49660652209899026, |
|
"grad_norm": 0.4555895924568176, |
|
"learning_rate": 9.493177444764238e-05, |
|
"loss": 3.2796, |
|
"step": 6750 |
|
}, |
|
{ |
|
"epoch": 0.4973422354650628, |
|
"grad_norm": 0.4198709726333618, |
|
"learning_rate": 9.491443586211756e-05, |
|
"loss": 3.2293, |
|
"step": 6760 |
|
}, |
|
{ |
|
"epoch": 0.4980779488311354, |
|
"grad_norm": 0.416696161031723, |
|
"learning_rate": 9.489706925818454e-05, |
|
"loss": 3.2122, |
|
"step": 6770 |
|
}, |
|
{ |
|
"epoch": 0.49881366219720796, |
|
"grad_norm": 0.43891972303390503, |
|
"learning_rate": 9.487967464667691e-05, |
|
"loss": 3.2953, |
|
"step": 6780 |
|
}, |
|
{ |
|
"epoch": 0.49954937556328055, |
|
"grad_norm": 0.42918211221694946, |
|
"learning_rate": 9.486225203844575e-05, |
|
"loss": 3.2869, |
|
"step": 6790 |
|
}, |
|
{ |
|
"epoch": 0.5002850889293531, |
|
"grad_norm": 0.4077829420566559, |
|
"learning_rate": 9.484480144435958e-05, |
|
"loss": 3.2129, |
|
"step": 6800 |
|
}, |
|
{ |
|
"epoch": 0.5010208022954257, |
|
"grad_norm": 0.4325737953186035, |
|
"learning_rate": 9.482732287530438e-05, |
|
"loss": 3.2208, |
|
"step": 6810 |
|
}, |
|
{ |
|
"epoch": 0.5017565156614983, |
|
"grad_norm": 0.4620782732963562, |
|
"learning_rate": 9.48098163421836e-05, |
|
"loss": 3.2308, |
|
"step": 6820 |
|
}, |
|
{ |
|
"epoch": 0.5024922290275708, |
|
"grad_norm": 0.4308145046234131, |
|
"learning_rate": 9.479228185591809e-05, |
|
"loss": 3.2112, |
|
"step": 6830 |
|
}, |
|
{ |
|
"epoch": 0.5032279423936434, |
|
"grad_norm": 0.44704335927963257, |
|
"learning_rate": 9.47747194274462e-05, |
|
"loss": 3.2742, |
|
"step": 6840 |
|
}, |
|
{ |
|
"epoch": 0.503963655759716, |
|
"grad_norm": 0.4341581165790558, |
|
"learning_rate": 9.475712906772367e-05, |
|
"loss": 3.2398, |
|
"step": 6850 |
|
}, |
|
{ |
|
"epoch": 0.5046993691257886, |
|
"grad_norm": 0.4126861095428467, |
|
"learning_rate": 9.473951078772367e-05, |
|
"loss": 3.2395, |
|
"step": 6860 |
|
}, |
|
{ |
|
"epoch": 0.5054350824918612, |
|
"grad_norm": 0.4292146861553192, |
|
"learning_rate": 9.47218645984368e-05, |
|
"loss": 3.2677, |
|
"step": 6870 |
|
}, |
|
{ |
|
"epoch": 0.5061707958579338, |
|
"grad_norm": 0.38945844769477844, |
|
"learning_rate": 9.470419051087104e-05, |
|
"loss": 3.3027, |
|
"step": 6880 |
|
}, |
|
{ |
|
"epoch": 0.5069065092240064, |
|
"grad_norm": 0.42329898476600647, |
|
"learning_rate": 9.46864885360518e-05, |
|
"loss": 3.2205, |
|
"step": 6890 |
|
}, |
|
{ |
|
"epoch": 0.5076422225900789, |
|
"grad_norm": 0.4216727614402771, |
|
"learning_rate": 9.466875868502189e-05, |
|
"loss": 3.2631, |
|
"step": 6900 |
|
}, |
|
{ |
|
"epoch": 0.5083779359561514, |
|
"grad_norm": 0.45686569809913635, |
|
"learning_rate": 9.46510009688415e-05, |
|
"loss": 3.256, |
|
"step": 6910 |
|
}, |
|
{ |
|
"epoch": 0.509113649322224, |
|
"grad_norm": 0.4636845588684082, |
|
"learning_rate": 9.463321539858821e-05, |
|
"loss": 3.2798, |
|
"step": 6920 |
|
}, |
|
{ |
|
"epoch": 0.5098493626882966, |
|
"grad_norm": 0.42751073837280273, |
|
"learning_rate": 9.461540198535695e-05, |
|
"loss": 3.2488, |
|
"step": 6930 |
|
}, |
|
{ |
|
"epoch": 0.5105850760543692, |
|
"grad_norm": 0.44353827834129333, |
|
"learning_rate": 9.459756074026003e-05, |
|
"loss": 3.3067, |
|
"step": 6940 |
|
}, |
|
{ |
|
"epoch": 0.5113207894204418, |
|
"grad_norm": 0.43093836307525635, |
|
"learning_rate": 9.457969167442716e-05, |
|
"loss": 3.2447, |
|
"step": 6950 |
|
}, |
|
{ |
|
"epoch": 0.5120565027865144, |
|
"grad_norm": 0.4117187261581421, |
|
"learning_rate": 9.456179479900536e-05, |
|
"loss": 3.2757, |
|
"step": 6960 |
|
}, |
|
{ |
|
"epoch": 0.512792216152587, |
|
"grad_norm": 0.4384092688560486, |
|
"learning_rate": 9.454387012515902e-05, |
|
"loss": 3.244, |
|
"step": 6970 |
|
}, |
|
{ |
|
"epoch": 0.5135279295186596, |
|
"grad_norm": 0.44672396779060364, |
|
"learning_rate": 9.452591766406983e-05, |
|
"loss": 3.2527, |
|
"step": 6980 |
|
}, |
|
{ |
|
"epoch": 0.5142636428847321, |
|
"grad_norm": 0.41822147369384766, |
|
"learning_rate": 9.45079374269369e-05, |
|
"loss": 3.2535, |
|
"step": 6990 |
|
}, |
|
{ |
|
"epoch": 0.5149993562508047, |
|
"grad_norm": 0.4356013834476471, |
|
"learning_rate": 9.448992942497659e-05, |
|
"loss": 3.2886, |
|
"step": 7000 |
|
}, |
|
{ |
|
"epoch": 0.5157350696168773, |
|
"grad_norm": 0.4153957664966583, |
|
"learning_rate": 9.44718936694226e-05, |
|
"loss": 3.2152, |
|
"step": 7010 |
|
}, |
|
{ |
|
"epoch": 0.5164707829829498, |
|
"grad_norm": 0.4125785827636719, |
|
"learning_rate": 9.445383017152596e-05, |
|
"loss": 3.2183, |
|
"step": 7020 |
|
}, |
|
{ |
|
"epoch": 0.5172064963490224, |
|
"grad_norm": 0.41547253727912903, |
|
"learning_rate": 9.4435738942555e-05, |
|
"loss": 3.2421, |
|
"step": 7030 |
|
}, |
|
{ |
|
"epoch": 0.517942209715095, |
|
"grad_norm": 0.44014525413513184, |
|
"learning_rate": 9.441761999379533e-05, |
|
"loss": 3.2572, |
|
"step": 7040 |
|
}, |
|
{ |
|
"epoch": 0.5186779230811676, |
|
"grad_norm": 0.4082861542701721, |
|
"learning_rate": 9.439947333654985e-05, |
|
"loss": 3.2241, |
|
"step": 7050 |
|
}, |
|
{ |
|
"epoch": 0.5194136364472401, |
|
"grad_norm": 0.4412677586078644, |
|
"learning_rate": 9.438129898213879e-05, |
|
"loss": 3.2687, |
|
"step": 7060 |
|
}, |
|
{ |
|
"epoch": 0.5201493498133127, |
|
"grad_norm": 0.42670226097106934, |
|
"learning_rate": 9.436309694189963e-05, |
|
"loss": 3.2835, |
|
"step": 7070 |
|
}, |
|
{ |
|
"epoch": 0.5208850631793853, |
|
"grad_norm": 0.4251077473163605, |
|
"learning_rate": 9.434486722718712e-05, |
|
"loss": 3.2057, |
|
"step": 7080 |
|
}, |
|
{ |
|
"epoch": 0.5216207765454579, |
|
"grad_norm": 0.4343535602092743, |
|
"learning_rate": 9.432660984937325e-05, |
|
"loss": 3.2419, |
|
"step": 7090 |
|
}, |
|
{ |
|
"epoch": 0.5223564899115305, |
|
"grad_norm": 0.43313470482826233, |
|
"learning_rate": 9.430832481984731e-05, |
|
"loss": 3.2737, |
|
"step": 7100 |
|
}, |
|
{ |
|
"epoch": 0.5230922032776031, |
|
"grad_norm": 0.4118582308292389, |
|
"learning_rate": 9.429001215001581e-05, |
|
"loss": 3.2394, |
|
"step": 7110 |
|
}, |
|
{ |
|
"epoch": 0.5238279166436757, |
|
"grad_norm": 0.41588348150253296, |
|
"learning_rate": 9.427167185130252e-05, |
|
"loss": 3.232, |
|
"step": 7120 |
|
}, |
|
{ |
|
"epoch": 0.5245636300097481, |
|
"grad_norm": 0.40552860498428345, |
|
"learning_rate": 9.425330393514843e-05, |
|
"loss": 3.2939, |
|
"step": 7130 |
|
}, |
|
{ |
|
"epoch": 0.5252993433758207, |
|
"grad_norm": 0.45274195075035095, |
|
"learning_rate": 9.423490841301178e-05, |
|
"loss": 3.2761, |
|
"step": 7140 |
|
}, |
|
{ |
|
"epoch": 0.5260350567418933, |
|
"grad_norm": 0.43010491132736206, |
|
"learning_rate": 9.421648529636801e-05, |
|
"loss": 3.2386, |
|
"step": 7150 |
|
}, |
|
{ |
|
"epoch": 0.5267707701079659, |
|
"grad_norm": 0.4486023783683777, |
|
"learning_rate": 9.41980345967098e-05, |
|
"loss": 3.2547, |
|
"step": 7160 |
|
}, |
|
{ |
|
"epoch": 0.5275064834740385, |
|
"grad_norm": 0.43645188212394714, |
|
"learning_rate": 9.417955632554697e-05, |
|
"loss": 3.2605, |
|
"step": 7170 |
|
}, |
|
{ |
|
"epoch": 0.5282421968401111, |
|
"grad_norm": 0.4431368410587311, |
|
"learning_rate": 9.416105049440662e-05, |
|
"loss": 3.2333, |
|
"step": 7180 |
|
}, |
|
{ |
|
"epoch": 0.5289779102061837, |
|
"grad_norm": 0.447656512260437, |
|
"learning_rate": 9.414251711483303e-05, |
|
"loss": 3.267, |
|
"step": 7190 |
|
}, |
|
{ |
|
"epoch": 0.5297136235722563, |
|
"grad_norm": 0.4308032989501953, |
|
"learning_rate": 9.41239561983876e-05, |
|
"loss": 3.2702, |
|
"step": 7200 |
|
}, |
|
{ |
|
"epoch": 0.5304493369383289, |
|
"grad_norm": 0.42887935042381287, |
|
"learning_rate": 9.4105367756649e-05, |
|
"loss": 3.2607, |
|
"step": 7210 |
|
}, |
|
{ |
|
"epoch": 0.5311850503044014, |
|
"grad_norm": 0.44105619192123413, |
|
"learning_rate": 9.408675180121298e-05, |
|
"loss": 3.2511, |
|
"step": 7220 |
|
}, |
|
{ |
|
"epoch": 0.531920763670474, |
|
"grad_norm": 0.41123899817466736, |
|
"learning_rate": 9.406810834369253e-05, |
|
"loss": 3.2257, |
|
"step": 7230 |
|
}, |
|
{ |
|
"epoch": 0.5326564770365466, |
|
"grad_norm": 0.4103626310825348, |
|
"learning_rate": 9.404943739571774e-05, |
|
"loss": 3.2482, |
|
"step": 7240 |
|
}, |
|
{ |
|
"epoch": 0.5333921904026191, |
|
"grad_norm": 0.4202837646007538, |
|
"learning_rate": 9.40307389689359e-05, |
|
"loss": 3.2751, |
|
"step": 7250 |
|
}, |
|
{ |
|
"epoch": 0.5341279037686917, |
|
"grad_norm": 0.4100697934627533, |
|
"learning_rate": 9.401201307501139e-05, |
|
"loss": 3.2473, |
|
"step": 7260 |
|
}, |
|
{ |
|
"epoch": 0.5348636171347643, |
|
"grad_norm": 0.41449519991874695, |
|
"learning_rate": 9.399325972562576e-05, |
|
"loss": 3.1886, |
|
"step": 7270 |
|
}, |
|
{ |
|
"epoch": 0.5355993305008369, |
|
"grad_norm": 0.42789408564567566, |
|
"learning_rate": 9.397447893247767e-05, |
|
"loss": 3.2318, |
|
"step": 7280 |
|
}, |
|
{ |
|
"epoch": 0.5363350438669094, |
|
"grad_norm": 0.4444492757320404, |
|
"learning_rate": 9.395567070728292e-05, |
|
"loss": 3.2582, |
|
"step": 7290 |
|
}, |
|
{ |
|
"epoch": 0.537070757232982, |
|
"grad_norm": 0.4582795798778534, |
|
"learning_rate": 9.39368350617744e-05, |
|
"loss": 3.296, |
|
"step": 7300 |
|
}, |
|
{ |
|
"epoch": 0.5378064705990546, |
|
"grad_norm": 0.41776713728904724, |
|
"learning_rate": 9.39179720077021e-05, |
|
"loss": 3.2169, |
|
"step": 7310 |
|
}, |
|
{ |
|
"epoch": 0.5385421839651272, |
|
"grad_norm": 0.4293855130672455, |
|
"learning_rate": 9.389908155683315e-05, |
|
"loss": 3.2569, |
|
"step": 7320 |
|
}, |
|
{ |
|
"epoch": 0.5392778973311998, |
|
"grad_norm": 0.4292116165161133, |
|
"learning_rate": 9.388016372095172e-05, |
|
"loss": 3.2732, |
|
"step": 7330 |
|
}, |
|
{ |
|
"epoch": 0.5400136106972724, |
|
"grad_norm": 0.40844982862472534, |
|
"learning_rate": 9.38612185118591e-05, |
|
"loss": 3.2535, |
|
"step": 7340 |
|
}, |
|
{ |
|
"epoch": 0.540749324063345, |
|
"grad_norm": 0.4502781331539154, |
|
"learning_rate": 9.384224594137363e-05, |
|
"loss": 3.3008, |
|
"step": 7350 |
|
}, |
|
{ |
|
"epoch": 0.5414850374294174, |
|
"grad_norm": 0.41482973098754883, |
|
"learning_rate": 9.382324602133075e-05, |
|
"loss": 3.2708, |
|
"step": 7360 |
|
}, |
|
{ |
|
"epoch": 0.54222075079549, |
|
"grad_norm": 0.407712459564209, |
|
"learning_rate": 9.380421876358289e-05, |
|
"loss": 3.2193, |
|
"step": 7370 |
|
}, |
|
{ |
|
"epoch": 0.5429564641615626, |
|
"grad_norm": 0.40948861837387085, |
|
"learning_rate": 9.378516417999966e-05, |
|
"loss": 3.2599, |
|
"step": 7380 |
|
}, |
|
{ |
|
"epoch": 0.5436921775276352, |
|
"grad_norm": 0.4116959273815155, |
|
"learning_rate": 9.376608228246759e-05, |
|
"loss": 3.212, |
|
"step": 7390 |
|
}, |
|
{ |
|
"epoch": 0.5444278908937078, |
|
"grad_norm": 0.40750423073768616, |
|
"learning_rate": 9.374697308289033e-05, |
|
"loss": 3.228, |
|
"step": 7400 |
|
}, |
|
{ |
|
"epoch": 0.5451636042597804, |
|
"grad_norm": 0.4093017280101776, |
|
"learning_rate": 9.372783659318852e-05, |
|
"loss": 3.2442, |
|
"step": 7410 |
|
}, |
|
{ |
|
"epoch": 0.545899317625853, |
|
"grad_norm": 0.4156046509742737, |
|
"learning_rate": 9.37086728252998e-05, |
|
"loss": 3.2938, |
|
"step": 7420 |
|
}, |
|
{ |
|
"epoch": 0.5466350309919256, |
|
"grad_norm": 0.4261894226074219, |
|
"learning_rate": 9.368948179117891e-05, |
|
"loss": 3.2177, |
|
"step": 7430 |
|
}, |
|
{ |
|
"epoch": 0.5473707443579982, |
|
"grad_norm": 0.4255836606025696, |
|
"learning_rate": 9.367026350279755e-05, |
|
"loss": 3.2701, |
|
"step": 7440 |
|
}, |
|
{ |
|
"epoch": 0.5481064577240707, |
|
"grad_norm": 0.43402138352394104, |
|
"learning_rate": 9.365101797214439e-05, |
|
"loss": 3.2173, |
|
"step": 7450 |
|
}, |
|
{ |
|
"epoch": 0.5488421710901433, |
|
"grad_norm": 0.4054631292819977, |
|
"learning_rate": 9.363174521122516e-05, |
|
"loss": 3.2659, |
|
"step": 7460 |
|
}, |
|
{ |
|
"epoch": 0.5495778844562158, |
|
"grad_norm": 0.4579883813858032, |
|
"learning_rate": 9.361244523206255e-05, |
|
"loss": 3.2249, |
|
"step": 7470 |
|
}, |
|
{ |
|
"epoch": 0.5503135978222884, |
|
"grad_norm": 0.40640729665756226, |
|
"learning_rate": 9.359311804669617e-05, |
|
"loss": 3.1534, |
|
"step": 7480 |
|
}, |
|
{ |
|
"epoch": 0.551049311188361, |
|
"grad_norm": 0.4273945093154907, |
|
"learning_rate": 9.35737636671827e-05, |
|
"loss": 3.271, |
|
"step": 7490 |
|
}, |
|
{ |
|
"epoch": 0.5517850245544336, |
|
"grad_norm": 0.4102952778339386, |
|
"learning_rate": 9.355438210559575e-05, |
|
"loss": 3.2139, |
|
"step": 7500 |
|
}, |
|
{ |
|
"epoch": 0.5525207379205062, |
|
"grad_norm": 0.43881139159202576, |
|
"learning_rate": 9.353497337402583e-05, |
|
"loss": 3.2115, |
|
"step": 7510 |
|
}, |
|
{ |
|
"epoch": 0.5532564512865787, |
|
"grad_norm": 0.4113704264163971, |
|
"learning_rate": 9.351553748458049e-05, |
|
"loss": 3.2322, |
|
"step": 7520 |
|
}, |
|
{ |
|
"epoch": 0.5539921646526513, |
|
"grad_norm": 0.4157336950302124, |
|
"learning_rate": 9.349607444938416e-05, |
|
"loss": 3.2485, |
|
"step": 7530 |
|
}, |
|
{ |
|
"epoch": 0.5547278780187239, |
|
"grad_norm": 0.4389907717704773, |
|
"learning_rate": 9.34765842805782e-05, |
|
"loss": 3.2542, |
|
"step": 7540 |
|
}, |
|
{ |
|
"epoch": 0.5554635913847965, |
|
"grad_norm": 0.45203500986099243, |
|
"learning_rate": 9.345706699032093e-05, |
|
"loss": 3.2511, |
|
"step": 7550 |
|
}, |
|
{ |
|
"epoch": 0.5561993047508691, |
|
"grad_norm": 0.42221277952194214, |
|
"learning_rate": 9.343752259078761e-05, |
|
"loss": 3.2598, |
|
"step": 7560 |
|
}, |
|
{ |
|
"epoch": 0.5569350181169417, |
|
"grad_norm": 0.4307563304901123, |
|
"learning_rate": 9.341795109417033e-05, |
|
"loss": 3.2495, |
|
"step": 7570 |
|
}, |
|
{ |
|
"epoch": 0.5576707314830143, |
|
"grad_norm": 0.40989741683006287, |
|
"learning_rate": 9.339835251267816e-05, |
|
"loss": 3.2723, |
|
"step": 7580 |
|
}, |
|
{ |
|
"epoch": 0.5584064448490867, |
|
"grad_norm": 0.4154078960418701, |
|
"learning_rate": 9.337872685853702e-05, |
|
"loss": 3.1988, |
|
"step": 7590 |
|
}, |
|
{ |
|
"epoch": 0.5591421582151593, |
|
"grad_norm": 0.4232662618160248, |
|
"learning_rate": 9.335907414398975e-05, |
|
"loss": 3.2036, |
|
"step": 7600 |
|
}, |
|
{ |
|
"epoch": 0.5598778715812319, |
|
"grad_norm": 0.4037014842033386, |
|
"learning_rate": 9.333939438129603e-05, |
|
"loss": 3.2554, |
|
"step": 7610 |
|
}, |
|
{ |
|
"epoch": 0.5606135849473045, |
|
"grad_norm": 0.4122985005378723, |
|
"learning_rate": 9.331968758273248e-05, |
|
"loss": 3.2355, |
|
"step": 7620 |
|
}, |
|
{ |
|
"epoch": 0.5613492983133771, |
|
"grad_norm": 0.40354782342910767, |
|
"learning_rate": 9.32999537605925e-05, |
|
"loss": 3.2135, |
|
"step": 7630 |
|
}, |
|
{ |
|
"epoch": 0.5620850116794497, |
|
"grad_norm": 0.43905869126319885, |
|
"learning_rate": 9.328019292718643e-05, |
|
"loss": 3.139, |
|
"step": 7640 |
|
}, |
|
{ |
|
"epoch": 0.5628207250455223, |
|
"grad_norm": 0.4707077443599701, |
|
"learning_rate": 9.326040509484139e-05, |
|
"loss": 3.2188, |
|
"step": 7650 |
|
}, |
|
{ |
|
"epoch": 0.5635564384115949, |
|
"grad_norm": 0.4245714247226715, |
|
"learning_rate": 9.324059027590142e-05, |
|
"loss": 3.2614, |
|
"step": 7660 |
|
}, |
|
{ |
|
"epoch": 0.5642921517776675, |
|
"grad_norm": 0.4019360840320587, |
|
"learning_rate": 9.32207484827273e-05, |
|
"loss": 3.2512, |
|
"step": 7670 |
|
}, |
|
{ |
|
"epoch": 0.56502786514374, |
|
"grad_norm": 0.4229870140552521, |
|
"learning_rate": 9.320087972769671e-05, |
|
"loss": 3.2514, |
|
"step": 7680 |
|
}, |
|
{ |
|
"epoch": 0.5657635785098126, |
|
"grad_norm": 0.401112824678421, |
|
"learning_rate": 9.318098402320412e-05, |
|
"loss": 3.2666, |
|
"step": 7690 |
|
}, |
|
{ |
|
"epoch": 0.5664992918758851, |
|
"grad_norm": 0.396757036447525, |
|
"learning_rate": 9.316106138166082e-05, |
|
"loss": 3.2273, |
|
"step": 7700 |
|
}, |
|
{ |
|
"epoch": 0.5672350052419577, |
|
"grad_norm": 0.44432303309440613, |
|
"learning_rate": 9.31411118154949e-05, |
|
"loss": 3.2244, |
|
"step": 7710 |
|
}, |
|
{ |
|
"epoch": 0.5679707186080303, |
|
"grad_norm": 0.4479587972164154, |
|
"learning_rate": 9.312113533715125e-05, |
|
"loss": 3.2359, |
|
"step": 7720 |
|
}, |
|
{ |
|
"epoch": 0.5687064319741029, |
|
"grad_norm": 0.4089357554912567, |
|
"learning_rate": 9.310113195909155e-05, |
|
"loss": 3.2186, |
|
"step": 7730 |
|
}, |
|
{ |
|
"epoch": 0.5694421453401755, |
|
"grad_norm": 0.3963668644428253, |
|
"learning_rate": 9.308110169379425e-05, |
|
"loss": 3.2326, |
|
"step": 7740 |
|
}, |
|
{ |
|
"epoch": 0.570177858706248, |
|
"grad_norm": 0.4088764190673828, |
|
"learning_rate": 9.306104455375457e-05, |
|
"loss": 3.2348, |
|
"step": 7750 |
|
}, |
|
{ |
|
"epoch": 0.5709135720723206, |
|
"grad_norm": 0.41223156452178955, |
|
"learning_rate": 9.304096055148452e-05, |
|
"loss": 3.2306, |
|
"step": 7760 |
|
}, |
|
{ |
|
"epoch": 0.5716492854383932, |
|
"grad_norm": 0.4141092896461487, |
|
"learning_rate": 9.302084969951283e-05, |
|
"loss": 3.1796, |
|
"step": 7770 |
|
}, |
|
{ |
|
"epoch": 0.5723849988044658, |
|
"grad_norm": 0.4188261330127716, |
|
"learning_rate": 9.300071201038503e-05, |
|
"loss": 3.2539, |
|
"step": 7780 |
|
}, |
|
{ |
|
"epoch": 0.5731207121705384, |
|
"grad_norm": 0.42103490233421326, |
|
"learning_rate": 9.298054749666334e-05, |
|
"loss": 3.2387, |
|
"step": 7790 |
|
}, |
|
{ |
|
"epoch": 0.573856425536611, |
|
"grad_norm": 0.41063937544822693, |
|
"learning_rate": 9.296035617092673e-05, |
|
"loss": 3.2052, |
|
"step": 7800 |
|
}, |
|
{ |
|
"epoch": 0.5745921389026836, |
|
"grad_norm": 0.41358301043510437, |
|
"learning_rate": 9.294013804577093e-05, |
|
"loss": 3.2474, |
|
"step": 7810 |
|
}, |
|
{ |
|
"epoch": 0.575327852268756, |
|
"grad_norm": 0.4047495722770691, |
|
"learning_rate": 9.291989313380833e-05, |
|
"loss": 3.1907, |
|
"step": 7820 |
|
}, |
|
{ |
|
"epoch": 0.5760635656348286, |
|
"grad_norm": 0.4267469644546509, |
|
"learning_rate": 9.289962144766806e-05, |
|
"loss": 3.2109, |
|
"step": 7830 |
|
}, |
|
{ |
|
"epoch": 0.5767992790009012, |
|
"grad_norm": 0.42642202973365784, |
|
"learning_rate": 9.287932299999597e-05, |
|
"loss": 3.2168, |
|
"step": 7840 |
|
}, |
|
{ |
|
"epoch": 0.5775349923669738, |
|
"grad_norm": 0.4209352433681488, |
|
"learning_rate": 9.285899780345458e-05, |
|
"loss": 3.2363, |
|
"step": 7850 |
|
}, |
|
{ |
|
"epoch": 0.5782707057330464, |
|
"grad_norm": 0.39841902256011963, |
|
"learning_rate": 9.283864587072311e-05, |
|
"loss": 3.2399, |
|
"step": 7860 |
|
}, |
|
{ |
|
"epoch": 0.579006419099119, |
|
"grad_norm": 0.42725417017936707, |
|
"learning_rate": 9.281826721449741e-05, |
|
"loss": 3.224, |
|
"step": 7870 |
|
}, |
|
{ |
|
"epoch": 0.5797421324651916, |
|
"grad_norm": 0.4100605845451355, |
|
"learning_rate": 9.279786184749009e-05, |
|
"loss": 3.184, |
|
"step": 7880 |
|
}, |
|
{ |
|
"epoch": 0.5804778458312642, |
|
"grad_norm": 0.4014204144477844, |
|
"learning_rate": 9.277742978243033e-05, |
|
"loss": 3.2129, |
|
"step": 7890 |
|
}, |
|
{ |
|
"epoch": 0.5812135591973367, |
|
"grad_norm": 0.4076472222805023, |
|
"learning_rate": 9.275697103206406e-05, |
|
"loss": 3.2587, |
|
"step": 7900 |
|
}, |
|
{ |
|
"epoch": 0.5819492725634093, |
|
"grad_norm": 0.4247482120990753, |
|
"learning_rate": 9.273648560915376e-05, |
|
"loss": 3.2307, |
|
"step": 7910 |
|
}, |
|
{ |
|
"epoch": 0.5826849859294819, |
|
"grad_norm": 0.3832865059375763, |
|
"learning_rate": 9.271597352647861e-05, |
|
"loss": 3.2394, |
|
"step": 7920 |
|
}, |
|
{ |
|
"epoch": 0.5834206992955544, |
|
"grad_norm": 0.38227957487106323, |
|
"learning_rate": 9.269543479683441e-05, |
|
"loss": 3.2031, |
|
"step": 7930 |
|
}, |
|
{ |
|
"epoch": 0.584156412661627, |
|
"grad_norm": 0.4235737919807434, |
|
"learning_rate": 9.267486943303358e-05, |
|
"loss": 3.2341, |
|
"step": 7940 |
|
}, |
|
{ |
|
"epoch": 0.5848921260276996, |
|
"grad_norm": 0.40164512395858765, |
|
"learning_rate": 9.265427744790513e-05, |
|
"loss": 3.1821, |
|
"step": 7950 |
|
}, |
|
{ |
|
"epoch": 0.5856278393937722, |
|
"grad_norm": 0.40419676899909973, |
|
"learning_rate": 9.263365885429474e-05, |
|
"loss": 3.2159, |
|
"step": 7960 |
|
}, |
|
{ |
|
"epoch": 0.5863635527598448, |
|
"grad_norm": 0.38921478390693665, |
|
"learning_rate": 9.26130136650646e-05, |
|
"loss": 3.2628, |
|
"step": 7970 |
|
}, |
|
{ |
|
"epoch": 0.5870992661259173, |
|
"grad_norm": 0.43494653701782227, |
|
"learning_rate": 9.259234189309359e-05, |
|
"loss": 3.1855, |
|
"step": 7980 |
|
}, |
|
{ |
|
"epoch": 0.5878349794919899, |
|
"grad_norm": 0.4748268127441406, |
|
"learning_rate": 9.257164355127705e-05, |
|
"loss": 3.2326, |
|
"step": 7990 |
|
}, |
|
{ |
|
"epoch": 0.5885706928580625, |
|
"grad_norm": 0.40674278140068054, |
|
"learning_rate": 9.255091865252704e-05, |
|
"loss": 3.2202, |
|
"step": 8000 |
|
}, |
|
{ |
|
"epoch": 0.5893064062241351, |
|
"grad_norm": 0.4126112759113312, |
|
"learning_rate": 9.253016720977208e-05, |
|
"loss": 3.1978, |
|
"step": 8010 |
|
}, |
|
{ |
|
"epoch": 0.5900421195902077, |
|
"grad_norm": 0.40558794140815735, |
|
"learning_rate": 9.250938923595729e-05, |
|
"loss": 3.2041, |
|
"step": 8020 |
|
}, |
|
{ |
|
"epoch": 0.5907778329562803, |
|
"grad_norm": 0.41431719064712524, |
|
"learning_rate": 9.248858474404431e-05, |
|
"loss": 3.2145, |
|
"step": 8030 |
|
}, |
|
{ |
|
"epoch": 0.5915135463223528, |
|
"grad_norm": 0.4020199477672577, |
|
"learning_rate": 9.246775374701139e-05, |
|
"loss": 3.1712, |
|
"step": 8040 |
|
}, |
|
{ |
|
"epoch": 0.5922492596884253, |
|
"grad_norm": 0.4117688238620758, |
|
"learning_rate": 9.244689625785323e-05, |
|
"loss": 3.2534, |
|
"step": 8050 |
|
}, |
|
{ |
|
"epoch": 0.5929849730544979, |
|
"grad_norm": 0.4227346181869507, |
|
"learning_rate": 9.24260122895811e-05, |
|
"loss": 3.2094, |
|
"step": 8060 |
|
}, |
|
{ |
|
"epoch": 0.5937206864205705, |
|
"grad_norm": 0.3910035490989685, |
|
"learning_rate": 9.24051018552228e-05, |
|
"loss": 3.2119, |
|
"step": 8070 |
|
}, |
|
{ |
|
"epoch": 0.5944563997866431, |
|
"grad_norm": 0.41752034425735474, |
|
"learning_rate": 9.238416496782261e-05, |
|
"loss": 3.1458, |
|
"step": 8080 |
|
}, |
|
{ |
|
"epoch": 0.5951921131527157, |
|
"grad_norm": 0.40074622631073, |
|
"learning_rate": 9.236320164044132e-05, |
|
"loss": 3.2612, |
|
"step": 8090 |
|
}, |
|
{ |
|
"epoch": 0.5959278265187883, |
|
"grad_norm": 0.4130959212779999, |
|
"learning_rate": 9.234221188615624e-05, |
|
"loss": 3.1766, |
|
"step": 8100 |
|
}, |
|
{ |
|
"epoch": 0.5966635398848609, |
|
"grad_norm": 0.4178720712661743, |
|
"learning_rate": 9.232119571806114e-05, |
|
"loss": 3.2147, |
|
"step": 8110 |
|
}, |
|
{ |
|
"epoch": 0.5973992532509335, |
|
"grad_norm": 0.408380925655365, |
|
"learning_rate": 9.230015314926625e-05, |
|
"loss": 3.2038, |
|
"step": 8120 |
|
}, |
|
{ |
|
"epoch": 0.598134966617006, |
|
"grad_norm": 0.40057528018951416, |
|
"learning_rate": 9.227908419289833e-05, |
|
"loss": 3.203, |
|
"step": 8130 |
|
}, |
|
{ |
|
"epoch": 0.5988706799830786, |
|
"grad_norm": 0.42866384983062744, |
|
"learning_rate": 9.225798886210052e-05, |
|
"loss": 3.217, |
|
"step": 8140 |
|
}, |
|
{ |
|
"epoch": 0.5996063933491512, |
|
"grad_norm": 0.4330470860004425, |
|
"learning_rate": 9.223686717003248e-05, |
|
"loss": 3.1826, |
|
"step": 8150 |
|
}, |
|
{ |
|
"epoch": 0.6003421067152237, |
|
"grad_norm": 0.389643132686615, |
|
"learning_rate": 9.221571912987029e-05, |
|
"loss": 3.2339, |
|
"step": 8160 |
|
}, |
|
{ |
|
"epoch": 0.6010778200812963, |
|
"grad_norm": 0.412850558757782, |
|
"learning_rate": 9.219454475480644e-05, |
|
"loss": 3.1956, |
|
"step": 8170 |
|
}, |
|
{ |
|
"epoch": 0.6018135334473689, |
|
"grad_norm": 0.4014042317867279, |
|
"learning_rate": 9.217334405804991e-05, |
|
"loss": 3.1871, |
|
"step": 8180 |
|
}, |
|
{ |
|
"epoch": 0.6025492468134415, |
|
"grad_norm": 0.4323343336582184, |
|
"learning_rate": 9.215211705282604e-05, |
|
"loss": 3.2368, |
|
"step": 8190 |
|
}, |
|
{ |
|
"epoch": 0.603284960179514, |
|
"grad_norm": 0.4045550227165222, |
|
"learning_rate": 9.213086375237663e-05, |
|
"loss": 3.186, |
|
"step": 8200 |
|
}, |
|
{ |
|
"epoch": 0.6040206735455866, |
|
"grad_norm": 0.39329978823661804, |
|
"learning_rate": 9.210958416995984e-05, |
|
"loss": 3.2015, |
|
"step": 8210 |
|
}, |
|
{ |
|
"epoch": 0.6047563869116592, |
|
"grad_norm": 0.40735575556755066, |
|
"learning_rate": 9.208827831885026e-05, |
|
"loss": 3.2184, |
|
"step": 8220 |
|
}, |
|
{ |
|
"epoch": 0.6054921002777318, |
|
"grad_norm": 0.4116288125514984, |
|
"learning_rate": 9.206694621233882e-05, |
|
"loss": 3.2264, |
|
"step": 8230 |
|
}, |
|
{ |
|
"epoch": 0.6062278136438044, |
|
"grad_norm": 0.4052058458328247, |
|
"learning_rate": 9.204558786373288e-05, |
|
"loss": 3.2455, |
|
"step": 8240 |
|
}, |
|
{ |
|
"epoch": 0.606963527009877, |
|
"grad_norm": 0.41139885783195496, |
|
"learning_rate": 9.202420328635616e-05, |
|
"loss": 3.1959, |
|
"step": 8250 |
|
}, |
|
{ |
|
"epoch": 0.6076992403759496, |
|
"grad_norm": 0.4093032777309418, |
|
"learning_rate": 9.200279249354872e-05, |
|
"loss": 3.1881, |
|
"step": 8260 |
|
}, |
|
{ |
|
"epoch": 0.608434953742022, |
|
"grad_norm": 0.443782240152359, |
|
"learning_rate": 9.198135549866699e-05, |
|
"loss": 3.2383, |
|
"step": 8270 |
|
}, |
|
{ |
|
"epoch": 0.6091706671080946, |
|
"grad_norm": 0.42610660195350647, |
|
"learning_rate": 9.195989231508373e-05, |
|
"loss": 3.2359, |
|
"step": 8280 |
|
}, |
|
{ |
|
"epoch": 0.6099063804741672, |
|
"grad_norm": 0.4072706401348114, |
|
"learning_rate": 9.193840295618807e-05, |
|
"loss": 3.2154, |
|
"step": 8290 |
|
}, |
|
{ |
|
"epoch": 0.6106420938402398, |
|
"grad_norm": 0.4002029299736023, |
|
"learning_rate": 9.191688743538542e-05, |
|
"loss": 3.1847, |
|
"step": 8300 |
|
}, |
|
{ |
|
"epoch": 0.6113778072063124, |
|
"grad_norm": 0.4040053188800812, |
|
"learning_rate": 9.189534576609757e-05, |
|
"loss": 3.2215, |
|
"step": 8310 |
|
}, |
|
{ |
|
"epoch": 0.612113520572385, |
|
"grad_norm": 0.4218162000179291, |
|
"learning_rate": 9.187377796176256e-05, |
|
"loss": 3.2408, |
|
"step": 8320 |
|
}, |
|
{ |
|
"epoch": 0.6128492339384576, |
|
"grad_norm": 0.42694374918937683, |
|
"learning_rate": 9.185218403583478e-05, |
|
"loss": 3.2176, |
|
"step": 8330 |
|
}, |
|
{ |
|
"epoch": 0.6135849473045302, |
|
"grad_norm": 0.40308234095573425, |
|
"learning_rate": 9.18305640017849e-05, |
|
"loss": 3.1799, |
|
"step": 8340 |
|
}, |
|
{ |
|
"epoch": 0.6143206606706028, |
|
"grad_norm": 0.3952566981315613, |
|
"learning_rate": 9.180891787309985e-05, |
|
"loss": 3.244, |
|
"step": 8350 |
|
}, |
|
{ |
|
"epoch": 0.6150563740366753, |
|
"grad_norm": 0.39796093106269836, |
|
"learning_rate": 9.178724566328287e-05, |
|
"loss": 3.1809, |
|
"step": 8360 |
|
}, |
|
{ |
|
"epoch": 0.6157920874027479, |
|
"grad_norm": 0.41804805397987366, |
|
"learning_rate": 9.176554738585348e-05, |
|
"loss": 3.228, |
|
"step": 8370 |
|
}, |
|
{ |
|
"epoch": 0.6165278007688205, |
|
"grad_norm": 0.4276120066642761, |
|
"learning_rate": 9.174382305434742e-05, |
|
"loss": 3.2218, |
|
"step": 8380 |
|
}, |
|
{ |
|
"epoch": 0.617263514134893, |
|
"grad_norm": 0.44311702251434326, |
|
"learning_rate": 9.172207268231674e-05, |
|
"loss": 3.2191, |
|
"step": 8390 |
|
}, |
|
{ |
|
"epoch": 0.6179992275009656, |
|
"grad_norm": 0.43954578042030334, |
|
"learning_rate": 9.170029628332966e-05, |
|
"loss": 3.2388, |
|
"step": 8400 |
|
}, |
|
{ |
|
"epoch": 0.6187349408670382, |
|
"grad_norm": 0.38372981548309326, |
|
"learning_rate": 9.167849387097072e-05, |
|
"loss": 3.2059, |
|
"step": 8410 |
|
}, |
|
{ |
|
"epoch": 0.6194706542331108, |
|
"grad_norm": 0.3934963643550873, |
|
"learning_rate": 9.165666545884062e-05, |
|
"loss": 3.2022, |
|
"step": 8420 |
|
}, |
|
{ |
|
"epoch": 0.6202063675991834, |
|
"grad_norm": 0.41195252537727356, |
|
"learning_rate": 9.163481106055629e-05, |
|
"loss": 3.1958, |
|
"step": 8430 |
|
}, |
|
{ |
|
"epoch": 0.6209420809652559, |
|
"grad_norm": 0.42152824997901917, |
|
"learning_rate": 9.161293068975092e-05, |
|
"loss": 3.2529, |
|
"step": 8440 |
|
}, |
|
{ |
|
"epoch": 0.6216777943313285, |
|
"grad_norm": 0.41656965017318726, |
|
"learning_rate": 9.159102436007385e-05, |
|
"loss": 3.2559, |
|
"step": 8450 |
|
}, |
|
{ |
|
"epoch": 0.6224135076974011, |
|
"grad_norm": 0.3994307518005371, |
|
"learning_rate": 9.156909208519062e-05, |
|
"loss": 3.2101, |
|
"step": 8460 |
|
}, |
|
{ |
|
"epoch": 0.6231492210634737, |
|
"grad_norm": 0.40668097138404846, |
|
"learning_rate": 9.154713387878297e-05, |
|
"loss": 3.1927, |
|
"step": 8470 |
|
}, |
|
{ |
|
"epoch": 0.6238849344295463, |
|
"grad_norm": 0.42031413316726685, |
|
"learning_rate": 9.152514975454884e-05, |
|
"loss": 3.2174, |
|
"step": 8480 |
|
}, |
|
{ |
|
"epoch": 0.6246206477956189, |
|
"grad_norm": 0.3852512836456299, |
|
"learning_rate": 9.150313972620227e-05, |
|
"loss": 3.1662, |
|
"step": 8490 |
|
}, |
|
{ |
|
"epoch": 0.6253563611616914, |
|
"grad_norm": 0.3993820250034332, |
|
"learning_rate": 9.148110380747352e-05, |
|
"loss": 3.1738, |
|
"step": 8500 |
|
}, |
|
{ |
|
"epoch": 0.6260920745277639, |
|
"grad_norm": 0.4049205482006073, |
|
"learning_rate": 9.145904201210898e-05, |
|
"loss": 3.1861, |
|
"step": 8510 |
|
}, |
|
{ |
|
"epoch": 0.6268277878938365, |
|
"grad_norm": 0.40449580550193787, |
|
"learning_rate": 9.143695435387117e-05, |
|
"loss": 3.2229, |
|
"step": 8520 |
|
}, |
|
{ |
|
"epoch": 0.6275635012599091, |
|
"grad_norm": 0.39980998635292053, |
|
"learning_rate": 9.141484084653878e-05, |
|
"loss": 3.2294, |
|
"step": 8530 |
|
}, |
|
{ |
|
"epoch": 0.6282992146259817, |
|
"grad_norm": 0.42464175820350647, |
|
"learning_rate": 9.139270150390658e-05, |
|
"loss": 3.2189, |
|
"step": 8540 |
|
}, |
|
{ |
|
"epoch": 0.6290349279920543, |
|
"grad_norm": 0.3991568684577942, |
|
"learning_rate": 9.137053633978548e-05, |
|
"loss": 3.2124, |
|
"step": 8550 |
|
}, |
|
{ |
|
"epoch": 0.6297706413581269, |
|
"grad_norm": 0.40061402320861816, |
|
"learning_rate": 9.13483453680025e-05, |
|
"loss": 3.1887, |
|
"step": 8560 |
|
}, |
|
{ |
|
"epoch": 0.6305063547241995, |
|
"grad_norm": 0.3983337879180908, |
|
"learning_rate": 9.132612860240075e-05, |
|
"loss": 3.1858, |
|
"step": 8570 |
|
}, |
|
{ |
|
"epoch": 0.6312420680902721, |
|
"grad_norm": 0.38647231459617615, |
|
"learning_rate": 9.130388605683946e-05, |
|
"loss": 3.2476, |
|
"step": 8580 |
|
}, |
|
{ |
|
"epoch": 0.6319777814563446, |
|
"grad_norm": 0.40448683500289917, |
|
"learning_rate": 9.128161774519389e-05, |
|
"loss": 3.1775, |
|
"step": 8590 |
|
}, |
|
{ |
|
"epoch": 0.6327134948224172, |
|
"grad_norm": 0.4101662039756775, |
|
"learning_rate": 9.125932368135542e-05, |
|
"loss": 3.2317, |
|
"step": 8600 |
|
}, |
|
{ |
|
"epoch": 0.6334492081884897, |
|
"grad_norm": 0.4105026125907898, |
|
"learning_rate": 9.123700387923144e-05, |
|
"loss": 3.2358, |
|
"step": 8610 |
|
}, |
|
{ |
|
"epoch": 0.6341849215545623, |
|
"grad_norm": 0.42156288027763367, |
|
"learning_rate": 9.121465835274549e-05, |
|
"loss": 3.2202, |
|
"step": 8620 |
|
}, |
|
{ |
|
"epoch": 0.6349206349206349, |
|
"grad_norm": 0.4276079833507538, |
|
"learning_rate": 9.119228711583705e-05, |
|
"loss": 3.2128, |
|
"step": 8630 |
|
}, |
|
{ |
|
"epoch": 0.6356563482867075, |
|
"grad_norm": 0.3834649324417114, |
|
"learning_rate": 9.116989018246171e-05, |
|
"loss": 3.1966, |
|
"step": 8640 |
|
}, |
|
{ |
|
"epoch": 0.6363920616527801, |
|
"grad_norm": 0.39798030257225037, |
|
"learning_rate": 9.114746756659105e-05, |
|
"loss": 3.1433, |
|
"step": 8650 |
|
}, |
|
{ |
|
"epoch": 0.6371277750188526, |
|
"grad_norm": 0.3975779414176941, |
|
"learning_rate": 9.112501928221271e-05, |
|
"loss": 3.2573, |
|
"step": 8660 |
|
}, |
|
{ |
|
"epoch": 0.6378634883849252, |
|
"grad_norm": 0.3947142958641052, |
|
"learning_rate": 9.110254534333033e-05, |
|
"loss": 3.191, |
|
"step": 8670 |
|
}, |
|
{ |
|
"epoch": 0.6385992017509978, |
|
"grad_norm": 0.4371863901615143, |
|
"learning_rate": 9.10800457639635e-05, |
|
"loss": 3.2229, |
|
"step": 8680 |
|
}, |
|
{ |
|
"epoch": 0.6393349151170704, |
|
"grad_norm": 0.39925292134284973, |
|
"learning_rate": 9.105752055814789e-05, |
|
"loss": 3.2405, |
|
"step": 8690 |
|
}, |
|
{ |
|
"epoch": 0.640070628483143, |
|
"grad_norm": 0.40571677684783936, |
|
"learning_rate": 9.10349697399351e-05, |
|
"loss": 3.2267, |
|
"step": 8700 |
|
}, |
|
{ |
|
"epoch": 0.6408063418492156, |
|
"grad_norm": 0.3942791223526001, |
|
"learning_rate": 9.101239332339272e-05, |
|
"loss": 3.2074, |
|
"step": 8710 |
|
}, |
|
{ |
|
"epoch": 0.6415420552152882, |
|
"grad_norm": 0.42931607365608215, |
|
"learning_rate": 9.098979132260435e-05, |
|
"loss": 3.1736, |
|
"step": 8720 |
|
}, |
|
{ |
|
"epoch": 0.6422777685813607, |
|
"grad_norm": 0.4185764789581299, |
|
"learning_rate": 9.096716375166945e-05, |
|
"loss": 3.1654, |
|
"step": 8730 |
|
}, |
|
{ |
|
"epoch": 0.6430134819474332, |
|
"grad_norm": 0.4023076295852661, |
|
"learning_rate": 9.094451062470353e-05, |
|
"loss": 3.1562, |
|
"step": 8740 |
|
}, |
|
{ |
|
"epoch": 0.6437491953135058, |
|
"grad_norm": 0.40013644099235535, |
|
"learning_rate": 9.0921831955838e-05, |
|
"loss": 3.209, |
|
"step": 8750 |
|
}, |
|
{ |
|
"epoch": 0.6444849086795784, |
|
"grad_norm": 0.4097345471382141, |
|
"learning_rate": 9.089912775922021e-05, |
|
"loss": 3.1925, |
|
"step": 8760 |
|
}, |
|
{ |
|
"epoch": 0.645220622045651, |
|
"grad_norm": 0.4017821252346039, |
|
"learning_rate": 9.087639804901343e-05, |
|
"loss": 3.1849, |
|
"step": 8770 |
|
}, |
|
{ |
|
"epoch": 0.6459563354117236, |
|
"grad_norm": 0.40877318382263184, |
|
"learning_rate": 9.085364283939686e-05, |
|
"loss": 3.2203, |
|
"step": 8780 |
|
}, |
|
{ |
|
"epoch": 0.6466920487777962, |
|
"grad_norm": 0.4291410744190216, |
|
"learning_rate": 9.083086214456557e-05, |
|
"loss": 3.1789, |
|
"step": 8790 |
|
}, |
|
{ |
|
"epoch": 0.6474277621438688, |
|
"grad_norm": 0.4058206379413605, |
|
"learning_rate": 9.08080559787306e-05, |
|
"loss": 3.1903, |
|
"step": 8800 |
|
}, |
|
{ |
|
"epoch": 0.6481634755099414, |
|
"grad_norm": 0.3808108866214752, |
|
"learning_rate": 9.078522435611879e-05, |
|
"loss": 3.1841, |
|
"step": 8810 |
|
}, |
|
{ |
|
"epoch": 0.6488991888760139, |
|
"grad_norm": 0.43283572793006897, |
|
"learning_rate": 9.076236729097294e-05, |
|
"loss": 3.2212, |
|
"step": 8820 |
|
}, |
|
{ |
|
"epoch": 0.6496349022420865, |
|
"grad_norm": 0.39330145716667175, |
|
"learning_rate": 9.073948479755164e-05, |
|
"loss": 3.2158, |
|
"step": 8830 |
|
}, |
|
{ |
|
"epoch": 0.650370615608159, |
|
"grad_norm": 0.40737244486808777, |
|
"learning_rate": 9.071657689012943e-05, |
|
"loss": 3.2306, |
|
"step": 8840 |
|
}, |
|
{ |
|
"epoch": 0.6511063289742316, |
|
"grad_norm": 0.3892798125743866, |
|
"learning_rate": 9.069364358299666e-05, |
|
"loss": 3.2162, |
|
"step": 8850 |
|
}, |
|
{ |
|
"epoch": 0.6518420423403042, |
|
"grad_norm": 0.400855153799057, |
|
"learning_rate": 9.067068489045949e-05, |
|
"loss": 3.1358, |
|
"step": 8860 |
|
}, |
|
{ |
|
"epoch": 0.6525777557063768, |
|
"grad_norm": 0.39898017048835754, |
|
"learning_rate": 9.064770082684e-05, |
|
"loss": 3.1866, |
|
"step": 8870 |
|
}, |
|
{ |
|
"epoch": 0.6533134690724494, |
|
"grad_norm": 0.41870006918907166, |
|
"learning_rate": 9.062469140647601e-05, |
|
"loss": 3.1855, |
|
"step": 8880 |
|
}, |
|
{ |
|
"epoch": 0.654049182438522, |
|
"grad_norm": 0.4006561040878296, |
|
"learning_rate": 9.060165664372122e-05, |
|
"loss": 3.2137, |
|
"step": 8890 |
|
}, |
|
{ |
|
"epoch": 0.6547848958045945, |
|
"grad_norm": 0.4057448208332062, |
|
"learning_rate": 9.05785965529451e-05, |
|
"loss": 3.1655, |
|
"step": 8900 |
|
}, |
|
{ |
|
"epoch": 0.6555206091706671, |
|
"grad_norm": 0.399264931678772, |
|
"learning_rate": 9.055551114853295e-05, |
|
"loss": 3.214, |
|
"step": 8910 |
|
}, |
|
{ |
|
"epoch": 0.6562563225367397, |
|
"grad_norm": 0.399167001247406, |
|
"learning_rate": 9.053240044488586e-05, |
|
"loss": 3.1959, |
|
"step": 8920 |
|
}, |
|
{ |
|
"epoch": 0.6569920359028123, |
|
"grad_norm": 0.39689528942108154, |
|
"learning_rate": 9.050926445642067e-05, |
|
"loss": 3.2065, |
|
"step": 8930 |
|
}, |
|
{ |
|
"epoch": 0.6577277492688849, |
|
"grad_norm": 0.4583052098751068, |
|
"learning_rate": 9.048610319757002e-05, |
|
"loss": 3.2269, |
|
"step": 8940 |
|
}, |
|
{ |
|
"epoch": 0.6584634626349575, |
|
"grad_norm": 0.4117734134197235, |
|
"learning_rate": 9.046291668278231e-05, |
|
"loss": 3.1955, |
|
"step": 8950 |
|
}, |
|
{ |
|
"epoch": 0.65919917600103, |
|
"grad_norm": 0.3891928195953369, |
|
"learning_rate": 9.043970492652168e-05, |
|
"loss": 3.2248, |
|
"step": 8960 |
|
}, |
|
{ |
|
"epoch": 0.6599348893671025, |
|
"grad_norm": 0.4023609757423401, |
|
"learning_rate": 9.041646794326803e-05, |
|
"loss": 3.2241, |
|
"step": 8970 |
|
}, |
|
{ |
|
"epoch": 0.6606706027331751, |
|
"grad_norm": 0.39141857624053955, |
|
"learning_rate": 9.039320574751702e-05, |
|
"loss": 3.1826, |
|
"step": 8980 |
|
}, |
|
{ |
|
"epoch": 0.6614063160992477, |
|
"grad_norm": 0.38678160309791565, |
|
"learning_rate": 9.036991835377997e-05, |
|
"loss": 3.1664, |
|
"step": 8990 |
|
}, |
|
{ |
|
"epoch": 0.6621420294653203, |
|
"grad_norm": 0.3716258704662323, |
|
"learning_rate": 9.0346605776584e-05, |
|
"loss": 3.1835, |
|
"step": 9000 |
|
}, |
|
{ |
|
"epoch": 0.6628777428313929, |
|
"grad_norm": 0.38958126306533813, |
|
"learning_rate": 9.032326803047189e-05, |
|
"loss": 3.2211, |
|
"step": 9010 |
|
}, |
|
{ |
|
"epoch": 0.6636134561974655, |
|
"grad_norm": 0.3944793939590454, |
|
"learning_rate": 9.029990513000213e-05, |
|
"loss": 3.2145, |
|
"step": 9020 |
|
}, |
|
{ |
|
"epoch": 0.6643491695635381, |
|
"grad_norm": 0.41081830859184265, |
|
"learning_rate": 9.027651708974891e-05, |
|
"loss": 3.2223, |
|
"step": 9030 |
|
}, |
|
{ |
|
"epoch": 0.6650848829296107, |
|
"grad_norm": 0.3970061242580414, |
|
"learning_rate": 9.025310392430208e-05, |
|
"loss": 3.2183, |
|
"step": 9040 |
|
}, |
|
{ |
|
"epoch": 0.6658205962956832, |
|
"grad_norm": 0.4093196988105774, |
|
"learning_rate": 9.022966564826721e-05, |
|
"loss": 3.2203, |
|
"step": 9050 |
|
}, |
|
{ |
|
"epoch": 0.6665563096617558, |
|
"grad_norm": 0.4082465171813965, |
|
"learning_rate": 9.020620227626548e-05, |
|
"loss": 3.1892, |
|
"step": 9060 |
|
}, |
|
{ |
|
"epoch": 0.6672920230278283, |
|
"grad_norm": 0.40329813957214355, |
|
"learning_rate": 9.018271382293377e-05, |
|
"loss": 3.1848, |
|
"step": 9070 |
|
}, |
|
{ |
|
"epoch": 0.6680277363939009, |
|
"grad_norm": 0.3946298062801361, |
|
"learning_rate": 9.015920030292457e-05, |
|
"loss": 3.2053, |
|
"step": 9080 |
|
}, |
|
{ |
|
"epoch": 0.6687634497599735, |
|
"grad_norm": 0.39222556352615356, |
|
"learning_rate": 9.013566173090604e-05, |
|
"loss": 3.2543, |
|
"step": 9090 |
|
}, |
|
{ |
|
"epoch": 0.6694991631260461, |
|
"grad_norm": 0.39037418365478516, |
|
"learning_rate": 9.011209812156194e-05, |
|
"loss": 3.1635, |
|
"step": 9100 |
|
}, |
|
{ |
|
"epoch": 0.6702348764921187, |
|
"grad_norm": 0.4084848463535309, |
|
"learning_rate": 9.008850948959164e-05, |
|
"loss": 3.1981, |
|
"step": 9110 |
|
}, |
|
{ |
|
"epoch": 0.6709705898581912, |
|
"grad_norm": 0.4072742760181427, |
|
"learning_rate": 9.00648958497102e-05, |
|
"loss": 3.197, |
|
"step": 9120 |
|
}, |
|
{ |
|
"epoch": 0.6717063032242638, |
|
"grad_norm": 0.396036833524704, |
|
"learning_rate": 9.004125721664816e-05, |
|
"loss": 3.2572, |
|
"step": 9130 |
|
}, |
|
{ |
|
"epoch": 0.6724420165903364, |
|
"grad_norm": 0.3960723578929901, |
|
"learning_rate": 9.001759360515174e-05, |
|
"loss": 3.2029, |
|
"step": 9140 |
|
}, |
|
{ |
|
"epoch": 0.673177729956409, |
|
"grad_norm": 0.4176108241081238, |
|
"learning_rate": 8.999390502998271e-05, |
|
"loss": 3.2187, |
|
"step": 9150 |
|
}, |
|
{ |
|
"epoch": 0.6739134433224816, |
|
"grad_norm": 0.39103132486343384, |
|
"learning_rate": 8.99701915059184e-05, |
|
"loss": 3.2403, |
|
"step": 9160 |
|
}, |
|
{ |
|
"epoch": 0.6746491566885542, |
|
"grad_norm": 0.40875765681266785, |
|
"learning_rate": 8.994645304775175e-05, |
|
"loss": 3.196, |
|
"step": 9170 |
|
}, |
|
{ |
|
"epoch": 0.6753848700546267, |
|
"grad_norm": 0.3849492073059082, |
|
"learning_rate": 8.992268967029121e-05, |
|
"loss": 3.2015, |
|
"step": 9180 |
|
}, |
|
{ |
|
"epoch": 0.6761205834206993, |
|
"grad_norm": 0.3874247968196869, |
|
"learning_rate": 8.989890138836077e-05, |
|
"loss": 3.2458, |
|
"step": 9190 |
|
}, |
|
{ |
|
"epoch": 0.6768562967867718, |
|
"grad_norm": 0.4249545931816101, |
|
"learning_rate": 8.98750882168e-05, |
|
"loss": 3.2442, |
|
"step": 9200 |
|
}, |
|
{ |
|
"epoch": 0.6775920101528444, |
|
"grad_norm": 0.37782514095306396, |
|
"learning_rate": 8.985125017046397e-05, |
|
"loss": 3.2012, |
|
"step": 9210 |
|
}, |
|
{ |
|
"epoch": 0.678327723518917, |
|
"grad_norm": 0.3978985846042633, |
|
"learning_rate": 8.982738726422325e-05, |
|
"loss": 3.2127, |
|
"step": 9220 |
|
}, |
|
{ |
|
"epoch": 0.6790634368849896, |
|
"grad_norm": 0.38309648633003235, |
|
"learning_rate": 8.980349951296396e-05, |
|
"loss": 3.1858, |
|
"step": 9230 |
|
}, |
|
{ |
|
"epoch": 0.6797991502510622, |
|
"grad_norm": 0.42622271180152893, |
|
"learning_rate": 8.977958693158768e-05, |
|
"loss": 3.177, |
|
"step": 9240 |
|
}, |
|
{ |
|
"epoch": 0.6805348636171348, |
|
"grad_norm": 0.3749631345272064, |
|
"learning_rate": 8.97556495350115e-05, |
|
"loss": 3.1836, |
|
"step": 9250 |
|
}, |
|
{ |
|
"epoch": 0.6812705769832074, |
|
"grad_norm": 0.4103826582431793, |
|
"learning_rate": 8.973168733816799e-05, |
|
"loss": 3.188, |
|
"step": 9260 |
|
}, |
|
{ |
|
"epoch": 0.68200629034928, |
|
"grad_norm": 0.3920148015022278, |
|
"learning_rate": 8.970770035600518e-05, |
|
"loss": 3.2252, |
|
"step": 9270 |
|
}, |
|
{ |
|
"epoch": 0.6827420037153525, |
|
"grad_norm": 0.3953123688697815, |
|
"learning_rate": 8.968368860348657e-05, |
|
"loss": 3.1857, |
|
"step": 9280 |
|
}, |
|
{ |
|
"epoch": 0.6834777170814251, |
|
"grad_norm": 0.39929309487342834, |
|
"learning_rate": 8.965965209559111e-05, |
|
"loss": 3.178, |
|
"step": 9290 |
|
}, |
|
{ |
|
"epoch": 0.6842134304474976, |
|
"grad_norm": 0.4072224795818329, |
|
"learning_rate": 8.963559084731319e-05, |
|
"loss": 3.1687, |
|
"step": 9300 |
|
}, |
|
{ |
|
"epoch": 0.6849491438135702, |
|
"grad_norm": 0.421752005815506, |
|
"learning_rate": 8.961150487366265e-05, |
|
"loss": 3.1959, |
|
"step": 9310 |
|
}, |
|
{ |
|
"epoch": 0.6856848571796428, |
|
"grad_norm": 0.3856801986694336, |
|
"learning_rate": 8.958739418966472e-05, |
|
"loss": 3.1962, |
|
"step": 9320 |
|
}, |
|
{ |
|
"epoch": 0.6864205705457154, |
|
"grad_norm": 0.4316472113132477, |
|
"learning_rate": 8.956325881036008e-05, |
|
"loss": 3.1759, |
|
"step": 9330 |
|
}, |
|
{ |
|
"epoch": 0.687156283911788, |
|
"grad_norm": 0.39210107922554016, |
|
"learning_rate": 8.953909875080477e-05, |
|
"loss": 3.1787, |
|
"step": 9340 |
|
}, |
|
{ |
|
"epoch": 0.6878919972778605, |
|
"grad_norm": 0.39624738693237305, |
|
"learning_rate": 8.951491402607031e-05, |
|
"loss": 3.2155, |
|
"step": 9350 |
|
}, |
|
{ |
|
"epoch": 0.6886277106439331, |
|
"grad_norm": 0.4137401878833771, |
|
"learning_rate": 8.949070465124352e-05, |
|
"loss": 3.2029, |
|
"step": 9360 |
|
}, |
|
{ |
|
"epoch": 0.6893634240100057, |
|
"grad_norm": 0.37497127056121826, |
|
"learning_rate": 8.946647064142662e-05, |
|
"loss": 3.213, |
|
"step": 9370 |
|
}, |
|
{ |
|
"epoch": 0.6900991373760783, |
|
"grad_norm": 0.4009439945220947, |
|
"learning_rate": 8.944221201173723e-05, |
|
"loss": 3.1978, |
|
"step": 9380 |
|
}, |
|
{ |
|
"epoch": 0.6908348507421509, |
|
"grad_norm": 0.390789270401001, |
|
"learning_rate": 8.941792877730829e-05, |
|
"loss": 3.1502, |
|
"step": 9390 |
|
}, |
|
{ |
|
"epoch": 0.6915705641082235, |
|
"grad_norm": 0.3750687837600708, |
|
"learning_rate": 8.939362095328811e-05, |
|
"loss": 3.2011, |
|
"step": 9400 |
|
}, |
|
{ |
|
"epoch": 0.692306277474296, |
|
"grad_norm": 0.3848758041858673, |
|
"learning_rate": 8.936928855484035e-05, |
|
"loss": 3.2202, |
|
"step": 9410 |
|
}, |
|
{ |
|
"epoch": 0.6930419908403685, |
|
"grad_norm": 0.406440794467926, |
|
"learning_rate": 8.934493159714397e-05, |
|
"loss": 3.1547, |
|
"step": 9420 |
|
}, |
|
{ |
|
"epoch": 0.6937777042064411, |
|
"grad_norm": 0.37811365723609924, |
|
"learning_rate": 8.932055009539326e-05, |
|
"loss": 3.1863, |
|
"step": 9430 |
|
}, |
|
{ |
|
"epoch": 0.6945134175725137, |
|
"grad_norm": 0.3805864155292511, |
|
"learning_rate": 8.929614406479785e-05, |
|
"loss": 3.1736, |
|
"step": 9440 |
|
}, |
|
{ |
|
"epoch": 0.6952491309385863, |
|
"grad_norm": 0.3834070563316345, |
|
"learning_rate": 8.927171352058262e-05, |
|
"loss": 3.1659, |
|
"step": 9450 |
|
}, |
|
{ |
|
"epoch": 0.6959848443046589, |
|
"grad_norm": 0.37774044275283813, |
|
"learning_rate": 8.924725847798776e-05, |
|
"loss": 3.1884, |
|
"step": 9460 |
|
}, |
|
{ |
|
"epoch": 0.6967205576707315, |
|
"grad_norm": 0.3782852590084076, |
|
"learning_rate": 8.92227789522688e-05, |
|
"loss": 3.1593, |
|
"step": 9470 |
|
}, |
|
{ |
|
"epoch": 0.6974562710368041, |
|
"grad_norm": 0.4085491895675659, |
|
"learning_rate": 8.919827495869645e-05, |
|
"loss": 3.2332, |
|
"step": 9480 |
|
}, |
|
{ |
|
"epoch": 0.6981919844028767, |
|
"grad_norm": 0.38499242067337036, |
|
"learning_rate": 8.917374651255675e-05, |
|
"loss": 3.2051, |
|
"step": 9490 |
|
}, |
|
{ |
|
"epoch": 0.6989276977689493, |
|
"grad_norm": 0.41638848185539246, |
|
"learning_rate": 8.914919362915096e-05, |
|
"loss": 3.1209, |
|
"step": 9500 |
|
}, |
|
{ |
|
"epoch": 0.6996634111350218, |
|
"grad_norm": 0.3790314197540283, |
|
"learning_rate": 8.912461632379561e-05, |
|
"loss": 3.1852, |
|
"step": 9510 |
|
}, |
|
{ |
|
"epoch": 0.7003991245010944, |
|
"grad_norm": 0.3726657032966614, |
|
"learning_rate": 8.910001461182245e-05, |
|
"loss": 3.1979, |
|
"step": 9520 |
|
}, |
|
{ |
|
"epoch": 0.7011348378671669, |
|
"grad_norm": 0.4013272523880005, |
|
"learning_rate": 8.907538850857844e-05, |
|
"loss": 3.1678, |
|
"step": 9530 |
|
}, |
|
{ |
|
"epoch": 0.7018705512332395, |
|
"grad_norm": 0.4136090576648712, |
|
"learning_rate": 8.905073802942579e-05, |
|
"loss": 3.1486, |
|
"step": 9540 |
|
}, |
|
{ |
|
"epoch": 0.7026062645993121, |
|
"grad_norm": 0.4203248918056488, |
|
"learning_rate": 8.902606318974189e-05, |
|
"loss": 3.2102, |
|
"step": 9550 |
|
}, |
|
{ |
|
"epoch": 0.7033419779653847, |
|
"grad_norm": 0.41708293557167053, |
|
"learning_rate": 8.900136400491934e-05, |
|
"loss": 3.2344, |
|
"step": 9560 |
|
}, |
|
{ |
|
"epoch": 0.7040776913314573, |
|
"grad_norm": 0.40059348940849304, |
|
"learning_rate": 8.897664049036594e-05, |
|
"loss": 3.1628, |
|
"step": 9570 |
|
}, |
|
{ |
|
"epoch": 0.7048134046975298, |
|
"grad_norm": 0.41126495599746704, |
|
"learning_rate": 8.89518926615046e-05, |
|
"loss": 3.2079, |
|
"step": 9580 |
|
}, |
|
{ |
|
"epoch": 0.7055491180636024, |
|
"grad_norm": 0.39519059658050537, |
|
"learning_rate": 8.89271205337735e-05, |
|
"loss": 3.1738, |
|
"step": 9590 |
|
}, |
|
{ |
|
"epoch": 0.706284831429675, |
|
"grad_norm": 0.3954789936542511, |
|
"learning_rate": 8.890232412262588e-05, |
|
"loss": 3.2131, |
|
"step": 9600 |
|
}, |
|
{ |
|
"epoch": 0.7070205447957476, |
|
"grad_norm": 0.37941592931747437, |
|
"learning_rate": 8.887750344353023e-05, |
|
"loss": 3.1606, |
|
"step": 9610 |
|
}, |
|
{ |
|
"epoch": 0.7077562581618202, |
|
"grad_norm": 0.37760573625564575, |
|
"learning_rate": 8.885265851197006e-05, |
|
"loss": 3.1641, |
|
"step": 9620 |
|
}, |
|
{ |
|
"epoch": 0.7084919715278928, |
|
"grad_norm": 0.3869093358516693, |
|
"learning_rate": 8.882778934344412e-05, |
|
"loss": 3.1779, |
|
"step": 9630 |
|
}, |
|
{ |
|
"epoch": 0.7092276848939653, |
|
"grad_norm": 0.37767353653907776, |
|
"learning_rate": 8.88028959534662e-05, |
|
"loss": 3.1538, |
|
"step": 9640 |
|
}, |
|
{ |
|
"epoch": 0.7099633982600378, |
|
"grad_norm": 0.3897157907485962, |
|
"learning_rate": 8.877797835756523e-05, |
|
"loss": 3.1598, |
|
"step": 9650 |
|
}, |
|
{ |
|
"epoch": 0.7106991116261104, |
|
"grad_norm": 0.4006219804286957, |
|
"learning_rate": 8.875303657128526e-05, |
|
"loss": 3.1951, |
|
"step": 9660 |
|
}, |
|
{ |
|
"epoch": 0.711434824992183, |
|
"grad_norm": 0.3921482563018799, |
|
"learning_rate": 8.872807061018542e-05, |
|
"loss": 3.177, |
|
"step": 9670 |
|
}, |
|
{ |
|
"epoch": 0.7121705383582556, |
|
"grad_norm": 0.3957517147064209, |
|
"learning_rate": 8.870308048983989e-05, |
|
"loss": 3.2062, |
|
"step": 9680 |
|
}, |
|
{ |
|
"epoch": 0.7129062517243282, |
|
"grad_norm": 0.4034371078014374, |
|
"learning_rate": 8.867806622583793e-05, |
|
"loss": 3.1455, |
|
"step": 9690 |
|
}, |
|
{ |
|
"epoch": 0.7136419650904008, |
|
"grad_norm": 0.38453301787376404, |
|
"learning_rate": 8.865302783378391e-05, |
|
"loss": 3.1829, |
|
"step": 9700 |
|
}, |
|
{ |
|
"epoch": 0.7143776784564734, |
|
"grad_norm": 0.40622347593307495, |
|
"learning_rate": 8.862796532929717e-05, |
|
"loss": 3.1663, |
|
"step": 9710 |
|
}, |
|
{ |
|
"epoch": 0.715113391822546, |
|
"grad_norm": 0.4114111065864563, |
|
"learning_rate": 8.86028787280122e-05, |
|
"loss": 3.2128, |
|
"step": 9720 |
|
}, |
|
{ |
|
"epoch": 0.7158491051886186, |
|
"grad_norm": 0.3917147219181061, |
|
"learning_rate": 8.857776804557842e-05, |
|
"loss": 3.1601, |
|
"step": 9730 |
|
}, |
|
{ |
|
"epoch": 0.7165848185546911, |
|
"grad_norm": 0.3908882141113281, |
|
"learning_rate": 8.85526332976603e-05, |
|
"loss": 3.2072, |
|
"step": 9740 |
|
}, |
|
{ |
|
"epoch": 0.7173205319207636, |
|
"grad_norm": 0.3869069814682007, |
|
"learning_rate": 8.852747449993735e-05, |
|
"loss": 3.1435, |
|
"step": 9750 |
|
}, |
|
{ |
|
"epoch": 0.7180562452868362, |
|
"grad_norm": 0.37940549850463867, |
|
"learning_rate": 8.850229166810407e-05, |
|
"loss": 3.1857, |
|
"step": 9760 |
|
}, |
|
{ |
|
"epoch": 0.7187919586529088, |
|
"grad_norm": 0.40026140213012695, |
|
"learning_rate": 8.847708481786996e-05, |
|
"loss": 3.1549, |
|
"step": 9770 |
|
}, |
|
{ |
|
"epoch": 0.7195276720189814, |
|
"grad_norm": 0.39495396614074707, |
|
"learning_rate": 8.845185396495947e-05, |
|
"loss": 3.1916, |
|
"step": 9780 |
|
}, |
|
{ |
|
"epoch": 0.720263385385054, |
|
"grad_norm": 0.3766854405403137, |
|
"learning_rate": 8.842659912511204e-05, |
|
"loss": 3.1937, |
|
"step": 9790 |
|
}, |
|
{ |
|
"epoch": 0.7209990987511266, |
|
"grad_norm": 0.39338842034339905, |
|
"learning_rate": 8.84013203140821e-05, |
|
"loss": 3.1683, |
|
"step": 9800 |
|
}, |
|
{ |
|
"epoch": 0.7217348121171991, |
|
"grad_norm": 0.3664061725139618, |
|
"learning_rate": 8.837601754763902e-05, |
|
"loss": 3.1868, |
|
"step": 9810 |
|
}, |
|
{ |
|
"epoch": 0.7224705254832717, |
|
"grad_norm": 0.37891605496406555, |
|
"learning_rate": 8.835069084156709e-05, |
|
"loss": 3.1325, |
|
"step": 9820 |
|
}, |
|
{ |
|
"epoch": 0.7232062388493443, |
|
"grad_norm": 0.4005630910396576, |
|
"learning_rate": 8.832534021166555e-05, |
|
"loss": 3.2046, |
|
"step": 9830 |
|
}, |
|
{ |
|
"epoch": 0.7239419522154169, |
|
"grad_norm": 0.3800733983516693, |
|
"learning_rate": 8.829996567374857e-05, |
|
"loss": 3.2048, |
|
"step": 9840 |
|
}, |
|
{ |
|
"epoch": 0.7246776655814895, |
|
"grad_norm": 0.396013468503952, |
|
"learning_rate": 8.827456724364525e-05, |
|
"loss": 3.1523, |
|
"step": 9850 |
|
}, |
|
{ |
|
"epoch": 0.7254133789475621, |
|
"grad_norm": 0.37606799602508545, |
|
"learning_rate": 8.824914493719952e-05, |
|
"loss": 3.1769, |
|
"step": 9860 |
|
}, |
|
{ |
|
"epoch": 0.7261490923136346, |
|
"grad_norm": 0.3814736604690552, |
|
"learning_rate": 8.82236987702703e-05, |
|
"loss": 3.1618, |
|
"step": 9870 |
|
}, |
|
{ |
|
"epoch": 0.7268848056797071, |
|
"grad_norm": 0.42829668521881104, |
|
"learning_rate": 8.819822875873136e-05, |
|
"loss": 3.1892, |
|
"step": 9880 |
|
}, |
|
{ |
|
"epoch": 0.7276205190457797, |
|
"grad_norm": 0.3849022686481476, |
|
"learning_rate": 8.817273491847129e-05, |
|
"loss": 3.2523, |
|
"step": 9890 |
|
}, |
|
{ |
|
"epoch": 0.7283562324118523, |
|
"grad_norm": 0.3935648202896118, |
|
"learning_rate": 8.814721726539366e-05, |
|
"loss": 3.1743, |
|
"step": 9900 |
|
}, |
|
{ |
|
"epoch": 0.7290919457779249, |
|
"grad_norm": 0.3935147523880005, |
|
"learning_rate": 8.812167581541675e-05, |
|
"loss": 3.1683, |
|
"step": 9910 |
|
}, |
|
{ |
|
"epoch": 0.7298276591439975, |
|
"grad_norm": 0.41291266679763794, |
|
"learning_rate": 8.809611058447382e-05, |
|
"loss": 3.202, |
|
"step": 9920 |
|
}, |
|
{ |
|
"epoch": 0.7305633725100701, |
|
"grad_norm": 0.39814290404319763, |
|
"learning_rate": 8.807052158851289e-05, |
|
"loss": 3.2389, |
|
"step": 9930 |
|
}, |
|
{ |
|
"epoch": 0.7312990858761427, |
|
"grad_norm": 0.38150617480278015, |
|
"learning_rate": 8.804490884349683e-05, |
|
"loss": 3.1738, |
|
"step": 9940 |
|
}, |
|
{ |
|
"epoch": 0.7320347992422153, |
|
"grad_norm": 0.41697201132774353, |
|
"learning_rate": 8.801927236540329e-05, |
|
"loss": 3.2173, |
|
"step": 9950 |
|
}, |
|
{ |
|
"epoch": 0.7327705126082878, |
|
"grad_norm": 0.40556851029396057, |
|
"learning_rate": 8.799361217022477e-05, |
|
"loss": 3.115, |
|
"step": 9960 |
|
}, |
|
{ |
|
"epoch": 0.7335062259743604, |
|
"grad_norm": 0.3922794759273529, |
|
"learning_rate": 8.796792827396855e-05, |
|
"loss": 3.1743, |
|
"step": 9970 |
|
}, |
|
{ |
|
"epoch": 0.7342419393404329, |
|
"grad_norm": 0.3818107545375824, |
|
"learning_rate": 8.794222069265669e-05, |
|
"loss": 3.1707, |
|
"step": 9980 |
|
}, |
|
{ |
|
"epoch": 0.7349776527065055, |
|
"grad_norm": 0.380044549703598, |
|
"learning_rate": 8.791648944232603e-05, |
|
"loss": 3.1717, |
|
"step": 9990 |
|
}, |
|
{ |
|
"epoch": 0.7357133660725781, |
|
"grad_norm": 0.41100969910621643, |
|
"learning_rate": 8.789073453902814e-05, |
|
"loss": 3.1353, |
|
"step": 10000 |
|
}, |
|
{ |
|
"epoch": 0.7364490794386507, |
|
"grad_norm": 0.39325809478759766, |
|
"learning_rate": 8.786495599882943e-05, |
|
"loss": 3.1665, |
|
"step": 10010 |
|
}, |
|
{ |
|
"epoch": 0.7371847928047233, |
|
"grad_norm": 0.39705923199653625, |
|
"learning_rate": 8.783915383781099e-05, |
|
"loss": 3.181, |
|
"step": 10020 |
|
}, |
|
{ |
|
"epoch": 0.7379205061707959, |
|
"grad_norm": 0.3803333640098572, |
|
"learning_rate": 8.781332807206861e-05, |
|
"loss": 3.1477, |
|
"step": 10030 |
|
}, |
|
{ |
|
"epoch": 0.7386562195368684, |
|
"grad_norm": 0.38700178265571594, |
|
"learning_rate": 8.778747871771292e-05, |
|
"loss": 3.2384, |
|
"step": 10040 |
|
}, |
|
{ |
|
"epoch": 0.739391932902941, |
|
"grad_norm": 0.4298153519630432, |
|
"learning_rate": 8.776160579086916e-05, |
|
"loss": 3.2115, |
|
"step": 10050 |
|
}, |
|
{ |
|
"epoch": 0.7401276462690136, |
|
"grad_norm": 0.3944063186645508, |
|
"learning_rate": 8.773570930767732e-05, |
|
"loss": 3.1313, |
|
"step": 10060 |
|
}, |
|
{ |
|
"epoch": 0.7408633596350862, |
|
"grad_norm": 0.3921501636505127, |
|
"learning_rate": 8.770978928429208e-05, |
|
"loss": 3.1643, |
|
"step": 10070 |
|
}, |
|
{ |
|
"epoch": 0.7415990730011588, |
|
"grad_norm": 0.3809838593006134, |
|
"learning_rate": 8.76838457368828e-05, |
|
"loss": 3.2096, |
|
"step": 10080 |
|
}, |
|
{ |
|
"epoch": 0.7423347863672314, |
|
"grad_norm": 0.38074982166290283, |
|
"learning_rate": 8.765787868163351e-05, |
|
"loss": 3.1887, |
|
"step": 10090 |
|
}, |
|
{ |
|
"epoch": 0.7430704997333039, |
|
"grad_norm": 0.401103138923645, |
|
"learning_rate": 8.763188813474293e-05, |
|
"loss": 3.1852, |
|
"step": 10100 |
|
}, |
|
{ |
|
"epoch": 0.7438062130993764, |
|
"grad_norm": 0.38959553837776184, |
|
"learning_rate": 8.76058741124244e-05, |
|
"loss": 3.1682, |
|
"step": 10110 |
|
}, |
|
{ |
|
"epoch": 0.744541926465449, |
|
"grad_norm": 0.41069236397743225, |
|
"learning_rate": 8.757983663090592e-05, |
|
"loss": 3.1528, |
|
"step": 10120 |
|
}, |
|
{ |
|
"epoch": 0.7452776398315216, |
|
"grad_norm": 0.4105203449726105, |
|
"learning_rate": 8.755377570643014e-05, |
|
"loss": 3.177, |
|
"step": 10130 |
|
}, |
|
{ |
|
"epoch": 0.7460133531975942, |
|
"grad_norm": 0.4029715657234192, |
|
"learning_rate": 8.752769135525431e-05, |
|
"loss": 3.1583, |
|
"step": 10140 |
|
}, |
|
{ |
|
"epoch": 0.7467490665636668, |
|
"grad_norm": 0.37962058186531067, |
|
"learning_rate": 8.750158359365031e-05, |
|
"loss": 3.1393, |
|
"step": 10150 |
|
}, |
|
{ |
|
"epoch": 0.7474847799297394, |
|
"grad_norm": 0.39237701892852783, |
|
"learning_rate": 8.74754524379046e-05, |
|
"loss": 3.192, |
|
"step": 10160 |
|
}, |
|
{ |
|
"epoch": 0.748220493295812, |
|
"grad_norm": 0.40408843755722046, |
|
"learning_rate": 8.744929790431826e-05, |
|
"loss": 3.1452, |
|
"step": 10170 |
|
}, |
|
{ |
|
"epoch": 0.7489562066618846, |
|
"grad_norm": 0.38078784942626953, |
|
"learning_rate": 8.742312000920696e-05, |
|
"loss": 3.1608, |
|
"step": 10180 |
|
}, |
|
{ |
|
"epoch": 0.7496919200279571, |
|
"grad_norm": 0.37361815571784973, |
|
"learning_rate": 8.73969187689009e-05, |
|
"loss": 3.1642, |
|
"step": 10190 |
|
}, |
|
{ |
|
"epoch": 0.7504276333940297, |
|
"grad_norm": 0.3917367160320282, |
|
"learning_rate": 8.737069419974489e-05, |
|
"loss": 3.1804, |
|
"step": 10200 |
|
}, |
|
{ |
|
"epoch": 0.7511633467601022, |
|
"grad_norm": 0.3755326271057129, |
|
"learning_rate": 8.734444631809827e-05, |
|
"loss": 3.1408, |
|
"step": 10210 |
|
}, |
|
{ |
|
"epoch": 0.7518990601261748, |
|
"grad_norm": 0.38932299613952637, |
|
"learning_rate": 8.73181751403349e-05, |
|
"loss": 3.1216, |
|
"step": 10220 |
|
}, |
|
{ |
|
"epoch": 0.7526347734922474, |
|
"grad_norm": 0.3907861113548279, |
|
"learning_rate": 8.729188068284324e-05, |
|
"loss": 3.1688, |
|
"step": 10230 |
|
}, |
|
{ |
|
"epoch": 0.75337048685832, |
|
"grad_norm": 0.4114513099193573, |
|
"learning_rate": 8.726556296202621e-05, |
|
"loss": 3.2136, |
|
"step": 10240 |
|
}, |
|
{ |
|
"epoch": 0.7541062002243926, |
|
"grad_norm": 0.38536152243614197, |
|
"learning_rate": 8.723922199430127e-05, |
|
"loss": 3.1645, |
|
"step": 10250 |
|
}, |
|
{ |
|
"epoch": 0.7548419135904652, |
|
"grad_norm": 0.39056259393692017, |
|
"learning_rate": 8.721285779610035e-05, |
|
"loss": 3.1345, |
|
"step": 10260 |
|
}, |
|
{ |
|
"epoch": 0.7555776269565377, |
|
"grad_norm": 0.37613552808761597, |
|
"learning_rate": 8.718647038386995e-05, |
|
"loss": 3.123, |
|
"step": 10270 |
|
}, |
|
{ |
|
"epoch": 0.7563133403226103, |
|
"grad_norm": 0.4041471481323242, |
|
"learning_rate": 8.716005977407093e-05, |
|
"loss": 3.2016, |
|
"step": 10280 |
|
}, |
|
{ |
|
"epoch": 0.7570490536886829, |
|
"grad_norm": 0.3961995244026184, |
|
"learning_rate": 8.713362598317875e-05, |
|
"loss": 3.1718, |
|
"step": 10290 |
|
}, |
|
{ |
|
"epoch": 0.7577847670547555, |
|
"grad_norm": 0.3890473246574402, |
|
"learning_rate": 8.710716902768324e-05, |
|
"loss": 3.1566, |
|
"step": 10300 |
|
}, |
|
{ |
|
"epoch": 0.7585204804208281, |
|
"grad_norm": 0.38676023483276367, |
|
"learning_rate": 8.708068892408872e-05, |
|
"loss": 3.1446, |
|
"step": 10310 |
|
}, |
|
{ |
|
"epoch": 0.7592561937869006, |
|
"grad_norm": 0.4547286331653595, |
|
"learning_rate": 8.705418568891391e-05, |
|
"loss": 3.1901, |
|
"step": 10320 |
|
}, |
|
{ |
|
"epoch": 0.7599919071529732, |
|
"grad_norm": 0.37865954637527466, |
|
"learning_rate": 8.702765933869203e-05, |
|
"loss": 3.1168, |
|
"step": 10330 |
|
}, |
|
{ |
|
"epoch": 0.7607276205190457, |
|
"grad_norm": 0.3800100088119507, |
|
"learning_rate": 8.700110988997066e-05, |
|
"loss": 3.1179, |
|
"step": 10340 |
|
}, |
|
{ |
|
"epoch": 0.7614633338851183, |
|
"grad_norm": 0.38291603326797485, |
|
"learning_rate": 8.697453735931182e-05, |
|
"loss": 3.1596, |
|
"step": 10350 |
|
}, |
|
{ |
|
"epoch": 0.7621990472511909, |
|
"grad_norm": 0.3799181282520294, |
|
"learning_rate": 8.69479417632919e-05, |
|
"loss": 3.222, |
|
"step": 10360 |
|
}, |
|
{ |
|
"epoch": 0.7629347606172635, |
|
"grad_norm": 0.3791024684906006, |
|
"learning_rate": 8.692132311850171e-05, |
|
"loss": 3.1126, |
|
"step": 10370 |
|
}, |
|
{ |
|
"epoch": 0.7636704739833361, |
|
"grad_norm": 0.37659361958503723, |
|
"learning_rate": 8.689468144154642e-05, |
|
"loss": 3.1594, |
|
"step": 10380 |
|
}, |
|
{ |
|
"epoch": 0.7644061873494087, |
|
"grad_norm": 0.3841865062713623, |
|
"learning_rate": 8.686801674904557e-05, |
|
"loss": 3.1806, |
|
"step": 10390 |
|
}, |
|
{ |
|
"epoch": 0.7651419007154813, |
|
"grad_norm": 0.3803307116031647, |
|
"learning_rate": 8.684132905763309e-05, |
|
"loss": 3.1807, |
|
"step": 10400 |
|
}, |
|
{ |
|
"epoch": 0.7658776140815539, |
|
"grad_norm": 0.38950908184051514, |
|
"learning_rate": 8.681461838395719e-05, |
|
"loss": 3.1673, |
|
"step": 10410 |
|
}, |
|
{ |
|
"epoch": 0.7666133274476264, |
|
"grad_norm": 0.3641705811023712, |
|
"learning_rate": 8.678788474468045e-05, |
|
"loss": 3.1618, |
|
"step": 10420 |
|
}, |
|
{ |
|
"epoch": 0.767349040813699, |
|
"grad_norm": 0.3801932632923126, |
|
"learning_rate": 8.67611281564798e-05, |
|
"loss": 3.1935, |
|
"step": 10430 |
|
}, |
|
{ |
|
"epoch": 0.7680847541797715, |
|
"grad_norm": 0.36981040239334106, |
|
"learning_rate": 8.673434863604648e-05, |
|
"loss": 3.1661, |
|
"step": 10440 |
|
}, |
|
{ |
|
"epoch": 0.7688204675458441, |
|
"grad_norm": 0.3908541202545166, |
|
"learning_rate": 8.670754620008598e-05, |
|
"loss": 3.1673, |
|
"step": 10450 |
|
}, |
|
{ |
|
"epoch": 0.7695561809119167, |
|
"grad_norm": 0.3636099398136139, |
|
"learning_rate": 8.668072086531818e-05, |
|
"loss": 3.1264, |
|
"step": 10460 |
|
}, |
|
{ |
|
"epoch": 0.7702918942779893, |
|
"grad_norm": 0.37544921040534973, |
|
"learning_rate": 8.665387264847715e-05, |
|
"loss": 3.1016, |
|
"step": 10470 |
|
}, |
|
{ |
|
"epoch": 0.7710276076440619, |
|
"grad_norm": 0.39099445939064026, |
|
"learning_rate": 8.662700156631127e-05, |
|
"loss": 3.2002, |
|
"step": 10480 |
|
}, |
|
{ |
|
"epoch": 0.7717633210101345, |
|
"grad_norm": 0.3761730492115021, |
|
"learning_rate": 8.660010763558324e-05, |
|
"loss": 3.1221, |
|
"step": 10490 |
|
}, |
|
{ |
|
"epoch": 0.772499034376207, |
|
"grad_norm": 0.4066612124443054, |
|
"learning_rate": 8.65731908730699e-05, |
|
"loss": 3.1845, |
|
"step": 10500 |
|
}, |
|
{ |
|
"epoch": 0.7732347477422796, |
|
"grad_norm": 0.3759371340274811, |
|
"learning_rate": 8.654625129556244e-05, |
|
"loss": 3.1465, |
|
"step": 10510 |
|
}, |
|
{ |
|
"epoch": 0.7739704611083522, |
|
"grad_norm": 0.3886657655239105, |
|
"learning_rate": 8.651928891986622e-05, |
|
"loss": 3.1517, |
|
"step": 10520 |
|
}, |
|
{ |
|
"epoch": 0.7747061744744248, |
|
"grad_norm": 0.38135573267936707, |
|
"learning_rate": 8.649230376280087e-05, |
|
"loss": 3.1349, |
|
"step": 10530 |
|
}, |
|
{ |
|
"epoch": 0.7754418878404974, |
|
"grad_norm": 0.4106856882572174, |
|
"learning_rate": 8.646529584120015e-05, |
|
"loss": 3.2045, |
|
"step": 10540 |
|
}, |
|
{ |
|
"epoch": 0.7761776012065699, |
|
"grad_norm": 0.40560615062713623, |
|
"learning_rate": 8.64382651719121e-05, |
|
"loss": 3.1467, |
|
"step": 10550 |
|
}, |
|
{ |
|
"epoch": 0.7769133145726425, |
|
"grad_norm": 0.3781823515892029, |
|
"learning_rate": 8.641121177179893e-05, |
|
"loss": 3.1531, |
|
"step": 10560 |
|
}, |
|
{ |
|
"epoch": 0.777649027938715, |
|
"grad_norm": 0.37491971254348755, |
|
"learning_rate": 8.6384135657737e-05, |
|
"loss": 3.1884, |
|
"step": 10570 |
|
}, |
|
{ |
|
"epoch": 0.7783847413047876, |
|
"grad_norm": 0.380998432636261, |
|
"learning_rate": 8.635703684661689e-05, |
|
"loss": 3.1635, |
|
"step": 10580 |
|
}, |
|
{ |
|
"epoch": 0.7791204546708602, |
|
"grad_norm": 0.4219949245452881, |
|
"learning_rate": 8.632991535534329e-05, |
|
"loss": 3.1436, |
|
"step": 10590 |
|
}, |
|
{ |
|
"epoch": 0.7798561680369328, |
|
"grad_norm": 0.4060940742492676, |
|
"learning_rate": 8.630277120083507e-05, |
|
"loss": 3.2004, |
|
"step": 10600 |
|
}, |
|
{ |
|
"epoch": 0.7805918814030054, |
|
"grad_norm": 0.36930394172668457, |
|
"learning_rate": 8.627560440002524e-05, |
|
"loss": 3.1599, |
|
"step": 10610 |
|
}, |
|
{ |
|
"epoch": 0.781327594769078, |
|
"grad_norm": 0.3666263222694397, |
|
"learning_rate": 8.624841496986089e-05, |
|
"loss": 3.1352, |
|
"step": 10620 |
|
}, |
|
{ |
|
"epoch": 0.7820633081351506, |
|
"grad_norm": 0.4501335322856903, |
|
"learning_rate": 8.622120292730327e-05, |
|
"loss": 3.1673, |
|
"step": 10630 |
|
}, |
|
{ |
|
"epoch": 0.7827990215012232, |
|
"grad_norm": 0.4114534258842468, |
|
"learning_rate": 8.619396828932774e-05, |
|
"loss": 3.1506, |
|
"step": 10640 |
|
}, |
|
{ |
|
"epoch": 0.7835347348672957, |
|
"grad_norm": 0.37227872014045715, |
|
"learning_rate": 8.616671107292373e-05, |
|
"loss": 3.2114, |
|
"step": 10650 |
|
}, |
|
{ |
|
"epoch": 0.7842704482333683, |
|
"grad_norm": 0.39157432317733765, |
|
"learning_rate": 8.613943129509478e-05, |
|
"loss": 3.2101, |
|
"step": 10660 |
|
}, |
|
{ |
|
"epoch": 0.7850061615994408, |
|
"grad_norm": 0.41202887892723083, |
|
"learning_rate": 8.61121289728585e-05, |
|
"loss": 3.1828, |
|
"step": 10670 |
|
}, |
|
{ |
|
"epoch": 0.7857418749655134, |
|
"grad_norm": 0.3904684782028198, |
|
"learning_rate": 8.608480412324652e-05, |
|
"loss": 3.2128, |
|
"step": 10680 |
|
}, |
|
{ |
|
"epoch": 0.786477588331586, |
|
"grad_norm": 0.39074650406837463, |
|
"learning_rate": 8.605745676330457e-05, |
|
"loss": 3.1899, |
|
"step": 10690 |
|
}, |
|
{ |
|
"epoch": 0.7872133016976586, |
|
"grad_norm": 0.4131028950214386, |
|
"learning_rate": 8.603008691009243e-05, |
|
"loss": 3.1664, |
|
"step": 10700 |
|
}, |
|
{ |
|
"epoch": 0.7879490150637312, |
|
"grad_norm": 0.3683393597602844, |
|
"learning_rate": 8.600269458068387e-05, |
|
"loss": 3.1694, |
|
"step": 10710 |
|
}, |
|
{ |
|
"epoch": 0.7886847284298037, |
|
"grad_norm": 0.38590583205223083, |
|
"learning_rate": 8.597527979216672e-05, |
|
"loss": 3.1582, |
|
"step": 10720 |
|
}, |
|
{ |
|
"epoch": 0.7894204417958763, |
|
"grad_norm": 0.38443443179130554, |
|
"learning_rate": 8.594784256164279e-05, |
|
"loss": 3.1427, |
|
"step": 10730 |
|
}, |
|
{ |
|
"epoch": 0.7901561551619489, |
|
"grad_norm": 0.38856041431427, |
|
"learning_rate": 8.59203829062279e-05, |
|
"loss": 3.1903, |
|
"step": 10740 |
|
}, |
|
{ |
|
"epoch": 0.7908918685280215, |
|
"grad_norm": 0.3836011290550232, |
|
"learning_rate": 8.589290084305187e-05, |
|
"loss": 3.159, |
|
"step": 10750 |
|
}, |
|
{ |
|
"epoch": 0.7916275818940941, |
|
"grad_norm": 0.3806968629360199, |
|
"learning_rate": 8.58653963892585e-05, |
|
"loss": 3.1668, |
|
"step": 10760 |
|
}, |
|
{ |
|
"epoch": 0.7923632952601667, |
|
"grad_norm": 0.41352030634880066, |
|
"learning_rate": 8.583786956200553e-05, |
|
"loss": 3.1994, |
|
"step": 10770 |
|
}, |
|
{ |
|
"epoch": 0.7930990086262392, |
|
"grad_norm": 0.3863872289657593, |
|
"learning_rate": 8.581032037846466e-05, |
|
"loss": 3.174, |
|
"step": 10780 |
|
}, |
|
{ |
|
"epoch": 0.7938347219923118, |
|
"grad_norm": 0.4021219313144684, |
|
"learning_rate": 8.578274885582159e-05, |
|
"loss": 3.1763, |
|
"step": 10790 |
|
}, |
|
{ |
|
"epoch": 0.7945704353583843, |
|
"grad_norm": 0.3936547338962555, |
|
"learning_rate": 8.575515501127592e-05, |
|
"loss": 3.1346, |
|
"step": 10800 |
|
}, |
|
{ |
|
"epoch": 0.7953061487244569, |
|
"grad_norm": 0.38866353034973145, |
|
"learning_rate": 8.57275388620411e-05, |
|
"loss": 3.1529, |
|
"step": 10810 |
|
}, |
|
{ |
|
"epoch": 0.7960418620905295, |
|
"grad_norm": 0.3719068467617035, |
|
"learning_rate": 8.569990042534466e-05, |
|
"loss": 3.0875, |
|
"step": 10820 |
|
}, |
|
{ |
|
"epoch": 0.7967775754566021, |
|
"grad_norm": 0.37235334515571594, |
|
"learning_rate": 8.567223971842788e-05, |
|
"loss": 3.1509, |
|
"step": 10830 |
|
}, |
|
{ |
|
"epoch": 0.7975132888226747, |
|
"grad_norm": 0.3882967531681061, |
|
"learning_rate": 8.564455675854599e-05, |
|
"loss": 3.1392, |
|
"step": 10840 |
|
}, |
|
{ |
|
"epoch": 0.7982490021887473, |
|
"grad_norm": 0.3814670145511627, |
|
"learning_rate": 8.561685156296813e-05, |
|
"loss": 3.1725, |
|
"step": 10850 |
|
}, |
|
{ |
|
"epoch": 0.7989847155548199, |
|
"grad_norm": 0.38729429244995117, |
|
"learning_rate": 8.558912414897728e-05, |
|
"loss": 3.1396, |
|
"step": 10860 |
|
}, |
|
{ |
|
"epoch": 0.7997204289208925, |
|
"grad_norm": 0.3847314715385437, |
|
"learning_rate": 8.556137453387027e-05, |
|
"loss": 3.0893, |
|
"step": 10870 |
|
}, |
|
{ |
|
"epoch": 0.800456142286965, |
|
"grad_norm": 0.3961573541164398, |
|
"learning_rate": 8.553360273495779e-05, |
|
"loss": 3.146, |
|
"step": 10880 |
|
}, |
|
{ |
|
"epoch": 0.8011918556530375, |
|
"grad_norm": 0.3685840666294098, |
|
"learning_rate": 8.55058087695644e-05, |
|
"loss": 3.1371, |
|
"step": 10890 |
|
}, |
|
{ |
|
"epoch": 0.8019275690191101, |
|
"grad_norm": 0.37563103437423706, |
|
"learning_rate": 8.547799265502844e-05, |
|
"loss": 3.1751, |
|
"step": 10900 |
|
}, |
|
{ |
|
"epoch": 0.8026632823851827, |
|
"grad_norm": 0.38960275053977966, |
|
"learning_rate": 8.545015440870208e-05, |
|
"loss": 3.1834, |
|
"step": 10910 |
|
}, |
|
{ |
|
"epoch": 0.8033989957512553, |
|
"grad_norm": 0.3829275369644165, |
|
"learning_rate": 8.542229404795132e-05, |
|
"loss": 3.1301, |
|
"step": 10920 |
|
}, |
|
{ |
|
"epoch": 0.8041347091173279, |
|
"grad_norm": 0.37760934233665466, |
|
"learning_rate": 8.539441159015594e-05, |
|
"loss": 3.1482, |
|
"step": 10930 |
|
}, |
|
{ |
|
"epoch": 0.8048704224834005, |
|
"grad_norm": 0.3774561285972595, |
|
"learning_rate": 8.536650705270948e-05, |
|
"loss": 3.1495, |
|
"step": 10940 |
|
}, |
|
{ |
|
"epoch": 0.805606135849473, |
|
"grad_norm": 0.36744073033332825, |
|
"learning_rate": 8.53385804530193e-05, |
|
"loss": 3.1253, |
|
"step": 10950 |
|
}, |
|
{ |
|
"epoch": 0.8063418492155456, |
|
"grad_norm": 0.3932728171348572, |
|
"learning_rate": 8.53106318085065e-05, |
|
"loss": 3.1845, |
|
"step": 10960 |
|
}, |
|
{ |
|
"epoch": 0.8070775625816182, |
|
"grad_norm": 0.38730084896087646, |
|
"learning_rate": 8.528266113660591e-05, |
|
"loss": 3.1891, |
|
"step": 10970 |
|
}, |
|
{ |
|
"epoch": 0.8078132759476908, |
|
"grad_norm": 0.382874995470047, |
|
"learning_rate": 8.525466845476616e-05, |
|
"loss": 3.1655, |
|
"step": 10980 |
|
}, |
|
{ |
|
"epoch": 0.8085489893137634, |
|
"grad_norm": 0.36904412508010864, |
|
"learning_rate": 8.522665378044952e-05, |
|
"loss": 3.1939, |
|
"step": 10990 |
|
}, |
|
{ |
|
"epoch": 0.809284702679836, |
|
"grad_norm": 0.36776015162467957, |
|
"learning_rate": 8.519861713113206e-05, |
|
"loss": 3.1426, |
|
"step": 11000 |
|
}, |
|
{ |
|
"epoch": 0.8100204160459085, |
|
"grad_norm": 0.3698399066925049, |
|
"learning_rate": 8.517055852430356e-05, |
|
"loss": 3.1427, |
|
"step": 11010 |
|
}, |
|
{ |
|
"epoch": 0.810756129411981, |
|
"grad_norm": 0.3883744478225708, |
|
"learning_rate": 8.514247797746744e-05, |
|
"loss": 3.1611, |
|
"step": 11020 |
|
}, |
|
{ |
|
"epoch": 0.8114918427780536, |
|
"grad_norm": 0.37801653146743774, |
|
"learning_rate": 8.51143755081408e-05, |
|
"loss": 3.1424, |
|
"step": 11030 |
|
}, |
|
{ |
|
"epoch": 0.8122275561441262, |
|
"grad_norm": 0.3822377026081085, |
|
"learning_rate": 8.508625113385453e-05, |
|
"loss": 3.0966, |
|
"step": 11040 |
|
}, |
|
{ |
|
"epoch": 0.8129632695101988, |
|
"grad_norm": 0.3712664246559143, |
|
"learning_rate": 8.505810487215305e-05, |
|
"loss": 3.1409, |
|
"step": 11050 |
|
}, |
|
{ |
|
"epoch": 0.8136989828762714, |
|
"grad_norm": 0.37806233763694763, |
|
"learning_rate": 8.502993674059449e-05, |
|
"loss": 3.1397, |
|
"step": 11060 |
|
}, |
|
{ |
|
"epoch": 0.814434696242344, |
|
"grad_norm": 0.3708866238594055, |
|
"learning_rate": 8.500174675675063e-05, |
|
"loss": 3.1526, |
|
"step": 11070 |
|
}, |
|
{ |
|
"epoch": 0.8151704096084166, |
|
"grad_norm": 0.3839915990829468, |
|
"learning_rate": 8.497353493820688e-05, |
|
"loss": 3.123, |
|
"step": 11080 |
|
}, |
|
{ |
|
"epoch": 0.8159061229744892, |
|
"grad_norm": 0.4151860475540161, |
|
"learning_rate": 8.494530130256227e-05, |
|
"loss": 3.1412, |
|
"step": 11090 |
|
}, |
|
{ |
|
"epoch": 0.8166418363405618, |
|
"grad_norm": 0.3969019949436188, |
|
"learning_rate": 8.49170458674294e-05, |
|
"loss": 3.1789, |
|
"step": 11100 |
|
}, |
|
{ |
|
"epoch": 0.8173775497066343, |
|
"grad_norm": 0.37846624851226807, |
|
"learning_rate": 8.488876865043454e-05, |
|
"loss": 3.1684, |
|
"step": 11110 |
|
}, |
|
{ |
|
"epoch": 0.8181132630727068, |
|
"grad_norm": 0.37555307149887085, |
|
"learning_rate": 8.48604696692175e-05, |
|
"loss": 3.1397, |
|
"step": 11120 |
|
}, |
|
{ |
|
"epoch": 0.8188489764387794, |
|
"grad_norm": 0.39476168155670166, |
|
"learning_rate": 8.483214894143166e-05, |
|
"loss": 3.1518, |
|
"step": 11130 |
|
}, |
|
{ |
|
"epoch": 0.819584689804852, |
|
"grad_norm": 0.3914696276187897, |
|
"learning_rate": 8.480380648474403e-05, |
|
"loss": 3.1596, |
|
"step": 11140 |
|
}, |
|
{ |
|
"epoch": 0.8203204031709246, |
|
"grad_norm": 0.3912908136844635, |
|
"learning_rate": 8.477544231683507e-05, |
|
"loss": 3.1871, |
|
"step": 11150 |
|
}, |
|
{ |
|
"epoch": 0.8210561165369972, |
|
"grad_norm": 0.37355631589889526, |
|
"learning_rate": 8.474705645539886e-05, |
|
"loss": 3.1434, |
|
"step": 11160 |
|
}, |
|
{ |
|
"epoch": 0.8217918299030698, |
|
"grad_norm": 0.3698188066482544, |
|
"learning_rate": 8.471864891814304e-05, |
|
"loss": 3.0801, |
|
"step": 11170 |
|
}, |
|
{ |
|
"epoch": 0.8225275432691423, |
|
"grad_norm": 0.3645183742046356, |
|
"learning_rate": 8.469021972278867e-05, |
|
"loss": 3.1388, |
|
"step": 11180 |
|
}, |
|
{ |
|
"epoch": 0.8232632566352149, |
|
"grad_norm": 0.3691493272781372, |
|
"learning_rate": 8.466176888707041e-05, |
|
"loss": 3.1758, |
|
"step": 11190 |
|
}, |
|
{ |
|
"epoch": 0.8239989700012875, |
|
"grad_norm": 0.37398234009742737, |
|
"learning_rate": 8.463329642873638e-05, |
|
"loss": 3.1055, |
|
"step": 11200 |
|
}, |
|
{ |
|
"epoch": 0.8247346833673601, |
|
"grad_norm": 0.38691118359565735, |
|
"learning_rate": 8.46048023655482e-05, |
|
"loss": 3.1475, |
|
"step": 11210 |
|
}, |
|
{ |
|
"epoch": 0.8254703967334327, |
|
"grad_norm": 0.36699140071868896, |
|
"learning_rate": 8.457628671528094e-05, |
|
"loss": 3.1344, |
|
"step": 11220 |
|
}, |
|
{ |
|
"epoch": 0.8262061100995053, |
|
"grad_norm": 0.3784330487251282, |
|
"learning_rate": 8.454774949572318e-05, |
|
"loss": 3.1172, |
|
"step": 11230 |
|
}, |
|
{ |
|
"epoch": 0.8269418234655778, |
|
"grad_norm": 0.3638308644294739, |
|
"learning_rate": 8.451919072467693e-05, |
|
"loss": 3.1731, |
|
"step": 11240 |
|
}, |
|
{ |
|
"epoch": 0.8276775368316504, |
|
"grad_norm": 0.3659362196922302, |
|
"learning_rate": 8.449061041995766e-05, |
|
"loss": 3.1357, |
|
"step": 11250 |
|
}, |
|
{ |
|
"epoch": 0.8284132501977229, |
|
"grad_norm": 0.37726885080337524, |
|
"learning_rate": 8.446200859939425e-05, |
|
"loss": 3.109, |
|
"step": 11260 |
|
}, |
|
{ |
|
"epoch": 0.8291489635637955, |
|
"grad_norm": 0.3847838044166565, |
|
"learning_rate": 8.443338528082902e-05, |
|
"loss": 3.1779, |
|
"step": 11270 |
|
}, |
|
{ |
|
"epoch": 0.8298846769298681, |
|
"grad_norm": 0.3812064230442047, |
|
"learning_rate": 8.440474048211768e-05, |
|
"loss": 3.158, |
|
"step": 11280 |
|
}, |
|
{ |
|
"epoch": 0.8306203902959407, |
|
"grad_norm": 0.40196287631988525, |
|
"learning_rate": 8.437607422112938e-05, |
|
"loss": 3.173, |
|
"step": 11290 |
|
}, |
|
{ |
|
"epoch": 0.8313561036620133, |
|
"grad_norm": 0.3687420189380646, |
|
"learning_rate": 8.434738651574662e-05, |
|
"loss": 3.1256, |
|
"step": 11300 |
|
}, |
|
{ |
|
"epoch": 0.8320918170280859, |
|
"grad_norm": 0.36912721395492554, |
|
"learning_rate": 8.431867738386531e-05, |
|
"loss": 3.1726, |
|
"step": 11310 |
|
}, |
|
{ |
|
"epoch": 0.8328275303941585, |
|
"grad_norm": 0.374905526638031, |
|
"learning_rate": 8.428994684339466e-05, |
|
"loss": 3.1554, |
|
"step": 11320 |
|
}, |
|
{ |
|
"epoch": 0.833563243760231, |
|
"grad_norm": 0.37698689103126526, |
|
"learning_rate": 8.426119491225733e-05, |
|
"loss": 3.1715, |
|
"step": 11330 |
|
}, |
|
{ |
|
"epoch": 0.8342989571263036, |
|
"grad_norm": 0.3949843645095825, |
|
"learning_rate": 8.423242160838926e-05, |
|
"loss": 3.145, |
|
"step": 11340 |
|
}, |
|
{ |
|
"epoch": 0.8350346704923761, |
|
"grad_norm": 0.38596290349960327, |
|
"learning_rate": 8.420362694973972e-05, |
|
"loss": 3.1545, |
|
"step": 11350 |
|
}, |
|
{ |
|
"epoch": 0.8357703838584487, |
|
"grad_norm": 0.361113041639328, |
|
"learning_rate": 8.417481095427134e-05, |
|
"loss": 3.1668, |
|
"step": 11360 |
|
}, |
|
{ |
|
"epoch": 0.8365060972245213, |
|
"grad_norm": 0.38437777757644653, |
|
"learning_rate": 8.414597363996004e-05, |
|
"loss": 3.1842, |
|
"step": 11370 |
|
}, |
|
{ |
|
"epoch": 0.8372418105905939, |
|
"grad_norm": 0.38113656640052795, |
|
"learning_rate": 8.411711502479505e-05, |
|
"loss": 3.1228, |
|
"step": 11380 |
|
}, |
|
{ |
|
"epoch": 0.8379775239566665, |
|
"grad_norm": 0.37318846583366394, |
|
"learning_rate": 8.408823512677883e-05, |
|
"loss": 3.1408, |
|
"step": 11390 |
|
}, |
|
{ |
|
"epoch": 0.8387132373227391, |
|
"grad_norm": 0.3935425579547882, |
|
"learning_rate": 8.405933396392722e-05, |
|
"loss": 3.1441, |
|
"step": 11400 |
|
}, |
|
{ |
|
"epoch": 0.8394489506888116, |
|
"grad_norm": 0.37884998321533203, |
|
"learning_rate": 8.403041155426925e-05, |
|
"loss": 3.1926, |
|
"step": 11410 |
|
}, |
|
{ |
|
"epoch": 0.8401846640548842, |
|
"grad_norm": 0.38540568947792053, |
|
"learning_rate": 8.400146791584722e-05, |
|
"loss": 3.1961, |
|
"step": 11420 |
|
}, |
|
{ |
|
"epoch": 0.8409203774209568, |
|
"grad_norm": 0.392183393239975, |
|
"learning_rate": 8.397250306671666e-05, |
|
"loss": 3.0998, |
|
"step": 11430 |
|
}, |
|
{ |
|
"epoch": 0.8416560907870294, |
|
"grad_norm": 0.3929624557495117, |
|
"learning_rate": 8.394351702494637e-05, |
|
"loss": 3.1336, |
|
"step": 11440 |
|
}, |
|
{ |
|
"epoch": 0.842391804153102, |
|
"grad_norm": 0.38060176372528076, |
|
"learning_rate": 8.391450980861835e-05, |
|
"loss": 3.1406, |
|
"step": 11450 |
|
}, |
|
{ |
|
"epoch": 0.8431275175191745, |
|
"grad_norm": 0.38198819756507874, |
|
"learning_rate": 8.38854814358278e-05, |
|
"loss": 3.1256, |
|
"step": 11460 |
|
}, |
|
{ |
|
"epoch": 0.8438632308852471, |
|
"grad_norm": 0.3775307536125183, |
|
"learning_rate": 8.385643192468312e-05, |
|
"loss": 3.1567, |
|
"step": 11470 |
|
}, |
|
{ |
|
"epoch": 0.8445989442513196, |
|
"grad_norm": 0.3741263449192047, |
|
"learning_rate": 8.382736129330591e-05, |
|
"loss": 3.1545, |
|
"step": 11480 |
|
}, |
|
{ |
|
"epoch": 0.8453346576173922, |
|
"grad_norm": 0.3674101233482361, |
|
"learning_rate": 8.379826955983092e-05, |
|
"loss": 3.1613, |
|
"step": 11490 |
|
}, |
|
{ |
|
"epoch": 0.8460703709834648, |
|
"grad_norm": 0.38992542028427124, |
|
"learning_rate": 8.376915674240611e-05, |
|
"loss": 3.1323, |
|
"step": 11500 |
|
}, |
|
{ |
|
"epoch": 0.8468060843495374, |
|
"grad_norm": 0.3849126994609833, |
|
"learning_rate": 8.374002285919253e-05, |
|
"loss": 3.2039, |
|
"step": 11510 |
|
}, |
|
{ |
|
"epoch": 0.84754179771561, |
|
"grad_norm": 0.3764283061027527, |
|
"learning_rate": 8.371086792836443e-05, |
|
"loss": 3.125, |
|
"step": 11520 |
|
}, |
|
{ |
|
"epoch": 0.8482775110816826, |
|
"grad_norm": 0.37651705741882324, |
|
"learning_rate": 8.368169196810916e-05, |
|
"loss": 3.1228, |
|
"step": 11530 |
|
}, |
|
{ |
|
"epoch": 0.8490132244477552, |
|
"grad_norm": 0.3919517397880554, |
|
"learning_rate": 8.365249499662719e-05, |
|
"loss": 3.1619, |
|
"step": 11540 |
|
}, |
|
{ |
|
"epoch": 0.8497489378138278, |
|
"grad_norm": 0.377399206161499, |
|
"learning_rate": 8.362327703213208e-05, |
|
"loss": 3.1564, |
|
"step": 11550 |
|
}, |
|
{ |
|
"epoch": 0.8504846511799004, |
|
"grad_norm": 0.3962908983230591, |
|
"learning_rate": 8.359403809285053e-05, |
|
"loss": 3.1482, |
|
"step": 11560 |
|
}, |
|
{ |
|
"epoch": 0.8512203645459729, |
|
"grad_norm": 0.399509996175766, |
|
"learning_rate": 8.356477819702231e-05, |
|
"loss": 3.0999, |
|
"step": 11570 |
|
}, |
|
{ |
|
"epoch": 0.8519560779120454, |
|
"grad_norm": 0.4016306698322296, |
|
"learning_rate": 8.353549736290023e-05, |
|
"loss": 3.1327, |
|
"step": 11580 |
|
}, |
|
{ |
|
"epoch": 0.852691791278118, |
|
"grad_norm": 0.38924241065979004, |
|
"learning_rate": 8.350619560875017e-05, |
|
"loss": 3.1378, |
|
"step": 11590 |
|
}, |
|
{ |
|
"epoch": 0.8534275046441906, |
|
"grad_norm": 0.39699849486351013, |
|
"learning_rate": 8.347687295285112e-05, |
|
"loss": 3.131, |
|
"step": 11600 |
|
}, |
|
{ |
|
"epoch": 0.8541632180102632, |
|
"grad_norm": 0.36073020100593567, |
|
"learning_rate": 8.344752941349503e-05, |
|
"loss": 3.1618, |
|
"step": 11610 |
|
}, |
|
{ |
|
"epoch": 0.8548989313763358, |
|
"grad_norm": 0.38798099756240845, |
|
"learning_rate": 8.341816500898692e-05, |
|
"loss": 3.1768, |
|
"step": 11620 |
|
}, |
|
{ |
|
"epoch": 0.8556346447424084, |
|
"grad_norm": 0.37226933240890503, |
|
"learning_rate": 8.338877975764483e-05, |
|
"loss": 3.1506, |
|
"step": 11630 |
|
}, |
|
{ |
|
"epoch": 0.856370358108481, |
|
"grad_norm": 0.3917200565338135, |
|
"learning_rate": 8.335937367779976e-05, |
|
"loss": 3.1574, |
|
"step": 11640 |
|
}, |
|
{ |
|
"epoch": 0.8571060714745535, |
|
"grad_norm": 0.37370383739471436, |
|
"learning_rate": 8.332994678779579e-05, |
|
"loss": 3.1438, |
|
"step": 11650 |
|
}, |
|
{ |
|
"epoch": 0.8578417848406261, |
|
"grad_norm": 0.3972039222717285, |
|
"learning_rate": 8.330049910598987e-05, |
|
"loss": 3.1557, |
|
"step": 11660 |
|
}, |
|
{ |
|
"epoch": 0.8585774982066987, |
|
"grad_norm": 0.38387760519981384, |
|
"learning_rate": 8.327103065075202e-05, |
|
"loss": 3.1509, |
|
"step": 11670 |
|
}, |
|
{ |
|
"epoch": 0.8593132115727713, |
|
"grad_norm": 0.36181387305259705, |
|
"learning_rate": 8.324154144046515e-05, |
|
"loss": 3.1726, |
|
"step": 11680 |
|
}, |
|
{ |
|
"epoch": 0.8600489249388438, |
|
"grad_norm": 0.3810345530509949, |
|
"learning_rate": 8.321203149352513e-05, |
|
"loss": 3.1901, |
|
"step": 11690 |
|
}, |
|
{ |
|
"epoch": 0.8607846383049164, |
|
"grad_norm": 0.3864314556121826, |
|
"learning_rate": 8.318250082834083e-05, |
|
"loss": 3.1788, |
|
"step": 11700 |
|
}, |
|
{ |
|
"epoch": 0.861520351670989, |
|
"grad_norm": 0.39438217878341675, |
|
"learning_rate": 8.315294946333396e-05, |
|
"loss": 3.1094, |
|
"step": 11710 |
|
}, |
|
{ |
|
"epoch": 0.8622560650370615, |
|
"grad_norm": 0.3909660279750824, |
|
"learning_rate": 8.312337741693917e-05, |
|
"loss": 3.1339, |
|
"step": 11720 |
|
}, |
|
{ |
|
"epoch": 0.8629917784031341, |
|
"grad_norm": 0.37569791078567505, |
|
"learning_rate": 8.309378470760402e-05, |
|
"loss": 3.2063, |
|
"step": 11730 |
|
}, |
|
{ |
|
"epoch": 0.8637274917692067, |
|
"grad_norm": 0.3675804138183594, |
|
"learning_rate": 8.306417135378899e-05, |
|
"loss": 3.1175, |
|
"step": 11740 |
|
}, |
|
{ |
|
"epoch": 0.8644632051352793, |
|
"grad_norm": 0.3829854726791382, |
|
"learning_rate": 8.303453737396736e-05, |
|
"loss": 3.1524, |
|
"step": 11750 |
|
}, |
|
{ |
|
"epoch": 0.8651989185013519, |
|
"grad_norm": 0.3902895152568817, |
|
"learning_rate": 8.300488278662537e-05, |
|
"loss": 3.1543, |
|
"step": 11760 |
|
}, |
|
{ |
|
"epoch": 0.8659346318674245, |
|
"grad_norm": 0.3615528345108032, |
|
"learning_rate": 8.297520761026203e-05, |
|
"loss": 3.1835, |
|
"step": 11770 |
|
}, |
|
{ |
|
"epoch": 0.8666703452334971, |
|
"grad_norm": 0.3822636008262634, |
|
"learning_rate": 8.294551186338925e-05, |
|
"loss": 3.1482, |
|
"step": 11780 |
|
}, |
|
{ |
|
"epoch": 0.8674060585995697, |
|
"grad_norm": 0.36693650484085083, |
|
"learning_rate": 8.291579556453176e-05, |
|
"loss": 3.1794, |
|
"step": 11790 |
|
}, |
|
{ |
|
"epoch": 0.8681417719656422, |
|
"grad_norm": 0.36783790588378906, |
|
"learning_rate": 8.28860587322271e-05, |
|
"loss": 3.1473, |
|
"step": 11800 |
|
}, |
|
{ |
|
"epoch": 0.8688774853317147, |
|
"grad_norm": 0.36205679178237915, |
|
"learning_rate": 8.28563013850256e-05, |
|
"loss": 3.1309, |
|
"step": 11810 |
|
}, |
|
{ |
|
"epoch": 0.8696131986977873, |
|
"grad_norm": 0.37128111720085144, |
|
"learning_rate": 8.282652354149047e-05, |
|
"loss": 3.1223, |
|
"step": 11820 |
|
}, |
|
{ |
|
"epoch": 0.8703489120638599, |
|
"grad_norm": 0.3675532341003418, |
|
"learning_rate": 8.279672522019759e-05, |
|
"loss": 3.1302, |
|
"step": 11830 |
|
}, |
|
{ |
|
"epoch": 0.8710846254299325, |
|
"grad_norm": 0.36570990085601807, |
|
"learning_rate": 8.27669064397357e-05, |
|
"loss": 3.0489, |
|
"step": 11840 |
|
}, |
|
{ |
|
"epoch": 0.8718203387960051, |
|
"grad_norm": 0.3862569034099579, |
|
"learning_rate": 8.273706721870626e-05, |
|
"loss": 3.0992, |
|
"step": 11850 |
|
}, |
|
{ |
|
"epoch": 0.8725560521620777, |
|
"grad_norm": 0.35596564412117004, |
|
"learning_rate": 8.270720757572352e-05, |
|
"loss": 3.173, |
|
"step": 11860 |
|
}, |
|
{ |
|
"epoch": 0.8732917655281502, |
|
"grad_norm": 0.38876011967658997, |
|
"learning_rate": 8.267732752941443e-05, |
|
"loss": 3.1282, |
|
"step": 11870 |
|
}, |
|
{ |
|
"epoch": 0.8740274788942228, |
|
"grad_norm": 0.3655962646007538, |
|
"learning_rate": 8.264742709841868e-05, |
|
"loss": 3.1591, |
|
"step": 11880 |
|
}, |
|
{ |
|
"epoch": 0.8747631922602954, |
|
"grad_norm": 0.378047376871109, |
|
"learning_rate": 8.26175063013887e-05, |
|
"loss": 3.1106, |
|
"step": 11890 |
|
}, |
|
{ |
|
"epoch": 0.875498905626368, |
|
"grad_norm": 0.41224589943885803, |
|
"learning_rate": 8.258756515698958e-05, |
|
"loss": 3.1519, |
|
"step": 11900 |
|
}, |
|
{ |
|
"epoch": 0.8762346189924406, |
|
"grad_norm": 0.3781079351902008, |
|
"learning_rate": 8.255760368389914e-05, |
|
"loss": 3.1649, |
|
"step": 11910 |
|
}, |
|
{ |
|
"epoch": 0.8769703323585131, |
|
"grad_norm": 0.3772408366203308, |
|
"learning_rate": 8.252762190080787e-05, |
|
"loss": 3.1815, |
|
"step": 11920 |
|
}, |
|
{ |
|
"epoch": 0.8777060457245857, |
|
"grad_norm": 0.36887019872665405, |
|
"learning_rate": 8.249761982641893e-05, |
|
"loss": 3.1422, |
|
"step": 11930 |
|
}, |
|
{ |
|
"epoch": 0.8784417590906582, |
|
"grad_norm": 0.37302523851394653, |
|
"learning_rate": 8.246759747944813e-05, |
|
"loss": 3.1111, |
|
"step": 11940 |
|
}, |
|
{ |
|
"epoch": 0.8791774724567308, |
|
"grad_norm": 0.3750922977924347, |
|
"learning_rate": 8.243755487862392e-05, |
|
"loss": 3.1001, |
|
"step": 11950 |
|
}, |
|
{ |
|
"epoch": 0.8799131858228034, |
|
"grad_norm": 0.3755902647972107, |
|
"learning_rate": 8.240749204268745e-05, |
|
"loss": 3.134, |
|
"step": 11960 |
|
}, |
|
{ |
|
"epoch": 0.880648899188876, |
|
"grad_norm": 0.3703187108039856, |
|
"learning_rate": 8.237740899039238e-05, |
|
"loss": 3.1396, |
|
"step": 11970 |
|
}, |
|
{ |
|
"epoch": 0.8813846125549486, |
|
"grad_norm": 0.37014931440353394, |
|
"learning_rate": 8.234730574050509e-05, |
|
"loss": 3.1177, |
|
"step": 11980 |
|
}, |
|
{ |
|
"epoch": 0.8821203259210212, |
|
"grad_norm": 0.39811164140701294, |
|
"learning_rate": 8.231718231180448e-05, |
|
"loss": 3.1516, |
|
"step": 11990 |
|
}, |
|
{ |
|
"epoch": 0.8828560392870938, |
|
"grad_norm": 0.37592417001724243, |
|
"learning_rate": 8.22870387230821e-05, |
|
"loss": 3.1192, |
|
"step": 12000 |
|
} |
|
], |
|
"logging_steps": 10, |
|
"max_steps": 40776, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 3, |
|
"save_steps": 4000, |
|
"total_flos": 1.087952394387456e+18, |
|
"train_batch_size": 4, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
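
A minimal sketch of how a file like this might be inspected after a run, assuming Python with matplotlib installed and the file saved under its usual name, trainer_state.json, inside a Trainer checkpoint directory (the path and the plotting choices are illustrative, not part of this log). The filter on the "loss" key skips any entries that carry only evaluation metrics:

import json

import matplotlib.pyplot as plt

# Assumed path: trainer_state.json typically sits inside a checkpoint
# directory such as checkpoint-12000/trainer_state.json.
with open("trainer_state.json") as f:
    state = json.load(f)

# Each log_history entry (written every `logging_steps` steps, 10 here)
# records step, epoch, loss, learning_rate, and grad_norm.
history = [h for h in state["log_history"] if "loss" in h]

steps = [h["step"] for h in history]
loss = [h["loss"] for h in history]
lr = [h["learning_rate"] for h in history]

fig, (ax_loss, ax_lr) = plt.subplots(2, 1, sharex=True, figsize=(8, 6))
ax_loss.plot(steps, loss)
ax_loss.set_ylabel("training loss")
ax_lr.plot(steps, lr)
ax_lr.set_ylabel("learning rate")
ax_lr.set_xlabel(f"step (of {state['max_steps']} total)")
fig.tight_layout()
plt.show()

Plotted this way, the section above shows the flat late-training regime of the schedule: loss hovering around 3.1-3.2 while the learning rate decays slowly from about 8.73e-5 to 8.23e-5 between steps 10220 and 12000.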
|
|