{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 2000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.005,
      "grad_norm": 200.0,
      "learning_rate": 9.000000000000001e-07,
      "loss": 1.6281,
      "step": 10
    },
    {
      "epoch": 0.01,
      "grad_norm": 268.0,
      "learning_rate": 1.9000000000000002e-06,
      "loss": 1.5906,
      "step": 20
    },
    {
      "epoch": 0.015,
      "grad_norm": 209.0,
      "learning_rate": 2.9e-06,
      "loss": 1.5938,
      "step": 30
    },
    {
      "epoch": 0.02,
      "grad_norm": 117.0,
      "learning_rate": 3.900000000000001e-06,
      "loss": 1.6305,
      "step": 40
    },
    {
      "epoch": 0.025,
      "grad_norm": 69.5,
      "learning_rate": 4.9000000000000005e-06,
      "loss": 1.5219,
      "step": 50
    },
    {
      "epoch": 0.03,
      "grad_norm": 86.5,
      "learning_rate": 5.9e-06,
      "loss": 1.4047,
      "step": 60
    },
    {
      "epoch": 0.035,
      "grad_norm": 104.5,
      "learning_rate": 6.9e-06,
      "loss": 1.2547,
      "step": 70
    },
    {
      "epoch": 0.04,
      "grad_norm": 40.5,
      "learning_rate": 7.9e-06,
      "loss": 1.1195,
      "step": 80
    },
    {
      "epoch": 0.045,
      "grad_norm": 31.625,
      "learning_rate": 8.900000000000001e-06,
      "loss": 0.9152,
      "step": 90
    },
    {
      "epoch": 0.05,
      "grad_norm": 40.0,
      "learning_rate": 9.9e-06,
      "loss": 0.8082,
      "step": 100
    },
    {
      "epoch": 0.055,
      "grad_norm": 20.875,
      "learning_rate": 1.0900000000000002e-05,
      "loss": 0.7641,
      "step": 110
    },
    {
      "epoch": 0.06,
      "grad_norm": 12.0625,
      "learning_rate": 1.1900000000000001e-05,
      "loss": 0.6582,
      "step": 120
    },
    {
      "epoch": 0.065,
      "grad_norm": 11.875,
      "learning_rate": 1.2900000000000002e-05,
      "loss": 0.6531,
      "step": 130
    },
    {
      "epoch": 0.07,
      "grad_norm": 19.5,
      "learning_rate": 1.39e-05,
      "loss": 0.5746,
      "step": 140
    },
    {
      "epoch": 0.075,
      "grad_norm": 9.625,
      "learning_rate": 1.4900000000000001e-05,
      "loss": 0.5488,
      "step": 150
    },
    {
      "epoch": 0.08,
      "grad_norm": 7.0,
      "learning_rate": 1.5900000000000004e-05,
      "loss": 0.5033,
      "step": 160
    },
    {
      "epoch": 0.085,
      "grad_norm": 9.6875,
      "learning_rate": 1.69e-05,
      "loss": 0.4877,
      "step": 170
    },
    {
      "epoch": 0.09,
      "grad_norm": 11.6875,
      "learning_rate": 1.79e-05,
      "loss": 0.4477,
      "step": 180
    },
    {
      "epoch": 0.095,
      "grad_norm": 9.25,
      "learning_rate": 1.8900000000000002e-05,
      "loss": 0.4496,
      "step": 190
    },
    {
      "epoch": 0.1,
      "grad_norm": 28.125,
      "learning_rate": 1.9900000000000003e-05,
      "loss": 0.4447,
      "step": 200
    },
    {
      "epoch": 0.105,
      "grad_norm": 8.4375,
      "learning_rate": 1.9998766324816606e-05,
      "loss": 0.3975,
      "step": 210
    },
    {
      "epoch": 0.11,
      "grad_norm": 11.4375,
      "learning_rate": 1.9994502159417576e-05,
      "loss": 0.3951,
      "step": 220
    },
    {
      "epoch": 0.115,
      "grad_norm": 6.90625,
      "learning_rate": 1.9987193571841865e-05,
      "loss": 0.3732,
      "step": 230
    },
    {
      "epoch": 0.12,
      "grad_norm": 5.15625,
      "learning_rate": 1.9976842788356054e-05,
      "loss": 0.4131,
      "step": 240
    },
    {
      "epoch": 0.125,
      "grad_norm": 5.5,
      "learning_rate": 1.9963452961909065e-05,
      "loss": 0.3783,
      "step": 250
    },
    {
      "epoch": 0.13,
      "grad_norm": 29.25,
      "learning_rate": 1.9947028171171742e-05,
      "loss": 0.3652,
      "step": 260
    },
    {
      "epoch": 0.135,
      "grad_norm": 5.75,
      "learning_rate": 1.9927573419294456e-05,
      "loss": 0.3396,
      "step": 270
    },
    {
      "epoch": 0.14,
      "grad_norm": 6.75,
      "learning_rate": 1.990509463238309e-05,
      "loss": 0.3322,
      "step": 280
    },
    {
      "epoch": 0.145,
      "grad_norm": 25.125,
      "learning_rate": 1.9879598657693894e-05,
      "loss": 0.3232,
      "step": 290
    },
    {
      "epoch": 0.15,
      "grad_norm": 4.5,
      "learning_rate": 1.985109326154774e-05,
      "loss": 0.3246,
      "step": 300
    },
    {
      "epoch": 0.155,
      "grad_norm": 5.3125,
      "learning_rate": 1.981958712696444e-05,
      "loss": 0.3107,
      "step": 310
    },
    {
      "epoch": 0.16,
      "grad_norm": 5.375,
      "learning_rate": 1.9785089851017788e-05,
      "loss": 0.3199,
      "step": 320
    },
    {
      "epoch": 0.165,
      "grad_norm": 4.9375,
      "learning_rate": 1.974761194191222e-05,
      "loss": 0.2945,
      "step": 330
    },
    {
      "epoch": 0.17,
      "grad_norm": 7.4375,
      "learning_rate": 1.970716481578191e-05,
      "loss": 0.2832,
      "step": 340
    },
    {
      "epoch": 0.175,
      "grad_norm": 7.40625,
      "learning_rate": 1.9663760793213297e-05,
      "loss": 0.2779,
      "step": 350
    },
    {
      "epoch": 0.18,
      "grad_norm": 5.15625,
      "learning_rate": 1.9617413095492114e-05,
      "loss": 0.3148,
      "step": 360
    },
    {
      "epoch": 0.185,
      "grad_norm": 5.71875,
      "learning_rate": 1.956813584057608e-05,
      "loss": 0.2857,
      "step": 370
    },
    {
      "epoch": 0.19,
      "grad_norm": 3.953125,
      "learning_rate": 1.9515944038794384e-05,
      "loss": 0.2896,
      "step": 380
    },
    {
      "epoch": 0.195,
      "grad_norm": 5.125,
      "learning_rate": 1.9460853588275454e-05,
      "loss": 0.2818,
      "step": 390
    },
    {
      "epoch": 0.2,
      "grad_norm": 4.625,
      "learning_rate": 1.940288127010419e-05,
      "loss": 0.2939,
      "step": 400
    },
    {
      "epoch": 0.205,
      "grad_norm": 3.484375,
      "learning_rate": 1.9342044743210295e-05,
      "loss": 0.2803,
      "step": 410
    },
    {
      "epoch": 0.21,
      "grad_norm": 4.375,
      "learning_rate": 1.92783625389892e-05,
      "loss": 0.2727,
      "step": 420
    },
    {
      "epoch": 0.215,
      "grad_norm": 4.78125,
      "learning_rate": 1.9211854055657216e-05,
      "loss": 0.2787,
      "step": 430
    },
    {
      "epoch": 0.22,
      "grad_norm": 4.59375,
      "learning_rate": 1.9142539552342638e-05,
      "loss": 0.2818,
      "step": 440
    },
    {
      "epoch": 0.225,
      "grad_norm": 5.75,
      "learning_rate": 1.907044014291465e-05,
      "loss": 0.2744,
      "step": 450
    },
    {
      "epoch": 0.23,
      "grad_norm": 4.46875,
      "learning_rate": 1.8995577789551806e-05,
      "loss": 0.2768,
      "step": 460
    },
    {
      "epoch": 0.235,
      "grad_norm": 4.84375,
      "learning_rate": 1.8917975296052143e-05,
      "loss": 0.2617,
      "step": 470
    },
    {
      "epoch": 0.24,
      "grad_norm": 3.859375,
      "learning_rate": 1.8837656300886937e-05,
      "loss": 0.2687,
      "step": 480
    },
    {
      "epoch": 0.245,
      "grad_norm": 2.109375,
      "learning_rate": 1.875464527000018e-05,
      "loss": 0.2604,
      "step": 490
    },
    {
      "epoch": 0.25,
      "grad_norm": 3.484375,
      "learning_rate": 1.866896748935603e-05,
      "loss": 0.2674,
      "step": 500
    },
    {
      "epoch": 0.255,
      "grad_norm": 4.28125,
      "learning_rate": 1.858064905723645e-05,
      "loss": 0.2608,
      "step": 510
    },
    {
      "epoch": 0.26,
      "grad_norm": 4.1875,
      "learning_rate": 1.8489716876291417e-05,
      "loss": 0.2599,
      "step": 520
    },
    {
      "epoch": 0.265,
      "grad_norm": 2.640625,
      "learning_rate": 1.8396198645344133e-05,
      "loss": 0.2675,
      "step": 530
    },
    {
      "epoch": 0.27,
      "grad_norm": 3.4375,
      "learning_rate": 1.8300122850953678e-05,
      "loss": 0.2666,
      "step": 540
    },
    {
      "epoch": 0.275,
      "grad_norm": 2.84375,
      "learning_rate": 1.8201518758737726e-05,
      "loss": 0.2628,
      "step": 550
    },
    {
      "epoch": 0.28,
      "grad_norm": 2.796875,
      "learning_rate": 1.8100416404457962e-05,
      "loss": 0.2571,
      "step": 560
    },
    {
      "epoch": 0.285,
      "grad_norm": 2.640625,
      "learning_rate": 1.799684658487091e-05,
      "loss": 0.2585,
      "step": 570
    },
    {
      "epoch": 0.29,
      "grad_norm": 3.65625,
      "learning_rate": 1.789084084834691e-05,
      "loss": 0.2493,
      "step": 580
    },
    {
      "epoch": 0.295,
      "grad_norm": 3.25,
      "learning_rate": 1.778243148526021e-05,
      "loss": 0.2497,
      "step": 590
    },
    {
      "epoch": 0.3,
      "grad_norm": 1.8203125,
      "learning_rate": 1.7671651518153e-05,
      "loss": 0.2422,
      "step": 600
    },
    {
      "epoch": 0.305,
      "grad_norm": 2.75,
      "learning_rate": 1.7558534691676396e-05,
      "loss": 0.2465,
      "step": 610
    },
    {
      "epoch": 0.31,
      "grad_norm": 3.53125,
      "learning_rate": 1.744311546231154e-05,
      "loss": 0.2442,
      "step": 620
    },
    {
      "epoch": 0.315,
      "grad_norm": 2.1875,
      "learning_rate": 1.732542898787379e-05,
      "loss": 0.2489,
      "step": 630
    },
    {
      "epoch": 0.32,
      "grad_norm": 4.0,
      "learning_rate": 1.7205511116803306e-05,
      "loss": 0.2508,
      "step": 640
    },
    {
      "epoch": 0.325,
      "grad_norm": 2.234375,
      "learning_rate": 1.708339837724529e-05,
      "loss": 0.2433,
      "step": 650
    },
    {
      "epoch": 0.33,
      "grad_norm": 4.1875,
      "learning_rate": 1.6959127965923144e-05,
      "loss": 0.2458,
      "step": 660
    },
    {
      "epoch": 0.335,
      "grad_norm": 2.109375,
      "learning_rate": 1.6832737736807994e-05,
      "loss": 0.2391,
      "step": 670
    },
    {
      "epoch": 0.34,
      "grad_norm": 2.15625,
      "learning_rate": 1.6704266189587992e-05,
      "loss": 0.2273,
      "step": 680
    },
    {
      "epoch": 0.345,
      "grad_norm": 1.9765625,
      "learning_rate": 1.657375245794096e-05,
      "loss": 0.2487,
      "step": 690
    },
    {
      "epoch": 0.35,
      "grad_norm": 2.953125,
      "learning_rate": 1.644123629761387e-05,
      "loss": 0.2394,
      "step": 700
    },
    {
      "epoch": 0.355,
      "grad_norm": 4.0625,
      "learning_rate": 1.6306758074312866e-05,
      "loss": 0.2298,
      "step": 710
    },
    {
      "epoch": 0.36,
      "grad_norm": 2.125,
      "learning_rate": 1.617035875140749e-05,
      "loss": 0.2276,
      "step": 720
    },
    {
      "epoch": 0.365,
      "grad_norm": 3.46875,
      "learning_rate": 1.6032079877452825e-05,
      "loss": 0.2344,
      "step": 730
    },
    {
      "epoch": 0.37,
      "grad_norm": 2.21875,
      "learning_rate": 1.5891963573533424e-05,
      "loss": 0.2327,
      "step": 740
    },
    {
      "epoch": 0.375,
      "grad_norm": 2.96875,
      "learning_rate": 1.575005252043279e-05,
      "loss": 0.2339,
      "step": 750
    },
    {
      "epoch": 0.38,
      "grad_norm": 5.8125,
      "learning_rate": 1.560638994563242e-05,
      "loss": 0.2266,
      "step": 760
    },
    {
      "epoch": 0.385,
      "grad_norm": 6.25,
      "learning_rate": 1.5461019610144292e-05,
      "loss": 0.2316,
      "step": 770
    },
    {
      "epoch": 0.39,
      "grad_norm": 2.265625,
      "learning_rate": 1.531398579518083e-05,
      "loss": 0.2227,
      "step": 780
    },
    {
      "epoch": 0.395,
      "grad_norm": 2.03125,
      "learning_rate": 1.516533328866642e-05,
      "loss": 0.2313,
      "step": 790
    },
    {
      "epoch": 0.4,
      "grad_norm": 2.46875,
      "learning_rate": 1.5015107371594576e-05,
      "loss": 0.2365,
      "step": 800
    },
    {
      "epoch": 0.405,
      "grad_norm": 4.0625,
      "learning_rate": 1.4863353804234906e-05,
      "loss": 0.2281,
      "step": 810
    },
    {
      "epoch": 0.41,
      "grad_norm": 2.328125,
      "learning_rate": 1.47101188121941e-05,
      "loss": 0.2334,
      "step": 820
    },
    {
      "epoch": 0.415,
      "grad_norm": 2.65625,
      "learning_rate": 1.4555449072335157e-05,
      "loss": 0.2204,
      "step": 830
    },
    {
      "epoch": 0.42,
      "grad_norm": 3.09375,
      "learning_rate": 1.4399391698559153e-05,
      "loss": 0.2271,
      "step": 840
    },
    {
      "epoch": 0.425,
      "grad_norm": 1.90625,
      "learning_rate": 1.4241994227453902e-05,
      "loss": 0.2256,
      "step": 850
    },
    {
      "epoch": 0.43,
      "grad_norm": 2.171875,
      "learning_rate": 1.408330460381385e-05,
      "loss": 0.2248,
      "step": 860
    },
    {
      "epoch": 0.435,
      "grad_norm": 8.125,
      "learning_rate": 1.3923371166035615e-05,
      "loss": 0.2195,
      "step": 870
    },
    {
      "epoch": 0.44,
      "grad_norm": 2.296875,
      "learning_rate": 1.3762242631393656e-05,
      "loss": 0.2189,
      "step": 880
    },
    {
      "epoch": 0.445,
      "grad_norm": 2.171875,
      "learning_rate": 1.3599968081200515e-05,
      "loss": 0.2261,
      "step": 890
    },
    {
      "epoch": 0.45,
      "grad_norm": 2.203125,
      "learning_rate": 1.3436596945856164e-05,
      "loss": 0.2249,
      "step": 900
    },
    {
      "epoch": 0.455,
      "grad_norm": 3.953125,
      "learning_rate": 1.327217898979104e-05,
      "loss": 0.2146,
      "step": 910
    },
    {
      "epoch": 0.46,
      "grad_norm": 1.6875,
      "learning_rate": 1.310676429630732e-05,
      "loss": 0.2161,
      "step": 920
    },
    {
      "epoch": 0.465,
      "grad_norm": 2.6875,
      "learning_rate": 1.294040325232304e-05,
      "loss": 0.2221,
      "step": 930
    },
    {
      "epoch": 0.47,
      "grad_norm": 2.0625,
      "learning_rate": 1.2773146533023782e-05,
      "loss": 0.2174,
      "step": 940
    },
    {
      "epoch": 0.475,
      "grad_norm": 1.8828125,
      "learning_rate": 1.2605045086426487e-05,
      "loss": 0.2241,
      "step": 950
    },
    {
      "epoch": 0.48,
      "grad_norm": 1.9140625,
      "learning_rate": 1.2436150117860226e-05,
      "loss": 0.2187,
      "step": 960
    },
    {
      "epoch": 0.485,
      "grad_norm": 3.28125,
      "learning_rate": 1.2266513074368552e-05,
      "loss": 0.2182,
      "step": 970
    },
    {
      "epoch": 0.49,
      "grad_norm": 2.78125,
      "learning_rate": 1.2096185629038219e-05,
      "loss": 0.2108,
      "step": 980
    },
    {
      "epoch": 0.495,
      "grad_norm": 2.109375,
      "learning_rate": 1.1925219665259076e-05,
      "loss": 0.2183,
      "step": 990
    },
    {
      "epoch": 0.5,
      "grad_norm": 3.515625,
      "learning_rate": 1.1753667260919872e-05,
      "loss": 0.2213,
      "step": 1000
    },
    {
      "epoch": 0.505,
      "grad_norm": 3.8125,
      "learning_rate": 1.1581580672544839e-05,
      "loss": 0.2141,
      "step": 1010
    },
    {
      "epoch": 0.51,
      "grad_norm": 3.0625,
      "learning_rate": 1.1409012319375828e-05,
      "loss": 0.2188,
      "step": 1020
    },
    {
      "epoch": 0.515,
      "grad_norm": 2.125,
      "learning_rate": 1.1236014767404929e-05,
      "loss": 0.2138,
      "step": 1030
    },
    {
      "epoch": 0.52,
      "grad_norm": 2.34375,
      "learning_rate": 1.1062640713362333e-05,
      "loss": 0.2147,
      "step": 1040
    },
    {
      "epoch": 0.525,
      "grad_norm": 2.4375,
      "learning_rate": 1.0888942968664417e-05,
      "loss": 0.2164,
      "step": 1050
    },
    {
      "epoch": 0.53,
      "grad_norm": 2.015625,
      "learning_rate": 1.071497444332686e-05,
      "loss": 0.2143,
      "step": 1060
    },
    {
      "epoch": 0.535,
      "grad_norm": 2.3125,
      "learning_rate": 1.0540788129847757e-05,
      "loss": 0.2146,
      "step": 1070
    },
    {
      "epoch": 0.54,
      "grad_norm": 1.65625,
      "learning_rate": 1.0366437087065564e-05,
      "loss": 0.2107,
      "step": 1080
    },
    {
      "epoch": 0.545,
      "grad_norm": 2.875,
      "learning_rate": 1.01919744239969e-05,
      "loss": 0.2086,
      "step": 1090
    },
    {
      "epoch": 0.55,
      "grad_norm": 2.078125,
      "learning_rate": 1.0017453283658984e-05,
      "loss": 0.2179,
      "step": 1100
    },
    {
      "epoch": 0.555,
      "grad_norm": 2.171875,
      "learning_rate": 9.842926826881796e-06,
      "loss": 0.2144,
      "step": 1110
    },
    {
      "epoch": 0.56,
      "grad_norm": 3.546875,
      "learning_rate": 9.668448216114739e-06,
      "loss": 0.2112,
      "step": 1120
    },
    {
      "epoch": 0.565,
      "grad_norm": 1.9765625,
      "learning_rate": 9.494070599232868e-06,
      "loss": 0.2066,
      "step": 1130
    },
    {
      "epoch": 0.57,
      "grad_norm": 1.8203125,
      "learning_rate": 9.319847093347522e-06,
      "loss": 0.2111,
      "step": 1140
    },
    {
      "epoch": 0.575,
      "grad_norm": 2.4375,
      "learning_rate": 9.145830768626326e-06,
      "loss": 0.2165,
      "step": 1150
    },
    {
      "epoch": 0.58,
      "grad_norm": 2.765625,
      "learning_rate": 8.972074632127533e-06,
      "loss": 0.2124,
      "step": 1160
    },
    {
      "epoch": 0.585,
      "grad_norm": 2.265625,
      "learning_rate": 8.79863161165353e-06,
      "loss": 0.2087,
      "step": 1170
    },
    {
      "epoch": 0.59,
      "grad_norm": 1.7890625,
      "learning_rate": 8.625554539628536e-06,
      "loss": 0.2106,
      "step": 1180
    },
    {
      "epoch": 0.595,
      "grad_norm": 3.28125,
      "learning_rate": 8.452896137005322e-06,
      "loss": 0.2186,
      "step": 1190
    },
    {
      "epoch": 0.6,
      "grad_norm": 2.78125,
      "learning_rate": 8.280708997205904e-06,
      "loss": 0.2035,
      "step": 1200
    },
    {
      "epoch": 0.605,
      "grad_norm": 1.5,
      "learning_rate": 8.109045570101086e-06,
      "loss": 0.2047,
      "step": 1210
    },
    {
      "epoch": 0.61,
      "grad_norm": 2.171875,
      "learning_rate": 7.937958146033706e-06,
      "loss": 0.2098,
      "step": 1220
    },
    {
      "epoch": 0.615,
      "grad_norm": 3.109375,
      "learning_rate": 7.767498839890489e-06,
      "loss": 0.2138,
      "step": 1230
    },
    {
      "epoch": 0.62,
      "grad_norm": 2.359375,
      "learning_rate": 7.597719575227364e-06,
      "loss": 0.2004,
      "step": 1240
    },
    {
      "epoch": 0.625,
      "grad_norm": 2.015625,
      "learning_rate": 7.428672068453041e-06,
      "loss": 0.202,
      "step": 1250
    },
    {
      "epoch": 0.63,
      "grad_norm": 1.796875,
      "learning_rate": 7.260407813075676e-06,
      "loss": 0.2035,
      "step": 1260
    },
    {
      "epoch": 0.635,
      "grad_norm": 2.84375,
      "learning_rate": 7.092978064017475e-06,
      "loss": 0.2062,
      "step": 1270
    },
    {
      "epoch": 0.64,
      "grad_norm": 2.375,
      "learning_rate": 6.92643382200193e-06,
      "loss": 0.2096,
      "step": 1280
    },
    {
      "epoch": 0.645,
      "grad_norm": 1.5546875,
      "learning_rate": 6.7608258180185085e-06,
      "loss": 0.2062,
      "step": 1290
    },
    {
      "epoch": 0.65,
      "grad_norm": 1.9609375,
      "learning_rate": 6.596204497869501e-06,
      "loss": 0.1997,
      "step": 1300
    },
    {
      "epoch": 0.655,
      "grad_norm": 2.4375,
      "learning_rate": 6.432620006803747e-06,
      "loss": 0.2051,
      "step": 1310
    },
    {
      "epoch": 0.66,
      "grad_norm": 2.265625,
      "learning_rate": 6.2701221742419106e-06,
      "loss": 0.2069,
      "step": 1320
    },
    {
      "epoch": 0.665,
      "grad_norm": 1.75,
      "learning_rate": 6.108760498597939e-06,
      "loss": 0.2104,
      "step": 1330
    },
    {
      "epoch": 0.67,
      "grad_norm": 2.3125,
      "learning_rate": 5.948584132201376e-06,
      "loss": 0.2053,
      "step": 1340
    },
    {
      "epoch": 0.675,
      "grad_norm": 1.8671875,
      "learning_rate": 5.789641866325091e-06,
      "loss": 0.2034,
      "step": 1350
    },
    {
      "epoch": 0.68,
      "grad_norm": 2.09375,
      "learning_rate": 5.631982116322981e-06,
      "loss": 0.2029,
      "step": 1360
    },
    {
      "epoch": 0.685,
      "grad_norm": 1.640625,
      "learning_rate": 5.475652906882173e-06,
      "loss": 0.2013,
      "step": 1370
    },
    {
      "epoch": 0.69,
      "grad_norm": 2.078125,
      "learning_rate": 5.3207018573942684e-06,
      "loss": 0.201,
      "step": 1380
    },
    {
      "epoch": 0.695,
      "grad_norm": 1.4765625,
      "learning_rate": 5.167176167449977e-06,
      "loss": 0.2012,
      "step": 1390
    },
    {
      "epoch": 0.7,
      "grad_norm": 3.40625,
      "learning_rate": 5.015122602461698e-06,
      "loss": 0.2002,
      "step": 1400
    },
    {
      "epoch": 0.705,
      "grad_norm": 2.4375,
      "learning_rate": 4.864587479418302e-06,
      "loss": 0.1928,
      "step": 1410
    },
    {
      "epoch": 0.71,
      "grad_norm": 3.75,
      "learning_rate": 4.71561665277653e-06,
      "loss": 0.2071,
      "step": 1420
    },
    {
      "epoch": 0.715,
      "grad_norm": 3.015625,
      "learning_rate": 4.568255500493292e-06,
      "loss": 0.2014,
      "step": 1430
    },
    {
      "epoch": 0.72,
      "grad_norm": 2.6875,
      "learning_rate": 4.422548910203099e-06,
      "loss": 0.1977,
      "step": 1440
    },
    {
      "epoch": 0.725,
      "grad_norm": 3.9375,
      "learning_rate": 4.27854126554484e-06,
      "loss": 0.2134,
      "step": 1450
    },
    {
      "epoch": 0.73,
      "grad_norm": 2.0,
      "learning_rate": 4.136276432642107e-06,
      "loss": 0.2042,
      "step": 1460
    },
    {
      "epoch": 0.735,
      "grad_norm": 2.765625,
      "learning_rate": 3.9957977467411615e-06,
      "loss": 0.2013,
      "step": 1470
    },
    {
      "epoch": 0.74,
      "grad_norm": 2.828125,
      "learning_rate": 3.857147999010568e-06,
      "loss": 0.2078,
      "step": 1480
    },
    {
      "epoch": 0.745,
      "grad_norm": 2.375,
      "learning_rate": 3.7203694235066224e-06,
      "loss": 0.2105,
      "step": 1490
    },
    {
      "epoch": 0.75,
      "grad_norm": 2.390625,
      "learning_rate": 3.5855036843084213e-06,
      "loss": 0.1982,
      "step": 1500
    },
    {
      "epoch": 0.755,
      "grad_norm": 1.9765625,
      "learning_rate": 3.452591862826603e-06,
      "loss": 0.2046,
      "step": 1510
    },
    {
      "epoch": 0.76,
      "grad_norm": 2.1875,
      "learning_rate": 3.3216744452895356e-06,
      "loss": 0.1966,
      "step": 1520
    },
    {
      "epoch": 0.765,
      "grad_norm": 2.0625,
      "learning_rate": 3.192791310410822e-06,
      "loss": 0.2008,
      "step": 1530
    },
    {
      "epoch": 0.77,
      "grad_norm": 2.78125,
      "learning_rate": 3.0659817172418694e-06,
      "loss": 0.2061,
      "step": 1540
    },
    {
      "epoch": 0.775,
      "grad_norm": 4.1875,
      "learning_rate": 2.9412842932131904e-06,
      "loss": 0.2051,
      "step": 1550
    },
    {
      "epoch": 0.78,
      "grad_norm": 1.578125,
      "learning_rate": 2.8187370223681134e-06,
      "loss": 0.2004,
      "step": 1560
    },
    {
      "epoch": 0.785,
      "grad_norm": 1.9140625,
      "learning_rate": 2.698377233792476e-06,
      "loss": 0.1971,
      "step": 1570
    },
    {
      "epoch": 0.79,
      "grad_norm": 2.390625,
      "learning_rate": 2.5802415902438373e-06,
      "loss": 0.2075,
      "step": 1580
    },
    {
      "epoch": 0.795,
      "grad_norm": 2.71875,
      "learning_rate": 2.464366076983623e-06,
      "loss": 0.2067,
      "step": 1590
    },
    {
      "epoch": 0.8,
      "grad_norm": 1.984375,
      "learning_rate": 2.3507859908156828e-06,
      "loss": 0.2045,
      "step": 1600
    },
    {
      "epoch": 0.805,
      "grad_norm": 2.125,
      "learning_rate": 2.2395359293345396e-06,
      "loss": 0.205,
      "step": 1610
    },
    {
      "epoch": 0.81,
      "grad_norm": 3.34375,
      "learning_rate": 2.130649780386628e-06,
      "loss": 0.1969,
      "step": 1620
    },
    {
      "epoch": 0.815,
      "grad_norm": 2.84375,
      "learning_rate": 2.024160711747717e-06,
      "loss": 0.2022,
      "step": 1630
    },
    {
      "epoch": 0.82,
      "grad_norm": 1.484375,
      "learning_rate": 1.9201011610196972e-06,
      "loss": 0.1927,
      "step": 1640
    },
    {
      "epoch": 0.825,
      "grad_norm": 2.203125,
      "learning_rate": 1.818502825749764e-06,
      "loss": 0.1979,
      "step": 1650
    },
    {
      "epoch": 0.83,
      "grad_norm": 2.078125,
      "learning_rate": 1.7193966537750561e-06,
      "loss": 0.196,
      "step": 1660
    },
    {
      "epoch": 0.835,
      "grad_norm": 2.015625,
      "learning_rate": 1.6228128337956128e-06,
      "loss": 0.1928,
      "step": 1670
    },
    {
      "epoch": 0.84,
      "grad_norm": 1.8359375,
      "learning_rate": 1.5287807861786308e-06,
      "loss": 0.2021,
      "step": 1680
    },
    {
      "epoch": 0.845,
      "grad_norm": 1.796875,
      "learning_rate": 1.4373291539967182e-06,
      "loss": 0.204,
      "step": 1690
    },
    {
      "epoch": 0.85,
      "grad_norm": 1.6875,
      "learning_rate": 1.3484857943029572e-06,
      "loss": 0.1955,
      "step": 1700
    },
    {
      "epoch": 0.855,
      "grad_norm": 2.125,
      "learning_rate": 1.2622777696453482e-06,
      "loss": 0.2059,
      "step": 1710
    },
    {
      "epoch": 0.86,
      "grad_norm": 2.0,
      "learning_rate": 1.1787313398233235e-06,
      "loss": 0.2032,
      "step": 1720
    },
    {
      "epoch": 0.865,
      "grad_norm": 2.28125,
      "learning_rate": 1.097871953888735e-06,
      "loss": 0.2061,
      "step": 1730
    },
    {
      "epoch": 0.87,
      "grad_norm": 3.078125,
      "learning_rate": 1.0197242423938447e-06,
      "loss": 0.2141,
      "step": 1740
    },
    {
      "epoch": 0.875,
      "grad_norm": 2.765625,
      "learning_rate": 9.44312009888606e-07,
      "loss": 0.2062,
      "step": 1750
    },
    {
      "epoch": 0.88,
      "grad_norm": 2.96875,
      "learning_rate": 8.716582276695729e-07,
      "loss": 0.2115,
      "step": 1760
    },
    {
      "epoch": 0.885,
      "grad_norm": 2.40625,
      "learning_rate": 8.017850267826233e-07,
      "loss": 0.1975,
      "step": 1770
    },
    {
      "epoch": 0.89,
      "grad_norm": 3.375,
      "learning_rate": 7.347136912816277e-07,
      "loss": 0.2001,
      "step": 1780
    },
    {
      "epoch": 0.895,
      "grad_norm": 3.078125,
      "learning_rate": 6.704646517451108e-07,
      "loss": 0.1975,
      "step": 1790
    },
    {
      "epoch": 0.9,
      "grad_norm": 3.0625,
      "learning_rate": 6.090574790529091e-07,
      "loss": 0.2022,
      "step": 1800
    },
    {
      "epoch": 0.905,
      "grad_norm": 2.515625,
      "learning_rate": 5.505108784246926e-07,
      "loss": 0.2062,
      "step": 1810
    },
    {
      "epoch": 0.91,
      "grad_norm": 2.578125,
      "learning_rate": 4.948426837221632e-07,
      "loss": 0.2004,
      "step": 1820
    },
    {
      "epoch": 0.915,
      "grad_norm": 2.140625,
      "learning_rate": 4.420698520166988e-07,
      "loss": 0.2004,
      "step": 1830
    },
    {
      "epoch": 0.92,
      "grad_norm": 3.265625,
      "learning_rate": 3.922084584240582e-07,
      "loss": 0.2022,
      "step": 1840
    },
    {
      "epoch": 0.925,
      "grad_norm": 1.7734375,
      "learning_rate": 3.4527369120775036e-07,
      "loss": 0.2038,
      "step": 1850
    },
    {
      "epoch": 0.93,
      "grad_norm": 2.1875,
      "learning_rate": 3.0127984715253246e-07,
      "loss": 0.2012,
      "step": 1860
    },
    {
      "epoch": 0.935,
      "grad_norm": 2.296875,
      "learning_rate": 2.6024032720948446e-07,
      "loss": 0.1992,
      "step": 1870
    },
    {
      "epoch": 0.94,
      "grad_norm": 1.8203125,
      "learning_rate": 2.221676324139377e-07,
      "loss": 0.1982,
      "step": 1880
    },
    {
      "epoch": 0.945,
      "grad_norm": 4.15625,
      "learning_rate": 1.8707336007754873e-07,
      "loss": 0.2089,
      "step": 1890
    },
    {
      "epoch": 0.95,
      "grad_norm": 2.8125,
      "learning_rate": 1.549682002556341e-07,
      "loss": 0.2021,
      "step": 1900
    },
    {
      "epoch": 0.955,
      "grad_norm": 2.1875,
      "learning_rate": 1.2586193249088607e-07,
      "loss": 0.1992,
      "step": 1910
    },
    {
      "epoch": 0.96,
      "grad_norm": 2.234375,
      "learning_rate": 9.976342283442464e-08,
      "loss": 0.1955,
      "step": 1920
    },
    {
      "epoch": 0.965,
      "grad_norm": 1.84375,
      "learning_rate": 7.66806211451132e-08,
      "loss": 0.191,
      "step": 1930
    },
    {
      "epoch": 0.97,
      "grad_norm": 1.84375,
      "learning_rate": 5.662055866795357e-08,
      "loss": 0.2019,
      "step": 1940
    },
    {
      "epoch": 0.975,
      "grad_norm": 2.65625,
      "learning_rate": 3.9589345892304673e-08,
      "loss": 0.2087,
      "step": 1950
    },
    {
      "epoch": 0.98,
      "grad_norm": 2.078125,
      "learning_rate": 2.5592170690560415e-08,
      "loss": 0.2021,
      "step": 1960
    },
    {
      "epoch": 0.985,
      "grad_norm": 4.84375,
      "learning_rate": 1.4633296737882607e-08,
      "loss": 0.2021,
      "step": 1970
    },
    {
      "epoch": 0.99,
      "grad_norm": 2.046875,
      "learning_rate": 6.716062213437679e-09,
      "loss": 0.2073,
      "step": 1980
    },
    {
      "epoch": 0.995,
      "grad_norm": 2.0625,
      "learning_rate": 1.8428787835578222e-09,
      "loss": 0.2001,
      "step": 1990
    },
    {
      "epoch": 1.0,
      "grad_norm": 2.09375,
      "learning_rate": 1.5230867123072757e-11,
      "loss": 0.1979,
      "step": 2000
    }
  ],
  "logging_steps": 10,
  "max_steps": 2000,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 9223372036854775807,
  "save_steps": 2000,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 5.2073827663872e+17,
  "train_batch_size": 32,
  "trial_name": null,
  "trial_params": null
}
|
|