{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 10000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.001,
      "grad_norm": 25.875,
      "learning_rate": 1e-05,
      "loss": 5.3992,
      "step": 10
    },
    {
      "epoch": 0.002,
      "grad_norm": 18.75,
      "learning_rate": 1e-05,
      "loss": 4.1384,
      "step": 20
    },
    {
      "epoch": 0.003,
      "grad_norm": 22.25,
      "learning_rate": 1e-05,
      "loss": 4.0857,
      "step": 30
    },
    {
      "epoch": 0.004,
      "grad_norm": 18.875,
      "learning_rate": 1e-05,
      "loss": 3.5263,
      "step": 40
    },
    {
      "epoch": 0.005,
      "grad_norm": 19.0,
      "learning_rate": 1e-05,
      "loss": 3.7144,
      "step": 50
    },
    {
      "epoch": 0.006,
      "grad_norm": 18.625,
      "learning_rate": 1e-05,
      "loss": 3.6529,
      "step": 60
    },
    {
      "epoch": 0.007,
      "grad_norm": 27.625,
      "learning_rate": 1e-05,
      "loss": 3.4289,
      "step": 70
    },
    {
      "epoch": 0.008,
      "grad_norm": 19.75,
      "learning_rate": 1e-05,
      "loss": 3.1815,
      "step": 80
    },
    {
      "epoch": 0.009,
      "grad_norm": 21.25,
      "learning_rate": 1e-05,
      "loss": 3.3833,
      "step": 90
    },
    {
      "epoch": 0.01,
      "grad_norm": 21.0,
      "learning_rate": 1e-05,
      "loss": 3.1461,
      "step": 100
    },
    {
      "epoch": 0.011,
      "grad_norm": 18.0,
      "learning_rate": 1e-05,
      "loss": 3.166,
      "step": 110
    },
    {
      "epoch": 0.012,
      "grad_norm": 20.0,
      "learning_rate": 1e-05,
      "loss": 3.255,
      "step": 120
    },
    {
      "epoch": 0.013,
      "grad_norm": 30.625,
      "learning_rate": 1e-05,
      "loss": 3.4299,
      "step": 130
    },
    {
      "epoch": 0.014,
      "grad_norm": 20.5,
      "learning_rate": 1e-05,
      "loss": 3.308,
      "step": 140
    },
    {
      "epoch": 0.015,
      "grad_norm": 15.8125,
      "learning_rate": 1e-05,
      "loss": 2.9661,
      "step": 150
    },
    {
      "epoch": 0.016,
      "grad_norm": 19.125,
      "learning_rate": 1e-05,
      "loss": 2.76,
      "step": 160
    },
    {
      "epoch": 0.017,
      "grad_norm": 19.0,
      "learning_rate": 1e-05,
      "loss": 2.951,
      "step": 170
    },
    {
      "epoch": 0.018,
      "grad_norm": 24.375,
      "learning_rate": 1e-05,
      "loss": 2.9323,
      "step": 180
    },
    {
      "epoch": 0.019,
      "grad_norm": 17.5,
      "learning_rate": 1e-05,
      "loss": 2.8869,
      "step": 190
    },
    {
      "epoch": 0.02,
      "grad_norm": 18.25,
      "learning_rate": 1e-05,
      "loss": 2.8807,
      "step": 200
    },
    {
      "epoch": 0.021,
      "grad_norm": 21.25,
      "learning_rate": 1e-05,
      "loss": 2.6979,
      "step": 210
    },
    {
      "epoch": 0.022,
      "grad_norm": 19.5,
      "learning_rate": 1e-05,
      "loss": 3.2043,
      "step": 220
    },
    {
      "epoch": 0.023,
      "grad_norm": 20.5,
      "learning_rate": 1e-05,
      "loss": 3.1682,
      "step": 230
    },
    {
      "epoch": 0.024,
      "grad_norm": 19.125,
      "learning_rate": 1e-05,
      "loss": 3.0403,
      "step": 240
    },
    {
      "epoch": 0.025,
      "grad_norm": 19.875,
      "learning_rate": 1e-05,
      "loss": 2.9605,
      "step": 250
    },
    {
      "epoch": 0.026,
      "grad_norm": 18.75,
      "learning_rate": 1e-05,
      "loss": 2.7015,
      "step": 260
    },
    {
      "epoch": 0.027,
      "grad_norm": 18.125,
      "learning_rate": 1e-05,
      "loss": 2.92,
      "step": 270
    },
    {
      "epoch": 0.028,
      "grad_norm": 17.0,
      "learning_rate": 1e-05,
      "loss": 2.9273,
      "step": 280
    },
    {
      "epoch": 0.029,
      "grad_norm": 20.75,
      "learning_rate": 1e-05,
      "loss": 3.0933,
      "step": 290
    },
    {
      "epoch": 0.03,
      "grad_norm": 20.875,
      "learning_rate": 1e-05,
      "loss": 3.0358,
      "step": 300
    },
    {
      "epoch": 0.031,
      "grad_norm": 14.625,
      "learning_rate": 1e-05,
      "loss": 2.6916,
      "step": 310
    },
    {
      "epoch": 0.032,
      "grad_norm": 18.5,
      "learning_rate": 1e-05,
      "loss": 3.0805,
      "step": 320
    },
    {
      "epoch": 0.033,
      "grad_norm": 20.625,
      "learning_rate": 1e-05,
      "loss": 2.8995,
      "step": 330
    },
    {
      "epoch": 0.034,
      "grad_norm": 16.75,
      "learning_rate": 1e-05,
      "loss": 2.8352,
      "step": 340
    },
    {
      "epoch": 0.035,
      "grad_norm": 17.25,
      "learning_rate": 1e-05,
      "loss": 3.1113,
      "step": 350
    },
    {
      "epoch": 0.036,
      "grad_norm": 22.75,
      "learning_rate": 1e-05,
      "loss": 3.144,
      "step": 360
    },
    {
      "epoch": 0.037,
      "grad_norm": 19.25,
      "learning_rate": 1e-05,
      "loss": 3.1943,
      "step": 370
    },
    {
      "epoch": 0.038,
      "grad_norm": 18.25,
      "learning_rate": 1e-05,
      "loss": 3.0268,
      "step": 380
    },
    {
      "epoch": 0.039,
      "grad_norm": 15.3125,
      "learning_rate": 1e-05,
      "loss": 2.9316,
      "step": 390
    },
    {
      "epoch": 0.04,
      "grad_norm": 27.875,
      "learning_rate": 1e-05,
      "loss": 3.0486,
      "step": 400
    },
    {
      "epoch": 0.041,
      "grad_norm": 18.875,
      "learning_rate": 1e-05,
      "loss": 2.8194,
      "step": 410
    },
    {
      "epoch": 0.042,
      "grad_norm": 19.375,
      "learning_rate": 1e-05,
      "loss": 2.7822,
      "step": 420
    },
    {
      "epoch": 0.043,
      "grad_norm": 18.875,
      "learning_rate": 1e-05,
      "loss": 2.945,
      "step": 430
    },
    {
      "epoch": 0.044,
      "grad_norm": 17.75,
      "learning_rate": 1e-05,
      "loss": 2.552,
      "step": 440
    },
    {
      "epoch": 0.045,
      "grad_norm": 21.125,
      "learning_rate": 1e-05,
      "loss": 2.762,
      "step": 450
    },
    {
      "epoch": 0.046,
      "grad_norm": 18.25,
      "learning_rate": 1e-05,
      "loss": 2.6553,
      "step": 460
    },
    {
      "epoch": 0.047,
      "grad_norm": 16.125,
      "learning_rate": 1e-05,
      "loss": 2.8581,
      "step": 470
    },
    {
      "epoch": 0.048,
      "grad_norm": 18.875,
      "learning_rate": 1e-05,
      "loss": 2.7065,
      "step": 480
    },
    {
      "epoch": 0.049,
      "grad_norm": 21.25,
      "learning_rate": 1e-05,
      "loss": 2.5793,
      "step": 490
    },
    {
      "epoch": 0.05,
      "grad_norm": 18.625,
      "learning_rate": 1e-05,
      "loss": 2.7661,
      "step": 500
    },
    {
      "epoch": 0.051,
      "grad_norm": 22.625,
      "learning_rate": 1e-05,
      "loss": 3.034,
      "step": 510
    },
    {
      "epoch": 0.052,
      "grad_norm": 19.25,
      "learning_rate": 1e-05,
      "loss": 2.7611,
      "step": 520
    },
    {
      "epoch": 0.053,
      "grad_norm": 18.5,
      "learning_rate": 1e-05,
      "loss": 2.672,
      "step": 530
    },
    {
      "epoch": 0.054,
      "grad_norm": 17.0,
      "learning_rate": 1e-05,
      "loss": 3.2146,
      "step": 540
    },
    {
      "epoch": 0.055,
      "grad_norm": 20.625,
      "learning_rate": 1e-05,
      "loss": 2.9011,
      "step": 550
    },
    {
      "epoch": 0.056,
      "grad_norm": 19.0,
      "learning_rate": 1e-05,
      "loss": 2.8977,
      "step": 560
    },
    {
      "epoch": 0.057,
      "grad_norm": 20.0,
      "learning_rate": 1e-05,
      "loss": 2.8751,
      "step": 570
    },
    {
      "epoch": 0.058,
      "grad_norm": 18.375,
      "learning_rate": 1e-05,
      "loss": 2.8186,
      "step": 580
    },
    {
      "epoch": 0.059,
      "grad_norm": 22.75,
      "learning_rate": 1e-05,
      "loss": 2.85,
      "step": 590
    },
    {
      "epoch": 0.06,
      "grad_norm": 17.375,
      "learning_rate": 1e-05,
      "loss": 2.5925,
      "step": 600
    },
    {
      "epoch": 0.061,
      "grad_norm": 18.625,
      "learning_rate": 1e-05,
      "loss": 2.8579,
      "step": 610
    },
    {
      "epoch": 0.062,
      "grad_norm": 21.25,
      "learning_rate": 1e-05,
      "loss": 2.823,
      "step": 620
    },
    {
      "epoch": 0.063,
      "grad_norm": 18.125,
      "learning_rate": 1e-05,
      "loss": 2.9454,
      "step": 630
    },
    {
      "epoch": 0.064,
      "grad_norm": 16.875,
      "learning_rate": 1e-05,
      "loss": 2.6088,
      "step": 640
    },
    {
      "epoch": 0.065,
      "grad_norm": 21.375,
      "learning_rate": 1e-05,
      "loss": 2.7468,
      "step": 650
    },
    {
      "epoch": 0.066,
      "grad_norm": 21.0,
      "learning_rate": 1e-05,
      "loss": 2.5891,
      "step": 660
    },
    {
      "epoch": 0.067,
      "grad_norm": 22.125,
      "learning_rate": 1e-05,
      "loss": 2.9447,
      "step": 670
    },
    {
      "epoch": 0.068,
      "grad_norm": 18.375,
      "learning_rate": 1e-05,
      "loss": 2.91,
      "step": 680
    },
    {
      "epoch": 0.069,
      "grad_norm": 19.25,
      "learning_rate": 1e-05,
      "loss": 2.7367,
      "step": 690
    },
    {
      "epoch": 0.07,
      "grad_norm": 20.375,
      "learning_rate": 1e-05,
      "loss": 2.9169,
      "step": 700
    },
    {
      "epoch": 0.071,
      "grad_norm": 19.125,
      "learning_rate": 1e-05,
      "loss": 2.7177,
      "step": 710
    },
    {
      "epoch": 0.072,
      "grad_norm": 18.125,
      "learning_rate": 1e-05,
      "loss": 2.7527,
      "step": 720
    },
    {
      "epoch": 0.073,
      "grad_norm": 19.75,
      "learning_rate": 1e-05,
      "loss": 2.8683,
      "step": 730
    },
    {
      "epoch": 0.074,
      "grad_norm": 20.25,
      "learning_rate": 1e-05,
      "loss": 2.8062,
      "step": 740
    },
    {
      "epoch": 0.075,
      "grad_norm": 19.0,
      "learning_rate": 1e-05,
      "loss": 3.0231,
      "step": 750
    },
    {
      "epoch": 0.076,
      "grad_norm": 17.875,
      "learning_rate": 1e-05,
      "loss": 2.642,
      "step": 760
    },
    {
      "epoch": 0.077,
      "grad_norm": 19.5,
      "learning_rate": 1e-05,
      "loss": 2.9741,
      "step": 770
    },
    {
      "epoch": 0.078,
      "grad_norm": 20.0,
      "learning_rate": 1e-05,
      "loss": 2.6832,
      "step": 780
    },
    {
      "epoch": 0.079,
      "grad_norm": 17.25,
      "learning_rate": 1e-05,
      "loss": 2.5637,
      "step": 790
    },
    {
      "epoch": 0.08,
      "grad_norm": 19.125,
      "learning_rate": 1e-05,
      "loss": 2.4617,
      "step": 800
    },
    {
      "epoch": 0.081,
      "grad_norm": 24.0,
      "learning_rate": 1e-05,
      "loss": 2.8996,
      "step": 810
    },
    {
      "epoch": 0.082,
      "grad_norm": 18.125,
      "learning_rate": 1e-05,
      "loss": 2.8013,
      "step": 820
    },
    {
      "epoch": 0.083,
      "grad_norm": 18.875,
      "learning_rate": 1e-05,
      "loss": 2.7277,
      "step": 830
    },
    {
      "epoch": 0.084,
      "grad_norm": 21.75,
      "learning_rate": 1e-05,
      "loss": 2.8149,
      "step": 840
    },
    {
      "epoch": 0.085,
      "grad_norm": 18.875,
      "learning_rate": 1e-05,
      "loss": 2.6265,
      "step": 850
    },
    {
      "epoch": 0.086,
      "grad_norm": 23.125,
      "learning_rate": 1e-05,
      "loss": 2.7303,
      "step": 860
    },
    {
      "epoch": 0.087,
      "grad_norm": 16.0,
      "learning_rate": 1e-05,
      "loss": 2.6056,
      "step": 870
    },
    {
      "epoch": 0.088,
      "grad_norm": 17.875,
      "learning_rate": 1e-05,
      "loss": 2.5529,
      "step": 880
    },
    {
      "epoch": 0.089,
      "grad_norm": 17.125,
      "learning_rate": 1e-05,
      "loss": 2.9246,
      "step": 890
    },
    {
      "epoch": 0.09,
      "grad_norm": 19.25,
      "learning_rate": 1e-05,
      "loss": 2.6179,
      "step": 900
    },
    {
      "epoch": 0.091,
      "grad_norm": 18.25,
      "learning_rate": 1e-05,
      "loss": 2.8344,
      "step": 910
    },
    {
      "epoch": 0.092,
      "grad_norm": 17.25,
      "learning_rate": 1e-05,
      "loss": 2.3406,
      "step": 920
    },
    {
      "epoch": 0.093,
      "grad_norm": 17.125,
      "learning_rate": 1e-05,
      "loss": 2.9006,
      "step": 930
    },
    {
      "epoch": 0.094,
      "grad_norm": 18.5,
      "learning_rate": 1e-05,
      "loss": 2.6893,
      "step": 940
    },
    {
      "epoch": 0.095,
      "grad_norm": 21.375,
      "learning_rate": 1e-05,
      "loss": 2.664,
      "step": 950
    },
    {
      "epoch": 0.096,
      "grad_norm": 18.25,
      "learning_rate": 1e-05,
      "loss": 2.6405,
      "step": 960
    },
    {
      "epoch": 0.097,
      "grad_norm": 20.625,
      "learning_rate": 1e-05,
      "loss": 2.4632,
      "step": 970
    },
    {
      "epoch": 0.098,
      "grad_norm": 19.25,
      "learning_rate": 1e-05,
      "loss": 2.7559,
      "step": 980
    },
    {
      "epoch": 0.099,
      "grad_norm": 14.1875,
      "learning_rate": 1e-05,
      "loss": 2.7806,
      "step": 990
    },
    {
      "epoch": 0.1,
      "grad_norm": 17.625,
      "learning_rate": 1e-05,
      "loss": 2.5489,
      "step": 1000
    },
    {
      "epoch": 0.101,
      "grad_norm": 19.125,
      "learning_rate": 1e-05,
      "loss": 2.6177,
      "step": 1010
    },
    {
      "epoch": 0.102,
      "grad_norm": 17.875,
      "learning_rate": 1e-05,
      "loss": 2.8924,
      "step": 1020
    },
    {
      "epoch": 0.103,
      "grad_norm": 19.75,
      "learning_rate": 1e-05,
      "loss": 2.9148,
      "step": 1030
    },
    {
      "epoch": 0.104,
      "grad_norm": 17.875,
      "learning_rate": 1e-05,
      "loss": 2.6254,
      "step": 1040
    },
    {
      "epoch": 0.105,
      "grad_norm": 22.375,
      "learning_rate": 1e-05,
      "loss": 2.7462,
      "step": 1050
    },
    {
      "epoch": 0.106,
      "grad_norm": 18.375,
      "learning_rate": 1e-05,
      "loss": 2.6232,
      "step": 1060
    },
    {
      "epoch": 0.107,
      "grad_norm": 19.625,
      "learning_rate": 1e-05,
      "loss": 2.7208,
      "step": 1070
    },
    {
      "epoch": 0.108,
      "grad_norm": 20.5,
      "learning_rate": 1e-05,
      "loss": 2.7076,
      "step": 1080
    },
    {
      "epoch": 0.109,
      "grad_norm": 20.75,
      "learning_rate": 1e-05,
      "loss": 2.7055,
      "step": 1090
    },
    {
      "epoch": 0.11,
      "grad_norm": 18.5,
      "learning_rate": 1e-05,
      "loss": 2.7597,
      "step": 1100
    },
    {
      "epoch": 0.111,
      "grad_norm": 15.1875,
      "learning_rate": 1e-05,
      "loss": 2.4683,
      "step": 1110
    },
    {
      "epoch": 0.112,
      "grad_norm": 17.25,
      "learning_rate": 1e-05,
      "loss": 2.3723,
      "step": 1120
    },
    {
      "epoch": 0.113,
      "grad_norm": 20.375,
      "learning_rate": 1e-05,
      "loss": 2.8105,
      "step": 1130
    },
    {
      "epoch": 0.114,
      "grad_norm": 18.125,
      "learning_rate": 1e-05,
      "loss": 2.5635,
      "step": 1140
    },
    {
      "epoch": 0.115,
      "grad_norm": 18.25,
      "learning_rate": 1e-05,
      "loss": 2.6278,
      "step": 1150
    },
    {
      "epoch": 0.116,
      "grad_norm": 18.25,
      "learning_rate": 1e-05,
      "loss": 2.797,
      "step": 1160
    },
    {
      "epoch": 0.117,
      "grad_norm": 21.5,
      "learning_rate": 1e-05,
      "loss": 2.7868,
      "step": 1170
    },
    {
      "epoch": 0.118,
      "grad_norm": 16.625,
      "learning_rate": 1e-05,
      "loss": 2.888,
      "step": 1180
    },
    {
      "epoch": 0.119,
      "grad_norm": 20.625,
      "learning_rate": 1e-05,
      "loss": 2.7304,
      "step": 1190
    },
    {
      "epoch": 0.12,
      "grad_norm": 17.375,
      "learning_rate": 1e-05,
      "loss": 2.6133,
      "step": 1200
    },
    {
      "epoch": 0.121,
      "grad_norm": 20.625,
      "learning_rate": 1e-05,
      "loss": 2.9734,
      "step": 1210
    },
    {
      "epoch": 0.122,
      "grad_norm": 15.5,
      "learning_rate": 1e-05,
      "loss": 2.9616,
      "step": 1220
    },
    {
      "epoch": 0.123,
      "grad_norm": 18.625,
      "learning_rate": 1e-05,
      "loss": 2.6913,
      "step": 1230
    },
    {
      "epoch": 0.124,
      "grad_norm": 17.625,
      "learning_rate": 1e-05,
      "loss": 2.7537,
      "step": 1240
    },
    {
      "epoch": 0.125,
      "grad_norm": 19.625,
      "learning_rate": 1e-05,
      "loss": 2.8173,
      "step": 1250
    },
    {
      "epoch": 0.126,
      "grad_norm": 21.375,
      "learning_rate": 1e-05,
      "loss": 2.9766,
      "step": 1260
    },
    {
      "epoch": 0.127,
      "grad_norm": 17.0,
      "learning_rate": 1e-05,
      "loss": 2.6977,
      "step": 1270
    },
    {
      "epoch": 0.128,
      "grad_norm": 15.5625,
      "learning_rate": 1e-05,
      "loss": 2.8323,
      "step": 1280
    },
    {
      "epoch": 0.129,
      "grad_norm": 16.625,
      "learning_rate": 1e-05,
      "loss": 2.6251,
      "step": 1290
    },
    {
      "epoch": 0.13,
      "grad_norm": 16.25,
      "learning_rate": 1e-05,
      "loss": 2.7067,
      "step": 1300
    },
    {
      "epoch": 0.131,
      "grad_norm": 16.75,
      "learning_rate": 1e-05,
      "loss": 2.7888,
      "step": 1310
    },
    {
      "epoch": 0.132,
      "grad_norm": 16.125,
      "learning_rate": 1e-05,
      "loss": 2.618,
      "step": 1320
    },
    {
      "epoch": 0.133,
      "grad_norm": 21.375,
      "learning_rate": 1e-05,
      "loss": 2.6424,
      "step": 1330
    },
    {
      "epoch": 0.134,
      "grad_norm": 15.5,
      "learning_rate": 1e-05,
      "loss": 2.5045,
      "step": 1340
    },
    {
      "epoch": 0.135,
      "grad_norm": 22.5,
      "learning_rate": 1e-05,
      "loss": 2.5698,
      "step": 1350
    },
    {
      "epoch": 0.136,
      "grad_norm": 19.375,
      "learning_rate": 1e-05,
      "loss": 2.4595,
      "step": 1360
    },
    {
      "epoch": 0.137,
      "grad_norm": 18.125,
      "learning_rate": 1e-05,
      "loss": 2.9727,
      "step": 1370
    },
    {
      "epoch": 0.138,
      "grad_norm": 18.125,
      "learning_rate": 1e-05,
      "loss": 2.81,
      "step": 1380
    },
    {
      "epoch": 0.139,
      "grad_norm": 15.5625,
      "learning_rate": 1e-05,
      "loss": 2.7279,
      "step": 1390
    },
    {
      "epoch": 0.14,
      "grad_norm": 16.375,
      "learning_rate": 1e-05,
      "loss": 2.4741,
      "step": 1400
    },
    {
      "epoch": 0.141,
      "grad_norm": 21.0,
      "learning_rate": 1e-05,
      "loss": 2.8221,
      "step": 1410
    },
    {
      "epoch": 0.142,
      "grad_norm": 18.5,
      "learning_rate": 1e-05,
      "loss": 2.8687,
      "step": 1420
    },
    {
      "epoch": 0.143,
      "grad_norm": 16.375,
      "learning_rate": 1e-05,
      "loss": 2.7914,
      "step": 1430
    },
    {
      "epoch": 0.144,
      "grad_norm": 19.875,
      "learning_rate": 1e-05,
      "loss": 2.699,
      "step": 1440
    },
    {
      "epoch": 0.145,
      "grad_norm": 18.5,
      "learning_rate": 1e-05,
      "loss": 2.4823,
      "step": 1450
    },
    {
      "epoch": 0.146,
      "grad_norm": 16.875,
      "learning_rate": 1e-05,
      "loss": 2.5993,
      "step": 1460
    },
    {
      "epoch": 0.147,
      "grad_norm": 19.625,
      "learning_rate": 1e-05,
      "loss": 2.4884,
      "step": 1470
    },
    {
      "epoch": 0.148,
      "grad_norm": 19.75,
      "learning_rate": 1e-05,
      "loss": 2.8256,
      "step": 1480
    },
    {
      "epoch": 0.149,
      "grad_norm": 16.125,
      "learning_rate": 1e-05,
      "loss": 2.6345,
      "step": 1490
    },
    {
      "epoch": 0.15,
      "grad_norm": 13.875,
      "learning_rate": 1e-05,
      "loss": 2.5908,
      "step": 1500
    },
    {
      "epoch": 0.151,
      "grad_norm": 19.875,
      "learning_rate": 1e-05,
      "loss": 2.8215,
      "step": 1510
    },
    {
      "epoch": 0.152,
      "grad_norm": 20.625,
      "learning_rate": 1e-05,
      "loss": 2.8448,
      "step": 1520
    },
    {
      "epoch": 0.153,
      "grad_norm": 17.25,
      "learning_rate": 1e-05,
      "loss": 2.8291,
      "step": 1530
    },
    {
      "epoch": 0.154,
      "grad_norm": 14.75,
      "learning_rate": 1e-05,
      "loss": 2.3605,
      "step": 1540
    },
    {
      "epoch": 0.155,
      "grad_norm": 15.0625,
      "learning_rate": 1e-05,
      "loss": 2.7866,
      "step": 1550
    },
    {
      "epoch": 0.156,
      "grad_norm": 20.25,
      "learning_rate": 1e-05,
      "loss": 2.7259,
      "step": 1560
    },
    {
      "epoch": 0.157,
      "grad_norm": 19.25,
      "learning_rate": 1e-05,
      "loss": 3.1778,
      "step": 1570
    },
    {
      "epoch": 0.158,
      "grad_norm": 22.25,
      "learning_rate": 1e-05,
      "loss": 2.7275,
      "step": 1580
    },
    {
      "epoch": 0.159,
      "grad_norm": 18.5,
      "learning_rate": 1e-05,
      "loss": 2.5353,
      "step": 1590
    },
    {
      "epoch": 0.16,
      "grad_norm": 23.5,
      "learning_rate": 1e-05,
      "loss": 2.5505,
      "step": 1600
    },
    {
      "epoch": 0.161,
      "grad_norm": 19.0,
      "learning_rate": 1e-05,
      "loss": 2.4199,
      "step": 1610
    },
    {
      "epoch": 0.162,
      "grad_norm": 15.125,
      "learning_rate": 1e-05,
      "loss": 2.5307,
      "step": 1620
    },
    {
      "epoch": 0.163,
      "grad_norm": 18.125,
      "learning_rate": 1e-05,
      "loss": 2.5556,
      "step": 1630
    },
    {
      "epoch": 0.164,
      "grad_norm": 20.0,
      "learning_rate": 1e-05,
      "loss": 2.6736,
      "step": 1640
    },
    {
      "epoch": 0.165,
      "grad_norm": 19.0,
      "learning_rate": 1e-05,
      "loss": 2.4342,
      "step": 1650
    },
    {
      "epoch": 0.166,
      "grad_norm": 22.875,
      "learning_rate": 1e-05,
      "loss": 2.6509,
      "step": 1660
    },
    {
      "epoch": 0.167,
      "grad_norm": 18.0,
      "learning_rate": 1e-05,
      "loss": 2.7432,
      "step": 1670
    },
    {
      "epoch": 0.168,
      "grad_norm": 16.875,
      "learning_rate": 1e-05,
      "loss": 2.8303,
      "step": 1680
    },
    {
      "epoch": 0.169,
      "grad_norm": 19.5,
      "learning_rate": 1e-05,
      "loss": 2.6912,
      "step": 1690
    },
    {
      "epoch": 0.17,
      "grad_norm": 20.5,
      "learning_rate": 1e-05,
      "loss": 2.6406,
      "step": 1700
    },
    {
      "epoch": 0.171,
      "grad_norm": 20.125,
      "learning_rate": 1e-05,
      "loss": 2.6771,
      "step": 1710
    },
    {
      "epoch": 0.172,
      "grad_norm": 21.125,
      "learning_rate": 1e-05,
      "loss": 2.8519,
      "step": 1720
    },
    {
      "epoch": 0.173,
      "grad_norm": 20.625,
      "learning_rate": 1e-05,
      "loss": 2.8109,
      "step": 1730
    },
    {
      "epoch": 0.174,
      "grad_norm": 20.125,
      "learning_rate": 1e-05,
      "loss": 2.6078,
      "step": 1740
    },
    {
      "epoch": 0.175,
      "grad_norm": 20.875,
      "learning_rate": 1e-05,
      "loss": 2.8467,
      "step": 1750
    },
    {
      "epoch": 0.176,
      "grad_norm": 16.375,
      "learning_rate": 1e-05,
      "loss": 3.0072,
      "step": 1760
    },
    {
      "epoch": 0.177,
      "grad_norm": 18.625,
      "learning_rate": 1e-05,
      "loss": 2.7086,
      "step": 1770
    },
    {
      "epoch": 0.178,
      "grad_norm": 17.125,
      "learning_rate": 1e-05,
      "loss": 2.9887,
      "step": 1780
    },
    {
      "epoch": 0.179,
      "grad_norm": 19.75,
      "learning_rate": 1e-05,
      "loss": 2.3727,
      "step": 1790
    },
    {
      "epoch": 0.18,
      "grad_norm": 19.5,
      "learning_rate": 1e-05,
      "loss": 2.6543,
      "step": 1800
    },
    {
      "epoch": 0.181,
      "grad_norm": 20.5,
      "learning_rate": 1e-05,
      "loss": 2.4249,
      "step": 1810
    },
    {
      "epoch": 0.182,
      "grad_norm": 17.75,
      "learning_rate": 1e-05,
      "loss": 2.3869,
      "step": 1820
    },
    {
      "epoch": 0.183,
      "grad_norm": 18.0,
      "learning_rate": 1e-05,
      "loss": 2.6094,
      "step": 1830
    },
    {
      "epoch": 0.184,
      "grad_norm": 21.125,
      "learning_rate": 1e-05,
      "loss": 2.5353,
      "step": 1840
    },
    {
      "epoch": 0.185,
      "grad_norm": 17.125,
      "learning_rate": 1e-05,
      "loss": 2.5744,
      "step": 1850
    },
    {
      "epoch": 0.186,
      "grad_norm": 17.0,
      "learning_rate": 1e-05,
      "loss": 2.5109,
      "step": 1860
    },
    {
      "epoch": 0.187,
      "grad_norm": 18.5,
      "learning_rate": 1e-05,
      "loss": 2.923,
      "step": 1870
    },
    {
      "epoch": 0.188,
      "grad_norm": 18.625,
      "learning_rate": 1e-05,
      "loss": 2.1993,
      "step": 1880
    },
    {
      "epoch": 0.189,
      "grad_norm": 16.5,
      "learning_rate": 1e-05,
      "loss": 2.6776,
      "step": 1890
    },
    {
      "epoch": 0.19,
      "grad_norm": 18.75,
      "learning_rate": 1e-05,
      "loss": 2.728,
      "step": 1900
    },
    {
      "epoch": 0.191,
      "grad_norm": 18.5,
      "learning_rate": 1e-05,
      "loss": 2.4932,
      "step": 1910
    },
    {
      "epoch": 0.192,
      "grad_norm": 20.25,
      "learning_rate": 1e-05,
      "loss": 2.6727,
      "step": 1920
    },
    {
      "epoch": 0.193,
      "grad_norm": 17.375,
      "learning_rate": 1e-05,
      "loss": 2.4958,
      "step": 1930
    },
    {
      "epoch": 0.194,
      "grad_norm": 18.125,
      "learning_rate": 1e-05,
      "loss": 2.8153,
      "step": 1940
    },
    {
      "epoch": 0.195,
      "grad_norm": 19.375,
      "learning_rate": 1e-05,
      "loss": 2.6896,
      "step": 1950
    },
    {
      "epoch": 0.196,
      "grad_norm": 20.125,
      "learning_rate": 1e-05,
      "loss": 2.776,
      "step": 1960
    },
    {
      "epoch": 0.197,
      "grad_norm": 21.875,
      "learning_rate": 1e-05,
      "loss": 2.8323,
      "step": 1970
    },
    {
      "epoch": 0.198,
      "grad_norm": 19.625,
      "learning_rate": 1e-05,
      "loss": 2.5435,
      "step": 1980
    },
    {
      "epoch": 0.199,
      "grad_norm": 20.625,
      "learning_rate": 1e-05,
      "loss": 2.9077,
      "step": 1990
    },
    {
      "epoch": 0.2,
      "grad_norm": 19.125,
      "learning_rate": 1e-05,
      "loss": 2.5479,
      "step": 2000
    },
    {
      "epoch": 0.201,
      "grad_norm": 18.5,
      "learning_rate": 1e-05,
      "loss": 2.5075,
      "step": 2010
    },
    {
      "epoch": 0.202,
      "grad_norm": 16.375,
      "learning_rate": 1e-05,
      "loss": 2.5987,
      "step": 2020
    },
    {
      "epoch": 0.203,
      "grad_norm": 19.625,
      "learning_rate": 1e-05,
      "loss": 2.8348,
      "step": 2030
    },
    {
      "epoch": 0.204,
      "grad_norm": 18.625,
      "learning_rate": 1e-05,
      "loss": 2.5709,
      "step": 2040
    },
    {
      "epoch": 0.205,
      "grad_norm": 21.0,
      "learning_rate": 1e-05,
      "loss": 2.7491,
      "step": 2050
    },
    {
      "epoch": 0.206,
      "grad_norm": 20.0,
      "learning_rate": 1e-05,
      "loss": 2.7234,
      "step": 2060
    },
    {
      "epoch": 0.207,
      "grad_norm": 20.25,
      "learning_rate": 1e-05,
      "loss": 2.8423,
      "step": 2070
    },
    {
      "epoch": 0.208,
      "grad_norm": 21.0,
      "learning_rate": 1e-05,
      "loss": 2.8345,
      "step": 2080
    },
    {
      "epoch": 0.209,
      "grad_norm": 16.625,
      "learning_rate": 1e-05,
      "loss": 2.4739,
      "step": 2090
    },
    {
      "epoch": 0.21,
      "grad_norm": 17.875,
      "learning_rate": 1e-05,
      "loss": 2.4002,
      "step": 2100
    },
    {
      "epoch": 0.211,
      "grad_norm": 19.375,
      "learning_rate": 1e-05,
      "loss": 2.945,
      "step": 2110
    },
    {
      "epoch": 0.212,
      "grad_norm": 16.5,
      "learning_rate": 1e-05,
      "loss": 2.4584,
      "step": 2120
    },
    {
      "epoch": 0.213,
      "grad_norm": 15.4375,
      "learning_rate": 1e-05,
      "loss": 2.758,
      "step": 2130
    },
    {
      "epoch": 0.214,
      "grad_norm": 18.0,
      "learning_rate": 1e-05,
      "loss": 2.7057,
      "step": 2140
    },
    {
      "epoch": 0.215,
      "grad_norm": 17.75,
      "learning_rate": 1e-05,
      "loss": 2.8857,
      "step": 2150
    },
    {
      "epoch": 0.216,
      "grad_norm": 23.25,
      "learning_rate": 1e-05,
      "loss": 2.9281,
      "step": 2160
    },
    {
      "epoch": 0.217,
      "grad_norm": 19.75,
      "learning_rate": 1e-05,
      "loss": 2.9082,
      "step": 2170
    },
    {
      "epoch": 0.218,
      "grad_norm": 18.5,
      "learning_rate": 1e-05,
      "loss": 2.6641,
      "step": 2180
    },
    {
      "epoch": 0.219,
      "grad_norm": 19.75,
      "learning_rate": 1e-05,
      "loss": 2.5807,
      "step": 2190
    },
    {
      "epoch": 0.22,
      "grad_norm": 23.0,
      "learning_rate": 1e-05,
      "loss": 2.7514,
      "step": 2200
    },
    {
      "epoch": 0.221,
      "grad_norm": 20.625,
      "learning_rate": 1e-05,
      "loss": 2.6935,
      "step": 2210
    },
    {
      "epoch": 0.222,
      "grad_norm": 23.875,
      "learning_rate": 1e-05,
      "loss": 2.8666,
      "step": 2220
    },
    {
      "epoch": 0.223,
      "grad_norm": 17.0,
      "learning_rate": 1e-05,
      "loss": 2.6467,
      "step": 2230
    },
    {
      "epoch": 0.224,
      "grad_norm": 23.75,
      "learning_rate": 1e-05,
      "loss": 2.5768,
      "step": 2240
    },
    {
      "epoch": 0.225,
      "grad_norm": 19.375,
      "learning_rate": 1e-05,
      "loss": 2.5218,
      "step": 2250
    },
    {
      "epoch": 0.226,
      "grad_norm": 17.375,
      "learning_rate": 1e-05,
      "loss": 2.4148,
      "step": 2260
    },
    {
      "epoch": 0.227,
      "grad_norm": 20.125,
      "learning_rate": 1e-05,
      "loss": 2.63,
      "step": 2270
    },
    {
      "epoch": 0.228,
      "grad_norm": 18.0,
      "learning_rate": 1e-05,
      "loss": 2.862,
      "step": 2280
    },
    {
      "epoch": 0.229,
      "grad_norm": 17.625,
      "learning_rate": 1e-05,
      "loss": 2.6363,
      "step": 2290
    },
    {
      "epoch": 0.23,
      "grad_norm": 18.375,
      "learning_rate": 1e-05,
      "loss": 2.4642,
      "step": 2300
    },
    {
      "epoch": 0.231,
      "grad_norm": 15.0625,
      "learning_rate": 1e-05,
      "loss": 2.5706,
      "step": 2310
    },
    {
      "epoch": 0.232,
      "grad_norm": 17.5,
      "learning_rate": 1e-05,
      "loss": 2.3966,
      "step": 2320
    },
    {
      "epoch": 0.233,
      "grad_norm": 20.5,
      "learning_rate": 1e-05,
      "loss": 2.7149,
      "step": 2330
    },
    {
      "epoch": 0.234,
      "grad_norm": 18.875,
      "learning_rate": 1e-05,
      "loss": 2.7477,
      "step": 2340
    },
    {
      "epoch": 0.235,
      "grad_norm": 18.625,
      "learning_rate": 1e-05,
      "loss": 2.3279,
      "step": 2350
    },
    {
      "epoch": 0.236,
      "grad_norm": 19.625,
      "learning_rate": 1e-05,
      "loss": 2.6766,
      "step": 2360
    },
    {
      "epoch": 0.237,
      "grad_norm": 20.625,
      "learning_rate": 1e-05,
      "loss": 2.8714,
      "step": 2370
    },
    {
      "epoch": 0.238,
      "grad_norm": 17.875,
      "learning_rate": 1e-05,
      "loss": 2.7685,
      "step": 2380
    },
    {
      "epoch": 0.239,
      "grad_norm": 17.5,
      "learning_rate": 1e-05,
      "loss": 2.5915,
      "step": 2390
    },
    {
      "epoch": 0.24,
      "grad_norm": 17.5,
      "learning_rate": 1e-05,
      "loss": 2.3483,
      "step": 2400
    },
    {
      "epoch": 0.241,
      "grad_norm": 19.875,
      "learning_rate": 1e-05,
      "loss": 2.6826,
      "step": 2410
    },
    {
      "epoch": 0.242,
      "grad_norm": 17.5,
      "learning_rate": 1e-05,
      "loss": 2.3133,
      "step": 2420
    },
    {
      "epoch": 0.243,
      "grad_norm": 21.125,
      "learning_rate": 1e-05,
      "loss": 2.6807,
      "step": 2430
    },
    {
      "epoch": 0.244,
      "grad_norm": 19.375,
      "learning_rate": 1e-05,
      "loss": 2.7883,
      "step": 2440
    },
    {
      "epoch": 0.245,
      "grad_norm": 19.25,
      "learning_rate": 1e-05,
      "loss": 2.4904,
      "step": 2450
    },
    {
      "epoch": 0.246,
      "grad_norm": 17.875,
      "learning_rate": 1e-05,
      "loss": 2.4732,
      "step": 2460
    },
    {
      "epoch": 0.247,
      "grad_norm": 16.625,
      "learning_rate": 1e-05,
      "loss": 2.6669,
      "step": 2470
    },
    {
      "epoch": 0.248,
      "grad_norm": 23.25,
      "learning_rate": 1e-05,
      "loss": 2.814,
      "step": 2480
    },
    {
      "epoch": 0.249,
      "grad_norm": 23.5,
      "learning_rate": 1e-05,
      "loss": 2.7448,
      "step": 2490
    },
    {
      "epoch": 0.25,
      "grad_norm": 23.0,
      "learning_rate": 1e-05,
      "loss": 2.9254,
      "step": 2500
    },
    {
      "epoch": 0.251,
      "grad_norm": 20.5,
      "learning_rate": 1e-05,
      "loss": 2.5313,
      "step": 2510
    },
    {
      "epoch": 0.252,
      "grad_norm": 19.0,
      "learning_rate": 1e-05,
      "loss": 2.6543,
      "step": 2520
    },
    {
      "epoch": 0.253,
      "grad_norm": 16.625,
      "learning_rate": 1e-05,
      "loss": 2.6981,
      "step": 2530
    },
    {
      "epoch": 0.254,
      "grad_norm": 22.625,
      "learning_rate": 1e-05,
      "loss": 2.8172,
      "step": 2540
    },
    {
      "epoch": 0.255,
      "grad_norm": 19.875,
      "learning_rate": 1e-05,
      "loss": 2.8452,
      "step": 2550
    },
    {
      "epoch": 0.256,
      "grad_norm": 19.375,
      "learning_rate": 1e-05,
      "loss": 2.5752,
      "step": 2560
    },
    {
      "epoch": 0.257,
      "grad_norm": 19.5,
      "learning_rate": 1e-05,
      "loss": 2.8001,
      "step": 2570
    },
    {
      "epoch": 0.258,
      "grad_norm": 15.3125,
      "learning_rate": 1e-05,
      "loss": 2.6752,
      "step": 2580
    },
    {
      "epoch": 0.259,
      "grad_norm": 16.0,
      "learning_rate": 1e-05,
      "loss": 2.5199,
      "step": 2590
    },
    {
      "epoch": 0.26,
      "grad_norm": 23.625,
      "learning_rate": 1e-05,
      "loss": 2.5651,
      "step": 2600
    },
    {
      "epoch": 0.261,
      "grad_norm": 19.5,
      "learning_rate": 1e-05,
      "loss": 2.2722,
      "step": 2610
    },
    {
      "epoch": 0.262,
      "grad_norm": 18.25,
      "learning_rate": 1e-05,
      "loss": 2.4842,
      "step": 2620
    },
    {
      "epoch": 0.263,
      "grad_norm": 18.5,
      "learning_rate": 1e-05,
      "loss": 2.3899,
      "step": 2630
    },
    {
      "epoch": 0.264,
      "grad_norm": 20.75,
      "learning_rate": 1e-05,
      "loss": 2.6523,
      "step": 2640
    },
    {
      "epoch": 0.265,
      "grad_norm": 20.375,
      "learning_rate": 1e-05,
      "loss": 2.7928,
      "step": 2650
    },
    {
      "epoch": 0.266,
      "grad_norm": 16.875,
      "learning_rate": 1e-05,
      "loss": 2.4716,
      "step": 2660
    },
    {
      "epoch": 0.267,
      "grad_norm": 18.375,
      "learning_rate": 1e-05,
      "loss": 2.4442,
      "step": 2670
    },
    {
      "epoch": 0.268,
      "grad_norm": 17.625,
      "learning_rate": 1e-05,
      "loss": 2.5541,
      "step": 2680
    },
    {
      "epoch": 0.269,
      "grad_norm": 18.875,
      "learning_rate": 1e-05,
      "loss": 2.4891,
      "step": 2690
    },
    {
      "epoch": 0.27,
      "grad_norm": 21.75,
      "learning_rate": 1e-05,
      "loss": 2.4502,
      "step": 2700
    },
    {
      "epoch": 0.271,
      "grad_norm": 21.875,
      "learning_rate": 1e-05,
      "loss": 2.7109,
      "step": 2710
    },
    {
      "epoch": 0.272,
      "grad_norm": 17.0,
      "learning_rate": 1e-05,
      "loss": 2.5166,
      "step": 2720
    },
    {
      "epoch": 0.273,
      "grad_norm": 18.5,
      "learning_rate": 1e-05,
      "loss": 2.5219,
      "step": 2730
    },
    {
      "epoch": 0.274,
      "grad_norm": 17.75,
      "learning_rate": 1e-05,
      "loss": 2.7278,
      "step": 2740
    },
    {
      "epoch": 0.275,
      "grad_norm": 20.75,
      "learning_rate": 1e-05,
      "loss": 2.4764,
      "step": 2750
    },
    {
      "epoch": 0.276,
      "grad_norm": 20.75,
      "learning_rate": 1e-05,
      "loss": 2.398,
      "step": 2760
    },
    {
      "epoch": 0.277,
      "grad_norm": 18.375,
      "learning_rate": 1e-05,
      "loss": 2.5402,
      "step": 2770
    },
    {
      "epoch": 0.278,
      "grad_norm": 20.0,
      "learning_rate": 1e-05,
      "loss": 2.5222,
      "step": 2780
    },
    {
      "epoch": 0.279,
      "grad_norm": 19.875,
      "learning_rate": 1e-05,
      "loss": 2.5831,
      "step": 2790
    },
    {
      "epoch": 0.28,
      "grad_norm": 21.0,
      "learning_rate": 1e-05,
      "loss": 2.6964,
      "step": 2800
    },
    {
      "epoch": 0.281,
      "grad_norm": 19.5,
      "learning_rate": 1e-05,
      "loss": 2.7975,
      "step": 2810
    },
    {
      "epoch": 0.282,
      "grad_norm": 18.125,
      "learning_rate": 1e-05,
      "loss": 2.6105,
      "step": 2820
    },
    {
      "epoch": 0.283,
      "grad_norm": 17.0,
      "learning_rate": 1e-05,
      "loss": 2.5746,
      "step": 2830
    },
    {
      "epoch": 0.284,
      "grad_norm": 17.25,
      "learning_rate": 1e-05,
      "loss": 2.562,
      "step": 2840
    },
    {
      "epoch": 0.285,
      "grad_norm": 19.875,
      "learning_rate": 1e-05,
      "loss": 2.319,
      "step": 2850
    },
    {
      "epoch": 0.286,
      "grad_norm": 22.75,
      "learning_rate": 1e-05,
      "loss": 2.9131,
      "step": 2860
    },
    {
      "epoch": 0.287,
      "grad_norm": 20.5,
      "learning_rate": 1e-05,
      "loss": 2.617,
      "step": 2870
    },
    {
      "epoch": 0.288,
      "grad_norm": 20.625,
      "learning_rate": 1e-05,
      "loss": 2.7058,
      "step": 2880
    },
    {
      "epoch": 0.289,
      "grad_norm": 18.875,
      "learning_rate": 1e-05,
      "loss": 2.5933,
      "step": 2890
    },
    {
      "epoch": 0.29,
      "grad_norm": 39.25,
      "learning_rate": 1e-05,
      "loss": 2.5761,
      "step": 2900
    },
    {
      "epoch": 0.291,
      "grad_norm": 17.625,
      "learning_rate": 1e-05,
      "loss": 2.5418,
      "step": 2910
    },
    {
      "epoch": 0.292,
      "grad_norm": 21.625,
      "learning_rate": 1e-05,
      "loss": 2.3706,
      "step": 2920
    },
    {
      "epoch": 0.293,
      "grad_norm": 19.25,
      "learning_rate": 1e-05,
      "loss": 2.6748,
      "step": 2930
    },
    {
      "epoch": 0.294,
      "grad_norm": 16.75,
      "learning_rate": 1e-05,
      "loss": 2.4772,
      "step": 2940
    },
    {
      "epoch": 0.295,
      "grad_norm": 17.25,
      "learning_rate": 1e-05,
      "loss": 2.3348,
      "step": 2950
    },
    {
      "epoch": 0.296,
      "grad_norm": 15.75,
      "learning_rate": 1e-05,
      "loss": 2.7895,
      "step": 2960
    },
    {
      "epoch": 0.297,
      "grad_norm": 19.0,
      "learning_rate": 1e-05,
      "loss": 2.5933,
      "step": 2970
    },
    {
      "epoch": 0.298,
      "grad_norm": 20.875,
      "learning_rate": 1e-05,
      "loss": 2.8536,
      "step": 2980
    },
    {
      "epoch": 0.299,
      "grad_norm": 19.25,
      "learning_rate": 1e-05,
      "loss": 2.4427,
      "step": 2990
    },
    {
      "epoch": 0.3,
      "grad_norm": 16.25,
      "learning_rate": 1e-05,
      "loss": 2.5157,
      "step": 3000
    },
    {
      "epoch": 0.301,
      "grad_norm": 20.125,
      "learning_rate": 1e-05,
      "loss": 2.8311,
      "step": 3010
    },
    {
      "epoch": 0.302,
      "grad_norm": 21.625,
      "learning_rate": 1e-05,
      "loss": 2.602,
      "step": 3020
    },
    {
      "epoch": 0.303,
      "grad_norm": 18.25,
      "learning_rate": 1e-05,
      "loss": 2.6318,
      "step": 3030
    },
    {
      "epoch": 0.304,
      "grad_norm": 20.375,
      "learning_rate": 1e-05,
      "loss": 2.471,
      "step": 3040
    },
    {
      "epoch": 0.305,
      "grad_norm": 17.875,
      "learning_rate": 1e-05,
      "loss": 2.706,
      "step": 3050
    },
    {
      "epoch": 0.306,
      "grad_norm": 14.1875,
      "learning_rate": 1e-05,
      "loss": 2.6716,
      "step": 3060
    },
    {
      "epoch": 0.307,
      "grad_norm": 16.125,
      "learning_rate": 1e-05,
      "loss": 2.2474,
      "step": 3070
    },
    {
      "epoch": 0.308,
      "grad_norm": 18.25,
      "learning_rate": 1e-05,
      "loss": 2.6439,
      "step": 3080
    },
    {
      "epoch": 0.309,
      "grad_norm": 20.125,
      "learning_rate": 1e-05,
      "loss": 2.7511,
      "step": 3090
    },
    {
      "epoch": 0.31,
      "grad_norm": 19.0,
      "learning_rate": 1e-05,
      "loss": 2.485,
      "step": 3100
    },
    {
      "epoch": 0.311,
      "grad_norm": 18.625,
      "learning_rate": 1e-05,
      "loss": 2.711,
      "step": 3110
    },
    {
      "epoch": 0.312,
      "grad_norm": 20.875,
      "learning_rate": 1e-05,
      "loss": 2.3507,
      "step": 3120
    },
    {
      "epoch": 0.313,
      "grad_norm": 22.0,
      "learning_rate": 1e-05,
      "loss": 2.8057,
      "step": 3130
    },
    {
      "epoch": 0.314,
      "grad_norm": 19.0,
      "learning_rate": 1e-05,
      "loss": 2.8168,
      "step": 3140
    },
    {
      "epoch": 0.315,
      "grad_norm": 15.75,
      "learning_rate": 1e-05,
      "loss": 2.6906,
      "step": 3150
    },
    {
      "epoch": 0.316,
      "grad_norm": 18.75,
      "learning_rate": 1e-05,
      "loss": 2.4336,
      "step": 3160
    },
    {
      "epoch": 0.317,
      "grad_norm": 22.75,
      "learning_rate": 1e-05,
      "loss": 2.4364,
      "step": 3170
    },
    {
      "epoch": 0.318,
      "grad_norm": 20.75,
      "learning_rate": 1e-05,
      "loss": 3.0738,
      "step": 3180
    },
    {
      "epoch": 0.319,
      "grad_norm": 16.875,
      "learning_rate": 1e-05,
      "loss": 2.6299,
      "step": 3190
    },
    {
      "epoch": 0.32,
      "grad_norm": 16.875,
      "learning_rate": 1e-05,
      "loss": 2.4558,
      "step": 3200
    },
    {
      "epoch": 0.321,
      "grad_norm": 20.5,
      "learning_rate": 1e-05,
      "loss": 2.3969,
      "step": 3210
    },
    {
      "epoch": 0.322,
      "grad_norm": 18.875,
      "learning_rate": 1e-05,
      "loss": 2.3913,
      "step": 3220
    },
    {
      "epoch": 0.323,
      "grad_norm": 16.875,
      "learning_rate": 1e-05,
      "loss": 2.4314,
      "step": 3230
    },
    {
      "epoch": 0.324,
      "grad_norm": 19.125,
      "learning_rate": 1e-05,
      "loss": 2.6117,
      "step": 3240
    },
    {
      "epoch": 0.325,
      "grad_norm": 17.125,
      "learning_rate": 1e-05,
      "loss": 2.3895,
      "step": 3250
    },
    {
      "epoch": 0.326,
      "grad_norm": 18.375,
      "learning_rate": 1e-05,
      "loss": 2.5207,
      "step": 3260
    },
    {
      "epoch": 0.327,
      "grad_norm": 15.75,
      "learning_rate": 1e-05,
      "loss": 2.281,
      "step": 3270
    },
    {
      "epoch": 0.328,
      "grad_norm": 19.125,
      "learning_rate": 1e-05,
      "loss": 2.6651,
      "step": 3280
    },
    {
      "epoch": 0.329,
      "grad_norm": 19.5,
      "learning_rate": 1e-05,
      "loss": 2.6336,
      "step": 3290
    },
    {
      "epoch": 0.33,
      "grad_norm": 17.0,
      "learning_rate": 1e-05,
      "loss": 2.4576,
      "step": 3300
    },
    {
      "epoch": 0.331,
      "grad_norm": 16.25,
      "learning_rate": 1e-05,
      "loss": 2.3937,
      "step": 3310
    },
    {
      "epoch": 0.332,
      "grad_norm": 20.0,
      "learning_rate": 1e-05,
      "loss": 2.6475,
      "step": 3320
    },
    {
      "epoch": 0.333,
      "grad_norm": 20.75,
      "learning_rate": 1e-05,
      "loss": 2.3746,
      "step": 3330
    },
    {
      "epoch": 0.334,
      "grad_norm": 19.0,
      "learning_rate": 1e-05,
      "loss": 2.4735,
      "step": 3340
    },
    {
      "epoch": 0.335,
      "grad_norm": 18.75,
      "learning_rate": 1e-05,
      "loss": 2.3351,
      "step": 3350
    },
    {
      "epoch": 0.336,
      "grad_norm": 22.75,
      "learning_rate": 1e-05,
      "loss": 2.8694,
      "step": 3360
    },
    {
      "epoch": 0.337,
      "grad_norm": 18.5,
      "learning_rate": 1e-05,
      "loss": 2.4649,
      "step": 3370
    },
    {
      "epoch": 0.338,
      "grad_norm": 18.875,
      "learning_rate": 1e-05,
      "loss": 2.4623,
      "step": 3380
    },
    {
      "epoch": 0.339,
      "grad_norm": 18.625,
      "learning_rate": 1e-05,
      "loss": 2.3888,
      "step": 3390
    },
    {
      "epoch": 0.34,
      "grad_norm": 19.75,
      "learning_rate": 1e-05,
      "loss": 2.8593,
      "step": 3400
    },
    {
      "epoch": 0.341,
      "grad_norm": 16.5,
      "learning_rate": 1e-05,
      "loss": 2.5711,
      "step": 3410
    },
    {
      "epoch": 0.342,
      "grad_norm": 20.75,
      "learning_rate": 1e-05,
      "loss": 2.5877,
      "step": 3420
    },
    {
      "epoch": 0.343,
      "grad_norm": 16.125,
      "learning_rate": 1e-05,
      "loss": 2.4582,
      "step": 3430
    },
    {
      "epoch": 0.344,
      "grad_norm": 21.625,
      "learning_rate": 1e-05,
      "loss": 2.6229,
      "step": 3440
    },
    {
      "epoch": 0.345,
      "grad_norm": 19.625,
      "learning_rate": 1e-05,
      "loss": 2.5171,
      "step": 3450
    },
    {
      "epoch": 0.346,
      "grad_norm": 18.625,
      "learning_rate": 1e-05,
      "loss": 2.3955,
      "step": 3460
    },
    {
      "epoch": 0.347,
      "grad_norm": 19.375,
      "learning_rate": 1e-05,
      "loss": 2.3764,
      "step": 3470
    },
    {
      "epoch": 0.348,
      "grad_norm": 17.75,
      "learning_rate": 1e-05,
      "loss": 2.6765,
      "step": 3480
    },
    {
      "epoch": 0.349,
      "grad_norm": 24.625,
      "learning_rate": 1e-05,
      "loss": 2.5731,
      "step": 3490
    },
    {
      "epoch": 0.35,
      "grad_norm": 15.8125,
      "learning_rate": 1e-05,
      "loss": 2.36,
      "step": 3500
    },
    {
      "epoch": 0.351,
      "grad_norm": 18.625,
      "learning_rate": 1e-05,
      "loss": 2.5466,
      "step": 3510
    },
    {
      "epoch": 0.352,
      "grad_norm": 19.5,
      "learning_rate": 1e-05,
      "loss": 2.4738,
      "step": 3520
    },
    {
      "epoch": 0.353,
      "grad_norm": 19.75,
      "learning_rate": 1e-05,
      "loss": 2.4195,
      "step": 3530
    },
    {
      "epoch": 0.354,
      "grad_norm": 18.875,
      "learning_rate": 1e-05,
      "loss": 2.7505,
      "step": 3540
    },
    {
      "epoch": 0.355,
      "grad_norm": 22.625,
      "learning_rate": 1e-05,
      "loss": 2.671,
      "step": 3550
    },
    {
      "epoch": 0.356,
      "grad_norm": 20.5,
      "learning_rate": 1e-05,
      "loss": 2.7635,
      "step": 3560
    },
    {
      "epoch": 0.357,
      "grad_norm": 22.25,
      "learning_rate": 1e-05,
      "loss": 2.4406,
      "step": 3570
    },
    {
      "epoch": 0.358,
      "grad_norm": 20.75,
      "learning_rate": 1e-05,
      "loss": 2.4645,
      "step": 3580
    },
    {
      "epoch": 0.359,
      "grad_norm": 17.375,
      "learning_rate": 1e-05,
      "loss": 2.5336,
      "step": 3590
    },
    {
      "epoch": 0.36,
      "grad_norm": 21.125,
      "learning_rate": 1e-05,
      "loss": 2.2931,
      "step": 3600
    },
    {
      "epoch": 0.361,
      "grad_norm": 20.5,
      "learning_rate": 1e-05,
      "loss": 2.4669,
      "step": 3610
    },
    {
      "epoch": 0.362,
      "grad_norm": 16.25,
      "learning_rate": 1e-05,
      "loss": 2.3572,
      "step": 3620
    },
    {
      "epoch": 0.363,
      "grad_norm": 19.875,
      "learning_rate": 1e-05,
      "loss": 2.5048,
      "step": 3630
    },
    {
      "epoch": 0.364,
      "grad_norm": 18.625,
      "learning_rate": 1e-05,
      "loss": 2.3659,
      "step": 3640
    },
    {
      "epoch": 0.365,
      "grad_norm": 20.125,
      "learning_rate": 1e-05,
      "loss": 2.3389,
      "step": 3650
    },
    {
      "epoch": 0.366,
      "grad_norm": 17.125,
      "learning_rate": 1e-05,
      "loss": 2.6051,
      "step": 3660
    },
    {
      "epoch": 0.367,
      "grad_norm": 20.5,
      "learning_rate": 1e-05,
      "loss": 2.7483,
      "step": 3670
    },
    {
      "epoch": 0.368,
      "grad_norm": 22.75,
      "learning_rate": 1e-05,
      "loss": 2.6497,
      "step": 3680
    },
    {
      "epoch": 0.369,
      "grad_norm": 17.375,
      "learning_rate": 1e-05,
      "loss": 2.3905,
      "step": 3690
    },
    {
      "epoch": 0.37,
      "grad_norm": 17.5,
      "learning_rate": 1e-05,
      "loss": 2.4356,
      "step": 3700
    },
    {
      "epoch": 0.371,
      "grad_norm": 18.25,
      "learning_rate": 1e-05,
      "loss": 2.6108,
      "step": 3710
    },
    {
      "epoch": 0.372,
      "grad_norm": 20.625,
      "learning_rate": 1e-05,
      "loss": 2.5823,
      "step": 3720
    },
    {
      "epoch": 0.373,
      "grad_norm": 19.375,
      "learning_rate": 1e-05,
      "loss": 2.4461,
      "step": 3730
    },
    {
      "epoch": 0.374,
      "grad_norm": 20.875,
      "learning_rate": 1e-05,
      "loss": 2.4841,
      "step": 3740
    },
    {
      "epoch": 0.375,
      "grad_norm": 20.875,
      "learning_rate": 1e-05,
      "loss": 2.5193,
      "step": 3750
    },
    {
      "epoch": 0.376,
      "grad_norm": 15.8125,
      "learning_rate": 1e-05,
      "loss": 2.4734,
      "step": 3760
    },
    {
      "epoch": 0.377,
      "grad_norm": 19.0,
      "learning_rate": 1e-05,
      "loss": 2.8992,
      "step": 3770
    },
    {
      "epoch": 0.378,
      "grad_norm": 19.0,
      "learning_rate": 1e-05,
      "loss": 2.8839,
      "step": 3780
    },
    {
      "epoch": 0.379,
      "grad_norm": 18.375,
      "learning_rate": 1e-05,
      "loss": 2.481,
      "step": 3790
    },
    {
      "epoch": 0.38,
      "grad_norm": 20.125,
      "learning_rate": 1e-05,
      "loss": 2.3662,
      "step": 3800
    },
    {
      "epoch": 0.381,
      "grad_norm": 18.0,
      "learning_rate": 1e-05,
      "loss": 2.7295,
      "step": 3810
    },
    {
      "epoch": 0.382,
      "grad_norm": 18.0,
      "learning_rate": 1e-05,
      "loss": 2.5414,
      "step": 3820
    },
    {
      "epoch": 0.383,
      "grad_norm": 19.0,
      "learning_rate": 1e-05,
      "loss": 2.4315,
      "step": 3830
    },
    {
      "epoch": 0.384,
      "grad_norm": 19.125,
      "learning_rate": 1e-05,
      "loss": 2.3002,
      "step": 3840
    },
    {
      "epoch": 0.385,
      "grad_norm": 19.125,
      "learning_rate": 1e-05,
      "loss": 2.3859,
      "step": 3850
    },
    {
      "epoch": 0.386,
      "grad_norm": 17.25,
      "learning_rate": 1e-05,
      "loss": 2.3491,
      "step": 3860
    },
    {
      "epoch": 0.387,
      "grad_norm": 14.5625,
      "learning_rate": 1e-05,
      "loss": 2.6115,
      "step": 3870
    },
    {
      "epoch": 0.388,
      "grad_norm": 19.875,
      "learning_rate": 1e-05,
      "loss": 2.6592,
      "step": 3880
    },
    {
      "epoch": 0.389,
      "grad_norm": 17.75,
      "learning_rate": 1e-05,
      "loss": 2.4516,
      "step": 3890
    },
    {
      "epoch": 0.39,
      "grad_norm": 17.875,
      "learning_rate": 1e-05,
      "loss": 2.64,
      "step": 3900
    },
    {
      "epoch": 0.391,
      "grad_norm": 19.875,
      "learning_rate": 1e-05,
      "loss": 2.4264,
      "step": 3910
    },
    {
      "epoch": 0.392,
      "grad_norm": 19.5,
      "learning_rate": 1e-05,
      "loss": 2.8704,
      "step": 3920
    },
    {
      "epoch": 0.393,
      "grad_norm": 17.0,
      "learning_rate": 1e-05,
      "loss": 2.3054,
      "step": 3930
    },
    {
      "epoch": 0.394,
      "grad_norm": 21.5,
      "learning_rate": 1e-05,
      "loss": 2.6544,
      "step": 3940
    },
    {
      "epoch": 0.395,
      "grad_norm": 20.125,
      "learning_rate": 1e-05,
      "loss": 2.5953,
      "step": 3950
    },
    {
      "epoch": 0.396,
      "grad_norm": 14.625,
      "learning_rate": 1e-05,
      "loss": 2.2923,
      "step": 3960
    },
    {
      "epoch": 0.397,
      "grad_norm": 22.75,
      "learning_rate": 1e-05,
      "loss": 2.417,
      "step": 3970
    },
    {
      "epoch": 0.398,
      "grad_norm": 17.5,
      "learning_rate": 1e-05,
      "loss": 2.3225,
      "step": 3980
    },
    {
      "epoch": 0.399,
      "grad_norm": 13.9375,
      "learning_rate": 1e-05,
      "loss": 2.3487,
      "step": 3990
    },
    {
      "epoch": 0.4,
      "grad_norm": 21.25,
      "learning_rate": 1e-05,
      "loss": 2.7811,
      "step": 4000
    },
    {
      "epoch": 0.401,
      "grad_norm": 18.125,
      "learning_rate": 1e-05,
      "loss": 2.2618,
      "step": 4010
    },
    {
      "epoch": 0.402,
      "grad_norm": 19.875,
      "learning_rate": 1e-05,
      "loss": 2.3628,
      "step": 4020
    },
    {
      "epoch": 0.403,
      "grad_norm": 20.25,
      "learning_rate": 1e-05,
      "loss": 2.5471,
      "step": 4030
    },
    {
      "epoch": 0.404,
      "grad_norm": 17.375,
      "learning_rate": 1e-05,
      "loss": 2.627,
      "step": 4040
    },
    {
      "epoch": 0.405,
      "grad_norm": 20.375,
      "learning_rate": 1e-05,
      "loss": 2.5027,
      "step": 4050
    },
    {
      "epoch": 0.406,
      "grad_norm": 17.75,
      "learning_rate": 1e-05,
      "loss": 2.165,
      "step": 4060
    },
    {
      "epoch": 0.407,
      "grad_norm": 20.875,
      "learning_rate": 1e-05,
      "loss": 2.2934,
      "step": 4070
    },
    {
      "epoch": 0.408,
      "grad_norm": 20.0,
      "learning_rate": 1e-05,
      "loss": 2.3613,
      "step": 4080
    },
    {
      "epoch": 0.409,
      "grad_norm": 17.0,
      "learning_rate": 1e-05,
      "loss": 2.3609,
      "step": 4090
    },
    {
      "epoch": 0.41,
      "grad_norm": 20.375,
      "learning_rate": 1e-05,
      "loss": 2.4449,
      "step": 4100
    },
    {
      "epoch": 0.411,
      "grad_norm": 14.75,
      "learning_rate": 1e-05,
      "loss": 2.4496,
      "step": 4110
    },
    {
      "epoch": 0.412,
      "grad_norm": 19.625,
      "learning_rate": 1e-05,
      "loss": 2.4611,
      "step": 4120
    },
    {
      "epoch": 0.413,
      "grad_norm": 16.25,
      "learning_rate": 1e-05,
      "loss": 2.6379,
      "step": 4130
    },
    {
      "epoch": 0.414,
      "grad_norm": 17.625,
      "learning_rate": 1e-05,
      "loss": 2.6281,
      "step": 4140
    },
    {
      "epoch": 0.415,
      "grad_norm": 19.125,
      "learning_rate": 1e-05,
      "loss": 2.5068,
      "step": 4150
    },
    {
      "epoch": 0.416,
      "grad_norm": 20.5,
      "learning_rate": 1e-05,
      "loss": 2.3529,
      "step": 4160
    },
    {
      "epoch": 0.417,
      "grad_norm": 15.6875,
      "learning_rate": 1e-05,
      "loss": 2.4111,
      "step": 4170
    },
    {
      "epoch": 0.418,
      "grad_norm": 20.375,
      "learning_rate": 1e-05,
      "loss": 2.5589,
      "step": 4180
    },
    {
      "epoch": 0.419,
      "grad_norm": 16.375,
      "learning_rate": 1e-05,
      "loss": 2.3388,
      "step": 4190
    },
    {
      "epoch": 0.42,
      "grad_norm": 18.375,
      "learning_rate": 1e-05,
      "loss": 2.1583,
      "step": 4200
    },
    {
      "epoch": 0.421,
      "grad_norm": 16.125,
      "learning_rate": 1e-05,
      "loss": 2.288,
      "step": 4210
    },
    {
      "epoch": 0.422,
      "grad_norm": 16.875,
      "learning_rate": 1e-05,
      "loss": 2.4598,
      "step": 4220
    },
    {
      "epoch": 0.423,
      "grad_norm": 14.5,
      "learning_rate": 1e-05,
      "loss": 2.7717,
      "step": 4230
    },
    {
      "epoch": 0.424,
      "grad_norm": 20.5,
      "learning_rate": 1e-05,
      "loss": 2.6044,
      "step": 4240
    },
    {
      "epoch": 0.425,
      "grad_norm": 19.75,
      "learning_rate": 1e-05,
      "loss": 2.8087,
      "step": 4250
    },
    {
      "epoch": 0.426,
      "grad_norm": 17.125,
      "learning_rate": 1e-05,
      "loss": 2.3632,
      "step": 4260
    },
    {
      "epoch": 0.427,
      "grad_norm": 17.125,
      "learning_rate": 1e-05,
      "loss": 2.3376,
      "step": 4270
    },
    {
      "epoch": 0.428,
      "grad_norm": 20.5,
      "learning_rate": 1e-05,
      "loss": 2.5305,
      "step": 4280
    },
    {
      "epoch": 0.429,
      "grad_norm": 17.75,
      "learning_rate": 1e-05,
      "loss": 2.2954,
      "step": 4290
    },
    {
      "epoch": 0.43,
      "grad_norm": 22.75,
      "learning_rate": 1e-05,
      "loss": 2.5584,
      "step": 4300
    },
    {
      "epoch": 0.431,
      "grad_norm": 20.5,
      "learning_rate": 1e-05,
      "loss": 2.684,
      "step": 4310
    },
    {
      "epoch": 0.432,
      "grad_norm": 19.625,
      "learning_rate": 1e-05,
      "loss": 2.4819,
      "step": 4320
    },
    {
      "epoch": 0.433,
      "grad_norm": 19.125,
      "learning_rate": 1e-05,
      "loss": 2.5119,
      "step": 4330
    },
    {
      "epoch": 0.434,
      "grad_norm": 19.125,
      "learning_rate": 1e-05,
      "loss": 2.5513,
      "step": 4340
    },
    {
      "epoch": 0.435,
      "grad_norm": 19.0,
      "learning_rate": 1e-05,
      "loss": 2.4193,
      "step": 4350
    },
    {
      "epoch": 0.436,
      "grad_norm": 18.625,
      "learning_rate": 1e-05,
      "loss": 2.4771,
      "step": 4360
    },
    {
      "epoch": 0.437,
      "grad_norm": 18.875,
      "learning_rate": 1e-05,
      "loss": 2.3714,
      "step": 4370
    },
    {
      "epoch": 0.438,
      "grad_norm": 19.0,
      "learning_rate": 1e-05,
      "loss": 2.3421,
      "step": 4380
    },
    {
      "epoch": 0.439,
      "grad_norm": 19.875,
      "learning_rate": 1e-05,
      "loss": 2.7361,
      "step": 4390
    },
    {
      "epoch": 0.44,
      "grad_norm": 19.0,
      "learning_rate": 1e-05,
      "loss": 2.2974,
      "step": 4400
    },
    {
      "epoch": 0.441,
      "grad_norm": 22.25,
      "learning_rate": 1e-05,
      "loss": 2.3927,
      "step": 4410
    },
    {
      "epoch": 0.442,
      "grad_norm": 20.625,
      "learning_rate": 1e-05,
      "loss": 2.5367,
      "step": 4420
    },
    {
      "epoch": 0.443,
      "grad_norm": 17.25,
      "learning_rate": 1e-05,
      "loss": 2.6905,
      "step": 4430
    },
    {
      "epoch": 0.444,
      "grad_norm": 17.75,
      "learning_rate": 1e-05,
      "loss": 2.7203,
      "step": 4440
    },
    {
      "epoch": 0.445,
      "grad_norm": 19.0,
      "learning_rate": 1e-05,
      "loss": 2.4527,
      "step": 4450
    },
    {
      "epoch": 0.446,
      "grad_norm": 24.875,
      "learning_rate": 1e-05,
      "loss": 2.6364,
      "step": 4460
    },
    {
      "epoch": 0.447,
      "grad_norm": 18.625,
      "learning_rate": 1e-05,
      "loss": 2.1915,
      "step": 4470
    },
    {
      "epoch": 0.448,
      "grad_norm": 19.875,
      "learning_rate": 1e-05,
      "loss": 2.5231,
      "step": 4480
    },
    {
      "epoch": 0.449,
      "grad_norm": 17.75,
      "learning_rate": 1e-05,
      "loss": 2.4303,
      "step": 4490
    },
    {
      "epoch": 0.45,
      "grad_norm": 16.75,
      "learning_rate": 1e-05,
      "loss": 2.2239,
      "step": 4500
    },
    {
      "epoch": 0.451,
      "grad_norm": 19.0,
      "learning_rate": 1e-05,
      "loss": 2.4674,
      "step": 4510
    },
    {
      "epoch": 0.452,
      "grad_norm": 15.625,
      "learning_rate": 1e-05,
      "loss": 2.2829,
      "step": 4520
    },
    {
      "epoch": 0.453,
      "grad_norm": 18.875,
      "learning_rate": 1e-05,
      "loss": 2.3622,
      "step": 4530
    },
    {
      "epoch": 0.454,
      "grad_norm": 20.25,
      "learning_rate": 1e-05,
      "loss": 2.6612,
      "step": 4540
    },
    {
      "epoch": 0.455,
      "grad_norm": 19.0,
      "learning_rate": 1e-05,
      "loss": 2.4915,
      "step": 4550
    },
    {
      "epoch": 0.456,
      "grad_norm": 19.5,
      "learning_rate": 1e-05,
      "loss": 2.6999,
      "step": 4560
    },
    {
      "epoch": 0.457,
      "grad_norm": 16.25,
      "learning_rate": 1e-05,
      "loss": 2.4066,
      "step": 4570
    },
    {
      "epoch": 0.458,
      "grad_norm": 20.0,
      "learning_rate": 1e-05,
      "loss": 2.5309,
      "step": 4580
    },
    {
      "epoch": 0.459,
      "grad_norm": 23.25,
      "learning_rate": 1e-05,
      "loss": 2.4408,
      "step": 4590
    },
    {
      "epoch": 0.46,
      "grad_norm": 15.1875,
      "learning_rate": 1e-05,
      "loss": 2.2914,
      "step": 4600
    },
    {
      "epoch": 0.461,
      "grad_norm": 15.375,
      "learning_rate": 1e-05,
      "loss": 2.8961,
      "step": 4610
    },
    {
      "epoch": 0.462,
      "grad_norm": 20.75,
      "learning_rate": 1e-05,
      "loss": 2.4698,
      "step": 4620
    },
    {
      "epoch": 0.463,
      "grad_norm": 19.625,
      "learning_rate": 1e-05,
      "loss": 2.3865,
      "step": 4630
    },
    {
      "epoch": 0.464,
      "grad_norm": 22.0,
      "learning_rate": 1e-05,
      "loss": 2.6814,
      "step": 4640
    },
    {
      "epoch": 0.465,
      "grad_norm": 18.625,
      "learning_rate": 1e-05,
      "loss": 2.5154,
      "step": 4650
    },
    {
      "epoch": 0.466,
      "grad_norm": 18.375,
      "learning_rate": 1e-05,
      "loss": 2.4509,
      "step": 4660
    },
    {
      "epoch": 0.467,
      "grad_norm": 20.75,
      "learning_rate": 1e-05,
      "loss": 2.5722,
      "step": 4670
    },
    {
      "epoch": 0.468,
      "grad_norm": 19.125,
      "learning_rate": 1e-05,
      "loss": 2.5152,
      "step": 4680
    },
    {
      "epoch": 0.469,
      "grad_norm": 18.125,
      "learning_rate": 1e-05,
      "loss": 2.3603,
      "step": 4690
    },
    {
      "epoch": 0.47,
      "grad_norm": 17.25,
      "learning_rate": 1e-05,
      "loss": 2.4957,
      "step": 4700
    },
    {
      "epoch": 0.471,
      "grad_norm": 16.75,
      "learning_rate": 1e-05,
      "loss": 2.5935,
      "step": 4710
    },
    {
      "epoch": 0.472,
      "grad_norm": 17.25,
      "learning_rate": 1e-05,
      "loss": 2.5148,
      "step": 4720
    },
    {
      "epoch": 0.473,
      "grad_norm": 24.375,
      "learning_rate": 1e-05,
      "loss": 2.8182,
      "step": 4730
    },
    {
      "epoch": 0.474,
      "grad_norm": 19.125,
      "learning_rate": 1e-05,
      "loss": 2.3348,
      "step": 4740
    },
    {
      "epoch": 0.475,
      "grad_norm": 18.5,
      "learning_rate": 1e-05,
      "loss": 2.4282,
      "step": 4750
    },
    {
      "epoch": 0.476,
      "grad_norm": 19.75,
      "learning_rate": 1e-05,
      "loss": 2.5341,
      "step": 4760
    },
    {
      "epoch": 0.477,
      "grad_norm": 18.0,
      "learning_rate": 1e-05,
      "loss": 2.4703,
      "step": 4770
    },
    {
      "epoch": 0.478,
      "grad_norm": 19.25,
      "learning_rate": 1e-05,
      "loss": 2.6941,
      "step": 4780
    },
    {
      "epoch": 0.479,
      "grad_norm": 17.625,
      "learning_rate": 1e-05,
      "loss": 2.5162,
      "step": 4790
    },
    {
      "epoch": 0.48,
      "grad_norm": 19.0,
      "learning_rate": 1e-05,
      "loss": 2.5827,
      "step": 4800
    },
    {
      "epoch": 0.481,
      "grad_norm": 19.75,
      "learning_rate": 1e-05,
      "loss": 2.2945,
      "step": 4810
    },
    {
      "epoch": 0.482,
      "grad_norm": 18.125,
      "learning_rate": 1e-05,
      "loss": 2.2565,
      "step": 4820
    },
    {
      "epoch": 0.483,
      "grad_norm": 17.25,
      "learning_rate": 1e-05,
      "loss": 2.6442,
      "step": 4830
    },
    {
      "epoch": 0.484,
      "grad_norm": 16.5,
      "learning_rate": 1e-05,
      "loss": 2.5784,
      "step": 4840
    },
    {
      "epoch": 0.485,
      "grad_norm": 16.25,
      "learning_rate": 1e-05,
      "loss": 2.2645,
      "step": 4850
    },
    {
      "epoch": 0.486,
      "grad_norm": 18.5,
      "learning_rate": 1e-05,
      "loss": 2.4722,
      "step": 4860
    },
    {
      "epoch": 0.487,
      "grad_norm": 19.5,
      "learning_rate": 1e-05,
      "loss": 2.4258,
      "step": 4870
    },
    {
      "epoch": 0.488,
      "grad_norm": 18.125,
      "learning_rate": 1e-05,
      "loss": 2.5565,
      "step": 4880
    },
    {
      "epoch": 0.489,
      "grad_norm": 20.875,
      "learning_rate": 1e-05,
      "loss": 2.4211,
      "step": 4890
    },
    {
      "epoch": 0.49,
      "grad_norm": 20.25,
      "learning_rate": 1e-05,
      "loss": 2.3477,
      "step": 4900
    },
    {
      "epoch": 0.491,
      "grad_norm": 17.25,
      "learning_rate": 1e-05,
      "loss": 2.432,
      "step": 4910
    },
    {
      "epoch": 0.492,
      "grad_norm": 15.9375,
      "learning_rate": 1e-05,
      "loss": 2.5616,
      "step": 4920
    },
    {
      "epoch": 0.493,
      "grad_norm": 20.375,
      "learning_rate": 1e-05,
      "loss": 2.3926,
      "step": 4930
    },
    {
      "epoch": 0.494,
      "grad_norm": 21.0,
      "learning_rate": 1e-05,
      "loss": 2.6728,
| "step": 4940 |
| }, |
| { |
| "epoch": 0.495, |
| "grad_norm": 20.375, |
| "learning_rate": 1e-05, |
| "loss": 2.7499, |
| "step": 4950 |
| }, |
| { |
| "epoch": 0.496, |
| "grad_norm": 15.8125, |
| "learning_rate": 1e-05, |
| "loss": 2.7322, |
| "step": 4960 |
| }, |
| { |
| "epoch": 0.497, |
| "grad_norm": 20.0, |
| "learning_rate": 1e-05, |
| "loss": 2.3811, |
| "step": 4970 |
| }, |
| { |
| "epoch": 0.498, |
| "grad_norm": 18.875, |
| "learning_rate": 1e-05, |
| "loss": 2.6949, |
| "step": 4980 |
| }, |
| { |
| "epoch": 0.499, |
| "grad_norm": 22.625, |
| "learning_rate": 1e-05, |
| "loss": 2.7396, |
| "step": 4990 |
| }, |
| { |
| "epoch": 0.5, |
| "grad_norm": 18.375, |
| "learning_rate": 1e-05, |
| "loss": 2.7146, |
| "step": 5000 |
| }, |
| { |
| "epoch": 0.501, |
| "grad_norm": 17.875, |
| "learning_rate": 1e-05, |
| "loss": 2.3698, |
| "step": 5010 |
| }, |
| { |
| "epoch": 0.502, |
| "grad_norm": 17.5, |
| "learning_rate": 1e-05, |
| "loss": 2.5107, |
| "step": 5020 |
| }, |
| { |
| "epoch": 0.503, |
| "grad_norm": 16.625, |
| "learning_rate": 1e-05, |
| "loss": 2.0926, |
| "step": 5030 |
| }, |
| { |
| "epoch": 0.504, |
| "grad_norm": 19.5, |
| "learning_rate": 1e-05, |
| "loss": 2.6375, |
| "step": 5040 |
| }, |
| { |
| "epoch": 0.505, |
| "grad_norm": 20.5, |
| "learning_rate": 1e-05, |
| "loss": 2.6672, |
| "step": 5050 |
| }, |
| { |
| "epoch": 0.506, |
| "grad_norm": 22.5, |
| "learning_rate": 1e-05, |
| "loss": 2.5423, |
| "step": 5060 |
| }, |
| { |
| "epoch": 0.507, |
| "grad_norm": 17.0, |
| "learning_rate": 1e-05, |
| "loss": 2.294, |
| "step": 5070 |
| }, |
| { |
| "epoch": 0.508, |
| "grad_norm": 23.0, |
| "learning_rate": 1e-05, |
| "loss": 2.2487, |
| "step": 5080 |
| }, |
| { |
| "epoch": 0.509, |
| "grad_norm": 20.5, |
| "learning_rate": 1e-05, |
| "loss": 2.5192, |
| "step": 5090 |
| }, |
| { |
| "epoch": 0.51, |
| "grad_norm": 21.75, |
| "learning_rate": 1e-05, |
| "loss": 2.7941, |
| "step": 5100 |
| }, |
| { |
| "epoch": 0.511, |
| "grad_norm": 19.375, |
| "learning_rate": 1e-05, |
| "loss": 2.58, |
| "step": 5110 |
| }, |
| { |
| "epoch": 0.512, |
| "grad_norm": 13.5625, |
| "learning_rate": 1e-05, |
| "loss": 2.3418, |
| "step": 5120 |
| }, |
| { |
| "epoch": 0.513, |
| "grad_norm": 16.5, |
| "learning_rate": 1e-05, |
| "loss": 2.4757, |
| "step": 5130 |
| }, |
| { |
| "epoch": 0.514, |
| "grad_norm": 16.125, |
| "learning_rate": 1e-05, |
| "loss": 2.4808, |
| "step": 5140 |
| }, |
| { |
| "epoch": 0.515, |
| "grad_norm": 19.75, |
| "learning_rate": 1e-05, |
| "loss": 2.6622, |
| "step": 5150 |
| }, |
| { |
| "epoch": 0.516, |
| "grad_norm": 16.875, |
| "learning_rate": 1e-05, |
| "loss": 2.3826, |
| "step": 5160 |
| }, |
| { |
| "epoch": 0.517, |
| "grad_norm": 16.375, |
| "learning_rate": 1e-05, |
| "loss": 2.3865, |
| "step": 5170 |
| }, |
| { |
| "epoch": 0.518, |
| "grad_norm": 18.875, |
| "learning_rate": 1e-05, |
| "loss": 2.2439, |
| "step": 5180 |
| }, |
| { |
| "epoch": 0.519, |
| "grad_norm": 19.875, |
| "learning_rate": 1e-05, |
| "loss": 2.2499, |
| "step": 5190 |
| }, |
| { |
| "epoch": 0.52, |
| "grad_norm": 20.25, |
| "learning_rate": 1e-05, |
| "loss": 2.5951, |
| "step": 5200 |
| }, |
| { |
| "epoch": 0.521, |
| "grad_norm": 18.5, |
| "learning_rate": 1e-05, |
| "loss": 2.5181, |
| "step": 5210 |
| }, |
| { |
| "epoch": 0.522, |
| "grad_norm": 20.75, |
| "learning_rate": 1e-05, |
| "loss": 2.8465, |
| "step": 5220 |
| }, |
| { |
| "epoch": 0.523, |
| "grad_norm": 17.75, |
| "learning_rate": 1e-05, |
| "loss": 2.1901, |
| "step": 5230 |
| }, |
| { |
| "epoch": 0.524, |
| "grad_norm": 18.5, |
| "learning_rate": 1e-05, |
| "loss": 2.4548, |
| "step": 5240 |
| }, |
| { |
| "epoch": 0.525, |
| "grad_norm": 16.25, |
| "learning_rate": 1e-05, |
| "loss": 2.8651, |
| "step": 5250 |
| }, |
| { |
| "epoch": 0.526, |
| "grad_norm": 15.9375, |
| "learning_rate": 1e-05, |
| "loss": 2.3295, |
| "step": 5260 |
| }, |
| { |
| "epoch": 0.527, |
| "grad_norm": 17.0, |
| "learning_rate": 1e-05, |
| "loss": 2.4822, |
| "step": 5270 |
| }, |
| { |
| "epoch": 0.528, |
| "grad_norm": 21.5, |
| "learning_rate": 1e-05, |
| "loss": 2.5154, |
| "step": 5280 |
| }, |
| { |
| "epoch": 0.529, |
| "grad_norm": 19.25, |
| "learning_rate": 1e-05, |
| "loss": 2.5005, |
| "step": 5290 |
| }, |
| { |
| "epoch": 0.53, |
| "grad_norm": 20.75, |
| "learning_rate": 1e-05, |
| "loss": 2.7491, |
| "step": 5300 |
| }, |
| { |
| "epoch": 0.531, |
| "grad_norm": 18.625, |
| "learning_rate": 1e-05, |
| "loss": 2.5266, |
| "step": 5310 |
| }, |
| { |
| "epoch": 0.532, |
| "grad_norm": 21.25, |
| "learning_rate": 1e-05, |
| "loss": 2.8332, |
| "step": 5320 |
| }, |
| { |
| "epoch": 0.533, |
| "grad_norm": 20.5, |
| "learning_rate": 1e-05, |
| "loss": 2.8523, |
| "step": 5330 |
| }, |
| { |
| "epoch": 0.534, |
| "grad_norm": 16.75, |
| "learning_rate": 1e-05, |
| "loss": 2.4542, |
| "step": 5340 |
| }, |
| { |
| "epoch": 0.535, |
| "grad_norm": 20.0, |
| "learning_rate": 1e-05, |
| "loss": 2.7365, |
| "step": 5350 |
| }, |
| { |
| "epoch": 0.536, |
| "grad_norm": 17.875, |
| "learning_rate": 1e-05, |
| "loss": 2.6887, |
| "step": 5360 |
| }, |
| { |
| "epoch": 0.537, |
| "grad_norm": 15.5625, |
| "learning_rate": 1e-05, |
| "loss": 2.6409, |
| "step": 5370 |
| }, |
| { |
| "epoch": 0.538, |
| "grad_norm": 18.875, |
| "learning_rate": 1e-05, |
| "loss": 2.5305, |
| "step": 5380 |
| }, |
| { |
| "epoch": 0.539, |
| "grad_norm": 19.5, |
| "learning_rate": 1e-05, |
| "loss": 2.82, |
| "step": 5390 |
| }, |
| { |
| "epoch": 0.54, |
| "grad_norm": 18.375, |
| "learning_rate": 1e-05, |
| "loss": 2.5188, |
| "step": 5400 |
| }, |
| { |
| "epoch": 0.541, |
| "grad_norm": 16.5, |
| "learning_rate": 1e-05, |
| "loss": 2.6195, |
| "step": 5410 |
| }, |
| { |
| "epoch": 0.542, |
| "grad_norm": 17.75, |
| "learning_rate": 1e-05, |
| "loss": 2.4742, |
| "step": 5420 |
| }, |
| { |
| "epoch": 0.543, |
| "grad_norm": 20.25, |
| "learning_rate": 1e-05, |
| "loss": 2.4407, |
| "step": 5430 |
| }, |
| { |
| "epoch": 0.544, |
| "grad_norm": 18.625, |
| "learning_rate": 1e-05, |
| "loss": 2.6211, |
| "step": 5440 |
| }, |
| { |
| "epoch": 0.545, |
| "grad_norm": 17.125, |
| "learning_rate": 1e-05, |
| "loss": 2.5502, |
| "step": 5450 |
| }, |
| { |
| "epoch": 0.546, |
| "grad_norm": 20.125, |
| "learning_rate": 1e-05, |
| "loss": 2.4467, |
| "step": 5460 |
| }, |
| { |
| "epoch": 0.547, |
| "grad_norm": 16.875, |
| "learning_rate": 1e-05, |
| "loss": 2.3672, |
| "step": 5470 |
| }, |
| { |
| "epoch": 0.548, |
| "grad_norm": 16.125, |
| "learning_rate": 1e-05, |
| "loss": 2.2363, |
| "step": 5480 |
| }, |
| { |
| "epoch": 0.549, |
| "grad_norm": 19.125, |
| "learning_rate": 1e-05, |
| "loss": 2.4534, |
| "step": 5490 |
| }, |
| { |
| "epoch": 0.55, |
| "grad_norm": 21.375, |
| "learning_rate": 1e-05, |
| "loss": 2.5279, |
| "step": 5500 |
| }, |
| { |
| "epoch": 0.551, |
| "grad_norm": 16.75, |
| "learning_rate": 1e-05, |
| "loss": 2.4124, |
| "step": 5510 |
| }, |
| { |
| "epoch": 0.552, |
| "grad_norm": 21.125, |
| "learning_rate": 1e-05, |
| "loss": 2.528, |
| "step": 5520 |
| }, |
| { |
| "epoch": 0.553, |
| "grad_norm": 19.625, |
| "learning_rate": 1e-05, |
| "loss": 2.3565, |
| "step": 5530 |
| }, |
| { |
| "epoch": 0.554, |
| "grad_norm": 15.375, |
| "learning_rate": 1e-05, |
| "loss": 2.3215, |
| "step": 5540 |
| }, |
| { |
| "epoch": 0.555, |
| "grad_norm": 19.625, |
| "learning_rate": 1e-05, |
| "loss": 2.2912, |
| "step": 5550 |
| }, |
| { |
| "epoch": 0.556, |
| "grad_norm": 21.0, |
| "learning_rate": 1e-05, |
| "loss": 2.526, |
| "step": 5560 |
| }, |
| { |
| "epoch": 0.557, |
| "grad_norm": 24.25, |
| "learning_rate": 1e-05, |
| "loss": 2.6994, |
| "step": 5570 |
| }, |
| { |
| "epoch": 0.558, |
| "grad_norm": 19.5, |
| "learning_rate": 1e-05, |
| "loss": 2.5592, |
| "step": 5580 |
| }, |
| { |
| "epoch": 0.559, |
| "grad_norm": 18.125, |
| "learning_rate": 1e-05, |
| "loss": 2.3593, |
| "step": 5590 |
| }, |
| { |
| "epoch": 0.56, |
| "grad_norm": 19.25, |
| "learning_rate": 1e-05, |
| "loss": 2.6465, |
| "step": 5600 |
| }, |
| { |
| "epoch": 0.561, |
| "grad_norm": 20.125, |
| "learning_rate": 1e-05, |
| "loss": 2.5188, |
| "step": 5610 |
| }, |
| { |
| "epoch": 0.562, |
| "grad_norm": 17.875, |
| "learning_rate": 1e-05, |
| "loss": 2.5507, |
| "step": 5620 |
| }, |
| { |
| "epoch": 0.563, |
| "grad_norm": 13.5625, |
| "learning_rate": 1e-05, |
| "loss": 2.4442, |
| "step": 5630 |
| }, |
| { |
| "epoch": 0.564, |
| "grad_norm": 15.625, |
| "learning_rate": 1e-05, |
| "loss": 2.4114, |
| "step": 5640 |
| }, |
| { |
| "epoch": 0.565, |
| "grad_norm": 19.625, |
| "learning_rate": 1e-05, |
| "loss": 2.6847, |
| "step": 5650 |
| }, |
| { |
| "epoch": 0.566, |
| "grad_norm": 20.375, |
| "learning_rate": 1e-05, |
| "loss": 2.5711, |
| "step": 5660 |
| }, |
| { |
| "epoch": 0.567, |
| "grad_norm": 17.625, |
| "learning_rate": 1e-05, |
| "loss": 2.5364, |
| "step": 5670 |
| }, |
| { |
| "epoch": 0.568, |
| "grad_norm": 20.375, |
| "learning_rate": 1e-05, |
| "loss": 2.3055, |
| "step": 5680 |
| }, |
| { |
| "epoch": 0.569, |
| "grad_norm": 18.375, |
| "learning_rate": 1e-05, |
| "loss": 2.5007, |
| "step": 5690 |
| }, |
| { |
| "epoch": 0.57, |
| "grad_norm": 20.625, |
| "learning_rate": 1e-05, |
| "loss": 2.3948, |
| "step": 5700 |
| }, |
| { |
| "epoch": 0.571, |
| "grad_norm": 16.75, |
| "learning_rate": 1e-05, |
| "loss": 2.5915, |
| "step": 5710 |
| }, |
| { |
| "epoch": 0.572, |
| "grad_norm": 19.5, |
| "learning_rate": 1e-05, |
| "loss": 2.4658, |
| "step": 5720 |
| }, |
| { |
| "epoch": 0.573, |
| "grad_norm": 18.125, |
| "learning_rate": 1e-05, |
| "loss": 2.4869, |
| "step": 5730 |
| }, |
| { |
| "epoch": 0.574, |
| "grad_norm": 19.25, |
| "learning_rate": 1e-05, |
| "loss": 2.5369, |
| "step": 5740 |
| }, |
| { |
| "epoch": 0.575, |
| "grad_norm": 19.875, |
| "learning_rate": 1e-05, |
| "loss": 2.6827, |
| "step": 5750 |
| }, |
| { |
| "epoch": 0.576, |
| "grad_norm": 19.375, |
| "learning_rate": 1e-05, |
| "loss": 2.289, |
| "step": 5760 |
| }, |
| { |
| "epoch": 0.577, |
| "grad_norm": 18.75, |
| "learning_rate": 1e-05, |
| "loss": 2.7057, |
| "step": 5770 |
| }, |
| { |
| "epoch": 0.578, |
| "grad_norm": 21.0, |
| "learning_rate": 1e-05, |
| "loss": 2.4836, |
| "step": 5780 |
| }, |
| { |
| "epoch": 0.579, |
| "grad_norm": 20.875, |
| "learning_rate": 1e-05, |
| "loss": 2.5267, |
| "step": 5790 |
| }, |
| { |
| "epoch": 0.58, |
| "grad_norm": 18.5, |
| "learning_rate": 1e-05, |
| "loss": 2.4439, |
| "step": 5800 |
| }, |
| { |
| "epoch": 0.581, |
| "grad_norm": 20.25, |
| "learning_rate": 1e-05, |
| "loss": 2.6057, |
| "step": 5810 |
| }, |
| { |
| "epoch": 0.582, |
| "grad_norm": 16.25, |
| "learning_rate": 1e-05, |
| "loss": 2.2462, |
| "step": 5820 |
| }, |
| { |
| "epoch": 0.583, |
| "grad_norm": 22.25, |
| "learning_rate": 1e-05, |
| "loss": 2.663, |
| "step": 5830 |
| }, |
| { |
| "epoch": 0.584, |
| "grad_norm": 17.875, |
| "learning_rate": 1e-05, |
| "loss": 2.1693, |
| "step": 5840 |
| }, |
| { |
| "epoch": 0.585, |
| "grad_norm": 21.375, |
| "learning_rate": 1e-05, |
| "loss": 2.3145, |
| "step": 5850 |
| }, |
| { |
| "epoch": 0.586, |
| "grad_norm": 19.0, |
| "learning_rate": 1e-05, |
| "loss": 2.5249, |
| "step": 5860 |
| }, |
| { |
| "epoch": 0.587, |
| "grad_norm": 18.375, |
| "learning_rate": 1e-05, |
| "loss": 2.4747, |
| "step": 5870 |
| }, |
| { |
| "epoch": 0.588, |
| "grad_norm": 17.625, |
| "learning_rate": 1e-05, |
| "loss": 2.5228, |
| "step": 5880 |
| }, |
| { |
| "epoch": 0.589, |
| "grad_norm": 17.25, |
| "learning_rate": 1e-05, |
| "loss": 2.6576, |
| "step": 5890 |
| }, |
| { |
| "epoch": 0.59, |
| "grad_norm": 17.5, |
| "learning_rate": 1e-05, |
| "loss": 2.8005, |
| "step": 5900 |
| }, |
| { |
| "epoch": 0.591, |
| "grad_norm": 18.5, |
| "learning_rate": 1e-05, |
| "loss": 2.5351, |
| "step": 5910 |
| }, |
| { |
| "epoch": 0.592, |
| "grad_norm": 22.375, |
| "learning_rate": 1e-05, |
| "loss": 2.5083, |
| "step": 5920 |
| }, |
| { |
| "epoch": 0.593, |
| "grad_norm": 15.5625, |
| "learning_rate": 1e-05, |
| "loss": 2.5013, |
| "step": 5930 |
| }, |
| { |
| "epoch": 0.594, |
| "grad_norm": 16.875, |
| "learning_rate": 1e-05, |
| "loss": 2.2795, |
| "step": 5940 |
| }, |
| { |
| "epoch": 0.595, |
| "grad_norm": 22.375, |
| "learning_rate": 1e-05, |
| "loss": 2.6491, |
| "step": 5950 |
| }, |
| { |
| "epoch": 0.596, |
| "grad_norm": 17.625, |
| "learning_rate": 1e-05, |
| "loss": 2.4093, |
| "step": 5960 |
| }, |
| { |
| "epoch": 0.597, |
| "grad_norm": 16.625, |
| "learning_rate": 1e-05, |
| "loss": 2.5612, |
| "step": 5970 |
| }, |
| { |
| "epoch": 0.598, |
| "grad_norm": 21.0, |
| "learning_rate": 1e-05, |
| "loss": 2.5917, |
| "step": 5980 |
| }, |
| { |
| "epoch": 0.599, |
| "grad_norm": 20.375, |
| "learning_rate": 1e-05, |
| "loss": 2.4297, |
| "step": 5990 |
| }, |
| { |
| "epoch": 0.6, |
| "grad_norm": 15.9375, |
| "learning_rate": 1e-05, |
| "loss": 2.3789, |
| "step": 6000 |
| }, |
| { |
| "epoch": 0.601, |
| "grad_norm": 19.375, |
| "learning_rate": 1e-05, |
| "loss": 2.3833, |
| "step": 6010 |
| }, |
| { |
| "epoch": 0.602, |
| "grad_norm": 19.0, |
| "learning_rate": 1e-05, |
| "loss": 2.616, |
| "step": 6020 |
| }, |
| { |
| "epoch": 0.603, |
| "grad_norm": 18.75, |
| "learning_rate": 1e-05, |
| "loss": 2.1297, |
| "step": 6030 |
| }, |
| { |
| "epoch": 0.604, |
| "grad_norm": 18.625, |
| "learning_rate": 1e-05, |
| "loss": 2.8368, |
| "step": 6040 |
| }, |
| { |
| "epoch": 0.605, |
| "grad_norm": 16.625, |
| "learning_rate": 1e-05, |
| "loss": 2.5996, |
| "step": 6050 |
| }, |
| { |
| "epoch": 0.606, |
| "grad_norm": 18.875, |
| "learning_rate": 1e-05, |
| "loss": 2.5608, |
| "step": 6060 |
| }, |
| { |
| "epoch": 0.607, |
| "grad_norm": 17.5, |
| "learning_rate": 1e-05, |
| "loss": 2.2706, |
| "step": 6070 |
| }, |
| { |
| "epoch": 0.608, |
| "grad_norm": 19.0, |
| "learning_rate": 1e-05, |
| "loss": 2.4243, |
| "step": 6080 |
| }, |
| { |
| "epoch": 0.609, |
| "grad_norm": 17.0, |
| "learning_rate": 1e-05, |
| "loss": 2.4929, |
| "step": 6090 |
| }, |
| { |
| "epoch": 0.61, |
| "grad_norm": 16.5, |
| "learning_rate": 1e-05, |
| "loss": 2.5155, |
| "step": 6100 |
| }, |
| { |
| "epoch": 0.611, |
| "grad_norm": 25.25, |
| "learning_rate": 1e-05, |
| "loss": 2.4735, |
| "step": 6110 |
| }, |
| { |
| "epoch": 0.612, |
| "grad_norm": 22.75, |
| "learning_rate": 1e-05, |
| "loss": 2.3251, |
| "step": 6120 |
| }, |
| { |
| "epoch": 0.613, |
| "grad_norm": 16.875, |
| "learning_rate": 1e-05, |
| "loss": 2.5207, |
| "step": 6130 |
| }, |
| { |
| "epoch": 0.614, |
| "grad_norm": 19.375, |
| "learning_rate": 1e-05, |
| "loss": 2.4789, |
| "step": 6140 |
| }, |
| { |
| "epoch": 0.615, |
| "grad_norm": 15.375, |
| "learning_rate": 1e-05, |
| "loss": 2.5223, |
| "step": 6150 |
| }, |
| { |
| "epoch": 0.616, |
| "grad_norm": 19.125, |
| "learning_rate": 1e-05, |
| "loss": 2.179, |
| "step": 6160 |
| }, |
| { |
| "epoch": 0.617, |
| "grad_norm": 17.625, |
| "learning_rate": 1e-05, |
| "loss": 2.5142, |
| "step": 6170 |
| }, |
| { |
| "epoch": 0.618, |
| "grad_norm": 21.0, |
| "learning_rate": 1e-05, |
| "loss": 2.5222, |
| "step": 6180 |
| }, |
| { |
| "epoch": 0.619, |
| "grad_norm": 13.375, |
| "learning_rate": 1e-05, |
| "loss": 2.3094, |
| "step": 6190 |
| }, |
| { |
| "epoch": 0.62, |
| "grad_norm": 15.75, |
| "learning_rate": 1e-05, |
| "loss": 2.4234, |
| "step": 6200 |
| }, |
| { |
| "epoch": 0.621, |
| "grad_norm": 20.125, |
| "learning_rate": 1e-05, |
| "loss": 2.245, |
| "step": 6210 |
| }, |
| { |
| "epoch": 0.622, |
| "grad_norm": 18.5, |
| "learning_rate": 1e-05, |
| "loss": 2.8233, |
| "step": 6220 |
| }, |
| { |
| "epoch": 0.623, |
| "grad_norm": 19.125, |
| "learning_rate": 1e-05, |
| "loss": 2.5606, |
| "step": 6230 |
| }, |
| { |
| "epoch": 0.624, |
| "grad_norm": 20.125, |
| "learning_rate": 1e-05, |
| "loss": 2.7335, |
| "step": 6240 |
| }, |
| { |
| "epoch": 0.625, |
| "grad_norm": 19.25, |
| "learning_rate": 1e-05, |
| "loss": 2.3693, |
| "step": 6250 |
| }, |
| { |
| "epoch": 0.626, |
| "grad_norm": 15.4375, |
| "learning_rate": 1e-05, |
| "loss": 2.7085, |
| "step": 6260 |
| }, |
| { |
| "epoch": 0.627, |
| "grad_norm": 22.25, |
| "learning_rate": 1e-05, |
| "loss": 2.4044, |
| "step": 6270 |
| }, |
| { |
| "epoch": 0.628, |
| "grad_norm": 20.875, |
| "learning_rate": 1e-05, |
| "loss": 2.6619, |
| "step": 6280 |
| }, |
| { |
| "epoch": 0.629, |
| "grad_norm": 17.625, |
| "learning_rate": 1e-05, |
| "loss": 2.36, |
| "step": 6290 |
| }, |
| { |
| "epoch": 0.63, |
| "grad_norm": 19.375, |
| "learning_rate": 1e-05, |
| "loss": 2.4117, |
| "step": 6300 |
| }, |
| { |
| "epoch": 0.631, |
| "grad_norm": 18.25, |
| "learning_rate": 1e-05, |
| "loss": 2.4329, |
| "step": 6310 |
| }, |
| { |
| "epoch": 0.632, |
| "grad_norm": 23.5, |
| "learning_rate": 1e-05, |
| "loss": 2.4537, |
| "step": 6320 |
| }, |
| { |
| "epoch": 0.633, |
| "grad_norm": 16.875, |
| "learning_rate": 1e-05, |
| "loss": 2.3511, |
| "step": 6330 |
| }, |
| { |
| "epoch": 0.634, |
| "grad_norm": 15.3125, |
| "learning_rate": 1e-05, |
| "loss": 2.444, |
| "step": 6340 |
| }, |
| { |
| "epoch": 0.635, |
| "grad_norm": 17.0, |
| "learning_rate": 1e-05, |
| "loss": 2.3465, |
| "step": 6350 |
| }, |
| { |
| "epoch": 0.636, |
| "grad_norm": 15.375, |
| "learning_rate": 1e-05, |
| "loss": 2.4732, |
| "step": 6360 |
| }, |
| { |
| "epoch": 0.637, |
| "grad_norm": 18.75, |
| "learning_rate": 1e-05, |
| "loss": 2.3568, |
| "step": 6370 |
| }, |
| { |
| "epoch": 0.638, |
| "grad_norm": 19.0, |
| "learning_rate": 1e-05, |
| "loss": 2.6086, |
| "step": 6380 |
| }, |
| { |
| "epoch": 0.639, |
| "grad_norm": 23.5, |
| "learning_rate": 1e-05, |
| "loss": 2.5556, |
| "step": 6390 |
| }, |
| { |
| "epoch": 0.64, |
| "grad_norm": 17.875, |
| "learning_rate": 1e-05, |
| "loss": 2.3932, |
| "step": 6400 |
| }, |
| { |
| "epoch": 0.641, |
| "grad_norm": 22.0, |
| "learning_rate": 1e-05, |
| "loss": 2.3085, |
| "step": 6410 |
| }, |
| { |
| "epoch": 0.642, |
| "grad_norm": 19.875, |
| "learning_rate": 1e-05, |
| "loss": 2.5747, |
| "step": 6420 |
| }, |
| { |
| "epoch": 0.643, |
| "grad_norm": 21.25, |
| "learning_rate": 1e-05, |
| "loss": 2.5011, |
| "step": 6430 |
| }, |
| { |
| "epoch": 0.644, |
| "grad_norm": 18.125, |
| "learning_rate": 1e-05, |
| "loss": 2.5629, |
| "step": 6440 |
| }, |
| { |
| "epoch": 0.645, |
| "grad_norm": 14.8125, |
| "learning_rate": 1e-05, |
| "loss": 2.4009, |
| "step": 6450 |
| }, |
| { |
| "epoch": 0.646, |
| "grad_norm": 21.0, |
| "learning_rate": 1e-05, |
| "loss": 2.2556, |
| "step": 6460 |
| }, |
| { |
| "epoch": 0.647, |
| "grad_norm": 16.5, |
| "learning_rate": 1e-05, |
| "loss": 2.3418, |
| "step": 6470 |
| }, |
| { |
| "epoch": 0.648, |
| "grad_norm": 20.25, |
| "learning_rate": 1e-05, |
| "loss": 2.5985, |
| "step": 6480 |
| }, |
| { |
| "epoch": 0.649, |
| "grad_norm": 18.75, |
| "learning_rate": 1e-05, |
| "loss": 2.4373, |
| "step": 6490 |
| }, |
| { |
| "epoch": 0.65, |
| "grad_norm": 19.875, |
| "learning_rate": 1e-05, |
| "loss": 2.3758, |
| "step": 6500 |
| }, |
| { |
| "epoch": 0.651, |
| "grad_norm": 20.625, |
| "learning_rate": 1e-05, |
| "loss": 2.4309, |
| "step": 6510 |
| }, |
| { |
| "epoch": 0.652, |
| "grad_norm": 18.5, |
| "learning_rate": 1e-05, |
| "loss": 2.6317, |
| "step": 6520 |
| }, |
| { |
| "epoch": 0.653, |
| "grad_norm": 19.375, |
| "learning_rate": 1e-05, |
| "loss": 2.301, |
| "step": 6530 |
| }, |
| { |
| "epoch": 0.654, |
| "grad_norm": 15.625, |
| "learning_rate": 1e-05, |
| "loss": 2.3398, |
| "step": 6540 |
| }, |
| { |
| "epoch": 0.655, |
| "grad_norm": 20.125, |
| "learning_rate": 1e-05, |
| "loss": 2.6061, |
| "step": 6550 |
| }, |
| { |
| "epoch": 0.656, |
| "grad_norm": 18.0, |
| "learning_rate": 1e-05, |
| "loss": 2.5678, |
| "step": 6560 |
| }, |
| { |
| "epoch": 0.657, |
| "grad_norm": 18.75, |
| "learning_rate": 1e-05, |
| "loss": 2.2114, |
| "step": 6570 |
| }, |
| { |
| "epoch": 0.658, |
| "grad_norm": 16.375, |
| "learning_rate": 1e-05, |
| "loss": 2.4241, |
| "step": 6580 |
| }, |
| { |
| "epoch": 0.659, |
| "grad_norm": 18.75, |
| "learning_rate": 1e-05, |
| "loss": 2.4832, |
| "step": 6590 |
| }, |
| { |
| "epoch": 0.66, |
| "grad_norm": 19.125, |
| "learning_rate": 1e-05, |
| "loss": 2.462, |
| "step": 6600 |
| }, |
| { |
| "epoch": 0.661, |
| "grad_norm": 18.25, |
| "learning_rate": 1e-05, |
| "loss": 2.5769, |
| "step": 6610 |
| }, |
| { |
| "epoch": 0.662, |
| "grad_norm": 21.375, |
| "learning_rate": 1e-05, |
| "loss": 2.6055, |
| "step": 6620 |
| }, |
| { |
| "epoch": 0.663, |
| "grad_norm": 19.125, |
| "learning_rate": 1e-05, |
| "loss": 2.3458, |
| "step": 6630 |
| }, |
| { |
| "epoch": 0.664, |
| "grad_norm": 18.375, |
| "learning_rate": 1e-05, |
| "loss": 2.531, |
| "step": 6640 |
| }, |
| { |
| "epoch": 0.665, |
| "grad_norm": 19.0, |
| "learning_rate": 1e-05, |
| "loss": 2.3875, |
| "step": 6650 |
| }, |
| { |
| "epoch": 0.666, |
| "grad_norm": 20.875, |
| "learning_rate": 1e-05, |
| "loss": 2.4711, |
| "step": 6660 |
| }, |
| { |
| "epoch": 0.667, |
| "grad_norm": 16.375, |
| "learning_rate": 1e-05, |
| "loss": 2.5693, |
| "step": 6670 |
| }, |
| { |
| "epoch": 0.668, |
| "grad_norm": 23.5, |
| "learning_rate": 1e-05, |
| "loss": 2.9522, |
| "step": 6680 |
| }, |
| { |
| "epoch": 0.669, |
| "grad_norm": 18.75, |
| "learning_rate": 1e-05, |
| "loss": 2.7094, |
| "step": 6690 |
| }, |
| { |
| "epoch": 0.67, |
| "grad_norm": 17.75, |
| "learning_rate": 1e-05, |
| "loss": 2.5991, |
| "step": 6700 |
| }, |
| { |
| "epoch": 0.671, |
| "grad_norm": 15.3125, |
| "learning_rate": 1e-05, |
| "loss": 2.4768, |
| "step": 6710 |
| }, |
| { |
| "epoch": 0.672, |
| "grad_norm": 19.125, |
| "learning_rate": 1e-05, |
| "loss": 2.204, |
| "step": 6720 |
| }, |
| { |
| "epoch": 0.673, |
| "grad_norm": 19.625, |
| "learning_rate": 1e-05, |
| "loss": 2.5277, |
| "step": 6730 |
| }, |
| { |
| "epoch": 0.674, |
| "grad_norm": 16.875, |
| "learning_rate": 1e-05, |
| "loss": 2.4017, |
| "step": 6740 |
| }, |
| { |
| "epoch": 0.675, |
| "grad_norm": 14.6875, |
| "learning_rate": 1e-05, |
| "loss": 2.2573, |
| "step": 6750 |
| }, |
| { |
| "epoch": 0.676, |
| "grad_norm": 16.875, |
| "learning_rate": 1e-05, |
| "loss": 2.2073, |
| "step": 6760 |
| }, |
| { |
| "epoch": 0.677, |
| "grad_norm": 16.375, |
| "learning_rate": 1e-05, |
| "loss": 2.2279, |
| "step": 6770 |
| }, |
| { |
| "epoch": 0.678, |
| "grad_norm": 22.25, |
| "learning_rate": 1e-05, |
| "loss": 2.5572, |
| "step": 6780 |
| }, |
| { |
| "epoch": 0.679, |
| "grad_norm": 18.0, |
| "learning_rate": 1e-05, |
| "loss": 2.5595, |
| "step": 6790 |
| }, |
| { |
| "epoch": 0.68, |
| "grad_norm": 18.625, |
| "learning_rate": 1e-05, |
| "loss": 2.4021, |
| "step": 6800 |
| }, |
| { |
| "epoch": 0.681, |
| "grad_norm": 19.0, |
| "learning_rate": 1e-05, |
| "loss": 2.3983, |
| "step": 6810 |
| }, |
| { |
| "epoch": 0.682, |
| "grad_norm": 21.5, |
| "learning_rate": 1e-05, |
| "loss": 2.911, |
| "step": 6820 |
| }, |
| { |
| "epoch": 0.683, |
| "grad_norm": 21.125, |
| "learning_rate": 1e-05, |
| "loss": 2.561, |
| "step": 6830 |
| }, |
| { |
| "epoch": 0.684, |
| "grad_norm": 18.125, |
| "learning_rate": 1e-05, |
| "loss": 2.2879, |
| "step": 6840 |
| }, |
| { |
| "epoch": 0.685, |
| "grad_norm": 23.125, |
| "learning_rate": 1e-05, |
| "loss": 2.4359, |
| "step": 6850 |
| }, |
| { |
| "epoch": 0.686, |
| "grad_norm": 23.75, |
| "learning_rate": 1e-05, |
| "loss": 2.388, |
| "step": 6860 |
| }, |
| { |
| "epoch": 0.687, |
| "grad_norm": 19.0, |
| "learning_rate": 1e-05, |
| "loss": 2.3052, |
| "step": 6870 |
| }, |
| { |
| "epoch": 0.688, |
| "grad_norm": 20.25, |
| "learning_rate": 1e-05, |
| "loss": 2.4347, |
| "step": 6880 |
| }, |
| { |
| "epoch": 0.689, |
| "grad_norm": 17.75, |
| "learning_rate": 1e-05, |
| "loss": 2.4526, |
| "step": 6890 |
| }, |
| { |
| "epoch": 0.69, |
| "grad_norm": 17.25, |
| "learning_rate": 1e-05, |
| "loss": 2.4675, |
| "step": 6900 |
| }, |
| { |
| "epoch": 0.691, |
| "grad_norm": 19.625, |
| "learning_rate": 1e-05, |
| "loss": 2.3019, |
| "step": 6910 |
| }, |
| { |
| "epoch": 0.692, |
| "grad_norm": 16.375, |
| "learning_rate": 1e-05, |
| "loss": 2.4313, |
| "step": 6920 |
| }, |
| { |
| "epoch": 0.693, |
| "grad_norm": 15.375, |
| "learning_rate": 1e-05, |
| "loss": 2.4364, |
| "step": 6930 |
| }, |
| { |
| "epoch": 0.694, |
| "grad_norm": 18.875, |
| "learning_rate": 1e-05, |
| "loss": 2.4656, |
| "step": 6940 |
| }, |
| { |
| "epoch": 0.695, |
| "grad_norm": 19.875, |
| "learning_rate": 1e-05, |
| "loss": 2.2348, |
| "step": 6950 |
| }, |
| { |
| "epoch": 0.696, |
| "grad_norm": 16.875, |
| "learning_rate": 1e-05, |
| "loss": 2.3488, |
| "step": 6960 |
| }, |
| { |
| "epoch": 0.697, |
| "grad_norm": 15.375, |
| "learning_rate": 1e-05, |
| "loss": 2.5539, |
| "step": 6970 |
| }, |
| { |
| "epoch": 0.698, |
| "grad_norm": 20.25, |
| "learning_rate": 1e-05, |
| "loss": 2.6022, |
| "step": 6980 |
| }, |
| { |
| "epoch": 0.699, |
| "grad_norm": 33.0, |
| "learning_rate": 1e-05, |
| "loss": 2.5026, |
| "step": 6990 |
| }, |
| { |
| "epoch": 0.7, |
| "grad_norm": 18.75, |
| "learning_rate": 1e-05, |
| "loss": 2.5152, |
| "step": 7000 |
| }, |
| { |
| "epoch": 0.701, |
| "grad_norm": 20.75, |
| "learning_rate": 1e-05, |
| "loss": 2.4401, |
| "step": 7010 |
| }, |
| { |
| "epoch": 0.702, |
| "grad_norm": 21.375, |
| "learning_rate": 1e-05, |
| "loss": 2.4471, |
| "step": 7020 |
| }, |
| { |
| "epoch": 0.703, |
| "grad_norm": 16.5, |
| "learning_rate": 1e-05, |
| "loss": 2.3658, |
| "step": 7030 |
| }, |
| { |
| "epoch": 0.704, |
| "grad_norm": 15.9375, |
| "learning_rate": 1e-05, |
| "loss": 2.4041, |
| "step": 7040 |
| }, |
| { |
| "epoch": 0.705, |
| "grad_norm": 20.5, |
| "learning_rate": 1e-05, |
| "loss": 2.4155, |
| "step": 7050 |
| }, |
| { |
| "epoch": 0.706, |
| "grad_norm": 17.125, |
| "learning_rate": 1e-05, |
| "loss": 2.402, |
| "step": 7060 |
| }, |
| { |
| "epoch": 0.707, |
| "grad_norm": 17.375, |
| "learning_rate": 1e-05, |
| "loss": 2.2344, |
| "step": 7070 |
| }, |
| { |
| "epoch": 0.708, |
| "grad_norm": 16.75, |
| "learning_rate": 1e-05, |
| "loss": 2.4367, |
| "step": 7080 |
| }, |
| { |
| "epoch": 0.709, |
| "grad_norm": 22.0, |
| "learning_rate": 1e-05, |
| "loss": 2.1583, |
| "step": 7090 |
| }, |
| { |
| "epoch": 0.71, |
| "grad_norm": 23.25, |
| "learning_rate": 1e-05, |
| "loss": 2.6793, |
| "step": 7100 |
| }, |
| { |
| "epoch": 0.711, |
| "grad_norm": 13.0, |
| "learning_rate": 1e-05, |
| "loss": 2.4259, |
| "step": 7110 |
| }, |
| { |
| "epoch": 0.712, |
| "grad_norm": 18.125, |
| "learning_rate": 1e-05, |
| "loss": 2.5568, |
| "step": 7120 |
| }, |
| { |
| "epoch": 0.713, |
| "grad_norm": 18.25, |
| "learning_rate": 1e-05, |
| "loss": 2.5071, |
| "step": 7130 |
| }, |
| { |
| "epoch": 0.714, |
| "grad_norm": 22.25, |
| "learning_rate": 1e-05, |
| "loss": 2.7093, |
| "step": 7140 |
| }, |
| { |
| "epoch": 0.715, |
| "grad_norm": 18.625, |
| "learning_rate": 1e-05, |
| "loss": 2.6179, |
| "step": 7150 |
| }, |
| { |
| "epoch": 0.716, |
| "grad_norm": 16.625, |
| "learning_rate": 1e-05, |
| "loss": 2.0915, |
| "step": 7160 |
| }, |
| { |
| "epoch": 0.717, |
| "grad_norm": 20.375, |
| "learning_rate": 1e-05, |
| "loss": 2.9423, |
| "step": 7170 |
| }, |
| { |
| "epoch": 0.718, |
| "grad_norm": 18.375, |
| "learning_rate": 1e-05, |
| "loss": 2.4333, |
| "step": 7180 |
| }, |
| { |
| "epoch": 0.719, |
| "grad_norm": 19.25, |
| "learning_rate": 1e-05, |
| "loss": 2.5069, |
| "step": 7190 |
| }, |
| { |
| "epoch": 0.72, |
| "grad_norm": 18.875, |
| "learning_rate": 1e-05, |
| "loss": 2.3688, |
| "step": 7200 |
| }, |
| { |
| "epoch": 0.721, |
| "grad_norm": 19.875, |
| "learning_rate": 1e-05, |
| "loss": 2.6047, |
| "step": 7210 |
| }, |
| { |
| "epoch": 0.722, |
| "grad_norm": 21.75, |
| "learning_rate": 1e-05, |
| "loss": 2.871, |
| "step": 7220 |
| }, |
| { |
| "epoch": 0.723, |
| "grad_norm": 17.625, |
| "learning_rate": 1e-05, |
| "loss": 2.5762, |
| "step": 7230 |
| }, |
| { |
| "epoch": 0.724, |
| "grad_norm": 15.25, |
| "learning_rate": 1e-05, |
| "loss": 2.3892, |
| "step": 7240 |
| }, |
| { |
| "epoch": 0.725, |
| "grad_norm": 21.0, |
| "learning_rate": 1e-05, |
| "loss": 2.3978, |
| "step": 7250 |
| }, |
| { |
| "epoch": 0.726, |
| "grad_norm": 18.375, |
| "learning_rate": 1e-05, |
| "loss": 2.5605, |
| "step": 7260 |
| }, |
| { |
| "epoch": 0.727, |
| "grad_norm": 16.125, |
| "learning_rate": 1e-05, |
| "loss": 2.5202, |
| "step": 7270 |
| }, |
| { |
| "epoch": 0.728, |
| "grad_norm": 19.25, |
| "learning_rate": 1e-05, |
| "loss": 2.4805, |
| "step": 7280 |
| }, |
| { |
| "epoch": 0.729, |
| "grad_norm": 16.5, |
| "learning_rate": 1e-05, |
| "loss": 2.4678, |
| "step": 7290 |
| }, |
| { |
| "epoch": 0.73, |
| "grad_norm": 18.375, |
| "learning_rate": 1e-05, |
| "loss": 2.6738, |
| "step": 7300 |
| }, |
| { |
| "epoch": 0.731, |
| "grad_norm": 16.25, |
| "learning_rate": 1e-05, |
| "loss": 2.5489, |
| "step": 7310 |
| }, |
| { |
| "epoch": 0.732, |
| "grad_norm": 19.125, |
| "learning_rate": 1e-05, |
| "loss": 2.4729, |
| "step": 7320 |
| }, |
| { |
| "epoch": 0.733, |
| "grad_norm": 19.125, |
| "learning_rate": 1e-05, |
| "loss": 2.4355, |
| "step": 7330 |
| }, |
| { |
| "epoch": 0.734, |
| "grad_norm": 16.625, |
| "learning_rate": 1e-05, |
| "loss": 2.503, |
| "step": 7340 |
| }, |
| { |
| "epoch": 0.735, |
| "grad_norm": 16.625, |
| "learning_rate": 1e-05, |
| "loss": 2.5859, |
| "step": 7350 |
| }, |
| { |
| "epoch": 0.736, |
| "grad_norm": 17.0, |
| "learning_rate": 1e-05, |
| "loss": 2.4242, |
| "step": 7360 |
| }, |
| { |
| "epoch": 0.737, |
| "grad_norm": 17.375, |
| "learning_rate": 1e-05, |
| "loss": 2.4762, |
| "step": 7370 |
| }, |
| { |
| "epoch": 0.738, |
| "grad_norm": 18.0, |
| "learning_rate": 1e-05, |
| "loss": 2.72, |
| "step": 7380 |
| }, |
| { |
| "epoch": 0.739, |
| "grad_norm": 18.625, |
| "learning_rate": 1e-05, |
| "loss": 2.356, |
| "step": 7390 |
| }, |
| { |
| "epoch": 0.74, |
| "grad_norm": 16.625, |
| "learning_rate": 1e-05, |
| "loss": 2.4734, |
| "step": 7400 |
| }, |
| { |
| "epoch": 0.741, |
| "grad_norm": 16.875, |
| "learning_rate": 1e-05, |
| "loss": 2.1348, |
| "step": 7410 |
| }, |
| { |
| "epoch": 0.742, |
| "grad_norm": 20.5, |
| "learning_rate": 1e-05, |
| "loss": 2.5484, |
| "step": 7420 |
| }, |
| { |
| "epoch": 0.743, |
| "grad_norm": 18.375, |
| "learning_rate": 1e-05, |
| "loss": 2.5783, |
| "step": 7430 |
| }, |
| { |
| "epoch": 0.744, |
| "grad_norm": 20.25, |
| "learning_rate": 1e-05, |
| "loss": 2.5305, |
| "step": 7440 |
| }, |
| { |
| "epoch": 0.745, |
| "grad_norm": 15.125, |
| "learning_rate": 1e-05, |
| "loss": 2.8296, |
| "step": 7450 |
| }, |
| { |
| "epoch": 0.746, |
| "grad_norm": 17.0, |
| "learning_rate": 1e-05, |
| "loss": 2.3706, |
| "step": 7460 |
| }, |
| { |
| "epoch": 0.747, |
| "grad_norm": 17.25, |
| "learning_rate": 1e-05, |
| "loss": 2.6791, |
| "step": 7470 |
| }, |
| { |
| "epoch": 0.748, |
| "grad_norm": 18.5, |
| "learning_rate": 1e-05, |
| "loss": 2.4498, |
| "step": 7480 |
| }, |
| { |
| "epoch": 0.749, |
| "grad_norm": 21.875, |
| "learning_rate": 1e-05, |
| "loss": 2.5614, |
| "step": 7490 |
| }, |
| { |
| "epoch": 0.75, |
| "grad_norm": 18.625, |
| "learning_rate": 1e-05, |
| "loss": 2.5407, |
| "step": 7500 |
| }, |
| { |
| "epoch": 0.751, |
| "grad_norm": 18.0, |
| "learning_rate": 1e-05, |
| "loss": 2.5135, |
| "step": 7510 |
| }, |
| { |
| "epoch": 0.752, |
| "grad_norm": 17.75, |
| "learning_rate": 1e-05, |
| "loss": 2.7967, |
| "step": 7520 |
| }, |
| { |
| "epoch": 0.753, |
| "grad_norm": 17.875, |
| "learning_rate": 1e-05, |
| "loss": 2.5001, |
| "step": 7530 |
| }, |
| { |
| "epoch": 0.754, |
| "grad_norm": 17.75, |
| "learning_rate": 1e-05, |
| "loss": 2.2064, |
| "step": 7540 |
| }, |
| { |
| "epoch": 0.755, |
| "grad_norm": 21.375, |
| "learning_rate": 1e-05, |
| "loss": 2.3669, |
| "step": 7550 |
| }, |
| { |
| "epoch": 0.756, |
| "grad_norm": 19.375, |
| "learning_rate": 1e-05, |
| "loss": 2.5826, |
| "step": 7560 |
| }, |
| { |
| "epoch": 0.757, |
| "grad_norm": 17.375, |
| "learning_rate": 1e-05, |
| "loss": 2.5413, |
| "step": 7570 |
| }, |
| { |
| "epoch": 0.758, |
| "grad_norm": 21.125, |
| "learning_rate": 1e-05, |
| "loss": 2.3163, |
| "step": 7580 |
| }, |
| { |
| "epoch": 0.759, |
| "grad_norm": 18.625, |
| "learning_rate": 1e-05, |
| "loss": 2.4117, |
| "step": 7590 |
| }, |
| { |
| "epoch": 0.76, |
| "grad_norm": 16.625, |
| "learning_rate": 1e-05, |
| "loss": 2.3615, |
| "step": 7600 |
| }, |
| { |
| "epoch": 0.761, |
| "grad_norm": 16.75, |
| "learning_rate": 1e-05, |
| "loss": 2.6035, |
| "step": 7610 |
| }, |
| { |
| "epoch": 0.762, |
| "grad_norm": 19.375, |
| "learning_rate": 1e-05, |
| "loss": 2.3826, |
| "step": 7620 |
| }, |
| { |
| "epoch": 0.763, |
| "grad_norm": 18.25, |
| "learning_rate": 1e-05, |
| "loss": 2.6047, |
| "step": 7630 |
| }, |
| { |
| "epoch": 0.764, |
| "grad_norm": 20.125, |
| "learning_rate": 1e-05, |
| "loss": 2.4846, |
| "step": 7640 |
| }, |
| { |
| "epoch": 0.765, |
| "grad_norm": 16.25, |
| "learning_rate": 1e-05, |
| "loss": 2.3824, |
| "step": 7650 |
| }, |
| { |
| "epoch": 0.766, |
| "grad_norm": 17.625, |
| "learning_rate": 1e-05, |
| "loss": 2.4052, |
| "step": 7660 |
| }, |
| { |
| "epoch": 0.767, |
| "grad_norm": 18.875, |
| "learning_rate": 1e-05, |
| "loss": 2.4984, |
| "step": 7670 |
| }, |
| { |
| "epoch": 0.768, |
| "grad_norm": 22.625, |
| "learning_rate": 1e-05, |
| "loss": 2.6286, |
| "step": 7680 |
| }, |
| { |
| "epoch": 0.769, |
| "grad_norm": 16.75, |
| "learning_rate": 1e-05, |
| "loss": 2.6427, |
| "step": 7690 |
| }, |
| { |
| "epoch": 0.77, |
| "grad_norm": 15.5, |
| "learning_rate": 1e-05, |
| "loss": 2.4465, |
| "step": 7700 |
| }, |
| { |
| "epoch": 0.771, |
| "grad_norm": 21.375, |
| "learning_rate": 1e-05, |
| "loss": 2.4003, |
| "step": 7710 |
| }, |
| { |
| "epoch": 0.772, |
| "grad_norm": 22.875, |
| "learning_rate": 1e-05, |
| "loss": 2.0484, |
| "step": 7720 |
| }, |
| { |
| "epoch": 0.773, |
| "grad_norm": 17.5, |
| "learning_rate": 1e-05, |
| "loss": 2.1542, |
| "step": 7730 |
| }, |
| { |
| "epoch": 0.774, |
| "grad_norm": 18.125, |
| "learning_rate": 1e-05, |
| "loss": 2.5422, |
| "step": 7740 |
| }, |
| { |
| "epoch": 0.775, |
| "grad_norm": 16.875, |
| "learning_rate": 1e-05, |
| "loss": 2.2782, |
| "step": 7750 |
| }, |
| { |
| "epoch": 0.776, |
| "grad_norm": 16.625, |
| "learning_rate": 1e-05, |
| "loss": 2.3083, |
| "step": 7760 |
| }, |
| { |
| "epoch": 0.777, |
| "grad_norm": 17.375, |
| "learning_rate": 1e-05, |
| "loss": 2.4878, |
| "step": 7770 |
| }, |
| { |
| "epoch": 0.778, |
| "grad_norm": 19.5, |
| "learning_rate": 1e-05, |
| "loss": 2.3609, |
| "step": 7780 |
| }, |
| { |
| "epoch": 0.779, |
| "grad_norm": 18.625, |
| "learning_rate": 1e-05, |
| "loss": 2.4256, |
| "step": 7790 |
| }, |
| { |
| "epoch": 0.78, |
| "grad_norm": 17.0, |
| "learning_rate": 1e-05, |
| "loss": 2.4449, |
| "step": 7800 |
| }, |
| { |
| "epoch": 0.781, |
| "grad_norm": 20.375, |
| "learning_rate": 1e-05, |
| "loss": 2.4159, |
| "step": 7810 |
| }, |
| { |
| "epoch": 0.782, |
| "grad_norm": 20.25, |
| "learning_rate": 1e-05, |
| "loss": 2.5835, |
| "step": 7820 |
| }, |
| { |
| "epoch": 0.783, |
| "grad_norm": 16.75, |
| "learning_rate": 1e-05, |
| "loss": 2.5489, |
| "step": 7830 |
| }, |
| { |
| "epoch": 0.784, |
| "grad_norm": 21.125, |
| "learning_rate": 1e-05, |
| "loss": 2.532, |
| "step": 7840 |
| }, |
| { |
| "epoch": 0.785, |
| "grad_norm": 18.125, |
| "learning_rate": 1e-05, |
| "loss": 2.4676, |
| "step": 7850 |
| }, |
| { |
| "epoch": 0.786, |
| "grad_norm": 18.25, |
| "learning_rate": 1e-05, |
| "loss": 2.4016, |
| "step": 7860 |
| }, |
| { |
| "epoch": 0.787, |
| "grad_norm": 21.125, |
| "learning_rate": 1e-05, |
| "loss": 2.5432, |
| "step": 7870 |
| }, |
| { |
| "epoch": 0.788, |
| "grad_norm": 19.625, |
| "learning_rate": 1e-05, |
| "loss": 2.6657, |
| "step": 7880 |
| }, |
| { |
| "epoch": 0.789, |
| "grad_norm": 20.125, |
| "learning_rate": 1e-05, |
| "loss": 2.4801, |
| "step": 7890 |
| }, |
| { |
| "epoch": 0.79, |
| "grad_norm": 16.5, |
| "learning_rate": 1e-05, |
| "loss": 2.4901, |
| "step": 7900 |
| }, |
| { |
| "epoch": 0.791, |
| "grad_norm": 17.125, |
| "learning_rate": 1e-05, |
| "loss": 2.6168, |
| "step": 7910 |
| }, |
| { |
| "epoch": 0.792, |
| "grad_norm": 17.5, |
| "learning_rate": 1e-05, |
| "loss": 2.252, |
| "step": 7920 |
| }, |
| { |
| "epoch": 0.793, |
| "grad_norm": 20.5, |
| "learning_rate": 1e-05, |
| "loss": 2.3646, |
| "step": 7930 |
| }, |
| { |
| "epoch": 0.794, |
| "grad_norm": 17.125, |
| "learning_rate": 1e-05, |
| "loss": 2.4548, |
| "step": 7940 |
| }, |
| { |
| "epoch": 0.795, |
| "grad_norm": 15.875, |
| "learning_rate": 1e-05, |
| "loss": 2.339, |
| "step": 7950 |
| }, |
| { |
| "epoch": 0.796, |
| "grad_norm": 21.875, |
| "learning_rate": 1e-05, |
| "loss": 2.2785, |
| "step": 7960 |
| }, |
| { |
| "epoch": 0.797, |
| "grad_norm": 22.25, |
| "learning_rate": 1e-05, |
| "loss": 2.1465, |
| "step": 7970 |
| }, |
| { |
| "epoch": 0.798, |
| "grad_norm": 19.125, |
| "learning_rate": 1e-05, |
| "loss": 2.256, |
| "step": 7980 |
| }, |
| { |
| "epoch": 0.799, |
| "grad_norm": 19.625, |
| "learning_rate": 1e-05, |
| "loss": 2.378, |
| "step": 7990 |
| }, |
| { |
| "epoch": 0.8, |
| "grad_norm": 16.5, |
| "learning_rate": 1e-05, |
| "loss": 2.4195, |
| "step": 8000 |
| }, |
| { |
| "epoch": 0.801, |
| "grad_norm": 23.5, |
| "learning_rate": 1e-05, |
| "loss": 2.2677, |
| "step": 8010 |
| }, |
| { |
| "epoch": 0.802, |
| "grad_norm": 19.125, |
| "learning_rate": 1e-05, |
| "loss": 2.4668, |
| "step": 8020 |
| }, |
| { |
| "epoch": 0.803, |
| "grad_norm": 25.125, |
| "learning_rate": 1e-05, |
| "loss": 2.299, |
| "step": 8030 |
| }, |
| { |
| "epoch": 0.804, |
| "grad_norm": 17.5, |
| "learning_rate": 1e-05, |
| "loss": 2.3354, |
| "step": 8040 |
| }, |
| { |
| "epoch": 0.805, |
| "grad_norm": 19.375, |
| "learning_rate": 1e-05, |
| "loss": 2.3383, |
| "step": 8050 |
| }, |
| { |
| "epoch": 0.806, |
| "grad_norm": 18.375, |
| "learning_rate": 1e-05, |
| "loss": 2.5364, |
| "step": 8060 |
| }, |
| { |
| "epoch": 0.807, |
| "grad_norm": 15.8125, |
| "learning_rate": 1e-05, |
| "loss": 2.3349, |
| "step": 8070 |
| }, |
| { |
| "epoch": 0.808, |
| "grad_norm": 21.625, |
| "learning_rate": 1e-05, |
| "loss": 2.7276, |
| "step": 8080 |
| }, |
| { |
| "epoch": 0.809, |
| "grad_norm": 20.0, |
| "learning_rate": 1e-05, |
| "loss": 2.5475, |
| "step": 8090 |
| }, |
| { |
| "epoch": 0.81, |
| "grad_norm": 15.5625, |
| "learning_rate": 1e-05, |
| "loss": 2.4244, |
| "step": 8100 |
| }, |
| { |
| "epoch": 0.811, |
| "grad_norm": 18.5, |
| "learning_rate": 1e-05, |
| "loss": 2.5047, |
| "step": 8110 |
| }, |
| { |
| "epoch": 0.812, |
| "grad_norm": 22.75, |
| "learning_rate": 1e-05, |
| "loss": 2.5955, |
| "step": 8120 |
| }, |
| { |
| "epoch": 0.813, |
| "grad_norm": 18.875, |
| "learning_rate": 1e-05, |
| "loss": 2.502, |
| "step": 8130 |
| }, |
| { |
| "epoch": 0.814, |
| "grad_norm": 15.1875, |
| "learning_rate": 1e-05, |
| "loss": 2.6035, |
| "step": 8140 |
| }, |
| { |
| "epoch": 0.815, |
| "grad_norm": 18.875, |
| "learning_rate": 1e-05, |
| "loss": 2.1707, |
| "step": 8150 |
| }, |
| { |
| "epoch": 0.816, |
| "grad_norm": 19.375, |
| "learning_rate": 1e-05, |
| "loss": 2.6626, |
| "step": 8160 |
| }, |
| { |
| "epoch": 0.817, |
| "grad_norm": 18.625, |
| "learning_rate": 1e-05, |
| "loss": 2.3322, |
| "step": 8170 |
| }, |
| { |
| "epoch": 0.818, |
| "grad_norm": 18.875, |
| "learning_rate": 1e-05, |
| "loss": 2.225, |
| "step": 8180 |
| }, |
| { |
| "epoch": 0.819, |
| "grad_norm": 18.875, |
| "learning_rate": 1e-05, |
| "loss": 2.4109, |
| "step": 8190 |
| }, |
| { |
| "epoch": 0.82, |
| "grad_norm": 19.125, |
| "learning_rate": 1e-05, |
| "loss": 2.4814, |
| "step": 8200 |
| }, |
| { |
| "epoch": 0.821, |
| "grad_norm": 17.625, |
| "learning_rate": 1e-05, |
| "loss": 2.6057, |
| "step": 8210 |
| }, |
| { |
| "epoch": 0.822, |
| "grad_norm": 20.625, |
| "learning_rate": 1e-05, |
| "loss": 2.5058, |
| "step": 8220 |
| }, |
| { |
| "epoch": 0.823, |
| "grad_norm": 14.375, |
| "learning_rate": 1e-05, |
| "loss": 2.3002, |
| "step": 8230 |
| }, |
| { |
| "epoch": 0.824, |
| "grad_norm": 14.75, |
| "learning_rate": 1e-05, |
| "loss": 2.124, |
| "step": 8240 |
| }, |
| { |
| "epoch": 0.825, |
| "grad_norm": 16.875, |
| "learning_rate": 1e-05, |
| "loss": 2.1385, |
| "step": 8250 |
| }, |
| { |
| "epoch": 0.826, |
| "grad_norm": 19.375, |
| "learning_rate": 1e-05, |
| "loss": 2.6116, |
| "step": 8260 |
| }, |
| { |
| "epoch": 0.827, |
| "grad_norm": 20.625, |
| "learning_rate": 1e-05, |
| "loss": 2.7004, |
| "step": 8270 |
| }, |
| { |
| "epoch": 0.828, |
| "grad_norm": 19.875, |
| "learning_rate": 1e-05, |
| "loss": 2.4591, |
| "step": 8280 |
| }, |
| { |
| "epoch": 0.829, |
| "grad_norm": 19.625, |
| "learning_rate": 1e-05, |
| "loss": 2.3282, |
| "step": 8290 |
| }, |
| { |
| "epoch": 0.83, |
| "grad_norm": 23.0, |
| "learning_rate": 1e-05, |
| "loss": 2.7295, |
| "step": 8300 |
| }, |
| { |
| "epoch": 0.831, |
| "grad_norm": 15.6875, |
| "learning_rate": 1e-05, |
| "loss": 2.2418, |
| "step": 8310 |
| }, |
| { |
| "epoch": 0.832, |
| "grad_norm": 21.375, |
| "learning_rate": 1e-05, |
| "loss": 2.5152, |
| "step": 8320 |
| }, |
| { |
| "epoch": 0.833, |
| "grad_norm": 18.5, |
| "learning_rate": 1e-05, |
| "loss": 2.3926, |
| "step": 8330 |
| }, |
| { |
| "epoch": 0.834, |
| "grad_norm": 17.875, |
| "learning_rate": 1e-05, |
| "loss": 2.4559, |
| "step": 8340 |
| }, |
| { |
| "epoch": 0.835, |
| "grad_norm": 22.5, |
| "learning_rate": 1e-05, |
| "loss": 2.2616, |
| "step": 8350 |
| }, |
| { |
| "epoch": 0.836, |
| "grad_norm": 16.625, |
| "learning_rate": 1e-05, |
| "loss": 2.2286, |
| "step": 8360 |
| }, |
| { |
| "epoch": 0.837, |
| "grad_norm": 17.625, |
| "learning_rate": 1e-05, |
| "loss": 2.4871, |
| "step": 8370 |
| }, |
| { |
| "epoch": 0.838, |
| "grad_norm": 15.625, |
| "learning_rate": 1e-05, |
| "loss": 2.2564, |
| "step": 8380 |
| }, |
| { |
| "epoch": 0.839, |
| "grad_norm": 19.0, |
| "learning_rate": 1e-05, |
| "loss": 2.6026, |
| "step": 8390 |
| }, |
| { |
| "epoch": 0.84, |
| "grad_norm": 20.625, |
| "learning_rate": 1e-05, |
| "loss": 2.3815, |
| "step": 8400 |
| }, |
| { |
| "epoch": 0.841, |
| "grad_norm": 15.8125, |
| "learning_rate": 1e-05, |
| "loss": 2.3241, |
| "step": 8410 |
| }, |
| { |
| "epoch": 0.842, |
| "grad_norm": 16.75, |
| "learning_rate": 1e-05, |
| "loss": 2.4265, |
| "step": 8420 |
| }, |
| { |
| "epoch": 0.843, |
| "grad_norm": 20.625, |
| "learning_rate": 1e-05, |
| "loss": 2.3607, |
| "step": 8430 |
| }, |
| { |
| "epoch": 0.844, |
| "grad_norm": 19.125, |
| "learning_rate": 1e-05, |
| "loss": 2.5177, |
| "step": 8440 |
| }, |
| { |
| "epoch": 0.845, |
| "grad_norm": 16.25, |
| "learning_rate": 1e-05, |
| "loss": 2.3888, |
| "step": 8450 |
| }, |
| { |
| "epoch": 0.846, |
| "grad_norm": 18.875, |
| "learning_rate": 1e-05, |
| "loss": 2.5138, |
| "step": 8460 |
| }, |
| { |
| "epoch": 0.847, |
| "grad_norm": 18.375, |
| "learning_rate": 1e-05, |
| "loss": 2.6599, |
| "step": 8470 |
| }, |
| { |
| "epoch": 0.848, |
| "grad_norm": 19.25, |
| "learning_rate": 1e-05, |
| "loss": 2.4258, |
| "step": 8480 |
| }, |
| { |
| "epoch": 0.849, |
| "grad_norm": 17.75, |
| "learning_rate": 1e-05, |
| "loss": 2.2298, |
| "step": 8490 |
| }, |
| { |
| "epoch": 0.85, |
| "grad_norm": 19.375, |
| "learning_rate": 1e-05, |
| "loss": 2.8115, |
| "step": 8500 |
| }, |
| { |
| "epoch": 0.851, |
| "grad_norm": 20.375, |
| "learning_rate": 1e-05, |
| "loss": 2.6512, |
| "step": 8510 |
| }, |
| { |
| "epoch": 0.852, |
| "grad_norm": 19.75, |
| "learning_rate": 1e-05, |
| "loss": 2.3415, |
| "step": 8520 |
| }, |
| { |
| "epoch": 0.853, |
| "grad_norm": 19.125, |
| "learning_rate": 1e-05, |
| "loss": 2.4836, |
| "step": 8530 |
| }, |
| { |
| "epoch": 0.854, |
| "grad_norm": 19.75, |
| "learning_rate": 1e-05, |
| "loss": 2.5127, |
| "step": 8540 |
| }, |
| { |
| "epoch": 0.855, |
| "grad_norm": 17.875, |
| "learning_rate": 1e-05, |
| "loss": 2.7786, |
| "step": 8550 |
| }, |
| { |
| "epoch": 0.856, |
| "grad_norm": 16.625, |
| "learning_rate": 1e-05, |
| "loss": 2.411, |
| "step": 8560 |
| }, |
| { |
| "epoch": 0.857, |
| "grad_norm": 24.375, |
| "learning_rate": 1e-05, |
| "loss": 2.2507, |
| "step": 8570 |
| }, |
| { |
| "epoch": 0.858, |
| "grad_norm": 19.25, |
| "learning_rate": 1e-05, |
| "loss": 2.5039, |
| "step": 8580 |
| }, |
| { |
| "epoch": 0.859, |
| "grad_norm": 21.0, |
| "learning_rate": 1e-05, |
| "loss": 2.2632, |
| "step": 8590 |
| }, |
| { |
| "epoch": 0.86, |
| "grad_norm": 17.375, |
| "learning_rate": 1e-05, |
| "loss": 2.4987, |
| "step": 8600 |
| }, |
| { |
| "epoch": 0.861, |
| "grad_norm": 19.75, |
| "learning_rate": 1e-05, |
| "loss": 2.2589, |
| "step": 8610 |
| }, |
| { |
| "epoch": 0.862, |
| "grad_norm": 20.75, |
| "learning_rate": 1e-05, |
| "loss": 2.4368, |
| "step": 8620 |
| }, |
| { |
| "epoch": 0.863, |
| "grad_norm": 17.75, |
| "learning_rate": 1e-05, |
| "loss": 2.6051, |
| "step": 8630 |
| }, |
| { |
| "epoch": 0.864, |
| "grad_norm": 17.875, |
| "learning_rate": 1e-05, |
| "loss": 2.4449, |
| "step": 8640 |
| }, |
| { |
| "epoch": 0.865, |
| "grad_norm": 22.625, |
| "learning_rate": 1e-05, |
| "loss": 2.5124, |
| "step": 8650 |
| }, |
| { |
| "epoch": 0.866, |
| "grad_norm": 17.125, |
| "learning_rate": 1e-05, |
| "loss": 2.456, |
| "step": 8660 |
| }, |
| { |
| "epoch": 0.867, |
| "grad_norm": 16.125, |
| "learning_rate": 1e-05, |
| "loss": 2.3064, |
| "step": 8670 |
| }, |
| { |
| "epoch": 0.868, |
| "grad_norm": 18.75, |
| "learning_rate": 1e-05, |
| "loss": 2.4437, |
| "step": 8680 |
| }, |
| { |
| "epoch": 0.869, |
| "grad_norm": 19.5, |
| "learning_rate": 1e-05, |
| "loss": 2.2141, |
| "step": 8690 |
| }, |
| { |
| "epoch": 0.87, |
| "grad_norm": 20.5, |
| "learning_rate": 1e-05, |
| "loss": 2.4081, |
| "step": 8700 |
| }, |
| { |
| "epoch": 0.871, |
| "grad_norm": 18.75, |
| "learning_rate": 1e-05, |
| "loss": 2.2815, |
| "step": 8710 |
| }, |
| { |
| "epoch": 0.872, |
| "grad_norm": 17.0, |
| "learning_rate": 1e-05, |
| "loss": 2.4717, |
| "step": 8720 |
| }, |
| { |
| "epoch": 0.873, |
| "grad_norm": 17.875, |
| "learning_rate": 1e-05, |
| "loss": 2.7244, |
| "step": 8730 |
| }, |
| { |
| "epoch": 0.874, |
| "grad_norm": 16.625, |
| "learning_rate": 1e-05, |
| "loss": 2.3738, |
| "step": 8740 |
| }, |
| { |
| "epoch": 0.875, |
| "grad_norm": 16.25, |
| "learning_rate": 1e-05, |
| "loss": 2.3144, |
| "step": 8750 |
| }, |
| { |
| "epoch": 0.876, |
| "grad_norm": 18.875, |
| "learning_rate": 1e-05, |
| "loss": 2.339, |
| "step": 8760 |
| }, |
| { |
| "epoch": 0.877, |
| "grad_norm": 16.75, |
| "learning_rate": 1e-05, |
| "loss": 2.1836, |
| "step": 8770 |
| }, |
| { |
| "epoch": 0.878, |
| "grad_norm": 16.625, |
| "learning_rate": 1e-05, |
| "loss": 2.2632, |
| "step": 8780 |
| }, |
| { |
| "epoch": 0.879, |
| "grad_norm": 17.75, |
| "learning_rate": 1e-05, |
| "loss": 2.722, |
| "step": 8790 |
| }, |
| { |
| "epoch": 0.88, |
| "grad_norm": 19.75, |
| "learning_rate": 1e-05, |
| "loss": 2.4821, |
| "step": 8800 |
| }, |
| { |
| "epoch": 0.881, |
| "grad_norm": 18.0, |
| "learning_rate": 1e-05, |
| "loss": 2.3665, |
| "step": 8810 |
| }, |
| { |
| "epoch": 0.882, |
| "grad_norm": 15.875, |
| "learning_rate": 1e-05, |
| "loss": 2.3065, |
| "step": 8820 |
| }, |
| { |
| "epoch": 0.883, |
| "grad_norm": 19.625, |
| "learning_rate": 1e-05, |
| "loss": 2.6674, |
| "step": 8830 |
| }, |
| { |
| "epoch": 0.884, |
| "grad_norm": 15.5, |
| "learning_rate": 1e-05, |
| "loss": 2.5508, |
| "step": 8840 |
| }, |
| { |
| "epoch": 0.885, |
| "grad_norm": 17.5, |
| "learning_rate": 1e-05, |
| "loss": 2.2352, |
| "step": 8850 |
| }, |
| { |
| "epoch": 0.886, |
| "grad_norm": 17.125, |
| "learning_rate": 1e-05, |
| "loss": 2.161, |
| "step": 8860 |
| }, |
| { |
| "epoch": 0.887, |
| "grad_norm": 17.125, |
| "learning_rate": 1e-05, |
| "loss": 2.4657, |
| "step": 8870 |
| }, |
| { |
| "epoch": 0.888, |
| "grad_norm": 21.375, |
| "learning_rate": 1e-05, |
| "loss": 2.8735, |
| "step": 8880 |
| }, |
| { |
| "epoch": 0.889, |
| "grad_norm": 17.625, |
| "learning_rate": 1e-05, |
| "loss": 2.5344, |
| "step": 8890 |
| }, |
| { |
| "epoch": 0.89, |
| "grad_norm": 18.25, |
| "learning_rate": 1e-05, |
| "loss": 2.3382, |
| "step": 8900 |
| }, |
| { |
| "epoch": 0.891, |
| "grad_norm": 19.125, |
| "learning_rate": 1e-05, |
| "loss": 2.1438, |
| "step": 8910 |
| }, |
| { |
| "epoch": 0.892, |
| "grad_norm": 17.25, |
| "learning_rate": 1e-05, |
| "loss": 2.5998, |
| "step": 8920 |
| }, |
| { |
| "epoch": 0.893, |
| "grad_norm": 25.0, |
| "learning_rate": 1e-05, |
| "loss": 2.4666, |
| "step": 8930 |
| }, |
| { |
| "epoch": 0.894, |
| "grad_norm": 17.0, |
| "learning_rate": 1e-05, |
| "loss": 2.6702, |
| "step": 8940 |
| }, |
| { |
| "epoch": 0.895, |
| "grad_norm": 19.0, |
| "learning_rate": 1e-05, |
| "loss": 2.5725, |
| "step": 8950 |
| }, |
| { |
| "epoch": 0.896, |
| "grad_norm": 23.625, |
| "learning_rate": 1e-05, |
| "loss": 2.5048, |
| "step": 8960 |
| }, |
| { |
| "epoch": 0.897, |
| "grad_norm": 17.625, |
| "learning_rate": 1e-05, |
| "loss": 2.1488, |
| "step": 8970 |
| }, |
| { |
| "epoch": 0.898, |
| "grad_norm": 18.75, |
| "learning_rate": 1e-05, |
| "loss": 2.2635, |
| "step": 8980 |
| }, |
| { |
| "epoch": 0.899, |
| "grad_norm": 19.125, |
| "learning_rate": 1e-05, |
| "loss": 2.0649, |
| "step": 8990 |
| }, |
| { |
| "epoch": 0.9, |
| "grad_norm": 20.125, |
| "learning_rate": 1e-05, |
| "loss": 2.675, |
| "step": 9000 |
| }, |
| { |
| "epoch": 0.901, |
| "grad_norm": 17.5, |
| "learning_rate": 1e-05, |
| "loss": 2.5572, |
| "step": 9010 |
| }, |
| { |
| "epoch": 0.902, |
| "grad_norm": 14.6875, |
| "learning_rate": 1e-05, |
| "loss": 2.2539, |
| "step": 9020 |
| }, |
| { |
| "epoch": 0.903, |
| "grad_norm": 15.3125, |
| "learning_rate": 1e-05, |
| "loss": 2.4034, |
| "step": 9030 |
| }, |
| { |
| "epoch": 0.904, |
| "grad_norm": 16.875, |
| "learning_rate": 1e-05, |
| "loss": 2.287, |
| "step": 9040 |
| }, |
| { |
| "epoch": 0.905, |
| "grad_norm": 18.0, |
| "learning_rate": 1e-05, |
| "loss": 2.4461, |
| "step": 9050 |
| }, |
| { |
| "epoch": 0.906, |
| "grad_norm": 22.875, |
| "learning_rate": 1e-05, |
| "loss": 2.5277, |
| "step": 9060 |
| }, |
| { |
| "epoch": 0.907, |
| "grad_norm": 20.375, |
| "learning_rate": 1e-05, |
| "loss": 2.4549, |
| "step": 9070 |
| }, |
| { |
| "epoch": 0.908, |
| "grad_norm": 22.0, |
| "learning_rate": 1e-05, |
| "loss": 2.2709, |
| "step": 9080 |
| }, |
| { |
| "epoch": 0.909, |
| "grad_norm": 17.5, |
| "learning_rate": 1e-05, |
| "loss": 2.3713, |
| "step": 9090 |
| }, |
| { |
| "epoch": 0.91, |
| "grad_norm": 22.0, |
| "learning_rate": 1e-05, |
| "loss": 2.4719, |
| "step": 9100 |
| }, |
| { |
| "epoch": 0.911, |
| "grad_norm": 22.125, |
| "learning_rate": 1e-05, |
| "loss": 2.4898, |
| "step": 9110 |
| }, |
| { |
| "epoch": 0.912, |
| "grad_norm": 17.0, |
| "learning_rate": 1e-05, |
| "loss": 2.7871, |
| "step": 9120 |
| }, |
| { |
| "epoch": 0.913, |
| "grad_norm": 20.0, |
| "learning_rate": 1e-05, |
| "loss": 2.4378, |
| "step": 9130 |
| }, |
| { |
| "epoch": 0.914, |
| "grad_norm": 19.125, |
| "learning_rate": 1e-05, |
| "loss": 2.476, |
| "step": 9140 |
| }, |
| { |
| "epoch": 0.915, |
| "grad_norm": 17.375, |
| "learning_rate": 1e-05, |
| "loss": 2.5315, |
| "step": 9150 |
| }, |
| { |
| "epoch": 0.916, |
| "grad_norm": 24.25, |
| "learning_rate": 1e-05, |
| "loss": 2.6469, |
| "step": 9160 |
| }, |
| { |
| "epoch": 0.917, |
| "grad_norm": 15.9375, |
| "learning_rate": 1e-05, |
| "loss": 2.2646, |
| "step": 9170 |
| }, |
| { |
| "epoch": 0.918, |
| "grad_norm": 18.75, |
| "learning_rate": 1e-05, |
| "loss": 2.2606, |
| "step": 9180 |
| }, |
| { |
| "epoch": 0.919, |
| "grad_norm": 17.125, |
| "learning_rate": 1e-05, |
| "loss": 2.3753, |
| "step": 9190 |
| }, |
| { |
| "epoch": 0.92, |
| "grad_norm": 17.5, |
| "learning_rate": 1e-05, |
| "loss": 2.3415, |
| "step": 9200 |
| }, |
| { |
| "epoch": 0.921, |
| "grad_norm": 15.3125, |
| "learning_rate": 1e-05, |
| "loss": 2.3536, |
| "step": 9210 |
| }, |
| { |
| "epoch": 0.922, |
| "grad_norm": 20.0, |
| "learning_rate": 1e-05, |
| "loss": 2.6304, |
| "step": 9220 |
| }, |
| { |
| "epoch": 0.923, |
| "grad_norm": 16.75, |
| "learning_rate": 1e-05, |
| "loss": 2.1607, |
| "step": 9230 |
| }, |
| { |
| "epoch": 0.924, |
| "grad_norm": 17.625, |
| "learning_rate": 1e-05, |
| "loss": 2.5074, |
| "step": 9240 |
| }, |
| { |
| "epoch": 0.925, |
| "grad_norm": 17.75, |
| "learning_rate": 1e-05, |
| "loss": 2.3814, |
| "step": 9250 |
| }, |
| { |
| "epoch": 0.926, |
| "grad_norm": 17.625, |
| "learning_rate": 1e-05, |
| "loss": 2.4138, |
| "step": 9260 |
| }, |
| { |
| "epoch": 0.927, |
| "grad_norm": 21.5, |
| "learning_rate": 1e-05, |
| "loss": 2.3224, |
| "step": 9270 |
| }, |
| { |
| "epoch": 0.928, |
| "grad_norm": 19.75, |
| "learning_rate": 1e-05, |
| "loss": 2.351, |
| "step": 9280 |
| }, |
| { |
| "epoch": 0.929, |
| "grad_norm": 16.75, |
| "learning_rate": 1e-05, |
| "loss": 2.1169, |
| "step": 9290 |
| }, |
| { |
| "epoch": 0.93, |
| "grad_norm": 17.25, |
| "learning_rate": 1e-05, |
| "loss": 2.2892, |
| "step": 9300 |
| }, |
| { |
| "epoch": 0.931, |
| "grad_norm": 15.5, |
| "learning_rate": 1e-05, |
| "loss": 2.0857, |
| "step": 9310 |
| }, |
| { |
| "epoch": 0.932, |
| "grad_norm": 18.5, |
| "learning_rate": 1e-05, |
| "loss": 2.4133, |
| "step": 9320 |
| }, |
| { |
| "epoch": 0.933, |
| "grad_norm": 20.25, |
| "learning_rate": 1e-05, |
| "loss": 2.671, |
| "step": 9330 |
| }, |
| { |
| "epoch": 0.934, |
| "grad_norm": 17.625, |
| "learning_rate": 1e-05, |
| "loss": 2.3847, |
| "step": 9340 |
| }, |
| { |
| "epoch": 0.935, |
| "grad_norm": 18.375, |
| "learning_rate": 1e-05, |
| "loss": 2.4758, |
| "step": 9350 |
| }, |
| { |
| "epoch": 0.936, |
| "grad_norm": 21.375, |
| "learning_rate": 1e-05, |
| "loss": 2.6941, |
| "step": 9360 |
| }, |
| { |
| "epoch": 0.937, |
| "grad_norm": 19.75, |
| "learning_rate": 1e-05, |
| "loss": 2.3536, |
| "step": 9370 |
| }, |
| { |
| "epoch": 0.938, |
| "grad_norm": 16.5, |
| "learning_rate": 1e-05, |
| "loss": 2.5701, |
| "step": 9380 |
| }, |
| { |
| "epoch": 0.939, |
| "grad_norm": 20.0, |
| "learning_rate": 1e-05, |
| "loss": 2.5509, |
| "step": 9390 |
| }, |
| { |
| "epoch": 0.94, |
| "grad_norm": 17.75, |
| "learning_rate": 1e-05, |
| "loss": 2.4527, |
| "step": 9400 |
| }, |
| { |
| "epoch": 0.941, |
| "grad_norm": 19.25, |
| "learning_rate": 1e-05, |
| "loss": 2.5769, |
| "step": 9410 |
| }, |
| { |
| "epoch": 0.942, |
| "grad_norm": 15.5, |
| "learning_rate": 1e-05, |
| "loss": 2.6381, |
| "step": 9420 |
| }, |
| { |
| "epoch": 0.943, |
| "grad_norm": 18.75, |
| "learning_rate": 1e-05, |
| "loss": 2.4201, |
| "step": 9430 |
| }, |
| { |
| "epoch": 0.944, |
| "grad_norm": 15.0625, |
| "learning_rate": 1e-05, |
| "loss": 2.3302, |
| "step": 9440 |
| }, |
| { |
| "epoch": 0.945, |
| "grad_norm": 19.875, |
| "learning_rate": 1e-05, |
| "loss": 2.3352, |
| "step": 9450 |
| }, |
| { |
| "epoch": 0.946, |
| "grad_norm": 19.375, |
| "learning_rate": 1e-05, |
| "loss": 2.5014, |
| "step": 9460 |
| }, |
| { |
| "epoch": 0.947, |
| "grad_norm": 21.625, |
| "learning_rate": 1e-05, |
| "loss": 2.5435, |
| "step": 9470 |
| }, |
| { |
| "epoch": 0.948, |
| "grad_norm": 18.125, |
| "learning_rate": 1e-05, |
| "loss": 2.3672, |
| "step": 9480 |
| }, |
| { |
| "epoch": 0.949, |
| "grad_norm": 15.9375, |
| "learning_rate": 1e-05, |
| "loss": 2.4762, |
| "step": 9490 |
| }, |
| { |
| "epoch": 0.95, |
| "grad_norm": 19.375, |
| "learning_rate": 1e-05, |
| "loss": 2.3456, |
| "step": 9500 |
| }, |
| { |
| "epoch": 0.951, |
| "grad_norm": 14.9375, |
| "learning_rate": 1e-05, |
| "loss": 2.2706, |
| "step": 9510 |
| }, |
| { |
| "epoch": 0.952, |
| "grad_norm": 14.4375, |
| "learning_rate": 1e-05, |
| "loss": 2.2798, |
| "step": 9520 |
| }, |
| { |
| "epoch": 0.953, |
| "grad_norm": 15.6875, |
| "learning_rate": 1e-05, |
| "loss": 2.3792, |
| "step": 9530 |
| }, |
| { |
| "epoch": 0.954, |
| "grad_norm": 21.25, |
| "learning_rate": 1e-05, |
| "loss": 2.4263, |
| "step": 9540 |
| }, |
| { |
| "epoch": 0.955, |
| "grad_norm": 21.25, |
| "learning_rate": 1e-05, |
| "loss": 2.4261, |
| "step": 9550 |
| }, |
| { |
| "epoch": 0.956, |
| "grad_norm": 15.9375, |
| "learning_rate": 1e-05, |
| "loss": 2.2741, |
| "step": 9560 |
| }, |
| { |
| "epoch": 0.957, |
| "grad_norm": 17.875, |
| "learning_rate": 1e-05, |
| "loss": 2.5487, |
| "step": 9570 |
| }, |
| { |
| "epoch": 0.958, |
| "grad_norm": 18.125, |
| "learning_rate": 1e-05, |
| "loss": 2.5523, |
| "step": 9580 |
| }, |
| { |
| "epoch": 0.959, |
| "grad_norm": 16.375, |
| "learning_rate": 1e-05, |
| "loss": 2.3358, |
| "step": 9590 |
| }, |
| { |
| "epoch": 0.96, |
| "grad_norm": 19.75, |
| "learning_rate": 1e-05, |
| "loss": 2.5275, |
| "step": 9600 |
| }, |
| { |
| "epoch": 0.961, |
| "grad_norm": 22.25, |
| "learning_rate": 1e-05, |
| "loss": 2.7839, |
| "step": 9610 |
| }, |
| { |
| "epoch": 0.962, |
| "grad_norm": 19.5, |
| "learning_rate": 1e-05, |
| "loss": 2.3682, |
| "step": 9620 |
| }, |
| { |
| "epoch": 0.963, |
| "grad_norm": 18.625, |
| "learning_rate": 1e-05, |
| "loss": 2.674, |
| "step": 9630 |
| }, |
| { |
| "epoch": 0.964, |
| "grad_norm": 21.375, |
| "learning_rate": 1e-05, |
| "loss": 2.777, |
| "step": 9640 |
| }, |
| { |
| "epoch": 0.965, |
| "grad_norm": 21.0, |
| "learning_rate": 1e-05, |
| "loss": 2.7429, |
| "step": 9650 |
| }, |
| { |
| "epoch": 0.966, |
| "grad_norm": 21.5, |
| "learning_rate": 1e-05, |
| "loss": 2.5318, |
| "step": 9660 |
| }, |
| { |
| "epoch": 0.967, |
| "grad_norm": 14.9375, |
| "learning_rate": 1e-05, |
| "loss": 2.2693, |
| "step": 9670 |
| }, |
| { |
| "epoch": 0.968, |
| "grad_norm": 17.5, |
| "learning_rate": 1e-05, |
| "loss": 2.283, |
| "step": 9680 |
| }, |
| { |
| "epoch": 0.969, |
| "grad_norm": 18.0, |
| "learning_rate": 1e-05, |
| "loss": 2.4526, |
| "step": 9690 |
| }, |
| { |
| "epoch": 0.97, |
| "grad_norm": 18.375, |
| "learning_rate": 1e-05, |
| "loss": 2.5566, |
| "step": 9700 |
| }, |
| { |
| "epoch": 0.971, |
| "grad_norm": 20.75, |
| "learning_rate": 1e-05, |
| "loss": 2.6325, |
| "step": 9710 |
| }, |
| { |
| "epoch": 0.972, |
| "grad_norm": 17.875, |
| "learning_rate": 1e-05, |
| "loss": 2.3837, |
| "step": 9720 |
| }, |
| { |
| "epoch": 0.973, |
| "grad_norm": 18.875, |
| "learning_rate": 1e-05, |
| "loss": 2.3143, |
| "step": 9730 |
| }, |
| { |
| "epoch": 0.974, |
| "grad_norm": 23.0, |
| "learning_rate": 1e-05, |
| "loss": 2.4566, |
| "step": 9740 |
| }, |
| { |
| "epoch": 0.975, |
| "grad_norm": 19.25, |
| "learning_rate": 1e-05, |
| "loss": 2.3766, |
| "step": 9750 |
| }, |
| { |
| "epoch": 0.976, |
| "grad_norm": 16.5, |
| "learning_rate": 1e-05, |
| "loss": 2.4397, |
| "step": 9760 |
| }, |
| { |
| "epoch": 0.977, |
| "grad_norm": 21.75, |
| "learning_rate": 1e-05, |
| "loss": 2.398, |
| "step": 9770 |
| }, |
| { |
| "epoch": 0.978, |
| "grad_norm": 16.0, |
| "learning_rate": 1e-05, |
| "loss": 2.5654, |
| "step": 9780 |
| }, |
| { |
| "epoch": 0.979, |
| "grad_norm": 21.875, |
| "learning_rate": 1e-05, |
| "loss": 2.2103, |
| "step": 9790 |
| }, |
| { |
| "epoch": 0.98, |
| "grad_norm": 18.25, |
| "learning_rate": 1e-05, |
| "loss": 2.7017, |
| "step": 9800 |
| }, |
| { |
| "epoch": 0.981, |
| "grad_norm": 20.375, |
| "learning_rate": 1e-05, |
| "loss": 2.2785, |
| "step": 9810 |
| }, |
| { |
| "epoch": 0.982, |
| "grad_norm": 19.875, |
| "learning_rate": 1e-05, |
| "loss": 2.2868, |
| "step": 9820 |
| }, |
| { |
| "epoch": 0.983, |
| "grad_norm": 22.25, |
| "learning_rate": 1e-05, |
| "loss": 2.4951, |
| "step": 9830 |
| }, |
| { |
| "epoch": 0.984, |
| "grad_norm": 16.75, |
| "learning_rate": 1e-05, |
| "loss": 2.4665, |
| "step": 9840 |
| }, |
| { |
| "epoch": 0.985, |
| "grad_norm": 17.25, |
| "learning_rate": 1e-05, |
| "loss": 2.3834, |
| "step": 9850 |
| }, |
| { |
| "epoch": 0.986, |
| "grad_norm": 17.25, |
| "learning_rate": 1e-05, |
| "loss": 2.3291, |
| "step": 9860 |
| }, |
| { |
| "epoch": 0.987, |
| "grad_norm": 14.25, |
| "learning_rate": 1e-05, |
| "loss": 2.3071, |
| "step": 9870 |
| }, |
| { |
| "epoch": 0.988, |
| "grad_norm": 16.625, |
| "learning_rate": 1e-05, |
| "loss": 2.327, |
| "step": 9880 |
| }, |
| { |
| "epoch": 0.989, |
| "grad_norm": 17.0, |
| "learning_rate": 1e-05, |
| "loss": 2.2625, |
| "step": 9890 |
| }, |
| { |
| "epoch": 0.99, |
| "grad_norm": 19.375, |
| "learning_rate": 1e-05, |
| "loss": 2.4562, |
| "step": 9900 |
| }, |
| { |
| "epoch": 0.991, |
| "grad_norm": 20.875, |
| "learning_rate": 1e-05, |
| "loss": 2.384, |
| "step": 9910 |
| }, |
| { |
| "epoch": 0.992, |
| "grad_norm": 18.75, |
| "learning_rate": 1e-05, |
| "loss": 2.517, |
| "step": 9920 |
| }, |
| { |
| "epoch": 0.993, |
| "grad_norm": 21.5, |
| "learning_rate": 1e-05, |
| "loss": 2.738, |
| "step": 9930 |
| }, |
| { |
| "epoch": 0.994, |
| "grad_norm": 19.625, |
| "learning_rate": 1e-05, |
| "loss": 2.4037, |
| "step": 9940 |
| }, |
| { |
| "epoch": 0.995, |
| "grad_norm": 19.375, |
| "learning_rate": 1e-05, |
| "loss": 2.1027, |
| "step": 9950 |
| }, |
| { |
| "epoch": 0.996, |
| "grad_norm": 17.5, |
| "learning_rate": 1e-05, |
| "loss": 2.3745, |
| "step": 9960 |
| }, |
| { |
| "epoch": 0.997, |
| "grad_norm": 18.625, |
| "learning_rate": 1e-05, |
| "loss": 2.1476, |
| "step": 9970 |
| }, |
| { |
| "epoch": 0.998, |
| "grad_norm": 26.25, |
| "learning_rate": 1e-05, |
| "loss": 2.4481, |
| "step": 9980 |
| }, |
| { |
| "epoch": 0.999, |
| "grad_norm": 19.375, |
| "learning_rate": 1e-05, |
| "loss": 2.6352, |
| "step": 9990 |
| }, |
| { |
| "epoch": 1.0, |
| "grad_norm": 20.125, |
| "learning_rate": 1e-05, |
| "loss": 2.671, |
| "step": 10000 |
| } |
| ], |
| "logging_steps": 10, |
| "max_steps": 10000, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 1, |
| "save_steps": 1000, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": true, |
| "should_training_stop": true |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 0.0, |
| "train_batch_size": 1, |
| "trial_name": null, |
| "trial_params": null |
| } |
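A note on consuming this file: the structure above is the checkpoint state the Hugging Face `Trainer` writes as `trainer_state.json` (that filename and the plotting choice below are assumptions; the keys `log_history`, `step`, `loss`, `epoch`, and `global_step` come directly from the file itself). A minimal sketch for loading the log and plotting the training-loss curve:

```python
import json

import matplotlib.pyplot as plt

# Path is an assumption: the HF Trainer saves this file inside each
# checkpoint directory (e.g. checkpoint-10000/trainer_state.json).
with open("trainer_state.json") as f:
    state = json.load(f)

# Keep only entries that logged a training loss; some trainer states
# also interleave eval records that lack the "loss" key.
train_logs = [e for e in state["log_history"] if "loss" in e]
steps = [e["step"] for e in train_logs]
losses = [e["loss"] for e in train_logs]

plt.plot(steps, losses)
plt.xlabel("step")
plt.ylabel("training loss")
plt.title(f"epoch {state['epoch']}, {state['global_step']} steps")
plt.show()
```

For this particular run (logging every 10 steps over 10,000 steps at a constant 1e-05 learning rate), the curve should show the loss falling from roughly 5.4 at step 10 to the mid-2s by the end of the epoch.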