|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 0.9998688524590164, |
|
"eval_steps": 500, |
|
"global_step": 1906, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 5.000000000000001e-07, |
|
"loss": 1.4402, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 1.0000000000000002e-06, |
|
"loss": 1.2974, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 1.5e-06, |
|
"loss": 1.3829, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 2.0000000000000003e-06, |
|
"loss": 1.2123, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 2.5e-06, |
|
"loss": 1.2531, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3e-06, |
|
"loss": 1.2363, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.5000000000000004e-06, |
|
"loss": 1.3185, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.000000000000001e-06, |
|
"loss": 1.4033, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.5e-06, |
|
"loss": 1.3155, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 5e-06, |
|
"loss": 1.3133, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 5.500000000000001e-06, |
|
"loss": 1.1435, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 6e-06, |
|
"loss": 1.3102, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 6.5000000000000004e-06, |
|
"loss": 1.3607, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 7.000000000000001e-06, |
|
"loss": 1.3813, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 7.5e-06, |
|
"loss": 1.0608, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 8.000000000000001e-06, |
|
"loss": 1.231, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 8.500000000000002e-06, |
|
"loss": 1.2295, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 9e-06, |
|
"loss": 1.3969, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 9.5e-06, |
|
"loss": 1.3138, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 1e-05, |
|
"loss": 1.3306, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 1.05e-05, |
|
"loss": 1.3093, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 1.1000000000000001e-05, |
|
"loss": 1.3343, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 1.1500000000000002e-05, |
|
"loss": 1.2608, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 1.2e-05, |
|
"loss": 1.2221, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 1.25e-05, |
|
"loss": 1.3127, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 1.3000000000000001e-05, |
|
"loss": 1.1553, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 1.3500000000000001e-05, |
|
"loss": 1.193, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 1.4000000000000001e-05, |
|
"loss": 1.1875, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 1.45e-05, |
|
"loss": 1.3608, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 1.5e-05, |
|
"loss": 1.3877, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 1.55e-05, |
|
"loss": 1.1709, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 1.6000000000000003e-05, |
|
"loss": 1.3518, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 1.65e-05, |
|
"loss": 1.0326, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 1.7000000000000003e-05, |
|
"loss": 1.2824, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 1.75e-05, |
|
"loss": 1.1297, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 1.8e-05, |
|
"loss": 1.375, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 1.85e-05, |
|
"loss": 1.3613, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 1.9e-05, |
|
"loss": 1.2704, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 1.9500000000000003e-05, |
|
"loss": 1.2878, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 2e-05, |
|
"loss": 1.2387, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 2.05e-05, |
|
"loss": 1.2736, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 2.1e-05, |
|
"loss": 1.0853, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 2.15e-05, |
|
"loss": 1.3032, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 2.2000000000000003e-05, |
|
"loss": 1.4046, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 2.25e-05, |
|
"loss": 1.2216, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 2.3000000000000003e-05, |
|
"loss": 1.3412, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 2.35e-05, |
|
"loss": 1.1387, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 2.4e-05, |
|
"loss": 1.2394, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 2.45e-05, |
|
"loss": 1.1871, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 2.5e-05, |
|
"loss": 1.3087, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 2.5500000000000003e-05, |
|
"loss": 1.4174, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 2.6000000000000002e-05, |
|
"loss": 1.3503, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 2.6500000000000004e-05, |
|
"loss": 1.0027, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 2.7000000000000002e-05, |
|
"loss": 1.1663, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 2.7500000000000004e-05, |
|
"loss": 1.267, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 2.8000000000000003e-05, |
|
"loss": 1.4091, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 2.8499999999999998e-05, |
|
"loss": 1.1291, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 2.9e-05, |
|
"loss": 1.1999, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 2.95e-05, |
|
"loss": 1.4182, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 3e-05, |
|
"loss": 1.2489, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 3.05e-05, |
|
"loss": 1.3163, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 3.1e-05, |
|
"loss": 1.4771, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 3.15e-05, |
|
"loss": 1.3095, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 3.2000000000000005e-05, |
|
"loss": 1.342, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 3.2500000000000004e-05, |
|
"loss": 1.2861, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 3.3e-05, |
|
"loss": 1.3194, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 3.35e-05, |
|
"loss": 1.453, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 3.4000000000000007e-05, |
|
"loss": 1.3028, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 3.45e-05, |
|
"loss": 1.2935, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 3.5e-05, |
|
"loss": 1.1916, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 3.55e-05, |
|
"loss": 1.0879, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 3.6e-05, |
|
"loss": 1.1877, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 3.65e-05, |
|
"loss": 1.4162, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 3.7e-05, |
|
"loss": 1.2123, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 3.7500000000000003e-05, |
|
"loss": 1.0477, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 3.8e-05, |
|
"loss": 1.2846, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 3.85e-05, |
|
"loss": 1.3299, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 3.9000000000000006e-05, |
|
"loss": 1.1059, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 3.9500000000000005e-05, |
|
"loss": 1.2354, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 4e-05, |
|
"loss": 1.212, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 4.05e-05, |
|
"loss": 1.3152, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 4.1e-05, |
|
"loss": 1.3151, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 4.15e-05, |
|
"loss": 1.1023, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 4.2e-05, |
|
"loss": 0.955, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 4.25e-05, |
|
"loss": 1.3137, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 4.3e-05, |
|
"loss": 1.1734, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 4.35e-05, |
|
"loss": 1.5858, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 4.4000000000000006e-05, |
|
"loss": 1.3958, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 4.4500000000000004e-05, |
|
"loss": 1.2501, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 4.5e-05, |
|
"loss": 0.9868, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 4.55e-05, |
|
"loss": 1.4658, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 4.600000000000001e-05, |
|
"loss": 1.2563, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 4.6500000000000005e-05, |
|
"loss": 1.4123, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 4.7e-05, |
|
"loss": 1.2969, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 4.75e-05, |
|
"loss": 1.3332, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 4.8e-05, |
|
"loss": 1.2221, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 4.85e-05, |
|
"loss": 0.9418, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 4.9e-05, |
|
"loss": 1.2269, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 4.9500000000000004e-05, |
|
"loss": 1.2562, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 5e-05, |
|
"loss": 1.2796, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 5.05e-05, |
|
"loss": 1.2733, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 5.1000000000000006e-05, |
|
"loss": 1.1599, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 5.1500000000000005e-05, |
|
"loss": 1.4692, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 5.2000000000000004e-05, |
|
"loss": 1.2577, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 5.25e-05, |
|
"loss": 1.1689, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 5.300000000000001e-05, |
|
"loss": 1.2731, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 5.3500000000000006e-05, |
|
"loss": 1.2086, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 5.4000000000000005e-05, |
|
"loss": 1.2367, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 5.45e-05, |
|
"loss": 1.2464, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 5.500000000000001e-05, |
|
"loss": 1.1808, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 5.550000000000001e-05, |
|
"loss": 1.4329, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 5.6000000000000006e-05, |
|
"loss": 1.055, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 5.65e-05, |
|
"loss": 1.2394, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 5.6999999999999996e-05, |
|
"loss": 1.2869, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 5.7499999999999995e-05, |
|
"loss": 1.4027, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 5.8e-05, |
|
"loss": 1.4062, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 5.85e-05, |
|
"loss": 1.4758, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 5.9e-05, |
|
"loss": 1.3745, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 5.95e-05, |
|
"loss": 1.0941, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 6e-05, |
|
"loss": 1.3471, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 6.05e-05, |
|
"loss": 1.4539, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 6.1e-05, |
|
"loss": 1.2483, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 6.15e-05, |
|
"loss": 1.288, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 6.2e-05, |
|
"loss": 1.269, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 6.25e-05, |
|
"loss": 1.3949, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 6.3e-05, |
|
"loss": 1.1431, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 6.35e-05, |
|
"loss": 1.2321, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 6.400000000000001e-05, |
|
"loss": 1.3515, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 6.450000000000001e-05, |
|
"loss": 1.1677, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 6.500000000000001e-05, |
|
"loss": 1.1339, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 6.55e-05, |
|
"loss": 1.1899, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 6.6e-05, |
|
"loss": 1.224, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 6.65e-05, |
|
"loss": 1.2669, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 6.7e-05, |
|
"loss": 1.294, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 6.750000000000001e-05, |
|
"loss": 1.4892, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 6.800000000000001e-05, |
|
"loss": 1.3983, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 6.850000000000001e-05, |
|
"loss": 1.1145, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 6.9e-05, |
|
"loss": 1.3734, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 6.95e-05, |
|
"loss": 1.3397, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 7e-05, |
|
"loss": 1.2817, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 7.05e-05, |
|
"loss": 1.4555, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 7.1e-05, |
|
"loss": 1.0274, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 7.15e-05, |
|
"loss": 1.3816, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 7.2e-05, |
|
"loss": 1.2113, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 7.25e-05, |
|
"loss": 1.4015, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 7.3e-05, |
|
"loss": 1.4337, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 7.35e-05, |
|
"loss": 1.0526, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 7.4e-05, |
|
"loss": 1.512, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 7.450000000000001e-05, |
|
"loss": 1.2425, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 7.500000000000001e-05, |
|
"loss": 1.2719, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 7.55e-05, |
|
"loss": 1.3005, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 7.6e-05, |
|
"loss": 1.3609, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 7.65e-05, |
|
"loss": 1.2509, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 7.7e-05, |
|
"loss": 1.1672, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 7.75e-05, |
|
"loss": 1.3381, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 7.800000000000001e-05, |
|
"loss": 1.3018, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 7.850000000000001e-05, |
|
"loss": 1.4406, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 7.900000000000001e-05, |
|
"loss": 1.364, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 7.950000000000001e-05, |
|
"loss": 1.228, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 8e-05, |
|
"loss": 1.0226, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 8.05e-05, |
|
"loss": 0.8691, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 8.1e-05, |
|
"loss": 1.357, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 8.15e-05, |
|
"loss": 1.2473, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 8.2e-05, |
|
"loss": 1.3198, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 8.25e-05, |
|
"loss": 1.0344, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 8.3e-05, |
|
"loss": 1.158, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 8.35e-05, |
|
"loss": 1.1938, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 8.4e-05, |
|
"loss": 1.5798, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 8.450000000000001e-05, |
|
"loss": 1.2496, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 8.5e-05, |
|
"loss": 1.1503, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 8.55e-05, |
|
"loss": 1.1272, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 8.6e-05, |
|
"loss": 1.1914, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 8.65e-05, |
|
"loss": 1.0894, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 8.7e-05, |
|
"loss": 1.2987, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 8.75e-05, |
|
"loss": 1.1007, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 8.800000000000001e-05, |
|
"loss": 1.3069, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 8.850000000000001e-05, |
|
"loss": 1.2621, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 8.900000000000001e-05, |
|
"loss": 1.2495, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 8.950000000000001e-05, |
|
"loss": 1.2971, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 9e-05, |
|
"loss": 1.4206, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 9.05e-05, |
|
"loss": 1.1317, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 9.1e-05, |
|
"loss": 1.3848, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 9.15e-05, |
|
"loss": 1.2269, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 9.200000000000001e-05, |
|
"loss": 1.0285, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 9.250000000000001e-05, |
|
"loss": 1.1156, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 9.300000000000001e-05, |
|
"loss": 1.1655, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 9.350000000000001e-05, |
|
"loss": 1.3893, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 9.4e-05, |
|
"loss": 1.4152, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 9.449999999999999e-05, |
|
"loss": 1.1634, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 9.5e-05, |
|
"loss": 1.2566, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 9.55e-05, |
|
"loss": 1.1817, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 9.6e-05, |
|
"loss": 1.1584, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 9.65e-05, |
|
"loss": 1.3525, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 9.7e-05, |
|
"loss": 1.2545, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 9.75e-05, |
|
"loss": 1.1936, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 9.8e-05, |
|
"loss": 1.1608, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 9.850000000000001e-05, |
|
"loss": 1.3834, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 9.900000000000001e-05, |
|
"loss": 1.0125, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 9.95e-05, |
|
"loss": 1.4028, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 0.0001, |
|
"loss": 1.3749, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 9.999991522231024e-05, |
|
"loss": 1.2947, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 9.999966088952841e-05, |
|
"loss": 1.0832, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 9.999923700251703e-05, |
|
"loss": 1.3497, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 9.99986435627135e-05, |
|
"loss": 1.1604, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 9.999788057213027e-05, |
|
"loss": 1.1839, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 9.999694803335468e-05, |
|
"loss": 1.251, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 9.999584594954913e-05, |
|
"loss": 1.1404, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 9.999457432445088e-05, |
|
"loss": 1.5109, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 9.999313316237211e-05, |
|
"loss": 1.2562, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 9.999152246820001e-05, |
|
"loss": 1.0774, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 9.998974224739658e-05, |
|
"loss": 1.1858, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 9.998779250599876e-05, |
|
"loss": 1.3693, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 9.998567325061834e-05, |
|
"loss": 1.3105, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 9.998338448844192e-05, |
|
"loss": 1.183, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 9.998092622723096e-05, |
|
"loss": 1.2715, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 9.997829847532166e-05, |
|
"loss": 1.4714, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 9.997550124162503e-05, |
|
"loss": 1.2119, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 9.99725345356268e-05, |
|
"loss": 1.2402, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 9.996939836738737e-05, |
|
"loss": 1.3608, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 9.996609274754182e-05, |
|
"loss": 1.4216, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 9.99626176872999e-05, |
|
"loss": 1.1339, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 9.995897319844587e-05, |
|
"loss": 0.7566, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 9.99551592933386e-05, |
|
"loss": 1.1302, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 9.995117598491145e-05, |
|
"loss": 1.2968, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 9.994702328667224e-05, |
|
"loss": 1.0836, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 9.994270121270326e-05, |
|
"loss": 1.4386, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 9.993820977766108e-05, |
|
"loss": 1.133, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 9.993354899677665e-05, |
|
"loss": 0.8334, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 9.992871888585518e-05, |
|
"loss": 1.2115, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 9.99237194612761e-05, |
|
"loss": 1.355, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 9.991855073999299e-05, |
|
"loss": 1.1176, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 9.991321273953355e-05, |
|
"loss": 1.2793, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 9.990770547799952e-05, |
|
"loss": 1.363, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 9.99020289740666e-05, |
|
"loss": 1.0069, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 9.989618324698445e-05, |
|
"loss": 1.3302, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 9.989016831657652e-05, |
|
"loss": 1.1625, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 9.98839842032401e-05, |
|
"loss": 1.273, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 9.98776309279462e-05, |
|
"loss": 1.1127, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 9.987110851223947e-05, |
|
"loss": 1.2729, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 9.986441697823808e-05, |
|
"loss": 1.1195, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 9.985755634863378e-05, |
|
"loss": 1.2677, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 9.985052664669167e-05, |
|
"loss": 1.259, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 9.984332789625026e-05, |
|
"loss": 1.3361, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 9.983596012172127e-05, |
|
"loss": 1.1438, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 9.982842334808964e-05, |
|
"loss": 1.0646, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 9.982071760091333e-05, |
|
"loss": 1.3861, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 9.98128429063234e-05, |
|
"loss": 1.4703, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 9.980479929102377e-05, |
|
"loss": 1.2234, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 9.97965867822912e-05, |
|
"loss": 1.2941, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 9.978820540797522e-05, |
|
"loss": 0.9393, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 9.977965519649792e-05, |
|
"loss": 1.3673, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 9.977093617685404e-05, |
|
"loss": 1.083, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 9.976204837861069e-05, |
|
"loss": 1.3907, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 9.975299183190734e-05, |
|
"loss": 1.1981, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 9.974376656745575e-05, |
|
"loss": 1.154, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 9.973437261653973e-05, |
|
"loss": 1.1126, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 9.972481001101522e-05, |
|
"loss": 1.3774, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 9.971507878331004e-05, |
|
"loss": 1.3103, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 9.970517896642382e-05, |
|
"loss": 1.0752, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 9.96951105939279e-05, |
|
"loss": 1.3074, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 9.968487369996522e-05, |
|
"loss": 1.0814, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 9.967446831925021e-05, |
|
"loss": 1.183, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 9.966389448706859e-05, |
|
"loss": 1.2898, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 9.965315223927741e-05, |
|
"loss": 1.0228, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 9.964224161230475e-05, |
|
"loss": 1.2523, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 9.963116264314974e-05, |
|
"loss": 1.1339, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 9.961991536938235e-05, |
|
"loss": 1.13, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 9.960849982914331e-05, |
|
"loss": 1.0781, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 9.959691606114393e-05, |
|
"loss": 1.2281, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 9.9585164104666e-05, |
|
"loss": 1.1895, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 9.957324399956172e-05, |
|
"loss": 1.3988, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 9.956115578625338e-05, |
|
"loss": 1.0442, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 9.954889950573348e-05, |
|
"loss": 1.0827, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 9.953647519956433e-05, |
|
"loss": 0.9302, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 9.95238829098781e-05, |
|
"loss": 1.3566, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 9.951112267937663e-05, |
|
"loss": 1.3031, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 9.94981945513312e-05, |
|
"loss": 1.0653, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 9.948509856958252e-05, |
|
"loss": 0.9141, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 9.947183477854042e-05, |
|
"loss": 1.1823, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 9.945840322318391e-05, |
|
"loss": 1.1461, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 9.944480394906078e-05, |
|
"loss": 1.303, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 9.943103700228766e-05, |
|
"loss": 1.065, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 9.941710242954976e-05, |
|
"loss": 1.2391, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 9.940300027810067e-05, |
|
"loss": 1.1476, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 9.938873059576235e-05, |
|
"loss": 1.427, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 9.93742934309248e-05, |
|
"loss": 1.1731, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 9.935968883254601e-05, |
|
"loss": 1.2476, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 9.934491685015174e-05, |
|
"loss": 1.3016, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 9.932997753383537e-05, |
|
"loss": 1.2605, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 9.931487093425774e-05, |
|
"loss": 1.2571, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 9.929959710264695e-05, |
|
"loss": 1.1089, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 9.92841560907982e-05, |
|
"loss": 1.2059, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 9.926854795107363e-05, |
|
"loss": 1.1879, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 9.92527727364021e-05, |
|
"loss": 1.3873, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 9.92368305002791e-05, |
|
"loss": 1.3071, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 9.922072129676644e-05, |
|
"loss": 1.011, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 9.920444518049217e-05, |
|
"loss": 1.2344, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 9.918800220665035e-05, |
|
"loss": 1.2956, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 9.917139243100087e-05, |
|
"loss": 1.1677, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 9.915461590986926e-05, |
|
"loss": 0.9923, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 9.91376727001465e-05, |
|
"loss": 1.1869, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 9.912056285928891e-05, |
|
"loss": 1.1501, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 9.91032864453177e-05, |
|
"loss": 1.4009, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 9.908584351681911e-05, |
|
"loss": 1.307, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 9.906823413294397e-05, |
|
"loss": 1.3197, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 9.905045835340759e-05, |
|
"loss": 1.1601, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 9.903251623848957e-05, |
|
"loss": 1.0623, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 9.901440784903353e-05, |
|
"loss": 1.2217, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 9.899613324644698e-05, |
|
"loss": 1.2233, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 9.897769249270105e-05, |
|
"loss": 1.2024, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 9.895908565033036e-05, |
|
"loss": 1.1011, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 9.894031278243266e-05, |
|
"loss": 1.1673, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 9.89213739526688e-05, |
|
"loss": 1.2906, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 9.890226922526237e-05, |
|
"loss": 1.3677, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 9.888299866499957e-05, |
|
"loss": 1.1037, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 9.886356233722894e-05, |
|
"loss": 1.234, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 9.884396030786115e-05, |
|
"loss": 1.0158, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 9.88241926433688e-05, |
|
"loss": 1.0984, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 9.880425941078617e-05, |
|
"loss": 1.2505, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 9.878416067770898e-05, |
|
"loss": 1.3511, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 9.87638965122942e-05, |
|
"loss": 1.1775, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 9.874346698325983e-05, |
|
"loss": 1.15, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 9.872287215988456e-05, |
|
"loss": 1.1318, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 9.870211211200766e-05, |
|
"loss": 1.1427, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 9.86811869100287e-05, |
|
"loss": 1.1519, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 9.866009662490727e-05, |
|
"loss": 1.1259, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 9.863884132816282e-05, |
|
"loss": 1.353, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 9.861742109187432e-05, |
|
"loss": 1.3784, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 9.859583598868012e-05, |
|
"loss": 1.1854, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 9.857408609177762e-05, |
|
"loss": 1.2821, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 9.855217147492309e-05, |
|
"loss": 1.3438, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 9.853009221243127e-05, |
|
"loss": 1.1585, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 9.850784837917542e-05, |
|
"loss": 1.2573, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 9.848544005058668e-05, |
|
"loss": 1.1598, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 9.846286730265417e-05, |
|
"loss": 1.2003, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 9.844013021192447e-05, |
|
"loss": 1.1554, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 9.841722885550152e-05, |
|
"loss": 1.3157, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 9.839416331104625e-05, |
|
"loss": 1.3834, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 9.837093365677644e-05, |
|
"loss": 1.3736, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 9.834753997146633e-05, |
|
"loss": 1.4065, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 9.832398233444644e-05, |
|
"loss": 1.1594, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 9.830026082560324e-05, |
|
"loss": 1.1042, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 9.827637552537892e-05, |
|
"loss": 1.2533, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 9.825232651477109e-05, |
|
"loss": 1.2629, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 9.822811387533255e-05, |
|
"loss": 1.3435, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 9.820373768917095e-05, |
|
"loss": 1.1654, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 9.817919803894857e-05, |
|
"loss": 1.1599, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 9.815449500788202e-05, |
|
"loss": 1.2459, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 9.812962867974191e-05, |
|
"loss": 1.1792, |
|
"step": 349 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 9.810459913885265e-05, |
|
"loss": 1.29, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 9.807940647009209e-05, |
|
"loss": 1.184, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 9.805405075889129e-05, |
|
"loss": 1.2152, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 9.802853209123421e-05, |
|
"loss": 1.1893, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 9.800285055365736e-05, |
|
"loss": 1.0028, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 9.797700623324963e-05, |
|
"loss": 1.1743, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 9.79509992176519e-05, |
|
"loss": 1.3778, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 9.79248295950567e-05, |
|
"loss": 1.2219, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 9.789849745420812e-05, |
|
"loss": 1.2271, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 9.787200288440119e-05, |
|
"loss": 1.2562, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 9.784534597548193e-05, |
|
"loss": 1.3299, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 9.781852681784673e-05, |
|
"loss": 1.0952, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 9.779154550244228e-05, |
|
"loss": 1.2017, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 9.776440212076508e-05, |
|
"loss": 1.3052, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 9.773709676486129e-05, |
|
"loss": 1.2865, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 9.77096295273263e-05, |
|
"loss": 1.3906, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 9.768200050130446e-05, |
|
"loss": 1.2156, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 9.765420978048878e-05, |
|
"loss": 1.3526, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 9.76262574591206e-05, |
|
"loss": 1.1161, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 9.75981436319892e-05, |
|
"loss": 1.1305, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 9.756986839443165e-05, |
|
"loss": 1.2702, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 9.754143184233229e-05, |
|
"loss": 1.2484, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 9.751283407212252e-05, |
|
"loss": 1.1719, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 9.748407518078048e-05, |
|
"loss": 1.1618, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 9.745515526583065e-05, |
|
"loss": 1.3899, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 9.74260744253436e-05, |
|
"loss": 1.2776, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 9.739683275793554e-05, |
|
"loss": 1.1928, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 9.736743036276815e-05, |
|
"loss": 1.2492, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 9.733786733954809e-05, |
|
"loss": 1.1258, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 9.730814378852678e-05, |
|
"loss": 1.2204, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 9.727825981049994e-05, |
|
"loss": 1.0736, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 9.72482155068074e-05, |
|
"loss": 1.231, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 9.72180109793326e-05, |
|
"loss": 1.3367, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 9.718764633050235e-05, |
|
"loss": 1.3104, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 9.715712166328644e-05, |
|
"loss": 1.2265, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 9.712643708119729e-05, |
|
"loss": 1.2225, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 9.709559268828963e-05, |
|
"loss": 1.3006, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 9.706458858916012e-05, |
|
"loss": 1.3773, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 9.703342488894697e-05, |
|
"loss": 1.2459, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 9.700210169332968e-05, |
|
"loss": 1.2936, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 9.697061910852856e-05, |
|
"loss": 1.1028, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 9.693897724130442e-05, |
|
"loss": 1.1513, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 9.690717619895828e-05, |
|
"loss": 1.2812, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 9.687521608933086e-05, |
|
"loss": 1.1474, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 9.684309702080233e-05, |
|
"loss": 1.1615, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 9.681081910229192e-05, |
|
"loss": 1.0054, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 9.677838244325754e-05, |
|
"loss": 1.1671, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 9.674578715369536e-05, |
|
"loss": 1.3808, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 9.671303334413952e-05, |
|
"loss": 1.3508, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 9.668012112566174e-05, |
|
"loss": 1.0374, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 9.664705060987085e-05, |
|
"loss": 1.2246, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 9.661382190891256e-05, |
|
"loss": 1.3459, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 9.658043513546897e-05, |
|
"loss": 1.237, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 9.654689040275819e-05, |
|
"loss": 1.1525, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 9.651318782453407e-05, |
|
"loss": 1.0688, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 9.647932751508562e-05, |
|
"loss": 1.0431, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 9.644530958923683e-05, |
|
"loss": 1.2472, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 9.641113416234615e-05, |
|
"loss": 1.3164, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 9.637680135030609e-05, |
|
"loss": 1.2452, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 9.634231126954296e-05, |
|
"loss": 1.1477, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 9.63076640370163e-05, |
|
"loss": 1.2717, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 9.62728597702186e-05, |
|
"loss": 1.1911, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 9.623789858717492e-05, |
|
"loss": 1.0488, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 9.620278060644232e-05, |
|
"loss": 1.0588, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 9.616750594710973e-05, |
|
"loss": 1.0693, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 9.613207472879725e-05, |
|
"loss": 1.0284, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 9.6096487071656e-05, |
|
"loss": 1.0446, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 9.60607430963675e-05, |
|
"loss": 1.0755, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 9.602484292414349e-05, |
|
"loss": 1.2591, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 9.598878667672526e-05, |
|
"loss": 1.1403, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 9.595257447638343e-05, |
|
"loss": 1.2955, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 9.59162064459175e-05, |
|
"loss": 1.0571, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 9.587968270865532e-05, |
|
"loss": 1.1257, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 9.584300338845287e-05, |
|
"loss": 1.232, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 9.580616860969364e-05, |
|
"loss": 1.3145, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 9.576917849728834e-05, |
|
"loss": 1.3523, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 9.573203317667442e-05, |
|
"loss": 1.102, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 9.569473277381565e-05, |
|
"loss": 1.1006, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 9.56572774152017e-05, |
|
"loss": 1.3029, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 9.561966722784774e-05, |
|
"loss": 1.1221, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 9.558190233929396e-05, |
|
"loss": 1.1844, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 9.554398287760515e-05, |
|
"loss": 1.0214, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 9.55059089713703e-05, |
|
"loss": 1.191, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 9.546768074970212e-05, |
|
"loss": 1.2985, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 9.54292983422366e-05, |
|
"loss": 1.032, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 9.539076187913262e-05, |
|
"loss": 1.2591, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 9.53520714910715e-05, |
|
"loss": 1.4778, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 9.531322730925649e-05, |
|
"loss": 1.0842, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 9.527422946541238e-05, |
|
"loss": 1.5779, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 9.523507809178506e-05, |
|
"loss": 1.0615, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 9.519577332114106e-05, |
|
"loss": 1.2474, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 9.51563152867671e-05, |
|
"loss": 1.0115, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 9.511670412246957e-05, |
|
"loss": 1.3013, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 9.507693996257423e-05, |
|
"loss": 1.2232, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 9.503702294192563e-05, |
|
"loss": 1.3789, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 9.499695319588664e-05, |
|
"loss": 0.9632, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 9.495673086033813e-05, |
|
"loss": 1.2701, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 9.491635607167832e-05, |
|
"loss": 0.9906, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 9.487582896682252e-05, |
|
"loss": 1.2195, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 9.483514968320243e-05, |
|
"loss": 1.3335, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 9.479431835876596e-05, |
|
"loss": 1.2713, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 9.475333513197644e-05, |
|
"loss": 1.2689, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 9.471220014181246e-05, |
|
"loss": 1.3115, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 9.467091352776719e-05, |
|
"loss": 1.3231, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 9.462947542984795e-05, |
|
"loss": 1.3568, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 9.458788598857582e-05, |
|
"loss": 1.2475, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 9.454614534498506e-05, |
|
"loss": 1.3989, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 9.450425364062268e-05, |
|
"loss": 1.0536, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 9.446221101754795e-05, |
|
"loss": 0.8546, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 9.442001761833194e-05, |
|
"loss": 1.1789, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 9.4377673586057e-05, |
|
"loss": 1.1837, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 9.433517906431631e-05, |
|
"loss": 1.2079, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 9.429253419721336e-05, |
|
"loss": 1.3602, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 9.424973912936147e-05, |
|
"loss": 1.241, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 9.420679400588334e-05, |
|
"loss": 0.9881, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 9.416369897241048e-05, |
|
"loss": 1.3354, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 9.412045417508282e-05, |
|
"loss": 1.4914, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 9.407705976054809e-05, |
|
"loss": 1.2034, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 9.403351587596141e-05, |
|
"loss": 1.3082, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 9.398982266898481e-05, |
|
"loss": 1.1587, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 9.394598028778662e-05, |
|
"loss": 1.1651, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 9.390198888104112e-05, |
|
"loss": 1.0659, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 9.385784859792788e-05, |
|
"loss": 1.1136, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 9.381355958813133e-05, |
|
"loss": 1.0624, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 9.376912200184029e-05, |
|
"loss": 1.1085, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 9.372453598974738e-05, |
|
"loss": 1.3352, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 9.367980170304857e-05, |
|
"loss": 1.2857, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 9.363491929344266e-05, |
|
"loss": 1.0944, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 9.35898889131307e-05, |
|
"loss": 1.2447, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 9.354471071481557e-05, |
|
"loss": 1.3863, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 9.349938485170139e-05, |
|
"loss": 1.217, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 9.345391147749305e-05, |
|
"loss": 1.1874, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 9.340829074639565e-05, |
|
"loss": 1.1632, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 9.3362522813114e-05, |
|
"loss": 1.3235, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 9.331660783285208e-05, |
|
"loss": 1.2446, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 9.327054596131256e-05, |
|
"loss": 1.3024, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 9.322433735469614e-05, |
|
"loss": 1.3746, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 9.317798216970122e-05, |
|
"loss": 1.1546, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 9.31314805635232e-05, |
|
"loss": 1.3817, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 9.308483269385406e-05, |
|
"loss": 1.1122, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 9.303803871888172e-05, |
|
"loss": 1.452, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 9.29910987972896e-05, |
|
"loss": 1.4635, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 9.2944013088256e-05, |
|
"loss": 1.084, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 9.289678175145364e-05, |
|
"loss": 1.1933, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 9.284940494704906e-05, |
|
"loss": 1.0819, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 9.280188283570211e-05, |
|
"loss": 1.0246, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 9.275421557856536e-05, |
|
"loss": 1.3393, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 9.270640333728365e-05, |
|
"loss": 1.4799, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 9.265844627399339e-05, |
|
"loss": 1.4177, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 9.261034455132218e-05, |
|
"loss": 1.2253, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 9.25620983323881e-05, |
|
"loss": 1.3142, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 9.251370778079929e-05, |
|
"loss": 1.0313, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 9.246517306065332e-05, |
|
"loss": 1.3404, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 9.241649433653663e-05, |
|
"loss": 1.2644, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 9.236767177352403e-05, |
|
"loss": 1.2247, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 9.231870553717807e-05, |
|
"loss": 1.38, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 9.226959579354855e-05, |
|
"loss": 1.209, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 9.222034270917185e-05, |
|
"loss": 1.2786, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 9.217094645107053e-05, |
|
"loss": 1.1664, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 9.212140718675257e-05, |
|
"loss": 1.0972, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 9.207172508421098e-05, |
|
"loss": 1.3686, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 9.20219003119231e-05, |
|
"loss": 0.9582, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 9.197193303885008e-05, |
|
"loss": 1.124, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 9.192182343443634e-05, |
|
"loss": 1.2647, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 9.187157166860894e-05, |
|
"loss": 1.2692, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 9.182117791177701e-05, |
|
"loss": 1.1562, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 9.177064233483121e-05, |
|
"loss": 1.1139, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 9.171996510914311e-05, |
|
"loss": 1.099, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 9.166914640656467e-05, |
|
"loss": 1.3154, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 9.161818639942753e-05, |
|
"loss": 1.2462, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 9.156708526054256e-05, |
|
"loss": 1.4607, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 9.151584316319926e-05, |
|
"loss": 1.3882, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 9.146446028116507e-05, |
|
"loss": 1.503, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 9.141293678868487e-05, |
|
"loss": 1.3261, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 9.136127286048038e-05, |
|
"loss": 1.2509, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 9.130946867174952e-05, |
|
"loss": 0.903, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 9.125752439816588e-05, |
|
"loss": 1.0597, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 9.120544021587808e-05, |
|
"loss": 1.086, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 9.115321630150919e-05, |
|
"loss": 1.3303, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 9.11008528321561e-05, |
|
"loss": 1.3007, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 9.104834998538898e-05, |
|
"loss": 1.228, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 9.099570793925066e-05, |
|
"loss": 1.3628, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 9.094292687225595e-05, |
|
"loss": 1.3378, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 9.089000696339112e-05, |
|
"loss": 1.2781, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 9.083694839211329e-05, |
|
"loss": 1.2291, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 9.078375133834981e-05, |
|
"loss": 1.4619, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 9.073041598249756e-05, |
|
"loss": 1.1273, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 9.067694250542251e-05, |
|
"loss": 1.2815, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 9.062333108845898e-05, |
|
"loss": 0.9991, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 9.056958191340901e-05, |
|
"loss": 1.3539, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 9.051569516254185e-05, |
|
"loss": 1.3121, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 9.046167101859331e-05, |
|
"loss": 1.3959, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 9.040750966476503e-05, |
|
"loss": 1.1305, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 9.035321128472398e-05, |
|
"loss": 1.2801, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 9.029877606260186e-05, |
|
"loss": 1.3347, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 9.024420418299434e-05, |
|
"loss": 1.1528, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 9.01894958309605e-05, |
|
"loss": 1.0547, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 9.01346511920223e-05, |
|
"loss": 1.2588, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 9.007967045216379e-05, |
|
"loss": 1.4318, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 9.002455379783057e-05, |
|
"loss": 0.9583, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 8.996930141592915e-05, |
|
"loss": 1.1918, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 8.99139134938263e-05, |
|
"loss": 1.2918, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 8.985839021934843e-05, |
|
"loss": 1.2838, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 8.980273178078094e-05, |
|
"loss": 1.1537, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 8.974693836686754e-05, |
|
"loss": 1.3874, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 8.969101016680977e-05, |
|
"loss": 1.0477, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 8.963494737026611e-05, |
|
"loss": 1.3685, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 8.957875016735159e-05, |
|
"loss": 1.1138, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 8.952241874863694e-05, |
|
"loss": 1.4457, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 8.946595330514806e-05, |
|
"loss": 1.1432, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 8.940935402836535e-05, |
|
"loss": 1.1733, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 8.935262111022306e-05, |
|
"loss": 1.3109, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 8.929575474310858e-05, |
|
"loss": 1.245, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 8.923875511986193e-05, |
|
"loss": 1.1425, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 8.918162243377493e-05, |
|
"loss": 1.1255, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 8.912435687859068e-05, |
|
"loss": 1.0995, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 8.906695864850284e-05, |
|
"loss": 1.1619, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 8.900942793815498e-05, |
|
"loss": 1.1144, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 8.895176494263992e-05, |
|
"loss": 1.2095, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 8.88939698574991e-05, |
|
"loss": 0.8601, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 8.883604287872184e-05, |
|
"loss": 1.1284, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 8.87779842027448e-05, |
|
"loss": 1.1979, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 8.871979402645116e-05, |
|
"loss": 0.9438, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 8.86614725471701e-05, |
|
"loss": 1.1268, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 8.860301996267602e-05, |
|
"loss": 1.1528, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 8.85444364711879e-05, |
|
"loss": 1.3826, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 8.848572227136868e-05, |
|
"loss": 1.1664, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 8.842687756232454e-05, |
|
"loss": 1.1893, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 8.836790254360421e-05, |
|
"loss": 0.9256, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 8.830879741519832e-05, |
|
"loss": 1.2873, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 8.824956237753872e-05, |
|
"loss": 0.9982, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 8.81901976314978e-05, |
|
"loss": 1.4097, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 8.81307033783878e-05, |
|
"loss": 1.2972, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 8.807107981996013e-05, |
|
"loss": 1.3306, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 8.80113271584047e-05, |
|
"loss": 1.0644, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 8.79514455963492e-05, |
|
"loss": 1.2069, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 8.789143533685846e-05, |
|
"loss": 1.4917, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 8.783129658343374e-05, |
|
"loss": 1.0882, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 8.7771029540012e-05, |
|
"loss": 1.1785, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 8.771063441096526e-05, |
|
"loss": 1.5195, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 8.765011140109994e-05, |
|
"loss": 1.2203, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 8.758946071565603e-05, |
|
"loss": 1.0685, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 8.752868256030658e-05, |
|
"loss": 1.0948, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 8.74677771411568e-05, |
|
"loss": 1.0642, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 8.740674466474356e-05, |
|
"loss": 1.4473, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 8.734558533803456e-05, |
|
"loss": 1.4501, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 8.728429936842762e-05, |
|
"loss": 1.1295, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 8.722288696375009e-05, |
|
"loss": 1.0889, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 8.716134833225802e-05, |
|
"loss": 1.074, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 8.709968368263553e-05, |
|
"loss": 1.2429, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 8.703789322399409e-05, |
|
"loss": 1.0163, |
|
"step": 600 |
|
}, |
|
{
"epoch": 0.32,
"learning_rate": 8.69759771658718e-05,
"loss": 1.0135,
"step": 601
},
{
"epoch": 0.32,
"learning_rate": 8.691393571823265e-05,
"loss": 0.9524,
"step": 602
},
{
"epoch": 0.32,
"learning_rate": 8.68517690914659e-05,
"loss": 1.1386,
"step": 603
},
{
"epoch": 0.32,
"learning_rate": 8.678947749638526e-05,
"loss": 1.2415,
"step": 604
},
{
"epoch": 0.32,
"learning_rate": 8.672706114422819e-05,
"loss": 1.2505,
"step": 605
},
{
"epoch": 0.32,
"learning_rate": 8.666452024665532e-05,
"loss": 1.129,
"step": 606
},
{
"epoch": 0.32,
"learning_rate": 8.66018550157495e-05,
"loss": 1.2245,
"step": 607
},
{
"epoch": 0.32,
"learning_rate": 8.653906566401533e-05,
"loss": 0.9779,
"step": 608
},
{
"epoch": 0.32,
"learning_rate": 8.647615240437821e-05,
"loss": 1.0941,
"step": 609
},
{
"epoch": 0.32,
"learning_rate": 8.64131154501838e-05,
"loss": 1.1956,
"step": 610
},
{
"epoch": 0.32,
"learning_rate": 8.634995501519718e-05,
"loss": 1.041,
"step": 611
},
{
"epoch": 0.32,
"learning_rate": 8.628667131360218e-05,
"loss": 1.3644,
"step": 612
},
{
"epoch": 0.32,
"learning_rate": 8.622326456000065e-05,
"loss": 1.1445,
"step": 613
},
{
"epoch": 0.32,
"learning_rate": 8.615973496941169e-05,
"loss": 0.8436,
"step": 614
},
{
"epoch": 0.32,
"learning_rate": 8.609608275727102e-05,
"loss": 1.3154,
"step": 615
},
{
"epoch": 0.32,
"learning_rate": 8.60323081394301e-05,
"loss": 1.1636,
"step": 616
},
{
"epoch": 0.32,
"learning_rate": 8.596841133215554e-05,
"loss": 1.3078,
"step": 617
},
{
"epoch": 0.32,
"learning_rate": 8.590439255212829e-05,
"loss": 1.3831,
"step": 618
},
{
"epoch": 0.32,
"learning_rate": 8.584025201644292e-05,
"loss": 1.306,
"step": 619
},
{
"epoch": 0.33,
"learning_rate": 8.577598994260688e-05,
"loss": 1.1494,
"step": 620
},
{
"epoch": 0.33,
"learning_rate": 8.571160654853975e-05,
"loss": 1.1917,
"step": 621
},
{
"epoch": 0.33,
"learning_rate": 8.56471020525726e-05,
"loss": 1.1931,
"step": 622
},
{
"epoch": 0.33,
"learning_rate": 8.55824766734471e-05,
"loss": 1.0079,
"step": 623
},
{
"epoch": 0.33,
"learning_rate": 8.551773063031484e-05,
"loss": 1.2898,
"step": 624
},
{
"epoch": 0.33,
"learning_rate": 8.545286414273664e-05,
"loss": 1.4111,
"step": 625
},
{
"epoch": 0.33,
"learning_rate": 8.538787743068171e-05,
"loss": 1.2392,
"step": 626
},
{
"epoch": 0.33,
"learning_rate": 8.532277071452704e-05,
"loss": 1.4933,
"step": 627
},
{
"epoch": 0.33,
"learning_rate": 8.525754421505646e-05,
"loss": 1.1419,
"step": 628
},
{
"epoch": 0.33,
"learning_rate": 8.519219815346004e-05,
"loss": 1.2934,
"step": 629
},
{
"epoch": 0.33,
"learning_rate": 8.512673275133334e-05,
"loss": 1.2549,
"step": 630
},
{
"epoch": 0.33,
"learning_rate": 8.506114823067657e-05,
"loss": 1.2358,
"step": 631
},
{
"epoch": 0.33,
"learning_rate": 8.49954448138939e-05,
"loss": 1.1946,
"step": 632
},
{
"epoch": 0.33,
"learning_rate": 8.492962272379267e-05,
"loss": 1.4094,
"step": 633
},
{
"epoch": 0.33,
"learning_rate": 8.486368218358268e-05,
"loss": 1.2118,
"step": 634
},
{
"epoch": 0.33,
"learning_rate": 8.479762341687541e-05,
"loss": 1.1866,
"step": 635
},
{
"epoch": 0.33,
"learning_rate": 8.473144664768321e-05,
"loss": 1.3067,
"step": 636
},
{
"epoch": 0.33,
"learning_rate": 8.466515210041865e-05,
"loss": 1.3868,
"step": 637
},
{
"epoch": 0.33,
"learning_rate": 8.459873999989367e-05,
"loss": 0.9928,
"step": 638
},
{
"epoch": 0.34,
"learning_rate": 8.453221057131886e-05,
"loss": 1.1484,
"step": 639
},
{
"epoch": 0.34,
"learning_rate": 8.446556404030264e-05,
"loss": 1.2613,
"step": 640
},
{
"epoch": 0.34,
"learning_rate": 8.43988006328506e-05,
"loss": 1.3083,
"step": 641
},
{
"epoch": 0.34,
"learning_rate": 8.433192057536458e-05,
"loss": 1.3283,
"step": 642
},
{
"epoch": 0.34,
"learning_rate": 8.426492409464213e-05,
"loss": 0.9239,
"step": 643
},
{
"epoch": 0.34,
"learning_rate": 8.419781141787548e-05,
"loss": 1.3879,
"step": 644
},
{
"epoch": 0.34,
"learning_rate": 8.413058277265093e-05,
"loss": 1.3417,
"step": 645
},
{
"epoch": 0.34,
"learning_rate": 8.406323838694808e-05,
"loss": 1.4364,
"step": 646
},
{
"epoch": 0.34,
"learning_rate": 8.399577848913895e-05,
"loss": 1.2996,
"step": 647
},
{
"epoch": 0.34,
"learning_rate": 8.392820330798733e-05,
"loss": 1.0691,
"step": 648
},
{
"epoch": 0.34,
"learning_rate": 8.386051307264797e-05,
"loss": 1.2127,
"step": 649
},
{
"epoch": 0.34,
"learning_rate": 8.379270801266568e-05,
"loss": 1.1981,
"step": 650
},
{
"epoch": 0.34,
"learning_rate": 8.372478835797473e-05,
"loss": 1.2008,
"step": 651
},
{
"epoch": 0.34,
"learning_rate": 8.365675433889799e-05,
"loss": 1.2538,
"step": 652
},
{
"epoch": 0.34,
"learning_rate": 8.358860618614612e-05,
"loss": 1.0027,
"step": 653
},
{
"epoch": 0.34,
"learning_rate": 8.352034413081686e-05,
"loss": 1.2904,
"step": 654
},
{
"epoch": 0.34,
"learning_rate": 8.345196840439416e-05,
"loss": 1.1954,
"step": 655
},
{
"epoch": 0.34,
"learning_rate": 8.338347923874749e-05,
"loss": 1.2785,
"step": 656
},
{
"epoch": 0.34,
"learning_rate": 8.331487686613097e-05,
"loss": 1.2965,
"step": 657
},
{
"epoch": 0.35,
"learning_rate": 8.324616151918261e-05,
"loss": 1.2346,
"step": 658
},
{
"epoch": 0.35,
"learning_rate": 8.317733343092357e-05,
"loss": 1.3271,
"step": 659
},
{
"epoch": 0.35,
"learning_rate": 8.31083928347573e-05,
"loss": 1.4484,
"step": 660
},
{
"epoch": 0.35,
"learning_rate": 8.303933996446876e-05,
"loss": 1.2238,
"step": 661
},
{
"epoch": 0.35,
"learning_rate": 8.297017505422367e-05,
"loss": 1.1706,
"step": 662
},
{
"epoch": 0.35,
"learning_rate": 8.290089833856769e-05,
"loss": 1.3222,
"step": 663
},
{
"epoch": 0.35,
"learning_rate": 8.28315100524256e-05,
"loss": 1.0899,
"step": 664
},
{
"epoch": 0.35,
"learning_rate": 8.276201043110057e-05,
"loss": 1.4266,
"step": 665
},
{
"epoch": 0.35,
"learning_rate": 8.269239971027328e-05,
"loss": 1.2182,
"step": 666
},
{
"epoch": 0.35,
"learning_rate": 8.262267812600115e-05,
"loss": 1.2767,
"step": 667
},
{
"epoch": 0.35,
"learning_rate": 8.255284591471762e-05,
"loss": 1.1922,
"step": 668
},
{
"epoch": 0.35,
"learning_rate": 8.24829033132312e-05,
"loss": 1.4014,
"step": 669
},
{
"epoch": 0.35,
"learning_rate": 8.241285055872478e-05,
"loss": 1.2795,
"step": 670
},
{
"epoch": 0.35,
"learning_rate": 8.234268788875479e-05,
"loss": 1.0033,
"step": 671
},
{
"epoch": 0.35,
"learning_rate": 8.22724155412504e-05,
"loss": 1.2398,
"step": 672
},
{
"epoch": 0.35,
"learning_rate": 8.220203375451269e-05,
"loss": 1.0794,
"step": 673
},
{
"epoch": 0.35,
"learning_rate": 8.213154276721388e-05,
"loss": 1.2707,
"step": 674
},
{
"epoch": 0.35,
"learning_rate": 8.20609428183965e-05,
"loss": 1.1187,
"step": 675
},
{
"epoch": 0.35,
"learning_rate": 8.199023414747256e-05,
"loss": 1.1923,
"step": 676
},
{
"epoch": 0.36,
"learning_rate": 8.191941699422276e-05,
"loss": 1.0582,
"step": 677
},
{
"epoch": 0.36,
"learning_rate": 8.184849159879571e-05,
"loss": 1.3767,
"step": 678
},
{
"epoch": 0.36,
"learning_rate": 8.177745820170705e-05,
"loss": 1.2931,
"step": 679
},
{
"epoch": 0.36,
"learning_rate": 8.170631704383866e-05,
"loss": 1.2684,
"step": 680
},
{
"epoch": 0.36,
"learning_rate": 8.163506836643786e-05,
"loss": 1.0971,
"step": 681
},
{
"epoch": 0.36,
"learning_rate": 8.15637124111166e-05,
"loss": 1.2454,
"step": 682
},
{
"epoch": 0.36,
"learning_rate": 8.149224941985058e-05,
"loss": 1.2759,
"step": 683
},
{
"epoch": 0.36,
"learning_rate": 8.142067963497851e-05,
"loss": 1.2909,
"step": 684
},
{
"epoch": 0.36,
"learning_rate": 8.134900329920121e-05,
"loss": 0.9601,
"step": 685
},
{
"epoch": 0.36,
"learning_rate": 8.127722065558087e-05,
"loss": 1.3069,
"step": 686
},
{
"epoch": 0.36,
"learning_rate": 8.120533194754014e-05,
"loss": 1.3113,
"step": 687
},
{
"epoch": 0.36,
"learning_rate": 8.113333741886138e-05,
"loss": 0.981,
"step": 688
},
{
"epoch": 0.36,
"learning_rate": 8.106123731368578e-05,
"loss": 1.0584,
"step": 689
},
{
"epoch": 0.36,
"learning_rate": 8.098903187651252e-05,
"loss": 1.3073,
"step": 690
},
{
"epoch": 0.36,
"learning_rate": 8.091672135219805e-05,
"loss": 1.3101,
"step": 691
},
{
"epoch": 0.36,
"learning_rate": 8.084430598595513e-05,
"loss": 1.0588,
"step": 692
},
{
"epoch": 0.36,
"learning_rate": 8.077178602335204e-05,
"loss": 1.1701,
"step": 693
},
{
"epoch": 0.36,
"learning_rate": 8.06991617103118e-05,
"loss": 1.0507,
"step": 694
},
{
"epoch": 0.36,
"learning_rate": 8.062643329311124e-05,
"loss": 1.1813,
"step": 695
},
{
"epoch": 0.37,
"learning_rate": 8.055360101838026e-05,
"loss": 1.2416,
"step": 696
},
{
"epoch": 0.37,
"learning_rate": 8.048066513310093e-05,
"loss": 1.2705,
"step": 697
},
{
"epoch": 0.37,
"learning_rate": 8.04076258846067e-05,
"loss": 1.119,
"step": 698
},
{
"epoch": 0.37,
"learning_rate": 8.033448352058154e-05,
"loss": 1.1883,
"step": 699
},
{
"epoch": 0.37,
"learning_rate": 8.026123828905903e-05,
"loss": 1.267,
"step": 700
},
{
"epoch": 0.37,
"learning_rate": 8.018789043842166e-05,
"loss": 1.1738,
"step": 701
},
{
"epoch": 0.37,
"learning_rate": 8.011444021739987e-05,
"loss": 1.2885,
"step": 702
},
{
"epoch": 0.37,
"learning_rate": 8.004088787507128e-05,
"loss": 1.1987,
"step": 703
},
{
"epoch": 0.37,
"learning_rate": 7.996723366085977e-05,
"loss": 1.2403,
"step": 704
},
{
"epoch": 0.37,
"learning_rate": 7.989347782453473e-05,
"loss": 0.9733,
"step": 705
},
{
"epoch": 0.37,
"learning_rate": 7.981962061621012e-05,
"loss": 1.2572,
"step": 706
},
{
"epoch": 0.37,
"learning_rate": 7.974566228634368e-05,
"loss": 1.0905,
"step": 707
},
{
"epoch": 0.37,
"learning_rate": 7.967160308573607e-05,
"loss": 1.0659,
"step": 708
},
{
"epoch": 0.37,
"learning_rate": 7.959744326553001e-05,
"loss": 1.349,
"step": 709
},
{
"epoch": 0.37,
"learning_rate": 7.952318307720943e-05,
"loss": 1.0868,
"step": 710
},
|
{
"epoch": 0.37,
"learning_rate": 7.94488227725986e-05,
"loss": 1.2632,
"step": 711
},
{
"epoch": 0.37,
"learning_rate": 7.937436260386134e-05,
"loss": 1.2386,
"step": 712
},
{
"epoch": 0.37,
"learning_rate": 7.929980282350008e-05,
"loss": 1.2145,
"step": 713
},
{
"epoch": 0.37,
"learning_rate": 7.922514368435505e-05,
"loss": 1.1532,
"step": 714
},
{
"epoch": 0.38,
"learning_rate": 7.915038543960342e-05,
"loss": 1.3916,
"step": 715
},
{
"epoch": 0.38,
"learning_rate": 7.907552834275847e-05,
"loss": 1.1152,
"step": 716
},
{
"epoch": 0.38,
"learning_rate": 7.900057264766865e-05,
"loss": 0.9355,
"step": 717
},
{
"epoch": 0.38,
"learning_rate": 7.892551860851679e-05,
"loss": 1.037,
"step": 718
},
{
"epoch": 0.38,
"learning_rate": 7.885036647981921e-05,
"loss": 0.9923,
"step": 719
},
{
"epoch": 0.38,
"learning_rate": 7.877511651642486e-05,
"loss": 1.2507,
"step": 720
},
{
"epoch": 0.38,
"learning_rate": 7.869976897351446e-05,
"loss": 1.256,
"step": 721
},
{
"epoch": 0.38,
"learning_rate": 7.862432410659964e-05,
"loss": 1.2936,
"step": 722
},
{
"epoch": 0.38,
"learning_rate": 7.854878217152208e-05,
"loss": 1.0659,
"step": 723
},
{
"epoch": 0.38,
"learning_rate": 7.847314342445258e-05,
"loss": 1.282,
"step": 724
},
{
"epoch": 0.38,
"learning_rate": 7.839740812189028e-05,
"loss": 1.3023,
"step": 725
},
{
"epoch": 0.38,
"learning_rate": 7.832157652066173e-05,
"loss": 1.2871,
"step": 726
},
{
"epoch": 0.38,
"learning_rate": 7.824564887792007e-05,
"loss": 1.3008,
"step": 727
},
{
"epoch": 0.38,
"learning_rate": 7.81696254511441e-05,
"loss": 1.1695,
"step": 728
},
{
"epoch": 0.38,
"learning_rate": 7.809350649813743e-05,
"loss": 1.4763,
"step": 729
},
{
"epoch": 0.38,
"learning_rate": 7.801729227702761e-05,
"loss": 1.1502,
"step": 730
},
{
"epoch": 0.38,
"learning_rate": 7.79409830462653e-05,
"loss": 1.2635,
"step": 731
},
{
"epoch": 0.38,
"learning_rate": 7.786457906462328e-05,
"loss": 1.3654,
"step": 732
},
{
"epoch": 0.38,
"learning_rate": 7.778808059119567e-05,
"loss": 1.2267,
"step": 733
},
{
"epoch": 0.39,
"learning_rate": 7.771148788539704e-05,
"loss": 1.3194,
"step": 734
},
{
"epoch": 0.39,
"learning_rate": 7.763480120696149e-05,
"loss": 1.2976,
"step": 735
},
{
"epoch": 0.39,
"learning_rate": 7.755802081594179e-05,
"loss": 0.9709,
"step": 736
},
{
"epoch": 0.39,
"learning_rate": 7.748114697270853e-05,
"loss": 1.0617,
"step": 737
},
{
"epoch": 0.39,
"learning_rate": 7.740417993794918e-05,
"loss": 1.402,
"step": 738
},
{
"epoch": 0.39,
"learning_rate": 7.732711997266719e-05,
"loss": 1.1975,
"step": 739
},
{
"epoch": 0.39,
"learning_rate": 7.724996733818124e-05,
"loss": 1.2343,
"step": 740
},
{
"epoch": 0.39,
"learning_rate": 7.71727222961242e-05,
"loss": 1.1829,
"step": 741
},
{
"epoch": 0.39,
"learning_rate": 7.709538510844233e-05,
"loss": 1.0001,
"step": 742
},
{
"epoch": 0.39,
"learning_rate": 7.701795603739435e-05,
"loss": 1.1391,
"step": 743
},
{
"epoch": 0.39,
"learning_rate": 7.694043534555055e-05,
"loss": 1.1496,
"step": 744
},
{
"epoch": 0.39,
"learning_rate": 7.686282329579196e-05,
"loss": 1.0934,
"step": 745
},
{
"epoch": 0.39,
"learning_rate": 7.678512015130937e-05,
"loss": 1.2875,
"step": 746
},
{
"epoch": 0.39,
"learning_rate": 7.670732617560253e-05,
"loss": 1.2206,
"step": 747
},
{
"epoch": 0.39,
"learning_rate": 7.662944163247916e-05,
"loss": 1.2561,
"step": 748
},
{
"epoch": 0.39,
"learning_rate": 7.655146678605414e-05,
"loss": 1.213,
"step": 749
},
{
"epoch": 0.39,
"learning_rate": 7.647340190074855e-05,
"loss": 1.2487,
"step": 750
},
{
"epoch": 0.39,
"learning_rate": 7.63952472412888e-05,
"loss": 1.1735,
"step": 751
},
{
"epoch": 0.39,
"learning_rate": 7.63170030727058e-05,
"loss": 1.3249,
"step": 752
},
{
"epoch": 0.4,
"learning_rate": 7.623866966033391e-05,
"loss": 1.3793,
"step": 753
},
{
"epoch": 0.4,
"learning_rate": 7.616024726981015e-05,
"loss": 1.1992,
"step": 754
},
{
"epoch": 0.4,
"learning_rate": 7.60817361670733e-05,
"loss": 1.4108,
"step": 755
},
{
"epoch": 0.4,
"learning_rate": 7.600313661836298e-05,
"loss": 1.106,
"step": 756
},
{
"epoch": 0.4,
"learning_rate": 7.592444889021866e-05,
"loss": 1.2202,
"step": 757
},
{
"epoch": 0.4,
"learning_rate": 7.584567324947893e-05,
"loss": 1.461,
"step": 758
},
{
"epoch": 0.4,
"learning_rate": 7.576680996328045e-05,
"loss": 1.2801,
"step": 759
},
{
"epoch": 0.4,
"learning_rate": 7.568785929905713e-05,
"loss": 1.4448,
"step": 760
},
{
"epoch": 0.4,
"learning_rate": 7.560882152453914e-05,
"loss": 1.075,
"step": 761
},
{
"epoch": 0.4,
"learning_rate": 7.552969690775209e-05,
"loss": 0.9568,
"step": 762
},
{
"epoch": 0.4,
"learning_rate": 7.545048571701605e-05,
"loss": 1.1432,
"step": 763
},
{
"epoch": 0.4,
"learning_rate": 7.537118822094474e-05,
"loss": 1.3191,
"step": 764
},
{
"epoch": 0.4,
"learning_rate": 7.529180468844444e-05,
"loss": 1.2781,
"step": 765
},
{
"epoch": 0.4,
"learning_rate": 7.521233538871329e-05,
"loss": 1.2839,
"step": 766
},
{
"epoch": 0.4,
"learning_rate": 7.513278059124022e-05,
"loss": 1.3867,
"step": 767
},
{
"epoch": 0.4,
"learning_rate": 7.50531405658041e-05,
"loss": 1.2937,
"step": 768
},
{
"epoch": 0.4,
"learning_rate": 7.497341558247286e-05,
"loss": 1.3274,
"step": 769
},
{
"epoch": 0.4,
"learning_rate": 7.489360591160244e-05,
"loss": 1.2399,
"step": 770
},
{
"epoch": 0.4,
"learning_rate": 7.481371182383607e-05,
"loss": 1.1877,
"step": 771
},
{
"epoch": 0.4,
"learning_rate": 7.473373359010318e-05,
"loss": 1.0751,
"step": 772
},
{
"epoch": 0.41,
"learning_rate": 7.465367148161856e-05,
"loss": 1.3145,
"step": 773
},
{
"epoch": 0.41,
"learning_rate": 7.457352576988144e-05,
"loss": 1.0667,
"step": 774
},
{
"epoch": 0.41,
"learning_rate": 7.449329672667456e-05,
"loss": 1.176,
"step": 775
},
{
"epoch": 0.41,
"learning_rate": 7.441298462406321e-05,
"loss": 1.2446,
"step": 776
},
{
"epoch": 0.41,
"learning_rate": 7.43325897343944e-05,
"loss": 1.095,
"step": 777
},
{
"epoch": 0.41,
"learning_rate": 7.425211233029583e-05,
"loss": 1.4673,
"step": 778
},
{
"epoch": 0.41,
"learning_rate": 7.417155268467505e-05,
"loss": 1.389,
"step": 779
},
{
"epoch": 0.41,
"learning_rate": 7.409091107071848e-05,
"loss": 1.1901,
"step": 780
},
{
"epoch": 0.41,
"learning_rate": 7.40101877618905e-05,
"loss": 1.1656,
"step": 781
},
{
"epoch": 0.41,
"learning_rate": 7.392938303193256e-05,
"loss": 1.276,
"step": 782
},
{
"epoch": 0.41,
"learning_rate": 7.384849715486217e-05,
"loss": 1.116,
"step": 783
},
{
"epoch": 0.41,
"learning_rate": 7.376753040497206e-05,
"loss": 1.1236,
"step": 784
},
{
"epoch": 0.41,
"learning_rate": 7.368648305682917e-05,
"loss": 1.1392,
"step": 785
},
{
"epoch": 0.41,
"learning_rate": 7.36053553852738e-05,
"loss": 1.1715,
"step": 786
},
{
"epoch": 0.41,
"learning_rate": 7.352414766541859e-05,
"loss": 1.3388,
"step": 787
},
{
"epoch": 0.41,
"learning_rate": 7.344286017264765e-05,
"loss": 1.2815,
"step": 788
},
{
"epoch": 0.41,
"learning_rate": 7.336149318261565e-05,
"loss": 1.2,
"step": 789
},
{
"epoch": 0.41,
"learning_rate": 7.328004697124679e-05,
"loss": 1.0053,
"step": 790
},
{
"epoch": 0.41,
"learning_rate": 7.319852181473393e-05,
"loss": 1.3813,
"step": 791
},
{
"epoch": 0.42,
"learning_rate": 7.311691798953765e-05,
"loss": 1.1228,
"step": 792
},
{
"epoch": 0.42,
"learning_rate": 7.303523577238529e-05,
"loss": 1.1688,
"step": 793
},
{
"epoch": 0.42,
"learning_rate": 7.295347544027006e-05,
"loss": 1.153,
"step": 794
},
{
"epoch": 0.42,
"learning_rate": 7.287163727045001e-05,
"loss": 1.2582,
"step": 795
},
{
"epoch": 0.42,
"learning_rate": 7.278972154044722e-05,
"loss": 1.2573,
"step": 796
},
{
"epoch": 0.42,
"learning_rate": 7.270772852804672e-05,
"loss": 1.1999,
"step": 797
},
{
"epoch": 0.42,
"learning_rate": 7.262565851129564e-05,
"loss": 1.377,
"step": 798
},
{
"epoch": 0.42,
"learning_rate": 7.254351176850222e-05,
"loss": 1.1847,
"step": 799
},
{
"epoch": 0.42,
"learning_rate": 7.246128857823495e-05,
"loss": 1.3181,
"step": 800
},
{
"epoch": 0.42,
"learning_rate": 7.237898921932146e-05,
"loss": 1.037,
"step": 801
},
{
"epoch": 0.42,
"learning_rate": 7.229661397084776e-05,
"loss": 1.4083,
"step": 802
},
{
"epoch": 0.42,
"learning_rate": 7.221416311215717e-05,
"loss": 1.2283,
"step": 803
},
{
"epoch": 0.42,
"learning_rate": 7.213163692284943e-05,
"loss": 1.1925,
"step": 804
},
{
"epoch": 0.42,
"learning_rate": 7.204903568277974e-05,
"loss": 1.2007,
"step": 805
},
{
"epoch": 0.42,
"learning_rate": 7.196635967205776e-05,
"loss": 1.1356,
"step": 806
},
{
"epoch": 0.42,
"learning_rate": 7.188360917104677e-05,
"loss": 1.1845,
"step": 807
},
{
"epoch": 0.42,
"learning_rate": 7.180078446036259e-05,
"loss": 1.2703,
"step": 808
},
{
"epoch": 0.42,
"learning_rate": 7.171788582087277e-05,
"loss": 1.2917,
"step": 809
},
{
"epoch": 0.42,
"learning_rate": 7.163491353369545e-05,
"loss": 1.226,
"step": 810
},
{
"epoch": 0.43,
"learning_rate": 7.155186788019864e-05,
"loss": 1.3631,
"step": 811
},
{
"epoch": 0.43,
"learning_rate": 7.146874914199906e-05,
"loss": 1.3167,
"step": 812
},
{
"epoch": 0.43,
"learning_rate": 7.13855576009613e-05,
"loss": 1.0606,
"step": 813
},
{
"epoch": 0.43,
"learning_rate": 7.130229353919684e-05,
"loss": 1.061,
"step": 814
},
{
"epoch": 0.43,
"learning_rate": 7.121895723906306e-05,
"loss": 1.2346,
"step": 815
},
{
"epoch": 0.43,
"learning_rate": 7.11355489831623e-05,
"loss": 1.3658,
"step": 816
},
{
"epoch": 0.43,
"learning_rate": 7.105206905434096e-05,
"loss": 1.0964,
"step": 817
},
{
"epoch": 0.43,
"learning_rate": 7.096851773568844e-05,
"loss": 1.134,
"step": 818
},
{
"epoch": 0.43,
"learning_rate": 7.088489531053627e-05,
"loss": 1.1763,
"step": 819
},
{
"epoch": 0.43,
"learning_rate": 7.080120206245709e-05,
"loss": 1.2423,
"step": 820
},
|
{
"epoch": 0.43,
"learning_rate": 7.071743827526366e-05,
"loss": 1.2917,
"step": 821
},
{
"epoch": 0.43,
"learning_rate": 7.063360423300808e-05,
"loss": 1.2818,
"step": 822
},
{
"epoch": 0.43,
"learning_rate": 7.054970021998053e-05,
"loss": 1.1765,
"step": 823
},
{
"epoch": 0.43,
"learning_rate": 7.04657265207086e-05,
"loss": 1.3552,
"step": 824
},
{
"epoch": 0.43,
"learning_rate": 7.038168341995609e-05,
"loss": 1.2988,
"step": 825
},
{
"epoch": 0.43,
"learning_rate": 7.029757120272223e-05,
"loss": 1.3411,
"step": 826
},
{
"epoch": 0.43,
"learning_rate": 7.02133901542406e-05,
"loss": 0.9321,
"step": 827
},
{
"epoch": 0.43,
"learning_rate": 7.012914055997817e-05,
"loss": 1.1216,
"step": 828
},
{
"epoch": 0.43,
"learning_rate": 7.004482270563441e-05,
"loss": 1.3039,
"step": 829
},
{
"epoch": 0.44,
"learning_rate": 6.99604368771402e-05,
"loss": 1.1697,
"step": 830
},
{
"epoch": 0.44,
"learning_rate": 6.9875983360657e-05,
"loss": 1.2737,
"step": 831
},
{
"epoch": 0.44,
"learning_rate": 6.979146244257572e-05,
"loss": 1.2087,
"step": 832
},
{
"epoch": 0.44,
"learning_rate": 6.970687440951593e-05,
"loss": 1.2569,
"step": 833
},
{
"epoch": 0.44,
"learning_rate": 6.962221954832476e-05,
"loss": 1.4302,
"step": 834
},
{
"epoch": 0.44,
"learning_rate": 6.953749814607592e-05,
"loss": 1.2295,
"step": 835
},
{
"epoch": 0.44,
"learning_rate": 6.945271049006882e-05,
"loss": 1.2631,
"step": 836
},
{
"epoch": 0.44,
"learning_rate": 6.936785686782751e-05,
"loss": 1.1279,
"step": 837
},
{
"epoch": 0.44,
"learning_rate": 6.928293756709975e-05,
"loss": 1.184,
"step": 838
},
{
"epoch": 0.44,
"learning_rate": 6.919795287585604e-05,
"loss": 1.1165,
"step": 839
},
{
"epoch": 0.44,
"learning_rate": 6.911290308228861e-05,
"loss": 0.9758,
"step": 840
},
{
"epoch": 0.44,
"learning_rate": 6.902778847481046e-05,
"loss": 1.233,
"step": 841
},
{
"epoch": 0.44,
"learning_rate": 6.894260934205437e-05,
"loss": 1.3657,
"step": 842
},
{
"epoch": 0.44,
"learning_rate": 6.885736597287194e-05,
"loss": 1.2249,
"step": 843
},
{
"epoch": 0.44,
"learning_rate": 6.877205865633262e-05,
"loss": 1.1283,
"step": 844
},
{
"epoch": 0.44,
"learning_rate": 6.868668768172272e-05,
"loss": 1.1403,
"step": 845
},
{
"epoch": 0.44,
"learning_rate": 6.860125333854437e-05,
"loss": 1.2103,
"step": 846
},
{
"epoch": 0.44,
"learning_rate": 6.851575591651461e-05,
"loss": 1.0768,
"step": 847
},
{
"epoch": 0.44,
"learning_rate": 6.843019570556443e-05,
"loss": 1.1469,
"step": 848
},
{
"epoch": 0.45,
"learning_rate": 6.834457299583768e-05,
"loss": 1.2445,
"step": 849
},
{
"epoch": 0.45,
"learning_rate": 6.82588880776902e-05,
"loss": 1.2965,
"step": 850
},
{
"epoch": 0.45,
"learning_rate": 6.817314124168877e-05,
"loss": 1.2222,
"step": 851
},
{
"epoch": 0.45,
"learning_rate": 6.808733277861012e-05,
"loss": 1.252,
"step": 852
},
{
"epoch": 0.45,
"learning_rate": 6.800146297943998e-05,
"loss": 1.2799,
"step": 853
},
{
"epoch": 0.45,
"learning_rate": 6.791553213537209e-05,
"loss": 1.2599,
"step": 854
},
{
"epoch": 0.45,
"learning_rate": 6.782954053780719e-05,
"loss": 1.0473,
"step": 855
},
{
"epoch": 0.45,
"learning_rate": 6.774348847835202e-05,
"loss": 1.0644,
"step": 856
},
{
"epoch": 0.45,
"learning_rate": 6.765737624881839e-05,
"loss": 1.3575,
"step": 857
},
{
"epoch": 0.45,
"learning_rate": 6.757120414122214e-05,
"loss": 0.8829,
"step": 858
},
{
"epoch": 0.45,
"learning_rate": 6.748497244778214e-05,
"loss": 1.2545,
"step": 859
},
{
"epoch": 0.45,
"learning_rate": 6.739868146091934e-05,
"loss": 1.3259,
"step": 860
},
{
"epoch": 0.45,
"learning_rate": 6.731233147325578e-05,
"loss": 1.0904,
"step": 861
},
{
"epoch": 0.45,
"learning_rate": 6.722592277761355e-05,
"loss": 1.1134,
"step": 862
},
{
"epoch": 0.45,
"learning_rate": 6.713945566701383e-05,
"loss": 1.1543,
"step": 863
},
{
"epoch": 0.45,
"learning_rate": 6.705293043467589e-05,
"loss": 1.0005,
"step": 864
},
{
"epoch": 0.45,
"learning_rate": 6.69663473740161e-05,
"loss": 1.3961,
"step": 865
},
{
"epoch": 0.45,
"learning_rate": 6.687970677864696e-05,
"loss": 1.0416,
"step": 866
},
{
"epoch": 0.45,
"learning_rate": 6.679300894237602e-05,
"loss": 1.1632,
"step": 867
},
{
"epoch": 0.46,
"learning_rate": 6.670625415920499e-05,
"loss": 1.08,
"step": 868
},
{
"epoch": 0.46,
"learning_rate": 6.661944272332867e-05,
"loss": 1.2152,
"step": 869
},
{
"epoch": 0.46,
"learning_rate": 6.653257492913397e-05,
"loss": 1.2323,
"step": 870
},
{
"epoch": 0.46,
"learning_rate": 6.644565107119894e-05,
"loss": 0.9387,
"step": 871
},
{
"epoch": 0.46,
"learning_rate": 6.635867144429173e-05,
"loss": 1.1786,
"step": 872
},
{
"epoch": 0.46,
"learning_rate": 6.62716363433696e-05,
"loss": 1.1721,
"step": 873
},
{
"epoch": 0.46,
"learning_rate": 6.618454606357796e-05,
"loss": 1.1166,
"step": 874
},
{
"epoch": 0.46,
"learning_rate": 6.60974009002493e-05,
"loss": 1.3361,
"step": 875
},
{
"epoch": 0.46,
"learning_rate": 6.601020114890228e-05,
"loss": 1.1175,
"step": 876
},
{
"epoch": 0.46,
"learning_rate": 6.592294710524058e-05,
"loss": 1.1079,
"step": 877
},
{
"epoch": 0.46,
"learning_rate": 6.58356390651521e-05,
"loss": 1.2961,
"step": 878
},
{
"epoch": 0.46,
"learning_rate": 6.574827732470779e-05,
"loss": 1.2133,
"step": 879
},
{
"epoch": 0.46,
"learning_rate": 6.56608621801607e-05,
"loss": 1.0169,
"step": 880
},
{
"epoch": 0.46,
"learning_rate": 6.5573393927945e-05,
"loss": 1.2873,
"step": 881
},
{
"epoch": 0.46,
"learning_rate": 6.548587286467491e-05,
"loss": 1.1346,
"step": 882
},
{
"epoch": 0.46,
"learning_rate": 6.539829928714383e-05,
"loss": 1.168,
"step": 883
},
{
"epoch": 0.46,
"learning_rate": 6.531067349232313e-05,
"loss": 1.0916,
"step": 884
},
{
"epoch": 0.46,
"learning_rate": 6.522299577736133e-05,
"loss": 1.2535,
"step": 885
},
{
"epoch": 0.46,
"learning_rate": 6.513526643958301e-05,
"loss": 1.2332,
"step": 886
},
{
"epoch": 0.47,
"learning_rate": 6.504748577648778e-05,
"loss": 1.1259,
"step": 887
},
{
"epoch": 0.47,
"learning_rate": 6.49596540857493e-05,
"loss": 1.1833,
"step": 888
},
{
"epoch": 0.47,
"learning_rate": 6.48717716652143e-05,
"loss": 1.1894,
"step": 889
},
{
"epoch": 0.47,
"learning_rate": 6.478383881290152e-05,
"loss": 1.1131,
"step": 890
},
{
"epoch": 0.47,
"learning_rate": 6.469585582700071e-05,
"loss": 1.2203,
"step": 891
},
{
"epoch": 0.47,
"learning_rate": 6.460782300587166e-05,
"loss": 1.3577,
"step": 892
},
{
"epoch": 0.47,
"learning_rate": 6.451974064804312e-05,
"loss": 1.2248,
"step": 893
},
{
"epoch": 0.47,
"learning_rate": 6.443160905221187e-05,
"loss": 1.3831,
"step": 894
},
{
"epoch": 0.47,
"learning_rate": 6.43434285172416e-05,
"loss": 1.2409,
"step": 895
},
{
"epoch": 0.47,
"learning_rate": 6.425519934216203e-05,
"loss": 1.3693,
"step": 896
},
{
"epoch": 0.47,
"learning_rate": 6.416692182616775e-05,
"loss": 1.1685,
"step": 897
},
{
"epoch": 0.47,
"learning_rate": 6.407859626861735e-05,
"loss": 1.2228,
"step": 898
},
{
"epoch": 0.47,
"learning_rate": 6.399022296903224e-05,
"loss": 1.0375,
"step": 899
},
{
"epoch": 0.47,
"learning_rate": 6.390180222709583e-05,
"loss": 0.9321,
"step": 900
},
{
"epoch": 0.47,
"learning_rate": 6.381333434265238e-05,
"loss": 1.2262,
"step": 901
},
{
"epoch": 0.47,
"learning_rate": 6.372481961570597e-05,
"loss": 1.1371,
"step": 902
},
{
"epoch": 0.47,
"learning_rate": 6.363625834641959e-05,
"loss": 1.1375,
"step": 903
},
{
"epoch": 0.47,
"learning_rate": 6.354765083511401e-05,
"loss": 1.2385,
"step": 904
},
{
"epoch": 0.47,
"learning_rate": 6.345899738226687e-05,
"loss": 1.2664,
"step": 905
},
{
"epoch": 0.48,
"learning_rate": 6.337029828851151e-05,
"loss": 1.3064,
"step": 906
},
{
"epoch": 0.48,
"learning_rate": 6.328155385463615e-05,
"loss": 1.0658,
"step": 907
},
{
"epoch": 0.48,
"learning_rate": 6.31927643815827e-05,
"loss": 1.0843,
"step": 908
},
{
"epoch": 0.48,
"learning_rate": 6.310393017044582e-05,
"loss": 1.1444,
"step": 909
},
{
"epoch": 0.48,
"learning_rate": 6.301505152247184e-05,
"loss": 1.2787,
"step": 910
},
{
"epoch": 0.48,
"learning_rate": 6.292612873905788e-05,
"loss": 1.3589,
"step": 911
},
{
"epoch": 0.48,
"learning_rate": 6.283716212175062e-05,
"loss": 1.3724,
"step": 912
},
{
"epoch": 0.48,
"learning_rate": 6.274815197224545e-05,
"loss": 1.1862,
"step": 913
},
{
"epoch": 0.48,
"learning_rate": 6.265909859238535e-05,
"loss": 1.2664,
"step": 914
},
{
"epoch": 0.48,
"learning_rate": 6.257000228415994e-05,
"loss": 1.0545,
"step": 915
},
{
"epoch": 0.48,
"learning_rate": 6.248086334970435e-05,
"loss": 1.3467,
"step": 916
},
{
"epoch": 0.48,
"learning_rate": 6.239168209129833e-05,
"loss": 1.4016,
"step": 917
},
{
"epoch": 0.48,
"learning_rate": 6.230245881136508e-05,
"loss": 1.4289,
"step": 918
},
{
"epoch": 0.48,
"learning_rate": 6.22131938124704e-05,
"loss": 1.3223,
"step": 919
},
{
"epoch": 0.48,
"learning_rate": 6.212388739732145e-05,
"loss": 1.4146,
"step": 920
},
{
"epoch": 0.48,
"learning_rate": 6.203453986876594e-05,
"loss": 1.1499,
"step": 921
},
{
"epoch": 0.48,
"learning_rate": 6.194515152979093e-05,
"loss": 1.2879,
"step": 922
},
{
"epoch": 0.48,
"learning_rate": 6.185572268352187e-05,
"loss": 1.0874,
"step": 923
},
{
"epoch": 0.48,
"learning_rate": 6.176625363322163e-05,
"loss": 1.2427,
"step": 924
},
{
"epoch": 0.49,
"learning_rate": 6.167674468228941e-05,
"loss": 1.3649,
"step": 925
},
{
"epoch": 0.49,
"learning_rate": 6.158719613425964e-05,
"loss": 1.2718,
"step": 926
},
{
"epoch": 0.49,
"learning_rate": 6.14976082928011e-05,
"loss": 1.3047,
"step": 927
},
{
"epoch": 0.49,
"learning_rate": 6.140798146171582e-05,
"loss": 1.3723,
"step": 928
},
{
"epoch": 0.49,
"learning_rate": 6.131831594493799e-05,
"loss": 1.2044,
"step": 929
},
{
"epoch": 0.49,
"learning_rate": 6.122861204653303e-05,
"loss": 1.3015,
"step": 930
},
|
{
"epoch": 0.49,
"learning_rate": 6.113887007069656e-05,
"loss": 1.052,
"step": 931
},
{
"epoch": 0.49,
"learning_rate": 6.104909032175323e-05,
"loss": 1.3608,
"step": 932
},
{
"epoch": 0.49,
"learning_rate": 6.095927310415584e-05,
"loss": 1.1729,
"step": 933
},
{
"epoch": 0.49,
"learning_rate": 6.086941872248424e-05,
"loss": 1.1226,
"step": 934
},
{
"epoch": 0.49,
"learning_rate": 6.07795274814443e-05,
"loss": 1.2384,
"step": 935
},
{
"epoch": 0.49,
"learning_rate": 6.068959968586689e-05,
"loss": 1.2275,
"step": 936
},
{
"epoch": 0.49,
"learning_rate": 6.059963564070683e-05,
"loss": 1.1939,
"step": 937
},
{
"epoch": 0.49,
"learning_rate": 6.050963565104191e-05,
"loss": 1.293,
"step": 938
},
{
"epoch": 0.49,
"learning_rate": 6.041960002207174e-05,
"loss": 0.9448,
"step": 939
},
{
"epoch": 0.49,
"learning_rate": 6.032952905911686e-05,
"loss": 1.259,
"step": 940
},
{
"epoch": 0.49,
"learning_rate": 6.023942306761757e-05,
"loss": 1.3718,
"step": 941
},
{
"epoch": 0.49,
"learning_rate": 6.014928235313301e-05,
"loss": 1.3526,
"step": 942
},
{
"epoch": 0.49,
"learning_rate": 6.0059107221340004e-05,
"loss": 0.7851,
"step": 943
},
{
"epoch": 0.5,
"learning_rate": 5.9968897978032135e-05,
"loss": 1.3295,
"step": 944
},
{
"epoch": 0.5,
"learning_rate": 5.987865492911866e-05,
"loss": 1.4182,
"step": 945
},
{
"epoch": 0.5,
"learning_rate": 5.9788378380623475e-05,
"loss": 1.3184,
"step": 946
},
{
"epoch": 0.5,
"learning_rate": 5.969806863868407e-05,
"loss": 1.2503,
"step": 947
},
{
"epoch": 0.5,
"learning_rate": 5.960772600955049e-05,
"loss": 1.0603,
"step": 948
},
{
"epoch": 0.5,
"learning_rate": 5.9517350799584307e-05,
"loss": 1.1791,
"step": 949
},
{
"epoch": 0.5,
"learning_rate": 5.9426943315257575e-05,
"loss": 1.3167,
"step": 950
},
{
"epoch": 0.5,
"learning_rate": 5.933650386315183e-05,
"loss": 1.4345,
"step": 951
},
{
"epoch": 0.5,
"learning_rate": 5.924603274995692e-05,
"loss": 1.0739,
"step": 952
},
{
"epoch": 0.5,
"learning_rate": 5.915553028247021e-05,
"loss": 1.1927,
"step": 953
},
{
"epoch": 0.5,
"learning_rate": 5.9064996767595236e-05,
"loss": 1.182,
"step": 954
},
{
"epoch": 0.5,
"learning_rate": 5.8974432512340926e-05,
"loss": 1.0244,
"step": 955
},
{
"epoch": 0.5,
"learning_rate": 5.88838378238204e-05,
"loss": 1.3127,
"step": 956
},
{
"epoch": 0.5,
"learning_rate": 5.879321300924999e-05,
"loss": 1.2116,
"step": 957
},
{
"epoch": 0.5,
"learning_rate": 5.87025583759482e-05,
"loss": 1.3396,
"step": 958
},
{
"epoch": 0.5,
"learning_rate": 5.861187423133464e-05,
"loss": 1.1122,
"step": 959
},
{
"epoch": 0.5,
"learning_rate": 5.8521160882929006e-05,
"loss": 1.0469,
"step": 960
},
{
"epoch": 0.5,
"learning_rate": 5.843041863835003e-05,
"loss": 1.3309,
"step": 961
},
{
"epoch": 0.5,
"learning_rate": 5.83396478053144e-05,
"loss": 1.2289,
"step": 962
},
{
"epoch": 0.51,
"learning_rate": 5.82488486916358e-05,
"loss": 1.2619,
"step": 963
},
{
"epoch": 0.51,
"learning_rate": 5.815802160522379e-05,
"loss": 1.189,
"step": 964
},
{
"epoch": 0.51,
"learning_rate": 5.8067166854082774e-05,
"loss": 1.3728,
"step": 965
},
{
"epoch": 0.51,
"learning_rate": 5.797628474631102e-05,
"loss": 1.2453,
"step": 966
},
{
"epoch": 0.51,
"learning_rate": 5.7885375590099514e-05,
"loss": 1.3382,
"step": 967
},
{
"epoch": 0.51,
"learning_rate": 5.779443969373097e-05,
"loss": 1.2999,
"step": 968
},
{
"epoch": 0.51,
"learning_rate": 5.770347736557884e-05,
"loss": 1.4029,
"step": 969
},
{
"epoch": 0.51,
"learning_rate": 5.761248891410613e-05,
"loss": 1.2411,
"step": 970
},
{
"epoch": 0.51,
"learning_rate": 5.752147464786448e-05,
"loss": 1.2294,
"step": 971
},
{
"epoch": 0.51,
"learning_rate": 5.7430434875493056e-05,
"loss": 1.1427,
"step": 972
},
{
"epoch": 0.51,
"learning_rate": 5.733936990571752e-05,
"loss": 1.2474,
"step": 973
},
{
"epoch": 0.51,
"learning_rate": 5.724828004734899e-05,
"loss": 1.1501,
"step": 974
},
{
"epoch": 0.51,
"learning_rate": 5.715716560928297e-05,
"loss": 1.0521,
"step": 975
},
{
"epoch": 0.51,
"learning_rate": 5.7066026900498314e-05,
"loss": 1.2757,
"step": 976
},
{
"epoch": 0.51,
"learning_rate": 5.697486423005621e-05,
"loss": 1.363,
"step": 977
},
{
"epoch": 0.51,
"learning_rate": 5.6883677907099085e-05,
"loss": 1.0824,
"step": 978
},
{
"epoch": 0.51,
"learning_rate": 5.679246824084955e-05,
"loss": 1.2215,
"step": 979
},
{
"epoch": 0.51,
"learning_rate": 5.67012355406094e-05,
"loss": 1.1607,
"step": 980
},
{
"epoch": 0.51,
"learning_rate": 5.660998011575853e-05,
"loss": 1.1333,
"step": 981
},
{
"epoch": 0.52,
"learning_rate": 5.651870227575391e-05,
"loss": 1.065,
"step": 982
},
{
"epoch": 0.52,
"learning_rate": 5.642740233012853e-05,
"loss": 1.1918,
"step": 983
},
{
"epoch": 0.52,
"learning_rate": 5.6336080588490326e-05,
"loss": 1.1603,
"step": 984
},
{
"epoch": 0.52,
"learning_rate": 5.624473736052114e-05,
"loss": 1.051,
"step": 985
},
{
"epoch": 0.52,
"learning_rate": 5.615337295597569e-05,
"loss": 1.5568,
"step": 986
},
{
"epoch": 0.52,
"learning_rate": 5.6061987684680506e-05,
"loss": 1.2837,
"step": 987
},
{
"epoch": 0.52,
"learning_rate": 5.5970581856532866e-05,
"loss": 1.1235,
"step": 988
},
{
"epoch": 0.52,
"learning_rate": 5.587915578149976e-05,
"loss": 1.0204,
"step": 989
},
{
"epoch": 0.52,
"learning_rate": 5.5787709769616846e-05,
"loss": 1.2447,
"step": 990
},
{
"epoch": 0.52,
"learning_rate": 5.5696244130987415e-05,
"loss": 1.4469,
"step": 991
},
{
"epoch": 0.52,
"learning_rate": 5.5604759175781286e-05,
"loss": 1.3223,
"step": 992
},
{
"epoch": 0.52,
"learning_rate": 5.551325521423375e-05,
"loss": 1.1734,
"step": 993
},
{
"epoch": 0.52,
"learning_rate": 5.542173255664462e-05,
"loss": 1.2144,
"step": 994
},
{
"epoch": 0.52,
"learning_rate": 5.5330191513377064e-05,
"loss": 1.3377,
"step": 995
},
{
"epoch": 0.52,
"learning_rate": 5.52386323948566e-05,
"loss": 1.0928,
"step": 996
},
{
"epoch": 0.52,
"learning_rate": 5.514705551157006e-05,
"loss": 1.1444,
"step": 997
},
{
"epoch": 0.52,
"learning_rate": 5.505546117406449e-05,
"loss": 1.2559,
"step": 998
},
{
"epoch": 0.52,
"learning_rate": 5.4963849692946166e-05,
"loss": 1.1396,
"step": 999
},
{
"epoch": 0.52,
"learning_rate": 5.487222137887949e-05,
"loss": 1.3016,
"step": 1000
},
{
"epoch": 0.53,
"learning_rate": 5.4780576542585883e-05,
"loss": 1.3395,
"step": 1001
},
{
"epoch": 0.53,
"learning_rate": 5.468891549484289e-05,
"loss": 1.0733,
"step": 1002
},
{
"epoch": 0.53,
"learning_rate": 5.459723854648297e-05,
"loss": 1.1816,
"step": 1003
},
{
"epoch": 0.53,
"learning_rate": 5.4505546008392505e-05,
"loss": 1.0665,
"step": 1004
},
{
"epoch": 0.53,
"learning_rate": 5.441383819151078e-05,
"loss": 1.2515,
"step": 1005
},
{
"epoch": 0.53,
"learning_rate": 5.432211540682887e-05,
"loss": 1.2566,
"step": 1006
},
{
"epoch": 0.53,
"learning_rate": 5.423037796538858e-05,
"loss": 1.0183,
"step": 1007
},
{
"epoch": 0.53,
"learning_rate": 5.413862617828147e-05,
"loss": 1.2808,
"step": 1008
},
{
"epoch": 0.53,
"learning_rate": 5.40468603566477e-05,
"loss": 1.062,
"step": 1009
},
{
"epoch": 0.53,
"learning_rate": 5.395508081167506e-05,
"loss": 1.0936,
"step": 1010
},
{
"epoch": 0.53,
"learning_rate": 5.386328785459787e-05,
"loss": 1.1886,
"step": 1011
},
{
"epoch": 0.53,
"learning_rate": 5.37714817966959e-05,
"loss": 1.41,
"step": 1012
},
{
"epoch": 0.53,
"learning_rate": 5.367966294929336e-05,
"loss": 0.9884,
"step": 1013
},
{
"epoch": 0.53,
"learning_rate": 5.35878316237579e-05,
"loss": 1.1373,
"step": 1014
},
{
"epoch": 0.53,
"learning_rate": 5.349598813149936e-05,
"loss": 1.1247,
"step": 1015
},
{
"epoch": 0.53,
"learning_rate": 5.340413278396895e-05,
"loss": 1.2176,
"step": 1016
},
{
"epoch": 0.53,
"learning_rate": 5.331226589265801e-05,
"loss": 1.1515,
"step": 1017
},
{
"epoch": 0.53,
"learning_rate": 5.322038776909705e-05,
"loss": 1.2422,
"step": 1018
},
{
"epoch": 0.53,
"learning_rate": 5.312849872485468e-05,
"loss": 1.504,
"step": 1019
},
{
"epoch": 0.54,
"learning_rate": 5.3036599071536544e-05,
"loss": 1.1377,
"step": 1020
},
{
"epoch": 0.54,
"learning_rate": 5.294468912078424e-05,
"loss": 1.1078,
"step": 1021
},
{
"epoch": 0.54,
"learning_rate": 5.285276918427432e-05,
"loss": 1.2656,
"step": 1022
},
{
"epoch": 0.54,
"learning_rate": 5.276083957371716e-05,
"loss": 1.0643,
"step": 1023
},
{
"epoch": 0.54,
"learning_rate": 5.266890060085595e-05,
"loss": 1.3832,
"step": 1024
},
{
"epoch": 0.54,
"learning_rate": 5.2576952577465664e-05,
"loss": 1.038,
"step": 1025
},
{
"epoch": 0.54,
"learning_rate": 5.248499581535193e-05,
"loss": 1.1451,
"step": 1026
},
{
"epoch": 0.54,
"learning_rate": 5.239303062635001e-05,
"loss": 1.3984,
"step": 1027
},
{
"epoch": 0.54,
"learning_rate": 5.230105732232379e-05,
"loss": 1.2597,
"step": 1028
},
{
"epoch": 0.54,
"learning_rate": 5.220907621516461e-05,
"loss": 1.0798,
"step": 1029
},
{
"epoch": 0.54,
"learning_rate": 5.2117087616790304e-05,
"loss": 1.297,
"step": 1030
},
{
"epoch": 0.54,
"learning_rate": 5.202509183914412e-05,
"loss": 1.024,
"step": 1031
},
{
"epoch": 0.54,
"learning_rate": 5.193308919419363e-05,
"loss": 1.0174,
"step": 1032
},
{
"epoch": 0.54,
"learning_rate": 5.18410799939297e-05,
"loss": 1.3293,
"step": 1033
},
{
"epoch": 0.54,
"learning_rate": 5.1749064550365414e-05,
"loss": 1.2362,
"step": 1034
},
{
"epoch": 0.54,
"learning_rate": 5.165704317553505e-05,
"loss": 1.5172,
"step": 1035
},
{
"epoch": 0.54,
"learning_rate": 5.1565016181493006e-05,
"loss": 1.1196,
"step": 1036
},
{
"epoch": 0.54,
"learning_rate": 5.147298388031271e-05,
"loss": 1.3309,
"step": 1037
},
{
"epoch": 0.54,
"learning_rate": 5.1380946584085596e-05,
"loss": 1.3208,
"step": 1038
},
{
"epoch": 0.55,
"learning_rate": 5.128890460492004e-05,
"loss": 1.2953,
"step": 1039
},
{
"epoch": 0.55,
"learning_rate": 5.1196858254940305e-05,
"loss": 1.0986,
"step": 1040
},
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 5.110480784628544e-05, |
|
"loss": 1.2564, |
|
"step": 1041 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 5.1012753691108295e-05, |
|
"loss": 1.2837, |
|
"step": 1042 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 5.092069610157443e-05, |
|
"loss": 1.3492, |
|
"step": 1043 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 5.082863538986102e-05, |
|
"loss": 1.2833, |
|
"step": 1044 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 5.073657186815586e-05, |
|
"loss": 1.4483, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 5.064450584865624e-05, |
|
"loss": 1.2237, |
|
"step": 1046 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 5.055243764356795e-05, |
|
"loss": 1.1658, |
|
"step": 1047 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 5.046036756510417e-05, |
|
"loss": 1.2706, |
|
"step": 1048 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 5.036829592548446e-05, |
|
"loss": 1.0426, |
|
"step": 1049 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 5.0276223036933626e-05, |
|
"loss": 1.1844, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 5.018414921168075e-05, |
|
"loss": 1.155, |
|
"step": 1051 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 5.0092074761958086e-05, |
|
"loss": 1.1265, |
|
"step": 1052 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 5e-05, |
|
"loss": 1.2486, |
|
"step": 1053 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 4.9907925238041925e-05, |
|
"loss": 1.151, |
|
"step": 1054 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 4.981585078831926e-05, |
|
"loss": 1.4085, |
|
"step": 1055 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 4.972377696306639e-05, |
|
"loss": 1.2894, |
|
"step": 1056 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 4.963170407451556e-05, |
|
"loss": 1.2555, |
|
"step": 1057 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 4.953963243489583e-05, |
|
"loss": 0.9528, |
|
"step": 1058 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 4.944756235643205e-05, |
|
"loss": 1.2463, |
|
"step": 1059 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 4.9355494151343756e-05, |
|
"loss": 1.1701, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 4.926342813184414e-05, |
|
"loss": 1.3017, |
|
"step": 1061 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 4.9171364610139e-05, |
|
"loss": 1.1233, |
|
"step": 1062 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 4.9079303898425585e-05, |
|
"loss": 1.1004, |
|
"step": 1063 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 4.8987246308891716e-05, |
|
"loss": 1.2572, |
|
"step": 1064 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 4.889519215371458e-05, |
|
"loss": 1.081, |
|
"step": 1065 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 4.880314174505972e-05, |
|
"loss": 1.3523, |
|
"step": 1066 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 4.871109539507997e-05, |
|
"loss": 1.0426, |
|
"step": 1067 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 4.8619053415914415e-05, |
|
"loss": 1.3245, |
|
"step": 1068 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 4.85270161196873e-05, |
|
"loss": 1.2372, |
|
"step": 1069 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 4.8434983818507005e-05, |
|
"loss": 1.031, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 4.834295682446496e-05, |
|
"loss": 1.2342, |
|
"step": 1071 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 4.82509354496346e-05, |
|
"loss": 1.3401, |
|
"step": 1072 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 4.815892000607032e-05, |
|
"loss": 1.4023, |
|
"step": 1073 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 4.806691080580638e-05, |
|
"loss": 1.2549, |
|
"step": 1074 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 4.797490816085588e-05, |
|
"loss": 1.2023, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 4.7882912383209694e-05, |
|
"loss": 1.2136, |
|
"step": 1076 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 4.779092378483539e-05, |
|
"loss": 1.4782, |
|
"step": 1077 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 4.769894267767621e-05, |
|
"loss": 0.9413, |
|
"step": 1078 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 4.7606969373649986e-05, |
|
"loss": 1.1232, |
|
"step": 1079 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 4.7515004184648086e-05, |
|
"loss": 1.4187, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 4.7423047422534354e-05, |
|
"loss": 1.3423, |
|
"step": 1081 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 4.733109939914407e-05, |
|
"loss": 1.0831, |
|
"step": 1082 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 4.723916042628287e-05, |
|
"loss": 0.9331, |
|
"step": 1083 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 4.71472308157257e-05, |
|
"loss": 1.2319, |
|
"step": 1084 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 4.705531087921577e-05, |
|
"loss": 1.1965, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 4.6963400928463474e-05, |
|
"loss": 1.3236, |
|
"step": 1086 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 4.687150127514533e-05, |
|
"loss": 1.2908, |
|
"step": 1087 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 4.6779612230902965e-05, |
|
"loss": 1.3047, |
|
"step": 1088 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 4.668773410734201e-05, |
|
"loss": 1.0924, |
|
"step": 1089 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 4.6595867216031066e-05, |
|
"loss": 1.3568, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 4.650401186850064e-05, |
|
"loss": 1.1631, |
|
"step": 1091 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 4.641216837624211e-05, |
|
"loss": 1.2808, |
|
"step": 1092 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 4.632033705070664e-05, |
|
"loss": 1.2646, |
|
"step": 1093 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 4.622851820330412e-05, |
|
"loss": 1.0823, |
|
"step": 1094 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 4.6136712145402144e-05, |
|
"loss": 1.2418, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 4.604491918832494e-05, |
|
"loss": 1.1149, |
|
"step": 1096 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 4.59531396433523e-05, |
|
"loss": 1.2756, |
|
"step": 1097 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 4.5861373821718555e-05, |
|
"loss": 1.0218, |
|
"step": 1098 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 4.576962203461144e-05, |
|
"loss": 1.2293, |
|
"step": 1099 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 4.567788459317116e-05, |
|
"loss": 1.1739, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 4.558616180848922e-05, |
|
"loss": 1.1339, |
|
"step": 1101 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 4.54944539916075e-05, |
|
"loss": 1.2657, |
|
"step": 1102 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 4.5402761453517044e-05, |
|
"loss": 1.3037, |
|
"step": 1103 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 4.531108450515712e-05, |
|
"loss": 1.2414, |
|
"step": 1104 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 4.521942345741412e-05, |
|
"loss": 1.3885, |
|
"step": 1105 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 4.512777862112053e-05, |
|
"loss": 1.2721, |
|
"step": 1106 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 4.503615030705384e-05, |
|
"loss": 1.182, |
|
"step": 1107 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 4.494453882593552e-05, |
|
"loss": 1.0801, |
|
"step": 1108 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 4.485294448842996e-05, |
|
"loss": 1.243, |
|
"step": 1109 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 4.476136760514341e-05, |
|
"loss": 1.3275, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 4.466980848662294e-05, |
|
"loss": 1.1791, |
|
"step": 1111 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 4.4578267443355384e-05, |
|
"loss": 0.9916, |
|
"step": 1112 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 4.448674478576625e-05, |
|
"loss": 1.1714, |
|
"step": 1113 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 4.439524082421872e-05, |
|
"loss": 1.3951, |
|
"step": 1114 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 4.4303755869012576e-05, |
|
"loss": 1.1565, |
|
"step": 1115 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 4.421229023038316e-05, |
|
"loss": 1.3776, |
|
"step": 1116 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 4.412084421850026e-05, |
|
"loss": 1.185, |
|
"step": 1117 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 4.402941814346716e-05, |
|
"loss": 1.0763, |
|
"step": 1118 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 4.393801231531952e-05, |
|
"loss": 1.0781, |
|
"step": 1119 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 4.384662704402432e-05, |
|
"loss": 1.0173, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 4.3755262639478874e-05, |
|
"loss": 1.3302, |
|
"step": 1121 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 4.3663919411509686e-05, |
|
"loss": 1.2739, |
|
"step": 1122 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 4.3572597669871475e-05, |
|
"loss": 1.317, |
|
"step": 1123 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 4.34812977242461e-05, |
|
"loss": 1.0408, |
|
"step": 1124 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 4.339001988424148e-05, |
|
"loss": 1.1187, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 4.329876445939062e-05, |
|
"loss": 1.297, |
|
"step": 1126 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 4.320753175915047e-05, |
|
"loss": 1.238, |
|
"step": 1127 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 4.3116322092900926e-05, |
|
"loss": 0.8729, |
|
"step": 1128 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 4.302513576994378e-05, |
|
"loss": 1.1245, |
|
"step": 1129 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 4.293397309950168e-05, |
|
"loss": 1.3215, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 4.2842834390717036e-05, |
|
"loss": 1.1446, |
|
"step": 1131 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 4.2751719952651015e-05, |
|
"loss": 1.241, |
|
"step": 1132 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 4.26606300942825e-05, |
|
"loss": 1.1225, |
|
"step": 1133 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 4.256956512450696e-05, |
|
"loss": 1.2456, |
|
"step": 1134 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 4.247852535213554e-05, |
|
"loss": 1.2693, |
|
"step": 1135 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 4.2387511085893895e-05, |
|
"loss": 1.0317, |
|
"step": 1136 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 4.229652263442118e-05, |
|
"loss": 1.0834, |
|
"step": 1137 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 4.220556030626904e-05, |
|
"loss": 1.0985, |
|
"step": 1138 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 4.21146244099005e-05, |
|
"loss": 1.0909, |
|
"step": 1139 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 4.2023715253688987e-05, |
|
"loss": 1.2587, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 4.193283314591723e-05, |
|
"loss": 1.2601, |
|
"step": 1141 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 4.184197839477623e-05, |
|
"loss": 1.277, |
|
"step": 1142 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 4.175115130836421e-05, |
|
"loss": 1.4979, |
|
"step": 1143 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 4.166035219468562e-05, |
|
"loss": 1.0265, |
|
"step": 1144 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 4.156958136164999e-05, |
|
"loss": 1.2503, |
|
"step": 1145 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 4.1478839117071e-05, |
|
"loss": 1.2521, |
|
"step": 1146 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 4.1388125768665364e-05, |
|
"loss": 1.2168, |
|
"step": 1147 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 4.1297441624051796e-05, |
|
"loss": 1.2362, |
|
"step": 1148 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 4.1206786990750005e-05, |
|
"loss": 1.1674, |
|
"step": 1149 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 4.11161621761796e-05, |
|
"loss": 1.1533, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 4.102556748765909e-05, |
|
"loss": 1.1444, |
|
"step": 1151 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 4.093500323240478e-05, |
|
"loss": 1.3611, |
|
"step": 1152 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 4.084446971752981e-05, |
|
"loss": 0.949, |
|
"step": 1153 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 4.075396725004308e-05, |
|
"loss": 1.2392, |
|
"step": 1154 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 4.06634961368482e-05, |
|
"loss": 1.2627, |
|
"step": 1155 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 4.0573056684742437e-05, |
|
"loss": 1.1446, |
|
"step": 1156 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 4.048264920041571e-05, |
|
"loss": 1.2606, |
|
"step": 1157 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 4.0392273990449526e-05, |
|
"loss": 1.0948, |
|
"step": 1158 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 4.0301931361315937e-05, |
|
"loss": 1.0528, |
|
"step": 1159 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 4.021162161937653e-05, |
|
"loss": 1.0092, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 4.0121345070881345e-05, |
|
"loss": 1.2607, |
|
"step": 1161 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 4.003110202196787e-05, |
|
"loss": 1.0265, |
|
"step": 1162 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 3.994089277866001e-05, |
|
"loss": 1.2582, |
|
"step": 1163 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 3.985071764686701e-05, |
|
"loss": 1.3616, |
|
"step": 1164 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 3.976057693238243e-05, |
|
"loss": 1.1759, |
|
"step": 1165 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 3.9670470940883144e-05, |
|
"loss": 1.1679, |
|
"step": 1166 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 3.958039997792825e-05, |
|
"loss": 1.2321, |
|
"step": 1167 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 3.94903643489581e-05, |
|
"loss": 1.1551, |
|
"step": 1168 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 3.940036435929318e-05, |
|
"loss": 1.4267, |
|
"step": 1169 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 3.9310400314133135e-05, |
|
"loss": 1.3853, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 3.9220472518555724e-05, |
|
"loss": 1.1791, |
|
"step": 1171 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 3.9130581277515785e-05, |
|
"loss": 1.1589, |
|
"step": 1172 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 3.9040726895844174e-05, |
|
"loss": 1.2076, |
|
"step": 1173 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 3.8950909678246784e-05, |
|
"loss": 1.3118, |
|
"step": 1174 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 3.8861129929303454e-05, |
|
"loss": 1.4017, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 3.877138795346697e-05, |
|
"loss": 1.1258, |
|
"step": 1176 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 3.868168405506202e-05, |
|
"loss": 1.1977, |
|
"step": 1177 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 3.85920185382842e-05, |
|
"loss": 1.2062, |
|
"step": 1178 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 3.850239170719891e-05, |
|
"loss": 1.254, |
|
"step": 1179 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 3.8412803865740375e-05, |
|
"loss": 1.2368, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 3.83232553177106e-05, |
|
"loss": 1.2982, |
|
"step": 1181 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 3.823374636677837e-05, |
|
"loss": 1.3586, |
|
"step": 1182 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 3.814427731647813e-05, |
|
"loss": 1.0685, |
|
"step": 1183 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 3.8054848470209095e-05, |
|
"loss": 1.1598, |
|
"step": 1184 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 3.796546013123407e-05, |
|
"loss": 1.1086, |
|
"step": 1185 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 3.7876112602678546e-05, |
|
"loss": 1.1408, |
|
"step": 1186 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 3.778680618752963e-05, |
|
"loss": 1.2318, |
|
"step": 1187 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 3.769754118863493e-05, |
|
"loss": 1.3748, |
|
"step": 1188 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 3.7608317908701706e-05, |
|
"loss": 1.3031, |
|
"step": 1189 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 3.751913665029567e-05, |
|
"loss": 0.9961, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 3.7429997715840084e-05, |
|
"loss": 1.2784, |
|
"step": 1191 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 3.734090140761466e-05, |
|
"loss": 1.2326, |
|
"step": 1192 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 3.7251848027754565e-05, |
|
"loss": 1.2761, |
|
"step": 1193 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 3.716283787824939e-05, |
|
"loss": 1.4119, |
|
"step": 1194 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 3.7073871260942125e-05, |
|
"loss": 1.3497, |
|
"step": 1195 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 3.6984948477528157e-05, |
|
"loss": 1.1329, |
|
"step": 1196 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 3.68960698295542e-05, |
|
"loss": 1.1757, |
|
"step": 1197 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 3.6807235618417316e-05, |
|
"loss": 1.0434, |
|
"step": 1198 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 3.6718446145363854e-05, |
|
"loss": 1.0727, |
|
"step": 1199 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 3.662970171148849e-05, |
|
"loss": 1.3552, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 3.6541002617733146e-05, |
|
"loss": 0.9735, |
|
"step": 1201 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 3.645234916488599e-05, |
|
"loss": 0.8415, |
|
"step": 1202 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 3.636374165358042e-05, |
|
"loss": 1.4925, |
|
"step": 1203 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 3.627518038429403e-05, |
|
"loss": 1.1336, |
|
"step": 1204 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 3.618666565734764e-05, |
|
"loss": 1.2341, |
|
"step": 1205 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 3.609819777290418e-05, |
|
"loss": 1.2162, |
|
"step": 1206 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 3.600977703096778e-05, |
|
"loss": 0.9718, |
|
"step": 1207 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 3.592140373138269e-05, |
|
"loss": 1.3183, |
|
"step": 1208 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 3.5833078173832256e-05, |
|
"loss": 1.2114, |
|
"step": 1209 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 3.5744800657837985e-05, |
|
"loss": 1.3612, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 3.56565714827584e-05, |
|
"loss": 1.3131, |
|
"step": 1211 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 3.5568390947788135e-05, |
|
"loss": 1.2154, |
|
"step": 1212 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 3.548025935195688e-05, |
|
"loss": 1.299, |
|
"step": 1213 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 3.5392176994128354e-05, |
|
"loss": 1.1693, |
|
"step": 1214 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 3.530414417299929e-05, |
|
"loss": 1.1611, |
|
"step": 1215 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 3.5216161187098495e-05, |
|
"loss": 1.2259, |
|
"step": 1216 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 3.5128228334785703e-05, |
|
"loss": 1.0022, |
|
"step": 1217 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 3.5040345914250704e-05, |
|
"loss": 1.1038, |
|
"step": 1218 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 3.4952514223512235e-05, |
|
"loss": 1.1872, |
|
"step": 1219 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 3.4864733560417e-05, |
|
"loss": 1.2469, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 3.477700422263867e-05, |
|
"loss": 1.3855, |
|
"step": 1221 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 3.4689326507676886e-05, |
|
"loss": 1.2645, |
|
"step": 1222 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 3.46017007128562e-05, |
|
"loss": 1.2546, |
|
"step": 1223 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 3.4514127135325103e-05, |
|
"loss": 1.2321, |
|
"step": 1224 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 3.4426606072055036e-05, |
|
"loss": 1.0954, |
|
"step": 1225 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 3.433913781983932e-05, |
|
"loss": 1.0545, |
|
"step": 1226 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 3.425172267529223e-05, |
|
"loss": 1.4396, |
|
"step": 1227 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 3.4164360934847914e-05, |
|
"loss": 1.2859, |
|
"step": 1228 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 3.407705289475942e-05, |
|
"loss": 1.1176, |
|
"step": 1229 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 3.398979885109774e-05, |
|
"loss": 1.2284, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 3.3902599099750707e-05, |
|
"loss": 1.1778, |
|
"step": 1231 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 3.381545393642205e-05, |
|
"loss": 1.3364, |
|
"step": 1232 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 3.372836365663041e-05, |
|
"loss": 1.1908, |
|
"step": 1233 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 3.364132855570829e-05, |
|
"loss": 1.1588, |
|
"step": 1234 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 3.355434892880107e-05, |
|
"loss": 1.3075, |
|
"step": 1235 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 3.3467425070866035e-05, |
|
"loss": 1.3005, |
|
"step": 1236 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 3.3380557276671345e-05, |
|
"loss": 1.1527, |
|
"step": 1237 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 3.3293745840795e-05, |
|
"loss": 1.3768, |
|
"step": 1238 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 3.3206991057623974e-05, |
|
"loss": 1.088, |
|
"step": 1239 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 3.312029322135306e-05, |
|
"loss": 1.2779, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 3.3033652625983915e-05, |
|
"loss": 1.3483, |
|
"step": 1241 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 3.294706956532413e-05, |
|
"loss": 0.8055, |
|
"step": 1242 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 3.286054433298619e-05, |
|
"loss": 1.2302, |
|
"step": 1243 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 3.277407722238646e-05, |
|
"loss": 1.3869, |
|
"step": 1244 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 3.2687668526744224e-05, |
|
"loss": 1.145, |
|
"step": 1245 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 3.260131853908066e-05, |
|
"loss": 1.0956, |
|
"step": 1246 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 3.251502755221787e-05, |
|
"loss": 1.3329, |
|
"step": 1247 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 3.242879585877787e-05, |
|
"loss": 1.3474, |
|
"step": 1248 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 3.234262375118161e-05, |
|
"loss": 1.2398, |
|
"step": 1249 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 3.225651152164799e-05, |
|
"loss": 1.0887, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 3.2170459462192826e-05, |
|
"loss": 1.2679, |
|
"step": 1251 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 3.208446786462791e-05, |
|
"loss": 0.9408, |
|
"step": 1252 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 3.199853702056003e-05, |
|
"loss": 1.2024, |
|
"step": 1253 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 3.191266722138989e-05, |
|
"loss": 1.2051, |
|
"step": 1254 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 3.182685875831124e-05, |
|
"loss": 1.1828, |
|
"step": 1255 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 3.17411119223098e-05, |
|
"loss": 1.1571, |
|
"step": 1256 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 3.165542700416232e-05, |
|
"loss": 1.2901, |
|
"step": 1257 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 3.156980429443559e-05, |
|
"loss": 1.3149, |
|
"step": 1258 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 3.1484244083485396e-05, |
|
"loss": 1.0309, |
|
"step": 1259 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 3.1398746661455646e-05, |
|
"loss": 1.3031, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 3.131331231827729e-05, |
|
"loss": 1.2569, |
|
"step": 1261 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 3.1227941343667374e-05, |
|
"loss": 1.2363, |
|
"step": 1262 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 3.114263402712807e-05, |
|
"loss": 1.1, |
|
"step": 1263 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 3.1057390657945653e-05, |
|
"loss": 1.3165, |
|
"step": 1264 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 3.097221152518956e-05, |
|
"loss": 1.2885, |
|
"step": 1265 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 3.08870969177114e-05, |
|
"loss": 1.0581, |
|
"step": 1266 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 3.0802047124143964e-05, |
|
"loss": 1.1709, |
|
"step": 1267 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 3.071706243290026e-05, |
|
"loss": 1.2625, |
|
"step": 1268 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 3.0632143132172504e-05, |
|
"loss": 1.3506, |
|
"step": 1269 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 3.054728950993119e-05, |
|
"loss": 1.2595, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 3.0462501853924087e-05, |
|
"loss": 1.1551, |
|
"step": 1271 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 3.037778045167524e-05, |
|
"loss": 1.1417, |
|
"step": 1272 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 3.029312559048406e-05, |
|
"loss": 1.1699, |
|
"step": 1273 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 3.0208537557424277e-05, |
|
"loss": 1.1594, |
|
"step": 1274 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 3.0124016639343022e-05, |
|
"loss": 1.3783, |
|
"step": 1275 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 3.0039563122859817e-05, |
|
"loss": 1.2226, |
|
"step": 1276 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 2.995517729436561e-05, |
|
"loss": 1.3955, |
|
"step": 1277 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 2.9870859440021842e-05, |
|
"loss": 1.3198, |
|
"step": 1278 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 2.9786609845759418e-05, |
|
"loss": 1.429, |
|
"step": 1279 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 2.970242879727778e-05, |
|
"loss": 1.0394, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 2.9618316580043915e-05, |
|
"loss": 1.1531, |
|
"step": 1281 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 2.953427347929142e-05, |
|
"loss": 1.2417, |
|
"step": 1282 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 2.9450299780019474e-05, |
|
"loss": 1.2562, |
|
"step": 1283 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 2.936639576699194e-05, |
|
"loss": 1.0393, |
|
"step": 1284 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 2.9282561724736335e-05, |
|
"loss": 1.2222, |
|
"step": 1285 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 2.919879793754293e-05, |
|
"loss": 1.3213, |
|
"step": 1286 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 2.9115104689463724e-05, |
|
"loss": 1.3782, |
|
"step": 1287 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 2.9031482264311548e-05, |
|
"loss": 1.4448, |
|
"step": 1288 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 2.894793094565904e-05, |
|
"loss": 1.1885, |
|
"step": 1289 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 2.88644510168377e-05, |
|
"loss": 0.9915, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 2.878104276093695e-05, |
|
"loss": 1.3803, |
|
"step": 1291 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 2.869770646080316e-05, |
|
"loss": 0.9869, |
|
"step": 1292 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 2.861444239903871e-05, |
|
"loss": 1.2829, |
|
"step": 1293 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 2.853125085800096e-05, |
|
"loss": 1.2863, |
|
"step": 1294 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 2.8448132119801386e-05, |
|
"loss": 1.0569, |
|
"step": 1295 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 2.836508646630457e-05, |
|
"loss": 1.1295, |
|
"step": 1296 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 2.8282114179127268e-05, |
|
"loss": 1.23, |
|
"step": 1297 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 2.8199215539637424e-05, |
|
"loss": 1.0819, |
|
"step": 1298 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 2.8116390828953253e-05, |
|
"loss": 1.0494, |
|
"step": 1299 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 2.8033640327942234e-05, |
|
"loss": 0.9276, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 2.795096431722026e-05, |
|
"loss": 1.3069, |
|
"step": 1301 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 2.786836307715056e-05, |
|
"loss": 1.0837, |
|
"step": 1302 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 2.778583688784283e-05, |
|
"loss": 1.1688, |
|
"step": 1303 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 2.7703386029152246e-05, |
|
"loss": 1.2329, |
|
"step": 1304 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 2.7621010780678547e-05, |
|
"loss": 1.3319, |
|
"step": 1305 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 2.753871142176506e-05, |
|
"loss": 1.1118, |
|
"step": 1306 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 2.7456488231497778e-05, |
|
"loss": 1.1439, |
|
"step": 1307 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 2.737434148870437e-05, |
|
"loss": 1.0275, |
|
"step": 1308 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 2.7292271471953284e-05, |
|
"loss": 1.3342, |
|
"step": 1309 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 2.7210278459552785e-05, |
|
"loss": 1.35, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 2.7128362729550005e-05, |
|
"loss": 1.291, |
|
"step": 1311 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 2.7046524559729968e-05, |
|
"loss": 1.3303, |
|
"step": 1312 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 2.6964764227614736e-05, |
|
"loss": 0.9718, |
|
"step": 1313 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 2.688308201046236e-05, |
|
"loss": 1.2921, |
|
"step": 1314 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 2.6801478185266076e-05, |
|
"loss": 1.0728, |
|
"step": 1315 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 2.6719953028753214e-05, |
|
"loss": 1.2965, |
|
"step": 1316 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 2.6638506817384347e-05, |
|
"loss": 1.2749, |
|
"step": 1317 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 2.655713982735234e-05, |
|
"loss": 1.3782, |
|
"step": 1318 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 2.647585233458142e-05, |
|
"loss": 1.0089, |
|
"step": 1319 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 2.6394644614726216e-05, |
|
"loss": 1.0192, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 2.6313516943170836e-05, |
|
"loss": 0.9587, |
|
"step": 1321 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 2.623246959502795e-05, |
|
"loss": 1.1867, |
|
"step": 1322 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 2.615150284513783e-05, |
|
"loss": 1.2985, |
|
"step": 1323 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 2.6070616968067445e-05, |
|
"loss": 1.3737, |
|
"step": 1324 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 2.5989812238109502e-05, |
|
"loss": 1.0816, |
|
"step": 1325 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 2.5909088929281533e-05, |
|
"loss": 1.2715, |
|
"step": 1326 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 2.5828447315324962e-05, |
|
"loss": 1.2235, |
|
"step": 1327 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 2.574788766970418e-05, |
|
"loss": 1.1379, |
|
"step": 1328 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 2.5667410265605617e-05, |
|
"loss": 0.9785, |
|
"step": 1329 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 2.5587015375936802e-05, |
|
"loss": 1.0824, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 2.550670327332546e-05, |
|
"loss": 1.3527, |
|
"step": 1331 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 2.5426474230118567e-05, |
|
"loss": 1.4121, |
|
"step": 1332 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 2.5346328518381445e-05, |
|
"loss": 0.9755, |
|
"step": 1333 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 2.5266266409896834e-05, |
|
"loss": 1.2786, |
|
"step": 1334 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 2.518628817616394e-05, |
|
"loss": 1.2678, |
|
"step": 1335 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 2.5106394088397566e-05, |
|
"loss": 1.1993, |
|
"step": 1336 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 2.502658441752716e-05, |
|
"loss": 1.0807, |
|
"step": 1337 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 2.49468594341959e-05, |
|
"loss": 1.1373, |
|
"step": 1338 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 2.4867219408759794e-05, |
|
"loss": 1.0183, |
|
"step": 1339 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 2.4787664611286722e-05, |
|
"loss": 1.2953, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 2.4708195311555572e-05, |
|
"loss": 1.1941, |
|
"step": 1341 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 2.4628811779055277e-05, |
|
"loss": 1.0912, |
|
"step": 1342 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 2.4549514282983947e-05, |
|
"loss": 1.3223, |
|
"step": 1343 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 2.4470303092247926e-05, |
|
"loss": 0.9993, |
|
"step": 1344 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 2.4391178475460875e-05, |
|
"loss": 1.4854, |
|
"step": 1345 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 2.4312140700942888e-05, |
|
"loss": 1.1286, |
|
"step": 1346 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 2.4233190036719562e-05, |
|
"loss": 1.294, |
|
"step": 1347 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 2.4154326750521083e-05, |
|
"loss": 1.1159, |
|
"step": 1348 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 2.4075551109781358e-05, |
|
"loss": 1.3525, |
|
"step": 1349 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 2.3996863381637046e-05, |
|
"loss": 1.2539, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 2.3918263832926708e-05, |
|
"loss": 1.2386, |
|
"step": 1351 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 2.3839752730189862e-05, |
|
"loss": 1.2319, |
|
"step": 1352 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 2.376133033966611e-05, |
|
"loss": 1.2072, |
|
"step": 1353 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 2.3682996927294214e-05, |
|
"loss": 0.8596, |
|
"step": 1354 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 2.3604752758711206e-05, |
|
"loss": 1.1563, |
|
"step": 1355 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 2.3526598099251472e-05, |
|
"loss": 1.0832, |
|
"step": 1356 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 2.3448533213945884e-05, |
|
"loss": 1.2598, |
|
"step": 1357 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 2.3370558367520855e-05, |
|
"loss": 1.1482, |
|
"step": 1358 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 2.3292673824397487e-05, |
|
"loss": 1.1527, |
|
"step": 1359 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 2.321487984869064e-05, |
|
"loss": 1.2964, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 2.313717670420804e-05, |
|
"loss": 1.1139, |
|
"step": 1361 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 2.305956465444945e-05, |
|
"loss": 1.3739, |
|
"step": 1362 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 2.2982043962605655e-05, |
|
"loss": 1.3723, |
|
"step": 1363 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 2.290461489155768e-05, |
|
"loss": 1.1796, |
|
"step": 1364 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 2.2827277703875805e-05, |
|
"loss": 0.8906, |
|
"step": 1365 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 2.275003266181877e-05, |
|
"loss": 1.1503, |
|
"step": 1366 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 2.2672880027332828e-05, |
|
"loss": 1.2305, |
|
"step": 1367 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 2.2595820062050855e-05, |
|
"loss": 1.292, |
|
"step": 1368 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 2.2518853027291487e-05, |
|
"loss": 0.9769, |
|
"step": 1369 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 2.2441979184058222e-05, |
|
"loss": 1.237, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 2.236519879303853e-05, |
|
"loss": 1.2916, |
|
"step": 1371 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 2.2288512114602983e-05, |
|
"loss": 0.9576, |
|
"step": 1372 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 2.2211919408804353e-05, |
|
"loss": 1.0593, |
|
"step": 1373 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 2.213542093537675e-05, |
|
"loss": 1.2593, |
|
"step": 1374 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 2.2059016953734725e-05, |
|
"loss": 1.4077, |
|
"step": 1375 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 2.1982707722972383e-05, |
|
"loss": 1.3385, |
|
"step": 1376 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 2.1906493501862575e-05, |
|
"loss": 1.2153, |
|
"step": 1377 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 2.1830374548855902e-05, |
|
"loss": 1.0724, |
|
"step": 1378 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 2.1754351122079923e-05, |
|
"loss": 1.1901, |
|
"step": 1379 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 2.167842347933826e-05, |
|
"loss": 1.2802, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 2.1602591878109723e-05, |
|
"loss": 1.1832, |
|
"step": 1381 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 2.1526856575547444e-05, |
|
"loss": 1.27, |
|
"step": 1382 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 2.1451217828477944e-05, |
|
"loss": 1.1813, |
|
"step": 1383 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 2.1375675893400372e-05, |
|
"loss": 1.2412, |
|
"step": 1384 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 2.1300231026485556e-05, |
|
"loss": 1.3666, |
|
"step": 1385 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 2.1224883483575163e-05, |
|
"loss": 1.0566, |
|
"step": 1386 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 2.114963352018081e-05, |
|
"loss": 1.2551, |
|
"step": 1387 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 2.107448139148323e-05, |
|
"loss": 1.4707, |
|
"step": 1388 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 2.0999427352331362e-05, |
|
"loss": 1.102, |
|
"step": 1389 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 2.0924471657241524e-05, |
|
"loss": 1.0989, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 2.084961456039657e-05, |
|
"loss": 1.1448, |
|
"step": 1391 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 2.0774856315644954e-05, |
|
"loss": 1.1827, |
|
"step": 1392 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 2.0700197176499925e-05, |
|
"loss": 1.2116, |
|
"step": 1393 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 2.0625637396138664e-05, |
|
"loss": 1.2914, |
|
"step": 1394 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 2.0551177227401398e-05, |
|
"loss": 1.2934, |
|
"step": 1395 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 2.0476816922790575e-05, |
|
"loss": 1.2691, |
|
"step": 1396 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 2.040255673446999e-05, |
|
"loss": 1.3639, |
|
"step": 1397 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 2.0328396914263927e-05, |
|
"loss": 1.0915, |
|
"step": 1398 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 2.025433771365632e-05, |
|
"loss": 1.0743, |
|
"step": 1399 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 2.0180379383789903e-05, |
|
"loss": 1.4099, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 2.0106522175465292e-05, |
|
"loss": 1.1789, |
|
"step": 1401 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 2.0032766339140248e-05, |
|
"loss": 1.1932, |
|
"step": 1402 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.9959112124928743e-05, |
|
"loss": 1.3066, |
|
"step": 1403 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.9885559782600126e-05, |
|
"loss": 1.4066, |
|
"step": 1404 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.981210956157834e-05, |
|
"loss": 1.1259, |
|
"step": 1405 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.9738761710940968e-05, |
|
"loss": 1.1671, |
|
"step": 1406 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.9665516479418467e-05, |
|
"loss": 1.0956, |
|
"step": 1407 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.9592374115393292e-05, |
|
"loss": 1.1723, |
|
"step": 1408 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.951933486689907e-05, |
|
"loss": 1.1064, |
|
"step": 1409 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.9446398981619756e-05, |
|
"loss": 1.4602, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.937356670688878e-05, |
|
"loss": 1.2421, |
|
"step": 1411 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.9300838289688216e-05, |
|
"loss": 1.0283, |
|
"step": 1412 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.9228213976647964e-05, |
|
"loss": 1.1432, |
|
"step": 1413 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.9155694014044877e-05, |
|
"loss": 1.1459, |
|
"step": 1414 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.908327864780195e-05, |
|
"loss": 1.1085, |
|
"step": 1415 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.9010968123487477e-05, |
|
"loss": 1.1844, |
|
"step": 1416 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.8938762686314238e-05, |
|
"loss": 1.4597, |
|
"step": 1417 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.8866662581138646e-05, |
|
"loss": 1.3673, |
|
"step": 1418 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.879466805245986e-05, |
|
"loss": 1.2964, |
|
"step": 1419 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.872277934441914e-05, |
|
"loss": 1.1169, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.8650996700798795e-05, |
|
"loss": 0.9918, |
|
"step": 1421 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.8579320365021508e-05, |
|
"loss": 1.3039, |
|
"step": 1422 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.8507750580149436e-05, |
|
"loss": 1.2077, |
|
"step": 1423 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.8436287588883417e-05, |
|
"loss": 1.047, |
|
"step": 1424 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.8364931633562148e-05, |
|
"loss": 1.3578, |
|
"step": 1425 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.8293682956161357e-05, |
|
"loss": 0.9352, |
|
"step": 1426 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.8222541798292963e-05, |
|
"loss": 1.1146, |
|
"step": 1427 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.8151508401204297e-05, |
|
"loss": 1.0679, |
|
"step": 1428 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.808058300577724e-05, |
|
"loss": 1.2617, |
|
"step": 1429 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.800976585252745e-05, |
|
"loss": 1.2403, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.7939057181603503e-05, |
|
"loss": 1.2127, |
|
"step": 1431 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.7868457232786117e-05, |
|
"loss": 1.372, |
|
"step": 1432 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.7797966245487314e-05, |
|
"loss": 1.2297, |
|
"step": 1433 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.7727584458749608e-05, |
|
"loss": 1.1727, |
|
"step": 1434 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.7657312111245215e-05, |
|
"loss": 1.3122, |
|
"step": 1435 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.7587149441275235e-05, |
|
"loss": 1.1499, |
|
"step": 1436 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.751709668676882e-05, |
|
"loss": 1.4439, |
|
"step": 1437 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.7447154085282396e-05, |
|
"loss": 1.1983, |
|
"step": 1438 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.737732187399886e-05, |
|
"loss": 1.0593, |
|
"step": 1439 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.7307600289726746e-05, |
|
"loss": 1.1317, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.7237989568899443e-05, |
|
"loss": 1.016, |
|
"step": 1441 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.7168489947574406e-05, |
|
"loss": 1.3522, |
|
"step": 1442 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.7099101661432328e-05, |
|
"loss": 1.318, |
|
"step": 1443 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.7029824945776345e-05, |
|
"loss": 1.1763, |
|
"step": 1444 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.6960660035531256e-05, |
|
"loss": 1.1405, |
|
"step": 1445 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.6891607165242716e-05, |
|
"loss": 1.1446, |
|
"step": 1446 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.6822666569076434e-05, |
|
"loss": 1.1657, |
|
"step": 1447 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.6753838480817398e-05, |
|
"loss": 1.2864, |
|
"step": 1448 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.6685123133869044e-05, |
|
"loss": 1.0678, |
|
"step": 1449 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.661652076125252e-05, |
|
"loss": 1.2132, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.654803159560583e-05, |
|
"loss": 1.3107, |
|
"step": 1451 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.647965586918314e-05, |
|
"loss": 0.9526, |
|
"step": 1452 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.6411393813853892e-05, |
|
"loss": 1.3004, |
|
"step": 1453 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.634324566110203e-05, |
|
"loss": 1.1973, |
|
"step": 1454 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.6275211642025285e-05, |
|
"loss": 1.584, |
|
"step": 1455 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.6207291987334338e-05, |
|
"loss": 1.0228, |
|
"step": 1456 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.613948692735205e-05, |
|
"loss": 1.1348, |
|
"step": 1457 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.6071796692012663e-05, |
|
"loss": 0.9529, |
|
"step": 1458 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.6004221510861057e-05, |
|
"loss": 1.2817, |
|
"step": 1459 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.5936761613051937e-05, |
|
"loss": 1.1639, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.5869417227349075e-05, |
|
"loss": 0.9147, |
|
"step": 1461 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.5802188582124538e-05, |
|
"loss": 1.2894, |
|
"step": 1462 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.5735075905357884e-05, |
|
"loss": 1.0294, |
|
"step": 1463 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.5668079424635424e-05, |
|
"loss": 1.0803, |
|
"step": 1464 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.5601199367149432e-05, |
|
"loss": 1.0402, |
|
"step": 1465 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.5534435959697363e-05, |
|
"loss": 1.0795, |
|
"step": 1466 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.5467789428681146e-05, |
|
"loss": 1.3021, |
|
"step": 1467 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.540126000010632e-05, |
|
"loss": 1.267, |
|
"step": 1468 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.5334847899581346e-05, |
|
"loss": 1.2536, |
|
"step": 1469 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.5268553352316788e-05, |
|
"loss": 1.1807, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.5202376583124616e-05, |
|
"loss": 1.1125, |
|
"step": 1471 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.5136317816417334e-05, |
|
"loss": 1.356, |
|
"step": 1472 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.5070377276207348e-05, |
|
"loss": 1.2827, |
|
"step": 1473 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.5004555186106122e-05, |
|
"loss": 1.2565, |
|
"step": 1474 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.4938851769323447e-05, |
|
"loss": 1.1855, |
|
"step": 1475 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.4873267248666678e-05, |
|
"loss": 1.3973, |
|
"step": 1476 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.4807801846539976e-05, |
|
"loss": 1.1853, |
|
"step": 1477 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.4742455784943576e-05, |
|
"loss": 1.5008, |
|
"step": 1478 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.4677229285472988e-05, |
|
"loss": 0.9897, |
|
"step": 1479 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.4612122569318281e-05, |
|
"loss": 1.4022, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.4547135857263371e-05, |
|
"loss": 1.1371, |
|
"step": 1481 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.448226936968517e-05, |
|
"loss": 0.9957, |
|
"step": 1482 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.4417523326552912e-05, |
|
"loss": 1.3553, |
|
"step": 1483 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.4352897947427396e-05, |
|
"loss": 1.2868, |
|
"step": 1484 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.4288393451460248e-05, |
|
"loss": 1.1517, |
|
"step": 1485 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.4224010057393139e-05, |
|
"loss": 1.1406, |
|
"step": 1486 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.4159747983557092e-05, |
|
"loss": 1.1375, |
|
"step": 1487 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.4095607447871712e-05, |
|
"loss": 1.3648, |
|
"step": 1488 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.4031588667844476e-05, |
|
"loss": 1.1563, |
|
"step": 1489 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.3967691860569915e-05, |
|
"loss": 1.1333, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.3903917242729003e-05, |
|
"loss": 1.3929, |
|
"step": 1491 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.3840265030588323e-05, |
|
"loss": 1.2062, |
|
"step": 1492 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.3776735439999378e-05, |
|
"loss": 1.17, |
|
"step": 1493 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.371332868639783e-05, |
|
"loss": 1.1508, |
|
"step": 1494 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.3650044984802829e-05, |
|
"loss": 1.4525, |
|
"step": 1495 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.358688454981621e-05, |
|
"loss": 1.3368, |
|
"step": 1496 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.352384759562179e-05, |
|
"loss": 1.2405, |
|
"step": 1497 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.3460934335984677e-05, |
|
"loss": 1.2914, |
|
"step": 1498 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.3398144984250494e-05, |
|
"loss": 1.3549, |
|
"step": 1499 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.3335479753344687e-05, |
|
"loss": 1.0794, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.3272938855771805e-05, |
|
"loss": 1.275, |
|
"step": 1501 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.3210522503614754e-05, |
|
"loss": 1.3397, |
|
"step": 1502 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.3148230908534098e-05, |
|
"loss": 1.0313, |
|
"step": 1503 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.3086064281767346e-05, |
|
"loss": 1.286, |
|
"step": 1504 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.3024022834128208e-05, |
|
"loss": 1.2771, |
|
"step": 1505 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.2962106776005916e-05, |
|
"loss": 1.1995, |
|
"step": 1506 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.2900316317364497e-05, |
|
"loss": 1.4086, |
|
"step": 1507 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.2838651667742012e-05, |
|
"loss": 1.0927, |
|
"step": 1508 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.2777113036249928e-05, |
|
"loss": 1.1783, |
|
"step": 1509 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.2715700631572386e-05, |
|
"loss": 1.4828, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.2654414661965447e-05, |
|
"loss": 1.199, |
|
"step": 1511 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.2593255335256437e-05, |
|
"loss": 1.346, |
|
"step": 1512 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.25322228588432e-05, |
|
"loss": 1.2825, |
|
"step": 1513 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.2471317439693436e-05, |
|
"loss": 1.3238, |
|
"step": 1514 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.2410539284343976e-05, |
|
"loss": 1.2738, |
|
"step": 1515 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.2349888598900077e-05, |
|
"loss": 1.1887, |
|
"step": 1516 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.2289365589034745e-05, |
|
"loss": 1.1967, |
|
"step": 1517 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.2228970459988015e-05, |
|
"loss": 1.0423, |
|
"step": 1518 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.2168703416566274e-05, |
|
"loss": 1.104, |
|
"step": 1519 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.2108564663141542e-05, |
|
"loss": 1.3101, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.2048554403650803e-05, |
|
"loss": 1.224, |
|
"step": 1521 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.198867284159531e-05, |
|
"loss": 1.2758, |
|
"step": 1522 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.1928920180039877e-05, |
|
"loss": 1.1773, |
|
"step": 1523 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.1869296621612209e-05, |
|
"loss": 1.2702, |
|
"step": 1524 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.180980236850221e-05, |
|
"loss": 1.1148, |
|
"step": 1525 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.175043762246129e-05, |
|
"loss": 1.2368, |
|
"step": 1526 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.1691202584801692e-05, |
|
"loss": 0.8922, |
|
"step": 1527 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.1632097456395802e-05, |
|
"loss": 1.2246, |
|
"step": 1528 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.1573122437675466e-05, |
|
"loss": 1.2745, |
|
"step": 1529 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.1514277728631324e-05, |
|
"loss": 1.2237, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.1455563528812113e-05, |
|
"loss": 1.3435, |
|
"step": 1531 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.1396980037324002e-05, |
|
"loss": 1.2888, |
|
"step": 1532 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.1338527452829911e-05, |
|
"loss": 1.3382, |
|
"step": 1533 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.128020597354884e-05, |
|
"loss": 1.313, |
|
"step": 1534 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.122201579725521e-05, |
|
"loss": 1.1705, |
|
"step": 1535 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.1163957121278163e-05, |
|
"loss": 1.2137, |
|
"step": 1536 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.1106030142500917e-05, |
|
"loss": 1.24, |
|
"step": 1537 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.104823505736009e-05, |
|
"loss": 1.1323, |
|
"step": 1538 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.0990572061845034e-05, |
|
"loss": 1.2191, |
|
"step": 1539 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.093304135149717e-05, |
|
"loss": 1.0818, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.0875643121409307e-05, |
|
"loss": 1.2488, |
|
"step": 1541 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.0818377566225074e-05, |
|
"loss": 1.1697, |
|
"step": 1542 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.0761244880138078e-05, |
|
"loss": 1.0373, |
|
"step": 1543 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.0704245256891421e-05, |
|
"loss": 1.1307, |
|
"step": 1544 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.0647378889776954e-05, |
|
"loss": 1.3626, |
|
"step": 1545 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.0590645971634655e-05, |
|
"loss": 1.3001, |
|
"step": 1546 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.0534046694851945e-05, |
|
"loss": 1.1761, |
|
"step": 1547 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.0477581251363067e-05, |
|
"loss": 0.9441, |
|
"step": 1548 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.0421249832648417e-05, |
|
"loss": 1.1267, |
|
"step": 1549 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.0365052629733884e-05, |
|
"loss": 1.2547, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.030898983319024e-05, |
|
"loss": 1.1233, |
|
"step": 1551 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.0253061633132461e-05, |
|
"loss": 0.9727, |
|
"step": 1552 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.0197268219219087e-05, |
|
"loss": 1.0764, |
|
"step": 1553 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.0141609780651584e-05, |
|
"loss": 1.1134, |
|
"step": 1554 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.008608650617371e-05, |
|
"loss": 1.0289, |
|
"step": 1555 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.0030698584070847e-05, |
|
"loss": 1.246, |
|
"step": 1556 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 9.975446202169431e-06, |
|
"loss": 1.1342, |
|
"step": 1557 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 9.92032954783621e-06, |
|
"loss": 1.4847, |
|
"step": 1558 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 9.865348807977698e-06, |
|
"loss": 1.0856, |
|
"step": 1559 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 9.810504169039509e-06, |
|
"loss": 1.115, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 9.755795817005686e-06, |
|
"loss": 1.3651, |
|
"step": 1561 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 9.701223937398152e-06, |
|
"loss": 1.1679, |
|
"step": 1562 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 9.646788715276023e-06, |
|
"loss": 0.8894, |
|
"step": 1563 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 9.592490335234994e-06, |
|
"loss": 1.3147, |
|
"step": 1564 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 9.538328981406713e-06, |
|
"loss": 1.2616, |
|
"step": 1565 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 9.484304837458157e-06, |
|
"loss": 1.1916, |
|
"step": 1566 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 9.430418086591009e-06, |
|
"loss": 1.0985, |
|
"step": 1567 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 9.376668911541042e-06, |
|
"loss": 1.1392, |
|
"step": 1568 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 9.323057494577498e-06, |
|
"loss": 1.0583, |
|
"step": 1569 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 9.269584017502431e-06, |
|
"loss": 1.1925, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 9.216248661650195e-06, |
|
"loss": 1.4009, |
|
"step": 1571 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 9.163051607886703e-06, |
|
"loss": 1.0857, |
|
"step": 1572 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 9.109993036608883e-06, |
|
"loss": 1.4733, |
|
"step": 1573 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 9.057073127744064e-06, |
|
"loss": 1.3363, |
|
"step": 1574 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 9.004292060749348e-06, |
|
"loss": 1.2422, |
|
"step": 1575 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 8.951650014611018e-06, |
|
"loss": 1.3727, |
|
"step": 1576 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 8.899147167843908e-06, |
|
"loss": 1.0963, |
|
"step": 1577 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 8.846783698490834e-06, |
|
"loss": 1.3198, |
|
"step": 1578 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 8.794559784121937e-06, |
|
"loss": 1.4718, |
|
"step": 1579 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 8.742475601834132e-06, |
|
"loss": 1.237, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 8.690531328250489e-06, |
|
"loss": 1.3897, |
|
"step": 1581 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 8.638727139519636e-06, |
|
"loss": 1.2511, |
|
"step": 1582 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 8.587063211315139e-06, |
|
"loss": 1.1449, |
|
"step": 1583 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 8.535539718834929e-06, |
|
"loss": 1.1666, |
|
"step": 1584 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 8.484156836800739e-06, |
|
"loss": 1.1349, |
|
"step": 1585 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 8.432914739457432e-06, |
|
"loss": 1.1784, |
|
"step": 1586 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 8.381813600572481e-06, |
|
"loss": 1.0811, |
|
"step": 1587 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 8.330853593435345e-06, |
|
"loss": 0.9825, |
|
"step": 1588 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 8.280034890856885e-06, |
|
"loss": 1.2873, |
|
"step": 1589 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 8.22935766516879e-06, |
|
"loss": 1.0751, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 8.178822088222993e-06, |
|
"loss": 1.3585, |
|
"step": 1591 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 8.12842833139107e-06, |
|
"loss": 1.2388, |
|
"step": 1592 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 8.078176565563661e-06, |
|
"loss": 1.2442, |
|
"step": 1593 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 8.02806696114992e-06, |
|
"loss": 1.3153, |
|
"step": 1594 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 7.978099688076913e-06, |
|
"loss": 0.9505, |
|
"step": 1595 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 7.928274915789036e-06, |
|
"loss": 1.2725, |
|
"step": 1596 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 7.878592813247443e-06, |
|
"loss": 1.1283, |
|
"step": 1597 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 7.829053548929489e-06, |
|
"loss": 1.1668, |
|
"step": 1598 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 7.779657290828146e-06, |
|
"loss": 1.2682, |
|
"step": 1599 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 7.730404206451457e-06, |
|
"loss": 1.1203, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 7.681294462821925e-06, |
|
"loss": 1.2335, |
|
"step": 1601 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 7.632328226475971e-06, |
|
"loss": 0.9757, |
|
"step": 1602 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 7.58350566346337e-06, |
|
"loss": 1.3396, |
|
"step": 1603 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 7.5348269393466896e-06, |
|
"loss": 0.9868, |
|
"step": 1604 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 7.486292219200713e-06, |
|
"loss": 1.1997, |
|
"step": 1605 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 7.437901667611907e-06, |
|
"loss": 1.3741, |
|
"step": 1606 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 7.389655448677835e-06, |
|
"loss": 1.2155, |
|
"step": 1607 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 7.341553726006611e-06, |
|
"loss": 0.9943, |
|
"step": 1608 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 7.293596662716362e-06, |
|
"loss": 1.1267, |
|
"step": 1609 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 7.2457844214346425e-06, |
|
"loss": 1.34, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 7.198117164297907e-06, |
|
"loss": 1.2177, |
|
"step": 1611 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 7.150595052950954e-06, |
|
"loss": 1.1015, |
|
"step": 1612 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 7.103218248546378e-06, |
|
"loss": 1.0381, |
|
"step": 1613 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 7.055986911744017e-06, |
|
"loss": 1.0556, |
|
"step": 1614 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 7.0089012027104165e-06, |
|
"loss": 1.1059, |
|
"step": 1615 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 6.961961281118284e-06, |
|
"loss": 1.1099, |
|
"step": 1616 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 6.915167306145942e-06, |
|
"loss": 1.2652, |
|
"step": 1617 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 6.868519436476795e-06, |
|
"loss": 1.2154, |
|
"step": 1618 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 6.822017830298788e-06, |
|
"loss": 1.2193, |
|
"step": 1619 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 6.775662645303871e-06, |
|
"loss": 1.1096, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 6.7294540386874605e-06, |
|
"loss": 1.2095, |
|
"step": 1621 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 6.683392167147917e-06, |
|
"loss": 1.0729, |
|
"step": 1622 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 6.6374771868860045e-06, |
|
"loss": 1.461, |
|
"step": 1623 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 6.591709253604356e-06, |
|
"loss": 1.1322, |
|
"step": 1624 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 6.546088522506955e-06, |
|
"loss": 1.1282, |
|
"step": 1625 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 6.500615148298617e-06, |
|
"loss": 1.1667, |
|
"step": 1626 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 6.455289285184446e-06, |
|
"loss": 1.1554, |
|
"step": 1627 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 6.410111086869314e-06, |
|
"loss": 0.9844, |
|
"step": 1628 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 6.365080706557352e-06, |
|
"loss": 1.2855, |
|
"step": 1629 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 6.320198296951435e-06, |
|
"loss": 1.278, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 6.275464010252629e-06, |
|
"loss": 1.1833, |
|
"step": 1631 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 6.230877998159723e-06, |
|
"loss": 1.3404, |
|
"step": 1632 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 6.186440411868682e-06, |
|
"loss": 1.1578, |
|
"step": 1633 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 6.142151402072132e-06, |
|
"loss": 1.3695, |
|
"step": 1634 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 6.098011118958885e-06, |
|
"loss": 1.2596, |
|
"step": 1635 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 6.054019712213377e-06, |
|
"loss": 1.0202, |
|
"step": 1636 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 6.010177331015204e-06, |
|
"loss": 1.1857, |
|
"step": 1637 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 5.966484124038602e-06, |
|
"loss": 0.9059, |
|
"step": 1638 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 5.922940239451935e-06, |
|
"loss": 1.4132, |
|
"step": 1639 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 5.879545824917199e-06, |
|
"loss": 1.1798, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 5.836301027589525e-06, |
|
"loss": 1.1492, |
|
"step": 1641 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 5.793205994116674e-06, |
|
"loss": 1.4875, |
|
"step": 1642 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 5.75026087063854e-06, |
|
"loss": 1.3959, |
|
"step": 1643 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 5.707465802786654e-06, |
|
"loss": 0.9615, |
|
"step": 1644 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 5.664820935683696e-06, |
|
"loss": 1.3538, |
|
"step": 1645 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 5.622326413942996e-06, |
|
"loss": 1.2193, |
|
"step": 1646 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 5.5799823816680586e-06, |
|
"loss": 1.0537, |
|
"step": 1647 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 5.537788982452052e-06, |
|
"loss": 1.1547, |
|
"step": 1648 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 5.495746359377335e-06, |
|
"loss": 1.3141, |
|
"step": 1649 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 5.453854655014956e-06, |
|
"loss": 1.3389, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 5.412114011424191e-06, |
|
"loss": 1.0036, |
|
"step": 1651 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 5.370524570152058e-06, |
|
"loss": 1.0442, |
|
"step": 1652 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 5.329086472232825e-06, |
|
"loss": 1.2034, |
|
"step": 1653 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 5.287799858187548e-06, |
|
"loss": 1.3755, |
|
"step": 1654 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 5.2466648680235654e-06, |
|
"loss": 1.0389, |
|
"step": 1655 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 5.205681641234061e-06, |
|
"loss": 1.3614, |
|
"step": 1656 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 5.1648503167975705e-06, |
|
"loss": 1.2396, |
|
"step": 1657 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 5.124171033177499e-06, |
|
"loss": 1.1014, |
|
"step": 1658 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 5.083643928321679e-06, |
|
"loss": 1.2946, |
|
"step": 1659 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 5.043269139661871e-06, |
|
"loss": 1.1439, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 5.0030468041133535e-06, |
|
"loss": 0.9891, |
|
"step": 1661 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 4.962977058074381e-06, |
|
"loss": 1.2912, |
|
"step": 1662 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 4.92306003742577e-06, |
|
"loss": 1.3515, |
|
"step": 1663 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 4.88329587753043e-06, |
|
"loss": 1.0298, |
|
"step": 1664 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 4.843684713232915e-06, |
|
"loss": 1.3006, |
|
"step": 1665 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 4.804226678858936e-06, |
|
"loss": 1.1417, |
|
"step": 1666 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 4.7649219082149475e-06, |
|
"loss": 1.191, |
|
"step": 1667 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 4.7257705345876365e-06, |
|
"loss": 1.3271, |
|
"step": 1668 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 4.686772690743529e-06, |
|
"loss": 1.4339, |
|
"step": 1669 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 4.647928508928512e-06, |
|
"loss": 1.1019, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 4.609238120867387e-06, |
|
"loss": 0.9552, |
|
"step": 1671 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 4.570701657763415e-06, |
|
"loss": 1.3462, |
|
"step": 1672 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 4.532319250297901e-06, |
|
"loss": 1.3213, |
|
"step": 1673 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 4.494091028629699e-06, |
|
"loss": 1.3868, |
|
"step": 1674 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 4.4560171223948455e-06, |
|
"loss": 1.2072, |
|
"step": 1675 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 4.418097660706039e-06, |
|
"loss": 1.5743, |
|
"step": 1676 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 4.380332772152257e-06, |
|
"loss": 1.1274, |
|
"step": 1677 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 4.342722584798298e-06, |
|
"loss": 1.1995, |
|
"step": 1678 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 4.305267226184356e-06, |
|
"loss": 1.1405, |
|
"step": 1679 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 4.267966823325581e-06, |
|
"loss": 1.0487, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 4.230821502711657e-06, |
|
"loss": 1.0078, |
|
"step": 1681 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 4.193831390306352e-06, |
|
"loss": 1.1674, |
|
"step": 1682 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 4.1569966115471255e-06, |
|
"loss": 1.0929, |
|
"step": 1683 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 4.1203172913446775e-06, |
|
"loss": 1.5074, |
|
"step": 1684 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 4.083793554082521e-06, |
|
"loss": 1.1008, |
|
"step": 1685 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 4.047425523616577e-06, |
|
"loss": 1.1701, |
|
"step": 1686 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 4.01121332327476e-06, |
|
"loss": 1.0381, |
|
"step": 1687 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 3.975157075856528e-06, |
|
"loss": 1.1124, |
|
"step": 1688 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 3.939256903632494e-06, |
|
"loss": 1.1795, |
|
"step": 1689 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 3.903512928344017e-06, |
|
"loss": 1.1264, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 3.867925271202755e-06, |
|
"loss": 1.159, |
|
"step": 1691 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 3.832494052890284e-06, |
|
"loss": 1.2922, |
|
"step": 1692 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 3.7972193935576774e-06, |
|
"loss": 1.1712, |
|
"step": 1693 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 3.7621014128250977e-06, |
|
"loss": 1.2098, |
|
"step": 1694 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 3.7271402297814006e-06, |
|
"loss": 1.3432, |
|
"step": 1695 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 3.692335962983712e-06, |
|
"loss": 1.4964, |
|
"step": 1696 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 3.657688730457054e-06, |
|
"loss": 1.3647, |
|
"step": 1697 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 3.623198649693915e-06, |
|
"loss": 1.2143, |
|
"step": 1698 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 3.5888658376538655e-06, |
|
"loss": 1.1209, |
|
"step": 1699 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 3.554690410763173e-06, |
|
"loss": 1.0583, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 3.5206724849143837e-06, |
|
"loss": 1.4116, |
|
"step": 1701 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 3.4868121754659533e-06, |
|
"loss": 1.1682, |
|
"step": 1702 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 3.4531095972418105e-06, |
|
"loss": 1.1478, |
|
"step": 1703 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 3.419564864531044e-06, |
|
"loss": 1.4356, |
|
"step": 1704 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 3.386178091087444e-06, |
|
"loss": 1.3359, |
|
"step": 1705 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 3.3529493901291564e-06, |
|
"loss": 0.8788, |
|
"step": 1706 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 3.319878874338278e-06, |
|
"loss": 1.1486, |
|
"step": 1707 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 3.286966655860485e-06, |
|
"loss": 1.1786, |
|
"step": 1708 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 3.2542128463046497e-06, |
|
"loss": 1.1263, |
|
"step": 1709 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 3.2216175567424734e-06, |
|
"loss": 1.3651, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 3.1891808977080826e-06, |
|
"loss": 1.2084, |
|
"step": 1711 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 3.156902979197679e-06, |
|
"loss": 1.022, |
|
"step": 1712 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 3.1247839106691543e-06, |
|
"loss": 1.2045, |
|
"step": 1713 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 3.0928238010417275e-06, |
|
"loss": 1.242, |
|
"step": 1714 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 3.061022758695575e-06, |
|
"loss": 1.3751, |
|
"step": 1715 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 3.029380891471445e-06, |
|
"loss": 1.1772, |
|
"step": 1716 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 2.9978983066703212e-06, |
|
"loss": 1.2256, |
|
"step": 1717 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 2.966575111053027e-06, |
|
"loss": 1.2171, |
|
"step": 1718 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 2.935411410839889e-06, |
|
"loss": 1.2218, |
|
"step": 1719 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 2.9044073117103778e-06, |
|
"loss": 1.1875, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 2.8735629188027245e-06, |
|
"loss": 1.0663, |
|
"step": 1721 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 2.842878336713578e-06, |
|
"loss": 1.1733, |
|
"step": 1722 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 2.812353669497664e-06, |
|
"loss": 1.1406, |
|
"step": 1723 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 2.7819890206674084e-06, |
|
"loss": 0.8795, |
|
"step": 1724 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 2.7517844931926107e-06, |
|
"loss": 1.2653, |
|
"step": 1725 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 2.721740189500066e-06, |
|
"loss": 1.3022, |
|
"step": 1726 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 2.691856211473237e-06, |
|
"loss": 1.0345, |
|
"step": 1727 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 2.6621326604519215e-06, |
|
"loss": 1.327, |
|
"step": 1728 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 2.6325696372318688e-06, |
|
"loss": 1.1517, |
|
"step": 1729 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 2.6031672420644692e-06, |
|
"loss": 1.0326, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 2.5739255746564216e-06, |
|
"loss": 1.2462, |
|
"step": 1731 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 2.5448447341693495e-06, |
|
"loss": 1.0468, |
|
"step": 1732 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 2.5159248192195283e-06, |
|
"loss": 1.2062, |
|
"step": 1733 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 2.4871659278774883e-06, |
|
"loss": 1.1731, |
|
"step": 1734 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 2.458568157667729e-06, |
|
"loss": 1.0717, |
|
"step": 1735 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 2.430131605568353e-06, |
|
"loss": 1.4119, |
|
"step": 1736 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 2.4018563680107962e-06, |
|
"loss": 1.1063, |
|
"step": 1737 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 2.37374254087942e-06, |
|
"loss": 1.3684, |
|
"step": 1738 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 2.3457902195112236e-06, |
|
"loss": 1.0844, |
|
"step": 1739 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 2.31799949869555e-06, |
|
"loss": 1.3613, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 2.290370472673714e-06, |
|
"loss": 0.9189, |
|
"step": 1741 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 2.262903235138725e-06, |
|
"loss": 1.4039, |
|
"step": 1742 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 2.2355978792349295e-06, |
|
"loss": 1.0626, |
|
"step": 1743 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 2.2084544975577383e-06, |
|
"loss": 1.3145, |
|
"step": 1744 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 2.1814731821532762e-06, |
|
"loss": 1.1313, |
|
"step": 1745 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 2.1546540245180826e-06, |
|
"loss": 1.0545, |
|
"step": 1746 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 2.1279971155988066e-06, |
|
"loss": 1.2812, |
|
"step": 1747 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 2.1015025457919004e-06, |
|
"loss": 1.1375, |
|
"step": 1748 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 2.0751704049432943e-06, |
|
"loss": 1.1879, |
|
"step": 1749 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 2.0490007823481092e-06, |
|
"loss": 0.9773, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 2.022993766750364e-06, |
|
"loss": 1.3692, |
|
"step": 1751 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.997149446342633e-06, |
|
"loss": 1.2848, |
|
"step": 1752 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.971467908765795e-06, |
|
"loss": 0.9358, |
|
"step": 1753 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.945949241108708e-06, |
|
"loss": 1.2165, |
|
"step": 1754 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.920593529907916e-06, |
|
"loss": 1.0563, |
|
"step": 1755 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.8954008611473616e-06, |
|
"loss": 1.2586, |
|
"step": 1756 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.8703713202580963e-06, |
|
"loss": 1.4222, |
|
"step": 1757 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.8455049921179856e-06, |
|
"loss": 1.2894, |
|
"step": 1758 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.820801961051427e-06, |
|
"loss": 1.1405, |
|
"step": 1759 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.7962623108290554e-06, |
|
"loss": 1.2014, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.7718861246674656e-06, |
|
"loss": 1.1223, |
|
"step": 1761 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.7476734852289234e-06, |
|
"loss": 1.23, |
|
"step": 1762 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.7236244746210994e-06, |
|
"loss": 1.2078, |
|
"step": 1763 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.6997391743967695e-06, |
|
"loss": 0.9517, |
|
"step": 1764 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.6760176655535641e-06, |
|
"loss": 1.1362, |
|
"step": 1765 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.65246002853367e-06, |
|
"loss": 1.1904, |
|
"step": 1766 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.629066343223562e-06, |
|
"loss": 1.4042, |
|
"step": 1767 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.6058366889537547e-06, |
|
"loss": 0.9662, |
|
"step": 1768 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.5827711444985016e-06, |
|
"loss": 1.1057, |
|
"step": 1769 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.559869788075541e-06, |
|
"loss": 1.3034, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.5371326973458389e-06, |
|
"loss": 1.1779, |
|
"step": 1771 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.514559949413319e-06, |
|
"loss": 1.2365, |
|
"step": 1772 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.4921516208246e-06, |
|
"loss": 1.4841, |
|
"step": 1773 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.4699077875687251e-06, |
|
"loss": 0.9955, |
|
"step": 1774 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.447828525076933e-06, |
|
"loss": 1.3027, |
|
"step": 1775 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.425913908222376e-06, |
|
"loss": 1.2471, |
|
"step": 1776 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.404164011319875e-06, |
|
"loss": 1.2244, |
|
"step": 1777 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.3825789081256813e-06, |
|
"loss": 1.0797, |
|
"step": 1778 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.3611586718371871e-06, |
|
"loss": 1.2535, |
|
"step": 1779 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.3399033750927325e-06, |
|
"loss": 1.3052, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.3188130899713102e-06, |
|
"loss": 1.2229, |
|
"step": 1781 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.2978878879923439e-06, |
|
"loss": 1.1745, |
|
"step": 1782 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.2771278401154495e-06, |
|
"loss": 1.116, |
|
"step": 1783 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.2565330167401745e-06, |
|
"loss": 1.1535, |
|
"step": 1784 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.236103487705792e-06, |
|
"loss": 1.6101, |
|
"step": 1785 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.2158393222910236e-06, |
|
"loss": 1.2188, |
|
"step": 1786 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.1957405892138396e-06, |
|
"loss": 1.4719, |
|
"step": 1787 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.175807356631209e-06, |
|
"loss": 1.2328, |
|
"step": 1788 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.156039692138855e-06, |
|
"loss": 1.1181, |
|
"step": 1789 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.1364376627710726e-06, |
|
"loss": 1.3365, |
|
"step": 1790 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.1170013350004448e-06, |
|
"loss": 1.1572, |
|
"step": 1791 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.097730774737643e-06, |
|
"loss": 1.2458, |
|
"step": 1792 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.0786260473312105e-06, |
|
"loss": 1.3213, |
|
"step": 1793 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.0596872175673455e-06, |
|
"loss": 1.2084, |
|
"step": 1794 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.0409143496696527e-06, |
|
"loss": 1.3271, |
|
"step": 1795 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.0223075072989418e-06, |
|
"loss": 1.2337, |
|
"step": 1796 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.0038667535530232e-06, |
|
"loss": 1.2558, |
|
"step": 1797 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 9.855921509664746e-07, |
|
"loss": 1.1997, |
|
"step": 1798 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 9.674837615104349e-07, |
|
"loss": 1.3758, |
|
"step": 1799 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 9.495416465924112e-07, |
|
"loss": 1.0377, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 9.317658670560336e-07, |
|
"loss": 1.1146, |
|
"step": 1801 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 9.141564831808947e-07, |
|
"loss": 0.9778, |
|
"step": 1802 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 8.967135546823047e-07, |
|
"loss": 1.138, |
|
"step": 1803 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 8.794371407111091e-07, |
|
"loss": 1.2464, |
|
"step": 1804 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 8.623272998534881e-07, |
|
"loss": 1.2632, |
|
"step": 1805 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 8.453840901307519e-07, |
|
"loss": 1.2735, |
|
"step": 1806 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 8.286075689991457e-07, |
|
"loss": 1.1872, |
|
"step": 1807 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 8.11997793349667e-07, |
|
"loss": 1.1971, |
|
"step": 1808 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 7.955548195078433e-07, |
|
"loss": 1.3481, |
|
"step": 1809 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 7.792787032335658e-07, |
|
"loss": 0.9743, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 7.63169499720906e-07, |
|
"loss": 1.2233, |
|
"step": 1811 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 7.472272635978994e-07, |
|
"loss": 1.4467, |
|
"step": 1812 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 7.314520489263788e-07, |
|
"loss": 1.3375, |
|
"step": 1813 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 7.158439092018076e-07, |
|
"loss": 1.3292, |
|
"step": 1814 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 7.004028973530585e-07, |
|
"loss": 1.0034, |
|
"step": 1815 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 6.851290657422627e-07, |
|
"loss": 0.915, |
|
"step": 1816 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 6.700224661646326e-07, |
|
"loss": 1.0902, |
|
"step": 1817 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 6.550831498482679e-07, |
|
"loss": 1.0471, |
|
"step": 1818 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 6.403111674539997e-07, |
|
"loss": 1.2116, |
|
"step": 1819 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 6.257065690752128e-07, |
|
"loss": 0.9323, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 6.112694042376632e-07, |
|
"loss": 1.0216, |
|
"step": 1821 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 5.969997218993328e-07, |
|
"loss": 1.1321, |
|
"step": 1822 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 5.828975704502582e-07, |
|
"loss": 1.0969, |
|
"step": 1823 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 5.689629977123412e-07, |
|
"loss": 1.3603, |
|
"step": 1824 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 5.551960509392218e-07, |
|
"loss": 1.1131, |
|
"step": 1825 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 5.415967768160945e-07, |
|
"loss": 1.1443, |
|
"step": 1826 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 5.2816522145957e-07, |
|
"loss": 1.2909, |
|
"step": 1827 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 5.149014304174915e-07, |
|
"loss": 1.4178, |
|
"step": 1828 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 5.018054486687962e-07, |
|
"loss": 1.0522, |
|
"step": 1829 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 4.888773206233765e-07, |
|
"loss": 1.2744, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 4.7611709012190253e-07, |
|
"loss": 1.3265, |
|
"step": 1831 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 4.6352480043568845e-07, |
|
"loss": 1.3634, |
|
"step": 1832 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 4.5110049426653755e-07, |
|
"loss": 1.4133, |
|
"step": 1833 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 4.3884421374661975e-07, |
|
"loss": 1.249, |
|
"step": 1834 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 4.267560004382942e-07, |
|
"loss": 1.2334, |
|
"step": 1835 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 4.1483589533399257e-07, |
|
"loss": 1.0328, |
|
"step": 1836 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 4.030839388560803e-07, |
|
"loss": 1.0827, |
|
"step": 1837 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 3.9150017085669565e-07, |
|
"loss": 1.2826, |
|
"step": 1838 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 3.8008463061764977e-07, |
|
"loss": 1.3065, |
|
"step": 1839 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 3.688373568502601e-07, |
|
"loss": 1.2179, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 3.5775838769525615e-07, |
|
"loss": 0.9292, |
|
"step": 1841 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 3.4684776072260173e-07, |
|
"loss": 1.4614, |
|
"step": 1842 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 3.361055129314117e-07, |
|
"loss": 1.0518, |
|
"step": 1843 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 3.255316807498077e-07, |
|
"loss": 1.1854, |
|
"step": 1844 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 3.151263000347793e-07, |
|
"loss": 1.0778, |
|
"step": 1845 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 3.0488940607210637e-07, |
|
"loss": 1.1983, |
|
"step": 1846 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 2.948210335761925e-07, |
|
"loss": 1.0366, |
|
"step": 1847 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 2.8492121668997065e-07, |
|
"loss": 1.1459, |
|
"step": 1848 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 2.751899889847864e-07, |
|
"loss": 1.1982, |
|
"step": 1849 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 2.656273834602763e-07, |
|
"loss": 0.9119, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 2.562334325442728e-07, |
|
"loss": 1.1973, |
|
"step": 1851 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 2.4700816809266616e-07, |
|
"loss": 1.2599, |
|
"step": 1852 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 2.379516213893207e-07, |
|
"loss": 1.3151, |
|
"step": 1853 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 2.290638231459641e-07, |
|
"loss": 1.2084, |
|
"step": 1854 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 2.2034480350208164e-07, |
|
"loss": 1.2637, |
|
"step": 1855 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 2.1179459202479435e-07, |
|
"loss": 1.2562, |
|
"step": 1856 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 2.0341321770880327e-07, |
|
"loss": 0.9374, |
|
"step": 1857 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.9520070897623978e-07, |
|
"loss": 1.2063, |
|
"step": 1858 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.8715709367660982e-07, |
|
"loss": 1.1011, |
|
"step": 1859 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.792823990866721e-07, |
|
"loss": 1.3395, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.715766519103712e-07, |
|
"loss": 1.1728, |
|
"step": 1861 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.6403987827872669e-07, |
|
"loss": 1.0144, |
|
"step": 1862 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.5667210374973874e-07, |
|
"loss": 1.1882, |
|
"step": 1863 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.4947335330832702e-07, |
|
"loss": 1.411, |
|
"step": 1864 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.4244365136623085e-07, |
|
"loss": 1.2906, |
|
"step": 1865 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.3558302176192583e-07, |
|
"loss": 1.1126, |
|
"step": 1866 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.2889148776054628e-07, |
|
"loss": 1.4038, |
|
"step": 1867 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.2236907205379623e-07, |
|
"loss": 1.2224, |
|
"step": 1868 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.160157967598996e-07, |
|
"loss": 1.3104, |
|
"step": 1869 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.0983168342348915e-07, |
|
"loss": 1.1886, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.0381675301556759e-07, |
|
"loss": 1.0505, |
|
"step": 1871 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 9.797102593339658e-08, |
|
"loss": 1.0326, |
|
"step": 1872 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 9.229452200048005e-08, |
|
"loss": 1.2758, |
|
"step": 1873 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 8.678726046644214e-08, |
|
"loss": 1.1204, |
|
"step": 1874 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 8.144926000701048e-08, |
|
"loss": 1.0465, |
|
"step": 1875 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 7.628053872390517e-08, |
|
"loss": 1.0711, |
|
"step": 1876 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 7.128111414482775e-08, |
|
"loss": 1.2398, |
|
"step": 1877 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 6.645100322336117e-08, |
|
"loss": 1.2506, |
|
"step": 1878 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 6.179022233893106e-08, |
|
"loss": 1.4724, |
|
"step": 1879 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 5.729878729675009e-08, |
|
"loss": 1.0328, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 5.297671332775145e-08, |
|
"loss": 1.1325, |
|
"step": 1881 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 4.88240150885555e-08, |
|
"loss": 1.1838, |
|
"step": 1882 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 4.484070666140872e-08, |
|
"loss": 1.1584, |
|
"step": 1883 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 4.102680155413929e-08, |
|
"loss": 1.4427, |
|
"step": 1884 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 3.738231270010717e-08, |
|
"loss": 1.1909, |
|
"step": 1885 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 3.3907252458176276e-08, |
|
"loss": 1.2926, |
|
"step": 1886 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 3.060163261263682e-08, |
|
"loss": 1.1723, |
|
"step": 1887 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 2.7465464373205298e-08, |
|
"loss": 1.066, |
|
"step": 1888 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 2.4498758374968954e-08, |
|
"loss": 1.0458, |
|
"step": 1889 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 2.1701524678346963e-08, |
|
"loss": 1.1118, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.9073772769051533e-08, |
|
"loss": 1.2084, |
|
"step": 1891 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.6615511558082385e-08, |
|
"loss": 1.2864, |
|
"step": 1892 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.4326749381665672e-08, |
|
"loss": 1.0176, |
|
"step": 1893 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.220749400123733e-08, |
|
"loss": 1.3828, |
|
"step": 1894 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.0257752603420879e-08, |
|
"loss": 1.1612, |
|
"step": 1895 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 8.47753179999966e-09, |
|
"loss": 1.3129, |
|
"step": 1896 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 6.866837627889089e-09, |
|
"loss": 1.3022, |
|
"step": 1897 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 5.425675549136644e-09, |
|
"loss": 1.3343, |
|
"step": 1898 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.154050450871916e-09, |
|
"loss": 1.0691, |
|
"step": 1899 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.0519666453121544e-09, |
|
"loss": 1.0664, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 2.119427869745616e-09, |
|
"loss": 1.355, |
|
"step": 1901 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.356437286503809e-09, |
|
"loss": 1.0196, |
|
"step": 1902 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 7.62997482978145e-10, |
|
"loss": 1.1987, |
|
"step": 1903 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.3911047158663447e-10, |
|
"loss": 1.0693, |
|
"step": 1904 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 8.477768976833389e-11, |
|
"loss": 1.343, |
|
"step": 1905 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 0.0, |
|
"loss": 1.166, |
|
"step": 1906 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"step": 1906, |
|
"total_flos": 1.288320957371174e+19, |
|
"train_loss": 1.2139620957319535, |
|
"train_runtime": 124170.7649, |
|
"train_samples_per_second": 0.061, |
|
"train_steps_per_second": 0.015 |
|
} |
|
], |
|
"logging_steps": 1.0, |
|
"max_steps": 1906, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 100, |
|
"total_flos": 1.288320957371174e+19, |
|
"train_batch_size": 1, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|