{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "global_step": 1784,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.0, "learning_rate": 0.0, "loss": 4.9039, "step": 1 },
    { "epoch": 0.0, "learning_rate": 0.0, "loss": 5.0473, "step": 2 },
    { "epoch": 0.0, "learning_rate": 6.000000000000001e-08, "loss": 5.1588, "step": 3 },
    { "epoch": 0.0, "learning_rate": 1.2000000000000002e-07, "loss": 4.7, "step": 4 },
    { "epoch": 0.0, "learning_rate": 1.8e-07, "loss": 4.7898, "step": 5 },
    { "epoch": 0.0, "learning_rate": 2.4000000000000003e-07, "loss": 4.8486, "step": 6 },
    { "epoch": 0.0, "learning_rate": 3.0000000000000004e-07, "loss": 4.7039, "step": 7 },
    { "epoch": 0.0, "learning_rate": 3.6e-07, "loss": 4.8578, "step": 8 },
    { "epoch": 0.01, "learning_rate": 4.2e-07, "loss": 5.0793, "step": 9 },
    { "epoch": 0.01, "learning_rate": 4.800000000000001e-07, "loss": 4.7626, "step": 10 },
    { "epoch": 0.01, "learning_rate": 5.4e-07, "loss": 4.8163, "step": 11 },
    { "epoch": 0.01, "learning_rate": 6.000000000000001e-07, "loss": 4.7764, "step": 12 },
    { "epoch": 0.01, "learning_rate": 6.599999999999999e-07, "loss": 4.9868, "step": 13 },
    { "epoch": 0.01, "learning_rate": 7.2e-07, "loss": 4.601, "step": 14 },
    { "epoch": 0.01, "learning_rate": 7.799999999999999e-07, "loss": 4.7462, "step": 15 },
    { "epoch": 0.01, "learning_rate": 8.4e-07, "loss": 4.8539, "step": 16 },
    { "epoch": 0.01, "learning_rate": 9e-07, "loss": 4.8762, "step": 17 },
    { "epoch": 0.01, "learning_rate": 9.600000000000001e-07, "loss": 4.7755, "step": 18 },
    { "epoch": 0.01, "learning_rate": 1.0200000000000002e-06, "loss": 4.7324, "step": 19 },
    { "epoch": 0.01, "learning_rate": 1.08e-06, "loss": 5.0197, "step": 20 },
    { "epoch": 0.01, "learning_rate": 1.14e-06, "loss": 4.7546, "step": 21 },
    { "epoch": 0.01, "learning_rate": 1.2000000000000002e-06, "loss": 5.0524, "step": 22 },
    { "epoch": 0.01, "learning_rate": 1.26e-06, "loss": 4.9026, "step": 23 },
    { "epoch": 0.01, "learning_rate": 1.3199999999999999e-06, "loss": 4.6494, "step": 24 },
    { "epoch": 0.01, "learning_rate": 1.38e-06, "loss": 4.8204, "step": 25 },
    { "epoch": 0.01, "learning_rate": 1.44e-06, "loss": 4.9052, "step": 26 },
    { "epoch": 0.02, "learning_rate": 1.5e-06, "loss": 4.7384, "step": 27 },
    { "epoch": 0.02, "learning_rate": 1.5599999999999999e-06, "loss": 4.4794, "step": 28 },
    { "epoch": 0.02, "learning_rate": 1.62e-06, "loss": 4.8224, "step": 29 },
    { "epoch": 0.02, "learning_rate": 1.68e-06, "loss": 4.6417, "step": 30 },
    { "epoch": 0.02, "learning_rate": 1.74e-06, "loss": 4.5346, "step": 31 },
    { "epoch": 0.02, "learning_rate": 1.74e-06, "loss": 4.8925, "step": 32 },
    { "epoch": 0.02, "learning_rate": 1.8e-06, "loss": 4.6022, "step": 33 },
    { "epoch": 0.02, "learning_rate": 1.86e-06, "loss": 4.5056, "step": 34 },
    { "epoch": 0.02, "learning_rate": 1.9200000000000003e-06, "loss": 4.7537, "step": 35 },
    { "epoch": 0.02, "learning_rate": 1.98e-06, "loss": 4.9873, "step": 36 },
    { "epoch": 0.02, "learning_rate": 2.0400000000000004e-06, "loss": 4.6271, "step": 37 },
    { "epoch": 0.02, "learning_rate": 2.1000000000000002e-06, "loss": 4.7265, "step": 38 },
    { "epoch": 0.02, "learning_rate": 2.16e-06, "loss": 4.8251, "step": 39 },
    { "epoch": 0.02, "learning_rate": 2.22e-06, "loss": 4.816, "step": 40 },
    { "epoch": 0.02, "learning_rate": 2.28e-06, "loss": 4.6273, "step": 41 },
    { "epoch": 0.02, "learning_rate": 2.34e-06, "loss": 4.419, "step": 42 },
    { "epoch": 0.02, "learning_rate": 2.4000000000000003e-06, "loss": 4.726, "step": 43 },
    { "epoch": 0.02, "learning_rate": 2.46e-06, "loss": 4.5527, "step": 44 },
    { "epoch": 0.03, "learning_rate": 2.52e-06, "loss": 4.8068, "step": 45 },
    { "epoch": 0.03, "learning_rate": 2.58e-06, "loss": 4.7964, "step": 46 },
    { "epoch": 0.03, "learning_rate": 2.6399999999999997e-06, "loss": 4.9131, "step": 47 },
    { "epoch": 0.03, "learning_rate": 2.7e-06, "loss": 5.2795, "step": 48 },
    { "epoch": 0.03, "learning_rate": 2.76e-06, "loss": 5.3103, "step": 49 },
    { "epoch": 0.03, "learning_rate": 2.82e-06, "loss": 5.3139, "step": 50 },
    { "epoch": 0.03, "learning_rate": 2.88e-06, "loss": 4.4328, "step": 51 },
    { "epoch": 0.03, "learning_rate": 2.9400000000000002e-06, "loss": 4.5604, "step": 52 },
    { "epoch": 0.03, "learning_rate": 3e-06, "loss": 4.562, "step": 53 },
    { "epoch": 0.03, "learning_rate": 3.06e-06, "loss": 4.4274, "step": 54 },
    { "epoch": 0.03, "learning_rate": 3.1199999999999998e-06, "loss": 4.4539, "step": 55 },
    { "epoch": 0.03, "learning_rate": 3.18e-06, "loss": 4.576, "step": 56 },
    { "epoch": 0.03, "learning_rate": 3.24e-06, "loss": 4.4693, "step": 57 },
    { "epoch": 0.03, "learning_rate": 3.3e-06, "loss": 4.4279, "step": 58 },
    { "epoch": 0.03, "learning_rate": 3.36e-06, "loss": 4.4521, "step": 59 },
    { "epoch": 0.03, "learning_rate": 3.4200000000000003e-06, "loss": 4.4023, "step": 60 },
    { "epoch": 0.03, "learning_rate": 3.48e-06, "loss": 4.4207, "step": 61 },
    { "epoch": 0.03, "learning_rate": 3.54e-06, "loss": 4.352, "step": 62 },
    { "epoch": 0.04, "learning_rate": 3.6e-06, "loss": 4.7222, "step": 63 },
    { "epoch": 0.04, "learning_rate": 3.66e-06, "loss": 4.2245, "step": 64 },
    { "epoch": 0.04, "learning_rate": 3.72e-06, "loss": 4.4388, "step": 65 },
    { "epoch": 0.04, "learning_rate": 3.7800000000000002e-06, "loss": 4.5199, "step": 66 },
    { "epoch": 0.04, "learning_rate": 3.8400000000000005e-06, "loss": 4.5636, "step": 67 },
    { "epoch": 0.04, "learning_rate": 3.9e-06, "loss": 4.5562, "step": 68 },
    { "epoch": 0.04, "learning_rate": 3.96e-06, "loss": 4.4263, "step": 69 },
    { "epoch": 0.04, "learning_rate": 4.0200000000000005e-06, "loss": 4.3701, "step": 70 },
    { "epoch": 0.04, "learning_rate": 4.080000000000001e-06, "loss": 4.4671, "step": 71 },
    { "epoch": 0.04, "learning_rate": 4.14e-06, "loss": 4.24, "step": 72 },
    { "epoch": 0.04, "learning_rate": 4.2000000000000004e-06, "loss": 4.4483, "step": 73 },
    { "epoch": 0.04, "learning_rate": 4.26e-06, "loss": 4.2266, "step": 74 },
    { "epoch": 0.04, "learning_rate": 4.32e-06, "loss": 4.7742, "step": 75 },
    { "epoch": 0.04, "learning_rate": 4.3799999999999996e-06, "loss": 4.5626, "step": 76 },
    { "epoch": 0.04, "learning_rate": 4.44e-06, "loss": 4.3647, "step": 77 },
    { "epoch": 0.04, "learning_rate": 4.5e-06, "loss": 4.2969, "step": 78 },
    { "epoch": 0.04, "learning_rate": 4.56e-06, "loss": 4.2667, "step": 79 },
    { "epoch": 0.04, "learning_rate": 4.62e-06, "loss": 4.6044, "step": 80 },
    { "epoch": 0.05, "learning_rate": 4.68e-06, "loss": 4.2306, "step": 81 },
    { "epoch": 0.05, "learning_rate": 4.74e-06, "loss": 4.3939, "step": 82 },
    { "epoch": 0.05, "learning_rate": 4.800000000000001e-06, "loss": 4.3016, "step": 83 },
    { "epoch": 0.05, "learning_rate": 4.86e-06, "loss": 4.3591, "step": 84 },
    { "epoch": 0.05, "learning_rate": 4.92e-06, "loss": 4.2502, "step": 85 },
    { "epoch": 0.05, "learning_rate": 4.980000000000001e-06, "loss": 4.2694, "step": 86 },
    { "epoch": 0.05, "learning_rate": 5.04e-06, "loss": 4.6084, "step": 87 },
    { "epoch": 0.05, "learning_rate": 5.1e-06, "loss": 4.2422, "step": 88 },
    { "epoch": 0.05, "learning_rate": 5.16e-06, "loss": 4.599, "step": 89 },
    { "epoch": 0.05, "learning_rate": 5.22e-06, "loss": 4.1419, "step": 90 },
    { "epoch": 0.05, "learning_rate": 5.279999999999999e-06, "loss": 4.5418, "step": 91 },
    { "epoch": 0.05, "learning_rate": 5.34e-06, "loss": 4.2179, "step": 92 },
    { "epoch": 0.05, "learning_rate": 5.4e-06, "loss": 4.0608, "step": 93 },
    { "epoch": 0.05, "learning_rate": 5.46e-06, "loss": 4.5138, "step": 94 },
    { "epoch": 0.05, "learning_rate": 5.52e-06, "loss": 4.7229, "step": 95 },
    { "epoch": 0.05, "learning_rate": 5.58e-06, "loss": 4.287, "step": 96 },
    { "epoch": 0.05, "learning_rate": 5.64e-06, "loss": 4.4828, "step": 97 },
    { "epoch": 0.05, "learning_rate": 5.7000000000000005e-06, "loss": 4.5756, "step": 98 },
    { "epoch": 0.06, "learning_rate": 5.76e-06, "loss": 4.9466, "step": 99 },
    { "epoch": 0.06, "learning_rate": 5.76e-06, "loss": 4.8034, "step": 100 },
    { "epoch": 0.06, "learning_rate": 5.82e-06, "loss": 4.367, "step": 101 },
    { "epoch": 0.06, "learning_rate": 5.8800000000000005e-06, "loss": 4.1602, "step": 102 },
    { "epoch": 0.06, "learning_rate": 5.940000000000001e-06, "loss": 4.2882, "step": 103 },
    { "epoch": 0.06, "learning_rate": 6e-06, "loss": 4.2509, "step": 104 },
    { "epoch": 0.06, "learning_rate": 6.0600000000000004e-06, "loss": 4.3068, "step": 105 },
    { "epoch": 0.06, "learning_rate": 6.12e-06, "loss": 4.2813, "step": 106 },
    { "epoch": 0.06, "learning_rate": 6.18e-06, "loss": 4.3237, "step": 107 },
    { "epoch": 0.06, "learning_rate": 6.2399999999999995e-06, "loss": 4.4125, "step": 108 },
    { "epoch": 0.06, "learning_rate": 6.3e-06, "loss": 4.2929, "step": 109 },
    { "epoch": 0.06, "learning_rate": 6.36e-06, "loss": 4.3304, "step": 110 },
    { "epoch": 0.06, "learning_rate": 6.42e-06, "loss": 4.2666, "step": 111 },
    { "epoch": 0.06, "learning_rate": 6.48e-06, "loss": 4.1765, "step": 112 },
    { "epoch": 0.06, "learning_rate": 6.54e-06, "loss": 4.2945, "step": 113 },
    { "epoch": 0.06, "learning_rate": 6.6e-06, "loss": 4.0244, "step": 114 },
    { "epoch": 0.06, "learning_rate": 6.660000000000001e-06, "loss": 4.075, "step": 115 },
    { "epoch": 0.07, "learning_rate": 6.72e-06, "loss": 4.2312, "step": 116 },
    { "epoch": 0.07, "learning_rate": 6.78e-06, "loss": 4.3817, "step": 117 },
    { "epoch": 0.07, "learning_rate": 6.840000000000001e-06, "loss": 4.24, "step": 118 },
    { "epoch": 0.07, "learning_rate": 6.900000000000001e-06, "loss": 4.3153, "step": 119 },
    { "epoch": 0.07, "learning_rate": 6.96e-06, "loss": 4.3964, "step": 120 },
    { "epoch": 0.07, "learning_rate": 7.0200000000000006e-06, "loss": 4.4129, "step": 121 },
    { "epoch": 0.07, "learning_rate": 7.08e-06, "loss": 4.2732, "step": 122 },
    { "epoch": 0.07, "learning_rate": 7.14e-06, "loss": 4.433, "step": 123 },
    { "epoch": 0.07, "learning_rate": 7.2e-06, "loss": 4.3277, "step": 124 },
    { "epoch": 0.07, "learning_rate": 7.26e-06, "loss": 4.0291, "step": 125 },
    { "epoch": 0.07, "learning_rate": 7.32e-06, "loss": 4.63, "step": 126 },
    { "epoch": 0.07, "learning_rate": 7.3800000000000005e-06, "loss": 4.3313, "step": 127 },
    { "epoch": 0.07, "learning_rate": 7.44e-06, "loss": 4.2944, "step": 128 },
    { "epoch": 0.07, "learning_rate": 7.5e-06, "loss": 4.1793, "step": 129 },
    { "epoch": 0.07, "learning_rate": 7.5600000000000005e-06, "loss": 4.2588, "step": 130 },
    { "epoch": 0.07, "learning_rate": 7.62e-06, "loss": 4.5454, "step": 131 },
    { "epoch": 0.07, "learning_rate": 7.680000000000001e-06, "loss": 4.221, "step": 132 },
    { "epoch": 0.07, "learning_rate": 7.74e-06, "loss": 4.3223, "step": 133 },
    { "epoch": 0.08, "learning_rate": 7.8e-06, "loss": 4.2926, "step": 134 },
    { "epoch": 0.08, "learning_rate": 7.860000000000001e-06, "loss": 4.2339, "step": 135 },
    { "epoch": 0.08, "learning_rate": 7.92e-06, "loss": 4.3912, "step": 136 },
    { "epoch": 0.08, "learning_rate": 7.98e-06, "loss": 4.1663, "step": 137 },
    { "epoch": 0.08, "learning_rate": 8.040000000000001e-06, "loss": 4.3684, "step": 138 },
    { "epoch": 0.08, "learning_rate": 8.1e-06, "loss": 4.4916, "step": 139 },
    { "epoch": 0.08, "learning_rate": 8.160000000000001e-06, "loss": 4.1525, "step": 140 },
    { "epoch": 0.08, "learning_rate": 8.220000000000001e-06, "loss": 4.37, "step": 141 },
    { "epoch": 0.08, "learning_rate": 8.28e-06, "loss": 4.6219, "step": 142 },
    { "epoch": 0.08, "learning_rate": 8.340000000000001e-06, "loss": 4.2804, "step": 143 },
    { "epoch": 0.08, "learning_rate": 8.400000000000001e-06, "loss": 4.4011, "step": 144 },
    { "epoch": 0.08, "learning_rate": 8.459999999999999e-06, "loss": 4.6298, "step": 145 },
    { "epoch": 0.08, "learning_rate": 8.52e-06, "loss": 4.484, "step": 146 },
    { "epoch": 0.08, "learning_rate": 8.58e-06, "loss": 4.4453, "step": 147 },
    { "epoch": 0.08, "learning_rate": 8.64e-06, "loss": 4.5693, "step": 148 },
    { "epoch": 0.08, "learning_rate": 8.7e-06, "loss": 4.7461, "step": 149 },
    { "epoch": 0.08, "learning_rate": 8.759999999999999e-06, "loss": 4.2156, "step": 150 },
    { "epoch": 0.08, "learning_rate": 8.82e-06, "loss": 4.0355, "step": 151 },
    { "epoch": 0.09, "learning_rate": 8.88e-06, "loss": 4.3699, "step": 152 },
    { "epoch": 0.09, "learning_rate": 8.939999999999999e-06, "loss": 4.0072, "step": 153 },
    { "epoch": 0.09, "learning_rate": 9e-06, "loss": 4.4507, "step": 154 },
    { "epoch": 0.09, "learning_rate": 9.06e-06, "loss": 4.2933, "step": 155 },
    { "epoch": 0.09, "learning_rate": 9.12e-06, "loss": 4.5302, "step": 156 },
    { "epoch": 0.09, "learning_rate": 9.18e-06, "loss": 4.3209, "step": 157 },
    { "epoch": 0.09, "learning_rate": 9.24e-06, "loss": 4.429, "step": 158 },
    { "epoch": 0.09, "learning_rate": 9.3e-06, "loss": 4.3111, "step": 159 },
    { "epoch": 0.09, "learning_rate": 9.36e-06, "loss": 4.2651, "step": 160 },
    { "epoch": 0.09, "learning_rate": 9.42e-06, "loss": 4.3154, "step": 161 },
    { "epoch": 0.09, "learning_rate": 9.48e-06, "loss": 4.4383, "step": 162 },
    { "epoch": 0.09, "learning_rate": 9.54e-06, "loss": 4.2654, "step": 163 },
    { "epoch": 0.09, "learning_rate": 9.600000000000001e-06, "loss": 4.5006, "step": 164 },
    { "epoch": 0.09, "learning_rate": 9.66e-06, "loss": 4.287, "step": 165 },
    { "epoch": 0.09, "learning_rate": 9.72e-06, "loss": 4.4946, "step": 166 },
    { "epoch": 0.09, "learning_rate": 9.780000000000001e-06, "loss": 4.3238, "step": 167 },
    { "epoch": 0.09, "learning_rate": 9.84e-06, "loss": 4.0323, "step": 168 },
    { "epoch": 0.09, "learning_rate": 9.9e-06, "loss": 4.1272, "step": 169 },
    { "epoch": 0.1, "learning_rate": 9.960000000000001e-06, "loss": 4.2639, "step": 170 },
    { "epoch": 0.1, "learning_rate": 1.002e-05, "loss": 4.3443, "step": 171 },
    { "epoch": 0.1, "learning_rate": 1.008e-05, "loss": 4.2672, "step": 172 },
    { "epoch": 0.1, "learning_rate": 1.0140000000000001e-05, "loss": 4.2944, "step": 173 },
    { "epoch": 0.1, "learning_rate": 1.02e-05, "loss": 4.1786, "step": 174 },
    { "epoch": 0.1, "learning_rate": 1.0260000000000002e-05, "loss": 4.429, "step": 175 },
    { "epoch": 0.1, "learning_rate": 1.032e-05, "loss": 4.1529, "step": 176 },
    { "epoch": 0.1, "learning_rate": 1.0379999999999999e-05, "loss": 4.205, "step": 177 },
    { "epoch": 0.1, "learning_rate": 1.044e-05, "loss": 4.1984, "step": 178 },
    { "epoch": 0.1, "learning_rate": 1.05e-05, "loss": 3.9722, "step": 179 },
    { "epoch": 0.1, "learning_rate": 1.0559999999999999e-05, "loss": 4.4296, "step": 180 },
    { "epoch": 0.1, "learning_rate": 1.062e-05, "loss": 4.2805, "step": 181 },
    { "epoch": 0.1, "learning_rate": 1.068e-05, "loss": 4.2324, "step": 182 },
    { "epoch": 0.1, "learning_rate": 1.074e-05, "loss": 4.3509, "step": 183 },
    { "epoch": 0.1, "learning_rate": 1.08e-05, "loss": 4.3776, "step": 184 },
    { "epoch": 0.1, "learning_rate": 1.086e-05, "loss": 4.3438, "step": 185 },
    { "epoch": 0.1, "learning_rate": 1.092e-05, "loss": 4.1843, "step": 186 },
    { "epoch": 0.1, "learning_rate": 1.098e-05, "loss": 4.242, "step": 187 },
    { "epoch": 0.11, "learning_rate": 1.104e-05, "loss": 4.4717, "step": 188 },
    { "epoch": 0.11, "learning_rate": 1.11e-05, "loss": 4.1776, "step": 189 },
    { "epoch": 0.11, "learning_rate": 1.116e-05, "loss": 4.208, "step": 190 },
    { "epoch": 0.11, "learning_rate": 1.1220000000000001e-05, "loss": 4.4483, "step": 191 },
    { "epoch": 0.11, "learning_rate": 1.128e-05, "loss": 4.1557, "step": 192 },
    { "epoch": 0.11, "learning_rate": 1.134e-05, "loss": 4.0977, "step": 193 },
    { "epoch": 0.11, "learning_rate": 1.1400000000000001e-05, "loss": 4.2601, "step": 194 },
    { "epoch": 0.11, "learning_rate": 1.146e-05, "loss": 4.5423, "step": 195 },
    { "epoch": 0.11, "learning_rate": 1.152e-05, "loss": 4.3749, "step": 196 },
    { "epoch": 0.11, "learning_rate": 1.1580000000000001e-05, "loss": 4.3546, "step": 197 },
    { "epoch": 0.11, "learning_rate": 1.164e-05, "loss": 4.3407, "step": 198 },
    { "epoch": 0.11, "learning_rate": 1.1700000000000001e-05, "loss": 4.4539, "step": 199 },
    { "epoch": 0.11, "learning_rate": 1.1760000000000001e-05, "loss": 4.2983, "step": 200 },
    { "epoch": 0.11, "learning_rate": 1.182e-05, "loss": 4.2789, "step": 201 },
    { "epoch": 0.11, "learning_rate": 1.1880000000000001e-05, "loss": 4.1983, "step": 202 },
    { "epoch": 0.11, "learning_rate": 1.1940000000000001e-05, "loss": 4.3792, "step": 203 },
    { "epoch": 0.11, "learning_rate": 1.2e-05, "loss": 4.059, "step": 204 },
    { "epoch": 0.11, "learning_rate": 1.2060000000000001e-05, "loss": 4.2062, "step": 205 },
    { "epoch": 0.12, "learning_rate": 1.2120000000000001e-05, "loss": 4.3069, "step": 206 },
    { "epoch": 0.12, "learning_rate": 1.2180000000000002e-05, "loss": 4.3501, "step": 207 },
    { "epoch": 0.12, "learning_rate": 1.224e-05, "loss": 4.0598, "step": 208 },
    { "epoch": 0.12, "learning_rate": 1.2299999999999999e-05, "loss": 4.1062, "step": 209 },
    { "epoch": 0.12, "learning_rate": 1.236e-05, "loss": 4.2752, "step": 210 },
    { "epoch": 0.12, "learning_rate": 1.242e-05, "loss": 4.2774, "step": 211 },
    { "epoch": 0.12, "learning_rate": 1.2479999999999999e-05, "loss": 4.2148, "step": 212 },
    { "epoch": 0.12, "learning_rate": 1.254e-05, "loss": 4.1994, "step": 213 },
    { "epoch": 0.12, "learning_rate": 1.26e-05, "loss": 4.0105, "step": 214 },
    { "epoch": 0.12, "learning_rate": 1.2659999999999999e-05, "loss": 4.4023, "step": 215 },
    { "epoch": 0.12, "learning_rate": 1.272e-05, "loss": 3.8713, "step": 216 },
    { "epoch": 0.12, "learning_rate": 1.278e-05, "loss": 4.1284, "step": 217 },
    { "epoch": 0.12, "learning_rate": 1.284e-05, "loss": 4.2426, "step": 218 },
    { "epoch": 0.12, "learning_rate": 1.29e-05, "loss": 4.1782, "step": 219 },
    { "epoch": 0.12, "learning_rate": 1.296e-05, "loss": 4.4739, "step": 220 },
    { "epoch": 0.12, "learning_rate": 1.302e-05, "loss": 4.3053, "step": 221 },
    { "epoch": 0.12, "learning_rate": 1.308e-05, "loss": 4.3529, "step": 222 },
    { "epoch": 0.12, "learning_rate": 1.314e-05, "loss": 4.1366, "step": 223 },
    { "epoch": 0.13, "learning_rate": 1.32e-05, "loss": 4.2963, "step": 224 },
    { "epoch": 0.13, "learning_rate": 1.326e-05, "loss": 4.3181, "step": 225 },
    { "epoch": 0.13, "learning_rate": 1.3320000000000001e-05, "loss": 4.4328, "step": 226 },
    { "epoch": 0.13, "learning_rate": 1.338e-05, "loss": 4.3516, "step": 227 },
    { "epoch": 0.13, "learning_rate": 1.344e-05, "loss": 4.0431, "step": 228 },
    { "epoch": 0.13, "learning_rate": 1.3500000000000001e-05, "loss": 4.2322, "step": 229 },
    { "epoch": 0.13, "learning_rate": 1.356e-05, "loss": 4.2473, "step": 230 },
    { "epoch": 0.13, "learning_rate": 1.362e-05, "loss": 4.2337, "step": 231 },
    { "epoch": 0.13, "learning_rate": 1.3680000000000001e-05, "loss": 4.2574, "step": 232 },
    { "epoch": 0.13, "learning_rate": 1.374e-05, "loss": 4.3965, "step": 233 },
    { "epoch": 0.13, "learning_rate": 1.3800000000000002e-05, "loss": 4.2948, "step": 234 },
    { "epoch": 0.13, "learning_rate": 1.3860000000000001e-05, "loss": 4.121, "step": 235 },
    { "epoch": 0.13, "learning_rate": 1.392e-05, "loss": 4.6244, "step": 236 },
    { "epoch": 0.13, "learning_rate": 1.3980000000000002e-05, "loss": 4.3004, "step": 237 },
    { "epoch": 0.13, "learning_rate": 1.4040000000000001e-05, "loss": 4.2864, "step": 238 },
    { "epoch": 0.13, "learning_rate": 1.4099999999999999e-05, "loss": 4.2729, "step": 239 },
    { "epoch": 0.13, "learning_rate": 1.416e-05, "loss": 4.2111, "step": 240 },
    { "epoch": 0.14, "learning_rate": 1.422e-05, "loss": 4.4951, "step": 241 },
    { "epoch": 0.14, "learning_rate": 1.428e-05, "loss": 4.2367, "step": 242 },
    { "epoch": 0.14, "learning_rate": 1.434e-05, "loss": 4.3396, "step": 243 },
    { "epoch": 0.14, "learning_rate": 1.44e-05, "loss": 4.4672, "step": 244 },
    { "epoch": 0.14, "learning_rate": 1.446e-05, "loss": 4.6007, "step": 245 },
    { "epoch": 0.14, "learning_rate": 1.452e-05, "loss": 4.5437, "step": 246 },
    { "epoch": 0.14, "learning_rate": 1.458e-05, "loss": 4.3376, "step": 247 },
    { "epoch": 0.14, "learning_rate": 1.464e-05, "loss": 4.4152, "step": 248 },
    { "epoch": 0.14, "learning_rate": 1.47e-05, "loss": 4.2067, "step": 249 },
    { "epoch": 0.14, "learning_rate": 1.4760000000000001e-05, "loss": 4.2647, "step": 250 },
    { "epoch": 0.14, "learning_rate": 1.482e-05, "loss": 4.1991, "step": 251 },
    { "epoch": 0.14, "learning_rate": 1.488e-05, "loss": 4.1397, "step": 252 },
    { "epoch": 0.14, "learning_rate": 1.4940000000000001e-05, "loss": 4.1763, "step": 253 },
    { "epoch": 0.14, "learning_rate": 1.5e-05, "loss": 4.3289, "step": 254 },
    { "epoch": 0.14, "learning_rate": 1.506e-05, "loss": 4.2419, "step": 255 },
    { "epoch": 0.14, "learning_rate": 1.5120000000000001e-05, "loss": 4.1833, "step": 256 },
    { "epoch": 0.14, "learning_rate": 1.518e-05, "loss": 4.2139, "step": 257 },
    { "epoch": 0.14, "learning_rate": 1.524e-05, "loss": 4.3484, "step": 258 },
    { "epoch": 0.15, "learning_rate": 1.53e-05, "loss": 4.4464, "step": 259 },
    { "epoch": 0.15, "learning_rate": 1.5360000000000002e-05, "loss": 3.948, "step": 260 },
    { "epoch": 0.15, "learning_rate": 1.542e-05, "loss": 4.0849, "step": 261 },
    { "epoch": 0.15, "learning_rate": 1.548e-05, "loss": 4.3158, "step": 262 },
    { "epoch": 0.15, "learning_rate": 1.554e-05, "loss": 4.2621, "step": 263 },
    { "epoch": 0.15, "learning_rate": 1.56e-05, "loss": 4.0818, "step": 264 },
    { "epoch": 0.15, "learning_rate": 1.5660000000000003e-05, "loss": 4.1697, "step": 265 },
    { "epoch": 0.15, "learning_rate": 1.5720000000000002e-05, "loss": 4.0042, "step": 266 },
    { "epoch": 0.15, "learning_rate": 1.578e-05, "loss": 4.3114, "step": 267 },
    { "epoch": 0.15, "learning_rate": 1.584e-05, "loss": 4.221, "step": 268 },
    { "epoch": 0.15, "learning_rate": 1.59e-05, "loss": 4.421, "step": 269 },
    { "epoch": 0.15, "learning_rate": 1.596e-05, "loss": 4.0347, "step": 270 },
    { "epoch": 0.15, "learning_rate": 1.6020000000000002e-05, "loss": 4.3152, "step": 271 },
    { "epoch": 0.15, "learning_rate": 1.6080000000000002e-05, "loss": 4.1378, "step": 272 },
    { "epoch": 0.15, "learning_rate": 1.614e-05, "loss": 4.0427, "step": 273 },
    { "epoch": 0.15, "learning_rate": 1.62e-05, "loss": 4.3118, "step": 274 },
    { "epoch": 0.15, "learning_rate": 1.626e-05, "loss": 4.2973, "step": 275 },
    { "epoch": 0.15, "learning_rate": 1.6320000000000003e-05, "loss": 4.2771, "step": 276 },
    { "epoch": 0.16, "learning_rate": 1.6380000000000002e-05, "loss": 4.2649, "step": 277 },
    { "epoch": 0.16, "learning_rate": 1.6440000000000002e-05, "loss": 4.3386, "step": 278 },
    { "epoch": 0.16, "learning_rate": 1.65e-05, "loss": 4.1876, "step": 279 },
    { "epoch": 0.16, "learning_rate": 1.656e-05, "loss": 4.201, "step": 280 },
    { "epoch": 0.16, "learning_rate": 1.6620000000000004e-05, "loss": 4.3233, "step": 281 },
    { "epoch": 0.16, "learning_rate": 1.6680000000000003e-05, "loss": 4.3703, "step": 282 },
    { "epoch": 0.16, "learning_rate": 1.6740000000000002e-05, "loss": 4.3515, "step": 283 },
    { "epoch": 0.16, "learning_rate": 1.6800000000000002e-05, "loss": 4.1063, "step": 284 },
    { "epoch": 0.16, "learning_rate": 1.686e-05, "loss": 4.2593, "step": 285 },
    { "epoch": 0.16, "learning_rate": 1.6919999999999997e-05, "loss": 4.0121, "step": 286 },
    { "epoch": 0.16, "learning_rate": 1.698e-05, "loss": 4.2617, "step": 287 },
    { "epoch": 0.16, "learning_rate": 1.704e-05, "loss": 4.4772, "step": 288 },
    { "epoch": 0.16, "learning_rate": 1.71e-05, "loss": 4.2036, "step": 289 },
    { "epoch": 0.16, "learning_rate": 1.716e-05, "loss": 4.291, "step": 290 },
    { "epoch": 0.16, "learning_rate": 1.7219999999999998e-05, "loss": 4.2281, "step": 291 },
    { "epoch": 0.16, "learning_rate": 1.728e-05, "loss": 4.6205, "step": 292 },
    { "epoch": 0.16, "learning_rate": 1.734e-05, "loss": 4.3975, "step": 293 },
    { "epoch": 0.16, "learning_rate": 1.74e-05, "loss": 4.4876, "step": 294 },
    { "epoch": 0.17, "learning_rate": 1.746e-05, "loss": 4.3942, "step": 295 },
    { "epoch": 0.17, "learning_rate": 1.7519999999999998e-05, "loss": 4.3009, "step": 296 },
    { "epoch": 0.17, "learning_rate": 1.758e-05, "loss": 4.2866, "step": 297 },
    { "epoch": 0.17, "learning_rate": 1.764e-05, "loss": 4.4495, "step": 298 },
    { "epoch": 0.17, "learning_rate": 1.77e-05, "loss": 4.3001, "step": 299 },
    { "epoch": 0.17, "learning_rate": 1.776e-05, "loss": 4.5097, "step": 300 },
    { "epoch": 0.17, "learning_rate": 1.782e-05, "loss": 4.2911, "step": 301 },
    { "epoch": 0.17, "learning_rate": 1.7879999999999998e-05, "loss": 4.1269, "step": 302 },
    { "epoch": 0.17, "learning_rate": 1.794e-05, "loss": 4.3113, "step": 303 },
    { "epoch": 0.17, "learning_rate": 1.8e-05, "loss": 4.1721, "step": 304 },
    { "epoch": 0.17, "learning_rate": 1.806e-05, "loss": 4.3154, "step": 305 },
    { "epoch": 0.17, "learning_rate": 1.812e-05, "loss": 4.1948, "step": 306 },
    { "epoch": 0.17, "learning_rate": 1.818e-05, "loss": 4.3935, "step": 307 },
    { "epoch": 0.17, "learning_rate": 1.824e-05, "loss": 4.4485, "step": 308 },
    { "epoch": 0.17, "learning_rate": 1.83e-05, "loss": 4.4367, "step": 309 },
    { "epoch": 0.17, "learning_rate": 1.836e-05, "loss": 4.3007, "step": 310 },
    { "epoch": 0.17, "learning_rate": 1.842e-05, "loss": 4.301, "step": 311 },
    { "epoch": 0.17, "learning_rate": 1.848e-05, "loss": 4.1787, "step": 312 },
    { "epoch": 0.18, "learning_rate": 1.854e-05, "loss": 4.2407, "step": 313 },
    { "epoch": 0.18, "learning_rate": 1.86e-05, "loss": 4.3646, "step": 314 },
    { "epoch": 0.18, "learning_rate": 1.866e-05, "loss": 4.3755, "step": 315 },
    { "epoch": 0.18, "learning_rate": 1.872e-05, "loss": 4.1599, "step": 316 },
    { "epoch": 0.18, "learning_rate": 1.878e-05, "loss": 4.2004, "step": 317 },
    { "epoch": 0.18, "learning_rate": 1.884e-05, "loss": 3.9382, "step": 318 },
    { "epoch": 0.18, "learning_rate": 1.8900000000000002e-05, "loss": 4.1481, "step": 319 },
    { "epoch": 0.18, "learning_rate": 1.896e-05, "loss": 4.2926, "step": 320 },
    { "epoch": 0.18, "learning_rate": 1.902e-05, "loss": 4.4841, "step": 321 },
    { "epoch": 0.18, "learning_rate": 1.908e-05, "loss": 4.3148, "step": 322 },
    { "epoch": 0.18, "learning_rate": 1.914e-05, "loss": 4.3228, "step": 323 },
    { "epoch": 0.18, "learning_rate": 1.9200000000000003e-05, "loss": 4.2022, "step": 324 },
    { "epoch": 0.18, "learning_rate": 1.9260000000000002e-05, "loss": 4.1847, "step": 325 },
    { "epoch": 0.18, "learning_rate": 1.932e-05, "loss": 4.1588, "step": 326 },
    { "epoch": 0.18, "learning_rate": 1.938e-05, "loss": 4.0783, "step": 327 },
    { "epoch": 0.18, "learning_rate": 1.944e-05, "loss": 4.2046, "step": 328 },
    { "epoch": 0.18, "learning_rate": 1.95e-05, "loss": 4.1779, "step": 329 },
    { "epoch": 0.18, "learning_rate": 1.9560000000000002e-05, "loss": 4.4946, "step": 330 },
    { "epoch": 0.19, "learning_rate": 1.9620000000000002e-05, "loss": 4.2943, "step": 331 },
    { "epoch": 0.19, "learning_rate": 1.968e-05, "loss": 4.3843, "step": 332 },
    { "epoch": 0.19, "learning_rate": 1.974e-05, "loss": 4.3714, "step": 333 },
    { "epoch": 0.19, "learning_rate": 1.98e-05, "loss": 4.3556, "step": 334 },
    { "epoch": 0.19, "learning_rate": 1.9860000000000003e-05, "loss": 4.3798, "step": 335 },
    { "epoch": 0.19, "learning_rate": 1.9920000000000002e-05, "loss": 4.152, "step": 336 },
    { "epoch": 0.19, "learning_rate": 1.9980000000000002e-05, "loss": 4.2607, "step": 337 },
    { "epoch": 0.19, "learning_rate": 2.004e-05, "loss": 4.1062, "step": 338 },
    { "epoch": 0.19, "learning_rate": 2.01e-05, "loss": 4.2196, "step": 339 },
    { "epoch": 0.19, "learning_rate": 2.016e-05, "loss": 4.2091, "step": 340 },
    { "epoch": 0.19, "learning_rate": 2.0220000000000003e-05, "loss": 4.1571, "step": 341 },
    { "epoch": 0.19, "learning_rate": 2.0280000000000002e-05, "loss": 4.356, "step": 342 },
    { "epoch": 0.19, "learning_rate": 2.0340000000000002e-05, "loss": 4.4906, "step": 343 },
    { "epoch": 0.19, "learning_rate": 2.04e-05, "loss": 4.0563, "step": 344 },
    { "epoch": 0.19, "learning_rate": 2.046e-05, "loss": 4.3361, "step": 345 },
    { "epoch": 0.19, "learning_rate": 2.0520000000000003e-05, "loss": 4.1754, "step": 346 },
    { "epoch": 0.19, "learning_rate": 2.0580000000000003e-05, "loss": 4.6586, "step": 347 },
    { "epoch": 0.2, "learning_rate": 2.064e-05, "loss": 4.605, "step": 348 },
    { "epoch": 0.2, "learning_rate": 2.07e-05, "loss": 4.2504, "step": 349 },
    { "epoch": 0.2, "learning_rate": 2.0759999999999998e-05, "loss": 4.2302, "step": 350 },
    { "epoch": 0.2, "learning_rate": 2.082e-05, "loss": 4.3514, "step": 351 },
    { "epoch": 0.2, "learning_rate": 2.088e-05, "loss": 4.418, "step": 352 },
    { "epoch": 0.2, "learning_rate": 2.094e-05, "loss": 4.0628, "step": 353 },
    { "epoch": 0.2, "learning_rate": 2.1e-05, "loss": 4.1922, "step": 354 },
    { "epoch": 0.2, "learning_rate": 2.1059999999999998e-05, "loss": 3.8978, "step": 355 },
    { "epoch": 0.2, "learning_rate": 2.1119999999999998e-05, "loss": 4.1357, "step": 356 },
    { "epoch": 0.2, "learning_rate": 2.118e-05, "loss": 4.3353, "step": 357 },
    { "epoch": 0.2, "learning_rate": 2.124e-05, "loss": 4.3339, "step": 358 },
    { "epoch": 0.2, "learning_rate": 2.13e-05, "loss": 4.4832, "step": 359 },
    { "epoch": 0.2, "learning_rate": 2.136e-05, "loss": 4.1927, "step": 360 },
    { "epoch": 0.2, "learning_rate": 2.1419999999999998e-05, "loss": 4.2226, "step": 361 },
    { "epoch": 0.2, "learning_rate": 2.148e-05, "loss": 4.0212, "step": 362 },
    { "epoch": 0.2, "learning_rate": 2.154e-05, "loss": 4.2884, "step": 363 },
    { "epoch": 0.2, "learning_rate": 2.16e-05, "loss": 4.2995, "step": 364 },
    { "epoch": 0.2, "learning_rate": 2.166e-05, "loss": 4.2459, "step": 365 },
    { "epoch": 0.21, "learning_rate": 2.172e-05, "loss": 4.3832, "step": 366 },
    { "epoch": 0.21, "learning_rate": 2.178e-05, "loss": 4.224, "step": 367 },
    { "epoch": 0.21, "learning_rate": 2.184e-05, "loss": 4.1484, "step": 368 },
    { "epoch": 0.21, "learning_rate": 2.19e-05, "loss": 4.0405, "step": 369 },
    { "epoch": 0.21, "learning_rate": 2.196e-05, "loss": 4.1106, "step": 370 },
    { "epoch": 0.21, "learning_rate": 2.202e-05, "loss": 4.1984, "step": 371 },
    { "epoch": 0.21, "learning_rate": 2.208e-05, "loss": 4.6375, "step": 372 },
    { "epoch": 0.21, "learning_rate": 2.214e-05, "loss": 4.2163, "step": 373 },
    { "epoch": 0.21, "learning_rate": 2.22e-05, "loss": 4.0386, "step": 374 },
    { "epoch": 0.21, "learning_rate": 2.226e-05, "loss": 4.0182, "step": 375 },
    { "epoch": 0.21, "learning_rate": 2.232e-05, "loss": 4.0926, "step": 376 },
    { "epoch": 0.21, "learning_rate": 2.238e-05, "loss": 4.1242, "step": 377 },
    { "epoch": 0.21, "learning_rate": 2.2440000000000002e-05, "loss": 3.9954, "step": 378 },
    { "epoch": 0.21, "learning_rate": 2.25e-05, "loss": 4.1984, "step": 379 },
    { "epoch": 0.21, "learning_rate": 2.256e-05, "loss": 4.3595, "step": 380 },
    { "epoch": 0.21, "learning_rate": 2.262e-05, "loss": 3.9519, "step": 381 },
    { "epoch": 0.21, "learning_rate": 2.268e-05, "loss": 4.1197, "step": 382 },
    { "epoch": 0.21, "learning_rate": 2.274e-05, "loss": 4.2409, "step": 383 },
    { "epoch": 0.22, "learning_rate": 2.2800000000000002e-05, "loss": 4.1282, "step": 384 },
    { "epoch": 0.22, "learning_rate": 2.286e-05, "loss": 4.3583, "step": 385 },
    { "epoch": 0.22, "learning_rate": 2.292e-05, "loss": 4.318, "step": 386 },
    { "epoch": 0.22, "learning_rate": 2.298e-05, "loss": 4.3235, "step": 387 },
    { "epoch": 0.22, "learning_rate": 2.304e-05, "loss": 4.0547, "step": 388 },
    { "epoch": 0.22, "learning_rate": 2.3100000000000002e-05, "loss": 4.3362, "step": 389 },
    { "epoch": 0.22, "learning_rate": 2.3160000000000002e-05, "loss": 4.5647, "step": 390 },
    { "epoch": 0.22, "learning_rate": 2.322e-05, "loss": 4.1328, "step": 391 },
    { "epoch": 0.22, "learning_rate": 2.328e-05, "loss": 4.5629, "step": 392 },
    { "epoch": 0.22, "learning_rate": 2.334e-05, "loss": 4.5756, "step": 393 },
    { "epoch": 0.22, "learning_rate": 2.3400000000000003e-05, "loss": 4.2939, "step": 394 },
    { "epoch": 0.22, "learning_rate": 2.3460000000000002e-05, "loss": 4.4157, "step": 395 },
    { "epoch": 0.22, "learning_rate": 2.3520000000000002e-05, "loss": 4.3343, "step": 396 },
    { "epoch": 0.22, "learning_rate": 2.358e-05, "loss": 4.3268, "step": 397 },
    { "epoch": 0.22, "learning_rate": 2.364e-05, "loss": 4.7341, "step": 398 },
    { "epoch": 0.22, "learning_rate": 2.37e-05, "loss": 5.2309, "step": 399 },
    { "epoch": 0.22, "learning_rate": 2.3760000000000003e-05, "loss": 4.8813, "step": 400 },
    { "epoch": 0.22, "learning_rate": 2.3820000000000002e-05, "loss": 4.3757, "step": 401 },
    { "epoch": 0.23, "learning_rate": 2.3880000000000002e-05, "loss": 4.0931, "step": 402 },
    { "epoch": 0.23, "learning_rate": 2.394e-05, "loss": 4.1194, "step": 403 },
    { "epoch": 0.23, "learning_rate": 2.4e-05, "loss": 4.3194, "step": 404 },
    { "epoch": 0.23, "learning_rate": 2.4060000000000003e-05, "loss": 4.0206, "step": 405 },
    { "epoch": 0.23, "learning_rate": 2.4120000000000003e-05, "loss": 4.22, "step": 406 },
    { "epoch": 0.23, "learning_rate": 2.4180000000000002e-05, "loss": 4.1186, "step": 407 },
    { "epoch": 0.23, "learning_rate": 2.4240000000000002e-05, "loss": 4.24, "step": 408 },
    { "epoch": 0.23, "learning_rate": 2.43e-05, "loss": 4.3466, "step": 409 },
    { "epoch": 0.23, "learning_rate": 2.4360000000000004e-05, "loss": 4.2623, "step": 410 },
    { "epoch": 0.23, "learning_rate": 2.442e-05, "loss": 4.2144, "step": 411 },
    { "epoch": 0.23, "learning_rate": 2.448e-05, "loss": 4.2472, "step": 412 },
    { "epoch": 0.23, "learning_rate": 2.454e-05, "loss": 4.268, "step": 413 },
    { "epoch": 0.23, "learning_rate": 2.4599999999999998e-05, "loss": 4.2911, "step": 414 },
    { "epoch": 0.23, "learning_rate": 2.4659999999999998e-05, "loss": 4.2753, "step": 415 },
    { "epoch": 0.23, "learning_rate": 2.472e-05, "loss": 4.1832, "step": 416 },
    { "epoch": 0.23, "learning_rate": 2.478e-05, "loss": 4.2735, "step": 417 },
    { "epoch": 0.23, "learning_rate": 2.484e-05, "loss": 4.2643, "step": 418 },
    { "epoch": 0.23, "learning_rate": 2.49e-05, "loss": 4.289, "step": 419 },
    { "epoch": 0.24, "learning_rate": 2.4959999999999998e-05, "loss": 4.3005, "step": 420 },
    { "epoch": 0.24, "learning_rate": 2.502e-05, "loss": 4.1223, "step": 421 },
    { "epoch": 0.24, "learning_rate": 2.508e-05, "loss": 4.429, "step": 422 },
    { "epoch": 0.24, "learning_rate": 2.514e-05, "loss": 4.0601, "step": 423 },
    { "epoch": 0.24, "learning_rate": 2.52e-05, "loss": 4.5131, "step": 424 },
    { "epoch": 0.24, "learning_rate": 2.526e-05, "loss": 4.4075, "step": 425 },
    { "epoch": 0.24, "learning_rate": 2.5319999999999998e-05, "loss": 4.2671, "step": 426 },
    { "epoch": 0.24, "learning_rate": 2.538e-05, "loss": 4.1313, "step": 427 },
    { "epoch": 0.24, "learning_rate": 2.544e-05, "loss": 4.1743, "step": 428 },
    { "epoch": 0.24, "learning_rate": 2.55e-05, "loss": 4.1737, "step": 429 },
    { "epoch": 0.24, "learning_rate": 2.556e-05, "loss": 4.2877, "step": 430 },
    { "epoch": 0.24, "learning_rate": 2.562e-05, "loss": 4.1968, "step": 431 },
    { "epoch": 0.24, "learning_rate": 2.568e-05, "loss": 4.2805, "step": 432 },
    { "epoch": 0.24, "learning_rate": 2.574e-05, "loss": 4.2455, "step": 433 },
    { "epoch": 0.24, "learning_rate": 2.58e-05, "loss": 4.3769, "step": 434 },
    { "epoch": 0.24, "learning_rate": 2.586e-05, "loss": 4.3591, "step": 435 },
    { "epoch": 0.24, "learning_rate": 2.592e-05, "loss": 3.9896, "step": 436 },
    { "epoch": 0.24, "learning_rate": 2.5980000000000002e-05, "loss": 4.1979, "step": 437 },
    { "epoch": 0.25, "learning_rate": 2.604e-05, "loss": 4.4241, "step": 438 },
    { "epoch": 0.25, "learning_rate": 2.61e-05, "loss": 4.1434, "step": 439 },
    { "epoch": 0.25, "learning_rate": 2.616e-05, "loss": 4.3756, "step": 440 },
    { "epoch": 0.25, "learning_rate": 2.622e-05, "loss": 4.36, "step": 441 },
    { "epoch": 0.25, "learning_rate": 2.628e-05, "loss": 4.3536, "step": 442 },
    { "epoch": 0.25, "learning_rate": 2.6340000000000002e-05, "loss": 4.294, "step": 443 },
    { "epoch": 0.25, "learning_rate": 2.64e-05, "loss": 4.5094, "step": 444 },
    { "epoch": 0.25, "learning_rate": 2.646e-05, "loss": 4.3045, "step": 445 },
    { "epoch": 0.25, "learning_rate": 2.652e-05, "loss": 4.9616, "step": 446 },
    { "epoch": 0.25, "learning_rate": 2.658e-05, "loss": 4.705, "step": 447 },
    { "epoch": 0.25, "learning_rate": 2.6640000000000002e-05, "loss": 5.1824, "step": 448 },
    { "epoch": 0.25, "learning_rate": 2.6700000000000002e-05, "loss": 4.5262, "step": 449 },
    { "epoch": 0.25, "learning_rate": 2.676e-05, "loss": 4.516, "step": 450 },
    { "epoch": 0.25, "learning_rate": 2.682e-05, "loss": 3.9647, "step": 451 },
    { "epoch": 0.25, "learning_rate": 2.688e-05, "loss": 4.1498, "step": 452 },
    { "epoch": 0.25, "learning_rate": 2.6940000000000003e-05, "loss": 4.2994, "step": 453 },
    { "epoch": 0.25, "learning_rate": 2.7000000000000002e-05, "loss": 3.9839, "step": 454 },
    { "epoch": 0.26, "learning_rate": 2.7060000000000002e-05, "loss": 4.0633, "step": 455 },
    { "epoch": 0.26, "learning_rate": 2.712e-05, "loss": 4.1811, "step": 456 },
    { "epoch": 0.26, "learning_rate": 2.718e-05, "loss": 4.0188, "step": 457 },
    { "epoch": 0.26, "learning_rate": 2.724e-05, "loss": 4.007, "step": 458 },
    { "epoch": 0.26, "learning_rate": 2.7300000000000003e-05, "loss": 3.9451, "step": 459 },
    { "epoch": 0.26, "learning_rate": 2.7360000000000002e-05, "loss": 4.1622, "step": 460 },
    { "epoch": 0.26, "learning_rate": 2.7420000000000002e-05, "loss": 4.0804, "step": 461 },
    { "epoch": 0.26, "learning_rate": 2.748e-05, "loss": 4.0575, "step": 462 },
    { "epoch": 0.26, "learning_rate": 2.754e-05, "loss": 4.1324, "step": 463 },
    { "epoch": 0.26, "learning_rate": 2.7600000000000003e-05, "loss": 4.0888, "step": 464 },
    { "epoch": 0.26, "learning_rate": 2.7660000000000003e-05, "loss": 4.1026, "step": 465 },
    { "epoch": 0.26, "learning_rate": 2.7720000000000002e-05, "loss": 4.3026, "step": 466 },
    { "epoch": 0.26, "learning_rate": 2.778e-05, "loss": 4.1423, "step": 467 },
    { "epoch": 0.26, "learning_rate": 2.784e-05, "loss": 4.2504, "step": 468 },
    { "epoch": 0.26, "learning_rate": 2.79e-05, "loss": 4.065, "step": 469 },
    { "epoch": 0.26, "learning_rate": 2.7960000000000003e-05, "loss": 4.1554, "step": 470 },
    { "epoch": 0.26, "learning_rate": 2.8020000000000003e-05, "loss": 4.34, "step": 471 },
    { "epoch": 0.26, "learning_rate": 2.8080000000000002e-05, "loss": 4.1225, "step": 472 },
    { "epoch": 0.27, "learning_rate": 2.8139999999999998e-05, "loss": 4.1717, "step": 473 },
    { "epoch": 0.27, "learning_rate": 2.8199999999999998e-05, "loss": 4.261, "step": 474 },
    { "epoch": 0.27, "learning_rate": 2.826e-05, "loss": 4.2665, "step": 475 },
    { "epoch": 0.27, "learning_rate": 2.832e-05, "loss": 4.3772, "step": 476 },
    { "epoch": 0.27, "learning_rate": 2.838e-05, "loss": 4.3676, "step": 477 },
    { "epoch": 0.27, "learning_rate": 2.844e-05, "loss": 4.2325, "step": 478 },
    { "epoch": 0.27, "learning_rate": 2.8499999999999998e-05, "loss": 4.3501, "step": 479 },
    { "epoch": 0.27, "learning_rate": 2.856e-05, "loss": 4.2706, "step": 480 },
    { "epoch": 0.27, "learning_rate": 2.862e-05, "loss": 4.0092, "step": 481 },
    { "epoch": 0.27, "learning_rate": 2.868e-05, "loss": 4.3138, "step": 482 },
    { "epoch": 0.27, "learning_rate": 2.874e-05, "loss": 4.4792, "step": 483 },
    { "epoch": 0.27, "learning_rate": 2.88e-05, "loss": 4.0796, "step": 484 },
    { "epoch": 0.27, "learning_rate": 2.8859999999999998e-05, "loss": 4.4669, "step": 485 },
    { "epoch": 0.27, "learning_rate": 2.892e-05, "loss": 4.3043, "step": 486 },
    { "epoch": 0.27, "learning_rate": 2.898e-05, "loss": 4.3602, "step": 487 },
    { "epoch": 0.27, "learning_rate": 2.904e-05, "loss": 4.4734, "step": 488 },
    { "epoch": 0.27, "learning_rate": 2.91e-05, "loss": 4.4822, "step": 489 },
    { "epoch": 0.27, "learning_rate": 2.916e-05, "loss": 4.2915, "step": 490 },
    { "epoch": 0.28, "learning_rate": 2.922e-05, "loss": 4.1764, "step": 491 },
    { "epoch": 0.28, "learning_rate": 2.928e-05, "loss": 4.4906, "step": 492 },
    { "epoch": 0.28, "learning_rate": 2.934e-05, "loss": 4.3887, "step": 493 },
    { "epoch": 0.28, "learning_rate": 2.94e-05, "loss": 4.4491, "step": 494 },
    { "epoch": 0.28, "learning_rate": 2.946e-05, "loss": 4.3901, "step": 495 },
    { "epoch": 0.28, "learning_rate": 2.9520000000000002e-05, "loss": 4.3761, "step": 496 },
    { "epoch": 0.28, "learning_rate": 2.958e-05, "loss": 4.1813, "step": 497 },
    { "epoch": 0.28, "learning_rate": 2.964e-05, "loss": 4.6006, "step": 498 },
    { "epoch": 0.28, "learning_rate": 2.97e-05, "loss": 4.7313, "step": 499 },
    { "epoch": 0.28, "learning_rate": 2.976e-05, "loss": 4.114, "step": 500 },
    { "epoch": 0.28, "eval_loss": 4.311306953430176, "eval_runtime": 948.7803, "eval_samples_per_second": 2.785, "eval_steps_per_second": 0.349, "eval_wer": 1.9471836572788577, "step": 500 },
    { "epoch": 0.28, "learning_rate": 2.982e-05, "loss": 4.3698, "step": 501 },
    { "epoch": 0.28, "learning_rate": 2.9880000000000002e-05, "loss": 4.0329, "step": 502 },
    { "epoch": 0.28, "learning_rate": 2.994e-05, "loss": 4.1158, "step": 503 },
    { "epoch": 0.28, "learning_rate": 3e-05, "loss": 4.2736, "step": 504 },
    { "epoch": 0.28, "learning_rate": 2.9976635514018694e-05, "loss": 4.1211, "step": 505 },
    { "epoch": 0.28, "learning_rate": 2.9953271028037384e-05, "loss": 4.3151, "step": 506 },
    { "epoch": 0.28, "learning_rate": 2.9929906542056074e-05, "loss": 4.1362, "step": 507 },
    { "epoch": 0.28, "learning_rate": 2.9906542056074768e-05, "loss": 4.1223, "step": 508 },
    { "epoch": 0.29, "learning_rate": 2.988317757009346e-05, "loss": 4.3377, "step": 509 },
    { "epoch": 0.29, "learning_rate": 2.985981308411215e-05, "loss": 4.4417, "step": 510 },
    { "epoch": 0.29, "learning_rate": 2.983644859813084e-05, "loss": 4.299, "step": 511 },
    { "epoch": 0.29, "learning_rate": 2.9813084112149534e-05, "loss": 4.1538, "step": 512 },
    { "epoch": 0.29, "learning_rate": 2.9789719626168224e-05, "loss": 4.1437, "step": 513 },
    { "epoch": 0.29, "learning_rate": 2.9766355140186918e-05, "loss": 4.0927, "step": 514 },
    { "epoch": 0.29, "learning_rate": 2.9742990654205608e-05, "loss": 4.1283, "step": 515 },
    { "epoch": 0.29, "learning_rate": 2.97196261682243e-05, "loss": 4.3935, "step": 516 },
    { "epoch": 0.29, "learning_rate": 2.969626168224299e-05, "loss": 4.1754, "step": 517 },
    { "epoch": 0.29, "learning_rate": 2.9672897196261685e-05, "loss": 4.5719, "step": 518 },
    { "epoch": 0.29, "learning_rate": 2.9649532710280375e-05, "loss": 4.3025, "step": 519 },
    { "epoch": 0.29, "learning_rate": 2.9626168224299065e-05, "loss": 4.0066, "step": 520 },
    { "epoch": 0.29, "learning_rate": 2.9602803738317758e-05, "loss": 3.9659, "step": 521 },
    { "epoch": 0.29, "learning_rate": 2.957943925233645e-05, "loss": 4.1495, "step": 522 },
    { "epoch": 0.29, "learning_rate": 2.955607476635514e-05, "loss": 4.1178, "step": 523 },
    { "epoch": 0.29, "learning_rate": 2.953271028037383e-05, "loss": 4.3021, "step": 524 },
    { "epoch": 0.29, "learning_rate": 2.9509345794392525e-05, "loss": 4.6051, "step": 525 },
    { "epoch": 0.29, "learning_rate": 2.9485981308411218e-05, "loss": 4.289, "step": 526 },
    { "epoch": 0.3, "learning_rate": 2.9462616822429905e-05, "loss": 4.425, "step": 527 },
    { "epoch": 0.3, "learning_rate": 2.9439252336448598e-05, "loss": 3.9979, "step": 528 },
    { "epoch": 0.3, "learning_rate": 2.941588785046729e-05, "loss": 4.3364, "step": 529 },
    { "epoch": 0.3, "learning_rate": 2.9392523364485985e-05, "loss": 4.3657, "step": 530 },
    { "epoch": 0.3, "learning_rate": 2.936915887850467e-05, "loss": 4.1333, "step": 531 },
    { "epoch": 0.3, "learning_rate": 2.9345794392523365e-05, "loss": 4.01, "step": 532 },
    { "epoch": 0.3, "learning_rate": 2.932242990654206e-05, "loss": 4.0705, "step": 533 },
    { "epoch": 0.3, "learning_rate": 2.929906542056075e-05, "loss": 3.9721, "step": 534 },
    { "epoch": 0.3, "learning_rate": 2.927570093457944e-05, "loss": 4.3871, "step": 535 },
    { "epoch": 0.3, "learning_rate": 2.9252336448598132e-05, "loss": 4.2582, "step": 536 },
    { "epoch": 0.3, "learning_rate": 2.9228971962616825e-05, "loss": 4.1321, "step": 537 },
    { "epoch": 0.3, "learning_rate": 2.9205607476635515e-05, "loss": 4.4381, "step": 538 },
    { "epoch": 0.3, "learning_rate": 2.9182242990654205e-05, "loss": 4.2946, "step": 539 },
    { "epoch": 0.3, "learning_rate": 2.91588785046729e-05, "loss": 4.3397, "step": 540 },
    { "epoch": 0.3, "learning_rate": 2.913551401869159e-05, "loss": 4.5107, "step": 541 },
    { "epoch": 0.3, "learning_rate": 2.9112149532710282e-05, "loss": 4.3734, "step": 542 },
    { "epoch": 0.3, "learning_rate": 2.9088785046728972e-05, "loss": 5.0012, "step": 543 },
    { "epoch": 0.3, "learning_rate": 2.9065420560747665e-05, "loss": 4.3289, "step": 544 },
    { "epoch": 0.31, "learning_rate": 2.9042056074766355e-05, "loss": 4.1193, "step": 545 },
    { "epoch": 0.31, "learning_rate": 2.901869158878505e-05, "loss": 4.4826, "step": 546 },
    { "epoch": 0.31, "learning_rate": 2.899532710280374e-05, "loss": 4.7226, "step": 547 },
    { "epoch": 0.31, "learning_rate": 2.897196261682243e-05, "loss": 4.5186, "step": 548 },
    { "epoch": 0.31, "learning_rate": 2.8948598130841122e-05, "loss": 4.7832, "step": 549 },
    { "epoch": 0.31, "learning_rate": 2.8925233644859816e-05, "loss": 4.6424, "step": 550 },
    { "epoch": 0.31, "learning_rate": 2.8901869158878506e-05, "loss": 4.6108, "step": 551 },
    { "epoch": 0.31, "learning_rate": 2.8878504672897196e-05, "loss": 4.484, "step": 552 },
    { "epoch": 0.31, "learning_rate": 2.885514018691589e-05, "loss": 4.4223, "step": 553 },
    { "epoch": 0.31, "learning_rate": 2.8831775700934582e-05, "loss": 4.1946, "step": 554 },
    { "epoch": 0.31, "learning_rate": 2.8808411214953272e-05, "loss": 4.105, "step": 555 },
    { "epoch": 0.31, "learning_rate": 2.8785046728971962e-05, "loss": 4.2699, "step": 556 },
    { "epoch": 0.31, "learning_rate": 2.8761682242990656e-05, "loss": 4.3023, "step": 557 },
    { "epoch": 0.31, "learning_rate": 2.873831775700935e-05, "loss": 4.108, "step": 558 },
    { "epoch": 0.31, "learning_rate": 2.8714953271028036e-05, "loss": 4.065, "step": 559 },
    { "epoch": 0.31, "learning_rate": 2.869158878504673e-05, "loss": 4.0726, "step": 560 },
    { "epoch": 0.31, "learning_rate": 2.8668224299065423e-05, "loss": 4.3704, "step": 561 },
    { "epoch": 0.32, "learning_rate": 2.8644859813084116e-05, "loss": 4.275, "step": 562 },
    { "epoch": 0.32, "learning_rate": 2.8621495327102803e-05, "loss": 3.9922, "step": 563 },
    { "epoch": 0.32, "learning_rate": 2.8598130841121496e-05, "loss": 4.6498, "step": 564 },
    { "epoch": 0.32, "learning_rate": 2.857476635514019e-05, "loss": 4.4709, "step": 565 },
    { "epoch": 0.32, "learning_rate": 2.855140186915888e-05, "loss": 4.1769, "step": 566 },
    { "epoch": 0.32, "learning_rate": 2.852803738317757e-05, "loss": 4.2284, "step": 567 },
    { "epoch": 0.32, "learning_rate": 2.8504672897196263e-05, "loss": 4.4302, "step": 568 },
    { "epoch": 0.32, "learning_rate": 2.8481308411214956e-05, "loss": 4.2458, "step": 569 },
    { "epoch": 0.32, "learning_rate": 2.8457943925233646e-05, "loss": 4.4577, "step": 570 },
    { "epoch": 0.32, "learning_rate": 2.8434579439252336e-05, "loss": 4.1258, "step": 571 },
    { "epoch": 0.32, "learning_rate": 2.841121495327103e-05, "loss": 4.457, "step": 572 },
    { "epoch": 0.32, "learning_rate": 2.838785046728972e-05, "loss": 4.1741, "step": 573 },
    { "epoch": 0.32, "learning_rate": 2.8364485981308413e-05, "loss": 4.0215, "step": 574 },
    { "epoch": 0.32, "learning_rate": 2.8341121495327103e-05, "loss": 4.1338, "step": 575 },
    { "epoch": 0.32, "learning_rate": 2.8317757009345797e-05,
|
"loss": 4.2283, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 2.8294392523364487e-05, |
|
"loss": 4.2055, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 2.827102803738318e-05, |
|
"loss": 4.0812, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 2.824766355140187e-05, |
|
"loss": 4.1874, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 2.822429906542056e-05, |
|
"loss": 4.4458, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 2.8200934579439253e-05, |
|
"loss": 4.5588, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 2.8177570093457947e-05, |
|
"loss": 4.304, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 2.8154205607476637e-05, |
|
"loss": 3.9939, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 2.8130841121495327e-05, |
|
"loss": 4.4976, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 2.810747663551402e-05, |
|
"loss": 4.4264, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 2.8084112149532714e-05, |
|
"loss": 4.218, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 2.80607476635514e-05, |
|
"loss": 4.4753, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 2.8037383177570094e-05, |
|
"loss": 4.2927, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 2.8014018691588787e-05, |
|
"loss": 4.3891, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 2.799065420560748e-05, |
|
"loss": 4.4293, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 2.7967289719626167e-05, |
|
"loss": 4.1973, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 2.794392523364486e-05, |
|
"loss": 4.5021, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 2.7920560747663554e-05, |
|
"loss": 4.3199, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 2.7897196261682244e-05, |
|
"loss": 4.4571, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 2.7873831775700934e-05, |
|
"loss": 4.528, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 2.7850467289719627e-05, |
|
"loss": 4.4157, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 2.782710280373832e-05, |
|
"loss": 4.5597, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 2.780373831775701e-05, |
|
"loss": 4.9257, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 2.77803738317757e-05, |
|
"loss": 4.7692, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 2.7757009345794394e-05, |
|
"loss": 4.8095, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 2.7733644859813084e-05, |
|
"loss": 3.9404, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 2.7710280373831777e-05, |
|
"loss": 4.4654, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 2.7686915887850467e-05, |
|
"loss": 4.1783, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 2.766355140186916e-05, |
|
"loss": 4.2439, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 2.764018691588785e-05, |
|
"loss": 4.072, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 2.7616822429906544e-05, |
|
"loss": 4.064, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 2.7593457943925234e-05, |
|
"loss": 4.1668, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 2.7570093457943924e-05, |
|
"loss": 4.212, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 2.7546728971962618e-05, |
|
"loss": 4.4345, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 2.752336448598131e-05, |
|
"loss": 4.1837, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 2.75e-05, |
|
"loss": 4.4643, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 2.747663551401869e-05, |
|
"loss": 4.1591, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 2.7453271028037384e-05, |
|
"loss": 4.2991, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 2.7429906542056078e-05, |
|
"loss": 4.1714, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 2.7406542056074764e-05, |
|
"loss": 4.3276, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 2.7383177570093458e-05, |
|
"loss": 4.0799, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 2.735981308411215e-05, |
|
"loss": 4.1613, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 2.7336448598130845e-05, |
|
"loss": 4.0844, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 2.731308411214953e-05, |
|
"loss": 4.0714, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 2.7289719626168225e-05, |
|
"loss": 4.3086, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 2.7266355140186918e-05, |
|
"loss": 4.2854, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 2.7242990654205608e-05, |
|
"loss": 4.3431, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 2.7219626168224298e-05, |
|
"loss": 4.3078, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 2.719626168224299e-05, |
|
"loss": 4.5547, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 2.7172897196261685e-05, |
|
"loss": 4.3998, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 2.7149532710280375e-05, |
|
"loss": 4.1948, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 2.7126168224299065e-05, |
|
"loss": 4.1507, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 2.7102803738317758e-05, |
|
"loss": 4.5187, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 2.7079439252336448e-05, |
|
"loss": 4.82, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 2.705607476635514e-05, |
|
"loss": 4.1649, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 2.703271028037383e-05, |
|
"loss": 4.2138, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 2.7009345794392525e-05, |
|
"loss": 4.2827, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 2.6985981308411215e-05, |
|
"loss": 4.1455, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 2.696261682242991e-05, |
|
"loss": 4.1618, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 2.69392523364486e-05, |
|
"loss": 4.2469, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 2.691588785046729e-05, |
|
"loss": 4.247, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 2.6892523364485982e-05, |
|
"loss": 4.2564, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 2.6869158878504675e-05, |
|
"loss": 3.9329, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 2.6845794392523365e-05, |
|
"loss": 4.3441, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 2.6822429906542055e-05, |
|
"loss": 4.6779, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 2.679906542056075e-05, |
|
"loss": 4.4811, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 2.6775700934579442e-05, |
|
"loss": 4.4661, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 2.675233644859813e-05, |
|
"loss": 4.1823, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 2.6728971962616822e-05, |
|
"loss": 4.3765, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 2.6705607476635515e-05, |
|
"loss": 4.3031, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 2.668224299065421e-05, |
|
"loss": 4.4316, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 2.6658878504672895e-05, |
|
"loss": 5.1151, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 2.663551401869159e-05, |
|
"loss": 5.3826, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 2.6612149532710282e-05, |
|
"loss": 4.624, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 2.6588785046728972e-05, |
|
"loss": 4.6994, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 2.6565420560747662e-05, |
|
"loss": 4.1818, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 2.6542056074766356e-05, |
|
"loss": 4.1108, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 2.651869158878505e-05, |
|
"loss": 4.1831, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 2.649532710280374e-05, |
|
"loss": 4.1909, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 2.647196261682243e-05, |
|
"loss": 4.1307, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 2.6448598130841122e-05, |
|
"loss": 4.1173, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 2.6425233644859816e-05, |
|
"loss": 4.1902, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 2.6401869158878506e-05, |
|
"loss": 4.5231, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 2.6378504672897196e-05, |
|
"loss": 4.4093, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 2.635514018691589e-05, |
|
"loss": 4.2605, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 2.633177570093458e-05, |
|
"loss": 4.2977, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 2.6308411214953273e-05, |
|
"loss": 4.2694, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 2.6285046728971963e-05, |
|
"loss": 4.3017, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 2.6261682242990656e-05, |
|
"loss": 4.386, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 2.6238317757009346e-05, |
|
"loss": 4.1274, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 2.621495327102804e-05, |
|
"loss": 4.3211, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 2.619158878504673e-05, |
|
"loss": 4.2019, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 2.616822429906542e-05, |
|
"loss": 4.0505, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 2.6144859813084113e-05, |
|
"loss": 4.0959, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 2.6121495327102806e-05, |
|
"loss": 4.3859, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 2.6098130841121496e-05, |
|
"loss": 4.3131, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 2.6074766355140186e-05, |
|
"loss": 4.5609, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 2.605140186915888e-05, |
|
"loss": 4.1302, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 2.6028037383177573e-05, |
|
"loss": 4.1409, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 2.600467289719626e-05, |
|
"loss": 4.0495, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 2.5981308411214953e-05, |
|
"loss": 4.2181, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 2.5957943925233647e-05, |
|
"loss": 4.1967, |
|
"step": 677 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 2.593457943925234e-05, |
|
"loss": 4.1595, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 2.5911214953271027e-05, |
|
"loss": 4.2405, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 2.588785046728972e-05, |
|
"loss": 4.1914, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 2.5864485981308413e-05, |
|
"loss": 4.1382, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 2.5841121495327103e-05, |
|
"loss": 4.3131, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 2.5817757009345793e-05, |
|
"loss": 4.4054, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 2.5794392523364487e-05, |
|
"loss": 4.3854, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 2.577102803738318e-05, |
|
"loss": 4.4064, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 2.574766355140187e-05, |
|
"loss": 4.3073, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 2.572429906542056e-05, |
|
"loss": 3.9777, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 2.5700934579439254e-05, |
|
"loss": 4.0989, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 2.5677570093457944e-05, |
|
"loss": 4.2607, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 2.5654205607476637e-05, |
|
"loss": 4.333, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 2.5630841121495327e-05, |
|
"loss": 4.0871, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 2.560747663551402e-05, |
|
"loss": 4.3324, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 2.558411214953271e-05, |
|
"loss": 4.3102, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 2.5560747663551404e-05, |
|
"loss": 4.2393, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 2.5537383177570094e-05, |
|
"loss": 4.4242, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 2.5514018691588784e-05, |
|
"loss": 4.5045, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 2.5490654205607477e-05, |
|
"loss": 3.9591, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 2.546728971962617e-05, |
|
"loss": 4.289, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 2.544392523364486e-05, |
|
"loss": 4.2378, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 2.542056074766355e-05, |
|
"loss": 4.2691, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 2.5397196261682244e-05, |
|
"loss": 4.0229, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 2.5373831775700937e-05, |
|
"loss": 4.153, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 2.5350467289719624e-05, |
|
"loss": 4.1632, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 2.5327102803738317e-05, |
|
"loss": 4.4474, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 2.530373831775701e-05, |
|
"loss": 4.2303, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 2.5280373831775704e-05, |
|
"loss": 4.3282, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 2.525700934579439e-05, |
|
"loss": 4.0947, |
|
"step": 707 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 2.5233644859813084e-05, |
|
"loss": 4.1924, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 2.5210280373831778e-05, |
|
"loss": 3.9936, |
|
"step": 709 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 2.5186915887850468e-05, |
|
"loss": 4.2294, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 2.5163551401869158e-05, |
|
"loss": 4.3738, |
|
"step": 711 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 2.514018691588785e-05, |
|
"loss": 4.1016, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 2.5116822429906544e-05, |
|
"loss": 4.1244, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 2.5093457943925234e-05, |
|
"loss": 4.5749, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 2.5070093457943924e-05, |
|
"loss": 4.5455, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 2.5046728971962618e-05, |
|
"loss": 4.0365, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 2.5023364485981308e-05, |
|
"loss": 4.1837, |
|
"step": 717 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 2.5e-05, |
|
"loss": 4.3674, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 2.497663551401869e-05, |
|
"loss": 4.068, |
|
"step": 719 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 2.4953271028037385e-05, |
|
"loss": 4.2529, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 2.4929906542056075e-05, |
|
"loss": 4.3839, |
|
"step": 721 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 2.4906542056074768e-05, |
|
"loss": 4.3385, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 2.4883177570093458e-05, |
|
"loss": 4.1595, |
|
"step": 723 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 2.4859813084112148e-05, |
|
"loss": 4.4662, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 2.483644859813084e-05, |
|
"loss": 4.2813, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 2.4813084112149535e-05, |
|
"loss": 4.188, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 2.4789719626168225e-05, |
|
"loss": 3.8812, |
|
"step": 727 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 2.4766355140186915e-05, |
|
"loss": 4.377, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 2.4742990654205608e-05, |
|
"loss": 4.1591, |
|
"step": 729 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 2.47196261682243e-05, |
|
"loss": 4.1232, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 2.4696261682242988e-05, |
|
"loss": 4.2111, |
|
"step": 731 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 2.467289719626168e-05, |
|
"loss": 4.195, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 2.4649532710280375e-05, |
|
"loss": 4.5914, |
|
"step": 733 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 2.462616822429907e-05, |
|
"loss": 4.2196, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 2.4602803738317755e-05, |
|
"loss": 4.2522, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 2.457943925233645e-05, |
|
"loss": 4.1694, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 2.4556074766355142e-05, |
|
"loss": 4.2373, |
|
"step": 737 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 2.4532710280373832e-05, |
|
"loss": 3.9855, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 2.4509345794392522e-05, |
|
"loss": 4.2423, |
|
"step": 739 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 2.4485981308411215e-05, |
|
"loss": 4.2095, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 2.446261682242991e-05, |
|
"loss": 4.4789, |
|
"step": 741 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 2.44392523364486e-05, |
|
"loss": 4.0323, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 2.441588785046729e-05, |
|
"loss": 4.2841, |
|
"step": 743 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 2.4392523364485982e-05, |
|
"loss": 4.2162, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 2.4369158878504672e-05, |
|
"loss": 4.7925, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 2.4345794392523365e-05, |
|
"loss": 4.2842, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 2.4322429906542055e-05, |
|
"loss": 4.4468, |
|
"step": 747 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 2.429906542056075e-05, |
|
"loss": 4.5483, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 2.427570093457944e-05, |
|
"loss": 4.1929, |
|
"step": 749 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 2.4252336448598132e-05, |
|
"loss": 4.5784, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 2.4228971962616822e-05, |
|
"loss": 4.2466, |
|
"step": 751 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 2.4205607476635516e-05, |
|
"loss": 4.256, |
|
"step": 752 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 2.4182242990654206e-05, |
|
"loss": 4.2916, |
|
"step": 753 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 2.41588785046729e-05, |
|
"loss": 4.0228, |
|
"step": 754 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 2.413551401869159e-05, |
|
"loss": 4.1207, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 2.411214953271028e-05, |
|
"loss": 4.2923, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 2.4088785046728972e-05, |
|
"loss": 4.0716, |
|
"step": 757 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 2.4065420560747666e-05, |
|
"loss": 4.2561, |
|
"step": 758 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 2.4042056074766356e-05, |
|
"loss": 4.2501, |
|
"step": 759 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 2.4018691588785046e-05, |
|
"loss": 4.1544, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 2.399532710280374e-05, |
|
"loss": 4.5181, |
|
"step": 761 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 2.3971962616822433e-05, |
|
"loss": 4.1383, |
|
"step": 762 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 2.394859813084112e-05, |
|
"loss": 4.2302, |
|
"step": 763 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 2.3925233644859813e-05, |
|
"loss": 4.0103, |
|
"step": 764 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 2.3901869158878506e-05, |
|
"loss": 3.9473, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 2.38785046728972e-05, |
|
"loss": 4.188, |
|
"step": 766 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 2.3855140186915886e-05, |
|
"loss": 4.3639, |
|
"step": 767 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 2.383177570093458e-05, |
|
"loss": 3.93, |
|
"step": 768 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 2.3808411214953273e-05, |
|
"loss": 4.5287, |
|
"step": 769 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 2.3785046728971963e-05, |
|
"loss": 4.2475, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 2.3761682242990653e-05, |
|
"loss": 4.1368, |
|
"step": 771 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 2.3738317757009346e-05, |
|
"loss": 4.1505, |
|
"step": 772 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 2.371495327102804e-05, |
|
"loss": 4.0876, |
|
"step": 773 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 2.369158878504673e-05, |
|
"loss": 4.2193, |
|
"step": 774 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 2.366822429906542e-05, |
|
"loss": 4.0992, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 2.3644859813084113e-05, |
|
"loss": 4.1377, |
|
"step": 776 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 2.3621495327102803e-05, |
|
"loss": 4.0301, |
|
"step": 777 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 2.3598130841121497e-05, |
|
"loss": 4.421, |
|
"step": 778 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 2.3574766355140187e-05, |
|
"loss": 4.3282, |
|
"step": 779 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 2.355140186915888e-05, |
|
"loss": 4.3547, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 2.352803738317757e-05, |
|
"loss": 4.4139, |
|
"step": 781 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 2.3504672897196263e-05, |
|
"loss": 4.347, |
|
"step": 782 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 2.3481308411214953e-05, |
|
"loss": 4.2355, |
|
"step": 783 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 2.3457943925233643e-05, |
|
"loss": 4.1498, |
|
"step": 784 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 2.3434579439252337e-05, |
|
"loss": 4.0922, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 2.341121495327103e-05, |
|
"loss": 4.478, |
|
"step": 786 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 2.338785046728972e-05, |
|
"loss": 3.8649, |
|
"step": 787 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 2.336448598130841e-05, |
|
"loss": 4.2899, |
|
"step": 788 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 2.3341121495327104e-05, |
|
"loss": 4.2538, |
|
"step": 789 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 2.3317757009345797e-05, |
|
"loss": 4.2685, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 2.3294392523364484e-05, |
|
"loss": 4.4079, |
|
"step": 791 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 2.3271028037383177e-05, |
|
"loss": 4.035, |
|
"step": 792 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 2.324766355140187e-05, |
|
"loss": 4.2569, |
|
"step": 793 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 2.3224299065420564e-05, |
|
"loss": 4.1586, |
|
"step": 794 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 2.320093457943925e-05, |
|
"loss": 4.5834, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 2.3177570093457944e-05, |
|
"loss": 4.2447, |
|
"step": 796 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 2.3154205607476637e-05, |
|
"loss": 4.2122, |
|
"step": 797 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 2.3130841121495327e-05, |
|
"loss": 4.5336, |
|
"step": 798 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 2.3107476635514017e-05, |
|
"loss": 4.3484, |
|
"step": 799 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 2.308411214953271e-05, |
|
"loss": 4.4033, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 2.3060747663551404e-05, |
|
"loss": 4.2892, |
|
"step": 801 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 2.3037383177570094e-05, |
|
"loss": 4.292, |
|
"step": 802 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 2.3014018691588784e-05, |
|
"loss": 4.322, |
|
"step": 803 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 2.2990654205607477e-05, |
|
"loss": 4.1898, |
|
"step": 804 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 2.2967289719626167e-05, |
|
"loss": 4.2183, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 2.294392523364486e-05, |
|
"loss": 4.3549, |
|
"step": 806 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 2.292056074766355e-05, |
|
"loss": 4.2268, |
|
"step": 807 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 2.2897196261682244e-05, |
|
"loss": 3.9229, |
|
"step": 808 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 2.2873831775700934e-05, |
|
"loss": 4.3157, |
|
"step": 809 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 2.2850467289719628e-05, |
|
"loss": 4.3893, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 2.2827102803738318e-05, |
|
"loss": 4.2611, |
|
"step": 811 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 2.2803738317757008e-05, |
|
"loss": 4.2447, |
|
"step": 812 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 2.27803738317757e-05, |
|
"loss": 4.2921, |
|
"step": 813 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 2.2757009345794394e-05, |
|
"loss": 4.0179, |
|
"step": 814 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 2.2733644859813084e-05, |
|
"loss": 4.1825, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 2.2710280373831774e-05, |
|
"loss": 4.1997, |
|
"step": 816 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 2.2686915887850468e-05, |
|
"loss": 4.0681, |
|
"step": 817 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 2.266355140186916e-05, |
|
"loss": 4.3193, |
|
"step": 818 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 2.2640186915887848e-05, |
|
"loss": 4.1792, |
|
"step": 819 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 2.261682242990654e-05, |
|
"loss": 4.1559, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 2.2593457943925235e-05, |
|
"loss": 4.4142, |
|
"step": 821 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 2.2570093457943928e-05, |
|
"loss": 4.1386, |
|
"step": 822 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 2.2546728971962615e-05, |
|
"loss": 4.5255, |
|
"step": 823 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 2.2523364485981308e-05, |
|
"loss": 4.2801, |
|
"step": 824 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 2.25e-05, |
|
"loss": 4.1778, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 2.247663551401869e-05, |
|
"loss": 4.2408, |
|
"step": 826 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 2.2453271028037385e-05, |
|
"loss": 4.4065, |
|
"step": 827 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 2.2429906542056075e-05, |
|
"loss": 4.1209, |
|
"step": 828 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 2.2406542056074768e-05, |
|
"loss": 4.1902, |
|
"step": 829 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 2.2383177570093458e-05, |
|
"loss": 4.1085, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 2.235981308411215e-05, |
|
"loss": 4.4849, |
|
"step": 831 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 2.233644859813084e-05, |
|
"loss": 4.5035, |
|
"step": 832 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 2.231308411214953e-05, |
|
"loss": 4.308, |
|
"step": 833 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 2.2289719626168225e-05, |
|
"loss": 4.0138, |
|
"step": 834 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 2.226635514018692e-05, |
|
"loss": 4.0107, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 2.224299065420561e-05, |
|
"loss": 4.1838, |
|
"step": 836 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 2.22196261682243e-05, |
|
"loss": 4.3247, |
|
"step": 837 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 2.2196261682242992e-05, |
|
"loss": 4.2136, |
|
"step": 838 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 2.2172897196261685e-05, |
|
"loss": 4.0103, |
|
"step": 839 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 2.2149532710280372e-05, |
|
"loss": 4.2826, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 2.2126168224299065e-05, |
|
"loss": 4.3545, |
|
"step": 841 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 2.210280373831776e-05, |
|
"loss": 4.1326, |
|
"step": 842 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 2.2079439252336452e-05, |
|
"loss": 4.1696, |
|
"step": 843 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 2.205607476635514e-05, |
|
"loss": 4.2908, |
|
"step": 844 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 2.2032710280373832e-05, |
|
"loss": 4.024, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 2.2009345794392525e-05, |
|
"loss": 4.1375, |
|
"step": 846 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 2.1985981308411215e-05, |
|
"loss": 3.9984, |
|
"step": 847 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 2.1962616822429905e-05, |
|
"loss": 4.4272, |
|
"step": 848 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 2.19392523364486e-05, |
|
"loss": 4.6845, |
|
"step": 849 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 2.1915887850467292e-05, |
|
"loss": 4.6562, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 2.1892523364485982e-05, |
|
"loss": 4.2803, |
|
"step": 851 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 2.1869158878504672e-05, |
|
"loss": 4.2674, |
|
"step": 852 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 2.1845794392523366e-05, |
|
"loss": 4.3411, |
|
"step": 853 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 2.182242990654206e-05, |
|
"loss": 4.2781, |
|
"step": 854 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 2.179906542056075e-05, |
|
"loss": 4.2525, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 2.177570093457944e-05, |
|
"loss": 4.1371, |
|
"step": 856 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 2.1752336448598132e-05, |
|
"loss": 4.1449, |
|
"step": 857 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 2.1728971962616822e-05, |
|
"loss": 4.4371, |
|
"step": 858 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 2.1705607476635516e-05, |
|
"loss": 4.2179, |
|
"step": 859 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 2.1682242990654206e-05, |
|
"loss": 4.529, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 2.16588785046729e-05, |
|
"loss": 3.8545, |
|
"step": 861 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 2.163551401869159e-05, |
|
"loss": 4.1672, |
|
"step": 862 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 2.1612149532710283e-05, |
|
"loss": 4.3361, |
|
"step": 863 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 2.1588785046728973e-05, |
|
"loss": 4.0718, |
|
"step": 864 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 2.1565420560747663e-05, |
|
"loss": 4.2156, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 2.1542056074766356e-05, |
|
"loss": 4.272, |
|
"step": 866 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 2.151869158878505e-05, |
|
"loss": 4.206, |
|
"step": 867 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 2.149532710280374e-05, |
|
"loss": 4.2278, |
|
"step": 868 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 2.147196261682243e-05, |
|
"loss": 4.2667, |
|
"step": 869 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 2.1448598130841123e-05, |
|
"loss": 4.1348, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 2.1425233644859816e-05, |
|
"loss": 4.1959, |
|
"step": 871 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 2.1401869158878503e-05, |
|
"loss": 4.3323, |
|
"step": 872 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 2.1378504672897196e-05, |
|
"loss": 4.2832, |
|
"step": 873 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 2.135514018691589e-05, |
|
"loss": 4.109, |
|
"step": 874 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 2.1331775700934583e-05, |
|
"loss": 4.4554, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 2.130841121495327e-05, |
|
"loss": 4.2466, |
|
"step": 876 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 2.1285046728971963e-05, |
|
"loss": 4.5604, |
|
"step": 877 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 2.1261682242990657e-05, |
|
"loss": 4.2792, |
|
"step": 878 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 2.1238317757009347e-05, |
|
"loss": 4.1452, |
|
"step": 879 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 2.1214953271028037e-05, |
|
"loss": 4.3134, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 2.119158878504673e-05, |
|
"loss": 4.4312, |
|
"step": 881 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 2.1168224299065423e-05, |
|
"loss": 4.0045, |
|
"step": 882 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 2.1144859813084113e-05, |
|
"loss": 4.3507, |
|
"step": 883 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 2.1121495327102803e-05, |
|
"loss": 4.4745, |
|
"step": 884 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 2.1098130841121497e-05, |
|
"loss": 4.441, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 2.1074766355140187e-05, |
|
"loss": 4.3963, |
|
"step": 886 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 2.105140186915888e-05, |
|
"loss": 4.691, |
|
"step": 887 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 2.102803738317757e-05, |
|
"loss": 4.0693, |
|
"step": 888 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 2.1004672897196264e-05, |
|
"loss": 4.0052, |
|
"step": 889 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 2.0981308411214954e-05, |
|
"loss": 4.439, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 2.0957943925233647e-05, |
|
"loss": 4.2542, |
|
"step": 891 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 2.0934579439252337e-05, |
|
"loss": 4.3875, |
|
"step": 892 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 2.0911214953271027e-05, |
|
"loss": 4.6864, |
|
"step": 893 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 2.088785046728972e-05, |
|
"loss": 4.316, |
|
"step": 894 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 2.0864485981308414e-05, |
|
"loss": 4.2591, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 2.0841121495327104e-05, |
|
"loss": 4.0914, |
|
"step": 896 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 2.0817757009345794e-05, |
|
"loss": 4.2464, |
|
"step": 897 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 2.0794392523364487e-05, |
|
"loss": 4.9274, |
|
"step": 898 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 2.077102803738318e-05, |
|
"loss": 4.7296, |
|
"step": 899 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 2.0747663551401867e-05, |
|
"loss": 5.0117, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 2.072429906542056e-05, |
|
"loss": 4.2921, |
|
"step": 901 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 2.0700934579439254e-05, |
|
"loss": 4.0694, |
|
"step": 902 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 2.0677570093457947e-05, |
|
"loss": 4.5726, |
|
"step": 903 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 2.0654205607476634e-05, |
|
"loss": 4.1894, |
|
"step": 904 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 2.0630841121495327e-05, |
|
"loss": 4.2496, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 2.060747663551402e-05, |
|
"loss": 4.3837, |
|
"step": 906 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 2.058411214953271e-05, |
|
"loss": 4.3154, |
|
"step": 907 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 2.05607476635514e-05, |
|
"loss": 4.1458, |
|
"step": 908 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 2.0537383177570094e-05, |
|
"loss": 4.2969, |
|
"step": 909 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 2.0514018691588788e-05, |
|
"loss": 4.1434, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 2.0490654205607478e-05, |
|
"loss": 4.2901, |
|
"step": 911 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 2.0467289719626168e-05, |
|
"loss": 4.0777, |
|
"step": 912 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 2.044392523364486e-05, |
|
"loss": 4.2098, |
|
"step": 913 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 2.042056074766355e-05, |
|
"loss": 4.1676, |
|
"step": 914 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 2.0397196261682244e-05, |
|
"loss": 4.3009, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 2.0373831775700934e-05, |
|
"loss": 4.2482, |
|
"step": 916 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 2.0350467289719628e-05, |
|
"loss": 4.0691, |
|
"step": 917 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 2.0327102803738318e-05, |
|
"loss": 4.2496, |
|
"step": 918 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 2.030373831775701e-05, |
|
"loss": 4.1904, |
|
"step": 919 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 2.02803738317757e-05, |
|
"loss": 4.0056, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 2.025700934579439e-05, |
|
"loss": 4.1408, |
|
"step": 921 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 2.0233644859813085e-05, |
|
"loss": 3.9573, |
|
"step": 922 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 2.0210280373831778e-05, |
|
"loss": 4.1, |
|
"step": 923 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 2.0186915887850468e-05, |
|
"loss": 4.1254, |
|
"step": 924 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 2.0163551401869158e-05, |
|
"loss": 4.2993, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 2.014018691588785e-05, |
|
"loss": 4.3646, |
|
"step": 926 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 2.0116822429906545e-05, |
|
"loss": 4.1477, |
|
"step": 927 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 2.009345794392523e-05, |
|
"loss": 4.0236, |
|
"step": 928 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 2.0070093457943925e-05, |
|
"loss": 4.1204, |
|
"step": 929 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 2.0046728971962618e-05, |
|
"loss": 4.3844, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 2.002336448598131e-05, |
|
"loss": 4.4103, |
|
"step": 931 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.9999999999999998e-05, |
|
"loss": 4.1294, |
|
"step": 932 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.997663551401869e-05, |
|
"loss": 4.2804, |
|
"step": 933 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.9953271028037385e-05, |
|
"loss": 4.3015, |
|
"step": 934 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.9929906542056075e-05, |
|
"loss": 4.4078, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.9906542056074765e-05, |
|
"loss": 4.3072, |
|
"step": 936 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.988317757009346e-05, |
|
"loss": 4.1355, |
|
"step": 937 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.9859813084112152e-05, |
|
"loss": 3.9358, |
|
"step": 938 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.9836448598130842e-05, |
|
"loss": 3.954, |
|
"step": 939 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.9813084112149532e-05, |
|
"loss": 4.3639, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.9789719626168225e-05, |
|
"loss": 4.2587, |
|
"step": 941 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.9766355140186915e-05, |
|
"loss": 4.2508, |
|
"step": 942 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.974299065420561e-05, |
|
"loss": 4.4542, |
|
"step": 943 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.97196261682243e-05, |
|
"loss": 4.1811, |
|
"step": 944 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.9696261682242992e-05, |
|
"loss": 4.3571, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.9672897196261682e-05, |
|
"loss": 4.3206, |
|
"step": 946 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.9649532710280375e-05, |
|
"loss": 3.8047, |
|
"step": 947 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.9626168224299065e-05, |
|
"loss": 4.3799, |
|
"step": 948 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.960280373831776e-05, |
|
"loss": 5.1039, |
|
"step": 949 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.957943925233645e-05, |
|
"loss": 4.0424, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.9556074766355142e-05, |
|
"loss": 4.163, |
|
"step": 951 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.9532710280373832e-05, |
|
"loss": 4.2665, |
|
"step": 952 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.9509345794392522e-05, |
|
"loss": 4.3006, |
|
"step": 953 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.9485981308411216e-05, |
|
"loss": 3.9481, |
|
"step": 954 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.946261682242991e-05, |
|
"loss": 4.3406, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.94392523364486e-05, |
|
"loss": 4.1748, |
|
"step": 956 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.941588785046729e-05, |
|
"loss": 4.2548, |
|
"step": 957 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.9392523364485982e-05, |
|
"loss": 4.0172, |
|
"step": 958 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.9369158878504676e-05, |
|
"loss": 4.5716, |
|
"step": 959 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.9345794392523363e-05, |
|
"loss": 4.2715, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.9322429906542056e-05, |
|
"loss": 4.111, |
|
"step": 961 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.929906542056075e-05, |
|
"loss": 4.1517, |
|
"step": 962 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.9275700934579443e-05, |
|
"loss": 4.1256, |
|
"step": 963 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.925233644859813e-05, |
|
"loss": 4.2684, |
|
"step": 964 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.9228971962616823e-05, |
|
"loss": 4.1267, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.9205607476635516e-05, |
|
"loss": 4.0476, |
|
"step": 966 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.9182242990654206e-05, |
|
"loss": 4.2444, |
|
"step": 967 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.9158878504672896e-05, |
|
"loss": 4.1258, |
|
"step": 968 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.913551401869159e-05, |
|
"loss": 4.1988, |
|
"step": 969 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.9112149532710283e-05, |
|
"loss": 4.1309, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.9088785046728973e-05, |
|
"loss": 4.2583, |
|
"step": 971 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.9065420560747663e-05, |
|
"loss": 4.1522, |
|
"step": 972 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.9042056074766356e-05, |
|
"loss": 4.221, |
|
"step": 973 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.9018691588785046e-05, |
|
"loss": 4.4322, |
|
"step": 974 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.899532710280374e-05, |
|
"loss": 4.3223, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.897196261682243e-05, |
|
"loss": 4.2805, |
|
"step": 976 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.8948598130841123e-05, |
|
"loss": 4.5155, |
|
"step": 977 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.8925233644859813e-05, |
|
"loss": 4.1073, |
|
"step": 978 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.8901869158878507e-05, |
|
"loss": 4.0066, |
|
"step": 979 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.8878504672897197e-05, |
|
"loss": 4.0313, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.8855140186915887e-05, |
|
"loss": 3.8829, |
|
"step": 981 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.883177570093458e-05, |
|
"loss": 3.8576, |
|
"step": 982 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.8808411214953273e-05, |
|
"loss": 4.3572, |
|
"step": 983 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.8785046728971963e-05, |
|
"loss": 4.327, |
|
"step": 984 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.8761682242990653e-05, |
|
"loss": 4.0703, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.8738317757009347e-05, |
|
"loss": 4.1239, |
|
"step": 986 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.871495327102804e-05, |
|
"loss": 4.3195, |
|
"step": 987 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.8691588785046727e-05, |
|
"loss": 4.2369, |
|
"step": 988 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.866822429906542e-05, |
|
"loss": 4.3695, |
|
"step": 989 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.8644859813084114e-05, |
|
"loss": 4.0731, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.8621495327102807e-05, |
|
"loss": 4.4181, |
|
"step": 991 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.8598130841121494e-05, |
|
"loss": 4.585, |
|
"step": 992 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.8574766355140187e-05, |
|
"loss": 4.335, |
|
"step": 993 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.855140186915888e-05, |
|
"loss": 4.2939, |
|
"step": 994 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.852803738317757e-05, |
|
"loss": 4.1693, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.850467289719626e-05, |
|
"loss": 3.786, |
|
"step": 996 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.8481308411214954e-05, |
|
"loss": 4.2314, |
|
"step": 997 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.8457943925233647e-05, |
|
"loss": 4.8163, |
|
"step": 998 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.8434579439252337e-05, |
|
"loss": 4.8442, |
|
"step": 999 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.8411214953271027e-05, |
|
"loss": 4.3994, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"eval_loss": 4.276415824890137, |
|
"eval_runtime": 941.3717, |
|
"eval_samples_per_second": 2.807, |
|
"eval_steps_per_second": 0.352, |
|
"eval_wer": 1.960353034510115, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.838785046728972e-05, |
|
"loss": 4.3838, |
|
"step": 1001 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.836448598130841e-05, |
|
"loss": 4.1807, |
|
"step": 1002 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.8341121495327104e-05, |
|
"loss": 4.3417, |
|
"step": 1003 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.8317757009345794e-05, |
|
"loss": 4.2058, |
|
"step": 1004 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.8294392523364487e-05, |
|
"loss": 4.1705, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.8271028037383177e-05, |
|
"loss": 4.2186, |
|
"step": 1006 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.824766355140187e-05, |
|
"loss": 4.1123, |
|
"step": 1007 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.822429906542056e-05, |
|
"loss": 4.4482, |
|
"step": 1008 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.820093457943925e-05, |
|
"loss": 4.3113, |
|
"step": 1009 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.8177570093457944e-05, |
|
"loss": 4.0683, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.8154205607476638e-05, |
|
"loss": 4.2439, |
|
"step": 1011 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.8130841121495328e-05, |
|
"loss": 4.1346, |
|
"step": 1012 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.8107476635514018e-05, |
|
"loss": 4.2404, |
|
"step": 1013 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.808411214953271e-05, |
|
"loss": 4.3736, |
|
"step": 1014 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.8060747663551404e-05, |
|
"loss": 4.6737, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.803738317757009e-05, |
|
"loss": 4.2312, |
|
"step": 1016 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.8014018691588784e-05, |
|
"loss": 4.2907, |
|
"step": 1017 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.7990654205607478e-05, |
|
"loss": 4.1953, |
|
"step": 1018 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.796728971962617e-05, |
|
"loss": 4.3023, |
|
"step": 1019 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.7943925233644858e-05, |
|
"loss": 4.262, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.792056074766355e-05, |
|
"loss": 4.1887, |
|
"step": 1021 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.7897196261682245e-05, |
|
"loss": 4.2461, |
|
"step": 1022 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.7873831775700935e-05, |
|
"loss": 4.175, |
|
"step": 1023 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.7850467289719625e-05, |
|
"loss": 4.2105, |
|
"step": 1024 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.7827102803738318e-05, |
|
"loss": 4.245, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.780373831775701e-05, |
|
"loss": 3.8814, |
|
"step": 1026 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.77803738317757e-05, |
|
"loss": 4.2568, |
|
"step": 1027 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.775700934579439e-05, |
|
"loss": 4.4183, |
|
"step": 1028 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.7733644859813085e-05, |
|
"loss": 4.1602, |
|
"step": 1029 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.7710280373831775e-05, |
|
"loss": 4.2869, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.7686915887850468e-05, |
|
"loss": 4.2435, |
|
"step": 1031 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.7663551401869158e-05, |
|
"loss": 4.066, |
|
"step": 1032 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.764018691588785e-05, |
|
"loss": 4.4279, |
|
"step": 1033 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.761682242990654e-05, |
|
"loss": 3.926, |
|
"step": 1034 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.7593457943925235e-05, |
|
"loss": 4.1664, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.7570093457943925e-05, |
|
"loss": 4.3359, |
|
"step": 1036 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.7546728971962615e-05, |
|
"loss": 4.2128, |
|
"step": 1037 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.752336448598131e-05, |
|
"loss": 4.2249, |
|
"step": 1038 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.7500000000000002e-05, |
|
"loss": 4.2303, |
|
"step": 1039 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.7476635514018692e-05, |
|
"loss": 4.4989, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.7453271028037382e-05, |
|
"loss": 4.4136, |
|
"step": 1041 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.7429906542056075e-05, |
|
"loss": 4.4061, |
|
"step": 1042 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.740654205607477e-05, |
|
"loss": 4.4597, |
|
"step": 1043 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.738317757009346e-05, |
|
"loss": 4.175, |
|
"step": 1044 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.735981308411215e-05, |
|
"loss": 4.425, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.7336448598130842e-05, |
|
"loss": 4.3069, |
|
"step": 1046 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.7313084112149535e-05, |
|
"loss": 4.2418, |
|
"step": 1047 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.7289719626168222e-05, |
|
"loss": 4.1212, |
|
"step": 1048 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.7266355140186915e-05, |
|
"loss": 4.3381, |
|
"step": 1049 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.724299065420561e-05, |
|
"loss": 5.1032, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.72196261682243e-05, |
|
"loss": 4.6026, |
|
"step": 1051 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.719626168224299e-05, |
|
"loss": 4.1127, |
|
"step": 1052 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.7172897196261682e-05, |
|
"loss": 4.0776, |
|
"step": 1053 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.7149532710280376e-05, |
|
"loss": 3.9369, |
|
"step": 1054 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.7126168224299066e-05, |
|
"loss": 4.1443, |
|
"step": 1055 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.7102803738317756e-05, |
|
"loss": 4.131, |
|
"step": 1056 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.707943925233645e-05, |
|
"loss": 4.3994, |
|
"step": 1057 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.7056074766355142e-05, |
|
"loss": 4.1317, |
|
"step": 1058 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.7032710280373833e-05, |
|
"loss": 4.2785, |
|
"step": 1059 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.7009345794392523e-05, |
|
"loss": 4.0166, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.6985981308411216e-05, |
|
"loss": 4.1581, |
|
"step": 1061 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.6962616822429906e-05, |
|
"loss": 4.2306, |
|
"step": 1062 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.69392523364486e-05, |
|
"loss": 4.5221, |
|
"step": 1063 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.691588785046729e-05, |
|
"loss": 3.8312, |
|
"step": 1064 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.6892523364485983e-05, |
|
"loss": 4.3187, |
|
"step": 1065 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.6869158878504673e-05, |
|
"loss": 3.8238, |
|
"step": 1066 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.6845794392523366e-05, |
|
"loss": 4.2842, |
|
"step": 1067 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.6822429906542056e-05, |
|
"loss": 4.0101, |
|
"step": 1068 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.6799065420560746e-05, |
|
"loss": 4.4147, |
|
"step": 1069 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.677570093457944e-05, |
|
"loss": 3.924, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.6752336448598133e-05, |
|
"loss": 4.1558, |
|
"step": 1071 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.6728971962616823e-05, |
|
"loss": 3.924, |
|
"step": 1072 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.6705607476635513e-05, |
|
"loss": 3.9973, |
|
"step": 1073 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.6682242990654206e-05, |
|
"loss": 4.3168, |
|
"step": 1074 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.66588785046729e-05, |
|
"loss": 4.1304, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.6635514018691586e-05, |
|
"loss": 4.4585, |
|
"step": 1076 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.661214953271028e-05, |
|
"loss": 4.1511, |
|
"step": 1077 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.6588785046728973e-05, |
|
"loss": 4.1036, |
|
"step": 1078 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.6565420560747667e-05, |
|
"loss": 4.1453, |
|
"step": 1079 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.6542056074766353e-05, |
|
"loss": 4.2954, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.6518691588785047e-05, |
|
"loss": 4.1615, |
|
"step": 1081 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.649532710280374e-05, |
|
"loss": 4.2027, |
|
"step": 1082 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.647196261682243e-05, |
|
"loss": 3.9467, |
|
"step": 1083 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.644859813084112e-05, |
|
"loss": 4.2254, |
|
"step": 1084 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.6425233644859813e-05, |
|
"loss": 4.0942, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.6401869158878507e-05, |
|
"loss": 4.2585, |
|
"step": 1086 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.6378504672897197e-05, |
|
"loss": 4.363, |
|
"step": 1087 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.6355140186915887e-05, |
|
"loss": 4.246, |
|
"step": 1088 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.633177570093458e-05, |
|
"loss": 4.3585, |
|
"step": 1089 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.630841121495327e-05, |
|
"loss": 4.0166, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.6285046728971964e-05, |
|
"loss": 4.0657, |
|
"step": 1091 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.6261682242990654e-05, |
|
"loss": 4.2611, |
|
"step": 1092 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.6238317757009347e-05, |
|
"loss": 4.3694, |
|
"step": 1093 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.6214953271028037e-05, |
|
"loss": 4.4972, |
|
"step": 1094 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.619158878504673e-05, |
|
"loss": 4.3113, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.616822429906542e-05, |
|
"loss": 4.6755, |
|
"step": 1096 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.614485981308411e-05, |
|
"loss": 4.4701, |
|
"step": 1097 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.6121495327102804e-05, |
|
"loss": 5.0686, |
|
"step": 1098 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.6098130841121497e-05, |
|
"loss": 4.3936, |
|
"step": 1099 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.6074766355140187e-05, |
|
"loss": 4.7163, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.6051401869158877e-05, |
|
"loss": 3.8227, |
|
"step": 1101 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.602803738317757e-05, |
|
"loss": 4.3251, |
|
"step": 1102 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.6004672897196264e-05, |
|
"loss": 4.2298, |
|
"step": 1103 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.598130841121495e-05, |
|
"loss": 3.9855, |
|
"step": 1104 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.5957943925233644e-05, |
|
"loss": 3.9809, |
|
"step": 1105 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.5934579439252337e-05, |
|
"loss": 4.0129, |
|
"step": 1106 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.591121495327103e-05, |
|
"loss": 3.8364, |
|
"step": 1107 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.5887850467289717e-05, |
|
"loss": 4.0056, |
|
"step": 1108 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.586448598130841e-05, |
|
"loss": 4.2688, |
|
"step": 1109 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.5841121495327104e-05, |
|
"loss": 4.2297, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.5817757009345794e-05, |
|
"loss": 3.9998, |
|
"step": 1111 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.5794392523364484e-05, |
|
"loss": 4.3571, |
|
"step": 1112 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.5771028037383178e-05, |
|
"loss": 3.7861, |
|
"step": 1113 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.574766355140187e-05, |
|
"loss": 4.2606, |
|
"step": 1114 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.572429906542056e-05, |
|
"loss": 4.4363, |
|
"step": 1115 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.570093457943925e-05, |
|
"loss": 4.2552, |
|
"step": 1116 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.5677570093457944e-05, |
|
"loss": 4.1592, |
|
"step": 1117 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.5654205607476634e-05, |
|
"loss": 4.1694, |
|
"step": 1118 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.5630841121495328e-05, |
|
"loss": 4.1184, |
|
"step": 1119 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.5607476635514018e-05, |
|
"loss": 4.1001, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.558411214953271e-05, |
|
"loss": 4.214, |
|
"step": 1121 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.55607476635514e-05, |
|
"loss": 4.131, |
|
"step": 1122 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.5537383177570095e-05, |
|
"loss": 4.0021, |
|
"step": 1123 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.5514018691588785e-05, |
|
"loss": 4.0581, |
|
"step": 1124 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.5490654205607475e-05, |
|
"loss": 4.3702, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.5467289719626168e-05, |
|
"loss": 4.0275, |
|
"step": 1126 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.544392523364486e-05, |
|
"loss": 4.0219, |
|
"step": 1127 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.542056074766355e-05, |
|
"loss": 4.5626, |
|
"step": 1128 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.539719626168224e-05, |
|
"loss": 3.7936, |
|
"step": 1129 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.5373831775700935e-05, |
|
"loss": 4.47, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.5350467289719628e-05, |
|
"loss": 4.3354, |
|
"step": 1131 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.5327102803738315e-05, |
|
"loss": 4.1541, |
|
"step": 1132 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.5303738317757008e-05, |
|
"loss": 4.0681, |
|
"step": 1133 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.52803738317757e-05, |
|
"loss": 4.2114, |
|
"step": 1134 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.5257009345794393e-05, |
|
"loss": 4.4533, |
|
"step": 1135 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.5233644859813083e-05, |
|
"loss": 4.1447, |
|
"step": 1136 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.5210280373831775e-05, |
|
"loss": 4.2246, |
|
"step": 1137 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.5186915887850468e-05, |
|
"loss": 4.3156, |
|
"step": 1138 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.516355140186916e-05, |
|
"loss": 4.033, |
|
"step": 1139 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.5140186915887848e-05, |
|
"loss": 4.2967, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.5116822429906542e-05, |
|
"loss": 4.1513, |
|
"step": 1141 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.5093457943925234e-05, |
|
"loss": 4.5366, |
|
"step": 1142 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.5070093457943927e-05, |
|
"loss": 4.2984, |
|
"step": 1143 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.5046728971962615e-05, |
|
"loss": 4.515, |
|
"step": 1144 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.5023364485981309e-05, |
|
"loss": 4.2453, |
|
"step": 1145 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.5e-05, |
|
"loss": 4.3104, |
|
"step": 1146 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.4976635514018692e-05, |
|
"loss": 4.2848, |
|
"step": 1147 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.4953271028037384e-05, |
|
"loss": 4.132, |
|
"step": 1148 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.4929906542056075e-05, |
|
"loss": 4.3078, |
|
"step": 1149 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.4906542056074767e-05, |
|
"loss": 4.1989, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.4883177570093459e-05, |
|
"loss": 4.1369, |
|
"step": 1151 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.485981308411215e-05, |
|
"loss": 4.2523, |
|
"step": 1152 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.4836448598130842e-05, |
|
"loss": 4.2366, |
|
"step": 1153 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.4813084112149532e-05, |
|
"loss": 4.2827, |
|
"step": 1154 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.4789719626168226e-05, |
|
"loss": 4.2893, |
|
"step": 1155 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.4766355140186916e-05, |
|
"loss": 4.2778, |
|
"step": 1156 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.4742990654205609e-05, |
|
"loss": 4.1089, |
|
"step": 1157 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.4719626168224299e-05, |
|
"loss": 4.1321, |
|
"step": 1158 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.4696261682242993e-05, |
|
"loss": 4.0284, |
|
"step": 1159 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.4672897196261683e-05, |
|
"loss": 3.9388, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.4649532710280374e-05, |
|
"loss": 4.3155, |
|
"step": 1161 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.4626168224299066e-05, |
|
"loss": 4.1251, |
|
"step": 1162 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.4602803738317758e-05, |
|
"loss": 4.1858, |
|
"step": 1163 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.457943925233645e-05, |
|
"loss": 4.0236, |
|
"step": 1164 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.4556074766355141e-05, |
|
"loss": 4.0506, |
|
"step": 1165 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.4532710280373833e-05, |
|
"loss": 4.2977, |
|
"step": 1166 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.4509345794392524e-05, |
|
"loss": 4.0952, |
|
"step": 1167 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.4485981308411214e-05, |
|
"loss": 3.9324, |
|
"step": 1168 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.4462616822429908e-05, |
|
"loss": 4.2742, |
|
"step": 1169 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.4439252336448598e-05, |
|
"loss": 4.3295, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.4415887850467291e-05, |
|
"loss": 3.9502, |
|
"step": 1171 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.4392523364485981e-05, |
|
"loss": 3.9991, |
|
"step": 1172 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.4369158878504675e-05, |
|
"loss": 4.3436, |
|
"step": 1173 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.4345794392523365e-05, |
|
"loss": 4.0077, |
|
"step": 1174 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.4322429906542058e-05, |
|
"loss": 3.9805, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.4299065420560748e-05, |
|
"loss": 4.1969, |
|
"step": 1176 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.427570093457944e-05, |
|
"loss": 4.2658, |
|
"step": 1177 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.4252336448598131e-05, |
|
"loss": 4.2669, |
|
"step": 1178 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.4228971962616823e-05, |
|
"loss": 3.7971, |
|
"step": 1179 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.4205607476635515e-05, |
|
"loss": 4.1124, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.4182242990654207e-05, |
|
"loss": 4.0168, |
|
"step": 1181 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.4158878504672898e-05, |
|
"loss": 4.1873, |
|
"step": 1182 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.413551401869159e-05, |
|
"loss": 3.976, |
|
"step": 1183 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.411214953271028e-05, |
|
"loss": 4.0415, |
|
"step": 1184 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.4088785046728973e-05, |
|
"loss": 4.5128, |
|
"step": 1185 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.4065420560747663e-05, |
|
"loss": 3.9412, |
|
"step": 1186 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.4042056074766357e-05, |
|
"loss": 4.4509, |
|
"step": 1187 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.4018691588785047e-05, |
|
"loss": 4.0894, |
|
"step": 1188 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.399532710280374e-05, |
|
"loss": 3.9693, |
|
"step": 1189 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.397196261682243e-05, |
|
"loss": 4.1943, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.3948598130841122e-05, |
|
"loss": 4.2143, |
|
"step": 1191 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.3925233644859814e-05, |
|
"loss": 4.3757, |
|
"step": 1192 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.3901869158878505e-05, |
|
"loss": 4.6904, |
|
"step": 1193 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.3878504672897197e-05, |
|
"loss": 4.1949, |
|
"step": 1194 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.3855140186915889e-05, |
|
"loss": 4.4187, |
|
"step": 1195 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.383177570093458e-05, |
|
"loss": 4.0573, |
|
"step": 1196 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.3808411214953272e-05, |
|
"loss": 4.0778, |
|
"step": 1197 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.3785046728971962e-05, |
|
"loss": 4.4602, |
|
"step": 1198 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.3761682242990655e-05, |
|
"loss": 4.364, |
|
"step": 1199 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.3738317757009345e-05, |
|
"loss": 4.0137, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.3714953271028039e-05, |
|
"loss": 4.063, |
|
"step": 1201 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.3691588785046729e-05, |
|
"loss": 4.0722, |
|
"step": 1202 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.3668224299065422e-05, |
|
"loss": 4.0259, |
|
"step": 1203 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.3644859813084112e-05, |
|
"loss": 3.9311, |
|
"step": 1204 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.3621495327102804e-05, |
|
"loss": 3.8923, |
|
"step": 1205 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.3598130841121496e-05, |
|
"loss": 4.2806, |
|
"step": 1206 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.3574766355140187e-05, |
|
"loss": 4.4994, |
|
"step": 1207 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.3551401869158879e-05, |
|
"loss": 4.1264, |
|
"step": 1208 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.352803738317757e-05, |
|
"loss": 4.0846, |
|
"step": 1209 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.3504672897196263e-05, |
|
"loss": 3.9229, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.3481308411214954e-05, |
|
"loss": 4.2628, |
|
"step": 1211 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.3457943925233644e-05, |
|
"loss": 4.3152, |
|
"step": 1212 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.3434579439252338e-05, |
|
"loss": 4.2379, |
|
"step": 1213 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.3411214953271028e-05, |
|
"loss": 4.0713, |
|
"step": 1214 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.3387850467289721e-05, |
|
"loss": 4.1032, |
|
"step": 1215 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.3364485981308411e-05, |
|
"loss": 4.1874, |
|
"step": 1216 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.3341121495327104e-05, |
|
"loss": 4.0298, |
|
"step": 1217 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.3317757009345794e-05, |
|
"loss": 3.9534, |
|
"step": 1218 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.3294392523364486e-05, |
|
"loss": 3.9305, |
|
"step": 1219 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.3271028037383178e-05, |
|
"loss": 4.2816, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.324766355140187e-05, |
|
"loss": 4.1107, |
|
"step": 1221 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.3224299065420561e-05, |
|
"loss": 3.8697, |
|
"step": 1222 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.3200934579439253e-05, |
|
"loss": 4.2744, |
|
"step": 1223 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.3177570093457945e-05, |
|
"loss": 4.1552, |
|
"step": 1224 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.3154205607476636e-05, |
|
"loss": 4.2383, |
|
"step": 1225 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.3130841121495328e-05, |
|
"loss": 4.2343, |
|
"step": 1226 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.310747663551402e-05, |
|
"loss": 4.0618, |
|
"step": 1227 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.308411214953271e-05, |
|
"loss": 4.0713, |
|
"step": 1228 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.3060747663551403e-05, |
|
"loss": 4.0311, |
|
"step": 1229 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.3037383177570093e-05, |
|
"loss": 4.2391, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.3014018691588787e-05, |
|
"loss": 4.1531, |
|
"step": 1231 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.2990654205607477e-05, |
|
"loss": 4.0725, |
|
"step": 1232 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.296728971962617e-05, |
|
"loss": 4.0772, |
|
"step": 1233 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.294392523364486e-05, |
|
"loss": 4.0475, |
|
"step": 1234 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.2920560747663552e-05, |
|
"loss": 4.4437, |
|
"step": 1235 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.2897196261682243e-05, |
|
"loss": 4.154, |
|
"step": 1236 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.2873831775700935e-05, |
|
"loss": 4.0849, |
|
"step": 1237 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.2850467289719627e-05, |
|
"loss": 4.0024, |
|
"step": 1238 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.2827102803738318e-05, |
|
"loss": 4.1889, |
|
"step": 1239 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.280373831775701e-05, |
|
"loss": 4.2619, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.2780373831775702e-05, |
|
"loss": 4.2038, |
|
"step": 1241 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.2757009345794392e-05, |
|
"loss": 4.5605, |
|
"step": 1242 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.2733644859813085e-05, |
|
"loss": 4.277, |
|
"step": 1243 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.2710280373831775e-05, |
|
"loss": 4.2266, |
|
"step": 1244 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.2686915887850469e-05, |
|
"loss": 3.9489, |
|
"step": 1245 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.2663551401869159e-05, |
|
"loss": 4.3945, |
|
"step": 1246 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.2640186915887852e-05, |
|
"loss": 4.4063, |
|
"step": 1247 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.2616822429906542e-05, |
|
"loss": 4.1918, |
|
"step": 1248 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.2593457943925234e-05, |
|
"loss": 4.4201, |
|
"step": 1249 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.2570093457943925e-05, |
|
"loss": 4.5105, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.2546728971962617e-05, |
|
"loss": 4.2447, |
|
"step": 1251 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.2523364485981309e-05, |
|
"loss": 4.1262, |
|
"step": 1252 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.25e-05, |
|
"loss": 4.1989, |
|
"step": 1253 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.2476635514018692e-05, |
|
"loss": 4.292, |
|
"step": 1254 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.2453271028037384e-05, |
|
"loss": 4.3866, |
|
"step": 1255 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.2429906542056074e-05, |
|
"loss": 4.0586, |
|
"step": 1256 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.2406542056074767e-05, |
|
"loss": 4.0311, |
|
"step": 1257 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.2383177570093457e-05, |
|
"loss": 3.9164, |
|
"step": 1258 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.235981308411215e-05, |
|
"loss": 4.1645, |
|
"step": 1259 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.233644859813084e-05, |
|
"loss": 4.4957, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.2313084112149534e-05, |
|
"loss": 3.9794, |
|
"step": 1261 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.2289719626168224e-05, |
|
"loss": 4.2441, |
|
"step": 1262 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.2266355140186916e-05, |
|
"loss": 4.2401, |
|
"step": 1263 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.2242990654205608e-05, |
|
"loss": 4.1159, |
|
"step": 1264 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.22196261682243e-05, |
|
"loss": 4.317, |
|
"step": 1265 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.2196261682242991e-05, |
|
"loss": 4.2399, |
|
"step": 1266 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.2172897196261683e-05, |
|
"loss": 4.0619, |
|
"step": 1267 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.2149532710280374e-05, |
|
"loss": 4.0484, |
|
"step": 1268 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.2126168224299066e-05, |
|
"loss": 4.2496, |
|
"step": 1269 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.2102803738317758e-05, |
|
"loss": 4.0288, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.207943925233645e-05, |
|
"loss": 4.0943, |
|
"step": 1271 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.205607476635514e-05, |
|
"loss": 4.1295, |
|
"step": 1272 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.2032710280373833e-05, |
|
"loss": 4.3845, |
|
"step": 1273 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.2009345794392523e-05, |
|
"loss": 4.0699, |
|
"step": 1274 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.1985981308411216e-05, |
|
"loss": 4.1361, |
|
"step": 1275 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.1962616822429906e-05, |
|
"loss": 4.4625, |
|
"step": 1276 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.19392523364486e-05, |
|
"loss": 4.2566, |
|
"step": 1277 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.191588785046729e-05, |
|
"loss": 4.1578, |
|
"step": 1278 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.1892523364485981e-05, |
|
"loss": 4.3413, |
|
"step": 1279 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.1869158878504673e-05, |
|
"loss": 4.28, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.1845794392523365e-05, |
|
"loss": 4.2081, |
|
"step": 1281 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.1822429906542057e-05, |
|
"loss": 4.0978, |
|
"step": 1282 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.1799065420560748e-05, |
|
"loss": 4.3259, |
|
"step": 1283 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.177570093457944e-05, |
|
"loss": 4.1025, |
|
"step": 1284 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.1752336448598132e-05, |
|
"loss": 4.1345, |
|
"step": 1285 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.1728971962616822e-05, |
|
"loss": 4.2033, |
|
"step": 1286 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.1705607476635515e-05, |
|
"loss": 4.1905, |
|
"step": 1287 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.1682242990654205e-05, |
|
"loss": 4.1205, |
|
"step": 1288 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.1658878504672898e-05, |
|
"loss": 4.1419, |
|
"step": 1289 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.1635514018691588e-05, |
|
"loss": 3.8481, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.1612149532710282e-05, |
|
"loss": 4.2781, |
|
"step": 1291 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.1588785046728972e-05, |
|
"loss": 4.3232, |
|
"step": 1292 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.1565420560747664e-05, |
|
"loss": 4.6789, |
|
"step": 1293 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.1542056074766355e-05, |
|
"loss": 4.0168, |
|
"step": 1294 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.1518691588785047e-05, |
|
"loss": 4.0139, |
|
"step": 1295 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.1495327102803739e-05, |
|
"loss": 3.9468, |
|
"step": 1296 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.147196261682243e-05, |
|
"loss": 4.0327, |
|
"step": 1297 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.1448598130841122e-05, |
|
"loss": 3.8068, |
|
"step": 1298 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.1425233644859814e-05, |
|
"loss": 4.6063, |
|
"step": 1299 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.1401869158878504e-05, |
|
"loss": 3.9934, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.1378504672897197e-05, |
|
"loss": 4.2126, |
|
"step": 1301 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.1355140186915887e-05, |
|
"loss": 4.3675, |
|
"step": 1302 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.133177570093458e-05, |
|
"loss": 4.1626, |
|
"step": 1303 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.130841121495327e-05, |
|
"loss": 4.1996, |
|
"step": 1304 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.1285046728971964e-05, |
|
"loss": 4.0115, |
|
"step": 1305 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.1261682242990654e-05, |
|
"loss": 4.1536, |
|
"step": 1306 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.1238317757009346e-05, |
|
"loss": 4.0911, |
|
"step": 1307 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.1214953271028037e-05, |
|
"loss": 3.9302, |
|
"step": 1308 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.1191588785046729e-05, |
|
"loss": 4.1187, |
|
"step": 1309 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.116822429906542e-05, |
|
"loss": 3.9579, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.1144859813084113e-05, |
|
"loss": 4.0662, |
|
"step": 1311 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.1121495327102804e-05, |
|
"loss": 4.2141, |
|
"step": 1312 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.1098130841121496e-05, |
|
"loss": 4.1037, |
|
"step": 1313 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.1074766355140186e-05, |
|
"loss": 3.9829, |
|
"step": 1314 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.105140186915888e-05, |
|
"loss": 4.3324, |
|
"step": 1315 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.102803738317757e-05, |
|
"loss": 4.1306, |
|
"step": 1316 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.1004672897196263e-05, |
|
"loss": 4.0159, |
|
"step": 1317 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.0981308411214953e-05, |
|
"loss": 4.567, |
|
"step": 1318 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.0957943925233646e-05, |
|
"loss": 4.0622, |
|
"step": 1319 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.0934579439252336e-05, |
|
"loss": 3.8993, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.091121495327103e-05, |
|
"loss": 3.8566, |
|
"step": 1321 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.088785046728972e-05, |
|
"loss": 4.09, |
|
"step": 1322 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.0864485981308411e-05, |
|
"loss": 4.0313, |
|
"step": 1323 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.0841121495327103e-05, |
|
"loss": 4.0637, |
|
"step": 1324 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.0817757009345795e-05, |
|
"loss": 4.1181, |
|
"step": 1325 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.0794392523364486e-05, |
|
"loss": 3.9657, |
|
"step": 1326 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.0771028037383178e-05, |
|
"loss": 3.8988, |
|
"step": 1327 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.074766355140187e-05, |
|
"loss": 4.0866, |
|
"step": 1328 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.0724299065420561e-05, |
|
"loss": 4.2302, |
|
"step": 1329 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.0700934579439251e-05, |
|
"loss": 4.2435, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.0677570093457945e-05, |
|
"loss": 3.9233, |
|
"step": 1331 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.0654205607476635e-05, |
|
"loss": 4.1559, |
|
"step": 1332 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.0630841121495328e-05, |
|
"loss": 4.1301, |
|
"step": 1333 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.0607476635514018e-05, |
|
"loss": 4.2697, |
|
"step": 1334 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.0584112149532712e-05, |
|
"loss": 3.9397, |
|
"step": 1335 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.0560747663551402e-05, |
|
"loss": 4.0593, |
|
"step": 1336 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.0537383177570093e-05, |
|
"loss": 4.2725, |
|
"step": 1337 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.0514018691588785e-05, |
|
"loss": 4.2991, |
|
"step": 1338 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.0490654205607477e-05, |
|
"loss": 4.4362, |
|
"step": 1339 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.0467289719626168e-05, |
|
"loss": 4.5216, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.044392523364486e-05, |
|
"loss": 4.3957, |
|
"step": 1341 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.0420560747663552e-05, |
|
"loss": 4.1009, |
|
"step": 1342 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.0397196261682244e-05, |
|
"loss": 4.3266, |
|
"step": 1343 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.0373831775700934e-05, |
|
"loss": 4.087, |
|
"step": 1344 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.0350467289719627e-05, |
|
"loss": 4.2562, |
|
"step": 1345 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.0327102803738317e-05, |
|
"loss": 4.2976, |
|
"step": 1346 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.030373831775701e-05, |
|
"loss": 4.6248, |
|
"step": 1347 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.02803738317757e-05, |
|
"loss": 4.1296, |
|
"step": 1348 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.0257009345794394e-05, |
|
"loss": 4.2328, |
|
"step": 1349 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.0233644859813084e-05, |
|
"loss": 4.5745, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.0210280373831776e-05, |
|
"loss": 4.3112, |
|
"step": 1351 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.0186915887850467e-05, |
|
"loss": 4.2557, |
|
"step": 1352 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.0163551401869159e-05, |
|
"loss": 4.2107, |
|
"step": 1353 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.014018691588785e-05, |
|
"loss": 4.2676, |
|
"step": 1354 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.0116822429906542e-05, |
|
"loss": 4.0928, |
|
"step": 1355 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.0093457943925234e-05, |
|
"loss": 4.4405, |
|
"step": 1356 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.0070093457943926e-05, |
|
"loss": 3.9069, |
|
"step": 1357 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.0046728971962616e-05, |
|
"loss": 4.2085, |
|
"step": 1358 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.0023364485981309e-05, |
|
"loss": 4.364, |
|
"step": 1359 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 9.999999999999999e-06, |
|
"loss": 4.6242, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 9.976635514018693e-06, |
|
"loss": 4.115, |
|
"step": 1361 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 9.953271028037383e-06, |
|
"loss": 3.9853, |
|
"step": 1362 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 9.929906542056076e-06, |
|
"loss": 4.0141, |
|
"step": 1363 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 9.906542056074766e-06, |
|
"loss": 4.4776, |
|
"step": 1364 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 9.883177570093458e-06, |
|
"loss": 4.1173, |
|
"step": 1365 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 9.85981308411215e-06, |
|
"loss": 3.8906, |
|
"step": 1366 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 9.836448598130841e-06, |
|
"loss": 4.1557, |
|
"step": 1367 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 9.813084112149533e-06, |
|
"loss": 3.9848, |
|
"step": 1368 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 9.789719626168224e-06, |
|
"loss": 4.3488, |
|
"step": 1369 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 9.766355140186916e-06, |
|
"loss": 4.376, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 9.742990654205608e-06, |
|
"loss": 4.28, |
|
"step": 1371 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 9.7196261682243e-06, |
|
"loss": 3.9477, |
|
"step": 1372 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 9.696261682242991e-06, |
|
"loss": 4.1401, |
|
"step": 1373 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 9.672897196261681e-06, |
|
"loss": 4.2706, |
|
"step": 1374 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 9.649532710280375e-06, |
|
"loss": 3.8855, |
|
"step": 1375 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 9.626168224299065e-06, |
|
"loss": 3.8925, |
|
"step": 1376 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 9.602803738317758e-06, |
|
"loss": 4.0841, |
|
"step": 1377 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 9.579439252336448e-06, |
|
"loss": 4.1056, |
|
"step": 1378 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 9.556074766355141e-06, |
|
"loss": 4.1274, |
|
"step": 1379 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 9.532710280373831e-06, |
|
"loss": 4.0996, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 9.509345794392523e-06, |
|
"loss": 3.7845, |
|
"step": 1381 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 9.485981308411215e-06, |
|
"loss": 4.3562, |
|
"step": 1382 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 9.462616822429907e-06, |
|
"loss": 4.0043, |
|
"step": 1383 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 9.439252336448598e-06, |
|
"loss": 4.2826, |
|
"step": 1384 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 9.41588785046729e-06, |
|
"loss": 4.442, |
|
"step": 1385 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 9.392523364485982e-06, |
|
"loss": 4.2274, |
|
"step": 1386 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 9.369158878504673e-06, |
|
"loss": 4.2576, |
|
"step": 1387 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 9.345794392523363e-06, |
|
"loss": 4.2564, |
|
"step": 1388 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 9.322429906542057e-06, |
|
"loss": 4.1399, |
|
"step": 1389 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 9.299065420560747e-06, |
|
"loss": 4.205, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 9.27570093457944e-06, |
|
"loss": 3.9346, |
|
"step": 1391 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 9.25233644859813e-06, |
|
"loss": 4.0359, |
|
"step": 1392 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 9.228971962616824e-06, |
|
"loss": 4.2414, |
|
"step": 1393 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 9.205607476635514e-06, |
|
"loss": 4.3723, |
|
"step": 1394 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 9.182242990654205e-06, |
|
"loss": 4.2651, |
|
"step": 1395 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 9.158878504672897e-06, |
|
"loss": 4.1683, |
|
"step": 1396 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 9.135514018691589e-06, |
|
"loss": 4.34, |
|
"step": 1397 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 9.11214953271028e-06, |
|
"loss": 4.1625, |
|
"step": 1398 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 9.088785046728972e-06, |
|
"loss": 4.3213, |
|
"step": 1399 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 9.065420560747664e-06, |
|
"loss": 4.4264, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 9.042056074766356e-06, |
|
"loss": 3.9775, |
|
"step": 1401 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 9.018691588785046e-06, |
|
"loss": 4.1745, |
|
"step": 1402 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 8.995327102803739e-06, |
|
"loss": 4.0301, |
|
"step": 1403 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 8.971962616822429e-06, |
|
"loss": 3.9575, |
|
"step": 1404 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 8.948598130841122e-06, |
|
"loss": 3.9296, |
|
"step": 1405 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 8.925233644859812e-06, |
|
"loss": 4.4694, |
|
"step": 1406 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 8.901869158878506e-06, |
|
"loss": 4.2067, |
|
"step": 1407 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 8.878504672897196e-06, |
|
"loss": 3.9505, |
|
"step": 1408 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 8.855140186915887e-06, |
|
"loss": 4.0672, |
|
"step": 1409 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 8.831775700934579e-06, |
|
"loss": 3.8908, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 8.80841121495327e-06, |
|
"loss": 3.9355, |
|
"step": 1411 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 8.785046728971963e-06, |
|
"loss": 3.9802, |
|
"step": 1412 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 8.761682242990654e-06, |
|
"loss": 4.402, |
|
"step": 1413 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 8.738317757009346e-06, |
|
"loss": 4.2815, |
|
"step": 1414 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 8.714953271028038e-06, |
|
"loss": 4.2071, |
|
"step": 1415 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 8.69158878504673e-06, |
|
"loss": 4.3293, |
|
"step": 1416 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 8.668224299065421e-06, |
|
"loss": 4.1243, |
|
"step": 1417 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 8.644859813084111e-06, |
|
"loss": 4.1596, |
|
"step": 1418 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 8.621495327102804e-06, |
|
"loss": 4.1897, |
|
"step": 1419 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 8.598130841121494e-06, |
|
"loss": 3.9652, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 8.574766355140188e-06, |
|
"loss": 4.1442, |
|
"step": 1421 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 8.551401869158878e-06, |
|
"loss": 4.2353, |
|
"step": 1422 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 8.528037383177571e-06, |
|
"loss": 4.1577, |
|
"step": 1423 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 8.504672897196261e-06, |
|
"loss": 4.0383, |
|
"step": 1424 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 8.481308411214953e-06, |
|
"loss": 4.2949, |
|
"step": 1425 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 8.457943925233645e-06, |
|
"loss": 4.2969, |
|
"step": 1426 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 8.434579439252336e-06, |
|
"loss": 4.0419, |
|
"step": 1427 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 8.411214953271028e-06, |
|
"loss": 4.0483, |
|
"step": 1428 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 8.38785046728972e-06, |
|
"loss": 4.2055, |
|
"step": 1429 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 8.364485981308411e-06, |
|
"loss": 4.332, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 8.341121495327103e-06, |
|
"loss": 4.2456, |
|
"step": 1431 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 8.317757009345793e-06, |
|
"loss": 4.2195, |
|
"step": 1432 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 8.294392523364487e-06, |
|
"loss": 4.4525, |
|
"step": 1433 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 8.271028037383177e-06, |
|
"loss": 4.0624, |
|
"step": 1434 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 8.24766355140187e-06, |
|
"loss": 3.9296, |
|
"step": 1435 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 8.22429906542056e-06, |
|
"loss": 3.9153, |
|
"step": 1436 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 8.200934579439253e-06, |
|
"loss": 4.3315, |
|
"step": 1437 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 8.177570093457943e-06, |
|
"loss": 4.0011, |
|
"step": 1438 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 8.154205607476635e-06, |
|
"loss": 4.2285, |
|
"step": 1439 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 8.130841121495327e-06, |
|
"loss": 4.1969, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 8.107476635514018e-06, |
|
"loss": 4.3416, |
|
"step": 1441 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 8.08411214953271e-06, |
|
"loss": 4.0149, |
|
"step": 1442 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 8.060747663551402e-06, |
|
"loss": 4.0146, |
|
"step": 1443 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 8.037383177570094e-06, |
|
"loss": 4.1167, |
|
"step": 1444 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 8.014018691588785e-06, |
|
"loss": 4.2799, |
|
"step": 1445 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 7.990654205607475e-06, |
|
"loss": 4.3641, |
|
"step": 1446 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 7.967289719626169e-06, |
|
"loss": 4.2311, |
|
"step": 1447 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 7.943925233644859e-06, |
|
"loss": 4.4094, |
|
"step": 1448 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 7.920560747663552e-06, |
|
"loss": 4.5298, |
|
"step": 1449 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 7.897196261682242e-06, |
|
"loss": 4.159, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 7.873831775700936e-06, |
|
"loss": 4.0792, |
|
"step": 1451 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 7.850467289719626e-06, |
|
"loss": 4.1228, |
|
"step": 1452 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 7.827102803738317e-06, |
|
"loss": 3.9725, |
|
"step": 1453 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 7.803738317757009e-06, |
|
"loss": 3.9233, |
|
"step": 1454 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 7.7803738317757e-06, |
|
"loss": 4.0491, |
|
"step": 1455 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 7.757009345794392e-06, |
|
"loss": 4.0949, |
|
"step": 1456 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 7.733644859813084e-06, |
|
"loss": 4.3294, |
|
"step": 1457 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 7.710280373831776e-06, |
|
"loss": 4.2094, |
|
"step": 1458 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 7.686915887850467e-06, |
|
"loss": 3.9979, |
|
"step": 1459 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 7.663551401869157e-06, |
|
"loss": 4.189, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 7.64018691588785e-06, |
|
"loss": 4.1713, |
|
"step": 1461 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 7.616822429906542e-06, |
|
"loss": 4.2261, |
|
"step": 1462 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 7.593457943925234e-06, |
|
"loss": 4.154, |
|
"step": 1463 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 7.570093457943924e-06, |
|
"loss": 4.2109, |
|
"step": 1464 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 7.546728971962617e-06, |
|
"loss": 4.3885, |
|
"step": 1465 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 7.523364485981308e-06, |
|
"loss": 3.8577, |
|
"step": 1466 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 7.5e-06, |
|
"loss": 4.0928, |
|
"step": 1467 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 7.476635514018692e-06, |
|
"loss": 4.1424, |
|
"step": 1468 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 7.453271028037384e-06, |
|
"loss": 3.821, |
|
"step": 1469 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 7.429906542056075e-06, |
|
"loss": 3.8844, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 7.406542056074766e-06, |
|
"loss": 4.113, |
|
"step": 1471 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 7.383177570093458e-06, |
|
"loss": 3.8827, |
|
"step": 1472 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 7.3598130841121496e-06, |
|
"loss": 4.0513, |
|
"step": 1473 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 7.336448598130841e-06, |
|
"loss": 3.9866, |
|
"step": 1474 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 7.313084112149533e-06, |
|
"loss": 4.1726, |
|
"step": 1475 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 7.289719626168225e-06, |
|
"loss": 4.1526, |
|
"step": 1476 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 7.266355140186916e-06, |
|
"loss": 4.2075, |
|
"step": 1477 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 7.242990654205607e-06, |
|
"loss": 3.8465, |
|
"step": 1478 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 7.219626168224299e-06, |
|
"loss": 4.1633, |
|
"step": 1479 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 7.196261682242991e-06, |
|
"loss": 3.9722, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 7.172897196261682e-06, |
|
"loss": 4.0622, |
|
"step": 1481 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 7.149532710280374e-06, |
|
"loss": 4.1813, |
|
"step": 1482 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 7.126168224299066e-06, |
|
"loss": 4.1506, |
|
"step": 1483 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 7.1028037383177574e-06, |
|
"loss": 3.7479, |
|
"step": 1484 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 7.079439252336449e-06, |
|
"loss": 4.0951, |
|
"step": 1485 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 7.05607476635514e-06, |
|
"loss": 4.239, |
|
"step": 1486 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 7.032710280373832e-06, |
|
"loss": 4.2861, |
|
"step": 1487 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 7.009345794392523e-06, |
|
"loss": 4.242, |
|
"step": 1488 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 6.985981308411215e-06, |
|
"loss": 4.1746, |
|
"step": 1489 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 6.962616822429907e-06, |
|
"loss": 3.9778, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 6.9392523364485985e-06, |
|
"loss": 4.5709, |
|
"step": 1491 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 6.91588785046729e-06, |
|
"loss": 4.1187, |
|
"step": 1492 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 6.892523364485981e-06, |
|
"loss": 3.7873, |
|
"step": 1493 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 6.869158878504673e-06, |
|
"loss": 3.9793, |
|
"step": 1494 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 6.8457943925233644e-06, |
|
"loss": 4.111, |
|
"step": 1495 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 6.822429906542056e-06, |
|
"loss": 4.5429, |
|
"step": 1496 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 6.799065420560748e-06, |
|
"loss": 4.3431, |
|
"step": 1497 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 6.7757009345794396e-06, |
|
"loss": 4.5138, |
|
"step": 1498 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 6.752336448598131e-06, |
|
"loss": 4.0346, |
|
"step": 1499 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 6.728971962616822e-06, |
|
"loss": 4.3001, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"eval_loss": 4.2075958251953125, |
|
"eval_runtime": 947.3446, |
|
"eval_samples_per_second": 2.789, |
|
"eval_steps_per_second": 0.349, |
|
"eval_wer": 1.9032526775089251, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 6.705607476635514e-06, |
|
"loss": 4.2039, |
|
"step": 1501 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 6.6822429906542055e-06, |
|
"loss": 4.1607, |
|
"step": 1502 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 6.658878504672897e-06, |
|
"loss": 4.2332, |
|
"step": 1503 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 6.635514018691589e-06, |
|
"loss": 4.108, |
|
"step": 1504 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 6.612149532710281e-06, |
|
"loss": 4.2319, |
|
"step": 1505 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 6.588785046728972e-06, |
|
"loss": 4.055, |
|
"step": 1506 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 6.565420560747664e-06, |
|
"loss": 4.0099, |
|
"step": 1507 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 6.542056074766355e-06, |
|
"loss": 4.0962, |
|
"step": 1508 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 6.518691588785047e-06, |
|
"loss": 4.1213, |
|
"step": 1509 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 6.495327102803738e-06, |
|
"loss": 4.0894, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 6.47196261682243e-06, |
|
"loss": 3.9209, |
|
"step": 1511 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 6.448598130841122e-06, |
|
"loss": 4.2152, |
|
"step": 1512 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 6.425233644859813e-06, |
|
"loss": 3.8835, |
|
"step": 1513 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 6.401869158878505e-06, |
|
"loss": 4.0394, |
|
"step": 1514 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 6.378504672897196e-06, |
|
"loss": 4.0479, |
|
"step": 1515 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 6.355140186915888e-06, |
|
"loss": 4.4315, |
|
"step": 1516 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 6.331775700934579e-06, |
|
"loss": 4.0129, |
|
"step": 1517 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 6.308411214953271e-06, |
|
"loss": 4.2214, |
|
"step": 1518 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 6.285046728971963e-06, |
|
"loss": 4.3242, |
|
"step": 1519 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 6.2616822429906544e-06, |
|
"loss": 4.0849, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 6.238317757009346e-06, |
|
"loss": 4.0637, |
|
"step": 1521 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 6.214953271028037e-06, |
|
"loss": 3.9984, |
|
"step": 1522 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 6.191588785046729e-06, |
|
"loss": 4.3311, |
|
"step": 1523 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 6.16822429906542e-06, |
|
"loss": 3.9591, |
|
"step": 1524 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 6.144859813084112e-06, |
|
"loss": 4.0905, |
|
"step": 1525 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 6.121495327102804e-06, |
|
"loss": 3.9547, |
|
"step": 1526 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 6.0981308411214955e-06, |
|
"loss": 4.0565, |
|
"step": 1527 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 6.074766355140187e-06, |
|
"loss": 4.1071, |
|
"step": 1528 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 6.051401869158879e-06, |
|
"loss": 3.6556, |
|
"step": 1529 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 6.02803738317757e-06, |
|
"loss": 3.9483, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 6.0046728971962615e-06, |
|
"loss": 3.9194, |
|
"step": 1531 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 5.981308411214953e-06, |
|
"loss": 3.8927, |
|
"step": 1532 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 5.957943925233645e-06, |
|
"loss": 4.0501, |
|
"step": 1533 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 5.934579439252337e-06, |
|
"loss": 4.0536, |
|
"step": 1534 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 5.911214953271028e-06, |
|
"loss": 4.0931, |
|
"step": 1535 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 5.88785046728972e-06, |
|
"loss": 3.8412, |
|
"step": 1536 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 5.864485981308411e-06, |
|
"loss": 4.1066, |
|
"step": 1537 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 5.8411214953271025e-06, |
|
"loss": 4.3525, |
|
"step": 1538 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 5.817757009345794e-06, |
|
"loss": 4.0533, |
|
"step": 1539 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 5.794392523364486e-06, |
|
"loss": 4.1529, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 5.771028037383178e-06, |
|
"loss": 4.2804, |
|
"step": 1541 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 5.747663551401869e-06, |
|
"loss": 3.8271, |
|
"step": 1542 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 5.724299065420561e-06, |
|
"loss": 4.2896, |
|
"step": 1543 |
|
}, |
|
    {
      "epoch": 0.87,
      "learning_rate": 5.700934579439252e-06,
      "loss": 4.2828,
      "step": 1544
    },
    {
      "epoch": 0.87,
      "learning_rate": 5.677570093457944e-06,
      "loss": 4.6037,
      "step": 1545
    },
    {
      "epoch": 0.87,
      "learning_rate": 5.654205607476635e-06,
      "loss": 3.9111,
      "step": 1546
    },
    {
      "epoch": 0.87,
      "learning_rate": 5.630841121495327e-06,
      "loss": 4.2947,
      "step": 1547
    },
    {
      "epoch": 0.87,
      "learning_rate": 5.607476635514019e-06,
      "loss": 4.2559,
      "step": 1548
    },
    {
      "epoch": 0.87,
      "learning_rate": 5.58411214953271e-06,
      "loss": 4.4756,
      "step": 1549
    },
    {
      "epoch": 0.87,
      "learning_rate": 5.560747663551402e-06,
      "loss": 3.9216,
      "step": 1550
    },
    {
      "epoch": 0.87,
      "learning_rate": 5.537383177570093e-06,
      "loss": 4.3611,
      "step": 1551
    },
    {
      "epoch": 0.87,
      "learning_rate": 5.514018691588785e-06,
      "loss": 4.1302,
      "step": 1552
    },
    {
      "epoch": 0.87,
      "learning_rate": 5.490654205607476e-06,
      "loss": 4.0421,
      "step": 1553
    },
    {
      "epoch": 0.87,
      "learning_rate": 5.467289719626168e-06,
      "loss": 3.9994,
      "step": 1554
    },
    {
      "epoch": 0.87,
      "learning_rate": 5.44392523364486e-06,
      "loss": 4.3427,
      "step": 1555
    },
    {
      "epoch": 0.87,
      "learning_rate": 5.4205607476635515e-06,
      "loss": 4.5006,
      "step": 1556
    },
    {
      "epoch": 0.87,
      "learning_rate": 5.397196261682243e-06,
      "loss": 4.18,
      "step": 1557
    },
    {
      "epoch": 0.87,
      "learning_rate": 5.373831775700935e-06,
      "loss": 4.2818,
      "step": 1558
    },
    {
      "epoch": 0.87,
      "learning_rate": 5.350467289719626e-06,
      "loss": 3.9262,
      "step": 1559
    },
    {
      "epoch": 0.87,
      "learning_rate": 5.3271028037383174e-06,
      "loss": 4.2223,
      "step": 1560
    },
    {
      "epoch": 0.88,
      "learning_rate": 5.303738317757009e-06,
      "loss": 4.1897,
      "step": 1561
    },
    {
      "epoch": 0.88,
      "learning_rate": 5.280373831775701e-06,
      "loss": 4.1049,
      "step": 1562
    },
    {
      "epoch": 0.88,
      "learning_rate": 5.2570093457943925e-06,
      "loss": 4.2849,
      "step": 1563
    },
    {
      "epoch": 0.88,
      "learning_rate": 5.233644859813084e-06,
      "loss": 4.0656,
      "step": 1564
    },
    {
      "epoch": 0.88,
      "learning_rate": 5.210280373831776e-06,
      "loss": 4.1985,
      "step": 1565
    },
    {
      "epoch": 0.88,
      "learning_rate": 5.186915887850467e-06,
      "loss": 3.958,
      "step": 1566
    },
    {
      "epoch": 0.88,
      "learning_rate": 5.1635514018691585e-06,
      "loss": 3.8778,
      "step": 1567
    },
    {
      "epoch": 0.88,
      "learning_rate": 5.14018691588785e-06,
      "loss": 4.1992,
      "step": 1568
    },
    {
      "epoch": 0.88,
      "learning_rate": 5.116822429906542e-06,
      "loss": 4.1769,
      "step": 1569
    },
    {
      "epoch": 0.88,
      "learning_rate": 5.093457943925234e-06,
      "loss": 3.9919,
      "step": 1570
    },
    {
      "epoch": 0.88,
      "learning_rate": 5.070093457943925e-06,
      "loss": 4.0812,
      "step": 1571
    },
    {
      "epoch": 0.88,
      "learning_rate": 5.046728971962617e-06,
      "loss": 4.3101,
      "step": 1572
    },
    {
      "epoch": 0.88,
      "learning_rate": 5.023364485981308e-06,
      "loss": 4.2435,
      "step": 1573
    },
    {
      "epoch": 0.88,
      "learning_rate": 4.9999999999999996e-06,
      "loss": 3.9456,
      "step": 1574
    },
    {
      "epoch": 0.88,
      "learning_rate": 4.976635514018691e-06,
      "loss": 3.8921,
      "step": 1575
    },
    {
      "epoch": 0.88,
      "learning_rate": 4.953271028037383e-06,
      "loss": 4.1622,
      "step": 1576
    },
    {
      "epoch": 0.88,
      "learning_rate": 4.929906542056075e-06,
      "loss": 3.9452,
      "step": 1577
    },
    {
      "epoch": 0.88,
      "learning_rate": 4.906542056074766e-06,
      "loss": 4.215,
      "step": 1578
    },
    {
      "epoch": 0.89,
      "learning_rate": 4.883177570093458e-06,
      "loss": 4.184,
      "step": 1579
    },
    {
      "epoch": 0.89,
      "learning_rate": 4.85981308411215e-06,
      "loss": 4.3362,
      "step": 1580
    },
    {
      "epoch": 0.89,
      "learning_rate": 4.836448598130841e-06,
      "loss": 4.2123,
      "step": 1581
    },
    {
      "epoch": 0.89,
      "learning_rate": 4.813084112149532e-06,
      "loss": 4.2072,
      "step": 1582
    },
    {
      "epoch": 0.89,
      "learning_rate": 4.789719626168224e-06,
      "loss": 4.107,
      "step": 1583
    },
    {
      "epoch": 0.89,
      "learning_rate": 4.766355140186916e-06,
      "loss": 4.0697,
      "step": 1584
    },
    {
      "epoch": 0.89,
      "learning_rate": 4.7429906542056074e-06,
      "loss": 3.9829,
      "step": 1585
    },
    {
      "epoch": 0.89,
      "learning_rate": 4.719626168224299e-06,
      "loss": 4.0668,
      "step": 1586
    },
    {
      "epoch": 0.89,
      "learning_rate": 4.696261682242991e-06,
      "loss": 4.008,
      "step": 1587
    },
    {
      "epoch": 0.89,
      "learning_rate": 4.672897196261682e-06,
      "loss": 4.2659,
      "step": 1588
    },
    {
      "epoch": 0.89,
      "learning_rate": 4.649532710280373e-06,
      "loss": 4.0806,
      "step": 1589
    },
    {
      "epoch": 0.89,
      "learning_rate": 4.626168224299065e-06,
      "loss": 3.962,
      "step": 1590
    },
    {
      "epoch": 0.89,
      "learning_rate": 4.602803738317757e-06,
      "loss": 4.0991,
      "step": 1591
    },
    {
      "epoch": 0.89,
      "learning_rate": 4.5794392523364485e-06,
      "loss": 3.9898,
      "step": 1592
    },
    {
      "epoch": 0.89,
      "learning_rate": 4.55607476635514e-06,
      "loss": 3.9846,
      "step": 1593
    },
    {
      "epoch": 0.89,
      "learning_rate": 4.532710280373832e-06,
      "loss": 4.1954,
      "step": 1594
    },
    {
      "epoch": 0.89,
      "learning_rate": 4.509345794392523e-06,
      "loss": 4.0624,
      "step": 1595
    },
    {
      "epoch": 0.89,
      "learning_rate": 4.4859813084112145e-06,
      "loss": 4.3319,
      "step": 1596
    },
    {
      "epoch": 0.9,
      "learning_rate": 4.462616822429906e-06,
      "loss": 4.2242,
      "step": 1597
    },
    {
      "epoch": 0.9,
      "learning_rate": 4.439252336448598e-06,
      "loss": 4.4162,
      "step": 1598
    },
    {
      "epoch": 0.9,
      "learning_rate": 4.4158878504672896e-06,
      "loss": 5.0513,
      "step": 1599
    },
    {
      "epoch": 0.9,
      "learning_rate": 4.392523364485981e-06,
      "loss": 4.6567,
      "step": 1600
    },
    {
      "epoch": 0.9,
      "learning_rate": 4.369158878504673e-06,
      "loss": 4.0805,
      "step": 1601
    },
    {
      "epoch": 0.9,
      "learning_rate": 4.345794392523365e-06,
      "loss": 3.9221,
      "step": 1602
    },
    {
      "epoch": 0.9,
      "learning_rate": 4.3224299065420555e-06,
      "loss": 4.0248,
      "step": 1603
    },
    {
      "epoch": 0.9,
      "learning_rate": 4.299065420560747e-06,
      "loss": 4.1355,
      "step": 1604
    },
    {
      "epoch": 0.9,
      "learning_rate": 4.275700934579439e-06,
      "loss": 3.9084,
      "step": 1605
    },
    {
      "epoch": 0.9,
      "learning_rate": 4.252336448598131e-06,
      "loss": 4.1655,
      "step": 1606
    },
    {
      "epoch": 0.9,
      "learning_rate": 4.228971962616822e-06,
      "loss": 4.0852,
      "step": 1607
    },
    {
      "epoch": 0.9,
      "learning_rate": 4.205607476635514e-06,
      "loss": 3.9907,
      "step": 1608
    },
    {
      "epoch": 0.9,
      "learning_rate": 4.182242990654206e-06,
      "loss": 4.2936,
      "step": 1609
    },
    {
      "epoch": 0.9,
      "learning_rate": 4.158878504672897e-06,
      "loss": 3.945,
      "step": 1610
    },
    {
      "epoch": 0.9,
      "learning_rate": 4.135514018691588e-06,
      "loss": 4.2846,
      "step": 1611
    },
    {
      "epoch": 0.9,
      "learning_rate": 4.11214953271028e-06,
      "loss": 3.9944,
      "step": 1612
    },
    {
      "epoch": 0.9,
      "learning_rate": 4.088785046728972e-06,
      "loss": 4.1348,
      "step": 1613
    },
    {
      "epoch": 0.9,
      "learning_rate": 4.065420560747663e-06,
      "loss": 3.8733,
      "step": 1614
    },
    {
      "epoch": 0.91,
      "learning_rate": 4.042056074766355e-06,
      "loss": 4.1246,
      "step": 1615
    },
    {
      "epoch": 0.91,
      "learning_rate": 4.018691588785047e-06,
      "loss": 4.0774,
      "step": 1616
    },
    {
      "epoch": 0.91,
      "learning_rate": 3.995327102803738e-06,
      "loss": 4.0991,
      "step": 1617
    },
    {
      "epoch": 0.91,
      "learning_rate": 3.971962616822429e-06,
      "loss": 4.0718,
      "step": 1618
    },
    {
      "epoch": 0.91,
      "learning_rate": 3.948598130841121e-06,
      "loss": 4.1337,
      "step": 1619
    },
    {
      "epoch": 0.91,
      "learning_rate": 3.925233644859813e-06,
      "loss": 4.1297,
      "step": 1620
    },
    {
      "epoch": 0.91,
      "learning_rate": 3.9018691588785045e-06,
      "loss": 4.2224,
      "step": 1621
    },
    {
      "epoch": 0.91,
      "learning_rate": 3.878504672897196e-06,
      "loss": 3.8608,
      "step": 1622
    },
    {
      "epoch": 0.91,
      "learning_rate": 3.855140186915888e-06,
      "loss": 3.9987,
      "step": 1623
    },
    {
      "epoch": 0.91,
      "learning_rate": 3.831775700934579e-06,
      "loss": 3.9592,
      "step": 1624
    },
    {
      "epoch": 0.91,
      "learning_rate": 3.808411214953271e-06,
      "loss": 4.0687,
      "step": 1625
    },
    {
      "epoch": 0.91,
      "learning_rate": 3.785046728971962e-06,
      "loss": 4.2544,
      "step": 1626
    },
    {
      "epoch": 0.91,
      "learning_rate": 3.761682242990654e-06,
      "loss": 4.1496,
      "step": 1627
    },
    {
      "epoch": 0.91,
      "learning_rate": 3.738317757009346e-06,
      "loss": 3.999,
      "step": 1628
    },
    {
      "epoch": 0.91,
      "learning_rate": 3.7149532710280376e-06,
      "loss": 4.2071,
      "step": 1629
    },
    {
      "epoch": 0.91,
      "learning_rate": 3.691588785046729e-06,
      "loss": 3.9809,
      "step": 1630
    },
    {
      "epoch": 0.91,
      "learning_rate": 3.6682242990654206e-06,
      "loss": 4.1445,
      "step": 1631
    },
    {
      "epoch": 0.91,
      "learning_rate": 3.6448598130841123e-06,
      "loss": 4.1166,
      "step": 1632
    },
    {
      "epoch": 0.92,
      "learning_rate": 3.6214953271028036e-06,
      "loss": 4.1841,
      "step": 1633
    },
    {
      "epoch": 0.92,
      "learning_rate": 3.5981308411214953e-06,
      "loss": 4.0224,
      "step": 1634
    },
    {
      "epoch": 0.92,
      "learning_rate": 3.574766355140187e-06,
      "loss": 4.1591,
      "step": 1635
    },
    {
      "epoch": 0.92,
      "learning_rate": 3.5514018691588787e-06,
      "loss": 3.991,
      "step": 1636
    },
    {
      "epoch": 0.92,
      "learning_rate": 3.52803738317757e-06,
      "loss": 4.0546,
      "step": 1637
    },
    {
      "epoch": 0.92,
      "learning_rate": 3.5046728971962617e-06,
      "loss": 4.2639,
      "step": 1638
    },
    {
      "epoch": 0.92,
      "learning_rate": 3.4813084112149534e-06,
      "loss": 3.9807,
      "step": 1639
    },
    {
      "epoch": 0.92,
      "learning_rate": 3.457943925233645e-06,
      "loss": 4.1094,
      "step": 1640
    },
    {
      "epoch": 0.92,
      "learning_rate": 3.4345794392523364e-06,
      "loss": 4.2343,
      "step": 1641
    },
    {
      "epoch": 0.92,
      "learning_rate": 3.411214953271028e-06,
      "loss": 4.1518,
      "step": 1642
    },
    {
      "epoch": 0.92,
      "learning_rate": 3.3878504672897198e-06,
      "loss": 4.064,
      "step": 1643
    },
    {
      "epoch": 0.92,
      "learning_rate": 3.364485981308411e-06,
      "loss": 4.162,
      "step": 1644
    },
    {
      "epoch": 0.92,
      "learning_rate": 3.3411214953271028e-06,
      "loss": 4.3617,
      "step": 1645
    },
    {
      "epoch": 0.92,
      "learning_rate": 3.3177570093457945e-06,
      "loss": 4.5435,
      "step": 1646
    },
    {
      "epoch": 0.92,
      "learning_rate": 3.294392523364486e-06,
      "loss": 4.0725,
      "step": 1647
    },
    {
      "epoch": 0.92,
      "learning_rate": 3.2710280373831774e-06,
      "loss": 4.7151,
      "step": 1648
    },
    {
      "epoch": 0.92,
      "learning_rate": 3.247663551401869e-06,
      "loss": 4.4425,
      "step": 1649
    },
    {
      "epoch": 0.92,
      "learning_rate": 3.224299065420561e-06,
      "loss": 4.0702,
      "step": 1650
    },
    {
      "epoch": 0.93,
      "learning_rate": 3.2009345794392525e-06,
      "loss": 4.2053,
      "step": 1651
    },
    {
      "epoch": 0.93,
      "learning_rate": 3.177570093457944e-06,
      "loss": 4.0842,
      "step": 1652
    },
    {
      "epoch": 0.93,
      "learning_rate": 3.1542056074766355e-06,
      "loss": 4.1126,
      "step": 1653
    },
    {
      "epoch": 0.93,
      "learning_rate": 3.1308411214953272e-06,
      "loss": 4.0547,
      "step": 1654
    },
    {
      "epoch": 0.93,
      "learning_rate": 3.1074766355140185e-06,
      "loss": 3.9678,
      "step": 1655
    },
    {
      "epoch": 0.93,
      "learning_rate": 3.08411214953271e-06,
      "loss": 4.0489,
      "step": 1656
    },
    {
      "epoch": 0.93,
      "learning_rate": 3.060747663551402e-06,
      "loss": 4.1916,
      "step": 1657
    },
    {
      "epoch": 0.93,
      "learning_rate": 3.0373831775700936e-06,
      "loss": 4.1362,
      "step": 1658
    },
    {
      "epoch": 0.93,
      "learning_rate": 3.014018691588785e-06,
      "loss": 3.9667,
      "step": 1659
    },
    {
      "epoch": 0.93,
      "learning_rate": 2.9906542056074766e-06,
      "loss": 4.108,
      "step": 1660
    },
    {
      "epoch": 0.93,
      "learning_rate": 2.9672897196261683e-06,
      "loss": 4.2037,
      "step": 1661
    },
    {
      "epoch": 0.93,
      "learning_rate": 2.94392523364486e-06,
      "loss": 4.1952,
      "step": 1662
    },
    {
      "epoch": 0.93,
      "learning_rate": 2.9205607476635513e-06,
      "loss": 3.8044,
      "step": 1663
    },
    {
      "epoch": 0.93,
      "learning_rate": 2.897196261682243e-06,
      "loss": 4.1709,
      "step": 1664
    },
    {
      "epoch": 0.93,
      "learning_rate": 2.8738317757009347e-06,
      "loss": 3.9257,
      "step": 1665
    },
    {
      "epoch": 0.93,
      "learning_rate": 2.850467289719626e-06,
      "loss": 4.0337,
      "step": 1666
    },
    {
      "epoch": 0.93,
      "learning_rate": 2.8271028037383177e-06,
      "loss": 4.0067,
      "step": 1667
    },
    {
      "epoch": 0.93,
      "learning_rate": 2.8037383177570094e-06,
      "loss": 4.0033,
      "step": 1668
    },
    {
      "epoch": 0.94,
      "learning_rate": 2.780373831775701e-06,
      "loss": 4.0537,
      "step": 1669
    },
    {
      "epoch": 0.94,
      "learning_rate": 2.7570093457943923e-06,
      "loss": 4.0012,
      "step": 1670
    },
    {
      "epoch": 0.94,
      "learning_rate": 2.733644859813084e-06,
      "loss": 4.0172,
      "step": 1671
    },
    {
      "epoch": 0.94,
      "learning_rate": 2.7102803738317757e-06,
      "loss": 4.2428,
      "step": 1672
    },
    {
      "epoch": 0.94,
      "learning_rate": 2.6869158878504674e-06,
      "loss": 3.9845,
      "step": 1673
    },
    {
      "epoch": 0.94,
      "learning_rate": 2.6635514018691587e-06,
      "loss": 4.1325,
      "step": 1674
    },
    {
      "epoch": 0.94,
      "learning_rate": 2.6401869158878504e-06,
      "loss": 4.3764,
      "step": 1675
    },
    {
      "epoch": 0.94,
      "learning_rate": 2.616822429906542e-06,
      "loss": 4.0186,
      "step": 1676
    },
    {
      "epoch": 0.94,
      "learning_rate": 2.5934579439252334e-06,
      "loss": 3.848,
      "step": 1677
    },
    {
      "epoch": 0.94,
      "learning_rate": 2.570093457943925e-06,
      "loss": 4.091,
      "step": 1678
    },
    {
      "epoch": 0.94,
      "learning_rate": 2.546728971962617e-06,
      "loss": 4.246,
      "step": 1679
    },
    {
      "epoch": 0.94,
      "learning_rate": 2.5233644859813085e-06,
      "loss": 4.2399,
      "step": 1680
    },
    {
      "epoch": 0.94,
      "learning_rate": 2.4999999999999998e-06,
      "loss": 3.8142,
      "step": 1681
    },
    {
      "epoch": 0.94,
      "learning_rate": 2.4766355140186915e-06,
      "loss": 3.8551,
      "step": 1682
    },
    {
      "epoch": 0.94,
      "learning_rate": 2.453271028037383e-06,
      "loss": 4.3228,
      "step": 1683
    },
    {
      "epoch": 0.94,
      "learning_rate": 2.429906542056075e-06,
      "loss": 3.833,
      "step": 1684
    },
    {
      "epoch": 0.94,
      "learning_rate": 2.406542056074766e-06,
      "loss": 4.1549,
      "step": 1685
    },
    {
      "epoch": 0.95,
      "learning_rate": 2.383177570093458e-06,
      "loss": 4.1394,
      "step": 1686
    },
    {
      "epoch": 0.95,
      "learning_rate": 2.3598130841121496e-06,
      "loss": 3.9934,
      "step": 1687
    },
    {
      "epoch": 0.95,
      "learning_rate": 2.336448598130841e-06,
      "loss": 4.2384,
      "step": 1688
    },
    {
      "epoch": 0.95,
      "learning_rate": 2.3130841121495325e-06,
      "loss": 4.1392,
      "step": 1689
    },
    {
      "epoch": 0.95,
      "learning_rate": 2.2897196261682242e-06,
      "loss": 3.9704,
      "step": 1690
    },
    {
      "epoch": 0.95,
      "learning_rate": 2.266355140186916e-06,
      "loss": 4.3617,
      "step": 1691
    },
    {
      "epoch": 0.95,
      "learning_rate": 2.2429906542056072e-06,
      "loss": 4.1711,
      "step": 1692
    },
    {
      "epoch": 0.95,
      "learning_rate": 2.219626168224299e-06,
      "loss": 4.417,
      "step": 1693
    },
    {
      "epoch": 0.95,
      "learning_rate": 2.1962616822429906e-06,
      "loss": 4.4343,
      "step": 1694
    },
    {
      "epoch": 0.95,
      "learning_rate": 2.1728971962616823e-06,
      "loss": 4.4066,
      "step": 1695
    },
    {
      "epoch": 0.95,
      "learning_rate": 2.1495327102803736e-06,
      "loss": 4.8174,
      "step": 1696
    },
    {
      "epoch": 0.95,
      "learning_rate": 2.1261682242990653e-06,
      "loss": 4.2197,
      "step": 1697
    },
    {
      "epoch": 0.95,
      "learning_rate": 2.102803738317757e-06,
      "loss": 4.7544,
      "step": 1698
    },
    {
      "epoch": 0.95,
      "learning_rate": 2.0794392523364483e-06,
      "loss": 4.5876,
      "step": 1699
    },
    {
      "epoch": 0.95,
      "learning_rate": 2.05607476635514e-06,
      "loss": 4.5133,
      "step": 1700
    },
    {
      "epoch": 0.95,
      "learning_rate": 2.0327102803738317e-06,
      "loss": 4.3535,
      "step": 1701
    },
    {
      "epoch": 0.95,
      "learning_rate": 2.0093457943925234e-06,
      "loss": 3.9685,
      "step": 1702
    },
    {
      "epoch": 0.95,
      "learning_rate": 1.9859813084112147e-06,
      "loss": 3.9364,
      "step": 1703
    },
    {
      "epoch": 0.96,
      "learning_rate": 1.9626168224299064e-06,
      "loss": 3.9482,
      "step": 1704
    },
    {
      "epoch": 0.96,
      "learning_rate": 1.939252336448598e-06,
      "loss": 4.0321,
      "step": 1705
    },
    {
      "epoch": 0.96,
      "learning_rate": 1.9158878504672894e-06,
      "loss": 3.8892,
      "step": 1706
    },
    {
      "epoch": 0.96,
      "learning_rate": 1.892523364485981e-06,
      "loss": 3.9527,
      "step": 1707
    },
    {
      "epoch": 0.96,
      "learning_rate": 1.869158878504673e-06,
      "loss": 4.1113,
      "step": 1708
    },
    {
      "epoch": 0.96,
      "learning_rate": 1.8457943925233645e-06,
      "loss": 4.2602,
      "step": 1709
    },
    {
      "epoch": 0.96,
      "learning_rate": 1.8224299065420562e-06,
      "loss": 4.2309,
      "step": 1710
    },
    {
      "epoch": 0.96,
      "learning_rate": 1.7990654205607477e-06,
      "loss": 3.9337,
      "step": 1711
    },
    {
      "epoch": 0.96,
      "learning_rate": 1.7757009345794394e-06,
      "loss": 4.1777,
      "step": 1712
    },
    {
      "epoch": 0.96,
      "learning_rate": 1.7523364485981308e-06,
      "loss": 4.386,
      "step": 1713
    },
    {
      "epoch": 0.96,
      "learning_rate": 1.7289719626168225e-06,
      "loss": 4.0444,
      "step": 1714
    },
    {
      "epoch": 0.96,
      "learning_rate": 1.705607476635514e-06,
      "loss": 4.0592,
      "step": 1715
    },
    {
      "epoch": 0.96,
      "learning_rate": 1.6822429906542055e-06,
      "loss": 4.1411,
      "step": 1716
    },
    {
      "epoch": 0.96,
      "learning_rate": 1.6588785046728972e-06,
      "loss": 4.5118,
      "step": 1717
    },
    {
      "epoch": 0.96,
      "learning_rate": 1.6355140186915887e-06,
      "loss": 4.3736,
      "step": 1718
    },
    {
      "epoch": 0.96,
      "learning_rate": 1.6121495327102804e-06,
      "loss": 3.9026,
      "step": 1719
    },
    {
      "epoch": 0.96,
      "learning_rate": 1.588785046728972e-06,
      "loss": 3.9982,
      "step": 1720
    },
    {
      "epoch": 0.96,
      "learning_rate": 1.5654205607476636e-06,
      "loss": 4.1393,
      "step": 1721
    },
    {
      "epoch": 0.97,
      "learning_rate": 1.542056074766355e-06,
      "loss": 4.1242,
      "step": 1722
    },
    {
      "epoch": 0.97,
      "learning_rate": 1.5186915887850468e-06,
      "loss": 4.261,
      "step": 1723
    },
    {
      "epoch": 0.97,
      "learning_rate": 1.4953271028037383e-06,
      "loss": 4.1525,
      "step": 1724
    },
    {
      "epoch": 0.97,
      "learning_rate": 1.47196261682243e-06,
      "loss": 4.0871,
      "step": 1725
    },
    {
      "epoch": 0.97,
      "learning_rate": 1.4485981308411215e-06,
      "loss": 4.0289,
      "step": 1726
    },
    {
      "epoch": 0.97,
      "learning_rate": 1.425233644859813e-06,
      "loss": 4.2446,
      "step": 1727
    },
    {
      "epoch": 0.97,
      "learning_rate": 1.4018691588785047e-06,
      "loss": 4.2803,
      "step": 1728
    },
    {
      "epoch": 0.97,
      "learning_rate": 1.3785046728971962e-06,
      "loss": 4.3613,
      "step": 1729
    },
    {
      "epoch": 0.97,
      "learning_rate": 1.3551401869158879e-06,
      "loss": 4.0993,
      "step": 1730
    },
    {
      "epoch": 0.97,
      "learning_rate": 1.3317757009345794e-06,
      "loss": 3.8335,
      "step": 1731
    },
    {
      "epoch": 0.97,
      "learning_rate": 1.308411214953271e-06,
      "loss": 4.0755,
      "step": 1732
    },
    {
      "epoch": 0.97,
      "learning_rate": 1.2850467289719625e-06,
      "loss": 4.0524,
      "step": 1733
    },
    {
      "epoch": 0.97,
      "learning_rate": 1.2616822429906543e-06,
      "loss": 4.2421,
      "step": 1734
    },
    {
      "epoch": 0.97,
      "learning_rate": 1.2383177570093457e-06,
      "loss": 4.5127,
      "step": 1735
    },
    {
      "epoch": 0.97,
      "learning_rate": 1.2149532710280374e-06,
      "loss": 4.5618,
      "step": 1736
    },
    {
      "epoch": 0.97,
      "learning_rate": 1.191588785046729e-06,
      "loss": 3.8363,
      "step": 1737
    },
    {
      "epoch": 0.97,
      "learning_rate": 1.1682242990654204e-06,
      "loss": 4.1024,
      "step": 1738
    },
    {
      "epoch": 0.97,
      "learning_rate": 1.1448598130841121e-06,
      "loss": 4.069,
      "step": 1739
    },
    {
      "epoch": 0.98,
      "learning_rate": 1.1214953271028036e-06,
      "loss": 3.9845,
      "step": 1740
    },
    {
      "epoch": 0.98,
      "learning_rate": 1.0981308411214953e-06,
      "loss": 4.1161,
      "step": 1741
    },
    {
      "epoch": 0.98,
      "learning_rate": 1.0747663551401868e-06,
      "loss": 4.1233,
      "step": 1742
    },
    {
      "epoch": 0.98,
      "learning_rate": 1.0514018691588785e-06,
      "loss": 3.9332,
      "step": 1743
    },
    {
      "epoch": 0.98,
      "learning_rate": 1.02803738317757e-06,
      "loss": 3.9231,
      "step": 1744
    },
    {
      "epoch": 0.98,
      "learning_rate": 1.0046728971962617e-06,
      "loss": 4.1985,
      "step": 1745
    },
    {
      "epoch": 0.98,
      "learning_rate": 9.813084112149532e-07,
      "loss": 4.3305,
      "step": 1746
    },
    {
      "epoch": 0.98,
      "learning_rate": 9.579439252336447e-07,
      "loss": 4.2018,
      "step": 1747
    },
    {
      "epoch": 0.98,
      "learning_rate": 9.345794392523365e-07,
      "loss": 4.1221,
      "step": 1748
    },
    {
      "epoch": 0.98,
      "learning_rate": 9.112149532710281e-07,
      "loss": 4.2828,
      "step": 1749
    },
    {
      "epoch": 0.98,
      "learning_rate": 8.878504672897197e-07,
      "loss": 4.0967,
      "step": 1750
    },
    {
      "epoch": 0.98,
      "learning_rate": 8.644859813084113e-07,
      "loss": 3.8762,
      "step": 1751
    },
    {
      "epoch": 0.98,
      "learning_rate": 8.411214953271028e-07,
      "loss": 4.087,
      "step": 1752
    },
    {
      "epoch": 0.98,
      "learning_rate": 8.177570093457944e-07,
      "loss": 4.2492,
      "step": 1753
    },
    {
      "epoch": 0.98,
      "learning_rate": 7.94392523364486e-07,
      "loss": 4.0876,
      "step": 1754
    },
    {
      "epoch": 0.98,
      "learning_rate": 7.710280373831776e-07,
      "loss": 4.272,
      "step": 1755
    },
    {
      "epoch": 0.98,
      "learning_rate": 7.476635514018691e-07,
      "loss": 3.8828,
      "step": 1756
    },
    {
      "epoch": 0.98,
      "learning_rate": 7.242990654205607e-07,
      "loss": 3.9841,
      "step": 1757
    },
    {
      "epoch": 0.99,
      "learning_rate": 7.009345794392523e-07,
      "loss": 4.0423,
      "step": 1758
    },
    {
      "epoch": 0.99,
      "learning_rate": 6.775700934579439e-07,
      "loss": 4.0956,
      "step": 1759
    },
    {
      "epoch": 0.99,
      "learning_rate": 6.542056074766355e-07,
      "loss": 3.964,
      "step": 1760
    },
    {
      "epoch": 0.99,
      "learning_rate": 6.308411214953271e-07,
      "loss": 4.2357,
      "step": 1761
    },
    {
      "epoch": 0.99,
      "learning_rate": 6.074766355140187e-07,
      "loss": 3.8661,
      "step": 1762
    },
    {
      "epoch": 0.99,
      "learning_rate": 5.841121495327102e-07,
      "loss": 4.0183,
      "step": 1763
    },
    {
      "epoch": 0.99,
      "learning_rate": 5.607476635514018e-07,
      "loss": 4.233,
      "step": 1764
    },
    {
      "epoch": 0.99,
      "learning_rate": 5.373831775700934e-07,
      "loss": 3.8625,
      "step": 1765
    },
    {
      "epoch": 0.99,
      "learning_rate": 5.14018691588785e-07,
      "loss": 4.0392,
      "step": 1766
    },
    {
      "epoch": 0.99,
      "learning_rate": 4.906542056074766e-07,
      "loss": 3.9741,
      "step": 1767
    },
    {
      "epoch": 0.99,
      "learning_rate": 4.6728971962616824e-07,
      "loss": 4.0145,
      "step": 1768
    },
    {
      "epoch": 0.99,
      "learning_rate": 4.4392523364485984e-07,
      "loss": 4.0376,
      "step": 1769
    },
    {
      "epoch": 0.99,
      "learning_rate": 4.205607476635514e-07,
      "loss": 3.997,
      "step": 1770
    },
    {
      "epoch": 0.99,
      "learning_rate": 3.97196261682243e-07,
      "loss": 4.1271,
      "step": 1771
    },
    {
      "epoch": 0.99,
      "learning_rate": 3.7383177570093457e-07,
      "loss": 4.0518,
      "step": 1772
    },
    {
      "epoch": 0.99,
      "learning_rate": 3.5046728971962617e-07,
      "loss": 4.0569,
      "step": 1773
    },
    {
      "epoch": 0.99,
      "learning_rate": 3.2710280373831776e-07,
      "loss": 4.1645,
      "step": 1774
    },
    {
      "epoch": 0.99,
      "learning_rate": 3.0373831775700936e-07,
      "loss": 4.0194,
      "step": 1775
    },
    {
      "epoch": 1.0,
      "learning_rate": 2.803738317757009e-07,
      "loss": 4.1338,
      "step": 1776
    },
    {
      "epoch": 1.0,
      "learning_rate": 2.570093457943925e-07,
      "loss": 4.0394,
      "step": 1777
    },
    {
      "epoch": 1.0,
      "learning_rate": 2.3364485981308412e-07,
      "loss": 4.2285,
      "step": 1778
    },
    {
      "epoch": 1.0,
      "learning_rate": 2.102803738317757e-07,
      "loss": 4.3856,
      "step": 1779
    },
    {
      "epoch": 1.0,
      "learning_rate": 1.8691588785046729e-07,
      "loss": 4.042,
      "step": 1780
    },
    {
      "epoch": 1.0,
      "learning_rate": 1.6355140186915888e-07,
      "loss": 4.397,
      "step": 1781
    },
    {
      "epoch": 1.0,
      "learning_rate": 1.4018691588785045e-07,
      "loss": 4.0514,
      "step": 1782
    },
    {
      "epoch": 1.0,
      "learning_rate": 1.1682242990654206e-07,
      "loss": 4.4125,
      "step": 1783
    },
    {
      "epoch": 1.0,
      "learning_rate": 9.345794392523364e-08,
      "loss": 3.5579,
      "step": 1784
    },
    {
      "epoch": 1.0,
      "step": 1784,
      "total_flos": 0.0,
      "train_loss": 4.24715919291492,
      "train_runtime": 8514.9113,
      "train_samples_per_second": 3.352,
      "train_steps_per_second": 0.21
    }
  ],
  "max_steps": 1784,
  "num_train_epochs": 1,
  "total_flos": 0.0,
  "trial_name": null,
  "trial_params": null
}