{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 1.0, |
|
"global_step": 892, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 0.0, |
|
"loss": 4.8006, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 2.0000000000000002e-07, |
|
"loss": 5.0552, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.0000000000000003e-07, |
|
"loss": 4.8954, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 6.000000000000001e-07, |
|
"loss": 4.8986, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 8.000000000000001e-07, |
|
"loss": 4.7319, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 1.0000000000000002e-06, |
|
"loss": 4.8418, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 1.2000000000000002e-06, |
|
"loss": 4.8198, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 1.4000000000000001e-06, |
|
"loss": 4.7216, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 1.4000000000000001e-06, |
|
"loss": 4.8858, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 1.6000000000000001e-06, |
|
"loss": 4.7542, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 1.8e-06, |
|
"loss": 4.8323, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 2.0000000000000003e-06, |
|
"loss": 4.7913, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 2.2e-06, |
|
"loss": 4.757, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 2.4000000000000003e-06, |
|
"loss": 4.7019, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 2.6e-06, |
|
"loss": 4.6262, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 2.8000000000000003e-06, |
|
"loss": 4.7446, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3e-06, |
|
"loss": 4.7368, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.2000000000000003e-06, |
|
"loss": 4.8308, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.4000000000000005e-06, |
|
"loss": 4.5315, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.6e-06, |
|
"loss": 4.6872, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.8e-06, |
|
"loss": 4.5977, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.000000000000001e-06, |
|
"loss": 4.5822, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 4.2000000000000004e-06, |
|
"loss": 4.5846, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 4.4e-06, |
|
"loss": 4.7091, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 4.6e-06, |
|
"loss": 4.5738, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 4.800000000000001e-06, |
|
"loss": 4.7385, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 5e-06, |
|
"loss": 4.454, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 5.2e-06, |
|
"loss": 4.3818, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 5.4e-06, |
|
"loss": 4.3869, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 5.600000000000001e-06, |
|
"loss": 4.5749, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 5.8e-06, |
|
"loss": 4.5545, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 6e-06, |
|
"loss": 4.3159, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 6.2e-06, |
|
"loss": 4.4299, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 6.4000000000000006e-06, |
|
"loss": 4.2923, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 6.6e-06, |
|
"loss": 4.4871, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 6.800000000000001e-06, |
|
"loss": 4.4389, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 7.000000000000001e-06, |
|
"loss": 4.4767, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 7.2e-06, |
|
"loss": 4.484, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 7.4e-06, |
|
"loss": 4.641, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 7.6e-06, |
|
"loss": 4.4554, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 7.8e-06, |
|
"loss": 4.3018, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 8.000000000000001e-06, |
|
"loss": 4.4761, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 8.200000000000001e-06, |
|
"loss": 4.2683, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 8.400000000000001e-06, |
|
"loss": 4.426, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 8.599999999999999e-06, |
|
"loss": 4.4915, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 8.8e-06, |
|
"loss": 4.6235, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 9e-06, |
|
"loss": 4.4703, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 9.2e-06, |
|
"loss": 4.8672, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 9.2e-06, |
|
"loss": 5.0521, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 9.4e-06, |
|
"loss": 4.6531, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 9.600000000000001e-06, |
|
"loss": 4.2156, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 9.800000000000001e-06, |
|
"loss": 4.348, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 1e-05, |
|
"loss": 4.2307, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 1.02e-05, |
|
"loss": 4.2949, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 1.04e-05, |
|
"loss": 4.28, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 1.06e-05, |
|
"loss": 4.3738, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 1.08e-05, |
|
"loss": 4.344, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.1000000000000001e-05, |
|
"loss": 4.5751, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.1200000000000001e-05, |
|
"loss": 4.3086, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.1400000000000001e-05, |
|
"loss": 4.1412, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.16e-05, |
|
"loss": 4.4096, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.18e-05, |
|
"loss": 4.3565, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.2e-05, |
|
"loss": 4.1857, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.22e-05, |
|
"loss": 4.3458, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.24e-05, |
|
"loss": 4.1284, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.2600000000000001e-05, |
|
"loss": 4.4129, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.2800000000000001e-05, |
|
"loss": 4.1812, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.3000000000000001e-05, |
|
"loss": 4.2234, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.32e-05, |
|
"loss": 4.1651, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.3400000000000002e-05, |
|
"loss": 4.29, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.3600000000000002e-05, |
|
"loss": 4.4585, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.3800000000000002e-05, |
|
"loss": 4.3538, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.4000000000000001e-05, |
|
"loss": 4.281, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.42e-05, |
|
"loss": 4.3208, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.44e-05, |
|
"loss": 4.1282, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.4599999999999999e-05, |
|
"loss": 4.4093, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.48e-05, |
|
"loss": 4.3022, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.5e-05, |
|
"loss": 4.2166, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.52e-05, |
|
"loss": 4.0831, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.54e-05, |
|
"loss": 4.3279, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.56e-05, |
|
"loss": 4.3771, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.58e-05, |
|
"loss": 4.1824, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.6000000000000003e-05, |
|
"loss": 4.3989, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.62e-05, |
|
"loss": 4.3379, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.6400000000000002e-05, |
|
"loss": 4.2745, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.66e-05, |
|
"loss": 4.2694, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.6800000000000002e-05, |
|
"loss": 4.2729, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.7000000000000003e-05, |
|
"loss": 4.2621, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.7199999999999998e-05, |
|
"loss": 4.3116, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.74e-05, |
|
"loss": 4.2443, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.76e-05, |
|
"loss": 4.4649, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.78e-05, |
|
"loss": 4.3735, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.8e-05, |
|
"loss": 4.0979, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.8200000000000002e-05, |
|
"loss": 4.3195, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.84e-05, |
|
"loss": 4.5964, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.86e-05, |
|
"loss": 4.5072, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.88e-05, |
|
"loss": 4.2899, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.9e-05, |
|
"loss": 4.5847, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.9200000000000003e-05, |
|
"loss": 4.3735, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.94e-05, |
|
"loss": 4.3218, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.9600000000000002e-05, |
|
"loss": 4.177, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.9800000000000004e-05, |
|
"loss": 4.209, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 2e-05, |
|
"loss": 4.327, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 2.0200000000000003e-05, |
|
"loss": 4.2536, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 2.04e-05, |
|
"loss": 4.0728, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 2.06e-05, |
|
"loss": 4.2959, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 2.08e-05, |
|
"loss": 4.2988, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 2.1e-05, |
|
"loss": 4.4356, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 2.12e-05, |
|
"loss": 4.0137, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 2.1400000000000002e-05, |
|
"loss": 4.1517, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 2.16e-05, |
|
"loss": 4.3351, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 2.18e-05, |
|
"loss": 4.1899, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 2.2000000000000003e-05, |
|
"loss": 4.185, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 2.22e-05, |
|
"loss": 4.1203, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 2.2400000000000002e-05, |
|
"loss": 4.1869, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 2.26e-05, |
|
"loss": 3.9657, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 2.2800000000000002e-05, |
|
"loss": 4.2749, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 2.3000000000000003e-05, |
|
"loss": 4.1479, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 2.32e-05, |
|
"loss": 4.2752, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 2.3400000000000003e-05, |
|
"loss": 4.3304, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 2.36e-05, |
|
"loss": 4.2427, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 2.38e-05, |
|
"loss": 4.2195, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 2.4e-05, |
|
"loss": 4.0881, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 2.4200000000000002e-05, |
|
"loss": 4.3312, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 2.44e-05, |
|
"loss": 4.2518, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 2.46e-05, |
|
"loss": 4.2965, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 2.48e-05, |
|
"loss": 4.3571, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 2.5e-05, |
|
"loss": 4.1767, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 2.5200000000000003e-05, |
|
"loss": 4.1605, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 2.54e-05, |
|
"loss": 4.3313, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 2.5600000000000002e-05, |
|
"loss": 4.2277, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 2.58e-05, |
|
"loss": 4.313, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 2.6000000000000002e-05, |
|
"loss": 4.2856, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 2.6200000000000003e-05, |
|
"loss": 4.2828, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 2.64e-05, |
|
"loss": 4.1482, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 2.6600000000000003e-05, |
|
"loss": 4.2175, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 2.6800000000000004e-05, |
|
"loss": 4.5134, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 2.7000000000000002e-05, |
|
"loss": 4.2704, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 2.7200000000000004e-05, |
|
"loss": 4.3241, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 2.7400000000000002e-05, |
|
"loss": 4.105, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 2.7600000000000003e-05, |
|
"loss": 4.4102, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 2.7800000000000005e-05, |
|
"loss": 4.4697, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 2.8000000000000003e-05, |
|
"loss": 4.3797, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 2.8199999999999998e-05, |
|
"loss": 4.2843, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 2.84e-05, |
|
"loss": 4.421, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 2.86e-05, |
|
"loss": 4.3733, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 2.88e-05, |
|
"loss": 4.4609, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 2.9e-05, |
|
"loss": 4.246, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 2.9199999999999998e-05, |
|
"loss": 4.5866, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 2.94e-05, |
|
"loss": 4.2249, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 2.96e-05, |
|
"loss": 4.3224, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 2.98e-05, |
|
"loss": 4.2143, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 3e-05, |
|
"loss": 4.2868, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 3.02e-05, |
|
"loss": 4.1287, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 3.04e-05, |
|
"loss": 4.0788, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 3.06e-05, |
|
"loss": 4.172, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 3.08e-05, |
|
"loss": 4.3126, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 3.1e-05, |
|
"loss": 4.4143, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 3.12e-05, |
|
"loss": 4.4585, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 3.1400000000000004e-05, |
|
"loss": 4.1789, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 3.16e-05, |
|
"loss": 4.1989, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 3.18e-05, |
|
"loss": 4.2238, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 3.2000000000000005e-05, |
|
"loss": 4.2342, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 3.2200000000000003e-05, |
|
"loss": 4.3552, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 3.24e-05, |
|
"loss": 4.257, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 3.26e-05, |
|
"loss": 4.2857, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 3.2800000000000004e-05, |
|
"loss": 4.2484, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 3.3e-05, |
|
"loss": 4.0284, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 3.32e-05, |
|
"loss": 4.0501, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 3.3400000000000005e-05, |
|
"loss": 4.1841, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 3.3600000000000004e-05, |
|
"loss": 4.4082, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 3.38e-05, |
|
"loss": 4.363, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 3.4000000000000007e-05, |
|
"loss": 4.2477, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 3.4200000000000005e-05, |
|
"loss": 4.0851, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 3.4399999999999996e-05, |
|
"loss": 4.2421, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 3.46e-05, |
|
"loss": 4.0494, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 3.48e-05, |
|
"loss": 4.0945, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 3.5e-05, |
|
"loss": 4.171, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 3.52e-05, |
|
"loss": 4.2874, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 3.54e-05, |
|
"loss": 4.2568, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 3.56e-05, |
|
"loss": 4.2897, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 3.58e-05, |
|
"loss": 4.1081, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 3.6e-05, |
|
"loss": 4.313, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 3.62e-05, |
|
"loss": 4.292, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 3.6400000000000004e-05, |
|
"loss": 4.2912, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 3.66e-05, |
|
"loss": 4.151, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 3.68e-05, |
|
"loss": 4.3744, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 3.7e-05, |
|
"loss": 4.0779, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 3.72e-05, |
|
"loss": 4.2057, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 3.74e-05, |
|
"loss": 4.3045, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 3.76e-05, |
|
"loss": 4.3288, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 3.7800000000000004e-05, |
|
"loss": 4.2585, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 3.8e-05, |
|
"loss": 4.491, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 3.82e-05, |
|
"loss": 4.1982, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 3.8400000000000005e-05, |
|
"loss": 4.2791, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 3.86e-05, |
|
"loss": 4.4892, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 3.88e-05, |
|
"loss": 4.4989, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 3.9000000000000006e-05, |
|
"loss": 4.3795, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 3.9200000000000004e-05, |
|
"loss": 4.6247, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 3.94e-05, |
|
"loss": 4.6662, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 3.960000000000001e-05, |
|
"loss": 4.0805, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 3.9800000000000005e-05, |
|
"loss": 4.2222, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 4e-05, |
|
"loss": 4.1281, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 4.02e-05, |
|
"loss": 4.1377, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 4.0400000000000006e-05, |
|
"loss": 4.0093, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 4.0600000000000004e-05, |
|
"loss": 4.0695, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 4.08e-05, |
|
"loss": 4.1305, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 4.1e-05, |
|
"loss": 4.0018, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 4.12e-05, |
|
"loss": 4.2204, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 4.14e-05, |
|
"loss": 4.1703, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 4.16e-05, |
|
"loss": 4.0995, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 4.18e-05, |
|
"loss": 4.2342, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 4.2e-05, |
|
"loss": 4.1738, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 4.22e-05, |
|
"loss": 4.2382, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 4.24e-05, |
|
"loss": 4.1038, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 4.26e-05, |
|
"loss": 4.2598, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 4.2800000000000004e-05, |
|
"loss": 4.1992, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 4.3e-05, |
|
"loss": 4.1448, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 4.32e-05, |
|
"loss": 4.2357, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 4.3400000000000005e-05, |
|
"loss": 4.1894, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 4.36e-05, |
|
"loss": 4.1205, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 4.38e-05, |
|
"loss": 4.2463, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 4.4000000000000006e-05, |
|
"loss": 4.2651, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 4.4200000000000004e-05, |
|
"loss": 4.1997, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 4.44e-05, |
|
"loss": 4.2946, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 4.46e-05, |
|
"loss": 4.4428, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 4.4800000000000005e-05, |
|
"loss": 4.2206, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 4.5e-05, |
|
"loss": 4.1257, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 4.52e-05, |
|
"loss": 4.1379, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 4.5400000000000006e-05, |
|
"loss": 4.1872, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 4.5600000000000004e-05, |
|
"loss": 4.2277, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 4.58e-05, |
|
"loss": 4.3086, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 4.600000000000001e-05, |
|
"loss": 4.1558, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 4.6200000000000005e-05, |
|
"loss": 4.2961, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 4.64e-05, |
|
"loss": 4.3365, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 4.660000000000001e-05, |
|
"loss": 4.1922, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 4.6800000000000006e-05, |
|
"loss": 4.1784, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 4.7e-05, |
|
"loss": 4.4414, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 4.72e-05, |
|
"loss": 4.2069, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 4.74e-05, |
|
"loss": 4.3952, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 4.76e-05, |
|
"loss": 4.295, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 4.78e-05, |
|
"loss": 4.4594, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 4.8e-05, |
|
"loss": 4.3772, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 4.82e-05, |
|
"loss": 4.2769, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 4.8400000000000004e-05, |
|
"loss": 4.4663, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 4.86e-05, |
|
"loss": 4.3733, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 4.88e-05, |
|
"loss": 4.7879, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 4.9e-05, |
|
"loss": 4.7509, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 4.92e-05, |
|
"loss": 4.7604, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 4.94e-05, |
|
"loss": 4.1676, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 4.96e-05, |
|
"loss": 4.4618, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 4.9800000000000004e-05, |
|
"loss": 4.1168, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 5e-05, |
|
"loss": 4.1508, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 5.02e-05, |
|
"loss": 4.2563, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 5.0400000000000005e-05, |
|
"loss": 4.1567, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 5.0600000000000003e-05, |
|
"loss": 4.1473, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 5.08e-05, |
|
"loss": 4.2933, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 5.1000000000000006e-05, |
|
"loss": 4.1407, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 5.1200000000000004e-05, |
|
"loss": 4.1175, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 5.14e-05, |
|
"loss": 4.3068, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 5.16e-05, |
|
"loss": 4.2729, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 5.1800000000000005e-05, |
|
"loss": 4.1282, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 5.2000000000000004e-05, |
|
"loss": 4.1259, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 5.22e-05, |
|
"loss": 4.1419, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 5.2400000000000007e-05, |
|
"loss": 4.2539, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 5.2600000000000005e-05, |
|
"loss": 4.2848, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 5.28e-05, |
|
"loss": 4.1218, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 5.300000000000001e-05, |
|
"loss": 4.4412, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 5.3200000000000006e-05, |
|
"loss": 4.1862, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 5.3400000000000004e-05, |
|
"loss": 4.3737, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 5.360000000000001e-05, |
|
"loss": 3.9655, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 5.380000000000001e-05, |
|
"loss": 4.1543, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 5.4000000000000005e-05, |
|
"loss": 4.1888, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 5.420000000000001e-05, |
|
"loss": 4.0681, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 5.440000000000001e-05, |
|
"loss": 4.3354, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 5.4600000000000006e-05, |
|
"loss": 4.2964, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 5.4800000000000004e-05, |
|
"loss": 4.2131, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 5.500000000000001e-05, |
|
"loss": 4.1059, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 5.520000000000001e-05, |
|
"loss": 4.1404, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 5.5400000000000005e-05, |
|
"loss": 4.3474, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 5.560000000000001e-05, |
|
"loss": 4.0881, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 5.580000000000001e-05, |
|
"loss": 4.2215, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 5.6000000000000006e-05, |
|
"loss": 4.0005, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 5.620000000000001e-05, |
|
"loss": 4.224, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 5.6399999999999995e-05, |
|
"loss": 4.2709, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 5.66e-05, |
|
"loss": 4.164, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 5.68e-05, |
|
"loss": 4.3511, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 5.6999999999999996e-05, |
|
"loss": 4.2992, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 5.72e-05, |
|
"loss": 4.3478, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 5.74e-05, |
|
"loss": 4.3043, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 5.76e-05, |
|
"loss": 4.3565, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 5.7799999999999995e-05, |
|
"loss": 4.2118, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 5.8e-05, |
|
"loss": 4.5298, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 5.82e-05, |
|
"loss": 4.2882, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 5.8399999999999997e-05, |
|
"loss": 4.1672, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 5.86e-05, |
|
"loss": 4.5489, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 5.88e-05, |
|
"loss": 4.5708, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 5.9e-05, |
|
"loss": 4.3176, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 5.92e-05, |
|
"loss": 4.7349, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 5.94e-05, |
|
"loss": 4.7477, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 5.96e-05, |
|
"loss": 4.1141, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 5.9800000000000003e-05, |
|
"loss": 4.255, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 6e-05, |
|
"loss": 4.2093, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 6.02e-05, |
|
"loss": 4.2512, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 6.04e-05, |
|
"loss": 4.0745, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 6.06e-05, |
|
"loss": 4.143, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 6.08e-05, |
|
"loss": 4.1732, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 6.1e-05, |
|
"loss": 4.3464, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 6.12e-05, |
|
"loss": 4.2193, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 6.14e-05, |
|
"loss": 4.3655, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 6.16e-05, |
|
"loss": 4.4293, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 6.18e-05, |
|
"loss": 4.3158, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 6.2e-05, |
|
"loss": 4.2305, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 6.220000000000001e-05, |
|
"loss": 4.2545, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 6.24e-05, |
|
"loss": 4.092, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 6.26e-05, |
|
"loss": 4.3718, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 6.280000000000001e-05, |
|
"loss": 4.1923, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 6.3e-05, |
|
"loss": 4.1044, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 6.32e-05, |
|
"loss": 4.2423, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 6.340000000000001e-05, |
|
"loss": 4.1932, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 6.36e-05, |
|
"loss": 4.3054, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 6.38e-05, |
|
"loss": 4.204, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 6.400000000000001e-05, |
|
"loss": 4.6242, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 6.42e-05, |
|
"loss": 4.4789, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 6.440000000000001e-05, |
|
"loss": 4.4935, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 6.460000000000001e-05, |
|
"loss": 4.3337, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 6.48e-05, |
|
"loss": 4.1572, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 6.500000000000001e-05, |
|
"loss": 4.1932, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 6.52e-05, |
|
"loss": 4.4071, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 6.54e-05, |
|
"loss": 4.4865, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 6.560000000000001e-05, |
|
"loss": 4.1701, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 6.58e-05, |
|
"loss": 4.2422, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 6.6e-05, |
|
"loss": 4.2503, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 6.620000000000001e-05, |
|
"loss": 4.2858, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 6.64e-05, |
|
"loss": 4.3979, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 6.66e-05, |
|
"loss": 4.1882, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 6.680000000000001e-05, |
|
"loss": 4.1267, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 6.7e-05, |
|
"loss": 4.3292, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 6.720000000000001e-05, |
|
"loss": 4.2356, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 6.740000000000001e-05, |
|
"loss": 4.2885, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 6.76e-05, |
|
"loss": 4.5698, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 6.780000000000001e-05, |
|
"loss": 4.4765, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 6.800000000000001e-05, |
|
"loss": 4.2685, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 6.82e-05, |
|
"loss": 4.4023, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 6.840000000000001e-05, |
|
"loss": 4.401, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 6.860000000000001e-05, |
|
"loss": 4.6416, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 6.879999999999999e-05, |
|
"loss": 4.6245, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 6.9e-05, |
|
"loss": 4.5174, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 6.92e-05, |
|
"loss": 4.5499, |
|
"step": 349 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 6.939999999999999e-05, |
|
"loss": 4.4764, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 6.96e-05, |
|
"loss": 4.4091, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 6.98e-05, |
|
"loss": 4.2577, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 7e-05, |
|
"loss": 4.2241, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 7.02e-05, |
|
"loss": 4.2012, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 7.04e-05, |
|
"loss": 4.4662, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 7.06e-05, |
|
"loss": 4.1124, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 7.08e-05, |
|
"loss": 4.2003, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 7.1e-05, |
|
"loss": 4.2004, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 7.12e-05, |
|
"loss": 4.2059, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 7.14e-05, |
|
"loss": 4.3448, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 7.16e-05, |
|
"loss": 4.243, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 7.18e-05, |
|
"loss": 4.2397, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 7.2e-05, |
|
"loss": 4.1738, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 7.22e-05, |
|
"loss": 4.4315, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 7.24e-05, |
|
"loss": 4.0795, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 7.26e-05, |
|
"loss": 4.2753, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 7.280000000000001e-05, |
|
"loss": 4.181, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 7.3e-05, |
|
"loss": 4.239, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 7.32e-05, |
|
"loss": 4.3883, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 7.340000000000001e-05, |
|
"loss": 4.2789, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 7.36e-05, |
|
"loss": 4.1056, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 7.38e-05, |
|
"loss": 4.4694, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 7.4e-05, |
|
"loss": 4.2561, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 7.42e-05, |
|
"loss": 4.398, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 7.44e-05, |
|
"loss": 4.1645, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 7.46e-05, |
|
"loss": 4.2328, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 7.48e-05, |
|
"loss": 4.2445, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 7.500000000000001e-05, |
|
"loss": 4.3664, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 7.52e-05, |
|
"loss": 4.2296, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 7.54e-05, |
|
"loss": 4.4382, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 7.560000000000001e-05, |
|
"loss": 4.3286, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 7.58e-05, |
|
"loss": 4.2781, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 7.6e-05, |
|
"loss": 4.4275, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 7.620000000000001e-05, |
|
"loss": 4.2383, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 7.64e-05, |
|
"loss": 4.2161, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 7.66e-05, |
|
"loss": 4.2606, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 7.680000000000001e-05, |
|
"loss": 4.2192, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 7.7e-05, |
|
"loss": 4.222, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 7.72e-05, |
|
"loss": 4.2535, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 7.740000000000001e-05, |
|
"loss": 4.323, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 7.76e-05, |
|
"loss": 4.3979, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 7.780000000000001e-05, |
|
"loss": 4.1583, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 7.800000000000001e-05, |
|
"loss": 4.4448, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 7.82e-05, |
|
"loss": 4.4023, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 7.840000000000001e-05, |
|
"loss": 4.6777, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 7.860000000000001e-05, |
|
"loss": 4.2592, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 7.88e-05, |
|
"loss": 4.2254, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 7.900000000000001e-05, |
|
"loss": 4.6933, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 7.920000000000001e-05, |
|
"loss": 4.4564, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 7.94e-05, |
|
"loss": 4.8916, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 7.960000000000001e-05, |
|
"loss": 4.4665, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 7.98e-05, |
|
"loss": 4.3327, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 8e-05, |
|
"loss": 4.4033, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 8.020000000000001e-05, |
|
"loss": 4.3885, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 8.04e-05, |
|
"loss": 4.4113, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 8.060000000000001e-05, |
|
"loss": 4.3084, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 8.080000000000001e-05, |
|
"loss": 4.2218, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 8.1e-05, |
|
"loss": 4.202, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 8.120000000000001e-05, |
|
"loss": 4.4395, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 8.14e-05, |
|
"loss": 4.3359, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 8.16e-05, |
|
"loss": 4.3585, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 8.18e-05, |
|
"loss": 4.2204, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 8.2e-05, |
|
"loss": 4.4704, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 8.22e-05, |
|
"loss": 4.0948, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 8.24e-05, |
|
"loss": 4.2758, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 8.26e-05, |
|
"loss": 4.3346, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 8.28e-05, |
|
"loss": 4.1562, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 8.3e-05, |
|
"loss": 4.3247, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 8.32e-05, |
|
"loss": 4.2338, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 8.34e-05, |
|
"loss": 4.268, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 8.36e-05, |
|
"loss": 4.242, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 8.38e-05, |
|
"loss": 4.4886, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 8.4e-05, |
|
"loss": 4.4959, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 8.42e-05, |
|
"loss": 4.4986, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 8.44e-05, |
|
"loss": 4.4402, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 8.46e-05, |
|
"loss": 4.4681, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 8.48e-05, |
|
"loss": 4.1911, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 8.5e-05, |
|
"loss": 4.4122, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 8.52e-05, |
|
"loss": 4.2988, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 8.54e-05, |
|
"loss": 4.3522, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 8.560000000000001e-05, |
|
"loss": 4.5413, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 8.58e-05, |
|
"loss": 4.3583, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 8.6e-05, |
|
"loss": 4.0939, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 8.620000000000001e-05, |
|
"loss": 4.6722, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 8.64e-05, |
|
"loss": 4.2455, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 8.66e-05, |
|
"loss": 4.4837, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 8.680000000000001e-05, |
|
"loss": 4.5514, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 8.7e-05, |
|
"loss": 4.3723, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 8.72e-05, |
|
"loss": 4.2979, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 8.740000000000001e-05, |
|
"loss": 4.3634, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 8.76e-05, |
|
"loss": 4.6395, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 8.78e-05, |
|
"loss": 4.2519, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 8.800000000000001e-05, |
|
"loss": 4.2913, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 8.82e-05, |
|
"loss": 4.7257, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 8.840000000000001e-05, |
|
"loss": 4.3425, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 8.86e-05, |
|
"loss": 4.1172, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 8.88e-05, |
|
"loss": 4.3683, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 8.900000000000001e-05, |
|
"loss": 4.5756, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 8.92e-05, |
|
"loss": 4.9532, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 8.94e-05, |
|
"loss": 5.6362, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 8.960000000000001e-05, |
|
"loss": 4.3291, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 8.98e-05, |
|
"loss": 4.4704, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 9e-05, |
|
"loss": 4.5678, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 9.020000000000001e-05, |
|
"loss": 4.2346, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 9.04e-05, |
|
"loss": 4.5092, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 9.06e-05, |
|
"loss": 4.3366, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 9.080000000000001e-05, |
|
"loss": 4.3663, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 9.1e-05, |
|
"loss": 4.3834, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 9.120000000000001e-05, |
|
"loss": 4.3779, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 9.140000000000001e-05, |
|
"loss": 4.5347, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 9.16e-05, |
|
"loss": 4.2667, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 9.180000000000001e-05, |
|
"loss": 4.4051, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 9.200000000000001e-05, |
|
"loss": 4.3178, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 9.22e-05, |
|
"loss": 4.1682, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 9.240000000000001e-05, |
|
"loss": 4.2923, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 9.260000000000001e-05, |
|
"loss": 4.6037, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 9.28e-05, |
|
"loss": 4.2776, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 9.300000000000001e-05, |
|
"loss": 4.2357, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 9.320000000000002e-05, |
|
"loss": 4.3306, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 9.340000000000001e-05, |
|
"loss": 4.2949, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 9.360000000000001e-05, |
|
"loss": 4.1422, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 9.38e-05, |
|
"loss": 4.2664, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 9.4e-05, |
|
"loss": 5.0999, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 9.42e-05, |
|
"loss": 4.4737, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 9.44e-05, |
|
"loss": 4.3051, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 9.46e-05, |
|
"loss": 4.6362, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 9.48e-05, |
|
"loss": 4.5125, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 9.5e-05, |
|
"loss": 4.2002, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 9.52e-05, |
|
"loss": 4.1203, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 9.54e-05, |
|
"loss": 4.3217, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 9.56e-05, |
|
"loss": 4.2459, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 9.58e-05, |
|
"loss": 4.1917, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 9.6e-05, |
|
"loss": 4.3971, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 9.620000000000001e-05, |
|
"loss": 4.3083, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 9.64e-05, |
|
"loss": 4.4234, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 9.66e-05, |
|
"loss": 4.198, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 9.680000000000001e-05, |
|
"loss": 4.3167, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 9.7e-05, |
|
"loss": 4.1635, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 9.72e-05, |
|
"loss": 4.3927, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 9.74e-05, |
|
"loss": 4.3937, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 9.76e-05, |
|
"loss": 4.2244, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 9.78e-05, |
|
"loss": 4.5039, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 9.8e-05, |
|
"loss": 4.8541, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 9.82e-05, |
|
"loss": 4.2893, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 9.84e-05, |
|
"loss": 4.6025, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 9.86e-05, |
|
"loss": 4.1377, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 9.88e-05, |
|
"loss": 4.094, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 9.900000000000001e-05, |
|
"loss": 4.8325, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 9.92e-05, |
|
"loss": 4.6663, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 9.94e-05, |
|
"loss": 4.1605, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"eval_loss": 4.537853240966797, |
|
"eval_runtime": 894.8393, |
|
"eval_samples_per_second": 2.952, |
|
"eval_steps_per_second": 0.37, |
|
"eval_wer": 1.929650932169774, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 9.960000000000001e-05, |
|
"loss": 4.6864, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 9.98e-05, |
|
"loss": 4.2069, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 0.0001, |
|
"loss": 4.6738, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 9.974489795918368e-05, |
|
"loss": 4.4791, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 9.948979591836736e-05, |
|
"loss": 4.1978, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 9.923469387755102e-05, |
|
"loss": 4.3373, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 9.897959183673469e-05, |
|
"loss": 4.4823, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 9.872448979591837e-05, |
|
"loss": 4.6696, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 9.846938775510204e-05, |
|
"loss": 4.3015, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 9.821428571428572e-05, |
|
"loss": 4.6368, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 9.79591836734694e-05, |
|
"loss": 4.3094, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 9.770408163265307e-05, |
|
"loss": 4.3088, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 9.744897959183674e-05, |
|
"loss": 5.0756, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 9.719387755102042e-05, |
|
"loss": 4.4334, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 9.693877551020408e-05, |
|
"loss": 4.5505, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 9.668367346938776e-05, |
|
"loss": 4.2511, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 9.642857142857143e-05, |
|
"loss": 4.4093, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 9.617346938775511e-05, |
|
"loss": 4.3096, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 9.591836734693878e-05, |
|
"loss": 4.5489, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 9.566326530612246e-05, |
|
"loss": 4.4747, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 9.540816326530613e-05, |
|
"loss": 4.274, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 9.515306122448981e-05, |
|
"loss": 4.2253, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 9.489795918367348e-05, |
|
"loss": 4.2597, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 9.464285714285715e-05, |
|
"loss": 4.5888, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 9.438775510204082e-05, |
|
"loss": 4.4988, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 9.41326530612245e-05, |
|
"loss": 4.2755, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 9.387755102040817e-05, |
|
"loss": 4.4353, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 9.362244897959183e-05, |
|
"loss": 4.3746, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 9.336734693877551e-05, |
|
"loss": 4.3393, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 9.311224489795918e-05, |
|
"loss": 4.5366, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 9.285714285714286e-05, |
|
"loss": 4.582, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 9.260204081632653e-05, |
|
"loss": 4.1169, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 9.234693877551021e-05, |
|
"loss": 4.3229, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 9.209183673469388e-05, |
|
"loss": 4.2116, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 9.183673469387756e-05, |
|
"loss": 4.5244, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 9.158163265306124e-05, |
|
"loss": 4.4066, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 9.13265306122449e-05, |
|
"loss": 4.5668, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 9.107142857142857e-05, |
|
"loss": 4.4726, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 9.081632653061225e-05, |
|
"loss": 4.2823, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 9.056122448979592e-05, |
|
"loss": 4.3757, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 9.030612244897958e-05, |
|
"loss": 4.3945, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 9.005102040816327e-05, |
|
"loss": 4.4137, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 8.979591836734695e-05, |
|
"loss": 4.5504, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 8.954081632653062e-05, |
|
"loss": 4.6919, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 8.92857142857143e-05, |
|
"loss": 4.677, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 8.903061224489796e-05, |
|
"loss": 4.5456, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 8.877551020408164e-05, |
|
"loss": 5.0684, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 8.852040816326531e-05, |
|
"loss": 6.0851, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 8.826530612244899e-05, |
|
"loss": 4.369, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 8.801020408163265e-05, |
|
"loss": 5.2507, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 8.775510204081632e-05, |
|
"loss": 4.4075, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 8.75e-05, |
|
"loss": 4.5339, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 8.724489795918367e-05, |
|
"loss": 4.5553, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 8.698979591836735e-05, |
|
"loss": 4.2438, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 8.673469387755102e-05, |
|
"loss": 4.3284, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 8.64795918367347e-05, |
|
"loss": 4.5701, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 8.622448979591838e-05, |
|
"loss": 4.2063, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 8.596938775510205e-05, |
|
"loss": 4.1729, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 8.571428571428571e-05, |
|
"loss": 4.3359, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 8.545918367346939e-05, |
|
"loss": 4.2974, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 8.520408163265306e-05, |
|
"loss": 4.4944, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 8.494897959183674e-05, |
|
"loss": 4.2742, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 8.469387755102041e-05, |
|
"loss": 4.3104, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 8.443877551020409e-05, |
|
"loss": 4.2998, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 8.418367346938776e-05, |
|
"loss": 4.3185, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 8.392857142857144e-05, |
|
"loss": 4.4354, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 8.367346938775511e-05, |
|
"loss": 4.3363, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 8.341836734693878e-05, |
|
"loss": 4.2593, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 8.316326530612245e-05, |
|
"loss": 4.3758, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 8.290816326530613e-05, |
|
"loss": 4.3491, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 8.26530612244898e-05, |
|
"loss": 4.2382, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 8.239795918367348e-05, |
|
"loss": 4.3785, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 8.214285714285714e-05, |
|
"loss": 4.2177, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 8.188775510204081e-05, |
|
"loss": 4.1685, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 8.163265306122449e-05, |
|
"loss": 4.6721, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 8.137755102040817e-05, |
|
"loss": 4.2446, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 8.112244897959184e-05, |
|
"loss": 4.3527, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 8.086734693877552e-05, |
|
"loss": 4.585, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 8.061224489795919e-05, |
|
"loss": 3.9455, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 8.035714285714287e-05, |
|
"loss": 4.9334, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 8.010204081632653e-05, |
|
"loss": 4.4063, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 7.98469387755102e-05, |
|
"loss": 4.2461, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 7.959183673469388e-05, |
|
"loss": 4.7348, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 7.933673469387755e-05, |
|
"loss": 4.4816, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 7.908163265306123e-05, |
|
"loss": 4.5978, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 7.882653061224489e-05, |
|
"loss": 4.4656, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 7.857142857142858e-05, |
|
"loss": 4.4803, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 7.831632653061226e-05, |
|
"loss": 4.427, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 7.806122448979593e-05, |
|
"loss": 4.1964, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 7.780612244897959e-05, |
|
"loss": 4.2802, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 7.755102040816327e-05, |
|
"loss": 4.3095, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 7.729591836734694e-05, |
|
"loss": 4.5677, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 7.704081632653062e-05, |
|
"loss": 4.7527, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 7.67857142857143e-05, |
|
"loss": 4.3602, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 7.653061224489796e-05, |
|
"loss": 4.4132, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 7.627551020408163e-05, |
|
"loss": 4.1445, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 7.60204081632653e-05, |
|
"loss": 4.2382, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 7.576530612244898e-05, |
|
"loss": 4.5613, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 7.551020408163266e-05, |
|
"loss": 4.5066, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 7.525510204081633e-05, |
|
"loss": 4.1874, |
|
"step": 600 |
|
}, |
|
{
"epoch": 0.67,
"learning_rate": 7.500000000000001e-05,
"loss": 4.3602,
"step": 601
},
{
"epoch": 0.67,
"learning_rate": 7.474489795918368e-05,
"loss": 4.3446,
"step": 602
},
{
"epoch": 0.68,
"learning_rate": 7.448979591836736e-05,
"loss": 4.4451,
"step": 603
},
{
"epoch": 0.68,
"learning_rate": 7.423469387755102e-05,
"loss": 4.3278,
"step": 604
},
{
"epoch": 0.68,
"learning_rate": 7.39795918367347e-05,
"loss": 4.2441,
"step": 605
},
{
"epoch": 0.68,
"learning_rate": 7.372448979591837e-05,
"loss": 4.3496,
"step": 606
},
{
"epoch": 0.68,
"learning_rate": 7.346938775510205e-05,
"loss": 4.425,
"step": 607
},
{
"epoch": 0.68,
"learning_rate": 7.321428571428571e-05,
"loss": 4.2756,
"step": 608
},
{
"epoch": 0.68,
"learning_rate": 7.29591836734694e-05,
"loss": 4.2437,
"step": 609
},
{
"epoch": 0.68,
"learning_rate": 7.270408163265307e-05,
"loss": 4.2779,
"step": 610
},
{
"epoch": 0.68,
"learning_rate": 7.244897959183675e-05,
"loss": 4.2593,
"step": 611
},
{
"epoch": 0.69,
"learning_rate": 7.219387755102042e-05,
"loss": 4.4021,
"step": 612
},
{
"epoch": 0.69,
"learning_rate": 7.193877551020408e-05,
"loss": 4.3809,
"step": 613
},
{
"epoch": 0.69,
"learning_rate": 7.168367346938776e-05,
"loss": 4.4197,
"step": 614
},
{
"epoch": 0.69,
"learning_rate": 7.142857142857143e-05,
"loss": 4.2543,
"step": 615
},
{
"epoch": 0.69,
"learning_rate": 7.117346938775511e-05,
"loss": 4.2681,
"step": 616
},
{
"epoch": 0.69,
"learning_rate": 7.091836734693877e-05,
"loss": 4.2117,
"step": 617
},
{
"epoch": 0.69,
"learning_rate": 7.066326530612245e-05,
"loss": 4.163,
"step": 618
},
{
"epoch": 0.69,
"learning_rate": 7.040816326530612e-05,
"loss": 4.1755,
"step": 619
},
{
"epoch": 0.7,
"learning_rate": 7.01530612244898e-05,
"loss": 4.3606,
"step": 620
},
{
"epoch": 0.7,
"learning_rate": 6.989795918367347e-05,
"loss": 4.2221,
"step": 621
},
{
"epoch": 0.7,
"learning_rate": 6.964285714285715e-05,
"loss": 4.1264,
"step": 622
},
{
"epoch": 0.7,
"learning_rate": 6.938775510204082e-05,
"loss": 4.3952,
"step": 623
},
{
"epoch": 0.7,
"learning_rate": 6.91326530612245e-05,
"loss": 4.2269,
"step": 624
},
{
"epoch": 0.7,
"learning_rate": 6.887755102040817e-05,
"loss": 4.608,
"step": 625
},
{
"epoch": 0.7,
"learning_rate": 6.862244897959184e-05,
"loss": 4.2958,
"step": 626
},
{
"epoch": 0.7,
"learning_rate": 6.836734693877551e-05,
"loss": 4.2362,
"step": 627
},
{
"epoch": 0.7,
"learning_rate": 6.811224489795919e-05,
"loss": 4.1795,
"step": 628
},
{
"epoch": 0.71,
"learning_rate": 6.785714285714286e-05,
"loss": 4.1724,
"step": 629
},
{
"epoch": 0.71,
"learning_rate": 6.760204081632652e-05,
"loss": 4.5351,
"step": 630
},
{
"epoch": 0.71,
"learning_rate": 6.73469387755102e-05,
"loss": 4.3448,
"step": 631
},
{
"epoch": 0.71,
"learning_rate": 6.709183673469389e-05,
"loss": 4.2583,
"step": 632
},
{
"epoch": 0.71,
"learning_rate": 6.683673469387756e-05,
"loss": 4.266,
"step": 633
},
{
"epoch": 0.71,
"learning_rate": 6.658163265306124e-05,
"loss": 4.237,
"step": 634
},
{
"epoch": 0.71,
"learning_rate": 6.63265306122449e-05,
"loss": 4.4784,
"step": 635
},
{
"epoch": 0.71,
"learning_rate": 6.607142857142857e-05,
"loss": 4.3054,
"step": 636
},
{
"epoch": 0.71,
"learning_rate": 6.581632653061225e-05,
"loss": 4.2649,
"step": 637
},
{
"epoch": 0.72,
"learning_rate": 6.556122448979592e-05,
"loss": 4.1358,
"step": 638
},
{
"epoch": 0.72,
"learning_rate": 6.530612244897959e-05,
"loss": 4.3757,
"step": 639
},
{
"epoch": 0.72,
"learning_rate": 6.505102040816326e-05,
"loss": 4.0554,
"step": 640
},
{
"epoch": 0.72,
"learning_rate": 6.479591836734694e-05,
"loss": 4.2163,
"step": 641
},
{
"epoch": 0.72,
"learning_rate": 6.454081632653061e-05,
"loss": 4.6037,
"step": 642
},
{
"epoch": 0.72,
"learning_rate": 6.428571428571429e-05,
"loss": 4.6982,
"step": 643
},
{
"epoch": 0.72,
"learning_rate": 6.403061224489796e-05,
"loss": 4.1454,
"step": 644
},
{
"epoch": 0.72,
"learning_rate": 6.377551020408164e-05,
"loss": 3.9796,
"step": 645
},
{
"epoch": 0.72,
"learning_rate": 6.352040816326531e-05,
"loss": 4.3531,
"step": 646
},
{
"epoch": 0.73,
"learning_rate": 6.326530612244899e-05,
"loss": 4.1176,
"step": 647
},
{
"epoch": 0.73,
"learning_rate": 6.301020408163265e-05,
"loss": 4.3527,
"step": 648
},
{
"epoch": 0.73,
"learning_rate": 6.275510204081633e-05,
"loss": 4.4182,
"step": 649
},
{
"epoch": 0.73,
"learning_rate": 6.25e-05,
"loss": 4.3515,
"step": 650
},
{
"epoch": 0.73,
"learning_rate": 6.224489795918368e-05,
"loss": 4.4742,
"step": 651
},
{
"epoch": 0.73,
"learning_rate": 6.198979591836735e-05,
"loss": 4.4929,
"step": 652
},
{
"epoch": 0.73,
"learning_rate": 6.173469387755101e-05,
"loss": 4.3374,
"step": 653
},
{
"epoch": 0.73,
"learning_rate": 6.14795918367347e-05,
"loss": 4.4383,
"step": 654
},
{
"epoch": 0.73,
"learning_rate": 6.122448979591838e-05,
"loss": 4.2959,
"step": 655
},
{
"epoch": 0.74,
"learning_rate": 6.0969387755102046e-05,
"loss": 4.4306,
"step": 656
},
{
"epoch": 0.74,
"learning_rate": 6.0714285714285715e-05,
"loss": 4.1845,
"step": 657
},
{
"epoch": 0.74,
"learning_rate": 6.045918367346939e-05,
"loss": 4.4121,
"step": 658
},
{
"epoch": 0.74,
"learning_rate": 6.0204081632653065e-05,
"loss": 4.1552,
"step": 659
},
{
"epoch": 0.74,
"learning_rate": 5.994897959183674e-05,
"loss": 4.4136,
"step": 660
},
{
"epoch": 0.74,
"learning_rate": 5.9693877551020416e-05,
"loss": 4.0537,
"step": 661
},
{
"epoch": 0.74,
"learning_rate": 5.9438775510204084e-05,
"loss": 4.0487,
"step": 662
},
{
"epoch": 0.74,
"learning_rate": 5.918367346938776e-05,
"loss": 4.6006,
"step": 663
},
{
"epoch": 0.74,
"learning_rate": 5.8928571428571435e-05,
"loss": 4.1514,
"step": 664
},
{
"epoch": 0.75,
"learning_rate": 5.867346938775511e-05,
"loss": 4.0854,
"step": 665
},
{
"epoch": 0.75,
"learning_rate": 5.841836734693877e-05,
"loss": 4.3523,
"step": 666
},
{
"epoch": 0.75,
"learning_rate": 5.816326530612245e-05,
"loss": 4.2261,
"step": 667
},
{
"epoch": 0.75,
"learning_rate": 5.790816326530612e-05,
"loss": 4.3905,
"step": 668
},
{
"epoch": 0.75,
"learning_rate": 5.7653061224489805e-05,
"loss": 4.4202,
"step": 669
},
{
"epoch": 0.75,
"learning_rate": 5.739795918367348e-05,
"loss": 4.3253,
"step": 670
},
{
"epoch": 0.75,
"learning_rate": 5.714285714285714e-05,
"loss": 4.1013,
"step": 671
},
{
"epoch": 0.75,
"learning_rate": 5.688775510204082e-05,
"loss": 4.1175,
"step": 672
},
{
"epoch": 0.75,
"learning_rate": 5.663265306122449e-05,
"loss": 4.0785,
"step": 673
},
{
"epoch": 0.76,
"learning_rate": 5.637755102040817e-05,
"loss": 4.0308,
"step": 674
},
{
"epoch": 0.76,
"learning_rate": 5.6122448979591836e-05,
"loss": 4.0925,
"step": 675
},
{
"epoch": 0.76,
"learning_rate": 5.586734693877551e-05,
"loss": 4.323,
"step": 676
},
{
"epoch": 0.76,
"learning_rate": 5.561224489795919e-05,
"loss": 4.0947,
"step": 677
},
{
"epoch": 0.76,
"learning_rate": 5.535714285714286e-05,
"loss": 3.9544,
"step": 678
},
{
"epoch": 0.76,
"learning_rate": 5.510204081632653e-05,
"loss": 4.0468,
"step": 679
},
{
"epoch": 0.76,
"learning_rate": 5.4846938775510206e-05,
"loss": 4.1615,
"step": 680
},
{
"epoch": 0.76,
"learning_rate": 5.459183673469388e-05,
"loss": 4.8051,
"step": 681
},
{
"epoch": 0.76,
"learning_rate": 5.4336734693877556e-05,
"loss": 4.1085,
"step": 682
},
{
"epoch": 0.77,
"learning_rate": 5.408163265306123e-05,
"loss": 4.5086,
"step": 683
},
{
"epoch": 0.77,
"learning_rate": 5.382653061224489e-05,
"loss": 4.467,
"step": 684
},
{
"epoch": 0.77,
"learning_rate": 5.3571428571428575e-05,
"loss": 4.4496,
"step": 685
},
{
"epoch": 0.77,
"learning_rate": 5.331632653061225e-05,
"loss": 4.2151,
"step": 686
},
{
"epoch": 0.77,
"learning_rate": 5.3061224489795926e-05,
"loss": 4.3281,
"step": 687
},
{
"epoch": 0.77,
"learning_rate": 5.280612244897959e-05,
"loss": 4.3739,
"step": 688
},
{
"epoch": 0.77,
"learning_rate": 5.255102040816326e-05,
"loss": 4.3452,
"step": 689
},
{
"epoch": 0.77,
"learning_rate": 5.229591836734694e-05,
"loss": 4.5481,
"step": 690
},
{
"epoch": 0.77,
"learning_rate": 5.2040816326530614e-05,
"loss": 4.084,
"step": 691
},
{
"epoch": 0.78,
"learning_rate": 5.1785714285714296e-05,
"loss": 4.304,
"step": 692
},
{
"epoch": 0.78,
"learning_rate": 5.153061224489796e-05,
"loss": 4.0724,
"step": 693
},
{
"epoch": 0.78,
"learning_rate": 5.127551020408163e-05,
"loss": 4.5427,
"step": 694
},
{
"epoch": 0.78,
"learning_rate": 5.102040816326531e-05,
"loss": 4.693,
"step": 695
},
{
"epoch": 0.78,
"learning_rate": 5.076530612244898e-05,
"loss": 4.3249,
"step": 696
},
{
"epoch": 0.78,
"learning_rate": 5.051020408163265e-05,
"loss": 4.4825,
"step": 697
},
{
"epoch": 0.78,
"learning_rate": 5.025510204081633e-05,
"loss": 4.2753,
"step": 698
},
{
"epoch": 0.78,
"learning_rate": 5e-05,
"loss": 4.5013,
"step": 699
},
{
"epoch": 0.78,
"learning_rate": 4.974489795918368e-05,
"loss": 4.4463,
"step": 700
},
{
"epoch": 0.79,
"learning_rate": 4.9489795918367346e-05,
"loss": 4.0471,
"step": 701
},
{
"epoch": 0.79,
"learning_rate": 4.923469387755102e-05,
"loss": 4.2305,
"step": 702
},
{
"epoch": 0.79,
"learning_rate": 4.89795918367347e-05,
"loss": 4.1357,
"step": 703
},
{
"epoch": 0.79,
"learning_rate": 4.872448979591837e-05,
"loss": 4.0259,
"step": 704
},
{
"epoch": 0.79,
"learning_rate": 4.846938775510204e-05,
"loss": 4.2383,
"step": 705
},
{
"epoch": 0.79,
"learning_rate": 4.8214285714285716e-05,
"loss": 4.3998,
"step": 706
},
{
"epoch": 0.79,
"learning_rate": 4.795918367346939e-05,
"loss": 4.4906,
"step": 707
},
{
"epoch": 0.79,
"learning_rate": 4.7704081632653066e-05,
"loss": 4.3952,
"step": 708
},
{
"epoch": 0.79,
"learning_rate": 4.744897959183674e-05,
"loss": 4.2198,
"step": 709
},
{
"epoch": 0.8,
"learning_rate": 4.719387755102041e-05,
"loss": 4.2494,
"step": 710
},
{
"epoch": 0.8,
"learning_rate": 4.6938775510204086e-05,
"loss": 4.1282,
"step": 711
},
{
"epoch": 0.8,
"learning_rate": 4.6683673469387754e-05,
"loss": 4.1195,
"step": 712
},
{
"epoch": 0.8,
"learning_rate": 4.642857142857143e-05,
"loss": 4.2118,
"step": 713
},
{
"epoch": 0.8,
"learning_rate": 4.6173469387755105e-05,
"loss": 4.286,
"step": 714
},
{
"epoch": 0.8,
"learning_rate": 4.591836734693878e-05,
"loss": 4.351,
"step": 715
},
{
"epoch": 0.8,
"learning_rate": 4.566326530612245e-05,
"loss": 4.048,
"step": 716
},
{
"epoch": 0.8,
"learning_rate": 4.5408163265306124e-05,
"loss": 4.4465,
"step": 717
},
{
"epoch": 0.8,
"learning_rate": 4.515306122448979e-05,
"loss": 4.1497,
"step": 718
},
{
"epoch": 0.81,
"learning_rate": 4.4897959183673474e-05,
"loss": 3.9604,
"step": 719
},
{
"epoch": 0.81,
"learning_rate": 4.464285714285715e-05,
"loss": 4.1792,
"step": 720
},
{
"epoch": 0.81,
"learning_rate": 4.438775510204082e-05,
"loss": 4.1827,
"step": 721
},
{
"epoch": 0.81,
"learning_rate": 4.4132653061224493e-05,
"loss": 4.0194,
"step": 722
},
{
"epoch": 0.81,
"learning_rate": 4.387755102040816e-05,
"loss": 4.2096,
"step": 723
},
{
"epoch": 0.81,
"learning_rate": 4.362244897959184e-05,
"loss": 4.2585,
"step": 724
},
{
"epoch": 0.81,
"learning_rate": 4.336734693877551e-05,
"loss": 4.077,
"step": 725
},
{
"epoch": 0.81,
"learning_rate": 4.311224489795919e-05,
"loss": 4.3033,
"step": 726
},
{
"epoch": 0.82,
"learning_rate": 4.2857142857142856e-05,
"loss": 4.119,
"step": 727
},
{
"epoch": 0.82,
"learning_rate": 4.260204081632653e-05,
"loss": 4.1655,
"step": 728
},
{
"epoch": 0.82,
"learning_rate": 4.234693877551021e-05,
"loss": 4.0441,
"step": 729
},
{
"epoch": 0.82,
"learning_rate": 4.209183673469388e-05,
"loss": 4.2595,
"step": 730
},
{
"epoch": 0.82,
"learning_rate": 4.183673469387756e-05,
"loss": 4.4831,
"step": 731
},
{
"epoch": 0.82,
"learning_rate": 4.1581632653061226e-05,
"loss": 4.3292,
"step": 732
},
{
"epoch": 0.82,
"learning_rate": 4.13265306122449e-05,
"loss": 4.0568,
"step": 733
},
{
"epoch": 0.82,
"learning_rate": 4.107142857142857e-05,
"loss": 4.2774,
"step": 734
},
{
"epoch": 0.82,
"learning_rate": 4.0816326530612245e-05,
"loss": 3.9564,
"step": 735
},
{
"epoch": 0.83,
"learning_rate": 4.056122448979592e-05,
"loss": 4.1067,
"step": 736
},
{
"epoch": 0.83,
"learning_rate": 4.0306122448979596e-05,
"loss": 4.2758,
"step": 737
},
{
"epoch": 0.83,
"learning_rate": 4.0051020408163264e-05,
"loss": 4.1247,
"step": 738
},
{
"epoch": 0.83,
"learning_rate": 3.979591836734694e-05,
"loss": 4.2443,
"step": 739
},
{
"epoch": 0.83,
"learning_rate": 3.9540816326530615e-05,
"loss": 4.2306,
"step": 740
},
{
"epoch": 0.83,
"learning_rate": 3.928571428571429e-05,
"loss": 4.4393,
"step": 741
},
{
"epoch": 0.83,
"learning_rate": 3.9030612244897965e-05,
"loss": 4.1833,
"step": 742
},
{
"epoch": 0.83,
"learning_rate": 3.8775510204081634e-05,
"loss": 3.9059,
"step": 743
},
{
"epoch": 0.83,
"learning_rate": 3.852040816326531e-05,
"loss": 4.3058,
"step": 744
},
{
"epoch": 0.84,
"learning_rate": 3.826530612244898e-05,
"loss": 4.2608,
"step": 745
},
{
"epoch": 0.84,
"learning_rate": 3.801020408163265e-05,
"loss": 4.4009,
"step": 746
},
{
"epoch": 0.84,
"learning_rate": 3.775510204081633e-05,
"loss": 4.2079,
"step": 747
},
{
"epoch": 0.84,
"learning_rate": 3.7500000000000003e-05,
"loss": 4.3105,
"step": 748
},
{
"epoch": 0.84,
"learning_rate": 3.724489795918368e-05,
"loss": 4.7069,
"step": 749
},
{
"epoch": 0.84,
"learning_rate": 3.698979591836735e-05,
"loss": 4.0475,
"step": 750
},
{
"epoch": 0.84,
"learning_rate": 3.673469387755102e-05,
"loss": 4.2485,
"step": 751
},
{
"epoch": 0.84,
"learning_rate": 3.64795918367347e-05,
"loss": 4.262,
"step": 752
},
{
"epoch": 0.84,
"learning_rate": 3.622448979591837e-05,
"loss": 4.2274,
"step": 753
},
{
"epoch": 0.85,
"learning_rate": 3.596938775510204e-05,
"loss": 4.1542,
"step": 754
},
{
"epoch": 0.85,
"learning_rate": 3.571428571428572e-05,
"loss": 4.4089,
"step": 755
},
{
"epoch": 0.85,
"learning_rate": 3.5459183673469385e-05,
"loss": 4.2441,
"step": 756
},
{
"epoch": 0.85,
"learning_rate": 3.520408163265306e-05,
"loss": 4.198,
"step": 757
},
{
"epoch": 0.85,
"learning_rate": 3.4948979591836736e-05,
"loss": 4.2563,
"step": 758
},
{
"epoch": 0.85,
"learning_rate": 3.469387755102041e-05,
"loss": 4.1014,
"step": 759
},
{
"epoch": 0.85,
"learning_rate": 3.443877551020409e-05,
"loss": 4.2019,
"step": 760
},
{
"epoch": 0.85,
"learning_rate": 3.4183673469387755e-05,
"loss": 4.1753,
"step": 761
},
{
"epoch": 0.85,
"learning_rate": 3.392857142857143e-05,
"loss": 4.2436,
"step": 762
},
{
"epoch": 0.86,
"learning_rate": 3.36734693877551e-05,
"loss": 4.107,
"step": 763
},
{
"epoch": 0.86,
"learning_rate": 3.341836734693878e-05,
"loss": 4.0647,
"step": 764
},
{
"epoch": 0.86,
"learning_rate": 3.316326530612245e-05,
"loss": 4.3057,
"step": 765
},
{
"epoch": 0.86,
"learning_rate": 3.2908163265306125e-05,
"loss": 4.109,
"step": 766
},
{
"epoch": 0.86,
"learning_rate": 3.265306122448979e-05,
"loss": 4.0356,
"step": 767
},
{
"epoch": 0.86,
"learning_rate": 3.239795918367347e-05,
"loss": 4.2105,
"step": 768
},
{
"epoch": 0.86,
"learning_rate": 3.2142857142857144e-05,
"loss": 4.2356,
"step": 769
},
{
"epoch": 0.86,
"learning_rate": 3.188775510204082e-05,
"loss": 4.1832,
"step": 770
},
{
"epoch": 0.86,
"learning_rate": 3.1632653061224494e-05,
"loss": 4.0565,
"step": 771
},
{
"epoch": 0.87,
"learning_rate": 3.137755102040816e-05,
"loss": 4.2768,
"step": 772
},
{
"epoch": 0.87,
"learning_rate": 3.112244897959184e-05,
"loss": 4.369,
"step": 773
},
{
"epoch": 0.87,
"learning_rate": 3.086734693877551e-05,
"loss": 3.935,
"step": 774
},
{
"epoch": 0.87,
"learning_rate": 3.061224489795919e-05,
"loss": 3.9795,
"step": 775
},
{
"epoch": 0.87,
"learning_rate": 3.0357142857142857e-05,
"loss": 4.3018,
"step": 776
},
{
"epoch": 0.87,
"learning_rate": 3.0102040816326533e-05,
"loss": 3.9756,
"step": 777
},
{
"epoch": 0.87,
"learning_rate": 2.9846938775510208e-05,
"loss": 4.3389,
"step": 778
},
{
"epoch": 0.87,
"learning_rate": 2.959183673469388e-05,
"loss": 4.0707,
"step": 779
},
{
"epoch": 0.87,
"learning_rate": 2.9336734693877555e-05,
"loss": 4.0017,
"step": 780
},
{
"epoch": 0.88,
"learning_rate": 2.9081632653061224e-05,
"loss": 3.9922,
"step": 781
},
{
"epoch": 0.88,
"learning_rate": 2.8826530612244902e-05,
"loss": 4.1209,
"step": 782
},
{
"epoch": 0.88,
"learning_rate": 2.857142857142857e-05,
"loss": 4.1468,
"step": 783
},
{
"epoch": 0.88,
"learning_rate": 2.8316326530612246e-05,
"loss": 3.9952,
"step": 784
},
{
"epoch": 0.88,
"learning_rate": 2.8061224489795918e-05,
"loss": 4.1739,
"step": 785
},
{
"epoch": 0.88,
"learning_rate": 2.7806122448979593e-05,
"loss": 3.9362,
"step": 786
},
{
"epoch": 0.88,
"learning_rate": 2.7551020408163265e-05,
"loss": 4.2515,
"step": 787
},
{
"epoch": 0.88,
"learning_rate": 2.729591836734694e-05,
"loss": 4.1879,
"step": 788
},
{
"epoch": 0.88,
"learning_rate": 2.7040816326530616e-05,
"loss": 4.0157,
"step": 789
},
{
"epoch": 0.89,
"learning_rate": 2.6785714285714288e-05,
"loss": 4.1654,
"step": 790
},
{
"epoch": 0.89,
"learning_rate": 2.6530612244897963e-05,
"loss": 4.1267,
"step": 791
},
{
"epoch": 0.89,
"learning_rate": 2.627551020408163e-05,
"loss": 4.1517,
"step": 792
},
{
"epoch": 0.89,
"learning_rate": 2.6020408163265307e-05,
"loss": 4.0912,
"step": 793
},
{
"epoch": 0.89,
"learning_rate": 2.576530612244898e-05,
"loss": 4.3466,
"step": 794
},
{
"epoch": 0.89,
"learning_rate": 2.5510204081632654e-05,
"loss": 4.2573,
"step": 795
},
{
"epoch": 0.89,
"learning_rate": 2.5255102040816326e-05,
"loss": 4.0715,
"step": 796
},
{
"epoch": 0.89,
"learning_rate": 2.5e-05,
"loss": 4.2313,
"step": 797
},
{
"epoch": 0.89,
"learning_rate": 2.4744897959183673e-05,
"loss": 4.2574,
"step": 798
},
{
"epoch": 0.9,
"learning_rate": 2.448979591836735e-05,
"loss": 4.8761,
"step": 799
},
{
"epoch": 0.9,
"learning_rate": 2.423469387755102e-05,
"loss": 4.3985,
"step": 800
},
{
"epoch": 0.9,
"learning_rate": 2.3979591836734696e-05,
"loss": 4.2022,
"step": 801
},
{
"epoch": 0.9,
"learning_rate": 2.372448979591837e-05,
"loss": 4.0457,
"step": 802
},
{
"epoch": 0.9,
"learning_rate": 2.3469387755102043e-05,
"loss": 4.1468,
"step": 803
},
{
"epoch": 0.9,
"learning_rate": 2.3214285714285715e-05,
"loss": 4.1144,
"step": 804
},
{
"epoch": 0.9,
"learning_rate": 2.295918367346939e-05,
"loss": 3.9681,
"step": 805
},
{
"epoch": 0.9,
"learning_rate": 2.2704081632653062e-05,
"loss": 4.1506,
"step": 806
},
{
"epoch": 0.9,
"learning_rate": 2.2448979591836737e-05,
"loss": 4.1706,
"step": 807
},
{
"epoch": 0.91,
"learning_rate": 2.219387755102041e-05,
"loss": 4.0046,
"step": 808
},
{
"epoch": 0.91,
"learning_rate": 2.193877551020408e-05,
"loss": 4.2563,
"step": 809
},
{
"epoch": 0.91,
"learning_rate": 2.1683673469387756e-05,
"loss": 4.0046,
"step": 810
},
{
"epoch": 0.91,
"learning_rate": 2.1428571428571428e-05,
"loss": 4.3614,
"step": 811
},
{
"epoch": 0.91,
"learning_rate": 2.1173469387755103e-05,
"loss": 4.1648,
"step": 812
},
{
"epoch": 0.91,
"learning_rate": 2.091836734693878e-05,
"loss": 3.9555,
"step": 813
},
{
"epoch": 0.91,
"learning_rate": 2.066326530612245e-05,
"loss": 4.0926,
"step": 814
},
{
"epoch": 0.91,
"learning_rate": 2.0408163265306123e-05,
"loss": 4.0064,
"step": 815
},
{
"epoch": 0.91,
"learning_rate": 2.0153061224489798e-05,
"loss": 4.0343,
"step": 816
},
{
"epoch": 0.92,
"learning_rate": 1.989795918367347e-05,
"loss": 4.1501,
"step": 817
},
{
"epoch": 0.92,
"learning_rate": 1.9642857142857145e-05,
"loss": 4.1466,
"step": 818
},
{
"epoch": 0.92,
"learning_rate": 1.9387755102040817e-05,
"loss": 4.0237,
"step": 819
},
{
"epoch": 0.92,
"learning_rate": 1.913265306122449e-05,
"loss": 4.127,
"step": 820
},
{
"epoch": 0.92,
"learning_rate": 1.8877551020408164e-05,
"loss": 4.0763,
"step": 821
},
{
"epoch": 0.92,
"learning_rate": 1.862244897959184e-05,
"loss": 4.0363,
"step": 822
},
{
"epoch": 0.92,
"learning_rate": 1.836734693877551e-05,
"loss": 4.1314,
"step": 823
},
{
"epoch": 0.92,
"learning_rate": 1.8112244897959187e-05,
"loss": 4.1815,
"step": 824
},
{
"epoch": 0.92,
"learning_rate": 1.785714285714286e-05,
"loss": 4.3073,
"step": 825
},
{
"epoch": 0.93,
"learning_rate": 1.760204081632653e-05,
"loss": 4.2567,
"step": 826
},
{
"epoch": 0.93,
"learning_rate": 1.7346938775510206e-05,
"loss": 4.1142,
"step": 827
},
{
"epoch": 0.93,
"learning_rate": 1.7091836734693878e-05,
"loss": 3.9881,
"step": 828
},
{
"epoch": 0.93,
"learning_rate": 1.683673469387755e-05,
"loss": 3.9615,
"step": 829
},
{
"epoch": 0.93,
"learning_rate": 1.6581632653061225e-05,
"loss": 4.2256,
"step": 830
},
{
"epoch": 0.93,
"learning_rate": 1.6326530612244897e-05,
"loss": 3.9554,
"step": 831
},
{
"epoch": 0.93,
"learning_rate": 1.6071428571428572e-05,
"loss": 4.0022,
"step": 832
},
{
"epoch": 0.93,
"learning_rate": 1.5816326530612247e-05,
"loss": 4.1985,
"step": 833
},
{
"epoch": 0.93,
"learning_rate": 1.556122448979592e-05,
"loss": 4.0862,
"step": 834
},
{
"epoch": 0.94,
"learning_rate": 1.5306122448979594e-05,
"loss": 4.065,
"step": 835
},
{
"epoch": 0.94,
"learning_rate": 1.5051020408163266e-05,
"loss": 4.0925,
"step": 836
},
{
"epoch": 0.94,
"learning_rate": 1.479591836734694e-05,
"loss": 4.0593,
"step": 837
},
{
"epoch": 0.94,
"learning_rate": 1.4540816326530612e-05,
"loss": 4.3442,
"step": 838
},
{
"epoch": 0.94,
"learning_rate": 1.4285714285714285e-05,
"loss": 3.9675,
"step": 839
},
{
"epoch": 0.94,
"learning_rate": 1.4030612244897959e-05,
"loss": 4.1694,
"step": 840
},
{
"epoch": 0.94,
"learning_rate": 1.3775510204081633e-05,
"loss": 4.2668,
"step": 841
},
{
"epoch": 0.94,
"learning_rate": 1.3520408163265308e-05,
"loss": 4.1724,
"step": 842
},
{
"epoch": 0.95,
"learning_rate": 1.3265306122448982e-05,
"loss": 4.2712,
"step": 843
},
{
"epoch": 0.95,
"learning_rate": 1.3010204081632653e-05,
"loss": 4.2255,
"step": 844
},
{
"epoch": 0.95,
"learning_rate": 1.2755102040816327e-05,
"loss": 4.3171,
"step": 845
},
{
"epoch": 0.95,
"learning_rate": 1.25e-05,
"loss": 4.6056,
"step": 846
},
{
"epoch": 0.95,
"learning_rate": 1.2244897959183674e-05,
"loss": 4.0734,
"step": 847
},
{
"epoch": 0.95,
"learning_rate": 1.1989795918367348e-05,
"loss": 4.725,
"step": 848
},
{
"epoch": 0.95,
"learning_rate": 1.1734693877551021e-05,
"loss": 4.3384,
"step": 849
},
{
"epoch": 0.95,
"learning_rate": 1.1479591836734695e-05,
"loss": 4.1118,
"step": 850
},
{
"epoch": 0.95,
"learning_rate": 1.1224489795918369e-05,
"loss": 4.1718,
"step": 851
},
{
"epoch": 0.96,
"learning_rate": 1.096938775510204e-05,
"loss": 4.1493,
"step": 852
},
{
"epoch": 0.96,
"learning_rate": 1.0714285714285714e-05,
"loss": 4.0594,
"step": 853
},
{
"epoch": 0.96,
"learning_rate": 1.045918367346939e-05,
"loss": 4.0984,
"step": 854
},
{
"epoch": 0.96,
"learning_rate": 1.0204081632653061e-05,
"loss": 4.0627,
"step": 855
},
{
"epoch": 0.96,
"learning_rate": 9.948979591836735e-06,
"loss": 4.0885,
"step": 856
},
{
"epoch": 0.96,
"learning_rate": 9.693877551020408e-06,
"loss": 4.1545,
"step": 857
},
{
"epoch": 0.96,
"learning_rate": 9.438775510204082e-06,
"loss": 4.1281,
"step": 858
},
{
"epoch": 0.96,
"learning_rate": 9.183673469387756e-06,
"loss": 3.9807,
"step": 859
},
{
"epoch": 0.96,
"learning_rate": 8.92857142857143e-06,
"loss": 4.1072,
"step": 860
},
{
"epoch": 0.97,
"learning_rate": 8.673469387755103e-06,
"loss": 4.2454,
"step": 861
},
{
"epoch": 0.97,
"learning_rate": 8.418367346938775e-06,
"loss": 4.0128,
"step": 862
},
{
"epoch": 0.97,
"learning_rate": 8.163265306122448e-06,
"loss": 4.1705,
"step": 863
},
{
"epoch": 0.97,
"learning_rate": 7.908163265306124e-06,
"loss": 4.3512,
"step": 864
},
{
"epoch": 0.97,
"learning_rate": 7.653061224489797e-06,
"loss": 4.264,
"step": 865
},
{
"epoch": 0.97,
"learning_rate": 7.39795918367347e-06,
"loss": 3.9359,
"step": 866
},
{
"epoch": 0.97,
"learning_rate": 7.142857142857143e-06,
"loss": 4.1472,
"step": 867
},
{
"epoch": 0.97,
"learning_rate": 6.887755102040816e-06,
"loss": 3.9617,
"step": 868
},
{
"epoch": 0.97,
"learning_rate": 6.632653061224491e-06,
"loss": 4.1297,
"step": 869
},
{
"epoch": 0.98,
"learning_rate": 6.3775510204081635e-06,
"loss": 4.2363,
"step": 870
},
{
"epoch": 0.98,
"learning_rate": 6.122448979591837e-06,
"loss": 3.966,
"step": 871
},
{
"epoch": 0.98,
"learning_rate": 5.867346938775511e-06,
"loss": 4.1766,
"step": 872
},
{
"epoch": 0.98,
"learning_rate": 5.612244897959184e-06,
"loss": 4.1703,
"step": 873
},
{
"epoch": 0.98,
"learning_rate": 5.357142857142857e-06,
"loss": 4.138,
"step": 874
},
{
"epoch": 0.98,
"learning_rate": 5.102040816326531e-06,
"loss": 4.2855,
"step": 875
},
{
"epoch": 0.98,
"learning_rate": 4.846938775510204e-06,
"loss": 4.1395,
"step": 876
},
{
"epoch": 0.98,
"learning_rate": 4.591836734693878e-06,
"loss": 4.0096,
"step": 877
},
{
"epoch": 0.98,
"learning_rate": 4.336734693877551e-06,
"loss": 4.0787,
"step": 878
},
{
"epoch": 0.99,
"learning_rate": 4.081632653061224e-06,
"loss": 4.1907,
"step": 879
},
{
"epoch": 0.99,
"learning_rate": 3.826530612244899e-06,
"loss": 4.348,
"step": 880
},
{
"epoch": 0.99,
"learning_rate": 3.5714285714285714e-06,
"loss": 3.9686,
"step": 881
},
{
"epoch": 0.99,
"learning_rate": 3.3163265306122454e-06,
"loss": 4.0974,
"step": 882
},
{
"epoch": 0.99,
"learning_rate": 3.0612244897959185e-06,
"loss": 4.0149,
"step": 883
},
{
"epoch": 0.99,
"learning_rate": 2.806122448979592e-06,
"loss": 4.0868,
"step": 884
},
{
"epoch": 0.99,
"learning_rate": 2.5510204081632653e-06,
"loss": 4.0935,
"step": 885
},
{
"epoch": 0.99,
"learning_rate": 2.295918367346939e-06,
"loss": 4.2536,
"step": 886
},
{
"epoch": 0.99,
"learning_rate": 2.040816326530612e-06,
"loss": 3.9868,
"step": 887
},
{
"epoch": 1.0,
"learning_rate": 1.7857142857142857e-06,
"loss": 4.2264,
"step": 888
},
{
"epoch": 1.0,
"learning_rate": 1.5306122448979593e-06,
"loss": 4.2771,
"step": 889
},
{
"epoch": 1.0,
"learning_rate": 1.2755102040816327e-06,
"loss": 4.0625,
"step": 890
},
{
"epoch": 1.0,
"learning_rate": 1.020408163265306e-06,
"loss": 4.2802,
"step": 891
},
{
"epoch": 1.0,
"learning_rate": 7.653061224489796e-07,
"loss": 3.737,
"step": 892
},
{
"epoch": 1.0,
"step": 892,
"total_flos": 0.0,
"train_loss": 4.3097993645967385,
"train_runtime": 5935.828,
"train_samples_per_second": 4.808,
"train_steps_per_second": 0.15
}
],
"max_steps": 892,
"num_train_epochs": 1,
"total_flos": 0.0,
"trial_name": null,
"trial_params": null
}