{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 0.9987389659520807, |
|
"global_step": 297, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 2.0000000000000002e-07, |
|
"loss": 4.7416, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0000000000000003e-07, |
|
"loss": 4.822, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 6.000000000000001e-07, |
|
"loss": 4.8917, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 8.000000000000001e-07, |
|
"loss": 4.833, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 1.0000000000000002e-06, |
|
"loss": 4.7162, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 1.0000000000000002e-06, |
|
"loss": 4.8449, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 1.2000000000000002e-06, |
|
"loss": 4.8237, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 1.4000000000000001e-06, |
|
"loss": 4.8885, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 1.6000000000000001e-06, |
|
"loss": 4.7995, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 1.8e-06, |
|
"loss": 4.6334, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 2.0000000000000003e-06, |
|
"loss": 4.765, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 2.2e-06, |
|
"loss": 4.7846, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 2.4000000000000003e-06, |
|
"loss": 4.6955, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 2.6e-06, |
|
"loss": 4.6238, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 2.8000000000000003e-06, |
|
"loss": 4.6308, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 3e-06, |
|
"loss": 4.5522, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 3.2000000000000003e-06, |
|
"loss": 4.6306, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 3.4000000000000005e-06, |
|
"loss": 4.6244, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 3.6e-06, |
|
"loss": 4.5672, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 3.8e-06, |
|
"loss": 4.6278, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 4.000000000000001e-06, |
|
"loss": 4.6397, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 4.2000000000000004e-06, |
|
"loss": 4.5934, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 4.4e-06, |
|
"loss": 4.4719, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 4.6e-06, |
|
"loss": 4.6009, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 4.800000000000001e-06, |
|
"loss": 4.4169, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 5e-06, |
|
"loss": 4.7091, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 5.2e-06, |
|
"loss": 4.4467, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 5.4e-06, |
|
"loss": 4.4314, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 5.600000000000001e-06, |
|
"loss": 4.3324, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 5.8e-06, |
|
"loss": 4.4922, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 6e-06, |
|
"loss": 4.4856, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 6.2e-06, |
|
"loss": 4.3944, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 6.4000000000000006e-06, |
|
"loss": 4.4152, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 6.6e-06, |
|
"loss": 4.3853, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 6.800000000000001e-06, |
|
"loss": 4.3918, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 7.000000000000001e-06, |
|
"loss": 4.3543, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 7.2e-06, |
|
"loss": 4.4801, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 7.4e-06, |
|
"loss": 4.4362, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 7.6e-06, |
|
"loss": 4.4816, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 7.8e-06, |
|
"loss": 4.347, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 8.000000000000001e-06, |
|
"loss": 4.4076, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 8.200000000000001e-06, |
|
"loss": 4.4635, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 8.400000000000001e-06, |
|
"loss": 4.2502, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 8.599999999999999e-06, |
|
"loss": 4.5461, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 8.8e-06, |
|
"loss": 4.4963, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 9e-06, |
|
"loss": 4.5238, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 9.2e-06, |
|
"loss": 4.5026, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 9.4e-06, |
|
"loss": 4.5945, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 9.600000000000001e-06, |
|
"loss": 4.7031, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 9.800000000000001e-06, |
|
"loss": 4.415, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 1e-05, |
|
"loss": 4.3481, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 1.02e-05, |
|
"loss": 4.2441, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 1.04e-05, |
|
"loss": 4.2067, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 1.06e-05, |
|
"loss": 4.206, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 1.08e-05, |
|
"loss": 4.1688, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 1.1000000000000001e-05, |
|
"loss": 4.1921, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 1.1200000000000001e-05, |
|
"loss": 4.3179, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 1.1400000000000001e-05, |
|
"loss": 4.3096, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 1.16e-05, |
|
"loss": 4.2819, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 1.18e-05, |
|
"loss": 4.2195, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 1.2e-05, |
|
"loss": 4.2763, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 1.22e-05, |
|
"loss": 4.1643, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 1.24e-05, |
|
"loss": 4.2968, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 1.2600000000000001e-05, |
|
"loss": 4.2292, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 1.2800000000000001e-05, |
|
"loss": 4.2935, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 1.3000000000000001e-05, |
|
"loss": 4.3136, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 1.32e-05, |
|
"loss": 4.2581, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 1.3400000000000002e-05, |
|
"loss": 4.2987, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 1.3600000000000002e-05, |
|
"loss": 4.2362, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 1.3800000000000002e-05, |
|
"loss": 4.1762, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 1.4000000000000001e-05, |
|
"loss": 4.1934, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 1.42e-05, |
|
"loss": 4.309, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 1.44e-05, |
|
"loss": 4.2195, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 1.4599999999999999e-05, |
|
"loss": 4.2148, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 1.48e-05, |
|
"loss": 4.3228, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 1.5e-05, |
|
"loss": 4.1966, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 1.52e-05, |
|
"loss": 4.1663, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 1.54e-05, |
|
"loss": 4.2249, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.56e-05, |
|
"loss": 4.1044, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.58e-05, |
|
"loss": 4.2918, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.6000000000000003e-05, |
|
"loss": 4.215, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.62e-05, |
|
"loss": 4.2246, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.6400000000000002e-05, |
|
"loss": 4.1984, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.66e-05, |
|
"loss": 4.2291, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.6800000000000002e-05, |
|
"loss": 4.337, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.7000000000000003e-05, |
|
"loss": 4.2152, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.7199999999999998e-05, |
|
"loss": 4.2546, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.74e-05, |
|
"loss": 4.2966, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.76e-05, |
|
"loss": 4.3031, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.78e-05, |
|
"loss": 4.3245, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.8e-05, |
|
"loss": 4.3341, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.8200000000000002e-05, |
|
"loss": 4.4108, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.84e-05, |
|
"loss": 4.3563, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.86e-05, |
|
"loss": 4.294, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.88e-05, |
|
"loss": 4.2507, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.9e-05, |
|
"loss": 4.4097, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.9200000000000003e-05, |
|
"loss": 4.6024, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.94e-05, |
|
"loss": 4.5406, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.9600000000000002e-05, |
|
"loss": 4.5267, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.9800000000000004e-05, |
|
"loss": 4.5753, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 2e-05, |
|
"loss": 4.1584, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 2.0200000000000003e-05, |
|
"loss": 4.2193, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 2.04e-05, |
|
"loss": 4.2235, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 2.06e-05, |
|
"loss": 4.1147, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 2.08e-05, |
|
"loss": 4.2271, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 2.1e-05, |
|
"loss": 4.14, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 2.12e-05, |
|
"loss": 4.1039, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 2.1400000000000002e-05, |
|
"loss": 4.2112, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 2.16e-05, |
|
"loss": 4.2219, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 2.18e-05, |
|
"loss": 4.2005, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 2.2000000000000003e-05, |
|
"loss": 4.2685, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 2.22e-05, |
|
"loss": 4.1964, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 2.2400000000000002e-05, |
|
"loss": 4.1327, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 2.26e-05, |
|
"loss": 4.1996, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 2.2800000000000002e-05, |
|
"loss": 4.1549, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 2.3000000000000003e-05, |
|
"loss": 4.1736, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 2.32e-05, |
|
"loss": 4.1134, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 2.3400000000000003e-05, |
|
"loss": 4.1522, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 2.36e-05, |
|
"loss": 4.2019, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 2.38e-05, |
|
"loss": 4.208, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 2.4e-05, |
|
"loss": 4.2208, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 2.4200000000000002e-05, |
|
"loss": 4.2966, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 2.44e-05, |
|
"loss": 4.1012, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 2.46e-05, |
|
"loss": 4.2317, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 2.48e-05, |
|
"loss": 4.2931, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 2.5e-05, |
|
"loss": 4.1984, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 2.5200000000000003e-05, |
|
"loss": 4.2407, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 2.54e-05, |
|
"loss": 4.111, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 2.5600000000000002e-05, |
|
"loss": 4.2664, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 2.58e-05, |
|
"loss": 4.3569, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 2.6000000000000002e-05, |
|
"loss": 4.2576, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 2.6200000000000003e-05, |
|
"loss": 4.1036, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 2.64e-05, |
|
"loss": 4.2504, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 2.6600000000000003e-05, |
|
"loss": 4.1388, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 2.6800000000000004e-05, |
|
"loss": 4.3289, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 2.7000000000000002e-05, |
|
"loss": 4.2359, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 2.7200000000000004e-05, |
|
"loss": 4.1676, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 2.7400000000000002e-05, |
|
"loss": 4.1007, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 2.7600000000000003e-05, |
|
"loss": 4.1463, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 2.7800000000000005e-05, |
|
"loss": 4.1603, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 2.8000000000000003e-05, |
|
"loss": 4.3578, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 2.8199999999999998e-05, |
|
"loss": 4.1484, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 2.84e-05, |
|
"loss": 4.2449, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 2.86e-05, |
|
"loss": 4.2826, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 2.88e-05, |
|
"loss": 4.3742, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 2.9e-05, |
|
"loss": 4.1835, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 2.9199999999999998e-05, |
|
"loss": 4.3268, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 2.94e-05, |
|
"loss": 4.454, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 2.96e-05, |
|
"loss": 4.4426, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 2.98e-05, |
|
"loss": 4.5606, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 3e-05, |
|
"loss": 4.2171, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 3.02e-05, |
|
"loss": 4.1916, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 3.04e-05, |
|
"loss": 4.2203, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 3.06e-05, |
|
"loss": 4.1566, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 3.08e-05, |
|
"loss": 4.1767, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 3.1e-05, |
|
"loss": 4.1967, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 3.12e-05, |
|
"loss": 4.1163, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 3.1400000000000004e-05, |
|
"loss": 4.1687, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 3.16e-05, |
|
"loss": 4.1796, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 3.18e-05, |
|
"loss": 4.1479, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 3.2000000000000005e-05, |
|
"loss": 4.1677, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 3.2200000000000003e-05, |
|
"loss": 4.1325, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 3.24e-05, |
|
"loss": 4.203, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 3.26e-05, |
|
"loss": 4.1248, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 3.2800000000000004e-05, |
|
"loss": 4.1852, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 3.3e-05, |
|
"loss": 4.1591, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 3.32e-05, |
|
"loss": 4.1522, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 3.3400000000000005e-05, |
|
"loss": 4.1526, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 3.3600000000000004e-05, |
|
"loss": 4.1457, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 3.38e-05, |
|
"loss": 4.0866, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 3.4000000000000007e-05, |
|
"loss": 4.0714, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 3.4200000000000005e-05, |
|
"loss": 4.1798, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 3.4399999999999996e-05, |
|
"loss": 4.032, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 3.46e-05, |
|
"loss": 4.1041, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 3.48e-05, |
|
"loss": 4.2255, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 3.5e-05, |
|
"loss": 4.1027, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 3.52e-05, |
|
"loss": 4.2227, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 3.54e-05, |
|
"loss": 4.071, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 3.56e-05, |
|
"loss": 4.1006, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 3.58e-05, |
|
"loss": 4.2902, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 3.6e-05, |
|
"loss": 4.1315, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 3.62e-05, |
|
"loss": 4.0543, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 3.6400000000000004e-05, |
|
"loss": 4.0681, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 3.66e-05, |
|
"loss": 4.1713, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 3.68e-05, |
|
"loss": 4.2008, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 3.7e-05, |
|
"loss": 4.1546, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 3.72e-05, |
|
"loss": 4.1709, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 3.74e-05, |
|
"loss": 4.1473, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 3.76e-05, |
|
"loss": 4.1097, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 3.7800000000000004e-05, |
|
"loss": 4.2103, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 3.8e-05, |
|
"loss": 4.2618, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 3.82e-05, |
|
"loss": 4.2977, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 3.8400000000000005e-05, |
|
"loss": 4.3615, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 3.86e-05, |
|
"loss": 4.2861, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 3.88e-05, |
|
"loss": 4.2774, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 3.9000000000000006e-05, |
|
"loss": 4.231, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 3.9200000000000004e-05, |
|
"loss": 4.0982, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 3.94e-05, |
|
"loss": 4.6038, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 3.960000000000001e-05, |
|
"loss": 4.3139, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 3.9800000000000005e-05, |
|
"loss": 4.5138, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4e-05, |
|
"loss": 4.2164, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.02e-05, |
|
"loss": 4.1559, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.0400000000000006e-05, |
|
"loss": 4.1452, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.0600000000000004e-05, |
|
"loss": 4.0741, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.08e-05, |
|
"loss": 4.1215, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.1e-05, |
|
"loss": 4.0996, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.12e-05, |
|
"loss": 4.164, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.14e-05, |
|
"loss": 4.1406, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.16e-05, |
|
"loss": 4.1043, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.18e-05, |
|
"loss": 4.2183, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.2e-05, |
|
"loss": 3.9963, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.22e-05, |
|
"loss": 4.1352, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 4.24e-05, |
|
"loss": 4.1756, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 4.26e-05, |
|
"loss": 4.1339, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 4.2800000000000004e-05, |
|
"loss": 4.1696, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 4.3e-05, |
|
"loss": 4.1555, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 4.32e-05, |
|
"loss": 4.104, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 4.3400000000000005e-05, |
|
"loss": 4.0657, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 4.36e-05, |
|
"loss": 4.0738, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 4.38e-05, |
|
"loss": 4.1514, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 4.4000000000000006e-05, |
|
"loss": 4.0798, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 4.4200000000000004e-05, |
|
"loss": 4.0354, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 4.44e-05, |
|
"loss": 4.0835, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 4.46e-05, |
|
"loss": 4.1241, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 4.4800000000000005e-05, |
|
"loss": 4.1139, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 4.5e-05, |
|
"loss": 4.1373, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 4.52e-05, |
|
"loss": 4.1314, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 4.5400000000000006e-05, |
|
"loss": 4.0515, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 4.5600000000000004e-05, |
|
"loss": 4.0908, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 4.58e-05, |
|
"loss": 4.1007, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 4.600000000000001e-05, |
|
"loss": 4.2356, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 4.6200000000000005e-05, |
|
"loss": 4.0633, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 4.64e-05, |
|
"loss": 4.1935, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 4.660000000000001e-05, |
|
"loss": 4.1731, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 4.6800000000000006e-05, |
|
"loss": 4.2143, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 4.7e-05, |
|
"loss": 4.0391, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 4.72e-05, |
|
"loss": 4.1392, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 4.74e-05, |
|
"loss": 4.2716, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 4.76e-05, |
|
"loss": 4.0908, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 4.78e-05, |
|
"loss": 4.1365, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 4.8e-05, |
|
"loss": 4.2666, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 4.82e-05, |
|
"loss": 4.1715, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 4.8400000000000004e-05, |
|
"loss": 4.1233, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 4.86e-05, |
|
"loss": 4.1594, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 4.88e-05, |
|
"loss": 4.1508, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 4.9e-05, |
|
"loss": 4.3186, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 4.92e-05, |
|
"loss": 4.2761, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 4.94e-05, |
|
"loss": 4.2375, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 4.96e-05, |
|
"loss": 4.3505, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 4.9800000000000004e-05, |
|
"loss": 4.3046, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 5e-05, |
|
"loss": 4.23, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 5.02e-05, |
|
"loss": 4.1051, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 5.0400000000000005e-05, |
|
"loss": 4.0697, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 5.0600000000000003e-05, |
|
"loss": 4.0968, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 5.08e-05, |
|
"loss": 4.1046, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 5.1000000000000006e-05, |
|
"loss": 4.0778, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 5.1200000000000004e-05, |
|
"loss": 4.2512, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 5.14e-05, |
|
"loss": 4.0971, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 5.16e-05, |
|
"loss": 4.1317, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 5.1800000000000005e-05, |
|
"loss": 4.0438, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 5.2000000000000004e-05, |
|
"loss": 4.1585, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 5.22e-05, |
|
"loss": 4.1922, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 5.2400000000000007e-05, |
|
"loss": 4.0355, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 5.2600000000000005e-05, |
|
"loss": 4.0942, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 5.28e-05, |
|
"loss": 4.0823, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 5.300000000000001e-05, |
|
"loss": 4.2057, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 5.3200000000000006e-05, |
|
"loss": 4.1053, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 5.3400000000000004e-05, |
|
"loss": 4.0612, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 5.360000000000001e-05, |
|
"loss": 4.0192, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 5.380000000000001e-05, |
|
"loss": 4.1746, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 5.4000000000000005e-05, |
|
"loss": 4.0217, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 5.420000000000001e-05, |
|
"loss": 4.0175, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 5.440000000000001e-05, |
|
"loss": 4.0711, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 5.4600000000000006e-05, |
|
"loss": 4.0552, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 5.4800000000000004e-05, |
|
"loss": 4.1794, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 5.500000000000001e-05, |
|
"loss": 4.1307, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 5.520000000000001e-05, |
|
"loss": 4.1143, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 5.5400000000000005e-05, |
|
"loss": 4.0236, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 5.560000000000001e-05, |
|
"loss": 4.0805, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 5.580000000000001e-05, |
|
"loss": 4.0284, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 5.6000000000000006e-05, |
|
"loss": 4.1211, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 5.620000000000001e-05, |
|
"loss": 4.0672, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 5.6399999999999995e-05, |
|
"loss": 4.2571, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 5.66e-05, |
|
"loss": 4.0389, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 5.68e-05, |
|
"loss": 4.1248, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 5.6999999999999996e-05, |
|
"loss": 4.1542, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 5.72e-05, |
|
"loss": 4.0784, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 5.74e-05, |
|
"loss": 4.0661, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 5.76e-05, |
|
"loss": 4.0908, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 5.7799999999999995e-05, |
|
"loss": 4.179, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 5.8e-05, |
|
"loss": 4.17, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 5.82e-05, |
|
"loss": 4.2625, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 5.8399999999999997e-05, |
|
"loss": 4.1965, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 5.86e-05, |
|
"loss": 4.5008, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 5.88e-05, |
|
"loss": 4.1809, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 5.9e-05, |
|
"loss": 4.5448, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 5.92e-05, |
|
"loss": 4.4749, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"step": 297, |
|
"total_flos": 0.0, |
|
"train_loss": 4.260829443883414, |
|
"train_runtime": 4532.5114, |
|
"train_samples_per_second": 6.296, |
|
"train_steps_per_second": 0.066 |
|
} |
|
], |
|
"max_steps": 297, |
|
"num_train_epochs": 1, |
|
"total_flos": 0.0, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |