ko-TinyLlama-1.1B-vocab-extend-mix-corpus-basic-tokenizer-eeve-stage1/checkpoint-1500/trainer_state.json
{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.7820136852394917,
  "eval_steps": 500,
  "global_step": 1500,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {"epoch": 0.0, "grad_norm": 0.0, "learning_rate": 3e-07, "loss": 1.65, "step": 1},
    {"epoch": 0.0, "grad_norm": 0.0, "learning_rate": 6e-07, "loss": 1.655, "step": 2},
    {"epoch": 0.0, "grad_norm": 0.0, "learning_rate": 9e-07, "loss": 1.6636, "step": 3},
    {"epoch": 0.0, "grad_norm": 0.0, "learning_rate": 1.2e-06, "loss": 1.6423, "step": 4},
    {"epoch": 0.0, "grad_norm": 0.0, "learning_rate": 1.4999999999999998e-06, "loss": 1.6622, "step": 5},
    {"epoch": 0.0, "grad_norm": 0.0, "learning_rate": 1.8e-06, "loss": 1.6576, "step": 6},
    {"epoch": 0.0, "grad_norm": 0.0, "learning_rate": 2.1e-06, "loss": 1.6533, "step": 7},
    {"epoch": 0.0, "grad_norm": 0.0, "learning_rate": 2.4e-06, "loss": 1.6428, "step": 8},
    {"epoch": 0.0, "grad_norm": 0.0, "learning_rate": 2.6999999999999996e-06, "loss": 1.6579, "step": 9},
    {"epoch": 0.01, "grad_norm": 0.0, "learning_rate": 2.9999999999999997e-06, "loss": 1.6355, "step": 10},
    {"epoch": 0.01, "grad_norm": 0.0, "learning_rate": 3.2999999999999993e-06, "loss": 1.6575, "step": 11},
    {"epoch": 0.01, "grad_norm": 0.0, "learning_rate": 3.6e-06, "loss": 1.663, "step": 12},
    {"epoch": 0.01, "grad_norm": 0.0, "learning_rate": 3.899999999999999e-06, "loss": 1.6434, "step": 13},
    {"epoch": 0.01, "grad_norm": 0.0, "learning_rate": 4.2e-06, "loss": 1.6578, "step": 14},
    {"epoch": 0.01, "grad_norm": 0.0, "learning_rate": 4.499999999999999e-06, "loss": 1.6718, "step": 15},
    {"epoch": 0.01, "grad_norm": 0.0, "learning_rate": 4.8e-06, "loss": 1.6559, "step": 16},
    {"epoch": 0.01, "grad_norm": 0.0, "learning_rate": 5.1e-06, "loss": 1.6452, "step": 17},
    {"epoch": 0.01, "grad_norm": 0.0, "learning_rate": 5.399999999999999e-06, "loss": 1.6658, "step": 18},
    {"epoch": 0.01, "grad_norm": 0.0, "learning_rate": 5.7e-06, "loss": 1.6565, "step": 19},
    {"epoch": 0.01, "grad_norm": 0.0, "learning_rate": 5.999999999999999e-06, "loss": 1.6451, "step": 20},
    {"epoch": 0.01, "grad_norm": 0.0, "learning_rate": 6.3e-06, "loss": 1.6478, "step": 21},
    {"epoch": 0.01, "grad_norm": 0.0, "learning_rate": 6.599999999999999e-06, "loss": 1.6429, "step": 22},
    {"epoch": 0.01, "grad_norm": 0.0, "learning_rate": 6.899999999999999e-06, "loss": 1.6567, "step": 23},
    {"epoch": 0.01, "grad_norm": 0.0, "learning_rate": 7.2e-06, "loss": 1.6521, "step": 24},
    {"epoch": 0.01, "grad_norm": 0.0, "learning_rate": 7.499999999999999e-06, "loss": 1.6475, "step": 25},
    {"epoch": 0.01, "grad_norm": 0.0, "learning_rate": 7.799999999999998e-06, "loss": 1.6618, "step": 26},
    {"epoch": 0.01, "grad_norm": 0.0, "learning_rate": 8.099999999999999e-06, "loss": 1.6389, "step": 27},
    {"epoch": 0.01, "grad_norm": 0.0, "learning_rate": 8.4e-06, "loss": 1.6533, "step": 28},
    {"epoch": 0.02, "grad_norm": 0.0, "learning_rate": 8.7e-06, "loss": 1.6586, "step": 29},
    {"epoch": 0.02, "grad_norm": 0.0, "learning_rate": 8.999999999999999e-06, "loss": 1.6399, "step": 30},
    {"epoch": 0.02, "grad_norm": 0.0, "learning_rate": 9.299999999999999e-06, "loss": 1.6378, "step": 31},
    {"epoch": 0.02, "grad_norm": 0.0, "learning_rate": 9.6e-06, "loss": 1.6564, "step": 32},
    {"epoch": 0.02, "grad_norm": 0.0, "learning_rate": 9.9e-06, "loss": 1.6542, "step": 33},
    {"epoch": 0.02, "grad_norm": 0.0, "learning_rate": 1.02e-05, "loss": 1.6466, "step": 34},
    {"epoch": 0.02, "grad_norm": 0.0, "learning_rate": 1.05e-05, "loss": 1.6507, "step": 35},
    {"epoch": 0.02, "grad_norm": 0.0, "learning_rate": 1.0799999999999998e-05, "loss": 1.6606, "step": 36},
    {"epoch": 0.02, "grad_norm": 0.0, "learning_rate": 1.1099999999999999e-05, "loss": 1.6563, "step": 37},
    {"epoch": 0.02, "grad_norm": 0.0, "learning_rate": 1.14e-05, "loss": 1.6528, "step": 38},
    {"epoch": 0.02, "grad_norm": 0.0, "learning_rate": 1.17e-05, "loss": 1.6479, "step": 39},
    {"epoch": 0.02, "grad_norm": 0.0, "learning_rate": 1.1999999999999999e-05, "loss": 1.6407, "step": 40},
    {"epoch": 0.02, "grad_norm": 0.0, "learning_rate": 1.2299999999999999e-05, "loss": 1.6383, "step": 41},
    {"epoch": 0.02, "grad_norm": 0.0, "learning_rate": 1.26e-05, "loss": 1.6567, "step": 42},
    {"epoch": 0.02, "grad_norm": 0.0, "learning_rate": 1.2899999999999998e-05, "loss": 1.6459, "step": 43},
    {"epoch": 0.02, "grad_norm": 0.0, "learning_rate": 1.3199999999999997e-05, "loss": 1.6608, "step": 44},
    {"epoch": 0.02, "grad_norm": 0.0, "learning_rate": 1.3499999999999998e-05, "loss": 1.6503, "step": 45},
    {"epoch": 0.02, "grad_norm": 0.0, "learning_rate": 1.3799999999999998e-05, "loss": 1.6465, "step": 46},
    {"epoch": 0.02, "grad_norm": 0.0, "learning_rate": 1.4099999999999999e-05, "loss": 1.6419, "step": 47},
    {"epoch": 0.03, "grad_norm": 0.0, "learning_rate": 1.44e-05, "loss": 1.6573, "step": 48},
    {"epoch": 0.03, "grad_norm": 0.0, "learning_rate": 1.47e-05, "loss": 1.6463, "step": 49},
    {"epoch": 0.03, "grad_norm": 0.0, "learning_rate": 1.4999999999999999e-05, "loss": 1.6338, "step": 50},
    {"epoch": 0.03, "grad_norm": 0.0, "learning_rate": 1.53e-05, "loss": 1.6278, "step": 51},
    {"epoch": 0.03, "grad_norm": 0.0, "learning_rate": 1.5599999999999996e-05, "loss": 1.6685, "step": 52},
    {"epoch": 0.03, "grad_norm": 0.0, "learning_rate": 1.5899999999999997e-05, "loss": 1.638, "step": 53},
    {"epoch": 0.03, "grad_norm": 0.0, "learning_rate": 1.6199999999999997e-05, "loss": 1.6625, "step": 54},
    {"epoch": 0.03, "grad_norm": 0.0, "learning_rate": 1.6499999999999998e-05, "loss": 1.6759, "step": 55},
    {"epoch": 0.03, "grad_norm": 0.0, "learning_rate": 1.68e-05, "loss": 1.6565, "step": 56},
    {"epoch": 0.03, "grad_norm": 0.0, "learning_rate": 1.71e-05, "loss": 1.6412, "step": 57},
    {"epoch": 0.03, "grad_norm": 0.0, "learning_rate": 1.74e-05, "loss": 1.6517, "step": 58},
    {"epoch": 0.03, "grad_norm": 0.0, "learning_rate": 1.7699999999999997e-05, "loss": 1.6694, "step": 59},
    {"epoch": 0.03, "grad_norm": 0.0, "learning_rate": 1.7999999999999997e-05, "loss": 1.6789, "step": 60},
    {"epoch": 0.03, "grad_norm": 0.0, "learning_rate": 1.8299999999999998e-05, "loss": 1.6492, "step": 61},
    {"epoch": 0.03, "grad_norm": 0.0, "learning_rate": 1.8599999999999998e-05, "loss": 1.6592, "step": 62},
    {"epoch": 0.03, "grad_norm": 0.0, "learning_rate": 1.89e-05, "loss": 1.6668, "step": 63},
    {"epoch": 0.03, "grad_norm": 0.0, "learning_rate": 1.92e-05, "loss": 1.6491, "step": 64},
    {"epoch": 0.03, "grad_norm": 0.0, "learning_rate": 1.95e-05, "loss": 1.6498, "step": 65},
    {"epoch": 0.03, "grad_norm": 0.0, "learning_rate": 1.98e-05, "loss": 1.631, "step": 66},
    {"epoch": 0.03, "grad_norm": 0.0, "learning_rate": 2.01e-05, "loss": 1.6547, "step": 67},
    {"epoch": 0.04, "grad_norm": 0.0, "learning_rate": 2.04e-05, "loss": 1.652, "step": 68},
    {"epoch": 0.04, "grad_norm": 0.0, "learning_rate": 2.07e-05, "loss": 1.6642, "step": 69},
    {"epoch": 0.04, "grad_norm": 0.0, "learning_rate": 2.1e-05, "loss": 1.6416, "step": 70},
    {"epoch": 0.04, "grad_norm": 0.0, "learning_rate": 2.1299999999999996e-05, "loss": 1.668, "step": 71},
    {"epoch": 0.04, "grad_norm": 0.0, "learning_rate": 2.1599999999999996e-05, "loss": 1.6408, "step": 72},
    {"epoch": 0.04, "grad_norm": 0.0, "learning_rate": 2.1899999999999997e-05, "loss": 1.6346, "step": 73},
    {"epoch": 0.04, "grad_norm": 0.0, "learning_rate": 2.2199999999999998e-05, "loss": 1.6541, "step": 74},
    {"epoch": 0.04, "grad_norm": 0.0, "learning_rate": 2.2499999999999998e-05, "loss": 1.6408, "step": 75},
    {"epoch": 0.04, "grad_norm": 0.0, "learning_rate": 2.28e-05, "loss": 1.659, "step": 76},
    {"epoch": 0.04, "grad_norm": 0.0, "learning_rate": 2.31e-05, "loss": 1.6626, "step": 77},
    {"epoch": 0.04, "grad_norm": 0.0, "learning_rate": 2.34e-05, "loss": 1.6467, "step": 78},
    {"epoch": 0.04, "grad_norm": 0.0, "learning_rate": 2.3699999999999997e-05, "loss": 1.6502, "step": 79},
    {"epoch": 0.04, "grad_norm": 0.0, "learning_rate": 2.3999999999999997e-05, "loss": 1.6569, "step": 80},
    {"epoch": 0.04, "grad_norm": 0.0, "learning_rate": 2.4299999999999998e-05, "loss": 1.652, "step": 81},
    {"epoch": 0.04, "grad_norm": 0.0, "learning_rate": 2.4599999999999998e-05, "loss": 1.6539, "step": 82},
    {"epoch": 0.04, "grad_norm": 0.0, "learning_rate": 2.49e-05, "loss": 1.6588, "step": 83},
    {"epoch": 0.04, "grad_norm": 0.0, "learning_rate": 2.52e-05, "loss": 1.6408, "step": 84},
    {"epoch": 0.04, "grad_norm": 0.0, "learning_rate": 2.55e-05, "loss": 1.6503, "step": 85},
    {"epoch": 0.04, "grad_norm": 0.0, "learning_rate": 2.5799999999999997e-05, "loss": 1.6492, "step": 86},
    {"epoch": 0.05, "grad_norm": 0.0, "learning_rate": 2.6099999999999997e-05, "loss": 1.6551, "step": 87},
    {"epoch": 0.05, "grad_norm": 0.0, "learning_rate": 2.6399999999999995e-05, "loss": 1.6565, "step": 88},
    {"epoch": 0.05, "grad_norm": 0.0, "learning_rate": 2.6699999999999995e-05, "loss": 1.6678, "step": 89},
    {"epoch": 0.05, "grad_norm": 0.0, "learning_rate": 2.6999999999999996e-05, "loss": 1.6641, "step": 90},
    {"epoch": 0.05, "grad_norm": 0.0, "learning_rate": 2.7299999999999996e-05, "loss": 1.674, "step": 91},
    {"epoch": 0.05, "grad_norm": 0.0, "learning_rate": 2.7599999999999997e-05, "loss": 1.6657, "step": 92},
    {"epoch": 0.05, "grad_norm": 0.0, "learning_rate": 2.7899999999999997e-05, "loss": 1.658, "step": 93},
    {"epoch": 0.05, "grad_norm": 0.0, "learning_rate": 2.8199999999999998e-05, "loss": 1.6654, "step": 94},
    {"epoch": 0.05, "grad_norm": 0.0, "learning_rate": 2.8499999999999998e-05, "loss": 1.6474, "step": 95},
    {"epoch": 0.05, "grad_norm": 0.0, "learning_rate": 2.88e-05, "loss": 1.6467, "step": 96},
    {"epoch": 0.05, "grad_norm": 0.0, "learning_rate": 2.91e-05, "loss": 1.6616, "step": 97},
    {"epoch": 0.05, "grad_norm": 0.0, "learning_rate": 2.94e-05, "loss": 1.6712, "step": 98},
    {"epoch": 0.05, "grad_norm": 0.0, "learning_rate": 2.97e-05, "loss": 1.643, "step": 99},
    {"epoch": 0.05, "grad_norm": 0.0, "learning_rate": 2.9999999999999997e-05, "loss": 1.6578, "step": 100},
    {"epoch": 0.05, "grad_norm": 0.0, "learning_rate": 3.0299999999999998e-05, "loss": 1.664, "step": 101},
    {"epoch": 0.05, "grad_norm": 0.0, "learning_rate": 3.06e-05, "loss": 1.6624, "step": 102},
    {"epoch": 0.05, "grad_norm": 0.0, "learning_rate": 3.09e-05, "loss": 1.6623, "step": 103},
    {"epoch": 0.05, "grad_norm": 0.0, "learning_rate": 3.119999999999999e-05, "loss": 1.6344, "step": 104},
    {"epoch": 0.05, "grad_norm": 0.0, "learning_rate": 3.149999999999999e-05, "loss": 1.644, "step": 105},
    {"epoch": 0.06, "grad_norm": 0.0, "learning_rate": 3.1799999999999994e-05, "loss": 1.6387, "step": 106},
    {"epoch": 0.06, "grad_norm": 0.0, "learning_rate": 3.2099999999999994e-05, "loss": 1.6659, "step": 107},
    {"epoch": 0.06, "grad_norm": 0.0, "learning_rate": 3.2399999999999995e-05, "loss": 1.6579, "step": 108},
    {"epoch": 0.06, "grad_norm": 0.0, "learning_rate": 3.2699999999999995e-05, "loss": 1.6306, "step": 109},
    {"epoch": 0.06, "grad_norm": 0.0, "learning_rate": 3.2999999999999996e-05, "loss": 1.654, "step": 110},
    {"epoch": 0.06, "grad_norm": 0.0, "learning_rate": 3.3299999999999996e-05, "loss": 1.6322, "step": 111},
    {"epoch": 0.06, "grad_norm": 0.0, "learning_rate": 3.36e-05, "loss": 1.6617, "step": 112},
    {"epoch": 0.06, "grad_norm": 0.0, "learning_rate": 3.39e-05, "loss": 1.6356, "step": 113},
    {"epoch": 0.06, "grad_norm": 0.0, "learning_rate": 3.42e-05, "loss": 1.6602, "step": 114},
    {"epoch": 0.06, "grad_norm": 0.0, "learning_rate": 3.45e-05, "loss": 1.6322, "step": 115},
    {"epoch": 0.06, "grad_norm": 0.0, "learning_rate": 3.48e-05, "loss": 1.6451, "step": 116},
    {"epoch": 0.06, "grad_norm": 0.0, "learning_rate": 3.51e-05, "loss": 1.6567, "step": 117},
    {"epoch": 0.06, "grad_norm": 0.0, "learning_rate": 3.539999999999999e-05, "loss": 1.6523, "step": 118},
    {"epoch": 0.06, "grad_norm": 0.0, "learning_rate": 3.5699999999999994e-05, "loss": 1.6863, "step": 119},
    {"epoch": 0.06, "grad_norm": 0.0, "learning_rate": 3.5999999999999994e-05, "loss": 1.6455, "step": 120},
    {"epoch": 0.06, "grad_norm": 0.0, "learning_rate": 3.6299999999999995e-05, "loss": 1.6523, "step": 121},
    {"epoch": 0.06, "grad_norm": 0.0, "learning_rate": 3.6599999999999995e-05, "loss": 1.6471, "step": 122},
    {"epoch": 0.06, "grad_norm": 0.0, "learning_rate": 3.6899999999999996e-05, "loss": 1.6532, "step": 123},
    {"epoch": 0.06, "grad_norm": 0.0, "learning_rate": 3.7199999999999996e-05, "loss": 1.6592, "step": 124},
    {"epoch": 0.07, "grad_norm": 0.0, "learning_rate": 3.75e-05, "loss": 1.6787, "step": 125},
    {"epoch": 0.07, "grad_norm": 0.0, "learning_rate": 3.78e-05, "loss": 1.6713, "step": 126},
    {"epoch": 0.07, "grad_norm": 0.0, "learning_rate": 3.81e-05, "loss": 1.6366, "step": 127},
    {"epoch": 0.07, "grad_norm": 0.0, "learning_rate": 3.84e-05, "loss": 1.6531, "step": 128},
    {"epoch": 0.07, "grad_norm": 0.0, "learning_rate": 3.87e-05, "loss": 1.6404, "step": 129},
    {"epoch": 0.07, "grad_norm": 0.0, "learning_rate": 3.9e-05, "loss": 1.6525, "step": 130},
    {"epoch": 0.07, "grad_norm": 0.0, "learning_rate": 3.93e-05, "loss": 1.6507, "step": 131},
    {"epoch": 0.07, "grad_norm": 0.0, "learning_rate": 3.96e-05, "loss": 1.647, "step": 132},
    {"epoch": 0.07, "grad_norm": 0.0, "learning_rate": 3.99e-05, "loss": 1.656, "step": 133},
    {"epoch": 0.07, "grad_norm": 0.0, "learning_rate": 4.02e-05, "loss": 1.6371, "step": 134},
    {"epoch": 0.07, "grad_norm": 0.0, "learning_rate": 4.05e-05, "loss": 1.6484, "step": 135},
    {"epoch": 0.07, "grad_norm": 0.0, "learning_rate": 4.08e-05, "loss": 1.6758, "step": 136},
    {"epoch": 0.07, "grad_norm": 0.0, "learning_rate": 4.11e-05, "loss": 1.6486, "step": 137},
    {"epoch": 0.07, "grad_norm": 0.0, "learning_rate": 4.14e-05, "loss": 1.6555, "step": 138},
    {"epoch": 0.07, "grad_norm": 0.0, "learning_rate": 4.17e-05, "loss": 1.6383, "step": 139},
    {"epoch": 0.07, "grad_norm": 0.0, "learning_rate": 4.2e-05, "loss": 1.6544, "step": 140},
    {"epoch": 0.07, "grad_norm": 0.0, "learning_rate": 4.229999999999999e-05, "loss": 1.6595, "step": 141},
    {"epoch": 0.07, "grad_norm": 0.0, "learning_rate": 4.259999999999999e-05, "loss": 1.6362, "step": 142},
    {"epoch": 0.07, "grad_norm": 0.0, "learning_rate": 4.289999999999999e-05, "loss": 1.6728, "step": 143},
    {"epoch": 0.08, "grad_norm": 0.0, "learning_rate": 4.319999999999999e-05, "loss": 1.6412, "step": 144},
    {"epoch": 0.08, "grad_norm": 0.0, "learning_rate": 4.3499999999999993e-05, "loss": 1.6424, "step": 145},
    {"epoch": 0.08, "grad_norm": 0.0, "learning_rate": 4.3799999999999994e-05, "loss": 1.6432, "step": 146},
    {"epoch": 0.08, "grad_norm": 0.0, "learning_rate": 4.4099999999999995e-05, "loss": 1.642, "step": 147},
    {"epoch": 0.08, "grad_norm": 0.0, "learning_rate": 4.4399999999999995e-05, "loss": 1.6612, "step": 148},
    {"epoch": 0.08, "grad_norm": 0.0, "learning_rate": 4.4699999999999996e-05, "loss": 1.6536, "step": 149},
    {"epoch": 0.08, "grad_norm": 0.0, "learning_rate": 4.4999999999999996e-05, "loss": 1.6676, "step": 150},
    {"epoch": 0.08, "grad_norm": 0.0, "learning_rate": 4.5299999999999997e-05, "loss": 1.6409, "step": 151},
    {"epoch": 0.08, "grad_norm": 0.0, "learning_rate": 4.56e-05, "loss": 1.6447, "step": 152},
    {"epoch": 0.08, "grad_norm": 0.0, "learning_rate": 4.59e-05, "loss": 1.6387, "step": 153},
    {"epoch": 0.08, "grad_norm": 0.0, "learning_rate": 4.62e-05, "loss": 1.6487, "step": 154},
    {"epoch": 0.08, "grad_norm": 0.0, "learning_rate": 4.65e-05, "loss": 1.6422, "step": 155},
    {"epoch": 0.08, "grad_norm": 0.0, "learning_rate": 4.68e-05, "loss": 1.6461, "step": 156},
    {"epoch": 0.08, "grad_norm": 0.0, "learning_rate": 4.709999999999999e-05, "loss": 1.6558, "step": 157},
    {"epoch": 0.08, "grad_norm": 0.0, "learning_rate": 4.7399999999999993e-05, "loss": 1.6372, "step": 158},
    {"epoch": 0.08, "grad_norm": 0.0, "learning_rate": 4.7699999999999994e-05, "loss": 1.6451, "step": 159},
    {"epoch": 0.08, "grad_norm": 0.0, "learning_rate": 4.7999999999999994e-05, "loss": 1.6636, "step": 160},
    {"epoch": 0.08, "grad_norm": 0.0, "learning_rate": 4.8299999999999995e-05, "loss": 1.6487, "step": 161},
    {"epoch": 0.08, "grad_norm": 0.0, "learning_rate": 4.8599999999999995e-05, "loss": 1.6635, "step": 162},
    {"epoch": 0.08, "grad_norm": 0.0, "learning_rate": 4.8899999999999996e-05, "loss": 1.6557, "step": 163},
    {"epoch": 0.09, "grad_norm": 0.0, "learning_rate": 4.9199999999999997e-05, "loss": 1.6609, "step": 164},
    {"epoch": 0.09, "grad_norm": 0.0, "learning_rate": 4.95e-05, "loss": 1.6585, "step": 165},
    {"epoch": 0.09, "grad_norm": 0.0, "learning_rate": 4.98e-05, "loss": 1.6476, "step": 166},
    {"epoch": 0.09, "grad_norm": 0.0, "learning_rate": 5.01e-05, "loss": 1.6453, "step": 167},
    {"epoch": 0.09, "grad_norm": 0.0, "learning_rate": 5.04e-05, "loss": 1.6601, "step": 168},
    {"epoch": 0.09, "grad_norm": 0.0, "learning_rate": 5.07e-05, "loss": 1.641, "step": 169},
    {"epoch": 0.09, "grad_norm": 0.0, "learning_rate": 5.1e-05, "loss": 1.6496, "step": 170},
    {"epoch": 0.09, "grad_norm": 0.0, "learning_rate": 5.13e-05, "loss": 1.6553, "step": 171},
    {"epoch": 0.09, "grad_norm": 0.0, "learning_rate": 5.1599999999999994e-05, "loss": 1.6782, "step": 172},
    {"epoch": 0.09, "grad_norm": 0.0, "learning_rate": 5.1899999999999994e-05, "loss": 1.662, "step": 173},
    {"epoch": 0.09, "grad_norm": 0.0, "learning_rate": 5.2199999999999995e-05, "loss": 1.6475, "step": 174},
    {"epoch": 0.09, "grad_norm": 0.0, "learning_rate": 5.2499999999999995e-05, "loss": 1.6502, "step": 175},
    {"epoch": 0.09, "grad_norm": 0.0, "learning_rate": 5.279999999999999e-05, "loss": 1.6345, "step": 176},
    {"epoch": 0.09, "grad_norm": 0.0, "learning_rate": 5.309999999999999e-05, "loss": 1.6466, "step": 177},
    {"epoch": 0.09, "grad_norm": 0.0, "learning_rate": 5.339999999999999e-05, "loss": 1.6338, "step": 178},
    {"epoch": 0.09, "grad_norm": 0.0, "learning_rate": 5.369999999999999e-05, "loss": 1.6616, "step": 179},
    {"epoch": 0.09, "grad_norm": 0.0, "learning_rate": 5.399999999999999e-05, "loss": 1.6805, "step": 180},
    {"epoch": 0.09, "grad_norm": 0.0, "learning_rate": 5.429999999999999e-05, "loss": 1.6737, "step": 181},
    {"epoch": 0.09, "grad_norm": 0.0, "learning_rate": 5.459999999999999e-05, "loss": 1.6341, "step": 182},
    {"epoch": 0.1, "grad_norm": 0.0, "learning_rate": 5.489999999999999e-05, "loss": 1.6557, "step": 183},
    {"epoch": 0.1, "grad_norm": 0.0, "learning_rate": 5.519999999999999e-05, "loss": 1.6633, "step": 184},
    {"epoch": 0.1, "grad_norm": 0.0, "learning_rate": 5.5499999999999994e-05, "loss": 1.6731, "step": 185},
    {"epoch": 0.1, "grad_norm": 0.0, "learning_rate": 5.5799999999999994e-05, "loss": 1.6567, "step": 186},
    {"epoch": 0.1, "grad_norm": 0.0, "learning_rate": 5.6099999999999995e-05, "loss": 1.6508, "step": 187},
    {"epoch": 0.1, "grad_norm": 0.0, "learning_rate": 5.6399999999999995e-05, "loss": 1.6662, "step": 188},
    {"epoch": 0.1, "grad_norm": 0.0, "learning_rate": 5.6699999999999996e-05, "loss": 1.6376, "step": 189},
    {"epoch": 0.1, "grad_norm": 0.0, "learning_rate": 5.6999999999999996e-05, "loss": 1.6567, "step": 190},
    {"epoch": 0.1, "grad_norm": 0.0, "learning_rate": 5.73e-05, "loss": 1.6554, "step": 191},
    {"epoch": 0.1, "grad_norm": 0.0, "learning_rate": 5.76e-05, "loss": 1.6395, "step": 192},
    {"epoch": 0.1, "grad_norm": 0.0, "learning_rate": 5.79e-05, "loss": 1.6537, "step": 193},
    {"epoch": 0.1, "grad_norm": 0.0, "learning_rate": 5.82e-05, "loss": 1.6478, "step": 194},
    {"epoch": 0.1, "grad_norm": 0.0, "learning_rate": 5.85e-05, "loss": 1.6626, "step": 195},
    {"epoch": 0.1, "grad_norm": 0.0, "learning_rate": 5.88e-05, "loss": 1.65, "step": 196},
    {"epoch": 0.1, "grad_norm": 0.0, "learning_rate": 5.91e-05, "loss": 1.6551, "step": 197},
    {"epoch": 0.1, "grad_norm": 0.0, "learning_rate": 5.94e-05, "loss": 1.6369, "step": 198},
    {"epoch": 0.1, "grad_norm": 0.0, "learning_rate": 5.97e-05, "loss": 1.6418, "step": 199},
    {"epoch": 0.1, "grad_norm": 0.0, "learning_rate": 5.9999999999999995e-05, "loss": 1.6552, "step": 200},
    {"epoch": 0.1, "grad_norm": 0.0, "learning_rate": 6.0299999999999995e-05, "loss": 1.6611, "step": 201},
    {"epoch": 0.11, "grad_norm": 0.0, "learning_rate": 6.0599999999999996e-05, "loss": 1.6428, "step": 202},
    {"epoch": 0.11, "grad_norm": 0.0, "learning_rate": 6.0899999999999996e-05, "loss": 1.6625, "step": 203},
    {"epoch": 0.11, "grad_norm": 0.0, "learning_rate": 6.12e-05, "loss": 1.6542, "step": 204},
    {"epoch": 0.11, "grad_norm": 0.0, "learning_rate": 6.149999999999999e-05, "loss": 1.6592, "step": 205},
    {"epoch": 0.11, "grad_norm": 0.0, "learning_rate": 6.18e-05, "loss": 1.6562, "step": 206},
    {"epoch": 0.11, "grad_norm": 0.0, "learning_rate": 6.209999999999999e-05, "loss": 1.6255, "step": 207},
    {"epoch": 0.11, "grad_norm": 0.0, "learning_rate": 6.239999999999999e-05, "loss": 1.6492, "step": 208},
    {"epoch": 0.11, "grad_norm": 0.0, "learning_rate": 6.269999999999999e-05, "loss": 1.6481, "step": 209},
    {"epoch": 0.11, "grad_norm": 0.0, "learning_rate": 6.299999999999999e-05, "loss": 1.6513, "step": 210},
    {"epoch": 0.11, "grad_norm": 0.0, "learning_rate": 6.33e-05, "loss": 1.6464, "step": 211},
    {"epoch": 0.11, "grad_norm": 0.0, "learning_rate": 6.359999999999999e-05, "loss": 1.6445, "step": 212},
    {"epoch": 0.11, "grad_norm": 0.0, "learning_rate": 6.39e-05, "loss": 1.6569, "step": 213},
    {"epoch": 0.11, "grad_norm": 0.0, "learning_rate": 6.419999999999999e-05, "loss": 1.6779, "step": 214},
    {"epoch": 0.11, "grad_norm": 0.0, "learning_rate": 6.45e-05, "loss": 1.6557, "step": 215},
    {"epoch": 0.11, "grad_norm": 0.0, "learning_rate": 6.479999999999999e-05, "loss": 1.6545, "step": 216},
    {"epoch": 0.11, "grad_norm": 0.0, "learning_rate": 6.51e-05, "loss": 1.6503, "step": 217},
    {"epoch": 0.11, "grad_norm": 0.0, "learning_rate": 6.539999999999999e-05, "loss": 1.6427, "step": 218},
    {"epoch": 0.11, "grad_norm": 0.0, "learning_rate": 6.57e-05, "loss": 1.6702, "step": 219},
    {"epoch": 0.11, "grad_norm": 0.0, "learning_rate": 6.599999999999999e-05, "loss": 1.6572, "step": 220},
    {"epoch": 0.12, "grad_norm": 0.0, "learning_rate": 6.63e-05, "loss": 1.674, "step": 221},
    {"epoch": 0.12, "grad_norm": 0.0, "learning_rate": 6.659999999999999e-05, "loss": 1.6376, "step": 222},
    {"epoch": 0.12, "grad_norm": 0.0, "learning_rate": 6.69e-05, "loss": 1.6453, "step": 223},
    {"epoch": 0.12, "grad_norm": 0.0, "learning_rate": 6.72e-05, "loss": 1.6694, "step": 224},
    {"epoch": 0.12, "grad_norm": 0.0, "learning_rate": 6.75e-05, "loss": 1.6765, "step": 225},
    {"epoch": 0.12, "grad_norm": 0.0, "learning_rate": 6.78e-05, "loss": 1.6687, "step": 226},
    {"epoch": 0.12, "grad_norm": 0.0, "learning_rate": 6.81e-05, "loss": 1.6502, "step": 227},
    {"epoch": 0.12, "grad_norm": 0.0, "learning_rate": 6.84e-05, "loss": 1.6551, "step": 228},
    {"epoch": 0.12, "grad_norm": 0.0, "learning_rate": 6.87e-05, "loss": 1.6735, "step": 229},
    {"epoch": 0.12, "grad_norm": 0.0, "learning_rate": 6.9e-05, "loss": 1.6509, "step": 230},
    {"epoch": 0.12, "grad_norm": 0.0, "learning_rate": 6.93e-05, "loss": 1.6396, "step": 231},
    {"epoch": 0.12, "grad_norm": 0.0, "learning_rate": 6.96e-05, "loss": 1.6559, "step": 232},
    {"epoch": 0.12, "grad_norm": 0.0, "learning_rate": 6.989999999999999e-05, "loss": 1.6674, "step": 233},
    {"epoch": 0.12, "grad_norm": 0.0, "learning_rate": 7.02e-05, "loss": 1.6498, "step": 234},
    {"epoch": 0.12, "grad_norm": 0.0, "learning_rate": 7.049999999999999e-05, "loss": 1.6666, "step": 235},
    {"epoch": 0.12, "grad_norm": 0.0, "learning_rate": 7.079999999999999e-05, "loss": 1.6677, "step": 236},
    {"epoch": 0.12, "grad_norm": 0.0, "learning_rate": 7.11e-05, "loss": 1.6463, "step": 237},
    {"epoch": 0.12, "grad_norm": 0.0, "learning_rate": 7.139999999999999e-05, "loss": 1.6448, "step": 238},
    {"epoch": 0.12, "grad_norm": 0.0, "learning_rate": 7.17e-05, "loss": 1.6527, "step": 239},
    {"epoch": 0.13, "grad_norm": 0.0, "learning_rate": 7.199999999999999e-05, "loss": 1.6461, "step": 240},
    {"epoch": 0.13, "grad_norm": 0.0, "learning_rate": 7.23e-05, "loss": 1.6639, "step": 241},
    {"epoch": 0.13, "grad_norm": 0.0, "learning_rate": 7.259999999999999e-05, "loss": 1.6528, "step": 242},
    {"epoch": 0.13, "grad_norm": 0.0, "learning_rate": 7.29e-05, "loss": 1.6466, "step": 243},
    {"epoch": 0.13, "grad_norm": 0.0, "learning_rate": 7.319999999999999e-05, "loss": 1.6624, "step": 244},
    {"epoch": 0.13, "grad_norm": 0.0, "learning_rate": 7.35e-05, "loss": 1.6676, "step": 245},
    {"epoch": 0.13, "grad_norm": 0.0, "learning_rate": 7.379999999999999e-05, "loss": 1.6579, "step": 246},
    {"epoch": 0.13, "grad_norm": 0.0, "learning_rate": 7.41e-05, "loss": 1.6531, "step": 247},
    {"epoch": 0.13, "grad_norm": 0.0, "learning_rate": 7.439999999999999e-05, "loss": 1.6678, "step": 248},
    {"epoch": 0.13, "grad_norm": 0.0, "learning_rate": 7.47e-05, "loss": 1.6576, "step": 249},
    {"epoch": 0.13, "grad_norm": 0.0, "learning_rate": 7.5e-05, "loss": 1.6568, "step": 250},
    {"epoch": 0.13, "grad_norm": 0.0, "learning_rate": 7.529999999999999e-05, "loss": 1.6537, "step": 251},
    {"epoch": 0.13, "grad_norm": 0.0, "learning_rate": 7.56e-05, "loss": 1.6349, "step": 252},
    {"epoch": 0.13, "grad_norm": 0.0, "learning_rate": 7.589999999999999e-05, "loss": 1.6652, "step": 253},
    {"epoch": 0.13, "grad_norm": 0.0, "learning_rate": 7.62e-05, "loss": 1.6584, "step": 254},
    {"epoch": 0.13, "grad_norm": 0.0, "learning_rate": 7.649999999999999e-05, "loss": 1.6429, "step": 255},
    {"epoch": 0.13, "grad_norm": 0.0, "learning_rate": 7.68e-05, "loss": 1.6489, "step": 256},
    {"epoch": 0.13, "grad_norm": 0.0, "learning_rate": 7.709999999999999e-05, "loss": 1.6553, "step": 257},
    {"epoch": 0.13, "grad_norm": 0.0, "learning_rate": 7.74e-05, "loss": 1.6486, "step": 258},
    {"epoch": 0.14, "grad_norm": 0.0, "learning_rate": 7.769999999999999e-05, "loss": 1.6425, "step": 259},
    {"epoch": 0.14, "grad_norm": 0.0, "learning_rate": 7.8e-05, "loss": 1.6591, "step": 260},
    {"epoch": 0.14, "grad_norm": 0.0, "learning_rate": 7.829999999999999e-05, "loss": 1.6306, "step": 261},
    {"epoch": 0.14, "grad_norm": 0.0, "learning_rate": 7.86e-05, "loss": 1.6474, "step": 262},
    {"epoch": 0.14, "grad_norm": 0.0, "learning_rate": 7.89e-05, "loss": 1.65, "step": 263},
    {"epoch": 0.14, "grad_norm": 0.0, "learning_rate": 7.92e-05, "loss": 1.6749, "step": 264},
    {"epoch": 0.14, "grad_norm": 0.0, "learning_rate": 7.95e-05, "loss": 1.6375, "step": 265},
    {"epoch": 0.14, "grad_norm": 0.0, "learning_rate": 7.98e-05, "loss": 1.6327, "step": 266},
    {"epoch": 0.14, "grad_norm": 0.0, "learning_rate": 8.01e-05, "loss": 1.6341, "step": 267},
    {"epoch": 0.14, "grad_norm": 0.0, "learning_rate": 8.04e-05, "loss": 1.6558, "step": 268},
    {"epoch": 0.14, "grad_norm": 0.0, "learning_rate": 8.07e-05, "loss": 1.6583, "step": 269},
    {"epoch": 0.14, "grad_norm": 0.0, "learning_rate": 8.1e-05, "loss": 1.649, "step": 270},
    {"epoch": 0.14, "grad_norm": 0.0, "learning_rate": 8.13e-05, "loss": 1.6349, "step": 271},
    {"epoch": 0.14, "grad_norm": 0.0, "learning_rate": 8.16e-05, "loss": 1.6701, "step": 272},
    {"epoch": 0.14, "grad_norm": 0.0, "learning_rate": 8.19e-05, "loss": 1.6591, "step": 273},
    {"epoch": 0.14, "grad_norm": 0.0, "learning_rate": 8.22e-05, "loss": 1.6477, "step": 274},
    {"epoch": 0.14, "grad_norm": 0.0, "learning_rate": 8.25e-05, "loss": 1.6629, "step": 275},
    {"epoch": 0.14, "grad_norm": 0.0, "learning_rate": 8.28e-05, "loss": 1.6495, "step": 276},
    {"epoch": 0.14, "grad_norm": 0.0, "learning_rate": 8.31e-05, "loss": 1.637, "step": 277},
    {"epoch": 0.14, "grad_norm": 0.0, "learning_rate": 8.34e-05, "loss": 1.6544, "step": 278},
    {"epoch": 0.15, "grad_norm": 0.0, "learning_rate": 8.37e-05, "loss": 1.643, "step": 279},
    {"epoch": 0.15, "grad_norm": 0.0, "learning_rate": 8.4e-05, "loss": 1.6428, "step": 280},
    {"epoch": 0.15, "grad_norm": 0.0, "learning_rate": 8.43e-05, "loss": 1.6579, "step": 281},
    {"epoch": 0.15, "grad_norm": 0.0, "learning_rate": 8.459999999999998e-05, "loss": 1.6553, "step": 282},
    {"epoch": 0.15, "grad_norm": 0.0, "learning_rate": 8.489999999999999e-05, "loss": 1.6618, "step": 283},
    {"epoch": 0.15, "grad_norm": 0.0, "learning_rate": 8.519999999999998e-05, "loss": 1.65, "step": 284},
    {"epoch": 0.15, "grad_norm": 0.0, "learning_rate": 8.549999999999999e-05, "loss": 1.6686, "step": 285},
    {"epoch": 0.15, "grad_norm": 0.0, "learning_rate": 8.579999999999998e-05, "loss": 1.6441, "step": 286},
    {"epoch": 0.15, "grad_norm": 0.0, "learning_rate": 8.609999999999999e-05, "loss": 1.6612, "step": 287},
    {"epoch": 0.15, "grad_norm": 0.0, "learning_rate": 8.639999999999999e-05, "loss": 1.637, "step": 288},
    {"epoch": 0.15, "grad_norm": 0.0, "learning_rate": 8.669999999999998e-05, "loss": 1.6427, "step": 289},
    {"epoch": 0.15, "grad_norm": 0.0, "learning_rate": 8.699999999999999e-05, "loss": 1.6456, "step": 290},
    {"epoch": 0.15, "grad_norm": 0.0, "learning_rate": 8.729999999999998e-05, "loss": 1.6561, "step": 291},
    {"epoch": 0.15, "grad_norm": 0.0, "learning_rate": 8.759999999999999e-05, "loss": 1.6588, "step": 292},
    {"epoch": 0.15, "grad_norm": 0.0, "learning_rate": 8.789999999999998e-05, "loss": 1.6251, "step": 293},
    {"epoch": 0.15, "grad_norm": 0.0, "learning_rate": 8.819999999999999e-05, "loss": 1.6623, "step": 294},
    {"epoch": 0.15, "grad_norm": 0.0, "learning_rate": 8.849999999999998e-05, "loss": 1.6495, "step": 295},
    {"epoch": 0.15, "grad_norm": 0.0, "learning_rate": 8.879999999999999e-05, "loss": 1.6711, "step": 296},
    {"epoch": 0.15, "grad_norm": 0.0, "learning_rate": 8.909999999999998e-05, "loss": 1.6423, "step": 297},
    {"epoch": 0.16, "grad_norm": 0.0, "learning_rate": 8.939999999999999e-05, "loss": 1.6543, "step": 298},
    {"epoch": 0.16, "grad_norm": 0.0, "learning_rate": 8.969999999999998e-05, "loss": 1.6446, "step": 299},
    {"epoch": 0.16, "grad_norm": 0.0, "learning_rate": 8.999999999999999e-05, "loss": 1.6542, "step": 300},
    {"epoch": 0.16, "grad_norm": 0.0, "learning_rate": 9.029999999999999e-05, "loss": 1.6592, "step": 301},
    {"epoch": 0.16, "grad_norm": 0.0, "learning_rate": 9.059999999999999e-05, "loss": 1.6569, "step": 302},
    {"epoch": 0.16, "grad_norm": 0.0, "learning_rate": 9.089999999999999e-05, "loss": 1.6688, "step": 303},
    {"epoch": 0.16, "grad_norm": 0.0, "learning_rate": 9.12e-05, "loss": 1.6538, "step": 304},
    {"epoch": 0.16, "grad_norm": 0.0, "learning_rate": 9.149999999999999e-05, "loss": 1.643, "step": 305},
    {"epoch": 0.16, "grad_norm": 0.0, "learning_rate": 9.18e-05, "loss": 1.6696, "step": 306},
    {"epoch": 0.16, "grad_norm": 0.0, "learning_rate": 9.209999999999999e-05, "loss": 1.6637, "step": 307},
    {"epoch": 0.16, "grad_norm": 0.0, "learning_rate": 9.24e-05, "loss": 1.6466, "step": 308},
    {"epoch": 0.16, "grad_norm": 0.0, "learning_rate": 9.269999999999999e-05, "loss": 1.6749, "step": 309},
    {"epoch": 0.16, "grad_norm": 0.0, "learning_rate": 9.3e-05, "loss": 1.6693, "step": 310},
    {"epoch": 0.16, "grad_norm": 0.0, "learning_rate": 9.329999999999999e-05, "loss": 1.657, "step": 311},
    {"epoch": 0.16, "grad_norm": 0.0, "learning_rate": 9.36e-05, "loss": 1.6568, "step": 312},
    {"epoch": 0.16, "grad_norm": 0.0, "learning_rate": 9.389999999999999e-05, "loss": 1.6551, "step": 313},
    {"epoch": 0.16, "grad_norm": 0.0, "learning_rate": 9.419999999999999e-05, "loss": 1.6349, "step": 314},
    {"epoch": 0.16, "grad_norm": 0.0, "learning_rate": 9.449999999999999e-05, "loss": 1.6687, "step": 315},
    {"epoch": 0.16, "grad_norm": 0.0, "learning_rate": 9.479999999999999e-05, "loss": 1.6571, "step": 316},
    {"epoch": 0.17, "grad_norm": 0.0, "learning_rate": 9.51e-05, "loss": 1.6403, "step": 317},
    {"epoch": 0.17, "grad_norm": 0.0, "learning_rate": 9.539999999999999e-05, "loss": 1.6657, "step": 318},
    {"epoch": 0.17, "grad_norm": 0.0, "learning_rate": 9.57e-05, "loss": 1.643, "step": 319},
    {"epoch": 0.17, "grad_norm": 0.0, "learning_rate": 9.599999999999999e-05, "loss": 1.6437, "step": 320},
    {"epoch": 0.17, "grad_norm": 0.0, "learning_rate": 9.63e-05, "loss": 1.6625, "step": 321},
    {"epoch": 0.17, "grad_norm": 0.0, "learning_rate": 9.659999999999999e-05, "loss": 1.639, "step": 322},
    {"epoch": 0.17, "grad_norm": 0.0, "learning_rate": 9.69e-05, "loss": 1.6513, "step": 323},
    {"epoch": 0.17, "grad_norm": 0.0, "learning_rate": 9.719999999999999e-05, "loss": 1.6402, "step": 324},
    {"epoch": 0.17, "grad_norm": 0.0, "learning_rate": 9.75e-05, "loss": 1.6497, "step": 325},
    {"epoch": 0.17, "grad_norm": 0.0, "learning_rate": 9.779999999999999e-05, "loss": 1.6613, "step": 326},
    {"epoch": 0.17, "grad_norm": 0.0, "learning_rate": 9.81e-05, "loss": 1.6553, "step": 327},
    {"epoch": 0.17, "grad_norm": 0.0, "learning_rate": 9.839999999999999e-05, "loss": 1.6424, "step": 328},
    {"epoch": 0.17, "grad_norm": 0.0, "learning_rate": 9.87e-05, "loss": 1.6531, "step": 329},
    {"epoch": 0.17, "grad_norm": 0.0, "learning_rate": 9.9e-05, "loss": 1.6348, "step": 330},
    {"epoch": 0.17, "grad_norm": 0.0, "learning_rate": 9.93e-05, "loss": 1.6447, "step": 331},
    {"epoch": 0.17, "grad_norm": 0.0, "learning_rate": 9.96e-05, "loss": 1.638, "step": 332},
    {"epoch": 0.17, "grad_norm": 0.0, "learning_rate": 9.99e-05, "loss": 1.6557, "step": 333},
    {"epoch": 0.17, "grad_norm": 0.0, "learning_rate": 0.0001002, "loss": 1.661, "step": 334},
    {"epoch": 0.17, "grad_norm": 0.0, "learning_rate": 0.0001005, "loss": 1.6733, "step": 335},
    {"epoch": 0.18, "grad_norm": 0.0, "learning_rate": 0.0001008, "loss": 1.6312, "step": 336},
    {"epoch": 0.18, "grad_norm": 0.0, "learning_rate": 0.0001011, "loss": 1.6561, "step": 337},
    {"epoch": 0.18, "grad_norm": 0.0, "learning_rate": 0.0001014, "loss": 1.6532, "step": 338},
    {"epoch": 0.18, "grad_norm": 0.0, "learning_rate": 0.00010169999999999999, "loss": 1.669, "step": 339},
    {"epoch": 0.18, "grad_norm": 0.0, "learning_rate": 0.000102, "loss": 1.6474, "step": 340},
    {"epoch": 0.18, "grad_norm": 0.0, "learning_rate": 0.00010229999999999999, "loss": 1.6373, "step": 341},
    {"epoch": 0.18, "grad_norm": 0.0, "learning_rate": 0.0001026, "loss": 1.6566, "step": 342},
    {"epoch": 0.18, "grad_norm": 0.0, "learning_rate": 0.0001029, "loss": 1.6354, "step": 343},
    {"epoch": 0.18, "grad_norm": 0.0, "learning_rate": 0.00010319999999999999, "loss": 1.6612, "step": 344},
    {"epoch": 0.18, "grad_norm": 0.0, "learning_rate": 0.00010349999999999998, "loss": 1.6641, "step": 345},
    {"epoch": 0.18, "grad_norm": 0.0, "learning_rate": 0.00010379999999999999, "loss": 1.6799, "step": 346},
    {"epoch": 0.18, "grad_norm": 0.0, "learning_rate": 0.00010409999999999998, "loss": 1.668, "step": 347},
    {"epoch": 0.18, "grad_norm": 0.0, "learning_rate": 0.00010439999999999999, "loss": 1.6604, "step": 348},
    {"epoch": 0.18, "grad_norm": 0.0, "learning_rate": 0.00010469999999999998, "loss": 1.6434, "step": 349},
    {"epoch": 0.18, "grad_norm": 0.0, "learning_rate": 0.00010499999999999999, "loss": 1.6554, "step": 350},
    {"epoch": 0.18, "grad_norm": 0.0, "learning_rate": 0.00010529999999999998, "loss": 1.6443, "step": 351},
    {"epoch": 0.18, "grad_norm": 0.0, "learning_rate": 0.00010559999999999998, "loss": 1.6465, "step": 352},
    {"epoch": 0.18, "grad_norm": 0.0, "learning_rate": 0.00010589999999999999, "loss": 1.6466, "step": 353},
    {"epoch": 0.18, "grad_norm": 0.0, "learning_rate": 0.00010619999999999998, "loss": 1.6435, "step": 354},
    {"epoch": 0.19, "grad_norm": 0.0, "learning_rate": 0.00010649999999999999, "loss": 1.6537, "step": 355},
    {"epoch": 0.19, "grad_norm": 0.0, "learning_rate": 0.00010679999999999998, "loss": 1.6574, "step": 356},
    {"epoch": 0.19, "grad_norm": 0.0, "learning_rate": 0.00010709999999999999, "loss": 1.6536, "step": 357},
    {"epoch": 0.19, "grad_norm": 0.0, "learning_rate": 0.00010739999999999998, "loss": 1.6515, "step": 358},
    {"epoch": 0.19, "grad_norm": 0.0, "learning_rate": 0.00010769999999999999, "loss": 1.6591, "step": 359},
    {"epoch": 0.19, "grad_norm": 0.0, "learning_rate": 0.00010799999999999998, "loss": 1.6694, "step": 360},
    {"epoch": 0.19, "grad_norm": 0.0, "learning_rate": 0.00010829999999999999, "loss": 1.6515, "step": 361},
    {"epoch": 0.19, "grad_norm": 0.0, "learning_rate": 0.00010859999999999998, "loss": 1.6283, "step": 362},
    {"epoch": 0.19, "grad_norm": 0.0, "learning_rate": 0.00010889999999999999, "loss": 1.6444, "step": 363},
    {"epoch": 0.19, "grad_norm": 0.0, "learning_rate": 0.00010919999999999998, "loss": 1.6358, "step": 364},
    {"epoch": 0.19, "grad_norm": 0.0, "learning_rate": 0.00010949999999999999, "loss": 1.6587, "step": 365},
    {"epoch": 0.19, "grad_norm": 0.0, "learning_rate": 0.00010979999999999999, "loss": 1.6406, "step": 366},
    {"epoch": 0.19, "grad_norm": 0.0, "learning_rate": 0.00011009999999999999, "loss": 1.668, "step": 367},
    {"epoch": 0.19, "grad_norm": 0.0, "learning_rate": 0.00011039999999999999, "loss": 1.6384, "step": 368},
    {"epoch": 0.19, "grad_norm": 0.0, "learning_rate": 0.0001107, "loss": 1.653, "step": 369},
    {"epoch": 0.19, "grad_norm": 0.0, "learning_rate": 0.00011099999999999999, "loss": 1.6655, "step": 370},
    {"epoch": 0.19, "grad_norm": 0.0, "learning_rate": 0.0001113, "loss": 1.6674, "step": 371},
    {"epoch": 0.19, "grad_norm": 0.0, "learning_rate": 0.00011159999999999999, "loss": 1.6583, "step": 372},
    {"epoch": 0.19, "grad_norm": 0.0, "learning_rate": 0.0001119, "loss": 1.6648, "step": 373},
    {"epoch": 0.19, "grad_norm": 0.0, "learning_rate": 0.00011219999999999999, "loss": 1.6662, "step": 374},
    {"epoch": 0.2, "grad_norm": 0.0, "learning_rate": 0.0001125, "loss": 1.6609, "step": 375},
    {"epoch": 0.2, "grad_norm": 0.0, "learning_rate": 0.00011279999999999999, "loss": 1.663, "step": 376},
    {"epoch": 0.2, "grad_norm": 0.0, "learning_rate": 0.00011309999999999998, "loss": 1.6407, "step": 377},
    {"epoch": 0.2, "grad_norm": 0.0, "learning_rate": 0.00011339999999999999, "loss": 1.6402, "step": 378},
    {"epoch": 0.2, "grad_norm": 0.0, "learning_rate": 0.00011369999999999999, "loss": 1.6507, "step": 379},
    {"epoch": 0.2, "grad_norm": 0.0, "learning_rate": 0.00011399999999999999, "loss": 1.6641, "step": 380},
    {"epoch": 0.2, "grad_norm": 0.0, "learning_rate": 0.00011429999999999999, "loss": 1.6276, "step": 381},
    {"epoch": 0.2, "grad_norm": 0.0, "learning_rate": 0.0001146, "loss": 1.6452, "step": 382},
    {"epoch": 0.2, "grad_norm": 0.0, "learning_rate": 0.00011489999999999999, "loss": 1.6531, "step": 383},
    {"epoch": 0.2, "grad_norm": 0.0, "learning_rate": 0.0001152, "loss": 1.6453, "step": 384},
    {"epoch": 0.2, "grad_norm": 0.0, "learning_rate": 0.00011549999999999999, "loss": 1.6516, "step": 385},
    {"epoch": 0.2, "grad_norm": 0.0, "learning_rate": 0.0001158, "loss": 1.6432, "step": 386},
    {"epoch": 0.2, "grad_norm": 0.0, "learning_rate": 0.00011609999999999999, "loss": 1.6706, "step": 387},
    {"epoch": 0.2, "grad_norm": 0.0, "learning_rate": 0.0001164, "loss": 1.6409, "step": 388},
    {"epoch": 0.2, "grad_norm": 0.0, "learning_rate": 0.00011669999999999999, "loss": 1.6405, "step": 389},
    {"epoch": 0.2, "grad_norm": 0.0, "learning_rate": 0.000117, "loss": 1.6432, "step": 390},
    {"epoch": 0.2, "grad_norm": 0.0, "learning_rate": 0.00011729999999999999, "loss": 1.646, "step": 391},
    {"epoch": 0.2, "grad_norm": 0.0, "learning_rate": 0.0001176, "loss": 1.6535, "step": 392},
    {"epoch": 0.2, "grad_norm": 0.0, "learning_rate": 0.00011789999999999999, "loss": 1.6517, "step": 393},
    {"epoch": 0.21, "grad_norm": 0.0, "learning_rate": 0.0001182, "loss": 1.6624, "step": 394},
    {"epoch": 0.21, "grad_norm": 0.0, "learning_rate": 0.0001185, "loss": 1.6485, "step": 395},
    {"epoch": 0.21, "grad_norm": 0.0, "learning_rate": 0.0001188, "loss": 1.6345, "step": 396},
    {"epoch": 0.21, "grad_norm": 0.0, "learning_rate": 0.0001191, "loss": 1.6706, "step": 397},
    {"epoch": 0.21, "grad_norm": 0.0, "learning_rate": 0.0001194, "loss": 1.6609, "step": 398},
    {"epoch": 0.21, "grad_norm": 0.0, "learning_rate": 0.0001197, "loss": 1.6602, "step": 399},
    {"epoch": 0.21, "grad_norm": 0.0, "learning_rate": 0.00011999999999999999, "loss": 1.6646, "step": 400},
    {"epoch": 0.21, "grad_norm": 0.0, "learning_rate": 0.0001203, "loss": 1.6585, "step": 401},
    {"epoch": 0.21, "grad_norm": 0.0, "learning_rate": 0.00012059999999999999, "loss": 1.6486, "step": 402},
    {"epoch": 0.21, "grad_norm": 0.0, "learning_rate": 0.0001209, "loss": 1.6547, "step": 403},
    {"epoch": 0.21, "grad_norm": 0.0, "learning_rate": 0.00012119999999999999, "loss": 1.6494, "step": 404},
    {"epoch": 0.21, "grad_norm": 0.0, "learning_rate": 0.0001215, "loss": 1.6632, "step": 405},
    {"epoch": 0.21, "grad_norm": 0.0, "learning_rate": 0.00012179999999999999, "loss": 1.6533, "step": 406},
    {"epoch": 0.21, "grad_norm": 0.0, "learning_rate": 0.00012209999999999999, "loss": 1.653, "step": 407},
    {"epoch": 0.21, "grad_norm": 0.0, "learning_rate": 0.0001224, "loss": 1.6487, "step": 408},
    {"epoch": 0.21, "grad_norm": 0.0, "learning_rate": 0.00012269999999999997, "loss": 1.6696, "step": 409},
    {"epoch": 0.21, "grad_norm": 0.0, "learning_rate": 0.00012299999999999998, "loss": 1.6617, "step": 410},
    {"epoch": 0.21, "grad_norm": 0.0, "learning_rate": 0.0001233, "loss": 1.6801, "step": 411},
    {"epoch": 0.21, "grad_norm": 0.0, "learning_rate": 0.0001236, "loss": 1.6414, "step": 412},
    {"epoch": 0.22, "grad_norm": 0.0, "learning_rate": 0.00012389999999999998, "loss": 1.6526, "step": 413},
    {"epoch": 0.22, "grad_norm": 0.0, "learning_rate": 0.00012419999999999998, "loss": 1.6387, "step": 414},
    {"epoch": 0.22, "grad_norm": 0.0, "learning_rate": 0.0001245, "loss": 1.6563, "step": 415},
    {"epoch": 0.22, "grad_norm": 0.0, "learning_rate": 0.00012479999999999997, "loss": 1.6653, "step": 416},
    {"epoch": 0.22, "grad_norm": 0.0, "learning_rate": 0.00012509999999999998, "loss": 1.6525, "step": 417},
    {"epoch": 0.22, "grad_norm": 0.0, "learning_rate": 0.00012539999999999999, "loss": 1.6579, "step": 418},
    {"epoch": 0.22, "grad_norm": 0.0, "learning_rate": 0.0001257, "loss": 1.654, "step": 419},
    {"epoch": 0.22, "grad_norm": 0.0, "learning_rate": 0.00012599999999999997, "loss": 1.6781, "step": 420},
    {"epoch": 0.22, "grad_norm": 0.0, "learning_rate": 0.00012629999999999998, "loss": 1.6362, "step": 421},
    {"epoch": 0.22, "grad_norm": 0.0, "learning_rate": 0.0001266, "loss": 1.6651, "step": 422},
    {"epoch": 0.22, "grad_norm": 0.0, "learning_rate": 0.0001269, "loss": 1.6268, "step": 423},
    {"epoch": 0.22, "grad_norm": 0.0, "learning_rate": 0.00012719999999999997, "loss": 1.6705, "step": 424},
    {"epoch": 0.22, "grad_norm": 0.0, "learning_rate": 0.00012749999999999998, "loss": 1.6398, "step": 425},
    {"epoch": 0.22, "grad_norm": 0.0, "learning_rate": 0.0001278, "loss": 1.6484, "step": 426},
    {"epoch": 0.22, "grad_norm": 0.0, "learning_rate": 0.0001281, "loss": 1.657, "step": 427},
    {"epoch": 0.22, "grad_norm": 0.0, "learning_rate": 0.00012839999999999998, "loss": 1.6514, "step": 428},
    {"epoch": 0.22, "grad_norm": 0.0, "learning_rate": 0.00012869999999999998, "loss": 1.651, "step": 429},
    {"epoch": 0.22, "grad_norm": 0.0, "learning_rate": 0.000129, "loss": 1.6756, "step": 430},
    {"epoch": 0.22, "grad_norm": 0.0, "learning_rate": 0.0001293, "loss": 1.6585, "step": 431},
    {"epoch": 0.23, "grad_norm": 0.0, "learning_rate": 0.00012959999999999998, "loss": 1.662, "step": 432},
    {"epoch": 0.23, "grad_norm": 0.0, "learning_rate": 0.00012989999999999999, "loss": 1.6524, "step": 433},
    {"epoch": 0.23, "grad_norm": 0.0, "learning_rate": 0.0001302, "loss": 1.6458, "step": 434},
    {"epoch": 0.23, "grad_norm": 0.0, "learning_rate": 0.0001305, "loss": 1.6644, "step": 435},
    {"epoch": 0.23, "grad_norm": 0.0, "learning_rate": 0.00013079999999999998, "loss": 1.6708, "step": 436},
    {"epoch": 0.23, "grad_norm": 0.0, "learning_rate": 0.0001311, "loss": 1.6388, "step": 437},
    {"epoch": 0.23, "grad_norm": 0.0, "learning_rate": 0.0001314, "loss": 1.655, "step": 438},
    {"epoch": 0.23, "grad_norm": 0.0, "learning_rate": 0.00013169999999999998, "loss": 1.6607, "step": 439},
    {"epoch": 0.23, "grad_norm": 0.0, "learning_rate": 0.00013199999999999998, "loss": 1.6376, "step": 440},
    {"epoch": 0.23, "grad_norm": 0.0, "learning_rate": 0.0001323, "loss": 1.6594, "step": 441},
    {"epoch": 0.23, "grad_norm": 0.0, "learning_rate": 0.0001326, "loss": 1.6623, "step": 442},
    {"epoch": 0.23, "grad_norm": 0.0, "learning_rate": 0.00013289999999999998, "loss": 1.6692, "step": 443},
    {"epoch": 0.23, "grad_norm": 0.0, "learning_rate": 0.00013319999999999999, "loss": 1.6618, "step": 444},
    {"epoch": 0.23, "grad_norm": 0.0, "learning_rate": 0.0001335, "loss": 1.6552, "step": 445},
    {"epoch": 0.23, "grad_norm": 0.0, "learning_rate": 0.0001338, "loss": 1.6516, "step": 446},
    {"epoch": 0.23, "grad_norm": 0.0, "learning_rate": 0.00013409999999999998, "loss": 1.6479, "step": 447},
    {"epoch": 0.23, "grad_norm": 0.0, "learning_rate": 0.0001344, "loss": 1.6623, "step": 448},
    {"epoch": 0.23, "grad_norm": 0.0, "learning_rate": 0.0001347, "loss": 1.6561, "step": 449},
    {"epoch": 0.23, "grad_norm": 0.0, "learning_rate": 0.000135, "loss": 1.6453, "step": 450},
    {"epoch": 0.24, "grad_norm": 0.0, "learning_rate": 0.00013529999999999998, "loss": 1.6459, "step": 451},
    {"epoch": 0.24, "grad_norm": 0.0, "learning_rate": 0.0001356, "loss": 1.6325, "step": 452},
    {"epoch": 0.24, "grad_norm": 0.0, "learning_rate": 0.0001359, "loss": 1.6587, "step": 453},
    {"epoch": 0.24, "grad_norm": 0.0, "learning_rate": 0.0001362, "loss": 1.6438, "step": 454},
    {"epoch": 0.24, "grad_norm": 0.0, "learning_rate": 0.00013649999999999998, "loss": 1.6464, "step": 455},
    {"epoch": 0.24, "grad_norm": 0.0, "learning_rate": 0.0001368, "loss": 1.6478, "step": 456},
    {"epoch": 0.24, "grad_norm": 0.0, "learning_rate": 0.0001371, "loss": 1.6712, "step": 457},
    {"epoch": 0.24, "grad_norm": 0.0, "learning_rate": 0.0001374, "loss": 1.6451, "step": 458},
    {"epoch": 0.24, "grad_norm": 0.0, "learning_rate": 0.00013769999999999999, "loss": 1.6826, "step": 459},
    {"epoch": 0.24, "grad_norm": 0.0, "learning_rate": 0.000138, "loss": 1.6632, "step": 460},
    {"epoch": 0.24, "grad_norm": 0.0, "learning_rate": 0.0001383, "loss": 1.6595, "step": 461},
    {"epoch": 0.24, "grad_norm": 0.0, "learning_rate": 0.0001386, "loss": 1.6592, "step": 462},
    {"epoch": 0.24, "grad_norm": 0.0, "learning_rate": 0.0001389, "loss": 1.6475, "step": 463},
    {"epoch": 0.24, "grad_norm": 0.0, "learning_rate": 0.0001392, "loss": 1.6383, "step": 464},
    {"epoch": 0.24, "grad_norm": 0.0, "learning_rate": 0.0001395, "loss": 1.6401, "step": 465},
    {"epoch": 0.24, "grad_norm": 0.0, "learning_rate": 0.00013979999999999998, "loss": 1.6586, "step": 466},
    {"epoch": 0.24, "grad_norm": 0.0, "learning_rate": 0.0001401, "loss": 1.6574, "step": 467},
    {"epoch": 0.24, "grad_norm": 0.0, "learning_rate": 0.0001404, "loss": 1.6621, "step": 468},
    {"epoch": 0.24, "grad_norm": 0.0, "learning_rate": 0.00014069999999999998, "loss": 1.6457, "step": 469},
    {"epoch": 0.25, "grad_norm": 0.0, "learning_rate": 0.00014099999999999998, "loss": 1.6519, "step": 470},
    {"epoch": 0.25,
"grad_norm": 0.0, | |
"learning_rate": 0.0001413, | |
"loss": 1.6578, | |
"step": 471 | |
}, | |
{ | |
"epoch": 0.25, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014159999999999997, | |
"loss": 1.657, | |
"step": 472 | |
}, | |
{ | |
"epoch": 0.25, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014189999999999998, | |
"loss": 1.6359, | |
"step": 473 | |
}, | |
{ | |
"epoch": 0.25, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001422, | |
"loss": 1.6501, | |
"step": 474 | |
}, | |
{ | |
"epoch": 0.25, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001425, | |
"loss": 1.6426, | |
"step": 475 | |
}, | |
{ | |
"epoch": 0.25, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014279999999999997, | |
"loss": 1.6347, | |
"step": 476 | |
}, | |
{ | |
"epoch": 0.25, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014309999999999998, | |
"loss": 1.6464, | |
"step": 477 | |
}, | |
{ | |
"epoch": 0.25, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001434, | |
"loss": 1.6591, | |
"step": 478 | |
}, | |
{ | |
"epoch": 0.25, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014369999999999997, | |
"loss": 1.655, | |
"step": 479 | |
}, | |
{ | |
"epoch": 0.25, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014399999999999998, | |
"loss": 1.6469, | |
"step": 480 | |
}, | |
{ | |
"epoch": 0.25, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014429999999999998, | |
"loss": 1.6506, | |
"step": 481 | |
}, | |
{ | |
"epoch": 0.25, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001446, | |
"loss": 1.6408, | |
"step": 482 | |
}, | |
{ | |
"epoch": 0.25, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014489999999999997, | |
"loss": 1.6544, | |
"step": 483 | |
}, | |
{ | |
"epoch": 0.25, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014519999999999998, | |
"loss": 1.6603, | |
"step": 484 | |
}, | |
{ | |
"epoch": 0.25, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014549999999999999, | |
"loss": 1.6604, | |
"step": 485 | |
}, | |
{ | |
"epoch": 0.25, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001458, | |
"loss": 1.6404, | |
"step": 486 | |
}, | |
{ | |
"epoch": 0.25, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014609999999999997, | |
"loss": 1.6665, | |
"step": 487 | |
}, | |
{ | |
"epoch": 0.25, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014639999999999998, | |
"loss": 1.6435, | |
"step": 488 | |
}, | |
{ | |
"epoch": 0.25, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001467, | |
"loss": 1.6477, | |
"step": 489 | |
}, | |
{ | |
"epoch": 0.26, | |
"grad_norm": 0.0, | |
"learning_rate": 0.000147, | |
"loss": 1.664, | |
"step": 490 | |
}, | |
{ | |
"epoch": 0.26, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014729999999999998, | |
"loss": 1.6645, | |
"step": 491 | |
}, | |
{ | |
"epoch": 0.26, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014759999999999998, | |
"loss": 1.6547, | |
"step": 492 | |
}, | |
{ | |
"epoch": 0.26, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001479, | |
"loss": 1.6499, | |
"step": 493 | |
}, | |
{ | |
"epoch": 0.26, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001482, | |
"loss": 1.6736, | |
"step": 494 | |
}, | |
{ | |
"epoch": 0.26, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014849999999999998, | |
"loss": 1.6474, | |
"step": 495 | |
}, | |
{ | |
"epoch": 0.26, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014879999999999998, | |
"loss": 1.643, | |
"step": 496 | |
}, | |
{ | |
"epoch": 0.26, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001491, | |
"loss": 1.6694, | |
"step": 497 | |
}, | |
{ | |
"epoch": 0.26, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001494, | |
"loss": 1.667, | |
"step": 498 | |
}, | |
{ | |
"epoch": 0.26, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014969999999999998, | |
"loss": 1.6501, | |
"step": 499 | |
}, | |
{ | |
"epoch": 0.26, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00015, | |
"loss": 1.6516, | |
"step": 500 | |
}, | |
{ | |
"epoch": 0.26, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014999981593196598, | |
"loss": 1.6726, | |
"step": 501 | |
}, | |
{ | |
"epoch": 0.26, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014999926372876737, | |
"loss": 1.6455, | |
"step": 502 | |
}, | |
{ | |
"epoch": 0.26, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001499983433931147, | |
"loss": 1.6556, | |
"step": 503 | |
}, | |
{ | |
"epoch": 0.26, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014999705492952539, | |
"loss": 1.6552, | |
"step": 504 | |
}, | |
{ | |
"epoch": 0.26, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014999539834432388, | |
"loss": 1.6532, | |
"step": 505 | |
}, | |
{ | |
"epoch": 0.26, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014999337364564143, | |
"loss": 1.6474, | |
"step": 506 | |
}, | |
{ | |
"epoch": 0.26, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014999098084341627, | |
"loss": 1.6289, | |
"step": 507 | |
}, | |
{ | |
"epoch": 0.26, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001499882199493934, | |
"loss": 1.6588, | |
"step": 508 | |
}, | |
{ | |
"epoch": 0.27, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014998509097712465, | |
"loss": 1.6426, | |
"step": 509 | |
}, | |
{ | |
"epoch": 0.27, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014998159394196848, | |
"loss": 1.651, | |
"step": 510 | |
}, | |
{ | |
"epoch": 0.27, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014997772886109006, | |
"loss": 1.6489, | |
"step": 511 | |
}, | |
{ | |
"epoch": 0.27, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014997349575346104, | |
"loss": 1.6528, | |
"step": 512 | |
}, | |
{ | |
"epoch": 0.27, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014996889463985953, | |
"loss": 1.6596, | |
"step": 513 | |
}, | |
{ | |
"epoch": 0.27, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014996392554287005, | |
"loss": 1.6511, | |
"step": 514 | |
}, | |
{ | |
"epoch": 0.27, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001499585884868833, | |
"loss": 1.6775, | |
"step": 515 | |
}, | |
{ | |
"epoch": 0.27, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014995288349809612, | |
"loss": 1.6515, | |
"step": 516 | |
}, | |
{ | |
"epoch": 0.27, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014994681060451133, | |
"loss": 1.6479, | |
"step": 517 | |
}, | |
{ | |
"epoch": 0.27, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014994036983593764, | |
"loss": 1.6599, | |
"step": 518 | |
}, | |
{ | |
"epoch": 0.27, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001499335612239894, | |
"loss": 1.6525, | |
"step": 519 | |
}, | |
{ | |
"epoch": 0.27, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014992638480208656, | |
"loss": 1.6565, | |
"step": 520 | |
}, | |
{ | |
"epoch": 0.27, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001499188406054545, | |
"loss": 1.646, | |
"step": 521 | |
}, | |
{ | |
"epoch": 0.27, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014991092867112368, | |
"loss": 1.645, | |
"step": 522 | |
}, | |
{ | |
"epoch": 0.27, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014990264903792973, | |
"loss": 1.6613, | |
"step": 523 | |
}, | |
{ | |
"epoch": 0.27, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014989400174651306, | |
"loss": 1.6602, | |
"step": 524 | |
}, | |
{ | |
"epoch": 0.27, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014988498683931876, | |
"loss": 1.654, | |
"step": 525 | |
}, | |
{ | |
"epoch": 0.27, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014987560436059631, | |
"loss": 1.6655, | |
"step": 526 | |
}, | |
{ | |
"epoch": 0.27, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001498658543563994, | |
"loss": 1.6392, | |
"step": 527 | |
}, | |
{ | |
"epoch": 0.28, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001498557368745858, | |
"loss": 1.6534, | |
"step": 528 | |
}, | |
{ | |
"epoch": 0.28, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014984525196481692, | |
"loss": 1.67, | |
"step": 529 | |
}, | |
{ | |
"epoch": 0.28, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014983439967855778, | |
"loss": 1.6592, | |
"step": 530 | |
}, | |
{ | |
"epoch": 0.28, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014982318006907658, | |
"loss": 1.6624, | |
"step": 531 | |
}, | |
{ | |
"epoch": 0.28, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001498115931914446, | |
"loss": 1.6558, | |
"step": 532 | |
}, | |
{ | |
"epoch": 0.28, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014979963910253578, | |
"loss": 1.6279, | |
"step": 533 | |
}, | |
{ | |
"epoch": 0.28, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014978731786102653, | |
"loss": 1.6544, | |
"step": 534 | |
}, | |
{ | |
"epoch": 0.28, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001497746295273955, | |
"loss": 1.661, | |
"step": 535 | |
}, | |
{ | |
"epoch": 0.28, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014976157416392302, | |
"loss": 1.645, | |
"step": 536 | |
}, | |
{ | |
"epoch": 0.28, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014974815183469115, | |
"loss": 1.6693, | |
"step": 537 | |
}, | |
{ | |
"epoch": 0.28, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014973436260558317, | |
"loss": 1.6579, | |
"step": 538 | |
}, | |
{ | |
"epoch": 0.28, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001497202065442832, | |
"loss": 1.6589, | |
"step": 539 | |
}, | |
{ | |
"epoch": 0.28, | |
"grad_norm": 0.0, | |
"learning_rate": 0.000149705683720276, | |
"loss": 1.6551, | |
"step": 540 | |
}, | |
{ | |
"epoch": 0.28, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014969079420484662, | |
"loss": 1.6549, | |
"step": 541 | |
}, | |
{ | |
"epoch": 0.28, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014967553807107987, | |
"loss": 1.6439, | |
"step": 542 | |
}, | |
{ | |
"epoch": 0.28, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001496599153938603, | |
"loss": 1.6627, | |
"step": 543 | |
}, | |
{ | |
"epoch": 0.28, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014964392624987146, | |
"loss": 1.6512, | |
"step": 544 | |
}, | |
{ | |
"epoch": 0.28, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014962757071759576, | |
"loss": 1.6348, | |
"step": 545 | |
}, | |
{ | |
"epoch": 0.28, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014961084887731404, | |
"loss": 1.6485, | |
"step": 546 | |
}, | |
{ | |
"epoch": 0.29, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014959376081110514, | |
"loss": 1.647, | |
"step": 547 | |
}, | |
{ | |
"epoch": 0.29, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014957630660284544, | |
"loss": 1.6474, | |
"step": 548 | |
}, | |
{ | |
"epoch": 0.29, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014955848633820866, | |
"loss": 1.6555, | |
"step": 549 | |
}, | |
{ | |
"epoch": 0.29, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014954030010466522, | |
"loss": 1.6531, | |
"step": 550 | |
}, | |
{ | |
"epoch": 0.29, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014952174799148186, | |
"loss": 1.6504, | |
"step": 551 | |
}, | |
{ | |
"epoch": 0.29, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001495028300897213, | |
"loss": 1.6661, | |
"step": 552 | |
}, | |
{ | |
"epoch": 0.29, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001494835464922417, | |
"loss": 1.653, | |
"step": 553 | |
}, | |
{ | |
"epoch": 0.29, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014946389729369628, | |
"loss": 1.6695, | |
"step": 554 | |
}, | |
{ | |
"epoch": 0.29, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014944388259053267, | |
"loss": 1.6584, | |
"step": 555 | |
}, | |
{ | |
"epoch": 0.29, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014942350248099267, | |
"loss": 1.671, | |
"step": 556 | |
}, | |
{ | |
"epoch": 0.29, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001494027570651117, | |
"loss": 1.6601, | |
"step": 557 | |
}, | |
{ | |
"epoch": 0.29, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014938164644471823, | |
"loss": 1.6657, | |
"step": 558 | |
}, | |
{ | |
"epoch": 0.29, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001493601707234333, | |
"loss": 1.6327, | |
"step": 559 | |
}, | |
{ | |
"epoch": 0.29, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001493383300066701, | |
"loss": 1.6561, | |
"step": 560 | |
}, | |
{ | |
"epoch": 0.29, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001493161244016334, | |
"loss": 1.6533, | |
"step": 561 | |
}, | |
{ | |
"epoch": 0.29, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014929355401731894, | |
"loss": 1.6525, | |
"step": 562 | |
}, | |
{ | |
"epoch": 0.29, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014927061896451307, | |
"loss": 1.6407, | |
"step": 563 | |
}, | |
{ | |
"epoch": 0.29, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014924731935579203, | |
"loss": 1.6623, | |
"step": 564 | |
}, | |
{ | |
"epoch": 0.29, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001492236553055215, | |
"loss": 1.6651, | |
"step": 565 | |
}, | |
{ | |
"epoch": 0.3, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014919962692985602, | |
"loss": 1.6309, | |
"step": 566 | |
}, | |
{ | |
"epoch": 0.3, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014917523434673842, | |
"loss": 1.6639, | |
"step": 567 | |
}, | |
{ | |
"epoch": 0.3, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014915047767589924, | |
"loss": 1.6688, | |
"step": 568 | |
}, | |
{ | |
"epoch": 0.3, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001491253570388561, | |
"loss": 1.6388, | |
"step": 569 | |
}, | |
{ | |
"epoch": 0.3, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014909987255891317, | |
"loss": 1.6619, | |
"step": 570 | |
}, | |
{ | |
"epoch": 0.3, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014907402436116057, | |
"loss": 1.683, | |
"step": 571 | |
}, | |
{ | |
"epoch": 0.3, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014904781257247366, | |
"loss": 1.6446, | |
"step": 572 | |
}, | |
{ | |
"epoch": 0.3, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001490212373215125, | |
"loss": 1.651, | |
"step": 573 | |
}, | |
{ | |
"epoch": 0.3, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001489942987387212, | |
"loss": 1.6545, | |
"step": 574 | |
}, | |
{ | |
"epoch": 0.3, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001489669969563273, | |
"loss": 1.6536, | |
"step": 575 | |
}, | |
{ | |
"epoch": 0.3, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014893933210834105, | |
"loss": 1.6469, | |
"step": 576 | |
}, | |
{ | |
"epoch": 0.3, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014891130433055484, | |
"loss": 1.6587, | |
"step": 577 | |
}, | |
{ | |
"epoch": 0.3, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001488829137605425, | |
"loss": 1.645, | |
"step": 578 | |
}, | |
{ | |
"epoch": 0.3, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014885416053765855, | |
"loss": 1.6354, | |
"step": 579 | |
}, | |
{ | |
"epoch": 0.3, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001488250448030377, | |
"loss": 1.6628, | |
"step": 580 | |
}, | |
{ | |
"epoch": 0.3, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014879556669959395, | |
"loss": 1.6405, | |
"step": 581 | |
}, | |
{ | |
"epoch": 0.3, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014876572637201996, | |
"loss": 1.6612, | |
"step": 582 | |
}, | |
{ | |
"epoch": 0.3, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001487355239667865, | |
"loss": 1.6774, | |
"step": 583 | |
}, | |
{ | |
"epoch": 0.3, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014870495963214143, | |
"loss": 1.6538, | |
"step": 584 | |
}, | |
{ | |
"epoch": 0.3, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014867403351810925, | |
"loss": 1.679, | |
"step": 585 | |
}, | |
{ | |
"epoch": 0.31, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014864274577649017, | |
"loss": 1.6465, | |
"step": 586 | |
}, | |
{ | |
"epoch": 0.31, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014861109656085945, | |
"loss": 1.6633, | |
"step": 587 | |
}, | |
{ | |
"epoch": 0.31, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001485790860265667, | |
"loss": 1.6488, | |
"step": 588 | |
}, | |
{ | |
"epoch": 0.31, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014854671433073502, | |
"loss": 1.6569, | |
"step": 589 | |
}, | |
{ | |
"epoch": 0.31, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014851398163226025, | |
"loss": 1.6447, | |
"step": 590 | |
}, | |
{ | |
"epoch": 0.31, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001484808880918102, | |
"loss": 1.6537, | |
"step": 591 | |
}, | |
{ | |
"epoch": 0.31, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001484474338718239, | |
"loss": 1.6468, | |
"step": 592 | |
}, | |
{ | |
"epoch": 0.31, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014841361913651074, | |
"loss": 1.6592, | |
"step": 593 | |
}, | |
{ | |
"epoch": 0.31, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001483794440518497, | |
"loss": 1.6674, | |
"step": 594 | |
}, | |
{ | |
"epoch": 0.31, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014834490878558855, | |
"loss": 1.6412, | |
"step": 595 | |
}, | |
{ | |
"epoch": 0.31, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014831001350724295, | |
"loss": 1.6574, | |
"step": 596 | |
}, | |
{ | |
"epoch": 0.31, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014827475838809576, | |
"loss": 1.659, | |
"step": 597 | |
}, | |
{ | |
"epoch": 0.31, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014823914360119603, | |
"loss": 1.6441, | |
"step": 598 | |
}, | |
{ | |
"epoch": 0.31, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014820316932135825, | |
"loss": 1.6548, | |
"step": 599 | |
}, | |
{ | |
"epoch": 0.31, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014816683572516147, | |
"loss": 1.6519, | |
"step": 600 | |
}, | |
{ | |
"epoch": 0.31, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014813014299094852, | |
"loss": 1.6538, | |
"step": 601 | |
}, | |
{ | |
"epoch": 0.31, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001480930912988249, | |
"loss": 1.6599, | |
"step": 602 | |
}, | |
{ | |
"epoch": 0.31, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014805568083065823, | |
"loss": 1.6589, | |
"step": 603 | |
}, | |
{ | |
"epoch": 0.31, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014801791177007696, | |
"loss": 1.6579, | |
"step": 604 | |
}, | |
{ | |
"epoch": 0.32, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014797978430246993, | |
"loss": 1.6509, | |
"step": 605 | |
}, | |
{ | |
"epoch": 0.32, | |
"grad_norm": 0.0, | |
"learning_rate": 0.000147941298614985, | |
"loss": 1.6476, | |
"step": 606 | |
}, | |
{ | |
"epoch": 0.32, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014790245489652845, | |
"loss": 1.6548, | |
"step": 607 | |
}, | |
{ | |
"epoch": 0.32, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014786325333776397, | |
"loss": 1.6546, | |
"step": 608 | |
}, | |
{ | |
"epoch": 0.32, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001478236941311116, | |
"loss": 1.6622, | |
"step": 609 | |
}, | |
{ | |
"epoch": 0.32, | |
"grad_norm": 0.0, | |
"learning_rate": 0.000147783777470747, | |
"loss": 1.6757, | |
"step": 610 | |
}, | |
{ | |
"epoch": 0.32, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014774350355260032, | |
"loss": 1.6642, | |
"step": 611 | |
}, | |
{ | |
"epoch": 0.32, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001477028725743553, | |
"loss": 1.6521, | |
"step": 612 | |
}, | |
{ | |
"epoch": 0.32, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014766188473544834, | |
"loss": 1.6543, | |
"step": 613 | |
}, | |
{ | |
"epoch": 0.32, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014762054023706746, | |
"loss": 1.6607, | |
"step": 614 | |
}, | |
{ | |
"epoch": 0.32, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014757883928215136, | |
"loss": 1.6506, | |
"step": 615 | |
}, | |
{ | |
"epoch": 0.32, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014753678207538834, | |
"loss": 1.6506, | |
"step": 616 | |
}, | |
{ | |
"epoch": 0.32, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014749436882321543, | |
"loss": 1.6483, | |
"step": 617 | |
}, | |
{ | |
"epoch": 0.32, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014745159973381724, | |
"loss": 1.6717, | |
"step": 618 | |
}, | |
{ | |
"epoch": 0.32, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001474084750171251, | |
"loss": 1.6699, | |
"step": 619 | |
}, | |
{ | |
"epoch": 0.32, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014736499488481576, | |
"loss": 1.6581, | |
"step": 620 | |
}, | |
{ | |
"epoch": 0.32, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014732115955031066, | |
"loss": 1.6441, | |
"step": 621 | |
}, | |
{ | |
"epoch": 0.32, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014727696922877474, | |
"loss": 1.6411, | |
"step": 622 | |
}, | |
{ | |
"epoch": 0.32, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014723242413711528, | |
"loss": 1.6502, | |
"step": 623 | |
}, | |
{ | |
"epoch": 0.33, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014718752449398108, | |
"loss": 1.6653, | |
"step": 624 | |
}, | |
{ | |
"epoch": 0.33, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014714227051976114, | |
"loss": 1.6489, | |
"step": 625 | |
}, | |
{ | |
"epoch": 0.33, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014709666243658374, | |
"loss": 1.6482, | |
"step": 626 | |
}, | |
{ | |
"epoch": 0.33, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014705070046831526, | |
"loss": 1.6768, | |
"step": 627 | |
}, | |
{ | |
"epoch": 0.33, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001470043848405592, | |
"loss": 1.6537, | |
"step": 628 | |
}, | |
{ | |
"epoch": 0.33, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014695771578065488, | |
"loss": 1.6611, | |
"step": 629 | |
}, | |
{ | |
"epoch": 0.33, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014691069351767654, | |
"loss": 1.6694, | |
"step": 630 | |
}, | |
{ | |
"epoch": 0.33, | |
"grad_norm": 0.0, | |
"learning_rate": 0.000146863318282432, | |
"loss": 1.6535, | |
"step": 631 | |
}, | |
{ | |
"epoch": 0.33, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014681559030746175, | |
"loss": 1.6731, | |
"step": 632 | |
}, | |
{ | |
"epoch": 0.33, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014676750982703761, | |
"loss": 1.6565, | |
"step": 633 | |
}, | |
{ | |
"epoch": 0.33, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001467190770771617, | |
"loss": 1.6494, | |
"step": 634 | |
}, | |
{ | |
"epoch": 0.33, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014667029229556528, | |
"loss": 1.6612, | |
"step": 635 | |
}, | |
{ | |
"epoch": 0.33, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014662115572170752, | |
"loss": 1.6566, | |
"step": 636 | |
}, | |
{ | |
"epoch": 0.33, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001465716675967743, | |
"loss": 1.6537, | |
"step": 637 | |
}, | |
{ | |
"epoch": 0.33, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001465218281636772, | |
"loss": 1.6508, | |
"step": 638 | |
}, | |
{ | |
"epoch": 0.33, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014647163766705207, | |
"loss": 1.6699, | |
"step": 639 | |
}, | |
{ | |
"epoch": 0.33, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014642109635325804, | |
"loss": 1.6522, | |
"step": 640 | |
}, | |
{ | |
"epoch": 0.33, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014637020447037617, | |
"loss": 1.659, | |
"step": 641 | |
}, | |
{ | |
"epoch": 0.33, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014631896226820833, | |
"loss": 1.6593, | |
"step": 642 | |
}, | |
{ | |
"epoch": 0.34, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014626736999827583, | |
"loss": 1.6426, | |
"step": 643 | |
}, | |
{ | |
"epoch": 0.34, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001462154279138184, | |
"loss": 1.6278, | |
"step": 644 | |
}, | |
{ | |
"epoch": 0.34, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014616313626979272, | |
"loss": 1.6515, | |
"step": 645 | |
}, | |
{ | |
"epoch": 0.34, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014611049532287136, | |
"loss": 1.6503, | |
"step": 646 | |
}, | |
{ | |
"epoch": 0.34, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001460575053314414, | |
"loss": 1.6692, | |
"step": 647 | |
}, | |
{ | |
"epoch": 0.34, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001460041665556032, | |
"loss": 1.6574, | |
"step": 648 | |
}, | |
{ | |
"epoch": 0.34, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014595047925716908, | |
"loss": 1.6474, | |
"step": 649 | |
}, | |
{ | |
"epoch": 0.34, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001458964436996622, | |
"loss": 1.6522, | |
"step": 650 | |
}, | |
{ | |
"epoch": 0.34, | |
"grad_norm": 0.0, | |
"learning_rate": 0.000145842060148315, | |
"loss": 1.6539, | |
"step": 651 | |
}, | |
{ | |
"epoch": 0.34, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014578732887006812, | |
"loss": 1.6599, | |
"step": 652 | |
}, | |
{ | |
"epoch": 0.34, | |
"grad_norm": 0.0, | |
"learning_rate": 0.000145732250133569, | |
"loss": 1.6651, | |
"step": 653 | |
}, | |
{ | |
"epoch": 0.34, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014567682420917055, | |
"loss": 1.6533, | |
"step": 654 | |
}, | |
{ | |
"epoch": 0.34, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014562105136892988, | |
"loss": 1.6649, | |
"step": 655 | |
}, | |
{ | |
"epoch": 0.34, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001455649318866069, | |
"loss": 1.6402, | |
"step": 656 | |
}, | |
{ | |
"epoch": 0.34, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014550846603766305, | |
"loss": 1.6462, | |
"step": 657 | |
}, | |
{ | |
"epoch": 0.34, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001454516540992598, | |
"loss": 1.6573, | |
"step": 658 | |
}, | |
{ | |
"epoch": 0.34, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014539449635025755, | |
"loss": 1.6538, | |
"step": 659 | |
}, | |
{ | |
"epoch": 0.34, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014533699307121397, | |
"loss": 1.6642, | |
"step": 660 | |
}, | |
{ | |
"epoch": 0.34, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014527914454438284, | |
"loss": 1.6542, | |
"step": 661 | |
}, | |
{ | |
"epoch": 0.35, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001452209510537125, | |
"loss": 1.6563, | |
"step": 662 | |
}, | |
{ | |
"epoch": 0.35, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014516241288484462, | |
"loss": 1.6551, | |
"step": 663 | |
}, | |
{ | |
"epoch": 0.35, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014510353032511268, | |
"loss": 1.6374, | |
"step": 664 | |
}, | |
{ | |
"epoch": 0.35, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014504430366354056, | |
"loss": 1.6378, | |
"step": 665 | |
}, | |
{ | |
"epoch": 0.35, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014498473319084128, | |
"loss": 1.6535, | |
"step": 666 | |
}, | |
{ | |
"epoch": 0.35, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001449248191994153, | |
"loss": 1.6614, | |
"step": 667 | |
}, | |
{ | |
"epoch": 0.35, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014486456198334937, | |
"loss": 1.643, | |
"step": 668 | |
}, | |
{ | |
"epoch": 0.35, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014480396183841478, | |
"loss": 1.6571, | |
"step": 669 | |
}, | |
{ | |
"epoch": 0.35, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014474301906206632, | |
"loss": 1.6519, | |
"step": 670 | |
}, | |
{ | |
"epoch": 0.35, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001446817339534403, | |
"loss": 1.6428, | |
"step": 671 | |
}, | |
{ | |
"epoch": 0.35, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001446201068133536, | |
"loss": 1.6459, | |
"step": 672 | |
}, | |
{ | |
"epoch": 0.35, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014455813794430185, | |
"loss": 1.6396, | |
"step": 673 | |
}, | |
{ | |
"epoch": 0.35, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014449582765045803, | |
"loss": 1.6413, | |
"step": 674 | |
}, | |
{ | |
"epoch": 0.35, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014443317623767107, | |
"loss": 1.6386, | |
"step": 675 | |
}, | |
{ | |
"epoch": 0.35, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001443701840134642, | |
"loss": 1.6386, | |
"step": 676 | |
}, | |
{ | |
"epoch": 0.35, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014430685128703354, | |
"loss": 1.6546, | |
"step": 677 | |
}, | |
{ | |
"epoch": 0.35, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001442431783692466, | |
"loss": 1.6514, | |
"step": 678 | |
}, | |
{ | |
"epoch": 0.35, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014417916557264064, | |
"loss": 1.6774, | |
"step": 679 | |
}, | |
{ | |
"epoch": 0.35, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014411481321142133, | |
"loss": 1.641, | |
"step": 680 | |
}, | |
{ | |
"epoch": 0.36, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014405012160146092, | |
"loss": 1.6526, | |
"step": 681 | |
}, | |
{ | |
"epoch": 0.36, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014398509106029698, | |
"loss": 1.6474, | |
"step": 682 | |
}, | |
{ | |
"epoch": 0.36, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001439197219071307, | |
"loss": 1.6382, | |
"step": 683 | |
}, | |
{ | |
"epoch": 0.36, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014385401446282526, | |
"loss": 1.6572, | |
"step": 684 | |
}, | |
{ | |
"epoch": 0.36, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014378796904990443, | |
"loss": 1.6485, | |
"step": 685 | |
}, | |
{ | |
"epoch": 0.36, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001437215859925509, | |
"loss": 1.6558, | |
"step": 686 | |
}, | |
{ | |
"epoch": 0.36, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014365486561660458, | |
"loss": 1.6601, | |
"step": 687 | |
}, | |
{ | |
"epoch": 0.36, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014358780824956117, | |
"loss": 1.6348, | |
"step": 688 | |
}, | |
{ | |
"epoch": 0.36, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001435204142205705, | |
"loss": 1.6456, | |
"step": 689 | |
}, | |
{ | |
"epoch": 0.36, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014345268386043485, | |
"loss": 1.6542, | |
"step": 690 | |
}, | |
{ | |
"epoch": 0.36, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014338461750160742, | |
"loss": 1.67, | |
"step": 691 | |
}, | |
{ | |
"epoch": 0.36, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014331621547819062, | |
"loss": 1.6694, | |
"step": 692 | |
}, | |
{ | |
"epoch": 0.36, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014324747812593448, | |
"loss": 1.6565, | |
"step": 693 | |
}, | |
{ | |
"epoch": 0.36, | |
"grad_norm": 0.0, | |
"learning_rate": 0.000143178405782235, | |
"loss": 1.6658, | |
"step": 694 | |
}, | |
{ | |
"epoch": 0.36, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014310899878613244, | |
"loss": 1.6709, | |
"step": 695 | |
}, | |
{ | |
"epoch": 0.36, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001430392574783097, | |
"loss": 1.6352, | |
"step": 696 | |
}, | |
{ | |
"epoch": 0.36, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014296918220109067, | |
"loss": 1.6513, | |
"step": 697 | |
}, | |
{ | |
"epoch": 0.36, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014289877329843855, | |
"loss": 1.6542, | |
"step": 698 | |
}, | |
{ | |
"epoch": 0.36, | |
"grad_norm": 0.0, | |
"learning_rate": 0.000142828031115954, | |
"loss": 1.6565, | |
"step": 699 | |
}, | |
{ | |
"epoch": 0.36, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001427569560008738, | |
"loss": 1.6545, | |
"step": 700 | |
}, | |
{ | |
"epoch": 0.37, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014268554830206869, | |
"loss": 1.645, | |
"step": 701 | |
}, | |
{ | |
"epoch": 0.37, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014261380837004203, | |
"loss": 1.644, | |
"step": 702 | |
}, | |
{ | |
"epoch": 0.37, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001425417365569279, | |
"loss": 1.6971, | |
"step": 703 | |
}, | |
{ | |
"epoch": 0.37, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014246933321648947, | |
"loss": 1.6751, | |
"step": 704 | |
}, | |
{ | |
"epoch": 0.37, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014239659870411708, | |
"loss": 1.6276, | |
"step": 705 | |
}, | |
{ | |
"epoch": 0.37, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014232353337682675, | |
"loss": 1.6446, | |
"step": 706 | |
}, | |
{ | |
"epoch": 0.37, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001422501375932582, | |
"loss": 1.657, | |
"step": 707 | |
}, | |
{ | |
"epoch": 0.37, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014217641171367326, | |
"loss": 1.6251, | |
"step": 708 | |
}, | |
{ | |
"epoch": 0.37, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014210235609995398, | |
"loss": 1.6384, | |
"step": 709 | |
}, | |
{ | |
"epoch": 0.37, | |
"grad_norm": 0.0, | |
"learning_rate": 0.000142027971115601, | |
"loss": 1.6402, | |
"step": 710 | |
}, | |
{ | |
"epoch": 0.37, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001419532571257315, | |
"loss": 1.6707, | |
"step": 711 | |
}, | |
{ | |
"epoch": 0.37, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001418782144970777, | |
"loss": 1.643, | |
"step": 712 | |
}, | |
{ | |
"epoch": 0.37, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014180284359798493, | |
"loss": 1.6659, | |
"step": 713 | |
}, | |
{ | |
"epoch": 0.37, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001417271447984098, | |
"loss": 1.6598, | |
"step": 714 | |
}, | |
{ | |
"epoch": 0.37, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014165111846991844, | |
"loss": 1.6501, | |
"step": 715 | |
}, | |
{ | |
"epoch": 0.37, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001415747649856846, | |
"loss": 1.6528, | |
"step": 716 | |
}, | |
{ | |
"epoch": 0.37, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014149808472048791, | |
"loss": 1.6577, | |
"step": 717 | |
}, | |
{ | |
"epoch": 0.37, | |
"grad_norm": 0.0, | |
"learning_rate": 0.000141421078050712, | |
"loss": 1.6329, | |
"step": 718 | |
}, | |
{ | |
"epoch": 0.37, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001413437453543426, | |
"loss": 1.6536, | |
"step": 719 | |
}, | |
{ | |
"epoch": 0.38, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014126608701096587, | |
"loss": 1.6443, | |
"step": 720 | |
}, | |
{ | |
"epoch": 0.38, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014118810340176617, | |
"loss": 1.6569, | |
"step": 721 | |
}, | |
{ | |
"epoch": 0.38, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014110979490952467, | |
"loss": 1.6444, | |
"step": 722 | |
}, | |
{ | |
"epoch": 0.38, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014103116191861704, | |
"loss": 1.6316, | |
"step": 723 | |
}, | |
{ | |
"epoch": 0.38, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014095220481501184, | |
"loss": 1.6591, | |
"step": 724 | |
}, | |
{ | |
"epoch": 0.38, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014087292398626855, | |
"loss": 1.6512, | |
"step": 725 | |
}, | |
{ | |
"epoch": 0.38, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001407933198215355, | |
"loss": 1.6527, | |
"step": 726 | |
}, | |
{ | |
"epoch": 0.38, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001407133927115483, | |
"loss": 1.6577, | |
"step": 727 | |
}, | |
{ | |
"epoch": 0.38, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001406331430486276, | |
"loss": 1.6484, | |
"step": 728 | |
}, | |
{ | |
"epoch": 0.38, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014055257122667737, | |
"loss": 1.6377, | |
"step": 729 | |
}, | |
{ | |
"epoch": 0.38, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014047167764118284, | |
"loss": 1.677, | |
"step": 730 | |
}, | |
{ | |
"epoch": 0.38, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014039046268920867, | |
"loss": 1.6286, | |
"step": 731 | |
}, | |
{ | |
"epoch": 0.38, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001403089267693968, | |
"loss": 1.6543, | |
"step": 732 | |
}, | |
{ | |
"epoch": 0.38, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014022707028196483, | |
"loss": 1.6535, | |
"step": 733 | |
}, | |
{ | |
"epoch": 0.38, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014014489362870376, | |
"loss": 1.6552, | |
"step": 734 | |
}, | |
{ | |
"epoch": 0.38, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00014006239721297607, | |
"loss": 1.6508, | |
"step": 735 | |
}, | |
{ | |
"epoch": 0.38, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00013997958143971388, | |
"loss": 1.6548, | |
"step": 736 | |
}, | |
{ | |
"epoch": 0.38, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001398964467154168, | |
"loss": 1.659, | |
"step": 737 | |
}, | |
{ | |
"epoch": 0.38, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001398129934481501, | |
"loss": 1.6496, | |
"step": 738 | |
}, | |
{ | |
"epoch": 0.39, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001397292220475425, | |
"loss": 1.6472, | |
"step": 739 | |
}, | |
{ | |
"epoch": 0.39, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001396451329247843, | |
"loss": 1.6703, | |
"step": 740 | |
}, | |
{ | |
"epoch": 0.39, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001395607264926254, | |
"loss": 1.6591, | |
"step": 741 | |
}, | |
{ | |
"epoch": 0.39, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00013947600316537316, | |
"loss": 1.6471, | |
"step": 742 | |
}, | |
{ | |
"epoch": 0.39, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001393909633588904, | |
"loss": 1.6608, | |
"step": 743 | |
}, | |
{ | |
"epoch": 0.39, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00013930560749059336, | |
"loss": 1.6463, | |
"step": 744 | |
}, | |
{ | |
"epoch": 0.39, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001392199359794497, | |
"loss": 1.6455, | |
"step": 745 | |
}, | |
{ | |
"epoch": 0.39, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00013913394924597644, | |
"loss": 1.6519, | |
"step": 746 | |
}, | |
{ | |
"epoch": 0.39, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001390476477122378, | |
"loss": 1.6619, | |
"step": 747 | |
}, | |
{ | |
"epoch": 0.39, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00013896103180184317, | |
"loss": 1.6453, | |
"step": 748 | |
}, | |
{ | |
"epoch": 0.39, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00013887410193994516, | |
"loss": 1.6527, | |
"step": 749 | |
}, | |
{ | |
"epoch": 0.39, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00013878685855323728, | |
"loss": 1.6302, | |
"step": 750 | |
}, | |
{ | |
"epoch": 0.39, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00013869930206995205, | |
"loss": 1.6559, | |
"step": 751 | |
}, | |
{ | |
"epoch": 0.39, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001386114329198588, | |
"loss": 1.6533, | |
"step": 752 | |
}, | |
{ | |
"epoch": 0.39, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00013852325153426156, | |
"loss": 1.6545, | |
"step": 753 | |
}, | |
{ | |
"epoch": 0.39, | |
"grad_norm": 0.0, | |
"learning_rate": 0.000138434758345997, | |
"loss": 1.6463, | |
"step": 754 | |
}, | |
{ | |
"epoch": 0.39, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00013834595378943223, | |
"loss": 1.6465, | |
"step": 755 | |
}, | |
{ | |
"epoch": 0.39, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00013825683830046272, | |
"loss": 1.6606, | |
"step": 756 | |
}, | |
{ | |
"epoch": 0.39, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00013816741231651017, | |
"loss": 1.6401, | |
"step": 757 | |
}, | |
{ | |
"epoch": 0.4, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00013807767627652028, | |
"loss": 1.6666, | |
"step": 758 | |
}, | |
{ | |
"epoch": 0.4, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001379876306209607, | |
"loss": 1.6675, | |
"step": 759 | |
}, | |
{ | |
"epoch": 0.4, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00013789727579181882, | |
"loss": 1.6666, | |
"step": 760 | |
}, | |
{ | |
"epoch": 0.4, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001378066122325996, | |
"loss": 1.6602, | |
"step": 761 | |
}, | |
{ | |
"epoch": 0.4, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00013771564038832338, | |
"loss": 1.6505, | |
"step": 762 | |
}, | |
{ | |
"epoch": 0.4, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001376243607055237, | |
"loss": 1.6494, | |
"step": 763 | |
}, | |
{ | |
"epoch": 0.4, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00013753277363224518, | |
"loss": 1.6479, | |
"step": 764 | |
}, | |
{ | |
"epoch": 0.4, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00013744087961804118, | |
"loss": 1.6514, | |
"step": 765 | |
}, | |
{ | |
"epoch": 0.4, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00013734867911397174, | |
"loss": 1.6318, | |
"step": 766 | |
}, | |
{ | |
"epoch": 0.4, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00013725617257260127, | |
"loss": 1.641, | |
"step": 767 | |
}, | |
{ | |
"epoch": 0.4, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00013716336044799636, | |
"loss": 1.6413, | |
"step": 768 | |
}, | |
{ | |
"epoch": 0.4, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00013707024319572358, | |
"loss": 1.6319, | |
"step": 769 | |
}, | |
{ | |
"epoch": 0.4, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00013697682127284712, | |
"loss": 1.6572, | |
"step": 770 | |
}, | |
{ | |
"epoch": 0.4, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00013688309513792675, | |
"loss": 1.6529, | |
"step": 771 | |
}, | |
{ | |
"epoch": 0.4, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001367890652510154, | |
"loss": 1.6418, | |
"step": 772 | |
}, | |
{ | |
"epoch": 0.4, | |
"grad_norm": 0.0, | |
"learning_rate": 0.000136694732073657, | |
"loss": 1.6425, | |
"step": 773 | |
}, | |
{ | |
"epoch": 0.4, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00013660009606888412, | |
"loss": 1.6636, | |
"step": 774 | |
}, | |
{ | |
"epoch": 0.4, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00013650515770121578, | |
"loss": 1.6545, | |
"step": 775 | |
}, | |
{ | |
"epoch": 0.4, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00013640991743665517, | |
"loss": 1.6532, | |
"step": 776 | |
}, | |
{ | |
"epoch": 0.41, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001363143757426873, | |
"loss": 1.6271, | |
"step": 777 | |
}, | |
{ | |
"epoch": 0.41, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00013621853308827674, | |
"loss": 1.6424, | |
"step": 778 | |
}, | |
{ | |
"epoch": 0.41, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00013612238994386536, | |
"loss": 1.6479, | |
"step": 779 | |
}, | |
{ | |
"epoch": 0.41, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001360259467813699, | |
"loss": 1.6623, | |
"step": 780 | |
}, | |
{ | |
"epoch": 0.41, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00013592920407417983, | |
"loss": 1.6558, | |
"step": 781 | |
}, | |
{ | |
"epoch": 0.41, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00013583216229715484, | |
"loss": 1.6548, | |
"step": 782 | |
}, | |
{ | |
"epoch": 0.41, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00013573482192662267, | |
"loss": 1.6468, | |
"step": 783 | |
}, | |
{ | |
"epoch": 0.41, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00013563718344037666, | |
"loss": 1.6553, | |
"step": 784 | |
}, | |
{ | |
"epoch": 0.41, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00013553924731767345, | |
"loss": 1.6538, | |
"step": 785 | |
}, | |
{ | |
"epoch": 0.41, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00013544101403923064, | |
"loss": 1.6546, | |
"step": 786 | |
}, | |
{ | |
"epoch": 0.41, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001353424840872244, | |
"loss": 1.6461, | |
"step": 787 | |
}, | |
{ | |
"epoch": 0.41, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001352436579452871, | |
"loss": 1.645, | |
"step": 788 | |
}, | |
{ | |
"epoch": 0.41, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00013514453609850496, | |
"loss": 1.6338, | |
"step": 789 | |
}, | |
{ | |
"epoch": 0.41, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00013504511903341575, | |
"loss": 1.6454, | |
"step": 790 | |
}, | |
{ | |
"epoch": 0.41, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00013494540723800615, | |
"loss": 1.6526, | |
"step": 791 | |
}, | |
{ | |
"epoch": 0.41, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00013484540120170966, | |
"loss": 1.645, | |
"step": 792 | |
}, | |
{ | |
"epoch": 0.41, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00013474510141540393, | |
"loss": 1.6411, | |
"step": 793 | |
}, | |
{ | |
"epoch": 0.41, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00013464450837140864, | |
"loss": 1.6573, | |
"step": 794 | |
}, | |
{ | |
"epoch": 0.41, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00013454362256348277, | |
"loss": 1.6552, | |
"step": 795 | |
}, | |
{ | |
"epoch": 0.41, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001344424444868224, | |
"loss": 1.6479, | |
"step": 796 | |
}, | |
{ | |
"epoch": 0.42, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00013434097463805813, | |
"loss": 1.6422, | |
"step": 797 | |
}, | |
{ | |
"epoch": 0.42, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00013423921351525287, | |
"loss": 1.6708, | |
"step": 798 | |
}, | |
{ | |
"epoch": 0.42, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00013413716161789907, | |
"loss": 1.6607, | |
"step": 799 | |
}, | |
{ | |
"epoch": 0.42, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001340348194469166, | |
"loss": 1.6375, | |
"step": 800 | |
}, | |
{ | |
"epoch": 0.42, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00013393218750464995, | |
"loss": 1.6685, | |
"step": 801 | |
}, | |
{ | |
"epoch": 0.42, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00013382926629486614, | |
"loss": 1.6482, | |
"step": 802 | |
}, | |
{ | |
"epoch": 0.42, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001337260563227519, | |
"loss": 1.6454, | |
"step": 803 | |
}, | |
{ | |
"epoch": 0.42, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00013362255809491145, | |
"loss": 1.6638, | |
"step": 804 | |
}, | |
{ | |
"epoch": 0.42, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00013351877211936384, | |
"loss": 1.6428, | |
"step": 805 | |
}, | |
{ | |
"epoch": 0.42, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00013341469890554056, | |
"loss": 1.6668, | |
"step": 806 | |
}, | |
{ | |
"epoch": 0.42, | |
"grad_norm": 0.0, | |
"learning_rate": 0.000133310338964283, | |
"loss": 1.6565, | |
"step": 807 | |
}, | |
{ | |
"epoch": 0.42, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00013320569280783994, | |
"loss": 1.6458, | |
"step": 808 | |
}, | |
{ | |
"epoch": 0.42, | |
"grad_norm": 0.0, | |
"learning_rate": 0.000133100760949865, | |
"loss": 1.6576, | |
"step": 809 | |
}, | |
{ | |
"epoch": 0.42, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00013299554390541426, | |
"loss": 1.6471, | |
"step": 810 | |
}, | |
{ | |
"epoch": 0.42, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00013289004219094354, | |
"loss": 1.6459, | |
"step": 811 | |
}, | |
{ | |
"epoch": 0.42, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00013278425632430599, | |
"loss": 1.6412, | |
"step": 812 | |
}, | |
{ | |
"epoch": 0.42, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00013267818682474952, | |
"loss": 1.6577, | |
"step": 813 | |
}, | |
{ | |
"epoch": 0.42, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00013257183421291427, | |
"loss": 1.6532, | |
"step": 814 | |
}, | |
{ | |
"epoch": 0.42, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00013246519901082996, | |
"loss": 1.6682, | |
"step": 815 | |
}, | |
{ | |
"epoch": 0.43, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00013235828174191347, | |
"loss": 1.6499, | |
"step": 816 | |
}, | |
{ | |
"epoch": 0.43, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00013225108293096616, | |
"loss": 1.6492, | |
"step": 817 | |
}, | |
{ | |
"epoch": 0.43, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00013214360310417138, | |
"loss": 1.656, | |
"step": 818 | |
}, | |
{ | |
"epoch": 0.43, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001320358427890918, | |
"loss": 1.6548, | |
"step": 819 | |
}, | |
{ | |
"epoch": 0.43, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001319278025146668, | |
"loss": 1.6416, | |
"step": 820 | |
}, | |
{ | |
"epoch": 0.43, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00013181948281121007, | |
"loss": 1.6705, | |
"step": 821 | |
}, | |
{ | |
"epoch": 0.43, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00013171088421040682, | |
"loss": 1.6439, | |
"step": 822 | |
}, | |
{ | |
"epoch": 0.43, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00013160200724531118, | |
"loss": 1.6398, | |
"step": 823 | |
}, | |
{ | |
"epoch": 0.43, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00013149285245034364, | |
"loss": 1.6661, | |
"step": 824 | |
}, | |
{ | |
"epoch": 0.43, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00013138342036128845, | |
"loss": 1.6452, | |
"step": 825 | |
}, | |
{ | |
"epoch": 0.43, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00013127371151529091, | |
"loss": 1.6517, | |
"step": 826 | |
}, | |
{ | |
"epoch": 0.43, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00013116372645085485, | |
"loss": 1.6559, | |
"step": 827 | |
}, | |
{ | |
"epoch": 0.43, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00013105346570783978, | |
"loss": 1.6502, | |
"step": 828 | |
}, | |
{ | |
"epoch": 0.43, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00013094292982745849, | |
"loss": 1.6646, | |
"step": 829 | |
}, | |
{ | |
"epoch": 0.43, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00013083211935227426, | |
"loss": 1.6398, | |
"step": 830 | |
}, | |
{ | |
"epoch": 0.43, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00013072103482619817, | |
"loss": 1.6501, | |
"step": 831 | |
}, | |
{ | |
"epoch": 0.43, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00013060967679448648, | |
"loss": 1.6438, | |
"step": 832 | |
}, | |
{ | |
"epoch": 0.43, | |
"grad_norm": 0.0, | |
"learning_rate": 0.000130498045803738, | |
"loss": 1.6512, | |
"step": 833 | |
}, | |
{ | |
"epoch": 0.43, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00013038614240189127, | |
"loss": 1.6466, | |
"step": 834 | |
}, | |
{ | |
"epoch": 0.44, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00013027396713822202, | |
"loss": 1.6413, | |
"step": 835 | |
}, | |
{ | |
"epoch": 0.44, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00013016152056334043, | |
"loss": 1.658, | |
"step": 836 | |
}, | |
{ | |
"epoch": 0.44, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001300488032291883, | |
"loss": 1.6559, | |
"step": 837 | |
}, | |
{ | |
"epoch": 0.44, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00012993581568903657, | |
"loss": 1.6451, | |
"step": 838 | |
}, | |
{ | |
"epoch": 0.44, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001298225584974824, | |
"loss": 1.6436, | |
"step": 839 | |
}, | |
{ | |
"epoch": 0.44, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00012970903221044654, | |
"loss": 1.6818, | |
"step": 840 | |
}, | |
{ | |
"epoch": 0.44, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00012959523738517058, | |
"loss": 1.6584, | |
"step": 841 | |
}, | |
{ | |
"epoch": 0.44, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00012948117458021436, | |
"loss": 1.6471, | |
"step": 842 | |
}, | |
{ | |
"epoch": 0.44, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00012936684435545283, | |
"loss": 1.6445, | |
"step": 843 | |
}, | |
{ | |
"epoch": 0.44, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001292522472720738, | |
"loss": 1.6752, | |
"step": 844 | |
}, | |
{ | |
"epoch": 0.44, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00012913738389257485, | |
"loss": 1.6622, | |
"step": 845 | |
}, | |
{ | |
"epoch": 0.44, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00012902225478076074, | |
"loss": 1.6564, | |
"step": 846 | |
}, | |
{ | |
"epoch": 0.44, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00012890686050174042, | |
"loss": 1.6637, | |
"step": 847 | |
}, | |
{ | |
"epoch": 0.44, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00012879120162192454, | |
"loss": 1.6566, | |
"step": 848 | |
}, | |
{ | |
"epoch": 0.44, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00012867527870902255, | |
"loss": 1.6581, | |
"step": 849 | |
}, | |
{ | |
"epoch": 0.44, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001285590923320398, | |
"loss": 1.6436, | |
"step": 850 | |
}, | |
{ | |
"epoch": 0.44, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001284426430612749, | |
"loss": 1.6735, | |
"step": 851 | |
}, | |
{ | |
"epoch": 0.44, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00012832593146831694, | |
"loss": 1.6614, | |
"step": 852 | |
}, | |
{ | |
"epoch": 0.44, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00012820895812604244, | |
"loss": 1.6576, | |
"step": 853 | |
}, | |
{ | |
"epoch": 0.45, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001280917236086129, | |
"loss": 1.6667, | |
"step": 854 | |
}, | |
{ | |
"epoch": 0.45, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001279742284914717, | |
"loss": 1.648, | |
"step": 855 | |
}, | |
{ | |
"epoch": 0.45, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00012785647335134136, | |
"loss": 1.663, | |
"step": 856 | |
}, | |
{ | |
"epoch": 0.45, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00012773845876622075, | |
"loss": 1.6331, | |
"step": 857 | |
}, | |
{ | |
"epoch": 0.45, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001276201853153822, | |
"loss": 1.6418, | |
"step": 858 | |
}, | |
{ | |
"epoch": 0.45, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00012750165357936866, | |
"loss": 1.6476, | |
"step": 859 | |
}, | |
{ | |
"epoch": 0.45, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00012738286413999095, | |
"loss": 1.6536, | |
"step": 860 | |
}, | |
{ | |
"epoch": 0.45, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00012726381758032472, | |
"loss": 1.6496, | |
"step": 861 | |
}, | |
{ | |
"epoch": 0.45, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00012714451448470778, | |
"loss": 1.6608, | |
"step": 862 | |
}, | |
{ | |
"epoch": 0.45, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00012702495543873702, | |
"loss": 1.6571, | |
"step": 863 | |
}, | |
{ | |
"epoch": 0.45, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001269051410292658, | |
"loss": 1.6521, | |
"step": 864 | |
}, | |
{ | |
"epoch": 0.45, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00012678507184440084, | |
"loss": 1.6494, | |
"step": 865 | |
}, | |
{ | |
"epoch": 0.45, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00012666474847349948, | |
"loss": 1.6443, | |
"step": 866 | |
}, | |
{ | |
"epoch": 0.45, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001265441715071666, | |
"loss": 1.6446, | |
"step": 867 | |
}, | |
{ | |
"epoch": 0.45, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00012642334153725203, | |
"loss": 1.6402, | |
"step": 868 | |
}, | |
{ | |
"epoch": 0.45, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001263022591568473, | |
"loss": 1.6524, | |
"step": 869 | |
}, | |
{ | |
"epoch": 0.45, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00012618092496028302, | |
"loss": 1.6536, | |
"step": 870 | |
}, | |
{ | |
"epoch": 0.45, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00012605933954312572, | |
"loss": 1.662, | |
"step": 871 | |
}, | |
{ | |
"epoch": 0.45, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00012593750350217513, | |
"loss": 1.6668, | |
"step": 872 | |
}, | |
{ | |
"epoch": 0.46, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00012581541743546116, | |
"loss": 1.6651, | |
"step": 873 | |
}, | |
{ | |
"epoch": 0.46, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001256930819422409, | |
"loss": 1.6457, | |
"step": 874 | |
}, | |
{ | |
"epoch": 0.46, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00012557049762299577, | |
"loss": 1.6589, | |
"step": 875 | |
}, | |
{ | |
"epoch": 0.46, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00012544766507942862, | |
"loss": 1.6457, | |
"step": 876 | |
}, | |
{ | |
"epoch": 0.46, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00012532458491446054, | |
"loss": 1.6767, | |
"step": 877 | |
}, | |
{ | |
"epoch": 0.46, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001252012577322283, | |
"loss": 1.6489, | |
"step": 878 | |
}, | |
{ | |
"epoch": 0.46, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001250776841380809, | |
"loss": 1.645, | |
"step": 879 | |
}, | |
{ | |
"epoch": 0.46, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00012495386473857704, | |
"loss": 1.6481, | |
"step": 880 | |
}, | |
{ | |
"epoch": 0.46, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00012482980014148187, | |
"loss": 1.6645, | |
"step": 881 | |
}, | |
{ | |
"epoch": 0.46, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001247054909557641, | |
"loss": 1.6512, | |
"step": 882 | |
}, | |
{ | |
"epoch": 0.46, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00012458093779159295, | |
"loss": 1.6474, | |
"step": 883 | |
}, | |
{ | |
"epoch": 0.46, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001244561412603353, | |
"loss": 1.6521, | |
"step": 884 | |
}, | |
{ | |
"epoch": 0.46, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00012433110197455254, | |
"loss": 1.6661, | |
"step": 885 | |
}, | |
{ | |
"epoch": 0.46, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00012420582054799756, | |
"loss": 1.6659, | |
"step": 886 | |
}, | |
{ | |
"epoch": 0.46, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001240802975956119, | |
"loss": 1.645, | |
"step": 887 | |
}, | |
{ | |
"epoch": 0.46, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00012395453373352256, | |
"loss": 1.6476, | |
"step": 888 | |
}, | |
{ | |
"epoch": 0.46, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00012382852957903908, | |
"loss": 1.6523, | |
"step": 889 | |
}, | |
{ | |
"epoch": 0.46, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001237022857506504, | |
"loss": 1.6577, | |
"step": 890 | |
}, | |
{ | |
"epoch": 0.46, | |
"grad_norm": 0.0, | |
"learning_rate": 0.000123575802868022, | |
"loss": 1.6616, | |
"step": 891 | |
}, | |
{ | |
"epoch": 0.47, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00012344908155199265, | |
"loss": 1.6367, | |
"step": 892 | |
}, | |
{ | |
"epoch": 0.47, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001233221224245715, | |
"loss": 1.6556, | |
"step": 893 | |
}, | |
{ | |
"epoch": 0.47, | |
"grad_norm": 0.0, | |
"learning_rate": 0.000123194926108935, | |
"loss": 1.6352, | |
"step": 894 | |
}, | |
{ | |
"epoch": 0.47, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00012306749322942392, | |
"loss": 1.6454, | |
"step": 895 | |
}, | |
{ | |
"epoch": 0.47, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00012293982441154003, | |
"loss": 1.6491, | |
"step": 896 | |
}, | |
{ | |
"epoch": 0.47, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001228119202819433, | |
"loss": 1.6518, | |
"step": 897 | |
}, | |
{ | |
"epoch": 0.47, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00012268378146844874, | |
"loss": 1.6491, | |
"step": 898 | |
}, | |
{ | |
"epoch": 0.47, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00012255540860002328, | |
"loss": 1.6511, | |
"step": 899 | |
}, | |
{ | |
"epoch": 0.47, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001224268023067826, | |
"loss": 1.6505, | |
"step": 900 | |
}, | |
{ | |
"epoch": 0.47, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00012229796321998833, | |
"loss": 1.6539, | |
"step": 901 | |
}, | |
{ | |
"epoch": 0.47, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00012216889197204465, | |
"loss": 1.6539, | |
"step": 902 | |
}, | |
{ | |
"epoch": 0.47, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00012203958919649529, | |
"loss": 1.6577, | |
"step": 903 | |
}, | |
{ | |
"epoch": 0.47, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00012191005552802047, | |
"loss": 1.6462, | |
"step": 904 | |
}, | |
{ | |
"epoch": 0.47, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001217802916024337, | |
"loss": 1.658, | |
"step": 905 | |
}, | |
{ | |
"epoch": 0.47, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00012165029805667879, | |
"loss": 1.6486, | |
"step": 906 | |
}, | |
{ | |
"epoch": 0.47, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00012152007552882652, | |
"loss": 1.6342, | |
"step": 907 | |
}, | |
{ | |
"epoch": 0.47, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00012138962465807174, | |
"loss": 1.6336, | |
"step": 908 | |
}, | |
{ | |
"epoch": 0.47, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00012125894608473, | |
"loss": 1.6569, | |
"step": 909 | |
}, | |
{ | |
"epoch": 0.47, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00012112804045023462, | |
"loss": 1.6643, | |
"step": 910 | |
}, | |
{ | |
"epoch": 0.47, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00012099690839713337, | |
"loss": 1.6525, | |
"step": 911 | |
}, | |
{ | |
"epoch": 0.48, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001208655505690855, | |
"loss": 1.6401, | |
"step": 912 | |
}, | |
{ | |
"epoch": 0.48, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00012073396761085831, | |
"loss": 1.6569, | |
"step": 913 | |
}, | |
{ | |
"epoch": 0.48, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00012060216016832429, | |
"loss": 1.6517, | |
"step": 914 | |
}, | |
{ | |
"epoch": 0.48, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00012047012888845777, | |
"loss": 1.65, | |
"step": 915 | |
}, | |
{ | |
"epoch": 0.48, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00012033787441933173, | |
"loss": 1.6685, | |
"step": 916 | |
}, | |
{ | |
"epoch": 0.48, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00012020539741011472, | |
"loss": 1.6526, | |
"step": 917 | |
}, | |
{ | |
"epoch": 0.48, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00012007269851106762, | |
"loss": 1.6458, | |
"step": 918 | |
}, | |
{ | |
"epoch": 0.48, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00011993977837354043, | |
"loss": 1.6589, | |
"step": 919 | |
}, | |
{ | |
"epoch": 0.48, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00011980663764996912, | |
"loss": 1.6505, | |
"step": 920 | |
}, | |
{ | |
"epoch": 0.48, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00011967327699387236, | |
"loss": 1.6664, | |
"step": 921 | |
}, | |
{ | |
"epoch": 0.48, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00011953969705984842, | |
"loss": 1.6416, | |
"step": 922 | |
}, | |
{ | |
"epoch": 0.48, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00011940589850357184, | |
"loss": 1.6568, | |
"step": 923 | |
}, | |
{ | |
"epoch": 0.48, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00011927188198179029, | |
"loss": 1.6433, | |
"step": 924 | |
}, | |
{ | |
"epoch": 0.48, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00011913764815232129, | |
"loss": 1.6468, | |
"step": 925 | |
}, | |
{ | |
"epoch": 0.48, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00011900319767404902, | |
"loss": 1.6532, | |
"step": 926 | |
}, | |
{ | |
"epoch": 0.48, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00011886853120692113, | |
"loss": 1.6549, | |
"step": 927 | |
}, | |
{ | |
"epoch": 0.48, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00011873364941194534, | |
"loss": 1.6646, | |
"step": 928 | |
}, | |
{ | |
"epoch": 0.48, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00011859855295118643, | |
"loss": 1.6627, | |
"step": 929 | |
}, | |
{ | |
"epoch": 0.48, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00011846324248776276, | |
"loss": 1.6641, | |
"step": 930 | |
}, | |
{ | |
"epoch": 0.49, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00011832771868584316, | |
"loss": 1.6455, | |
"step": 931 | |
}, | |
{ | |
"epoch": 0.49, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00011819198221064364, | |
"loss": 1.6511, | |
"step": 932 | |
}, | |
{ | |
"epoch": 0.49, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00011805603372842409, | |
"loss": 1.6634, | |
"step": 933 | |
}, | |
{ | |
"epoch": 0.49, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00011791987390648503, | |
"loss": 1.6752, | |
"step": 934 | |
}, | |
{ | |
"epoch": 0.49, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00011778350341316435, | |
"loss": 1.658, | |
"step": 935 | |
}, | |
{ | |
"epoch": 0.49, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00011764692291783401, | |
"loss": 1.6485, | |
"step": 936 | |
}, | |
{ | |
"epoch": 0.49, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00011751013309089677, | |
"loss": 1.6473, | |
"step": 937 | |
}, | |
{ | |
"epoch": 0.49, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00011737313460378292, | |
"loss": 1.6546, | |
"step": 938 | |
}, | |
{ | |
"epoch": 0.49, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00011723592812894684, | |
"loss": 1.6616, | |
"step": 939 | |
}, | |
{ | |
"epoch": 0.49, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00011709851433986394, | |
"loss": 1.6635, | |
"step": 940 | |
}, | |
{ | |
"epoch": 0.49, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00011696089391102718, | |
"loss": 1.6527, | |
"step": 941 | |
}, | |
{ | |
"epoch": 0.49, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00011682306751794381, | |
"loss": 1.66, | |
"step": 942 | |
}, | |
{ | |
"epoch": 0.49, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00011668503583713202, | |
"loss": 1.6493, | |
"step": 943 | |
}, | |
{ | |
"epoch": 0.49, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00011654679954611771, | |
"loss": 1.6652, | |
"step": 944 | |
}, | |
{ | |
"epoch": 0.49, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00011640835932343108, | |
"loss": 1.6643, | |
"step": 945 | |
}, | |
{ | |
"epoch": 0.49, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00011626971584860328, | |
"loss": 1.6339, | |
"step": 946 | |
}, | |
{ | |
"epoch": 0.49, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00011613086980216318, | |
"loss": 1.6647, | |
"step": 947 | |
}, | |
{ | |
"epoch": 0.49, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00011599182186563397, | |
"loss": 1.6472, | |
"step": 948 | |
}, | |
{ | |
"epoch": 0.49, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00011585257272152976, | |
"loss": 1.6632, | |
"step": 949 | |
}, | |
{ | |
"epoch": 0.5, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00011571312305335235, | |
"loss": 1.6547, | |
"step": 950 | |
}, | |
{ | |
"epoch": 0.5, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00011557347354558775, | |
"loss": 1.6543, | |
"step": 951 | |
}, | |
{ | |
"epoch": 0.5, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00011543362488370289, | |
"loss": 1.6442, | |
"step": 952 | |
}, | |
{ | |
"epoch": 0.5, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00011529357775414227, | |
"loss": 1.6655, | |
"step": 953 | |
}, | |
{ | |
"epoch": 0.5, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00011515333284432457, | |
"loss": 1.649, | |
"step": 954 | |
}, | |
{ | |
"epoch": 0.5, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00011501289084263922, | |
"loss": 1.6697, | |
"step": 955 | |
}, | |
{ | |
"epoch": 0.5, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00011487225243844312, | |
"loss": 1.651, | |
"step": 956 | |
}, | |
{ | |
"epoch": 0.5, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001147314183220572, | |
"loss": 1.6328, | |
"step": 957 | |
}, | |
{ | |
"epoch": 0.5, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00011459038918476302, | |
"loss": 1.6381, | |
"step": 958 | |
}, | |
{ | |
"epoch": 0.5, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001144491657187994, | |
"loss": 1.6482, | |
"step": 959 | |
}, | |
{ | |
"epoch": 0.5, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00011430774861735904, | |
"loss": 1.6693, | |
"step": 960 | |
}, | |
{ | |
"epoch": 0.5, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00011416613857458509, | |
"loss": 1.6481, | |
"step": 961 | |
}, | |
{ | |
"epoch": 0.5, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00011402433628556774, | |
"loss": 1.6478, | |
"step": 962 | |
}, | |
{ | |
"epoch": 0.5, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00011388234244634078, | |
"loss": 1.6564, | |
"step": 963 | |
}, | |
{ | |
"epoch": 0.5, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00011374015775387831, | |
"loss": 1.6441, | |
"step": 964 | |
}, | |
{ | |
"epoch": 0.5, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00011359778290609116, | |
"loss": 1.6541, | |
"step": 965 | |
}, | |
{ | |
"epoch": 0.5, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00011345521860182352, | |
"loss": 1.6336, | |
"step": 966 | |
}, | |
{ | |
"epoch": 0.5, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00011331246554084962, | |
"loss": 1.6556, | |
"step": 967 | |
}, | |
{ | |
"epoch": 0.5, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00011316952442387007, | |
"loss": 1.6462, | |
"step": 968 | |
}, | |
{ | |
"epoch": 0.51, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00011302639595250865, | |
"loss": 1.6717, | |
"step": 969 | |
}, | |
{ | |
"epoch": 0.51, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00011288308082930871, | |
"loss": 1.66, | |
"step": 970 | |
}, | |
{ | |
"epoch": 0.51, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00011273957975772983, | |
"loss": 1.6614, | |
"step": 971 | |
}, | |
{ | |
"epoch": 0.51, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00011259589344214424, | |
"loss": 1.6452, | |
"step": 972 | |
}, | |
{ | |
"epoch": 0.51, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001124520225878335, | |
"loss": 1.6587, | |
"step": 973 | |
}, | |
{ | |
"epoch": 0.51, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00011230796790098494, | |
"loss": 1.6612, | |
"step": 974 | |
}, | |
{ | |
"epoch": 0.51, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00011216373008868825, | |
"loss": 1.6395, | |
"step": 975 | |
}, | |
{ | |
"epoch": 0.51, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00011201930985893197, | |
"loss": 1.6268, | |
"step": 976 | |
}, | |
{ | |
"epoch": 0.51, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00011187470792060002, | |
"loss": 1.6602, | |
"step": 977 | |
}, | |
{ | |
"epoch": 0.51, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00011172992498346832, | |
"loss": 1.641, | |
"step": 978 | |
}, | |
{ | |
"epoch": 0.51, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00011158496175820108, | |
"loss": 1.6506, | |
"step": 979 | |
}, | |
{ | |
"epoch": 0.51, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00011143981895634755, | |
"loss": 1.6524, | |
"step": 980 | |
}, | |
{ | |
"epoch": 0.51, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00011129449729033842, | |
"loss": 1.6553, | |
"step": 981 | |
}, | |
{ | |
"epoch": 0.51, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00011114899747348227, | |
"loss": 1.6463, | |
"step": 982 | |
}, | |
{ | |
"epoch": 0.51, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00011100332021996222, | |
"loss": 1.6534, | |
"step": 983 | |
}, | |
{ | |
"epoch": 0.51, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00011085746624483228, | |
"loss": 1.6672, | |
"step": 984 | |
}, | |
{ | |
"epoch": 0.51, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00011071143626401388, | |
"loss": 1.6539, | |
"step": 985 | |
}, | |
{ | |
"epoch": 0.51, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001105652309942924, | |
"loss": 1.6574, | |
"step": 986 | |
}, | |
{ | |
"epoch": 0.51, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001104188511533136, | |
"loss": 1.6556, | |
"step": 987 | |
}, | |
{ | |
"epoch": 0.52, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00011027229745958018, | |
"loss": 1.6586, | |
"step": 988 | |
}, | |
{ | |
"epoch": 0.52, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001101255706324481, | |
"loss": 1.6654, | |
"step": 989 | |
}, | |
{ | |
"epoch": 0.52, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00010997867139212324, | |
"loss": 1.6567, | |
"step": 990 | |
}, | |
{ | |
"epoch": 0.52, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00010983160045965765, | |
"loss": 1.6502, | |
"step": 991 | |
}, | |
{ | |
"epoch": 0.52, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00010968435855694625, | |
"loss": 1.6437, | |
"step": 992 | |
}, | |
{ | |
"epoch": 0.52, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00010953694640672308, | |
"loss": 1.6451, | |
"step": 993 | |
}, | |
{ | |
"epoch": 0.52, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00010938936473255788, | |
"loss": 1.6407, | |
"step": 994 | |
}, | |
{ | |
"epoch": 0.52, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00010924161425885248, | |
"loss": 1.6503, | |
"step": 995 | |
}, | |
{ | |
"epoch": 0.52, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00010909369571083723, | |
"loss": 1.6567, | |
"step": 996 | |
}, | |
{ | |
"epoch": 0.52, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00010894560981456753, | |
"loss": 1.6514, | |
"step": 997 | |
}, | |
{ | |
"epoch": 0.52, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00010879735729692017, | |
"loss": 1.6564, | |
"step": 998 | |
}, | |
{ | |
"epoch": 0.52, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001086489388855898, | |
"loss": 1.6543, | |
"step": 999 | |
}, | |
{ | |
"epoch": 0.52, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00010850035530908536, | |
"loss": 1.6495, | |
"step": 1000 | |
}, | |
{ | |
"epoch": 0.52, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00010835160729672648, | |
"loss": 1.6634, | |
"step": 1001 | |
}, | |
{ | |
"epoch": 0.52, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00010820269557863997, | |
"loss": 1.6459, | |
"step": 1002 | |
}, | |
{ | |
"epoch": 0.52, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00010805362088575613, | |
"loss": 1.6567, | |
"step": 1003 | |
}, | |
{ | |
"epoch": 0.52, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00010790438394980523, | |
"loss": 1.6501, | |
"step": 1004 | |
}, | |
{ | |
"epoch": 0.52, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00010775498550331398, | |
"loss": 1.6319, | |
"step": 1005 | |
}, | |
{ | |
"epoch": 0.52, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00010760542627960174, | |
"loss": 1.6542, | |
"step": 1006 | |
}, | |
{ | |
"epoch": 0.52, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00010745570701277715, | |
"loss": 1.6534, | |
"step": 1007 | |
}, | |
{ | |
"epoch": 0.53, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00010730582843773434, | |
"loss": 1.6465, | |
"step": 1008 | |
}, | |
{ | |
"epoch": 0.53, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00010715579129014945, | |
"loss": 1.6465, | |
"step": 1009 | |
}, | |
{ | |
"epoch": 0.53, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00010700559630647696, | |
"loss": 1.6394, | |
"step": 1010 | |
}, | |
{ | |
"epoch": 0.53, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00010685524422394604, | |
"loss": 1.6538, | |
"step": 1011 | |
}, | |
{ | |
"epoch": 0.53, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00010670473578055707, | |
"loss": 1.6502, | |
"step": 1012 | |
}, | |
{ | |
"epoch": 0.53, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00010655407171507784, | |
"loss": 1.6514, | |
"step": 1013 | |
}, | |
{ | |
"epoch": 0.53, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00010640325276704006, | |
"loss": 1.6517, | |
"step": 1014 | |
}, | |
{ | |
"epoch": 0.53, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00010625227967673563, | |
"loss": 1.6326, | |
"step": 1015 | |
}, | |
{ | |
"epoch": 0.53, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001061011531852131, | |
"loss": 1.6577, | |
"step": 1016 | |
}, | |
{ | |
"epoch": 0.53, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00010594987403427396, | |
"loss": 1.667, | |
"step": 1017 | |
}, | |
{ | |
"epoch": 0.53, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00010579844296646904, | |
"loss": 1.6219, | |
"step": 1018 | |
}, | |
{ | |
"epoch": 0.53, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00010564686072509483, | |
"loss": 1.6414, | |
"step": 1019 | |
}, | |
{ | |
"epoch": 0.53, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001054951280541899, | |
"loss": 1.6404, | |
"step": 1020 | |
}, | |
{ | |
"epoch": 0.53, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00010534324569853113, | |
"loss": 1.6543, | |
"step": 1021 | |
}, | |
{ | |
"epoch": 0.53, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00010519121440363017, | |
"loss": 1.6486, | |
"step": 1022 | |
}, | |
{ | |
"epoch": 0.53, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00010503903491572975, | |
"loss": 1.6595, | |
"step": 1023 | |
}, | |
{ | |
"epoch": 0.53, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00010488670798179997, | |
"loss": 1.6465, | |
"step": 1024 | |
}, | |
{ | |
"epoch": 0.53, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00010473423434953468, | |
"loss": 1.6555, | |
"step": 1025 | |
}, | |
{ | |
"epoch": 0.53, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001045816147673478, | |
"loss": 1.6511, | |
"step": 1026 | |
}, | |
{ | |
"epoch": 0.54, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00010442884998436963, | |
"loss": 1.6451, | |
"step": 1027 | |
}, | |
{ | |
"epoch": 0.54, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00010427594075044316, | |
"loss": 1.649, | |
"step": 1028 | |
}, | |
{ | |
"epoch": 0.54, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00010412288781612048, | |
"loss": 1.6565, | |
"step": 1029 | |
}, | |
{ | |
"epoch": 0.54, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00010396969193265899, | |
"loss": 1.6647, | |
"step": 1030 | |
}, | |
{ | |
"epoch": 0.54, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00010381635385201774, | |
"loss": 1.6512, | |
"step": 1031 | |
}, | |
{ | |
"epoch": 0.54, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001036628743268538, | |
"loss": 1.6699, | |
"step": 1032 | |
}, | |
{ | |
"epoch": 0.54, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00010350925411051846, | |
"loss": 1.6405, | |
"step": 1033 | |
}, | |
{ | |
"epoch": 0.54, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00010335549395705366, | |
"loss": 1.6385, | |
"step": 1034 | |
}, | |
{ | |
"epoch": 0.54, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00010320159462118815, | |
"loss": 1.6733, | |
"step": 1035 | |
}, | |
{ | |
"epoch": 0.54, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00010304755685833388, | |
"loss": 1.6556, | |
"step": 1036 | |
}, | |
{ | |
"epoch": 0.54, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00010289338142458227, | |
"loss": 1.628, | |
"step": 1037 | |
}, | |
{ | |
"epoch": 0.54, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00010273906907670049, | |
"loss": 1.653, | |
"step": 1038 | |
}, | |
{ | |
"epoch": 0.54, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00010258462057212776, | |
"loss": 1.6564, | |
"step": 1039 | |
}, | |
{ | |
"epoch": 0.54, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00010243003666897163, | |
"loss": 1.6529, | |
"step": 1040 | |
}, | |
{ | |
"epoch": 0.54, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001022753181260042, | |
"loss": 1.679, | |
"step": 1041 | |
}, | |
{ | |
"epoch": 0.54, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001021204657026585, | |
"loss": 1.6641, | |
"step": 1042 | |
}, | |
{ | |
"epoch": 0.54, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00010196548015902473, | |
"loss": 1.6658, | |
"step": 1043 | |
}, | |
{ | |
"epoch": 0.54, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00010181036225584642, | |
"loss": 1.6473, | |
"step": 1044 | |
}, | |
{ | |
"epoch": 0.54, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00010165511275451687, | |
"loss": 1.6632, | |
"step": 1045 | |
}, | |
{ | |
"epoch": 0.55, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00010149973241707527, | |
"loss": 1.6518, | |
"step": 1046 | |
}, | |
{ | |
"epoch": 0.55, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00010134422200620305, | |
"loss": 1.6436, | |
"step": 1047 | |
}, | |
{ | |
"epoch": 0.55, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001011885822852201, | |
"loss": 1.6534, | |
"step": 1048 | |
}, | |
{ | |
"epoch": 0.55, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00010103281401808099, | |
"loss": 1.6544, | |
"step": 1049 | |
}, | |
{ | |
"epoch": 0.55, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00010087691796937134, | |
"loss": 1.6508, | |
"step": 1050 | |
}, | |
{ | |
"epoch": 0.55, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00010072089490430387, | |
"loss": 1.6588, | |
"step": 1051 | |
}, | |
{ | |
"epoch": 0.55, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00010056474558871484, | |
"loss": 1.6684, | |
"step": 1052 | |
}, | |
{ | |
"epoch": 0.55, | |
"grad_norm": 0.0, | |
"learning_rate": 0.0001004084707890602, | |
"loss": 1.6397, | |
"step": 1053 | |
}, | |
{ | |
"epoch": 0.55, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00010025207127241179, | |
"loss": 1.6572, | |
"step": 1054 | |
}, | |
{ | |
"epoch": 0.55, | |
"grad_norm": 0.0, | |
"learning_rate": 0.00010009554780645369, | |
"loss": 1.662, | |
"step": 1055 | |
}, | |
{ | |
"epoch": 0.55, | |
"grad_norm": 0.0, | |
"learning_rate": 9.99389011594783e-05, | |
"loss": 1.661, | |
"step": 1056 | |
}, | |
{ | |
"epoch": 0.55, | |
"grad_norm": 0.0, | |
"learning_rate": 9.978213210038274e-05, | |
"loss": 1.6482, | |
"step": 1057 | |
}, | |
{ | |
"epoch": 0.55, | |
"grad_norm": 0.0, | |
"learning_rate": 9.962524139866489e-05, | |
"loss": 1.6529, | |
"step": 1058 | |
}, | |
{ | |
"epoch": 0.55, | |
"grad_norm": 0.0, | |
"learning_rate": 9.946822982441981e-05, | |
"loss": 1.6663, | |
"step": 1059 | |
}, | |
{ | |
"epoch": 0.55, | |
"grad_norm": 0.0, | |
"learning_rate": 9.93110981483358e-05, | |
"loss": 1.6651, | |
"step": 1060 | |
}, | |
{ | |
"epoch": 0.55, | |
"grad_norm": 0.0, | |
"learning_rate": 9.915384714169069e-05, | |
"loss": 1.6508, | |
"step": 1061 | |
}, | |
{ | |
"epoch": 0.55, | |
"grad_norm": 0.0, | |
"learning_rate": 9.899647757634806e-05, | |
"loss": 1.6468, | |
"step": 1062 | |
}, | |
{ | |
"epoch": 0.55, | |
"grad_norm": 0.0, | |
"learning_rate": 9.883899022475338e-05, | |
"loss": 1.6454, | |
"step": 1063 | |
}, | |
{ | |
"epoch": 0.55, | |
"grad_norm": 0.0, | |
"learning_rate": 9.868138585993035e-05, | |
"loss": 1.6579, | |
"step": 1064 | |
}, | |
{ | |
"epoch": 0.56, | |
"grad_norm": 0.0, | |
"learning_rate": 9.852366525547696e-05, | |
"loss": 1.6466, | |
"step": 1065 | |
}, | |
{ | |
"epoch": 0.56, | |
"grad_norm": 0.0, | |
"learning_rate": 9.836582918556178e-05, | |
"loss": 1.6655, | |
"step": 1066 | |
}, | |
{ | |
"epoch": 0.56, | |
"grad_norm": 0.0, | |
"learning_rate": 9.820787842492017e-05, | |
"loss": 1.6473, | |
"step": 1067 | |
}, | |
{ | |
"epoch": 0.56, | |
"grad_norm": 0.0, | |
"learning_rate": 9.80498137488504e-05, | |
"loss": 1.6512, | |
"step": 1068 | |
}, | |
{ | |
"epoch": 0.56, | |
"grad_norm": 0.0, | |
"learning_rate": 9.789163593320993e-05, | |
"loss": 1.6299, | |
"step": 1069 | |
}, | |
{ | |
"epoch": 0.56, | |
"grad_norm": 0.0, | |
"learning_rate": 9.773334575441155e-05, | |
"loss": 1.6716, | |
"step": 1070 | |
}, | |
{ | |
"epoch": 0.56, | |
"grad_norm": 0.0, | |
"learning_rate": 9.757494398941956e-05, | |
"loss": 1.6504, | |
"step": 1071 | |
}, | |
{ | |
"epoch": 0.56, | |
"grad_norm": 0.0, | |
"learning_rate": 9.741643141574603e-05, | |
"loss": 1.6336, | |
"step": 1072 | |
}, | |
{ | |
"epoch": 0.56, | |
"grad_norm": 0.0, | |
"learning_rate": 9.725780881144687e-05, | |
"loss": 1.6581, | |
"step": 1073 | |
}, | |
{ | |
"epoch": 0.56, | |
"grad_norm": 0.0, | |
"learning_rate": 9.709907695511813e-05, | |
"loss": 1.6543, | |
"step": 1074 | |
}, | |
{ | |
"epoch": 0.56, | |
"grad_norm": 0.0, | |
"learning_rate": 9.694023662589207e-05, | |
"loss": 1.6515, | |
"step": 1075 | |
}, | |
{ | |
"epoch": 0.56, | |
"grad_norm": 0.0, | |
"learning_rate": 9.678128860343344e-05, | |
"loss": 1.6471, | |
"step": 1076 | |
}, | |
{ | |
"epoch": 0.56, | |
"grad_norm": 0.0, | |
"learning_rate": 9.662223366793556e-05, | |
"loss": 1.6641, | |
"step": 1077 | |
}, | |
{ | |
"epoch": 0.56, | |
"grad_norm": 0.0, | |
"learning_rate": 9.646307260011653e-05, | |
"loss": 1.6335, | |
"step": 1078 | |
}, | |
{ | |
"epoch": 0.56, | |
"grad_norm": 0.0, | |
"learning_rate": 9.630380618121544e-05, | |
"loss": 1.6514, | |
"step": 1079 | |
}, | |
{ | |
"epoch": 0.56, | |
"grad_norm": 0.0, | |
"learning_rate": 9.614443519298846e-05, | |
"loss": 1.6481, | |
"step": 1080 | |
}, | |
{ | |
"epoch": 0.56, | |
"grad_norm": 0.0, | |
"learning_rate": 9.598496041770504e-05, | |
"loss": 1.6516, | |
"step": 1081 | |
}, | |
{ | |
"epoch": 0.56, | |
"grad_norm": 0.0, | |
"learning_rate": 9.582538263814407e-05, | |
"loss": 1.6386, | |
"step": 1082 | |
}, | |
{ | |
"epoch": 0.56, | |
"grad_norm": 0.0, | |
"learning_rate": 9.566570263759e-05, | |
"loss": 1.6705, | |
"step": 1083 | |
}, | |
{ | |
"epoch": 0.57, | |
"grad_norm": 0.0, | |
"learning_rate": 9.550592119982912e-05, | |
"loss": 1.6473, | |
"step": 1084 | |
}, | |
{ | |
"epoch": 0.57, | |
"grad_norm": 0.0, | |
"learning_rate": 9.534603910914555e-05, | |
"loss": 1.6629, | |
"step": 1085 | |
}, | |
{ | |
"epoch": 0.57, | |
"grad_norm": 0.0, | |
"learning_rate": 9.518605715031746e-05, | |
"loss": 1.6592, | |
"step": 1086 | |
}, | |
{ | |
"epoch": 0.57, | |
"grad_norm": 0.0, | |
"learning_rate": 9.502597610861325e-05, | |
"loss": 1.6453, | |
"step": 1087 | |
}, | |
{ | |
"epoch": 0.57, | |
"grad_norm": 0.0, | |
"learning_rate": 9.486579676978766e-05, | |
"loss": 1.6535, | |
"step": 1088 | |
}, | |
{ | |
"epoch": 0.57, | |
"grad_norm": 0.0, | |
"learning_rate": 9.470551992007793e-05, | |
"loss": 1.6376, | |
"step": 1089 | |
}, | |
{ | |
"epoch": 0.57, | |
"grad_norm": 0.0, | |
"learning_rate": 9.45451463461999e-05, | |
"loss": 1.6456, | |
"step": 1090 | |
}, | |
{ | |
"epoch": 0.57, | |
"grad_norm": 0.0, | |
"learning_rate": 9.438467683534418e-05, | |
"loss": 1.6718, | |
"step": 1091 | |
}, | |
{ | |
"epoch": 0.57, | |
"grad_norm": 0.0, | |
"learning_rate": 9.422411217517233e-05, | |
"loss": 1.6217, | |
"step": 1092 | |
}, | |
{ | |
"epoch": 0.57, | |
"grad_norm": 0.0, | |
"learning_rate": 9.406345315381288e-05, | |
"loss": 1.6311, | |
"step": 1093 | |
}, | |
{ | |
"epoch": 0.57, | |
"grad_norm": 0.0, | |
"learning_rate": 9.390270055985763e-05, | |
"loss": 1.6352, | |
"step": 1094 | |
}, | |
{ | |
"epoch": 0.57, | |
"grad_norm": 0.0, | |
"learning_rate": 9.374185518235758e-05, | |
"loss": 1.6384, | |
"step": 1095 | |
}, | |
{ | |
"epoch": 0.57, | |
"grad_norm": 0.0, | |
"learning_rate": 9.358091781081919e-05, | |
"loss": 1.6621, | |
"step": 1096 | |
}, | |
{ | |
"epoch": 0.57, | |
"grad_norm": 0.0, | |
"learning_rate": 9.341988923520048e-05, | |
"loss": 1.661, | |
"step": 1097 | |
}, | |
{ | |
"epoch": 0.57, | |
"grad_norm": 0.0, | |
"learning_rate": 9.325877024590716e-05, | |
"loss": 1.6409, | |
"step": 1098 | |
}, | |
{ | |
"epoch": 0.57, | |
"grad_norm": 0.0, | |
"learning_rate": 9.309756163378866e-05, | |
"loss": 1.6513, | |
"step": 1099 | |
}, | |
{ | |
"epoch": 0.57, | |
"grad_norm": 0.0, | |
"learning_rate": 9.293626419013446e-05, | |
"loss": 1.6531, | |
"step": 1100 | |
}, | |
{ | |
"epoch": 0.57, | |
"grad_norm": 0.0, | |
"learning_rate": 9.27748787066699e-05, | |
"loss": 1.6383, | |
"step": 1101 | |
}, | |
{ | |
"epoch": 0.57, | |
"grad_norm": 0.0, | |
"learning_rate": 9.261340597555258e-05, | |
"loss": 1.6671, | |
"step": 1102 | |
}, | |
{ | |
"epoch": 0.58, | |
"grad_norm": 0.0, | |
"learning_rate": 9.245184678936833e-05, | |
"loss": 1.6462, | |
"step": 1103 | |
}, | |
{ | |
"epoch": 0.58, | |
"grad_norm": 0.0, | |
"learning_rate": 9.229020194112731e-05, | |
"loss": 1.6511, | |
"step": 1104 | |
}, | |
{ | |
"epoch": 0.58, | |
"grad_norm": 0.0, | |
"learning_rate": 9.212847222426017e-05, | |
"loss": 1.6535, | |
"step": 1105 | |
}, | |
{ | |
"epoch": 0.58, | |
"grad_norm": 0.0, | |
"learning_rate": 9.196665843261414e-05, | |
"loss": 1.6451, | |
"step": 1106 | |
}, | |
{ | |
"epoch": 0.58, | |
"grad_norm": 0.0, | |
"learning_rate": 9.180476136044914e-05, | |
"loss": 1.6668, | |
"step": 1107 | |
}, | |
{ | |
"epoch": 0.58, | |
"grad_norm": 0.0, | |
"learning_rate": 9.164278180243384e-05, | |
"loss": 1.6457, | |
"step": 1108 | |
}, | |
{ | |
"epoch": 0.58, | |
"grad_norm": 0.0, | |
"learning_rate": 9.148072055364183e-05, | |
"loss": 1.6601, | |
"step": 1109 | |
}, | |
{ | |
"epoch": 0.58, | |
"grad_norm": 0.0, | |
"learning_rate": 9.131857840954764e-05, | |
"loss": 1.6581, | |
"step": 1110 | |
}, | |
{ | |
"epoch": 0.58, | |
"grad_norm": 0.0, | |
"learning_rate": 9.11563561660229e-05, | |
"loss": 1.6511, | |
"step": 1111 | |
}, | |
{ | |
"epoch": 0.58, | |
"grad_norm": 0.0, | |
"learning_rate": 9.099405461933236e-05, | |
"loss": 1.6492, | |
"step": 1112 | |
}, | |
{ | |
"epoch": 0.58, | |
"grad_norm": 0.0, | |
"learning_rate": 9.083167456613011e-05, | |
"loss": 1.6426, | |
"step": 1113 | |
}, | |
{ | |
"epoch": 0.58, | |
"grad_norm": 0.0, | |
"learning_rate": 9.066921680345551e-05, | |
"loss": 1.6484, | |
"step": 1114 | |
}, | |
{ | |
"epoch": 0.58, | |
"grad_norm": 0.0, | |
"learning_rate": 9.050668212872941e-05, | |
"loss": 1.658, | |
"step": 1115 | |
}, | |
{ | |
"epoch": 0.58, | |
"grad_norm": 0.0, | |
"learning_rate": 9.034407133975013e-05, | |
"loss": 1.6586, | |
"step": 1116 | |
}, | |
{ | |
"epoch": 0.58, | |
"grad_norm": 0.0, | |
"learning_rate": 9.018138523468964e-05, | |
"loss": 1.6401, | |
"step": 1117 | |
}, | |
{ | |
"epoch": 0.58, | |
"grad_norm": 0.0, | |
"learning_rate": 9.001862461208958e-05, | |
"loss": 1.6742, | |
"step": 1118 | |
}, | |
{ | |
"epoch": 0.58, | |
"grad_norm": 0.0, | |
"learning_rate": 8.985579027085735e-05, | |
"loss": 1.6607, | |
"step": 1119 | |
}, | |
{ | |
"epoch": 0.58, | |
"grad_norm": 0.0, | |
"learning_rate": 8.969288301026223e-05, | |
"loss": 1.6634, | |
"step": 1120 | |
}, | |
{ | |
"epoch": 0.58, | |
"grad_norm": 0.0, | |
"learning_rate": 8.952990362993137e-05, | |
"loss": 1.6493, | |
"step": 1121 | |
}, | |
{ | |
"epoch": 0.58, | |
"grad_norm": 0.0, | |
"learning_rate": 8.936685292984594e-05, | |
"loss": 1.663, | |
"step": 1122 | |
}, | |
{ | |
"epoch": 0.59, | |
"grad_norm": 0.0, | |
"learning_rate": 8.92037317103372e-05, | |
"loss": 1.6431, | |
"step": 1123 | |
}, | |
{ | |
"epoch": 0.59, | |
"grad_norm": 0.0, | |
"learning_rate": 8.904054077208258e-05, | |
"loss": 1.6596, | |
"step": 1124 | |
}, | |
{ | |
"epoch": 0.59, | |
"grad_norm": 0.0, | |
"learning_rate": 8.887728091610163e-05, | |
"loss": 1.6461, | |
"step": 1125 | |
}, | |
{ | |
"epoch": 0.59, | |
"grad_norm": 0.0, | |
"learning_rate": 8.871395294375226e-05, | |
"loss": 1.6524, | |
"step": 1126 | |
}, | |
{ | |
"epoch": 0.59, | |
"grad_norm": 0.0, | |
"learning_rate": 8.855055765672669e-05, | |
"loss": 1.6602, | |
"step": 1127 | |
}, | |
{ | |
"epoch": 0.59, | |
"grad_norm": 0.0, | |
"learning_rate": 8.838709585704759e-05, | |
"loss": 1.651, | |
"step": 1128 | |
}, | |
{ | |
"epoch": 0.59, | |
"grad_norm": 0.0, | |
"learning_rate": 8.822356834706409e-05, | |
"loss": 1.6407, | |
"step": 1129 | |
}, | |
{ | |
"epoch": 0.59, | |
"grad_norm": 0.0, | |
"learning_rate": 8.80599759294478e-05, | |
"loss": 1.6485, | |
"step": 1130 | |
}, | |
{ | |
"epoch": 0.59, | |
"grad_norm": 0.0, | |
"learning_rate": 8.789631940718903e-05, | |
"loss": 1.6579, | |
"step": 1131 | |
}, | |
{ | |
"epoch": 0.59, | |
"grad_norm": 0.0, | |
"learning_rate": 8.773259958359266e-05, | |
"loss": 1.6534, | |
"step": 1132 | |
}, | |
{ | |
"epoch": 0.59, | |
"grad_norm": 0.0, | |
"learning_rate": 8.756881726227435e-05, | |
"loss": 1.6403, | |
"step": 1133 | |
}, | |
{ | |
"epoch": 0.59, | |
"grad_norm": 0.0, | |
"learning_rate": 8.740497324715643e-05, | |
"loss": 1.6482, | |
"step": 1134 | |
}, | |
{ | |
"epoch": 0.59, | |
"grad_norm": 0.0, | |
"learning_rate": 8.724106834246422e-05, | |
"loss": 1.6458, | |
"step": 1135 | |
}, | |
{ | |
"epoch": 0.59, | |
"grad_norm": 0.0, | |
"learning_rate": 8.707710335272176e-05, | |
"loss": 1.6439, | |
"step": 1136 | |
}, | |
{ | |
"epoch": 0.59, | |
"grad_norm": 0.0, | |
"learning_rate": 8.691307908274807e-05, | |
"loss": 1.6339, | |
"step": 1137 | |
}, | |
{ | |
"epoch": 0.59, | |
"grad_norm": 0.0, | |
"learning_rate": 8.674899633765315e-05, | |
"loss": 1.6528, | |
"step": 1138 | |
}, | |
{ | |
"epoch": 0.59, | |
"grad_norm": 0.0, | |
"learning_rate": 8.658485592283402e-05, | |
"loss": 1.6523, | |
"step": 1139 | |
}, | |
{ | |
"epoch": 0.59, | |
"grad_norm": 0.0, | |
"learning_rate": 8.642065864397079e-05, | |
"loss": 1.6511, | |
"step": 1140 | |
}, | |
{ | |
"epoch": 0.59, | |
"grad_norm": 0.0, | |
"learning_rate": 8.625640530702265e-05, | |
"loss": 1.6685, | |
"step": 1141 | |
}, | |
{ | |
"epoch": 0.6, | |
"grad_norm": 0.0, | |
"learning_rate": 8.609209671822398e-05, | |
"loss": 1.6429, | |
"step": 1142 | |
}, | |
{ | |
"epoch": 0.6, | |
"grad_norm": 0.0, | |
"learning_rate": 8.592773368408035e-05, | |
"loss": 1.6692, | |
"step": 1143 | |
}, | |
{ | |
"epoch": 0.6, | |
"grad_norm": 0.0, | |
"learning_rate": 8.576331701136455e-05, | |
"loss": 1.6535, | |
"step": 1144 | |
}, | |
{ | |
"epoch": 0.6, | |
"grad_norm": 0.0, | |
"learning_rate": 8.559884750711272e-05, | |
"loss": 1.643, | |
"step": 1145 | |
}, | |
{ | |
"epoch": 0.6, | |
"grad_norm": 0.0, | |
"learning_rate": 8.543432597862025e-05, | |
"loss": 1.6499, | |
"step": 1146 | |
}, | |
{ | |
"epoch": 0.6, | |
"grad_norm": 0.0, | |
"learning_rate": 8.526975323343794e-05, | |
"loss": 1.65, | |
"step": 1147 | |
}, | |
{ | |
"epoch": 0.6, | |
"grad_norm": 0.0, | |
"learning_rate": 8.510513007936795e-05, | |
"loss": 1.6457, | |
"step": 1148 | |
}, | |
{ | |
"epoch": 0.6, | |
"grad_norm": 0.0, | |
"learning_rate": 8.494045732445991e-05, | |
"loss": 1.6472, | |
"step": 1149 | |
}, | |
{ | |
"epoch": 0.6, | |
"grad_norm": 0.0, | |
"learning_rate": 8.477573577700688e-05, | |
"loss": 1.6573, | |
"step": 1150 | |
}, | |
{ | |
"epoch": 0.6, | |
"grad_norm": 0.0, | |
"learning_rate": 8.461096624554143e-05, | |
"loss": 1.6495, | |
"step": 1151 | |
}, | |
{ | |
"epoch": 0.6, | |
"grad_norm": 0.0, | |
"learning_rate": 8.444614953883167e-05, | |
"loss": 1.6559, | |
"step": 1152 | |
}, | |
{ | |
"epoch": 0.6, | |
"grad_norm": 0.0, | |
"learning_rate": 8.428128646587724e-05, | |
"loss": 1.6539, | |
"step": 1153 | |
}, | |
{ | |
"epoch": 0.6, | |
"grad_norm": 0.0, | |
"learning_rate": 8.411637783590539e-05, | |
"loss": 1.6518, | |
"step": 1154 | |
}, | |
{ | |
"epoch": 0.6, | |
"grad_norm": 0.0, | |
"learning_rate": 8.3951424458367e-05, | |
"loss": 1.6484, | |
"step": 1155 | |
}, | |
{ | |
"epoch": 0.6, | |
"grad_norm": 0.0, | |
"learning_rate": 8.378642714293257e-05, | |
"loss": 1.6373, | |
"step": 1156 | |
}, | |
{ | |
"epoch": 0.6, | |
"grad_norm": 0.0, | |
"learning_rate": 8.362138669948824e-05, | |
"loss": 1.665, | |
"step": 1157 | |
}, | |
{ | |
"epoch": 0.6, | |
"grad_norm": 0.0, | |
"learning_rate": 8.345630393813193e-05, | |
"loss": 1.6567, | |
"step": 1158 | |
}, | |
{ | |
"epoch": 0.6, | |
"grad_norm": 0.0, | |
"learning_rate": 8.329117966916916e-05, | |
"loss": 1.6567, | |
"step": 1159 | |
}, | |
{ | |
"epoch": 0.6, | |
"grad_norm": 0.0, | |
"learning_rate": 8.31260147031093e-05, | |
"loss": 1.6435, | |
"step": 1160 | |
}, | |
{ | |
"epoch": 0.61, | |
"grad_norm": 0.0, | |
"learning_rate": 8.296080985066142e-05, | |
"loss": 1.6514, | |
"step": 1161 | |
}, | |
{ | |
"epoch": 0.61, | |
"grad_norm": 0.0, | |
"learning_rate": 8.279556592273037e-05, | |
"loss": 1.6305, | |
"step": 1162 | |
}, | |
{ | |
"epoch": 0.61, | |
"grad_norm": 0.0, | |
"learning_rate": 8.263028373041285e-05, | |
"loss": 1.6617, | |
"step": 1163 | |
}, | |
{ | |
"epoch": 0.61, | |
"grad_norm": 0.0, | |
"learning_rate": 8.24649640849933e-05, | |
"loss": 1.643, | |
"step": 1164 | |
}, | |
{ | |
"epoch": 0.61, | |
"grad_norm": 0.0, | |
"learning_rate": 8.229960779794008e-05, | |
"loss": 1.6332, | |
"step": 1165 | |
}, | |
{ | |
"epoch": 0.61, | |
"grad_norm": 0.0, | |
"learning_rate": 8.213421568090134e-05, | |
"loss": 1.6653, | |
"step": 1166 | |
}, | |
{ | |
"epoch": 0.61, | |
"grad_norm": 0.0, | |
"learning_rate": 8.196878854570115e-05, | |
"loss": 1.6741, | |
"step": 1167 | |
}, | |
{ | |
"epoch": 0.61, | |
"grad_norm": 0.0, | |
"learning_rate": 8.180332720433542e-05, | |
"loss": 1.6382, | |
"step": 1168 | |
}, | |
{ | |
"epoch": 0.61, | |
"grad_norm": 0.0, | |
"learning_rate": 8.163783246896802e-05, | |
"loss": 1.6506, | |
"step": 1169 | |
}, | |
{ | |
"epoch": 0.61, | |
"grad_norm": 0.0, | |
"learning_rate": 8.147230515192666e-05, | |
"loss": 1.6456, | |
"step": 1170 | |
}, | |
{ | |
"epoch": 0.61, | |
"grad_norm": 0.0, | |
"learning_rate": 8.130674606569904e-05, | |
"loss": 1.6328, | |
"step": 1171 | |
}, | |
{ | |
"epoch": 0.61, | |
"grad_norm": 0.0, | |
"learning_rate": 8.114115602292878e-05, | |
"loss": 1.6647, | |
"step": 1172 | |
}, | |
{ | |
"epoch": 0.61, | |
"grad_norm": 0.0, | |
"learning_rate": 8.097553583641143e-05, | |
"loss": 1.669, | |
"step": 1173 | |
}, | |
{ | |
"epoch": 0.61, | |
"grad_norm": 0.0, | |
"learning_rate": 8.080988631909051e-05, | |
"loss": 1.6638, | |
"step": 1174 | |
}, | |
{ | |
"epoch": 0.61, | |
"grad_norm": 0.0, | |
"learning_rate": 8.064420828405352e-05, | |
"loss": 1.6683, | |
"step": 1175 | |
}, | |
{ | |
"epoch": 0.61, | |
"grad_norm": 0.0, | |
"learning_rate": 8.047850254452793e-05, | |
"loss": 1.6491, | |
"step": 1176 | |
}, | |
{ | |
"epoch": 0.61, | |
"grad_norm": 0.0, | |
"learning_rate": 8.031276991387721e-05, | |
"loss": 1.6768, | |
"step": 1177 | |
}, | |
{ | |
"epoch": 0.61, | |
"grad_norm": 0.0, | |
"learning_rate": 8.014701120559678e-05, | |
"loss": 1.6415, | |
"step": 1178 | |
}, | |
{ | |
"epoch": 0.61, | |
"grad_norm": 0.0, | |
"learning_rate": 7.998122723331014e-05, | |
"loss": 1.6904, | |
"step": 1179 | |
}, | |
{ | |
"epoch": 0.62, | |
"grad_norm": 0.0, | |
"learning_rate": 7.981541881076471e-05, | |
"loss": 1.6457, | |
"step": 1180 | |
}, | |
{ | |
"epoch": 0.62, | |
"grad_norm": 0.0, | |
"learning_rate": 7.9649586751828e-05, | |
"loss": 1.641, | |
"step": 1181 | |
}, | |
{ | |
"epoch": 0.62, | |
"grad_norm": 0.0, | |
"learning_rate": 7.948373187048349e-05, | |
"loss": 1.6324, | |
"step": 1182 | |
}, | |
{ | |
"epoch": 0.62, | |
"grad_norm": 0.0, | |
"learning_rate": 7.93178549808267e-05, | |
"loss": 1.6697, | |
"step": 1183 | |
}, | |
{ | |
"epoch": 0.62, | |
"grad_norm": 0.0, | |
"learning_rate": 7.915195689706114e-05, | |
"loss": 1.6603, | |
"step": 1184 | |
}, | |
{ | |
"epoch": 0.62, | |
"grad_norm": 0.0, | |
"learning_rate": 7.898603843349447e-05, | |
"loss": 1.6354, | |
"step": 1185 | |
}, | |
{ | |
"epoch": 0.62, | |
"grad_norm": 0.0, | |
"learning_rate": 7.882010040453424e-05, | |
"loss": 1.6642, | |
"step": 1186 | |
}, | |
{ | |
"epoch": 0.62, | |
"grad_norm": 0.0, | |
"learning_rate": 7.865414362468411e-05, | |
"loss": 1.6449, | |
"step": 1187 | |
}, | |
{ | |
"epoch": 0.62, | |
"grad_norm": 0.0, | |
"learning_rate": 7.848816890853977e-05, | |
"loss": 1.6612, | |
"step": 1188 | |
}, | |
{ | |
"epoch": 0.62, | |
"grad_norm": 0.0, | |
"learning_rate": 7.832217707078493e-05, | |
"loss": 1.6754, | |
"step": 1189 | |
}, | |
{ | |
"epoch": 0.62, | |
"grad_norm": 0.0, | |
"learning_rate": 7.815616892618737e-05, | |
"loss": 1.6453, | |
"step": 1190 | |
}, | |
{ | |
"epoch": 0.62, | |
"grad_norm": 0.0, | |
"learning_rate": 7.799014528959491e-05, | |
"loss": 1.6616, | |
"step": 1191 | |
}, | |
{ | |
"epoch": 0.62, | |
"grad_norm": 0.0, | |
"learning_rate": 7.782410697593136e-05, | |
"loss": 1.6547, | |
"step": 1192 | |
}, | |
{ | |
"epoch": 0.62, | |
"grad_norm": 0.0, | |
"learning_rate": 7.765805480019266e-05, | |
"loss": 1.6552, | |
"step": 1193 | |
}, | |
{ | |
"epoch": 0.62, | |
"grad_norm": 0.0, | |
"learning_rate": 7.74919895774427e-05, | |
"loss": 1.6465, | |
"step": 1194 | |
}, | |
{ | |
"epoch": 0.62, | |
"grad_norm": 0.0, | |
"learning_rate": 7.732591212280949e-05, | |
"loss": 1.6573, | |
"step": 1195 | |
}, | |
{ | |
"epoch": 0.62, | |
"grad_norm": 0.0, | |
"learning_rate": 7.715982325148102e-05, | |
"loss": 1.6531, | |
"step": 1196 | |
}, | |
{ | |
"epoch": 0.62, | |
"grad_norm": 0.0, | |
"learning_rate": 7.699372377870139e-05, | |
"loss": 1.665, | |
"step": 1197 | |
}, | |
{ | |
"epoch": 0.62, | |
"grad_norm": 0.0, | |
"learning_rate": 7.682761451976663e-05, | |
"loss": 1.6697, | |
"step": 1198 | |
}, | |
{ | |
"epoch": 0.63, | |
"grad_norm": 0.0, | |
"learning_rate": 7.66614962900209e-05, | |
"loss": 1.6505, | |
"step": 1199 | |
}, | |
{ | |
"epoch": 0.63, | |
"grad_norm": 0.0, | |
"learning_rate": 7.649536990485236e-05, | |
"loss": 1.6427, | |
"step": 1200 | |
}, | |
{ | |
"epoch": 0.63, | |
"grad_norm": 0.0, | |
"learning_rate": 7.632923617968918e-05, | |
"loss": 1.6552, | |
"step": 1201 | |
}, | |
{ | |
"epoch": 0.63, | |
"grad_norm": 0.0, | |
"learning_rate": 7.61630959299956e-05, | |
"loss": 1.6734, | |
"step": 1202 | |
}, | |
{ | |
"epoch": 0.63, | |
"grad_norm": 0.0, | |
"learning_rate": 7.599694997126785e-05, | |
"loss": 1.6578, | |
"step": 1203 | |
}, | |
{ | |
"epoch": 0.63, | |
"grad_norm": 0.0, | |
"learning_rate": 7.58307991190302e-05, | |
"loss": 1.6611, | |
"step": 1204 | |
}, | |
{ | |
"epoch": 0.63, | |
"grad_norm": 0.0, | |
"learning_rate": 7.566464418883094e-05, | |
"loss": 1.6704, | |
"step": 1205 | |
}, | |
{ | |
"epoch": 0.63, | |
"grad_norm": 0.0, | |
"learning_rate": 7.549848599623837e-05, | |
"loss": 1.6766, | |
"step": 1206 | |
}, | |
{ | |
"epoch": 0.63, | |
"grad_norm": 0.0, | |
"learning_rate": 7.533232535683681e-05, | |
"loss": 1.6458, | |
"step": 1207 | |
}, | |
{ | |
"epoch": 0.63, | |
"grad_norm": 0.0, | |
"learning_rate": 7.516616308622258e-05, | |
"loss": 1.6576, | |
"step": 1208 | |
}, | |
{ | |
"epoch": 0.63, | |
"grad_norm": 0.0, | |
"learning_rate": 7.5e-05, | |
"loss": 1.6839, | |
"step": 1209 | |
}, | |
{ | |
"epoch": 0.63, | |
"grad_norm": 0.0, | |
"learning_rate": 7.483383691377742e-05, | |
"loss": 1.6491, | |
"step": 1210 | |
}, | |
{ | |
"epoch": 0.63, | |
"grad_norm": 0.0, | |
"learning_rate": 7.466767464316319e-05, | |
"loss": 1.6465, | |
"step": 1211 | |
}, | |
{ | |
"epoch": 0.63, | |
"grad_norm": 0.0, | |
"learning_rate": 7.450151400376163e-05, | |
"loss": 1.6703, | |
"step": 1212 | |
}, | |
{ | |
"epoch": 0.63, | |
"grad_norm": 0.0, | |
"learning_rate": 7.433535581116905e-05, | |
"loss": 1.6567, | |
"step": 1213 | |
}, | |
{ | |
"epoch": 0.63, | |
"grad_norm": 0.0, | |
"learning_rate": 7.416920088096979e-05, | |
"loss": 1.6635, | |
"step": 1214 | |
}, | |
{ | |
"epoch": 0.63, | |
"grad_norm": 0.0, | |
"learning_rate": 7.400305002873215e-05, | |
"loss": 1.6509, | |
"step": 1215 | |
}, | |
{ | |
"epoch": 0.63, | |
"grad_norm": 0.0, | |
"learning_rate": 7.38369040700044e-05, | |
"loss": 1.653, | |
"step": 1216 | |
}, | |
{ | |
"epoch": 0.63, | |
"grad_norm": 0.0, | |
"learning_rate": 7.367076382031081e-05, | |
"loss": 1.6432, | |
"step": 1217 | |
}, | |
{ | |
"epoch": 0.63, | |
"grad_norm": 0.0, | |
"learning_rate": 7.350463009514764e-05, | |
"loss": 1.6442, | |
"step": 1218 | |
}, | |
{ | |
"epoch": 0.64, | |
"grad_norm": 0.0, | |
"learning_rate": 7.33385037099791e-05, | |
"loss": 1.6514, | |
"step": 1219 | |
}, | |
{ | |
"epoch": 0.64, | |
"grad_norm": 0.0, | |
"learning_rate": 7.317238548023337e-05, | |
"loss": 1.6485, | |
"step": 1220 | |
}, | |
{ | |
"epoch": 0.64, | |
"grad_norm": 0.0, | |
"learning_rate": 7.300627622129863e-05, | |
"loss": 1.6721, | |
"step": 1221 | |
}, | |
{ | |
"epoch": 0.64, | |
"grad_norm": 0.0, | |
"learning_rate": 7.284017674851897e-05, | |
"loss": 1.6281, | |
"step": 1222 | |
}, | |
{ | |
"epoch": 0.64, | |
"grad_norm": 0.0, | |
"learning_rate": 7.267408787719051e-05, | |
"loss": 1.6509, | |
"step": 1223 | |
}, | |
{ | |
"epoch": 0.64, | |
"grad_norm": 0.0, | |
"learning_rate": 7.250801042255729e-05, | |
"loss": 1.6426, | |
"step": 1224 | |
}, | |
{ | |
"epoch": 0.64, | |
"grad_norm": 0.0, | |
"learning_rate": 7.234194519980734e-05, | |
"loss": 1.6354, | |
"step": 1225 | |
}, | |
{ | |
"epoch": 0.64, | |
"grad_norm": 0.0, | |
"learning_rate": 7.217589302406863e-05, | |
"loss": 1.6793, | |
"step": 1226 | |
}, | |
{ | |
"epoch": 0.64, | |
"grad_norm": 0.0, | |
"learning_rate": 7.200985471040511e-05, | |
"loss": 1.6683, | |
"step": 1227 | |
}, | |
{ | |
"epoch": 0.64, | |
"grad_norm": 0.0, | |
"learning_rate": 7.184383107381263e-05, | |
"loss": 1.6515, | |
"step": 1228 | |
}, | |
{ | |
"epoch": 0.64, | |
"grad_norm": 0.0, | |
"learning_rate": 7.167782292921507e-05, | |
"loss": 1.6619, | |
"step": 1229 | |
}, | |
{ | |
"epoch": 0.64, | |
"grad_norm": 0.0, | |
"learning_rate": 7.151183109146023e-05, | |
"loss": 1.6582, | |
"step": 1230 | |
}, | |
{ | |
"epoch": 0.64, | |
"grad_norm": 0.0, | |
"learning_rate": 7.134585637531589e-05, | |
"loss": 1.6654, | |
"step": 1231 | |
}, | |
{ | |
"epoch": 0.64, | |
"grad_norm": 0.0, | |
"learning_rate": 7.117989959546575e-05, | |
"loss": 1.6448, | |
"step": 1232 | |
}, | |
{ | |
"epoch": 0.64, | |
"grad_norm": 0.0, | |
"learning_rate": 7.101396156650552e-05, | |
"loss": 1.6662, | |
"step": 1233 | |
}, | |
{ | |
"epoch": 0.64, | |
"grad_norm": 0.0, | |
"learning_rate": 7.084804310293884e-05, | |
"loss": 1.6737, | |
"step": 1234 | |
}, | |
{ | |
"epoch": 0.64, | |
"grad_norm": 0.0, | |
"learning_rate": 7.068214501917332e-05, | |
"loss": 1.6598, | |
"step": 1235 | |
}, | |
{ | |
"epoch": 0.64, | |
"grad_norm": 0.0, | |
"learning_rate": 7.051626812951653e-05, | |
"loss": 1.6662, | |
"step": 1236 | |
}, | |
{ | |
"epoch": 0.64, | |
"grad_norm": 0.0, | |
"learning_rate": 7.035041324817201e-05, | |
"loss": 1.6579, | |
"step": 1237 | |
}, | |
{ | |
"epoch": 0.65, | |
"grad_norm": 0.0, | |
"learning_rate": 7.018458118923529e-05, | |
"loss": 1.6716, | |
"step": 1238 | |
}, | |
{ | |
"epoch": 0.65, | |
"grad_norm": 0.0, | |
"learning_rate": 7.001877276668988e-05, | |
"loss": 1.6577, | |
"step": 1239 | |
}, | |
{ | |
"epoch": 0.65, | |
"grad_norm": 0.0, | |
"learning_rate": 6.985298879440323e-05, | |
"loss": 1.6551, | |
"step": 1240 | |
}, | |
{ | |
"epoch": 0.65, | |
"grad_norm": 0.0, | |
"learning_rate": 6.968723008612281e-05, | |
"loss": 1.6443, | |
"step": 1241 | |
}, | |
{ | |
"epoch": 0.65, | |
"grad_norm": 0.0, | |
"learning_rate": 6.952149745547208e-05, | |
"loss": 1.6396, | |
"step": 1242 | |
}, | |
{ | |
"epoch": 0.65, | |
"grad_norm": 0.0, | |
"learning_rate": 6.93557917159465e-05, | |
"loss": 1.6367, | |
"step": 1243 | |
}, | |
{ | |
"epoch": 0.65, | |
"grad_norm": 0.0, | |
"learning_rate": 6.91901136809095e-05, | |
"loss": 1.6412, | |
"step": 1244 | |
}, | |
{ | |
"epoch": 0.65, | |
"grad_norm": 0.0, | |
"learning_rate": 6.902446416358858e-05, | |
"loss": 1.6408, | |
"step": 1245 | |
}, | |
{ | |
"epoch": 0.65, | |
"grad_norm": 0.0, | |
"learning_rate": 6.885884397707122e-05, | |
"loss": 1.6522, | |
"step": 1246 | |
}, | |
{ | |
"epoch": 0.65, | |
"grad_norm": 0.0, | |
"learning_rate": 6.869325393430096e-05, | |
"loss": 1.6512, | |
"step": 1247 | |
}, | |
{ | |
"epoch": 0.65, | |
"grad_norm": 0.0, | |
"learning_rate": 6.852769484807336e-05, | |
"loss": 1.6593, | |
"step": 1248 | |
}, | |
{ | |
"epoch": 0.65, | |
"grad_norm": 0.0, | |
"learning_rate": 6.836216753103201e-05, | |
"loss": 1.6422, | |
"step": 1249 | |
}, | |
{ | |
"epoch": 0.65, | |
"grad_norm": 0.0, | |
"learning_rate": 6.81966727956646e-05, | |
"loss": 1.647, | |
"step": 1250 | |
}, | |
{ | |
"epoch": 0.65, | |
"grad_norm": 0.0, | |
"learning_rate": 6.803121145429887e-05, | |
"loss": 1.6452, | |
"step": 1251 | |
}, | |
{ | |
"epoch": 0.65, | |
"grad_norm": 0.0, | |
"learning_rate": 6.786578431909868e-05, | |
"loss": 1.6588, | |
"step": 1252 | |
}, | |
{ | |
"epoch": 0.65, | |
"grad_norm": 0.0, | |
"learning_rate": 6.770039220205994e-05, | |
"loss": 1.6464, | |
"step": 1253 | |
}, | |
{ | |
"epoch": 0.65, | |
"grad_norm": 0.0, | |
"learning_rate": 6.75350359150067e-05, | |
"loss": 1.6516, | |
"step": 1254 | |
}, | |
{ | |
"epoch": 0.65, | |
"grad_norm": 0.0, | |
"learning_rate": 6.736971626958715e-05, | |
"loss": 1.6529, | |
"step": 1255 | |
}, | |
{ | |
"epoch": 0.65, | |
"grad_norm": 0.0, | |
"learning_rate": 6.720443407726961e-05, | |
"loss": 1.6451, | |
"step": 1256 | |
}, | |
{ | |
"epoch": 0.66, | |
"grad_norm": 0.0, | |
"learning_rate": 6.703919014933857e-05, | |
"loss": 1.6485, | |
"step": 1257 | |
}, | |
{ | |
"epoch": 0.66, | |
"grad_norm": 0.0, | |
"learning_rate": 6.687398529689069e-05, | |
"loss": 1.6402, | |
"step": 1258 | |
}, | |
{ | |
"epoch": 0.66, | |
"grad_norm": 0.0, | |
"learning_rate": 6.670882033083084e-05, | |
"loss": 1.6563, | |
"step": 1259 | |
}, | |
{ | |
"epoch": 0.66, | |
"grad_norm": 0.0, | |
"learning_rate": 6.654369606186806e-05, | |
"loss": 1.6402, | |
"step": 1260 | |
}, | |
{ | |
"epoch": 0.66, | |
"grad_norm": 0.0, | |
"learning_rate": 6.637861330051174e-05, | |
"loss": 1.6544, | |
"step": 1261 | |
}, | |
{ | |
"epoch": 0.66, | |
"grad_norm": 0.0, | |
"learning_rate": 6.621357285706743e-05, | |
"loss": 1.6612, | |
"step": 1262 | |
}, | |
{ | |
"epoch": 0.66, | |
"grad_norm": 0.0, | |
"learning_rate": 6.604857554163299e-05, | |
"loss": 1.6604, | |
"step": 1263 | |
}, | |
{ | |
"epoch": 0.66, | |
"grad_norm": 0.0, | |
"learning_rate": 6.58836221640946e-05, | |
"loss": 1.6356, | |
"step": 1264 | |
}, | |
{ | |
"epoch": 0.66, | |
"grad_norm": 0.0, | |
"learning_rate": 6.571871353412276e-05, | |
"loss": 1.6588, | |
"step": 1265 | |
}, | |
{ | |
"epoch": 0.66, | |
"grad_norm": 0.0, | |
"learning_rate": 6.555385046116831e-05, | |
"loss": 1.6524, | |
"step": 1266 | |
}, | |
{ | |
"epoch": 0.66, | |
"grad_norm": 0.0, | |
"learning_rate": 6.538903375445855e-05, | |
"loss": 1.6548, | |
"step": 1267 | |
}, | |
{ | |
"epoch": 0.66, | |
"grad_norm": 0.0, | |
"learning_rate": 6.52242642229931e-05, | |
"loss": 1.633, | |
"step": 1268 | |
}, | |
{ | |
"epoch": 0.66, | |
"grad_norm": 0.0, | |
"learning_rate": 6.505954267554006e-05, | |
"loss": 1.6558, | |
"step": 1269 | |
}, | |
{ | |
"epoch": 0.66, | |
"grad_norm": 0.0, | |
"learning_rate": 6.489486992063202e-05, | |
"loss": 1.6506, | |
"step": 1270 | |
}, | |
{ | |
"epoch": 0.66, | |
"grad_norm": 0.0, | |
"learning_rate": 6.473024676656204e-05, | |
"loss": 1.645, | |
"step": 1271 | |
}, | |
{ | |
"epoch": 0.66, | |
"grad_norm": 0.0, | |
"learning_rate": 6.456567402137971e-05, | |
"loss": 1.6401, | |
"step": 1272 | |
}, | |
{ | |
"epoch": 0.66, | |
"grad_norm": 0.0, | |
"learning_rate": 6.440115249288726e-05, | |
"loss": 1.6338, | |
"step": 1273 | |
}, | |
{ | |
"epoch": 0.66, | |
"grad_norm": 0.0, | |
"learning_rate": 6.423668298863543e-05, | |
"loss": 1.654, | |
"step": 1274 | |
}, | |
{ | |
"epoch": 0.66, | |
"grad_norm": 0.0, | |
"learning_rate": 6.407226631591964e-05, | |
"loss": 1.6552, | |
"step": 1275 | |
}, | |
{ | |
"epoch": 0.67, | |
"grad_norm": 0.0, | |
"learning_rate": 6.390790328177599e-05, | |
"loss": 1.6668, | |
"step": 1276 | |
}, | |
{ | |
"epoch": 0.67, | |
"grad_norm": 0.0, | |
"learning_rate": 6.374359469297732e-05, | |
"loss": 1.6359, | |
"step": 1277 | |
}, | |
{ | |
"epoch": 0.67, | |
"grad_norm": 0.0, | |
"learning_rate": 6.357934135602919e-05, | |
"loss": 1.6516, | |
"step": 1278 | |
}, | |
{ | |
"epoch": 0.67, | |
"grad_norm": 0.0, | |
"learning_rate": 6.341514407716596e-05, | |
"loss": 1.6405, | |
"step": 1279 | |
}, | |
{ | |
"epoch": 0.67, | |
"grad_norm": 0.0, | |
"learning_rate": 6.325100366234684e-05, | |
"loss": 1.6671, | |
"step": 1280 | |
}, | |
{ | |
"epoch": 0.67, | |
"grad_norm": 0.0, | |
"learning_rate": 6.308692091725192e-05, | |
"loss": 1.6351, | |
"step": 1281 | |
}, | |
{ | |
"epoch": 0.67, | |
"grad_norm": 0.0, | |
"learning_rate": 6.292289664727823e-05, | |
"loss": 1.6466, | |
"step": 1282 | |
}, | |
{ | |
"epoch": 0.67, | |
"grad_norm": 0.0, | |
"learning_rate": 6.275893165753577e-05, | |
"loss": 1.6532, | |
"step": 1283 | |
}, | |
{ | |
"epoch": 0.67, | |
"grad_norm": 0.0, | |
"learning_rate": 6.259502675284353e-05, | |
"loss": 1.6606, | |
"step": 1284 | |
}, | |
{ | |
"epoch": 0.67, | |
"grad_norm": 0.0, | |
"learning_rate": 6.243118273772565e-05, | |
"loss": 1.6552, | |
"step": 1285 | |
}, | |
{ | |
"epoch": 0.67, | |
"grad_norm": 0.0, | |
"learning_rate": 6.226740041640734e-05, | |
"loss": 1.66, | |
"step": 1286 | |
}, | |
{ | |
"epoch": 0.67, | |
"grad_norm": 0.0, | |
"learning_rate": 6.210368059281098e-05, | |
"loss": 1.6436, | |
"step": 1287 | |
}, | |
{ | |
"epoch": 0.67, | |
"grad_norm": 0.0, | |
"learning_rate": 6.194002407055219e-05, | |
"loss": 1.647, | |
"step": 1288 | |
}, | |
{ | |
"epoch": 0.67, | |
"grad_norm": 0.0, | |
"learning_rate": 6.177643165293592e-05, | |
"loss": 1.6626, | |
"step": 1289 | |
}, | |
{ | |
"epoch": 0.67, | |
"grad_norm": 0.0, | |
"learning_rate": 6.16129041429524e-05, | |
"loss": 1.6482, | |
"step": 1290 | |
}, | |
{ | |
"epoch": 0.67, | |
"grad_norm": 0.0, | |
"learning_rate": 6.14494423432733e-05, | |
"loss": 1.6561, | |
"step": 1291 | |
}, | |
{ | |
"epoch": 0.67, | |
"grad_norm": 0.0, | |
"learning_rate": 6.128604705624774e-05, | |
"loss": 1.6531, | |
"step": 1292 | |
}, | |
{ | |
"epoch": 0.67, | |
"grad_norm": 0.0, | |
"learning_rate": 6.112271908389836e-05, | |
"loss": 1.6525, | |
"step": 1293 | |
}, | |
{ | |
"epoch": 0.67, | |
"grad_norm": 0.0, | |
"learning_rate": 6.095945922791743e-05, | |
"loss": 1.6455, | |
"step": 1294 | |
}, | |
{ | |
"epoch": 0.68, | |
"grad_norm": 0.0, | |
"learning_rate": 6.079626828966279e-05, | |
"loss": 1.6523, | |
"step": 1295 | |
}, | |
{ | |
"epoch": 0.68, | |
"grad_norm": 0.0, | |
"learning_rate": 6.0633147070154056e-05, | |
"loss": 1.6521, | |
"step": 1296 | |
}, | |
{ | |
"epoch": 0.68, | |
"grad_norm": 0.0, | |
"learning_rate": 6.047009637006864e-05, | |
"loss": 1.6517, | |
"step": 1297 | |
}, | |
{ | |
"epoch": 0.68, | |
"grad_norm": 0.0, | |
"learning_rate": 6.030711698973777e-05, | |
"loss": 1.6693, | |
"step": 1298 | |
}, | |
{ | |
"epoch": 0.68, | |
"grad_norm": 0.0, | |
"learning_rate": 6.0144209729142635e-05, | |
"loss": 1.6266, | |
"step": 1299 | |
}, | |
{ | |
"epoch": 0.68, | |
"grad_norm": 0.0, | |
"learning_rate": 5.998137538791041e-05, | |
"loss": 1.6617, | |
"step": 1300 | |
}, | |
{ | |
"epoch": 0.68, | |
"grad_norm": 0.0, | |
"learning_rate": 5.9818614765310357e-05, | |
"loss": 1.6477, | |
"step": 1301 | |
}, | |
{ | |
"epoch": 0.68, | |
"grad_norm": 0.0, | |
"learning_rate": 5.9655928660249866e-05, | |
"loss": 1.6646, | |
"step": 1302 | |
}, | |
{ | |
"epoch": 0.68, | |
"grad_norm": 0.0, | |
"learning_rate": 5.949331787127059e-05, | |
"loss": 1.6432, | |
"step": 1303 | |
}, | |
{ | |
"epoch": 0.68, | |
"grad_norm": 0.0, | |
"learning_rate": 5.933078319654448e-05, | |
"loss": 1.6517, | |
"step": 1304 | |
}, | |
{ | |
"epoch": 0.68, | |
"grad_norm": 0.0, | |
"learning_rate": 5.916832543386989e-05, | |
"loss": 1.6269, | |
"step": 1305 | |
}, | |
{ | |
"epoch": 0.68, | |
"grad_norm": 0.0, | |
"learning_rate": 5.900594538066763e-05, | |
"loss": 1.6435, | |
"step": 1306 | |
}, | |
{ | |
"epoch": 0.68, | |
"grad_norm": 0.0, | |
"learning_rate": 5.884364383397711e-05, | |
"loss": 1.6655, | |
"step": 1307 | |
}, | |
{ | |
"epoch": 0.68, | |
"grad_norm": 0.0, | |
"learning_rate": 5.868142159045236e-05, | |
"loss": 1.6608, | |
"step": 1308 | |
}, | |
{ | |
"epoch": 0.68, | |
"grad_norm": 0.0, | |
"learning_rate": 5.851927944635816e-05, | |
"loss": 1.6476, | |
"step": 1309 | |
}, | |
{ | |
"epoch": 0.68, | |
"grad_norm": 0.0, | |
"learning_rate": 5.835721819756615e-05, | |
"loss": 1.6532, | |
"step": 1310 | |
}, | |
{ | |
"epoch": 0.68, | |
"grad_norm": 0.0, | |
"learning_rate": 5.819523863955085e-05, | |
"loss": 1.6637, | |
"step": 1311 | |
}, | |
{ | |
"epoch": 0.68, | |
"grad_norm": 0.0, | |
"learning_rate": 5.803334156738585e-05, | |
"loss": 1.6487, | |
"step": 1312 | |
}, | |
{ | |
"epoch": 0.68, | |
"grad_norm": 0.0, | |
"learning_rate": 5.7871527775739834e-05, | |
"loss": 1.6485, | |
"step": 1313 | |
}, | |
{ | |
"epoch": 0.69, | |
"grad_norm": 0.0, | |
"learning_rate": 5.7709798058872695e-05, | |
"loss": 1.6427, | |
"step": 1314 | |
}, | |
{ | |
"epoch": 0.69, | |
"grad_norm": 0.0, | |
"learning_rate": 5.754815321063167e-05, | |
"loss": 1.6786, | |
"step": 1315 | |
}, | |
{ | |
"epoch": 0.69, | |
"grad_norm": 0.0, | |
"learning_rate": 5.7386594024447404e-05, | |
"loss": 1.671, | |
"step": 1316 | |
}, | |
{ | |
"epoch": 0.69, | |
"grad_norm": 0.0, | |
"learning_rate": 5.72251212933301e-05, | |
"loss": 1.6434, | |
"step": 1317 | |
}, | |
{ | |
"epoch": 0.69, | |
"grad_norm": 0.0, | |
"learning_rate": 5.7063735809865544e-05, | |
"loss": 1.6422, | |
"step": 1318 | |
}, | |
{ | |
"epoch": 0.69, | |
"grad_norm": 0.0, | |
"learning_rate": 5.6902438366211317e-05, | |
"loss": 1.6693, | |
"step": 1319 | |
}, | |
{ | |
"epoch": 0.69, | |
"grad_norm": 0.0, | |
"learning_rate": 5.674122975409285e-05, | |
"loss": 1.6573, | |
"step": 1320 | |
}, | |
{ | |
"epoch": 0.69, | |
"grad_norm": 0.0, | |
"learning_rate": 5.658011076479952e-05, | |
"loss": 1.6554, | |
"step": 1321 | |
}, | |
{ | |
"epoch": 0.69, | |
"grad_norm": 0.0, | |
"learning_rate": 5.641908218918081e-05, | |
"loss": 1.6376, | |
"step": 1322 | |
}, | |
{ | |
"epoch": 0.69, | |
"grad_norm": 0.0, | |
"learning_rate": 5.6258144817642425e-05, | |
"loss": 1.647, | |
"step": 1323 | |
}, | |
{ | |
"epoch": 0.69, | |
"grad_norm": 0.0, | |
"learning_rate": 5.6097299440142375e-05, | |
"loss": 1.6604, | |
"step": 1324 | |
}, | |
{ | |
"epoch": 0.69, | |
"grad_norm": 0.0, | |
"learning_rate": 5.593654684618711e-05, | |
"loss": 1.6606, | |
"step": 1325 | |
}, | |
{ | |
"epoch": 0.69, | |
"grad_norm": 0.0, | |
"learning_rate": 5.577588782482767e-05, | |
"loss": 1.6473, | |
"step": 1326 | |
}, | |
{ | |
"epoch": 0.69, | |
"grad_norm": 0.0, | |
"learning_rate": 5.561532316465582e-05, | |
"loss": 1.6525, | |
"step": 1327 | |
}, | |
{ | |
"epoch": 0.69, | |
"grad_norm": 0.0, | |
"learning_rate": 5.545485365380011e-05, | |
"loss": 1.6582, | |
"step": 1328 | |
}, | |
{ | |
"epoch": 0.69, | |
"grad_norm": 0.0, | |
"learning_rate": 5.529448007992207e-05, | |
"loss": 1.6542, | |
"step": 1329 | |
}, | |
{ | |
"epoch": 0.69, | |
"grad_norm": 0.0, | |
"learning_rate": 5.5134203230212326e-05, | |
"loss": 1.668, | |
"step": 1330 | |
}, | |
{ | |
"epoch": 0.69, | |
"grad_norm": 0.0, | |
"learning_rate": 5.4974023891386745e-05, | |
"loss": 1.6379, | |
"step": 1331 | |
}, | |
{ | |
"epoch": 0.69, | |
"grad_norm": 0.0, | |
"learning_rate": 5.4813942849682536e-05, | |
"loss": 1.646, | |
"step": 1332 | |
}, | |
{ | |
"epoch": 0.69, | |
"grad_norm": 0.0, | |
"learning_rate": 5.465396089085446e-05, | |
"loss": 1.64, | |
"step": 1333 | |
}, | |
{ | |
"epoch": 0.7, | |
"grad_norm": 0.0, | |
"learning_rate": 5.4494078800170874e-05, | |
"loss": 1.6564, | |
"step": 1334 | |
}, | |
{ | |
"epoch": 0.7, | |
"grad_norm": 0.0, | |
"learning_rate": 5.4334297362409997e-05, | |
"loss": 1.6561, | |
"step": 1335 | |
}, | |
{ | |
"epoch": 0.7, | |
"grad_norm": 0.0, | |
"learning_rate": 5.417461736185595e-05, | |
"loss": 1.6663, | |
"step": 1336 | |
}, | |
{ | |
"epoch": 0.7, | |
"grad_norm": 0.0, | |
"learning_rate": 5.4015039582294965e-05, | |
"loss": 1.653, | |
"step": 1337 | |
}, | |
{ | |
"epoch": 0.7, | |
"grad_norm": 0.0, | |
"learning_rate": 5.385556480701154e-05, | |
"loss": 1.6463, | |
"step": 1338 | |
}, | |
{ | |
"epoch": 0.7, | |
"grad_norm": 0.0, | |
"learning_rate": 5.3696193818784565e-05, | |
"loss": 1.6471, | |
"step": 1339 | |
}, | |
{ | |
"epoch": 0.7, | |
"grad_norm": 0.0, | |
"learning_rate": 5.3536927399883475e-05, | |
"loss": 1.6579, | |
"step": 1340 | |
}, | |
{ | |
"epoch": 0.7, | |
"grad_norm": 0.0, | |
"learning_rate": 5.3377766332064464e-05, | |
"loss": 1.6483, | |
"step": 1341 | |
}, | |
{ | |
"epoch": 0.7, | |
"grad_norm": 0.0, | |
"learning_rate": 5.321871139656658e-05, | |
"loss": 1.6453, | |
"step": 1342 | |
}, | |
{ | |
"epoch": 0.7, | |
"grad_norm": 0.0, | |
"learning_rate": 5.305976337410795e-05, | |
"loss": 1.6469, | |
"step": 1343 | |
}, | |
{ | |
"epoch": 0.7, | |
"grad_norm": 0.0, | |
"learning_rate": 5.2900923044881894e-05, | |
"loss": 1.6607, | |
"step": 1344 | |
}, | |
{ | |
"epoch": 0.7, | |
"grad_norm": 0.0, | |
"learning_rate": 5.2742191188553146e-05, | |
"loss": 1.62, | |
"step": 1345 | |
}, | |
{ | |
"epoch": 0.7, | |
"grad_norm": 0.0, | |
"learning_rate": 5.258356858425399e-05, | |
"loss": 1.6772, | |
"step": 1346 | |
}, | |
{ | |
"epoch": 0.7, | |
"grad_norm": 0.0, | |
"learning_rate": 5.242505601058045e-05, | |
"loss": 1.6699, | |
"step": 1347 | |
}, | |
{ | |
"epoch": 0.7, | |
"grad_norm": 0.0, | |
"learning_rate": 5.226665424558846e-05, | |
"loss": 1.6511, | |
"step": 1348 | |
}, | |
{ | |
"epoch": 0.7, | |
"grad_norm": 0.0, | |
"learning_rate": 5.2108364066790085e-05, | |
"loss": 1.6518, | |
"step": 1349 | |
}, | |
{ | |
"epoch": 0.7, | |
"grad_norm": 0.0, | |
"learning_rate": 5.195018625114962e-05, | |
"loss": 1.6279, | |
"step": 1350 | |
}, | |
{ | |
"epoch": 0.7, | |
"grad_norm": 0.0, | |
"learning_rate": 5.179212157507985e-05, | |
"loss": 1.6583, | |
"step": 1351 | |
}, | |
{ | |
"epoch": 0.7, | |
"grad_norm": 0.0, | |
"learning_rate": 5.163417081443823e-05, | |
"loss": 1.6621, | |
"step": 1352 | |
}, | |
{ | |
"epoch": 0.71, | |
"grad_norm": 0.0, | |
"learning_rate": 5.147633474452306e-05, | |
"loss": 1.6808, | |
"step": 1353 | |
}, | |
{ | |
"epoch": 0.71, | |
"grad_norm": 0.0, | |
"learning_rate": 5.1318614140069676e-05, | |
"loss": 1.6535, | |
"step": 1354 | |
}, | |
{ | |
"epoch": 0.71, | |
"grad_norm": 0.0, | |
"learning_rate": 5.116100977524661e-05, | |
"loss": 1.6431, | |
"step": 1355 | |
}, | |
{ | |
"epoch": 0.71, | |
"grad_norm": 0.0, | |
"learning_rate": 5.100352242365195e-05, | |
"loss": 1.6462, | |
"step": 1356 | |
}, | |
{ | |
"epoch": 0.71, | |
"grad_norm": 0.0, | |
"learning_rate": 5.084615285830931e-05, | |
"loss": 1.6663, | |
"step": 1357 | |
}, | |
{ | |
"epoch": 0.71, | |
"grad_norm": 0.0, | |
"learning_rate": 5.0688901851664196e-05, | |
"loss": 1.6512, | |
"step": 1358 | |
}, | |
{ | |
"epoch": 0.71, | |
"grad_norm": 0.0, | |
"learning_rate": 5.053177017558019e-05, | |
"loss": 1.6566, | |
"step": 1359 | |
}, | |
{ | |
"epoch": 0.71, | |
"grad_norm": 0.0, | |
"learning_rate": 5.037475860133511e-05, | |
"loss": 1.6625, | |
"step": 1360 | |
}, | |
{ | |
"epoch": 0.71, | |
"grad_norm": 0.0, | |
"learning_rate": 5.021786789961726e-05, | |
"loss": 1.6439, | |
"step": 1361 | |
}, | |
{ | |
"epoch": 0.71, | |
"grad_norm": 0.0, | |
"learning_rate": 5.0061098840521685e-05, | |
"loss": 1.6374, | |
"step": 1362 | |
}, | |
{ | |
"epoch": 0.71, | |
"grad_norm": 0.0, | |
"learning_rate": 4.9904452193546315e-05, | |
"loss": 1.6642, | |
"step": 1363 | |
}, | |
{ | |
"epoch": 0.71, | |
"grad_norm": 0.0, | |
"learning_rate": 4.974792872758819e-05, | |
"loss": 1.659, | |
"step": 1364 | |
}, | |
{ | |
"epoch": 0.71, | |
"grad_norm": 0.0, | |
"learning_rate": 4.95915292109398e-05, | |
"loss": 1.6552, | |
"step": 1365 | |
}, | |
{ | |
"epoch": 0.71, | |
"grad_norm": 0.0, | |
"learning_rate": 4.9435254411285154e-05, | |
"loss": 1.6592, | |
"step": 1366 | |
}, | |
{ | |
"epoch": 0.71, | |
"grad_norm": 0.0, | |
"learning_rate": 4.927910509569614e-05, | |
"loss": 1.6618, | |
"step": 1367 | |
}, | |
{ | |
"epoch": 0.71, | |
"grad_norm": 0.0, | |
"learning_rate": 4.912308203062866e-05, | |
"loss": 1.6392, | |
"step": 1368 | |
}, | |
{ | |
"epoch": 0.71, | |
"grad_norm": 0.0, | |
"learning_rate": 4.896718598191899e-05, | |
"loss": 1.6523, | |
"step": 1369 | |
}, | |
{ | |
"epoch": 0.71, | |
"grad_norm": 0.0, | |
"learning_rate": 4.881141771477989e-05, | |
"loss": 1.6445, | |
"step": 1370 | |
}, | |
{ | |
"epoch": 0.71, | |
"grad_norm": 0.0, | |
"learning_rate": 4.8655777993796936e-05, | |
"loss": 1.6486, | |
"step": 1371 | |
}, | |
{ | |
"epoch": 0.72, | |
"grad_norm": 0.0, | |
"learning_rate": 4.850026758292472e-05, | |
"loss": 1.6529, | |
"step": 1372 | |
}, | |
{ | |
"epoch": 0.72, | |
"grad_norm": 0.0, | |
"learning_rate": 4.834488724548312e-05, | |
"loss": 1.6399, | |
"step": 1373 | |
}, | |
{ | |
"epoch": 0.72, | |
"grad_norm": 0.0, | |
"learning_rate": 4.8189637744153564e-05, | |
"loss": 1.6613, | |
"step": 1374 | |
}, | |
{ | |
"epoch": 0.72, | |
"grad_norm": 0.0, | |
"learning_rate": 4.803451984097525e-05, | |
"loss": 1.6398, | |
"step": 1375 | |
}, | |
{ | |
"epoch": 0.72, | |
"grad_norm": 0.0, | |
"learning_rate": 4.7879534297341464e-05, | |
"loss": 1.6586, | |
"step": 1376 | |
}, | |
{ | |
"epoch": 0.72, | |
"grad_norm": 0.0, | |
"learning_rate": 4.7724681873995777e-05, | |
"loss": 1.6537, | |
"step": 1377 | |
}, | |
{ | |
"epoch": 0.72, | |
"grad_norm": 0.0, | |
"learning_rate": 4.756996333102836e-05, | |
"loss": 1.6592, | |
"step": 1378 | |
}, | |
{ | |
"epoch": 0.72, | |
"grad_norm": 0.0, | |
"learning_rate": 4.741537942787222e-05, | |
"loss": 1.6483, | |
"step": 1379 | |
}, | |
{ | |
"epoch": 0.72, | |
"grad_norm": 0.0, | |
"learning_rate": 4.7260930923299484e-05, | |
"loss": 1.6765, | |
"step": 1380 | |
}, | |
{ | |
"epoch": 0.72, | |
"grad_norm": 0.0, | |
"learning_rate": 4.7106618575417714e-05, | |
"loss": 1.6772, | |
"step": 1381 | |
}, | |
{ | |
"epoch": 0.72, | |
"grad_norm": 0.0, | |
"learning_rate": 4.6952443141666103e-05, | |
"loss": 1.6431, | |
"step": 1382 | |
}, | |
{ | |
"epoch": 0.72, | |
"grad_norm": 0.0, | |
"learning_rate": 4.6798405378811826e-05, | |
"loss": 1.6478, | |
"step": 1383 | |
}, | |
{ | |
"epoch": 0.72, | |
"grad_norm": 0.0, | |
"learning_rate": 4.664450604294631e-05, | |
"loss": 1.6486, | |
"step": 1384 | |
}, | |
{ | |
"epoch": 0.72, | |
"grad_norm": 0.0, | |
"learning_rate": 4.6490745889481504e-05, | |
"loss": 1.6608, | |
"step": 1385 | |
}, | |
{ | |
"epoch": 0.72, | |
"grad_norm": 0.0, | |
"learning_rate": 4.6337125673146186e-05, | |
"loss": 1.6465, | |
"step": 1386 | |
}, | |
{ | |
"epoch": 0.72, | |
"grad_norm": 0.0, | |
"learning_rate": 4.618364614798225e-05, | |
"loss": 1.6433, | |
"step": 1387 | |
}, | |
{ | |
"epoch": 0.72, | |
"grad_norm": 0.0, | |
"learning_rate": 4.603030806734101e-05, | |
"loss": 1.6496, | |
"step": 1388 | |
}, | |
{ | |
"epoch": 0.72, | |
"grad_norm": 0.0, | |
"learning_rate": 4.5877112183879524e-05, | |
"loss": 1.6543, | |
"step": 1389 | |
}, | |
{ | |
"epoch": 0.72, | |
"grad_norm": 0.0, | |
"learning_rate": 4.5724059249556844e-05, | |
"loss": 1.6533, | |
"step": 1390 | |
}, | |
{ | |
"epoch": 0.73, | |
"grad_norm": 0.0, | |
"learning_rate": 4.557115001563038e-05, | |
"loss": 1.682, | |
"step": 1391 | |
}, | |
{ | |
"epoch": 0.73, | |
"grad_norm": 0.0, | |
"learning_rate": 4.54183852326522e-05, | |
"loss": 1.6614, | |
"step": 1392 | |
}, | |
{ | |
"epoch": 0.73, | |
"grad_norm": 0.0, | |
"learning_rate": 4.5265765650465305e-05, | |
"loss": 1.6581, | |
"step": 1393 | |
}, | |
{ | |
"epoch": 0.73, | |
"grad_norm": 0.0, | |
"learning_rate": 4.511329201820001e-05, | |
"loss": 1.6455, | |
"step": 1394 | |
}, | |
{ | |
"epoch": 0.73, | |
"grad_norm": 0.0, | |
"learning_rate": 4.496096508427024e-05, | |
"loss": 1.6493, | |
"step": 1395 | |
}, | |
{ | |
"epoch": 0.73, | |
"grad_norm": 0.0, | |
"learning_rate": 4.4808785596369826e-05, | |
"loss": 1.6334, | |
"step": 1396 | |
}, | |
{ | |
"epoch": 0.73, | |
"grad_norm": 0.0, | |
"learning_rate": 4.465675430146887e-05, | |
"loss": 1.6465, | |
"step": 1397 | |
}, | |
{ | |
"epoch": 0.73, | |
"grad_norm": 0.0, | |
"learning_rate": 4.45048719458101e-05, | |
"loss": 1.6418, | |
"step": 1398 | |
}, | |
{ | |
"epoch": 0.73, | |
"grad_norm": 0.0, | |
"learning_rate": 4.435313927490516e-05, | |
"loss": 1.6699, | |
"step": 1399 | |
}, | |
{ | |
"epoch": 0.73, | |
"grad_norm": 0.0, | |
"learning_rate": 4.4201557033530954e-05, | |
"loss": 1.6528, | |
"step": 1400 | |
}, | |
{ | |
"epoch": 0.73, | |
"grad_norm": 0.0, | |
"learning_rate": 4.405012596572604e-05, | |
"loss": 1.6372, | |
"step": 1401 | |
}, | |
{ | |
"epoch": 0.73, | |
"grad_norm": 0.0, | |
"learning_rate": 4.38988468147869e-05, | |
"loss": 1.6564, | |
"step": 1402 | |
}, | |
{ | |
"epoch": 0.73, | |
"grad_norm": 0.0, | |
"learning_rate": 4.3747720323264354e-05, | |
"loss": 1.6625, | |
"step": 1403 | |
}, | |
{ | |
"epoch": 0.73, | |
"grad_norm": 0.0, | |
"learning_rate": 4.359674723295995e-05, | |
"loss": 1.6409, | |
"step": 1404 | |
}, | |
{ | |
"epoch": 0.73, | |
"grad_norm": 0.0, | |
"learning_rate": 4.344592828492214e-05, | |
"loss": 1.659, | |
"step": 1405 | |
}, | |
{ | |
"epoch": 0.73, | |
"grad_norm": 0.0, | |
"learning_rate": 4.329526421944293e-05, | |
"loss": 1.6584, | |
"step": 1406 | |
}, | |
{ | |
"epoch": 0.73, | |
"grad_norm": 0.0, | |
"learning_rate": 4.314475577605394e-05, | |
"loss": 1.6453, | |
"step": 1407 | |
}, | |
{ | |
"epoch": 0.73, | |
"grad_norm": 0.0, | |
"learning_rate": 4.299440369352306e-05, | |
"loss": 1.6546, | |
"step": 1408 | |
}, | |
{ | |
"epoch": 0.73, | |
"grad_norm": 0.0, | |
"learning_rate": 4.2844208709850534e-05, | |
"loss": 1.6569, | |
"step": 1409 | |
}, | |
{ | |
"epoch": 0.74, | |
"grad_norm": 0.0, | |
"learning_rate": 4.2694171562265664e-05, | |
"loss": 1.6359, | |
"step": 1410 | |
}, | |
{ | |
"epoch": 0.74, | |
"grad_norm": 0.0, | |
"learning_rate": 4.254429298722284e-05, | |
"loss": 1.6599, | |
"step": 1411 | |
}, | |
{ | |
"epoch": 0.74, | |
"grad_norm": 0.0, | |
"learning_rate": 4.2394573720398223e-05, | |
"loss": 1.6468, | |
"step": 1412 | |
}, | |
{ | |
"epoch": 0.74, | |
"grad_norm": 0.0, | |
"learning_rate": 4.224501449668601e-05, | |
"loss": 1.6632, | |
"step": 1413 | |
}, | |
{ | |
"epoch": 0.74, | |
"grad_norm": 0.0, | |
"learning_rate": 4.209561605019473e-05, | |
"loss": 1.6586, | |
"step": 1414 | |
}, | |
{ | |
"epoch": 0.74, | |
"grad_norm": 0.0, | |
"learning_rate": 4.194637911424387e-05, | |
"loss": 1.6675, | |
"step": 1415 | |
}, | |
{ | |
"epoch": 0.74, | |
"grad_norm": 0.0, | |
"learning_rate": 4.1797304421360014e-05, | |
"loss": 1.6526, | |
"step": 1416 | |
}, | |
{ | |
"epoch": 0.74, | |
"grad_norm": 0.0, | |
"learning_rate": 4.164839270327351e-05, | |
"loss": 1.6755, | |
"step": 1417 | |
}, | |
{ | |
"epoch": 0.74, | |
"grad_norm": 0.0, | |
"learning_rate": 4.149964469091463e-05, | |
"loss": 1.6393, | |
"step": 1418 | |
}, | |
{ | |
"epoch": 0.74, | |
"grad_norm": 0.0, | |
"learning_rate": 4.1351061114410195e-05, | |
"loss": 1.653, | |
"step": 1419 | |
}, | |
{ | |
"epoch": 0.74, | |
"grad_norm": 0.0, | |
"learning_rate": 4.1202642703079805e-05, | |
"loss": 1.6508, | |
"step": 1420 | |
}, | |
{ | |
"epoch": 0.74, | |
"grad_norm": 0.0, | |
"learning_rate": 4.1054390185432463e-05, | |
"loss": 1.6368, | |
"step": 1421 | |
}, | |
{ | |
"epoch": 0.74, | |
"grad_norm": 0.0, | |
"learning_rate": 4.090630428916275e-05, | |
"loss": 1.6481, | |
"step": 1422 | |
}, | |
{ | |
"epoch": 0.74, | |
"grad_norm": 0.0, | |
"learning_rate": 4.075838574114753e-05, | |
"loss": 1.6522, | |
"step": 1423 | |
}, | |
{ | |
"epoch": 0.74, | |
"grad_norm": 0.0, | |
"learning_rate": 4.0610635267442104e-05, | |
"loss": 1.666, | |
"step": 1424 | |
}, | |
{ | |
"epoch": 0.74, | |
"grad_norm": 0.0, | |
"learning_rate": 4.046305359327692e-05, | |
"loss": 1.6404, | |
"step": 1425 | |
}, | |
{ | |
"epoch": 0.74, | |
"grad_norm": 0.0, | |
"learning_rate": 4.0315641443053745e-05, | |
"loss": 1.651, | |
"step": 1426 | |
}, | |
{ | |
"epoch": 0.74, | |
"grad_norm": 0.0, | |
"learning_rate": 4.016839954034235e-05, | |
"loss": 1.6376, | |
"step": 1427 | |
}, | |
{ | |
"epoch": 0.74, | |
"grad_norm": 0.0, | |
"learning_rate": 4.002132860787676e-05, | |
"loss": 1.6556, | |
"step": 1428 | |
}, | |
{ | |
"epoch": 0.74, | |
"grad_norm": 0.0, | |
"learning_rate": 3.9874429367551894e-05, | |
"loss": 1.6413, | |
"step": 1429 | |
}, | |
{ | |
"epoch": 0.75, | |
"grad_norm": 0.0, | |
"learning_rate": 3.972770254041981e-05, | |
"loss": 1.6387, | |
"step": 1430 | |
}, | |
{ | |
"epoch": 0.75, | |
"grad_norm": 0.0, | |
"learning_rate": 3.95811488466864e-05, | |
"loss": 1.6136, | |
"step": 1431 | |
}, | |
{ | |
"epoch": 0.75, | |
"grad_norm": 0.0, | |
"learning_rate": 3.9434769005707605e-05, | |
"loss": 1.6501, | |
"step": 1432 | |
}, | |
{ | |
"epoch": 0.75, | |
"grad_norm": 0.0, | |
"learning_rate": 3.9288563735986136e-05, | |
"loss": 1.6592, | |
"step": 1433 | |
}, | |
{ | |
"epoch": 0.75, | |
"grad_norm": 0.0, | |
"learning_rate": 3.914253375516773e-05, | |
"loss": 1.6638, | |
"step": 1434 | |
}, | |
{ | |
"epoch": 0.75, | |
"grad_norm": 0.0, | |
"learning_rate": 3.899667978003778e-05, | |
"loss": 1.6384, | |
"step": 1435 | |
}, | |
{ | |
"epoch": 0.75, | |
"grad_norm": 0.0, | |
"learning_rate": 3.8851002526517716e-05, | |
"loss": 1.6576, | |
"step": 1436 | |
}, | |
{ | |
"epoch": 0.75, | |
"grad_norm": 0.0, | |
"learning_rate": 3.87055027096616e-05, | |
"loss": 1.6348, | |
"step": 1437 | |
}, | |
{ | |
"epoch": 0.75, | |
"grad_norm": 0.0, | |
"learning_rate": 3.856018104365245e-05, | |
"loss": 1.6494, | |
"step": 1438 | |
}, | |
{ | |
"epoch": 0.75, | |
"grad_norm": 0.0, | |
"learning_rate": 3.841503824179894e-05, | |
"loss": 1.6363, | |
"step": 1439 | |
}, | |
{ | |
"epoch": 0.75, | |
"grad_norm": 0.0, | |
"learning_rate": 3.82700750165317e-05, | |
"loss": 1.629, | |
"step": 1440 | |
}, | |
{ | |
"epoch": 0.75, | |
"grad_norm": 0.0, | |
"learning_rate": 3.812529207939999e-05, | |
"loss": 1.6613, | |
"step": 1441 | |
}, | |
{ | |
"epoch": 0.75, | |
"grad_norm": 0.0, | |
"learning_rate": 3.7980690141068045e-05, | |
"loss": 1.6437, | |
"step": 1442 | |
}, | |
{ | |
"epoch": 0.75, | |
"grad_norm": 0.0, | |
"learning_rate": 3.783626991131175e-05, | |
"loss": 1.6571, | |
"step": 1443 | |
}, | |
{ | |
"epoch": 0.75, | |
"grad_norm": 0.0, | |
"learning_rate": 3.769203209901507e-05, | |
"loss": 1.6508, | |
"step": 1444 | |
}, | |
{ | |
"epoch": 0.75, | |
"grad_norm": 0.0, | |
"learning_rate": 3.7547977412166494e-05, | |
"loss": 1.6556, | |
"step": 1445 | |
}, | |
{ | |
"epoch": 0.75, | |
"grad_norm": 0.0, | |
"learning_rate": 3.740410655785576e-05, | |
"loss": 1.649, | |
"step": 1446 | |
}, | |
{ | |
"epoch": 0.75, | |
"grad_norm": 0.0, | |
"learning_rate": 3.726042024227017e-05, | |
"loss": 1.6686, | |
"step": 1447 | |
}, | |
{ | |
"epoch": 0.75, | |
"grad_norm": 0.0, | |
"learning_rate": 3.711691917069129e-05, | |
"loss": 1.6567, | |
"step": 1448 | |
}, | |
{ | |
"epoch": 0.76, | |
"grad_norm": 0.0, | |
"learning_rate": 3.697360404749134e-05, | |
"loss": 1.6664, | |
"step": 1449 | |
}, | |
{ | |
"epoch": 0.76, | |
"grad_norm": 0.0, | |
"learning_rate": 3.683047557612994e-05, | |
"loss": 1.6503, | |
"step": 1450 | |
}, | |
{ | |
"epoch": 0.76, | |
"grad_norm": 0.0, | |
"learning_rate": 3.668753445915039e-05, | |
"loss": 1.6409, | |
"step": 1451 | |
}, | |
{ | |
"epoch": 0.76, | |
"grad_norm": 0.0, | |
"learning_rate": 3.654478139817648e-05, | |
"loss": 1.6394, | |
"step": 1452 | |
}, | |
{ | |
"epoch": 0.76, | |
"grad_norm": 0.0, | |
"learning_rate": 3.640221709390884e-05, | |
"loss": 1.6565, | |
"step": 1453 | |
}, | |
{ | |
"epoch": 0.76, | |
"grad_norm": 0.0, | |
"learning_rate": 3.6259842246121696e-05, | |
"loss": 1.6894, | |
"step": 1454 | |
}, | |
{ | |
"epoch": 0.76, | |
"grad_norm": 0.0, | |
"learning_rate": 3.611765755365921e-05, | |
"loss": 1.6403, | |
"step": 1455 | |
}, | |
{ | |
"epoch": 0.76, | |
"grad_norm": 0.0, | |
"learning_rate": 3.597566371443228e-05, | |
"loss": 1.6617, | |
"step": 1456 | |
}, | |
{ | |
"epoch": 0.76, | |
"grad_norm": 0.0, | |
"learning_rate": 3.58338614254149e-05, | |
"loss": 1.6593, | |
"step": 1457 | |
}, | |
{ | |
"epoch": 0.76, | |
"grad_norm": 0.0, | |
"learning_rate": 3.5692251382640964e-05, | |
"loss": 1.6486, | |
"step": 1458 | |
}, | |
{ | |
"epoch": 0.76, | |
"grad_norm": 0.0, | |
"learning_rate": 3.555083428120059e-05, | |
"loss": 1.6511, | |
"step": 1459 | |
}, | |
{ | |
"epoch": 0.76, | |
"grad_norm": 0.0, | |
"learning_rate": 3.5409610815237005e-05, | |
"loss": 1.6529, | |
"step": 1460 | |
}, | |
{ | |
"epoch": 0.76, | |
"grad_norm": 0.0, | |
"learning_rate": 3.52685816779428e-05, | |
"loss": 1.6458, | |
"step": 1461 | |
}, | |
{ | |
"epoch": 0.76, | |
"grad_norm": 0.0, | |
"learning_rate": 3.51277475615569e-05, | |
"loss": 1.6322, | |
"step": 1462 | |
}, | |
{ | |
"epoch": 0.76, | |
"grad_norm": 0.0, | |
"learning_rate": 3.4987109157360765e-05, | |
"loss": 1.6413, | |
"step": 1463 | |
}, | |
{ | |
"epoch": 0.76, | |
"grad_norm": 0.0, | |
"learning_rate": 3.484666715567542e-05, | |
"loss": 1.685, | |
"step": 1464 | |
}, | |
{ | |
"epoch": 0.76, | |
"grad_norm": 0.0, | |
"learning_rate": 3.470642224585771e-05, | |
"loss": 1.6498, | |
"step": 1465 | |
}, | |
{ | |
"epoch": 0.76, | |
"grad_norm": 0.0, | |
"learning_rate": 3.456637511629711e-05, | |
"loss": 1.6652, | |
"step": 1466 | |
}, | |
{ | |
"epoch": 0.76, | |
"grad_norm": 0.0, | |
"learning_rate": 3.442652645441224e-05, | |
"loss": 1.6708, | |
"step": 1467 | |
}, | |
{ | |
"epoch": 0.77, | |
"grad_norm": 0.0, | |
"learning_rate": 3.428687694664764e-05, | |
"loss": 1.6316, | |
"step": 1468 | |
}, | |
{ | |
"epoch": 0.77, | |
"grad_norm": 0.0, | |
"learning_rate": 3.4147427278470204e-05, | |
"loss": 1.6631, | |
"step": 1469 | |
}, | |
{ | |
"epoch": 0.77, | |
"grad_norm": 0.0, | |
"learning_rate": 3.400817813436602e-05, | |
"loss": 1.6604, | |
"step": 1470 | |
}, | |
{ | |
"epoch": 0.77, | |
"grad_norm": 0.0, | |
"learning_rate": 3.386913019783679e-05, | |
"loss": 1.6383, | |
"step": 1471 | |
}, | |
{ | |
"epoch": 0.77, | |
"grad_norm": 0.0, | |
"learning_rate": 3.373028415139672e-05, | |
"loss": 1.6545, | |
"step": 1472 | |
}, | |
{ | |
"epoch": 0.77, | |
"grad_norm": 0.0, | |
"learning_rate": 3.3591640676568917e-05, | |
"loss": 1.64, | |
"step": 1473 | |
}, | |
{ | |
"epoch": 0.77, | |
"grad_norm": 0.0, | |
"learning_rate": 3.345320045388228e-05, | |
"loss": 1.6632, | |
"step": 1474 | |
}, | |
{ | |
"epoch": 0.77, | |
"grad_norm": 0.0, | |
"learning_rate": 3.331496416286795e-05, | |
"loss": 1.6558, | |
"step": 1475 | |
}, | |
{ | |
"epoch": 0.77, | |
"grad_norm": 0.0, | |
"learning_rate": 3.317693248205619e-05, | |
"loss": 1.6523, | |
"step": 1476 | |
}, | |
{ | |
"epoch": 0.77, | |
"grad_norm": 0.0, | |
"learning_rate": 3.30391060889728e-05, | |
"loss": 1.6374, | |
"step": 1477 | |
}, | |
{ | |
"epoch": 0.77, | |
"grad_norm": 0.0, | |
"learning_rate": 3.290148566013605e-05, | |
"loss": 1.6424, | |
"step": 1478 | |
}, | |
{ | |
"epoch": 0.77, | |
"grad_norm": 0.0, | |
"learning_rate": 3.276407187105315e-05, | |
"loss": 1.6442, | |
"step": 1479 | |
}, | |
{ | |
"epoch": 0.77, | |
"grad_norm": 0.0, | |
"learning_rate": 3.2626865396217085e-05, | |
"loss": 1.6563, | |
"step": 1480 | |
}, | |
{ | |
"epoch": 0.77, | |
"grad_norm": 0.0, | |
"learning_rate": 3.24898669091032e-05, | |
"loss": 1.6587, | |
"step": 1481 | |
}, | |
{ | |
"epoch": 0.77, | |
"grad_norm": 0.0, | |
"learning_rate": 3.2353077082165974e-05, | |
"loss": 1.662, | |
"step": 1482 | |
}, | |
{ | |
"epoch": 0.77, | |
"grad_norm": 0.0, | |
"learning_rate": 3.2216496586835635e-05, | |
"loss": 1.6491, | |
"step": 1483 | |
}, | |
{ | |
"epoch": 0.77, | |
"grad_norm": 0.0, | |
"learning_rate": 3.208012609351495e-05, | |
"loss": 1.6337, | |
"step": 1484 | |
}, | |
{ | |
"epoch": 0.77, | |
"grad_norm": 0.0, | |
"learning_rate": 3.19439662715759e-05, | |
"loss": 1.6681, | |
"step": 1485 | |
}, | |
{ | |
"epoch": 0.77, | |
"grad_norm": 0.0, | |
"learning_rate": 3.180801778935633e-05, | |
"loss": 1.6564, | |
"step": 1486 | |
}, | |
{ | |
"epoch": 0.78, | |
"grad_norm": 0.0, | |
"learning_rate": 3.167228131415683e-05, | |
"loss": 1.6422, | |
"step": 1487 | |
}, | |
{ | |
"epoch": 0.78, | |
"grad_norm": 0.0, | |
"learning_rate": 3.153675751223722e-05, | |
"loss": 1.6545, | |
"step": 1488 | |
}, | |
{ | |
"epoch": 0.78, | |
"grad_norm": 0.0, | |
"learning_rate": 3.140144704881357e-05, | |
"loss": 1.6448, | |
"step": 1489 | |
}, | |
{ | |
"epoch": 0.78, | |
"grad_norm": 0.0, | |
"learning_rate": 3.1266350588054635e-05, | |
"loss": 1.6553, | |
"step": 1490 | |
}, | |
{ | |
"epoch": 0.78, | |
"grad_norm": 0.0, | |
"learning_rate": 3.1131468793078884e-05, | |
"loss": 1.6507, | |
"step": 1491 | |
}, | |
{ | |
"epoch": 0.78, | |
"grad_norm": 0.0, | |
"learning_rate": 3.099680232595096e-05, | |
"loss": 1.6609, | |
"step": 1492 | |
}, | |
{ | |
"epoch": 0.78, | |
"grad_norm": 0.0, | |
"learning_rate": 3.086235184767873e-05, | |
"loss": 1.6382, | |
"step": 1493 | |
}, | |
{ | |
"epoch": 0.78, | |
"grad_norm": 0.0, | |
"learning_rate": 3.072811801820971e-05, | |
"loss": 1.6802, | |
"step": 1494 | |
}, | |
{ | |
"epoch": 0.78, | |
"grad_norm": 0.0, | |
"learning_rate": 3.0594101496428164e-05, | |
"loss": 1.6502, | |
"step": 1495 | |
}, | |
{ | |
"epoch": 0.78, | |
"grad_norm": 0.0, | |
"learning_rate": 3.046030294015156e-05, | |
"loss": 1.6642, | |
"step": 1496 | |
}, | |
{ | |
"epoch": 0.78, | |
"grad_norm": 0.0, | |
"learning_rate": 3.0326723006127638e-05, | |
"loss": 1.6398, | |
"step": 1497 | |
}, | |
{ | |
"epoch": 0.78, | |
"grad_norm": 0.0, | |
"learning_rate": 3.0193362350030877e-05, | |
"loss": 1.6484, | |
"step": 1498 | |
}, | |
{ | |
"epoch": 0.78, | |
"grad_norm": 0.0, | |
"learning_rate": 3.006022162645957e-05, | |
"loss": 1.6617, | |
"step": 1499 | |
}, | |
{ | |
"epoch": 0.78, | |
"grad_norm": 0.0, | |
"learning_rate": 2.9927301488932364e-05, | |
"loss": 1.6502, | |
"step": 1500 | |
} | |
], | |
"logging_steps": 1, | |
"max_steps": 1918, | |
"num_input_tokens_seen": 0, | |
"num_train_epochs": 1, | |
"save_steps": 100, | |
"total_flos": 2.002827860980531e+19, | |
"train_batch_size": 32, | |
"trial_name": null, | |
"trial_params": null | |
} | |
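A minimal sketch of how one might inspect this checkpoint's log: it assumes the file above has been downloaded locally as trainer_state.json, and the filename plus the optional matplotlib plotting are illustrative additions, not part of the original state file. Only keys that actually appear in the JSON (log_history, step, loss, learning_rate, global_step, epoch) are read.

```python
import json

# Load the trainer state saved alongside the checkpoint.
# The local path "trainer_state.json" is an assumption for this example.
with open("trainer_state.json") as f:
    state = json.load(f)

# Each log_history entry was written every step ("logging_steps": 1)
# and carries epoch, grad_norm, learning_rate, loss, and step.
history = state["log_history"]
steps = [entry["step"] for entry in history]
losses = [entry["loss"] for entry in history]
lrs = [entry["learning_rate"] for entry in history]

print(f"global_step={state['global_step']}, epoch={state['epoch']:.4f}")
print(f"last logged loss={losses[-1]}, last learning rate={lrs[-1]:.3e}")

# Optional: visualize the loss and learning-rate schedule over training.
import matplotlib.pyplot as plt

fig, (ax_loss, ax_lr) = plt.subplots(1, 2, figsize=(10, 4))
ax_loss.plot(steps, losses)
ax_loss.set_xlabel("step")
ax_loss.set_ylabel("loss")
ax_lr.plot(steps, lrs)
ax_lr.set_xlabel("step")
ax_lr.set_ylabel("learning_rate")
plt.tight_layout()
plt.show()
```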