| { |
| "best_metric": null, |
| "best_model_checkpoint": null, |
| "epoch": 6.0, |
| "eval_steps": 500, |
| "global_step": 2244, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.9411764705882355e-06, |
| "loss": 1.8086, |
| "step": 1 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 5.882352941176471e-06, |
| "loss": 1.8711, |
| "step": 2 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 8.823529411764707e-06, |
| "loss": 1.7612, |
| "step": 3 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.1764705882352942e-05, |
| "loss": 1.6183, |
| "step": 4 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.4705882352941177e-05, |
| "loss": 1.7558, |
| "step": 5 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.7647058823529414e-05, |
| "loss": 1.7354, |
| "step": 6 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.058823529411765e-05, |
| "loss": 1.6666, |
| "step": 7 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.3529411764705884e-05, |
| "loss": 1.7628, |
| "step": 8 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.647058823529412e-05, |
| "loss": 1.7613, |
| "step": 9 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 2.9411764705882354e-05, |
| "loss": 1.7046, |
| "step": 10 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 3.235294117647059e-05, |
| "loss": 1.6056, |
| "step": 11 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 3.529411764705883e-05, |
| "loss": 1.6852, |
| "step": 12 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 3.8235294117647055e-05, |
| "loss": 1.5557, |
| "step": 13 |
| }, |
| { |
| "epoch": 0.04, |
| "learning_rate": 4.11764705882353e-05, |
| "loss": 1.3811, |
| "step": 14 |
| }, |
| { |
| "epoch": 0.04, |
| "learning_rate": 4.411764705882353e-05, |
| "loss": 1.5156, |
| "step": 15 |
| }, |
| { |
| "epoch": 0.04, |
| "learning_rate": 4.705882352941177e-05, |
| "loss": 1.4152, |
| "step": 16 |
| }, |
| { |
| "epoch": 0.05, |
| "learning_rate": 5e-05, |
| "loss": 1.4006, |
| "step": 17 |
| }, |
| { |
| "epoch": 0.05, |
| "learning_rate": 5.294117647058824e-05, |
| "loss": 1.3898, |
| "step": 18 |
| }, |
| { |
| "epoch": 0.05, |
| "learning_rate": 5.588235294117647e-05, |
| "loss": 1.4441, |
| "step": 19 |
| }, |
| { |
| "epoch": 0.05, |
| "learning_rate": 5.882352941176471e-05, |
| "loss": 1.3938, |
| "step": 20 |
| }, |
| { |
| "epoch": 0.06, |
| "learning_rate": 6.176470588235295e-05, |
| "loss": 1.4322, |
| "step": 21 |
| }, |
| { |
| "epoch": 0.06, |
| "learning_rate": 6.470588235294118e-05, |
| "loss": 1.2907, |
| "step": 22 |
| }, |
| { |
| "epoch": 0.06, |
| "learning_rate": 6.764705882352942e-05, |
| "loss": 1.287, |
| "step": 23 |
| }, |
| { |
| "epoch": 0.06, |
| "learning_rate": 7.058823529411765e-05, |
| "loss": 1.454, |
| "step": 24 |
| }, |
| { |
| "epoch": 0.07, |
| "learning_rate": 7.352941176470589e-05, |
| "loss": 1.3485, |
| "step": 25 |
| }, |
| { |
| "epoch": 0.07, |
| "learning_rate": 7.647058823529411e-05, |
| "loss": 1.3291, |
| "step": 26 |
| }, |
| { |
| "epoch": 0.07, |
| "learning_rate": 7.941176470588235e-05, |
| "loss": 1.2963, |
| "step": 27 |
| }, |
| { |
| "epoch": 0.07, |
| "learning_rate": 8.23529411764706e-05, |
| "loss": 1.2419, |
| "step": 28 |
| }, |
| { |
| "epoch": 0.08, |
| "learning_rate": 8.529411764705883e-05, |
| "loss": 1.2957, |
| "step": 29 |
| }, |
| { |
| "epoch": 0.08, |
| "learning_rate": 8.823529411764706e-05, |
| "loss": 1.3194, |
| "step": 30 |
| }, |
| { |
| "epoch": 0.08, |
| "learning_rate": 9.11764705882353e-05, |
| "loss": 1.2657, |
| "step": 31 |
| }, |
| { |
| "epoch": 0.09, |
| "learning_rate": 9.411764705882353e-05, |
| "loss": 1.3297, |
| "step": 32 |
| }, |
| { |
| "epoch": 0.09, |
| "learning_rate": 9.705882352941177e-05, |
| "loss": 1.1944, |
| "step": 33 |
| }, |
| { |
| "epoch": 0.09, |
| "learning_rate": 0.0001, |
| "loss": 1.148, |
| "step": 34 |
| }, |
| { |
| "epoch": 0.09, |
| "learning_rate": 0.00010294117647058823, |
| "loss": 1.2873, |
| "step": 35 |
| }, |
| { |
| "epoch": 0.1, |
| "learning_rate": 0.00010588235294117647, |
| "loss": 1.1811, |
| "step": 36 |
| }, |
| { |
| "epoch": 0.1, |
| "learning_rate": 0.0001088235294117647, |
| "loss": 1.2284, |
| "step": 37 |
| }, |
| { |
| "epoch": 0.1, |
| "learning_rate": 0.00011176470588235294, |
| "loss": 1.2336, |
| "step": 38 |
| }, |
| { |
| "epoch": 0.1, |
| "learning_rate": 0.00011470588235294118, |
| "loss": 1.1922, |
| "step": 39 |
| }, |
| { |
| "epoch": 0.11, |
| "learning_rate": 0.00011764705882352942, |
| "loss": 1.1631, |
| "step": 40 |
| }, |
| { |
| "epoch": 0.11, |
| "learning_rate": 0.00012058823529411765, |
| "loss": 1.1637, |
| "step": 41 |
| }, |
| { |
| "epoch": 0.11, |
| "learning_rate": 0.0001235294117647059, |
| "loss": 1.1504, |
| "step": 42 |
| }, |
| { |
| "epoch": 0.11, |
| "learning_rate": 0.0001264705882352941, |
| "loss": 1.1399, |
| "step": 43 |
| }, |
| { |
| "epoch": 0.12, |
| "learning_rate": 0.00012941176470588237, |
| "loss": 1.1534, |
| "step": 44 |
| }, |
| { |
| "epoch": 0.12, |
| "learning_rate": 0.0001323529411764706, |
| "loss": 1.1416, |
| "step": 45 |
| }, |
| { |
| "epoch": 0.12, |
| "learning_rate": 0.00013529411764705884, |
| "loss": 1.223, |
| "step": 46 |
| }, |
| { |
| "epoch": 0.13, |
| "learning_rate": 0.00013823529411764707, |
| "loss": 1.209, |
| "step": 47 |
| }, |
| { |
| "epoch": 0.13, |
| "learning_rate": 0.0001411764705882353, |
| "loss": 1.1532, |
| "step": 48 |
| }, |
| { |
| "epoch": 0.13, |
| "learning_rate": 0.00014411764705882354, |
| "loss": 1.1484, |
| "step": 49 |
| }, |
| { |
| "epoch": 0.13, |
| "learning_rate": 0.00014705882352941178, |
| "loss": 1.1157, |
| "step": 50 |
| }, |
| { |
| "epoch": 0.14, |
| "learning_rate": 0.00015000000000000001, |
| "loss": 1.0605, |
| "step": 51 |
| }, |
| { |
| "epoch": 0.14, |
| "learning_rate": 0.00015294117647058822, |
| "loss": 1.1052, |
| "step": 52 |
| }, |
| { |
| "epoch": 0.14, |
| "learning_rate": 0.00015588235294117648, |
| "loss": 1.1584, |
| "step": 53 |
| }, |
| { |
| "epoch": 0.14, |
| "learning_rate": 0.0001588235294117647, |
| "loss": 1.1052, |
| "step": 54 |
| }, |
| { |
| "epoch": 0.15, |
| "learning_rate": 0.00016176470588235295, |
| "loss": 1.1195, |
| "step": 55 |
| }, |
| { |
| "epoch": 0.15, |
| "learning_rate": 0.0001647058823529412, |
| "loss": 1.1938, |
| "step": 56 |
| }, |
| { |
| "epoch": 0.15, |
| "learning_rate": 0.00016764705882352942, |
| "loss": 1.105, |
| "step": 57 |
| }, |
| { |
| "epoch": 0.16, |
| "learning_rate": 0.00017058823529411766, |
| "loss": 1.0179, |
| "step": 58 |
| }, |
| { |
| "epoch": 0.16, |
| "learning_rate": 0.0001735294117647059, |
| "loss": 1.0184, |
| "step": 59 |
| }, |
| { |
| "epoch": 0.16, |
| "learning_rate": 0.00017647058823529413, |
| "loss": 1.1753, |
| "step": 60 |
| }, |
| { |
| "epoch": 0.16, |
| "learning_rate": 0.00017941176470588236, |
| "loss": 1.1388, |
| "step": 61 |
| }, |
| { |
| "epoch": 0.17, |
| "learning_rate": 0.0001823529411764706, |
| "loss": 1.0544, |
| "step": 62 |
| }, |
| { |
| "epoch": 0.17, |
| "learning_rate": 0.00018529411764705883, |
| "loss": 1.0571, |
| "step": 63 |
| }, |
| { |
| "epoch": 0.17, |
| "learning_rate": 0.00018823529411764707, |
| "loss": 1.124, |
| "step": 64 |
| }, |
| { |
| "epoch": 0.17, |
| "learning_rate": 0.0001911764705882353, |
| "loss": 1.1123, |
| "step": 65 |
| }, |
| { |
| "epoch": 0.18, |
| "learning_rate": 0.00019411764705882354, |
| "loss": 1.099, |
| "step": 66 |
| }, |
| { |
| "epoch": 0.18, |
| "learning_rate": 0.00019705882352941177, |
| "loss": 1.0998, |
| "step": 67 |
| }, |
| { |
| "epoch": 0.18, |
| "learning_rate": 0.0002, |
| "loss": 1.0141, |
| "step": 68 |
| }, |
| { |
| "epoch": 0.18, |
| "learning_rate": 0.00019999989577980243, |
| "loss": 1.128, |
| "step": 69 |
| }, |
| { |
| "epoch": 0.19, |
| "learning_rate": 0.00019999958311942687, |
| "loss": 1.0722, |
| "step": 70 |
| }, |
| { |
| "epoch": 0.19, |
| "learning_rate": 0.00019999906201952506, |
| "loss": 1.1066, |
| "step": 71 |
| }, |
| { |
| "epoch": 0.19, |
| "learning_rate": 0.00019999833248118322, |
| "loss": 1.0057, |
| "step": 72 |
| }, |
| { |
| "epoch": 0.2, |
| "learning_rate": 0.00019999739450592192, |
| "loss": 1.1209, |
| "step": 73 |
| }, |
| { |
| "epoch": 0.2, |
| "learning_rate": 0.00019999624809569636, |
| "loss": 1.1125, |
| "step": 74 |
| }, |
| { |
| "epoch": 0.2, |
| "learning_rate": 0.00019999489325289607, |
| "loss": 1.1419, |
| "step": 75 |
| }, |
| { |
| "epoch": 0.2, |
| "learning_rate": 0.00019999332998034514, |
| "loss": 1.043, |
| "step": 76 |
| }, |
| { |
| "epoch": 0.21, |
| "learning_rate": 0.000199991558281302, |
| "loss": 1.1463, |
| "step": 77 |
| }, |
| { |
| "epoch": 0.21, |
| "learning_rate": 0.00019998957815945962, |
| "loss": 1.0848, |
| "step": 78 |
| }, |
| { |
| "epoch": 0.21, |
| "learning_rate": 0.0001999873896189454, |
| "loss": 1.0968, |
| "step": 79 |
| }, |
| { |
| "epoch": 0.21, |
| "learning_rate": 0.00019998499266432108, |
| "loss": 1.1443, |
| "step": 80 |
| }, |
| { |
| "epoch": 0.22, |
| "learning_rate": 0.00019998238730058296, |
| "loss": 1.1057, |
| "step": 81 |
| }, |
| { |
| "epoch": 0.22, |
| "learning_rate": 0.0001999795735331616, |
| "loss": 1.0423, |
| "step": 82 |
| }, |
| { |
| "epoch": 0.22, |
| "learning_rate": 0.00019997655136792206, |
| "loss": 1.0467, |
| "step": 83 |
| }, |
| { |
| "epoch": 0.22, |
| "learning_rate": 0.00019997332081116373, |
| "loss": 1.1127, |
| "step": 84 |
| }, |
| { |
| "epoch": 0.23, |
| "learning_rate": 0.00019996988186962041, |
| "loss": 1.0337, |
| "step": 85 |
| }, |
| { |
| "epoch": 0.23, |
| "learning_rate": 0.0001999662345504603, |
| "loss": 1.109, |
| "step": 86 |
| }, |
| { |
| "epoch": 0.23, |
| "learning_rate": 0.0001999623788612858, |
| "loss": 0.989, |
| "step": 87 |
| }, |
| { |
| "epoch": 0.24, |
| "learning_rate": 0.00019995831481013374, |
| "loss": 1.0286, |
| "step": 88 |
| }, |
| { |
| "epoch": 0.24, |
| "learning_rate": 0.00019995404240547525, |
| "loss": 1.084, |
| "step": 89 |
| }, |
| { |
| "epoch": 0.24, |
| "learning_rate": 0.00019994956165621578, |
| "loss": 1.0103, |
| "step": 90 |
| }, |
| { |
| "epoch": 0.24, |
| "learning_rate": 0.00019994487257169502, |
| "loss": 1.0856, |
| "step": 91 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 0.00019993997516168689, |
| "loss": 1.0699, |
| "step": 92 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 0.00019993486943639953, |
| "loss": 1.0628, |
| "step": 93 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 0.00019992955540647543, |
| "loss": 1.0797, |
| "step": 94 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 0.0001999240330829911, |
| "loss": 1.0466, |
| "step": 95 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 0.0001999183024774573, |
| "loss": 1.0348, |
| "step": 96 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 0.00019991236360181896, |
| "loss": 1.0716, |
| "step": 97 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 0.0001999062164684551, |
| "loss": 1.0901, |
| "step": 98 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 0.0001998998610901788, |
| "loss": 0.978, |
| "step": 99 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 0.00019989329748023725, |
| "loss": 1.0689, |
| "step": 100 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 0.00019988652565231164, |
| "loss": 1.0205, |
| "step": 101 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 0.00019987954562051725, |
| "loss": 1.0989, |
| "step": 102 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 0.00019987235739940323, |
| "loss": 0.9357, |
| "step": 103 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 0.00019986496100395275, |
| "loss": 1.0865, |
| "step": 104 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 0.0001998573564495829, |
| "loss": 1.0767, |
| "step": 105 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 0.00019984954375214465, |
| "loss": 0.9445, |
| "step": 106 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 0.00019984152292792278, |
| "loss": 1.0538, |
| "step": 107 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 0.00019983329399363598, |
| "loss": 1.1103, |
| "step": 108 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 0.00019982485696643662, |
| "loss": 1.047, |
| "step": 109 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 0.0001998162118639109, |
| "loss": 1.0657, |
| "step": 110 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 0.0001998073587040787, |
| "loss": 0.9985, |
| "step": 111 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 0.0001997982975053936, |
| "loss": 0.9973, |
| "step": 112 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 0.00019978902828674276, |
| "loss": 1.0171, |
| "step": 113 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 0.00019977955106744704, |
| "loss": 1.0689, |
| "step": 114 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 0.0001997698658672607, |
| "loss": 1.1143, |
| "step": 115 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 0.0001997599727063717, |
| "loss": 1.048, |
| "step": 116 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 0.00019974987160540134, |
| "loss": 1.0978, |
| "step": 117 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 0.0001997395625854044, |
| "loss": 0.9617, |
| "step": 118 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 0.00019972904566786903, |
| "loss": 1.0407, |
| "step": 119 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 0.00019971832087471676, |
| "loss": 1.0176, |
| "step": 120 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 0.00019970738822830235, |
| "loss": 0.9936, |
| "step": 121 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 0.00019969624775141391, |
| "loss": 0.972, |
| "step": 122 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 0.00019968489946727266, |
| "loss": 0.9767, |
| "step": 123 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 0.000199673343399533, |
| "loss": 0.9402, |
| "step": 124 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 0.00019966157957228246, |
| "loss": 1.0536, |
| "step": 125 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 0.00019964960801004163, |
| "loss": 0.9969, |
| "step": 126 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 0.000199637428737764, |
| "loss": 0.9877, |
| "step": 127 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 0.00019962504178083618, |
| "loss": 1.0064, |
| "step": 128 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 0.00019961244716507756, |
| "loss": 0.9601, |
| "step": 129 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 0.0001995996449167404, |
| "loss": 1.1492, |
| "step": 130 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 0.0001995866350625098, |
| "loss": 1.0682, |
| "step": 131 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 0.00019957341762950344, |
| "loss": 1.1, |
| "step": 132 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 0.00019955999264527195, |
| "loss": 1.0487, |
| "step": 133 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 0.00019954636013779826, |
| "loss": 0.9932, |
| "step": 134 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 0.00019953252013549813, |
| "loss": 0.9758, |
| "step": 135 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 0.0001995184726672197, |
| "loss": 1.0256, |
| "step": 136 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 0.00019950421776224352, |
| "loss": 1.0897, |
| "step": 137 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 0.00019948975545028264, |
| "loss": 1.0819, |
| "step": 138 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 0.00019947508576148232, |
| "loss": 1.0318, |
| "step": 139 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 0.0001994602087264201, |
| "loss": 1.0647, |
| "step": 140 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 0.0001994451243761057, |
| "loss": 1.0951, |
| "step": 141 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 0.0001994298327419811, |
| "loss": 0.9349, |
| "step": 142 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 0.0001994143338559202, |
| "loss": 0.9575, |
| "step": 143 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 0.00019939862775022893, |
| "loss": 0.9666, |
| "step": 144 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 0.00019938271445764513, |
| "loss": 0.965, |
| "step": 145 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 0.0001993665940113386, |
| "loss": 1.0609, |
| "step": 146 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 0.0001993502664449108, |
| "loss": 1.0021, |
| "step": 147 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 0.00019933373179239502, |
| "loss": 1.0661, |
| "step": 148 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 0.00019931699008825612, |
| "loss": 0.9584, |
| "step": 149 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 0.00019930004136739054, |
| "loss": 1.0245, |
| "step": 150 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 0.00019928288566512637, |
| "loss": 0.8984, |
| "step": 151 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 0.0001992655230172229, |
| "loss": 1.2029, |
| "step": 152 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 0.000199247953459871, |
| "loss": 1.0373, |
| "step": 153 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 0.0001992301770296927, |
| "loss": 1.0066, |
| "step": 154 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 0.00019921219376374123, |
| "loss": 0.9712, |
| "step": 155 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 0.000199194003699501, |
| "loss": 1.1034, |
| "step": 156 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 0.00019917560687488742, |
| "loss": 1.0147, |
| "step": 157 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 0.00019915700332824696, |
| "loss": 0.9705, |
| "step": 158 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 0.0001991381930983569, |
| "loss": 0.9764, |
| "step": 159 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 0.00019911917622442537, |
| "loss": 1.0389, |
| "step": 160 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 0.0001990999527460912, |
| "loss": 0.9407, |
| "step": 161 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 0.0001990805227034239, |
| "loss": 1.0459, |
| "step": 162 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 0.00019906088613692345, |
| "loss": 0.9424, |
| "step": 163 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 0.0001990410430875205, |
| "loss": 0.9685, |
| "step": 164 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 0.00019902099359657598, |
| "loss": 1.0473, |
| "step": 165 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 0.00019900073770588104, |
| "loss": 0.9768, |
| "step": 166 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 0.00019898027545765717, |
| "loss": 0.9441, |
| "step": 167 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 0.00019895960689455598, |
| "loss": 1.0169, |
| "step": 168 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 0.0001989387320596591, |
| "loss": 1.0654, |
| "step": 169 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 0.0001989176509964781, |
| "loss": 0.986, |
| "step": 170 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 0.0001988963637489545, |
| "loss": 0.9851, |
| "step": 171 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 0.0001988748703614594, |
| "loss": 1.0164, |
| "step": 172 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 0.00019885317087879378, |
| "loss": 1.0698, |
| "step": 173 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 0.00019883126534618817, |
| "loss": 0.9414, |
| "step": 174 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 0.00019880915380930244, |
| "loss": 0.99, |
| "step": 175 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 0.00019878683631422605, |
| "loss": 1.016, |
| "step": 176 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 0.00019876431290747766, |
| "loss": 0.9884, |
| "step": 177 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 0.0001987415836360051, |
| "loss": 0.9963, |
| "step": 178 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 0.00019871864854718543, |
| "loss": 1.0335, |
| "step": 179 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 0.00019869550768882455, |
| "loss": 1.0117, |
| "step": 180 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 0.00019867216110915745, |
| "loss": 0.9496, |
| "step": 181 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 0.00019864860885684778, |
| "loss": 0.9617, |
| "step": 182 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 0.00019862485098098796, |
| "loss": 0.9676, |
| "step": 183 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 0.00019860088753109896, |
| "loss": 1.0207, |
| "step": 184 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 0.00019857671855713036, |
| "loss": 1.0063, |
| "step": 185 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 0.00019855234410946, |
| "loss": 0.9253, |
| "step": 186 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 0.0001985277642388941, |
| "loss": 1.0564, |
| "step": 187 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 0.0001985029789966671, |
| "loss": 0.9109, |
| "step": 188 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 0.00019847798843444138, |
| "loss": 0.9791, |
| "step": 189 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 0.00019845279260430739, |
| "loss": 0.9268, |
| "step": 190 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 0.00019842739155878338, |
| "loss": 0.9634, |
| "step": 191 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 0.00019840178535081545, |
| "loss": 0.9742, |
| "step": 192 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 0.00019837597403377726, |
| "loss": 1.0255, |
| "step": 193 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 0.00019834995766147002, |
| "loss": 1.0062, |
| "step": 194 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 0.00019832373628812233, |
| "loss": 0.9388, |
| "step": 195 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 0.0001982973099683902, |
| "loss": 0.9413, |
| "step": 196 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 0.00019827067875735667, |
| "loss": 0.9602, |
| "step": 197 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 0.000198243842710532, |
| "loss": 0.9392, |
| "step": 198 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 0.00019821680188385332, |
| "loss": 1.01, |
| "step": 199 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 0.00019818955633368464, |
| "loss": 1.0127, |
| "step": 200 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 0.0001981621061168167, |
| "loss": 0.9738, |
| "step": 201 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 0.00019813445129046684, |
| "loss": 0.9315, |
| "step": 202 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 0.0001981065919122789, |
| "loss": 0.9504, |
| "step": 203 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 0.00019807852804032305, |
| "loss": 1.0175, |
| "step": 204 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 0.00019805025973309578, |
| "loss": 0.9593, |
| "step": 205 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 0.00019802178704951963, |
| "loss": 1.0194, |
| "step": 206 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 0.00019799311004894314, |
| "loss": 0.9647, |
| "step": 207 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 0.00019796422879114084, |
| "loss": 1.0243, |
| "step": 208 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 0.00019793514333631287, |
| "loss": 0.9245, |
| "step": 209 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 0.0001979058537450851, |
| "loss": 0.9996, |
| "step": 210 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 0.00019787636007850888, |
| "loss": 0.9909, |
| "step": 211 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 0.0001978466623980609, |
| "loss": 0.9598, |
| "step": 212 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 0.00019781676076564315, |
| "loss": 0.9848, |
| "step": 213 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 0.00019778665524358268, |
| "loss": 0.9079, |
| "step": 214 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 0.00019775634589463156, |
| "loss": 0.9714, |
| "step": 215 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 0.00019772583278196678, |
| "loss": 0.9965, |
| "step": 216 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 0.0001976951159691899, |
| "loss": 0.9897, |
| "step": 217 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 0.00019766419552032723, |
| "loss": 0.9881, |
| "step": 218 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 0.00019763307149982942, |
| "loss": 1.033, |
| "step": 219 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 0.00019760174397257156, |
| "loss": 0.9319, |
| "step": 220 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 0.00019757021300385286, |
| "loss": 0.9121, |
| "step": 221 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 0.00019753847865939657, |
| "loss": 0.9639, |
| "step": 222 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 0.0001975065410053499, |
| "loss": 0.9496, |
| "step": 223 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 0.00019747440010828383, |
| "loss": 0.9795, |
| "step": 224 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 0.00019744205603519294, |
| "loss": 0.9505, |
| "step": 225 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 0.00019740950885349537, |
| "loss": 0.9199, |
| "step": 226 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 0.0001973767586310326, |
| "loss": 1.0168, |
| "step": 227 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 0.0001973438054360693, |
| "loss": 1.0252, |
| "step": 228 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 0.00019731064933729324, |
| "loss": 0.8953, |
| "step": 229 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 0.00019727729040381516, |
| "loss": 0.9056, |
| "step": 230 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 0.00019724372870516852, |
| "loss": 0.9637, |
| "step": 231 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 0.00019720996431130946, |
| "loss": 0.9491, |
| "step": 232 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 0.00019717599729261663, |
| "loss": 0.9682, |
| "step": 233 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 0.00019714182771989102, |
| "loss": 0.894, |
| "step": 234 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 0.00019710745566435577, |
| "loss": 0.9926, |
| "step": 235 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 0.00019707288119765623, |
| "loss": 0.9435, |
| "step": 236 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 0.00019703810439185946, |
| "loss": 0.9974, |
| "step": 237 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 0.00019700312531945442, |
| "loss": 0.9661, |
| "step": 238 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 0.0001969679440533516, |
| "loss": 0.9843, |
| "step": 239 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 0.000196932560666883, |
| "loss": 0.9324, |
| "step": 240 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 0.00019689697523380187, |
| "loss": 0.9881, |
| "step": 241 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 0.0001968611878282826, |
| "loss": 0.926, |
| "step": 242 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 0.00019682519852492068, |
| "loss": 0.9549, |
| "step": 243 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 0.00019678900739873226, |
| "loss": 1.023, |
| "step": 244 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 0.00019675261452515433, |
| "loss": 0.9689, |
| "step": 245 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 0.00019671601998004433, |
| "loss": 0.9883, |
| "step": 246 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 0.0001966792238396801, |
| "loss": 0.9713, |
| "step": 247 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 0.00019664222618075958, |
| "loss": 0.9576, |
| "step": 248 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 0.00019660502708040092, |
| "loss": 0.9937, |
| "step": 249 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 0.000196567626616142, |
| "loss": 0.9394, |
| "step": 250 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 0.00019653002486594057, |
| "loss": 1.045, |
| "step": 251 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 0.0001964922219081738, |
| "loss": 0.9896, |
| "step": 252 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 0.00019645421782163837, |
| "loss": 0.9953, |
| "step": 253 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 0.0001964160126855501, |
| "loss": 0.8679, |
| "step": 254 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 0.000196377606579544, |
| "loss": 0.9652, |
| "step": 255 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 0.00019633899958367384, |
| "loss": 1.0166, |
| "step": 256 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 0.00019630019177841224, |
| "loss": 0.9889, |
| "step": 257 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 0.00019626118324465034, |
| "loss": 0.9839, |
| "step": 258 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 0.00019622197406369762, |
| "loss": 0.9726, |
| "step": 259 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 0.00019618256431728194, |
| "loss": 0.9046, |
| "step": 260 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 0.00019614295408754907, |
| "loss": 0.9599, |
| "step": 261 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 0.00019610314345706273, |
| "loss": 0.9468, |
| "step": 262 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 0.0001960631325088044, |
| "loss": 0.9581, |
| "step": 263 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 0.000196022921326173, |
| "loss": 0.9381, |
| "step": 264 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 0.00019598250999298494, |
| "loss": 0.9713, |
| "step": 265 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 0.00019594189859347373, |
| "loss": 0.9037, |
| "step": 266 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 0.00019590108721228994, |
| "loss": 0.9421, |
| "step": 267 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 0.00019586007593450097, |
| "loss": 0.9627, |
| "step": 268 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 0.0001958188648455909, |
| "loss": 1.0034, |
| "step": 269 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 0.00019577745403146025, |
| "loss": 0.9238, |
| "step": 270 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 0.0001957358435784259, |
| "loss": 0.929, |
| "step": 271 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 0.0001956940335732209, |
| "loss": 0.9437, |
| "step": 272 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 0.0001956520241029941, |
| "loss": 1.0101, |
| "step": 273 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 0.00019560981525531027, |
| "loss": 0.9391, |
| "step": 274 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 0.0001955674071181497, |
| "loss": 0.927, |
| "step": 275 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 0.000195524799779908, |
| "loss": 0.9592, |
| "step": 276 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 0.00019548199332939618, |
| "loss": 0.9532, |
| "step": 277 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 0.0001954389878558401, |
| "loss": 0.9326, |
| "step": 278 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 0.00019539578344888057, |
| "loss": 0.9665, |
| "step": 279 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 0.000195352380198573, |
| "loss": 0.9008, |
| "step": 280 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 0.00019530877819538736, |
| "loss": 0.8865, |
| "step": 281 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 0.00019526497753020777, |
| "loss": 1.0011, |
| "step": 282 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 0.00019522097829433252, |
| "loss": 0.9342, |
| "step": 283 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 0.00019517678057947384, |
| "loss": 0.9312, |
| "step": 284 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 0.00019513238447775756, |
| "loss": 0.8893, |
| "step": 285 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 0.00019508779008172312, |
| "loss": 1.0281, |
| "step": 286 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 0.00019504299748432326, |
| "loss": 0.9767, |
| "step": 287 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 0.00019499800677892385, |
| "loss": 0.9419, |
| "step": 288 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 0.00019495281805930367, |
| "loss": 0.9691, |
| "step": 289 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 0.00019490743141965428, |
| "loss": 1.0071, |
| "step": 290 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 0.0001948618469545798, |
| "loss": 0.9404, |
| "step": 291 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 0.0001948160647590966, |
| "loss": 0.9162, |
| "step": 292 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 0.0001947700849286333, |
| "loss": 0.9517, |
| "step": 293 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 0.0001947239075590305, |
| "loss": 0.9493, |
| "step": 294 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 0.00019467753274654038, |
| "loss": 0.8852, |
| "step": 295 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 0.0001946309605878269, |
| "loss": 0.9185, |
| "step": 296 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 0.00019458419117996515, |
| "loss": 0.9314, |
| "step": 297 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 0.00019453722462044155, |
| "loss": 0.9895, |
| "step": 298 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 0.0001944900610071533, |
| "loss": 0.8893, |
| "step": 299 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 0.00019444270043840852, |
| "loss": 0.9036, |
| "step": 300 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 0.0001943951430129257, |
| "loss": 0.9395, |
| "step": 301 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 0.00019434738882983372, |
| "loss": 0.8959, |
| "step": 302 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 0.00019429943798867163, |
| "loss": 0.9044, |
| "step": 303 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 0.00019425129058938832, |
| "loss": 0.8753, |
| "step": 304 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 0.00019420294673234243, |
| "loss": 0.8836, |
| "step": 305 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 0.00019415440651830208, |
| "loss": 0.9836, |
| "step": 306 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 0.0001941056700484447, |
| "loss": 0.8835, |
| "step": 307 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 0.00019405673742435678, |
| "loss": 0.924, |
| "step": 308 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 0.00019400760874803364, |
| "loss": 0.9787, |
| "step": 309 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 0.00019395828412187936, |
| "loss": 0.8915, |
| "step": 310 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 0.0001939087636487063, |
| "loss": 0.9288, |
| "step": 311 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 0.00019385904743173516, |
| "loss": 0.9518, |
| "step": 312 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 0.00019380913557459466, |
| "loss": 0.995, |
| "step": 313 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 0.0001937590281813212, |
| "loss": 0.8268, |
| "step": 314 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 0.00019370872535635892, |
| "loss": 0.9201, |
| "step": 315 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 0.00019365822720455916, |
| "loss": 0.9488, |
| "step": 316 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 0.00019360753383118048, |
| "loss": 0.9317, |
| "step": 317 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 0.0001935566453418883, |
| "loss": 0.9108, |
| "step": 318 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 0.0001935055618427549, |
| "loss": 0.8388, |
| "step": 319 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 0.00019345428344025883, |
| "loss": 0.8801, |
| "step": 320 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 0.00019340281024128508, |
| "loss": 0.8812, |
| "step": 321 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 0.0001933511423531245, |
| "loss": 0.9829, |
| "step": 322 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 0.0001932992798834739, |
| "loss": 0.9879, |
| "step": 323 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 0.00019324722294043558, |
| "loss": 0.9408, |
| "step": 324 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 0.00019319497163251728, |
| "loss": 0.9135, |
| "step": 325 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 0.0001931425260686318, |
| "loss": 0.9566, |
| "step": 326 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 0.00019308988635809688, |
| "loss": 0.9217, |
| "step": 327 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 0.00019303705261063497, |
| "loss": 0.9124, |
| "step": 328 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 0.0001929840249363729, |
| "loss": 0.9399, |
| "step": 329 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 0.00019293080344584177, |
| "loss": 0.9403, |
| "step": 330 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 0.00019287738824997673, |
| "loss": 0.9402, |
| "step": 331 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 0.00019282377946011652, |
| "loss": 0.9684, |
| "step": 332 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 0.0001927699771880036, |
| "loss": 0.8886, |
| "step": 333 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 0.0001927159815457836, |
| "loss": 0.962, |
| "step": 334 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 0.0001926617926460053, |
| "loss": 0.9263, |
| "step": 335 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 0.00019260741060162016, |
| "loss": 0.8352, |
| "step": 336 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 0.0001925528355259824, |
| "loss": 0.9003, |
| "step": 337 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 0.0001924980675328485, |
| "loss": 0.9465, |
| "step": 338 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 0.00019244310673637708, |
| "loss": 0.931, |
| "step": 339 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 0.0001923879532511287, |
| "loss": 0.9478, |
| "step": 340 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 0.0001923326071920654, |
| "loss": 0.9568, |
| "step": 341 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 0.0001922770686745508, |
| "loss": 0.9271, |
| "step": 342 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 0.00019222133781434956, |
| "loss": 0.9552, |
| "step": 343 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 0.00019216541472762735, |
| "loss": 0.9036, |
| "step": 344 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 0.00019210929953095046, |
| "loss": 0.9299, |
| "step": 345 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 0.00019205299234128557, |
| "loss": 0.8963, |
| "step": 346 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 0.0001919964932759997, |
| "loss": 0.8909, |
| "step": 347 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 0.00019193980245285966, |
| "loss": 0.9942, |
| "step": 348 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 0.00019188291999003206, |
| "loss": 0.9191, |
| "step": 349 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 0.00019182584600608288, |
| "loss": 0.8753, |
| "step": 350 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 0.00019176858061997746, |
| "loss": 0.93, |
| "step": 351 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 0.00019171112395107985, |
| "loss": 0.9692, |
| "step": 352 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 0.00019165347611915313, |
| "loss": 0.9275, |
| "step": 353 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 0.00019159563724435852, |
| "loss": 0.9252, |
| "step": 354 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 0.0001915376074472557, |
| "loss": 0.9705, |
| "step": 355 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 0.0001914793868488021, |
| "loss": 0.9227, |
| "step": 356 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 0.00019142097557035308, |
| "loss": 0.9648, |
| "step": 357 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 0.00019136237373366125, |
| "loss": 0.9184, |
| "step": 358 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 0.0001913035814608766, |
| "loss": 0.9064, |
| "step": 359 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 0.0001912445988745459, |
| "loss": 0.8896, |
| "step": 360 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 0.00019118542609761275, |
| "loss": 0.8466, |
| "step": 361 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 0.00019112606325341703, |
| "loss": 0.8916, |
| "step": 362 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 0.00019106651046569496, |
| "loss": 0.9177, |
| "step": 363 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 0.0001910067678585786, |
| "loss": 0.9178, |
| "step": 364 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 0.00019094683555659564, |
| "loss": 0.9199, |
| "step": 365 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 0.00019088671368466925, |
| "loss": 1.0151, |
| "step": 366 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 0.00019082640236811767, |
| "loss": 0.9749, |
| "step": 367 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 0.00019076590173265406, |
| "loss": 0.8908, |
| "step": 368 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 0.00019070521190438618, |
| "loss": 0.9514, |
| "step": 369 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 0.00019064433300981613, |
| "loss": 0.9282, |
| "step": 370 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 0.00019058326517584014, |
| "loss": 0.8428, |
| "step": 371 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 0.00019052200852974819, |
| "loss": 0.851, |
| "step": 372 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 0.000190460563199224, |
| "loss": 0.87, |
| "step": 373 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 0.00019039892931234435, |
| "loss": 0.9058, |
| "step": 374 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 0.00019033710699757918, |
| "loss": 0.7081, |
| "step": 375 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 0.0001902750963837912, |
| "loss": 0.6891, |
| "step": 376 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 0.00019021289760023553, |
| "loss": 0.6149, |
| "step": 377 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 0.0001901505107765596, |
| "loss": 0.7762, |
| "step": 378 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 0.00019008793604280276, |
| "loss": 0.6228, |
| "step": 379 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 0.00019002517352939598, |
| "loss": 0.665, |
| "step": 380 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 0.00018996222336716172, |
| "loss": 0.6485, |
| "step": 381 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 0.00018989908568731357, |
| "loss": 0.6416, |
| "step": 382 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 0.00018983576062145594, |
| "loss": 0.6801, |
| "step": 383 |
| }, |
| { |
| "epoch": 1.03, |
| "learning_rate": 0.0001897722483015838, |
| "loss": 0.6572, |
| "step": 384 |
| }, |
| { |
| "epoch": 1.03, |
| "learning_rate": 0.00018970854886008257, |
| "loss": 0.6678, |
| "step": 385 |
| }, |
| { |
| "epoch": 1.03, |
| "learning_rate": 0.00018964466242972757, |
| "loss": 0.6849, |
| "step": 386 |
| }, |
| { |
| "epoch": 1.03, |
| "learning_rate": 0.00018958058914368394, |
| "loss": 0.7182, |
| "step": 387 |
| }, |
| { |
| "epoch": 1.04, |
| "learning_rate": 0.00018951632913550626, |
| "loss": 0.5926, |
| "step": 388 |
| }, |
| { |
| "epoch": 1.04, |
| "learning_rate": 0.00018945188253913835, |
| "loss": 0.6918, |
| "step": 389 |
| }, |
| { |
| "epoch": 1.04, |
| "learning_rate": 0.000189387249488913, |
| "loss": 0.623, |
| "step": 390 |
| }, |
| { |
| "epoch": 1.05, |
| "learning_rate": 0.00018932243011955154, |
| "loss": 0.626, |
| "step": 391 |
| }, |
| { |
| "epoch": 1.05, |
| "learning_rate": 0.00018925742456616374, |
| "loss": 0.6654, |
| "step": 392 |
| }, |
| { |
| "epoch": 1.05, |
| "learning_rate": 0.00018919223296424745, |
| "loss": 0.6185, |
| "step": 393 |
| }, |
| { |
| "epoch": 1.05, |
| "learning_rate": 0.0001891268554496883, |
| "loss": 0.7161, |
| "step": 394 |
| }, |
| { |
| "epoch": 1.06, |
| "learning_rate": 0.0001890612921587594, |
| "loss": 0.6797, |
| "step": 395 |
| }, |
| { |
| "epoch": 1.06, |
| "learning_rate": 0.0001889955432281212, |
| "loss": 0.6334, |
| "step": 396 |
| }, |
| { |
| "epoch": 1.06, |
| "learning_rate": 0.00018892960879482093, |
| "loss": 0.6824, |
| "step": 397 |
| }, |
| { |
| "epoch": 1.06, |
| "learning_rate": 0.00018886348899629267, |
| "loss": 0.6395, |
| "step": 398 |
| }, |
| { |
| "epoch": 1.07, |
| "learning_rate": 0.00018879718397035679, |
| "loss": 0.7226, |
| "step": 399 |
| }, |
| { |
| "epoch": 1.07, |
| "learning_rate": 0.0001887306938552197, |
| "loss": 0.6383, |
| "step": 400 |
| }, |
| { |
| "epoch": 1.07, |
| "learning_rate": 0.00018866401878947367, |
| "loss": 0.685, |
| "step": 401 |
| }, |
| { |
| "epoch": 1.07, |
| "learning_rate": 0.0001885971589120965, |
| "loss": 0.6154, |
| "step": 402 |
| }, |
| { |
| "epoch": 1.08, |
| "learning_rate": 0.00018853011436245114, |
| "loss": 0.7188, |
| "step": 403 |
| }, |
| { |
| "epoch": 1.08, |
| "learning_rate": 0.00018846288528028555, |
| "loss": 0.6322, |
| "step": 404 |
| }, |
| { |
| "epoch": 1.08, |
| "learning_rate": 0.00018839547180573226, |
| "loss": 0.6414, |
| "step": 405 |
| }, |
| { |
| "epoch": 1.09, |
| "learning_rate": 0.00018832787407930824, |
| "loss": 0.617, |
| "step": 406 |
| }, |
| { |
| "epoch": 1.09, |
| "learning_rate": 0.0001882600922419144, |
| "loss": 0.7321, |
| "step": 407 |
| }, |
| { |
| "epoch": 1.09, |
| "learning_rate": 0.0001881921264348355, |
| "loss": 0.6597, |
| "step": 408 |
| }, |
| { |
| "epoch": 1.09, |
| "learning_rate": 0.00018812397679973975, |
| "loss": 0.6869, |
| "step": 409 |
| }, |
| { |
| "epoch": 1.1, |
| "learning_rate": 0.00018805564347867848, |
| "loss": 0.6431, |
| "step": 410 |
| }, |
| { |
| "epoch": 1.1, |
| "learning_rate": 0.00018798712661408592, |
| "loss": 0.644, |
| "step": 411 |
| }, |
| { |
| "epoch": 1.1, |
| "learning_rate": 0.00018791842634877898, |
| "loss": 0.6708, |
| "step": 412 |
| }, |
| { |
| "epoch": 1.1, |
| "learning_rate": 0.0001878495428259567, |
| "loss": 0.669, |
| "step": 413 |
| }, |
| { |
| "epoch": 1.11, |
| "learning_rate": 0.00018778047618920013, |
| "loss": 0.6362, |
| "step": 414 |
| }, |
| { |
| "epoch": 1.11, |
| "learning_rate": 0.00018771122658247212, |
| "loss": 0.6393, |
| "step": 415 |
| }, |
| { |
| "epoch": 1.11, |
| "learning_rate": 0.00018764179415011682, |
| "loss": 0.7336, |
| "step": 416 |
| }, |
| { |
| "epoch": 1.11, |
| "learning_rate": 0.00018757217903685942, |
| "loss": 0.6508, |
| "step": 417 |
| }, |
| { |
| "epoch": 1.12, |
| "learning_rate": 0.00018750238138780595, |
| "loss": 0.6461, |
| "step": 418 |
| }, |
| { |
| "epoch": 1.12, |
| "learning_rate": 0.0001874324013484429, |
| "loss": 0.7184, |
| "step": 419 |
| }, |
| { |
| "epoch": 1.12, |
| "learning_rate": 0.00018736223906463696, |
| "loss": 0.678, |
| "step": 420 |
| }, |
| { |
| "epoch": 1.13, |
| "learning_rate": 0.00018729189468263467, |
| "loss": 0.696, |
| "step": 421 |
| }, |
| { |
| "epoch": 1.13, |
| "learning_rate": 0.00018722136834906214, |
| "loss": 0.6903, |
| "step": 422 |
| }, |
| { |
| "epoch": 1.13, |
| "learning_rate": 0.00018715066021092472, |
| "loss": 0.6624, |
| "step": 423 |
| }, |
| { |
| "epoch": 1.13, |
| "learning_rate": 0.0001870797704156067, |
| "loss": 0.6918, |
| "step": 424 |
| }, |
| { |
| "epoch": 1.14, |
| "learning_rate": 0.00018700869911087115, |
| "loss": 0.678, |
| "step": 425 |
| }, |
| { |
| "epoch": 1.14, |
| "learning_rate": 0.0001869374464448593, |
| "loss": 0.7096, |
| "step": 426 |
| }, |
| { |
| "epoch": 1.14, |
| "learning_rate": 0.0001868660125660905, |
| "loss": 0.6562, |
| "step": 427 |
| }, |
| { |
| "epoch": 1.14, |
| "learning_rate": 0.00018679439762346185, |
| "loss": 0.7185, |
| "step": 428 |
| }, |
| { |
| "epoch": 1.15, |
| "learning_rate": 0.00018672260176624776, |
| "loss": 0.6764, |
| "step": 429 |
| }, |
| { |
| "epoch": 1.15, |
| "learning_rate": 0.00018665062514409983, |
| "loss": 0.6265, |
| "step": 430 |
| }, |
| { |
| "epoch": 1.15, |
| "learning_rate": 0.0001865784679070464, |
| "loss": 0.6729, |
| "step": 431 |
| }, |
| { |
| "epoch": 1.16, |
| "learning_rate": 0.00018650613020549232, |
| "loss": 0.601, |
| "step": 432 |
| }, |
| { |
| "epoch": 1.16, |
| "learning_rate": 0.00018643361219021858, |
| "loss": 0.7253, |
| "step": 433 |
| }, |
| { |
| "epoch": 1.16, |
| "learning_rate": 0.000186360914012382, |
| "loss": 0.6868, |
| "step": 434 |
| }, |
| { |
| "epoch": 1.16, |
| "learning_rate": 0.00018628803582351497, |
| "loss": 0.6933, |
| "step": 435 |
| }, |
| { |
| "epoch": 1.17, |
| "learning_rate": 0.00018621497777552507, |
| "loss": 0.709, |
| "step": 436 |
| }, |
| { |
| "epoch": 1.17, |
| "learning_rate": 0.00018614174002069478, |
| "loss": 0.6883, |
| "step": 437 |
| }, |
| { |
| "epoch": 1.17, |
| "learning_rate": 0.00018606832271168114, |
| "loss": 0.6329, |
| "step": 438 |
| }, |
| { |
| "epoch": 1.17, |
| "learning_rate": 0.00018599472600151553, |
| "loss": 0.6887, |
| "step": 439 |
| }, |
| { |
| "epoch": 1.18, |
| "learning_rate": 0.00018592095004360318, |
| "loss": 0.6632, |
| "step": 440 |
| }, |
| { |
| "epoch": 1.18, |
| "learning_rate": 0.00018584699499172303, |
| "loss": 0.7142, |
| "step": 441 |
| }, |
| { |
| "epoch": 1.18, |
| "learning_rate": 0.00018577286100002723, |
| "loss": 0.6148, |
| "step": 442 |
| }, |
| { |
| "epoch": 1.18, |
| "learning_rate": 0.00018569854822304098, |
| "loss": 0.6589, |
| "step": 443 |
| }, |
| { |
| "epoch": 1.19, |
| "learning_rate": 0.00018562405681566216, |
| "loss": 0.6388, |
| "step": 444 |
| }, |
| { |
| "epoch": 1.19, |
| "learning_rate": 0.00018554938693316094, |
| "loss": 0.6908, |
| "step": 445 |
| }, |
| { |
| "epoch": 1.19, |
| "learning_rate": 0.0001854745387311795, |
| "loss": 0.7128, |
| "step": 446 |
| }, |
| { |
| "epoch": 1.2, |
| "learning_rate": 0.00018539951236573173, |
| "loss": 0.6324, |
| "step": 447 |
| }, |
| { |
| "epoch": 1.2, |
| "learning_rate": 0.0001853243079932029, |
| "loss": 0.711, |
| "step": 448 |
| }, |
| { |
| "epoch": 1.2, |
| "learning_rate": 0.0001852489257703493, |
| "loss": 0.6289, |
| "step": 449 |
| }, |
| { |
| "epoch": 1.2, |
| "learning_rate": 0.00018517336585429793, |
| "loss": 0.6123, |
| "step": 450 |
| }, |
| { |
| "epoch": 1.21, |
| "learning_rate": 0.00018509762840254615, |
| "loss": 0.6207, |
| "step": 451 |
| }, |
| { |
| "epoch": 1.21, |
| "learning_rate": 0.00018502171357296144, |
| "loss": 0.7316, |
| "step": 452 |
| }, |
| { |
| "epoch": 1.21, |
| "learning_rate": 0.00018494562152378095, |
| "loss": 0.6501, |
| "step": 453 |
| }, |
| { |
| "epoch": 1.21, |
| "learning_rate": 0.00018486935241361125, |
| "loss": 0.6923, |
| "step": 454 |
| }, |
| { |
| "epoch": 1.22, |
| "learning_rate": 0.00018479290640142798, |
| "loss": 0.6713, |
| "step": 455 |
| }, |
| { |
| "epoch": 1.22, |
| "learning_rate": 0.00018471628364657555, |
| "loss": 0.707, |
| "step": 456 |
| }, |
| { |
| "epoch": 1.22, |
| "learning_rate": 0.00018463948430876665, |
| "loss": 0.6698, |
| "step": 457 |
| }, |
| { |
| "epoch": 1.22, |
| "learning_rate": 0.00018456250854808218, |
| "loss": 0.6854, |
| "step": 458 |
| }, |
| { |
| "epoch": 1.23, |
| "learning_rate": 0.00018448535652497073, |
| "loss": 0.6277, |
| "step": 459 |
| }, |
| { |
| "epoch": 1.23, |
| "learning_rate": 0.00018440802840024822, |
| "loss": 0.6742, |
| "step": 460 |
| }, |
| { |
| "epoch": 1.23, |
| "learning_rate": 0.00018433052433509777, |
| "loss": 0.7275, |
| "step": 461 |
| }, |
| { |
| "epoch": 1.24, |
| "learning_rate": 0.0001842528444910691, |
| "loss": 0.6558, |
| "step": 462 |
| }, |
| { |
| "epoch": 1.24, |
| "learning_rate": 0.00018417498903007843, |
| "loss": 0.6533, |
| "step": 463 |
| }, |
| { |
| "epoch": 1.24, |
| "learning_rate": 0.00018409695811440796, |
| "loss": 0.656, |
| "step": 464 |
| }, |
| { |
| "epoch": 1.24, |
| "learning_rate": 0.00018401875190670567, |
| "loss": 0.6458, |
| "step": 465 |
| }, |
| { |
| "epoch": 1.25, |
| "learning_rate": 0.00018394037056998483, |
| "loss": 0.671, |
| "step": 466 |
| }, |
| { |
| "epoch": 1.25, |
| "learning_rate": 0.00018386181426762388, |
| "loss": 0.6787, |
| "step": 467 |
| }, |
| { |
| "epoch": 1.25, |
| "learning_rate": 0.00018378308316336584, |
| "loss": 0.6851, |
| "step": 468 |
| }, |
| { |
| "epoch": 1.25, |
| "learning_rate": 0.00018370417742131814, |
| "loss": 0.6534, |
| "step": 469 |
| }, |
| { |
| "epoch": 1.26, |
| "learning_rate": 0.00018362509720595225, |
| "loss": 0.7138, |
| "step": 470 |
| }, |
| { |
| "epoch": 1.26, |
| "learning_rate": 0.00018354584268210326, |
| "loss": 0.6815, |
| "step": 471 |
| }, |
| { |
| "epoch": 1.26, |
| "learning_rate": 0.0001834664140149696, |
| "loss": 0.6665, |
| "step": 472 |
| }, |
| { |
| "epoch": 1.26, |
| "learning_rate": 0.0001833868113701127, |
| "loss": 0.6895, |
| "step": 473 |
| }, |
| { |
| "epoch": 1.27, |
| "learning_rate": 0.00018330703491345669, |
| "loss": 0.649, |
| "step": 474 |
| }, |
| { |
| "epoch": 1.27, |
| "learning_rate": 0.00018322708481128785, |
| "loss": 0.7107, |
| "step": 475 |
| }, |
| { |
| "epoch": 1.27, |
| "learning_rate": 0.00018314696123025454, |
| "loss": 0.6599, |
| "step": 476 |
| }, |
| { |
| "epoch": 1.28, |
| "learning_rate": 0.00018306666433736662, |
| "loss": 0.6374, |
| "step": 477 |
| }, |
| { |
| "epoch": 1.28, |
| "learning_rate": 0.0001829861942999953, |
| "loss": 0.7152, |
| "step": 478 |
| }, |
| { |
| "epoch": 1.28, |
| "learning_rate": 0.00018290555128587264, |
| "loss": 0.6422, |
| "step": 479 |
| }, |
| { |
| "epoch": 1.28, |
| "learning_rate": 0.0001828247354630912, |
| "loss": 0.681, |
| "step": 480 |
| }, |
| { |
| "epoch": 1.29, |
| "learning_rate": 0.00018274374700010389, |
| "loss": 0.6854, |
| "step": 481 |
| }, |
| { |
| "epoch": 1.29, |
| "learning_rate": 0.00018266258606572332, |
| "loss": 0.6639, |
| "step": 482 |
| }, |
| { |
| "epoch": 1.29, |
| "learning_rate": 0.00018258125282912167, |
| "loss": 0.6273, |
| "step": 483 |
| }, |
| { |
| "epoch": 1.29, |
| "learning_rate": 0.00018249974745983023, |
| "loss": 0.6541, |
| "step": 484 |
| }, |
| { |
| "epoch": 1.3, |
| "learning_rate": 0.0001824180701277392, |
| "loss": 0.6211, |
| "step": 485 |
| }, |
| { |
| "epoch": 1.3, |
| "learning_rate": 0.00018233622100309707, |
| "loss": 0.629, |
| "step": 486 |
| }, |
| { |
| "epoch": 1.3, |
| "learning_rate": 0.0001822542002565105, |
| "loss": 0.6185, |
| "step": 487 |
| }, |
| { |
| "epoch": 1.3, |
| "learning_rate": 0.00018217200805894384, |
| "loss": 0.6584, |
| "step": 488 |
| }, |
| { |
| "epoch": 1.31, |
| "learning_rate": 0.00018208964458171884, |
| "loss": 0.6775, |
| "step": 489 |
| }, |
| { |
| "epoch": 1.31, |
| "learning_rate": 0.0001820071099965143, |
| "loss": 0.6965, |
| "step": 490 |
| }, |
| { |
| "epoch": 1.31, |
| "learning_rate": 0.00018192440447536553, |
| "loss": 0.6515, |
| "step": 491 |
| }, |
| { |
| "epoch": 1.32, |
| "learning_rate": 0.00018184152819066435, |
| "loss": 0.6289, |
| "step": 492 |
| }, |
| { |
| "epoch": 1.32, |
| "learning_rate": 0.00018175848131515837, |
| "loss": 0.6883, |
| "step": 493 |
| }, |
| { |
| "epoch": 1.32, |
| "learning_rate": 0.00018167526402195082, |
| "loss": 0.6659, |
| "step": 494 |
| }, |
| { |
| "epoch": 1.32, |
| "learning_rate": 0.0001815918764845002, |
| "loss": 0.709, |
| "step": 495 |
| }, |
| { |
| "epoch": 1.33, |
| "learning_rate": 0.00018150831887661978, |
| "loss": 0.5898, |
| "step": 496 |
| }, |
| { |
| "epoch": 1.33, |
| "learning_rate": 0.00018142459137247737, |
| "loss": 0.6451, |
| "step": 497 |
| }, |
| { |
| "epoch": 1.33, |
| "learning_rate": 0.00018134069414659494, |
| "loss": 0.7076, |
| "step": 498 |
| }, |
| { |
| "epoch": 1.33, |
| "learning_rate": 0.00018125662737384813, |
| "loss": 0.7521, |
| "step": 499 |
| }, |
| { |
| "epoch": 1.34, |
| "learning_rate": 0.00018117239122946615, |
| "loss": 0.6861, |
| "step": 500 |
| }, |
| { |
| "epoch": 1.34, |
| "learning_rate": 0.00018108798588903105, |
| "loss": 0.6585, |
| "step": 501 |
| }, |
| { |
| "epoch": 1.34, |
| "learning_rate": 0.00018100341152847772, |
| "loss": 0.671, |
| "step": 502 |
| }, |
| { |
| "epoch": 1.34, |
| "learning_rate": 0.00018091866832409332, |
| "loss": 0.757, |
| "step": 503 |
| }, |
| { |
| "epoch": 1.35, |
| "learning_rate": 0.00018083375645251684, |
| "loss": 0.6589, |
| "step": 504 |
| }, |
| { |
| "epoch": 1.35, |
| "learning_rate": 0.000180748676090739, |
| "loss": 0.6764, |
| "step": 505 |
| }, |
| { |
| "epoch": 1.35, |
| "learning_rate": 0.00018066342741610155, |
| "loss": 0.6746, |
| "step": 506 |
| }, |
| { |
| "epoch": 1.36, |
| "learning_rate": 0.00018057801060629726, |
| "loss": 0.6771, |
| "step": 507 |
| }, |
| { |
| "epoch": 1.36, |
| "learning_rate": 0.0001804924258393692, |
| "loss": 0.6275, |
| "step": 508 |
| }, |
| { |
| "epoch": 1.36, |
| "learning_rate": 0.00018040667329371064, |
| "loss": 0.7131, |
| "step": 509 |
| }, |
| { |
| "epoch": 1.36, |
| "learning_rate": 0.00018032075314806448, |
| "loss": 0.655, |
| "step": 510 |
| }, |
| { |
| "epoch": 1.37, |
| "learning_rate": 0.00018023466558152307, |
| "loss": 0.6297, |
| "step": 511 |
| }, |
| { |
| "epoch": 1.37, |
| "learning_rate": 0.00018014841077352762, |
| "loss": 0.6808, |
| "step": 512 |
| }, |
| { |
| "epoch": 1.37, |
| "learning_rate": 0.000180061988903868, |
| "loss": 0.6383, |
| "step": 513 |
| }, |
| { |
| "epoch": 1.37, |
| "learning_rate": 0.00017997540015268234, |
| "loss": 0.7104, |
| "step": 514 |
| }, |
| { |
| "epoch": 1.38, |
| "learning_rate": 0.0001798886447004565, |
| "loss": 0.6357, |
| "step": 515 |
| }, |
| { |
| "epoch": 1.38, |
| "learning_rate": 0.000179801722728024, |
| "loss": 0.6723, |
| "step": 516 |
| }, |
| { |
| "epoch": 1.38, |
| "learning_rate": 0.0001797146344165652, |
| "loss": 0.6659, |
| "step": 517 |
| }, |
| { |
| "epoch": 1.39, |
| "learning_rate": 0.0001796273799476074, |
| "loss": 0.6292, |
| "step": 518 |
| }, |
| { |
| "epoch": 1.39, |
| "learning_rate": 0.00017953995950302417, |
| "loss": 0.6192, |
| "step": 519 |
| }, |
| { |
| "epoch": 1.39, |
| "learning_rate": 0.00017945237326503507, |
| "loss": 0.6987, |
| "step": 520 |
| }, |
| { |
| "epoch": 1.39, |
| "learning_rate": 0.00017936462141620506, |
| "loss": 0.6726, |
| "step": 521 |
| }, |
| { |
| "epoch": 1.4, |
| "learning_rate": 0.00017927670413944458, |
| "loss": 0.7022, |
| "step": 522 |
| }, |
| { |
| "epoch": 1.4, |
| "learning_rate": 0.00017918862161800866, |
| "loss": 0.6808, |
| "step": 523 |
| }, |
| { |
| "epoch": 1.4, |
| "learning_rate": 0.00017910037403549693, |
| "loss": 0.6427, |
| "step": 524 |
| }, |
| { |
| "epoch": 1.4, |
| "learning_rate": 0.00017901196157585293, |
| "loss": 0.6269, |
| "step": 525 |
| }, |
| { |
| "epoch": 1.41, |
| "learning_rate": 0.00017892338442336399, |
| "loss": 0.682, |
| "step": 526 |
| }, |
| { |
| "epoch": 1.41, |
| "learning_rate": 0.00017883464276266064, |
| "loss": 0.6886, |
| "step": 527 |
| }, |
| { |
| "epoch": 1.41, |
| "learning_rate": 0.0001787457367787164, |
| "loss": 0.6857, |
| "step": 528 |
| }, |
| { |
| "epoch": 1.41, |
| "learning_rate": 0.00017865666665684718, |
| "loss": 0.6326, |
| "step": 529 |
| }, |
| { |
| "epoch": 1.42, |
| "learning_rate": 0.00017856743258271113, |
| "loss": 0.641, |
| "step": 530 |
| }, |
| { |
| "epoch": 1.42, |
| "learning_rate": 0.00017847803474230813, |
| "loss": 0.6663, |
| "step": 531 |
| }, |
| { |
| "epoch": 1.42, |
| "learning_rate": 0.00017838847332197938, |
| "loss": 0.6608, |
| "step": 532 |
| }, |
| { |
| "epoch": 1.43, |
| "learning_rate": 0.00017829874850840705, |
| "loss": 0.6847, |
| "step": 533 |
| }, |
| { |
| "epoch": 1.43, |
| "learning_rate": 0.0001782088604886139, |
| "loss": 0.6712, |
| "step": 534 |
| }, |
| { |
| "epoch": 1.43, |
| "learning_rate": 0.00017811880944996285, |
| "loss": 0.6444, |
| "step": 535 |
| }, |
| { |
| "epoch": 1.43, |
| "learning_rate": 0.00017802859558015664, |
| "loss": 0.6284, |
| "step": 536 |
| }, |
| { |
| "epoch": 1.44, |
| "learning_rate": 0.00017793821906723748, |
| "loss": 0.6374, |
| "step": 537 |
| }, |
| { |
| "epoch": 1.44, |
| "learning_rate": 0.00017784768009958648, |
| "loss": 0.6239, |
| "step": 538 |
| }, |
| { |
| "epoch": 1.44, |
| "learning_rate": 0.00017775697886592345, |
| "loss": 0.683, |
| "step": 539 |
| }, |
| { |
| "epoch": 1.44, |
| "learning_rate": 0.00017766611555530636, |
| "loss": 0.6665, |
| "step": 540 |
| }, |
| { |
| "epoch": 1.45, |
| "learning_rate": 0.00017757509035713106, |
| "loss": 0.6936, |
| "step": 541 |
| }, |
| { |
| "epoch": 1.45, |
| "learning_rate": 0.00017748390346113087, |
| "loss": 0.5766, |
| "step": 542 |
| }, |
| { |
| "epoch": 1.45, |
| "learning_rate": 0.0001773925550573761, |
| "loss": 0.6645, |
| "step": 543 |
| }, |
| { |
| "epoch": 1.45, |
| "learning_rate": 0.0001773010453362737, |
| "loss": 0.624, |
| "step": 544 |
| }, |
| { |
| "epoch": 1.46, |
| "learning_rate": 0.00017720937448856692, |
| "loss": 0.6882, |
| "step": 545 |
| }, |
| { |
| "epoch": 1.46, |
| "learning_rate": 0.00017711754270533482, |
| "loss": 0.7085, |
| "step": 546 |
| }, |
| { |
| "epoch": 1.46, |
| "learning_rate": 0.00017702555017799196, |
| "loss": 0.6388, |
| "step": 547 |
| }, |
| { |
| "epoch": 1.47, |
| "learning_rate": 0.00017693339709828792, |
| "loss": 0.6672, |
| "step": 548 |
| }, |
| { |
| "epoch": 1.47, |
| "learning_rate": 0.00017684108365830688, |
| "loss": 0.7338, |
| "step": 549 |
| }, |
| { |
| "epoch": 1.47, |
| "learning_rate": 0.00017674861005046743, |
| "loss": 0.6464, |
| "step": 550 |
| }, |
| { |
| "epoch": 1.47, |
| "learning_rate": 0.00017665597646752187, |
| "loss": 0.6054, |
| "step": 551 |
| }, |
| { |
| "epoch": 1.48, |
| "learning_rate": 0.00017656318310255604, |
| "loss": 0.65, |
| "step": 552 |
| }, |
| { |
| "epoch": 1.48, |
| "learning_rate": 0.00017647023014898876, |
| "loss": 0.6273, |
| "step": 553 |
| }, |
| { |
| "epoch": 1.48, |
| "learning_rate": 0.00017637711780057157, |
| "loss": 0.699, |
| "step": 554 |
| }, |
| { |
| "epoch": 1.48, |
| "learning_rate": 0.0001762838462513882, |
| "loss": 0.6465, |
| "step": 555 |
| }, |
| { |
| "epoch": 1.49, |
| "learning_rate": 0.00017619041569585418, |
| "loss": 0.6638, |
| "step": 556 |
| }, |
| { |
| "epoch": 1.49, |
| "learning_rate": 0.00017609682632871665, |
| "loss": 0.6732, |
| "step": 557 |
| }, |
| { |
| "epoch": 1.49, |
| "learning_rate": 0.00017600307834505355, |
| "loss": 0.6909, |
| "step": 558 |
| }, |
| { |
| "epoch": 1.49, |
| "learning_rate": 0.00017590917194027362, |
| "loss": 0.6574, |
| "step": 559 |
| }, |
| { |
| "epoch": 1.5, |
| "learning_rate": 0.0001758151073101157, |
| "loss": 0.5999, |
| "step": 560 |
| }, |
| { |
| "epoch": 1.5, |
| "learning_rate": 0.00017572088465064848, |
| "loss": 0.6487, |
| "step": 561 |
| }, |
| { |
| "epoch": 1.5, |
| "learning_rate": 0.00017562650415827005, |
| "loss": 0.6449, |
| "step": 562 |
| }, |
| { |
| "epoch": 1.51, |
| "learning_rate": 0.00017553196602970746, |
| "loss": 0.6465, |
| "step": 563 |
| }, |
| { |
| "epoch": 1.51, |
| "learning_rate": 0.0001754372704620164, |
| "loss": 0.6379, |
| "step": 564 |
| }, |
| { |
| "epoch": 1.51, |
| "learning_rate": 0.0001753424176525807, |
| "loss": 0.6536, |
| "step": 565 |
| }, |
| { |
| "epoch": 1.51, |
| "learning_rate": 0.00017524740779911185, |
| "loss": 0.6437, |
| "step": 566 |
| }, |
| { |
| "epoch": 1.52, |
| "learning_rate": 0.0001751522410996488, |
| "loss": 0.6875, |
| "step": 567 |
| }, |
| { |
| "epoch": 1.52, |
| "learning_rate": 0.00017505691775255745, |
| "loss": 0.6336, |
| "step": 568 |
| }, |
| { |
| "epoch": 1.52, |
| "learning_rate": 0.00017496143795653007, |
| "loss": 0.6282, |
| "step": 569 |
| }, |
| { |
| "epoch": 1.52, |
| "learning_rate": 0.0001748658019105852, |
| "loss": 0.689, |
| "step": 570 |
| }, |
| { |
| "epoch": 1.53, |
| "learning_rate": 0.00017477000981406694, |
| "loss": 0.6589, |
| "step": 571 |
| }, |
| { |
| "epoch": 1.53, |
| "learning_rate": 0.00017467406186664474, |
| "loss": 0.6255, |
| "step": 572 |
| }, |
| { |
| "epoch": 1.53, |
| "learning_rate": 0.0001745779582683129, |
| "loss": 0.683, |
| "step": 573 |
| }, |
| { |
| "epoch": 1.53, |
| "learning_rate": 0.00017448169921939014, |
| "loss": 0.6227, |
| "step": 574 |
| }, |
| { |
| "epoch": 1.54, |
| "learning_rate": 0.00017438528492051914, |
| "loss": 0.6487, |
| "step": 575 |
| }, |
| { |
| "epoch": 1.54, |
| "learning_rate": 0.0001742887155726663, |
| "loss": 0.6758, |
| "step": 576 |
| }, |
| { |
| "epoch": 1.54, |
| "learning_rate": 0.00017419199137712113, |
| "loss": 0.6573, |
| "step": 577 |
| }, |
| { |
| "epoch": 1.55, |
| "learning_rate": 0.00017409511253549593, |
| "loss": 0.697, |
| "step": 578 |
| }, |
| { |
| "epoch": 1.55, |
| "learning_rate": 0.00017399807924972532, |
| "loss": 0.6424, |
| "step": 579 |
| }, |
| { |
| "epoch": 1.55, |
| "learning_rate": 0.00017390089172206592, |
| "loss": 0.6586, |
| "step": 580 |
| }, |
| { |
| "epoch": 1.55, |
| "learning_rate": 0.00017380355015509576, |
| "loss": 0.6554, |
| "step": 581 |
| }, |
| { |
| "epoch": 1.56, |
| "learning_rate": 0.00017370605475171397, |
| "loss": 0.6606, |
| "step": 582 |
| }, |
| { |
| "epoch": 1.56, |
| "learning_rate": 0.0001736084057151404, |
| "loss": 0.6716, |
| "step": 583 |
| }, |
| { |
| "epoch": 1.56, |
| "learning_rate": 0.00017351060324891502, |
| "loss": 0.6685, |
| "step": 584 |
| }, |
| { |
| "epoch": 1.56, |
| "learning_rate": 0.00017341264755689776, |
| "loss": 0.6962, |
| "step": 585 |
| }, |
| { |
| "epoch": 1.57, |
| "learning_rate": 0.00017331453884326782, |
| "loss": 0.671, |
| "step": 586 |
| }, |
| { |
| "epoch": 1.57, |
| "learning_rate": 0.00017321627731252336, |
| "loss": 0.6062, |
| "step": 587 |
| }, |
| { |
| "epoch": 1.57, |
| "learning_rate": 0.0001731178631694811, |
| "loss": 0.6332, |
| "step": 588 |
| }, |
| { |
| "epoch": 1.57, |
| "learning_rate": 0.00017301929661927589, |
| "loss": 0.6588, |
| "step": 589 |
| }, |
| { |
| "epoch": 1.58, |
| "learning_rate": 0.00017292057786736026, |
| "loss": 0.6511, |
| "step": 590 |
| }, |
| { |
| "epoch": 1.58, |
| "learning_rate": 0.00017282170711950396, |
| "loss": 0.6777, |
| "step": 591 |
| }, |
| { |
| "epoch": 1.58, |
| "learning_rate": 0.00017272268458179353, |
| "loss": 0.6837, |
| "step": 592 |
| }, |
| { |
| "epoch": 1.59, |
| "learning_rate": 0.000172623510460632, |
| "loss": 0.6426, |
| "step": 593 |
| }, |
| { |
| "epoch": 1.59, |
| "learning_rate": 0.00017252418496273823, |
| "loss": 0.6779, |
| "step": 594 |
| }, |
| { |
| "epoch": 1.59, |
| "learning_rate": 0.00017242470829514672, |
| "loss": 0.6681, |
| "step": 595 |
| }, |
| { |
| "epoch": 1.59, |
| "learning_rate": 0.00017232508066520702, |
| "loss": 0.5798, |
| "step": 596 |
| }, |
| { |
| "epoch": 1.6, |
| "learning_rate": 0.00017222530228058335, |
| "loss": 0.6449, |
| "step": 597 |
| }, |
| { |
| "epoch": 1.6, |
| "learning_rate": 0.00017212537334925415, |
| "loss": 0.6593, |
| "step": 598 |
| }, |
| { |
| "epoch": 1.6, |
| "learning_rate": 0.00017202529407951177, |
| "loss": 0.6911, |
| "step": 599 |
| }, |
| { |
| "epoch": 1.6, |
| "learning_rate": 0.00017192506467996174, |
| "loss": 0.6075, |
| "step": 600 |
| }, |
| { |
| "epoch": 1.61, |
| "learning_rate": 0.00017182468535952263, |
| "loss": 0.7037, |
| "step": 601 |
| }, |
| { |
| "epoch": 1.61, |
| "learning_rate": 0.00017172415632742553, |
| "loss": 0.6466, |
| "step": 602 |
| }, |
| { |
| "epoch": 1.61, |
| "learning_rate": 0.00017162347779321353, |
| "loss": 0.6435, |
| "step": 603 |
| }, |
| { |
| "epoch": 1.61, |
| "learning_rate": 0.00017152264996674136, |
| "loss": 0.6284, |
| "step": 604 |
| }, |
| { |
| "epoch": 1.62, |
| "learning_rate": 0.00017142167305817495, |
| "loss": 0.6726, |
| "step": 605 |
| }, |
| { |
| "epoch": 1.62, |
| "learning_rate": 0.00017132054727799097, |
| "loss": 0.6604, |
| "step": 606 |
| }, |
| { |
| "epoch": 1.62, |
| "learning_rate": 0.00017121927283697636, |
| "loss": 0.634, |
| "step": 607 |
| }, |
| { |
| "epoch": 1.63, |
| "learning_rate": 0.00017111784994622804, |
| "loss": 0.6225, |
| "step": 608 |
| }, |
| { |
| "epoch": 1.63, |
| "learning_rate": 0.0001710162788171522, |
| "loss": 0.677, |
| "step": 609 |
| }, |
| { |
| "epoch": 1.63, |
| "learning_rate": 0.00017091455966146416, |
| "loss": 0.659, |
| "step": 610 |
| }, |
| { |
| "epoch": 1.63, |
| "learning_rate": 0.00017081269269118772, |
| "loss": 0.6772, |
| "step": 611 |
| }, |
| { |
| "epoch": 1.64, |
| "learning_rate": 0.00017071067811865476, |
| "loss": 0.6254, |
| "step": 612 |
| }, |
| { |
| "epoch": 1.64, |
| "learning_rate": 0.00017060851615650487, |
| "loss": 0.5934, |
| "step": 613 |
| }, |
| { |
| "epoch": 1.64, |
| "learning_rate": 0.0001705062070176849, |
| "loss": 0.6374, |
| "step": 614 |
| }, |
| { |
| "epoch": 1.64, |
| "learning_rate": 0.0001704037509154484, |
| "loss": 0.6114, |
| "step": 615 |
| }, |
| { |
| "epoch": 1.65, |
| "learning_rate": 0.00017030114806335526, |
| "loss": 0.5956, |
| "step": 616 |
| }, |
| { |
| "epoch": 1.65, |
| "learning_rate": 0.00017019839867527122, |
| "loss": 0.6466, |
| "step": 617 |
| }, |
| { |
| "epoch": 1.65, |
| "learning_rate": 0.00017009550296536761, |
| "loss": 0.6796, |
| "step": 618 |
| }, |
| { |
| "epoch": 1.66, |
| "learning_rate": 0.0001699924611481206, |
| "loss": 0.6283, |
| "step": 619 |
| }, |
| { |
| "epoch": 1.66, |
| "learning_rate": 0.00016988927343831095, |
| "loss": 0.6829, |
| "step": 620 |
| }, |
| { |
| "epoch": 1.66, |
| "learning_rate": 0.00016978594005102356, |
| "loss": 0.653, |
| "step": 621 |
| }, |
| { |
| "epoch": 1.66, |
| "learning_rate": 0.00016968246120164693, |
| "loss": 0.6254, |
| "step": 622 |
| }, |
| { |
| "epoch": 1.67, |
| "learning_rate": 0.00016957883710587277, |
| "loss": 0.6119, |
| "step": 623 |
| }, |
| { |
| "epoch": 1.67, |
| "learning_rate": 0.00016947506797969562, |
| "loss": 0.6888, |
| "step": 624 |
| }, |
| { |
| "epoch": 1.67, |
| "learning_rate": 0.00016937115403941217, |
| "loss": 0.5944, |
| "step": 625 |
| }, |
| { |
| "epoch": 1.67, |
| "learning_rate": 0.00016926709550162111, |
| "loss": 0.6628, |
| "step": 626 |
| }, |
| { |
| "epoch": 1.68, |
| "learning_rate": 0.00016916289258322244, |
| "loss": 0.5811, |
| "step": 627 |
| }, |
| { |
| "epoch": 1.68, |
| "learning_rate": 0.00016905854550141716, |
| "loss": 0.6279, |
| "step": 628 |
| }, |
| { |
| "epoch": 1.68, |
| "learning_rate": 0.0001689540544737067, |
| "loss": 0.6947, |
| "step": 629 |
| }, |
| { |
| "epoch": 1.68, |
| "learning_rate": 0.0001688494197178926, |
| "loss": 0.708, |
| "step": 630 |
| }, |
| { |
| "epoch": 1.69, |
| "learning_rate": 0.00016874464145207597, |
| "loss": 0.6452, |
| "step": 631 |
| }, |
| { |
| "epoch": 1.69, |
| "learning_rate": 0.00016863971989465698, |
| "loss": 0.7001, |
| "step": 632 |
| }, |
| { |
| "epoch": 1.69, |
| "learning_rate": 0.00016853465526433462, |
| "loss": 0.6071, |
| "step": 633 |
| }, |
| { |
| "epoch": 1.7, |
| "learning_rate": 0.00016842944778010596, |
| "loss": 0.6542, |
| "step": 634 |
| }, |
| { |
| "epoch": 1.7, |
| "learning_rate": 0.00016832409766126593, |
| "loss": 0.5937, |
| "step": 635 |
| }, |
| { |
| "epoch": 1.7, |
| "learning_rate": 0.00016821860512740671, |
| "loss": 0.6492, |
| "step": 636 |
| }, |
| { |
| "epoch": 1.7, |
| "learning_rate": 0.0001681129703984174, |
| "loss": 0.6444, |
| "step": 637 |
| }, |
| { |
| "epoch": 1.71, |
| "learning_rate": 0.00016800719369448338, |
| "loss": 0.6636, |
| "step": 638 |
| }, |
| { |
| "epoch": 1.71, |
| "learning_rate": 0.00016790127523608607, |
| "loss": 0.6576, |
| "step": 639 |
| }, |
| { |
| "epoch": 1.71, |
| "learning_rate": 0.00016779521524400232, |
| "loss": 0.7563, |
| "step": 640 |
| }, |
| { |
| "epoch": 1.71, |
| "learning_rate": 0.000167689013939304, |
| "loss": 0.6418, |
| "step": 641 |
| }, |
| { |
| "epoch": 1.72, |
| "learning_rate": 0.00016758267154335748, |
| "loss": 0.6671, |
| "step": 642 |
| }, |
| { |
| "epoch": 1.72, |
| "learning_rate": 0.00016747618827782338, |
| "loss": 0.5932, |
| "step": 643 |
| }, |
| { |
| "epoch": 1.72, |
| "learning_rate": 0.00016736956436465573, |
| "loss": 0.6524, |
| "step": 644 |
| }, |
| { |
| "epoch": 1.72, |
| "learning_rate": 0.0001672628000261019, |
| "loss": 0.6247, |
| "step": 645 |
| }, |
| { |
| "epoch": 1.73, |
| "learning_rate": 0.00016715589548470185, |
| "loss": 0.6399, |
| "step": 646 |
| }, |
| { |
| "epoch": 1.73, |
| "learning_rate": 0.00016704885096328786, |
| "loss": 0.6399, |
| "step": 647 |
| }, |
| { |
| "epoch": 1.73, |
| "learning_rate": 0.00016694166668498398, |
| "loss": 0.6489, |
| "step": 648 |
| }, |
| { |
| "epoch": 1.74, |
| "learning_rate": 0.00016683434287320548, |
| "loss": 0.6663, |
| "step": 649 |
| }, |
| { |
| "epoch": 1.74, |
| "learning_rate": 0.00016672687975165858, |
| "loss": 0.6454, |
| "step": 650 |
| }, |
| { |
| "epoch": 1.74, |
| "learning_rate": 0.00016661927754433982, |
| "loss": 0.6632, |
| "step": 651 |
| }, |
| { |
| "epoch": 1.74, |
| "learning_rate": 0.00016651153647553567, |
| "loss": 0.6379, |
| "step": 652 |
| }, |
| { |
| "epoch": 1.75, |
| "learning_rate": 0.00016640365676982207, |
| "loss": 0.6331, |
| "step": 653 |
| }, |
| { |
| "epoch": 1.75, |
| "learning_rate": 0.00016629563865206386, |
| "loss": 0.6481, |
| "step": 654 |
| }, |
| { |
| "epoch": 1.75, |
| "learning_rate": 0.0001661874823474144, |
| "loss": 0.6372, |
| "step": 655 |
| }, |
| { |
| "epoch": 1.75, |
| "learning_rate": 0.00016607918808131525, |
| "loss": 0.5696, |
| "step": 656 |
| }, |
| { |
| "epoch": 1.76, |
| "learning_rate": 0.00016597075607949525, |
| "loss": 0.6262, |
| "step": 657 |
| }, |
| { |
| "epoch": 1.76, |
| "learning_rate": 0.0001658621865679706, |
| "loss": 0.6619, |
| "step": 658 |
| }, |
| { |
| "epoch": 1.76, |
| "learning_rate": 0.00016575347977304398, |
| "loss": 0.6815, |
| "step": 659 |
| }, |
| { |
| "epoch": 1.76, |
| "learning_rate": 0.00016564463592130428, |
| "loss": 0.648, |
| "step": 660 |
| }, |
| { |
| "epoch": 1.77, |
| "learning_rate": 0.00016553565523962603, |
| "loss": 0.6655, |
| "step": 661 |
| }, |
| { |
| "epoch": 1.77, |
| "learning_rate": 0.00016542653795516898, |
| "loss": 0.6379, |
| "step": 662 |
| }, |
| { |
| "epoch": 1.77, |
| "learning_rate": 0.00016531728429537766, |
| "loss": 0.6425, |
| "step": 663 |
| }, |
| { |
| "epoch": 1.78, |
| "learning_rate": 0.00016520789448798087, |
| "loss": 0.6688, |
| "step": 664 |
| }, |
| { |
| "epoch": 1.78, |
| "learning_rate": 0.0001650983687609911, |
| "loss": 0.6326, |
| "step": 665 |
| }, |
| { |
| "epoch": 1.78, |
| "learning_rate": 0.00016498870734270418, |
| "loss": 0.6138, |
| "step": 666 |
| }, |
| { |
| "epoch": 1.78, |
| "learning_rate": 0.00016487891046169887, |
| "loss": 0.6027, |
| "step": 667 |
| }, |
| { |
| "epoch": 1.79, |
| "learning_rate": 0.0001647689783468362, |
| "loss": 0.5862, |
| "step": 668 |
| }, |
| { |
| "epoch": 1.79, |
| "learning_rate": 0.0001646589112272591, |
| "loss": 0.6989, |
| "step": 669 |
| }, |
| { |
| "epoch": 1.79, |
| "learning_rate": 0.0001645487093323919, |
| "loss": 0.62, |
| "step": 670 |
| }, |
| { |
| "epoch": 1.79, |
| "learning_rate": 0.0001644383728919399, |
| "loss": 0.6448, |
| "step": 671 |
| }, |
| { |
| "epoch": 1.8, |
| "learning_rate": 0.00016432790213588872, |
| "loss": 0.6437, |
| "step": 672 |
| }, |
| { |
| "epoch": 1.8, |
| "learning_rate": 0.0001642172972945042, |
| "loss": 0.6694, |
| "step": 673 |
| }, |
| { |
| "epoch": 1.8, |
| "learning_rate": 0.00016410655859833137, |
| "loss": 0.6361, |
| "step": 674 |
| }, |
| { |
| "epoch": 1.8, |
| "learning_rate": 0.00016399568627819443, |
| "loss": 0.7225, |
| "step": 675 |
| }, |
| { |
| "epoch": 1.81, |
| "learning_rate": 0.00016388468056519612, |
| "loss": 0.6169, |
| "step": 676 |
| }, |
| { |
| "epoch": 1.81, |
| "learning_rate": 0.00016377354169071722, |
| "loss": 0.6331, |
| "step": 677 |
| }, |
| { |
| "epoch": 1.81, |
| "learning_rate": 0.00016366226988641594, |
| "loss": 0.6058, |
| "step": 678 |
| }, |
| { |
| "epoch": 1.82, |
| "learning_rate": 0.00016355086538422776, |
| "loss": 0.5799, |
| "step": 679 |
| }, |
| { |
| "epoch": 1.82, |
| "learning_rate": 0.00016343932841636456, |
| "loss": 0.6115, |
| "step": 680 |
| }, |
| { |
| "epoch": 1.82, |
| "learning_rate": 0.00016332765921531453, |
| "loss": 0.6596, |
| "step": 681 |
| }, |
| { |
| "epoch": 1.82, |
| "learning_rate": 0.00016321585801384137, |
| "loss": 0.6816, |
| "step": 682 |
| }, |
| { |
| "epoch": 1.83, |
| "learning_rate": 0.00016310392504498395, |
| "loss": 0.7131, |
| "step": 683 |
| }, |
| { |
| "epoch": 1.83, |
| "learning_rate": 0.00016299186054205577, |
| "loss": 0.6518, |
| "step": 684 |
| }, |
| { |
| "epoch": 1.83, |
| "learning_rate": 0.00016287966473864452, |
| "loss": 0.6086, |
| "step": 685 |
| }, |
| { |
| "epoch": 1.83, |
| "learning_rate": 0.00016276733786861164, |
| "loss": 0.615, |
| "step": 686 |
| }, |
| { |
| "epoch": 1.84, |
| "learning_rate": 0.0001626548801660916, |
| "loss": 0.6335, |
| "step": 687 |
| }, |
| { |
| "epoch": 1.84, |
| "learning_rate": 0.0001625422918654918, |
| "loss": 0.6859, |
| "step": 688 |
| }, |
| { |
| "epoch": 1.84, |
| "learning_rate": 0.0001624295732014916, |
| "loss": 0.6569, |
| "step": 689 |
| }, |
| { |
| "epoch": 1.84, |
| "learning_rate": 0.00016231672440904235, |
| "loss": 0.6179, |
| "step": 690 |
| }, |
| { |
| "epoch": 1.85, |
| "learning_rate": 0.00016220374572336645, |
| "loss": 0.616, |
| "step": 691 |
| }, |
| { |
| "epoch": 1.85, |
| "learning_rate": 0.00016209063737995715, |
| "loss": 0.6382, |
| "step": 692 |
| }, |
| { |
| "epoch": 1.85, |
| "learning_rate": 0.0001619773996145779, |
| "loss": 0.6111, |
| "step": 693 |
| }, |
| { |
| "epoch": 1.86, |
| "learning_rate": 0.00016186403266326197, |
| "loss": 0.5949, |
| "step": 694 |
| }, |
| { |
| "epoch": 1.86, |
| "learning_rate": 0.00016175053676231187, |
| "loss": 0.6408, |
| "step": 695 |
| }, |
| { |
| "epoch": 1.86, |
| "learning_rate": 0.00016163691214829892, |
| "loss": 0.6042, |
| "step": 696 |
| }, |
| { |
| "epoch": 1.86, |
| "learning_rate": 0.00016152315905806268, |
| "loss": 0.7113, |
| "step": 697 |
| }, |
| { |
| "epoch": 1.87, |
| "learning_rate": 0.00016140927772871058, |
| "loss": 0.6311, |
| "step": 698 |
| }, |
| { |
| "epoch": 1.87, |
| "learning_rate": 0.0001612952683976173, |
| "loss": 0.6876, |
| "step": 699 |
| }, |
| { |
| "epoch": 1.87, |
| "learning_rate": 0.00016118113130242432, |
| "loss": 0.5693, |
| "step": 700 |
| }, |
| { |
| "epoch": 1.87, |
| "learning_rate": 0.0001610668666810395, |
| "loss": 0.6083, |
| "step": 701 |
| }, |
| { |
| "epoch": 1.88, |
| "learning_rate": 0.0001609524747716364, |
| "loss": 0.6262, |
| "step": 702 |
| }, |
| { |
| "epoch": 1.88, |
| "learning_rate": 0.00016083795581265406, |
| "loss": 0.6194, |
| "step": 703 |
| }, |
| { |
| "epoch": 1.88, |
| "learning_rate": 0.00016072331004279614, |
| "loss": 0.6285, |
| "step": 704 |
| }, |
| { |
| "epoch": 1.89, |
| "learning_rate": 0.00016060853770103083, |
| "loss": 0.65, |
| "step": 705 |
| }, |
| { |
| "epoch": 1.89, |
| "learning_rate": 0.00016049363902659, |
| "loss": 0.6339, |
| "step": 706 |
| }, |
| { |
| "epoch": 1.89, |
| "learning_rate": 0.00016037861425896891, |
| "loss": 0.627, |
| "step": 707 |
| }, |
| { |
| "epoch": 1.89, |
| "learning_rate": 0.00016026346363792567, |
| "loss": 0.5951, |
| "step": 708 |
| }, |
| { |
| "epoch": 1.9, |
| "learning_rate": 0.00016014818740348063, |
| "loss": 0.6794, |
| "step": 709 |
| }, |
| { |
| "epoch": 1.9, |
| "learning_rate": 0.00016003278579591608, |
| "loss": 0.6122, |
| "step": 710 |
| }, |
| { |
| "epoch": 1.9, |
| "learning_rate": 0.00015991725905577556, |
| "loss": 0.6402, |
| "step": 711 |
| }, |
| { |
| "epoch": 1.9, |
| "learning_rate": 0.0001598016074238635, |
| "loss": 0.6043, |
| "step": 712 |
| }, |
| { |
| "epoch": 1.91, |
| "learning_rate": 0.00015968583114124457, |
| "loss": 0.6347, |
| "step": 713 |
| }, |
| { |
| "epoch": 1.91, |
| "learning_rate": 0.00015956993044924334, |
| "loss": 0.6113, |
| "step": 714 |
| }, |
| { |
| "epoch": 1.91, |
| "learning_rate": 0.00015945390558944368, |
| "loss": 0.609, |
| "step": 715 |
| }, |
| { |
| "epoch": 1.91, |
| "learning_rate": 0.00015933775680368822, |
| "loss": 0.6453, |
| "step": 716 |
| }, |
| { |
| "epoch": 1.92, |
| "learning_rate": 0.00015922148433407802, |
| "loss": 0.6178, |
| "step": 717 |
| }, |
| { |
| "epoch": 1.92, |
| "learning_rate": 0.00015910508842297181, |
| "loss": 0.6219, |
| "step": 718 |
| }, |
| { |
| "epoch": 1.92, |
| "learning_rate": 0.0001589885693129857, |
| "loss": 0.6484, |
| "step": 719 |
| }, |
| { |
| "epoch": 1.93, |
| "learning_rate": 0.0001588719272469926, |
| "loss": 0.6048, |
| "step": 720 |
| }, |
| { |
| "epoch": 1.93, |
| "learning_rate": 0.0001587551624681217, |
| "loss": 0.6188, |
| "step": 721 |
| }, |
| { |
| "epoch": 1.93, |
| "learning_rate": 0.000158638275219758, |
| "loss": 0.6209, |
| "step": 722 |
| }, |
| { |
| "epoch": 1.93, |
| "learning_rate": 0.00015852126574554163, |
| "loss": 0.6286, |
| "step": 723 |
| }, |
| { |
| "epoch": 1.94, |
| "learning_rate": 0.00015840413428936767, |
| "loss": 0.6583, |
| "step": 724 |
| }, |
| { |
| "epoch": 1.94, |
| "learning_rate": 0.0001582868810953854, |
| "loss": 0.5979, |
| "step": 725 |
| }, |
| { |
| "epoch": 1.94, |
| "learning_rate": 0.00015816950640799786, |
| "loss": 0.6345, |
| "step": 726 |
| }, |
| { |
| "epoch": 1.94, |
| "learning_rate": 0.00015805201047186125, |
| "loss": 0.6338, |
| "step": 727 |
| }, |
| { |
| "epoch": 1.95, |
| "learning_rate": 0.0001579343935318846, |
| "loss": 0.5986, |
| "step": 728 |
| }, |
| { |
| "epoch": 1.95, |
| "learning_rate": 0.0001578166558332291, |
| "loss": 0.6553, |
| "step": 729 |
| }, |
| { |
| "epoch": 1.95, |
| "learning_rate": 0.0001576987976213077, |
| "loss": 0.6485, |
| "step": 730 |
| }, |
| { |
| "epoch": 1.95, |
| "learning_rate": 0.00015758081914178456, |
| "loss": 0.6385, |
| "step": 731 |
| }, |
| { |
| "epoch": 1.96, |
| "learning_rate": 0.0001574627206405744, |
| "loss": 0.6344, |
| "step": 732 |
| }, |
| { |
| "epoch": 1.96, |
| "learning_rate": 0.00015734450236384227, |
| "loss": 0.6517, |
| "step": 733 |
| }, |
| { |
| "epoch": 1.96, |
| "learning_rate": 0.00015722616455800275, |
| "loss": 0.604, |
| "step": 734 |
| }, |
| { |
| "epoch": 1.97, |
| "learning_rate": 0.00015710770746971971, |
| "loss": 0.6432, |
| "step": 735 |
| }, |
| { |
| "epoch": 1.97, |
| "learning_rate": 0.00015698913134590552, |
| "loss": 0.6166, |
| "step": 736 |
| }, |
| { |
| "epoch": 1.97, |
| "learning_rate": 0.00015687043643372074, |
| "loss": 0.6213, |
| "step": 737 |
| }, |
| { |
| "epoch": 1.97, |
| "learning_rate": 0.00015675162298057352, |
| "loss": 0.6095, |
| "step": 738 |
| }, |
| { |
| "epoch": 1.98, |
| "learning_rate": 0.00015663269123411907, |
| "loss": 0.5705, |
| "step": 739 |
| }, |
| { |
| "epoch": 1.98, |
| "learning_rate": 0.0001565136414422592, |
| "loss": 0.6395, |
| "step": 740 |
| }, |
| { |
| "epoch": 1.98, |
| "learning_rate": 0.00015639447385314178, |
| "loss": 0.5974, |
| "step": 741 |
| }, |
| { |
| "epoch": 1.98, |
| "learning_rate": 0.0001562751887151602, |
| "loss": 0.6041, |
| "step": 742 |
| }, |
| { |
| "epoch": 1.99, |
| "learning_rate": 0.00015615578627695283, |
| "loss": 0.6258, |
| "step": 743 |
| }, |
| { |
| "epoch": 1.99, |
| "learning_rate": 0.00015603626678740263, |
| "loss": 0.6292, |
| "step": 744 |
| }, |
| { |
| "epoch": 1.99, |
| "learning_rate": 0.0001559166304956365, |
| "loss": 0.6491, |
| "step": 745 |
| }, |
| { |
| "epoch": 1.99, |
| "learning_rate": 0.0001557968776510248, |
| "loss": 0.6683, |
| "step": 746 |
| }, |
| { |
| "epoch": 2.0, |
| "learning_rate": 0.0001556770085031808, |
| "loss": 0.6724, |
| "step": 747 |
| }, |
| { |
| "epoch": 2.0, |
| "learning_rate": 0.00015555702330196023, |
| "loss": 0.5698, |
| "step": 748 |
| }, |
| { |
| "epoch": 2.0, |
| "learning_rate": 0.00015543692229746074, |
| "loss": 0.4421, |
| "step": 749 |
| }, |
| { |
| "epoch": 2.01, |
| "learning_rate": 0.00015531670574002134, |
| "loss": 0.4473, |
| "step": 750 |
| }, |
| { |
| "epoch": 2.01, |
| "learning_rate": 0.0001551963738802219, |
| "loss": 0.423, |
| "step": 751 |
| }, |
| { |
| "epoch": 2.01, |
| "learning_rate": 0.00015507592696888258, |
| "loss": 0.3673, |
| "step": 752 |
| }, |
| { |
| "epoch": 2.01, |
| "learning_rate": 0.00015495536525706344, |
| "loss": 0.3684, |
| "step": 753 |
| }, |
| { |
| "epoch": 2.02, |
| "learning_rate": 0.0001548346889960638, |
| "loss": 0.3735, |
| "step": 754 |
| }, |
| { |
| "epoch": 2.02, |
| "learning_rate": 0.0001547138984374217, |
| "loss": 0.399, |
| "step": 755 |
| }, |
| { |
| "epoch": 2.02, |
| "learning_rate": 0.00015459299383291345, |
| "loss": 0.3477, |
| "step": 756 |
| }, |
| { |
| "epoch": 2.02, |
| "learning_rate": 0.0001544719754345531, |
| "loss": 0.359, |
| "step": 757 |
| }, |
| { |
| "epoch": 2.03, |
| "learning_rate": 0.00015435084349459195, |
| "loss": 0.421, |
| "step": 758 |
| }, |
| { |
| "epoch": 2.03, |
| "learning_rate": 0.00015422959826551778, |
| "loss": 0.37, |
| "step": 759 |
| }, |
| { |
| "epoch": 2.03, |
| "learning_rate": 0.00015410824000005468, |
| "loss": 0.343, |
| "step": 760 |
| }, |
| { |
| "epoch": 2.03, |
| "learning_rate": 0.0001539867689511623, |
| "loss": 0.3941, |
| "step": 761 |
| }, |
| { |
| "epoch": 2.04, |
| "learning_rate": 0.00015386518537203534, |
| "loss": 0.3603, |
| "step": 762 |
| }, |
| { |
| "epoch": 2.04, |
| "learning_rate": 0.00015374348951610313, |
| "loss": 0.3547, |
| "step": 763 |
| }, |
| { |
| "epoch": 2.04, |
| "learning_rate": 0.000153621681637029, |
| "loss": 0.3483, |
| "step": 764 |
| }, |
| { |
| "epoch": 2.05, |
| "learning_rate": 0.00015349976198870973, |
| "loss": 0.3841, |
| "step": 765 |
| }, |
| { |
| "epoch": 2.05, |
| "learning_rate": 0.00015337773082527517, |
| "loss": 0.4025, |
| "step": 766 |
| }, |
| { |
| "epoch": 2.05, |
| "learning_rate": 0.00015325558840108752, |
| "loss": 0.393, |
| "step": 767 |
| }, |
| { |
| "epoch": 2.05, |
| "learning_rate": 0.0001531333349707409, |
| "loss": 0.3699, |
| "step": 768 |
| }, |
| { |
| "epoch": 2.06, |
| "learning_rate": 0.00015301097078906096, |
| "loss": 0.4076, |
| "step": 769 |
| }, |
| { |
| "epoch": 2.06, |
| "learning_rate": 0.000152888496111104, |
| "loss": 0.3815, |
| "step": 770 |
| }, |
| { |
| "epoch": 2.06, |
| "learning_rate": 0.00015276591119215669, |
| "loss": 0.3607, |
| "step": 771 |
| }, |
| { |
| "epoch": 2.06, |
| "learning_rate": 0.0001526432162877356, |
| "loss": 0.3635, |
| "step": 772 |
| }, |
| { |
| "epoch": 2.07, |
| "learning_rate": 0.00015252041165358642, |
| "loss": 0.3491, |
| "step": 773 |
| }, |
| { |
| "epoch": 2.07, |
| "learning_rate": 0.00015239749754568363, |
| "loss": 0.3448, |
| "step": 774 |
| }, |
| { |
| "epoch": 2.07, |
| "learning_rate": 0.0001522744742202299, |
| "loss": 0.361, |
| "step": 775 |
| }, |
| { |
| "epoch": 2.07, |
| "learning_rate": 0.0001521513419336555, |
| "loss": 0.3924, |
| "step": 776 |
| }, |
| { |
| "epoch": 2.08, |
| "learning_rate": 0.00015202810094261791, |
| "loss": 0.3543, |
| "step": 777 |
| }, |
| { |
| "epoch": 2.08, |
| "learning_rate": 0.00015190475150400105, |
| "loss": 0.3423, |
| "step": 778 |
| }, |
| { |
| "epoch": 2.08, |
| "learning_rate": 0.00015178129387491507, |
| "loss": 0.4026, |
| "step": 779 |
| }, |
| { |
| "epoch": 2.09, |
| "learning_rate": 0.00015165772831269547, |
| "loss": 0.3377, |
| "step": 780 |
| }, |
| { |
| "epoch": 2.09, |
| "learning_rate": 0.00015153405507490288, |
| "loss": 0.3841, |
| "step": 781 |
| }, |
| { |
| "epoch": 2.09, |
| "learning_rate": 0.00015141027441932216, |
| "loss": 0.3651, |
| "step": 782 |
| }, |
| { |
| "epoch": 2.09, |
| "learning_rate": 0.00015128638660396232, |
| "loss": 0.3931, |
| "step": 783 |
| }, |
| { |
| "epoch": 2.1, |
| "learning_rate": 0.00015116239188705556, |
| "loss": 0.3433, |
| "step": 784 |
| }, |
| { |
| "epoch": 2.1, |
| "learning_rate": 0.00015103829052705696, |
| "loss": 0.3417, |
| "step": 785 |
| }, |
| { |
| "epoch": 2.1, |
| "learning_rate": 0.00015091408278264387, |
| "loss": 0.3849, |
| "step": 786 |
| }, |
| { |
| "epoch": 2.1, |
| "learning_rate": 0.00015078976891271542, |
| "loss": 0.3725, |
| "step": 787 |
| }, |
| { |
| "epoch": 2.11, |
| "learning_rate": 0.00015066534917639195, |
| "loss": 0.3895, |
| "step": 788 |
| }, |
| { |
| "epoch": 2.11, |
| "learning_rate": 0.0001505408238330144, |
| "loss": 0.3756, |
| "step": 789 |
| }, |
| { |
| "epoch": 2.11, |
| "learning_rate": 0.00015041619314214394, |
| "loss": 0.3355, |
| "step": 790 |
| }, |
| { |
| "epoch": 2.11, |
| "learning_rate": 0.00015029145736356124, |
| "loss": 0.3889, |
| "step": 791 |
| }, |
| { |
| "epoch": 2.12, |
| "learning_rate": 0.00015016661675726608, |
| "loss": 0.3701, |
| "step": 792 |
| }, |
| { |
| "epoch": 2.12, |
| "learning_rate": 0.00015004167158347666, |
| "loss": 0.3625, |
| "step": 793 |
| }, |
| { |
| "epoch": 2.12, |
| "learning_rate": 0.00014991662210262928, |
| "loss": 0.3728, |
| "step": 794 |
| }, |
| { |
| "epoch": 2.13, |
| "learning_rate": 0.0001497914685753775, |
| "loss": 0.3501, |
| "step": 795 |
| }, |
| { |
| "epoch": 2.13, |
| "learning_rate": 0.00014966621126259183, |
| "loss": 0.349, |
| "step": 796 |
| }, |
| { |
| "epoch": 2.13, |
| "learning_rate": 0.00014954085042535916, |
| "loss": 0.3494, |
| "step": 797 |
| }, |
| { |
| "epoch": 2.13, |
| "learning_rate": 0.00014941538632498203, |
| "loss": 0.3557, |
| "step": 798 |
| }, |
| { |
| "epoch": 2.14, |
| "learning_rate": 0.00014928981922297842, |
| "loss": 0.3505, |
| "step": 799 |
| }, |
| { |
| "epoch": 2.14, |
| "learning_rate": 0.0001491641493810808, |
| "loss": 0.383, |
| "step": 800 |
| }, |
| { |
| "epoch": 2.14, |
| "learning_rate": 0.0001490383770612359, |
| "loss": 0.3731, |
| "step": 801 |
| }, |
| { |
| "epoch": 2.14, |
| "learning_rate": 0.00014891250252560407, |
| "loss": 0.3716, |
| "step": 802 |
| }, |
| { |
| "epoch": 2.15, |
| "learning_rate": 0.00014878652603655872, |
| "loss": 0.3546, |
| "step": 803 |
| }, |
| { |
| "epoch": 2.15, |
| "learning_rate": 0.00014866044785668563, |
| "loss": 0.3582, |
| "step": 804 |
| }, |
| { |
| "epoch": 2.15, |
| "learning_rate": 0.00014853426824878278, |
| "loss": 0.3821, |
| "step": 805 |
| }, |
| { |
| "epoch": 2.16, |
| "learning_rate": 0.00014840798747585933, |
| "loss": 0.3674, |
| "step": 806 |
| }, |
| { |
| "epoch": 2.16, |
| "learning_rate": 0.00014828160580113552, |
| "loss": 0.3486, |
| "step": 807 |
| }, |
| { |
| "epoch": 2.16, |
| "learning_rate": 0.00014815512348804178, |
| "loss": 0.3669, |
| "step": 808 |
| }, |
| { |
| "epoch": 2.16, |
| "learning_rate": 0.0001480285408002183, |
| "loss": 0.3593, |
| "step": 809 |
| }, |
| { |
| "epoch": 2.17, |
| "learning_rate": 0.0001479018580015146, |
| "loss": 0.355, |
| "step": 810 |
| }, |
| { |
| "epoch": 2.17, |
| "learning_rate": 0.00014777507535598878, |
| "loss": 0.3335, |
| "step": 811 |
| }, |
| { |
| "epoch": 2.17, |
| "learning_rate": 0.00014764819312790707, |
| "loss": 0.3969, |
| "step": 812 |
| }, |
| { |
| "epoch": 2.17, |
| "learning_rate": 0.0001475212115817433, |
| "loss": 0.3748, |
| "step": 813 |
| }, |
| { |
| "epoch": 2.18, |
| "learning_rate": 0.0001473941309821783, |
| "loss": 0.3618, |
| "step": 814 |
| }, |
| { |
| "epoch": 2.18, |
| "learning_rate": 0.00014726695159409937, |
| "loss": 0.3806, |
| "step": 815 |
| }, |
| { |
| "epoch": 2.18, |
| "learning_rate": 0.0001471396736825998, |
| "loss": 0.3602, |
| "step": 816 |
| }, |
| { |
| "epoch": 2.18, |
| "learning_rate": 0.00014701229751297806, |
| "loss": 0.3907, |
| "step": 817 |
| }, |
| { |
| "epoch": 2.19, |
| "learning_rate": 0.0001468848233507376, |
| "loss": 0.3645, |
| "step": 818 |
| }, |
| { |
| "epoch": 2.19, |
| "learning_rate": 0.00014675725146158608, |
| "loss": 0.3553, |
| "step": 819 |
| }, |
| { |
| "epoch": 2.19, |
| "learning_rate": 0.0001466295821114348, |
| "loss": 0.3353, |
| "step": 820 |
| }, |
| { |
| "epoch": 2.2, |
| "learning_rate": 0.00014650181556639832, |
| "loss": 0.3935, |
| "step": 821 |
| }, |
| { |
| "epoch": 2.2, |
| "learning_rate": 0.0001463739520927937, |
| "loss": 0.3544, |
| "step": 822 |
| }, |
| { |
| "epoch": 2.2, |
| "learning_rate": 0.00014624599195714005, |
| "loss": 0.3485, |
| "step": 823 |
| }, |
| { |
| "epoch": 2.2, |
| "learning_rate": 0.00014611793542615803, |
| "loss": 0.4117, |
| "step": 824 |
| }, |
| { |
| "epoch": 2.21, |
| "learning_rate": 0.00014598978276676917, |
| "loss": 0.3814, |
| "step": 825 |
| }, |
| { |
| "epoch": 2.21, |
| "learning_rate": 0.0001458615342460953, |
| "loss": 0.3596, |
| "step": 826 |
| }, |
| { |
| "epoch": 2.21, |
| "learning_rate": 0.00014573319013145823, |
| "loss": 0.3896, |
| "step": 827 |
| }, |
| { |
| "epoch": 2.21, |
| "learning_rate": 0.00014560475069037894, |
| "loss": 0.3446, |
| "step": 828 |
| }, |
| { |
| "epoch": 2.22, |
| "learning_rate": 0.00014547621619057707, |
| "loss": 0.3721, |
| "step": 829 |
| }, |
| { |
| "epoch": 2.22, |
| "learning_rate": 0.00014534758689997045, |
| "loss": 0.3879, |
| "step": 830 |
| }, |
| { |
| "epoch": 2.22, |
| "learning_rate": 0.00014521886308667447, |
| "loss": 0.3653, |
| "step": 831 |
| }, |
| { |
| "epoch": 2.22, |
| "learning_rate": 0.0001450900450190016, |
| "loss": 0.4058, |
| "step": 832 |
| }, |
| { |
| "epoch": 2.23, |
| "learning_rate": 0.00014496113296546067, |
| "loss": 0.3998, |
| "step": 833 |
| }, |
| { |
| "epoch": 2.23, |
| "learning_rate": 0.00014483212719475653, |
| "loss": 0.3895, |
| "step": 834 |
| }, |
| { |
| "epoch": 2.23, |
| "learning_rate": 0.00014470302797578926, |
| "loss": 0.3645, |
| "step": 835 |
| }, |
| { |
| "epoch": 2.24, |
| "learning_rate": 0.00014457383557765386, |
| "loss": 0.3569, |
| "step": 836 |
| }, |
| { |
| "epoch": 2.24, |
| "learning_rate": 0.0001444445502696394, |
| "loss": 0.3539, |
| "step": 837 |
| }, |
| { |
| "epoch": 2.24, |
| "learning_rate": 0.0001443151723212287, |
| "loss": 0.3785, |
| "step": 838 |
| }, |
| { |
| "epoch": 2.24, |
| "learning_rate": 0.00014418570200209772, |
| "loss": 0.3682, |
| "step": 839 |
| }, |
| { |
| "epoch": 2.25, |
| "learning_rate": 0.00014405613958211482, |
| "loss": 0.4032, |
| "step": 840 |
| }, |
| { |
| "epoch": 2.25, |
| "learning_rate": 0.0001439264853313405, |
| "loss": 0.3856, |
| "step": 841 |
| }, |
| { |
| "epoch": 2.25, |
| "learning_rate": 0.00014379673952002656, |
| "loss": 0.4106, |
| "step": 842 |
| }, |
| { |
| "epoch": 2.25, |
| "learning_rate": 0.0001436669024186157, |
| "loss": 0.3706, |
| "step": 843 |
| }, |
| { |
| "epoch": 2.26, |
| "learning_rate": 0.00014353697429774084, |
| "loss": 0.368, |
| "step": 844 |
| }, |
| { |
| "epoch": 2.26, |
| "learning_rate": 0.00014340695542822468, |
| "loss": 0.4013, |
| "step": 845 |
| }, |
| { |
| "epoch": 2.26, |
| "learning_rate": 0.00014327684608107913, |
| "loss": 0.361, |
| "step": 846 |
| }, |
| { |
| "epoch": 2.26, |
| "learning_rate": 0.00014314664652750455, |
| "loss": 0.3478, |
| "step": 847 |
| }, |
| { |
| "epoch": 2.27, |
| "learning_rate": 0.00014301635703888943, |
| "loss": 0.407, |
| "step": 848 |
| }, |
| { |
| "epoch": 2.27, |
| "learning_rate": 0.00014288597788680973, |
| "loss": 0.3928, |
| "step": 849 |
| }, |
| { |
| "epoch": 2.27, |
| "learning_rate": 0.00014275550934302823, |
| "loss": 0.3897, |
| "step": 850 |
| }, |
| { |
| "epoch": 2.28, |
| "learning_rate": 0.00014262495167949405, |
| "loss": 0.3992, |
| "step": 851 |
| }, |
| { |
| "epoch": 2.28, |
| "learning_rate": 0.0001424943051683422, |
| "loss": 0.3558, |
| "step": 852 |
| }, |
| { |
| "epoch": 2.28, |
| "learning_rate": 0.0001423635700818927, |
| "loss": 0.3873, |
| "step": 853 |
| }, |
| { |
| "epoch": 2.28, |
| "learning_rate": 0.0001422327466926503, |
| "loss": 0.3598, |
| "step": 854 |
| }, |
| { |
| "epoch": 2.29, |
| "learning_rate": 0.00014210183527330376, |
| "loss": 0.3802, |
| "step": 855 |
| }, |
| { |
| "epoch": 2.29, |
| "learning_rate": 0.00014197083609672543, |
| "loss": 0.3571, |
| "step": 856 |
| }, |
| { |
| "epoch": 2.29, |
| "learning_rate": 0.00014183974943597047, |
| "loss": 0.3499, |
| "step": 857 |
| }, |
| { |
| "epoch": 2.29, |
| "learning_rate": 0.00014170857556427643, |
| "loss": 0.371, |
| "step": 858 |
| }, |
| { |
| "epoch": 2.3, |
| "learning_rate": 0.00014157731475506266, |
| "loss": 0.3843, |
| "step": 859 |
| }, |
| { |
| "epoch": 2.3, |
| "learning_rate": 0.0001414459672819297, |
| "loss": 0.4135, |
| "step": 860 |
| }, |
| { |
| "epoch": 2.3, |
| "learning_rate": 0.00014131453341865877, |
| "loss": 0.3742, |
| "step": 861 |
| }, |
| { |
| "epoch": 2.3, |
| "learning_rate": 0.00014118301343921108, |
| "loss": 0.3834, |
| "step": 862 |
| }, |
| { |
| "epoch": 2.31, |
| "learning_rate": 0.00014105140761772744, |
| "loss": 0.3684, |
| "step": 863 |
| }, |
| { |
| "epoch": 2.31, |
| "learning_rate": 0.0001409197162285275, |
| "loss": 0.3748, |
| "step": 864 |
| }, |
| { |
| "epoch": 2.31, |
| "learning_rate": 0.00014078793954610936, |
| "loss": 0.361, |
| "step": 865 |
| }, |
| { |
| "epoch": 2.32, |
| "learning_rate": 0.00014065607784514885, |
| "loss": 0.3363, |
| "step": 866 |
| }, |
| { |
| "epoch": 2.32, |
| "learning_rate": 0.000140524131400499, |
| "loss": 0.3507, |
| "step": 867 |
| }, |
| { |
| "epoch": 2.32, |
| "learning_rate": 0.00014039210048718949, |
| "loss": 0.3809, |
| "step": 868 |
| }, |
| { |
| "epoch": 2.32, |
| "learning_rate": 0.00014025998538042613, |
| "loss": 0.3401, |
| "step": 869 |
| }, |
| { |
| "epoch": 2.33, |
| "learning_rate": 0.0001401277863555901, |
| "loss": 0.3949, |
| "step": 870 |
| }, |
| { |
| "epoch": 2.33, |
| "learning_rate": 0.00013999550368823767, |
| "loss": 0.3737, |
| "step": 871 |
| }, |
| { |
| "epoch": 2.33, |
| "learning_rate": 0.00013986313765409925, |
| "loss": 0.3848, |
| "step": 872 |
| }, |
| { |
| "epoch": 2.33, |
| "learning_rate": 0.00013973068852907918, |
| "loss": 0.3428, |
| "step": 873 |
| }, |
| { |
| "epoch": 2.34, |
| "learning_rate": 0.0001395981565892549, |
| "loss": 0.3482, |
| "step": 874 |
| }, |
| { |
| "epoch": 2.34, |
| "learning_rate": 0.00013946554211087655, |
| "loss": 0.3869, |
| "step": 875 |
| }, |
| { |
| "epoch": 2.34, |
| "learning_rate": 0.00013933284537036625, |
| "loss": 0.3747, |
| "step": 876 |
| }, |
| { |
| "epoch": 2.34, |
| "learning_rate": 0.00013920006664431766, |
| "loss": 0.3559, |
| "step": 877 |
| }, |
| { |
| "epoch": 2.35, |
| "learning_rate": 0.00013906720620949521, |
| "loss": 0.3868, |
| "step": 878 |
| }, |
| { |
| "epoch": 2.35, |
| "learning_rate": 0.00013893426434283376, |
| "loss": 0.392, |
| "step": 879 |
| }, |
| { |
| "epoch": 2.35, |
| "learning_rate": 0.00013880124132143782, |
| "loss": 0.337, |
| "step": 880 |
| }, |
| { |
| "epoch": 2.36, |
| "learning_rate": 0.00013866813742258115, |
| "loss": 0.3707, |
| "step": 881 |
| }, |
| { |
| "epoch": 2.36, |
| "learning_rate": 0.000138534952923706, |
| "loss": 0.3288, |
| "step": 882 |
| }, |
| { |
| "epoch": 2.36, |
| "learning_rate": 0.00013840168810242274, |
| "loss": 0.3843, |
| "step": 883 |
| }, |
| { |
| "epoch": 2.36, |
| "learning_rate": 0.000138268343236509, |
| "loss": 0.4287, |
| "step": 884 |
| }, |
| { |
| "epoch": 2.37, |
| "learning_rate": 0.00013813491860390937, |
| "loss": 0.3539, |
| "step": 885 |
| }, |
| { |
| "epoch": 2.37, |
| "learning_rate": 0.0001380014144827347, |
| "loss": 0.3559, |
| "step": 886 |
| }, |
| { |
| "epoch": 2.37, |
| "learning_rate": 0.00013786783115126152, |
| "loss": 0.3816, |
| "step": 887 |
| }, |
| { |
| "epoch": 2.37, |
| "learning_rate": 0.00013773416888793145, |
| "loss": 0.3923, |
| "step": 888 |
| }, |
| { |
| "epoch": 2.38, |
| "learning_rate": 0.0001376004279713506, |
| "loss": 0.405, |
| "step": 889 |
| }, |
| { |
| "epoch": 2.38, |
| "learning_rate": 0.0001374666086802891, |
| "loss": 0.4154, |
| "step": 890 |
| }, |
| { |
| "epoch": 2.38, |
| "learning_rate": 0.00013733271129368041, |
| "loss": 0.3311, |
| "step": 891 |
| }, |
| { |
| "epoch": 2.39, |
| "learning_rate": 0.00013719873609062077, |
| "loss": 0.3643, |
| "step": 892 |
| }, |
| { |
| "epoch": 2.39, |
| "learning_rate": 0.0001370646833503686, |
| "loss": 0.3804, |
| "step": 893 |
| }, |
| { |
| "epoch": 2.39, |
| "learning_rate": 0.00013693055335234396, |
| "loss": 0.3308, |
| "step": 894 |
| }, |
| { |
| "epoch": 2.39, |
| "learning_rate": 0.00013679634637612798, |
| "loss": 0.342, |
| "step": 895 |
| }, |
| { |
| "epoch": 2.4, |
| "learning_rate": 0.00013666206270146223, |
| "loss": 0.3859, |
| "step": 896 |
| }, |
| { |
| "epoch": 2.4, |
| "learning_rate": 0.00013652770260824806, |
| "loss": 0.3829, |
| "step": 897 |
| }, |
| { |
| "epoch": 2.4, |
| "learning_rate": 0.0001363932663765462, |
| "loss": 0.382, |
| "step": 898 |
| }, |
| { |
| "epoch": 2.4, |
| "learning_rate": 0.00013625875428657612, |
| "loss": 0.3978, |
| "step": 899 |
| }, |
| { |
| "epoch": 2.41, |
| "learning_rate": 0.00013612416661871533, |
| "loss": 0.358, |
| "step": 900 |
| }, |
| { |
| "epoch": 2.41, |
| "learning_rate": 0.00013598950365349883, |
| "loss": 0.3575, |
| "step": 901 |
| }, |
| { |
| "epoch": 2.41, |
| "learning_rate": 0.0001358547656716187, |
| "loss": 0.3871, |
| "step": 902 |
| }, |
| { |
| "epoch": 2.41, |
| "learning_rate": 0.0001357199529539233, |
| "loss": 0.3887, |
| "step": 903 |
| }, |
| { |
| "epoch": 2.42, |
| "learning_rate": 0.00013558506578141682, |
| "loss": 0.356, |
| "step": 904 |
| }, |
| { |
| "epoch": 2.42, |
| "learning_rate": 0.0001354501044352586, |
| "loss": 0.399, |
| "step": 905 |
| }, |
| { |
| "epoch": 2.42, |
| "learning_rate": 0.00013531506919676258, |
| "loss": 0.3844, |
| "step": 906 |
| }, |
| { |
| "epoch": 2.43, |
| "learning_rate": 0.00013517996034739677, |
| "loss": 0.3412, |
| "step": 907 |
| }, |
| { |
| "epoch": 2.43, |
| "learning_rate": 0.0001350447781687826, |
| "loss": 0.3614, |
| "step": 908 |
| }, |
| { |
| "epoch": 2.43, |
| "learning_rate": 0.0001349095229426943, |
| "loss": 0.3453, |
| "step": 909 |
| }, |
| { |
| "epoch": 2.43, |
| "learning_rate": 0.00013477419495105844, |
| "loss": 0.3328, |
| "step": 910 |
| }, |
| { |
| "epoch": 2.44, |
| "learning_rate": 0.00013463879447595315, |
| "loss": 0.3297, |
| "step": 911 |
| }, |
| { |
| "epoch": 2.44, |
| "learning_rate": 0.0001345033217996078, |
| "loss": 0.3747, |
| "step": 912 |
| }, |
| { |
| "epoch": 2.44, |
| "learning_rate": 0.00013436777720440214, |
| "loss": 0.3766, |
| "step": 913 |
| }, |
| { |
| "epoch": 2.44, |
| "learning_rate": 0.00013423216097286584, |
| "loss": 0.3846, |
| "step": 914 |
| }, |
| { |
| "epoch": 2.45, |
| "learning_rate": 0.00013409647338767793, |
| "loss": 0.3505, |
| "step": 915 |
| }, |
| { |
| "epoch": 2.45, |
| "learning_rate": 0.00013396071473166613, |
| "loss": 0.3893, |
| "step": 916 |
| }, |
| { |
| "epoch": 2.45, |
| "learning_rate": 0.00013382488528780639, |
| "loss": 0.3748, |
| "step": 917 |
| }, |
| { |
| "epoch": 2.45, |
| "learning_rate": 0.000133688985339222, |
| "loss": 0.3753, |
| "step": 918 |
| }, |
| { |
| "epoch": 2.46, |
| "learning_rate": 0.00013355301516918347, |
| "loss": 0.3976, |
| "step": 919 |
| }, |
| { |
| "epoch": 2.46, |
| "learning_rate": 0.00013341697506110754, |
| "loss": 0.3487, |
| "step": 920 |
| }, |
| { |
| "epoch": 2.46, |
| "learning_rate": 0.00013328086529855674, |
| "loss": 0.3516, |
| "step": 921 |
| }, |
| { |
| "epoch": 2.47, |
| "learning_rate": 0.00013314468616523875, |
| "loss": 0.3566, |
| "step": 922 |
| }, |
| { |
| "epoch": 2.47, |
| "learning_rate": 0.00013300843794500591, |
| "loss": 0.3814, |
| "step": 923 |
| }, |
| { |
| "epoch": 2.47, |
| "learning_rate": 0.00013287212092185464, |
| "loss": 0.349, |
| "step": 924 |
| }, |
| { |
| "epoch": 2.47, |
| "learning_rate": 0.00013273573537992455, |
| "loss": 0.3522, |
| "step": 925 |
| }, |
| { |
| "epoch": 2.48, |
| "learning_rate": 0.00013259928160349831, |
| "loss": 0.3714, |
| "step": 926 |
| }, |
| { |
| "epoch": 2.48, |
| "learning_rate": 0.00013246275987700064, |
| "loss": 0.3675, |
| "step": 927 |
| }, |
| { |
| "epoch": 2.48, |
| "learning_rate": 0.000132326170484998, |
| "loss": 0.3739, |
| "step": 928 |
| }, |
| { |
| "epoch": 2.48, |
| "learning_rate": 0.00013218951371219784, |
| "loss": 0.393, |
| "step": 929 |
| }, |
| { |
| "epoch": 2.49, |
| "learning_rate": 0.00013205278984344812, |
| "loss": 0.3222, |
| "step": 930 |
| }, |
| { |
| "epoch": 2.49, |
| "learning_rate": 0.0001319159991637365, |
| "loss": 0.3678, |
| "step": 931 |
| }, |
| { |
| "epoch": 2.49, |
| "learning_rate": 0.00013177914195819016, |
| "loss": 0.3564, |
| "step": 932 |
| }, |
| { |
| "epoch": 2.49, |
| "learning_rate": 0.00013164221851207474, |
| "loss": 0.3934, |
| "step": 933 |
| }, |
| { |
| "epoch": 2.5, |
| "learning_rate": 0.00013150522911079397, |
| "loss": 0.3702, |
| "step": 934 |
| }, |
| { |
| "epoch": 2.5, |
| "learning_rate": 0.00013136817403988917, |
| "loss": 0.3609, |
| "step": 935 |
| }, |
| { |
| "epoch": 2.5, |
| "learning_rate": 0.0001312310535850384, |
| "loss": 0.3687, |
| "step": 936 |
| }, |
| { |
| "epoch": 2.51, |
| "learning_rate": 0.00013109386803205613, |
| "loss": 0.3719, |
| "step": 937 |
| }, |
| { |
| "epoch": 2.51, |
| "learning_rate": 0.00013095661766689246, |
| "loss": 0.3949, |
| "step": 938 |
| }, |
| { |
| "epoch": 2.51, |
| "learning_rate": 0.0001308193027756326, |
| "loss": 0.3575, |
| "step": 939 |
| }, |
| { |
| "epoch": 2.51, |
| "learning_rate": 0.00013068192364449618, |
| "loss": 0.3701, |
| "step": 940 |
| }, |
| { |
| "epoch": 2.52, |
| "learning_rate": 0.00013054448055983692, |
| "loss": 0.3691, |
| "step": 941 |
| }, |
| { |
| "epoch": 2.52, |
| "learning_rate": 0.00013040697380814164, |
| "loss": 0.387, |
| "step": 942 |
| }, |
| { |
| "epoch": 2.52, |
| "learning_rate": 0.00013026940367603, |
| "loss": 0.3538, |
| "step": 943 |
| }, |
| { |
| "epoch": 2.52, |
| "learning_rate": 0.00013013177045025374, |
| "loss": 0.3385, |
| "step": 944 |
| }, |
| { |
| "epoch": 2.53, |
| "learning_rate": 0.00012999407441769602, |
| "loss": 0.3683, |
| "step": 945 |
| }, |
| { |
| "epoch": 2.53, |
| "learning_rate": 0.00012985631586537107, |
| "loss": 0.3885, |
| "step": 946 |
| }, |
| { |
| "epoch": 2.53, |
| "learning_rate": 0.00012971849508042337, |
| "loss": 0.3415, |
| "step": 947 |
| }, |
| { |
| "epoch": 2.53, |
| "learning_rate": 0.00012958061235012706, |
| "loss": 0.3672, |
| "step": 948 |
| }, |
| { |
| "epoch": 2.54, |
| "learning_rate": 0.00012944266796188547, |
| "loss": 0.4045, |
| "step": 949 |
| }, |
| { |
| "epoch": 2.54, |
| "learning_rate": 0.0001293046622032304, |
| "loss": 0.3768, |
| "step": 950 |
| }, |
| { |
| "epoch": 2.54, |
| "learning_rate": 0.00012916659536182164, |
| "loss": 0.3832, |
| "step": 951 |
| }, |
| { |
| "epoch": 2.55, |
| "learning_rate": 0.00012902846772544624, |
| "loss": 0.3609, |
| "step": 952 |
| }, |
| { |
| "epoch": 2.55, |
| "learning_rate": 0.000128890279582018, |
| "loss": 0.3973, |
| "step": 953 |
| }, |
| { |
| "epoch": 2.55, |
| "learning_rate": 0.00012875203121957683, |
| "loss": 0.3837, |
| "step": 954 |
| }, |
| { |
| "epoch": 2.55, |
| "learning_rate": 0.00012861372292628814, |
| "loss": 0.3984, |
| "step": 955 |
| }, |
| { |
| "epoch": 2.56, |
| "learning_rate": 0.0001284753549904423, |
| "loss": 0.3834, |
| "step": 956 |
| }, |
| { |
| "epoch": 2.56, |
| "learning_rate": 0.00012833692770045403, |
| "loss": 0.3682, |
| "step": 957 |
| }, |
| { |
| "epoch": 2.56, |
| "learning_rate": 0.00012819844134486167, |
| "loss": 0.4049, |
| "step": 958 |
| }, |
| { |
| "epoch": 2.56, |
| "learning_rate": 0.0001280598962123267, |
| "loss": 0.3446, |
| "step": 959 |
| }, |
| { |
| "epoch": 2.57, |
| "learning_rate": 0.00012792129259163318, |
| "loss": 0.3568, |
| "step": 960 |
| }, |
| { |
| "epoch": 2.57, |
| "learning_rate": 0.00012778263077168703, |
| "loss": 0.3754, |
| "step": 961 |
| }, |
| { |
| "epoch": 2.57, |
| "learning_rate": 0.00012764391104151554, |
| "loss": 0.3741, |
| "step": 962 |
| }, |
| { |
| "epoch": 2.57, |
| "learning_rate": 0.00012750513369026657, |
| "loss": 0.3859, |
| "step": 963 |
| }, |
| { |
| "epoch": 2.58, |
| "learning_rate": 0.0001273662990072083, |
| "loss": 0.367, |
| "step": 964 |
| }, |
| { |
| "epoch": 2.58, |
| "learning_rate": 0.0001272274072817282, |
| "loss": 0.3378, |
| "step": 965 |
| }, |
| { |
| "epoch": 2.58, |
| "learning_rate": 0.00012708845880333278, |
| "loss": 0.392, |
| "step": 966 |
| }, |
| { |
| "epoch": 2.59, |
| "learning_rate": 0.00012694945386164673, |
| "loss": 0.397, |
| "step": 967 |
| }, |
| { |
| "epoch": 2.59, |
| "learning_rate": 0.0001268103927464126, |
| "loss": 0.3434, |
| "step": 968 |
| }, |
| { |
| "epoch": 2.59, |
| "learning_rate": 0.00012667127574748986, |
| "loss": 0.3837, |
| "step": 969 |
| }, |
| { |
| "epoch": 2.59, |
| "learning_rate": 0.00012653210315485453, |
| "loss": 0.3585, |
| "step": 970 |
| }, |
| { |
| "epoch": 2.6, |
| "learning_rate": 0.00012639287525859855, |
| "loss": 0.3515, |
| "step": 971 |
| }, |
| { |
| "epoch": 2.6, |
| "learning_rate": 0.00012625359234892907, |
| "loss": 0.3606, |
| "step": 972 |
| }, |
| { |
| "epoch": 2.6, |
| "learning_rate": 0.00012611425471616795, |
| "loss": 0.3456, |
| "step": 973 |
| }, |
| { |
| "epoch": 2.6, |
| "learning_rate": 0.0001259748626507511, |
| "loss": 0.3973, |
| "step": 974 |
| }, |
| { |
| "epoch": 2.61, |
| "learning_rate": 0.0001258354164432279, |
| "loss": 0.3958, |
| "step": 975 |
| }, |
| { |
| "epoch": 2.61, |
| "learning_rate": 0.00012569591638426052, |
| "loss": 0.3837, |
| "step": 976 |
| }, |
| { |
| "epoch": 2.61, |
| "learning_rate": 0.00012555636276462355, |
| "loss": 0.35, |
| "step": 977 |
| }, |
| { |
| "epoch": 2.61, |
| "learning_rate": 0.00012541675587520297, |
| "loss": 0.3358, |
| "step": 978 |
| }, |
| { |
| "epoch": 2.62, |
| "learning_rate": 0.00012527709600699605, |
| "loss": 0.3499, |
| "step": 979 |
| }, |
| { |
| "epoch": 2.62, |
| "learning_rate": 0.0001251373834511103, |
| "loss": 0.3878, |
| "step": 980 |
| }, |
| { |
| "epoch": 2.62, |
| "learning_rate": 0.00012499761849876312, |
| "loss": 0.3819, |
| "step": 981 |
| }, |
| { |
| "epoch": 2.63, |
| "learning_rate": 0.00012485780144128115, |
| "loss": 0.3698, |
| "step": 982 |
| }, |
| { |
| "epoch": 2.63, |
| "learning_rate": 0.00012471793257009964, |
| "loss": 0.377, |
| "step": 983 |
| }, |
| { |
| "epoch": 2.63, |
| "learning_rate": 0.00012457801217676182, |
| "loss": 0.3713, |
| "step": 984 |
| }, |
| { |
| "epoch": 2.63, |
| "learning_rate": 0.00012443804055291825, |
| "loss": 0.3603, |
| "step": 985 |
| }, |
| { |
| "epoch": 2.64, |
| "learning_rate": 0.0001242980179903264, |
| "loss": 0.3769, |
| "step": 986 |
| }, |
| { |
| "epoch": 2.64, |
| "learning_rate": 0.0001241579447808498, |
| "loss": 0.3836, |
| "step": 987 |
| }, |
| { |
| "epoch": 2.64, |
| "learning_rate": 0.00012401782121645766, |
| "loss": 0.3284, |
| "step": 988 |
| }, |
| { |
| "epoch": 2.64, |
| "learning_rate": 0.00012387764758922405, |
| "loss": 0.4112, |
| "step": 989 |
| }, |
| { |
| "epoch": 2.65, |
| "learning_rate": 0.00012373742419132742, |
| "loss": 0.3704, |
| "step": 990 |
| }, |
| { |
| "epoch": 2.65, |
| "learning_rate": 0.00012359715131505, |
| "loss": 0.3641, |
| "step": 991 |
| }, |
| { |
| "epoch": 2.65, |
| "learning_rate": 0.00012345682925277716, |
| "loss": 0.3682, |
| "step": 992 |
| }, |
| { |
| "epoch": 2.66, |
| "learning_rate": 0.0001233164582969967, |
| "loss": 0.3679, |
| "step": 993 |
| }, |
| { |
| "epoch": 2.66, |
| "learning_rate": 0.00012317603874029842, |
| "loss": 0.4003, |
| "step": 994 |
| }, |
| { |
| "epoch": 2.66, |
| "learning_rate": 0.0001230355708753734, |
| "loss": 0.3972, |
| "step": 995 |
| }, |
| { |
| "epoch": 2.66, |
| "learning_rate": 0.0001228950549950134, |
| "loss": 0.3652, |
| "step": 996 |
| }, |
| { |
| "epoch": 2.67, |
| "learning_rate": 0.00012275449139211034, |
| "loss": 0.3713, |
| "step": 997 |
| }, |
| { |
| "epoch": 2.67, |
| "learning_rate": 0.00012261388035965544, |
| "loss": 0.3728, |
| "step": 998 |
| }, |
| { |
| "epoch": 2.67, |
| "learning_rate": 0.00012247322219073898, |
| "loss": 0.3504, |
| "step": 999 |
| }, |
| { |
| "epoch": 2.67, |
| "learning_rate": 0.00012233251717854937, |
| "loss": 0.3229, |
| "step": 1000 |
| }, |
| { |
| "epoch": 2.68, |
| "learning_rate": 0.00012219176561637266, |
| "loss": 0.3569, |
| "step": 1001 |
| }, |
| { |
| "epoch": 2.68, |
| "learning_rate": 0.00012205096779759206, |
| "loss": 0.3671, |
| "step": 1002 |
| }, |
| { |
| "epoch": 2.68, |
| "learning_rate": 0.00012191012401568698, |
| "loss": 0.3828, |
| "step": 1003 |
| }, |
| { |
| "epoch": 2.68, |
| "learning_rate": 0.00012176923456423284, |
| "loss": 0.3465, |
| "step": 1004 |
| }, |
| { |
| "epoch": 2.69, |
| "learning_rate": 0.00012162829973690016, |
| "loss": 0.3685, |
| "step": 1005 |
| }, |
| { |
| "epoch": 2.69, |
| "learning_rate": 0.000121487319827454, |
| "loss": 0.3857, |
| "step": 1006 |
| }, |
| { |
| "epoch": 2.69, |
| "learning_rate": 0.00012134629512975351, |
| "loss": 0.4063, |
| "step": 1007 |
| }, |
| { |
| "epoch": 2.7, |
| "learning_rate": 0.00012120522593775108, |
| "loss": 0.3328, |
| "step": 1008 |
| }, |
| { |
| "epoch": 2.7, |
| "learning_rate": 0.0001210641125454919, |
| "loss": 0.3603, |
| "step": 1009 |
| }, |
| { |
| "epoch": 2.7, |
| "learning_rate": 0.0001209229552471133, |
| "loss": 0.392, |
| "step": 1010 |
| }, |
| { |
| "epoch": 2.7, |
| "learning_rate": 0.00012078175433684407, |
| "loss": 0.338, |
| "step": 1011 |
| }, |
| { |
| "epoch": 2.71, |
| "learning_rate": 0.00012064051010900397, |
| "loss": 0.3654, |
| "step": 1012 |
| }, |
| { |
| "epoch": 2.71, |
| "learning_rate": 0.00012049922285800306, |
| "loss": 0.3567, |
| "step": 1013 |
| }, |
| { |
| "epoch": 2.71, |
| "learning_rate": 0.00012035789287834098, |
| "loss": 0.382, |
| "step": 1014 |
| }, |
| { |
| "epoch": 2.71, |
| "learning_rate": 0.00012021652046460658, |
| "loss": 0.3582, |
| "step": 1015 |
| }, |
| { |
| "epoch": 2.72, |
| "learning_rate": 0.00012007510591147697, |
| "loss": 0.4132, |
| "step": 1016 |
| }, |
| { |
| "epoch": 2.72, |
| "learning_rate": 0.00011993364951371732, |
| "loss": 0.3965, |
| "step": 1017 |
| }, |
| { |
| "epoch": 2.72, |
| "learning_rate": 0.0001197921515661798, |
| "loss": 0.3589, |
| "step": 1018 |
| }, |
| { |
| "epoch": 2.72, |
| "learning_rate": 0.00011965061236380335, |
| "loss": 0.3622, |
| "step": 1019 |
| }, |
| { |
| "epoch": 2.73, |
| "learning_rate": 0.00011950903220161285, |
| "loss": 0.3477, |
| "step": 1020 |
| }, |
| { |
| "epoch": 2.73, |
| "learning_rate": 0.0001193674113747185, |
| "loss": 0.3681, |
| "step": 1021 |
| }, |
| { |
| "epoch": 2.73, |
| "learning_rate": 0.00011922575017831537, |
| "loss": 0.365, |
| "step": 1022 |
| }, |
| { |
| "epoch": 2.74, |
| "learning_rate": 0.00011908404890768254, |
| "loss": 0.3736, |
| "step": 1023 |
| }, |
| { |
| "epoch": 2.74, |
| "learning_rate": 0.00011894230785818284, |
| "loss": 0.3588, |
| "step": 1024 |
| }, |
| { |
| "epoch": 2.74, |
| "learning_rate": 0.0001188005273252617, |
| "loss": 0.385, |
| "step": 1025 |
| }, |
| { |
| "epoch": 2.74, |
| "learning_rate": 0.00011865870760444715, |
| "loss": 0.392, |
| "step": 1026 |
| }, |
| { |
| "epoch": 2.75, |
| "learning_rate": 0.0001185168489913487, |
| "loss": 0.3522, |
| "step": 1027 |
| }, |
| { |
| "epoch": 2.75, |
| "learning_rate": 0.00011837495178165706, |
| "loss": 0.3872, |
| "step": 1028 |
| }, |
| { |
| "epoch": 2.75, |
| "learning_rate": 0.00011823301627114326, |
| "loss": 0.4107, |
| "step": 1029 |
| }, |
| { |
| "epoch": 2.75, |
| "learning_rate": 0.00011809104275565834, |
| "loss": 0.3362, |
| "step": 1030 |
| }, |
| { |
| "epoch": 2.76, |
| "learning_rate": 0.00011794903153113239, |
| "loss": 0.3479, |
| "step": 1031 |
| }, |
| { |
| "epoch": 2.76, |
| "learning_rate": 0.00011780698289357419, |
| "loss": 0.3763, |
| "step": 1032 |
| }, |
| { |
| "epoch": 2.76, |
| "learning_rate": 0.00011766489713907047, |
| "loss": 0.3756, |
| "step": 1033 |
| }, |
| { |
| "epoch": 2.76, |
| "learning_rate": 0.00011752277456378535, |
| "loss": 0.3976, |
| "step": 1034 |
| }, |
| { |
| "epoch": 2.77, |
| "learning_rate": 0.00011738061546395967, |
| "loss": 0.3544, |
| "step": 1035 |
| }, |
| { |
| "epoch": 2.77, |
| "learning_rate": 0.00011723842013591044, |
| "loss": 0.3669, |
| "step": 1036 |
| }, |
| { |
| "epoch": 2.77, |
| "learning_rate": 0.00011709618887603014, |
| "loss": 0.3829, |
| "step": 1037 |
| }, |
| { |
| "epoch": 2.78, |
| "learning_rate": 0.00011695392198078617, |
| "loss": 0.3663, |
| "step": 1038 |
| }, |
| { |
| "epoch": 2.78, |
| "learning_rate": 0.00011681161974672026, |
| "loss": 0.3488, |
| "step": 1039 |
| }, |
| { |
| "epoch": 2.78, |
| "learning_rate": 0.00011666928247044768, |
| "loss": 0.3499, |
| "step": 1040 |
| }, |
| { |
| "epoch": 2.78, |
| "learning_rate": 0.00011652691044865687, |
| "loss": 0.3905, |
| "step": 1041 |
| }, |
| { |
| "epoch": 2.79, |
| "learning_rate": 0.00011638450397810858, |
| "loss": 0.366, |
| "step": 1042 |
| }, |
| { |
| "epoch": 2.79, |
| "learning_rate": 0.00011624206335563545, |
| "loss": 0.3989, |
| "step": 1043 |
| }, |
| { |
| "epoch": 2.79, |
| "learning_rate": 0.00011609958887814129, |
| "loss": 0.3695, |
| "step": 1044 |
| }, |
| { |
| "epoch": 2.79, |
| "learning_rate": 0.00011595708084260044, |
| "loss": 0.3694, |
| "step": 1045 |
| }, |
| { |
| "epoch": 2.8, |
| "learning_rate": 0.00011581453954605723, |
| "loss": 0.3607, |
| "step": 1046 |
| }, |
| { |
| "epoch": 2.8, |
| "learning_rate": 0.0001156719652856253, |
| "loss": 0.3646, |
| "step": 1047 |
| }, |
| { |
| "epoch": 2.8, |
| "learning_rate": 0.00011552935835848697, |
| "loss": 0.4037, |
| "step": 1048 |
| }, |
| { |
| "epoch": 2.8, |
| "learning_rate": 0.00011538671906189271, |
| "loss": 0.3808, |
| "step": 1049 |
| }, |
| { |
| "epoch": 2.81, |
| "learning_rate": 0.00011524404769316041, |
| "loss": 0.3805, |
| "step": 1050 |
| }, |
| { |
| "epoch": 2.81, |
| "learning_rate": 0.00011510134454967492, |
| "loss": 0.3661, |
| "step": 1051 |
| }, |
| { |
| "epoch": 2.81, |
| "learning_rate": 0.00011495860992888712, |
| "loss": 0.3702, |
| "step": 1052 |
| }, |
| { |
| "epoch": 2.82, |
| "learning_rate": 0.0001148158441283137, |
| "loss": 0.3696, |
| "step": 1053 |
| }, |
| { |
| "epoch": 2.82, |
| "learning_rate": 0.00011467304744553618, |
| "loss": 0.3452, |
| "step": 1054 |
| }, |
| { |
| "epoch": 2.82, |
| "learning_rate": 0.00011453022017820061, |
| "loss": 0.3544, |
| "step": 1055 |
| }, |
| { |
| "epoch": 2.82, |
| "learning_rate": 0.00011438736262401669, |
| "loss": 0.3626, |
| "step": 1056 |
| }, |
| { |
| "epoch": 2.83, |
| "learning_rate": 0.00011424447508075722, |
| "loss": 0.3465, |
| "step": 1057 |
| }, |
| { |
| "epoch": 2.83, |
| "learning_rate": 0.00011410155784625763, |
| "loss": 0.3532, |
| "step": 1058 |
| }, |
| { |
| "epoch": 2.83, |
| "learning_rate": 0.00011395861121841514, |
| "loss": 0.3902, |
| "step": 1059 |
| }, |
| { |
| "epoch": 2.83, |
| "learning_rate": 0.00011381563549518823, |
| "loss": 0.3956, |
| "step": 1060 |
| }, |
| { |
| "epoch": 2.84, |
| "learning_rate": 0.0001136726309745961, |
| "loss": 0.3807, |
| "step": 1061 |
| }, |
| { |
| "epoch": 2.84, |
| "learning_rate": 0.00011352959795471798, |
| "loss": 0.3872, |
| "step": 1062 |
| }, |
| { |
| "epoch": 2.84, |
| "learning_rate": 0.00011338653673369235, |
| "loss": 0.3352, |
| "step": 1063 |
| }, |
| { |
| "epoch": 2.84, |
| "learning_rate": 0.00011324344760971671, |
| "loss": 0.357, |
| "step": 1064 |
| }, |
| { |
| "epoch": 2.85, |
| "learning_rate": 0.00011310033088104648, |
| "loss": 0.3745, |
| "step": 1065 |
| }, |
| { |
| "epoch": 2.85, |
| "learning_rate": 0.00011295718684599485, |
| "loss": 0.3769, |
| "step": 1066 |
| }, |
| { |
| "epoch": 2.85, |
| "learning_rate": 0.0001128140158029317, |
| "loss": 0.4095, |
| "step": 1067 |
| }, |
| { |
| "epoch": 2.86, |
| "learning_rate": 0.00011267081805028339, |
| "loss": 0.3746, |
| "step": 1068 |
| }, |
| { |
| "epoch": 2.86, |
| "learning_rate": 0.00011252759388653187, |
| "loss": 0.3818, |
| "step": 1069 |
| }, |
| { |
| "epoch": 2.86, |
| "learning_rate": 0.00011238434361021412, |
| "loss": 0.3665, |
| "step": 1070 |
| }, |
| { |
| "epoch": 2.86, |
| "learning_rate": 0.00011224106751992163, |
| "loss": 0.3427, |
| "step": 1071 |
| }, |
| { |
| "epoch": 2.87, |
| "learning_rate": 0.00011209776591429962, |
| "loss": 0.3736, |
| "step": 1072 |
| }, |
| { |
| "epoch": 2.87, |
| "learning_rate": 0.00011195443909204653, |
| "loss": 0.3536, |
| "step": 1073 |
| }, |
| { |
| "epoch": 2.87, |
| "learning_rate": 0.00011181108735191332, |
| "loss": 0.3759, |
| "step": 1074 |
| }, |
| { |
| "epoch": 2.87, |
| "learning_rate": 0.00011166771099270304, |
| "loss": 0.3745, |
| "step": 1075 |
| }, |
| { |
| "epoch": 2.88, |
| "learning_rate": 0.00011152431031326978, |
| "loss": 0.3383, |
| "step": 1076 |
| }, |
| { |
| "epoch": 2.88, |
| "learning_rate": 0.0001113808856125186, |
| "loss": 0.393, |
| "step": 1077 |
| }, |
| { |
| "epoch": 2.88, |
| "learning_rate": 0.00011123743718940442, |
| "loss": 0.3583, |
| "step": 1078 |
| }, |
| { |
| "epoch": 2.89, |
| "learning_rate": 0.0001110939653429318, |
| "loss": 0.3314, |
| "step": 1079 |
| }, |
| { |
| "epoch": 2.89, |
| "learning_rate": 0.00011095047037215396, |
| "loss": 0.343, |
| "step": 1080 |
| }, |
| { |
| "epoch": 2.89, |
| "learning_rate": 0.00011080695257617242, |
| "loss": 0.387, |
| "step": 1081 |
| }, |
| { |
| "epoch": 2.89, |
| "learning_rate": 0.00011066341225413622, |
| "loss": 0.3644, |
| "step": 1082 |
| }, |
| { |
| "epoch": 2.9, |
| "learning_rate": 0.00011051984970524134, |
| "loss": 0.3741, |
| "step": 1083 |
| }, |
| { |
| "epoch": 2.9, |
| "learning_rate": 0.00011037626522873019, |
| "loss": 0.3889, |
| "step": 1084 |
| }, |
| { |
| "epoch": 2.9, |
| "learning_rate": 0.0001102326591238908, |
| "loss": 0.3769, |
| "step": 1085 |
| }, |
| { |
| "epoch": 2.9, |
| "learning_rate": 0.00011008903169005627, |
| "loss": 0.3296, |
| "step": 1086 |
| }, |
| { |
| "epoch": 2.91, |
| "learning_rate": 0.00010994538322660424, |
| "loss": 0.387, |
| "step": 1087 |
| }, |
| { |
| "epoch": 2.91, |
| "learning_rate": 0.0001098017140329561, |
| "loss": 0.3979, |
| "step": 1088 |
| }, |
| { |
| "epoch": 2.91, |
| "learning_rate": 0.00010965802440857644, |
| "loss": 0.4049, |
| "step": 1089 |
| }, |
| { |
| "epoch": 2.91, |
| "learning_rate": 0.00010951431465297259, |
| "loss": 0.3412, |
| "step": 1090 |
| }, |
| { |
| "epoch": 2.92, |
| "learning_rate": 0.00010937058506569366, |
| "loss": 0.3353, |
| "step": 1091 |
| }, |
| { |
| "epoch": 2.92, |
| "learning_rate": 0.00010922683594633021, |
| "loss": 0.3489, |
| "step": 1092 |
| }, |
| { |
| "epoch": 2.92, |
| "learning_rate": 0.00010908306759451343, |
| "loss": 0.3857, |
| "step": 1093 |
| }, |
| { |
| "epoch": 2.93, |
| "learning_rate": 0.00010893928030991467, |
| "loss": 0.3367, |
| "step": 1094 |
| }, |
| { |
| "epoch": 2.93, |
| "learning_rate": 0.00010879547439224472, |
| "loss": 0.3835, |
| "step": 1095 |
| }, |
| { |
| "epoch": 2.93, |
| "learning_rate": 0.00010865165014125316, |
| "loss": 0.3966, |
| "step": 1096 |
| }, |
| { |
| "epoch": 2.93, |
| "learning_rate": 0.00010850780785672785, |
| "loss": 0.3609, |
| "step": 1097 |
| }, |
| { |
| "epoch": 2.94, |
| "learning_rate": 0.00010836394783849423, |
| "loss": 0.3655, |
| "step": 1098 |
| }, |
| { |
| "epoch": 2.94, |
| "learning_rate": 0.00010822007038641466, |
| "loss": 0.3813, |
| "step": 1099 |
| }, |
| { |
| "epoch": 2.94, |
| "learning_rate": 0.00010807617580038796, |
| "loss": 0.3663, |
| "step": 1100 |
| }, |
| { |
| "epoch": 2.94, |
| "learning_rate": 0.00010793226438034842, |
| "loss": 0.3991, |
| "step": 1101 |
| }, |
| { |
| "epoch": 2.95, |
| "learning_rate": 0.00010778833642626573, |
| "loss": 0.359, |
| "step": 1102 |
| }, |
| { |
| "epoch": 2.95, |
| "learning_rate": 0.00010764439223814378, |
| "loss": 0.3457, |
| "step": 1103 |
| }, |
| { |
| "epoch": 2.95, |
| "learning_rate": 0.00010750043211602045, |
| "loss": 0.3539, |
| "step": 1104 |
| }, |
| { |
| "epoch": 2.95, |
| "learning_rate": 0.00010735645635996676, |
| "loss": 0.3888, |
| "step": 1105 |
| }, |
| { |
| "epoch": 2.96, |
| "learning_rate": 0.00010721246527008638, |
| "loss": 0.3691, |
| "step": 1106 |
| }, |
| { |
| "epoch": 2.96, |
| "learning_rate": 0.00010706845914651486, |
| "loss": 0.3495, |
| "step": 1107 |
| }, |
| { |
| "epoch": 2.96, |
| "learning_rate": 0.00010692443828941918, |
| "loss": 0.3363, |
| "step": 1108 |
| }, |
| { |
| "epoch": 2.97, |
| "learning_rate": 0.00010678040299899697, |
| "loss": 0.369, |
| "step": 1109 |
| }, |
| { |
| "epoch": 2.97, |
| "learning_rate": 0.00010663635357547592, |
| "loss": 0.3697, |
| "step": 1110 |
| }, |
| { |
| "epoch": 2.97, |
| "learning_rate": 0.0001064922903191133, |
| "loss": 0.3322, |
| "step": 1111 |
| }, |
| { |
| "epoch": 2.97, |
| "learning_rate": 0.00010634821353019504, |
| "loss": 0.3424, |
| "step": 1112 |
| }, |
| { |
| "epoch": 2.98, |
| "learning_rate": 0.00010620412350903544, |
| "loss": 0.3584, |
| "step": 1113 |
| }, |
| { |
| "epoch": 2.98, |
| "learning_rate": 0.00010606002055597626, |
| "loss": 0.3377, |
| "step": 1114 |
| }, |
| { |
| "epoch": 2.98, |
| "learning_rate": 0.0001059159049713863, |
| "loss": 0.3591, |
| "step": 1115 |
| }, |
| { |
| "epoch": 2.98, |
| "learning_rate": 0.00010577177705566061, |
| "loss": 0.4015, |
| "step": 1116 |
| }, |
| { |
| "epoch": 2.99, |
| "learning_rate": 0.00010562763710922003, |
| "loss": 0.3318, |
| "step": 1117 |
| }, |
| { |
| "epoch": 2.99, |
| "learning_rate": 0.00010548348543251042, |
| "loss": 0.3376, |
| "step": 1118 |
| }, |
| { |
| "epoch": 2.99, |
| "learning_rate": 0.00010533932232600213, |
| "loss": 0.327, |
| "step": 1119 |
| }, |
| { |
| "epoch": 2.99, |
| "learning_rate": 0.00010519514809018927, |
| "loss": 0.3551, |
| "step": 1120 |
| }, |
| { |
| "epoch": 3.0, |
| "learning_rate": 0.00010505096302558918, |
| "loss": 0.3892, |
| "step": 1121 |
| }, |
| { |
| "epoch": 3.0, |
| "learning_rate": 0.00010490676743274181, |
| "loss": 0.3153, |
| "step": 1122 |
| }, |
| { |
| "epoch": 3.0, |
| "learning_rate": 0.000104762561612209, |
| "loss": 0.1766, |
| "step": 1123 |
| }, |
| { |
| "epoch": 3.01, |
| "learning_rate": 0.00010461834586457398, |
| "loss": 0.1747, |
| "step": 1124 |
| }, |
| { |
| "epoch": 3.01, |
| "learning_rate": 0.00010447412049044054, |
| "loss": 0.1943, |
| "step": 1125 |
| }, |
| { |
| "epoch": 3.01, |
| "learning_rate": 0.00010432988579043273, |
| "loss": 0.1828, |
| "step": 1126 |
| }, |
| { |
| "epoch": 3.01, |
| "learning_rate": 0.00010418564206519378, |
| "loss": 0.1967, |
| "step": 1127 |
| }, |
| { |
| "epoch": 3.02, |
| "learning_rate": 0.00010404138961538603, |
| "loss": 0.1585, |
| "step": 1128 |
| }, |
| { |
| "epoch": 3.02, |
| "learning_rate": 0.00010389712874168979, |
| "loss": 0.1764, |
| "step": 1129 |
| }, |
| { |
| "epoch": 3.02, |
| "learning_rate": 0.000103752859744803, |
| "loss": 0.1847, |
| "step": 1130 |
| }, |
| { |
| "epoch": 3.02, |
| "learning_rate": 0.00010360858292544052, |
| "loss": 0.1452, |
| "step": 1131 |
| }, |
| { |
| "epoch": 3.03, |
| "learning_rate": 0.00010346429858433352, |
| "loss": 0.1443, |
| "step": 1132 |
| }, |
| { |
| "epoch": 3.03, |
| "learning_rate": 0.00010332000702222889, |
| "loss": 0.1801, |
| "step": 1133 |
| }, |
| { |
| "epoch": 3.03, |
| "learning_rate": 0.00010317570853988848, |
| "loss": 0.1556, |
| "step": 1134 |
| }, |
| { |
| "epoch": 3.03, |
| "learning_rate": 0.00010303140343808864, |
| "loss": 0.1709, |
| "step": 1135 |
| }, |
| { |
| "epoch": 3.04, |
| "learning_rate": 0.00010288709201761948, |
| "loss": 0.1698, |
| "step": 1136 |
| }, |
| { |
| "epoch": 3.04, |
| "learning_rate": 0.0001027427745792843, |
| "loss": 0.1811, |
| "step": 1137 |
| }, |
| { |
| "epoch": 3.04, |
| "learning_rate": 0.00010259845142389899, |
| "loss": 0.1655, |
| "step": 1138 |
| }, |
| { |
| "epoch": 3.05, |
| "learning_rate": 0.00010245412285229124, |
| "loss": 0.1851, |
| "step": 1139 |
| }, |
| { |
| "epoch": 3.05, |
| "learning_rate": 0.00010230978916530012, |
| "loss": 0.1697, |
| "step": 1140 |
| }, |
| { |
| "epoch": 3.05, |
| "learning_rate": 0.00010216545066377534, |
| "loss": 0.1598, |
| "step": 1141 |
| }, |
| { |
| "epoch": 3.05, |
| "learning_rate": 0.00010202110764857662, |
| "loss": 0.1698, |
| "step": 1142 |
| }, |
| { |
| "epoch": 3.06, |
| "learning_rate": 0.00010187676042057315, |
| "loss": 0.1711, |
| "step": 1143 |
| }, |
| { |
| "epoch": 3.06, |
| "learning_rate": 0.00010173240928064285, |
| "loss": 0.1837, |
| "step": 1144 |
| }, |
| { |
| "epoch": 3.06, |
| "learning_rate": 0.0001015880545296718, |
| "loss": 0.1912, |
| "step": 1145 |
| }, |
| { |
| "epoch": 3.06, |
| "learning_rate": 0.00010144369646855359, |
| "loss": 0.16, |
| "step": 1146 |
| }, |
| { |
| "epoch": 3.07, |
| "learning_rate": 0.00010129933539818876, |
| "loss": 0.1602, |
| "step": 1147 |
| }, |
| { |
| "epoch": 3.07, |
| "learning_rate": 0.00010115497161948409, |
| "loss": 0.1549, |
| "step": 1148 |
| }, |
| { |
| "epoch": 3.07, |
| "learning_rate": 0.00010101060543335204, |
| "loss": 0.1417, |
| "step": 1149 |
| }, |
| { |
| "epoch": 3.07, |
| "learning_rate": 0.00010086623714070998, |
| "loss": 0.1568, |
| "step": 1150 |
| }, |
| { |
| "epoch": 3.08, |
| "learning_rate": 0.00010072186704247986, |
| "loss": 0.1664, |
| "step": 1151 |
| }, |
| { |
| "epoch": 3.08, |
| "learning_rate": 0.00010057749543958717, |
| "loss": 0.1523, |
| "step": 1152 |
| }, |
| { |
| "epoch": 3.08, |
| "learning_rate": 0.00010043312263296074, |
| "loss": 0.1677, |
| "step": 1153 |
| }, |
| { |
| "epoch": 3.09, |
| "learning_rate": 0.00010028874892353175, |
| "loss": 0.1805, |
| "step": 1154 |
| }, |
| { |
| "epoch": 3.09, |
| "learning_rate": 0.00010014437461223339, |
| "loss": 0.1523, |
| "step": 1155 |
| }, |
| { |
| "epoch": 3.09, |
| "learning_rate": 0.0001, |
| "loss": 0.1637, |
| "step": 1156 |
| }, |
| { |
| "epoch": 3.09, |
| "learning_rate": 9.985562538776662e-05, |
| "loss": 0.1596, |
| "step": 1157 |
| }, |
| { |
| "epoch": 3.1, |
| "learning_rate": 9.971125107646825e-05, |
| "loss": 0.1595, |
| "step": 1158 |
| }, |
| { |
| "epoch": 3.1, |
| "learning_rate": 9.95668773670393e-05, |
| "loss": 0.1603, |
| "step": 1159 |
| }, |
| { |
| "epoch": 3.1, |
| "learning_rate": 9.942250456041286e-05, |
| "loss": 0.1464, |
| "step": 1160 |
| }, |
| { |
| "epoch": 3.1, |
| "learning_rate": 9.927813295752016e-05, |
| "loss": 0.1863, |
| "step": 1161 |
| }, |
| { |
| "epoch": 3.11, |
| "learning_rate": 9.913376285929002e-05, |
| "loss": 0.1639, |
| "step": 1162 |
| }, |
| { |
| "epoch": 3.11, |
| "learning_rate": 9.898939456664801e-05, |
| "loss": 0.1711, |
| "step": 1163 |
| }, |
| { |
| "epoch": 3.11, |
| "learning_rate": 9.884502838051595e-05, |
| "loss": 0.1632, |
| "step": 1164 |
| }, |
| { |
| "epoch": 3.11, |
| "learning_rate": 9.870066460181126e-05, |
| "loss": 0.1539, |
| "step": 1165 |
| }, |
| { |
| "epoch": 3.12, |
| "learning_rate": 9.855630353144643e-05, |
| "loss": 0.2028, |
| "step": 1166 |
| }, |
| { |
| "epoch": 3.12, |
| "learning_rate": 9.841194547032825e-05, |
| "loss": 0.1764, |
| "step": 1167 |
| }, |
| { |
| "epoch": 3.12, |
| "learning_rate": 9.826759071935718e-05, |
| "loss": 0.1571, |
| "step": 1168 |
| }, |
| { |
| "epoch": 3.13, |
| "learning_rate": 9.812323957942686e-05, |
| "loss": 0.1558, |
| "step": 1169 |
| }, |
| { |
| "epoch": 3.13, |
| "learning_rate": 9.797889235142337e-05, |
| "loss": 0.1771, |
| "step": 1170 |
| }, |
| { |
| "epoch": 3.13, |
| "learning_rate": 9.783454933622471e-05, |
| "loss": 0.1743, |
| "step": 1171 |
| }, |
| { |
| "epoch": 3.13, |
| "learning_rate": 9.76902108346999e-05, |
| "loss": 0.1873, |
| "step": 1172 |
| }, |
| { |
| "epoch": 3.14, |
| "learning_rate": 9.754587714770878e-05, |
| "loss": 0.1802, |
| "step": 1173 |
| }, |
| { |
| "epoch": 3.14, |
| "learning_rate": 9.740154857610102e-05, |
| "loss": 0.1626, |
| "step": 1174 |
| }, |
| { |
| "epoch": 3.14, |
| "learning_rate": 9.72572254207157e-05, |
| "loss": 0.1601, |
| "step": 1175 |
| }, |
| { |
| "epoch": 3.14, |
| "learning_rate": 9.711290798238057e-05, |
| "loss": 0.1495, |
| "step": 1176 |
| }, |
| { |
| "epoch": 3.15, |
| "learning_rate": 9.69685965619114e-05, |
| "loss": 0.1682, |
| "step": 1177 |
| }, |
| { |
| "epoch": 3.15, |
| "learning_rate": 9.682429146011156e-05, |
| "loss": 0.1709, |
| "step": 1178 |
| }, |
| { |
| "epoch": 3.15, |
| "learning_rate": 9.667999297777112e-05, |
| "loss": 0.1789, |
| "step": 1179 |
| }, |
| { |
| "epoch": 3.16, |
| "learning_rate": 9.653570141566653e-05, |
| "loss": 0.157, |
| "step": 1180 |
| }, |
| { |
| "epoch": 3.16, |
| "learning_rate": 9.63914170745595e-05, |
| "loss": 0.167, |
| "step": 1181 |
| }, |
| { |
| "epoch": 3.16, |
| "learning_rate": 9.624714025519703e-05, |
| "loss": 0.1765, |
| "step": 1182 |
| }, |
| { |
| "epoch": 3.16, |
| "learning_rate": 9.610287125831022e-05, |
| "loss": 0.157, |
| "step": 1183 |
| }, |
| { |
| "epoch": 3.17, |
| "learning_rate": 9.595861038461398e-05, |
| "loss": 0.1578, |
| "step": 1184 |
| }, |
| { |
| "epoch": 3.17, |
| "learning_rate": 9.581435793480623e-05, |
| "loss": 0.1831, |
| "step": 1185 |
| }, |
| { |
| "epoch": 3.17, |
| "learning_rate": 9.567011420956731e-05, |
| "loss": 0.1589, |
| "step": 1186 |
| }, |
| { |
| "epoch": 3.17, |
| "learning_rate": 9.552587950955945e-05, |
| "loss": 0.1569, |
| "step": 1187 |
| }, |
| { |
| "epoch": 3.18, |
| "learning_rate": 9.538165413542607e-05, |
| "loss": 0.1469, |
| "step": 1188 |
| }, |
| { |
| "epoch": 3.18, |
| "learning_rate": 9.523743838779102e-05, |
| "loss": 0.1815, |
| "step": 1189 |
| }, |
| { |
| "epoch": 3.18, |
| "learning_rate": 9.509323256725821e-05, |
| "loss": 0.1709, |
| "step": 1190 |
| }, |
| { |
| "epoch": 3.18, |
| "learning_rate": 9.494903697441083e-05, |
| "loss": 0.1699, |
| "step": 1191 |
| }, |
| { |
| "epoch": 3.19, |
| "learning_rate": 9.480485190981073e-05, |
| "loss": 0.1762, |
| "step": 1192 |
| }, |
| { |
| "epoch": 3.19, |
| "learning_rate": 9.466067767399788e-05, |
| "loss": 0.1931, |
| "step": 1193 |
| }, |
| { |
| "epoch": 3.19, |
| "learning_rate": 9.451651456748958e-05, |
| "loss": 0.1707, |
| "step": 1194 |
| }, |
| { |
| "epoch": 3.2, |
| "learning_rate": 9.437236289077998e-05, |
| "loss": 0.1633, |
| "step": 1195 |
| }, |
| { |
| "epoch": 3.2, |
| "learning_rate": 9.422822294433939e-05, |
| "loss": 0.1687, |
| "step": 1196 |
| }, |
| { |
| "epoch": 3.2, |
| "learning_rate": 9.408409502861374e-05, |
| "loss": 0.1647, |
| "step": 1197 |
| }, |
| { |
| "epoch": 3.2, |
| "learning_rate": 9.393997944402378e-05, |
| "loss": 0.1579, |
| "step": 1198 |
| }, |
| { |
| "epoch": 3.21, |
| "learning_rate": 9.379587649096457e-05, |
| "loss": 0.1878, |
| "step": 1199 |
| }, |
| { |
| "epoch": 3.21, |
| "learning_rate": 9.365178646980497e-05, |
| "loss": 0.1674, |
| "step": 1200 |
| }, |
| { |
| "epoch": 3.21, |
| "learning_rate": 9.350770968088674e-05, |
| "loss": 0.17, |
| "step": 1201 |
| }, |
| { |
| "epoch": 3.21, |
| "learning_rate": 9.33636464245241e-05, |
| "loss": 0.1527, |
| "step": 1202 |
| }, |
| { |
| "epoch": 3.22, |
| "learning_rate": 9.321959700100305e-05, |
| "loss": 0.1731, |
| "step": 1203 |
| }, |
| { |
| "epoch": 3.22, |
| "learning_rate": 9.307556171058085e-05, |
| "loss": 0.1685, |
| "step": 1204 |
| }, |
| { |
| "epoch": 3.22, |
| "learning_rate": 9.293154085348519e-05, |
| "loss": 0.1686, |
| "step": 1205 |
| }, |
| { |
| "epoch": 3.22, |
| "learning_rate": 9.278753472991366e-05, |
| "loss": 0.1712, |
| "step": 1206 |
| }, |
| { |
| "epoch": 3.23, |
| "learning_rate": 9.264354364003327e-05, |
| "loss": 0.1879, |
| "step": 1207 |
| }, |
| { |
| "epoch": 3.23, |
| "learning_rate": 9.249956788397957e-05, |
| "loss": 0.1712, |
| "step": 1208 |
| }, |
| { |
| "epoch": 3.23, |
| "learning_rate": 9.235560776185623e-05, |
| "loss": 0.1719, |
| "step": 1209 |
| }, |
| { |
| "epoch": 3.24, |
| "learning_rate": 9.22116635737343e-05, |
| "loss": 0.169, |
| "step": 1210 |
| }, |
| { |
| "epoch": 3.24, |
| "learning_rate": 9.206773561965159e-05, |
| "loss": 0.1744, |
| "step": 1211 |
| }, |
| { |
| "epoch": 3.24, |
| "learning_rate": 9.192382419961208e-05, |
| "loss": 0.146, |
| "step": 1212 |
| }, |
| { |
| "epoch": 3.24, |
| "learning_rate": 9.177992961358532e-05, |
| "loss": 0.1785, |
| "step": 1213 |
| }, |
| { |
| "epoch": 3.25, |
| "learning_rate": 9.16360521615058e-05, |
| "loss": 0.1435, |
| "step": 1214 |
| }, |
| { |
| "epoch": 3.25, |
| "learning_rate": 9.149219214327217e-05, |
| "loss": 0.1457, |
| "step": 1215 |
| }, |
| { |
| "epoch": 3.25, |
| "learning_rate": 9.134834985874688e-05, |
| "loss": 0.1549, |
| "step": 1216 |
| }, |
| { |
| "epoch": 3.25, |
| "learning_rate": 9.120452560775532e-05, |
| "loss": 0.1566, |
| "step": 1217 |
| }, |
| { |
| "epoch": 3.26, |
| "learning_rate": 9.106071969008537e-05, |
| "loss": 0.1386, |
| "step": 1218 |
| }, |
| { |
| "epoch": 3.26, |
| "learning_rate": 9.091693240548658e-05, |
| "loss": 0.1803, |
| "step": 1219 |
| }, |
| { |
| "epoch": 3.26, |
| "learning_rate": 9.077316405366981e-05, |
| "loss": 0.1435, |
| "step": 1220 |
| }, |
| { |
| "epoch": 3.26, |
| "learning_rate": 9.062941493430633e-05, |
| "loss": 0.1911, |
| "step": 1221 |
| }, |
| { |
| "epoch": 3.27, |
| "learning_rate": 9.048568534702743e-05, |
| "loss": 0.1646, |
| "step": 1222 |
| }, |
| { |
| "epoch": 3.27, |
| "learning_rate": 9.034197559142357e-05, |
| "loss": 0.1588, |
| "step": 1223 |
| }, |
| { |
| "epoch": 3.27, |
| "learning_rate": 9.019828596704394e-05, |
| "loss": 0.1723, |
| "step": 1224 |
| }, |
| { |
| "epoch": 3.28, |
| "learning_rate": 9.00546167733958e-05, |
| "loss": 0.1477, |
| "step": 1225 |
| }, |
| { |
| "epoch": 3.28, |
| "learning_rate": 8.991096830994374e-05, |
| "loss": 0.1601, |
| "step": 1226 |
| }, |
| { |
| "epoch": 3.28, |
| "learning_rate": 8.976734087610925e-05, |
| "loss": 0.1722, |
| "step": 1227 |
| }, |
| { |
| "epoch": 3.28, |
| "learning_rate": 8.962373477126983e-05, |
| "loss": 0.1451, |
| "step": 1228 |
| }, |
| { |
| "epoch": 3.29, |
| "learning_rate": 8.948015029475867e-05, |
| "loss": 0.1718, |
| "step": 1229 |
| }, |
| { |
| "epoch": 3.29, |
| "learning_rate": 8.933658774586379e-05, |
| "loss": 0.1809, |
| "step": 1230 |
| }, |
| { |
| "epoch": 3.29, |
| "learning_rate": 8.919304742382761e-05, |
| "loss": 0.1686, |
| "step": 1231 |
| }, |
| { |
| "epoch": 3.29, |
| "learning_rate": 8.904952962784605e-05, |
| "loss": 0.1839, |
| "step": 1232 |
| }, |
| { |
| "epoch": 3.3, |
| "learning_rate": 8.890603465706822e-05, |
| "loss": 0.1669, |
| "step": 1233 |
| }, |
| { |
| "epoch": 3.3, |
| "learning_rate": 8.876256281059557e-05, |
| "loss": 0.1612, |
| "step": 1234 |
| }, |
| { |
| "epoch": 3.3, |
| "learning_rate": 8.861911438748145e-05, |
| "loss": 0.1685, |
| "step": 1235 |
| }, |
| { |
| "epoch": 3.3, |
| "learning_rate": 8.847568968673026e-05, |
| "loss": 0.1633, |
| "step": 1236 |
| }, |
| { |
| "epoch": 3.31, |
| "learning_rate": 8.8332289007297e-05, |
| "loss": 0.1864, |
| "step": 1237 |
| }, |
| { |
| "epoch": 3.31, |
| "learning_rate": 8.818891264808666e-05, |
| "loss": 0.1742, |
| "step": 1238 |
| }, |
| { |
| "epoch": 3.31, |
| "learning_rate": 8.804556090795351e-05, |
| "loss": 0.158, |
| "step": 1239 |
| }, |
| { |
| "epoch": 3.32, |
| "learning_rate": 8.790223408570042e-05, |
| "loss": 0.1558, |
| "step": 1240 |
| }, |
| { |
| "epoch": 3.32, |
| "learning_rate": 8.775893248007839e-05, |
| "loss": 0.1718, |
| "step": 1241 |
| }, |
| { |
| "epoch": 3.32, |
| "learning_rate": 8.761565638978589e-05, |
| "loss": 0.1778, |
| "step": 1242 |
| }, |
| { |
| "epoch": 3.32, |
| "learning_rate": 8.747240611346814e-05, |
| "loss": 0.1612, |
| "step": 1243 |
| }, |
| { |
| "epoch": 3.33, |
| "learning_rate": 8.732918194971664e-05, |
| "loss": 0.1538, |
| "step": 1244 |
| }, |
| { |
| "epoch": 3.33, |
| "learning_rate": 8.718598419706833e-05, |
| "loss": 0.1609, |
| "step": 1245 |
| }, |
| { |
| "epoch": 3.33, |
| "learning_rate": 8.704281315400517e-05, |
| "loss": 0.1753, |
| "step": 1246 |
| }, |
| { |
| "epoch": 3.33, |
| "learning_rate": 8.68996691189535e-05, |
| "loss": 0.1527, |
| "step": 1247 |
| }, |
| { |
| "epoch": 3.34, |
| "learning_rate": 8.675655239028333e-05, |
| "loss": 0.1939, |
| "step": 1248 |
| }, |
| { |
| "epoch": 3.34, |
| "learning_rate": 8.661346326630767e-05, |
| "loss": 0.1524, |
| "step": 1249 |
| }, |
| { |
| "epoch": 3.34, |
| "learning_rate": 8.647040204528205e-05, |
| "loss": 0.1581, |
| "step": 1250 |
| }, |
| { |
| "epoch": 3.34, |
| "learning_rate": 8.63273690254039e-05, |
| "loss": 0.1809, |
| "step": 1251 |
| }, |
| { |
| "epoch": 3.35, |
| "learning_rate": 8.61843645048118e-05, |
| "loss": 0.1667, |
| "step": 1252 |
| }, |
| { |
| "epoch": 3.35, |
| "learning_rate": 8.60413887815849e-05, |
| "loss": 0.1476, |
| "step": 1253 |
| }, |
| { |
| "epoch": 3.35, |
| "learning_rate": 8.58984421537424e-05, |
| "loss": 0.1491, |
| "step": 1254 |
| }, |
| { |
| "epoch": 3.36, |
| "learning_rate": 8.575552491924279e-05, |
| "loss": 0.1804, |
| "step": 1255 |
| }, |
| { |
| "epoch": 3.36, |
| "learning_rate": 8.561263737598338e-05, |
| "loss": 0.159, |
| "step": 1256 |
| }, |
| { |
| "epoch": 3.36, |
| "learning_rate": 8.546977982179941e-05, |
| "loss": 0.1536, |
| "step": 1257 |
| }, |
| { |
| "epoch": 3.36, |
| "learning_rate": 8.532695255446383e-05, |
| "loss": 0.1873, |
| "step": 1258 |
| }, |
| { |
| "epoch": 3.37, |
| "learning_rate": 8.518415587168633e-05, |
| "loss": 0.1783, |
| "step": 1259 |
| }, |
| { |
| "epoch": 3.37, |
| "learning_rate": 8.504139007111289e-05, |
| "loss": 0.1539, |
| "step": 1260 |
| }, |
| { |
| "epoch": 3.37, |
| "learning_rate": 8.489865545032513e-05, |
| "loss": 0.1789, |
| "step": 1261 |
| }, |
| { |
| "epoch": 3.37, |
| "learning_rate": 8.47559523068396e-05, |
| "loss": 0.1572, |
| "step": 1262 |
| }, |
| { |
| "epoch": 3.38, |
| "learning_rate": 8.46132809381073e-05, |
| "loss": 0.1566, |
| "step": 1263 |
| }, |
| { |
| "epoch": 3.38, |
| "learning_rate": 8.447064164151304e-05, |
| "loss": 0.169, |
| "step": 1264 |
| }, |
| { |
| "epoch": 3.38, |
| "learning_rate": 8.432803471437475e-05, |
| "loss": 0.1849, |
| "step": 1265 |
| }, |
| { |
| "epoch": 3.39, |
| "learning_rate": 8.418546045394279e-05, |
| "loss": 0.1667, |
| "step": 1266 |
| }, |
| { |
| "epoch": 3.39, |
| "learning_rate": 8.404291915739957e-05, |
| "loss": 0.1527, |
| "step": 1267 |
| }, |
| { |
| "epoch": 3.39, |
| "learning_rate": 8.39004111218587e-05, |
| "loss": 0.1711, |
| "step": 1268 |
| }, |
| { |
| "epoch": 3.39, |
| "learning_rate": 8.375793664436457e-05, |
| "loss": 0.1709, |
| "step": 1269 |
| }, |
| { |
| "epoch": 3.4, |
| "learning_rate": 8.361549602189144e-05, |
| "loss": 0.178, |
| "step": 1270 |
| }, |
| { |
| "epoch": 3.4, |
| "learning_rate": 8.347308955134317e-05, |
| "loss": 0.1765, |
| "step": 1271 |
| }, |
| { |
| "epoch": 3.4, |
| "learning_rate": 8.333071752955233e-05, |
| "loss": 0.1678, |
| "step": 1272 |
| }, |
| { |
| "epoch": 3.4, |
| "learning_rate": 8.318838025327977e-05, |
| "loss": 0.2075, |
| "step": 1273 |
| }, |
| { |
| "epoch": 3.41, |
| "learning_rate": 8.304607801921384e-05, |
| "loss": 0.1758, |
| "step": 1274 |
| }, |
| { |
| "epoch": 3.41, |
| "learning_rate": 8.290381112396987e-05, |
| "loss": 0.1789, |
| "step": 1275 |
| }, |
| { |
| "epoch": 3.41, |
| "learning_rate": 8.27615798640896e-05, |
| "loss": 0.1791, |
| "step": 1276 |
| }, |
| { |
| "epoch": 3.41, |
| "learning_rate": 8.261938453604033e-05, |
| "loss": 0.1549, |
| "step": 1277 |
| }, |
| { |
| "epoch": 3.42, |
| "learning_rate": 8.24772254362147e-05, |
| "loss": 0.1375, |
| "step": 1278 |
| }, |
| { |
| "epoch": 3.42, |
| "learning_rate": 8.233510286092955e-05, |
| "loss": 0.1724, |
| "step": 1279 |
| }, |
| { |
| "epoch": 3.42, |
| "learning_rate": 8.219301710642583e-05, |
| "loss": 0.1645, |
| "step": 1280 |
| }, |
| { |
| "epoch": 3.43, |
| "learning_rate": 8.20509684688676e-05, |
| "loss": 0.1843, |
| "step": 1281 |
| }, |
| { |
| "epoch": 3.43, |
| "learning_rate": 8.190895724434168e-05, |
| "loss": 0.1634, |
| "step": 1282 |
| }, |
| { |
| "epoch": 3.43, |
| "learning_rate": 8.176698372885675e-05, |
| "loss": 0.148, |
| "step": 1283 |
| }, |
| { |
| "epoch": 3.43, |
| "learning_rate": 8.162504821834295e-05, |
| "loss": 0.1751, |
| "step": 1284 |
| }, |
| { |
| "epoch": 3.44, |
| "learning_rate": 8.148315100865131e-05, |
| "loss": 0.1558, |
| "step": 1285 |
| }, |
| { |
| "epoch": 3.44, |
| "learning_rate": 8.13412923955529e-05, |
| "loss": 0.1598, |
| "step": 1286 |
| }, |
| { |
| "epoch": 3.44, |
| "learning_rate": 8.119947267473833e-05, |
| "loss": 0.1768, |
| "step": 1287 |
| }, |
| { |
| "epoch": 3.44, |
| "learning_rate": 8.10576921418172e-05, |
| "loss": 0.1943, |
| "step": 1288 |
| }, |
| { |
| "epoch": 3.45, |
| "learning_rate": 8.091595109231745e-05, |
| "loss": 0.1571, |
| "step": 1289 |
| }, |
| { |
| "epoch": 3.45, |
| "learning_rate": 8.077424982168468e-05, |
| "loss": 0.1722, |
| "step": 1290 |
| }, |
| { |
| "epoch": 3.45, |
| "learning_rate": 8.063258862528152e-05, |
| "loss": 0.1802, |
| "step": 1291 |
| }, |
| { |
| "epoch": 3.45, |
| "learning_rate": 8.049096779838719e-05, |
| "loss": 0.1642, |
| "step": 1292 |
| }, |
| { |
| "epoch": 3.46, |
| "learning_rate": 8.034938763619667e-05, |
| "loss": 0.1669, |
| "step": 1293 |
| }, |
| { |
| "epoch": 3.46, |
| "learning_rate": 8.020784843382022e-05, |
| "loss": 0.1738, |
| "step": 1294 |
| }, |
| { |
| "epoch": 3.46, |
| "learning_rate": 8.006635048628273e-05, |
| "loss": 0.1703, |
| "step": 1295 |
| }, |
| { |
| "epoch": 3.47, |
| "learning_rate": 7.992489408852306e-05, |
| "loss": 0.1535, |
| "step": 1296 |
| }, |
| { |
| "epoch": 3.47, |
| "learning_rate": 7.978347953539344e-05, |
| "loss": 0.1667, |
| "step": 1297 |
| }, |
| { |
| "epoch": 3.47, |
| "learning_rate": 7.964210712165902e-05, |
| "loss": 0.1964, |
| "step": 1298 |
| }, |
| { |
| "epoch": 3.47, |
| "learning_rate": 7.950077714199698e-05, |
| "loss": 0.1771, |
| "step": 1299 |
| }, |
| { |
| "epoch": 3.48, |
| "learning_rate": 7.935948989099605e-05, |
| "loss": 0.1818, |
| "step": 1300 |
| }, |
| { |
| "epoch": 3.48, |
| "learning_rate": 7.921824566315594e-05, |
| "loss": 0.1651, |
| "step": 1301 |
| }, |
| { |
| "epoch": 3.48, |
| "learning_rate": 7.907704475288673e-05, |
| "loss": 0.1596, |
| "step": 1302 |
| }, |
| { |
| "epoch": 3.48, |
| "learning_rate": 7.893588745450814e-05, |
| "loss": 0.167, |
| "step": 1303 |
| }, |
| { |
| "epoch": 3.49, |
| "learning_rate": 7.879477406224894e-05, |
| "loss": 0.1403, |
| "step": 1304 |
| }, |
| { |
| "epoch": 3.49, |
| "learning_rate": 7.865370487024652e-05, |
| "loss": 0.1788, |
| "step": 1305 |
| }, |
| { |
| "epoch": 3.49, |
| "learning_rate": 7.851268017254598e-05, |
| "loss": 0.1709, |
| "step": 1306 |
| }, |
| { |
| "epoch": 3.49, |
| "learning_rate": 7.837170026309989e-05, |
| "loss": 0.1684, |
| "step": 1307 |
| }, |
| { |
| "epoch": 3.5, |
| "learning_rate": 7.823076543576717e-05, |
| "loss": 0.1771, |
| "step": 1308 |
| }, |
| { |
| "epoch": 3.5, |
| "learning_rate": 7.808987598431303e-05, |
| "loss": 0.1596, |
| "step": 1309 |
| }, |
| { |
| "epoch": 3.5, |
| "learning_rate": 7.794903220240797e-05, |
| "loss": 0.1837, |
| "step": 1310 |
| }, |
| { |
| "epoch": 3.51, |
| "learning_rate": 7.780823438362733e-05, |
| "loss": 0.1738, |
| "step": 1311 |
| }, |
| { |
| "epoch": 3.51, |
| "learning_rate": 7.766748282145068e-05, |
| "loss": 0.1521, |
| "step": 1312 |
| }, |
| { |
| "epoch": 3.51, |
| "learning_rate": 7.752677780926104e-05, |
| "loss": 0.1723, |
| "step": 1313 |
| }, |
| { |
| "epoch": 3.51, |
| "learning_rate": 7.738611964034457e-05, |
| "loss": 0.1578, |
| "step": 1314 |
| }, |
| { |
| "epoch": 3.52, |
| "learning_rate": 7.724550860788967e-05, |
| "loss": 0.1554, |
| "step": 1315 |
| }, |
| { |
| "epoch": 3.52, |
| "learning_rate": 7.710494500498662e-05, |
| "loss": 0.1552, |
| "step": 1316 |
| }, |
| { |
| "epoch": 3.52, |
| "learning_rate": 7.696442912462661e-05, |
| "loss": 0.1736, |
| "step": 1317 |
| }, |
| { |
| "epoch": 3.52, |
| "learning_rate": 7.68239612597016e-05, |
| "loss": 0.185, |
| "step": 1318 |
| }, |
| { |
| "epoch": 3.53, |
| "learning_rate": 7.66835417030033e-05, |
| "loss": 0.1471, |
| "step": 1319 |
| }, |
| { |
| "epoch": 3.53, |
| "learning_rate": 7.654317074722287e-05, |
| "loss": 0.1648, |
| "step": 1320 |
| }, |
| { |
| "epoch": 3.53, |
| "learning_rate": 7.640284868495e-05, |
| "loss": 0.1746, |
| "step": 1321 |
| }, |
| { |
| "epoch": 3.53, |
| "learning_rate": 7.626257580867258e-05, |
| "loss": 0.156, |
| "step": 1322 |
| }, |
| { |
| "epoch": 3.54, |
| "learning_rate": 7.612235241077596e-05, |
| "loss": 0.1677, |
| "step": 1323 |
| }, |
| { |
| "epoch": 3.54, |
| "learning_rate": 7.598217878354237e-05, |
| "loss": 0.1458, |
| "step": 1324 |
| }, |
| { |
| "epoch": 3.54, |
| "learning_rate": 7.584205521915023e-05, |
| "loss": 0.1564, |
| "step": 1325 |
| }, |
| { |
| "epoch": 3.55, |
| "learning_rate": 7.570198200967362e-05, |
| "loss": 0.1586, |
| "step": 1326 |
| }, |
| { |
| "epoch": 3.55, |
| "learning_rate": 7.556195944708175e-05, |
| "loss": 0.1592, |
| "step": 1327 |
| }, |
| { |
| "epoch": 3.55, |
| "learning_rate": 7.542198782323819e-05, |
| "loss": 0.1546, |
| "step": 1328 |
| }, |
| { |
| "epoch": 3.55, |
| "learning_rate": 7.528206742990036e-05, |
| "loss": 0.1634, |
| "step": 1329 |
| }, |
| { |
| "epoch": 3.56, |
| "learning_rate": 7.514219855871886e-05, |
| "loss": 0.1595, |
| "step": 1330 |
| }, |
| { |
| "epoch": 3.56, |
| "learning_rate": 7.50023815012369e-05, |
| "loss": 0.1506, |
| "step": 1331 |
| }, |
| { |
| "epoch": 3.56, |
| "learning_rate": 7.486261654888973e-05, |
| "loss": 0.1657, |
| "step": 1332 |
| }, |
| { |
| "epoch": 3.56, |
| "learning_rate": 7.4722903993004e-05, |
| "loss": 0.1767, |
| "step": 1333 |
| }, |
| { |
| "epoch": 3.57, |
| "learning_rate": 7.458324412479705e-05, |
| "loss": 0.177, |
| "step": 1334 |
| }, |
| { |
| "epoch": 3.57, |
| "learning_rate": 7.444363723537648e-05, |
| "loss": 0.1598, |
| "step": 1335 |
| }, |
| { |
| "epoch": 3.57, |
| "learning_rate": 7.430408361573948e-05, |
| "loss": 0.1701, |
| "step": 1336 |
| }, |
| { |
| "epoch": 3.57, |
| "learning_rate": 7.416458355677215e-05, |
| "loss": 0.1772, |
| "step": 1337 |
| }, |
| { |
| "epoch": 3.58, |
| "learning_rate": 7.402513734924894e-05, |
| "loss": 0.1721, |
| "step": 1338 |
| }, |
| { |
| "epoch": 3.58, |
| "learning_rate": 7.388574528383206e-05, |
| "loss": 0.1585, |
| "step": 1339 |
| }, |
| { |
| "epoch": 3.58, |
| "learning_rate": 7.374640765107095e-05, |
| "loss": 0.1664, |
| "step": 1340 |
| }, |
| { |
| "epoch": 3.59, |
| "learning_rate": 7.36071247414015e-05, |
| "loss": 0.1642, |
| "step": 1341 |
| }, |
| { |
| "epoch": 3.59, |
| "learning_rate": 7.346789684514549e-05, |
| "loss": 0.1442, |
| "step": 1342 |
| }, |
| { |
| "epoch": 3.59, |
| "learning_rate": 7.332872425251018e-05, |
| "loss": 0.1712, |
| "step": 1343 |
| }, |
| { |
| "epoch": 3.59, |
| "learning_rate": 7.318960725358741e-05, |
| "loss": 0.1833, |
| "step": 1344 |
| }, |
| { |
| "epoch": 3.6, |
| "learning_rate": 7.305054613835326e-05, |
| "loss": 0.1639, |
| "step": 1345 |
| }, |
| { |
| "epoch": 3.6, |
| "learning_rate": 7.291154119666727e-05, |
| "loss": 0.1649, |
| "step": 1346 |
| }, |
| { |
| "epoch": 3.6, |
| "learning_rate": 7.277259271827184e-05, |
| "loss": 0.1572, |
| "step": 1347 |
| }, |
| { |
| "epoch": 3.6, |
| "learning_rate": 7.263370099279172e-05, |
| "loss": 0.1659, |
| "step": 1348 |
| }, |
| { |
| "epoch": 3.61, |
| "learning_rate": 7.249486630973341e-05, |
| "loss": 0.1774, |
| "step": 1349 |
| }, |
| { |
| "epoch": 3.61, |
| "learning_rate": 7.235608895848451e-05, |
| "loss": 0.1783, |
| "step": 1350 |
| }, |
| { |
| "epoch": 3.61, |
| "learning_rate": 7.221736922831297e-05, |
| "loss": 0.1721, |
| "step": 1351 |
| }, |
| { |
| "epoch": 3.61, |
| "learning_rate": 7.207870740836684e-05, |
| "loss": 0.1745, |
| "step": 1352 |
| }, |
| { |
| "epoch": 3.62, |
| "learning_rate": 7.194010378767333e-05, |
| "loss": 0.1999, |
| "step": 1353 |
| }, |
| { |
| "epoch": 3.62, |
| "learning_rate": 7.180155865513838e-05, |
| "loss": 0.139, |
| "step": 1354 |
| }, |
| { |
| "epoch": 3.62, |
| "learning_rate": 7.166307229954598e-05, |
| "loss": 0.1747, |
| "step": 1355 |
| }, |
| { |
| "epoch": 3.63, |
| "learning_rate": 7.152464500955769e-05, |
| "loss": 0.173, |
| "step": 1356 |
| }, |
| { |
| "epoch": 3.63, |
| "learning_rate": 7.138627707371184e-05, |
| "loss": 0.1846, |
| "step": 1357 |
| }, |
| { |
| "epoch": 3.63, |
| "learning_rate": 7.12479687804232e-05, |
| "loss": 0.1785, |
| "step": 1358 |
| }, |
| { |
| "epoch": 3.63, |
| "learning_rate": 7.110972041798202e-05, |
| "loss": 0.1845, |
| "step": 1359 |
| }, |
| { |
| "epoch": 3.64, |
| "learning_rate": 7.097153227455379e-05, |
| "loss": 0.1729, |
| "step": 1360 |
| }, |
| { |
| "epoch": 3.64, |
| "learning_rate": 7.083340463817837e-05, |
| "loss": 0.1759, |
| "step": 1361 |
| }, |
| { |
| "epoch": 3.64, |
| "learning_rate": 7.069533779676961e-05, |
| "loss": 0.1549, |
| "step": 1362 |
| }, |
| { |
| "epoch": 3.64, |
| "learning_rate": 7.055733203811458e-05, |
| "loss": 0.1614, |
| "step": 1363 |
| }, |
| { |
| "epoch": 3.65, |
| "learning_rate": 7.041938764987297e-05, |
| "loss": 0.1643, |
| "step": 1364 |
| }, |
| { |
| "epoch": 3.65, |
| "learning_rate": 7.028150491957666e-05, |
| "loss": 0.1853, |
| "step": 1365 |
| }, |
| { |
| "epoch": 3.65, |
| "learning_rate": 7.014368413462891e-05, |
| "loss": 0.1661, |
| "step": 1366 |
| }, |
| { |
| "epoch": 3.66, |
| "learning_rate": 7.000592558230399e-05, |
| "loss": 0.1792, |
| "step": 1367 |
| }, |
| { |
| "epoch": 3.66, |
| "learning_rate": 6.98682295497463e-05, |
| "loss": 0.1614, |
| "step": 1368 |
| }, |
| { |
| "epoch": 3.66, |
| "learning_rate": 6.973059632397002e-05, |
| "loss": 0.1596, |
| "step": 1369 |
| }, |
| { |
| "epoch": 3.66, |
| "learning_rate": 6.959302619185836e-05, |
| "loss": 0.1725, |
| "step": 1370 |
| }, |
| { |
| "epoch": 3.67, |
| "learning_rate": 6.94555194401631e-05, |
| "loss": 0.1777, |
| "step": 1371 |
| }, |
| { |
| "epoch": 3.67, |
| "learning_rate": 6.931807635550383e-05, |
| "loss": 0.1602, |
| "step": 1372 |
| }, |
| { |
| "epoch": 3.67, |
| "learning_rate": 6.918069722436745e-05, |
| "loss": 0.1618, |
| "step": 1373 |
| }, |
| { |
| "epoch": 3.67, |
| "learning_rate": 6.904338233310755e-05, |
| "loss": 0.1432, |
| "step": 1374 |
| }, |
| { |
| "epoch": 3.68, |
| "learning_rate": 6.89061319679439e-05, |
| "loss": 0.1686, |
| "step": 1375 |
| }, |
| { |
| "epoch": 3.68, |
| "learning_rate": 6.876894641496163e-05, |
| "loss": 0.183, |
| "step": 1376 |
| }, |
| { |
| "epoch": 3.68, |
| "learning_rate": 6.863182596011087e-05, |
| "loss": 0.151, |
| "step": 1377 |
| }, |
| { |
| "epoch": 3.68, |
| "learning_rate": 6.849477088920604e-05, |
| "loss": 0.1593, |
| "step": 1378 |
| }, |
| { |
| "epoch": 3.69, |
| "learning_rate": 6.835778148792527e-05, |
| "loss": 0.1542, |
| "step": 1379 |
| }, |
| { |
| "epoch": 3.69, |
| "learning_rate": 6.822085804180984e-05, |
| "loss": 0.1713, |
| "step": 1380 |
| }, |
| { |
| "epoch": 3.69, |
| "learning_rate": 6.80840008362635e-05, |
| "loss": 0.1639, |
| "step": 1381 |
| }, |
| { |
| "epoch": 3.7, |
| "learning_rate": 6.794721015655192e-05, |
| "loss": 0.1728, |
| "step": 1382 |
| }, |
| { |
| "epoch": 3.7, |
| "learning_rate": 6.781048628780217e-05, |
| "loss": 0.1557, |
| "step": 1383 |
| }, |
| { |
| "epoch": 3.7, |
| "learning_rate": 6.767382951500204e-05, |
| "loss": 0.1597, |
| "step": 1384 |
| }, |
| { |
| "epoch": 3.7, |
| "learning_rate": 6.75372401229994e-05, |
| "loss": 0.1784, |
| "step": 1385 |
| }, |
| { |
| "epoch": 3.71, |
| "learning_rate": 6.740071839650171e-05, |
| "loss": 0.1848, |
| "step": 1386 |
| }, |
| { |
| "epoch": 3.71, |
| "learning_rate": 6.726426462007546e-05, |
| "loss": 0.1685, |
| "step": 1387 |
| }, |
| { |
| "epoch": 3.71, |
| "learning_rate": 6.712787907814541e-05, |
| "loss": 0.1565, |
| "step": 1388 |
| }, |
| { |
| "epoch": 3.71, |
| "learning_rate": 6.699156205499408e-05, |
| "loss": 0.1719, |
| "step": 1389 |
| }, |
| { |
| "epoch": 3.72, |
| "learning_rate": 6.685531383476128e-05, |
| "loss": 0.166, |
| "step": 1390 |
| }, |
| { |
| "epoch": 3.72, |
| "learning_rate": 6.671913470144331e-05, |
| "loss": 0.1498, |
| "step": 1391 |
| }, |
| { |
| "epoch": 3.72, |
| "learning_rate": 6.65830249388925e-05, |
| "loss": 0.1601, |
| "step": 1392 |
| }, |
| { |
| "epoch": 3.72, |
| "learning_rate": 6.644698483081654e-05, |
| "loss": 0.1743, |
| "step": 1393 |
| }, |
| { |
| "epoch": 3.73, |
| "learning_rate": 6.6311014660778e-05, |
| "loss": 0.1714, |
| "step": 1394 |
| }, |
| { |
| "epoch": 3.73, |
| "learning_rate": 6.617511471219364e-05, |
| "loss": 0.1746, |
| "step": 1395 |
| }, |
| { |
| "epoch": 3.73, |
| "learning_rate": 6.603928526833387e-05, |
| "loss": 0.1692, |
| "step": 1396 |
| }, |
| { |
| "epoch": 3.74, |
| "learning_rate": 6.590352661232209e-05, |
| "loss": 0.197, |
| "step": 1397 |
| }, |
| { |
| "epoch": 3.74, |
| "learning_rate": 6.57678390271342e-05, |
| "loss": 0.1945, |
| "step": 1398 |
| }, |
| { |
| "epoch": 3.74, |
| "learning_rate": 6.563222279559787e-05, |
| "loss": 0.1817, |
| "step": 1399 |
| }, |
| { |
| "epoch": 3.74, |
| "learning_rate": 6.54966782003922e-05, |
| "loss": 0.1536, |
| "step": 1400 |
| }, |
| { |
| "epoch": 3.75, |
| "learning_rate": 6.536120552404688e-05, |
| "loss": 0.1667, |
| "step": 1401 |
| }, |
| { |
| "epoch": 3.75, |
| "learning_rate": 6.52258050489416e-05, |
| "loss": 0.1567, |
| "step": 1402 |
| }, |
| { |
| "epoch": 3.75, |
| "learning_rate": 6.509047705730572e-05, |
| "loss": 0.1668, |
| "step": 1403 |
| }, |
| { |
| "epoch": 3.75, |
| "learning_rate": 6.495522183121741e-05, |
| "loss": 0.1828, |
| "step": 1404 |
| }, |
| { |
| "epoch": 3.76, |
| "learning_rate": 6.482003965260326e-05, |
| "loss": 0.162, |
| "step": 1405 |
| }, |
| { |
| "epoch": 3.76, |
| "learning_rate": 6.468493080323743e-05, |
| "loss": 0.163, |
| "step": 1406 |
| }, |
| { |
| "epoch": 3.76, |
| "learning_rate": 6.454989556474142e-05, |
| "loss": 0.163, |
| "step": 1407 |
| }, |
| { |
| "epoch": 3.76, |
| "learning_rate": 6.441493421858317e-05, |
| "loss": 0.1506, |
| "step": 1408 |
| }, |
| { |
| "epoch": 3.77, |
| "learning_rate": 6.428004704607671e-05, |
| "loss": 0.1709, |
| "step": 1409 |
| }, |
| { |
| "epoch": 3.77, |
| "learning_rate": 6.414523432838133e-05, |
| "loss": 0.1566, |
| "step": 1410 |
| }, |
| { |
| "epoch": 3.77, |
| "learning_rate": 6.401049634650118e-05, |
| "loss": 0.1617, |
| "step": 1411 |
| }, |
| { |
| "epoch": 3.78, |
| "learning_rate": 6.387583338128471e-05, |
| "loss": 0.1768, |
| "step": 1412 |
| }, |
| { |
| "epoch": 3.78, |
| "learning_rate": 6.374124571342386e-05, |
| "loss": 0.1512, |
| "step": 1413 |
| }, |
| { |
| "epoch": 3.78, |
| "learning_rate": 6.360673362345382e-05, |
| "loss": 0.1642, |
| "step": 1414 |
| }, |
| { |
| "epoch": 3.78, |
| "learning_rate": 6.347229739175196e-05, |
| "loss": 0.1523, |
| "step": 1415 |
| }, |
| { |
| "epoch": 3.79, |
| "learning_rate": 6.333793729853781e-05, |
| "loss": 0.1401, |
| "step": 1416 |
| }, |
| { |
| "epoch": 3.79, |
| "learning_rate": 6.320365362387201e-05, |
| "loss": 0.1726, |
| "step": 1417 |
| }, |
| { |
| "epoch": 3.79, |
| "learning_rate": 6.306944664765606e-05, |
| "loss": 0.1848, |
| "step": 1418 |
| }, |
| { |
| "epoch": 3.79, |
| "learning_rate": 6.293531664963144e-05, |
| "loss": 0.1592, |
| "step": 1419 |
| }, |
| { |
| "epoch": 3.8, |
| "learning_rate": 6.280126390937925e-05, |
| "loss": 0.1513, |
| "step": 1420 |
| }, |
| { |
| "epoch": 3.8, |
| "learning_rate": 6.26672887063196e-05, |
| "loss": 0.1463, |
| "step": 1421 |
| }, |
| { |
| "epoch": 3.8, |
| "learning_rate": 6.253339131971092e-05, |
| "loss": 0.1683, |
| "step": 1422 |
| }, |
| { |
| "epoch": 3.8, |
| "learning_rate": 6.239957202864943e-05, |
| "loss": 0.1803, |
| "step": 1423 |
| }, |
| { |
| "epoch": 3.81, |
| "learning_rate": 6.226583111206856e-05, |
| "loss": 0.1659, |
| "step": 1424 |
| }, |
| { |
| "epoch": 3.81, |
| "learning_rate": 6.213216884873848e-05, |
| "loss": 0.1741, |
| "step": 1425 |
| }, |
| { |
| "epoch": 3.81, |
| "learning_rate": 6.199858551726531e-05, |
| "loss": 0.1777, |
| "step": 1426 |
| }, |
| { |
| "epoch": 3.82, |
| "learning_rate": 6.186508139609064e-05, |
| "loss": 0.1456, |
| "step": 1427 |
| }, |
| { |
| "epoch": 3.82, |
| "learning_rate": 6.173165676349103e-05, |
| "loss": 0.15, |
| "step": 1428 |
| }, |
| { |
| "epoch": 3.82, |
| "learning_rate": 6.15983118975773e-05, |
| "loss": 0.1811, |
| "step": 1429 |
| }, |
| { |
| "epoch": 3.82, |
| "learning_rate": 6.146504707629399e-05, |
| "loss": 0.1468, |
| "step": 1430 |
| }, |
| { |
| "epoch": 3.83, |
| "learning_rate": 6.133186257741887e-05, |
| "loss": 0.1616, |
| "step": 1431 |
| }, |
| { |
| "epoch": 3.83, |
| "learning_rate": 6.11987586785622e-05, |
| "loss": 0.1677, |
| "step": 1432 |
| }, |
| { |
| "epoch": 3.83, |
| "learning_rate": 6.106573565716627e-05, |
| "loss": 0.1837, |
| "step": 1433 |
| }, |
| { |
| "epoch": 3.83, |
| "learning_rate": 6.09327937905048e-05, |
| "loss": 0.1713, |
| "step": 1434 |
| }, |
| { |
| "epoch": 3.84, |
| "learning_rate": 6.079993335568237e-05, |
| "loss": 0.1766, |
| "step": 1435 |
| }, |
| { |
| "epoch": 3.84, |
| "learning_rate": 6.066715462963376e-05, |
| "loss": 0.1837, |
| "step": 1436 |
| }, |
| { |
| "epoch": 3.84, |
| "learning_rate": 6.0534457889123455e-05, |
| "loss": 0.1575, |
| "step": 1437 |
| }, |
| { |
| "epoch": 3.84, |
| "learning_rate": 6.0401843410745107e-05, |
| "loss": 0.164, |
| "step": 1438 |
| }, |
| { |
| "epoch": 3.85, |
| "learning_rate": 6.0269311470920875e-05, |
| "loss": 0.1843, |
| "step": 1439 |
| }, |
| { |
| "epoch": 3.85, |
| "learning_rate": 6.013686234590077e-05, |
| "loss": 0.1397, |
| "step": 1440 |
| }, |
| { |
| "epoch": 3.85, |
| "learning_rate": 6.000449631176236e-05, |
| "loss": 0.1421, |
| "step": 1441 |
| }, |
| { |
| "epoch": 3.86, |
| "learning_rate": 5.987221364440987e-05, |
| "loss": 0.1668, |
| "step": 1442 |
| }, |
| { |
| "epoch": 3.86, |
| "learning_rate": 5.974001461957392e-05, |
| "loss": 0.1728, |
| "step": 1443 |
| }, |
| { |
| "epoch": 3.86, |
| "learning_rate": 5.960789951281052e-05, |
| "loss": 0.1638, |
| "step": 1444 |
| }, |
| { |
| "epoch": 3.86, |
| "learning_rate": 5.947586859950103e-05, |
| "loss": 0.155, |
| "step": 1445 |
| }, |
| { |
| "epoch": 3.87, |
| "learning_rate": 5.934392215485116e-05, |
| "loss": 0.175, |
| "step": 1446 |
| }, |
| { |
| "epoch": 3.87, |
| "learning_rate": 5.921206045389065e-05, |
| "loss": 0.1676, |
| "step": 1447 |
| }, |
| { |
| "epoch": 3.87, |
| "learning_rate": 5.9080283771472524e-05, |
| "loss": 0.1759, |
| "step": 1448 |
| }, |
| { |
| "epoch": 3.87, |
| "learning_rate": 5.8948592382272594e-05, |
| "loss": 0.151, |
| "step": 1449 |
| }, |
| { |
| "epoch": 3.88, |
| "learning_rate": 5.8816986560788935e-05, |
| "loss": 0.1849, |
| "step": 1450 |
| }, |
| { |
| "epoch": 3.88, |
| "learning_rate": 5.868546658134124e-05, |
| "loss": 0.1784, |
| "step": 1451 |
| }, |
| { |
| "epoch": 3.88, |
| "learning_rate": 5.855403271807033e-05, |
| "loss": 0.173, |
| "step": 1452 |
| }, |
| { |
| "epoch": 3.89, |
| "learning_rate": 5.842268524493735e-05, |
| "loss": 0.1716, |
| "step": 1453 |
| }, |
| { |
| "epoch": 3.89, |
| "learning_rate": 5.8291424435723575e-05, |
| "loss": 0.1594, |
| "step": 1454 |
| }, |
| { |
| "epoch": 3.89, |
| "learning_rate": 5.8160250564029525e-05, |
| "loss": 0.1772, |
| "step": 1455 |
| }, |
| { |
| "epoch": 3.89, |
| "learning_rate": 5.802916390327459e-05, |
| "loss": 0.1529, |
| "step": 1456 |
| }, |
| { |
| "epoch": 3.9, |
| "learning_rate": 5.7898164726696226e-05, |
| "loss": 0.1783, |
| "step": 1457 |
| }, |
| { |
| "epoch": 3.9, |
| "learning_rate": 5.776725330734974e-05, |
| "loss": 0.1779, |
| "step": 1458 |
| }, |
| { |
| "epoch": 3.9, |
| "learning_rate": 5.7636429918107314e-05, |
| "loss": 0.1797, |
| "step": 1459 |
| }, |
| { |
| "epoch": 3.9, |
| "learning_rate": 5.750569483165784e-05, |
| "loss": 0.1604, |
| "step": 1460 |
| }, |
| { |
| "epoch": 3.91, |
| "learning_rate": 5.737504832050594e-05, |
| "loss": 0.1792, |
| "step": 1461 |
| }, |
| { |
| "epoch": 3.91, |
| "learning_rate": 5.7244490656971815e-05, |
| "loss": 0.1646, |
| "step": 1462 |
| }, |
| { |
| "epoch": 3.91, |
| "learning_rate": 5.711402211319029e-05, |
| "loss": 0.1835, |
| "step": 1463 |
| }, |
| { |
| "epoch": 3.91, |
| "learning_rate": 5.698364296111056e-05, |
| "loss": 0.1641, |
| "step": 1464 |
| }, |
| { |
| "epoch": 3.92, |
| "learning_rate": 5.6853353472495474e-05, |
| "loss": 0.1574, |
| "step": 1465 |
| }, |
| { |
| "epoch": 3.92, |
| "learning_rate": 5.6723153918920936e-05, |
| "loss": 0.1446, |
| "step": 1466 |
| }, |
| { |
| "epoch": 3.92, |
| "learning_rate": 5.6593044571775346e-05, |
| "loss": 0.167, |
| "step": 1467 |
| }, |
| { |
| "epoch": 3.93, |
| "learning_rate": 5.646302570225919e-05, |
| "loss": 0.1842, |
| "step": 1468 |
| }, |
| { |
| "epoch": 3.93, |
| "learning_rate": 5.633309758138436e-05, |
| "loss": 0.1811, |
| "step": 1469 |
| }, |
| { |
| "epoch": 3.93, |
| "learning_rate": 5.6203260479973464e-05, |
| "loss": 0.1599, |
| "step": 1470 |
| }, |
| { |
| "epoch": 3.93, |
| "learning_rate": 5.6073514668659534e-05, |
| "loss": 0.156, |
| "step": 1471 |
| }, |
| { |
| "epoch": 3.94, |
| "learning_rate": 5.59438604178852e-05, |
| "loss": 0.1682, |
| "step": 1472 |
| }, |
| { |
| "epoch": 3.94, |
| "learning_rate": 5.581429799790234e-05, |
| "loss": 0.1849, |
| "step": 1473 |
| }, |
| { |
| "epoch": 3.94, |
| "learning_rate": 5.568482767877132e-05, |
| "loss": 0.1844, |
| "step": 1474 |
| }, |
| { |
| "epoch": 3.94, |
| "learning_rate": 5.5555449730360665e-05, |
| "loss": 0.1628, |
| "step": 1475 |
| }, |
| { |
| "epoch": 3.95, |
| "learning_rate": 5.542616442234618e-05, |
| "loss": 0.1722, |
| "step": 1476 |
| }, |
| { |
| "epoch": 3.95, |
| "learning_rate": 5.529697202421078e-05, |
| "loss": 0.1678, |
| "step": 1477 |
| }, |
| { |
| "epoch": 3.95, |
| "learning_rate": 5.51678728052435e-05, |
| "loss": 0.1623, |
| "step": 1478 |
| }, |
| { |
| "epoch": 3.95, |
| "learning_rate": 5.503886703453933e-05, |
| "loss": 0.1696, |
| "step": 1479 |
| }, |
| { |
| "epoch": 3.96, |
| "learning_rate": 5.490995498099843e-05, |
| "loss": 0.1779, |
| "step": 1480 |
| }, |
| { |
| "epoch": 3.96, |
| "learning_rate": 5.4781136913325535e-05, |
| "loss": 0.1636, |
| "step": 1481 |
| }, |
| { |
| "epoch": 3.96, |
| "learning_rate": 5.4652413100029584e-05, |
| "loss": 0.1872, |
| "step": 1482 |
| }, |
| { |
| "epoch": 3.97, |
| "learning_rate": 5.452378380942296e-05, |
| "loss": 0.1822, |
| "step": 1483 |
| }, |
| { |
| "epoch": 3.97, |
| "learning_rate": 5.43952493096211e-05, |
| "loss": 0.204, |
| "step": 1484 |
| }, |
| { |
| "epoch": 3.97, |
| "learning_rate": 5.426680986854178e-05, |
| "loss": 0.1691, |
| "step": 1485 |
| }, |
| { |
| "epoch": 3.97, |
| "learning_rate": 5.413846575390473e-05, |
| "loss": 0.1561, |
| "step": 1486 |
| }, |
| { |
| "epoch": 3.98, |
| "learning_rate": 5.4010217233230874e-05, |
| "loss": 0.1758, |
| "step": 1487 |
| }, |
| { |
| "epoch": 3.98, |
| "learning_rate": 5.388206457384197e-05, |
| "loss": 0.1381, |
| "step": 1488 |
| }, |
| { |
| "epoch": 3.98, |
| "learning_rate": 5.375400804285995e-05, |
| "loss": 0.178, |
| "step": 1489 |
| }, |
| { |
| "epoch": 3.98, |
| "learning_rate": 5.362604790720634e-05, |
| "loss": 0.1678, |
| "step": 1490 |
| }, |
| { |
| "epoch": 3.99, |
| "learning_rate": 5.3498184433601694e-05, |
| "loss": 0.1597, |
| "step": 1491 |
| }, |
| { |
| "epoch": 3.99, |
| "learning_rate": 5.337041788856518e-05, |
| "loss": 0.2029, |
| "step": 1492 |
| }, |
| { |
| "epoch": 3.99, |
| "learning_rate": 5.3242748538413954e-05, |
| "loss": 0.191, |
| "step": 1493 |
| }, |
| { |
| "epoch": 3.99, |
| "learning_rate": 5.311517664926245e-05, |
| "loss": 0.1454, |
| "step": 1494 |
| }, |
| { |
| "epoch": 4.0, |
| "learning_rate": 5.298770248702198e-05, |
| "loss": 0.1572, |
| "step": 1495 |
| }, |
| { |
| "epoch": 4.0, |
| "learning_rate": 5.286032631740023e-05, |
| "loss": 0.1271, |
| "step": 1496 |
| }, |
| { |
| "epoch": 4.0, |
| "learning_rate": 5.273304840590065e-05, |
| "loss": 0.0756, |
| "step": 1497 |
| }, |
| { |
| "epoch": 4.01, |
| "learning_rate": 5.260586901782172e-05, |
| "loss": 0.0713, |
| "step": 1498 |
| }, |
| { |
| "epoch": 4.01, |
| "learning_rate": 5.2478788418256755e-05, |
| "loss": 0.0682, |
| "step": 1499 |
| }, |
| { |
| "epoch": 4.01, |
| "learning_rate": 5.235180687209296e-05, |
| "loss": 0.0698, |
| "step": 1500 |
| }, |
| { |
| "epoch": 4.01, |
| "learning_rate": 5.222492464401123e-05, |
| "loss": 0.0723, |
| "step": 1501 |
| }, |
| { |
| "epoch": 4.02, |
| "learning_rate": 5.2098141998485414e-05, |
| "loss": 0.0635, |
| "step": 1502 |
| }, |
| { |
| "epoch": 4.02, |
| "learning_rate": 5.197145919978172e-05, |
| "loss": 0.0767, |
| "step": 1503 |
| }, |
| { |
| "epoch": 4.02, |
| "learning_rate": 5.184487651195825e-05, |
| "loss": 0.0655, |
| "step": 1504 |
| }, |
| { |
| "epoch": 4.02, |
| "learning_rate": 5.1718394198864484e-05, |
| "loss": 0.0709, |
| "step": 1505 |
| }, |
| { |
| "epoch": 4.03, |
| "learning_rate": 5.159201252414068e-05, |
| "loss": 0.066, |
| "step": 1506 |
| }, |
| { |
| "epoch": 4.03, |
| "learning_rate": 5.146573175121728e-05, |
| "loss": 0.0605, |
| "step": 1507 |
| }, |
| { |
| "epoch": 4.03, |
| "learning_rate": 5.1339552143314384e-05, |
| "loss": 0.0622, |
| "step": 1508 |
| }, |
| { |
| "epoch": 4.03, |
| "learning_rate": 5.1213473963441315e-05, |
| "loss": 0.0665, |
| "step": 1509 |
| }, |
| { |
| "epoch": 4.04, |
| "learning_rate": 5.1087497474395907e-05, |
| "loss": 0.0635, |
| "step": 1510 |
| }, |
| { |
| "epoch": 4.04, |
| "learning_rate": 5.096162293876414e-05, |
| "loss": 0.07, |
| "step": 1511 |
| }, |
| { |
| "epoch": 4.04, |
| "learning_rate": 5.0835850618919245e-05, |
| "loss": 0.0725, |
| "step": 1512 |
| }, |
| { |
| "epoch": 4.05, |
| "learning_rate": 5.071018077702161e-05, |
| "loss": 0.0735, |
| "step": 1513 |
| }, |
| { |
| "epoch": 4.05, |
| "learning_rate": 5.058461367501794e-05, |
| "loss": 0.0605, |
| "step": 1514 |
| }, |
| { |
| "epoch": 4.05, |
| "learning_rate": 5.045914957464086e-05, |
| "loss": 0.0582, |
| "step": 1515 |
| }, |
| { |
| "epoch": 4.05, |
| "learning_rate": 5.033378873740819e-05, |
| "loss": 0.0707, |
| "step": 1516 |
| }, |
| { |
| "epoch": 4.06, |
| "learning_rate": 5.0208531424622526e-05, |
| "loss": 0.0791, |
| "step": 1517 |
| }, |
| { |
| "epoch": 4.06, |
| "learning_rate": 5.008337789737073e-05, |
| "loss": 0.0628, |
| "step": 1518 |
| }, |
| { |
| "epoch": 4.06, |
| "learning_rate": 4.9958328416523304e-05, |
| "loss": 0.0738, |
| "step": 1519 |
| }, |
| { |
| "epoch": 4.06, |
| "learning_rate": 4.9833383242733964e-05, |
| "loss": 0.0613, |
| "step": 1520 |
| }, |
| { |
| "epoch": 4.07, |
| "learning_rate": 4.970854263643878e-05, |
| "loss": 0.066, |
| "step": 1521 |
| }, |
| { |
| "epoch": 4.07, |
| "learning_rate": 4.958380685785608e-05, |
| "loss": 0.0578, |
| "step": 1522 |
| }, |
| { |
| "epoch": 4.07, |
| "learning_rate": 4.945917616698559e-05, |
| "loss": 0.0651, |
| "step": 1523 |
| }, |
| { |
| "epoch": 4.07, |
| "learning_rate": 4.933465082360807e-05, |
| "loss": 0.0571, |
| "step": 1524 |
| }, |
| { |
| "epoch": 4.08, |
| "learning_rate": 4.921023108728461e-05, |
| "loss": 0.0548, |
| "step": 1525 |
| }, |
| { |
| "epoch": 4.08, |
| "learning_rate": 4.9085917217356146e-05, |
| "loss": 0.0688, |
| "step": 1526 |
| }, |
| { |
| "epoch": 4.08, |
| "learning_rate": 4.896170947294304e-05, |
| "loss": 0.0655, |
| "step": 1527 |
| }, |
| { |
| "epoch": 4.09, |
| "learning_rate": 4.8837608112944454e-05, |
| "loss": 0.0689, |
| "step": 1528 |
| }, |
| { |
| "epoch": 4.09, |
| "learning_rate": 4.8713613396037704e-05, |
| "loss": 0.0662, |
| "step": 1529 |
| }, |
| { |
| "epoch": 4.09, |
| "learning_rate": 4.8589725580677835e-05, |
| "loss": 0.0666, |
| "step": 1530 |
| }, |
| { |
| "epoch": 4.09, |
| "learning_rate": 4.846594492509714e-05, |
| "loss": 0.0608, |
| "step": 1531 |
| }, |
| { |
| "epoch": 4.1, |
| "learning_rate": 4.834227168730451e-05, |
| "loss": 0.0575, |
| "step": 1532 |
| }, |
| { |
| "epoch": 4.1, |
| "learning_rate": 4.821870612508494e-05, |
| "loss": 0.0626, |
| "step": 1533 |
| }, |
| { |
| "epoch": 4.1, |
| "learning_rate": 4.809524849599897e-05, |
| "loss": 0.0545, |
| "step": 1534 |
| }, |
| { |
| "epoch": 4.1, |
| "learning_rate": 4.7971899057382116e-05, |
| "loss": 0.0771, |
| "step": 1535 |
| }, |
| { |
| "epoch": 4.11, |
| "learning_rate": 4.7848658066344486e-05, |
| "loss": 0.0657, |
| "step": 1536 |
| }, |
| { |
| "epoch": 4.11, |
| "learning_rate": 4.772552577977012e-05, |
| "loss": 0.0708, |
| "step": 1537 |
| }, |
| { |
| "epoch": 4.11, |
| "learning_rate": 4.7602502454316397e-05, |
| "loss": 0.0635, |
| "step": 1538 |
| }, |
| { |
| "epoch": 4.11, |
| "learning_rate": 4.7479588346413605e-05, |
| "loss": 0.0643, |
| "step": 1539 |
| }, |
| { |
| "epoch": 4.12, |
| "learning_rate": 4.735678371226441e-05, |
| "loss": 0.0701, |
| "step": 1540 |
| }, |
| { |
| "epoch": 4.12, |
| "learning_rate": 4.723408880784334e-05, |
| "loss": 0.0623, |
| "step": 1541 |
| }, |
| { |
| "epoch": 4.12, |
| "learning_rate": 4.7111503888896066e-05, |
| "loss": 0.0647, |
| "step": 1542 |
| }, |
| { |
| "epoch": 4.13, |
| "learning_rate": 4.6989029210939074e-05, |
| "loss": 0.0659, |
| "step": 1543 |
| }, |
| { |
| "epoch": 4.13, |
| "learning_rate": 4.686666502925908e-05, |
| "loss": 0.0622, |
| "step": 1544 |
| }, |
| { |
| "epoch": 4.13, |
| "learning_rate": 4.674441159891252e-05, |
| "loss": 0.0572, |
| "step": 1545 |
| }, |
| { |
| "epoch": 4.13, |
| "learning_rate": 4.6622269174724856e-05, |
| "loss": 0.0724, |
| "step": 1546 |
| }, |
| { |
| "epoch": 4.14, |
| "learning_rate": 4.6500238011290295e-05, |
| "loss": 0.0695, |
| "step": 1547 |
| }, |
| { |
| "epoch": 4.14, |
| "learning_rate": 4.637831836297103e-05, |
| "loss": 0.067, |
| "step": 1548 |
| }, |
| { |
| "epoch": 4.14, |
| "learning_rate": 4.6256510483896866e-05, |
| "loss": 0.0706, |
| "step": 1549 |
| }, |
| { |
| "epoch": 4.14, |
| "learning_rate": 4.613481462796468e-05, |
| "loss": 0.0654, |
| "step": 1550 |
| }, |
| { |
| "epoch": 4.15, |
| "learning_rate": 4.601323104883776e-05, |
| "loss": 0.0642, |
| "step": 1551 |
| }, |
| { |
| "epoch": 4.15, |
| "learning_rate": 4.5891759999945347e-05, |
| "loss": 0.0503, |
| "step": 1552 |
| }, |
| { |
| "epoch": 4.15, |
| "learning_rate": 4.577040173448224e-05, |
| "loss": 0.0659, |
| "step": 1553 |
| }, |
| { |
| "epoch": 4.16, |
| "learning_rate": 4.564915650540808e-05, |
| "loss": 0.0643, |
| "step": 1554 |
| }, |
| { |
| "epoch": 4.16, |
| "learning_rate": 4.552802456544688e-05, |
| "loss": 0.0542, |
| "step": 1555 |
| }, |
| { |
| "epoch": 4.16, |
| "learning_rate": 4.540700616708658e-05, |
| "loss": 0.0645, |
| "step": 1556 |
| }, |
| { |
| "epoch": 4.16, |
| "learning_rate": 4.528610156257832e-05, |
| "loss": 0.068, |
| "step": 1557 |
| }, |
| { |
| "epoch": 4.17, |
| "learning_rate": 4.516531100393624e-05, |
| "loss": 0.0561, |
| "step": 1558 |
| }, |
| { |
| "epoch": 4.17, |
| "learning_rate": 4.504463474293657e-05, |
| "loss": 0.062, |
| "step": 1559 |
| }, |
| { |
| "epoch": 4.17, |
| "learning_rate": 4.4924073031117455e-05, |
| "loss": 0.063, |
| "step": 1560 |
| }, |
| { |
| "epoch": 4.17, |
| "learning_rate": 4.480362611977813e-05, |
| "loss": 0.0617, |
| "step": 1561 |
| }, |
| { |
| "epoch": 4.18, |
| "learning_rate": 4.4683294259978694e-05, |
| "loss": 0.0547, |
| "step": 1562 |
| }, |
| { |
| "epoch": 4.18, |
| "learning_rate": 4.456307770253927e-05, |
| "loss": 0.0663, |
| "step": 1563 |
| }, |
| { |
| "epoch": 4.18, |
| "learning_rate": 4.444297669803981e-05, |
| "loss": 0.0549, |
| "step": 1564 |
| }, |
| { |
| "epoch": 4.18, |
| "learning_rate": 4.432299149681923e-05, |
| "loss": 0.058, |
| "step": 1565 |
| }, |
| { |
| "epoch": 4.19, |
| "learning_rate": 4.420312234897521e-05, |
| "loss": 0.0604, |
| "step": 1566 |
| }, |
| { |
| "epoch": 4.19, |
| "learning_rate": 4.4083369504363526e-05, |
| "loss": 0.0645, |
| "step": 1567 |
| }, |
| { |
| "epoch": 4.19, |
| "learning_rate": 4.3963733212597366e-05, |
| "loss": 0.066, |
| "step": 1568 |
| }, |
| { |
| "epoch": 4.2, |
| "learning_rate": 4.3844213723047204e-05, |
| "loss": 0.0603, |
| "step": 1569 |
| }, |
| { |
| "epoch": 4.2, |
| "learning_rate": 4.372481128483984e-05, |
| "loss": 0.059, |
| "step": 1570 |
| }, |
| { |
| "epoch": 4.2, |
| "learning_rate": 4.360552614685825e-05, |
| "loss": 0.0807, |
| "step": 1571 |
| }, |
| { |
| "epoch": 4.2, |
| "learning_rate": 4.3486358557740814e-05, |
| "loss": 0.073, |
| "step": 1572 |
| }, |
| { |
| "epoch": 4.21, |
| "learning_rate": 4.3367308765880966e-05, |
| "loss": 0.0802, |
| "step": 1573 |
| }, |
| { |
| "epoch": 4.21, |
| "learning_rate": 4.32483770194265e-05, |
| "loss": 0.0673, |
| "step": 1574 |
| }, |
| { |
| "epoch": 4.21, |
| "learning_rate": 4.312956356627928e-05, |
| "loss": 0.0618, |
| "step": 1575 |
| }, |
| { |
| "epoch": 4.21, |
| "learning_rate": 4.301086865409449e-05, |
| "loss": 0.0633, |
| "step": 1576 |
| }, |
| { |
| "epoch": 4.22, |
| "learning_rate": 4.289229253028029e-05, |
| "loss": 0.0632, |
| "step": 1577 |
| }, |
| { |
| "epoch": 4.22, |
| "learning_rate": 4.277383544199726e-05, |
| "loss": 0.061, |
| "step": 1578 |
| }, |
| { |
| "epoch": 4.22, |
| "learning_rate": 4.265549763615779e-05, |
| "loss": 0.0632, |
| "step": 1579 |
| }, |
| { |
| "epoch": 4.22, |
| "learning_rate": 4.253727935942563e-05, |
| "loss": 0.0542, |
| "step": 1580 |
| }, |
| { |
| "epoch": 4.23, |
| "learning_rate": 4.241918085821547e-05, |
| "loss": 0.0669, |
| "step": 1581 |
| }, |
| { |
| "epoch": 4.23, |
| "learning_rate": 4.2301202378692316e-05, |
| "loss": 0.0511, |
| "step": 1582 |
| }, |
| { |
| "epoch": 4.23, |
| "learning_rate": 4.218334416677091e-05, |
| "loss": 0.072, |
| "step": 1583 |
| }, |
| { |
| "epoch": 4.24, |
| "learning_rate": 4.206560646811545e-05, |
| "loss": 0.0742, |
| "step": 1584 |
| }, |
| { |
| "epoch": 4.24, |
| "learning_rate": 4.1947989528138773e-05, |
| "loss": 0.0545, |
| "step": 1585 |
| }, |
| { |
| "epoch": 4.24, |
| "learning_rate": 4.183049359200215e-05, |
| "loss": 0.063, |
| "step": 1586 |
| }, |
| { |
| "epoch": 4.24, |
| "learning_rate": 4.1713118904614614e-05, |
| "loss": 0.0586, |
| "step": 1587 |
| }, |
| { |
| "epoch": 4.25, |
| "learning_rate": 4.159586571063236e-05, |
| "loss": 0.0715, |
| "step": 1588 |
| }, |
| { |
| "epoch": 4.25, |
| "learning_rate": 4.1478734254458394e-05, |
| "loss": 0.0697, |
| "step": 1589 |
| }, |
| { |
| "epoch": 4.25, |
| "learning_rate": 4.136172478024203e-05, |
| "loss": 0.0662, |
| "step": 1590 |
| }, |
| { |
| "epoch": 4.25, |
| "learning_rate": 4.124483753187831e-05, |
| "loss": 0.051, |
| "step": 1591 |
| }, |
| { |
| "epoch": 4.26, |
| "learning_rate": 4.112807275300742e-05, |
| "loss": 0.0521, |
| "step": 1592 |
| }, |
| { |
| "epoch": 4.26, |
| "learning_rate": 4.1011430687014316e-05, |
| "loss": 0.0661, |
| "step": 1593 |
| }, |
| { |
| "epoch": 4.26, |
| "learning_rate": 4.08949115770282e-05, |
| "loss": 0.0581, |
| "step": 1594 |
| }, |
| { |
| "epoch": 4.26, |
| "learning_rate": 4.077851566592201e-05, |
| "loss": 0.0728, |
| "step": 1595 |
| }, |
| { |
| "epoch": 4.27, |
| "learning_rate": 4.0662243196311815e-05, |
| "loss": 0.0582, |
| "step": 1596 |
| }, |
| { |
| "epoch": 4.27, |
| "learning_rate": 4.054609441055636e-05, |
| "loss": 0.0619, |
| "step": 1597 |
| }, |
| { |
| "epoch": 4.27, |
| "learning_rate": 4.0430069550756665e-05, |
| "loss": 0.056, |
| "step": 1598 |
| }, |
| { |
| "epoch": 4.28, |
| "learning_rate": 4.031416885875543e-05, |
| "loss": 0.0617, |
| "step": 1599 |
| }, |
| { |
| "epoch": 4.28, |
| "learning_rate": 4.0198392576136526e-05, |
| "loss": 0.0638, |
| "step": 1600 |
| }, |
| { |
| "epoch": 4.28, |
| "learning_rate": 4.008274094422447e-05, |
| "loss": 0.0665, |
| "step": 1601 |
| }, |
| { |
| "epoch": 4.28, |
| "learning_rate": 3.9967214204083946e-05, |
| "loss": 0.0669, |
| "step": 1602 |
| }, |
| { |
| "epoch": 4.29, |
| "learning_rate": 3.985181259651938e-05, |
| "loss": 0.0753, |
| "step": 1603 |
| }, |
| { |
| "epoch": 4.29, |
| "learning_rate": 3.973653636207437e-05, |
| "loss": 0.0716, |
| "step": 1604 |
| }, |
| { |
| "epoch": 4.29, |
| "learning_rate": 3.9621385741031137e-05, |
| "loss": 0.0744, |
| "step": 1605 |
| }, |
| { |
| "epoch": 4.29, |
| "learning_rate": 3.950636097341003e-05, |
| "loss": 0.0683, |
| "step": 1606 |
| }, |
| { |
| "epoch": 4.3, |
| "learning_rate": 3.939146229896918e-05, |
| "loss": 0.0613, |
| "step": 1607 |
| }, |
| { |
| "epoch": 4.3, |
| "learning_rate": 3.927668995720384e-05, |
| "loss": 0.0566, |
| "step": 1608 |
| }, |
| { |
| "epoch": 4.3, |
| "learning_rate": 3.916204418734599e-05, |
| "loss": 0.054, |
| "step": 1609 |
| }, |
| { |
| "epoch": 4.3, |
| "learning_rate": 3.90475252283636e-05, |
| "loss": 0.0632, |
| "step": 1610 |
| }, |
| { |
| "epoch": 4.31, |
| "learning_rate": 3.8933133318960514e-05, |
| "loss": 0.0564, |
| "step": 1611 |
| }, |
| { |
| "epoch": 4.31, |
| "learning_rate": 3.881886869757565e-05, |
| "loss": 0.0656, |
| "step": 1612 |
| }, |
| { |
| "epoch": 4.31, |
| "learning_rate": 3.870473160238271e-05, |
| "loss": 0.0636, |
| "step": 1613 |
| }, |
| { |
| "epoch": 4.32, |
| "learning_rate": 3.859072227128945e-05, |
| "loss": 0.0667, |
| "step": 1614 |
| }, |
| { |
| "epoch": 4.32, |
| "learning_rate": 3.847684094193733e-05, |
| "loss": 0.0628, |
| "step": 1615 |
| }, |
| { |
| "epoch": 4.32, |
| "learning_rate": 3.8363087851701085e-05, |
| "loss": 0.0663, |
| "step": 1616 |
| }, |
| { |
| "epoch": 4.32, |
| "learning_rate": 3.824946323768811e-05, |
| "loss": 0.0599, |
| "step": 1617 |
| }, |
| { |
| "epoch": 4.33, |
| "learning_rate": 3.8135967336738075e-05, |
| "loss": 0.0634, |
| "step": 1618 |
| }, |
| { |
| "epoch": 4.33, |
| "learning_rate": 3.8022600385422126e-05, |
| "loss": 0.0619, |
| "step": 1619 |
| }, |
| { |
| "epoch": 4.33, |
| "learning_rate": 3.7909362620042865e-05, |
| "loss": 0.0559, |
| "step": 1620 |
| }, |
| { |
| "epoch": 4.33, |
| "learning_rate": 3.7796254276633544e-05, |
| "loss": 0.0495, |
| "step": 1621 |
| }, |
| { |
| "epoch": 4.34, |
| "learning_rate": 3.7683275590957666e-05, |
| "loss": 0.0656, |
| "step": 1622 |
| }, |
| { |
| "epoch": 4.34, |
| "learning_rate": 3.7570426798508415e-05, |
| "loss": 0.0599, |
| "step": 1623 |
| }, |
| { |
| "epoch": 4.34, |
| "learning_rate": 3.745770813450824e-05, |
| "loss": 0.0593, |
| "step": 1624 |
| }, |
| { |
| "epoch": 4.34, |
| "learning_rate": 3.734511983390838e-05, |
| "loss": 0.0554, |
| "step": 1625 |
| }, |
| { |
| "epoch": 4.35, |
| "learning_rate": 3.7232662131388386e-05, |
| "loss": 0.0516, |
| "step": 1626 |
| }, |
| { |
| "epoch": 4.35, |
| "learning_rate": 3.71203352613555e-05, |
| "loss": 0.0739, |
| "step": 1627 |
| }, |
| { |
| "epoch": 4.35, |
| "learning_rate": 3.7008139457944245e-05, |
| "loss": 0.0623, |
| "step": 1628 |
| }, |
| { |
| "epoch": 4.36, |
| "learning_rate": 3.689607495501606e-05, |
| "loss": 0.0664, |
| "step": 1629 |
| }, |
| { |
| "epoch": 4.36, |
| "learning_rate": 3.678414198615865e-05, |
| "loss": 0.0611, |
| "step": 1630 |
| }, |
| { |
| "epoch": 4.36, |
| "learning_rate": 3.6672340784685475e-05, |
| "loss": 0.0691, |
| "step": 1631 |
| }, |
| { |
| "epoch": 4.36, |
| "learning_rate": 3.6560671583635467e-05, |
| "loss": 0.0711, |
| "step": 1632 |
| }, |
| { |
| "epoch": 4.37, |
| "learning_rate": 3.644913461577229e-05, |
| "loss": 0.0639, |
| "step": 1633 |
| }, |
| { |
| "epoch": 4.37, |
| "learning_rate": 3.6337730113584053e-05, |
| "loss": 0.0652, |
| "step": 1634 |
| }, |
| { |
| "epoch": 4.37, |
| "learning_rate": 3.6226458309282806e-05, |
| "loss": 0.0639, |
| "step": 1635 |
| }, |
| { |
| "epoch": 4.37, |
| "learning_rate": 3.6115319434803894e-05, |
| "loss": 0.0543, |
| "step": 1636 |
| }, |
| { |
| "epoch": 4.38, |
| "learning_rate": 3.600431372180557e-05, |
| "loss": 0.0627, |
| "step": 1637 |
| }, |
| { |
| "epoch": 4.38, |
| "learning_rate": 3.589344140166865e-05, |
| "loss": 0.073, |
| "step": 1638 |
| }, |
| { |
| "epoch": 4.38, |
| "learning_rate": 3.5782702705495826e-05, |
| "loss": 0.0704, |
| "step": 1639 |
| }, |
| { |
| "epoch": 4.39, |
| "learning_rate": 3.5672097864111285e-05, |
| "loss": 0.0567, |
| "step": 1640 |
| }, |
| { |
| "epoch": 4.39, |
| "learning_rate": 3.5561627108060136e-05, |
| "loss": 0.0717, |
| "step": 1641 |
| }, |
| { |
| "epoch": 4.39, |
| "learning_rate": 3.54512906676081e-05, |
| "loss": 0.0578, |
| "step": 1642 |
| }, |
| { |
| "epoch": 4.39, |
| "learning_rate": 3.534108877274093e-05, |
| "loss": 0.0559, |
| "step": 1643 |
| }, |
| { |
| "epoch": 4.4, |
| "learning_rate": 3.523102165316381e-05, |
| "loss": 0.0527, |
| "step": 1644 |
| }, |
| { |
| "epoch": 4.4, |
| "learning_rate": 3.5121089538301156e-05, |
| "loss": 0.065, |
| "step": 1645 |
| }, |
| { |
| "epoch": 4.4, |
| "learning_rate": 3.5011292657295825e-05, |
| "loss": 0.0542, |
| "step": 1646 |
| }, |
| { |
| "epoch": 4.4, |
| "learning_rate": 3.490163123900895e-05, |
| "loss": 0.0596, |
| "step": 1647 |
| }, |
| { |
| "epoch": 4.41, |
| "learning_rate": 3.479210551201915e-05, |
| "loss": 0.0637, |
| "step": 1648 |
| }, |
| { |
| "epoch": 4.41, |
| "learning_rate": 3.468271570462235e-05, |
| "loss": 0.053, |
| "step": 1649 |
| }, |
| { |
| "epoch": 4.41, |
| "learning_rate": 3.457346204483103e-05, |
| "loss": 0.0616, |
| "step": 1650 |
| }, |
| { |
| "epoch": 4.41, |
| "learning_rate": 3.446434476037399e-05, |
| "loss": 0.0589, |
| "step": 1651 |
| }, |
| { |
| "epoch": 4.42, |
| "learning_rate": 3.435536407869575e-05, |
| "loss": 0.0678, |
| "step": 1652 |
| }, |
| { |
| "epoch": 4.42, |
| "learning_rate": 3.424652022695603e-05, |
| "loss": 0.0568, |
| "step": 1653 |
| }, |
| { |
| "epoch": 4.42, |
| "learning_rate": 3.413781343202942e-05, |
| "loss": 0.0805, |
| "step": 1654 |
| }, |
| { |
| "epoch": 4.43, |
| "learning_rate": 3.402924392050475e-05, |
| "loss": 0.0663, |
| "step": 1655 |
| }, |
| { |
| "epoch": 4.43, |
| "learning_rate": 3.3920811918684805e-05, |
| "loss": 0.0637, |
| "step": 1656 |
| }, |
| { |
| "epoch": 4.43, |
| "learning_rate": 3.38125176525856e-05, |
| "loss": 0.0629, |
| "step": 1657 |
| }, |
| { |
| "epoch": 4.43, |
| "learning_rate": 3.3704361347936187e-05, |
| "loss": 0.0725, |
| "step": 1658 |
| }, |
| { |
| "epoch": 4.44, |
| "learning_rate": 3.359634323017795e-05, |
| "loss": 0.0704, |
| "step": 1659 |
| }, |
| { |
| "epoch": 4.44, |
| "learning_rate": 3.348846352446435e-05, |
| "loss": 0.0669, |
| "step": 1660 |
| }, |
| { |
| "epoch": 4.44, |
| "learning_rate": 3.33807224556602e-05, |
| "loss": 0.0709, |
| "step": 1661 |
| }, |
| { |
| "epoch": 4.44, |
| "learning_rate": 3.327312024834143e-05, |
| "loss": 0.0634, |
| "step": 1662 |
| }, |
| { |
| "epoch": 4.45, |
| "learning_rate": 3.316565712679453e-05, |
| "loss": 0.0799, |
| "step": 1663 |
| }, |
| { |
| "epoch": 4.45, |
| "learning_rate": 3.3058333315016065e-05, |
| "loss": 0.0665, |
| "step": 1664 |
| }, |
| { |
| "epoch": 4.45, |
| "learning_rate": 3.295114903671215e-05, |
| "loss": 0.0694, |
| "step": 1665 |
| }, |
| { |
| "epoch": 4.45, |
| "learning_rate": 3.2844104515298155e-05, |
| "loss": 0.0589, |
| "step": 1666 |
| }, |
| { |
| "epoch": 4.46, |
| "learning_rate": 3.273719997389813e-05, |
| "loss": 0.058, |
| "step": 1667 |
| }, |
| { |
| "epoch": 4.46, |
| "learning_rate": 3.263043563534428e-05, |
| "loss": 0.0533, |
| "step": 1668 |
| }, |
| { |
| "epoch": 4.46, |
| "learning_rate": 3.252381172217666e-05, |
| "loss": 0.0697, |
| "step": 1669 |
| }, |
| { |
| "epoch": 4.47, |
| "learning_rate": 3.2417328456642504e-05, |
| "loss": 0.0683, |
| "step": 1670 |
| }, |
| { |
| "epoch": 4.47, |
| "learning_rate": 3.2310986060696034e-05, |
| "loss": 0.0624, |
| "step": 1671 |
| }, |
| { |
| "epoch": 4.47, |
| "learning_rate": 3.22047847559977e-05, |
| "loss": 0.0619, |
| "step": 1672 |
| }, |
| { |
| "epoch": 4.47, |
| "learning_rate": 3.2098724763913954e-05, |
| "loss": 0.0675, |
| "step": 1673 |
| }, |
| { |
| "epoch": 4.48, |
| "learning_rate": 3.199280630551663e-05, |
| "loss": 0.0608, |
| "step": 1674 |
| }, |
| { |
| "epoch": 4.48, |
| "learning_rate": 3.188702960158261e-05, |
| "loss": 0.0664, |
| "step": 1675 |
| }, |
| { |
| "epoch": 4.48, |
| "learning_rate": 3.1781394872593295e-05, |
| "loss": 0.0663, |
| "step": 1676 |
| }, |
| { |
| "epoch": 4.48, |
| "learning_rate": 3.16759023387341e-05, |
| "loss": 0.0643, |
| "step": 1677 |
| }, |
| { |
| "epoch": 4.49, |
| "learning_rate": 3.157055221989406e-05, |
| "loss": 0.0579, |
| "step": 1678 |
| }, |
| { |
| "epoch": 4.49, |
| "learning_rate": 3.146534473566539e-05, |
| "loss": 0.0681, |
| "step": 1679 |
| }, |
| { |
| "epoch": 4.49, |
| "learning_rate": 3.136028010534303e-05, |
| "loss": 0.0658, |
| "step": 1680 |
| }, |
| { |
| "epoch": 4.49, |
| "learning_rate": 3.125535854792408e-05, |
| "loss": 0.0508, |
| "step": 1681 |
| }, |
| { |
| "epoch": 4.5, |
| "learning_rate": 3.1150580282107425e-05, |
| "loss": 0.0639, |
| "step": 1682 |
| }, |
| { |
| "epoch": 4.5, |
| "learning_rate": 3.104594552629331e-05, |
| "loss": 0.0723, |
| "step": 1683 |
| }, |
| { |
| "epoch": 4.5, |
| "learning_rate": 3.094145449858285e-05, |
| "loss": 0.0753, |
| "step": 1684 |
| }, |
| { |
| "epoch": 4.51, |
| "learning_rate": 3.083710741677756e-05, |
| "loss": 0.0755, |
| "step": 1685 |
| }, |
| { |
| "epoch": 4.51, |
| "learning_rate": 3.073290449837892e-05, |
| "loss": 0.0752, |
| "step": 1686 |
| }, |
| { |
| "epoch": 4.51, |
| "learning_rate": 3.0628845960587835e-05, |
| "loss": 0.0555, |
| "step": 1687 |
| }, |
| { |
| "epoch": 4.51, |
| "learning_rate": 3.05249320203044e-05, |
| "loss": 0.0644, |
| "step": 1688 |
| }, |
| { |
| "epoch": 4.52, |
| "learning_rate": 3.0421162894127243e-05, |
| "loss": 0.061, |
| "step": 1689 |
| }, |
| { |
| "epoch": 4.52, |
| "learning_rate": 3.0317538798353116e-05, |
| "loss": 0.0463, |
| "step": 1690 |
| }, |
| { |
| "epoch": 4.52, |
| "learning_rate": 3.021405994897647e-05, |
| "loss": 0.0596, |
| "step": 1691 |
| }, |
| { |
| "epoch": 4.52, |
| "learning_rate": 3.0110726561689063e-05, |
| "loss": 0.0646, |
| "step": 1692 |
| }, |
| { |
| "epoch": 4.53, |
| "learning_rate": 3.0007538851879437e-05, |
| "loss": 0.0623, |
| "step": 1693 |
| }, |
| { |
| "epoch": 4.53, |
| "learning_rate": 2.990449703463243e-05, |
| "loss": 0.0726, |
| "step": 1694 |
| }, |
| { |
| "epoch": 4.53, |
| "learning_rate": 2.980160132472879e-05, |
| "loss": 0.0579, |
| "step": 1695 |
| }, |
| { |
| "epoch": 4.53, |
| "learning_rate": 2.9698851936644768e-05, |
| "loss": 0.0552, |
| "step": 1696 |
| }, |
| { |
| "epoch": 4.54, |
| "learning_rate": 2.9596249084551585e-05, |
| "loss": 0.061, |
| "step": 1697 |
| }, |
| { |
| "epoch": 4.54, |
| "learning_rate": 2.9493792982315083e-05, |
| "loss": 0.06, |
| "step": 1698 |
| }, |
| { |
| "epoch": 4.54, |
| "learning_rate": 2.9391483843495126e-05, |
| "loss": 0.0517, |
| "step": 1699 |
| }, |
| { |
| "epoch": 4.55, |
| "learning_rate": 2.9289321881345254e-05, |
| "loss": 0.0607, |
| "step": 1700 |
| }, |
| { |
| "epoch": 4.55, |
| "learning_rate": 2.9187307308812295e-05, |
| "loss": 0.0589, |
| "step": 1701 |
| }, |
| { |
| "epoch": 4.55, |
| "learning_rate": 2.9085440338535864e-05, |
| "loss": 0.0589, |
| "step": 1702 |
| }, |
| { |
| "epoch": 4.55, |
| "learning_rate": 2.8983721182847833e-05, |
| "loss": 0.0685, |
| "step": 1703 |
| }, |
| { |
| "epoch": 4.56, |
| "learning_rate": 2.8882150053771995e-05, |
| "loss": 0.0804, |
| "step": 1704 |
| }, |
| { |
| "epoch": 4.56, |
| "learning_rate": 2.8780727163023635e-05, |
| "loss": 0.0572, |
| "step": 1705 |
| }, |
| { |
| "epoch": 4.56, |
| "learning_rate": 2.867945272200904e-05, |
| "loss": 0.0545, |
| "step": 1706 |
| }, |
| { |
| "epoch": 4.56, |
| "learning_rate": 2.8578326941825073e-05, |
| "loss": 0.0545, |
| "step": 1707 |
| }, |
| { |
| "epoch": 4.57, |
| "learning_rate": 2.8477350033258677e-05, |
| "loss": 0.0674, |
| "step": 1708 |
| }, |
| { |
| "epoch": 4.57, |
| "learning_rate": 2.8376522206786494e-05, |
| "loss": 0.0698, |
| "step": 1709 |
| }, |
| { |
| "epoch": 4.57, |
| "learning_rate": 2.8275843672574476e-05, |
| "loss": 0.0575, |
| "step": 1710 |
| }, |
| { |
| "epoch": 4.57, |
| "learning_rate": 2.817531464047739e-05, |
| "loss": 0.0733, |
| "step": 1711 |
| }, |
| { |
| "epoch": 4.58, |
| "learning_rate": 2.8074935320038308e-05, |
| "loss": 0.0562, |
| "step": 1712 |
| }, |
| { |
| "epoch": 4.58, |
| "learning_rate": 2.7974705920488263e-05, |
| "loss": 0.0636, |
| "step": 1713 |
| }, |
| { |
| "epoch": 4.58, |
| "learning_rate": 2.787462665074584e-05, |
| "loss": 0.062, |
| "step": 1714 |
| }, |
| { |
| "epoch": 4.59, |
| "learning_rate": 2.7774697719416685e-05, |
| "loss": 0.0685, |
| "step": 1715 |
| }, |
| { |
| "epoch": 4.59, |
| "learning_rate": 2.7674919334793035e-05, |
| "loss": 0.0677, |
| "step": 1716 |
| }, |
| { |
| "epoch": 4.59, |
| "learning_rate": 2.7575291704853323e-05, |
| "loss": 0.0643, |
| "step": 1717 |
| }, |
| { |
| "epoch": 4.59, |
| "learning_rate": 2.74758150372618e-05, |
| "loss": 0.0502, |
| "step": 1718 |
| }, |
| { |
| "epoch": 4.6, |
| "learning_rate": 2.737648953936801e-05, |
| "loss": 0.0645, |
| "step": 1719 |
| }, |
| { |
| "epoch": 4.6, |
| "learning_rate": 2.7277315418206473e-05, |
| "loss": 0.0559, |
| "step": 1720 |
| }, |
| { |
| "epoch": 4.6, |
| "learning_rate": 2.717829288049607e-05, |
| "loss": 0.0548, |
| "step": 1721 |
| }, |
| { |
| "epoch": 4.6, |
| "learning_rate": 2.7079422132639742e-05, |
| "loss": 0.0784, |
| "step": 1722 |
| }, |
| { |
| "epoch": 4.61, |
| "learning_rate": 2.6980703380724094e-05, |
| "loss": 0.057, |
| "step": 1723 |
| }, |
| { |
| "epoch": 4.61, |
| "learning_rate": 2.688213683051892e-05, |
| "loss": 0.0613, |
| "step": 1724 |
| }, |
| { |
| "epoch": 4.61, |
| "learning_rate": 2.6783722687476696e-05, |
| "loss": 0.0525, |
| "step": 1725 |
| }, |
| { |
| "epoch": 4.61, |
| "learning_rate": 2.6685461156732216e-05, |
| "loss": 0.0773, |
| "step": 1726 |
| }, |
| { |
| "epoch": 4.62, |
| "learning_rate": 2.658735244310224e-05, |
| "loss": 0.0652, |
| "step": 1727 |
| }, |
| { |
| "epoch": 4.62, |
| "learning_rate": 2.6489396751084982e-05, |
| "loss": 0.0577, |
| "step": 1728 |
| }, |
| { |
| "epoch": 4.62, |
| "learning_rate": 2.6391594284859623e-05, |
| "loss": 0.0558, |
| "step": 1729 |
| }, |
| { |
| "epoch": 4.63, |
| "learning_rate": 2.6293945248286044e-05, |
| "loss": 0.0566, |
| "step": 1730 |
| }, |
| { |
| "epoch": 4.63, |
| "learning_rate": 2.6196449844904258e-05, |
| "loss": 0.0634, |
| "step": 1731 |
| }, |
| { |
| "epoch": 4.63, |
| "learning_rate": 2.6099108277934103e-05, |
| "loss": 0.0666, |
| "step": 1732 |
| }, |
| { |
| "epoch": 4.63, |
| "learning_rate": 2.6001920750274677e-05, |
| "loss": 0.0777, |
| "step": 1733 |
| }, |
| { |
| "epoch": 4.64, |
| "learning_rate": 2.5904887464504114e-05, |
| "loss": 0.0598, |
| "step": 1734 |
| }, |
| { |
| "epoch": 4.64, |
| "learning_rate": 2.5808008622878898e-05, |
| "loss": 0.0574, |
| "step": 1735 |
| }, |
| { |
| "epoch": 4.64, |
| "learning_rate": 2.5711284427333714e-05, |
| "loss": 0.0509, |
| "step": 1736 |
| }, |
| { |
| "epoch": 4.64, |
| "learning_rate": 2.5614715079480888e-05, |
| "loss": 0.0536, |
| "step": 1737 |
| }, |
| { |
| "epoch": 4.65, |
| "learning_rate": 2.5518300780609904e-05, |
| "loss": 0.0686, |
| "step": 1738 |
| }, |
| { |
| "epoch": 4.65, |
| "learning_rate": 2.5422041731687108e-05, |
| "loss": 0.0619, |
| "step": 1739 |
| }, |
| { |
| "epoch": 4.65, |
| "learning_rate": 2.5325938133355242e-05, |
| "loss": 0.0684, |
| "step": 1740 |
| }, |
| { |
| "epoch": 4.66, |
| "learning_rate": 2.5229990185933073e-05, |
| "loss": 0.0498, |
| "step": 1741 |
| }, |
| { |
| "epoch": 4.66, |
| "learning_rate": 2.513419808941482e-05, |
| "loss": 0.0618, |
| "step": 1742 |
| }, |
| { |
| "epoch": 4.66, |
| "learning_rate": 2.5038562043469948e-05, |
| "loss": 0.062, |
| "step": 1743 |
| }, |
| { |
| "epoch": 4.66, |
| "learning_rate": 2.4943082247442585e-05, |
| "loss": 0.0578, |
| "step": 1744 |
| }, |
| { |
| "epoch": 4.67, |
| "learning_rate": 2.4847758900351226e-05, |
| "loss": 0.0689, |
| "step": 1745 |
| }, |
| { |
| "epoch": 4.67, |
| "learning_rate": 2.475259220088818e-05, |
| "loss": 0.0756, |
| "step": 1746 |
| }, |
| { |
| "epoch": 4.67, |
| "learning_rate": 2.4657582347419362e-05, |
| "loss": 0.0599, |
| "step": 1747 |
| }, |
| { |
| "epoch": 4.67, |
| "learning_rate": 2.456272953798361e-05, |
| "loss": 0.064, |
| "step": 1748 |
| }, |
| { |
| "epoch": 4.68, |
| "learning_rate": 2.446803397029257e-05, |
| "loss": 0.0587, |
| "step": 1749 |
| }, |
| { |
| "epoch": 4.68, |
| "learning_rate": 2.437349584172999e-05, |
| "loss": 0.0576, |
| "step": 1750 |
| }, |
| { |
| "epoch": 4.68, |
| "learning_rate": 2.4279115349351543e-05, |
| "loss": 0.0671, |
| "step": 1751 |
| }, |
| { |
| "epoch": 4.68, |
| "learning_rate": 2.418489268988433e-05, |
| "loss": 0.0545, |
| "step": 1752 |
| }, |
| { |
| "epoch": 4.69, |
| "learning_rate": 2.409082805972639e-05, |
| "loss": 0.0626, |
| "step": 1753 |
| }, |
| { |
| "epoch": 4.69, |
| "learning_rate": 2.399692165494646e-05, |
| "loss": 0.0672, |
| "step": 1754 |
| }, |
| { |
| "epoch": 4.69, |
| "learning_rate": 2.3903173671283363e-05, |
| "loss": 0.0605, |
| "step": 1755 |
| }, |
| { |
| "epoch": 4.7, |
| "learning_rate": 2.3809584304145827e-05, |
| "loss": 0.0573, |
| "step": 1756 |
| }, |
| { |
| "epoch": 4.7, |
| "learning_rate": 2.3716153748611835e-05, |
| "loss": 0.0614, |
| "step": 1757 |
| }, |
| { |
| "epoch": 4.7, |
| "learning_rate": 2.3622882199428464e-05, |
| "loss": 0.0569, |
| "step": 1758 |
| }, |
| { |
| "epoch": 4.7, |
| "learning_rate": 2.3529769851011252e-05, |
| "loss": 0.0758, |
| "step": 1759 |
| }, |
| { |
| "epoch": 4.71, |
| "learning_rate": 2.343681689744396e-05, |
| "loss": 0.057, |
| "step": 1760 |
| }, |
| { |
| "epoch": 4.71, |
| "learning_rate": 2.3344023532478133e-05, |
| "loss": 0.068, |
| "step": 1761 |
| }, |
| { |
| "epoch": 4.71, |
| "learning_rate": 2.32513899495326e-05, |
| "loss": 0.0625, |
| "step": 1762 |
| }, |
| { |
| "epoch": 4.71, |
| "learning_rate": 2.3158916341693128e-05, |
| "loss": 0.0684, |
| "step": 1763 |
| }, |
| { |
| "epoch": 4.72, |
| "learning_rate": 2.3066602901712108e-05, |
| "loss": 0.0665, |
| "step": 1764 |
| }, |
| { |
| "epoch": 4.72, |
| "learning_rate": 2.297444982200806e-05, |
| "loss": 0.0478, |
| "step": 1765 |
| }, |
| { |
| "epoch": 4.72, |
| "learning_rate": 2.2882457294665204e-05, |
| "loss": 0.0598, |
| "step": 1766 |
| }, |
| { |
| "epoch": 4.72, |
| "learning_rate": 2.2790625511433096e-05, |
| "loss": 0.0694, |
| "step": 1767 |
| }, |
| { |
| "epoch": 4.73, |
| "learning_rate": 2.26989546637263e-05, |
| "loss": 0.0692, |
| "step": 1768 |
| }, |
| { |
| "epoch": 4.73, |
| "learning_rate": 2.2607444942623922e-05, |
| "loss": 0.0689, |
| "step": 1769 |
| }, |
| { |
| "epoch": 4.73, |
| "learning_rate": 2.2516096538869136e-05, |
| "loss": 0.0636, |
| "step": 1770 |
| }, |
| { |
| "epoch": 4.74, |
| "learning_rate": 2.242490964286895e-05, |
| "loss": 0.0641, |
| "step": 1771 |
| }, |
| { |
| "epoch": 4.74, |
| "learning_rate": 2.2333884444693652e-05, |
| "loss": 0.0778, |
| "step": 1772 |
| }, |
| { |
| "epoch": 4.74, |
| "learning_rate": 2.224302113407656e-05, |
| "loss": 0.0656, |
| "step": 1773 |
| }, |
| { |
| "epoch": 4.74, |
| "learning_rate": 2.215231990041352e-05, |
| "loss": 0.0595, |
| "step": 1774 |
| }, |
| { |
| "epoch": 4.75, |
| "learning_rate": 2.2061780932762542e-05, |
| "loss": 0.0626, |
| "step": 1775 |
| }, |
| { |
| "epoch": 4.75, |
| "learning_rate": 2.1971404419843355e-05, |
| "loss": 0.0664, |
| "step": 1776 |
| }, |
| { |
| "epoch": 4.75, |
| "learning_rate": 2.1881190550037167e-05, |
| "loss": 0.0599, |
| "step": 1777 |
| }, |
| { |
| "epoch": 4.75, |
| "learning_rate": 2.179113951138615e-05, |
| "loss": 0.0533, |
| "step": 1778 |
| }, |
| { |
| "epoch": 4.76, |
| "learning_rate": 2.1701251491593e-05, |
| "loss": 0.0648, |
| "step": 1779 |
| }, |
| { |
| "epoch": 4.76, |
| "learning_rate": 2.1611526678020654e-05, |
| "loss": 0.0538, |
| "step": 1780 |
| }, |
| { |
| "epoch": 4.76, |
| "learning_rate": 2.1521965257691877e-05, |
| "loss": 0.0632, |
| "step": 1781 |
| }, |
| { |
| "epoch": 4.76, |
| "learning_rate": 2.1432567417288862e-05, |
| "loss": 0.0601, |
| "step": 1782 |
| }, |
| { |
| "epoch": 4.77, |
| "learning_rate": 2.134333334315287e-05, |
| "loss": 0.0543, |
| "step": 1783 |
| }, |
| { |
| "epoch": 4.77, |
| "learning_rate": 2.1254263221283654e-05, |
| "loss": 0.0652, |
| "step": 1784 |
| }, |
| { |
| "epoch": 4.77, |
| "learning_rate": 2.116535723733938e-05, |
| "loss": 0.0612, |
| "step": 1785 |
| }, |
| { |
| "epoch": 4.78, |
| "learning_rate": 2.1076615576636027e-05, |
| "loss": 0.0641, |
| "step": 1786 |
| }, |
| { |
| "epoch": 4.78, |
| "learning_rate": 2.0988038424147093e-05, |
| "loss": 0.0564, |
| "step": 1787 |
| }, |
| { |
| "epoch": 4.78, |
| "learning_rate": 2.0899625964503113e-05, |
| "loss": 0.0721, |
| "step": 1788 |
| }, |
| { |
| "epoch": 4.78, |
| "learning_rate": 2.0811378381991353e-05, |
| "loss": 0.0674, |
| "step": 1789 |
| }, |
| { |
| "epoch": 4.79, |
| "learning_rate": 2.0723295860555437e-05, |
| "loss": 0.0719, |
| "step": 1790 |
| }, |
| { |
| "epoch": 4.79, |
| "learning_rate": 2.063537858379493e-05, |
| "loss": 0.0691, |
| "step": 1791 |
| }, |
| { |
| "epoch": 4.79, |
| "learning_rate": 2.0547626734964997e-05, |
| "loss": 0.0598, |
| "step": 1792 |
| }, |
| { |
| "epoch": 4.79, |
| "learning_rate": 2.046004049697584e-05, |
| "loss": 0.0573, |
| "step": 1793 |
| }, |
| { |
| "epoch": 4.8, |
| "learning_rate": 2.03726200523926e-05, |
| "loss": 0.0558, |
| "step": 1794 |
| }, |
| { |
| "epoch": 4.8, |
| "learning_rate": 2.028536558343481e-05, |
| "loss": 0.0547, |
| "step": 1795 |
| }, |
| { |
| "epoch": 4.8, |
| "learning_rate": 2.0198277271976052e-05, |
| "loss": 0.0661, |
| "step": 1796 |
| }, |
| { |
| "epoch": 4.8, |
| "learning_rate": 2.011135529954352e-05, |
| "loss": 0.0622, |
| "step": 1797 |
| }, |
| { |
| "epoch": 4.81, |
| "learning_rate": 2.0024599847317695e-05, |
| "loss": 0.0624, |
| "step": 1798 |
| }, |
| { |
| "epoch": 4.81, |
| "learning_rate": 1.9938011096131993e-05, |
| "loss": 0.0652, |
| "step": 1799 |
| }, |
| { |
| "epoch": 4.81, |
| "learning_rate": 1.98515892264724e-05, |
| "loss": 0.0598, |
| "step": 1800 |
| }, |
| { |
| "epoch": 4.82, |
| "learning_rate": 1.9765334418476965e-05, |
| "loss": 0.0647, |
| "step": 1801 |
| }, |
| { |
| "epoch": 4.82, |
| "learning_rate": 1.967924685193552e-05, |
| "loss": 0.0569, |
| "step": 1802 |
| }, |
| { |
| "epoch": 4.82, |
| "learning_rate": 1.9593326706289362e-05, |
| "loss": 0.0626, |
| "step": 1803 |
| }, |
| { |
| "epoch": 4.82, |
| "learning_rate": 1.950757416063077e-05, |
| "loss": 0.0651, |
| "step": 1804 |
| }, |
| { |
| "epoch": 4.83, |
| "learning_rate": 1.9421989393702745e-05, |
| "loss": 0.0635, |
| "step": 1805 |
| }, |
| { |
| "epoch": 4.83, |
| "learning_rate": 1.933657258389845e-05, |
| "loss": 0.0733, |
| "step": 1806 |
| }, |
| { |
| "epoch": 4.83, |
| "learning_rate": 1.9251323909261022e-05, |
| "loss": 0.0747, |
| "step": 1807 |
| }, |
| { |
| "epoch": 4.83, |
| "learning_rate": 1.916624354748314e-05, |
| "loss": 0.0617, |
| "step": 1808 |
| }, |
| { |
| "epoch": 4.84, |
| "learning_rate": 1.908133167590669e-05, |
| "loss": 0.0587, |
| "step": 1809 |
| }, |
| { |
| "epoch": 4.84, |
| "learning_rate": 1.899658847152228e-05, |
| "loss": 0.067, |
| "step": 1810 |
| }, |
| { |
| "epoch": 4.84, |
| "learning_rate": 1.8912014110968956e-05, |
| "loss": 0.0596, |
| "step": 1811 |
| }, |
| { |
| "epoch": 4.84, |
| "learning_rate": 1.882760877053388e-05, |
| "loss": 0.0643, |
| "step": 1812 |
| }, |
| { |
| "epoch": 4.85, |
| "learning_rate": 1.874337262615189e-05, |
| "loss": 0.0617, |
| "step": 1813 |
| }, |
| { |
| "epoch": 4.85, |
| "learning_rate": 1.8659305853405118e-05, |
| "loss": 0.0628, |
| "step": 1814 |
| }, |
| { |
| "epoch": 4.85, |
| "learning_rate": 1.857540862752265e-05, |
| "loss": 0.0596, |
| "step": 1815 |
| }, |
| { |
| "epoch": 4.86, |
| "learning_rate": 1.8491681123380235e-05, |
| "loss": 0.0677, |
| "step": 1816 |
| }, |
| { |
| "epoch": 4.86, |
| "learning_rate": 1.840812351549982e-05, |
| "loss": 0.0813, |
| "step": 1817 |
| }, |
| { |
| "epoch": 4.86, |
| "learning_rate": 1.8324735978049168e-05, |
| "loss": 0.0636, |
| "step": 1818 |
| }, |
| { |
| "epoch": 4.86, |
| "learning_rate": 1.824151868484164e-05, |
| "loss": 0.0628, |
| "step": 1819 |
| }, |
| { |
| "epoch": 4.87, |
| "learning_rate": 1.8158471809335654e-05, |
| "loss": 0.0575, |
| "step": 1820 |
| }, |
| { |
| "epoch": 4.87, |
| "learning_rate": 1.807559552463446e-05, |
| "loss": 0.0545, |
| "step": 1821 |
| }, |
| { |
| "epoch": 4.87, |
| "learning_rate": 1.799289000348574e-05, |
| "loss": 0.0489, |
| "step": 1822 |
| }, |
| { |
| "epoch": 4.87, |
| "learning_rate": 1.7910355418281187e-05, |
| "loss": 0.0636, |
| "step": 1823 |
| }, |
| { |
| "epoch": 4.88, |
| "learning_rate": 1.7827991941056177e-05, |
| "loss": 0.0721, |
| "step": 1824 |
| }, |
| { |
| "epoch": 4.88, |
| "learning_rate": 1.774579974348951e-05, |
| "loss": 0.0693, |
| "step": 1825 |
| }, |
| { |
| "epoch": 4.88, |
| "learning_rate": 1.7663778996902947e-05, |
| "loss": 0.0625, |
| "step": 1826 |
| }, |
| { |
| "epoch": 4.89, |
| "learning_rate": 1.7581929872260805e-05, |
| "loss": 0.0581, |
| "step": 1827 |
| }, |
| { |
| "epoch": 4.89, |
| "learning_rate": 1.750025254016978e-05, |
| "loss": 0.0638, |
| "step": 1828 |
| }, |
| { |
| "epoch": 4.89, |
| "learning_rate": 1.741874717087836e-05, |
| "loss": 0.0755, |
| "step": 1829 |
| }, |
| { |
| "epoch": 4.89, |
| "learning_rate": 1.7337413934276724e-05, |
| "loss": 0.0627, |
| "step": 1830 |
| }, |
| { |
| "epoch": 4.9, |
| "learning_rate": 1.7256252999896138e-05, |
| "loss": 0.0539, |
| "step": 1831 |
| }, |
| { |
| "epoch": 4.9, |
| "learning_rate": 1.7175264536908808e-05, |
| "loss": 0.069, |
| "step": 1832 |
| }, |
| { |
| "epoch": 4.9, |
| "learning_rate": 1.7094448714127387e-05, |
| "loss": 0.0648, |
| "step": 1833 |
| }, |
| { |
| "epoch": 4.9, |
| "learning_rate": 1.7013805700004714e-05, |
| "loss": 0.0684, |
| "step": 1834 |
| }, |
| { |
| "epoch": 4.91, |
| "learning_rate": 1.693333566263339e-05, |
| "loss": 0.0606, |
| "step": 1835 |
| }, |
| { |
| "epoch": 4.91, |
| "learning_rate": 1.6853038769745467e-05, |
| "loss": 0.0682, |
| "step": 1836 |
| }, |
| { |
| "epoch": 4.91, |
| "learning_rate": 1.677291518871216e-05, |
| "loss": 0.0582, |
| "step": 1837 |
| }, |
| { |
| "epoch": 4.91, |
| "learning_rate": 1.669296508654331e-05, |
| "loss": 0.0667, |
| "step": 1838 |
| }, |
| { |
| "epoch": 4.92, |
| "learning_rate": 1.661318862988729e-05, |
| "loss": 0.0593, |
| "step": 1839 |
| }, |
| { |
| "epoch": 4.92, |
| "learning_rate": 1.65335859850304e-05, |
| "loss": 0.0702, |
| "step": 1840 |
| }, |
| { |
| "epoch": 4.92, |
| "learning_rate": 1.645415731789677e-05, |
| "loss": 0.0614, |
| "step": 1841 |
| }, |
| { |
| "epoch": 4.93, |
| "learning_rate": 1.6374902794047753e-05, |
| "loss": 0.0664, |
| "step": 1842 |
| }, |
| { |
| "epoch": 4.93, |
| "learning_rate": 1.6295822578681875e-05, |
| "loss": 0.0725, |
| "step": 1843 |
| }, |
| { |
| "epoch": 4.93, |
| "learning_rate": 1.621691683663418e-05, |
| "loss": 0.0665, |
| "step": 1844 |
| }, |
| { |
| "epoch": 4.93, |
| "learning_rate": 1.6138185732376145e-05, |
| "loss": 0.0533, |
| "step": 1845 |
| }, |
| { |
| "epoch": 4.94, |
| "learning_rate": 1.605962943001518e-05, |
| "loss": 0.062, |
| "step": 1846 |
| }, |
| { |
| "epoch": 4.94, |
| "learning_rate": 1.5981248093294377e-05, |
| "loss": 0.062, |
| "step": 1847 |
| }, |
| { |
| "epoch": 4.94, |
| "learning_rate": 1.590304188559205e-05, |
| "loss": 0.073, |
| "step": 1848 |
| }, |
| { |
| "epoch": 4.94, |
| "learning_rate": 1.5825010969921584e-05, |
| "loss": 0.0649, |
| "step": 1849 |
| }, |
| { |
| "epoch": 4.95, |
| "learning_rate": 1.574715550893091e-05, |
| "loss": 0.0651, |
| "step": 1850 |
| }, |
| { |
| "epoch": 4.95, |
| "learning_rate": 1.5669475664902267e-05, |
| "loss": 0.0617, |
| "step": 1851 |
| }, |
| { |
| "epoch": 4.95, |
| "learning_rate": 1.55919715997518e-05, |
| "loss": 0.0764, |
| "step": 1852 |
| }, |
| { |
| "epoch": 4.95, |
| "learning_rate": 1.551464347502929e-05, |
| "loss": 0.0535, |
| "step": 1853 |
| }, |
| { |
| "epoch": 4.96, |
| "learning_rate": 1.543749145191783e-05, |
| "loss": 0.0602, |
| "step": 1854 |
| }, |
| { |
| "epoch": 4.96, |
| "learning_rate": 1.5360515691233357e-05, |
| "loss": 0.0701, |
| "step": 1855 |
| }, |
| { |
| "epoch": 4.96, |
| "learning_rate": 1.528371635342448e-05, |
| "loss": 0.0498, |
| "step": 1856 |
| }, |
| { |
| "epoch": 4.97, |
| "learning_rate": 1.520709359857202e-05, |
| "loss": 0.0683, |
| "step": 1857 |
| }, |
| { |
| "epoch": 4.97, |
| "learning_rate": 1.5130647586388747e-05, |
| "loss": 0.0606, |
| "step": 1858 |
| }, |
| { |
| "epoch": 4.97, |
| "learning_rate": 1.5054378476219078e-05, |
| "loss": 0.0616, |
| "step": 1859 |
| }, |
| { |
| "epoch": 4.97, |
| "learning_rate": 1.4978286427038601e-05, |
| "loss": 0.0612, |
| "step": 1860 |
| }, |
| { |
| "epoch": 4.98, |
| "learning_rate": 1.4902371597453878e-05, |
| "loss": 0.0609, |
| "step": 1861 |
| }, |
| { |
| "epoch": 4.98, |
| "learning_rate": 1.48266341457021e-05, |
| "loss": 0.07, |
| "step": 1862 |
| }, |
| { |
| "epoch": 4.98, |
| "learning_rate": 1.475107422965073e-05, |
| "loss": 0.0674, |
| "step": 1863 |
| }, |
| { |
| "epoch": 4.98, |
| "learning_rate": 1.4675692006797137e-05, |
| "loss": 0.0623, |
| "step": 1864 |
| }, |
| { |
| "epoch": 4.99, |
| "learning_rate": 1.460048763426829e-05, |
| "loss": 0.0576, |
| "step": 1865 |
| }, |
| { |
| "epoch": 4.99, |
| "learning_rate": 1.4525461268820517e-05, |
| "loss": 0.0576, |
| "step": 1866 |
| }, |
| { |
| "epoch": 4.99, |
| "learning_rate": 1.4450613066839091e-05, |
| "loss": 0.071, |
| "step": 1867 |
| }, |
| { |
| "epoch": 4.99, |
| "learning_rate": 1.437594318433787e-05, |
| "loss": 0.0627, |
| "step": 1868 |
| }, |
| { |
| "epoch": 5.0, |
| "learning_rate": 1.4301451776959041e-05, |
| "loss": 0.0728, |
| "step": 1869 |
| }, |
| { |
| "epoch": 5.0, |
| "learning_rate": 1.42271389999728e-05, |
| "loss": 0.0513, |
| "step": 1870 |
| }, |
| { |
| "epoch": 5.0, |
| "learning_rate": 1.4153005008276988e-05, |
| "loss": 0.0367, |
| "step": 1871 |
| }, |
| { |
| "epoch": 5.01, |
| "learning_rate": 1.4079049956396828e-05, |
| "loss": 0.0345, |
| "step": 1872 |
| }, |
| { |
| "epoch": 5.01, |
| "learning_rate": 1.4005273998484502e-05, |
| "loss": 0.0412, |
| "step": 1873 |
| }, |
| { |
| "epoch": 5.01, |
| "learning_rate": 1.3931677288318868e-05, |
| "loss": 0.0477, |
| "step": 1874 |
| }, |
| { |
| "epoch": 5.01, |
| "learning_rate": 1.3858259979305233e-05, |
| "loss": 0.0339, |
| "step": 1875 |
| }, |
| { |
| "epoch": 5.02, |
| "learning_rate": 1.3785022224474942e-05, |
| "loss": 0.0335, |
| "step": 1876 |
| }, |
| { |
| "epoch": 5.02, |
| "learning_rate": 1.3711964176485049e-05, |
| "loss": 0.0347, |
| "step": 1877 |
| }, |
| { |
| "epoch": 5.02, |
| "learning_rate": 1.3639085987618006e-05, |
| "loss": 0.0358, |
| "step": 1878 |
| }, |
| { |
| "epoch": 5.02, |
| "learning_rate": 1.3566387809781423e-05, |
| "loss": 0.0269, |
| "step": 1879 |
| }, |
| { |
| "epoch": 5.03, |
| "learning_rate": 1.3493869794507663e-05, |
| "loss": 0.0347, |
| "step": 1880 |
| }, |
| { |
| "epoch": 5.03, |
| "learning_rate": 1.3421532092953625e-05, |
| "loss": 0.0292, |
| "step": 1881 |
| }, |
| { |
| "epoch": 5.03, |
| "learning_rate": 1.3349374855900198e-05, |
| "loss": 0.0353, |
| "step": 1882 |
| }, |
| { |
| "epoch": 5.03, |
| "learning_rate": 1.327739823375226e-05, |
| "loss": 0.026, |
| "step": 1883 |
| }, |
| { |
| "epoch": 5.04, |
| "learning_rate": 1.3205602376538163e-05, |
| "loss": 0.034, |
| "step": 1884 |
| }, |
| { |
| "epoch": 5.04, |
| "learning_rate": 1.3133987433909501e-05, |
| "loss": 0.0331, |
| "step": 1885 |
| }, |
| { |
| "epoch": 5.04, |
| "learning_rate": 1.3062553555140722e-05, |
| "loss": 0.033, |
| "step": 1886 |
| }, |
| { |
| "epoch": 5.05, |
| "learning_rate": 1.2991300889128866e-05, |
| "loss": 0.0364, |
| "step": 1887 |
| }, |
| { |
| "epoch": 5.05, |
| "learning_rate": 1.2920229584393283e-05, |
| "loss": 0.045, |
| "step": 1888 |
| }, |
| { |
| "epoch": 5.05, |
| "learning_rate": 1.2849339789075298e-05, |
| "loss": 0.036, |
| "step": 1889 |
| }, |
| { |
| "epoch": 5.05, |
| "learning_rate": 1.27786316509379e-05, |
| "loss": 0.0317, |
| "step": 1890 |
| }, |
| { |
| "epoch": 5.06, |
| "learning_rate": 1.2708105317365349e-05, |
| "loss": 0.0333, |
| "step": 1891 |
| }, |
| { |
| "epoch": 5.06, |
| "learning_rate": 1.2637760935363053e-05, |
| "loss": 0.0304, |
| "step": 1892 |
| }, |
| { |
| "epoch": 5.06, |
| "learning_rate": 1.25675986515571e-05, |
| "loss": 0.0352, |
| "step": 1893 |
| }, |
| { |
| "epoch": 5.06, |
| "learning_rate": 1.2497618612194074e-05, |
| "loss": 0.0271, |
| "step": 1894 |
| }, |
| { |
| "epoch": 5.07, |
| "learning_rate": 1.2427820963140613e-05, |
| "loss": 0.0336, |
| "step": 1895 |
| }, |
| { |
| "epoch": 5.07, |
| "learning_rate": 1.2358205849883198e-05, |
| "loss": 0.0351, |
| "step": 1896 |
| }, |
| { |
| "epoch": 5.07, |
| "learning_rate": 1.2288773417527865e-05, |
| "loss": 0.0392, |
| "step": 1897 |
| }, |
| { |
| "epoch": 5.07, |
| "learning_rate": 1.221952381079986e-05, |
| "loss": 0.03, |
| "step": 1898 |
| }, |
| { |
| "epoch": 5.08, |
| "learning_rate": 1.215045717404334e-05, |
| "loss": 0.0313, |
| "step": 1899 |
| }, |
| { |
| "epoch": 5.08, |
| "learning_rate": 1.2081573651221035e-05, |
| "loss": 0.0277, |
| "step": 1900 |
| }, |
| { |
| "epoch": 5.08, |
| "learning_rate": 1.201287338591407e-05, |
| "loss": 0.0303, |
| "step": 1901 |
| }, |
| { |
| "epoch": 5.09, |
| "learning_rate": 1.1944356521321542e-05, |
| "loss": 0.0323, |
| "step": 1902 |
| }, |
| { |
| "epoch": 5.09, |
| "learning_rate": 1.1876023200260267e-05, |
| "loss": 0.0321, |
| "step": 1903 |
| }, |
| { |
| "epoch": 5.09, |
| "learning_rate": 1.1807873565164506e-05, |
| "loss": 0.0313, |
| "step": 1904 |
| }, |
| { |
| "epoch": 5.09, |
| "learning_rate": 1.1739907758085599e-05, |
| "loss": 0.0332, |
| "step": 1905 |
| }, |
| { |
| "epoch": 5.1, |
| "learning_rate": 1.1672125920691757e-05, |
| "loss": 0.0328, |
| "step": 1906 |
| }, |
| { |
| "epoch": 5.1, |
| "learning_rate": 1.1604528194267739e-05, |
| "loss": 0.0301, |
| "step": 1907 |
| }, |
| { |
| "epoch": 5.1, |
| "learning_rate": 1.1537114719714481e-05, |
| "loss": 0.0242, |
| "step": 1908 |
| }, |
| { |
| "epoch": 5.1, |
| "learning_rate": 1.1469885637548871e-05, |
| "loss": 0.0314, |
| "step": 1909 |
| }, |
| { |
| "epoch": 5.11, |
| "learning_rate": 1.1402841087903515e-05, |
| "loss": 0.0299, |
| "step": 1910 |
| }, |
| { |
| "epoch": 5.11, |
| "learning_rate": 1.1335981210526347e-05, |
| "loss": 0.0343, |
| "step": 1911 |
| }, |
| { |
| "epoch": 5.11, |
| "learning_rate": 1.1269306144780333e-05, |
| "loss": 0.0295, |
| "step": 1912 |
| }, |
| { |
| "epoch": 5.11, |
| "learning_rate": 1.1202816029643237e-05, |
| "loss": 0.0312, |
| "step": 1913 |
| }, |
| { |
| "epoch": 5.12, |
| "learning_rate": 1.1136511003707329e-05, |
| "loss": 0.0268, |
| "step": 1914 |
| }, |
| { |
| "epoch": 5.12, |
| "learning_rate": 1.1070391205179087e-05, |
| "loss": 0.0233, |
| "step": 1915 |
| }, |
| { |
| "epoch": 5.12, |
| "learning_rate": 1.1004456771878835e-05, |
| "loss": 0.0317, |
| "step": 1916 |
| }, |
| { |
| "epoch": 5.13, |
| "learning_rate": 1.0938707841240614e-05, |
| "loss": 0.0302, |
| "step": 1917 |
| }, |
| { |
| "epoch": 5.13, |
| "learning_rate": 1.087314455031172e-05, |
| "loss": 0.0286, |
| "step": 1918 |
| }, |
| { |
| "epoch": 5.13, |
| "learning_rate": 1.080776703575256e-05, |
| "loss": 0.0323, |
| "step": 1919 |
| }, |
| { |
| "epoch": 5.13, |
| "learning_rate": 1.0742575433836255e-05, |
| "loss": 0.0307, |
| "step": 1920 |
| }, |
| { |
| "epoch": 5.14, |
| "learning_rate": 1.067756988044848e-05, |
| "loss": 0.0294, |
| "step": 1921 |
| }, |
| { |
| "epoch": 5.14, |
| "learning_rate": 1.0612750511087022e-05, |
| "loss": 0.0315, |
| "step": 1922 |
| }, |
| { |
| "epoch": 5.14, |
| "learning_rate": 1.0548117460861651e-05, |
| "loss": 0.0265, |
| "step": 1923 |
| }, |
| { |
| "epoch": 5.14, |
| "learning_rate": 1.0483670864493778e-05, |
| "loss": 0.0274, |
| "step": 1924 |
| }, |
| { |
| "epoch": 5.15, |
| "learning_rate": 1.0419410856316093e-05, |
| "loss": 0.0299, |
| "step": 1925 |
| }, |
| { |
| "epoch": 5.15, |
| "learning_rate": 1.035533757027245e-05, |
| "loss": 0.0341, |
| "step": 1926 |
| }, |
| { |
| "epoch": 5.15, |
| "learning_rate": 1.029145113991743e-05, |
| "loss": 0.0295, |
| "step": 1927 |
| }, |
| { |
| "epoch": 5.16, |
| "learning_rate": 1.0227751698416199e-05, |
| "loss": 0.0282, |
| "step": 1928 |
| }, |
| { |
| "epoch": 5.16, |
| "learning_rate": 1.0164239378544083e-05, |
| "loss": 0.033, |
| "step": 1929 |
| }, |
| { |
| "epoch": 5.16, |
| "learning_rate": 1.010091431268645e-05, |
| "loss": 0.0365, |
| "step": 1930 |
| }, |
| { |
| "epoch": 5.16, |
| "learning_rate": 1.0037776632838281e-05, |
| "loss": 0.0318, |
| "step": 1931 |
| }, |
| { |
| "epoch": 5.17, |
| "learning_rate": 9.974826470604049e-06, |
| "loss": 0.0301, |
| "step": 1932 |
| }, |
| { |
| "epoch": 5.17, |
| "learning_rate": 9.91206395719726e-06, |
| "loss": 0.0328, |
| "step": 1933 |
| }, |
| { |
| "epoch": 5.17, |
| "learning_rate": 9.8494892234404e-06, |
| "loss": 0.0319, |
| "step": 1934 |
| }, |
| { |
| "epoch": 5.17, |
| "learning_rate": 9.787102399764481e-06, |
| "loss": 0.0283, |
| "step": 1935 |
| }, |
| { |
| "epoch": 5.18, |
| "learning_rate": 9.724903616208836e-06, |
| "loss": 0.0235, |
| "step": 1936 |
| }, |
| { |
| "epoch": 5.18, |
| "learning_rate": 9.662893002420837e-06, |
| "loss": 0.0355, |
| "step": 1937 |
| }, |
| { |
| "epoch": 5.18, |
| "learning_rate": 9.601070687655667e-06, |
| "loss": 0.0293, |
| "step": 1938 |
| }, |
| { |
| "epoch": 5.18, |
| "learning_rate": 9.539436800776025e-06, |
| "loss": 0.0305, |
| "step": 1939 |
| }, |
| { |
| "epoch": 5.19, |
| "learning_rate": 9.477991470251791e-06, |
| "loss": 0.0424, |
| "step": 1940 |
| }, |
| { |
| "epoch": 5.19, |
| "learning_rate": 9.4167348241599e-06, |
| "loss": 0.0275, |
| "step": 1941 |
| }, |
| { |
| "epoch": 5.19, |
| "learning_rate": 9.355666990183898e-06, |
| "loss": 0.0276, |
| "step": 1942 |
| }, |
| { |
| "epoch": 5.2, |
| "learning_rate": 9.29478809561386e-06, |
| "loss": 0.0341, |
| "step": 1943 |
| }, |
| { |
| "epoch": 5.2, |
| "learning_rate": 9.234098267345958e-06, |
| "loss": 0.0301, |
| "step": 1944 |
| }, |
| { |
| "epoch": 5.2, |
| "learning_rate": 9.173597631882359e-06, |
| "loss": 0.0303, |
| "step": 1945 |
| }, |
| { |
| "epoch": 5.2, |
| "learning_rate": 9.11328631533076e-06, |
| "loss": 0.0325, |
| "step": 1946 |
| }, |
| { |
| "epoch": 5.21, |
| "learning_rate": 9.05316444340436e-06, |
| "loss": 0.0396, |
| "step": 1947 |
| }, |
| { |
| "epoch": 5.21, |
| "learning_rate": 8.993232141421415e-06, |
| "loss": 0.0372, |
| "step": 1948 |
| }, |
| { |
| "epoch": 5.21, |
| "learning_rate": 8.933489534305051e-06, |
| "loss": 0.0299, |
| "step": 1949 |
| }, |
| { |
| "epoch": 5.21, |
| "learning_rate": 8.873936746582978e-06, |
| "loss": 0.0451, |
| "step": 1950 |
| }, |
| { |
| "epoch": 5.22, |
| "learning_rate": 8.81457390238728e-06, |
| "loss": 0.028, |
| "step": 1951 |
| }, |
| { |
| "epoch": 5.22, |
| "learning_rate": 8.75540112545411e-06, |
| "loss": 0.0305, |
| "step": 1952 |
| }, |
| { |
| "epoch": 5.22, |
| "learning_rate": 8.69641853912342e-06, |
| "loss": 0.0372, |
| "step": 1953 |
| }, |
| { |
| "epoch": 5.22, |
| "learning_rate": 8.637626266338749e-06, |
| "loss": 0.0312, |
| "step": 1954 |
| }, |
| { |
| "epoch": 5.23, |
| "learning_rate": 8.579024429646932e-06, |
| "loss": 0.028, |
| "step": 1955 |
| }, |
| { |
| "epoch": 5.23, |
| "learning_rate": 8.520613151197898e-06, |
| "loss": 0.0311, |
| "step": 1956 |
| }, |
| { |
| "epoch": 5.23, |
| "learning_rate": 8.462392552744347e-06, |
| "loss": 0.0317, |
| "step": 1957 |
| }, |
| { |
| "epoch": 5.24, |
| "learning_rate": 8.404362755641504e-06, |
| "loss": 0.0317, |
| "step": 1958 |
| }, |
| { |
| "epoch": 5.24, |
| "learning_rate": 8.346523880846902e-06, |
| "loss": 0.0265, |
| "step": 1959 |
| }, |
| { |
| "epoch": 5.24, |
| "learning_rate": 8.288876048920124e-06, |
| "loss": 0.0261, |
| "step": 1960 |
| }, |
| { |
| "epoch": 5.24, |
| "learning_rate": 8.231419380022576e-06, |
| "loss": 0.0357, |
| "step": 1961 |
| }, |
| { |
| "epoch": 5.25, |
| "learning_rate": 8.174153993917122e-06, |
| "loss": 0.0329, |
| "step": 1962 |
| }, |
| { |
| "epoch": 5.25, |
| "learning_rate": 8.11708000996797e-06, |
| "loss": 0.03, |
| "step": 1963 |
| }, |
| { |
| "epoch": 5.25, |
| "learning_rate": 8.060197547140347e-06, |
| "loss": 0.0316, |
| "step": 1964 |
| }, |
| { |
| "epoch": 5.25, |
| "learning_rate": 8.003506724000321e-06, |
| "loss": 0.03, |
| "step": 1965 |
| }, |
| { |
| "epoch": 5.26, |
| "learning_rate": 7.947007658714444e-06, |
| "loss": 0.0343, |
| "step": 1966 |
| }, |
| { |
| "epoch": 5.26, |
| "learning_rate": 7.890700469049572e-06, |
| "loss": 0.0297, |
| "step": 1967 |
| }, |
| { |
| "epoch": 5.26, |
| "learning_rate": 7.834585272372663e-06, |
| "loss": 0.0292, |
| "step": 1968 |
| }, |
| { |
| "epoch": 5.26, |
| "learning_rate": 7.778662185650432e-06, |
| "loss": 0.0302, |
| "step": 1969 |
| }, |
| { |
| "epoch": 5.27, |
| "learning_rate": 7.722931325449223e-06, |
| "loss": 0.0316, |
| "step": 1970 |
| }, |
| { |
| "epoch": 5.27, |
| "learning_rate": 7.667392807934615e-06, |
| "loss": 0.0577, |
| "step": 1971 |
| }, |
| { |
| "epoch": 5.27, |
| "learning_rate": 7.612046748871327e-06, |
| "loss": 0.032, |
| "step": 1972 |
| }, |
| { |
| "epoch": 5.28, |
| "learning_rate": 7.556893263622911e-06, |
| "loss": 0.0313, |
| "step": 1973 |
| }, |
| { |
| "epoch": 5.28, |
| "learning_rate": 7.5019324671515065e-06, |
| "loss": 0.0307, |
| "step": 1974 |
| }, |
| { |
| "epoch": 5.28, |
| "learning_rate": 7.447164474017632e-06, |
| "loss": 0.0337, |
| "step": 1975 |
| }, |
| { |
| "epoch": 5.28, |
| "learning_rate": 7.392589398379868e-06, |
| "loss": 0.0231, |
| "step": 1976 |
| }, |
| { |
| "epoch": 5.29, |
| "learning_rate": 7.33820735399473e-06, |
| "loss": 0.0376, |
| "step": 1977 |
| }, |
| { |
| "epoch": 5.29, |
| "learning_rate": 7.28401845421639e-06, |
| "loss": 0.0306, |
| "step": 1978 |
| }, |
| { |
| "epoch": 5.29, |
| "learning_rate": 7.2300228119964064e-06, |
| "loss": 0.0254, |
| "step": 1979 |
| }, |
| { |
| "epoch": 5.29, |
| "learning_rate": 7.1762205398834936e-06, |
| "loss": 0.0274, |
| "step": 1980 |
| }, |
| { |
| "epoch": 5.3, |
| "learning_rate": 7.1226117500233e-06, |
| "loss": 0.0273, |
| "step": 1981 |
| }, |
| { |
| "epoch": 5.3, |
| "learning_rate": 7.06919655415822e-06, |
| "loss": 0.0307, |
| "step": 1982 |
| }, |
| { |
| "epoch": 5.3, |
| "learning_rate": 7.015975063627123e-06, |
| "loss": 0.0279, |
| "step": 1983 |
| }, |
| { |
| "epoch": 5.3, |
| "learning_rate": 6.962947389365071e-06, |
| "loss": 0.0299, |
| "step": 1984 |
| }, |
| { |
| "epoch": 5.31, |
| "learning_rate": 6.910113641903138e-06, |
| "loss": 0.0335, |
| "step": 1985 |
| }, |
| { |
| "epoch": 5.31, |
| "learning_rate": 6.857473931368219e-06, |
| "loss": 0.0277, |
| "step": 1986 |
| }, |
| { |
| "epoch": 5.31, |
| "learning_rate": 6.805028367482735e-06, |
| "loss": 0.0321, |
| "step": 1987 |
| }, |
| { |
| "epoch": 5.32, |
| "learning_rate": 6.75277705956443e-06, |
| "loss": 0.0266, |
| "step": 1988 |
| }, |
| { |
| "epoch": 5.32, |
| "learning_rate": 6.700720116526116e-06, |
| "loss": 0.0299, |
| "step": 1989 |
| }, |
| { |
| "epoch": 5.32, |
| "learning_rate": 6.648857646875506e-06, |
| "loss": 0.0316, |
| "step": 1990 |
| }, |
| { |
| "epoch": 5.32, |
| "learning_rate": 6.597189758714928e-06, |
| "loss": 0.0357, |
| "step": 1991 |
| }, |
| { |
| "epoch": 5.33, |
| "learning_rate": 6.545716559741166e-06, |
| "loss": 0.0354, |
| "step": 1992 |
| }, |
| { |
| "epoch": 5.33, |
| "learning_rate": 6.49443815724512e-06, |
| "loss": 0.0314, |
| "step": 1993 |
| }, |
| { |
| "epoch": 5.33, |
| "learning_rate": 6.4433546581116996e-06, |
| "loss": 0.034, |
| "step": 1994 |
| }, |
| { |
| "epoch": 5.33, |
| "learning_rate": 6.3924661688195555e-06, |
| "loss": 0.0322, |
| "step": 1995 |
| }, |
| { |
| "epoch": 5.34, |
| "learning_rate": 6.34177279544087e-06, |
| "loss": 0.0276, |
| "step": 1996 |
| }, |
| { |
| "epoch": 5.34, |
| "learning_rate": 6.29127464364111e-06, |
| "loss": 0.0325, |
| "step": 1997 |
| }, |
| { |
| "epoch": 5.34, |
| "learning_rate": 6.240971818678798e-06, |
| "loss": 0.0302, |
| "step": 1998 |
| }, |
| { |
| "epoch": 5.34, |
| "learning_rate": 6.190864425405362e-06, |
| "loss": 0.026, |
| "step": 1999 |
| }, |
| { |
| "epoch": 5.35, |
| "learning_rate": 6.140952568264857e-06, |
| "loss": 0.0286, |
| "step": 2000 |
| }, |
| { |
| "epoch": 5.35, |
| "learning_rate": 6.091236351293717e-06, |
| "loss": 0.0307, |
| "step": 2001 |
| }, |
| { |
| "epoch": 5.35, |
| "learning_rate": 6.04171587812068e-06, |
| "loss": 0.0379, |
| "step": 2002 |
| }, |
| { |
| "epoch": 5.36, |
| "learning_rate": 5.992391251966356e-06, |
| "loss": 0.0309, |
| "step": 2003 |
| }, |
| { |
| "epoch": 5.36, |
| "learning_rate": 5.943262575643238e-06, |
| "loss": 0.0433, |
| "step": 2004 |
| }, |
| { |
| "epoch": 5.36, |
| "learning_rate": 5.894329951555311e-06, |
| "loss": 0.0283, |
| "step": 2005 |
| }, |
| { |
| "epoch": 5.36, |
| "learning_rate": 5.8455934816979305e-06, |
| "loss": 0.0281, |
| "step": 2006 |
| }, |
| { |
| "epoch": 5.37, |
| "learning_rate": 5.797053267657582e-06, |
| "loss": 0.0323, |
| "step": 2007 |
| }, |
| { |
| "epoch": 5.37, |
| "learning_rate": 5.7487094106116854e-06, |
| "loss": 0.042, |
| "step": 2008 |
| }, |
| { |
| "epoch": 5.37, |
| "learning_rate": 5.700562011328381e-06, |
| "loss": 0.0339, |
| "step": 2009 |
| }, |
| { |
| "epoch": 5.37, |
| "learning_rate": 5.652611170166289e-06, |
| "loss": 0.0273, |
| "step": 2010 |
| }, |
| { |
| "epoch": 5.38, |
| "learning_rate": 5.604856987074314e-06, |
| "loss": 0.0286, |
| "step": 2011 |
| }, |
| { |
| "epoch": 5.38, |
| "learning_rate": 5.557299561591478e-06, |
| "loss": 0.0338, |
| "step": 2012 |
| }, |
| { |
| "epoch": 5.38, |
| "learning_rate": 5.509938992846686e-06, |
| "loss": 0.0324, |
| "step": 2013 |
| }, |
| { |
| "epoch": 5.39, |
| "learning_rate": 5.46277537955846e-06, |
| "loss": 0.0296, |
| "step": 2014 |
| }, |
| { |
| "epoch": 5.39, |
| "learning_rate": 5.415808820034851e-06, |
| "loss": 0.0325, |
| "step": 2015 |
| }, |
| { |
| "epoch": 5.39, |
| "learning_rate": 5.369039412173116e-06, |
| "loss": 0.0258, |
| "step": 2016 |
| }, |
| { |
| "epoch": 5.39, |
| "learning_rate": 5.322467253459618e-06, |
| "loss": 0.031, |
| "step": 2017 |
| }, |
| { |
| "epoch": 5.4, |
| "learning_rate": 5.2760924409695265e-06, |
| "loss": 0.0345, |
| "step": 2018 |
| }, |
| { |
| "epoch": 5.4, |
| "learning_rate": 5.229915071366698e-06, |
| "loss": 0.0344, |
| "step": 2019 |
| }, |
| { |
| "epoch": 5.4, |
| "learning_rate": 5.183935240903414e-06, |
| "loss": 0.0395, |
| "step": 2020 |
| }, |
| { |
| "epoch": 5.4, |
| "learning_rate": 5.138153045420235e-06, |
| "loss": 0.0343, |
| "step": 2021 |
| }, |
| { |
| "epoch": 5.41, |
| "learning_rate": 5.092568580345724e-06, |
| "loss": 0.0302, |
| "step": 2022 |
| }, |
| { |
| "epoch": 5.41, |
| "learning_rate": 5.047181940696333e-06, |
| "loss": 0.034, |
| "step": 2023 |
| }, |
| { |
| "epoch": 5.41, |
| "learning_rate": 5.001993221076162e-06, |
| "loss": 0.0271, |
| "step": 2024 |
| }, |
| { |
| "epoch": 5.41, |
| "learning_rate": 4.9570025156767345e-06, |
| "loss": 0.0245, |
| "step": 2025 |
| }, |
| { |
| "epoch": 5.42, |
| "learning_rate": 4.912209918276878e-06, |
| "loss": 0.0317, |
| "step": 2026 |
| }, |
| { |
| "epoch": 5.42, |
| "learning_rate": 4.867615522242441e-06, |
| "loss": 0.031, |
| "step": 2027 |
| }, |
| { |
| "epoch": 5.42, |
| "learning_rate": 4.8232194205261815e-06, |
| "loss": 0.03, |
| "step": 2028 |
| }, |
| { |
| "epoch": 5.43, |
| "learning_rate": 4.779021705667474e-06, |
| "loss": 0.0281, |
| "step": 2029 |
| }, |
| { |
| "epoch": 5.43, |
| "learning_rate": 4.73502246979225e-06, |
| "loss": 0.0328, |
| "step": 2030 |
| }, |
| { |
| "epoch": 5.43, |
| "learning_rate": 4.6912218046126555e-06, |
| "loss": 0.0353, |
| "step": 2031 |
| }, |
| { |
| "epoch": 5.43, |
| "learning_rate": 4.647619801426995e-06, |
| "loss": 0.0328, |
| "step": 2032 |
| }, |
| { |
| "epoch": 5.44, |
| "learning_rate": 4.604216551119445e-06, |
| "loss": 0.0361, |
| "step": 2033 |
| }, |
| { |
| "epoch": 5.44, |
| "learning_rate": 4.561012144159927e-06, |
| "loss": 0.0334, |
| "step": 2034 |
| }, |
| { |
| "epoch": 5.44, |
| "learning_rate": 4.518006670603847e-06, |
| "loss": 0.0297, |
| "step": 2035 |
| }, |
| { |
| "epoch": 5.44, |
| "learning_rate": 4.475200220092002e-06, |
| "loss": 0.0364, |
| "step": 2036 |
| }, |
| { |
| "epoch": 5.45, |
| "learning_rate": 4.432592881850339e-06, |
| "loss": 0.0321, |
| "step": 2037 |
| }, |
| { |
| "epoch": 5.45, |
| "learning_rate": 4.39018474468974e-06, |
| "loss": 0.0303, |
| "step": 2038 |
| }, |
| { |
| "epoch": 5.45, |
| "learning_rate": 4.347975897005907e-06, |
| "loss": 0.0407, |
| "step": 2039 |
| }, |
| { |
| "epoch": 5.45, |
| "learning_rate": 4.305966426779118e-06, |
| "loss": 0.0321, |
| "step": 2040 |
| }, |
| { |
| "epoch": 5.46, |
| "learning_rate": 4.264156421574095e-06, |
| "loss": 0.0326, |
| "step": 2041 |
| }, |
| { |
| "epoch": 5.46, |
| "learning_rate": 4.22254596853976e-06, |
| "loss": 0.0309, |
| "step": 2042 |
| }, |
| { |
| "epoch": 5.46, |
| "learning_rate": 4.181135154409122e-06, |
| "loss": 0.0389, |
| "step": 2043 |
| }, |
| { |
| "epoch": 5.47, |
| "learning_rate": 4.139924065499035e-06, |
| "loss": 0.034, |
| "step": 2044 |
| }, |
| { |
| "epoch": 5.47, |
| "learning_rate": 4.098912787710052e-06, |
| "loss": 0.0306, |
| "step": 2045 |
| }, |
| { |
| "epoch": 5.47, |
| "learning_rate": 4.058101406526271e-06, |
| "loss": 0.0365, |
| "step": 2046 |
| }, |
| { |
| "epoch": 5.47, |
| "learning_rate": 4.017490007015068e-06, |
| "loss": 0.0293, |
| "step": 2047 |
| }, |
| { |
| "epoch": 5.48, |
| "learning_rate": 3.977078673826995e-06, |
| "loss": 0.0358, |
| "step": 2048 |
| }, |
| { |
| "epoch": 5.48, |
| "learning_rate": 3.9368674911956175e-06, |
| "loss": 0.0351, |
| "step": 2049 |
| }, |
| { |
| "epoch": 5.48, |
| "learning_rate": 3.8968565429372886e-06, |
| "loss": 0.0373, |
| "step": 2050 |
| }, |
| { |
| "epoch": 5.48, |
| "learning_rate": 3.857045912450974e-06, |
| "loss": 0.029, |
| "step": 2051 |
| }, |
| { |
| "epoch": 5.49, |
| "learning_rate": 3.817435682718096e-06, |
| "loss": 0.0314, |
| "step": 2052 |
| }, |
| { |
| "epoch": 5.49, |
| "learning_rate": 3.7780259363023985e-06, |
| "loss": 0.0318, |
| "step": 2053 |
| }, |
| { |
| "epoch": 5.49, |
| "learning_rate": 3.7388167553496946e-06, |
| "loss": 0.0286, |
| "step": 2054 |
| }, |
| { |
| "epoch": 5.49, |
| "learning_rate": 3.699808221587786e-06, |
| "loss": 0.0282, |
| "step": 2055 |
| }, |
| { |
| "epoch": 5.5, |
| "learning_rate": 3.6610004163261767e-06, |
| "loss": 0.0304, |
| "step": 2056 |
| }, |
| { |
| "epoch": 5.5, |
| "learning_rate": 3.622393420456016e-06, |
| "loss": 0.0443, |
| "step": 2057 |
| }, |
| { |
| "epoch": 5.5, |
| "learning_rate": 3.5839873144498883e-06, |
| "loss": 0.0309, |
| "step": 2058 |
| }, |
| { |
| "epoch": 5.51, |
| "learning_rate": 3.5457821783616563e-06, |
| "loss": 0.0351, |
| "step": 2059 |
| }, |
| { |
| "epoch": 5.51, |
| "learning_rate": 3.507778091826219e-06, |
| "loss": 0.0314, |
| "step": 2060 |
| }, |
| { |
| "epoch": 5.51, |
| "learning_rate": 3.4699751340594554e-06, |
| "loss": 0.0394, |
| "step": 2061 |
| }, |
| { |
| "epoch": 5.51, |
| "learning_rate": 3.432373383858001e-06, |
| "loss": 0.0342, |
| "step": 2062 |
| }, |
| { |
| "epoch": 5.52, |
| "learning_rate": 3.394972919599093e-06, |
| "loss": 0.034, |
| "step": 2063 |
| }, |
| { |
| "epoch": 5.52, |
| "learning_rate": 3.3577738192404395e-06, |
| "loss": 0.0294, |
| "step": 2064 |
| }, |
| { |
| "epoch": 5.52, |
| "learning_rate": 3.3207761603199273e-06, |
| "loss": 0.028, |
| "step": 2065 |
| }, |
| { |
| "epoch": 5.52, |
| "learning_rate": 3.283980019955668e-06, |
| "loss": 0.0312, |
| "step": 2066 |
| }, |
| { |
| "epoch": 5.53, |
| "learning_rate": 3.2473854748456545e-06, |
| "loss": 0.031, |
| "step": 2067 |
| }, |
| { |
| "epoch": 5.53, |
| "learning_rate": 3.2109926012677484e-06, |
| "loss": 0.0321, |
| "step": 2068 |
| }, |
| { |
| "epoch": 5.53, |
| "learning_rate": 3.1748014750793585e-06, |
| "loss": 0.0333, |
| "step": 2069 |
| }, |
| { |
| "epoch": 5.53, |
| "learning_rate": 3.138812171717409e-06, |
| "loss": 0.0275, |
| "step": 2070 |
| }, |
| { |
| "epoch": 5.54, |
| "learning_rate": 3.103024766198159e-06, |
| "loss": 0.0307, |
| "step": 2071 |
| }, |
| { |
| "epoch": 5.54, |
| "learning_rate": 3.067439333117028e-06, |
| "loss": 0.0271, |
| "step": 2072 |
| }, |
| { |
| "epoch": 5.54, |
| "learning_rate": 3.032055946648427e-06, |
| "loss": 0.0278, |
| "step": 2073 |
| }, |
| { |
| "epoch": 5.55, |
| "learning_rate": 2.996874680545603e-06, |
| "loss": 0.0324, |
| "step": 2074 |
| }, |
| { |
| "epoch": 5.55, |
| "learning_rate": 2.9618956081405525e-06, |
| "loss": 0.0389, |
| "step": 2075 |
| }, |
| { |
| "epoch": 5.55, |
| "learning_rate": 2.9271188023437867e-06, |
| "loss": 0.0381, |
| "step": 2076 |
| }, |
| { |
| "epoch": 5.55, |
| "learning_rate": 2.8925443356442205e-06, |
| "loss": 0.0297, |
| "step": 2077 |
| }, |
| { |
| "epoch": 5.56, |
| "learning_rate": 2.858172280109006e-06, |
| "loss": 0.0322, |
| "step": 2078 |
| }, |
| { |
| "epoch": 5.56, |
| "learning_rate": 2.8240027073833774e-06, |
| "loss": 0.0341, |
| "step": 2079 |
| }, |
| { |
| "epoch": 5.56, |
| "learning_rate": 2.79003568869054e-06, |
| "loss": 0.0358, |
| "step": 2080 |
| }, |
| { |
| "epoch": 5.56, |
| "learning_rate": 2.7562712948314916e-06, |
| "loss": 0.0265, |
| "step": 2081 |
| }, |
| { |
| "epoch": 5.57, |
| "learning_rate": 2.722709596184858e-06, |
| "loss": 0.0326, |
| "step": 2082 |
| }, |
| { |
| "epoch": 5.57, |
| "learning_rate": 2.689350662706769e-06, |
| "loss": 0.0309, |
| "step": 2083 |
| }, |
| { |
| "epoch": 5.57, |
| "learning_rate": 2.656194563930714e-06, |
| "loss": 0.0327, |
| "step": 2084 |
| }, |
| { |
| "epoch": 5.57, |
| "learning_rate": 2.6232413689674216e-06, |
| "loss": 0.0253, |
| "step": 2085 |
| }, |
| { |
| "epoch": 5.58, |
| "learning_rate": 2.5904911465046477e-06, |
| "loss": 0.0363, |
| "step": 2086 |
| }, |
| { |
| "epoch": 5.58, |
| "learning_rate": 2.5579439648070745e-06, |
| "loss": 0.031, |
| "step": 2087 |
| }, |
| { |
| "epoch": 5.58, |
| "learning_rate": 2.5255998917161906e-06, |
| "loss": 0.0318, |
| "step": 2088 |
| }, |
| { |
| "epoch": 5.59, |
| "learning_rate": 2.493458994650111e-06, |
| "loss": 0.0415, |
| "step": 2089 |
| }, |
| { |
| "epoch": 5.59, |
| "learning_rate": 2.4615213406034343e-06, |
| "loss": 0.0378, |
| "step": 2090 |
| }, |
| { |
| "epoch": 5.59, |
| "learning_rate": 2.429786996147154e-06, |
| "loss": 0.0334, |
| "step": 2091 |
| }, |
| { |
| "epoch": 5.59, |
| "learning_rate": 2.398256027428436e-06, |
| "loss": 0.0318, |
| "step": 2092 |
| }, |
| { |
| "epoch": 5.6, |
| "learning_rate": 2.366928500170573e-06, |
| "loss": 0.03, |
| "step": 2093 |
| }, |
| { |
| "epoch": 5.6, |
| "learning_rate": 2.3358044796727872e-06, |
| "loss": 0.0354, |
| "step": 2094 |
| }, |
| { |
| "epoch": 5.6, |
| "learning_rate": 2.304884030810117e-06, |
| "loss": 0.0311, |
| "step": 2095 |
| }, |
| { |
| "epoch": 5.6, |
| "learning_rate": 2.2741672180332406e-06, |
| "loss": 0.0308, |
| "step": 2096 |
| }, |
| { |
| "epoch": 5.61, |
| "learning_rate": 2.24365410536842e-06, |
| "loss": 0.0352, |
| "step": 2097 |
| }, |
| { |
| "epoch": 5.61, |
| "learning_rate": 2.2133447564173236e-06, |
| "loss": 0.0281, |
| "step": 2098 |
| }, |
| { |
| "epoch": 5.61, |
| "learning_rate": 2.1832392343568596e-06, |
| "loss": 0.0317, |
| "step": 2099 |
| }, |
| { |
| "epoch": 5.61, |
| "learning_rate": 2.1533376019391094e-06, |
| "loss": 0.026, |
| "step": 2100 |
| }, |
| { |
| "epoch": 5.62, |
| "learning_rate": 2.1236399214911272e-06, |
| "loss": 0.0268, |
| "step": 2101 |
| }, |
| { |
| "epoch": 5.62, |
| "learning_rate": 2.094146254914908e-06, |
| "loss": 0.0283, |
| "step": 2102 |
| }, |
| { |
| "epoch": 5.62, |
| "learning_rate": 2.0648566636871423e-06, |
| "loss": 0.0302, |
| "step": 2103 |
| }, |
| { |
| "epoch": 5.63, |
| "learning_rate": 2.035771208859194e-06, |
| "loss": 0.0417, |
| "step": 2104 |
| }, |
| { |
| "epoch": 5.63, |
| "learning_rate": 2.0068899510568786e-06, |
| "loss": 0.0267, |
| "step": 2105 |
| }, |
| { |
| "epoch": 5.63, |
| "learning_rate": 1.978212950480418e-06, |
| "loss": 0.0404, |
| "step": 2106 |
| }, |
| { |
| "epoch": 5.63, |
| "learning_rate": 1.949740266904243e-06, |
| "loss": 0.0312, |
| "step": 2107 |
| }, |
| { |
| "epoch": 5.64, |
| "learning_rate": 1.921471959676957e-06, |
| "loss": 0.0329, |
| "step": 2108 |
| }, |
| { |
| "epoch": 5.64, |
| "learning_rate": 1.8934080877211157e-06, |
| "loss": 0.0319, |
| "step": 2109 |
| }, |
| { |
| "epoch": 5.64, |
| "learning_rate": 1.8655487095331714e-06, |
| "loss": 0.034, |
| "step": 2110 |
| }, |
| { |
| "epoch": 5.64, |
| "learning_rate": 1.837893883183317e-06, |
| "loss": 0.0289, |
| "step": 2111 |
| }, |
| { |
| "epoch": 5.65, |
| "learning_rate": 1.8104436663153756e-06, |
| "loss": 0.0323, |
| "step": 2112 |
| }, |
| { |
| "epoch": 5.65, |
| "learning_rate": 1.7831981161467115e-06, |
| "loss": 0.0388, |
| "step": 2113 |
| }, |
| { |
| "epoch": 5.65, |
| "learning_rate": 1.756157289468019e-06, |
| "loss": 0.0298, |
| "step": 2114 |
| }, |
| { |
| "epoch": 5.66, |
| "learning_rate": 1.7293212426433447e-06, |
| "loss": 0.0402, |
| "step": 2115 |
| }, |
| { |
| "epoch": 5.66, |
| "learning_rate": 1.7026900316098215e-06, |
| "loss": 0.0307, |
| "step": 2116 |
| }, |
| { |
| "epoch": 5.66, |
| "learning_rate": 1.6762637118776681e-06, |
| "loss": 0.0325, |
| "step": 2117 |
| }, |
| { |
| "epoch": 5.66, |
| "learning_rate": 1.65004233853e-06, |
| "loss": 0.0276, |
| "step": 2118 |
| }, |
| { |
| "epoch": 5.67, |
| "learning_rate": 1.6240259662227531e-06, |
| "loss": 0.0298, |
| "step": 2119 |
| }, |
| { |
| "epoch": 5.67, |
| "learning_rate": 1.5982146491845595e-06, |
| "loss": 0.0296, |
| "step": 2120 |
| }, |
| { |
| "epoch": 5.67, |
| "learning_rate": 1.5726084412166276e-06, |
| "loss": 0.031, |
| "step": 2121 |
| }, |
| { |
| "epoch": 5.67, |
| "learning_rate": 1.5472073956926403e-06, |
| "loss": 0.0308, |
| "step": 2122 |
| }, |
| { |
| "epoch": 5.68, |
| "learning_rate": 1.5220115655586452e-06, |
| "loss": 0.0293, |
| "step": 2123 |
| }, |
| { |
| "epoch": 5.68, |
| "learning_rate": 1.49702100333291e-06, |
| "loss": 0.0307, |
| "step": 2124 |
| }, |
| { |
| "epoch": 5.68, |
| "learning_rate": 1.472235761105878e-06, |
| "loss": 0.0261, |
| "step": 2125 |
| }, |
| { |
| "epoch": 5.68, |
| "learning_rate": 1.4476558905400007e-06, |
| "loss": 0.0323, |
| "step": 2126 |
| }, |
| { |
| "epoch": 5.69, |
| "learning_rate": 1.4232814428696506e-06, |
| "loss": 0.0289, |
| "step": 2127 |
| }, |
| { |
| "epoch": 5.69, |
| "learning_rate": 1.3991124689010427e-06, |
| "loss": 0.0393, |
| "step": 2128 |
| }, |
| { |
| "epoch": 5.69, |
| "learning_rate": 1.3751490190120675e-06, |
| "loss": 0.04, |
| "step": 2129 |
| }, |
| { |
| "epoch": 5.7, |
| "learning_rate": 1.3513911431522252e-06, |
| "loss": 0.0297, |
| "step": 2130 |
| }, |
| { |
| "epoch": 5.7, |
| "learning_rate": 1.3278388908425475e-06, |
| "loss": 0.0337, |
| "step": 2131 |
| }, |
| { |
| "epoch": 5.7, |
| "learning_rate": 1.3044923111754427e-06, |
| "loss": 0.0322, |
| "step": 2132 |
| }, |
| { |
| "epoch": 5.7, |
| "learning_rate": 1.2813514528145831e-06, |
| "loss": 0.0324, |
| "step": 2133 |
| }, |
| { |
| "epoch": 5.71, |
| "learning_rate": 1.2584163639948854e-06, |
| "loss": 0.0323, |
| "step": 2134 |
| }, |
| { |
| "epoch": 5.71, |
| "learning_rate": 1.235687092522353e-06, |
| "loss": 0.0327, |
| "step": 2135 |
| }, |
| { |
| "epoch": 5.71, |
| "learning_rate": 1.2131636857739547e-06, |
| "loss": 0.0315, |
| "step": 2136 |
| }, |
| { |
| "epoch": 5.71, |
| "learning_rate": 1.1908461906975587e-06, |
| "loss": 0.0308, |
| "step": 2137 |
| }, |
| { |
| "epoch": 5.72, |
| "learning_rate": 1.1687346538118537e-06, |
| "loss": 0.0291, |
| "step": 2138 |
| }, |
| { |
| "epoch": 5.72, |
| "learning_rate": 1.1468291212062166e-06, |
| "loss": 0.0286, |
| "step": 2139 |
| }, |
| { |
| "epoch": 5.72, |
| "learning_rate": 1.125129638540623e-06, |
| "loss": 0.0252, |
| "step": 2140 |
| }, |
| { |
| "epoch": 5.72, |
| "learning_rate": 1.1036362510455477e-06, |
| "loss": 0.0297, |
| "step": 2141 |
| }, |
| { |
| "epoch": 5.73, |
| "learning_rate": 1.0823490035218987e-06, |
| "loss": 0.0397, |
| "step": 2142 |
| }, |
| { |
| "epoch": 5.73, |
| "learning_rate": 1.0612679403409154e-06, |
| "loss": 0.03, |
| "step": 2143 |
| }, |
| { |
| "epoch": 5.73, |
| "learning_rate": 1.0403931054440374e-06, |
| "loss": 0.0294, |
| "step": 2144 |
| }, |
| { |
| "epoch": 5.74, |
| "learning_rate": 1.019724542342848e-06, |
| "loss": 0.032, |
| "step": 2145 |
| }, |
| { |
| "epoch": 5.74, |
| "learning_rate": 9.992622941189856e-07, |
| "loss": 0.027, |
| "step": 2146 |
| }, |
| { |
| "epoch": 5.74, |
| "learning_rate": 9.790064034240432e-07, |
| "loss": 0.0367, |
| "step": 2147 |
| }, |
| { |
| "epoch": 5.74, |
| "learning_rate": 9.589569124794916e-07, |
| "loss": 0.029, |
| "step": 2148 |
| }, |
| { |
| "epoch": 5.75, |
| "learning_rate": 9.391138630765462e-07, |
| "loss": 0.0332, |
| "step": 2149 |
| }, |
| { |
| "epoch": 5.75, |
| "learning_rate": 9.194772965761433e-07, |
| "loss": 0.0294, |
| "step": 2150 |
| }, |
| { |
| "epoch": 5.75, |
| "learning_rate": 9.0004725390882e-07, |
| "loss": 0.0319, |
| "step": 2151 |
| }, |
| { |
| "epoch": 5.75, |
| "learning_rate": 8.808237755746351e-07, |
| "loss": 0.0279, |
| "step": 2152 |
| }, |
| { |
| "epoch": 5.76, |
| "learning_rate": 8.61806901643103e-07, |
| "loss": 0.0304, |
| "step": 2153 |
| }, |
| { |
| "epoch": 5.76, |
| "learning_rate": 8.42996671753038e-07, |
| "loss": 0.0357, |
| "step": 2154 |
| }, |
| { |
| "epoch": 5.76, |
| "learning_rate": 8.24393125112577e-07, |
| "loss": 0.0359, |
| "step": 2155 |
| }, |
| { |
| "epoch": 5.76, |
| "learning_rate": 8.059963004990234e-07, |
| "loss": 0.0308, |
| "step": 2156 |
| }, |
| { |
| "epoch": 5.77, |
| "learning_rate": 7.878062362587924e-07, |
| "loss": 0.0328, |
| "step": 2157 |
| }, |
| { |
| "epoch": 5.77, |
| "learning_rate": 7.698229703073212e-07, |
| "loss": 0.032, |
| "step": 2158 |
| }, |
| { |
| "epoch": 5.77, |
| "learning_rate": 7.520465401290033e-07, |
| "loss": 0.032, |
| "step": 2159 |
| }, |
| { |
| "epoch": 5.78, |
| "learning_rate": 7.344769827770881e-07, |
| "loss": 0.0294, |
| "step": 2160 |
| }, |
| { |
| "epoch": 5.78, |
| "learning_rate": 7.171143348736476e-07, |
| "loss": 0.0319, |
| "step": 2161 |
| }, |
| { |
| "epoch": 5.78, |
| "learning_rate": 6.999586326094653e-07, |
| "loss": 0.0364, |
| "step": 2162 |
| }, |
| { |
| "epoch": 5.78, |
| "learning_rate": 6.830099117439149e-07, |
| "loss": 0.0312, |
| "step": 2163 |
| }, |
| { |
| "epoch": 5.79, |
| "learning_rate": 6.662682076050031e-07, |
| "loss": 0.0308, |
| "step": 2164 |
| }, |
| { |
| "epoch": 5.79, |
| "learning_rate": 6.497335550892047e-07, |
| "loss": 0.0352, |
| "step": 2165 |
| }, |
| { |
| "epoch": 5.79, |
| "learning_rate": 6.334059886614064e-07, |
| "loss": 0.0335, |
| "step": 2166 |
| }, |
| { |
| "epoch": 5.79, |
| "learning_rate": 6.172855423548618e-07, |
| "loss": 0.0295, |
| "step": 2167 |
| }, |
| { |
| "epoch": 5.8, |
| "learning_rate": 6.013722497710816e-07, |
| "loss": 0.0301, |
| "step": 2168 |
| }, |
| { |
| "epoch": 5.8, |
| "learning_rate": 5.856661440797995e-07, |
| "loss": 0.0367, |
| "step": 2169 |
| }, |
| { |
| "epoch": 5.8, |
| "learning_rate": 5.701672580188943e-07, |
| "loss": 0.0299, |
| "step": 2170 |
| }, |
| { |
| "epoch": 5.8, |
| "learning_rate": 5.548756238942909e-07, |
| "loss": 0.0361, |
| "step": 2171 |
| }, |
| { |
| "epoch": 5.81, |
| "learning_rate": 5.397912735799371e-07, |
| "loss": 0.0295, |
| "step": 2172 |
| }, |
| { |
| "epoch": 5.81, |
| "learning_rate": 5.249142385177152e-07, |
| "loss": 0.0371, |
| "step": 2173 |
| }, |
| { |
| "epoch": 5.81, |
| "learning_rate": 5.102445497173757e-07, |
| "loss": 0.0261, |
| "step": 2174 |
| }, |
| { |
| "epoch": 5.82, |
| "learning_rate": 4.957822377564702e-07, |
| "loss": 0.0312, |
| "step": 2175 |
| }, |
| { |
| "epoch": 5.82, |
| "learning_rate": 4.815273327803182e-07, |
| "loss": 0.0322, |
| "step": 2176 |
| }, |
| { |
| "epoch": 5.82, |
| "learning_rate": 4.67479864501863e-07, |
| "loss": 0.0299, |
| "step": 2177 |
| }, |
| { |
| "epoch": 5.82, |
| "learning_rate": 4.5363986220173795e-07, |
| "loss": 0.0348, |
| "step": 2178 |
| }, |
| { |
| "epoch": 5.83, |
| "learning_rate": 4.400073547280781e-07, |
| "loss": 0.0306, |
| "step": 2179 |
| }, |
| { |
| "epoch": 5.83, |
| "learning_rate": 4.2658237049655323e-07, |
| "loss": 0.0331, |
| "step": 2180 |
| }, |
| { |
| "epoch": 5.83, |
| "learning_rate": 4.133649374902349e-07, |
| "loss": 0.0359, |
| "step": 2181 |
| }, |
| { |
| "epoch": 5.83, |
| "learning_rate": 4.0035508325959593e-07, |
| "loss": 0.0284, |
| "step": 2182 |
| }, |
| { |
| "epoch": 5.84, |
| "learning_rate": 3.875528349224444e-07, |
| "loss": 0.0316, |
| "step": 2183 |
| }, |
| { |
| "epoch": 5.84, |
| "learning_rate": 3.7495821916382344e-07, |
| "loss": 0.0331, |
| "step": 2184 |
| }, |
| { |
| "epoch": 5.84, |
| "learning_rate": 3.62571262236e-07, |
| "loss": 0.0253, |
| "step": 2185 |
| }, |
| { |
| "epoch": 5.84, |
| "learning_rate": 3.5039198995839853e-07, |
| "loss": 0.0331, |
| "step": 2186 |
| }, |
| { |
| "epoch": 5.85, |
| "learning_rate": 3.384204277175451e-07, |
| "loss": 0.0312, |
| "step": 2187 |
| }, |
| { |
| "epoch": 5.85, |
| "learning_rate": 3.266566004670013e-07, |
| "loss": 0.0371, |
| "step": 2188 |
| }, |
| { |
| "epoch": 5.85, |
| "learning_rate": 3.151005327273526e-07, |
| "loss": 0.0398, |
| "step": 2189 |
| }, |
| { |
| "epoch": 5.86, |
| "learning_rate": 3.0375224858609773e-07, |
| "loss": 0.0279, |
| "step": 2190 |
| }, |
| { |
| "epoch": 5.86, |
| "learning_rate": 2.926117716976484e-07, |
| "loss": 0.0373, |
| "step": 2191 |
| }, |
| { |
| "epoch": 5.86, |
| "learning_rate": 2.816791252832518e-07, |
| "loss": 0.0343, |
| "step": 2192 |
| }, |
| { |
| "epoch": 5.86, |
| "learning_rate": 2.709543321309793e-07, |
| "loss": 0.0347, |
| "step": 2193 |
| }, |
| { |
| "epoch": 5.87, |
| "learning_rate": 2.6043741459561563e-07, |
| "loss": 0.0291, |
| "step": 2194 |
| }, |
| { |
| "epoch": 5.87, |
| "learning_rate": 2.501283945986699e-07, |
| "loss": 0.0276, |
| "step": 2195 |
| }, |
| { |
| "epoch": 5.87, |
| "learning_rate": 2.400272936283088e-07, |
| "loss": 0.0292, |
| "step": 2196 |
| }, |
| { |
| "epoch": 5.87, |
| "learning_rate": 2.301341327392903e-07, |
| "loss": 0.0312, |
| "step": 2197 |
| }, |
| { |
| "epoch": 5.88, |
| "learning_rate": 2.204489325529857e-07, |
| "loss": 0.0339, |
| "step": 2198 |
| }, |
| { |
| "epoch": 5.88, |
| "learning_rate": 2.109717132572353e-07, |
| "loss": 0.0293, |
| "step": 2199 |
| }, |
| { |
| "epoch": 5.88, |
| "learning_rate": 2.0170249460641498e-07, |
| "loss": 0.033, |
| "step": 2200 |
| }, |
| { |
| "epoch": 5.89, |
| "learning_rate": 1.926412959213031e-07, |
| "loss": 0.0416, |
| "step": 2201 |
| }, |
| { |
| "epoch": 5.89, |
| "learning_rate": 1.837881360891136e-07, |
| "loss": 0.0304, |
| "step": 2202 |
| }, |
| { |
| "epoch": 5.89, |
| "learning_rate": 1.7514303356339635e-07, |
| "loss": 0.033, |
| "step": 2203 |
| }, |
| { |
| "epoch": 5.89, |
| "learning_rate": 1.6670600636403687e-07, |
| "loss": 0.0439, |
| "step": 2204 |
| }, |
| { |
| "epoch": 5.9, |
| "learning_rate": 1.584770720772233e-07, |
| "loss": 0.0371, |
| "step": 2205 |
| }, |
| { |
| "epoch": 5.9, |
| "learning_rate": 1.504562478553684e-07, |
| "loss": 0.0358, |
| "step": 2206 |
| }, |
| { |
| "epoch": 5.9, |
| "learning_rate": 1.4264355041709864e-07, |
| "loss": 0.0267, |
| "step": 2207 |
| }, |
| { |
| "epoch": 5.9, |
| "learning_rate": 1.3503899604725423e-07, |
| "loss": 0.029, |
| "step": 2208 |
| }, |
| { |
| "epoch": 5.91, |
| "learning_rate": 1.2764260059677792e-07, |
| "loss": 0.0321, |
| "step": 2209 |
| }, |
| { |
| "epoch": 5.91, |
| "learning_rate": 1.204543794827595e-07, |
| "loss": 0.0321, |
| "step": 2210 |
| }, |
| { |
| "epoch": 5.91, |
| "learning_rate": 1.1347434768834708e-07, |
| "loss": 0.0352, |
| "step": 2211 |
| }, |
| { |
| "epoch": 5.91, |
| "learning_rate": 1.0670251976275803e-07, |
| "loss": 0.032, |
| "step": 2212 |
| }, |
| { |
| "epoch": 5.92, |
| "learning_rate": 1.0013890982120134e-07, |
| "loss": 0.0318, |
| "step": 2213 |
| }, |
| { |
| "epoch": 5.92, |
| "learning_rate": 9.378353154489982e-08, |
| "loss": 0.0262, |
| "step": 2214 |
| }, |
| { |
| "epoch": 5.92, |
| "learning_rate": 8.763639818103464e-08, |
| "loss": 0.033, |
| "step": 2215 |
| }, |
| { |
| "epoch": 5.93, |
| "learning_rate": 8.16975225427008e-08, |
| "loss": 0.0287, |
| "step": 2216 |
| }, |
| { |
| "epoch": 5.93, |
| "learning_rate": 7.596691700891834e-08, |
| "loss": 0.0311, |
| "step": 2217 |
| }, |
| { |
| "epoch": 5.93, |
| "learning_rate": 7.044459352459898e-08, |
| "loss": 0.0277, |
| "step": 2218 |
| }, |
| { |
| "epoch": 5.93, |
| "learning_rate": 6.513056360047953e-08, |
| "loss": 0.0276, |
| "step": 2219 |
| }, |
| { |
| "epoch": 5.94, |
| "learning_rate": 6.002483831314409e-08, |
| "loss": 0.0304, |
| "step": 2220 |
| }, |
| { |
| "epoch": 5.94, |
| "learning_rate": 5.512742830500184e-08, |
| "loss": 0.0329, |
| "step": 2221 |
| }, |
| { |
| "epoch": 5.94, |
| "learning_rate": 5.043834378422041e-08, |
| "loss": 0.0306, |
| "step": 2222 |
| }, |
| { |
| "epoch": 5.94, |
| "learning_rate": 4.5957594524748124e-08, |
| "loss": 0.0364, |
| "step": 2223 |
| }, |
| { |
| "epoch": 5.95, |
| "learning_rate": 4.168518986628067e-08, |
| "loss": 0.0361, |
| "step": 2224 |
| }, |
| { |
| "epoch": 5.95, |
| "learning_rate": 3.762113871422779e-08, |
| "loss": 0.0309, |
| "step": 2225 |
| }, |
| { |
| "epoch": 5.95, |
| "learning_rate": 3.3765449539724384e-08, |
| "loss": 0.0341, |
| "step": 2226 |
| }, |
| { |
| "epoch": 5.95, |
| "learning_rate": 3.0118130379575005e-08, |
| "loss": 0.0401, |
| "step": 2227 |
| }, |
| { |
| "epoch": 5.96, |
| "learning_rate": 2.667918883627607e-08, |
| "loss": 0.0247, |
| "step": 2228 |
| }, |
| { |
| "epoch": 5.96, |
| "learning_rate": 2.3448632077960332e-08, |
| "loss": 0.0332, |
| "step": 2229 |
| }, |
| { |
| "epoch": 5.96, |
| "learning_rate": 2.0426466838407988e-08, |
| "loss": 0.0348, |
| "step": 2230 |
| }, |
| { |
| "epoch": 5.97, |
| "learning_rate": 1.7612699417057788e-08, |
| "loss": 0.0312, |
| "step": 2231 |
| }, |
| { |
| "epoch": 5.97, |
| "learning_rate": 1.500733567890711e-08, |
| "loss": 0.0321, |
| "step": 2232 |
| }, |
| { |
| "epoch": 5.97, |
| "learning_rate": 1.2610381054611874e-08, |
| "loss": 0.0277, |
| "step": 2233 |
| }, |
| { |
| "epoch": 5.97, |
| "learning_rate": 1.0421840540375539e-08, |
| "loss": 0.0339, |
| "step": 2234 |
| }, |
| { |
| "epoch": 5.98, |
| "learning_rate": 8.441718698004586e-09, |
| "loss": 0.0311, |
| "step": 2235 |
| }, |
| { |
| "epoch": 5.98, |
| "learning_rate": 6.670019654875237e-09, |
| "loss": 0.0355, |
| "step": 2236 |
| }, |
| { |
| "epoch": 5.98, |
| "learning_rate": 5.10674710393344e-09, |
| "loss": 0.0323, |
| "step": 2237 |
| }, |
| { |
| "epoch": 5.98, |
| "learning_rate": 3.751904303661568e-09, |
| "loss": 0.0358, |
| "step": 2238 |
| }, |
| { |
| "epoch": 5.99, |
| "learning_rate": 2.6054940780895207e-09, |
| "loss": 0.0336, |
| "step": 2239 |
| }, |
| { |
| "epoch": 5.99, |
| "learning_rate": 1.6675188168169265e-09, |
| "loss": 0.0277, |
| "step": 2240 |
| }, |
| { |
| "epoch": 5.99, |
| "learning_rate": 9.379804749465315e-10, |
| "loss": 0.0316, |
| "step": 2241 |
| }, |
| { |
| "epoch": 5.99, |
| "learning_rate": 4.1688057315081297e-10, |
| "loss": 0.0315, |
| "step": 2242 |
| }, |
| { |
| "epoch": 6.0, |
| "learning_rate": 1.042201975942625e-10, |
| "loss": 0.0374, |
| "step": 2243 |
| }, |
| { |
| "epoch": 6.0, |
| "learning_rate": 0.0, |
| "loss": 0.0285, |
| "step": 2244 |
| }, |
| { |
| "epoch": 6.0, |
| "step": 2244, |
| "total_flos": 1013558966550528.0, |
| "train_loss": 0.38766651762460164, |
| "train_runtime": 29486.2918, |
| "train_samples_per_second": 15.207, |
| "train_steps_per_second": 0.076 |
| } |
| ], |
| "logging_steps": 1, |
| "max_steps": 2244, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 6, |
| "save_steps": 50000, |
| "total_flos": 1013558966550528.0, |
| "train_batch_size": 50, |
| "trial_name": null, |
| "trial_params": null |
| } |
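
The file above appears to be a `trainer_state.json` of the kind written by the Hugging Face `Trainer`. Below is a minimal sketch, assuming that filename and an available `matplotlib` (neither is stated in the log itself), of reading the `log_history` entries and plotting the training loss against the optimizer step; the script and its output path are illustrative only.

```python
# Minimal sketch: load a Trainer-style state file and plot logged loss vs. step.
# Assumptions (not part of the original log): the JSON is saved as
# "trainer_state.json" in the working directory and matplotlib is installed.
import json

import matplotlib.pyplot as plt

with open("trainer_state.json") as f:
    state = json.load(f)

# Keep only per-step entries that actually logged a loss; the final summary
# entry carries aggregate fields (train_loss, train_runtime, ...) instead.
records = [r for r in state["log_history"] if "loss" in r and "step" in r]

steps = [r["step"] for r in records]
losses = [r["loss"] for r in records]

plt.plot(steps, losses, linewidth=0.8)
plt.xlabel("step")
plt.ylabel("training loss")
plt.title(f"{len(records)} logged steps, {state['num_train_epochs']} epochs")
plt.tight_layout()
plt.savefig("loss_curve.png")
```

As a rough consistency check on the summary fields: 15.207 samples/s over 29,486.29 s is about 448,400 samples, i.e. roughly 200 samples per optimizer step across 2,244 steps. With `train_batch_size` = 50, that suggests an effective batch about four times the per-device size (gradient accumulation and/or multiple devices), which the state file does not record explicitly.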