|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 11.12867530825166, |
|
"eval_steps": 500, |
|
"global_step": 1100, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 9.996811224489796e-05, |
|
"loss": 3.2244, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 9.993622448979592e-05, |
|
"loss": 2.9431, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 9.990433673469388e-05, |
|
"loss": 2.7933, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 9.987244897959184e-05, |
|
"loss": 2.7027, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 9.984056122448981e-05, |
|
"loss": 2.5159, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 9.980867346938776e-05, |
|
"loss": 2.5234, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 9.977678571428571e-05, |
|
"loss": 2.4048, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 9.974489795918368e-05, |
|
"loss": 2.297, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 9.971301020408164e-05, |
|
"loss": 2.2237, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 9.96811224489796e-05, |
|
"loss": 2.1927, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 9.964923469387755e-05, |
|
"loss": 2.1405, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 9.961734693877551e-05, |
|
"loss": 2.2251, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 9.958545918367348e-05, |
|
"loss": 2.1525, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 9.955357142857144e-05, |
|
"loss": 2.1014, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 9.952168367346938e-05, |
|
"loss": 2.061, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 9.948979591836736e-05, |
|
"loss": 2.05, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 9.945790816326531e-05, |
|
"loss": 2.101, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 9.942602040816327e-05, |
|
"loss": 1.8366, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 9.939413265306123e-05, |
|
"loss": 1.8752, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 9.936224489795919e-05, |
|
"loss": 2.0378, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 9.933035714285714e-05, |
|
"loss": 2.0911, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 9.929846938775512e-05, |
|
"loss": 1.9428, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 9.926658163265307e-05, |
|
"loss": 1.9914, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 9.923469387755102e-05, |
|
"loss": 1.949, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 9.920280612244899e-05, |
|
"loss": 2.0358, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 9.917091836734695e-05, |
|
"loss": 2.1013, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 9.91390306122449e-05, |
|
"loss": 1.9653, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 9.910714285714286e-05, |
|
"loss": 1.8327, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 9.907525510204082e-05, |
|
"loss": 1.6626, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 9.904336734693878e-05, |
|
"loss": 1.9772, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 9.901147959183675e-05, |
|
"loss": 1.9107, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 9.897959183673469e-05, |
|
"loss": 1.8967, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 9.894770408163265e-05, |
|
"loss": 2.0814, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 9.891581632653062e-05, |
|
"loss": 1.8678, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 9.888392857142858e-05, |
|
"loss": 1.8817, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 9.885204081632652e-05, |
|
"loss": 1.9398, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 9.88201530612245e-05, |
|
"loss": 1.8375, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 9.878826530612245e-05, |
|
"loss": 2.0266, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 9.875637755102042e-05, |
|
"loss": 1.937, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 9.872448979591837e-05, |
|
"loss": 1.9531, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 9.869260204081633e-05, |
|
"loss": 1.9794, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 9.86607142857143e-05, |
|
"loss": 1.8376, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 9.862882653061225e-05, |
|
"loss": 1.8657, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 9.859693877551021e-05, |
|
"loss": 1.7837, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 9.856505102040817e-05, |
|
"loss": 1.8824, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 9.853316326530613e-05, |
|
"loss": 1.8274, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 9.850127551020409e-05, |
|
"loss": 1.9559, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 9.846938775510204e-05, |
|
"loss": 1.8214, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 9.84375e-05, |
|
"loss": 1.9955, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 9.840561224489796e-05, |
|
"loss": 1.8716, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 9.837372448979593e-05, |
|
"loss": 1.8892, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 9.834183673469389e-05, |
|
"loss": 1.9172, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 9.830994897959183e-05, |
|
"loss": 1.8013, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 9.82780612244898e-05, |
|
"loss": 1.9853, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 9.824617346938776e-05, |
|
"loss": 1.8744, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 9.821428571428572e-05, |
|
"loss": 1.8443, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 9.818239795918368e-05, |
|
"loss": 1.9072, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 9.815051020408163e-05, |
|
"loss": 1.8902, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 9.811862244897959e-05, |
|
"loss": 1.9012, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 9.808673469387756e-05, |
|
"loss": 1.8728, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 9.805484693877551e-05, |
|
"loss": 1.7778, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 9.802295918367348e-05, |
|
"loss": 1.9753, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 9.799107142857144e-05, |
|
"loss": 1.9134, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 9.79591836734694e-05, |
|
"loss": 1.7386, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 9.792729591836735e-05, |
|
"loss": 1.7398, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 9.789540816326531e-05, |
|
"loss": 1.8563, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 9.786352040816327e-05, |
|
"loss": 1.9333, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 9.783163265306124e-05, |
|
"loss": 1.8351, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 9.77997448979592e-05, |
|
"loss": 1.8199, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 9.776785714285714e-05, |
|
"loss": 1.9057, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 9.773596938775511e-05, |
|
"loss": 1.9722, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 9.770408163265307e-05, |
|
"loss": 1.8762, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 9.767219387755103e-05, |
|
"loss": 1.8591, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 9.764030612244898e-05, |
|
"loss": 1.7435, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 9.760841836734694e-05, |
|
"loss": 1.8729, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 9.75765306122449e-05, |
|
"loss": 1.8089, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 9.754464285714287e-05, |
|
"loss": 1.9033, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 9.751275510204082e-05, |
|
"loss": 1.8569, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 9.748086734693877e-05, |
|
"loss": 1.8682, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 9.744897959183674e-05, |
|
"loss": 1.6905, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 9.74170918367347e-05, |
|
"loss": 1.8561, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 9.738520408163265e-05, |
|
"loss": 1.8295, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 9.735331632653062e-05, |
|
"loss": 1.848, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 9.732142857142858e-05, |
|
"loss": 1.722, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 9.728954081632653e-05, |
|
"loss": 1.6927, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 9.725765306122449e-05, |
|
"loss": 1.7642, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 9.722576530612245e-05, |
|
"loss": 1.8603, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 9.719387755102042e-05, |
|
"loss": 1.8246, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 9.716198979591838e-05, |
|
"loss": 1.9409, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 9.713010204081633e-05, |
|
"loss": 1.7765, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 9.709821428571429e-05, |
|
"loss": 1.7607, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 9.706632653061225e-05, |
|
"loss": 1.863, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 9.703443877551021e-05, |
|
"loss": 1.7051, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 9.700255102040817e-05, |
|
"loss": 1.8005, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 9.697066326530612e-05, |
|
"loss": 1.7598, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 9.693877551020408e-05, |
|
"loss": 1.8445, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 9.690688775510205e-05, |
|
"loss": 1.9542, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 9.687500000000001e-05, |
|
"loss": 1.7595, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 9.684311224489795e-05, |
|
"loss": 1.8095, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 9.681122448979593e-05, |
|
"loss": 1.7445, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 9.677933673469388e-05, |
|
"loss": 1.9506, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 9.674744897959184e-05, |
|
"loss": 1.7708, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 9.67155612244898e-05, |
|
"loss": 1.898, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 9.668367346938776e-05, |
|
"loss": 1.7326, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 9.665178571428571e-05, |
|
"loss": 1.8423, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 9.661989795918369e-05, |
|
"loss": 1.7541, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 9.658801020408163e-05, |
|
"loss": 1.9325, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 9.655612244897959e-05, |
|
"loss": 1.8437, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 9.652423469387756e-05, |
|
"loss": 1.8415, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 9.649234693877552e-05, |
|
"loss": 1.8457, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 9.646045918367347e-05, |
|
"loss": 1.6994, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 9.642857142857143e-05, |
|
"loss": 1.8482, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 9.639668367346939e-05, |
|
"loss": 1.8053, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 9.636479591836736e-05, |
|
"loss": 1.8064, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 9.633290816326532e-05, |
|
"loss": 1.7199, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 9.630102040816326e-05, |
|
"loss": 1.7226, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 9.626913265306123e-05, |
|
"loss": 1.8804, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 9.623724489795919e-05, |
|
"loss": 1.7488, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 9.620535714285715e-05, |
|
"loss": 1.8766, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 9.617346938775511e-05, |
|
"loss": 1.8261, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 9.614158163265306e-05, |
|
"loss": 1.7813, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 9.610969387755102e-05, |
|
"loss": 1.6967, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 9.6077806122449e-05, |
|
"loss": 1.6953, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 9.604591836734694e-05, |
|
"loss": 1.6754, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 9.60140306122449e-05, |
|
"loss": 1.7052, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 9.598214285714287e-05, |
|
"loss": 1.8196, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 9.595025510204082e-05, |
|
"loss": 1.7346, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 9.591836734693878e-05, |
|
"loss": 1.8132, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 9.588647959183674e-05, |
|
"loss": 1.7865, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 9.58545918367347e-05, |
|
"loss": 1.745, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 9.582270408163266e-05, |
|
"loss": 1.8189, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 9.579081632653061e-05, |
|
"loss": 1.7351, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 9.575892857142857e-05, |
|
"loss": 1.8111, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 9.572704081632653e-05, |
|
"loss": 1.815, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 9.56951530612245e-05, |
|
"loss": 1.7898, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 9.566326530612246e-05, |
|
"loss": 1.8174, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 9.563137755102042e-05, |
|
"loss": 1.7156, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 9.559948979591837e-05, |
|
"loss": 1.8901, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 9.556760204081633e-05, |
|
"loss": 1.7957, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 9.553571428571429e-05, |
|
"loss": 1.8229, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 9.550382653061225e-05, |
|
"loss": 1.7423, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 9.54719387755102e-05, |
|
"loss": 1.8035, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 9.544005102040818e-05, |
|
"loss": 1.8071, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 9.540816326530613e-05, |
|
"loss": 1.7434, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 9.537627551020408e-05, |
|
"loss": 1.7771, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 9.534438775510205e-05, |
|
"loss": 1.7994, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 9.53125e-05, |
|
"loss": 1.8915, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 9.528061224489796e-05, |
|
"loss": 1.7729, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 9.524872448979592e-05, |
|
"loss": 1.8204, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 9.521683673469388e-05, |
|
"loss": 1.8064, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 9.518494897959184e-05, |
|
"loss": 1.8689, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 9.515306122448981e-05, |
|
"loss": 1.7717, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 9.512117346938775e-05, |
|
"loss": 1.819, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 9.508928571428571e-05, |
|
"loss": 1.754, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 9.505739795918368e-05, |
|
"loss": 1.9207, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 9.502551020408164e-05, |
|
"loss": 1.8492, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 9.49936224489796e-05, |
|
"loss": 1.8649, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 9.496173469387755e-05, |
|
"loss": 1.8203, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 9.492984693877551e-05, |
|
"loss": 1.8118, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 9.489795918367348e-05, |
|
"loss": 1.7663, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 9.486607142857144e-05, |
|
"loss": 1.8782, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 9.483418367346939e-05, |
|
"loss": 1.8452, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 9.480229591836736e-05, |
|
"loss": 1.7889, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 9.477040816326531e-05, |
|
"loss": 1.7076, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 9.473852040816327e-05, |
|
"loss": 1.845, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 9.470663265306123e-05, |
|
"loss": 1.7403, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 9.467474489795919e-05, |
|
"loss": 1.8155, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 9.464285714285715e-05, |
|
"loss": 1.8515, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 9.461096938775512e-05, |
|
"loss": 1.7782, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 9.457908163265306e-05, |
|
"loss": 1.7943, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 9.454719387755102e-05, |
|
"loss": 1.7188, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 9.451530612244899e-05, |
|
"loss": 1.7785, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 9.448341836734695e-05, |
|
"loss": 1.8056, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 9.44515306122449e-05, |
|
"loss": 1.7764, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 9.441964285714286e-05, |
|
"loss": 1.8921, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 9.438775510204082e-05, |
|
"loss": 1.762, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 9.435586734693878e-05, |
|
"loss": 1.7324, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 9.432397959183674e-05, |
|
"loss": 1.7451, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 9.42920918367347e-05, |
|
"loss": 1.7564, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 9.426020408163265e-05, |
|
"loss": 1.7764, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 9.422831632653062e-05, |
|
"loss": 1.7353, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 9.419642857142858e-05, |
|
"loss": 1.8021, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 9.416454081632652e-05, |
|
"loss": 1.7088, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 9.41326530612245e-05, |
|
"loss": 1.8425, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 9.410076530612245e-05, |
|
"loss": 1.7693, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 9.406887755102041e-05, |
|
"loss": 1.6315, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 9.403698979591837e-05, |
|
"loss": 1.7466, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 9.400510204081633e-05, |
|
"loss": 1.8778, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 9.39732142857143e-05, |
|
"loss": 1.8097, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 9.394132653061226e-05, |
|
"loss": 1.8514, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 9.39094387755102e-05, |
|
"loss": 1.7642, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 9.387755102040817e-05, |
|
"loss": 1.7704, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 9.384566326530613e-05, |
|
"loss": 1.8643, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 9.381377551020409e-05, |
|
"loss": 1.7102, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 9.378188775510204e-05, |
|
"loss": 1.8454, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 9.375e-05, |
|
"loss": 1.7541, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 9.371811224489796e-05, |
|
"loss": 1.6068, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 9.368622448979593e-05, |
|
"loss": 1.8284, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 9.365433673469388e-05, |
|
"loss": 1.7188, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 9.362244897959183e-05, |
|
"loss": 1.7339, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 9.35905612244898e-05, |
|
"loss": 1.851, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 9.355867346938776e-05, |
|
"loss": 1.7182, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 9.352678571428572e-05, |
|
"loss": 1.6969, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 9.349489795918368e-05, |
|
"loss": 1.7887, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 9.346301020408163e-05, |
|
"loss": 1.7422, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 9.343112244897959e-05, |
|
"loss": 1.6576, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 9.339923469387756e-05, |
|
"loss": 1.8358, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 9.336734693877551e-05, |
|
"loss": 1.8129, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 9.333545918367348e-05, |
|
"loss": 1.7009, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 9.330357142857144e-05, |
|
"loss": 1.6702, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 9.32716836734694e-05, |
|
"loss": 1.7322, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 9.323979591836735e-05, |
|
"loss": 1.7945, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 9.320790816326531e-05, |
|
"loss": 1.7738, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 9.317602040816327e-05, |
|
"loss": 1.7397, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 9.314413265306124e-05, |
|
"loss": 1.8265, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 9.311224489795918e-05, |
|
"loss": 1.8222, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 9.308035714285714e-05, |
|
"loss": 1.7259, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 9.304846938775511e-05, |
|
"loss": 1.7603, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 9.301658163265307e-05, |
|
"loss": 1.7456, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 9.298469387755103e-05, |
|
"loss": 1.7605, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 9.295280612244899e-05, |
|
"loss": 1.7271, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 9.292091836734694e-05, |
|
"loss": 1.778, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 9.28890306122449e-05, |
|
"loss": 1.762, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 9.285714285714286e-05, |
|
"loss": 1.8079, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 9.282525510204082e-05, |
|
"loss": 1.7605, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 9.279336734693877e-05, |
|
"loss": 1.8419, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 9.276147959183675e-05, |
|
"loss": 1.7739, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 9.27295918367347e-05, |
|
"loss": 1.7588, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 9.269770408163265e-05, |
|
"loss": 1.6675, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 9.266581632653062e-05, |
|
"loss": 1.7239, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 9.263392857142858e-05, |
|
"loss": 1.7963, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 9.260204081632653e-05, |
|
"loss": 1.6988, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 9.257015306122449e-05, |
|
"loss": 1.8134, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 9.253826530612245e-05, |
|
"loss": 1.7699, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 9.250637755102042e-05, |
|
"loss": 1.8085, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 9.247448979591838e-05, |
|
"loss": 1.6914, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 9.244260204081632e-05, |
|
"loss": 1.5951, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 9.24107142857143e-05, |
|
"loss": 1.7358, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 9.237882653061225e-05, |
|
"loss": 1.7977, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 9.234693877551021e-05, |
|
"loss": 1.7313, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 9.231505102040817e-05, |
|
"loss": 1.76, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 9.228316326530612e-05, |
|
"loss": 1.7217, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 9.225127551020408e-05, |
|
"loss": 1.7061, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 9.221938775510205e-05, |
|
"loss": 1.6406, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 9.21875e-05, |
|
"loss": 1.7745, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 9.215561224489796e-05, |
|
"loss": 1.6836, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 9.212372448979593e-05, |
|
"loss": 1.9216, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 9.209183673469388e-05, |
|
"loss": 1.6483, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 9.205994897959184e-05, |
|
"loss": 1.8161, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 9.20280612244898e-05, |
|
"loss": 1.7031, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 9.199617346938776e-05, |
|
"loss": 1.8476, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 9.196428571428572e-05, |
|
"loss": 1.7902, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 9.193239795918369e-05, |
|
"loss": 1.6055, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 9.190051020408163e-05, |
|
"loss": 1.7595, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 9.186862244897959e-05, |
|
"loss": 1.7044, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 9.183673469387756e-05, |
|
"loss": 1.7589, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 9.180484693877552e-05, |
|
"loss": 1.7683, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 9.177295918367348e-05, |
|
"loss": 1.711, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 9.174107142857143e-05, |
|
"loss": 1.7786, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 9.170918367346939e-05, |
|
"loss": 1.7657, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 9.167729591836736e-05, |
|
"loss": 1.7338, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 9.16454081632653e-05, |
|
"loss": 1.7197, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 9.161352040816326e-05, |
|
"loss": 1.7484, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 9.158163265306124e-05, |
|
"loss": 1.7471, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 9.154974489795919e-05, |
|
"loss": 1.8983, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 9.151785714285715e-05, |
|
"loss": 1.6745, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 9.148596938775511e-05, |
|
"loss": 1.711, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 9.145408163265307e-05, |
|
"loss": 1.7758, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 9.142219387755102e-05, |
|
"loss": 1.7229, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 9.139030612244898e-05, |
|
"loss": 1.7072, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 9.135841836734694e-05, |
|
"loss": 1.7081, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 9.13265306122449e-05, |
|
"loss": 1.7859, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 9.129464285714287e-05, |
|
"loss": 1.7052, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 9.126275510204083e-05, |
|
"loss": 1.7401, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 9.123086734693877e-05, |
|
"loss": 1.7259, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 9.119897959183674e-05, |
|
"loss": 1.7349, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 9.11670918367347e-05, |
|
"loss": 1.8151, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 9.113520408163266e-05, |
|
"loss": 1.7264, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 9.110331632653061e-05, |
|
"loss": 1.8593, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 9.107142857142857e-05, |
|
"loss": 1.765, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 9.103954081632653e-05, |
|
"loss": 1.7441, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 9.10076530612245e-05, |
|
"loss": 1.7231, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 9.097576530612245e-05, |
|
"loss": 1.6782, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 9.094387755102042e-05, |
|
"loss": 1.6981, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 9.091198979591837e-05, |
|
"loss": 1.6902, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 9.088010204081633e-05, |
|
"loss": 1.7088, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 9.084821428571429e-05, |
|
"loss": 1.8195, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 9.081632653061225e-05, |
|
"loss": 1.7754, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 9.07844387755102e-05, |
|
"loss": 1.6354, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 9.075255102040818e-05, |
|
"loss": 1.8996, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 9.072066326530612e-05, |
|
"loss": 1.8391, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 9.068877551020408e-05, |
|
"loss": 1.6725, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 9.065688775510205e-05, |
|
"loss": 1.7103, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 9.062500000000001e-05, |
|
"loss": 1.7725, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 9.059311224489797e-05, |
|
"loss": 1.6804, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 9.056122448979592e-05, |
|
"loss": 1.7693, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"learning_rate": 9.052933673469388e-05, |
|
"loss": 1.7712, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 3.01, |
|
"learning_rate": 9.049744897959184e-05, |
|
"loss": 1.7508, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 3.02, |
|
"learning_rate": 9.046556122448981e-05, |
|
"loss": 1.6661, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 3.04, |
|
"learning_rate": 9.043367346938775e-05, |
|
"loss": 1.7509, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 3.05, |
|
"learning_rate": 9.040178571428571e-05, |
|
"loss": 1.5842, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 3.06, |
|
"learning_rate": 9.036989795918368e-05, |
|
"loss": 1.7407, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 3.07, |
|
"learning_rate": 9.033801020408164e-05, |
|
"loss": 1.733, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 3.08, |
|
"learning_rate": 9.030612244897958e-05, |
|
"loss": 1.6372, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 3.09, |
|
"learning_rate": 9.027423469387756e-05, |
|
"loss": 1.6975, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 3.1, |
|
"learning_rate": 9.024234693877551e-05, |
|
"loss": 1.7982, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 3.11, |
|
"learning_rate": 9.021045918367348e-05, |
|
"loss": 1.7448, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 3.12, |
|
"learning_rate": 9.017857142857143e-05, |
|
"loss": 1.6857, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 3.13, |
|
"learning_rate": 9.014668367346939e-05, |
|
"loss": 1.7282, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 3.14, |
|
"learning_rate": 9.011479591836736e-05, |
|
"loss": 1.7198, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 3.15, |
|
"learning_rate": 9.008290816326532e-05, |
|
"loss": 1.7697, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 3.16, |
|
"learning_rate": 9.005102040816327e-05, |
|
"loss": 1.6301, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 3.17, |
|
"learning_rate": 9.001913265306123e-05, |
|
"loss": 1.6895, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 3.18, |
|
"learning_rate": 8.998724489795919e-05, |
|
"loss": 1.6554, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 3.19, |
|
"learning_rate": 8.995535714285715e-05, |
|
"loss": 1.7577, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 3.2, |
|
"learning_rate": 8.99234693877551e-05, |
|
"loss": 1.7406, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 3.21, |
|
"learning_rate": 8.989158163265306e-05, |
|
"loss": 1.6299, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 3.22, |
|
"learning_rate": 8.985969387755102e-05, |
|
"loss": 1.6744, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 3.23, |
|
"learning_rate": 8.982780612244899e-05, |
|
"loss": 1.7713, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 3.24, |
|
"learning_rate": 8.979591836734695e-05, |
|
"loss": 1.66, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 3.25, |
|
"learning_rate": 8.976403061224489e-05, |
|
"loss": 1.6957, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 3.26, |
|
"learning_rate": 8.973214285714286e-05, |
|
"loss": 1.6484, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 3.27, |
|
"learning_rate": 8.970025510204082e-05, |
|
"loss": 1.5804, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 3.28, |
|
"learning_rate": 8.966836734693878e-05, |
|
"loss": 1.762, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 3.29, |
|
"learning_rate": 8.963647959183674e-05, |
|
"loss": 1.8231, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 3.3, |
|
"learning_rate": 8.96045918367347e-05, |
|
"loss": 1.7701, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 3.31, |
|
"learning_rate": 8.957270408163265e-05, |
|
"loss": 1.7539, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 3.32, |
|
"learning_rate": 8.954081632653062e-05, |
|
"loss": 1.7987, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 3.33, |
|
"learning_rate": 8.950892857142857e-05, |
|
"loss": 1.6533, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 3.34, |
|
"learning_rate": 8.947704081632653e-05, |
|
"loss": 1.8039, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 3.35, |
|
"learning_rate": 8.94451530612245e-05, |
|
"loss": 1.7719, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 3.36, |
|
"learning_rate": 8.941326530612245e-05, |
|
"loss": 1.7582, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 3.37, |
|
"learning_rate": 8.938137755102041e-05, |
|
"loss": 1.6774, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 3.38, |
|
"learning_rate": 8.934948979591837e-05, |
|
"loss": 1.6298, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 3.39, |
|
"learning_rate": 8.931760204081633e-05, |
|
"loss": 1.7936, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 3.4, |
|
"learning_rate": 8.92857142857143e-05, |
|
"loss": 1.8324, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 3.41, |
|
"learning_rate": 8.925382653061224e-05, |
|
"loss": 1.7726, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 3.42, |
|
"learning_rate": 8.92219387755102e-05, |
|
"loss": 1.7102, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 3.43, |
|
"learning_rate": 8.919005102040817e-05, |
|
"loss": 1.6567, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 3.44, |
|
"learning_rate": 8.915816326530613e-05, |
|
"loss": 1.6084, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 3.45, |
|
"learning_rate": 8.912627551020409e-05, |
|
"loss": 1.5728, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 3.46, |
|
"learning_rate": 8.909438775510205e-05, |
|
"loss": 1.6946, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 3.47, |
|
"learning_rate": 8.90625e-05, |
|
"loss": 1.7973, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 3.48, |
|
"learning_rate": 8.903061224489796e-05, |
|
"loss": 1.796, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 3.49, |
|
"learning_rate": 8.899872448979593e-05, |
|
"loss": 1.691, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 3.5, |
|
"learning_rate": 8.896683673469388e-05, |
|
"loss": 1.7176, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 3.51, |
|
"learning_rate": 8.893494897959183e-05, |
|
"loss": 1.634, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 3.52, |
|
"learning_rate": 8.89030612244898e-05, |
|
"loss": 1.766, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 3.53, |
|
"learning_rate": 8.887117346938776e-05, |
|
"loss": 1.8859, |
|
"step": 349 |
|
}, |
|
{ |
|
"epoch": 3.54, |
|
"learning_rate": 8.883928571428571e-05, |
|
"loss": 1.7391, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 3.55, |
|
"learning_rate": 8.880739795918368e-05, |
|
"loss": 1.7228, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 3.56, |
|
"learning_rate": 8.877551020408164e-05, |
|
"loss": 1.7388, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 3.57, |
|
"learning_rate": 8.87436224489796e-05, |
|
"loss": 1.6822, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 3.58, |
|
"learning_rate": 8.871173469387755e-05, |
|
"loss": 1.7068, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 3.59, |
|
"learning_rate": 8.867984693877551e-05, |
|
"loss": 1.8398, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 3.6, |
|
"learning_rate": 8.864795918367348e-05, |
|
"loss": 1.7962, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 3.61, |
|
"learning_rate": 8.861607142857144e-05, |
|
"loss": 1.6781, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 3.62, |
|
"learning_rate": 8.85841836734694e-05, |
|
"loss": 1.6459, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 3.63, |
|
"learning_rate": 8.855229591836735e-05, |
|
"loss": 1.7337, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 3.64, |
|
"learning_rate": 8.852040816326531e-05, |
|
"loss": 1.678, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 3.65, |
|
"learning_rate": 8.848852040816327e-05, |
|
"loss": 1.7358, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 3.66, |
|
"learning_rate": 8.845663265306123e-05, |
|
"loss": 1.7226, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 3.67, |
|
"learning_rate": 8.842474489795918e-05, |
|
"loss": 1.7129, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 3.68, |
|
"learning_rate": 8.839285714285714e-05, |
|
"loss": 1.6493, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 3.69, |
|
"learning_rate": 8.836096938775511e-05, |
|
"loss": 1.7302, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 3.7, |
|
"learning_rate": 8.832908163265307e-05, |
|
"loss": 1.6787, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 3.71, |
|
"learning_rate": 8.829719387755102e-05, |
|
"loss": 1.6729, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 3.72, |
|
"learning_rate": 8.826530612244899e-05, |
|
"loss": 1.7308, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 3.73, |
|
"learning_rate": 8.823341836734694e-05, |
|
"loss": 1.6517, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 3.74, |
|
"learning_rate": 8.82015306122449e-05, |
|
"loss": 1.7198, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 3.75, |
|
"learning_rate": 8.816964285714286e-05, |
|
"loss": 1.6133, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 3.76, |
|
"learning_rate": 8.813775510204082e-05, |
|
"loss": 1.6732, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 3.77, |
|
"learning_rate": 8.810586734693878e-05, |
|
"loss": 1.729, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 3.78, |
|
"learning_rate": 8.807397959183675e-05, |
|
"loss": 1.699, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 3.79, |
|
"learning_rate": 8.804209183673469e-05, |
|
"loss": 1.7103, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 3.8, |
|
"learning_rate": 8.801020408163265e-05, |
|
"loss": 1.7119, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 3.81, |
|
"learning_rate": 8.797831632653062e-05, |
|
"loss": 1.646, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 3.82, |
|
"learning_rate": 8.794642857142858e-05, |
|
"loss": 1.5956, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 3.83, |
|
"learning_rate": 8.791454081632654e-05, |
|
"loss": 1.7735, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 3.84, |
|
"learning_rate": 8.788265306122449e-05, |
|
"loss": 1.7089, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 3.85, |
|
"learning_rate": 8.785076530612245e-05, |
|
"loss": 1.7068, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 3.86, |
|
"learning_rate": 8.781887755102042e-05, |
|
"loss": 1.6947, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 3.87, |
|
"learning_rate": 8.778698979591838e-05, |
|
"loss": 1.7792, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 3.88, |
|
"learning_rate": 8.775510204081632e-05, |
|
"loss": 1.6787, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 3.9, |
|
"learning_rate": 8.77232142857143e-05, |
|
"loss": 1.825, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 3.91, |
|
"learning_rate": 8.769132653061225e-05, |
|
"loss": 1.7203, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 3.92, |
|
"learning_rate": 8.765943877551021e-05, |
|
"loss": 1.6524, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 3.93, |
|
"learning_rate": 8.762755102040817e-05, |
|
"loss": 1.6665, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 3.94, |
|
"learning_rate": 8.759566326530613e-05, |
|
"loss": 1.7011, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 3.95, |
|
"learning_rate": 8.756377551020408e-05, |
|
"loss": 1.7353, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 3.96, |
|
"learning_rate": 8.753188775510205e-05, |
|
"loss": 1.7717, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 3.97, |
|
"learning_rate": 8.75e-05, |
|
"loss": 1.6323, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 3.98, |
|
"learning_rate": 8.746811224489796e-05, |
|
"loss": 1.6933, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 3.99, |
|
"learning_rate": 8.743622448979593e-05, |
|
"loss": 1.6924, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 4.0, |
|
"learning_rate": 8.740433673469389e-05, |
|
"loss": 1.797, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 4.01, |
|
"learning_rate": 8.737244897959183e-05, |
|
"loss": 1.6189, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 4.02, |
|
"learning_rate": 8.73405612244898e-05, |
|
"loss": 1.6157, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 4.03, |
|
"learning_rate": 8.730867346938776e-05, |
|
"loss": 1.5831, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 4.04, |
|
"learning_rate": 8.727678571428572e-05, |
|
"loss": 1.6539, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 4.05, |
|
"learning_rate": 8.724489795918367e-05, |
|
"loss": 1.6136, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 4.06, |
|
"learning_rate": 8.721301020408163e-05, |
|
"loss": 1.6864, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 4.07, |
|
"learning_rate": 8.718112244897959e-05, |
|
"loss": 1.5279, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 4.08, |
|
"learning_rate": 8.714923469387756e-05, |
|
"loss": 1.6846, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 4.09, |
|
"learning_rate": 8.711734693877552e-05, |
|
"loss": 1.7603, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 4.1, |
|
"learning_rate": 8.708545918367348e-05, |
|
"loss": 1.7768, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 4.11, |
|
"learning_rate": 8.705357142857143e-05, |
|
"loss": 1.751, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 4.12, |
|
"learning_rate": 8.702168367346939e-05, |
|
"loss": 1.6345, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 4.13, |
|
"learning_rate": 8.698979591836735e-05, |
|
"loss": 1.6333, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 4.14, |
|
"learning_rate": 8.695790816326531e-05, |
|
"loss": 1.6771, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 4.15, |
|
"learning_rate": 8.692602040816327e-05, |
|
"loss": 1.6764, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 4.16, |
|
"learning_rate": 8.689413265306124e-05, |
|
"loss": 1.7229, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 4.17, |
|
"learning_rate": 8.68622448979592e-05, |
|
"loss": 1.6311, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 4.18, |
|
"learning_rate": 8.683035714285714e-05, |
|
"loss": 1.7222, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 4.19, |
|
"learning_rate": 8.679846938775511e-05, |
|
"loss": 1.6414, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 4.2, |
|
"learning_rate": 8.676658163265307e-05, |
|
"loss": 1.7688, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 4.21, |
|
"learning_rate": 8.673469387755102e-05, |
|
"loss": 1.7213, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 4.22, |
|
"learning_rate": 8.670280612244898e-05, |
|
"loss": 1.6997, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 4.23, |
|
"learning_rate": 8.667091836734694e-05, |
|
"loss": 1.7518, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 4.24, |
|
"learning_rate": 8.66390306122449e-05, |
|
"loss": 1.7478, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 4.25, |
|
"learning_rate": 8.660714285714287e-05, |
|
"loss": 1.7175, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 4.26, |
|
"learning_rate": 8.657525510204081e-05, |
|
"loss": 1.6769, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 4.27, |
|
"learning_rate": 8.654336734693877e-05, |
|
"loss": 1.7689, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 4.28, |
|
"learning_rate": 8.651147959183674e-05, |
|
"loss": 1.6298, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 4.29, |
|
"learning_rate": 8.64795918367347e-05, |
|
"loss": 1.6876, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 4.3, |
|
"learning_rate": 8.644770408163266e-05, |
|
"loss": 1.7329, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 4.31, |
|
"learning_rate": 8.641581632653062e-05, |
|
"loss": 1.7087, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 4.32, |
|
"learning_rate": 8.638392857142857e-05, |
|
"loss": 1.6905, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 4.33, |
|
"learning_rate": 8.635204081632653e-05, |
|
"loss": 1.7005, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 4.34, |
|
"learning_rate": 8.63201530612245e-05, |
|
"loss": 1.6099, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 4.35, |
|
"learning_rate": 8.628826530612245e-05, |
|
"loss": 1.7045, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 4.36, |
|
"learning_rate": 8.625637755102042e-05, |
|
"loss": 1.5877, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 4.37, |
|
"learning_rate": 8.622448979591838e-05, |
|
"loss": 1.6743, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 4.38, |
|
"learning_rate": 8.619260204081633e-05, |
|
"loss": 1.7205, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 4.39, |
|
"learning_rate": 8.616071428571429e-05, |
|
"loss": 1.566, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 4.4, |
|
"learning_rate": 8.612882653061225e-05, |
|
"loss": 1.5954, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 4.41, |
|
"learning_rate": 8.60969387755102e-05, |
|
"loss": 1.6791, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 4.42, |
|
"learning_rate": 8.606505102040818e-05, |
|
"loss": 1.5968, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 4.43, |
|
"learning_rate": 8.603316326530612e-05, |
|
"loss": 1.7167, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 4.44, |
|
"learning_rate": 8.600127551020408e-05, |
|
"loss": 1.6694, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 4.45, |
|
"learning_rate": 8.596938775510205e-05, |
|
"loss": 1.7976, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 4.46, |
|
"learning_rate": 8.593750000000001e-05, |
|
"loss": 1.7609, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 4.47, |
|
"learning_rate": 8.590561224489795e-05, |
|
"loss": 1.8266, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 4.48, |
|
"learning_rate": 8.587372448979592e-05, |
|
"loss": 1.7249, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 4.49, |
|
"learning_rate": 8.584183673469388e-05, |
|
"loss": 1.7351, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 4.5, |
|
"learning_rate": 8.580994897959184e-05, |
|
"loss": 1.7943, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 4.51, |
|
"learning_rate": 8.57780612244898e-05, |
|
"loss": 1.7614, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 4.52, |
|
"learning_rate": 8.574617346938775e-05, |
|
"loss": 1.6992, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 4.53, |
|
"learning_rate": 8.571428571428571e-05, |
|
"loss": 1.6828, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 4.54, |
|
"learning_rate": 8.568239795918368e-05, |
|
"loss": 1.7782, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 4.55, |
|
"learning_rate": 8.565051020408164e-05, |
|
"loss": 1.6556, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 4.56, |
|
"learning_rate": 8.561862244897959e-05, |
|
"loss": 1.6431, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 4.57, |
|
"learning_rate": 8.558673469387756e-05, |
|
"loss": 1.6783, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 4.58, |
|
"learning_rate": 8.555484693877551e-05, |
|
"loss": 1.6774, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 4.59, |
|
"learning_rate": 8.552295918367347e-05, |
|
"loss": 1.6588, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 4.6, |
|
"learning_rate": 8.549107142857143e-05, |
|
"loss": 1.6892, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 4.61, |
|
"learning_rate": 8.545918367346939e-05, |
|
"loss": 1.6022, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 4.62, |
|
"learning_rate": 8.542729591836736e-05, |
|
"loss": 1.7083, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 4.63, |
|
"learning_rate": 8.539540816326532e-05, |
|
"loss": 1.746, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 4.64, |
|
"learning_rate": 8.536352040816326e-05, |
|
"loss": 1.7238, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 4.65, |
|
"learning_rate": 8.533163265306123e-05, |
|
"loss": 1.6144, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 4.66, |
|
"learning_rate": 8.529974489795919e-05, |
|
"loss": 1.5568, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 4.67, |
|
"learning_rate": 8.526785714285715e-05, |
|
"loss": 1.6344, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 4.68, |
|
"learning_rate": 8.52359693877551e-05, |
|
"loss": 1.6804, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 4.69, |
|
"learning_rate": 8.520408163265306e-05, |
|
"loss": 1.5241, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 4.7, |
|
"learning_rate": 8.517219387755102e-05, |
|
"loss": 1.7548, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 4.71, |
|
"learning_rate": 8.514030612244899e-05, |
|
"loss": 1.6902, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 4.72, |
|
"learning_rate": 8.510841836734694e-05, |
|
"loss": 1.761, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 4.73, |
|
"learning_rate": 8.50765306122449e-05, |
|
"loss": 1.6532, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 4.74, |
|
"learning_rate": 8.504464285714287e-05, |
|
"loss": 1.6625, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 4.75, |
|
"learning_rate": 8.501275510204082e-05, |
|
"loss": 1.6393, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 4.77, |
|
"learning_rate": 8.498086734693878e-05, |
|
"loss": 1.6935, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 4.78, |
|
"learning_rate": 8.494897959183674e-05, |
|
"loss": 1.6177, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 4.79, |
|
"learning_rate": 8.49170918367347e-05, |
|
"loss": 1.6962, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 4.8, |
|
"learning_rate": 8.488520408163265e-05, |
|
"loss": 1.6814, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 4.81, |
|
"learning_rate": 8.485331632653063e-05, |
|
"loss": 1.631, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 4.82, |
|
"learning_rate": 8.482142857142857e-05, |
|
"loss": 1.6923, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 4.83, |
|
"learning_rate": 8.478954081632653e-05, |
|
"loss": 1.6235, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 4.84, |
|
"learning_rate": 8.47576530612245e-05, |
|
"loss": 1.6599, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 4.85, |
|
"learning_rate": 8.472576530612246e-05, |
|
"loss": 1.6514, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 4.86, |
|
"learning_rate": 8.469387755102041e-05, |
|
"loss": 1.6919, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 4.87, |
|
"learning_rate": 8.466198979591837e-05, |
|
"loss": 1.647, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 4.88, |
|
"learning_rate": 8.463010204081633e-05, |
|
"loss": 1.6861, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 4.89, |
|
"learning_rate": 8.45982142857143e-05, |
|
"loss": 1.7036, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 4.9, |
|
"learning_rate": 8.456632653061224e-05, |
|
"loss": 1.7445, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 4.91, |
|
"learning_rate": 8.45344387755102e-05, |
|
"loss": 1.6108, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 4.92, |
|
"learning_rate": 8.450255102040817e-05, |
|
"loss": 1.5834, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 4.93, |
|
"learning_rate": 8.447066326530613e-05, |
|
"loss": 1.6655, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 4.94, |
|
"learning_rate": 8.443877551020409e-05, |
|
"loss": 1.6679, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 4.95, |
|
"learning_rate": 8.440688775510205e-05, |
|
"loss": 1.6832, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 4.96, |
|
"learning_rate": 8.4375e-05, |
|
"loss": 1.7003, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 4.97, |
|
"learning_rate": 8.434311224489796e-05, |
|
"loss": 1.7136, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 4.98, |
|
"learning_rate": 8.431122448979592e-05, |
|
"loss": 1.6938, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 4.99, |
|
"learning_rate": 8.427933673469388e-05, |
|
"loss": 1.6589, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 5.0, |
|
"learning_rate": 8.424744897959184e-05, |
|
"loss": 1.7169, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 5.01, |
|
"learning_rate": 8.42155612244898e-05, |
|
"loss": 1.6738, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 5.02, |
|
"learning_rate": 8.418367346938776e-05, |
|
"loss": 1.5954, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 5.03, |
|
"learning_rate": 8.415178571428571e-05, |
|
"loss": 1.6149, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 5.04, |
|
"learning_rate": 8.411989795918368e-05, |
|
"loss": 1.7302, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 5.05, |
|
"learning_rate": 8.408801020408164e-05, |
|
"loss": 1.6168, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 5.06, |
|
"learning_rate": 8.40561224489796e-05, |
|
"loss": 1.572, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 5.07, |
|
"learning_rate": 8.402423469387755e-05, |
|
"loss": 1.7744, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 5.08, |
|
"learning_rate": 8.399234693877551e-05, |
|
"loss": 1.711, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 5.09, |
|
"learning_rate": 8.396045918367348e-05, |
|
"loss": 1.6042, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 5.1, |
|
"learning_rate": 8.392857142857144e-05, |
|
"loss": 1.7177, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 5.11, |
|
"learning_rate": 8.389668367346938e-05, |
|
"loss": 1.7372, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 5.12, |
|
"learning_rate": 8.386479591836736e-05, |
|
"loss": 1.6844, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 5.13, |
|
"learning_rate": 8.383290816326531e-05, |
|
"loss": 1.5831, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 5.14, |
|
"learning_rate": 8.380102040816327e-05, |
|
"loss": 1.6598, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 5.15, |
|
"learning_rate": 8.376913265306123e-05, |
|
"loss": 1.5603, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 5.16, |
|
"learning_rate": 8.373724489795919e-05, |
|
"loss": 1.6364, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 5.17, |
|
"learning_rate": 8.370535714285714e-05, |
|
"loss": 1.591, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 5.18, |
|
"learning_rate": 8.367346938775511e-05, |
|
"loss": 1.8037, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 5.19, |
|
"learning_rate": 8.364158163265306e-05, |
|
"loss": 1.5908, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 5.2, |
|
"learning_rate": 8.360969387755102e-05, |
|
"loss": 1.6017, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 5.21, |
|
"learning_rate": 8.357780612244899e-05, |
|
"loss": 1.718, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 5.22, |
|
"learning_rate": 8.354591836734695e-05, |
|
"loss": 1.669, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 5.23, |
|
"learning_rate": 8.35140306122449e-05, |
|
"loss": 1.5423, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 5.24, |
|
"learning_rate": 8.348214285714286e-05, |
|
"loss": 1.6151, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 5.25, |
|
"learning_rate": 8.345025510204082e-05, |
|
"loss": 1.623, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 5.26, |
|
"learning_rate": 8.341836734693878e-05, |
|
"loss": 1.5912, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 5.27, |
|
"learning_rate": 8.338647959183675e-05, |
|
"loss": 1.6331, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 5.28, |
|
"learning_rate": 8.335459183673469e-05, |
|
"loss": 1.6982, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 5.29, |
|
"learning_rate": 8.332270408163265e-05, |
|
"loss": 1.5511, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 5.3, |
|
"learning_rate": 8.329081632653062e-05, |
|
"loss": 1.5958, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 5.31, |
|
"learning_rate": 8.325892857142858e-05, |
|
"loss": 1.5648, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 5.32, |
|
"learning_rate": 8.322704081632652e-05, |
|
"loss": 1.4833, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 5.33, |
|
"learning_rate": 8.31951530612245e-05, |
|
"loss": 1.5844, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 5.34, |
|
"learning_rate": 8.316326530612245e-05, |
|
"loss": 1.6717, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 5.35, |
|
"learning_rate": 8.313137755102042e-05, |
|
"loss": 1.6574, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 5.36, |
|
"learning_rate": 8.309948979591837e-05, |
|
"loss": 1.622, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 5.37, |
|
"learning_rate": 8.306760204081633e-05, |
|
"loss": 1.5806, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 5.38, |
|
"learning_rate": 8.30357142857143e-05, |
|
"loss": 1.6023, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 5.39, |
|
"learning_rate": 8.300382653061225e-05, |
|
"loss": 1.6616, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 5.4, |
|
"learning_rate": 8.297193877551021e-05, |
|
"loss": 1.6409, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 5.41, |
|
"learning_rate": 8.294005102040817e-05, |
|
"loss": 1.6206, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 5.42, |
|
"learning_rate": 8.290816326530613e-05, |
|
"loss": 1.5892, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 5.43, |
|
"learning_rate": 8.287627551020408e-05, |
|
"loss": 1.6957, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 5.44, |
|
"learning_rate": 8.284438775510204e-05, |
|
"loss": 1.713, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 5.45, |
|
"learning_rate": 8.28125e-05, |
|
"loss": 1.6176, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 5.46, |
|
"learning_rate": 8.278061224489796e-05, |
|
"loss": 1.7442, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 5.47, |
|
"learning_rate": 8.274872448979593e-05, |
|
"loss": 1.6822, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 5.48, |
|
"learning_rate": 8.271683673469389e-05, |
|
"loss": 1.7747, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 5.49, |
|
"learning_rate": 8.268494897959183e-05, |
|
"loss": 1.5352, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 5.5, |
|
"learning_rate": 8.26530612244898e-05, |
|
"loss": 1.6603, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 5.51, |
|
"learning_rate": 8.262117346938776e-05, |
|
"loss": 1.6696, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 5.52, |
|
"learning_rate": 8.258928571428572e-05, |
|
"loss": 1.6286, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 5.53, |
|
"learning_rate": 8.255739795918368e-05, |
|
"loss": 1.6475, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 5.54, |
|
"learning_rate": 8.252551020408163e-05, |
|
"loss": 1.7275, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 5.55, |
|
"learning_rate": 8.249362244897959e-05, |
|
"loss": 1.681, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 5.56, |
|
"learning_rate": 8.246173469387756e-05, |
|
"loss": 1.5995, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 5.57, |
|
"learning_rate": 8.24298469387755e-05, |
|
"loss": 1.6233, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 5.58, |
|
"learning_rate": 8.239795918367348e-05, |
|
"loss": 1.6292, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 5.59, |
|
"learning_rate": 8.236607142857144e-05, |
|
"loss": 1.6524, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 5.6, |
|
"learning_rate": 8.23341836734694e-05, |
|
"loss": 1.735, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 5.61, |
|
"learning_rate": 8.230229591836735e-05, |
|
"loss": 1.5992, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 5.63, |
|
"learning_rate": 8.227040816326531e-05, |
|
"loss": 1.7104, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 5.64, |
|
"learning_rate": 8.223852040816327e-05, |
|
"loss": 1.6503, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 5.65, |
|
"learning_rate": 8.220663265306124e-05, |
|
"loss": 1.6799, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 5.66, |
|
"learning_rate": 8.217474489795918e-05, |
|
"loss": 1.7083, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 5.67, |
|
"learning_rate": 8.214285714285714e-05, |
|
"loss": 1.539, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 5.68, |
|
"learning_rate": 8.211096938775511e-05, |
|
"loss": 1.6676, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 5.69, |
|
"learning_rate": 8.207908163265307e-05, |
|
"loss": 1.5834, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 5.7, |
|
"learning_rate": 8.204719387755103e-05, |
|
"loss": 1.7084, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 5.71, |
|
"learning_rate": 8.201530612244898e-05, |
|
"loss": 1.746, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 5.72, |
|
"learning_rate": 8.198341836734694e-05, |
|
"loss": 1.6726, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 5.73, |
|
"learning_rate": 8.19515306122449e-05, |
|
"loss": 1.7247, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 5.74, |
|
"learning_rate": 8.191964285714287e-05, |
|
"loss": 1.7044, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 5.75, |
|
"learning_rate": 8.188775510204081e-05, |
|
"loss": 1.7063, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 5.76, |
|
"learning_rate": 8.185586734693877e-05, |
|
"loss": 1.6217, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 5.77, |
|
"learning_rate": 8.182397959183674e-05, |
|
"loss": 1.6446, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 5.78, |
|
"learning_rate": 8.17920918367347e-05, |
|
"loss": 1.5033, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 5.79, |
|
"learning_rate": 8.176020408163265e-05, |
|
"loss": 1.6669, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 5.8, |
|
"learning_rate": 8.172831632653062e-05, |
|
"loss": 1.6104, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 5.81, |
|
"learning_rate": 8.169642857142857e-05, |
|
"loss": 1.7092, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 5.82, |
|
"learning_rate": 8.166454081632653e-05, |
|
"loss": 1.6563, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 5.83, |
|
"learning_rate": 8.163265306122449e-05, |
|
"loss": 1.6982, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 5.84, |
|
"learning_rate": 8.160076530612245e-05, |
|
"loss": 1.6593, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 5.85, |
|
"learning_rate": 8.156887755102042e-05, |
|
"loss": 1.6762, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 5.86, |
|
"learning_rate": 8.153698979591838e-05, |
|
"loss": 1.6478, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 5.87, |
|
"learning_rate": 8.150510204081633e-05, |
|
"loss": 1.6337, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 5.88, |
|
"learning_rate": 8.147321428571429e-05, |
|
"loss": 1.6629, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 5.89, |
|
"learning_rate": 8.144132653061225e-05, |
|
"loss": 1.6911, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 5.9, |
|
"learning_rate": 8.140943877551021e-05, |
|
"loss": 1.6673, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 5.91, |
|
"learning_rate": 8.137755102040817e-05, |
|
"loss": 1.669, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 5.92, |
|
"learning_rate": 8.134566326530612e-05, |
|
"loss": 1.7474, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 5.93, |
|
"learning_rate": 8.131377551020408e-05, |
|
"loss": 1.7361, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 5.94, |
|
"learning_rate": 8.128188775510205e-05, |
|
"loss": 1.5395, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 5.95, |
|
"learning_rate": 8.125000000000001e-05, |
|
"loss": 1.667, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 5.96, |
|
"learning_rate": 8.121811224489795e-05, |
|
"loss": 1.6239, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 5.97, |
|
"learning_rate": 8.118622448979593e-05, |
|
"loss": 1.6123, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 5.98, |
|
"learning_rate": 8.115433673469388e-05, |
|
"loss": 1.6097, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 5.99, |
|
"learning_rate": 8.112244897959184e-05, |
|
"loss": 1.5834, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 6.0, |
|
"learning_rate": 8.10905612244898e-05, |
|
"loss": 1.616, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 6.01, |
|
"learning_rate": 8.105867346938776e-05, |
|
"loss": 1.7001, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 6.02, |
|
"learning_rate": 8.102678571428571e-05, |
|
"loss": 1.5911, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 6.03, |
|
"learning_rate": 8.099489795918369e-05, |
|
"loss": 1.6972, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 6.04, |
|
"learning_rate": 8.096301020408163e-05, |
|
"loss": 1.5402, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 6.05, |
|
"learning_rate": 8.093112244897959e-05, |
|
"loss": 1.4818, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 6.06, |
|
"learning_rate": 8.089923469387756e-05, |
|
"loss": 1.6306, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 6.07, |
|
"learning_rate": 8.086734693877552e-05, |
|
"loss": 1.663, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 6.08, |
|
"learning_rate": 8.083545918367347e-05, |
|
"loss": 1.7275, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 6.09, |
|
"learning_rate": 8.080357142857143e-05, |
|
"loss": 1.7, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 6.1, |
|
"learning_rate": 8.077168367346939e-05, |
|
"loss": 1.6751, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 6.11, |
|
"learning_rate": 8.073979591836736e-05, |
|
"loss": 1.6002, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 6.12, |
|
"learning_rate": 8.07079081632653e-05, |
|
"loss": 1.5782, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 6.13, |
|
"learning_rate": 8.067602040816326e-05, |
|
"loss": 1.5804, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 6.14, |
|
"learning_rate": 8.064413265306123e-05, |
|
"loss": 1.607, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 6.15, |
|
"learning_rate": 8.061224489795919e-05, |
|
"loss": 1.5291, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 6.16, |
|
"learning_rate": 8.058035714285715e-05, |
|
"loss": 1.6181, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 6.17, |
|
"learning_rate": 8.05484693877551e-05, |
|
"loss": 1.5749, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 6.18, |
|
"learning_rate": 8.051658163265306e-05, |
|
"loss": 1.6551, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 6.19, |
|
"learning_rate": 8.048469387755102e-05, |
|
"loss": 1.6582, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 6.2, |
|
"learning_rate": 8.0452806122449e-05, |
|
"loss": 1.5973, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 6.21, |
|
"learning_rate": 8.042091836734694e-05, |
|
"loss": 1.6114, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 6.22, |
|
"learning_rate": 8.03890306122449e-05, |
|
"loss": 1.6425, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 6.23, |
|
"learning_rate": 8.035714285714287e-05, |
|
"loss": 1.6389, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 6.24, |
|
"learning_rate": 8.032525510204082e-05, |
|
"loss": 1.6543, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 6.25, |
|
"learning_rate": 8.029336734693877e-05, |
|
"loss": 1.7075, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 6.26, |
|
"learning_rate": 8.026147959183674e-05, |
|
"loss": 1.7116, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 6.27, |
|
"learning_rate": 8.02295918367347e-05, |
|
"loss": 1.6724, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 6.28, |
|
"learning_rate": 8.019770408163266e-05, |
|
"loss": 1.6999, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 6.29, |
|
"learning_rate": 8.016581632653061e-05, |
|
"loss": 1.6217, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 6.3, |
|
"learning_rate": 8.013392857142857e-05, |
|
"loss": 1.4954, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 6.31, |
|
"learning_rate": 8.010204081632653e-05, |
|
"loss": 1.5963, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 6.32, |
|
"learning_rate": 8.00701530612245e-05, |
|
"loss": 1.5706, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 6.33, |
|
"learning_rate": 8.003826530612246e-05, |
|
"loss": 1.5399, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 6.34, |
|
"learning_rate": 8.000637755102042e-05, |
|
"loss": 1.5439, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 6.35, |
|
"learning_rate": 7.997448979591837e-05, |
|
"loss": 1.6247, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 6.36, |
|
"learning_rate": 7.994260204081633e-05, |
|
"loss": 1.5136, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 6.37, |
|
"learning_rate": 7.991071428571429e-05, |
|
"loss": 1.6517, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 6.38, |
|
"learning_rate": 7.987882653061225e-05, |
|
"loss": 1.4643, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 6.39, |
|
"learning_rate": 7.98469387755102e-05, |
|
"loss": 1.6165, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 6.4, |
|
"learning_rate": 7.981505102040817e-05, |
|
"loss": 1.6965, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 6.41, |
|
"learning_rate": 7.978316326530613e-05, |
|
"loss": 1.6557, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 6.42, |
|
"learning_rate": 7.975127551020408e-05, |
|
"loss": 1.589, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 6.43, |
|
"learning_rate": 7.971938775510205e-05, |
|
"loss": 1.5587, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 6.44, |
|
"learning_rate": 7.96875e-05, |
|
"loss": 1.6603, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 6.45, |
|
"learning_rate": 7.965561224489796e-05, |
|
"loss": 1.628, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 6.46, |
|
"learning_rate": 7.962372448979592e-05, |
|
"loss": 1.5991, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 6.47, |
|
"learning_rate": 7.959183673469388e-05, |
|
"loss": 1.5695, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 6.48, |
|
"learning_rate": 7.955994897959184e-05, |
|
"loss": 1.5779, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 6.5, |
|
"learning_rate": 7.952806122448981e-05, |
|
"loss": 1.6066, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 6.51, |
|
"learning_rate": 7.949617346938775e-05, |
|
"loss": 1.5591, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 6.52, |
|
"learning_rate": 7.946428571428571e-05, |
|
"loss": 1.7178, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 6.53, |
|
"learning_rate": 7.943239795918368e-05, |
|
"loss": 1.5171, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 6.54, |
|
"learning_rate": 7.940051020408164e-05, |
|
"loss": 1.5264, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 6.55, |
|
"learning_rate": 7.93686224489796e-05, |
|
"loss": 1.6441, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 6.56, |
|
"learning_rate": 7.933673469387755e-05, |
|
"loss": 1.6895, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 6.57, |
|
"learning_rate": 7.930484693877551e-05, |
|
"loss": 1.5807, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 6.58, |
|
"learning_rate": 7.927295918367348e-05, |
|
"loss": 1.6415, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 6.59, |
|
"learning_rate": 7.924107142857143e-05, |
|
"loss": 1.5917, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 6.6, |
|
"learning_rate": 7.920918367346939e-05, |
|
"loss": 1.683, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 6.61, |
|
"learning_rate": 7.917729591836736e-05, |
|
"loss": 1.7768, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 6.62, |
|
"learning_rate": 7.914540816326531e-05, |
|
"loss": 1.6131, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 6.63, |
|
"learning_rate": 7.911352040816327e-05, |
|
"loss": 1.6095, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 6.64, |
|
"learning_rate": 7.908163265306123e-05, |
|
"loss": 1.5111, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 6.65, |
|
"learning_rate": 7.904974489795919e-05, |
|
"loss": 1.5455, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 6.66, |
|
"learning_rate": 7.901785714285714e-05, |
|
"loss": 1.6746, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 6.67, |
|
"learning_rate": 7.898596938775512e-05, |
|
"loss": 1.635, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 6.68, |
|
"learning_rate": 7.895408163265306e-05, |
|
"loss": 1.4963, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 6.69, |
|
"learning_rate": 7.892219387755102e-05, |
|
"loss": 1.6298, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 6.7, |
|
"learning_rate": 7.889030612244899e-05, |
|
"loss": 1.6339, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 6.71, |
|
"learning_rate": 7.885841836734695e-05, |
|
"loss": 1.6626, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 6.72, |
|
"learning_rate": 7.882653061224489e-05, |
|
"loss": 1.6076, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 6.73, |
|
"learning_rate": 7.879464285714286e-05, |
|
"loss": 1.6432, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 6.74, |
|
"learning_rate": 7.876275510204082e-05, |
|
"loss": 1.713, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 6.75, |
|
"learning_rate": 7.873086734693878e-05, |
|
"loss": 1.6984, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 6.76, |
|
"learning_rate": 7.869897959183674e-05, |
|
"loss": 1.6848, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 6.77, |
|
"learning_rate": 7.86670918367347e-05, |
|
"loss": 1.6054, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 6.78, |
|
"learning_rate": 7.863520408163265e-05, |
|
"loss": 1.6365, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 6.79, |
|
"learning_rate": 7.860331632653062e-05, |
|
"loss": 1.5795, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 6.8, |
|
"learning_rate": 7.857142857142858e-05, |
|
"loss": 1.5735, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 6.81, |
|
"learning_rate": 7.853954081632652e-05, |
|
"loss": 1.5976, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 6.82, |
|
"learning_rate": 7.85076530612245e-05, |
|
"loss": 1.6806, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 6.83, |
|
"learning_rate": 7.847576530612245e-05, |
|
"loss": 1.5685, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 6.84, |
|
"learning_rate": 7.844387755102041e-05, |
|
"loss": 1.5815, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 6.85, |
|
"learning_rate": 7.841198979591837e-05, |
|
"loss": 1.5752, |
|
"step": 677 |
|
}, |
|
{ |
|
"epoch": 6.86, |
|
"learning_rate": 7.838010204081633e-05, |
|
"loss": 1.73, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 6.87, |
|
"learning_rate": 7.83482142857143e-05, |
|
"loss": 1.6906, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 6.88, |
|
"learning_rate": 7.831632653061226e-05, |
|
"loss": 1.4941, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 6.89, |
|
"learning_rate": 7.82844387755102e-05, |
|
"loss": 1.6237, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 6.9, |
|
"learning_rate": 7.825255102040817e-05, |
|
"loss": 1.6532, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 6.91, |
|
"learning_rate": 7.822066326530613e-05, |
|
"loss": 1.5407, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 6.92, |
|
"learning_rate": 7.818877551020409e-05, |
|
"loss": 1.6433, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 6.93, |
|
"learning_rate": 7.815688775510204e-05, |
|
"loss": 1.5911, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 6.94, |
|
"learning_rate": 7.8125e-05, |
|
"loss": 1.5864, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 6.95, |
|
"learning_rate": 7.809311224489796e-05, |
|
"loss": 1.5878, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 6.96, |
|
"learning_rate": 7.806122448979593e-05, |
|
"loss": 1.575, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 6.97, |
|
"learning_rate": 7.802933673469387e-05, |
|
"loss": 1.6028, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 6.98, |
|
"learning_rate": 7.799744897959183e-05, |
|
"loss": 1.6365, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 6.99, |
|
"learning_rate": 7.79655612244898e-05, |
|
"loss": 1.5638, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 7.0, |
|
"learning_rate": 7.793367346938776e-05, |
|
"loss": 1.5945, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 7.01, |
|
"learning_rate": 7.790178571428572e-05, |
|
"loss": 1.5479, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 7.02, |
|
"learning_rate": 7.786989795918368e-05, |
|
"loss": 1.5982, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 7.03, |
|
"learning_rate": 7.783801020408163e-05, |
|
"loss": 1.5528, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 7.04, |
|
"learning_rate": 7.780612244897959e-05, |
|
"loss": 1.5581, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 7.05, |
|
"learning_rate": 7.777423469387755e-05, |
|
"loss": 1.5725, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 7.06, |
|
"learning_rate": 7.774234693877551e-05, |
|
"loss": 1.6888, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 7.07, |
|
"learning_rate": 7.771045918367348e-05, |
|
"loss": 1.5152, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 7.08, |
|
"learning_rate": 7.767857142857144e-05, |
|
"loss": 1.5641, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 7.09, |
|
"learning_rate": 7.76466836734694e-05, |
|
"loss": 1.644, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 7.1, |
|
"learning_rate": 7.761479591836735e-05, |
|
"loss": 1.6291, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 7.11, |
|
"learning_rate": 7.758290816326531e-05, |
|
"loss": 1.6123, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 7.12, |
|
"learning_rate": 7.755102040816327e-05, |
|
"loss": 1.5258, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 7.13, |
|
"learning_rate": 7.751913265306124e-05, |
|
"loss": 1.5356, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 7.14, |
|
"learning_rate": 7.748724489795918e-05, |
|
"loss": 1.6682, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 7.15, |
|
"learning_rate": 7.745535714285714e-05, |
|
"loss": 1.5796, |
|
"step": 707 |
|
}, |
|
{ |
|
"epoch": 7.16, |
|
"learning_rate": 7.742346938775511e-05, |
|
"loss": 1.6159, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 7.17, |
|
"learning_rate": 7.739158163265307e-05, |
|
"loss": 1.5525, |
|
"step": 709 |
|
}, |
|
{ |
|
"epoch": 7.18, |
|
"learning_rate": 7.735969387755101e-05, |
|
"loss": 1.7342, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 7.19, |
|
"learning_rate": 7.732780612244899e-05, |
|
"loss": 1.5667, |
|
"step": 711 |
|
}, |
|
{ |
|
"epoch": 7.2, |
|
"learning_rate": 7.729591836734694e-05, |
|
"loss": 1.5964, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 7.21, |
|
"learning_rate": 7.72640306122449e-05, |
|
"loss": 1.5842, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 7.22, |
|
"learning_rate": 7.723214285714286e-05, |
|
"loss": 1.541, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 7.23, |
|
"learning_rate": 7.720025510204082e-05, |
|
"loss": 1.6021, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 7.24, |
|
"learning_rate": 7.716836734693877e-05, |
|
"loss": 1.591, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 7.25, |
|
"learning_rate": 7.713647959183675e-05, |
|
"loss": 1.6173, |
|
"step": 717 |
|
}, |
|
{ |
|
"epoch": 7.26, |
|
"learning_rate": 7.71045918367347e-05, |
|
"loss": 1.5007, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 7.27, |
|
"learning_rate": 7.707270408163265e-05, |
|
"loss": 1.5286, |
|
"step": 719 |
|
}, |
|
{ |
|
"epoch": 7.28, |
|
"learning_rate": 7.704081632653062e-05, |
|
"loss": 1.5203, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 7.29, |
|
"learning_rate": 7.700892857142858e-05, |
|
"loss": 1.5964, |
|
"step": 721 |
|
}, |
|
{ |
|
"epoch": 7.3, |
|
"learning_rate": 7.697704081632653e-05, |
|
"loss": 1.5882, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 7.31, |
|
"learning_rate": 7.694515306122449e-05, |
|
"loss": 1.5834, |
|
"step": 723 |
|
}, |
|
{ |
|
"epoch": 7.32, |
|
"learning_rate": 7.691326530612245e-05, |
|
"loss": 1.729, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 7.33, |
|
"learning_rate": 7.688137755102042e-05, |
|
"loss": 1.5524, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 7.34, |
|
"learning_rate": 7.684948979591838e-05, |
|
"loss": 1.5673, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 7.36, |
|
"learning_rate": 7.681760204081632e-05, |
|
"loss": 1.5539, |
|
"step": 727 |
|
}, |
|
{ |
|
"epoch": 7.37, |
|
"learning_rate": 7.67857142857143e-05, |
|
"loss": 1.6296, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 7.38, |
|
"learning_rate": 7.675382653061225e-05, |
|
"loss": 1.5992, |
|
"step": 729 |
|
}, |
|
{ |
|
"epoch": 7.39, |
|
"learning_rate": 7.672193877551021e-05, |
|
"loss": 1.5906, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 7.4, |
|
"learning_rate": 7.669005102040817e-05, |
|
"loss": 1.491, |
|
"step": 731 |
|
}, |
|
{ |
|
"epoch": 7.41, |
|
"learning_rate": 7.665816326530612e-05, |
|
"loss": 1.5692, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 7.42, |
|
"learning_rate": 7.662627551020408e-05, |
|
"loss": 1.5475, |
|
"step": 733 |
|
}, |
|
{ |
|
"epoch": 7.43, |
|
"learning_rate": 7.659438775510205e-05, |
|
"loss": 1.6955, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 7.44, |
|
"learning_rate": 7.65625e-05, |
|
"loss": 1.6321, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 7.45, |
|
"learning_rate": 7.653061224489796e-05, |
|
"loss": 1.6069, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 7.46, |
|
"learning_rate": 7.649872448979593e-05, |
|
"loss": 1.5381, |
|
"step": 737 |
|
}, |
|
{ |
|
"epoch": 7.47, |
|
"learning_rate": 7.646683673469388e-05, |
|
"loss": 1.5282, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 7.48, |
|
"learning_rate": 7.643494897959184e-05, |
|
"loss": 1.5751, |
|
"step": 739 |
|
}, |
|
{ |
|
"epoch": 7.49, |
|
"learning_rate": 7.64030612244898e-05, |
|
"loss": 1.7236, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 7.5, |
|
"learning_rate": 7.637117346938776e-05, |
|
"loss": 1.4974, |
|
"step": 741 |
|
}, |
|
{ |
|
"epoch": 7.51, |
|
"learning_rate": 7.633928571428572e-05, |
|
"loss": 1.5998, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 7.52, |
|
"learning_rate": 7.630739795918369e-05, |
|
"loss": 1.5756, |
|
"step": 743 |
|
}, |
|
{ |
|
"epoch": 7.53, |
|
"learning_rate": 7.627551020408163e-05, |
|
"loss": 1.5504, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 7.54, |
|
"learning_rate": 7.624362244897959e-05, |
|
"loss": 1.596, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 7.55, |
|
"learning_rate": 7.621173469387756e-05, |
|
"loss": 1.5373, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 7.56, |
|
"learning_rate": 7.617984693877552e-05, |
|
"loss": 1.343, |
|
"step": 747 |
|
}, |
|
{ |
|
"epoch": 7.57, |
|
"learning_rate": 7.614795918367347e-05, |
|
"loss": 1.6185, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 7.58, |
|
"learning_rate": 7.611607142857143e-05, |
|
"loss": 1.5853, |
|
"step": 749 |
|
}, |
|
{ |
|
"epoch": 7.59, |
|
"learning_rate": 7.608418367346939e-05, |
|
"loss": 1.5827, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 7.6, |
|
"learning_rate": 7.605229591836736e-05, |
|
"loss": 1.5355, |
|
"step": 751 |
|
}, |
|
{ |
|
"epoch": 7.61, |
|
"learning_rate": 7.60204081632653e-05, |
|
"loss": 1.6442, |
|
"step": 752 |
|
}, |
|
{ |
|
"epoch": 7.62, |
|
"learning_rate": 7.598852040816326e-05, |
|
"loss": 1.5472, |
|
"step": 753 |
|
}, |
|
{ |
|
"epoch": 7.63, |
|
"learning_rate": 7.595663265306123e-05, |
|
"loss": 1.7043, |
|
"step": 754 |
|
}, |
|
{ |
|
"epoch": 7.64, |
|
"learning_rate": 7.592474489795919e-05, |
|
"loss": 1.618, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 7.65, |
|
"learning_rate": 7.589285714285714e-05, |
|
"loss": 1.5461, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 7.66, |
|
"learning_rate": 7.586096938775511e-05, |
|
"loss": 1.6406, |
|
"step": 757 |
|
}, |
|
{ |
|
"epoch": 7.67, |
|
"learning_rate": 7.582908163265307e-05, |
|
"loss": 1.6037, |
|
"step": 758 |
|
}, |
|
{ |
|
"epoch": 7.68, |
|
"learning_rate": 7.579719387755102e-05, |
|
"loss": 1.5769, |
|
"step": 759 |
|
}, |
|
{ |
|
"epoch": 7.69, |
|
"learning_rate": 7.576530612244898e-05, |
|
"loss": 1.6035, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 7.7, |
|
"learning_rate": 7.573341836734694e-05, |
|
"loss": 1.6155, |
|
"step": 761 |
|
}, |
|
{ |
|
"epoch": 7.71, |
|
"learning_rate": 7.57015306122449e-05, |
|
"loss": 1.5729, |
|
"step": 762 |
|
}, |
|
{ |
|
"epoch": 7.72, |
|
"learning_rate": 7.566964285714287e-05, |
|
"loss": 1.5519, |
|
"step": 763 |
|
}, |
|
{ |
|
"epoch": 7.73, |
|
"learning_rate": 7.563775510204083e-05, |
|
"loss": 1.6702, |
|
"step": 764 |
|
}, |
|
{ |
|
"epoch": 7.74, |
|
"learning_rate": 7.560586734693877e-05, |
|
"loss": 1.6641, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 7.75, |
|
"learning_rate": 7.557397959183674e-05, |
|
"loss": 1.6938, |
|
"step": 766 |
|
}, |
|
{ |
|
"epoch": 7.76, |
|
"learning_rate": 7.55420918367347e-05, |
|
"loss": 1.6093, |
|
"step": 767 |
|
}, |
|
{ |
|
"epoch": 7.77, |
|
"learning_rate": 7.551020408163266e-05, |
|
"loss": 1.5468, |
|
"step": 768 |
|
}, |
|
{ |
|
"epoch": 7.78, |
|
"learning_rate": 7.547831632653061e-05, |
|
"loss": 1.6407, |
|
"step": 769 |
|
}, |
|
{ |
|
"epoch": 7.79, |
|
"learning_rate": 7.544642857142857e-05, |
|
"loss": 1.6491, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 7.8, |
|
"learning_rate": 7.541454081632653e-05, |
|
"loss": 1.4902, |
|
"step": 771 |
|
}, |
|
{ |
|
"epoch": 7.81, |
|
"learning_rate": 7.53826530612245e-05, |
|
"loss": 1.562, |
|
"step": 772 |
|
}, |
|
{ |
|
"epoch": 7.82, |
|
"learning_rate": 7.535076530612244e-05, |
|
"loss": 1.5765, |
|
"step": 773 |
|
}, |
|
{ |
|
"epoch": 7.83, |
|
"learning_rate": 7.531887755102042e-05, |
|
"loss": 1.541, |
|
"step": 774 |
|
}, |
|
{ |
|
"epoch": 7.84, |
|
"learning_rate": 7.528698979591837e-05, |
|
"loss": 1.6346, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 7.85, |
|
"learning_rate": 7.525510204081633e-05, |
|
"loss": 1.482, |
|
"step": 776 |
|
}, |
|
{ |
|
"epoch": 7.86, |
|
"learning_rate": 7.522321428571429e-05, |
|
"loss": 1.6146, |
|
"step": 777 |
|
}, |
|
{ |
|
"epoch": 7.87, |
|
"learning_rate": 7.519132653061225e-05, |
|
"loss": 1.569, |
|
"step": 778 |
|
}, |
|
{ |
|
"epoch": 7.88, |
|
"learning_rate": 7.51594387755102e-05, |
|
"loss": 1.5714, |
|
"step": 779 |
|
}, |
|
{ |
|
"epoch": 7.89, |
|
"learning_rate": 7.512755102040818e-05, |
|
"loss": 1.5187, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 7.9, |
|
"learning_rate": 7.509566326530612e-05, |
|
"loss": 1.5353, |
|
"step": 781 |
|
}, |
|
{ |
|
"epoch": 7.91, |
|
"learning_rate": 7.506377551020408e-05, |
|
"loss": 1.471, |
|
"step": 782 |
|
}, |
|
{ |
|
"epoch": 7.92, |
|
"learning_rate": 7.503188775510205e-05, |
|
"loss": 1.5398, |
|
"step": 783 |
|
}, |
|
{ |
|
"epoch": 7.93, |
|
"learning_rate": 7.500000000000001e-05, |
|
"loss": 1.5894, |
|
"step": 784 |
|
}, |
|
{ |
|
"epoch": 7.94, |
|
"learning_rate": 7.496811224489796e-05, |
|
"loss": 1.6253, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 7.95, |
|
"learning_rate": 7.493622448979592e-05, |
|
"loss": 1.5551, |
|
"step": 786 |
|
}, |
|
{ |
|
"epoch": 7.96, |
|
"learning_rate": 7.490433673469388e-05, |
|
"loss": 1.625, |
|
"step": 787 |
|
}, |
|
{ |
|
"epoch": 7.97, |
|
"learning_rate": 7.487244897959184e-05, |
|
"loss": 1.6302, |
|
"step": 788 |
|
}, |
|
{ |
|
"epoch": 7.98, |
|
"learning_rate": 7.484056122448981e-05, |
|
"loss": 1.6363, |
|
"step": 789 |
|
}, |
|
{ |
|
"epoch": 7.99, |
|
"learning_rate": 7.480867346938775e-05, |
|
"loss": 1.5215, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 8.0, |
|
"learning_rate": 7.477678571428571e-05, |
|
"loss": 1.4867, |
|
"step": 791 |
|
}, |
|
{ |
|
"epoch": 8.01, |
|
"learning_rate": 7.474489795918368e-05, |
|
"loss": 1.6359, |
|
"step": 792 |
|
}, |
|
{ |
|
"epoch": 8.02, |
|
"learning_rate": 7.471301020408164e-05, |
|
"loss": 1.5422, |
|
"step": 793 |
|
}, |
|
{ |
|
"epoch": 8.03, |
|
"learning_rate": 7.468112244897958e-05, |
|
"loss": 1.5507, |
|
"step": 794 |
|
}, |
|
{ |
|
"epoch": 8.04, |
|
"learning_rate": 7.464923469387756e-05, |
|
"loss": 1.5375, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 8.05, |
|
"learning_rate": 7.461734693877551e-05, |
|
"loss": 1.5598, |
|
"step": 796 |
|
}, |
|
{ |
|
"epoch": 8.06, |
|
"learning_rate": 7.458545918367348e-05, |
|
"loss": 1.5592, |
|
"step": 797 |
|
}, |
|
{ |
|
"epoch": 8.07, |
|
"learning_rate": 7.455357142857143e-05, |
|
"loss": 1.5317, |
|
"step": 798 |
|
}, |
|
{ |
|
"epoch": 8.08, |
|
"learning_rate": 7.452168367346939e-05, |
|
"loss": 1.5153, |
|
"step": 799 |
|
}, |
|
{ |
|
"epoch": 8.09, |
|
"learning_rate": 7.448979591836736e-05, |
|
"loss": 1.6207, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 8.1, |
|
"learning_rate": 7.445790816326532e-05, |
|
"loss": 1.5162, |
|
"step": 801 |
|
}, |
|
{ |
|
"epoch": 8.11, |
|
"learning_rate": 7.442602040816326e-05, |
|
"loss": 1.606, |
|
"step": 802 |
|
}, |
|
{ |
|
"epoch": 8.12, |
|
"learning_rate": 7.439413265306123e-05, |
|
"loss": 1.5156, |
|
"step": 803 |
|
}, |
|
{ |
|
"epoch": 8.13, |
|
"learning_rate": 7.436224489795919e-05, |
|
"loss": 1.5116, |
|
"step": 804 |
|
}, |
|
{ |
|
"epoch": 8.14, |
|
"learning_rate": 7.433035714285715e-05, |
|
"loss": 1.6724, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 8.15, |
|
"learning_rate": 7.42984693877551e-05, |
|
"loss": 1.4939, |
|
"step": 806 |
|
}, |
|
{ |
|
"epoch": 8.16, |
|
"learning_rate": 7.426658163265306e-05, |
|
"loss": 1.5348, |
|
"step": 807 |
|
}, |
|
{ |
|
"epoch": 8.17, |
|
"learning_rate": 7.423469387755102e-05, |
|
"loss": 1.5413, |
|
"step": 808 |
|
}, |
|
{ |
|
"epoch": 8.18, |
|
"learning_rate": 7.420280612244899e-05, |
|
"loss": 1.6815, |
|
"step": 809 |
|
}, |
|
{ |
|
"epoch": 8.19, |
|
"learning_rate": 7.417091836734695e-05, |
|
"loss": 1.4869, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 8.2, |
|
"learning_rate": 7.413903061224489e-05, |
|
"loss": 1.5318, |
|
"step": 811 |
|
}, |
|
{ |
|
"epoch": 8.21, |
|
"learning_rate": 7.410714285714286e-05, |
|
"loss": 1.5961, |
|
"step": 812 |
|
}, |
|
{ |
|
"epoch": 8.23, |
|
"learning_rate": 7.407525510204082e-05, |
|
"loss": 1.4803, |
|
"step": 813 |
|
}, |
|
{ |
|
"epoch": 8.24, |
|
"learning_rate": 7.404336734693878e-05, |
|
"loss": 1.5162, |
|
"step": 814 |
|
}, |
|
{ |
|
"epoch": 8.25, |
|
"learning_rate": 7.401147959183674e-05, |
|
"loss": 1.3663, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 8.26, |
|
"learning_rate": 7.39795918367347e-05, |
|
"loss": 1.5663, |
|
"step": 816 |
|
}, |
|
{ |
|
"epoch": 8.27, |
|
"learning_rate": 7.394770408163265e-05, |
|
"loss": 1.448, |
|
"step": 817 |
|
}, |
|
{ |
|
"epoch": 8.28, |
|
"learning_rate": 7.391581632653062e-05, |
|
"loss": 1.5293, |
|
"step": 818 |
|
}, |
|
{ |
|
"epoch": 8.29, |
|
"learning_rate": 7.388392857142857e-05, |
|
"loss": 1.5237, |
|
"step": 819 |
|
}, |
|
{ |
|
"epoch": 8.3, |
|
"learning_rate": 7.385204081632653e-05, |
|
"loss": 1.5021, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 8.31, |
|
"learning_rate": 7.38201530612245e-05, |
|
"loss": 1.6319, |
|
"step": 821 |
|
}, |
|
{ |
|
"epoch": 8.32, |
|
"learning_rate": 7.378826530612245e-05, |
|
"loss": 1.5267, |
|
"step": 822 |
|
}, |
|
{ |
|
"epoch": 8.33, |
|
"learning_rate": 7.375637755102041e-05, |
|
"loss": 1.5979, |
|
"step": 823 |
|
}, |
|
{ |
|
"epoch": 8.34, |
|
"learning_rate": 7.372448979591837e-05, |
|
"loss": 1.5628, |
|
"step": 824 |
|
}, |
|
{ |
|
"epoch": 8.35, |
|
"learning_rate": 7.369260204081633e-05, |
|
"loss": 1.6452, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 8.36, |
|
"learning_rate": 7.36607142857143e-05, |
|
"loss": 1.5668, |
|
"step": 826 |
|
}, |
|
{ |
|
"epoch": 8.37, |
|
"learning_rate": 7.362882653061224e-05, |
|
"loss": 1.5252, |
|
"step": 827 |
|
}, |
|
{ |
|
"epoch": 8.38, |
|
"learning_rate": 7.35969387755102e-05, |
|
"loss": 1.4068, |
|
"step": 828 |
|
}, |
|
{ |
|
"epoch": 8.39, |
|
"learning_rate": 7.356505102040817e-05, |
|
"loss": 1.5646, |
|
"step": 829 |
|
}, |
|
{ |
|
"epoch": 8.4, |
|
"learning_rate": 7.353316326530613e-05, |
|
"loss": 1.572, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 8.41, |
|
"learning_rate": 7.350127551020409e-05, |
|
"loss": 1.6101, |
|
"step": 831 |
|
}, |
|
{ |
|
"epoch": 8.42, |
|
"learning_rate": 7.346938775510205e-05, |
|
"loss": 1.4802, |
|
"step": 832 |
|
}, |
|
{ |
|
"epoch": 8.43, |
|
"learning_rate": 7.34375e-05, |
|
"loss": 1.5475, |
|
"step": 833 |
|
}, |
|
{ |
|
"epoch": 8.44, |
|
"learning_rate": 7.340561224489796e-05, |
|
"loss": 1.6726, |
|
"step": 834 |
|
}, |
|
{ |
|
"epoch": 8.45, |
|
"learning_rate": 7.337372448979593e-05, |
|
"loss": 1.5066, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 8.46, |
|
"learning_rate": 7.334183673469388e-05, |
|
"loss": 1.4366, |
|
"step": 836 |
|
}, |
|
{ |
|
"epoch": 8.47, |
|
"learning_rate": 7.330994897959183e-05, |
|
"loss": 1.4615, |
|
"step": 837 |
|
}, |
|
{ |
|
"epoch": 8.48, |
|
"learning_rate": 7.32780612244898e-05, |
|
"loss": 1.5023, |
|
"step": 838 |
|
}, |
|
{ |
|
"epoch": 8.49, |
|
"learning_rate": 7.324617346938776e-05, |
|
"loss": 1.5047, |
|
"step": 839 |
|
}, |
|
{ |
|
"epoch": 8.5, |
|
"learning_rate": 7.321428571428571e-05, |
|
"loss": 1.5154, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 8.51, |
|
"learning_rate": 7.318239795918368e-05, |
|
"loss": 1.5953, |
|
"step": 841 |
|
}, |
|
{ |
|
"epoch": 8.52, |
|
"learning_rate": 7.315051020408164e-05, |
|
"loss": 1.5582, |
|
"step": 842 |
|
}, |
|
{ |
|
"epoch": 8.53, |
|
"learning_rate": 7.31186224489796e-05, |
|
"loss": 1.4305, |
|
"step": 843 |
|
}, |
|
{ |
|
"epoch": 8.54, |
|
"learning_rate": 7.308673469387755e-05, |
|
"loss": 1.5189, |
|
"step": 844 |
|
}, |
|
{ |
|
"epoch": 8.55, |
|
"learning_rate": 7.305484693877551e-05, |
|
"loss": 1.5471, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 8.56, |
|
"learning_rate": 7.302295918367348e-05, |
|
"loss": 1.5957, |
|
"step": 846 |
|
}, |
|
{ |
|
"epoch": 8.57, |
|
"learning_rate": 7.299107142857144e-05, |
|
"loss": 1.6169, |
|
"step": 847 |
|
}, |
|
{ |
|
"epoch": 8.58, |
|
"learning_rate": 7.29591836734694e-05, |
|
"loss": 1.527, |
|
"step": 848 |
|
}, |
|
{ |
|
"epoch": 8.59, |
|
"learning_rate": 7.292729591836735e-05, |
|
"loss": 1.5946, |
|
"step": 849 |
|
}, |
|
{ |
|
"epoch": 8.6, |
|
"learning_rate": 7.289540816326531e-05, |
|
"loss": 1.5168, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 8.61, |
|
"learning_rate": 7.286352040816327e-05, |
|
"loss": 1.611, |
|
"step": 851 |
|
}, |
|
{ |
|
"epoch": 8.62, |
|
"learning_rate": 7.283163265306123e-05, |
|
"loss": 1.579, |
|
"step": 852 |
|
}, |
|
{ |
|
"epoch": 8.63, |
|
"learning_rate": 7.279974489795918e-05, |
|
"loss": 1.5112, |
|
"step": 853 |
|
}, |
|
{ |
|
"epoch": 8.64, |
|
"learning_rate": 7.276785714285714e-05, |
|
"loss": 1.6126, |
|
"step": 854 |
|
}, |
|
{ |
|
"epoch": 8.65, |
|
"learning_rate": 7.273596938775511e-05, |
|
"loss": 1.6139, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 8.66, |
|
"learning_rate": 7.270408163265307e-05, |
|
"loss": 1.5538, |
|
"step": 856 |
|
}, |
|
{ |
|
"epoch": 8.67, |
|
"learning_rate": 7.267219387755102e-05, |
|
"loss": 1.4936, |
|
"step": 857 |
|
}, |
|
{ |
|
"epoch": 8.68, |
|
"learning_rate": 7.264030612244899e-05, |
|
"loss": 1.5517, |
|
"step": 858 |
|
}, |
|
{ |
|
"epoch": 8.69, |
|
"learning_rate": 7.260841836734694e-05, |
|
"loss": 1.5221, |
|
"step": 859 |
|
}, |
|
{ |
|
"epoch": 8.7, |
|
"learning_rate": 7.25765306122449e-05, |
|
"loss": 1.552, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 8.71, |
|
"learning_rate": 7.254464285714286e-05, |
|
"loss": 1.5297, |
|
"step": 861 |
|
}, |
|
{ |
|
"epoch": 8.72, |
|
"learning_rate": 7.251275510204082e-05, |
|
"loss": 1.4874, |
|
"step": 862 |
|
}, |
|
{ |
|
"epoch": 8.73, |
|
"learning_rate": 7.248086734693878e-05, |
|
"loss": 1.6291, |
|
"step": 863 |
|
}, |
|
{ |
|
"epoch": 8.74, |
|
"learning_rate": 7.244897959183675e-05, |
|
"loss": 1.5594, |
|
"step": 864 |
|
}, |
|
{ |
|
"epoch": 8.75, |
|
"learning_rate": 7.241709183673469e-05, |
|
"loss": 1.5455, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 8.76, |
|
"learning_rate": 7.238520408163265e-05, |
|
"loss": 1.4842, |
|
"step": 866 |
|
}, |
|
{ |
|
"epoch": 8.77, |
|
"learning_rate": 7.235331632653062e-05, |
|
"loss": 1.676, |
|
"step": 867 |
|
}, |
|
{ |
|
"epoch": 8.78, |
|
"learning_rate": 7.232142857142858e-05, |
|
"loss": 1.4955, |
|
"step": 868 |
|
}, |
|
{ |
|
"epoch": 8.79, |
|
"learning_rate": 7.228954081632653e-05, |
|
"loss": 1.5832, |
|
"step": 869 |
|
}, |
|
{ |
|
"epoch": 8.8, |
|
"learning_rate": 7.225765306122449e-05, |
|
"loss": 1.5278, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 8.81, |
|
"learning_rate": 7.222576530612245e-05, |
|
"loss": 1.6077, |
|
"step": 871 |
|
}, |
|
{ |
|
"epoch": 8.82, |
|
"learning_rate": 7.219387755102042e-05, |
|
"loss": 1.5612, |
|
"step": 872 |
|
}, |
|
{ |
|
"epoch": 8.83, |
|
"learning_rate": 7.216198979591837e-05, |
|
"loss": 1.549, |
|
"step": 873 |
|
}, |
|
{ |
|
"epoch": 8.84, |
|
"learning_rate": 7.213010204081632e-05, |
|
"loss": 1.6173, |
|
"step": 874 |
|
}, |
|
{ |
|
"epoch": 8.85, |
|
"learning_rate": 7.20982142857143e-05, |
|
"loss": 1.6918, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 8.86, |
|
"learning_rate": 7.206632653061225e-05, |
|
"loss": 1.5562, |
|
"step": 876 |
|
}, |
|
{ |
|
"epoch": 8.87, |
|
"learning_rate": 7.203443877551021e-05, |
|
"loss": 1.5568, |
|
"step": 877 |
|
}, |
|
{ |
|
"epoch": 8.88, |
|
"learning_rate": 7.200255102040817e-05, |
|
"loss": 1.6067, |
|
"step": 878 |
|
}, |
|
{ |
|
"epoch": 8.89, |
|
"learning_rate": 7.197066326530613e-05, |
|
"loss": 1.493, |
|
"step": 879 |
|
}, |
|
{ |
|
"epoch": 8.9, |
|
"learning_rate": 7.193877551020408e-05, |
|
"loss": 1.597, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 8.91, |
|
"learning_rate": 7.190688775510205e-05, |
|
"loss": 1.5958, |
|
"step": 881 |
|
}, |
|
{ |
|
"epoch": 8.92, |
|
"learning_rate": 7.1875e-05, |
|
"loss": 1.5026, |
|
"step": 882 |
|
}, |
|
{ |
|
"epoch": 8.93, |
|
"learning_rate": 7.184311224489796e-05, |
|
"loss": 1.4857, |
|
"step": 883 |
|
}, |
|
{ |
|
"epoch": 8.94, |
|
"learning_rate": 7.181122448979593e-05, |
|
"loss": 1.5722, |
|
"step": 884 |
|
}, |
|
{ |
|
"epoch": 8.95, |
|
"learning_rate": 7.177933673469389e-05, |
|
"loss": 1.5176, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 8.96, |
|
"learning_rate": 7.174744897959183e-05, |
|
"loss": 1.4883, |
|
"step": 886 |
|
}, |
|
{ |
|
"epoch": 8.97, |
|
"learning_rate": 7.17155612244898e-05, |
|
"loss": 1.5146, |
|
"step": 887 |
|
}, |
|
{ |
|
"epoch": 8.98, |
|
"learning_rate": 7.168367346938776e-05, |
|
"loss": 1.5528, |
|
"step": 888 |
|
}, |
|
{ |
|
"epoch": 8.99, |
|
"learning_rate": 7.165178571428572e-05, |
|
"loss": 1.5486, |
|
"step": 889 |
|
}, |
|
{ |
|
"epoch": 9.0, |
|
"learning_rate": 7.161989795918367e-05, |
|
"loss": 1.6233, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 9.01, |
|
"learning_rate": 7.158801020408163e-05, |
|
"loss": 1.4514, |
|
"step": 891 |
|
}, |
|
{ |
|
"epoch": 9.02, |
|
"learning_rate": 7.155612244897959e-05, |
|
"loss": 1.4619, |
|
"step": 892 |
|
}, |
|
{ |
|
"epoch": 9.03, |
|
"learning_rate": 7.152423469387756e-05, |
|
"loss": 1.5101, |
|
"step": 893 |
|
}, |
|
{ |
|
"epoch": 9.04, |
|
"learning_rate": 7.149234693877552e-05, |
|
"loss": 1.5235, |
|
"step": 894 |
|
}, |
|
{ |
|
"epoch": 9.05, |
|
"learning_rate": 7.146045918367348e-05, |
|
"loss": 1.6027, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 9.06, |
|
"learning_rate": 7.142857142857143e-05, |
|
"loss": 1.5341, |
|
"step": 896 |
|
}, |
|
{ |
|
"epoch": 9.07, |
|
"learning_rate": 7.139668367346939e-05, |
|
"loss": 1.4323, |
|
"step": 897 |
|
}, |
|
{ |
|
"epoch": 9.09, |
|
"learning_rate": 7.136479591836735e-05, |
|
"loss": 1.5753, |
|
"step": 898 |
|
}, |
|
{ |
|
"epoch": 9.1, |
|
"learning_rate": 7.133290816326531e-05, |
|
"loss": 1.5127, |
|
"step": 899 |
|
}, |
|
{ |
|
"epoch": 9.11, |
|
"learning_rate": 7.130102040816326e-05, |
|
"loss": 1.5789, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 9.12, |
|
"learning_rate": 7.126913265306124e-05, |
|
"loss": 1.473, |
|
"step": 901 |
|
}, |
|
{ |
|
"epoch": 9.13, |
|
"learning_rate": 7.12372448979592e-05, |
|
"loss": 1.4387, |
|
"step": 902 |
|
}, |
|
{ |
|
"epoch": 9.14, |
|
"learning_rate": 7.120535714285714e-05, |
|
"loss": 1.4895, |
|
"step": 903 |
|
}, |
|
{ |
|
"epoch": 9.15, |
|
"learning_rate": 7.117346938775511e-05, |
|
"loss": 1.4464, |
|
"step": 904 |
|
}, |
|
{ |
|
"epoch": 9.16, |
|
"learning_rate": 7.114158163265307e-05, |
|
"loss": 1.5466, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 9.17, |
|
"learning_rate": 7.110969387755102e-05, |
|
"loss": 1.5301, |
|
"step": 906 |
|
}, |
|
{ |
|
"epoch": 9.18, |
|
"learning_rate": 7.107780612244898e-05, |
|
"loss": 1.5272, |
|
"step": 907 |
|
}, |
|
{ |
|
"epoch": 9.19, |
|
"learning_rate": 7.104591836734694e-05, |
|
"loss": 1.58, |
|
"step": 908 |
|
}, |
|
{ |
|
"epoch": 9.2, |
|
"learning_rate": 7.10140306122449e-05, |
|
"loss": 1.4687, |
|
"step": 909 |
|
}, |
|
{ |
|
"epoch": 9.21, |
|
"learning_rate": 7.098214285714287e-05, |
|
"loss": 1.4929, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 9.22, |
|
"learning_rate": 7.095025510204081e-05, |
|
"loss": 1.4503, |
|
"step": 911 |
|
}, |
|
{ |
|
"epoch": 9.23, |
|
"learning_rate": 7.091836734693877e-05, |
|
"loss": 1.4659, |
|
"step": 912 |
|
}, |
|
{ |
|
"epoch": 9.24, |
|
"learning_rate": 7.088647959183674e-05, |
|
"loss": 1.5948, |
|
"step": 913 |
|
}, |
|
{ |
|
"epoch": 9.25, |
|
"learning_rate": 7.08545918367347e-05, |
|
"loss": 1.4717, |
|
"step": 914 |
|
}, |
|
{ |
|
"epoch": 9.26, |
|
"learning_rate": 7.082270408163266e-05, |
|
"loss": 1.5466, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 9.27, |
|
"learning_rate": 7.079081632653062e-05, |
|
"loss": 1.4722, |
|
"step": 916 |
|
}, |
|
{ |
|
"epoch": 9.28, |
|
"learning_rate": 7.075892857142857e-05, |
|
"loss": 1.4694, |
|
"step": 917 |
|
}, |
|
{ |
|
"epoch": 9.29, |
|
"learning_rate": 7.072704081632653e-05, |
|
"loss": 1.3661, |
|
"step": 918 |
|
}, |
|
{ |
|
"epoch": 9.3, |
|
"learning_rate": 7.069515306122449e-05, |
|
"loss": 1.5341, |
|
"step": 919 |
|
}, |
|
{ |
|
"epoch": 9.31, |
|
"learning_rate": 7.066326530612245e-05, |
|
"loss": 1.4553, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 9.32, |
|
"learning_rate": 7.063137755102042e-05, |
|
"loss": 1.4315, |
|
"step": 921 |
|
}, |
|
{ |
|
"epoch": 9.33, |
|
"learning_rate": 7.059948979591838e-05, |
|
"loss": 1.5519, |
|
"step": 922 |
|
}, |
|
{ |
|
"epoch": 9.34, |
|
"learning_rate": 7.056760204081633e-05, |
|
"loss": 1.4633, |
|
"step": 923 |
|
}, |
|
{ |
|
"epoch": 9.35, |
|
"learning_rate": 7.053571428571429e-05, |
|
"loss": 1.5942, |
|
"step": 924 |
|
}, |
|
{ |
|
"epoch": 9.36, |
|
"learning_rate": 7.050382653061225e-05, |
|
"loss": 1.3865, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 9.37, |
|
"learning_rate": 7.04719387755102e-05, |
|
"loss": 1.5227, |
|
"step": 926 |
|
}, |
|
{ |
|
"epoch": 9.38, |
|
"learning_rate": 7.044005102040818e-05, |
|
"loss": 1.5258, |
|
"step": 927 |
|
}, |
|
{ |
|
"epoch": 9.39, |
|
"learning_rate": 7.040816326530612e-05, |
|
"loss": 1.576, |
|
"step": 928 |
|
}, |
|
{ |
|
"epoch": 9.4, |
|
"learning_rate": 7.037627551020408e-05, |
|
"loss": 1.5317, |
|
"step": 929 |
|
}, |
|
{ |
|
"epoch": 9.41, |
|
"learning_rate": 7.034438775510205e-05, |
|
"loss": 1.537, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 9.42, |
|
"learning_rate": 7.031250000000001e-05, |
|
"loss": 1.3364, |
|
"step": 931 |
|
}, |
|
{ |
|
"epoch": 9.43, |
|
"learning_rate": 7.028061224489795e-05, |
|
"loss": 1.499, |
|
"step": 932 |
|
}, |
|
{ |
|
"epoch": 9.44, |
|
"learning_rate": 7.024872448979592e-05, |
|
"loss": 1.4468, |
|
"step": 933 |
|
}, |
|
{ |
|
"epoch": 9.45, |
|
"learning_rate": 7.021683673469388e-05, |
|
"loss": 1.5918, |
|
"step": 934 |
|
}, |
|
{ |
|
"epoch": 9.46, |
|
"learning_rate": 7.018494897959184e-05, |
|
"loss": 1.5661, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 9.47, |
|
"learning_rate": 7.01530612244898e-05, |
|
"loss": 1.5732, |
|
"step": 936 |
|
}, |
|
{ |
|
"epoch": 9.48, |
|
"learning_rate": 7.012117346938775e-05, |
|
"loss": 1.5791, |
|
"step": 937 |
|
}, |
|
{ |
|
"epoch": 9.49, |
|
"learning_rate": 7.008928571428571e-05, |
|
"loss": 1.5933, |
|
"step": 938 |
|
}, |
|
{ |
|
"epoch": 9.5, |
|
"learning_rate": 7.005739795918368e-05, |
|
"loss": 1.5665, |
|
"step": 939 |
|
}, |
|
{ |
|
"epoch": 9.51, |
|
"learning_rate": 7.002551020408164e-05, |
|
"loss": 1.4839, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 9.52, |
|
"learning_rate": 6.999362244897959e-05, |
|
"loss": 1.4768, |
|
"step": 941 |
|
}, |
|
{ |
|
"epoch": 9.53, |
|
"learning_rate": 6.996173469387756e-05, |
|
"loss": 1.6085, |
|
"step": 942 |
|
}, |
|
{ |
|
"epoch": 9.54, |
|
"learning_rate": 6.992984693877551e-05, |
|
"loss": 1.4922, |
|
"step": 943 |
|
}, |
|
{ |
|
"epoch": 9.55, |
|
"learning_rate": 6.989795918367347e-05, |
|
"loss": 1.5707, |
|
"step": 944 |
|
}, |
|
{ |
|
"epoch": 9.56, |
|
"learning_rate": 6.986607142857143e-05, |
|
"loss": 1.5311, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 9.57, |
|
"learning_rate": 6.983418367346939e-05, |
|
"loss": 1.5081, |
|
"step": 946 |
|
}, |
|
{ |
|
"epoch": 9.58, |
|
"learning_rate": 6.980229591836736e-05, |
|
"loss": 1.512, |
|
"step": 947 |
|
}, |
|
{ |
|
"epoch": 9.59, |
|
"learning_rate": 6.977040816326532e-05, |
|
"loss": 1.5491, |
|
"step": 948 |
|
}, |
|
{ |
|
"epoch": 9.6, |
|
"learning_rate": 6.973852040816326e-05, |
|
"loss": 1.5359, |
|
"step": 949 |
|
}, |
|
{ |
|
"epoch": 9.61, |
|
"learning_rate": 6.970663265306123e-05, |
|
"loss": 1.5589, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 9.62, |
|
"learning_rate": 6.967474489795919e-05, |
|
"loss": 1.4852, |
|
"step": 951 |
|
}, |
|
{ |
|
"epoch": 9.63, |
|
"learning_rate": 6.964285714285715e-05, |
|
"loss": 1.5861, |
|
"step": 952 |
|
}, |
|
{ |
|
"epoch": 9.64, |
|
"learning_rate": 6.96109693877551e-05, |
|
"loss": 1.3901, |
|
"step": 953 |
|
}, |
|
{ |
|
"epoch": 9.65, |
|
"learning_rate": 6.957908163265306e-05, |
|
"loss": 1.5414, |
|
"step": 954 |
|
}, |
|
{ |
|
"epoch": 9.66, |
|
"learning_rate": 6.954719387755102e-05, |
|
"loss": 1.5375, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 9.67, |
|
"learning_rate": 6.951530612244899e-05, |
|
"loss": 1.5343, |
|
"step": 956 |
|
}, |
|
{ |
|
"epoch": 9.68, |
|
"learning_rate": 6.948341836734694e-05, |
|
"loss": 1.424, |
|
"step": 957 |
|
}, |
|
{ |
|
"epoch": 9.69, |
|
"learning_rate": 6.94515306122449e-05, |
|
"loss": 1.4389, |
|
"step": 958 |
|
}, |
|
{ |
|
"epoch": 9.7, |
|
"learning_rate": 6.941964285714286e-05, |
|
"loss": 1.4984, |
|
"step": 959 |
|
}, |
|
{ |
|
"epoch": 9.71, |
|
"learning_rate": 6.938775510204082e-05, |
|
"loss": 1.5261, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 9.72, |
|
"learning_rate": 6.935586734693878e-05, |
|
"loss": 1.5071, |
|
"step": 961 |
|
}, |
|
{ |
|
"epoch": 9.73, |
|
"learning_rate": 6.932397959183674e-05, |
|
"loss": 1.5013, |
|
"step": 962 |
|
}, |
|
{ |
|
"epoch": 9.74, |
|
"learning_rate": 6.92920918367347e-05, |
|
"loss": 1.6516, |
|
"step": 963 |
|
}, |
|
{ |
|
"epoch": 9.75, |
|
"learning_rate": 6.926020408163265e-05, |
|
"loss": 1.5105, |
|
"step": 964 |
|
}, |
|
{ |
|
"epoch": 9.76, |
|
"learning_rate": 6.922831632653061e-05, |
|
"loss": 1.48, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 9.77, |
|
"learning_rate": 6.919642857142857e-05, |
|
"loss": 1.5517, |
|
"step": 966 |
|
}, |
|
{ |
|
"epoch": 9.78, |
|
"learning_rate": 6.916454081632653e-05, |
|
"loss": 1.364, |
|
"step": 967 |
|
}, |
|
{ |
|
"epoch": 9.79, |
|
"learning_rate": 6.91326530612245e-05, |
|
"loss": 1.5156, |
|
"step": 968 |
|
}, |
|
{ |
|
"epoch": 9.8, |
|
"learning_rate": 6.910076530612246e-05, |
|
"loss": 1.5559, |
|
"step": 969 |
|
}, |
|
{ |
|
"epoch": 9.81, |
|
"learning_rate": 6.906887755102041e-05, |
|
"loss": 1.4075, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 9.82, |
|
"learning_rate": 6.903698979591837e-05, |
|
"loss": 1.5884, |
|
"step": 971 |
|
}, |
|
{ |
|
"epoch": 9.83, |
|
"learning_rate": 6.900510204081633e-05, |
|
"loss": 1.554, |
|
"step": 972 |
|
}, |
|
{ |
|
"epoch": 9.84, |
|
"learning_rate": 6.89732142857143e-05, |
|
"loss": 1.6034, |
|
"step": 973 |
|
}, |
|
{ |
|
"epoch": 9.85, |
|
"learning_rate": 6.894132653061224e-05, |
|
"loss": 1.4644, |
|
"step": 974 |
|
}, |
|
{ |
|
"epoch": 9.86, |
|
"learning_rate": 6.89094387755102e-05, |
|
"loss": 1.5907, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 9.87, |
|
"learning_rate": 6.887755102040817e-05, |
|
"loss": 1.4513, |
|
"step": 976 |
|
}, |
|
{ |
|
"epoch": 9.88, |
|
"learning_rate": 6.884566326530613e-05, |
|
"loss": 1.6045, |
|
"step": 977 |
|
}, |
|
{ |
|
"epoch": 9.89, |
|
"learning_rate": 6.881377551020408e-05, |
|
"loss": 1.5816, |
|
"step": 978 |
|
}, |
|
{ |
|
"epoch": 9.9, |
|
"learning_rate": 6.878188775510205e-05, |
|
"loss": 1.5426, |
|
"step": 979 |
|
}, |
|
{ |
|
"epoch": 9.91, |
|
"learning_rate": 6.875e-05, |
|
"loss": 1.4962, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 9.92, |
|
"learning_rate": 6.871811224489796e-05, |
|
"loss": 1.5634, |
|
"step": 981 |
|
}, |
|
{ |
|
"epoch": 9.93, |
|
"learning_rate": 6.868622448979592e-05, |
|
"loss": 1.4918, |
|
"step": 982 |
|
}, |
|
{ |
|
"epoch": 9.94, |
|
"learning_rate": 6.865433673469388e-05, |
|
"loss": 1.5547, |
|
"step": 983 |
|
}, |
|
{ |
|
"epoch": 9.96, |
|
"learning_rate": 6.862244897959184e-05, |
|
"loss": 1.6126, |
|
"step": 984 |
|
}, |
|
{ |
|
"epoch": 9.97, |
|
"learning_rate": 6.85905612244898e-05, |
|
"loss": 1.4723, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 9.98, |
|
"learning_rate": 6.855867346938776e-05, |
|
"loss": 1.5965, |
|
"step": 986 |
|
}, |
|
{ |
|
"epoch": 9.99, |
|
"learning_rate": 6.852678571428571e-05, |
|
"loss": 1.5439, |
|
"step": 987 |
|
}, |
|
{ |
|
"epoch": 10.0, |
|
"learning_rate": 6.849489795918368e-05, |
|
"loss": 1.5178, |
|
"step": 988 |
|
}, |
|
{ |
|
"epoch": 10.01, |
|
"learning_rate": 6.846301020408164e-05, |
|
"loss": 1.4874, |
|
"step": 989 |
|
}, |
|
{ |
|
"epoch": 10.02, |
|
"learning_rate": 6.84311224489796e-05, |
|
"loss": 1.5047, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 10.03, |
|
"learning_rate": 6.839923469387755e-05, |
|
"loss": 1.4825, |
|
"step": 991 |
|
}, |
|
{ |
|
"epoch": 10.04, |
|
"learning_rate": 6.836734693877551e-05, |
|
"loss": 1.4454, |
|
"step": 992 |
|
}, |
|
{ |
|
"epoch": 10.05, |
|
"learning_rate": 6.833545918367348e-05, |
|
"loss": 1.5514, |
|
"step": 993 |
|
}, |
|
{ |
|
"epoch": 10.06, |
|
"learning_rate": 6.830357142857144e-05, |
|
"loss": 1.5191, |
|
"step": 994 |
|
}, |
|
{ |
|
"epoch": 10.07, |
|
"learning_rate": 6.827168367346938e-05, |
|
"loss": 1.5448, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 10.08, |
|
"learning_rate": 6.823979591836735e-05, |
|
"loss": 1.5009, |
|
"step": 996 |
|
}, |
|
{ |
|
"epoch": 10.09, |
|
"learning_rate": 6.820790816326531e-05, |
|
"loss": 1.5012, |
|
"step": 997 |
|
}, |
|
{ |
|
"epoch": 10.1, |
|
"learning_rate": 6.817602040816327e-05, |
|
"loss": 1.4249, |
|
"step": 998 |
|
}, |
|
{ |
|
"epoch": 10.11, |
|
"learning_rate": 6.814413265306123e-05, |
|
"loss": 1.5253, |
|
"step": 999 |
|
}, |
|
{ |
|
"epoch": 10.12, |
|
"learning_rate": 6.811224489795919e-05, |
|
"loss": 1.5544, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 10.13, |
|
"learning_rate": 6.808035714285714e-05, |
|
"loss": 1.4549, |
|
"step": 1001 |
|
}, |
|
{ |
|
"epoch": 10.14, |
|
"learning_rate": 6.804846938775511e-05, |
|
"loss": 1.4832, |
|
"step": 1002 |
|
}, |
|
{ |
|
"epoch": 10.15, |
|
"learning_rate": 6.801658163265306e-05, |
|
"loss": 1.5244, |
|
"step": 1003 |
|
}, |
|
{ |
|
"epoch": 10.16, |
|
"learning_rate": 6.798469387755102e-05, |
|
"loss": 1.4185, |
|
"step": 1004 |
|
}, |
|
{ |
|
"epoch": 10.17, |
|
"learning_rate": 6.795280612244899e-05, |
|
"loss": 1.4247, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 10.18, |
|
"learning_rate": 6.792091836734695e-05, |
|
"loss": 1.5257, |
|
"step": 1006 |
|
}, |
|
{ |
|
"epoch": 10.19, |
|
"learning_rate": 6.78890306122449e-05, |
|
"loss": 1.4686, |
|
"step": 1007 |
|
}, |
|
{ |
|
"epoch": 10.2, |
|
"learning_rate": 6.785714285714286e-05, |
|
"loss": 1.4543, |
|
"step": 1008 |
|
}, |
|
{ |
|
"epoch": 10.21, |
|
"learning_rate": 6.782525510204082e-05, |
|
"loss": 1.4694, |
|
"step": 1009 |
|
}, |
|
{ |
|
"epoch": 10.22, |
|
"learning_rate": 6.779336734693878e-05, |
|
"loss": 1.4686, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 10.23, |
|
"learning_rate": 6.776147959183673e-05, |
|
"loss": 1.4029, |
|
"step": 1011 |
|
}, |
|
{ |
|
"epoch": 10.24, |
|
"learning_rate": 6.772959183673469e-05, |
|
"loss": 1.4833, |
|
"step": 1012 |
|
}, |
|
{ |
|
"epoch": 10.25, |
|
"learning_rate": 6.769770408163265e-05, |
|
"loss": 1.3575, |
|
"step": 1013 |
|
}, |
|
{ |
|
"epoch": 10.26, |
|
"learning_rate": 6.766581632653062e-05, |
|
"loss": 1.5344, |
|
"step": 1014 |
|
}, |
|
{ |
|
"epoch": 10.27, |
|
"learning_rate": 6.763392857142858e-05, |
|
"loss": 1.3947, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 10.28, |
|
"learning_rate": 6.760204081632652e-05, |
|
"loss": 1.5571, |
|
"step": 1016 |
|
}, |
|
{ |
|
"epoch": 10.29, |
|
"learning_rate": 6.75701530612245e-05, |
|
"loss": 1.3519, |
|
"step": 1017 |
|
}, |
|
{ |
|
"epoch": 10.3, |
|
"learning_rate": 6.753826530612245e-05, |
|
"loss": 1.4303, |
|
"step": 1018 |
|
}, |
|
{ |
|
"epoch": 10.31, |
|
"learning_rate": 6.750637755102042e-05, |
|
"loss": 1.5916, |
|
"step": 1019 |
|
}, |
|
{ |
|
"epoch": 10.32, |
|
"learning_rate": 6.747448979591837e-05, |
|
"loss": 1.4956, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 10.33, |
|
"learning_rate": 6.744260204081632e-05, |
|
"loss": 1.3845, |
|
"step": 1021 |
|
}, |
|
{ |
|
"epoch": 10.34, |
|
"learning_rate": 6.74107142857143e-05, |
|
"loss": 1.5454, |
|
"step": 1022 |
|
}, |
|
{ |
|
"epoch": 10.35, |
|
"learning_rate": 6.737882653061225e-05, |
|
"loss": 1.4286, |
|
"step": 1023 |
|
}, |
|
{ |
|
"epoch": 10.36, |
|
"learning_rate": 6.73469387755102e-05, |
|
"loss": 1.4626, |
|
"step": 1024 |
|
}, |
|
{ |
|
"epoch": 10.37, |
|
"learning_rate": 6.731505102040817e-05, |
|
"loss": 1.5129, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 10.38, |
|
"learning_rate": 6.728316326530613e-05, |
|
"loss": 1.4496, |
|
"step": 1026 |
|
}, |
|
{ |
|
"epoch": 10.39, |
|
"learning_rate": 6.725127551020408e-05, |
|
"loss": 1.5002, |
|
"step": 1027 |
|
}, |
|
{ |
|
"epoch": 10.4, |
|
"learning_rate": 6.721938775510204e-05, |
|
"loss": 1.4472, |
|
"step": 1028 |
|
}, |
|
{ |
|
"epoch": 10.41, |
|
"learning_rate": 6.71875e-05, |
|
"loss": 1.5916, |
|
"step": 1029 |
|
}, |
|
{ |
|
"epoch": 10.42, |
|
"learning_rate": 6.715561224489796e-05, |
|
"loss": 1.4294, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 10.43, |
|
"learning_rate": 6.712372448979593e-05, |
|
"loss": 1.5925, |
|
"step": 1031 |
|
}, |
|
{ |
|
"epoch": 10.44, |
|
"learning_rate": 6.709183673469389e-05, |
|
"loss": 1.3663, |
|
"step": 1032 |
|
}, |
|
{ |
|
"epoch": 10.45, |
|
"learning_rate": 6.705994897959183e-05, |
|
"loss": 1.4723, |
|
"step": 1033 |
|
}, |
|
{ |
|
"epoch": 10.46, |
|
"learning_rate": 6.70280612244898e-05, |
|
"loss": 1.5188, |
|
"step": 1034 |
|
}, |
|
{ |
|
"epoch": 10.47, |
|
"learning_rate": 6.699617346938776e-05, |
|
"loss": 1.5211, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 10.48, |
|
"learning_rate": 6.696428571428572e-05, |
|
"loss": 1.493, |
|
"step": 1036 |
|
}, |
|
{ |
|
"epoch": 10.49, |
|
"learning_rate": 6.693239795918368e-05, |
|
"loss": 1.5321, |
|
"step": 1037 |
|
}, |
|
{ |
|
"epoch": 10.5, |
|
"learning_rate": 6.690051020408163e-05, |
|
"loss": 1.5492, |
|
"step": 1038 |
|
}, |
|
{ |
|
"epoch": 10.51, |
|
"learning_rate": 6.686862244897959e-05, |
|
"loss": 1.494, |
|
"step": 1039 |
|
}, |
|
{ |
|
"epoch": 10.52, |
|
"learning_rate": 6.683673469387756e-05, |
|
"loss": 1.534, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 10.53, |
|
"learning_rate": 6.68048469387755e-05, |
|
"loss": 1.4434, |
|
"step": 1041 |
|
}, |
|
{ |
|
"epoch": 10.54, |
|
"learning_rate": 6.677295918367348e-05, |
|
"loss": 1.4481, |
|
"step": 1042 |
|
}, |
|
{ |
|
"epoch": 10.55, |
|
"learning_rate": 6.674107142857144e-05, |
|
"loss": 1.4684, |
|
"step": 1043 |
|
}, |
|
{ |
|
"epoch": 10.56, |
|
"learning_rate": 6.670918367346939e-05, |
|
"loss": 1.5014, |
|
"step": 1044 |
|
}, |
|
{ |
|
"epoch": 10.57, |
|
"learning_rate": 6.667729591836735e-05, |
|
"loss": 1.5881, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 10.58, |
|
"learning_rate": 6.664540816326531e-05, |
|
"loss": 1.5053, |
|
"step": 1046 |
|
}, |
|
{ |
|
"epoch": 10.59, |
|
"learning_rate": 6.661352040816327e-05, |
|
"loss": 1.4924, |
|
"step": 1047 |
|
}, |
|
{ |
|
"epoch": 10.6, |
|
"learning_rate": 6.658163265306124e-05, |
|
"loss": 1.4137, |
|
"step": 1048 |
|
}, |
|
{ |
|
"epoch": 10.61, |
|
"learning_rate": 6.654974489795918e-05, |
|
"loss": 1.4442, |
|
"step": 1049 |
|
}, |
|
{ |
|
"epoch": 10.62, |
|
"learning_rate": 6.651785714285714e-05, |
|
"loss": 1.4903, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 10.63, |
|
"learning_rate": 6.648596938775511e-05, |
|
"loss": 1.5714, |
|
"step": 1051 |
|
}, |
|
{ |
|
"epoch": 10.64, |
|
"learning_rate": 6.645408163265307e-05, |
|
"loss": 1.4596, |
|
"step": 1052 |
|
}, |
|
{ |
|
"epoch": 10.65, |
|
"learning_rate": 6.642219387755103e-05, |
|
"loss": 1.482, |
|
"step": 1053 |
|
}, |
|
{ |
|
"epoch": 10.66, |
|
"learning_rate": 6.639030612244898e-05, |
|
"loss": 1.5267, |
|
"step": 1054 |
|
}, |
|
{ |
|
"epoch": 10.67, |
|
"learning_rate": 6.635841836734694e-05, |
|
"loss": 1.4587, |
|
"step": 1055 |
|
}, |
|
{ |
|
"epoch": 10.68, |
|
"learning_rate": 6.63265306122449e-05, |
|
"loss": 1.5019, |
|
"step": 1056 |
|
}, |
|
{ |
|
"epoch": 10.69, |
|
"learning_rate": 6.629464285714286e-05, |
|
"loss": 1.4034, |
|
"step": 1057 |
|
}, |
|
{ |
|
"epoch": 10.7, |
|
"learning_rate": 6.626275510204081e-05, |
|
"loss": 1.4521, |
|
"step": 1058 |
|
}, |
|
{ |
|
"epoch": 10.71, |
|
"learning_rate": 6.623086734693877e-05, |
|
"loss": 1.5502, |
|
"step": 1059 |
|
}, |
|
{ |
|
"epoch": 10.72, |
|
"learning_rate": 6.619897959183674e-05, |
|
"loss": 1.4779, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 10.73, |
|
"learning_rate": 6.61670918367347e-05, |
|
"loss": 1.4686, |
|
"step": 1061 |
|
}, |
|
{ |
|
"epoch": 10.74, |
|
"learning_rate": 6.613520408163265e-05, |
|
"loss": 1.4792, |
|
"step": 1062 |
|
}, |
|
{ |
|
"epoch": 10.75, |
|
"learning_rate": 6.610331632653062e-05, |
|
"loss": 1.4339, |
|
"step": 1063 |
|
}, |
|
{ |
|
"epoch": 10.76, |
|
"learning_rate": 6.607142857142857e-05, |
|
"loss": 1.5343, |
|
"step": 1064 |
|
}, |
|
{ |
|
"epoch": 10.77, |
|
"learning_rate": 6.603954081632653e-05, |
|
"loss": 1.4648, |
|
"step": 1065 |
|
}, |
|
{ |
|
"epoch": 10.78, |
|
"learning_rate": 6.600765306122449e-05, |
|
"loss": 1.5308, |
|
"step": 1066 |
|
}, |
|
{ |
|
"epoch": 10.79, |
|
"learning_rate": 6.597576530612245e-05, |
|
"loss": 1.5239, |
|
"step": 1067 |
|
}, |
|
{ |
|
"epoch": 10.8, |
|
"learning_rate": 6.594387755102042e-05, |
|
"loss": 1.4896, |
|
"step": 1068 |
|
}, |
|
{ |
|
"epoch": 10.82, |
|
"learning_rate": 6.591198979591838e-05, |
|
"loss": 1.5079, |
|
"step": 1069 |
|
}, |
|
{ |
|
"epoch": 10.83, |
|
"learning_rate": 6.588010204081632e-05, |
|
"loss": 1.4364, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 10.84, |
|
"learning_rate": 6.584821428571429e-05, |
|
"loss": 1.5552, |
|
"step": 1071 |
|
}, |
|
{ |
|
"epoch": 10.85, |
|
"learning_rate": 6.581632653061225e-05, |
|
"loss": 1.4662, |
|
"step": 1072 |
|
}, |
|
{ |
|
"epoch": 10.86, |
|
"learning_rate": 6.578443877551021e-05, |
|
"loss": 1.5869, |
|
"step": 1073 |
|
}, |
|
{ |
|
"epoch": 10.87, |
|
"learning_rate": 6.575255102040817e-05, |
|
"loss": 1.3986, |
|
"step": 1074 |
|
}, |
|
{ |
|
"epoch": 10.88, |
|
"learning_rate": 6.572066326530612e-05, |
|
"loss": 1.486, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 10.89, |
|
"learning_rate": 6.568877551020408e-05, |
|
"loss": 1.4775, |
|
"step": 1076 |
|
}, |
|
{ |
|
"epoch": 10.9, |
|
"learning_rate": 6.565688775510205e-05, |
|
"loss": 1.5488, |
|
"step": 1077 |
|
}, |
|
{ |
|
"epoch": 10.91, |
|
"learning_rate": 6.562500000000001e-05, |
|
"loss": 1.4099, |
|
"step": 1078 |
|
}, |
|
{ |
|
"epoch": 10.92, |
|
"learning_rate": 6.559311224489795e-05, |
|
"loss": 1.4595, |
|
"step": 1079 |
|
}, |
|
{ |
|
"epoch": 10.93, |
|
"learning_rate": 6.556122448979592e-05, |
|
"loss": 1.4397, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 10.94, |
|
"learning_rate": 6.552933673469388e-05, |
|
"loss": 1.5166, |
|
"step": 1081 |
|
}, |
|
{ |
|
"epoch": 10.95, |
|
"learning_rate": 6.549744897959184e-05, |
|
"loss": 1.4361, |
|
"step": 1082 |
|
}, |
|
{ |
|
"epoch": 10.96, |
|
"learning_rate": 6.54655612244898e-05, |
|
"loss": 1.4449, |
|
"step": 1083 |
|
}, |
|
{ |
|
"epoch": 10.97, |
|
"learning_rate": 6.543367346938776e-05, |
|
"loss": 1.4578, |
|
"step": 1084 |
|
}, |
|
{ |
|
"epoch": 10.98, |
|
"learning_rate": 6.540178571428571e-05, |
|
"loss": 1.4669, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 10.99, |
|
"learning_rate": 6.536989795918368e-05, |
|
"loss": 1.5328, |
|
"step": 1086 |
|
}, |
|
{ |
|
"epoch": 11.0, |
|
"learning_rate": 6.533801020408163e-05, |
|
"loss": 1.4579, |
|
"step": 1087 |
|
}, |
|
{ |
|
"epoch": 11.01, |
|
"learning_rate": 6.530612244897959e-05, |
|
"loss": 1.5361, |
|
"step": 1088 |
|
}, |
|
{ |
|
"epoch": 11.02, |
|
"learning_rate": 6.527423469387756e-05, |
|
"loss": 1.5363, |
|
"step": 1089 |
|
}, |
|
{ |
|
"epoch": 11.03, |
|
"learning_rate": 6.524234693877552e-05, |
|
"loss": 1.4183, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 11.04, |
|
"learning_rate": 6.521045918367347e-05, |
|
"loss": 1.3972, |
|
"step": 1091 |
|
}, |
|
{ |
|
"epoch": 11.05, |
|
"learning_rate": 6.517857142857143e-05, |
|
"loss": 1.4249, |
|
"step": 1092 |
|
}, |
|
{ |
|
"epoch": 11.06, |
|
"learning_rate": 6.514668367346939e-05, |
|
"loss": 1.3283, |
|
"step": 1093 |
|
}, |
|
{ |
|
"epoch": 11.07, |
|
"learning_rate": 6.511479591836736e-05, |
|
"loss": 1.4536, |
|
"step": 1094 |
|
}, |
|
{ |
|
"epoch": 11.08, |
|
"learning_rate": 6.50829081632653e-05, |
|
"loss": 1.4411, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 11.09, |
|
"learning_rate": 6.505102040816326e-05, |
|
"loss": 1.4507, |
|
"step": 1096 |
|
}, |
|
{ |
|
"epoch": 11.1, |
|
"learning_rate": 6.501913265306123e-05, |
|
"loss": 1.4749, |
|
"step": 1097 |
|
}, |
|
{ |
|
"epoch": 11.11, |
|
"learning_rate": 6.498724489795919e-05, |
|
"loss": 1.5126, |
|
"step": 1098 |
|
}, |
|
{ |
|
"epoch": 11.12, |
|
"learning_rate": 6.495535714285715e-05, |
|
"loss": 1.6189, |
|
"step": 1099 |
|
}, |
|
{ |
|
"epoch": 11.13, |
|
"learning_rate": 6.49234693877551e-05, |
|
"loss": 1.3727, |
|
"step": 1100 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 3136, |
|
"num_train_epochs": 32, |
|
"save_steps": 100, |
|
"total_flos": 8.142360683945779e+17, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|