{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "global_step": 182,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01,
      "learning_rate": 3.3333333333333335e-05,
      "loss": 1.0555,
      "step": 1
    },
    {
      "epoch": 0.01,
      "learning_rate": 6.666666666666667e-05,
      "loss": 1.0519,
      "step": 2
    },
    {
      "epoch": 0.02,
      "learning_rate": 0.0001,
      "loss": 1.0504,
      "step": 3
    },
    {
      "epoch": 0.02,
      "learning_rate": 0.00013333333333333334,
      "loss": 1.0691,
      "step": 4
    },
    {
      "epoch": 0.03,
      "learning_rate": 0.0001666666666666667,
      "loss": 1.0723,
      "step": 5
    },
    {
      "epoch": 0.03,
      "learning_rate": 0.0002,
      "loss": 1.0722,
      "step": 6
    },
    {
      "epoch": 0.04,
      "learning_rate": 0.00019998406937250034,
      "loss": 1.0411,
      "step": 7
    },
    {
      "epoch": 0.04,
      "learning_rate": 0.0001999362825656992,
      "loss": 1.0547,
      "step": 8
    },
    {
      "epoch": 0.05,
      "learning_rate": 0.0001998566548050729,
      "loss": 1.0558,
      "step": 9
    },
    {
      "epoch": 0.05,
      "learning_rate": 0.00019974521146102537,
      "loss": 1.0537,
      "step": 10
    },
    {
      "epoch": 0.06,
      "learning_rate": 0.0001996019880408046,
      "loss": 1.054,
      "step": 11
    },
    {
      "epoch": 0.07,
      "learning_rate": 0.00019942703017718975,
      "loss": 1.0643,
      "step": 12
    },
    {
      "epoch": 0.07,
      "learning_rate": 0.00019922039361395185,
      "loss": 1.0553,
      "step": 13
    },
    {
      "epoch": 0.08,
      "learning_rate": 0.0001989821441880933,
      "loss": 1.0878,
      "step": 14
    },
    {
      "epoch": 0.08,
      "learning_rate": 0.00019871235780887113,
      "loss": 1.0588,
      "step": 15
    },
    {
      "epoch": 0.09,
      "learning_rate": 0.0001984111204336116,
      "loss": 1.0319,
      "step": 16
    },
    {
      "epoch": 0.09,
      "learning_rate": 0.00019807852804032305,
      "loss": 1.0609,
      "step": 17
    },
    {
      "epoch": 0.1,
      "learning_rate": 0.00019771468659711595,
      "loss": 1.0511,
      "step": 18
    },
    {
      "epoch": 0.1,
      "learning_rate": 0.00019731971202844036,
      "loss": 1.0569,
      "step": 19
    },
    {
      "epoch": 0.11,
      "learning_rate": 0.00019689373017815073,
      "loss": 1.0452,
      "step": 20
    },
    {
      "epoch": 0.12,
      "learning_rate": 0.00019643687676941068,
      "loss": 1.0627,
      "step": 21
    },
    {
      "epoch": 0.12,
      "learning_rate": 0.00019594929736144976,
      "loss": 1.0351,
      "step": 22
    },
    {
      "epoch": 0.13,
      "learning_rate": 0.0001954311473031864,
      "loss": 1.0515,
      "step": 23
    },
    {
      "epoch": 0.13,
      "learning_rate": 0.00019488259168373197,
      "loss": 1.0502,
      "step": 24
    },
    {
      "epoch": 0.14,
      "learning_rate": 0.00019430380527979123,
      "loss": 1.058,
      "step": 25
    },
    {
      "epoch": 0.14,
      "learning_rate": 0.0001936949724999762,
      "loss": 1.0678,
      "step": 26
    },
    {
      "epoch": 0.15,
      "learning_rate": 0.00019305628732605137,
      "loss": 1.0382,
      "step": 27
    },
    {
      "epoch": 0.15,
      "learning_rate": 0.0001923879532511287,
      "loss": 1.0493,
      "step": 28
    },
    {
      "epoch": 0.16,
      "learning_rate": 0.00019169018321483198,
      "loss": 1.0555,
      "step": 29
    },
    {
      "epoch": 0.16,
      "learning_rate": 0.00019096319953545185,
      "loss": 1.0413,
      "step": 30
    },
    {
      "epoch": 0.17,
      "learning_rate": 0.00019020723383911215,
      "loss": 1.0521,
      "step": 31
    },
    {
      "epoch": 0.18,
      "learning_rate": 0.00018942252698597113,
      "loss": 1.0523,
      "step": 32
    },
    {
      "epoch": 0.18,
      "learning_rate": 0.00018860932899348028,
      "loss": 1.0782,
      "step": 33
    },
    {
      "epoch": 0.19,
      "learning_rate": 0.00018776789895672558,
      "loss": 1.0492,
      "step": 34
    },
    {
      "epoch": 0.19,
      "learning_rate": 0.00018689850496587674,
      "loss": 1.0628,
      "step": 35
    },
    {
      "epoch": 0.2,
      "learning_rate": 0.00018600142402077006,
      "loss": 1.0818,
      "step": 36
    },
    {
      "epoch": 0.2,
      "learning_rate": 0.0001850769419426531,
      "loss": 1.0533,
      "step": 37
    },
    {
      "epoch": 0.21,
      "learning_rate": 0.00018412535328311814,
      "loss": 1.061,
      "step": 38
    },
    {
      "epoch": 0.21,
      "learning_rate": 0.00018314696123025454,
      "loss": 1.0414,
      "step": 39
    },
    {
      "epoch": 0.22,
      "learning_rate": 0.00018214207751204918,
      "loss": 1.0515,
      "step": 40
    },
    {
      "epoch": 0.23,
      "learning_rate": 0.0001811110222970659,
      "loss": 1.0519,
      "step": 41
    },
    {
      "epoch": 0.23,
      "learning_rate": 0.00018005412409243606,
      "loss": 1.0528,
      "step": 42
    },
    {
      "epoch": 0.24,
      "learning_rate": 0.0001789717196391916,
      "loss": 1.046,
      "step": 43
    },
    {
      "epoch": 0.24,
      "learning_rate": 0.00017786415380497553,
      "loss": 1.0489,
      "step": 44
    },
    {
      "epoch": 0.25,
      "learning_rate": 0.00017673177947416258,
      "loss": 1.0856,
      "step": 45
    },
    {
      "epoch": 0.25,
      "learning_rate": 0.00017557495743542585,
      "loss": 1.0508,
      "step": 46
    },
    {
      "epoch": 0.26,
      "learning_rate": 0.00017439405626678496,
      "loss": 1.0709,
      "step": 47
    },
    {
      "epoch": 0.26,
      "learning_rate": 0.00017318945221817255,
      "loss": 1.0621,
      "step": 48
    },
    {
      "epoch": 0.27,
      "learning_rate": 0.00017196152909155628,
      "loss": 1.074,
      "step": 49
    },
    {
      "epoch": 0.27,
      "learning_rate": 0.00017071067811865476,
      "loss": 1.0546,
      "step": 50
    },
    {
      "epoch": 0.28,
      "learning_rate": 0.00016943729783628608,
      "loss": 1.0616,
      "step": 51
    },
    {
      "epoch": 0.29,
      "learning_rate": 0.00016814179395938913,
      "loss": 1.0568,
      "step": 52
    },
    {
      "epoch": 0.29,
      "learning_rate": 0.00016682457925175763,
      "loss": 1.0421,
      "step": 53
    },
    {
      "epoch": 0.3,
      "learning_rate": 0.00016548607339452853,
      "loss": 1.0618,
      "step": 54
    },
    {
      "epoch": 0.3,
      "learning_rate": 0.0001641267028524661,
      "loss": 1.0637,
      "step": 55
    },
    {
      "epoch": 0.31,
      "learning_rate": 0.0001627469007380852,
      "loss": 1.023,
      "step": 56
    },
    {
      "epoch": 0.31,
      "learning_rate": 0.00016134710667365596,
      "loss": 1.0577,
      "step": 57
    },
    {
      "epoch": 0.32,
      "learning_rate": 0.0001599277666511347,
      "loss": 1.0542,
      "step": 58
    },
    {
      "epoch": 0.32,
      "learning_rate": 0.0001584893328900653,
      "loss": 1.0607,
      "step": 59
    },
    {
      "epoch": 0.33,
      "learning_rate": 0.0001570322636934964,
      "loss": 1.0664,
      "step": 60
    },
    {
      "epoch": 0.34,
      "learning_rate": 0.00015555702330196023,
      "loss": 1.0557,
      "step": 61
    },
    {
      "epoch": 0.34,
      "learning_rate": 0.00015406408174555976,
      "loss": 1.0704,
      "step": 62
    },
    {
      "epoch": 0.35,
      "learning_rate": 0.00015255391469421128,
      "loss": 1.0444,
      "step": 63
    },
    {
      "epoch": 0.35,
      "learning_rate": 0.00015102700330609,
      "loss": 1.0549,
      "step": 64
    },
    {
      "epoch": 0.36,
      "learning_rate": 0.00014948383407432678,
      "loss": 1.0403,
      "step": 65
    },
    {
      "epoch": 0.36,
      "learning_rate": 0.0001479248986720057,
      "loss": 1.0352,
      "step": 66
    },
    {
      "epoch": 0.37,
      "learning_rate": 0.00014635069379551055,
      "loss": 1.0534,
      "step": 67
    },
    {
      "epoch": 0.37,
      "learning_rate": 0.00014476172100627127,
      "loss": 1.0444,
      "step": 68
    },
    {
      "epoch": 0.38,
      "learning_rate": 0.00014315848657096004,
      "loss": 1.0637,
      "step": 69
    },
    {
      "epoch": 0.38,
      "learning_rate": 0.00014154150130018866,
      "loss": 1.0581,
      "step": 70
    },
    {
      "epoch": 0.39,
      "learning_rate": 0.00013991128038575741,
      "loss": 1.0488,
      "step": 71
    },
    {
      "epoch": 0.4,
      "learning_rate": 0.000138268343236509,
      "loss": 1.076,
      "step": 72
    },
    {
      "epoch": 0.4,
      "learning_rate": 0.00013661321331283796,
      "loss": 1.0295,
      "step": 73
    },
    {
      "epoch": 0.41,
      "learning_rate": 0.00013494641795990986,
      "loss": 1.0569,
      "step": 74
    },
    {
      "epoch": 0.41,
      "learning_rate": 0.00013326848823964243,
      "loss": 1.0782,
      "step": 75
    },
    {
      "epoch": 0.42,
      "learning_rate": 0.0001315799587615025,
      "loss": 1.07,
      "step": 76
    },
    {
      "epoch": 0.42,
      "learning_rate": 0.00012988136751217291,
      "loss": 1.0647,
      "step": 77
    },
    {
      "epoch": 0.43,
      "learning_rate": 0.00012817325568414297,
      "loss": 1.0527,
      "step": 78
    },
    {
      "epoch": 0.43,
      "learning_rate": 0.0001264561675032779,
      "loss": 1.052,
      "step": 79
    },
    {
      "epoch": 0.44,
      "learning_rate": 0.00012473065005542155,
      "loss": 1.0569,
      "step": 80
    },
    {
      "epoch": 0.45,
      "learning_rate": 0.00012299725311208808,
      "loss": 1.0412,
      "step": 81
    },
    {
      "epoch": 0.45,
      "learning_rate": 0.00012125652895529766,
      "loss": 1.0668,
      "step": 82
    },
    {
      "epoch": 0.46,
      "learning_rate": 0.00011950903220161285,
      "loss": 1.0387,
      "step": 83
    },
    {
      "epoch": 0.46,
      "learning_rate": 0.00011775531962543036,
      "loss": 1.0593,
      "step": 84
    },
    {
      "epoch": 0.47,
      "learning_rate": 0.00011599594998158602,
      "loss": 1.0429,
      "step": 85
    },
    {
      "epoch": 0.47,
      "learning_rate": 0.00011423148382732853,
      "loss": 1.0486,
      "step": 86
    },
    {
      "epoch": 0.48,
      "learning_rate": 0.0001124624833437186,
      "loss": 1.0505,
      "step": 87
    },
    {
      "epoch": 0.48,
      "learning_rate": 0.00011068951215651132,
      "loss": 1.0576,
      "step": 88
    },
    {
      "epoch": 0.49,
      "learning_rate": 0.0001089131351565776,
      "loss": 1.0575,
      "step": 89
    },
    {
      "epoch": 0.49,
      "learning_rate": 0.00010713391831992323,
      "loss": 1.054,
      "step": 90
    },
    {
      "epoch": 0.5,
      "learning_rate": 0.00010535242852736151,
      "loss": 1.0454,
      "step": 91
    },
    {
      "epoch": 0.51,
      "learning_rate": 0.00010356923338389806,
      "loss": 1.043,
      "step": 92
    },
    {
      "epoch": 0.51,
      "learning_rate": 0.0001017849010378846,
      "loss": 1.0382,
      "step": 93
    },
    {
      "epoch": 0.52,
      "learning_rate": 0.0001,
      "loss": 1.0645,
      "step": 94
    },
    {
      "epoch": 0.52,
      "learning_rate": 9.821509896211539e-05,
      "loss": 1.0499,
      "step": 95
    },
    {
      "epoch": 0.53,
      "learning_rate": 9.643076661610196e-05,
      "loss": 1.0205,
      "step": 96
    },
    {
      "epoch": 0.53,
      "learning_rate": 9.464757147263849e-05,
      "loss": 1.0618,
      "step": 97
    },
    {
      "epoch": 0.54,
      "learning_rate": 9.286608168007678e-05,
      "loss": 1.0752,
      "step": 98
    },
    {
      "epoch": 0.54,
      "learning_rate": 9.108686484342241e-05,
      "loss": 1.0555,
      "step": 99
    },
    {
      "epoch": 0.55,
      "learning_rate": 8.931048784348875e-05,
      "loss": 1.0637,
      "step": 100
    },
    {
      "epoch": 0.55,
      "learning_rate": 8.753751665628141e-05,
      "loss": 1.0502,
      "step": 101
    },
    {
      "epoch": 0.56,
      "learning_rate": 8.57685161726715e-05,
      "loss": 1.0569,
      "step": 102
    },
    {
      "epoch": 0.57,
      "learning_rate": 8.400405001841399e-05,
      "loss": 1.0415,
      "step": 103
    },
    {
      "epoch": 0.57,
      "learning_rate": 8.224468037456969e-05,
      "loss": 1.0497,
      "step": 104
    },
    {
      "epoch": 0.58,
      "learning_rate": 8.049096779838719e-05,
      "loss": 1.0516,
      "step": 105
    },
    {
      "epoch": 0.58,
      "learning_rate": 7.874347104470234e-05,
      "loss": 1.0502,
      "step": 106
    },
    {
      "epoch": 0.59,
      "learning_rate": 7.700274688791196e-05,
      "loss": 1.0394,
      "step": 107
    },
    {
      "epoch": 0.59,
      "learning_rate": 7.526934994457844e-05,
      "loss": 1.0585,
      "step": 108
    },
    {
      "epoch": 0.6,
      "learning_rate": 7.354383249672212e-05,
      "loss": 1.0485,
      "step": 109
    },
    {
      "epoch": 0.6,
      "learning_rate": 7.182674431585704e-05,
      "loss": 1.0539,
      "step": 110
    },
    {
      "epoch": 0.61,
      "learning_rate": 7.011863248782711e-05,
      "loss": 1.0591,
      "step": 111
    },
    {
      "epoch": 0.62,
      "learning_rate": 6.842004123849752e-05,
      "loss": 1.0528,
      "step": 112
    },
    {
      "epoch": 0.62,
      "learning_rate": 6.673151176035762e-05,
      "loss": 1.0342,
      "step": 113
    },
    {
      "epoch": 0.63,
      "learning_rate": 6.505358204009017e-05,
      "loss": 1.0435,
      "step": 114
    },
    {
      "epoch": 0.63,
      "learning_rate": 6.338678668716209e-05,
      "loss": 1.0469,
      "step": 115
    },
    {
      "epoch": 0.64,
      "learning_rate": 6.173165676349103e-05,
      "loss": 1.0297,
      "step": 116
    },
    {
      "epoch": 0.64,
      "learning_rate": 6.008871961424258e-05,
      "loss": 1.0337,
      "step": 117
    },
    {
      "epoch": 0.65,
      "learning_rate": 5.845849869981137e-05,
      "loss": 1.0793,
      "step": 118
    },
    {
      "epoch": 0.65,
      "learning_rate": 5.684151342903992e-05,
      "loss": 1.0456,
      "step": 119
    },
    {
      "epoch": 0.66,
      "learning_rate": 5.5238278993728756e-05,
      "loss": 1.0519,
      "step": 120
    },
    {
      "epoch": 0.66,
      "learning_rate": 5.364930620448946e-05,
      "loss": 1.0143,
      "step": 121
    },
    {
      "epoch": 0.67,
      "learning_rate": 5.207510132799436e-05,
      "loss": 1.0377,
      "step": 122
    },
    {
      "epoch": 0.68,
      "learning_rate": 5.051616592567323e-05,
      "loss": 1.044,
      "step": 123
    },
    {
      "epoch": 0.68,
      "learning_rate": 4.8972996693910054e-05,
      "loss": 1.0583,
      "step": 124
    },
    {
      "epoch": 0.69,
      "learning_rate": 4.7446085305788725e-05,
      "loss": 1.0382,
      "step": 125
    },
    {
      "epoch": 0.69,
      "learning_rate": 4.593591825444028e-05,
      "loss": 1.0654,
      "step": 126
    },
    {
      "epoch": 0.7,
      "learning_rate": 4.444297669803981e-05,
      "loss": 1.0555,
      "step": 127
    },
    {
      "epoch": 0.7,
      "learning_rate": 4.296773630650358e-05,
      "loss": 1.0428,
      "step": 128
    },
    {
      "epoch": 0.71,
      "learning_rate": 4.15106671099347e-05,
      "loss": 1.0236,
      "step": 129
    },
    {
      "epoch": 0.71,
      "learning_rate": 4.007223334886531e-05,
      "loss": 1.0258,
      "step": 130
    },
    {
      "epoch": 0.72,
      "learning_rate": 3.865289332634407e-05,
      "loss": 1.0655,
      "step": 131
    },
    {
      "epoch": 0.73,
      "learning_rate": 3.725309926191479e-05,
      "loss": 1.0677,
      "step": 132
    },
    {
      "epoch": 0.73,
      "learning_rate": 3.5873297147533915e-05,
      "loss": 1.0256,
      "step": 133
    },
    {
      "epoch": 0.74,
      "learning_rate": 3.45139266054715e-05,
      "loss": 1.0398,
      "step": 134
    },
    {
      "epoch": 0.74,
      "learning_rate": 3.3175420748242406e-05,
      "loss": 1.0504,
      "step": 135
    },
    {
      "epoch": 0.75,
      "learning_rate": 3.185820604061088e-05,
      "loss": 1.0413,
      "step": 136
    },
    {
      "epoch": 0.75,
      "learning_rate": 3.056270216371395e-05,
      "loss": 1.0678,
      "step": 137
    },
    {
      "epoch": 0.76,
      "learning_rate": 2.9289321881345254e-05,
      "loss": 1.0241,
      "step": 138
    },
    {
      "epoch": 0.76,
      "learning_rate": 2.8038470908443714e-05,
      "loss": 1.0376,
      "step": 139
    },
    {
      "epoch": 0.77,
      "learning_rate": 2.681054778182748e-05,
      "loss": 1.0484,
      "step": 140
    },
    {
      "epoch": 0.77,
      "learning_rate": 2.5605943733215042e-05,
      "loss": 1.0663,
      "step": 141
    },
    {
      "epoch": 0.78,
      "learning_rate": 2.4425042564574184e-05,
      "loss": 1.0423,
      "step": 142
    },
    {
      "epoch": 0.79,
      "learning_rate": 2.3268220525837437e-05,
      "loss": 1.064,
      "step": 143
    },
    {
      "epoch": 0.79,
      "learning_rate": 2.2135846195024513e-05,
      "loss": 1.0541,
      "step": 144
    },
    {
      "epoch": 0.8,
      "learning_rate": 2.1028280360808407e-05,
      "loss": 1.0343,
      "step": 145
    },
    {
      "epoch": 0.8,
      "learning_rate": 1.994587590756397e-05,
      "loss": 1.0331,
      "step": 146
    },
    {
      "epoch": 0.81,
      "learning_rate": 1.8888977702934085e-05,
      "loss": 1.0457,
      "step": 147
    },
    {
      "epoch": 0.81,
      "learning_rate": 1.7857922487950874e-05,
      "loss": 1.07,
      "step": 148
    },
    {
      "epoch": 0.82,
      "learning_rate": 1.6853038769745467e-05,
      "loss": 1.0616,
      "step": 149
    },
    {
      "epoch": 0.82,
      "learning_rate": 1.587464671688187e-05,
      "loss": 1.0651,
      "step": 150
    },
    {
      "epoch": 0.83,
      "learning_rate": 1.4923058057346929e-05,
      "loss": 1.0497,
      "step": 151
    },
    {
      "epoch": 0.84,
      "learning_rate": 1.3998575979229944e-05,
      "loss": 1.0409,
      "step": 152
    },
    {
      "epoch": 0.84,
      "learning_rate": 1.3101495034123313e-05,
      "loss": 1.04,
      "step": 153
    },
    {
      "epoch": 0.85,
      "learning_rate": 1.2232101043274436e-05,
      "loss": 1.0541,
      "step": 154
    },
    {
      "epoch": 0.85,
      "learning_rate": 1.139067100651976e-05,
      "loss": 1.0453,
      "step": 155
    },
    {
      "epoch": 0.86,
      "learning_rate": 1.057747301402887e-05,
      "loss": 1.0602,
      "step": 156
    },
    {
      "epoch": 0.86,
      "learning_rate": 9.792766160887868e-06,
      "loss": 1.05,
      "step": 157
    },
    {
      "epoch": 0.87,
      "learning_rate": 9.036800464548157e-06,
      "loss": 1.0546,
      "step": 158
    },
    {
      "epoch": 0.87,
      "learning_rate": 8.309816785168034e-06,
      "loss": 1.0632,
      "step": 159
    },
    {
      "epoch": 0.88,
      "learning_rate": 7.612046748871327e-06,
      "loss": 1.0394,
      "step": 160
    },
    {
      "epoch": 0.88,
      "learning_rate": 6.943712673948644e-06,
      "loss": 1.0306,
      "step": 161
    },
    {
      "epoch": 0.89,
      "learning_rate": 6.3050275000238414e-06,
      "loss": 1.0287,
      "step": 162
    },
    {
      "epoch": 0.9,
      "learning_rate": 5.696194720208792e-06,
      "loss": 1.0399,
      "step": 163
    },
    {
      "epoch": 0.9,
      "learning_rate": 5.1174083162680465e-06,
      "loss": 1.0401,
      "step": 164
    },
    {
      "epoch": 0.91,
      "learning_rate": 4.568852696813619e-06,
      "loss": 1.0361,
      "step": 165
    },
    {
      "epoch": 0.91,
      "learning_rate": 4.050702638550275e-06,
      "loss": 1.0339,
      "step": 166
    },
    {
      "epoch": 0.92,
      "learning_rate": 3.5631232305893046e-06,
      "loss": 1.0353,
      "step": 167
    },
    {
      "epoch": 0.92,
      "learning_rate": 3.1062698218492724e-06,
      "loss": 1.0418,
      "step": 168
    },
    {
      "epoch": 0.93,
      "learning_rate": 2.6802879715596585e-06,
      "loss": 1.0463,
      "step": 169
    },
    {
      "epoch": 0.93,
      "learning_rate": 2.2853134028840594e-06,
      "loss": 1.0664,
      "step": 170
    },
    {
      "epoch": 0.94,
      "learning_rate": 1.921471959676957e-06,
      "loss": 1.055,
      "step": 171
    },
    {
      "epoch": 0.95,
      "learning_rate": 1.5888795663883904e-06,
      "loss": 1.033,
      "step": 172
    },
    {
      "epoch": 0.95,
      "learning_rate": 1.2876421911288905e-06,
      "loss": 1.0441,
      "step": 173
    },
    {
      "epoch": 0.96,
      "learning_rate": 1.0178558119067315e-06,
      "loss": 1.0464,
      "step": 174
    },
    {
      "epoch": 0.96,
      "learning_rate": 7.796063860481595e-07,
      "loss": 1.0477,
      "step": 175
    },
    {
      "epoch": 0.97,
      "learning_rate": 5.729698228102653e-07,
      "loss": 1.0405,
      "step": 176
    },
    {
      "epoch": 0.97,
      "learning_rate": 3.9801195919541014e-07,
      "loss": 1.0329,
      "step": 177
    },
    {
      "epoch": 0.98,
      "learning_rate": 2.547885389746485e-07,
      "loss": 1.0379,
      "step": 178
    },
    {
      "epoch": 0.98,
      "learning_rate": 1.4334519492711362e-07,
      "loss": 1.0373,
      "step": 179
    },
    {
      "epoch": 0.99,
      "learning_rate": 6.37174343008251e-08,
      "loss": 1.0709,
      "step": 180
    },
    {
      "epoch": 0.99,
      "learning_rate": 1.593062749967178e-08,
      "loss": 1.0425,
      "step": 181
    },
    {
      "epoch": 1.0,
      "learning_rate": 0.0,
      "loss": 0.9978,
      "step": 182
    },
    {
      "epoch": 1.0,
      "step": 182,
      "total_flos": 94360717230080.0,
      "train_loss": 1.0510600356610267,
      "train_runtime": 3384.5855,
      "train_samples_per_second": 6.866,
      "train_steps_per_second": 0.054
    }
  ],
  "max_steps": 182,
  "num_train_epochs": 1,
  "total_flos": 94360717230080.0,
  "trial_name": null,
  "trial_params": null
}