|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 0.99968, |
|
"global_step": 1562, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.255319148936171e-06, |
|
"loss": 1.7417, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 8.510638297872341e-06, |
|
"loss": 1.7839, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 1.2765957446808511e-05, |
|
"loss": 1.7869, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 1.7021276595744682e-05, |
|
"loss": 1.9512, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 2.1276595744680852e-05, |
|
"loss": 1.8318, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 2.5531914893617022e-05, |
|
"loss": 1.7205, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 2.9787234042553192e-05, |
|
"loss": 1.7183, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.4042553191489365e-05, |
|
"loss": 1.5933, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.829787234042553e-05, |
|
"loss": 1.5022, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.2553191489361704e-05, |
|
"loss": 1.51, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.680851063829788e-05, |
|
"loss": 1.5203, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 5.1063829787234044e-05, |
|
"loss": 1.4753, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 5.531914893617022e-05, |
|
"loss": 1.5745, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 5.9574468085106384e-05, |
|
"loss": 1.5435, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 6.382978723404256e-05, |
|
"loss": 1.4009, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 6.808510638297873e-05, |
|
"loss": 1.4255, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 7.23404255319149e-05, |
|
"loss": 1.3938, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 7.659574468085106e-05, |
|
"loss": 1.3752, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 8.085106382978723e-05, |
|
"loss": 1.4204, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 8.510638297872341e-05, |
|
"loss": 1.4148, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 8.936170212765958e-05, |
|
"loss": 1.4078, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 9.361702127659576e-05, |
|
"loss": 1.3379, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 9.787234042553192e-05, |
|
"loss": 1.3192, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 0.00010212765957446809, |
|
"loss": 1.3999, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 0.00010638297872340425, |
|
"loss": 1.3838, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 0.00011063829787234043, |
|
"loss": 1.3392, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 0.00011489361702127661, |
|
"loss": 1.2795, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 0.00011914893617021277, |
|
"loss": 1.3931, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 0.00012340425531914893, |
|
"loss": 1.3298, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 0.00012765957446808513, |
|
"loss": 1.3601, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 0.00013191489361702127, |
|
"loss": 1.2849, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 0.00013617021276595746, |
|
"loss": 1.3647, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 0.00014042553191489363, |
|
"loss": 1.3516, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 0.0001446808510638298, |
|
"loss": 1.3466, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 0.00014893617021276596, |
|
"loss": 1.2629, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 0.00015319148936170213, |
|
"loss": 1.3135, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 0.00015744680851063832, |
|
"loss": 1.3647, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 0.00016170212765957446, |
|
"loss": 1.2788, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 0.00016595744680851065, |
|
"loss": 1.3026, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 0.00017021276595744682, |
|
"loss": 1.3643, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 0.00017446808510638298, |
|
"loss": 1.3149, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 0.00017872340425531915, |
|
"loss": 1.3228, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 0.00018297872340425532, |
|
"loss": 1.266, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 0.0001872340425531915, |
|
"loss": 1.2443, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 0.00019148936170212768, |
|
"loss": 1.2927, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 0.00019574468085106384, |
|
"loss": 1.3774, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 0.0002, |
|
"loss": 1.309, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 0.00019999978499709475, |
|
"loss": 1.2808, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 0.00019999913998930348, |
|
"loss": 1.2878, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 0.00019999806497939982, |
|
"loss": 1.2429, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 0.00019999655997200635, |
|
"loss": 1.2528, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 0.00019999462497359466, |
|
"loss": 1.2949, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 0.00019999225999248538, |
|
"loss": 1.3453, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 0.00019998946503884809, |
|
"loss": 1.3411, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 0.0001999862401247012, |
|
"loss": 1.2302, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 0.00019998258526391207, |
|
"loss": 1.3079, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 0.0001999785004721968, |
|
"loss": 1.3201, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 0.0001999739857671202, |
|
"loss": 1.2678, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 0.00019996904116809585, |
|
"loss": 1.3325, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 0.00019996366669638573, |
|
"loss": 1.3556, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 0.0001999578623751004, |
|
"loss": 1.2568, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 0.00019995162822919883, |
|
"loss": 1.3364, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 0.00019994496428548812, |
|
"loss": 1.2809, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 0.0001999378705726237, |
|
"loss": 1.2058, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 0.00019993034712110887, |
|
"loss": 1.1538, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 0.00019992239396329498, |
|
"loss": 1.3418, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 0.00019991401113338104, |
|
"loss": 1.2815, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 0.00019990519866741371, |
|
"loss": 1.313, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 0.0001998959566032871, |
|
"loss": 1.3596, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 0.00019988628498074262, |
|
"loss": 1.2872, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 0.00019987618384136879, |
|
"loss": 1.2368, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 0.00019986565322860115, |
|
"loss": 1.2751, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 0.00019985469318772192, |
|
"loss": 1.2997, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 0.00019984330376585988, |
|
"loss": 1.3271, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 0.00019983148501199026, |
|
"loss": 1.2793, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 0.00019981923697693437, |
|
"loss": 1.2528, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 0.00019980655971335945, |
|
"loss": 1.3113, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 0.0001997934532757785, |
|
"loss": 1.3623, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 0.00019977991772054992, |
|
"loss": 1.3097, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 0.00019976595310587742, |
|
"loss": 1.3203, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 0.00019975155949180967, |
|
"loss": 1.2419, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 0.00019973673694024, |
|
"loss": 1.297, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 0.00019972148551490628, |
|
"loss": 1.3119, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 0.0001997058052813905, |
|
"loss": 1.2587, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 0.0001996896963071186, |
|
"loss": 1.2898, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 0.0001996731586613601, |
|
"loss": 1.2974, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 0.0001996561924152278, |
|
"loss": 1.3253, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 0.00019963879764167762, |
|
"loss": 1.2562, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 0.00019962097441550802, |
|
"loss": 1.3362, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 0.00019960272281335995, |
|
"loss": 1.2628, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 0.00019958404291371635, |
|
"loss": 1.2172, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 0.0001995649347969019, |
|
"loss": 1.295, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 0.00019954539854508254, |
|
"loss": 1.198, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 0.00019952543424226534, |
|
"loss": 1.2078, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 0.00019950504197429795, |
|
"loss": 1.2612, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 0.00019948422182886833, |
|
"loss": 1.2709, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 0.00019946297389550433, |
|
"loss": 1.2277, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 0.0001994412982655732, |
|
"loss": 1.296, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 0.00019941919503228153, |
|
"loss": 1.23, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 0.00019939666429067443, |
|
"loss": 1.3438, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 0.00019937370613763543, |
|
"loss": 1.3506, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 0.0001993503206718859, |
|
"loss": 1.333, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 0.00019932650799398474, |
|
"loss": 1.3064, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 0.00019930226820632778, |
|
"loss": 1.2507, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 0.00019927760141314756, |
|
"loss": 1.2402, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 0.00019925250772051276, |
|
"loss": 1.3029, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 0.00019922698723632767, |
|
"loss": 1.3087, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 0.00019920104007033185, |
|
"loss": 1.2673, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 0.00019917466633409965, |
|
"loss": 1.2455, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 0.0001991478661410396, |
|
"loss": 1.2919, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 0.0001991206396063942, |
|
"loss": 1.2332, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 0.00019909298684723904, |
|
"loss": 1.2898, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 0.00019906490798248265, |
|
"loss": 1.2184, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 0.00019903640313286572, |
|
"loss": 1.3071, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 0.00019900747242096082, |
|
"loss": 1.2631, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 0.00019897811597117168, |
|
"loss": 1.2346, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 0.00019894833390973266, |
|
"loss": 1.2219, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 0.00019891812636470846, |
|
"loss": 1.2968, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 0.00019888749346599321, |
|
"loss": 1.2018, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 0.00019885643534531022, |
|
"loss": 1.1738, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 0.00019882495213621116, |
|
"loss": 1.262, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 0.0001987930439740757, |
|
"loss": 1.2281, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 0.0001987607109961107, |
|
"loss": 1.2307, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 0.00019872795334134998, |
|
"loss": 1.2157, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 0.00019869477115065325, |
|
"loss": 1.162, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 0.0001986611645667059, |
|
"loss": 1.26, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 0.0001986271337340182, |
|
"loss": 1.3362, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 0.0001985926787989247, |
|
"loss": 1.2592, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 0.0001985577999095836, |
|
"loss": 1.2189, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 0.00019852249721597618, |
|
"loss": 1.1444, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 0.00019848677086990605, |
|
"loss": 1.2634, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 0.0001984506210249986, |
|
"loss": 1.3064, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 0.00019841404783670026, |
|
"loss": 1.2454, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 0.00019837705146227784, |
|
"loss": 1.2474, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 0.00019833963206081795, |
|
"loss": 1.1831, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 0.00019830178979322614, |
|
"loss": 1.2302, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 0.00019826352482222638, |
|
"loss": 1.2761, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 0.00019822483731236025, |
|
"loss": 1.2333, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 0.00019818572742998634, |
|
"loss": 1.2295, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 0.00019814619534327935, |
|
"loss": 1.1975, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 0.0001981062412222296, |
|
"loss": 1.2122, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 0.0001980658652386421, |
|
"loss": 1.235, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 0.00019802506756613596, |
|
"loss": 1.2147, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 0.0001979838483801435, |
|
"loss": 1.2522, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 0.00019794220785790967, |
|
"loss": 1.2301, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 0.00019790014617849106, |
|
"loss": 1.2216, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 0.00019785766352275542, |
|
"loss": 1.2888, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 0.00019781476007338058, |
|
"loss": 1.2786, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 0.00019777143601485387, |
|
"loss": 1.2614, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 0.00019772769153347127, |
|
"loss": 1.2789, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 0.00019768352681733662, |
|
"loss": 1.2466, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 0.00019763894205636072, |
|
"loss": 1.2343, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 0.00019759393744226065, |
|
"loss": 1.2729, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 0.00019754851316855886, |
|
"loss": 1.2662, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 0.00019750266943058237, |
|
"loss": 1.2278, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 0.00019745640642546196, |
|
"loss": 1.2419, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 0.00019740972435213115, |
|
"loss": 1.2341, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 0.00019736262341132563, |
|
"loss": 1.3319, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 0.00019731510380558218, |
|
"loss": 1.248, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 0.00019726716573923784, |
|
"loss": 1.1666, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 0.00019721880941842913, |
|
"loss": 1.3455, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 0.00019717003505109095, |
|
"loss": 1.3564, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 0.00019712084284695603, |
|
"loss": 1.1661, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 0.0001970712330175536, |
|
"loss": 1.2573, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 0.00019702120577620887, |
|
"loss": 1.2416, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 0.00019697076133804185, |
|
"loss": 1.1995, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 0.00019691989991996663, |
|
"loss": 1.2102, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 0.00019686862174069015, |
|
"loss": 1.2968, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 0.00019681692702071163, |
|
"loss": 1.3381, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 0.00019676481598232139, |
|
"loss": 1.248, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 0.00019671228884959987, |
|
"loss": 1.28, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 0.00019665934584841682, |
|
"loss": 1.1851, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 0.0001966059872064302, |
|
"loss": 1.1873, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 0.00019655221315308528, |
|
"loss": 1.2014, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 0.00019649802391961363, |
|
"loss": 1.2319, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 0.00019644341973903208, |
|
"loss": 1.2565, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 0.00019638840084614182, |
|
"loss": 1.212, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 0.00019633296747752723, |
|
"loss": 1.1847, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 0.00019627711987155503, |
|
"loss": 1.2024, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 0.00019622085826837324, |
|
"loss": 1.3152, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 0.00019616418290990993, |
|
"loss": 1.2759, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 0.00019610709403987246, |
|
"loss": 1.1858, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 0.00019604959190374632, |
|
"loss": 1.1573, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 0.000195991676748794, |
|
"loss": 1.3047, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 0.00019593334882405408, |
|
"loss": 1.214, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 0.00019587460838033996, |
|
"loss": 1.2463, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 0.000195815455670239, |
|
"loss": 1.1788, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 0.00019575589094811128, |
|
"loss": 1.1427, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 0.0001956959144700886, |
|
"loss": 1.2283, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 0.00019563552649407324, |
|
"loss": 1.2657, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 0.00019557472727973707, |
|
"loss": 1.2681, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 0.0001955135170885202, |
|
"loss": 1.2141, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 0.00019545189618363001, |
|
"loss": 1.2653, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 0.00019538986483004, |
|
"loss": 1.2444, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 0.00019532742329448854, |
|
"loss": 1.2416, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 0.00019526457184547793, |
|
"loss": 1.3125, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 0.00019520131075327298, |
|
"loss": 1.2513, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 0.00019513764028990016, |
|
"loss": 1.235, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 0.00019507356072914607, |
|
"loss": 1.238, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 0.0001950090723465566, |
|
"loss": 1.214, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 0.00019494417541943547, |
|
"loss": 1.2566, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 0.00019487887022684336, |
|
"loss": 1.2634, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 0.0001948131570495963, |
|
"loss": 1.2087, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 0.00019474703617026478, |
|
"loss": 1.3126, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 0.00019468050787317242, |
|
"loss": 1.2074, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 0.00019461357244439479, |
|
"loss": 1.2638, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 0.00019454623017175812, |
|
"loss": 1.2639, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 0.0001944784813448381, |
|
"loss": 1.2058, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 0.00019441032625495857, |
|
"loss": 1.2854, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 0.00019434176519519047, |
|
"loss": 1.2042, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 0.00019427279846035025, |
|
"loss": 1.2153, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 0.0001942034263469989, |
|
"loss": 1.2743, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 0.0001941336491534406, |
|
"loss": 1.2437, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 0.00019406346717972125, |
|
"loss": 1.1423, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 0.00019399288072762747, |
|
"loss": 1.332, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 0.00019392189010068508, |
|
"loss": 1.2505, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 0.00019385049560415794, |
|
"loss": 1.3014, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 0.00019377869754504644, |
|
"loss": 1.2224, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 0.00019370649623208651, |
|
"loss": 1.2295, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 0.00019363389197574798, |
|
"loss": 1.2991, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 0.0001935608850882333, |
|
"loss": 1.2339, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 0.00019348747588347637, |
|
"loss": 1.1903, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 0.00019341366467714104, |
|
"loss": 1.2069, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 0.00019333945178661978, |
|
"loss": 1.2621, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 0.00019326483753103236, |
|
"loss": 1.2079, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 0.00019318982223122437, |
|
"loss": 1.276, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 0.00019311440620976597, |
|
"loss": 1.2046, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 0.00019303858979095043, |
|
"loss": 1.3154, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 0.00019296237330079278, |
|
"loss": 1.3109, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 0.00019288575706702835, |
|
"loss": 1.2039, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 0.00019280874141911137, |
|
"loss": 1.2771, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 0.00019273132668821364, |
|
"loss": 1.2169, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 0.00019265351320722296, |
|
"loss": 1.2961, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 0.00019257530131074185, |
|
"loss": 1.228, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 0.000192496691335086, |
|
"loss": 1.2474, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 0.0001924176836182829, |
|
"loss": 1.2238, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 0.00019233827850007027, |
|
"loss": 1.2428, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 0.00019225847632189474, |
|
"loss": 1.2717, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 0.00019217827742691037, |
|
"loss": 1.2773, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 0.000192097682159977, |
|
"loss": 1.2352, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 0.00019201669086765902, |
|
"loss": 1.2473, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 0.00019193530389822363, |
|
"loss": 1.1792, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 0.0001918535216016396, |
|
"loss": 1.2341, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 0.0001917713443295755, |
|
"loss": 1.1545, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 0.0001916887724353984, |
|
"loss": 1.2286, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 0.00019160580627417223, |
|
"loss": 1.2582, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 0.0001915224462026563, |
|
"loss": 1.1648, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 0.00019143869257930377, |
|
"loss": 1.3004, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 0.0001913545457642601, |
|
"loss": 1.2527, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 0.00019127000611936148, |
|
"loss": 1.175, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 0.00019118507400813325, |
|
"loss": 1.2183, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 0.0001910997497957885, |
|
"loss": 1.2338, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 0.00019101403384922625, |
|
"loss": 1.2175, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 0.00019092792653703007, |
|
"loss": 1.1876, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 0.0001908414282294664, |
|
"loss": 1.2152, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 0.000190754539298483, |
|
"loss": 1.1963, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 0.00019066726011770726, |
|
"loss": 1.3168, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 0.0001905795910624448, |
|
"loss": 1.2391, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 0.00019049153250967767, |
|
"loss": 1.2212, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 0.00019040308483806267, |
|
"loss": 1.1311, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 0.00019031424842793, |
|
"loss": 1.1852, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 0.00019022502366128135, |
|
"loss": 1.2108, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 0.00019013541092178843, |
|
"loss": 1.1906, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 0.00019004541059479123, |
|
"loss": 1.2416, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 0.00018995502306729634, |
|
"loss": 1.2911, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 0.0001898642487279754, |
|
"loss": 1.1554, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 0.0001897730879671634, |
|
"loss": 1.2697, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 0.00018968154117685683, |
|
"loss": 1.2675, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 0.00018958960875071226, |
|
"loss": 1.2365, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 0.0001894972910840444, |
|
"loss": 1.1556, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 0.00018940458857382467, |
|
"loss": 1.1615, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 0.00018931150161867916, |
|
"loss": 1.2577, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 0.00018921803061888728, |
|
"loss": 1.2688, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 0.00018912417597637967, |
|
"loss": 1.2346, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 0.0001890299380947368, |
|
"loss": 1.2354, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 0.00018893531737918702, |
|
"loss": 1.1978, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 0.0001888403142366049, |
|
"loss": 1.1801, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 0.0001887449290755095, |
|
"loss": 1.2578, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 0.00018864916230606254, |
|
"loss": 1.1904, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 0.00018855301434006667, |
|
"loss": 1.2335, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 0.00018845648559096377, |
|
"loss": 1.1714, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 0.00018835957647383303, |
|
"loss": 1.2479, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 0.0001882622874053893, |
|
"loss": 1.2498, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 0.00018816461880398125, |
|
"loss": 1.2019, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 0.00018806657108958952, |
|
"loss": 1.2397, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 0.00018796814468382498, |
|
"loss": 1.2969, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 0.00018786934000992688, |
|
"loss": 1.2114, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 0.0001877701574927611, |
|
"loss": 1.2269, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 0.00018767059755881818, |
|
"loss": 1.2316, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 0.00018757066063621166, |
|
"loss": 1.1359, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 0.00018747034715467612, |
|
"loss": 1.1882, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 0.00018736965754556528, |
|
"loss": 1.2671, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 0.0001872685922418504, |
|
"loss": 1.1952, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 0.0001871671516781181, |
|
"loss": 1.2418, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 0.00018706533629056873, |
|
"loss": 1.2175, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 0.00018696314651701437, |
|
"loss": 1.2578, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 0.00018686058279687698, |
|
"loss": 1.2942, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 0.00018675764557118653, |
|
"loss": 1.1584, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 0.00018665433528257904, |
|
"loss": 1.2019, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 0.00018655065237529477, |
|
"loss": 1.1372, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 0.00018644659729517623, |
|
"loss": 1.2638, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 0.00018634217048966637, |
|
"loss": 1.2821, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 0.00018623737240780646, |
|
"loss": 1.2134, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 0.00018613220350023434, |
|
"loss": 1.2821, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 0.00018602666421918245, |
|
"loss": 1.1929, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 0.00018592075501847584, |
|
"loss": 1.2036, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 0.0001858144763535302, |
|
"loss": 1.3065, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 0.00018570782868134998, |
|
"loss": 1.1853, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 0.00018560081246052633, |
|
"loss": 1.194, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 0.00018549342815123528, |
|
"loss": 1.2194, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 0.0001853856762152356, |
|
"loss": 1.2305, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 0.00018527755711586678, |
|
"loss": 1.2203, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 0.00018516907131804734, |
|
"loss": 1.1858, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 0.00018506021928827243, |
|
"loss": 1.1719, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 0.0001849510014946122, |
|
"loss": 1.2834, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 0.0001848414184067094, |
|
"loss": 1.224, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 0.00018473147049577774, |
|
"loss": 1.3112, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 0.00018462115823459962, |
|
"loss": 1.2651, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 0.00018451048209752418, |
|
"loss": 1.1859, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 0.00018439944256046518, |
|
"loss": 1.2177, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 0.0001842880401008992, |
|
"loss": 1.2284, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 0.00018417627519786315, |
|
"loss": 1.2012, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 0.00018406414833195268, |
|
"loss": 1.2, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 0.00018395165998531986, |
|
"loss": 1.2393, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 0.00018383881064167103, |
|
"loss": 1.196, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 0.000183725600786265, |
|
"loss": 1.1853, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 0.00018361203090591071, |
|
"loss": 1.1794, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 0.00018349810148896522, |
|
"loss": 1.1421, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 0.00018338381302533162, |
|
"loss": 1.1948, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 0.00018326916600645702, |
|
"loss": 1.2379, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 0.00018315416092533023, |
|
"loss": 1.3147, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 0.00018303879827647975, |
|
"loss": 1.1538, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 0.00018292307855597172, |
|
"loss": 1.2203, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 0.00018280700226140764, |
|
"loss": 1.1741, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 0.0001826905698919223, |
|
"loss": 1.302, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 0.0001825737819481817, |
|
"loss": 1.2623, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 0.00018245663893238075, |
|
"loss": 1.3401, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 0.00018233914134824122, |
|
"loss": 1.1852, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 0.00018222128970100956, |
|
"loss": 1.2277, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 0.00018210308449745472, |
|
"loss": 1.2296, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 0.00018198452624586592, |
|
"loss": 1.1051, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 0.00018186561545605054, |
|
"loss": 1.2346, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 0.0001817463526393319, |
|
"loss": 1.2339, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 0.000181626738308547, |
|
"loss": 1.1985, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 0.00018150677297804445, |
|
"loss": 1.1785, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 0.00018138645716368212, |
|
"loss": 1.1619, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 0.00018126579138282503, |
|
"loss": 1.1221, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 0.00018114477615434303, |
|
"loss": 1.2717, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 0.00018102341199860864, |
|
"loss": 1.3185, |
|
"step": 349 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 0.00018090169943749476, |
|
"loss": 1.1874, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 0.0001807796389943725, |
|
"loss": 1.1949, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 0.00018065723119410884, |
|
"loss": 1.276, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 0.00018053447656306445, |
|
"loss": 1.1644, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 0.00018041137562909137, |
|
"loss": 1.2693, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 0.00018028792892153076, |
|
"loss": 1.1438, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 0.00018016413697121065, |
|
"loss": 1.1836, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 0.0001800400003104436, |
|
"loss": 1.2102, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 0.00017991551947302452, |
|
"loss": 1.1578, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 0.0001797906949942282, |
|
"loss": 1.2866, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 0.00017966552741080708, |
|
"loss": 1.2307, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 0.00017954001726098917, |
|
"loss": 1.1588, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 0.00017941416508447536, |
|
"loss": 1.1719, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 0.0001792879714224373, |
|
"loss": 1.2384, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 0.0001791614368175151, |
|
"loss": 1.3005, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 0.00017903456181381485, |
|
"loss": 1.2584, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 0.00017890734695690652, |
|
"loss": 1.2102, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 0.00017877979279382135, |
|
"loss": 1.2107, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 0.00017865189987304963, |
|
"loss": 1.3347, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 0.00017852366874453838, |
|
"loss": 1.1805, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 0.0001783950999596889, |
|
"loss": 1.1626, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 0.00017826619407135445, |
|
"loss": 1.2205, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 0.0001781369516338378, |
|
"loss": 1.1646, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 0.000178007373202889, |
|
"loss": 1.2349, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 0.00017787745933570278, |
|
"loss": 1.252, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 0.00017774721059091633, |
|
"loss": 1.197, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 0.00017761662752860678, |
|
"loss": 1.2517, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 0.000177485710710289, |
|
"loss": 1.2853, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 0.0001773544606989128, |
|
"loss": 1.2175, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 0.0001772228780588609, |
|
"loss": 1.2812, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 0.00017709096335594634, |
|
"loss": 1.2227, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 0.00017695871715740994, |
|
"loss": 1.2107, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 0.00017682614003191807, |
|
"loss": 1.2279, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 0.00017669323254956005, |
|
"loss": 1.2261, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 0.0001765599952818458, |
|
"loss": 1.2397, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 0.00017642642880170333, |
|
"loss": 1.156, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 0.00017629253368347622, |
|
"loss": 1.2394, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 0.0001761583105029213, |
|
"loss": 1.2703, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 0.000176023759837206, |
|
"loss": 1.193, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 0.00017588888226490604, |
|
"loss": 1.1993, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 0.0001757536783660028, |
|
"loss": 1.1201, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 0.00017561814872188092, |
|
"loss": 1.2258, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 0.00017548229391532572, |
|
"loss": 1.1379, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 0.00017534611453052075, |
|
"loss": 1.212, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 0.00017520961115304534, |
|
"loss": 1.1478, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 0.0001750727843698719, |
|
"loss": 1.1945, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 0.00017493563476936351, |
|
"loss": 1.2498, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 0.00017479816294127152, |
|
"loss": 1.2142, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 0.00017466036947673267, |
|
"loss": 1.2241, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 0.00017452225496826697, |
|
"loss": 1.2344, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 0.00017438382000977475, |
|
"loss": 1.2201, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 0.00017424506519653438, |
|
"loss": 1.1334, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 0.0001741059911251997, |
|
"loss": 1.2565, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 0.00017396659839379722, |
|
"loss": 1.215, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 0.00017382688760172375, |
|
"loss": 1.1726, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 0.00017368685934974394, |
|
"loss": 1.2939, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 0.00017354651423998733, |
|
"loss": 1.1736, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 0.00017340585287594604, |
|
"loss": 1.2247, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 0.0001732648758624721, |
|
"loss": 1.2283, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 0.00017312358380577493, |
|
"loss": 1.1475, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 0.0001729819773134185, |
|
"loss": 1.1941, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 0.00017284005699431896, |
|
"loss": 1.2371, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 0.00017269782345874203, |
|
"loss": 1.2217, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 0.0001725552773183001, |
|
"loss": 1.292, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 0.00017241241918594983, |
|
"loss": 1.1865, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 0.00017226924967598956, |
|
"loss": 1.168, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 0.00017212576940405647, |
|
"loss": 1.1677, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 0.00017198197898712404, |
|
"loss": 1.3153, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 0.00017183787904349944, |
|
"loss": 1.1591, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 0.0001716934701928208, |
|
"loss": 1.1223, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 0.00017154875305605457, |
|
"loss": 1.1827, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 0.00017140372825549284, |
|
"loss": 1.2881, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 0.00017125839641475072, |
|
"loss": 1.2413, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 0.00017111275815876348, |
|
"loss": 1.2347, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 0.00017096681411378416, |
|
"loss": 1.2043, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 0.00017082056490738063, |
|
"loss": 1.1754, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 0.00017067401116843296, |
|
"loss": 1.1959, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 0.00017052715352713075, |
|
"loss": 1.2229, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 0.00017037999261497035, |
|
"loss": 1.1536, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 0.00017023252906475227, |
|
"loss": 1.2147, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 0.00017008476351057836, |
|
"loss": 1.2196, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 0.00016993669658784904, |
|
"loss": 1.188, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 0.00016978832893326074, |
|
"loss": 1.2643, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 0.00016963966118480292, |
|
"loss": 1.2505, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 0.00016949069398175564, |
|
"loss": 1.1752, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 0.0001693414279646864, |
|
"loss": 1.2156, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 0.00016919186377544788, |
|
"loss": 1.2576, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 0.0001690420020571747, |
|
"loss": 1.1948, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 0.00016889184345428095, |
|
"loss": 1.2086, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 0.0001687413886124574, |
|
"loss": 1.1836, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 0.00016859063817866859, |
|
"loss": 1.1949, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 0.00016843959280115015, |
|
"loss": 1.1963, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 0.00016828825312940592, |
|
"loss": 1.2238, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 0.00016813661981420538, |
|
"loss": 1.2253, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 0.00016798469350758056, |
|
"loss": 1.2057, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 0.00016783247486282335, |
|
"loss": 1.1617, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 0.00016767996453448283, |
|
"loss": 1.1896, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 0.00016752716317836229, |
|
"loss": 1.1548, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 0.00016737407145151636, |
|
"loss": 1.1881, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 0.00016722069001224842, |
|
"loss": 1.1876, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 0.00016706701952010758, |
|
"loss": 1.2465, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 0.00016691306063588583, |
|
"loss": 1.2247, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 0.00016675881402161536, |
|
"loss": 1.1149, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 0.0001666042803405656, |
|
"loss": 1.1575, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 0.00016644946025724027, |
|
"loss": 1.1538, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 0.0001662943544373748, |
|
"loss": 1.1682, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 0.0001661389635479332, |
|
"loss": 1.1312, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 0.00016598328825710533, |
|
"loss": 1.1553, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 0.000165827329234304, |
|
"loss": 1.1284, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 0.00016567108715016205, |
|
"loss": 1.2048, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 0.00016551456267652952, |
|
"loss": 1.1881, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 0.00016535775648647075, |
|
"loss": 1.1561, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 0.00016520066925426144, |
|
"loss": 1.1252, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 0.0001650433016553859, |
|
"loss": 1.2258, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 0.0001648856543665338, |
|
"loss": 1.2365, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 0.00016472772806559778, |
|
"loss": 1.1802, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 0.00016456952343167007, |
|
"loss": 1.1545, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 0.0001644110411450398, |
|
"loss": 1.1172, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 0.00016425228188718995, |
|
"loss": 1.2764, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 0.0001640932463407946, |
|
"loss": 1.1831, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 0.00016393393518971587, |
|
"loss": 1.1617, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 0.0001637743491190009, |
|
"loss": 1.1534, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 0.00016361448881487914, |
|
"loss": 1.2035, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 0.00016345435496475908, |
|
"loss": 1.186, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 0.00016329394825722568, |
|
"loss": 1.283, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 0.0001631332693820371, |
|
"loss": 1.1425, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 0.0001629723190301218, |
|
"loss": 1.2441, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 0.0001628110978935756, |
|
"loss": 1.1863, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 0.00016264960666565882, |
|
"loss": 1.1378, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 0.00016248784604079315, |
|
"loss": 1.1266, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 0.00016232581671455861, |
|
"loss": 1.1106, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 0.00016216351938369073, |
|
"loss": 1.2808, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 0.00016200095474607753, |
|
"loss": 1.1763, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 0.0001618381235007563, |
|
"loss": 1.1654, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 0.00016167502634791095, |
|
"loss": 1.1053, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 0.00016151166398886861, |
|
"loss": 1.1648, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 0.000161348037126097, |
|
"loss": 1.1927, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 0.0001611841464632011, |
|
"loss": 1.1971, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 0.00016101999270492034, |
|
"loss": 1.1814, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 0.00016085557655712532, |
|
"loss": 1.1801, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 0.0001606908987268151, |
|
"loss": 1.1608, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 0.00016052595992211387, |
|
"loss": 1.1726, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 0.00016036076085226814, |
|
"loss": 1.2643, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 0.00016019530222764346, |
|
"loss": 1.2025, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 0.00016002958475972156, |
|
"loss": 1.209, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 0.00015986360916109713, |
|
"loss": 1.1636, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 0.00015969737614547494, |
|
"loss": 1.1713, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 0.0001595308864276666, |
|
"loss": 1.1997, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 0.00015936414072358757, |
|
"loss": 1.163, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 0.00015919713975025407, |
|
"loss": 1.1831, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 0.00015902988422577998, |
|
"loss": 1.1898, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 0.00015886237486937378, |
|
"loss": 1.2146, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 0.0001586946124013354, |
|
"loss": 1.1581, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 0.0001585265975430533, |
|
"loss": 1.2067, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 0.00015835833101700102, |
|
"loss": 1.2665, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 0.00015818981354673447, |
|
"loss": 1.1948, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 0.00015802104585688851, |
|
"loss": 1.2046, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 0.00015785202867317407, |
|
"loss": 1.1941, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 0.00015768276272237484, |
|
"loss": 1.1608, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 0.0001575132487323442, |
|
"loss": 1.2275, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 0.0001573434874320022, |
|
"loss": 1.0887, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 0.00015717347955133233, |
|
"loss": 1.2151, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 0.00015700322582137827, |
|
"loss": 1.1537, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 0.00015683272697424104, |
|
"loss": 1.194, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 0.00015666198374307552, |
|
"loss": 1.2704, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 0.00015649099686208755, |
|
"loss": 1.1606, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 0.00015631976706653063, |
|
"loss": 1.2455, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 0.0001561482950927029, |
|
"loss": 1.1924, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 0.00015597658167794374, |
|
"loss": 1.1241, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 0.00015580462756063082, |
|
"loss": 1.2062, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 0.00015563243348017691, |
|
"loss": 1.2021, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 0.00015546000017702648, |
|
"loss": 1.1777, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 0.00015528732839265272, |
|
"loss": 1.1935, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 0.00015511441886955444, |
|
"loss": 1.1775, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 0.0001549412723512526, |
|
"loss": 1.1912, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 0.00015476788958228725, |
|
"loss": 1.1858, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 0.00015459427130821442, |
|
"loss": 1.1986, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 0.00015442041827560274, |
|
"loss": 1.218, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 0.00015424633123203036, |
|
"loss": 1.1241, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 0.00015407201092608173, |
|
"loss": 1.2119, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 0.0001538974581073442, |
|
"loss": 1.2456, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 0.00015372267352640513, |
|
"loss": 1.1842, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 0.00015354765793484834, |
|
"loss": 1.1135, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 0.00015337241208525097, |
|
"loss": 1.1587, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 0.00015319693673118047, |
|
"loss": 1.2242, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 0.000153021232627191, |
|
"loss": 1.1139, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 0.00015284530052882045, |
|
"loss": 1.1373, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 0.000152669141192587, |
|
"loss": 1.2007, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 0.0001524927553759861, |
|
"loss": 1.1783, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 0.00015231614383748698, |
|
"loss": 1.1786, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 0.0001521393073365295, |
|
"loss": 1.2511, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 0.00015196224663352093, |
|
"loss": 1.1642, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 0.00015178496248983254, |
|
"loss": 1.1476, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 0.00015160745566779652, |
|
"loss": 1.2125, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 0.0001514297269307024, |
|
"loss": 1.16, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 0.00015125177704279418, |
|
"loss": 1.187, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 0.00015107360676926666, |
|
"loss": 1.1085, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 0.00015089521687626243, |
|
"loss": 1.217, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 0.00015071660813086837, |
|
"loss": 1.1141, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 0.0001505377813011124, |
|
"loss": 1.1608, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 0.00015035873715596036, |
|
"loss": 1.1924, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 0.0001501794764653124, |
|
"loss": 1.2295, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 0.00015000000000000001, |
|
"loss": 1.2363, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 0.00014982030853178234, |
|
"loss": 1.1823, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 0.00014964040283334316, |
|
"loss": 1.2605, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 0.00014946028367828745, |
|
"loss": 1.1935, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 0.000149279951841138, |
|
"loss": 1.2208, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 0.00014909940809733222, |
|
"loss": 1.116, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 0.0001489186532232187, |
|
"loss": 1.2063, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 0.0001487376879960539, |
|
"loss": 1.177, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 0.00014855651319399882, |
|
"loss": 1.1899, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 0.0001483751295961156, |
|
"loss": 1.1343, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 0.00014819353798236427, |
|
"loss": 1.2297, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 0.0001480117391335993, |
|
"loss": 1.1555, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 0.00014782973383156635, |
|
"loss": 1.1316, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 0.00014764752285889875, |
|
"loss": 1.2272, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 0.00014746510699911432, |
|
"loss": 1.2361, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 0.00014728248703661182, |
|
"loss": 1.2084, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 0.0001470996637566677, |
|
"loss": 1.1774, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 0.0001469166379454327, |
|
"loss": 1.2804, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 0.0001467334103899284, |
|
"loss": 1.1555, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 0.000146549981878044, |
|
"loss": 1.1625, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 0.00014636635319853275, |
|
"loss": 1.1508, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 0.00014618252514100858, |
|
"loss": 1.1898, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 0.0001459984984959429, |
|
"loss": 1.2384, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 0.0001458142740546609, |
|
"loss": 1.2347, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 0.00014562985260933845, |
|
"loss": 1.2174, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 0.00014544523495299842, |
|
"loss": 1.1958, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 0.0001452604218795075, |
|
"loss": 1.2289, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 0.00014507541418357265, |
|
"loss": 1.095, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 0.00014489021266073768, |
|
"loss": 1.1451, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 0.0001447048181073799, |
|
"loss": 1.1649, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 0.0001445192313207067, |
|
"loss": 1.1453, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 0.00014433345309875203, |
|
"loss": 1.1285, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 0.00014414748424037302, |
|
"loss": 1.1593, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 0.0001439613255452466, |
|
"loss": 1.1799, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 0.0001437749778138659, |
|
"loss": 1.1652, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 0.00014358844184753712, |
|
"loss": 1.1847, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 0.00014340171844837567, |
|
"loss": 1.1749, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 0.00014321480841930298, |
|
"loss": 1.2343, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 0.00014302771256404311, |
|
"loss": 1.219, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 0.00014284043168711906, |
|
"loss": 1.212, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 0.00014265296659384956, |
|
"loss": 1.1803, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 0.00014246531809034528, |
|
"loss": 1.1622, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 0.00014227748698350583, |
|
"loss": 1.2043, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 0.00014208947408101576, |
|
"loss": 1.2637, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 0.00014190128019134153, |
|
"loss": 1.2462, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 0.0001417129061237278, |
|
"loss": 1.1798, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 0.000141524352688194, |
|
"loss": 1.243, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 0.0001413356206955309, |
|
"loss": 1.1082, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 0.00014114671095729695, |
|
"loss": 1.2122, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 0.00014095762428581506, |
|
"loss": 1.1753, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 0.00014076836149416887, |
|
"loss": 1.1422, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 0.00014057892339619944, |
|
"loss": 1.1818, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 0.00014038931080650156, |
|
"loss": 1.1827, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 0.00014019952454042035, |
|
"loss": 1.1517, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 0.00014000956541404785, |
|
"loss": 1.2177, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 0.00013981943424421932, |
|
"loss": 1.1884, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 0.00013962913184850979, |
|
"loss": 1.1519, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 0.00013943865904523066, |
|
"loss": 1.1736, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 0.00013924801665342603, |
|
"loss": 1.2433, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 0.00013905720549286932, |
|
"loss": 1.1155, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 0.00013886622638405952, |
|
"loss": 1.1417, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 0.00013867508014821797, |
|
"loss": 1.2524, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 0.00013848376760728455, |
|
"loss": 1.1986, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 0.0001382922895839143, |
|
"loss": 1.2299, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 0.00013810064690147387, |
|
"loss": 1.1937, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 0.00013790884038403795, |
|
"loss": 1.1978, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 0.00013771687085638564, |
|
"loss": 1.2552, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 0.00013752473914399713, |
|
"loss": 1.1432, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 0.00013733244607304988, |
|
"loss": 1.163, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 0.00013713999247041533, |
|
"loss": 1.1996, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 0.00013694737916365517, |
|
"loss": 1.1993, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 0.00013675460698101772, |
|
"loss": 1.1851, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 0.00013656167675143462, |
|
"loss": 1.2172, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 0.0001363685893045171, |
|
"loss": 1.1332, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 0.00013617534547055236, |
|
"loss": 1.2618, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 0.0001359819460805001, |
|
"loss": 1.2021, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 0.00013578839196598899, |
|
"loss": 1.2086, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 0.0001355946839593129, |
|
"loss": 1.2122, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 0.00013540082289342756, |
|
"loss": 1.1428, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 0.0001352068096019468, |
|
"loss": 1.1406, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 0.00013501264491913906, |
|
"loss": 1.1794, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 0.00013481832967992378, |
|
"loss": 1.0834, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 0.00013462386471986773, |
|
"loss": 1.1808, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 0.00013442925087518153, |
|
"loss": 1.1782, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 0.0001342344889827161, |
|
"loss": 1.1171, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 0.00013403957987995882, |
|
"loss": 1.1755, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 0.00013384452440503018, |
|
"loss": 1.1349, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 0.00013364932339668008, |
|
"loss": 1.1179, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 0.00013345397769428413, |
|
"loss": 1.2069, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 0.0001332584881378403, |
|
"loss": 1.1396, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 0.00013306285556796495, |
|
"loss": 1.2068, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 0.00013286708082588955, |
|
"loss": 1.2002, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 0.0001326711647534568, |
|
"loss": 1.1348, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 0.00013247510819311732, |
|
"loss": 1.1887, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 0.0001322789119879256, |
|
"loss": 1.1902, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 0.00013208257698153677, |
|
"loss": 1.1693, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 0.00013188610401820277, |
|
"loss": 1.1429, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 0.0001316894939427687, |
|
"loss": 1.1323, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 0.00013149274760066942, |
|
"loss": 1.1664, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 0.0001312958658379255, |
|
"loss": 1.1743, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 0.00013109884950114007, |
|
"loss": 1.2094, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 0.00013090169943749476, |
|
"loss": 1.1506, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 0.0001307044164947463, |
|
"loss": 1.1667, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 0.00013050700152122285, |
|
"loss": 1.0879, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 0.00013030945536582025, |
|
"loss": 1.176, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 0.00013011177887799845, |
|
"loss": 1.223, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 0.0001299139729077778, |
|
"loss": 1.1235, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 0.0001297160383057355, |
|
"loss": 1.179, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 0.00012951797592300186, |
|
"loss": 1.248, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 0.00012931978661125655, |
|
"loss": 1.1429, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 0.00012912147122272523, |
|
"loss": 1.2206, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 0.0001289230306101755, |
|
"loss": 1.1935, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 0.0001287244656269136, |
|
"loss": 1.1617, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 0.00012852577712678045, |
|
"loss": 1.2507, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 0.00012832696596414817, |
|
"loss": 1.1566, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 0.00012812803299391628, |
|
"loss": 1.2056, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 0.00012792897907150817, |
|
"loss": 1.1694, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 0.0001277298050528672, |
|
"loss": 1.2231, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 0.00012753051179445325, |
|
"loss": 1.188, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 0.00012733110015323898, |
|
"loss": 1.2972, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 0.0001271315709867059, |
|
"loss": 1.0586, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 0.0001269319251528411, |
|
"loss": 1.1582, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 0.0001267321635101333, |
|
"loss": 1.1554, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 0.00012653228691756913, |
|
"loss": 1.1897, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 0.00012633229623462951, |
|
"loss": 1.2316, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 0.00012613219232128608, |
|
"loss": 1.1713, |
|
"step": 677 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 0.00012593197603799725, |
|
"loss": 1.1412, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 0.00012573164824570464, |
|
"loss": 1.1844, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 0.00012553120980582946, |
|
"loss": 1.2214, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 0.00012533066158026862, |
|
"loss": 1.1602, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 0.00012513000443139112, |
|
"loss": 1.1747, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 0.0001249292392220344, |
|
"loss": 1.2472, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 0.00012472836681550048, |
|
"loss": 1.1359, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 0.00012452738807555244, |
|
"loss": 1.1903, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 0.0001243263038664105, |
|
"loss": 1.2522, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 0.00012412511505274844, |
|
"loss": 1.0997, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 0.00012392382249968987, |
|
"loss": 1.1289, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 0.00012372242707280447, |
|
"loss": 1.1274, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 0.00012352092963810425, |
|
"loss": 1.1438, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 0.00012331933106203986, |
|
"loss": 1.1536, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 0.000123117632211497, |
|
"loss": 1.1329, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 0.00012291583395379228, |
|
"loss": 1.2712, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 0.00012271393715667013, |
|
"loss": 1.1494, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 0.0001225119426882984, |
|
"loss": 1.1537, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 0.00012230985141726498, |
|
"loss": 1.2388, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 0.0001221076642125742, |
|
"loss": 1.2141, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 0.0001219053819436427, |
|
"loss": 1.1442, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 0.00012170300548029605, |
|
"loss": 1.1599, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 0.00012150053569276478, |
|
"loss": 1.228, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 0.00012129797345168073, |
|
"loss": 1.2166, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 0.00012109531962807332, |
|
"loss": 1.1168, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 0.00012089257509336577, |
|
"loss": 1.2246, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 0.00012068974071937136, |
|
"loss": 1.3116, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 0.00012048681737828962, |
|
"loss": 1.0999, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 0.00012028380594270283, |
|
"loss": 1.1261, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 0.00012008070728557186, |
|
"loss": 1.1099, |
|
"step": 707 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 0.00011987752228023277, |
|
"loss": 1.1195, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 0.0001196742518003929, |
|
"loss": 1.1169, |
|
"step": 709 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 0.0001194708967201271, |
|
"loss": 1.1305, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 0.00011926745791387406, |
|
"loss": 1.2178, |
|
"step": 711 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 0.00011906393625643244, |
|
"loss": 1.089, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 0.0001188603326229572, |
|
"loss": 1.2487, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 0.00011865664788895578, |
|
"loss": 1.2024, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 0.00011845288293028445, |
|
"loss": 1.2987, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 0.00011824903862314427, |
|
"loss": 1.1744, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 0.00011804511584407763, |
|
"loss": 1.1953, |
|
"step": 717 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 0.00011784111546996439, |
|
"loss": 1.1957, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 0.00011763703837801794, |
|
"loss": 1.2052, |
|
"step": 719 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 0.00011743288544578166, |
|
"loss": 1.2065, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 0.00011722865755112504, |
|
"loss": 1.208, |
|
"step": 721 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 0.00011702435557223987, |
|
"loss": 1.2378, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 0.00011681998038763653, |
|
"loss": 1.2872, |
|
"step": 723 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 0.0001166155328761402, |
|
"loss": 1.2098, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 0.00011641101391688707, |
|
"loss": 1.173, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 0.0001162064243893205, |
|
"loss": 1.2261, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 0.00011600176517318741, |
|
"loss": 1.1428, |
|
"step": 727 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 0.00011579703714853425, |
|
"loss": 1.21, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 0.0001155922411957035, |
|
"loss": 1.1725, |
|
"step": 729 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 0.00011538737819532958, |
|
"loss": 1.1587, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 0.00011518244902833537, |
|
"loss": 1.1833, |
|
"step": 731 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 0.00011497745457592816, |
|
"loss": 1.1714, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 0.000114772395719596, |
|
"loss": 1.1599, |
|
"step": 733 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 0.00011456727334110389, |
|
"loss": 1.1924, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 0.00011436208832248997, |
|
"loss": 1.2112, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 0.00011415684154606177, |
|
"loss": 1.1709, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 0.00011395153389439233, |
|
"loss": 1.1292, |
|
"step": 737 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 0.00011374616625031647, |
|
"loss": 1.1881, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 0.00011354073949692703, |
|
"loss": 1.153, |
|
"step": 739 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 0.00011333525451757094, |
|
"loss": 1.2166, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 0.00011312971219584563, |
|
"loss": 1.1292, |
|
"step": 741 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 0.0001129241134155949, |
|
"loss": 1.1156, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 0.00011271845906090557, |
|
"loss": 1.1687, |
|
"step": 743 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 0.00011251275001610327, |
|
"loss": 1.0724, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 0.00011230698716574886, |
|
"loss": 1.1212, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 0.00011210117139463452, |
|
"loss": 1.1542, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 0.00011189530358778005, |
|
"loss": 1.1841, |
|
"step": 747 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 0.00011168938463042895, |
|
"loss": 1.1019, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 0.00011148341540804472, |
|
"loss": 1.1106, |
|
"step": 749 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 0.00011127739680630705, |
|
"loss": 1.1577, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 0.00011107132971110779, |
|
"loss": 1.1444, |
|
"step": 751 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 0.00011086521500854745, |
|
"loss": 1.1184, |
|
"step": 752 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 0.00011065905358493132, |
|
"loss": 1.1562, |
|
"step": 753 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 0.00011045284632676536, |
|
"loss": 1.1566, |
|
"step": 754 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 0.00011024659412075284, |
|
"loss": 1.1202, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 0.00011004029785379024, |
|
"loss": 1.1932, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 0.00010983395841296348, |
|
"loss": 1.1973, |
|
"step": 757 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 0.00010962757668554413, |
|
"loss": 1.1412, |
|
"step": 758 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 0.00010942115355898563, |
|
"loss": 1.2078, |
|
"step": 759 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 0.00010921468992091941, |
|
"loss": 1.2145, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 0.00010900818665915109, |
|
"loss": 1.1747, |
|
"step": 761 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 0.00010880164466165674, |
|
"loss": 1.1938, |
|
"step": 762 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 0.00010859506481657889, |
|
"loss": 1.1536, |
|
"step": 763 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 0.00010838844801222295, |
|
"loss": 1.0934, |
|
"step": 764 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 0.00010818179513705311, |
|
"loss": 1.123, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 0.00010797510707968878, |
|
"loss": 1.1267, |
|
"step": 766 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 0.00010776838472890065, |
|
"loss": 1.208, |
|
"step": 767 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 0.00010756162897360675, |
|
"loss": 1.0935, |
|
"step": 768 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 0.00010735484070286892, |
|
"loss": 1.1809, |
|
"step": 769 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 0.00010714802080588872, |
|
"loss": 1.1965, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 0.00010694117017200372, |
|
"loss": 1.2223, |
|
"step": 771 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 0.00010673428969068364, |
|
"loss": 1.1519, |
|
"step": 772 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 0.00010652738025152661, |
|
"loss": 1.0735, |
|
"step": 773 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 0.00010632044274425524, |
|
"loss": 1.0942, |
|
"step": 774 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 0.00010611347805871277, |
|
"loss": 1.1316, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 0.00010590648708485946, |
|
"loss": 1.2067, |
|
"step": 776 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 0.00010569947071276847, |
|
"loss": 1.1517, |
|
"step": 777 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 0.00010549242983262227, |
|
"loss": 1.1459, |
|
"step": 778 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 0.00010528536533470863, |
|
"loss": 1.1318, |
|
"step": 779 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 0.00010507827810941695, |
|
"loss": 1.1545, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 0.00010487116904723433, |
|
"loss": 1.1475, |
|
"step": 781 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 0.00010466403903874176, |
|
"loss": 1.2015, |
|
"step": 782 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 0.00010445688897461032, |
|
"loss": 1.1841, |
|
"step": 783 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 0.00010424971974559732, |
|
"loss": 1.1901, |
|
"step": 784 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 0.0001040425322425425, |
|
"loss": 1.0631, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 0.00010383532735636411, |
|
"loss": 1.1538, |
|
"step": 786 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 0.00010362810597805526, |
|
"loss": 1.1512, |
|
"step": 787 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 0.00010342086899867991, |
|
"loss": 1.1804, |
|
"step": 788 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 0.00010321361730936904, |
|
"loss": 1.0977, |
|
"step": 789 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 0.00010300635180131708, |
|
"loss": 1.1124, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 0.00010279907336577765, |
|
"loss": 1.1691, |
|
"step": 791 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 0.00010259178289406011, |
|
"loss": 1.1335, |
|
"step": 792 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 0.00010238448127752554, |
|
"loss": 1.1041, |
|
"step": 793 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 0.00010217716940758291, |
|
"loss": 1.1401, |
|
"step": 794 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 0.00010196984817568534, |
|
"loss": 1.1738, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 0.00010176251847332614, |
|
"loss": 1.1201, |
|
"step": 796 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 0.0001015551811920351, |
|
"loss": 1.1411, |
|
"step": 797 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 0.00010134783722337455, |
|
"loss": 1.1713, |
|
"step": 798 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 0.00010114048745893567, |
|
"loss": 1.1046, |
|
"step": 799 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 0.00010093313279033441, |
|
"loss": 1.1963, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 0.00010072577410920794, |
|
"loss": 1.2045, |
|
"step": 801 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 0.00010051841230721065, |
|
"loss": 1.1401, |
|
"step": 802 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 0.00010031104827601027, |
|
"loss": 1.1497, |
|
"step": 803 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 0.00010010368290728427, |
|
"loss": 1.1224, |
|
"step": 804 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 9.989631709271571e-05, |
|
"loss": 1.1826, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 9.968895172398974e-05, |
|
"loss": 1.1705, |
|
"step": 806 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 9.948158769278939e-05, |
|
"loss": 1.1644, |
|
"step": 807 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 9.927422589079206e-05, |
|
"loss": 1.1825, |
|
"step": 808 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 9.906686720966561e-05, |
|
"loss": 1.1396, |
|
"step": 809 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 9.885951254106437e-05, |
|
"loss": 1.1647, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 9.865216277662545e-05, |
|
"loss": 1.1077, |
|
"step": 811 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 9.844481880796491e-05, |
|
"loss": 1.1812, |
|
"step": 812 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 9.82374815266739e-05, |
|
"loss": 1.0654, |
|
"step": 813 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 9.803015182431468e-05, |
|
"loss": 1.1079, |
|
"step": 814 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 9.782283059241711e-05, |
|
"loss": 1.1233, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 9.761551872247449e-05, |
|
"loss": 1.1553, |
|
"step": 816 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 9.740821710593989e-05, |
|
"loss": 1.2006, |
|
"step": 817 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 9.720092663422237e-05, |
|
"loss": 1.1814, |
|
"step": 818 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 9.699364819868295e-05, |
|
"loss": 1.1859, |
|
"step": 819 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 9.678638269063094e-05, |
|
"loss": 1.1329, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 9.657913100132011e-05, |
|
"loss": 1.1667, |
|
"step": 821 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 9.637189402194476e-05, |
|
"loss": 1.2103, |
|
"step": 822 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 9.61646726436359e-05, |
|
"loss": 1.1074, |
|
"step": 823 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 9.595746775745753e-05, |
|
"loss": 1.1871, |
|
"step": 824 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 9.575028025440272e-05, |
|
"loss": 1.1433, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 9.554311102538966e-05, |
|
"loss": 1.137, |
|
"step": 826 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 9.533596096125825e-05, |
|
"loss": 1.1602, |
|
"step": 827 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 9.512883095276569e-05, |
|
"loss": 1.1733, |
|
"step": 828 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 9.492172189058304e-05, |
|
"loss": 1.125, |
|
"step": 829 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 9.471463466529139e-05, |
|
"loss": 1.1597, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 9.450757016737776e-05, |
|
"loss": 1.1364, |
|
"step": 831 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 9.430052928723153e-05, |
|
"loss": 1.1344, |
|
"step": 832 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 9.409351291514054e-05, |
|
"loss": 1.0674, |
|
"step": 833 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 9.388652194128726e-05, |
|
"loss": 1.1382, |
|
"step": 834 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 9.36795572557448e-05, |
|
"loss": 1.1605, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 9.347261974847341e-05, |
|
"loss": 1.1862, |
|
"step": 836 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 9.326571030931637e-05, |
|
"loss": 1.184, |
|
"step": 837 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 9.305882982799633e-05, |
|
"loss": 1.1393, |
|
"step": 838 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 9.28519791941113e-05, |
|
"loss": 1.1614, |
|
"step": 839 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 9.264515929713109e-05, |
|
"loss": 1.1273, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 9.243837102639328e-05, |
|
"loss": 1.1093, |
|
"step": 841 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 9.223161527109937e-05, |
|
"loss": 1.1691, |
|
"step": 842 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 9.202489292031123e-05, |
|
"loss": 1.1337, |
|
"step": 843 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 9.181820486294693e-05, |
|
"loss": 1.1567, |
|
"step": 844 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 9.161155198777707e-05, |
|
"loss": 1.1486, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 9.140493518342113e-05, |
|
"loss": 1.1324, |
|
"step": 846 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 9.119835533834331e-05, |
|
"loss": 1.1495, |
|
"step": 847 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 9.099181334084892e-05, |
|
"loss": 1.1776, |
|
"step": 848 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 9.078531007908062e-05, |
|
"loss": 1.2368, |
|
"step": 849 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 9.05788464410144e-05, |
|
"loss": 1.0144, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 9.037242331445588e-05, |
|
"loss": 1.1685, |
|
"step": 851 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 9.016604158703654e-05, |
|
"loss": 1.1536, |
|
"step": 852 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 8.995970214620977e-05, |
|
"loss": 1.1459, |
|
"step": 853 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 8.975340587924716e-05, |
|
"loss": 1.1815, |
|
"step": 854 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 8.954715367323468e-05, |
|
"loss": 1.1047, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 8.934094641506873e-05, |
|
"loss": 1.1449, |
|
"step": 856 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 8.913478499145254e-05, |
|
"loss": 1.1432, |
|
"step": 857 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 8.892867028889223e-05, |
|
"loss": 1.1549, |
|
"step": 858 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 8.872260319369301e-05, |
|
"loss": 1.1686, |
|
"step": 859 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 8.851658459195528e-05, |
|
"loss": 1.2246, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 8.831061536957107e-05, |
|
"loss": 1.1605, |
|
"step": 861 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 8.810469641222001e-05, |
|
"loss": 1.1598, |
|
"step": 862 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 8.789882860536549e-05, |
|
"loss": 1.2153, |
|
"step": 863 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 8.769301283425117e-05, |
|
"loss": 1.1487, |
|
"step": 864 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 8.748724998389674e-05, |
|
"loss": 1.1018, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 8.728154093909441e-05, |
|
"loss": 1.116, |
|
"step": 866 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 8.707588658440511e-05, |
|
"loss": 1.1493, |
|
"step": 867 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 8.687028780415442e-05, |
|
"loss": 1.1542, |
|
"step": 868 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 8.666474548242907e-05, |
|
"loss": 1.1676, |
|
"step": 869 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 8.645926050307299e-05, |
|
"loss": 1.1742, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 8.625383374968357e-05, |
|
"loss": 1.1565, |
|
"step": 871 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 8.604846610560771e-05, |
|
"loss": 1.1178, |
|
"step": 872 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 8.584315845393827e-05, |
|
"loss": 1.1311, |
|
"step": 873 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 8.563791167751008e-05, |
|
"loss": 1.1128, |
|
"step": 874 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 8.543272665889612e-05, |
|
"loss": 1.1713, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 8.522760428040402e-05, |
|
"loss": 1.0631, |
|
"step": 876 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 8.502254542407186e-05, |
|
"loss": 1.145, |
|
"step": 877 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 8.481755097166463e-05, |
|
"loss": 1.2001, |
|
"step": 878 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 8.461262180467043e-05, |
|
"loss": 1.1534, |
|
"step": 879 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 8.440775880429653e-05, |
|
"loss": 1.1454, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 8.420296285146574e-05, |
|
"loss": 1.1576, |
|
"step": 881 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 8.399823482681262e-05, |
|
"loss": 1.1447, |
|
"step": 882 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 8.379357561067952e-05, |
|
"loss": 1.1235, |
|
"step": 883 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 8.358898608311296e-05, |
|
"loss": 1.1724, |
|
"step": 884 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 8.338446712385982e-05, |
|
"loss": 1.1528, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 8.318001961236349e-05, |
|
"loss": 1.1079, |
|
"step": 886 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 8.297564442776014e-05, |
|
"loss": 1.1935, |
|
"step": 887 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 8.277134244887499e-05, |
|
"loss": 1.1636, |
|
"step": 888 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 8.256711455421835e-05, |
|
"loss": 1.1444, |
|
"step": 889 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 8.236296162198206e-05, |
|
"loss": 1.1599, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 8.215888453003562e-05, |
|
"loss": 1.1545, |
|
"step": 891 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 8.195488415592238e-05, |
|
"loss": 1.1512, |
|
"step": 892 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 8.175096137685575e-05, |
|
"loss": 1.1985, |
|
"step": 893 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 8.154711706971557e-05, |
|
"loss": 1.1396, |
|
"step": 894 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 8.134335211104423e-05, |
|
"loss": 1.1888, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 8.11396673770428e-05, |
|
"loss": 1.2313, |
|
"step": 896 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 8.093606374356759e-05, |
|
"loss": 1.1218, |
|
"step": 897 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 8.073254208612597e-05, |
|
"loss": 1.1675, |
|
"step": 898 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 8.05291032798729e-05, |
|
"loss": 1.1619, |
|
"step": 899 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 8.032574819960713e-05, |
|
"loss": 1.2344, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 8.012247771976726e-05, |
|
"loss": 1.1226, |
|
"step": 901 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 7.991929271442817e-05, |
|
"loss": 1.0907, |
|
"step": 902 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 7.971619405729719e-05, |
|
"loss": 1.1901, |
|
"step": 903 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 7.951318262171039e-05, |
|
"loss": 1.1099, |
|
"step": 904 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 7.931025928062869e-05, |
|
"loss": 1.1261, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 7.910742490663425e-05, |
|
"loss": 1.1609, |
|
"step": 906 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 7.89046803719267e-05, |
|
"loss": 1.2235, |
|
"step": 907 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 7.870202654831931e-05, |
|
"loss": 1.151, |
|
"step": 908 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 7.849946430723525e-05, |
|
"loss": 1.2247, |
|
"step": 909 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 7.829699451970397e-05, |
|
"loss": 1.1696, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 7.809461805635734e-05, |
|
"loss": 1.1638, |
|
"step": 911 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 7.789233578742582e-05, |
|
"loss": 1.1042, |
|
"step": 912 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 7.769014858273506e-05, |
|
"loss": 1.1649, |
|
"step": 913 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 7.748805731170168e-05, |
|
"loss": 1.1675, |
|
"step": 914 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 7.728606284332988e-05, |
|
"loss": 1.1241, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 7.708416604620772e-05, |
|
"loss": 1.082, |
|
"step": 916 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 7.688236778850306e-05, |
|
"loss": 1.1375, |
|
"step": 917 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 7.668066893796014e-05, |
|
"loss": 1.2039, |
|
"step": 918 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 7.647907036189579e-05, |
|
"loss": 1.1631, |
|
"step": 919 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 7.627757292719558e-05, |
|
"loss": 1.1604, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 7.607617750031014e-05, |
|
"loss": 1.0842, |
|
"step": 921 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 7.587488494725157e-05, |
|
"loss": 1.1527, |
|
"step": 922 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 7.567369613358953e-05, |
|
"loss": 1.0852, |
|
"step": 923 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 7.547261192444755e-05, |
|
"loss": 1.1232, |
|
"step": 924 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 7.527163318449953e-05, |
|
"loss": 1.1283, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 7.507076077796565e-05, |
|
"loss": 1.2274, |
|
"step": 926 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 7.48699955686089e-05, |
|
"loss": 1.1752, |
|
"step": 927 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 7.46693384197314e-05, |
|
"loss": 1.1863, |
|
"step": 928 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 7.446879019417059e-05, |
|
"loss": 1.1312, |
|
"step": 929 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 7.426835175429537e-05, |
|
"loss": 1.1844, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 7.406802396200279e-05, |
|
"loss": 1.173, |
|
"step": 931 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 7.386780767871397e-05, |
|
"loss": 1.1541, |
|
"step": 932 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 7.366770376537048e-05, |
|
"loss": 1.1309, |
|
"step": 933 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 7.34677130824309e-05, |
|
"loss": 1.1582, |
|
"step": 934 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 7.326783648986671e-05, |
|
"loss": 1.1874, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 7.306807484715888e-05, |
|
"loss": 1.1975, |
|
"step": 936 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 7.286842901329412e-05, |
|
"loss": 1.1273, |
|
"step": 937 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 7.266889984676109e-05, |
|
"loss": 1.0885, |
|
"step": 938 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 7.246948820554675e-05, |
|
"loss": 1.1217, |
|
"step": 939 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 7.227019494713282e-05, |
|
"loss": 1.1392, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 7.207102092849187e-05, |
|
"loss": 1.1562, |
|
"step": 941 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 7.187196700608373e-05, |
|
"loss": 1.1219, |
|
"step": 942 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 7.167303403585186e-05, |
|
"loss": 1.1326, |
|
"step": 943 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 7.147422287321957e-05, |
|
"loss": 1.1615, |
|
"step": 944 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 7.12755343730864e-05, |
|
"loss": 1.1578, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 7.107696938982451e-05, |
|
"loss": 1.2216, |
|
"step": 946 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 7.087852877727481e-05, |
|
"loss": 1.1921, |
|
"step": 947 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 7.068021338874344e-05, |
|
"loss": 1.1387, |
|
"step": 948 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 7.048202407699816e-05, |
|
"loss": 1.0919, |
|
"step": 949 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 7.028396169426452e-05, |
|
"loss": 1.1836, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 7.00860270922222e-05, |
|
"loss": 1.2598, |
|
"step": 951 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 6.988822112200156e-05, |
|
"loss": 1.1233, |
|
"step": 952 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 6.969054463417976e-05, |
|
"loss": 1.0995, |
|
"step": 953 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 6.949299847877712e-05, |
|
"loss": 1.1595, |
|
"step": 954 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 6.92955835052537e-05, |
|
"loss": 1.1337, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 6.909830056250527e-05, |
|
"loss": 1.1672, |
|
"step": 956 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 6.890115049885994e-05, |
|
"loss": 1.1196, |
|
"step": 957 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 6.870413416207451e-05, |
|
"loss": 1.1234, |
|
"step": 958 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 6.850725239933063e-05, |
|
"loss": 1.1017, |
|
"step": 959 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 6.831050605723128e-05, |
|
"loss": 1.176, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 6.811389598179725e-05, |
|
"loss": 1.1055, |
|
"step": 961 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 6.791742301846326e-05, |
|
"loss": 1.109, |
|
"step": 962 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 6.772108801207442e-05, |
|
"loss": 1.0842, |
|
"step": 963 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 6.75248918068827e-05, |
|
"loss": 1.1461, |
|
"step": 964 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 6.732883524654318e-05, |
|
"loss": 1.129, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 6.71329191741105e-05, |
|
"loss": 1.1416, |
|
"step": 966 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 6.693714443203507e-05, |
|
"loss": 1.1243, |
|
"step": 967 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 6.674151186215973e-05, |
|
"loss": 1.118, |
|
"step": 968 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 6.654602230571589e-05, |
|
"loss": 1.155, |
|
"step": 969 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 6.635067660331993e-05, |
|
"loss": 1.1504, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 6.615547559496984e-05, |
|
"loss": 1.0894, |
|
"step": 971 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 6.59604201200412e-05, |
|
"loss": 1.1758, |
|
"step": 972 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 6.57655110172839e-05, |
|
"loss": 1.1959, |
|
"step": 973 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 6.557074912481846e-05, |
|
"loss": 1.1593, |
|
"step": 974 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 6.537613528013232e-05, |
|
"loss": 1.1343, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 6.518167032007623e-05, |
|
"loss": 1.1719, |
|
"step": 976 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 6.498735508086093e-05, |
|
"loss": 1.1345, |
|
"step": 977 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 6.479319039805323e-05, |
|
"loss": 1.1636, |
|
"step": 978 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 6.459917710657246e-05, |
|
"loss": 1.193, |
|
"step": 979 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 6.440531604068713e-05, |
|
"loss": 1.1906, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 6.421160803401105e-05, |
|
"loss": 1.1815, |
|
"step": 981 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 6.40180539194999e-05, |
|
"loss": 1.1255, |
|
"step": 982 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 6.382465452944768e-05, |
|
"loss": 1.1794, |
|
"step": 983 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 6.363141069548293e-05, |
|
"loss": 1.1478, |
|
"step": 984 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 6.343832324856538e-05, |
|
"loss": 1.1276, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 6.324539301898229e-05, |
|
"loss": 1.1556, |
|
"step": 986 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 6.305262083634488e-05, |
|
"loss": 1.1263, |
|
"step": 987 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 6.286000752958467e-05, |
|
"loss": 1.1423, |
|
"step": 988 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 6.266755392695011e-05, |
|
"loss": 1.1251, |
|
"step": 989 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 6.247526085600291e-05, |
|
"loss": 1.2281, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 6.228312914361435e-05, |
|
"loss": 1.1592, |
|
"step": 991 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 6.209115961596208e-05, |
|
"loss": 1.1982, |
|
"step": 992 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 6.189935309852612e-05, |
|
"loss": 1.1608, |
|
"step": 993 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 6.17077104160857e-05, |
|
"loss": 1.1901, |
|
"step": 994 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 6.151623239271548e-05, |
|
"loss": 1.0984, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 6.132491985178206e-05, |
|
"loss": 1.181, |
|
"step": 996 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 6.113377361594049e-05, |
|
"loss": 1.1445, |
|
"step": 997 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 6.0942794507130707e-05, |
|
"loss": 1.1573, |
|
"step": 998 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 6.075198334657399e-05, |
|
"loss": 1.2152, |
|
"step": 999 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 6.056134095476936e-05, |
|
"loss": 1.0542, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 6.0370868151490244e-05, |
|
"loss": 1.1953, |
|
"step": 1001 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 6.018056575578075e-05, |
|
"loss": 1.2288, |
|
"step": 1002 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 5.999043458595215e-05, |
|
"loss": 1.1121, |
|
"step": 1003 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 5.980047545957965e-05, |
|
"loss": 1.0894, |
|
"step": 1004 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 5.961068919349847e-05, |
|
"loss": 1.1046, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 5.9421076603800566e-05, |
|
"loss": 1.1754, |
|
"step": 1006 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 5.923163850583113e-05, |
|
"loss": 1.1395, |
|
"step": 1007 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 5.904237571418496e-05, |
|
"loss": 1.1428, |
|
"step": 1008 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 5.885328904270306e-05, |
|
"loss": 1.0903, |
|
"step": 1009 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 5.866437930446913e-05, |
|
"loss": 1.1772, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 5.8475647311806016e-05, |
|
"loss": 1.0997, |
|
"step": 1011 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 5.828709387627218e-05, |
|
"loss": 1.1145, |
|
"step": 1012 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 5.8098719808658464e-05, |
|
"loss": 1.1002, |
|
"step": 1013 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 5.7910525918984285e-05, |
|
"loss": 1.1476, |
|
"step": 1014 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 5.772251301649418e-05, |
|
"loss": 1.116, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 5.7534681909654694e-05, |
|
"loss": 1.2078, |
|
"step": 1016 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 5.73470334061505e-05, |
|
"loss": 1.2252, |
|
"step": 1017 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 5.715956831288092e-05, |
|
"loss": 1.1134, |
|
"step": 1018 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 5.6972287435956895e-05, |
|
"loss": 1.1069, |
|
"step": 1019 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 5.6785191580697025e-05, |
|
"loss": 1.0913, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 5.6598281551624364e-05, |
|
"loss": 1.198, |
|
"step": 1021 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 5.6411558152462894e-05, |
|
"loss": 1.2224, |
|
"step": 1022 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 5.622502218613408e-05, |
|
"loss": 1.1956, |
|
"step": 1023 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 5.6038674454753414e-05, |
|
"loss": 1.1119, |
|
"step": 1024 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 5.5852515759626976e-05, |
|
"loss": 1.1057, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 5.5666546901247974e-05, |
|
"loss": 1.1144, |
|
"step": 1026 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 5.54807686792933e-05, |
|
"loss": 1.14, |
|
"step": 1027 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 5.5295181892620105e-05, |
|
"loss": 1.1362, |
|
"step": 1028 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 5.510978733926234e-05, |
|
"loss": 1.1582, |
|
"step": 1029 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 5.4924585816427375e-05, |
|
"loss": 1.1298, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 5.473957812049252e-05, |
|
"loss": 1.1852, |
|
"step": 1031 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 5.4554765047001613e-05, |
|
"loss": 1.1534, |
|
"step": 1032 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 5.4370147390661595e-05, |
|
"loss": 1.1598, |
|
"step": 1033 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 5.418572594533908e-05, |
|
"loss": 1.1655, |
|
"step": 1034 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 5.400150150405714e-05, |
|
"loss": 1.1503, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 5.3817474858991446e-05, |
|
"loss": 1.0911, |
|
"step": 1036 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 5.363364680146725e-05, |
|
"loss": 1.1611, |
|
"step": 1037 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 5.345001812195603e-05, |
|
"loss": 1.1061, |
|
"step": 1038 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 5.3266589610071624e-05, |
|
"loss": 1.1285, |
|
"step": 1039 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 5.308336205456732e-05, |
|
"loss": 1.1658, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 5.29003362433323e-05, |
|
"loss": 1.1265, |
|
"step": 1041 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 5.271751296338823e-05, |
|
"loss": 1.1495, |
|
"step": 1042 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 5.2534893000885676e-05, |
|
"loss": 1.1719, |
|
"step": 1043 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 5.235247714110124e-05, |
|
"loss": 1.1809, |
|
"step": 1044 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 5.217026616843366e-05, |
|
"loss": 1.1127, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 5.198826086640072e-05, |
|
"loss": 1.1893, |
|
"step": 1046 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 5.180646201763577e-05, |
|
"loss": 1.1195, |
|
"step": 1047 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 5.162487040388444e-05, |
|
"loss": 1.1379, |
|
"step": 1048 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 5.144348680600122e-05, |
|
"loss": 1.1675, |
|
"step": 1049 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 5.1262312003946115e-05, |
|
"loss": 1.1418, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 5.1081346776781316e-05, |
|
"loss": 1.1956, |
|
"step": 1051 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 5.090059190266779e-05, |
|
"loss": 1.2166, |
|
"step": 1052 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 5.072004815886202e-05, |
|
"loss": 1.1348, |
|
"step": 1053 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 5.053971632171259e-05, |
|
"loss": 1.0433, |
|
"step": 1054 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 5.0359597166656826e-05, |
|
"loss": 1.1761, |
|
"step": 1055 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 5.0179691468217694e-05, |
|
"loss": 1.1838, |
|
"step": 1056 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 5.000000000000002e-05, |
|
"loss": 1.1764, |
|
"step": 1057 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.982052353468757e-05, |
|
"loss": 1.1196, |
|
"step": 1058 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.964126284403969e-05, |
|
"loss": 1.1193, |
|
"step": 1059 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.946221869888764e-05, |
|
"loss": 1.1338, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.9283391869131656e-05, |
|
"loss": 1.2031, |
|
"step": 1061 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.9104783123737566e-05, |
|
"loss": 1.1786, |
|
"step": 1062 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.892639323073336e-05, |
|
"loss": 1.1268, |
|
"step": 1063 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.874822295720581e-05, |
|
"loss": 1.1255, |
|
"step": 1064 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.85702730692976e-05, |
|
"loss": 1.1171, |
|
"step": 1065 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.839254433220355e-05, |
|
"loss": 1.1187, |
|
"step": 1066 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.821503751016746e-05, |
|
"loss": 1.142, |
|
"step": 1067 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.8037753366479085e-05, |
|
"loss": 1.0964, |
|
"step": 1068 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.7860692663470505e-05, |
|
"loss": 1.1437, |
|
"step": 1069 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.768385616251304e-05, |
|
"loss": 1.1338, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.7507244624013914e-05, |
|
"loss": 1.1956, |
|
"step": 1071 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.733085880741301e-05, |
|
"loss": 1.1274, |
|
"step": 1072 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.7154699471179575e-05, |
|
"loss": 1.1342, |
|
"step": 1073 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.697876737280901e-05, |
|
"loss": 1.1718, |
|
"step": 1074 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.680306326881954e-05, |
|
"loss": 1.1409, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.6627587914749035e-05, |
|
"loss": 1.2383, |
|
"step": 1076 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.645234206515171e-05, |
|
"loss": 1.1102, |
|
"step": 1077 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.6277326473594894e-05, |
|
"loss": 1.1378, |
|
"step": 1078 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.610254189265577e-05, |
|
"loss": 1.1069, |
|
"step": 1079 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.592798907391831e-05, |
|
"loss": 1.1332, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.575366876796966e-05, |
|
"loss": 1.1381, |
|
"step": 1081 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.5579581724397255e-05, |
|
"loss": 1.1187, |
|
"step": 1082 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.5405728691785585e-05, |
|
"loss": 1.2009, |
|
"step": 1083 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.523211041771278e-05, |
|
"loss": 1.1088, |
|
"step": 1084 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.505872764874741e-05, |
|
"loss": 1.1226, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.488558113044555e-05, |
|
"loss": 1.1039, |
|
"step": 1086 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.471267160734731e-05, |
|
"loss": 1.1254, |
|
"step": 1087 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.4539999822973553e-05, |
|
"loss": 1.1194, |
|
"step": 1088 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.436756651982311e-05, |
|
"loss": 1.178, |
|
"step": 1089 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.419537243936917e-05, |
|
"loss": 1.1473, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.402341832205631e-05, |
|
"loss": 1.1101, |
|
"step": 1091 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.385170490729712e-05, |
|
"loss": 1.1844, |
|
"step": 1092 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.368023293346937e-05, |
|
"loss": 1.1498, |
|
"step": 1093 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.3509003137912476e-05, |
|
"loss": 1.2166, |
|
"step": 1094 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.333801625692451e-05, |
|
"loss": 1.1019, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.316727302575899e-05, |
|
"loss": 1.1077, |
|
"step": 1096 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.2996774178621736e-05, |
|
"loss": 1.1669, |
|
"step": 1097 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.2826520448667696e-05, |
|
"loss": 1.0706, |
|
"step": 1098 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.265651256799781e-05, |
|
"loss": 1.1403, |
|
"step": 1099 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.2486751267655825e-05, |
|
"loss": 1.095, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.231723727762521e-05, |
|
"loss": 1.1428, |
|
"step": 1101 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.2147971326825966e-05, |
|
"loss": 1.058, |
|
"step": 1102 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.1978954143111515e-05, |
|
"loss": 1.1794, |
|
"step": 1103 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.1810186453265534e-05, |
|
"loss": 1.1375, |
|
"step": 1104 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.1641668982999005e-05, |
|
"loss": 1.0833, |
|
"step": 1105 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.1473402456946744e-05, |
|
"loss": 1.1292, |
|
"step": 1106 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.130538759866457e-05, |
|
"loss": 1.0895, |
|
"step": 1107 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.1137625130626256e-05, |
|
"loss": 1.1876, |
|
"step": 1108 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.0970115774220054e-05, |
|
"loss": 1.1388, |
|
"step": 1109 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.0802860249745936e-05, |
|
"loss": 1.163, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.063585927641243e-05, |
|
"loss": 1.1589, |
|
"step": 1111 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.046911357233343e-05, |
|
"loss": 1.1545, |
|
"step": 1112 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.0302623854525054e-05, |
|
"loss": 1.1185, |
|
"step": 1113 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.013639083890287e-05, |
|
"loss": 1.1385, |
|
"step": 1114 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 3.997041524027846e-05, |
|
"loss": 1.1622, |
|
"step": 1115 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 3.9804697772356544e-05, |
|
"loss": 1.1124, |
|
"step": 1116 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 3.963923914773187e-05, |
|
"loss": 1.0652, |
|
"step": 1117 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.9474040077886134e-05, |
|
"loss": 1.1329, |
|
"step": 1118 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.9309101273184926e-05, |
|
"loss": 1.1583, |
|
"step": 1119 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.91444234428747e-05, |
|
"loss": 1.1295, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.8980007295079704e-05, |
|
"loss": 1.1003, |
|
"step": 1121 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.8815853536798904e-05, |
|
"loss": 1.1418, |
|
"step": 1122 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.865196287390301e-05, |
|
"loss": 1.1189, |
|
"step": 1123 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.848833601113141e-05, |
|
"loss": 1.1334, |
|
"step": 1124 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.8324973652089056e-05, |
|
"loss": 1.1342, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.816187649924372e-05, |
|
"loss": 1.1097, |
|
"step": 1126 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.79990452539225e-05, |
|
"loss": 1.0721, |
|
"step": 1127 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.7836480616309244e-05, |
|
"loss": 1.1252, |
|
"step": 1128 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.767418328544142e-05, |
|
"loss": 1.1486, |
|
"step": 1129 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.75121539592069e-05, |
|
"loss": 1.1272, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.735039333434117e-05, |
|
"loss": 1.1097, |
|
"step": 1131 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.7188902106424416e-05, |
|
"loss": 1.1287, |
|
"step": 1132 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.7027680969878274e-05, |
|
"loss": 1.0658, |
|
"step": 1133 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.686673061796293e-05, |
|
"loss": 1.142, |
|
"step": 1134 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.6706051742774315e-05, |
|
"loss": 1.1876, |
|
"step": 1135 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.6545645035240915e-05, |
|
"loss": 1.0573, |
|
"step": 1136 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.638551118512089e-05, |
|
"loss": 1.1575, |
|
"step": 1137 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.622565088099911e-05, |
|
"loss": 1.101, |
|
"step": 1138 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.606606481028415e-05, |
|
"loss": 1.0853, |
|
"step": 1139 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.5906753659205395e-05, |
|
"loss": 1.1421, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.5747718112810056e-05, |
|
"loss": 1.1744, |
|
"step": 1141 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.558895885496023e-05, |
|
"loss": 1.0837, |
|
"step": 1142 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.543047656832994e-05, |
|
"loss": 1.0829, |
|
"step": 1143 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.5272271934402226e-05, |
|
"loss": 1.1592, |
|
"step": 1144 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.511434563346621e-05, |
|
"loss": 1.2019, |
|
"step": 1145 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.4956698344614126e-05, |
|
"loss": 1.1754, |
|
"step": 1146 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.479933074573858e-05, |
|
"loss": 1.1323, |
|
"step": 1147 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.464224351352929e-05, |
|
"loss": 1.165, |
|
"step": 1148 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.448543732347048e-05, |
|
"loss": 1.1201, |
|
"step": 1149 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.432891284983798e-05, |
|
"loss": 1.2426, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.417267076569602e-05, |
|
"loss": 1.1017, |
|
"step": 1151 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.401671174289469e-05, |
|
"loss": 1.1931, |
|
"step": 1152 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.386103645206679e-05, |
|
"loss": 1.1614, |
|
"step": 1153 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.370564556262523e-05, |
|
"loss": 1.0848, |
|
"step": 1154 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.3550539742759766e-05, |
|
"loss": 1.0789, |
|
"step": 1155 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.339571965943442e-05, |
|
"loss": 1.1305, |
|
"step": 1156 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.324118597838464e-05, |
|
"loss": 1.1694, |
|
"step": 1157 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.308693936411421e-05, |
|
"loss": 1.1715, |
|
"step": 1158 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.293298047989244e-05, |
|
"loss": 1.1205, |
|
"step": 1159 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.2779309987751586e-05, |
|
"loss": 1.176, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.262592854848369e-05, |
|
"loss": 1.0929, |
|
"step": 1161 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.2472836821637744e-05, |
|
"loss": 1.168, |
|
"step": 1162 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.2320035465517176e-05, |
|
"loss": 1.1996, |
|
"step": 1163 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.2167525137176655e-05, |
|
"loss": 1.1481, |
|
"step": 1164 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.2015306492419475e-05, |
|
"loss": 1.0442, |
|
"step": 1165 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.186338018579463e-05, |
|
"loss": 1.1133, |
|
"step": 1166 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.1711746870594086e-05, |
|
"loss": 1.1696, |
|
"step": 1167 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.1560407198849884e-05, |
|
"loss": 1.0999, |
|
"step": 1168 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.140936182133144e-05, |
|
"loss": 1.1434, |
|
"step": 1169 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.1258611387542614e-05, |
|
"loss": 1.146, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.110815654571907e-05, |
|
"loss": 1.141, |
|
"step": 1171 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.0957997942825336e-05, |
|
"loss": 1.1866, |
|
"step": 1172 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.0808136224552154e-05, |
|
"loss": 1.1321, |
|
"step": 1173 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.0658572035313584e-05, |
|
"loss": 1.0857, |
|
"step": 1174 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.050930601824441e-05, |
|
"loss": 1.1287, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.0360338815197097e-05, |
|
"loss": 1.1174, |
|
"step": 1176 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.021167106673928e-05, |
|
"loss": 1.1344, |
|
"step": 1177 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.0063303412150957e-05, |
|
"loss": 1.0624, |
|
"step": 1178 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 2.9915236489421684e-05, |
|
"loss": 1.1863, |
|
"step": 1179 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 2.9767470935247733e-05, |
|
"loss": 1.1361, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 2.9620007385029658e-05, |
|
"loss": 1.1829, |
|
"step": 1181 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 2.9472846472869298e-05, |
|
"loss": 1.0559, |
|
"step": 1182 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 2.932598883156704e-05, |
|
"loss": 1.1842, |
|
"step": 1183 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 2.9179435092619366e-05, |
|
"loss": 1.1078, |
|
"step": 1184 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 2.903318588621583e-05, |
|
"loss": 1.1636, |
|
"step": 1185 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 2.8887241841236524e-05, |
|
"loss": 1.1315, |
|
"step": 1186 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 2.874160358524931e-05, |
|
"loss": 1.0558, |
|
"step": 1187 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 2.8596271744507175e-05, |
|
"loss": 1.0801, |
|
"step": 1188 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 2.845124694394545e-05, |
|
"loss": 1.1187, |
|
"step": 1189 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 2.8306529807179227e-05, |
|
"loss": 1.0895, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 2.8162120956500593e-05, |
|
"loss": 1.145, |
|
"step": 1191 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 2.8018021012875994e-05, |
|
"loss": 1.0654, |
|
"step": 1192 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 2.7874230595943573e-05, |
|
"loss": 1.1296, |
|
"step": 1193 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 2.7730750324010467e-05, |
|
"loss": 1.0781, |
|
"step": 1194 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 2.758758081405015e-05, |
|
"loss": 1.1252, |
|
"step": 1195 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 2.744472268169993e-05, |
|
"loss": 1.1168, |
|
"step": 1196 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 2.7302176541257986e-05, |
|
"loss": 1.1046, |
|
"step": 1197 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 2.7159943005680997e-05, |
|
"loss": 1.1561, |
|
"step": 1198 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 2.7018022686581534e-05, |
|
"loss": 1.145, |
|
"step": 1199 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 2.687641619422512e-05, |
|
"loss": 1.0917, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 2.67351241375279e-05, |
|
"loss": 1.1322, |
|
"step": 1201 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 2.659414712405398e-05, |
|
"loss": 1.1647, |
|
"step": 1202 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 2.6453485760012718e-05, |
|
"loss": 1.2217, |
|
"step": 1203 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 2.6313140650256064e-05, |
|
"loss": 1.1871, |
|
"step": 1204 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 2.6173112398276235e-05, |
|
"loss": 1.0884, |
|
"step": 1205 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 2.603340160620281e-05, |
|
"loss": 1.1461, |
|
"step": 1206 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 2.5894008874800325e-05, |
|
"loss": 1.1003, |
|
"step": 1207 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 2.5754934803465614e-05, |
|
"loss": 1.1453, |
|
"step": 1208 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 2.5616179990225263e-05, |
|
"loss": 1.043, |
|
"step": 1209 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 2.5477745031733057e-05, |
|
"loss": 1.1628, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 2.533963052326733e-05, |
|
"loss": 1.0878, |
|
"step": 1211 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 2.5201837058728505e-05, |
|
"loss": 1.1375, |
|
"step": 1212 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 2.5064365230636498e-05, |
|
"loss": 1.1521, |
|
"step": 1213 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 2.492721563012813e-05, |
|
"loss": 1.1508, |
|
"step": 1214 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 2.479038884695468e-05, |
|
"loss": 1.2063, |
|
"step": 1215 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 2.4653885469479253e-05, |
|
"loss": 1.1038, |
|
"step": 1216 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 2.451770608467432e-05, |
|
"loss": 1.0598, |
|
"step": 1217 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 2.4381851278119118e-05, |
|
"loss": 1.1674, |
|
"step": 1218 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 2.4246321633997225e-05, |
|
"loss": 1.2076, |
|
"step": 1219 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 2.4111117735093993e-05, |
|
"loss": 1.1309, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 2.397624016279403e-05, |
|
"loss": 1.2217, |
|
"step": 1221 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 2.3841689497078746e-05, |
|
"loss": 1.09, |
|
"step": 1222 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 2.3707466316523785e-05, |
|
"loss": 1.1078, |
|
"step": 1223 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 2.3573571198296706e-05, |
|
"loss": 1.1259, |
|
"step": 1224 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 2.3440004718154218e-05, |
|
"loss": 1.1655, |
|
"step": 1225 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 2.330676745043995e-05, |
|
"loss": 1.0859, |
|
"step": 1226 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 2.3173859968081944e-05, |
|
"loss": 1.0924, |
|
"step": 1227 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 2.3041282842590097e-05, |
|
"loss": 1.1421, |
|
"step": 1228 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 2.290903664405367e-05, |
|
"loss": 1.0713, |
|
"step": 1229 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 2.277712194113909e-05, |
|
"loss": 1.0837, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 2.2645539301087238e-05, |
|
"loss": 1.0574, |
|
"step": 1231 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 2.251428928971102e-05, |
|
"loss": 1.1298, |
|
"step": 1232 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 2.2383372471393215e-05, |
|
"loss": 1.1036, |
|
"step": 1233 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 2.2252789409083708e-05, |
|
"loss": 1.094, |
|
"step": 1234 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 2.2122540664297254e-05, |
|
"loss": 1.1522, |
|
"step": 1235 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 2.1992626797111026e-05, |
|
"loss": 1.1178, |
|
"step": 1236 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 2.1863048366162208e-05, |
|
"loss": 1.1472, |
|
"step": 1237 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 2.1733805928645567e-05, |
|
"loss": 1.1826, |
|
"step": 1238 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 2.1604900040311105e-05, |
|
"loss": 1.1304, |
|
"step": 1239 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 2.1476331255461635e-05, |
|
"loss": 1.087, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 2.1348100126950398e-05, |
|
"loss": 1.1079, |
|
"step": 1241 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 2.1220207206178688e-05, |
|
"loss": 1.1205, |
|
"step": 1242 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 2.1092653043093513e-05, |
|
"loss": 1.1505, |
|
"step": 1243 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 2.0965438186185148e-05, |
|
"loss": 1.0918, |
|
"step": 1244 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 2.083856318248495e-05, |
|
"loss": 1.0422, |
|
"step": 1245 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 2.0712028577562738e-05, |
|
"loss": 1.066, |
|
"step": 1246 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 2.058583491552465e-05, |
|
"loss": 1.1057, |
|
"step": 1247 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 2.045998273901082e-05, |
|
"loss": 1.0065, |
|
"step": 1248 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 2.0334472589192933e-05, |
|
"loss": 1.0826, |
|
"step": 1249 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 2.020930500577184e-05, |
|
"loss": 1.1976, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 2.008448052697549e-05, |
|
"loss": 1.1365, |
|
"step": 1251 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.995999968955641e-05, |
|
"loss": 1.1066, |
|
"step": 1252 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.983586302878935e-05, |
|
"loss": 1.2499, |
|
"step": 1253 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.9712071078469242e-05, |
|
"loss": 1.1559, |
|
"step": 1254 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.958862437090865e-05, |
|
"loss": 1.1011, |
|
"step": 1255 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.946552343693556e-05, |
|
"loss": 1.1321, |
|
"step": 1256 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.9342768805891178e-05, |
|
"loss": 1.1361, |
|
"step": 1257 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.922036100562753e-05, |
|
"loss": 1.1542, |
|
"step": 1258 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.9098300562505266e-05, |
|
"loss": 1.1322, |
|
"step": 1259 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.897658800139139e-05, |
|
"loss": 1.1599, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.8855223845656988e-05, |
|
"loss": 1.054, |
|
"step": 1261 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.8734208617174988e-05, |
|
"loss": 1.0933, |
|
"step": 1262 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.8613542836317888e-05, |
|
"loss": 1.0936, |
|
"step": 1263 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.849322702195556e-05, |
|
"loss": 1.2249, |
|
"step": 1264 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.837326169145299e-05, |
|
"loss": 1.1735, |
|
"step": 1265 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.8253647360668126e-05, |
|
"loss": 1.1558, |
|
"step": 1266 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.8134384543949478e-05, |
|
"loss": 1.1237, |
|
"step": 1267 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.801547375413407e-05, |
|
"loss": 1.1055, |
|
"step": 1268 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.7896915502545273e-05, |
|
"loss": 1.0945, |
|
"step": 1269 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.7778710298990464e-05, |
|
"loss": 1.1356, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.7660858651758792e-05, |
|
"loss": 1.0895, |
|
"step": 1271 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.754336106761927e-05, |
|
"loss": 1.1038, |
|
"step": 1272 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.7426218051818344e-05, |
|
"loss": 1.0459, |
|
"step": 1273 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.7309430108077707e-05, |
|
"loss": 1.1342, |
|
"step": 1274 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.7192997738592375e-05, |
|
"loss": 1.1471, |
|
"step": 1275 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.707692144402828e-05, |
|
"loss": 1.1213, |
|
"step": 1276 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.696120172352025e-05, |
|
"loss": 1.1418, |
|
"step": 1277 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.6845839074669777e-05, |
|
"loss": 1.1826, |
|
"step": 1278 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.6730833993542982e-05, |
|
"loss": 1.1608, |
|
"step": 1279 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.6616186974668378e-05, |
|
"loss": 1.1427, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.6501898511034808e-05, |
|
"loss": 1.0375, |
|
"step": 1281 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.6387969094089316e-05, |
|
"loss": 1.1113, |
|
"step": 1282 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.6274399213735014e-05, |
|
"loss": 1.1357, |
|
"step": 1283 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.6161189358328978e-05, |
|
"loss": 1.0583, |
|
"step": 1284 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.6048340014680165e-05, |
|
"loss": 1.0255, |
|
"step": 1285 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.593585166804732e-05, |
|
"loss": 1.1014, |
|
"step": 1286 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.5823724802136865e-05, |
|
"loss": 1.1903, |
|
"step": 1287 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.5711959899100838e-05, |
|
"loss": 1.1096, |
|
"step": 1288 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.5600557439534812e-05, |
|
"loss": 1.1181, |
|
"step": 1289 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.5489517902475815e-05, |
|
"loss": 1.0726, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.5378841765400386e-05, |
|
"loss": 1.1566, |
|
"step": 1291 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.526852950422226e-05, |
|
"loss": 1.1924, |
|
"step": 1292 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.5158581593290588e-05, |
|
"loss": 1.1052, |
|
"step": 1293 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.5048998505387835e-05, |
|
"loss": 1.1394, |
|
"step": 1294 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.493978071172757e-05, |
|
"loss": 1.0989, |
|
"step": 1295 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.4830928681952672e-05, |
|
"loss": 1.083, |
|
"step": 1296 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.4722442884133214e-05, |
|
"loss": 1.0513, |
|
"step": 1297 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.461432378476445e-05, |
|
"loss": 1.1754, |
|
"step": 1298 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.4506571848764716e-05, |
|
"loss": 1.1161, |
|
"step": 1299 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.4399187539473668e-05, |
|
"loss": 1.1337, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.429217131865005e-05, |
|
"loss": 1.1, |
|
"step": 1301 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.4185523646469822e-05, |
|
"loss": 1.1135, |
|
"step": 1302 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.4079244981524187e-05, |
|
"loss": 1.1974, |
|
"step": 1303 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.3973335780817564e-05, |
|
"loss": 1.0519, |
|
"step": 1304 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.3867796499765684e-05, |
|
"loss": 1.1055, |
|
"step": 1305 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.3762627592193566e-05, |
|
"loss": 1.104, |
|
"step": 1306 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.3657829510333654e-05, |
|
"loss": 1.1268, |
|
"step": 1307 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.355340270482377e-05, |
|
"loss": 0.9304, |
|
"step": 1308 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.3449347624705244e-05, |
|
"loss": 1.1049, |
|
"step": 1309 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.334566471742098e-05, |
|
"loss": 1.1178, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.3242354428813497e-05, |
|
"loss": 1.1276, |
|
"step": 1311 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.3139417203123027e-05, |
|
"loss": 1.1857, |
|
"step": 1312 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.3036853482985644e-05, |
|
"loss": 1.1394, |
|
"step": 1313 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.2934663709431261e-05, |
|
"loss": 1.168, |
|
"step": 1314 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.2832848321881918e-05, |
|
"loss": 1.1279, |
|
"step": 1315 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.2731407758149639e-05, |
|
"loss": 1.1697, |
|
"step": 1316 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.263034245443473e-05, |
|
"loss": 1.1641, |
|
"step": 1317 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.2529652845323914e-05, |
|
"loss": 1.2076, |
|
"step": 1318 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.2429339363788362e-05, |
|
"loss": 1.1156, |
|
"step": 1319 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.2329402441181815e-05, |
|
"loss": 1.1279, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.2229842507238909e-05, |
|
"loss": 1.1234, |
|
"step": 1321 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.2130659990073146e-05, |
|
"loss": 1.1093, |
|
"step": 1322 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.2031855316175033e-05, |
|
"loss": 1.1171, |
|
"step": 1323 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.1933428910410493e-05, |
|
"loss": 1.0764, |
|
"step": 1324 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.1835381196018769e-05, |
|
"loss": 1.1218, |
|
"step": 1325 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.1737712594610706e-05, |
|
"loss": 1.123, |
|
"step": 1326 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.1640423526166988e-05, |
|
"loss": 1.1746, |
|
"step": 1327 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.154351440903626e-05, |
|
"loss": 1.0978, |
|
"step": 1328 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.1446985659933352e-05, |
|
"loss": 1.17, |
|
"step": 1329 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.1350837693937488e-05, |
|
"loss": 1.1079, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.1255070924490518e-05, |
|
"loss": 1.041, |
|
"step": 1331 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.1159685763395111e-05, |
|
"loss": 1.1293, |
|
"step": 1332 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.1064682620813005e-05, |
|
"loss": 1.1125, |
|
"step": 1333 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.0970061905263229e-05, |
|
"loss": 1.1263, |
|
"step": 1334 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.087582402362034e-05, |
|
"loss": 1.1774, |
|
"step": 1335 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.0781969381112755e-05, |
|
"loss": 1.0214, |
|
"step": 1336 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.0688498381320855e-05, |
|
"loss": 1.2096, |
|
"step": 1337 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.059541142617535e-05, |
|
"loss": 1.1316, |
|
"step": 1338 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.0502708915955595e-05, |
|
"loss": 1.0732, |
|
"step": 1339 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.0410391249287788e-05, |
|
"loss": 1.0472, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.0318458823143174e-05, |
|
"loss": 1.1808, |
|
"step": 1341 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.0226912032836611e-05, |
|
"loss": 1.1432, |
|
"step": 1342 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.0135751272024607e-05, |
|
"loss": 1.0643, |
|
"step": 1343 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.0044976932703686e-05, |
|
"loss": 1.0752, |
|
"step": 1344 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 9.954589405208791e-06, |
|
"loss": 1.1056, |
|
"step": 1345 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 9.864589078211572e-06, |
|
"loss": 1.1063, |
|
"step": 1346 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 9.774976338718677e-06, |
|
"loss": 0.9705, |
|
"step": 1347 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 9.685751572070024e-06, |
|
"loss": 1.1154, |
|
"step": 1348 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 9.596915161937347e-06, |
|
"loss": 1.1411, |
|
"step": 1349 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 9.50846749032237e-06, |
|
"loss": 1.1222, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 9.420408937555203e-06, |
|
"loss": 1.0753, |
|
"step": 1351 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 9.332739882292752e-06, |
|
"loss": 1.1313, |
|
"step": 1352 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 9.24546070151704e-06, |
|
"loss": 1.1444, |
|
"step": 1353 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 9.158571770533619e-06, |
|
"loss": 1.063, |
|
"step": 1354 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 9.072073462969944e-06, |
|
"loss": 1.1243, |
|
"step": 1355 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 8.98596615077376e-06, |
|
"loss": 1.0975, |
|
"step": 1356 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 8.900250204211514e-06, |
|
"loss": 1.1384, |
|
"step": 1357 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 8.814925991866751e-06, |
|
"loss": 1.1802, |
|
"step": 1358 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 8.729993880638555e-06, |
|
"loss": 1.103, |
|
"step": 1359 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 8.645454235739903e-06, |
|
"loss": 1.1226, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 8.561307420696252e-06, |
|
"loss": 1.0923, |
|
"step": 1361 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 8.47755379734373e-06, |
|
"loss": 1.0684, |
|
"step": 1362 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 8.394193725827792e-06, |
|
"loss": 1.123, |
|
"step": 1363 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 8.311227564601642e-06, |
|
"loss": 1.0881, |
|
"step": 1364 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 8.228655670424535e-06, |
|
"loss": 1.0903, |
|
"step": 1365 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 8.146478398360414e-06, |
|
"loss": 1.1017, |
|
"step": 1366 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 8.064696101776358e-06, |
|
"loss": 1.1592, |
|
"step": 1367 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 7.98330913234101e-06, |
|
"loss": 1.0905, |
|
"step": 1368 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 7.902317840023e-06, |
|
"loss": 1.0947, |
|
"step": 1369 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 7.821722573089641e-06, |
|
"loss": 1.0614, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 7.741523678105257e-06, |
|
"loss": 1.1547, |
|
"step": 1371 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 7.661721499929753e-06, |
|
"loss": 1.071, |
|
"step": 1372 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 7.582316381717125e-06, |
|
"loss": 1.0781, |
|
"step": 1373 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 7.503308664913999e-06, |
|
"loss": 1.082, |
|
"step": 1374 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 7.424698689258158e-06, |
|
"loss": 1.1213, |
|
"step": 1375 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 7.346486792777052e-06, |
|
"loss": 1.0657, |
|
"step": 1376 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 7.2686733117863784e-06, |
|
"loss": 1.0945, |
|
"step": 1377 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 7.191258580888638e-06, |
|
"loss": 1.1382, |
|
"step": 1378 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 7.114242932971671e-06, |
|
"loss": 1.1581, |
|
"step": 1379 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 7.037626699207234e-06, |
|
"loss": 1.1543, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 6.961410209049568e-06, |
|
"loss": 1.0864, |
|
"step": 1381 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 6.8855937902340576e-06, |
|
"loss": 1.0708, |
|
"step": 1382 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 6.8101777687756605e-06, |
|
"loss": 1.1335, |
|
"step": 1383 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 6.735162468967649e-06, |
|
"loss": 1.1222, |
|
"step": 1384 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 6.660548213380224e-06, |
|
"loss": 1.0801, |
|
"step": 1385 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 6.586335322858972e-06, |
|
"loss": 1.1293, |
|
"step": 1386 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 6.512524116523633e-06, |
|
"loss": 1.0828, |
|
"step": 1387 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 6.439114911766708e-06, |
|
"loss": 1.1077, |
|
"step": 1388 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 6.36610802425206e-06, |
|
"loss": 1.1335, |
|
"step": 1389 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 6.293503767913478e-06, |
|
"loss": 1.0741, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 6.221302454953548e-06, |
|
"loss": 1.1428, |
|
"step": 1391 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 6.149504395842087e-06, |
|
"loss": 1.1523, |
|
"step": 1392 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 6.078109899314921e-06, |
|
"loss": 1.1409, |
|
"step": 1393 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 6.007119272372541e-06, |
|
"loss": 1.16, |
|
"step": 1394 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 5.936532820278751e-06, |
|
"loss": 1.1863, |
|
"step": 1395 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 5.8663508465594204e-06, |
|
"loss": 1.1307, |
|
"step": 1396 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 5.7965736530010916e-06, |
|
"loss": 1.1085, |
|
"step": 1397 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 5.727201539649762e-06, |
|
"loss": 1.1204, |
|
"step": 1398 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 5.658234804809548e-06, |
|
"loss": 1.1105, |
|
"step": 1399 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 5.589673745041424e-06, |
|
"loss": 1.1074, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 5.521518655161917e-06, |
|
"loss": 1.1377, |
|
"step": 1401 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 5.453769828241872e-06, |
|
"loss": 1.1239, |
|
"step": 1402 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 5.386427555605211e-06, |
|
"loss": 1.1853, |
|
"step": 1403 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 5.319492126827585e-06, |
|
"loss": 1.0693, |
|
"step": 1404 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 5.252963829735247e-06, |
|
"loss": 1.1064, |
|
"step": 1405 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 5.186842950403737e-06, |
|
"loss": 1.137, |
|
"step": 1406 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 5.121129773156663e-06, |
|
"loss": 1.1108, |
|
"step": 1407 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 5.055824580564517e-06, |
|
"loss": 1.106, |
|
"step": 1408 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 4.990927653443423e-06, |
|
"loss": 1.127, |
|
"step": 1409 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 4.926439270853955e-06, |
|
"loss": 1.0995, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 4.862359710099873e-06, |
|
"loss": 1.1565, |
|
"step": 1411 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 4.798689246727006e-06, |
|
"loss": 1.0658, |
|
"step": 1412 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 4.735428154522092e-06, |
|
"loss": 1.1161, |
|
"step": 1413 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 4.672576705511478e-06, |
|
"loss": 1.1477, |
|
"step": 1414 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.610135169960028e-06, |
|
"loss": 1.0669, |
|
"step": 1415 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.5481038163700085e-06, |
|
"loss": 1.0856, |
|
"step": 1416 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.486482911479839e-06, |
|
"loss": 1.1489, |
|
"step": 1417 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.425272720262941e-06, |
|
"loss": 1.1494, |
|
"step": 1418 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.3644735059267585e-06, |
|
"loss": 1.1315, |
|
"step": 1419 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.304085529911406e-06, |
|
"loss": 1.1737, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.244109051888712e-06, |
|
"loss": 1.0747, |
|
"step": 1421 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.184544329761009e-06, |
|
"loss": 1.1371, |
|
"step": 1422 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.125391619660057e-06, |
|
"loss": 1.1001, |
|
"step": 1423 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.066651175945946e-06, |
|
"loss": 1.1176, |
|
"step": 1424 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.0083232512060035e-06, |
|
"loss": 1.1436, |
|
"step": 1425 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 3.950408096253699e-06, |
|
"loss": 1.0927, |
|
"step": 1426 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 3.892905960127546e-06, |
|
"loss": 1.1204, |
|
"step": 1427 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 3.8358170900900995e-06, |
|
"loss": 1.1904, |
|
"step": 1428 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 3.77914173162679e-06, |
|
"loss": 1.0658, |
|
"step": 1429 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.72288012844495e-06, |
|
"loss": 1.1394, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.6670325224727907e-06, |
|
"loss": 1.095, |
|
"step": 1431 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.611599153858214e-06, |
|
"loss": 1.1508, |
|
"step": 1432 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.556580260967923e-06, |
|
"loss": 1.1062, |
|
"step": 1433 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.5019760803863823e-06, |
|
"loss": 1.0555, |
|
"step": 1434 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.4477868469147466e-06, |
|
"loss": 1.0875, |
|
"step": 1435 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.394012793569823e-06, |
|
"loss": 1.0709, |
|
"step": 1436 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.3406541515832003e-06, |
|
"loss": 1.1849, |
|
"step": 1437 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.2877111504001522e-06, |
|
"loss": 1.1282, |
|
"step": 1438 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.2351840176786164e-06, |
|
"loss": 1.0425, |
|
"step": 1439 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.1830729792883596e-06, |
|
"loss": 1.101, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.1313782593098474e-06, |
|
"loss": 1.1094, |
|
"step": 1441 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.0801000800333877e-06, |
|
"loss": 1.1074, |
|
"step": 1442 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.0292386619581337e-06, |
|
"loss": 1.0964, |
|
"step": 1443 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 2.978794223791137e-06, |
|
"loss": 1.1046, |
|
"step": 1444 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 2.9287669824464072e-06, |
|
"loss": 1.1477, |
|
"step": 1445 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 2.8791571530439985e-06, |
|
"loss": 1.1431, |
|
"step": 1446 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 2.8299649489090475e-06, |
|
"loss": 1.111, |
|
"step": 1447 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 2.781190581570903e-06, |
|
"loss": 1.1109, |
|
"step": 1448 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 2.732834260762174e-06, |
|
"loss": 1.183, |
|
"step": 1449 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 2.684896194417841e-06, |
|
"loss": 1.1298, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 2.6373765886743785e-06, |
|
"loss": 1.1205, |
|
"step": 1451 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 2.590275647868867e-06, |
|
"loss": 1.1281, |
|
"step": 1452 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 2.5435935745380835e-06, |
|
"loss": 1.1438, |
|
"step": 1453 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 2.497330569417633e-06, |
|
"loss": 1.0519, |
|
"step": 1454 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 2.451486831441152e-06, |
|
"loss": 1.0969, |
|
"step": 1455 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 2.406062557739375e-06, |
|
"loss": 1.1803, |
|
"step": 1456 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 2.3610579436393e-06, |
|
"loss": 1.1187, |
|
"step": 1457 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 2.3164731826633925e-06, |
|
"loss": 1.1517, |
|
"step": 1458 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 2.2723084665287385e-06, |
|
"loss": 1.1587, |
|
"step": 1459 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 2.228563985146137e-06, |
|
"loss": 1.1232, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 2.1852399266194314e-06, |
|
"loss": 1.1198, |
|
"step": 1461 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 2.1423364772445887e-06, |
|
"loss": 1.1532, |
|
"step": 1462 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 2.0998538215089348e-06, |
|
"loss": 1.1666, |
|
"step": 1463 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 2.0577921420903535e-06, |
|
"loss": 1.1318, |
|
"step": 1464 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 2.0161516198564988e-06, |
|
"loss": 1.1309, |
|
"step": 1465 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.9749324338640517e-06, |
|
"loss": 1.1505, |
|
"step": 1466 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.9341347613579087e-06, |
|
"loss": 1.1744, |
|
"step": 1467 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.8937587777704157e-06, |
|
"loss": 1.1532, |
|
"step": 1468 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.8538046567206591e-06, |
|
"loss": 1.1746, |
|
"step": 1469 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.8142725700136865e-06, |
|
"loss": 1.0931, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.7751626876397532e-06, |
|
"loss": 1.1063, |
|
"step": 1471 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.7364751777736332e-06, |
|
"loss": 1.0747, |
|
"step": 1472 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.6982102067738758e-06, |
|
"loss": 1.0687, |
|
"step": 1473 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.6603679391820616e-06, |
|
"loss": 1.1362, |
|
"step": 1474 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.6229485377221576e-06, |
|
"loss": 1.115, |
|
"step": 1475 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.5859521632997533e-06, |
|
"loss": 1.1656, |
|
"step": 1476 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.5493789750014031e-06, |
|
"loss": 1.0537, |
|
"step": 1477 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.5132291300939628e-06, |
|
"loss": 1.0706, |
|
"step": 1478 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.4775027840238431e-06, |
|
"loss": 1.076, |
|
"step": 1479 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.4422000904164345e-06, |
|
"loss": 1.0984, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.40732120107534e-06, |
|
"loss": 1.1323, |
|
"step": 1481 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.3728662659818204e-06, |
|
"loss": 1.1809, |
|
"step": 1482 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.3388354332941166e-06, |
|
"loss": 1.1897, |
|
"step": 1483 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.3052288493467735e-06, |
|
"loss": 1.1423, |
|
"step": 1484 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.272046658650039e-06, |
|
"loss": 1.1757, |
|
"step": 1485 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.239289003889288e-06, |
|
"loss": 1.104, |
|
"step": 1486 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.2069560259243328e-06, |
|
"loss": 1.087, |
|
"step": 1487 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.1750478637888473e-06, |
|
"loss": 1.1382, |
|
"step": 1488 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.143564654689777e-06, |
|
"loss": 1.0892, |
|
"step": 1489 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.1125065340067743e-06, |
|
"loss": 1.1399, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.0818736352915527e-06, |
|
"loss": 1.1483, |
|
"step": 1491 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.0516660902673448e-06, |
|
"loss": 1.0756, |
|
"step": 1492 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.0218840288283571e-06, |
|
"loss": 1.1632, |
|
"step": 1493 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 9.92527579039193e-07, |
|
"loss": 1.0754, |
|
"step": 1494 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 9.635968671342754e-07, |
|
"loss": 1.1562, |
|
"step": 1495 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 9.350920175173694e-07, |
|
"loss": 1.0787, |
|
"step": 1496 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 9.070131527609604e-07, |
|
"loss": 1.1334, |
|
"step": 1497 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 8.793603936058104e-07, |
|
"loss": 1.1202, |
|
"step": 1498 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 8.521338589603911e-07, |
|
"loss": 1.1241, |
|
"step": 1499 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 8.253336659003741e-07, |
|
"loss": 1.1545, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 7.989599296681749e-07, |
|
"loss": 1.1483, |
|
"step": 1501 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 7.730127636723539e-07, |
|
"loss": 1.0649, |
|
"step": 1502 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 7.474922794872497e-07, |
|
"loss": 1.093, |
|
"step": 1503 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 7.223985868524241e-07, |
|
"loss": 1.1648, |
|
"step": 1504 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 6.977317936722294e-07, |
|
"loss": 1.095, |
|
"step": 1505 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 6.734920060152861e-07, |
|
"loss": 1.2029, |
|
"step": 1506 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 6.496793281141056e-07, |
|
"loss": 1.1561, |
|
"step": 1507 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 6.262938623645908e-07, |
|
"loss": 1.1479, |
|
"step": 1508 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 6.033357093255809e-07, |
|
"loss": 1.0747, |
|
"step": 1509 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 5.808049677184846e-07, |
|
"loss": 1.1157, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 5.587017344267919e-07, |
|
"loss": 1.167, |
|
"step": 1511 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 5.370261044956971e-07, |
|
"loss": 1.1707, |
|
"step": 1512 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 5.157781711316645e-07, |
|
"loss": 1.123, |
|
"step": 1513 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 4.949580257020525e-07, |
|
"loss": 1.0945, |
|
"step": 1514 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 4.745657577346796e-07, |
|
"loss": 1.0894, |
|
"step": 1515 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 4.546014549174915e-07, |
|
"loss": 1.1667, |
|
"step": 1516 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 4.3506520309813947e-07, |
|
"loss": 1.1136, |
|
"step": 1517 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 4.1595708628366926e-07, |
|
"loss": 1.1222, |
|
"step": 1518 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 3.97277186640066e-07, |
|
"loss": 1.1266, |
|
"step": 1519 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 3.7902558449198765e-07, |
|
"loss": 1.1196, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 3.612023583223878e-07, |
|
"loss": 1.0963, |
|
"step": 1521 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 3.4380758477219333e-07, |
|
"loss": 1.1187, |
|
"step": 1522 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 3.2684133863991604e-07, |
|
"loss": 1.1245, |
|
"step": 1523 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 3.1030369288139736e-07, |
|
"loss": 1.0632, |
|
"step": 1524 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 2.941947186094973e-07, |
|
"loss": 1.1425, |
|
"step": 1525 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 2.7851448509372826e-07, |
|
"loss": 1.0514, |
|
"step": 1526 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 2.6326305976001055e-07, |
|
"loss": 1.1483, |
|
"step": 1527 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 2.484405081903507e-07, |
|
"loss": 1.1021, |
|
"step": 1528 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 2.3404689412258596e-07, |
|
"loss": 1.1647, |
|
"step": 1529 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 2.2008227945009563e-07, |
|
"loss": 1.0427, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 2.0654672422152356e-07, |
|
"loss": 1.1174, |
|
"step": 1531 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.9344028664056713e-07, |
|
"loss": 1.0723, |
|
"step": 1532 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.807630230656443e-07, |
|
"loss": 1.1381, |
|
"step": 1533 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.6851498800973808e-07, |
|
"loss": 1.1365, |
|
"step": 1534 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.5669623414011901e-07, |
|
"loss": 1.0962, |
|
"step": 1535 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.4530681227808984e-07, |
|
"loss": 1.0846, |
|
"step": 1536 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.3434677139885222e-07, |
|
"loss": 1.1132, |
|
"step": 1537 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.2381615863120699e-07, |
|
"loss": 1.1, |
|
"step": 1538 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.137150192573988e-07, |
|
"loss": 1.1193, |
|
"step": 1539 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.0404339671291618e-07, |
|
"loss": 1.1172, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 9.480133258630286e-08, |
|
"loss": 1.0637, |
|
"step": 1541 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 8.598886661895788e-08, |
|
"loss": 1.0847, |
|
"step": 1542 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 7.760603670501354e-08, |
|
"loss": 1.1638, |
|
"step": 1543 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 6.965287889112438e-08, |
|
"loss": 1.0765, |
|
"step": 1544 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 6.212942737632288e-08, |
|
"loss": 1.0667, |
|
"step": 1545 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 5.503571451188627e-08, |
|
"loss": 1.1353, |
|
"step": 1546 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 4.837177080119215e-08, |
|
"loss": 1.1317, |
|
"step": 1547 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 4.2137624899596387e-08, |
|
"loss": 1.1282, |
|
"step": 1548 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 3.633330361427767e-08, |
|
"loss": 1.0785, |
|
"step": 1549 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 3.0958831904170926e-08, |
|
"loss": 1.0884, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 2.601423287978966e-08, |
|
"loss": 1.1924, |
|
"step": 1551 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 2.1499527803214846e-08, |
|
"loss": 1.0063, |
|
"step": 1552 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.7414736087950633e-08, |
|
"loss": 1.1067, |
|
"step": 1553 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.3759875298813286e-08, |
|
"loss": 1.1364, |
|
"step": 1554 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.05349611519312e-08, |
|
"loss": 1.1656, |
|
"step": 1555 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 7.740007514622782e-09, |
|
"loss": 1.1687, |
|
"step": 1556 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 5.375026405352035e-09, |
|
"loss": 1.1046, |
|
"step": 1557 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.4400279936619427e-09, |
|
"loss": 1.1127, |
|
"step": 1558 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.935020600174475e-09, |
|
"loss": 1.0908, |
|
"step": 1559 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 8.600106965128696e-10, |
|
"loss": 1.0944, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 2.1500290525722223e-10, |
|
"loss": 1.1345, |
|
"step": 1561 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 0.0, |
|
"loss": 1.1343, |
|
"step": 1562 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"step": 1562, |
|
"total_flos": 1820945008820224.0, |
|
"train_loss": 1.1780791337603034, |
|
"train_runtime": 25885.7958, |
|
"train_samples_per_second": 7.727, |
|
"train_steps_per_second": 0.06 |
|
} |
|
], |
|
"max_steps": 1562, |
|
"num_train_epochs": 1, |
|
"total_flos": 1820945008820224.0, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|