| { |
| "best_global_step": null, |
| "best_metric": null, |
| "best_model_checkpoint": null, |
| "epoch": 0.7806973218797373, |
| "eval_steps": 500, |
| "global_step": 1545, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.0005053057099545225, |
| "grad_norm": 1.6384633779525757, |
| "learning_rate": 0.0, |
| "loss": 0.4704, |
| "step": 1 |
| }, |
| { |
| "epoch": 0.001010611419909045, |
| "grad_norm": 2.194593906402588, |
| "learning_rate": 6.41025641025641e-07, |
| "loss": 0.4286, |
| "step": 2 |
| }, |
| { |
| "epoch": 0.0015159171298635675, |
| "grad_norm": 3.6220381259918213, |
| "learning_rate": 1.282051282051282e-06, |
| "loss": 0.8297, |
| "step": 3 |
| }, |
| { |
| "epoch": 0.00202122283981809, |
| "grad_norm": 1.929093837738037, |
| "learning_rate": 1.9230769230769234e-06, |
| "loss": 0.6096, |
| "step": 4 |
| }, |
| { |
| "epoch": 0.0025265285497726125, |
| "grad_norm": 3.1244475841522217, |
| "learning_rate": 2.564102564102564e-06, |
| "loss": 0.8738, |
| "step": 5 |
| }, |
| { |
| "epoch": 0.003031834259727135, |
| "grad_norm": 3.0733749866485596, |
| "learning_rate": 3.205128205128205e-06, |
| "loss": 0.668, |
| "step": 6 |
| }, |
| { |
| "epoch": 0.0035371399696816574, |
| "grad_norm": 1.7501100301742554, |
| "learning_rate": 3.846153846153847e-06, |
| "loss": 0.5054, |
| "step": 7 |
| }, |
| { |
| "epoch": 0.00404244567963618, |
| "grad_norm": 2.8912413120269775, |
| "learning_rate": 4.487179487179488e-06, |
| "loss": 0.8734, |
| "step": 8 |
| }, |
| { |
| "epoch": 0.004547751389590703, |
| "grad_norm": 1.4918310642242432, |
| "learning_rate": 5.128205128205128e-06, |
| "loss": 0.8562, |
| "step": 9 |
| }, |
| { |
| "epoch": 0.005053057099545225, |
| "grad_norm": 2.0631442070007324, |
| "learning_rate": 5.76923076923077e-06, |
| "loss": 0.7117, |
| "step": 10 |
| }, |
| { |
| "epoch": 0.0055583628094997475, |
| "grad_norm": 1.4029316902160645, |
| "learning_rate": 6.41025641025641e-06, |
| "loss": 0.5705, |
| "step": 11 |
| }, |
| { |
| "epoch": 0.00606366851945427, |
| "grad_norm": 1.4444737434387207, |
| "learning_rate": 7.051282051282052e-06, |
| "loss": 0.661, |
| "step": 12 |
| }, |
| { |
| "epoch": 0.006568974229408792, |
| "grad_norm": 1.2494786977767944, |
| "learning_rate": 7.692307692307694e-06, |
| "loss": 0.7558, |
| "step": 13 |
| }, |
| { |
| "epoch": 0.007074279939363315, |
| "grad_norm": 1.3939992189407349, |
| "learning_rate": 8.333333333333334e-06, |
| "loss": 0.5186, |
| "step": 14 |
| }, |
| { |
| "epoch": 0.007579585649317837, |
| "grad_norm": 1.0526946783065796, |
| "learning_rate": 8.974358974358976e-06, |
| "loss": 0.543, |
| "step": 15 |
| }, |
| { |
| "epoch": 0.00808489135927236, |
| "grad_norm": 0.7698296904563904, |
| "learning_rate": 9.615384615384616e-06, |
| "loss": 0.366, |
| "step": 16 |
| }, |
| { |
| "epoch": 0.008590197069226882, |
| "grad_norm": 1.036993145942688, |
| "learning_rate": 1.0256410256410256e-05, |
| "loss": 0.4858, |
| "step": 17 |
| }, |
| { |
| "epoch": 0.009095502779181405, |
| "grad_norm": 1.1180205345153809, |
| "learning_rate": 1.0897435897435898e-05, |
| "loss": 0.5841, |
| "step": 18 |
| }, |
| { |
| "epoch": 0.009600808489135927, |
| "grad_norm": 0.8241035342216492, |
| "learning_rate": 1.153846153846154e-05, |
| "loss": 0.5563, |
| "step": 19 |
| }, |
| { |
| "epoch": 0.01010611419909045, |
| "grad_norm": 0.6611151695251465, |
| "learning_rate": 1.217948717948718e-05, |
| "loss": 0.3478, |
| "step": 20 |
| }, |
| { |
| "epoch": 0.010611419909044972, |
| "grad_norm": 0.4365403950214386, |
| "learning_rate": 1.282051282051282e-05, |
| "loss": 0.2442, |
| "step": 21 |
| }, |
| { |
| "epoch": 0.011116725618999495, |
| "grad_norm": 0.7351874709129333, |
| "learning_rate": 1.3461538461538462e-05, |
| "loss": 0.3456, |
| "step": 22 |
| }, |
| { |
| "epoch": 0.011622031328954016, |
| "grad_norm": 0.55842524766922, |
| "learning_rate": 1.4102564102564104e-05, |
| "loss": 0.3809, |
| "step": 23 |
| }, |
| { |
| "epoch": 0.01212733703890854, |
| "grad_norm": 0.9409593343734741, |
| "learning_rate": 1.4743589743589745e-05, |
| "loss": 0.4769, |
| "step": 24 |
| }, |
| { |
| "epoch": 0.012632642748863061, |
| "grad_norm": 0.803429901599884, |
| "learning_rate": 1.5384615384615387e-05, |
| "loss": 0.5099, |
| "step": 25 |
| }, |
| { |
| "epoch": 0.013137948458817585, |
| "grad_norm": 0.9147859215736389, |
| "learning_rate": 1.602564102564103e-05, |
| "loss": 0.5725, |
| "step": 26 |
| }, |
| { |
| "epoch": 0.013643254168772108, |
| "grad_norm": 0.6797801852226257, |
| "learning_rate": 1.6666666666666667e-05, |
| "loss": 0.3108, |
| "step": 27 |
| }, |
| { |
| "epoch": 0.01414855987872663, |
| "grad_norm": 0.6329852342605591, |
| "learning_rate": 1.730769230769231e-05, |
| "loss": 0.4665, |
| "step": 28 |
| }, |
| { |
| "epoch": 0.014653865588681153, |
| "grad_norm": 0.49528101086616516, |
| "learning_rate": 1.794871794871795e-05, |
| "loss": 0.3252, |
| "step": 29 |
| }, |
| { |
| "epoch": 0.015159171298635674, |
| "grad_norm": 0.6480412483215332, |
| "learning_rate": 1.858974358974359e-05, |
| "loss": 0.4001, |
| "step": 30 |
| }, |
| { |
| "epoch": 0.015664477008590198, |
| "grad_norm": 1.0041990280151367, |
| "learning_rate": 1.923076923076923e-05, |
| "loss": 0.635, |
| "step": 31 |
| }, |
| { |
| "epoch": 0.01616978271854472, |
| "grad_norm": 0.7632623314857483, |
| "learning_rate": 1.987179487179487e-05, |
| "loss": 0.3599, |
| "step": 32 |
| }, |
| { |
| "epoch": 0.01667508842849924, |
| "grad_norm": 0.8572860360145569, |
| "learning_rate": 2.0512820512820512e-05, |
| "loss": 0.4534, |
| "step": 33 |
| }, |
| { |
| "epoch": 0.017180394138453764, |
| "grad_norm": 0.9279261231422424, |
| "learning_rate": 2.1153846153846154e-05, |
| "loss": 0.4895, |
| "step": 34 |
| }, |
| { |
| "epoch": 0.017685699848408287, |
| "grad_norm": 0.5363617539405823, |
| "learning_rate": 2.1794871794871795e-05, |
| "loss": 0.284, |
| "step": 35 |
| }, |
| { |
| "epoch": 0.01819100555836281, |
| "grad_norm": 0.6156304478645325, |
| "learning_rate": 2.2435897435897437e-05, |
| "loss": 0.3305, |
| "step": 36 |
| }, |
| { |
| "epoch": 0.01869631126831733, |
| "grad_norm": 0.8967059850692749, |
| "learning_rate": 2.307692307692308e-05, |
| "loss": 0.5963, |
| "step": 37 |
| }, |
| { |
| "epoch": 0.019201616978271854, |
| "grad_norm": 0.8374812006950378, |
| "learning_rate": 2.3717948717948718e-05, |
| "loss": 0.3155, |
| "step": 38 |
| }, |
| { |
| "epoch": 0.019706922688226377, |
| "grad_norm": 0.7348405718803406, |
| "learning_rate": 2.435897435897436e-05, |
| "loss": 0.3585, |
| "step": 39 |
| }, |
| { |
| "epoch": 0.0202122283981809, |
| "grad_norm": 0.7170049548149109, |
| "learning_rate": 2.5e-05, |
| "loss": 0.3285, |
| "step": 40 |
| }, |
| { |
| "epoch": 0.020717534108135423, |
| "grad_norm": 0.7275897860527039, |
| "learning_rate": 2.564102564102564e-05, |
| "loss": 0.3612, |
| "step": 41 |
| }, |
| { |
| "epoch": 0.021222839818089943, |
| "grad_norm": 0.7469742298126221, |
| "learning_rate": 2.6282051282051285e-05, |
| "loss": 0.3367, |
| "step": 42 |
| }, |
| { |
| "epoch": 0.021728145528044467, |
| "grad_norm": 0.6935989856719971, |
| "learning_rate": 2.6923076923076923e-05, |
| "loss": 0.2455, |
| "step": 43 |
| }, |
| { |
| "epoch": 0.02223345123799899, |
| "grad_norm": 0.7077957391738892, |
| "learning_rate": 2.756410256410257e-05, |
| "loss": 0.4161, |
| "step": 44 |
| }, |
| { |
| "epoch": 0.022738756947953513, |
| "grad_norm": 0.38968655467033386, |
| "learning_rate": 2.8205128205128207e-05, |
| "loss": 0.1841, |
| "step": 45 |
| }, |
| { |
| "epoch": 0.023244062657908033, |
| "grad_norm": 0.9259281158447266, |
| "learning_rate": 2.8846153846153845e-05, |
| "loss": 0.4498, |
| "step": 46 |
| }, |
| { |
| "epoch": 0.023749368367862556, |
| "grad_norm": 1.0964964628219604, |
| "learning_rate": 2.948717948717949e-05, |
| "loss": 0.5429, |
| "step": 47 |
| }, |
| { |
| "epoch": 0.02425467407781708, |
| "grad_norm": 0.4879132807254791, |
| "learning_rate": 3.012820512820513e-05, |
| "loss": 0.2142, |
| "step": 48 |
| }, |
| { |
| "epoch": 0.024759979787771603, |
| "grad_norm": 0.6453592777252197, |
| "learning_rate": 3.0769230769230774e-05, |
| "loss": 0.2836, |
| "step": 49 |
| }, |
| { |
| "epoch": 0.025265285497726123, |
| "grad_norm": 0.5058504343032837, |
| "learning_rate": 3.141025641025641e-05, |
| "loss": 0.2157, |
| "step": 50 |
| }, |
| { |
| "epoch": 0.025770591207680646, |
| "grad_norm": 0.8575659990310669, |
| "learning_rate": 3.205128205128206e-05, |
| "loss": 0.6295, |
| "step": 51 |
| }, |
| { |
| "epoch": 0.02627589691763517, |
| "grad_norm": 0.48030632734298706, |
| "learning_rate": 3.269230769230769e-05, |
| "loss": 0.2874, |
| "step": 52 |
| }, |
| { |
| "epoch": 0.026781202627589692, |
| "grad_norm": 0.8366153240203857, |
| "learning_rate": 3.3333333333333335e-05, |
| "loss": 0.4651, |
| "step": 53 |
| }, |
| { |
| "epoch": 0.027286508337544216, |
| "grad_norm": 0.5325554609298706, |
| "learning_rate": 3.397435897435898e-05, |
| "loss": 0.2285, |
| "step": 54 |
| }, |
| { |
| "epoch": 0.027791814047498736, |
| "grad_norm": 0.764580488204956, |
| "learning_rate": 3.461538461538462e-05, |
| "loss": 0.428, |
| "step": 55 |
| }, |
| { |
| "epoch": 0.02829711975745326, |
| "grad_norm": 0.8604835867881775, |
| "learning_rate": 3.525641025641026e-05, |
| "loss": 0.5092, |
| "step": 56 |
| }, |
| { |
| "epoch": 0.028802425467407782, |
| "grad_norm": 2.482994794845581, |
| "learning_rate": 3.58974358974359e-05, |
| "loss": 0.4473, |
| "step": 57 |
| }, |
| { |
| "epoch": 0.029307731177362305, |
| "grad_norm": 0.8809303045272827, |
| "learning_rate": 3.653846153846154e-05, |
| "loss": 0.5762, |
| "step": 58 |
| }, |
| { |
| "epoch": 0.029813036887316825, |
| "grad_norm": 0.43776047229766846, |
| "learning_rate": 3.717948717948718e-05, |
| "loss": 0.1808, |
| "step": 59 |
| }, |
| { |
| "epoch": 0.03031834259727135, |
| "grad_norm": 0.6836262941360474, |
| "learning_rate": 3.782051282051282e-05, |
| "loss": 0.2987, |
| "step": 60 |
| }, |
| { |
| "epoch": 0.030823648307225872, |
| "grad_norm": 0.5660818815231323, |
| "learning_rate": 3.846153846153846e-05, |
| "loss": 0.2198, |
| "step": 61 |
| }, |
| { |
| "epoch": 0.031328954017180395, |
| "grad_norm": 0.4436876177787781, |
| "learning_rate": 3.9102564102564105e-05, |
| "loss": 0.1963, |
| "step": 62 |
| }, |
| { |
| "epoch": 0.03183425972713492, |
| "grad_norm": 0.7230072617530823, |
| "learning_rate": 3.974358974358974e-05, |
| "loss": 0.4539, |
| "step": 63 |
| }, |
| { |
| "epoch": 0.03233956543708944, |
| "grad_norm": 0.9129766225814819, |
| "learning_rate": 4.038461538461539e-05, |
| "loss": 0.3737, |
| "step": 64 |
| }, |
| { |
| "epoch": 0.032844871147043965, |
| "grad_norm": 1.1869587898254395, |
| "learning_rate": 4.1025641025641023e-05, |
| "loss": 0.696, |
| "step": 65 |
| }, |
| { |
| "epoch": 0.03335017685699848, |
| "grad_norm": 0.7398460507392883, |
| "learning_rate": 4.166666666666667e-05, |
| "loss": 0.5932, |
| "step": 66 |
| }, |
| { |
| "epoch": 0.033855482566953005, |
| "grad_norm": 0.7067481875419617, |
| "learning_rate": 4.230769230769231e-05, |
| "loss": 0.3213, |
| "step": 67 |
| }, |
| { |
| "epoch": 0.03436078827690753, |
| "grad_norm": 0.9902649521827698, |
| "learning_rate": 4.294871794871795e-05, |
| "loss": 0.416, |
| "step": 68 |
| }, |
| { |
| "epoch": 0.03486609398686205, |
| "grad_norm": 0.42111849784851074, |
| "learning_rate": 4.358974358974359e-05, |
| "loss": 0.2465, |
| "step": 69 |
| }, |
| { |
| "epoch": 0.035371399696816574, |
| "grad_norm": 0.9634829759597778, |
| "learning_rate": 4.423076923076923e-05, |
| "loss": 0.5916, |
| "step": 70 |
| }, |
| { |
| "epoch": 0.0358767054067711, |
| "grad_norm": 0.8240818977355957, |
| "learning_rate": 4.4871794871794874e-05, |
| "loss": 0.4181, |
| "step": 71 |
| }, |
| { |
| "epoch": 0.03638201111672562, |
| "grad_norm": 0.7216759324073792, |
| "learning_rate": 4.5512820512820516e-05, |
| "loss": 0.4966, |
| "step": 72 |
| }, |
| { |
| "epoch": 0.036887316826680144, |
| "grad_norm": 0.7505428194999695, |
| "learning_rate": 4.615384615384616e-05, |
| "loss": 0.3799, |
| "step": 73 |
| }, |
| { |
| "epoch": 0.03739262253663466, |
| "grad_norm": 1.2560772895812988, |
| "learning_rate": 4.67948717948718e-05, |
| "loss": 0.4691, |
| "step": 74 |
| }, |
| { |
| "epoch": 0.037897928246589184, |
| "grad_norm": 0.7000345587730408, |
| "learning_rate": 4.7435897435897435e-05, |
| "loss": 0.2898, |
| "step": 75 |
| }, |
| { |
| "epoch": 0.03840323395654371, |
| "grad_norm": 0.6096041202545166, |
| "learning_rate": 4.8076923076923084e-05, |
| "loss": 0.2929, |
| "step": 76 |
| }, |
| { |
| "epoch": 0.03890853966649823, |
| "grad_norm": 0.6045755743980408, |
| "learning_rate": 4.871794871794872e-05, |
| "loss": 0.282, |
| "step": 77 |
| }, |
| { |
| "epoch": 0.039413845376452754, |
| "grad_norm": 0.9146702885627747, |
| "learning_rate": 4.935897435897436e-05, |
| "loss": 0.4637, |
| "step": 78 |
| }, |
| { |
| "epoch": 0.03991915108640728, |
| "grad_norm": 0.9550428986549377, |
| "learning_rate": 5e-05, |
| "loss": 0.3772, |
| "step": 79 |
| }, |
| { |
| "epoch": 0.0404244567963618, |
| "grad_norm": 0.7160158157348633, |
| "learning_rate": 4.999998018159948e-05, |
| "loss": 0.3099, |
| "step": 80 |
| }, |
| { |
| "epoch": 0.040929762506316324, |
| "grad_norm": 0.6506277322769165, |
| "learning_rate": 4.999992072642933e-05, |
| "loss": 0.3599, |
| "step": 81 |
| }, |
| { |
| "epoch": 0.04143506821627085, |
| "grad_norm": 0.8049689531326294, |
| "learning_rate": 4.999982163458383e-05, |
| "loss": 0.3184, |
| "step": 82 |
| }, |
| { |
| "epoch": 0.04194037392622536, |
| "grad_norm": 1.0008819103240967, |
| "learning_rate": 4.999968290622007e-05, |
| "loss": 0.4903, |
| "step": 83 |
| }, |
| { |
| "epoch": 0.04244567963617989, |
| "grad_norm": 0.6113778352737427, |
| "learning_rate": 4.999950454155801e-05, |
| "loss": 0.2561, |
| "step": 84 |
| }, |
| { |
| "epoch": 0.04295098534613441, |
| "grad_norm": 1.4032593965530396, |
| "learning_rate": 4.999928654088044e-05, |
| "loss": 0.3617, |
| "step": 85 |
| }, |
| { |
| "epoch": 0.04345629105608893, |
| "grad_norm": 0.5701761841773987, |
| "learning_rate": 4.9999028904532996e-05, |
| "loss": 0.3419, |
| "step": 86 |
| }, |
| { |
| "epoch": 0.043961596766043456, |
| "grad_norm": 0.8148670792579651, |
| "learning_rate": 4.9998731632924144e-05, |
| "loss": 0.2048, |
| "step": 87 |
| }, |
| { |
| "epoch": 0.04446690247599798, |
| "grad_norm": 1.7208861112594604, |
| "learning_rate": 4.999839472652522e-05, |
| "loss": 0.4322, |
| "step": 88 |
| }, |
| { |
| "epoch": 0.0449722081859525, |
| "grad_norm": 2.0720438957214355, |
| "learning_rate": 4.9998018185870356e-05, |
| "loss": 0.2536, |
| "step": 89 |
| }, |
| { |
| "epoch": 0.045477513895907026, |
| "grad_norm": 1.085567831993103, |
| "learning_rate": 4.999760201155657e-05, |
| "loss": 0.5578, |
| "step": 90 |
| }, |
| { |
| "epoch": 0.04598281960586155, |
| "grad_norm": 0.5517830848693848, |
| "learning_rate": 4.999714620424367e-05, |
| "loss": 0.2543, |
| "step": 91 |
| }, |
| { |
| "epoch": 0.046488125315816066, |
| "grad_norm": 0.6557161808013916, |
| "learning_rate": 4.999665076465434e-05, |
| "loss": 0.4033, |
| "step": 92 |
| }, |
| { |
| "epoch": 0.04699343102577059, |
| "grad_norm": 0.8143996000289917, |
| "learning_rate": 4.9996115693574086e-05, |
| "loss": 0.3873, |
| "step": 93 |
| }, |
| { |
| "epoch": 0.04749873673572511, |
| "grad_norm": 0.9482316970825195, |
| "learning_rate": 4.999554099185124e-05, |
| "loss": 0.3902, |
| "step": 94 |
| }, |
| { |
| "epoch": 0.048004042445679636, |
| "grad_norm": 0.6633082032203674, |
| "learning_rate": 4.999492666039699e-05, |
| "loss": 0.3041, |
| "step": 95 |
| }, |
| { |
| "epoch": 0.04850934815563416, |
| "grad_norm": 1.107414960861206, |
| "learning_rate": 4.999427270018533e-05, |
| "loss": 0.4195, |
| "step": 96 |
| }, |
| { |
| "epoch": 0.04901465386558868, |
| "grad_norm": 0.4633142948150635, |
| "learning_rate": 4.99935791122531e-05, |
| "loss": 0.22, |
| "step": 97 |
| }, |
| { |
| "epoch": 0.049519959575543206, |
| "grad_norm": 0.7195435166358948, |
| "learning_rate": 4.999284589769996e-05, |
| "loss": 0.2776, |
| "step": 98 |
| }, |
| { |
| "epoch": 0.05002526528549773, |
| "grad_norm": 0.38064494729042053, |
| "learning_rate": 4.9992073057688403e-05, |
| "loss": 0.2126, |
| "step": 99 |
| }, |
| { |
| "epoch": 0.050530570995452245, |
| "grad_norm": 0.4273965060710907, |
| "learning_rate": 4.9991260593443745e-05, |
| "loss": 0.2464, |
| "step": 100 |
| }, |
| { |
| "epoch": 0.05103587670540677, |
| "grad_norm": 0.7326411008834839, |
| "learning_rate": 4.9990408506254134e-05, |
| "loss": 0.3473, |
| "step": 101 |
| }, |
| { |
| "epoch": 0.05154118241536129, |
| "grad_norm": 0.6799935102462769, |
| "learning_rate": 4.998951679747051e-05, |
| "loss": 0.3258, |
| "step": 102 |
| }, |
| { |
| "epoch": 0.052046488125315815, |
| "grad_norm": 0.6898297071456909, |
| "learning_rate": 4.998858546850668e-05, |
| "loss": 0.6305, |
| "step": 103 |
| }, |
| { |
| "epoch": 0.05255179383527034, |
| "grad_norm": 0.5631976127624512, |
| "learning_rate": 4.998761452083922e-05, |
| "loss": 0.3241, |
| "step": 104 |
| }, |
| { |
| "epoch": 0.05305709954522486, |
| "grad_norm": 0.4945349395275116, |
| "learning_rate": 4.998660395600754e-05, |
| "loss": 0.3002, |
| "step": 105 |
| }, |
| { |
| "epoch": 0.053562405255179385, |
| "grad_norm": 0.6329333186149597, |
| "learning_rate": 4.998555377561387e-05, |
| "loss": 0.3074, |
| "step": 106 |
| }, |
| { |
| "epoch": 0.05406771096513391, |
| "grad_norm": 0.532968282699585, |
| "learning_rate": 4.9984463981323246e-05, |
| "loss": 0.334, |
| "step": 107 |
| }, |
| { |
| "epoch": 0.05457301667508843, |
| "grad_norm": 0.7290483713150024, |
| "learning_rate": 4.998333457486349e-05, |
| "loss": 0.2753, |
| "step": 108 |
| }, |
| { |
| "epoch": 0.05507832238504295, |
| "grad_norm": 0.7623900771141052, |
| "learning_rate": 4.998216555802526e-05, |
| "loss": 0.4656, |
| "step": 109 |
| }, |
| { |
| "epoch": 0.05558362809499747, |
| "grad_norm": 0.6192227005958557, |
| "learning_rate": 4.9980956932661996e-05, |
| "loss": 0.258, |
| "step": 110 |
| }, |
| { |
| "epoch": 0.056088933804951994, |
| "grad_norm": 1.0014296770095825, |
| "learning_rate": 4.9979708700689934e-05, |
| "loss": 0.5037, |
| "step": 111 |
| }, |
| { |
| "epoch": 0.05659423951490652, |
| "grad_norm": 0.5563094615936279, |
| "learning_rate": 4.9978420864088115e-05, |
| "loss": 0.3379, |
| "step": 112 |
| }, |
| { |
| "epoch": 0.05709954522486104, |
| "grad_norm": 0.9516816139221191, |
| "learning_rate": 4.997709342489837e-05, |
| "loss": 0.4447, |
| "step": 113 |
| }, |
| { |
| "epoch": 0.057604850934815564, |
| "grad_norm": 0.7699498534202576, |
| "learning_rate": 4.997572638522531e-05, |
| "loss": 0.5482, |
| "step": 114 |
| }, |
| { |
| "epoch": 0.05811015664477009, |
| "grad_norm": 0.623145580291748, |
| "learning_rate": 4.9974319747236344e-05, |
| "loss": 0.3454, |
| "step": 115 |
| }, |
| { |
| "epoch": 0.05861546235472461, |
| "grad_norm": 0.62673419713974, |
| "learning_rate": 4.997287351316165e-05, |
| "loss": 0.4063, |
| "step": 116 |
| }, |
| { |
| "epoch": 0.059120768064679134, |
| "grad_norm": 0.6477133631706238, |
| "learning_rate": 4.997138768529421e-05, |
| "loss": 0.2508, |
| "step": 117 |
| }, |
| { |
| "epoch": 0.05962607377463365, |
| "grad_norm": 0.650576114654541, |
| "learning_rate": 4.9969862265989745e-05, |
| "loss": 0.3157, |
| "step": 118 |
| }, |
| { |
| "epoch": 0.060131379484588174, |
| "grad_norm": 0.6956886053085327, |
| "learning_rate": 4.9968297257666764e-05, |
| "loss": 0.2984, |
| "step": 119 |
| }, |
| { |
| "epoch": 0.0606366851945427, |
| "grad_norm": 1.090057611465454, |
| "learning_rate": 4.996669266280655e-05, |
| "loss": 0.5217, |
| "step": 120 |
| }, |
| { |
| "epoch": 0.06114199090449722, |
| "grad_norm": 0.6649771928787231, |
| "learning_rate": 4.996504848395314e-05, |
| "loss": 0.4094, |
| "step": 121 |
| }, |
| { |
| "epoch": 0.061647296614451744, |
| "grad_norm": 1.0914398431777954, |
| "learning_rate": 4.9963364723713344e-05, |
| "loss": 0.7907, |
| "step": 122 |
| }, |
| { |
| "epoch": 0.06215260232440627, |
| "grad_norm": 1.1362191438674927, |
| "learning_rate": 4.9961641384756705e-05, |
| "loss": 0.5202, |
| "step": 123 |
| }, |
| { |
| "epoch": 0.06265790803436079, |
| "grad_norm": 0.6446903347969055, |
| "learning_rate": 4.9959878469815534e-05, |
| "loss": 0.5434, |
| "step": 124 |
| }, |
| { |
| "epoch": 0.06316321374431531, |
| "grad_norm": 0.5553262233734131, |
| "learning_rate": 4.9958075981684884e-05, |
| "loss": 0.2622, |
| "step": 125 |
| }, |
| { |
| "epoch": 0.06366851945426984, |
| "grad_norm": 0.7034339904785156, |
| "learning_rate": 4.995623392322254e-05, |
| "loss": 0.4785, |
| "step": 126 |
| }, |
| { |
| "epoch": 0.06417382516422436, |
| "grad_norm": 0.8382132649421692, |
| "learning_rate": 4.995435229734905e-05, |
| "loss": 0.3813, |
| "step": 127 |
| }, |
| { |
| "epoch": 0.06467913087417888, |
| "grad_norm": 0.8590944409370422, |
| "learning_rate": 4.995243110704766e-05, |
| "loss": 0.6019, |
| "step": 128 |
| }, |
| { |
| "epoch": 0.0651844365841334, |
| "grad_norm": 0.8058075904846191, |
| "learning_rate": 4.9950470355364386e-05, |
| "loss": 0.4251, |
| "step": 129 |
| }, |
| { |
| "epoch": 0.06568974229408793, |
| "grad_norm": 0.5873439908027649, |
| "learning_rate": 4.994847004540793e-05, |
| "loss": 0.4432, |
| "step": 130 |
| }, |
| { |
| "epoch": 0.06619504800404244, |
| "grad_norm": 0.6679672598838806, |
| "learning_rate": 4.994643018034973e-05, |
| "loss": 0.4053, |
| "step": 131 |
| }, |
| { |
| "epoch": 0.06670035371399696, |
| "grad_norm": 0.8931362628936768, |
| "learning_rate": 4.994435076342394e-05, |
| "loss": 0.5092, |
| "step": 132 |
| }, |
| { |
| "epoch": 0.06720565942395149, |
| "grad_norm": 0.4417133033275604, |
| "learning_rate": 4.994223179792741e-05, |
| "loss": 0.2055, |
| "step": 133 |
| }, |
| { |
| "epoch": 0.06771096513390601, |
| "grad_norm": 0.3722580671310425, |
| "learning_rate": 4.9940073287219703e-05, |
| "loss": 0.2183, |
| "step": 134 |
| }, |
| { |
| "epoch": 0.06821627084386053, |
| "grad_norm": 1.3628448247909546, |
| "learning_rate": 4.9937875234723075e-05, |
| "loss": 0.4615, |
| "step": 135 |
| }, |
| { |
| "epoch": 0.06872157655381506, |
| "grad_norm": 0.6365114450454712, |
| "learning_rate": 4.993563764392248e-05, |
| "loss": 0.3071, |
| "step": 136 |
| }, |
| { |
| "epoch": 0.06922688226376958, |
| "grad_norm": 0.5635626912117004, |
| "learning_rate": 4.9933360518365566e-05, |
| "loss": 0.3514, |
| "step": 137 |
| }, |
| { |
| "epoch": 0.0697321879737241, |
| "grad_norm": 0.7801100611686707, |
| "learning_rate": 4.993104386166263e-05, |
| "loss": 0.3682, |
| "step": 138 |
| }, |
| { |
| "epoch": 0.07023749368367863, |
| "grad_norm": 0.7181437611579895, |
| "learning_rate": 4.9928687677486685e-05, |
| "loss": 0.3864, |
| "step": 139 |
| }, |
| { |
| "epoch": 0.07074279939363315, |
| "grad_norm": 0.7251635193824768, |
| "learning_rate": 4.992629196957339e-05, |
| "loss": 0.4515, |
| "step": 140 |
| }, |
| { |
| "epoch": 0.07124810510358767, |
| "grad_norm": 0.45708200335502625, |
| "learning_rate": 4.992385674172107e-05, |
| "loss": 0.2097, |
| "step": 141 |
| }, |
| { |
| "epoch": 0.0717534108135422, |
| "grad_norm": 0.5553367733955383, |
| "learning_rate": 4.9921381997790705e-05, |
| "loss": 0.246, |
| "step": 142 |
| }, |
| { |
| "epoch": 0.07225871652349672, |
| "grad_norm": 0.6126506924629211, |
| "learning_rate": 4.991886774170594e-05, |
| "loss": 0.2903, |
| "step": 143 |
| }, |
| { |
| "epoch": 0.07276402223345124, |
| "grad_norm": 0.8143837451934814, |
| "learning_rate": 4.991631397745307e-05, |
| "loss": 0.3333, |
| "step": 144 |
| }, |
| { |
| "epoch": 0.07326932794340577, |
| "grad_norm": 1.035240888595581, |
| "learning_rate": 4.991372070908099e-05, |
| "loss": 0.6579, |
| "step": 145 |
| }, |
| { |
| "epoch": 0.07377463365336029, |
| "grad_norm": 0.3664172887802124, |
| "learning_rate": 4.9911087940701274e-05, |
| "loss": 0.1993, |
| "step": 146 |
| }, |
| { |
| "epoch": 0.07427993936331481, |
| "grad_norm": 0.42232248187065125, |
| "learning_rate": 4.9908415676488094e-05, |
| "loss": 0.211, |
| "step": 147 |
| }, |
| { |
| "epoch": 0.07478524507326932, |
| "grad_norm": 0.7374719381332397, |
| "learning_rate": 4.990570392067826e-05, |
| "loss": 0.2841, |
| "step": 148 |
| }, |
| { |
| "epoch": 0.07529055078322384, |
| "grad_norm": 0.7315521240234375, |
| "learning_rate": 4.990295267757117e-05, |
| "loss": 0.4779, |
| "step": 149 |
| }, |
| { |
| "epoch": 0.07579585649317837, |
| "grad_norm": 0.8291402459144592, |
| "learning_rate": 4.9900161951528854e-05, |
| "loss": 0.5315, |
| "step": 150 |
| }, |
| { |
| "epoch": 0.07630116220313289, |
| "grad_norm": 0.9879725575447083, |
| "learning_rate": 4.9897331746975926e-05, |
| "loss": 0.5184, |
| "step": 151 |
| }, |
| { |
| "epoch": 0.07680646791308741, |
| "grad_norm": 0.6654685139656067, |
| "learning_rate": 4.98944620683996e-05, |
| "loss": 0.4268, |
| "step": 152 |
| }, |
| { |
| "epoch": 0.07731177362304194, |
| "grad_norm": 0.6749838590621948, |
| "learning_rate": 4.989155292034966e-05, |
| "loss": 0.3266, |
| "step": 153 |
| }, |
| { |
| "epoch": 0.07781707933299646, |
| "grad_norm": 0.5096444487571716, |
| "learning_rate": 4.98886043074385e-05, |
| "loss": 0.4308, |
| "step": 154 |
| }, |
| { |
| "epoch": 0.07832238504295098, |
| "grad_norm": 0.5235171914100647, |
| "learning_rate": 4.988561623434105e-05, |
| "loss": 0.3358, |
| "step": 155 |
| }, |
| { |
| "epoch": 0.07882769075290551, |
| "grad_norm": 0.39268213510513306, |
| "learning_rate": 4.988258870579481e-05, |
| "loss": 0.2653, |
| "step": 156 |
| }, |
| { |
| "epoch": 0.07933299646286003, |
| "grad_norm": 0.6037962436676025, |
| "learning_rate": 4.987952172659985e-05, |
| "loss": 0.1633, |
| "step": 157 |
| }, |
| { |
| "epoch": 0.07983830217281455, |
| "grad_norm": 0.4751133918762207, |
| "learning_rate": 4.9876415301618787e-05, |
| "loss": 0.2796, |
| "step": 158 |
| }, |
| { |
| "epoch": 0.08034360788276908, |
| "grad_norm": 0.7282652854919434, |
| "learning_rate": 4.987326943577675e-05, |
| "loss": 0.5218, |
| "step": 159 |
| }, |
| { |
| "epoch": 0.0808489135927236, |
| "grad_norm": 0.42741066217422485, |
| "learning_rate": 4.987008413406144e-05, |
| "loss": 0.1733, |
| "step": 160 |
| }, |
| { |
| "epoch": 0.08135421930267812, |
| "grad_norm": 0.5225434899330139, |
| "learning_rate": 4.9866859401523056e-05, |
| "loss": 0.1923, |
| "step": 161 |
| }, |
| { |
| "epoch": 0.08185952501263265, |
| "grad_norm": 0.5940921902656555, |
| "learning_rate": 4.986359524327433e-05, |
| "loss": 0.2548, |
| "step": 162 |
| }, |
| { |
| "epoch": 0.08236483072258717, |
| "grad_norm": 1.0237605571746826, |
| "learning_rate": 4.986029166449048e-05, |
| "loss": 0.3913, |
| "step": 163 |
| }, |
| { |
| "epoch": 0.0828701364325417, |
| "grad_norm": 0.44926929473876953, |
| "learning_rate": 4.985694867040924e-05, |
| "loss": 0.2304, |
| "step": 164 |
| }, |
| { |
| "epoch": 0.0833754421424962, |
| "grad_norm": 0.968037486076355, |
| "learning_rate": 4.9853566266330855e-05, |
| "loss": 0.4391, |
| "step": 165 |
| }, |
| { |
| "epoch": 0.08388074785245073, |
| "grad_norm": 0.6649966835975647, |
| "learning_rate": 4.9850144457618e-05, |
| "loss": 0.2662, |
| "step": 166 |
| }, |
| { |
| "epoch": 0.08438605356240525, |
| "grad_norm": 0.5354177951812744, |
| "learning_rate": 4.984668324969588e-05, |
| "loss": 0.223, |
| "step": 167 |
| }, |
| { |
| "epoch": 0.08489135927235977, |
| "grad_norm": 0.9306359887123108, |
| "learning_rate": 4.9843182648052136e-05, |
| "loss": 0.5532, |
| "step": 168 |
| }, |
| { |
| "epoch": 0.0853966649823143, |
| "grad_norm": 0.2804405987262726, |
| "learning_rate": 4.983964265823687e-05, |
| "loss": 0.1795, |
| "step": 169 |
| }, |
| { |
| "epoch": 0.08590197069226882, |
| "grad_norm": 0.6046093702316284, |
| "learning_rate": 4.9836063285862636e-05, |
| "loss": 0.3467, |
| "step": 170 |
| }, |
| { |
| "epoch": 0.08640727640222334, |
| "grad_norm": 0.3996451199054718, |
| "learning_rate": 4.983244453660445e-05, |
| "loss": 0.2116, |
| "step": 171 |
| }, |
| { |
| "epoch": 0.08691258211217787, |
| "grad_norm": 0.5045155882835388, |
| "learning_rate": 4.98287864161997e-05, |
| "loss": 0.2028, |
| "step": 172 |
| }, |
| { |
| "epoch": 0.08741788782213239, |
| "grad_norm": 0.7425013780593872, |
| "learning_rate": 4.982508893044827e-05, |
| "loss": 0.4183, |
| "step": 173 |
| }, |
| { |
| "epoch": 0.08792319353208691, |
| "grad_norm": 0.4544399380683899, |
| "learning_rate": 4.98213520852124e-05, |
| "loss": 0.2321, |
| "step": 174 |
| }, |
| { |
| "epoch": 0.08842849924204144, |
| "grad_norm": 0.7836821675300598, |
| "learning_rate": 4.981757588641676e-05, |
| "loss": 0.4456, |
| "step": 175 |
| }, |
| { |
| "epoch": 0.08893380495199596, |
| "grad_norm": 0.24565193057060242, |
| "learning_rate": 4.981376034004841e-05, |
| "loss": 0.1439, |
| "step": 176 |
| }, |
| { |
| "epoch": 0.08943911066195048, |
| "grad_norm": 0.3137032687664032, |
| "learning_rate": 4.980990545215678e-05, |
| "loss": 0.1513, |
| "step": 177 |
| }, |
| { |
| "epoch": 0.089944416371905, |
| "grad_norm": 0.6917077302932739, |
| "learning_rate": 4.98060112288537e-05, |
| "loss": 0.3292, |
| "step": 178 |
| }, |
| { |
| "epoch": 0.09044972208185953, |
| "grad_norm": 0.6623265147209167, |
| "learning_rate": 4.9802077676313354e-05, |
| "loss": 0.381, |
| "step": 179 |
| }, |
| { |
| "epoch": 0.09095502779181405, |
| "grad_norm": 0.6027669906616211, |
| "learning_rate": 4.979810480077227e-05, |
| "loss": 0.3129, |
| "step": 180 |
| }, |
| { |
| "epoch": 0.09146033350176858, |
| "grad_norm": 0.4134594798088074, |
| "learning_rate": 4.979409260852933e-05, |
| "loss": 0.1588, |
| "step": 181 |
| }, |
| { |
| "epoch": 0.0919656392117231, |
| "grad_norm": 0.33261945843696594, |
| "learning_rate": 4.979004110594576e-05, |
| "loss": 0.1806, |
| "step": 182 |
| }, |
| { |
| "epoch": 0.09247094492167761, |
| "grad_norm": 1.0738641023635864, |
| "learning_rate": 4.97859502994451e-05, |
| "loss": 0.4609, |
| "step": 183 |
| }, |
| { |
| "epoch": 0.09297625063163213, |
| "grad_norm": 1.1859742403030396, |
| "learning_rate": 4.978182019551321e-05, |
| "loss": 0.5995, |
| "step": 184 |
| }, |
| { |
| "epoch": 0.09348155634158666, |
| "grad_norm": 0.5822703838348389, |
| "learning_rate": 4.977765080069825e-05, |
| "loss": 0.2734, |
| "step": 185 |
| }, |
| { |
| "epoch": 0.09398686205154118, |
| "grad_norm": 0.9306544065475464, |
| "learning_rate": 4.977344212161069e-05, |
| "loss": 0.5029, |
| "step": 186 |
| }, |
| { |
| "epoch": 0.0944921677614957, |
| "grad_norm": 0.3687601685523987, |
| "learning_rate": 4.976919416492327e-05, |
| "loss": 0.2571, |
| "step": 187 |
| }, |
| { |
| "epoch": 0.09499747347145023, |
| "grad_norm": 0.7644825577735901, |
| "learning_rate": 4.9764906937370995e-05, |
| "loss": 0.4293, |
| "step": 188 |
| }, |
| { |
| "epoch": 0.09550277918140475, |
| "grad_norm": 0.6048409342765808, |
| "learning_rate": 4.976058044575116e-05, |
| "loss": 0.252, |
| "step": 189 |
| }, |
| { |
| "epoch": 0.09600808489135927, |
| "grad_norm": 0.89262455701828, |
| "learning_rate": 4.975621469692329e-05, |
| "loss": 0.4889, |
| "step": 190 |
| }, |
| { |
| "epoch": 0.0965133906013138, |
| "grad_norm": 0.7101221680641174, |
| "learning_rate": 4.9751809697809145e-05, |
| "loss": 0.2739, |
| "step": 191 |
| }, |
| { |
| "epoch": 0.09701869631126832, |
| "grad_norm": 0.29569289088249207, |
| "learning_rate": 4.974736545539275e-05, |
| "loss": 0.1573, |
| "step": 192 |
| }, |
| { |
| "epoch": 0.09752400202122284, |
| "grad_norm": 0.5595278739929199, |
| "learning_rate": 4.9742881976720314e-05, |
| "loss": 0.335, |
| "step": 193 |
| }, |
| { |
| "epoch": 0.09802930773117736, |
| "grad_norm": 0.6488416790962219, |
| "learning_rate": 4.973835926890027e-05, |
| "loss": 0.4142, |
| "step": 194 |
| }, |
| { |
| "epoch": 0.09853461344113189, |
| "grad_norm": 0.6213271021842957, |
| "learning_rate": 4.973379733910325e-05, |
| "loss": 0.3829, |
| "step": 195 |
| }, |
| { |
| "epoch": 0.09903991915108641, |
| "grad_norm": 0.5019063353538513, |
| "learning_rate": 4.972919619456205e-05, |
| "loss": 0.275, |
| "step": 196 |
| }, |
| { |
| "epoch": 0.09954522486104093, |
| "grad_norm": 0.9575361609458923, |
| "learning_rate": 4.972455584257168e-05, |
| "loss": 0.507, |
| "step": 197 |
| }, |
| { |
| "epoch": 0.10005053057099546, |
| "grad_norm": 0.36353740096092224, |
| "learning_rate": 4.9719876290489264e-05, |
| "loss": 0.2864, |
| "step": 198 |
| }, |
| { |
| "epoch": 0.10055583628094998, |
| "grad_norm": 0.46957746148109436, |
| "learning_rate": 4.971515754573412e-05, |
| "loss": 0.2227, |
| "step": 199 |
| }, |
| { |
| "epoch": 0.10106114199090449, |
| "grad_norm": 0.7227563858032227, |
| "learning_rate": 4.9710399615787676e-05, |
| "loss": 0.5132, |
| "step": 200 |
| }, |
| { |
| "epoch": 0.10156644770085901, |
| "grad_norm": 0.4742310643196106, |
| "learning_rate": 4.9705602508193496e-05, |
| "loss": 0.1944, |
| "step": 201 |
| }, |
| { |
| "epoch": 0.10207175341081354, |
| "grad_norm": 0.7540463209152222, |
| "learning_rate": 4.970076623055727e-05, |
| "loss": 0.2761, |
| "step": 202 |
| }, |
| { |
| "epoch": 0.10257705912076806, |
| "grad_norm": 0.7352909445762634, |
| "learning_rate": 4.9695890790546765e-05, |
| "loss": 0.486, |
| "step": 203 |
| }, |
| { |
| "epoch": 0.10308236483072258, |
| "grad_norm": 0.6061909794807434, |
| "learning_rate": 4.969097619589187e-05, |
| "loss": 0.5126, |
| "step": 204 |
| }, |
| { |
| "epoch": 0.10358767054067711, |
| "grad_norm": 0.8726075291633606, |
| "learning_rate": 4.968602245438453e-05, |
| "loss": 0.5034, |
| "step": 205 |
| }, |
| { |
| "epoch": 0.10409297625063163, |
| "grad_norm": 0.9859480857849121, |
| "learning_rate": 4.968102957387877e-05, |
| "loss": 0.4496, |
| "step": 206 |
| }, |
| { |
| "epoch": 0.10459828196058615, |
| "grad_norm": 0.5186076164245605, |
| "learning_rate": 4.967599756229065e-05, |
| "loss": 0.3507, |
| "step": 207 |
| }, |
| { |
| "epoch": 0.10510358767054068, |
| "grad_norm": 0.4728350043296814, |
| "learning_rate": 4.967092642759829e-05, |
| "loss": 0.1672, |
| "step": 208 |
| }, |
| { |
| "epoch": 0.1056088933804952, |
| "grad_norm": 0.6087246537208557, |
| "learning_rate": 4.966581617784184e-05, |
| "loss": 0.3796, |
| "step": 209 |
| }, |
| { |
| "epoch": 0.10611419909044972, |
| "grad_norm": 0.8859485983848572, |
| "learning_rate": 4.966066682112345e-05, |
| "loss": 0.5639, |
| "step": 210 |
| }, |
| { |
| "epoch": 0.10661950480040425, |
| "grad_norm": 0.8429753184318542, |
| "learning_rate": 4.9655478365607276e-05, |
| "loss": 0.2432, |
| "step": 211 |
| }, |
| { |
| "epoch": 0.10712481051035877, |
| "grad_norm": 0.609099805355072, |
| "learning_rate": 4.9650250819519485e-05, |
| "loss": 0.3056, |
| "step": 212 |
| }, |
| { |
| "epoch": 0.1076301162203133, |
| "grad_norm": 0.40738436579704285, |
| "learning_rate": 4.964498419114819e-05, |
| "loss": 0.1862, |
| "step": 213 |
| }, |
| { |
| "epoch": 0.10813542193026782, |
| "grad_norm": 0.7260504364967346, |
| "learning_rate": 4.963967848884349e-05, |
| "loss": 0.5646, |
| "step": 214 |
| }, |
| { |
| "epoch": 0.10864072764022234, |
| "grad_norm": 1.013758659362793, |
| "learning_rate": 4.963433372101743e-05, |
| "loss": 0.4497, |
| "step": 215 |
| }, |
| { |
| "epoch": 0.10914603335017686, |
| "grad_norm": 0.41159892082214355, |
| "learning_rate": 4.962894989614398e-05, |
| "loss": 0.3157, |
| "step": 216 |
| }, |
| { |
| "epoch": 0.10965133906013137, |
| "grad_norm": 0.42557021975517273, |
| "learning_rate": 4.9623527022759054e-05, |
| "loss": 0.1879, |
| "step": 217 |
| }, |
| { |
| "epoch": 0.1101566447700859, |
| "grad_norm": 0.7539249658584595, |
| "learning_rate": 4.9618065109460465e-05, |
| "loss": 0.4206, |
| "step": 218 |
| }, |
| { |
| "epoch": 0.11066195048004042, |
| "grad_norm": 0.6201863884925842, |
| "learning_rate": 4.961256416490792e-05, |
| "loss": 0.2961, |
| "step": 219 |
| }, |
| { |
| "epoch": 0.11116725618999494, |
| "grad_norm": 0.7610240578651428, |
| "learning_rate": 4.9607024197823014e-05, |
| "loss": 0.4731, |
| "step": 220 |
| }, |
| { |
| "epoch": 0.11167256189994947, |
| "grad_norm": 0.4443597197532654, |
| "learning_rate": 4.9601445216989206e-05, |
| "loss": 0.2716, |
| "step": 221 |
| }, |
| { |
| "epoch": 0.11217786760990399, |
| "grad_norm": 0.47122883796691895, |
| "learning_rate": 4.9595827231251824e-05, |
| "loss": 0.1896, |
| "step": 222 |
| }, |
| { |
| "epoch": 0.11268317331985851, |
| "grad_norm": 0.6130548715591431, |
| "learning_rate": 4.959017024951802e-05, |
| "loss": 0.3805, |
| "step": 223 |
| }, |
| { |
| "epoch": 0.11318847902981304, |
| "grad_norm": 0.6293324828147888, |
| "learning_rate": 4.958447428075678e-05, |
| "loss": 0.5559, |
| "step": 224 |
| }, |
| { |
| "epoch": 0.11369378473976756, |
| "grad_norm": 0.5696532726287842, |
| "learning_rate": 4.957873933399891e-05, |
| "loss": 0.2836, |
| "step": 225 |
| }, |
| { |
| "epoch": 0.11419909044972208, |
| "grad_norm": 0.5803974270820618, |
| "learning_rate": 4.9572965418336995e-05, |
| "loss": 0.3246, |
| "step": 226 |
| }, |
| { |
| "epoch": 0.1147043961596766, |
| "grad_norm": 0.6962433457374573, |
| "learning_rate": 4.956715254292543e-05, |
| "loss": 0.4295, |
| "step": 227 |
| }, |
| { |
| "epoch": 0.11520970186963113, |
| "grad_norm": 0.8711988925933838, |
| "learning_rate": 4.9561300716980365e-05, |
| "loss": 0.6052, |
| "step": 228 |
| }, |
| { |
| "epoch": 0.11571500757958565, |
| "grad_norm": 0.39029747247695923, |
| "learning_rate": 4.95554099497797e-05, |
| "loss": 0.2225, |
| "step": 229 |
| }, |
| { |
| "epoch": 0.11622031328954018, |
| "grad_norm": 0.7048918008804321, |
| "learning_rate": 4.9549480250663086e-05, |
| "loss": 0.3617, |
| "step": 230 |
| }, |
| { |
| "epoch": 0.1167256189994947, |
| "grad_norm": 0.8495561480522156, |
| "learning_rate": 4.954351162903188e-05, |
| "loss": 0.47, |
| "step": 231 |
| }, |
| { |
| "epoch": 0.11723092470944922, |
| "grad_norm": 0.8110531568527222, |
| "learning_rate": 4.953750409434919e-05, |
| "loss": 0.5883, |
| "step": 232 |
| }, |
| { |
| "epoch": 0.11773623041940375, |
| "grad_norm": 0.5132966637611389, |
| "learning_rate": 4.953145765613978e-05, |
| "loss": 0.2866, |
| "step": 233 |
| }, |
| { |
| "epoch": 0.11824153612935827, |
| "grad_norm": 0.6017918586730957, |
| "learning_rate": 4.952537232399012e-05, |
| "loss": 0.3651, |
| "step": 234 |
| }, |
| { |
| "epoch": 0.11874684183931278, |
| "grad_norm": 0.5894491672515869, |
| "learning_rate": 4.951924810754831e-05, |
| "loss": 0.2754, |
| "step": 235 |
| }, |
| { |
| "epoch": 0.1192521475492673, |
| "grad_norm": 0.46862131357192993, |
| "learning_rate": 4.9513085016524153e-05, |
| "loss": 0.3216, |
| "step": 236 |
| }, |
| { |
| "epoch": 0.11975745325922182, |
| "grad_norm": 0.6514838933944702, |
| "learning_rate": 4.9506883060689035e-05, |
| "loss": 0.2915, |
| "step": 237 |
| }, |
| { |
| "epoch": 0.12026275896917635, |
| "grad_norm": 1.0483429431915283, |
| "learning_rate": 4.950064224987599e-05, |
| "loss": 0.6246, |
| "step": 238 |
| }, |
| { |
| "epoch": 0.12076806467913087, |
| "grad_norm": 0.3560936748981476, |
| "learning_rate": 4.949436259397966e-05, |
| "loss": 0.2231, |
| "step": 239 |
| }, |
| { |
| "epoch": 0.1212733703890854, |
| "grad_norm": 0.7140908241271973, |
| "learning_rate": 4.948804410295625e-05, |
| "loss": 0.8288, |
| "step": 240 |
| }, |
| { |
| "epoch": 0.12177867609903992, |
| "grad_norm": 0.8638331294059753, |
| "learning_rate": 4.9481686786823554e-05, |
| "loss": 0.4715, |
| "step": 241 |
| }, |
| { |
| "epoch": 0.12228398180899444, |
| "grad_norm": 0.8147590160369873, |
| "learning_rate": 4.9475290655660926e-05, |
| "loss": 0.3322, |
| "step": 242 |
| }, |
| { |
| "epoch": 0.12278928751894896, |
| "grad_norm": 1.1923398971557617, |
| "learning_rate": 4.946885571960925e-05, |
| "loss": 0.4714, |
| "step": 243 |
| }, |
| { |
| "epoch": 0.12329459322890349, |
| "grad_norm": 0.4850132167339325, |
| "learning_rate": 4.946238198887093e-05, |
| "loss": 0.2975, |
| "step": 244 |
| }, |
| { |
| "epoch": 0.12379989893885801, |
| "grad_norm": 0.4749908447265625, |
| "learning_rate": 4.9455869473709895e-05, |
| "loss": 0.3996, |
| "step": 245 |
| }, |
| { |
| "epoch": 0.12430520464881253, |
| "grad_norm": 0.4123810827732086, |
| "learning_rate": 4.9449318184451545e-05, |
| "loss": 0.1817, |
| "step": 246 |
| }, |
| { |
| "epoch": 0.12481051035876706, |
| "grad_norm": 0.603053629398346, |
| "learning_rate": 4.944272813148277e-05, |
| "loss": 0.4783, |
| "step": 247 |
| }, |
| { |
| "epoch": 0.12531581606872158, |
| "grad_norm": 0.2952713668346405, |
| "learning_rate": 4.943609932525193e-05, |
| "loss": 0.1619, |
| "step": 248 |
| }, |
| { |
| "epoch": 0.1258211217786761, |
| "grad_norm": 0.7722076773643494, |
| "learning_rate": 4.942943177626879e-05, |
| "loss": 0.4847, |
| "step": 249 |
| }, |
| { |
| "epoch": 0.12632642748863063, |
| "grad_norm": 0.4615485966205597, |
| "learning_rate": 4.942272549510457e-05, |
| "loss": 0.2843, |
| "step": 250 |
| }, |
| { |
| "epoch": 0.12683173319858515, |
| "grad_norm": 0.501020073890686, |
| "learning_rate": 4.94159804923919e-05, |
| "loss": 0.2411, |
| "step": 251 |
| }, |
| { |
| "epoch": 0.12733703890853967, |
| "grad_norm": 0.9735652208328247, |
| "learning_rate": 4.9409196778824776e-05, |
| "loss": 0.546, |
| "step": 252 |
| }, |
| { |
| "epoch": 0.1278423446184942, |
| "grad_norm": 0.9183477163314819, |
| "learning_rate": 4.94023743651586e-05, |
| "loss": 0.5422, |
| "step": 253 |
| }, |
| { |
| "epoch": 0.12834765032844872, |
| "grad_norm": 0.25691601634025574, |
| "learning_rate": 4.939551326221012e-05, |
| "loss": 0.1477, |
| "step": 254 |
| }, |
| { |
| "epoch": 0.12885295603840324, |
| "grad_norm": 1.1621726751327515, |
| "learning_rate": 4.938861348085742e-05, |
| "loss": 0.518, |
| "step": 255 |
| }, |
| { |
| "epoch": 0.12935826174835777, |
| "grad_norm": 1.0264886617660522, |
| "learning_rate": 4.9381675032039896e-05, |
| "loss": 0.513, |
| "step": 256 |
| }, |
| { |
| "epoch": 0.1298635674583123, |
| "grad_norm": 0.46496647596359253, |
| "learning_rate": 4.9374697926758275e-05, |
| "loss": 0.2556, |
| "step": 257 |
| }, |
| { |
| "epoch": 0.1303688731682668, |
| "grad_norm": 0.2897842526435852, |
| "learning_rate": 4.936768217607457e-05, |
| "loss": 0.1476, |
| "step": 258 |
| }, |
| { |
| "epoch": 0.13087417887822134, |
| "grad_norm": 1.0029571056365967, |
| "learning_rate": 4.9360627791112046e-05, |
| "loss": 0.6114, |
| "step": 259 |
| }, |
| { |
| "epoch": 0.13137948458817586, |
| "grad_norm": 0.42095255851745605, |
| "learning_rate": 4.935353478305524e-05, |
| "loss": 0.3028, |
| "step": 260 |
| }, |
| { |
| "epoch": 0.13188479029813036, |
| "grad_norm": 1.4107495546340942, |
| "learning_rate": 4.9346403163149924e-05, |
| "loss": 0.677, |
| "step": 261 |
| }, |
| { |
| "epoch": 0.13239009600808488, |
| "grad_norm": 0.33536162972450256, |
| "learning_rate": 4.9339232942703067e-05, |
| "loss": 0.1942, |
| "step": 262 |
| }, |
| { |
| "epoch": 0.1328954017180394, |
| "grad_norm": 0.4370989203453064, |
| "learning_rate": 4.933202413308286e-05, |
| "loss": 0.3653, |
| "step": 263 |
| }, |
| { |
| "epoch": 0.13340070742799393, |
| "grad_norm": 1.1874058246612549, |
| "learning_rate": 4.932477674571867e-05, |
| "loss": 0.5772, |
| "step": 264 |
| }, |
| { |
| "epoch": 0.13390601313794845, |
| "grad_norm": 0.6043871641159058, |
| "learning_rate": 4.931749079210103e-05, |
| "loss": 0.2519, |
| "step": 265 |
| }, |
| { |
| "epoch": 0.13441131884790297, |
| "grad_norm": 0.5337929129600525, |
| "learning_rate": 4.931016628378161e-05, |
| "loss": 0.2081, |
| "step": 266 |
| }, |
| { |
| "epoch": 0.1349166245578575, |
| "grad_norm": 0.6960159540176392, |
| "learning_rate": 4.930280323237321e-05, |
| "loss": 0.3724, |
| "step": 267 |
| }, |
| { |
| "epoch": 0.13542193026781202, |
| "grad_norm": 0.3179565966129303, |
| "learning_rate": 4.9295401649549755e-05, |
| "loss": 0.2135, |
| "step": 268 |
| }, |
| { |
| "epoch": 0.13592723597776654, |
| "grad_norm": 0.4810411036014557, |
| "learning_rate": 4.9287961547046234e-05, |
| "loss": 0.2307, |
| "step": 269 |
| }, |
| { |
| "epoch": 0.13643254168772107, |
| "grad_norm": 0.4444022476673126, |
| "learning_rate": 4.928048293665873e-05, |
| "loss": 0.3267, |
| "step": 270 |
| }, |
| { |
| "epoch": 0.1369378473976756, |
| "grad_norm": 0.5844589471817017, |
| "learning_rate": 4.9272965830244366e-05, |
| "loss": 0.2969, |
| "step": 271 |
| }, |
| { |
| "epoch": 0.1374431531076301, |
| "grad_norm": 0.37751492857933044, |
| "learning_rate": 4.9265410239721313e-05, |
| "loss": 0.2331, |
| "step": 272 |
| }, |
| { |
| "epoch": 0.13794845881758463, |
| "grad_norm": 0.9071863293647766, |
| "learning_rate": 4.925781617706874e-05, |
| "loss": 0.486, |
| "step": 273 |
| }, |
| { |
| "epoch": 0.13845376452753916, |
| "grad_norm": 0.4862212538719177, |
| "learning_rate": 4.925018365432682e-05, |
| "loss": 0.234, |
| "step": 274 |
| }, |
| { |
| "epoch": 0.13895907023749368, |
| "grad_norm": 0.2530370056629181, |
| "learning_rate": 4.92425126835967e-05, |
| "loss": 0.1656, |
| "step": 275 |
| }, |
| { |
| "epoch": 0.1394643759474482, |
| "grad_norm": 0.8576244711875916, |
| "learning_rate": 4.923480327704051e-05, |
| "loss": 0.6208, |
| "step": 276 |
| }, |
| { |
| "epoch": 0.13996968165740273, |
| "grad_norm": 0.6219656467437744, |
| "learning_rate": 4.922705544688127e-05, |
| "loss": 0.3705, |
| "step": 277 |
| }, |
| { |
| "epoch": 0.14047498736735725, |
| "grad_norm": 0.3833385705947876, |
| "learning_rate": 4.9219269205402976e-05, |
| "loss": 0.2777, |
| "step": 278 |
| }, |
| { |
| "epoch": 0.14098029307731177, |
| "grad_norm": 0.5600672960281372, |
| "learning_rate": 4.921144456495048e-05, |
| "loss": 0.5572, |
| "step": 279 |
| }, |
| { |
| "epoch": 0.1414855987872663, |
| "grad_norm": 0.5854461193084717, |
| "learning_rate": 4.9203581537929536e-05, |
| "loss": 0.3957, |
| "step": 280 |
| }, |
| { |
| "epoch": 0.14199090449722082, |
| "grad_norm": 0.6858914494514465, |
| "learning_rate": 4.919568013680676e-05, |
| "loss": 0.3994, |
| "step": 281 |
| }, |
| { |
| "epoch": 0.14249621020717534, |
| "grad_norm": 0.8274922370910645, |
| "learning_rate": 4.918774037410958e-05, |
| "loss": 0.3523, |
| "step": 282 |
| }, |
| { |
| "epoch": 0.14300151591712987, |
| "grad_norm": 0.423542320728302, |
| "learning_rate": 4.9179762262426286e-05, |
| "loss": 0.2194, |
| "step": 283 |
| }, |
| { |
| "epoch": 0.1435068216270844, |
| "grad_norm": 0.7165141105651855, |
| "learning_rate": 4.917174581440595e-05, |
| "loss": 0.4029, |
| "step": 284 |
| }, |
| { |
| "epoch": 0.14401212733703891, |
| "grad_norm": 0.900047242641449, |
| "learning_rate": 4.916369104275842e-05, |
| "loss": 0.5053, |
| "step": 285 |
| }, |
| { |
| "epoch": 0.14451743304699344, |
| "grad_norm": 0.466221421957016, |
| "learning_rate": 4.9155597960254316e-05, |
| "loss": 0.3247, |
| "step": 286 |
| }, |
| { |
| "epoch": 0.14502273875694796, |
| "grad_norm": 0.6081900596618652, |
| "learning_rate": 4.914746657972499e-05, |
| "loss": 0.3749, |
| "step": 287 |
| }, |
| { |
| "epoch": 0.14552804446690248, |
| "grad_norm": 0.8832564949989319, |
| "learning_rate": 4.913929691406253e-05, |
| "loss": 0.3952, |
| "step": 288 |
| }, |
| { |
| "epoch": 0.146033350176857, |
| "grad_norm": 0.6159337759017944, |
| "learning_rate": 4.91310889762197e-05, |
| "loss": 0.4868, |
| "step": 289 |
| }, |
| { |
| "epoch": 0.14653865588681153, |
| "grad_norm": 0.6993011832237244, |
| "learning_rate": 4.9122842779209954e-05, |
| "loss": 0.4032, |
| "step": 290 |
| }, |
| { |
| "epoch": 0.14704396159676605, |
| "grad_norm": 0.7520043253898621, |
| "learning_rate": 4.911455833610742e-05, |
| "loss": 0.2641, |
| "step": 291 |
| }, |
| { |
| "epoch": 0.14754926730672058, |
| "grad_norm": 0.4298335313796997, |
| "learning_rate": 4.910623566004684e-05, |
| "loss": 0.1774, |
| "step": 292 |
| }, |
| { |
| "epoch": 0.1480545730166751, |
| "grad_norm": 0.5507643818855286, |
| "learning_rate": 4.909787476422358e-05, |
| "loss": 0.305, |
| "step": 293 |
| }, |
| { |
| "epoch": 0.14855987872662962, |
| "grad_norm": 0.6847674250602722, |
| "learning_rate": 4.9089475661893625e-05, |
| "loss": 0.3482, |
| "step": 294 |
| }, |
| { |
| "epoch": 0.14906518443658415, |
| "grad_norm": 0.48942941427230835, |
| "learning_rate": 4.908103836637351e-05, |
| "loss": 0.2464, |
| "step": 295 |
| }, |
| { |
| "epoch": 0.14957049014653864, |
| "grad_norm": 0.39289000630378723, |
| "learning_rate": 4.907256289104032e-05, |
| "loss": 0.195, |
| "step": 296 |
| }, |
| { |
| "epoch": 0.15007579585649317, |
| "grad_norm": 0.599053144454956, |
| "learning_rate": 4.906404924933169e-05, |
| "loss": 0.2337, |
| "step": 297 |
| }, |
| { |
| "epoch": 0.1505811015664477, |
| "grad_norm": 0.6427844762802124, |
| "learning_rate": 4.9055497454745755e-05, |
| "loss": 0.3626, |
| "step": 298 |
| }, |
| { |
| "epoch": 0.1510864072764022, |
| "grad_norm": 0.875948429107666, |
| "learning_rate": 4.904690752084117e-05, |
| "loss": 0.4159, |
| "step": 299 |
| }, |
| { |
| "epoch": 0.15159171298635674, |
| "grad_norm": 0.7781713604927063, |
| "learning_rate": 4.9038279461237016e-05, |
| "loss": 0.3178, |
| "step": 300 |
| }, |
| { |
| "epoch": 0.15209701869631126, |
| "grad_norm": 0.5986030697822571, |
| "learning_rate": 4.902961328961283e-05, |
| "loss": 0.5787, |
| "step": 301 |
| }, |
| { |
| "epoch": 0.15260232440626578, |
| "grad_norm": 0.6488214135169983, |
| "learning_rate": 4.902090901970861e-05, |
| "loss": 0.3557, |
| "step": 302 |
| }, |
| { |
| "epoch": 0.1531076301162203, |
| "grad_norm": 0.6705836057662964, |
| "learning_rate": 4.9012166665324714e-05, |
| "loss": 0.306, |
| "step": 303 |
| }, |
| { |
| "epoch": 0.15361293582617483, |
| "grad_norm": 0.5052811503410339, |
| "learning_rate": 4.900338624032191e-05, |
| "loss": 0.2413, |
| "step": 304 |
| }, |
| { |
| "epoch": 0.15411824153612935, |
| "grad_norm": 0.5678831934928894, |
| "learning_rate": 4.899456775862131e-05, |
| "loss": 0.3372, |
| "step": 305 |
| }, |
| { |
| "epoch": 0.15462354724608388, |
| "grad_norm": 0.4097347855567932, |
| "learning_rate": 4.898571123420438e-05, |
| "loss": 0.2792, |
| "step": 306 |
| }, |
| { |
| "epoch": 0.1551288529560384, |
| "grad_norm": 0.3428628444671631, |
| "learning_rate": 4.897681668111288e-05, |
| "loss": 0.2215, |
| "step": 307 |
| }, |
| { |
| "epoch": 0.15563415866599292, |
| "grad_norm": 0.5401535630226135, |
| "learning_rate": 4.896788411344888e-05, |
| "loss": 0.2768, |
| "step": 308 |
| }, |
| { |
| "epoch": 0.15613946437594745, |
| "grad_norm": 0.65704745054245, |
| "learning_rate": 4.895891354537472e-05, |
| "loss": 0.4244, |
| "step": 309 |
| }, |
| { |
| "epoch": 0.15664477008590197, |
| "grad_norm": 0.8244349360466003, |
| "learning_rate": 4.894990499111297e-05, |
| "loss": 0.4721, |
| "step": 310 |
| }, |
| { |
| "epoch": 0.1571500757958565, |
| "grad_norm": 1.0010887384414673, |
| "learning_rate": 4.894085846494647e-05, |
| "loss": 0.6647, |
| "step": 311 |
| }, |
| { |
| "epoch": 0.15765538150581102, |
| "grad_norm": 0.5577598214149475, |
| "learning_rate": 4.893177398121821e-05, |
| "loss": 0.458, |
| "step": 312 |
| }, |
| { |
| "epoch": 0.15816068721576554, |
| "grad_norm": 0.9697132110595703, |
| "learning_rate": 4.8922651554331386e-05, |
| "loss": 0.583, |
| "step": 313 |
| }, |
| { |
| "epoch": 0.15866599292572006, |
| "grad_norm": 0.7918589115142822, |
| "learning_rate": 4.891349119874936e-05, |
| "loss": 0.3947, |
| "step": 314 |
| }, |
| { |
| "epoch": 0.15917129863567459, |
| "grad_norm": 0.5606548190116882, |
| "learning_rate": 4.890429292899562e-05, |
| "loss": 0.2518, |
| "step": 315 |
| }, |
| { |
| "epoch": 0.1596766043456291, |
| "grad_norm": 0.46425801515579224, |
| "learning_rate": 4.889505675965376e-05, |
| "loss": 0.231, |
| "step": 316 |
| }, |
| { |
| "epoch": 0.16018191005558363, |
| "grad_norm": 0.5073766708374023, |
| "learning_rate": 4.888578270536747e-05, |
| "loss": 0.404, |
| "step": 317 |
| }, |
| { |
| "epoch": 0.16068721576553815, |
| "grad_norm": 0.8138315677642822, |
| "learning_rate": 4.88764707808405e-05, |
| "loss": 0.3759, |
| "step": 318 |
| }, |
| { |
| "epoch": 0.16119252147549268, |
| "grad_norm": 0.6912230849266052, |
| "learning_rate": 4.886712100083665e-05, |
| "loss": 0.4989, |
| "step": 319 |
| }, |
| { |
| "epoch": 0.1616978271854472, |
| "grad_norm": 0.43325480818748474, |
| "learning_rate": 4.8857733380179735e-05, |
| "loss": 0.3023, |
| "step": 320 |
| }, |
| { |
| "epoch": 0.16220313289540172, |
| "grad_norm": 0.4167453944683075, |
| "learning_rate": 4.8848307933753566e-05, |
| "loss": 0.2701, |
| "step": 321 |
| }, |
| { |
| "epoch": 0.16270843860535625, |
| "grad_norm": 0.3875753581523895, |
| "learning_rate": 4.883884467650193e-05, |
| "loss": 0.2084, |
| "step": 322 |
| }, |
| { |
| "epoch": 0.16321374431531077, |
| "grad_norm": 0.4990168511867523, |
| "learning_rate": 4.882934362342854e-05, |
| "loss": 0.6131, |
| "step": 323 |
| }, |
| { |
| "epoch": 0.1637190500252653, |
| "grad_norm": 0.5788955092430115, |
| "learning_rate": 4.881980478959707e-05, |
| "loss": 0.2931, |
| "step": 324 |
| }, |
| { |
| "epoch": 0.16422435573521982, |
| "grad_norm": 0.7873169183731079, |
| "learning_rate": 4.881022819013106e-05, |
| "loss": 0.3825, |
| "step": 325 |
| }, |
| { |
| "epoch": 0.16472966144517434, |
| "grad_norm": 0.30970779061317444, |
| "learning_rate": 4.8800613840213936e-05, |
| "loss": 0.1879, |
| "step": 326 |
| }, |
| { |
| "epoch": 0.16523496715512886, |
| "grad_norm": 0.43223828077316284, |
| "learning_rate": 4.8790961755089006e-05, |
| "loss": 0.194, |
| "step": 327 |
| }, |
| { |
| "epoch": 0.1657402728650834, |
| "grad_norm": 0.4357849061489105, |
| "learning_rate": 4.878127195005936e-05, |
| "loss": 0.2503, |
| "step": 328 |
| }, |
| { |
| "epoch": 0.1662455785750379, |
| "grad_norm": 0.6348159313201904, |
| "learning_rate": 4.8771544440487916e-05, |
| "loss": 0.2907, |
| "step": 329 |
| }, |
| { |
| "epoch": 0.1667508842849924, |
| "grad_norm": 0.4712270200252533, |
| "learning_rate": 4.8761779241797366e-05, |
| "loss": 0.2412, |
| "step": 330 |
| }, |
| { |
| "epoch": 0.16725618999494693, |
| "grad_norm": 0.7354241609573364, |
| "learning_rate": 4.875197636947018e-05, |
| "loss": 0.4536, |
| "step": 331 |
| }, |
| { |
| "epoch": 0.16776149570490145, |
| "grad_norm": 0.3180365264415741, |
| "learning_rate": 4.874213583904851e-05, |
| "loss": 0.1503, |
| "step": 332 |
| }, |
| { |
| "epoch": 0.16826680141485598, |
| "grad_norm": 0.9528758525848389, |
| "learning_rate": 4.873225766613425e-05, |
| "loss": 0.4599, |
| "step": 333 |
| }, |
| { |
| "epoch": 0.1687721071248105, |
| "grad_norm": 0.5695481300354004, |
| "learning_rate": 4.872234186638898e-05, |
| "loss": 0.3238, |
| "step": 334 |
| }, |
| { |
| "epoch": 0.16927741283476502, |
| "grad_norm": 0.9408870935440063, |
| "learning_rate": 4.871238845553391e-05, |
| "loss": 0.4594, |
| "step": 335 |
| }, |
| { |
| "epoch": 0.16978271854471955, |
| "grad_norm": 0.5320512652397156, |
| "learning_rate": 4.87023974493499e-05, |
| "loss": 0.3218, |
| "step": 336 |
| }, |
| { |
| "epoch": 0.17028802425467407, |
| "grad_norm": 0.6941995024681091, |
| "learning_rate": 4.8692368863677416e-05, |
| "loss": 0.4405, |
| "step": 337 |
| }, |
| { |
| "epoch": 0.1707933299646286, |
| "grad_norm": 0.5261353850364685, |
| "learning_rate": 4.8682302714416494e-05, |
| "loss": 0.2217, |
| "step": 338 |
| }, |
| { |
| "epoch": 0.17129863567458312, |
| "grad_norm": 0.6723947525024414, |
| "learning_rate": 4.8672199017526726e-05, |
| "loss": 0.3233, |
| "step": 339 |
| }, |
| { |
| "epoch": 0.17180394138453764, |
| "grad_norm": 0.499994695186615, |
| "learning_rate": 4.866205778902725e-05, |
| "loss": 0.2603, |
| "step": 340 |
| }, |
| { |
| "epoch": 0.17230924709449216, |
| "grad_norm": 0.3436490595340729, |
| "learning_rate": 4.86518790449967e-05, |
| "loss": 0.2763, |
| "step": 341 |
| }, |
| { |
| "epoch": 0.17281455280444669, |
| "grad_norm": 0.6315925717353821, |
| "learning_rate": 4.8641662801573185e-05, |
| "loss": 0.2521, |
| "step": 342 |
| }, |
| { |
| "epoch": 0.1733198585144012, |
| "grad_norm": 0.5178197026252747, |
| "learning_rate": 4.863140907495428e-05, |
| "loss": 0.222, |
| "step": 343 |
| }, |
| { |
| "epoch": 0.17382516422435573, |
| "grad_norm": 0.5388314127922058, |
| "learning_rate": 4.862111788139697e-05, |
| "loss": 0.3894, |
| "step": 344 |
| }, |
| { |
| "epoch": 0.17433046993431026, |
| "grad_norm": 0.7556846737861633, |
| "learning_rate": 4.861078923721767e-05, |
| "loss": 0.3644, |
| "step": 345 |
| }, |
| { |
| "epoch": 0.17483577564426478, |
| "grad_norm": 0.827375590801239, |
| "learning_rate": 4.860042315879214e-05, |
| "loss": 0.5613, |
| "step": 346 |
| }, |
| { |
| "epoch": 0.1753410813542193, |
| "grad_norm": 0.550095796585083, |
| "learning_rate": 4.8590019662555534e-05, |
| "loss": 0.3149, |
| "step": 347 |
| }, |
| { |
| "epoch": 0.17584638706417383, |
| "grad_norm": 0.41516733169555664, |
| "learning_rate": 4.857957876500227e-05, |
| "loss": 0.254, |
| "step": 348 |
| }, |
| { |
| "epoch": 0.17635169277412835, |
| "grad_norm": 0.6708664298057556, |
| "learning_rate": 4.8569100482686125e-05, |
| "loss": 0.7031, |
| "step": 349 |
| }, |
| { |
| "epoch": 0.17685699848408287, |
| "grad_norm": 0.8377763032913208, |
| "learning_rate": 4.855858483222011e-05, |
| "loss": 0.4918, |
| "step": 350 |
| }, |
| { |
| "epoch": 0.1773623041940374, |
| "grad_norm": 0.5428167581558228, |
| "learning_rate": 4.8548031830276496e-05, |
| "loss": 0.3071, |
| "step": 351 |
| }, |
| { |
| "epoch": 0.17786760990399192, |
| "grad_norm": 0.6892185807228088, |
| "learning_rate": 4.8537441493586786e-05, |
| "loss": 0.3797, |
| "step": 352 |
| }, |
| { |
| "epoch": 0.17837291561394644, |
| "grad_norm": 0.5104579925537109, |
| "learning_rate": 4.852681383894164e-05, |
| "loss": 0.3025, |
| "step": 353 |
| }, |
| { |
| "epoch": 0.17887822132390097, |
| "grad_norm": 0.42806196212768555, |
| "learning_rate": 4.8516148883190926e-05, |
| "loss": 0.2176, |
| "step": 354 |
| }, |
| { |
| "epoch": 0.1793835270338555, |
| "grad_norm": 0.6678165197372437, |
| "learning_rate": 4.850544664324363e-05, |
| "loss": 0.3057, |
| "step": 355 |
| }, |
| { |
| "epoch": 0.17988883274381, |
| "grad_norm": 0.33582162857055664, |
| "learning_rate": 4.849470713606785e-05, |
| "loss": 0.1869, |
| "step": 356 |
| }, |
| { |
| "epoch": 0.18039413845376454, |
| "grad_norm": 0.8661889433860779, |
| "learning_rate": 4.8483930378690765e-05, |
| "loss": 0.4672, |
| "step": 357 |
| }, |
| { |
| "epoch": 0.18089944416371906, |
| "grad_norm": 0.6204270124435425, |
| "learning_rate": 4.847311638819865e-05, |
| "loss": 0.2449, |
| "step": 358 |
| }, |
| { |
| "epoch": 0.18140474987367358, |
| "grad_norm": 0.8477338552474976, |
| "learning_rate": 4.8462265181736756e-05, |
| "loss": 0.3236, |
| "step": 359 |
| }, |
| { |
| "epoch": 0.1819100555836281, |
| "grad_norm": 0.5757856965065002, |
| "learning_rate": 4.845137677650937e-05, |
| "loss": 0.2292, |
| "step": 360 |
| }, |
| { |
| "epoch": 0.18241536129358263, |
| "grad_norm": 0.3746514320373535, |
| "learning_rate": 4.8440451189779776e-05, |
| "loss": 0.2001, |
| "step": 361 |
| }, |
| { |
| "epoch": 0.18292066700353715, |
| "grad_norm": 0.5158061981201172, |
| "learning_rate": 4.8429488438870174e-05, |
| "loss": 0.4076, |
| "step": 362 |
| }, |
| { |
| "epoch": 0.18342597271349168, |
| "grad_norm": 0.6773928999900818, |
| "learning_rate": 4.841848854116169e-05, |
| "loss": 0.3376, |
| "step": 363 |
| }, |
| { |
| "epoch": 0.1839312784234462, |
| "grad_norm": 0.6250408291816711, |
| "learning_rate": 4.840745151409437e-05, |
| "loss": 0.3262, |
| "step": 364 |
| }, |
| { |
| "epoch": 0.1844365841334007, |
| "grad_norm": 0.43249404430389404, |
| "learning_rate": 4.83963773751671e-05, |
| "loss": 0.2738, |
| "step": 365 |
| }, |
| { |
| "epoch": 0.18494188984335522, |
| "grad_norm": 0.5019465684890747, |
| "learning_rate": 4.838526614193762e-05, |
| "loss": 0.4144, |
| "step": 366 |
| }, |
| { |
| "epoch": 0.18544719555330974, |
| "grad_norm": 0.45221251249313354, |
| "learning_rate": 4.837411783202249e-05, |
| "loss": 0.2398, |
| "step": 367 |
| }, |
| { |
| "epoch": 0.18595250126326426, |
| "grad_norm": 0.36865469813346863, |
| "learning_rate": 4.836293246309702e-05, |
| "loss": 0.222, |
| "step": 368 |
| }, |
| { |
| "epoch": 0.1864578069732188, |
| "grad_norm": 0.43405380845069885, |
| "learning_rate": 4.8351710052895324e-05, |
| "loss": 0.2316, |
| "step": 369 |
| }, |
| { |
| "epoch": 0.1869631126831733, |
| "grad_norm": 0.7884693145751953, |
| "learning_rate": 4.834045061921022e-05, |
| "loss": 0.3766, |
| "step": 370 |
| }, |
| { |
| "epoch": 0.18746841839312783, |
| "grad_norm": 0.8879896402359009, |
| "learning_rate": 4.83291541798932e-05, |
| "loss": 0.3775, |
| "step": 371 |
| }, |
| { |
| "epoch": 0.18797372410308236, |
| "grad_norm": 0.9513370394706726, |
| "learning_rate": 4.831782075285448e-05, |
| "loss": 0.3324, |
| "step": 372 |
| }, |
| { |
| "epoch": 0.18847902981303688, |
| "grad_norm": 0.4089733064174652, |
| "learning_rate": 4.830645035606287e-05, |
| "loss": 0.1853, |
| "step": 373 |
| }, |
| { |
| "epoch": 0.1889843355229914, |
| "grad_norm": 0.5456333756446838, |
| "learning_rate": 4.829504300754584e-05, |
| "loss": 0.2975, |
| "step": 374 |
| }, |
| { |
| "epoch": 0.18948964123294593, |
| "grad_norm": 0.5223088264465332, |
| "learning_rate": 4.82835987253894e-05, |
| "loss": 0.308, |
| "step": 375 |
| }, |
| { |
| "epoch": 0.18999494694290045, |
| "grad_norm": 0.42592930793762207, |
| "learning_rate": 4.8272117527738146e-05, |
| "loss": 0.2699, |
| "step": 376 |
| }, |
| { |
| "epoch": 0.19050025265285497, |
| "grad_norm": 0.30264854431152344, |
| "learning_rate": 4.8260599432795205e-05, |
| "loss": 0.1962, |
| "step": 377 |
| }, |
| { |
| "epoch": 0.1910055583628095, |
| "grad_norm": 0.5884383916854858, |
| "learning_rate": 4.824904445882218e-05, |
| "loss": 0.2513, |
| "step": 378 |
| }, |
| { |
| "epoch": 0.19151086407276402, |
| "grad_norm": 0.5112461447715759, |
| "learning_rate": 4.823745262413917e-05, |
| "loss": 0.2216, |
| "step": 379 |
| }, |
| { |
| "epoch": 0.19201616978271854, |
| "grad_norm": 0.5951370000839233, |
| "learning_rate": 4.822582394712471e-05, |
| "loss": 0.2971, |
| "step": 380 |
| }, |
| { |
| "epoch": 0.19252147549267307, |
| "grad_norm": 0.4729789197444916, |
| "learning_rate": 4.821415844621572e-05, |
| "loss": 0.3288, |
| "step": 381 |
| }, |
| { |
| "epoch": 0.1930267812026276, |
| "grad_norm": 0.8325059413909912, |
| "learning_rate": 4.820245613990755e-05, |
| "loss": 0.5792, |
| "step": 382 |
| }, |
| { |
| "epoch": 0.1935320869125821, |
| "grad_norm": 0.6559721827507019, |
| "learning_rate": 4.819071704675386e-05, |
| "loss": 0.3209, |
| "step": 383 |
| }, |
| { |
| "epoch": 0.19403739262253664, |
| "grad_norm": 0.5043466091156006, |
| "learning_rate": 4.817894118536667e-05, |
| "loss": 0.2815, |
| "step": 384 |
| }, |
| { |
| "epoch": 0.19454269833249116, |
| "grad_norm": 0.6874819397926331, |
| "learning_rate": 4.8167128574416266e-05, |
| "loss": 0.4657, |
| "step": 385 |
| }, |
| { |
| "epoch": 0.19504800404244568, |
| "grad_norm": 0.7303402423858643, |
| "learning_rate": 4.8155279232631214e-05, |
| "loss": 0.4223, |
| "step": 386 |
| }, |
| { |
| "epoch": 0.1955533097524002, |
| "grad_norm": 0.5754918456077576, |
| "learning_rate": 4.8143393178798326e-05, |
| "loss": 0.4584, |
| "step": 387 |
| }, |
| { |
| "epoch": 0.19605861546235473, |
| "grad_norm": 0.613545298576355, |
| "learning_rate": 4.81314704317626e-05, |
| "loss": 0.2793, |
| "step": 388 |
| }, |
| { |
| "epoch": 0.19656392117230925, |
| "grad_norm": 0.49800634384155273, |
| "learning_rate": 4.811951101042722e-05, |
| "loss": 0.3728, |
| "step": 389 |
| }, |
| { |
| "epoch": 0.19706922688226378, |
| "grad_norm": 0.3457117974758148, |
| "learning_rate": 4.8107514933753507e-05, |
| "loss": 0.3332, |
| "step": 390 |
| }, |
| { |
| "epoch": 0.1975745325922183, |
| "grad_norm": 0.4752323031425476, |
| "learning_rate": 4.8095482220760914e-05, |
| "loss": 0.249, |
| "step": 391 |
| }, |
| { |
| "epoch": 0.19807983830217282, |
| "grad_norm": 0.6126275062561035, |
| "learning_rate": 4.808341289052697e-05, |
| "loss": 0.334, |
| "step": 392 |
| }, |
| { |
| "epoch": 0.19858514401212735, |
| "grad_norm": 0.7043460011482239, |
| "learning_rate": 4.807130696218726e-05, |
| "loss": 0.3765, |
| "step": 393 |
| }, |
| { |
| "epoch": 0.19909044972208187, |
| "grad_norm": 0.410325825214386, |
| "learning_rate": 4.805916445493538e-05, |
| "loss": 0.214, |
| "step": 394 |
| }, |
| { |
| "epoch": 0.1995957554320364, |
| "grad_norm": 0.794836699962616, |
| "learning_rate": 4.8046985388022955e-05, |
| "loss": 0.3224, |
| "step": 395 |
| }, |
| { |
| "epoch": 0.20010106114199092, |
| "grad_norm": 0.3947627544403076, |
| "learning_rate": 4.8034769780759556e-05, |
| "loss": 0.2713, |
| "step": 396 |
| }, |
| { |
| "epoch": 0.20060636685194544, |
| "grad_norm": 0.6008957028388977, |
| "learning_rate": 4.8022517652512674e-05, |
| "loss": 0.3625, |
| "step": 397 |
| }, |
| { |
| "epoch": 0.20111167256189996, |
| "grad_norm": 0.796962559223175, |
| "learning_rate": 4.801022902270773e-05, |
| "loss": 0.4219, |
| "step": 398 |
| }, |
| { |
| "epoch": 0.20161697827185449, |
| "grad_norm": 0.5383734107017517, |
| "learning_rate": 4.799790391082799e-05, |
| "loss": 0.2021, |
| "step": 399 |
| }, |
| { |
| "epoch": 0.20212228398180898, |
| "grad_norm": 0.28900346159935, |
| "learning_rate": 4.7985542336414576e-05, |
| "loss": 0.1807, |
| "step": 400 |
| }, |
| { |
| "epoch": 0.2026275896917635, |
| "grad_norm": 0.883991539478302, |
| "learning_rate": 4.797314431906642e-05, |
| "loss": 0.4265, |
| "step": 401 |
| }, |
| { |
| "epoch": 0.20313289540171803, |
| "grad_norm": 0.44878625869750977, |
| "learning_rate": 4.796070987844024e-05, |
| "loss": 0.2376, |
| "step": 402 |
| }, |
| { |
| "epoch": 0.20363820111167255, |
| "grad_norm": 0.3813835680484772, |
| "learning_rate": 4.7948239034250485e-05, |
| "loss": 0.1876, |
| "step": 403 |
| }, |
| { |
| "epoch": 0.20414350682162707, |
| "grad_norm": 0.396606981754303, |
| "learning_rate": 4.793573180626934e-05, |
| "loss": 0.2111, |
| "step": 404 |
| }, |
| { |
| "epoch": 0.2046488125315816, |
| "grad_norm": 0.6839037537574768, |
| "learning_rate": 4.792318821432664e-05, |
| "loss": 0.4695, |
| "step": 405 |
| }, |
| { |
| "epoch": 0.20515411824153612, |
| "grad_norm": 0.39374062418937683, |
| "learning_rate": 4.791060827830993e-05, |
| "loss": 0.2562, |
| "step": 406 |
| }, |
| { |
| "epoch": 0.20565942395149064, |
| "grad_norm": 0.5716267824172974, |
| "learning_rate": 4.789799201816433e-05, |
| "loss": 0.3251, |
| "step": 407 |
| }, |
| { |
| "epoch": 0.20616472966144517, |
| "grad_norm": 0.4453814625740051, |
| "learning_rate": 4.788533945389257e-05, |
| "loss": 0.2136, |
| "step": 408 |
| }, |
| { |
| "epoch": 0.2066700353713997, |
| "grad_norm": 0.6336759924888611, |
| "learning_rate": 4.787265060555495e-05, |
| "loss": 0.3443, |
| "step": 409 |
| }, |
| { |
| "epoch": 0.20717534108135421, |
| "grad_norm": 0.7093701362609863, |
| "learning_rate": 4.785992549326925e-05, |
| "loss": 0.4021, |
| "step": 410 |
| }, |
| { |
| "epoch": 0.20768064679130874, |
| "grad_norm": 0.38103368878364563, |
| "learning_rate": 4.784716413721082e-05, |
| "loss": 0.2219, |
| "step": 411 |
| }, |
| { |
| "epoch": 0.20818595250126326, |
| "grad_norm": 0.49759477376937866, |
| "learning_rate": 4.783436655761241e-05, |
| "loss": 0.2649, |
| "step": 412 |
| }, |
| { |
| "epoch": 0.20869125821121778, |
| "grad_norm": 0.7099438309669495, |
| "learning_rate": 4.7821532774764235e-05, |
| "loss": 0.2655, |
| "step": 413 |
| }, |
| { |
| "epoch": 0.2091965639211723, |
| "grad_norm": 0.6431573629379272, |
| "learning_rate": 4.780866280901389e-05, |
| "loss": 0.2983, |
| "step": 414 |
| }, |
| { |
| "epoch": 0.20970186963112683, |
| "grad_norm": 0.6158983111381531, |
| "learning_rate": 4.779575668076636e-05, |
| "loss": 0.2617, |
| "step": 415 |
| }, |
| { |
| "epoch": 0.21020717534108135, |
| "grad_norm": 0.29485249519348145, |
| "learning_rate": 4.778281441048392e-05, |
| "loss": 0.1779, |
| "step": 416 |
| }, |
| { |
| "epoch": 0.21071248105103588, |
| "grad_norm": 0.45584672689437866, |
| "learning_rate": 4.7769836018686214e-05, |
| "loss": 0.2727, |
| "step": 417 |
| }, |
| { |
| "epoch": 0.2112177867609904, |
| "grad_norm": 0.5246008634567261, |
| "learning_rate": 4.77568215259501e-05, |
| "loss": 0.2185, |
| "step": 418 |
| }, |
| { |
| "epoch": 0.21172309247094492, |
| "grad_norm": 0.9197020530700684, |
| "learning_rate": 4.7743770952909694e-05, |
| "loss": 0.3985, |
| "step": 419 |
| }, |
| { |
| "epoch": 0.21222839818089945, |
| "grad_norm": 0.6227766871452332, |
| "learning_rate": 4.773068432025632e-05, |
| "loss": 0.3579, |
| "step": 420 |
| }, |
| { |
| "epoch": 0.21273370389085397, |
| "grad_norm": 0.42557641863822937, |
| "learning_rate": 4.771756164873847e-05, |
| "loss": 0.355, |
| "step": 421 |
| }, |
| { |
| "epoch": 0.2132390096008085, |
| "grad_norm": 0.2553727626800537, |
| "learning_rate": 4.770440295916176e-05, |
| "loss": 0.1222, |
| "step": 422 |
| }, |
| { |
| "epoch": 0.21374431531076302, |
| "grad_norm": 0.6675715446472168, |
| "learning_rate": 4.769120827238893e-05, |
| "loss": 0.31, |
| "step": 423 |
| }, |
| { |
| "epoch": 0.21424962102071754, |
| "grad_norm": 0.7006489634513855, |
| "learning_rate": 4.76779776093398e-05, |
| "loss": 0.5355, |
| "step": 424 |
| }, |
| { |
| "epoch": 0.21475492673067206, |
| "grad_norm": 0.49595072865486145, |
| "learning_rate": 4.766471099099119e-05, |
| "loss": 0.2717, |
| "step": 425 |
| }, |
| { |
| "epoch": 0.2152602324406266, |
| "grad_norm": 0.7701184153556824, |
| "learning_rate": 4.765140843837697e-05, |
| "loss": 0.5776, |
| "step": 426 |
| }, |
| { |
| "epoch": 0.2157655381505811, |
| "grad_norm": 0.5482222437858582, |
| "learning_rate": 4.763806997258798e-05, |
| "loss": 0.3752, |
| "step": 427 |
| }, |
| { |
| "epoch": 0.21627084386053563, |
| "grad_norm": 0.7224484086036682, |
| "learning_rate": 4.762469561477195e-05, |
| "loss": 0.4275, |
| "step": 428 |
| }, |
| { |
| "epoch": 0.21677614957049016, |
| "grad_norm": 0.5047935843467712, |
| "learning_rate": 4.7611285386133584e-05, |
| "loss": 0.3352, |
| "step": 429 |
| }, |
| { |
| "epoch": 0.21728145528044468, |
| "grad_norm": 0.7135103344917297, |
| "learning_rate": 4.7597839307934397e-05, |
| "loss": 0.4041, |
| "step": 430 |
| }, |
| { |
| "epoch": 0.2177867609903992, |
| "grad_norm": 0.622363805770874, |
| "learning_rate": 4.7584357401492784e-05, |
| "loss": 0.4279, |
| "step": 431 |
| }, |
| { |
| "epoch": 0.21829206670035373, |
| "grad_norm": 0.616588830947876, |
| "learning_rate": 4.7570839688183934e-05, |
| "loss": 0.5381, |
| "step": 432 |
| }, |
| { |
| "epoch": 0.21879737241030825, |
| "grad_norm": 0.4345376789569855, |
| "learning_rate": 4.7557286189439794e-05, |
| "loss": 0.2053, |
| "step": 433 |
| }, |
| { |
| "epoch": 0.21930267812026275, |
| "grad_norm": 0.5042337775230408, |
| "learning_rate": 4.754369692674906e-05, |
| "loss": 0.3279, |
| "step": 434 |
| }, |
| { |
| "epoch": 0.21980798383021727, |
| "grad_norm": 0.6724066734313965, |
| "learning_rate": 4.7530071921657134e-05, |
| "loss": 0.4296, |
| "step": 435 |
| }, |
| { |
| "epoch": 0.2203132895401718, |
| "grad_norm": 0.607983410358429, |
| "learning_rate": 4.751641119576607e-05, |
| "loss": 0.3913, |
| "step": 436 |
| }, |
| { |
| "epoch": 0.22081859525012631, |
| "grad_norm": 0.7748120427131653, |
| "learning_rate": 4.750271477073458e-05, |
| "loss": 0.2699, |
| "step": 437 |
| }, |
| { |
| "epoch": 0.22132390096008084, |
| "grad_norm": 0.9839958548545837, |
| "learning_rate": 4.748898266827795e-05, |
| "loss": 0.365, |
| "step": 438 |
| }, |
| { |
| "epoch": 0.22182920667003536, |
| "grad_norm": 0.8189884424209595, |
| "learning_rate": 4.747521491016805e-05, |
| "loss": 0.4648, |
| "step": 439 |
| }, |
| { |
| "epoch": 0.22233451237998988, |
| "grad_norm": 0.43248504400253296, |
| "learning_rate": 4.7461411518233286e-05, |
| "loss": 0.2829, |
| "step": 440 |
| }, |
| { |
| "epoch": 0.2228398180899444, |
| "grad_norm": 0.6537896990776062, |
| "learning_rate": 4.744757251435853e-05, |
| "loss": 0.4676, |
| "step": 441 |
| }, |
| { |
| "epoch": 0.22334512379989893, |
| "grad_norm": 0.733722448348999, |
| "learning_rate": 4.7433697920485156e-05, |
| "loss": 0.3722, |
| "step": 442 |
| }, |
| { |
| "epoch": 0.22385042950985345, |
| "grad_norm": 0.38846614956855774, |
| "learning_rate": 4.741978775861092e-05, |
| "loss": 0.1931, |
| "step": 443 |
| }, |
| { |
| "epoch": 0.22435573521980798, |
| "grad_norm": 0.5748143196105957, |
| "learning_rate": 4.7405842050790014e-05, |
| "loss": 0.3245, |
| "step": 444 |
| }, |
| { |
| "epoch": 0.2248610409297625, |
| "grad_norm": 0.674279510974884, |
| "learning_rate": 4.739186081913297e-05, |
| "loss": 0.2701, |
| "step": 445 |
| }, |
| { |
| "epoch": 0.22536634663971702, |
| "grad_norm": 0.4921652376651764, |
| "learning_rate": 4.737784408580663e-05, |
| "loss": 0.3256, |
| "step": 446 |
| }, |
| { |
| "epoch": 0.22587165234967155, |
| "grad_norm": 0.4046092629432678, |
| "learning_rate": 4.736379187303414e-05, |
| "loss": 0.2572, |
| "step": 447 |
| }, |
| { |
| "epoch": 0.22637695805962607, |
| "grad_norm": 0.935234785079956, |
| "learning_rate": 4.734970420309488e-05, |
| "loss": 0.5253, |
| "step": 448 |
| }, |
| { |
| "epoch": 0.2268822637695806, |
| "grad_norm": 0.44825106859207153, |
| "learning_rate": 4.733558109832447e-05, |
| "loss": 0.254, |
| "step": 449 |
| }, |
| { |
| "epoch": 0.22738756947953512, |
| "grad_norm": 0.45430219173431396, |
| "learning_rate": 4.732142258111468e-05, |
| "loss": 0.2459, |
| "step": 450 |
| }, |
| { |
| "epoch": 0.22789287518948964, |
| "grad_norm": 0.44165119528770447, |
| "learning_rate": 4.730722867391346e-05, |
| "loss": 0.23, |
| "step": 451 |
| }, |
| { |
| "epoch": 0.22839818089944416, |
| "grad_norm": 0.4309244155883789, |
| "learning_rate": 4.729299939922485e-05, |
| "loss": 0.2213, |
| "step": 452 |
| }, |
| { |
| "epoch": 0.2289034866093987, |
| "grad_norm": 0.6634194254875183, |
| "learning_rate": 4.727873477960896e-05, |
| "loss": 0.3263, |
| "step": 453 |
| }, |
| { |
| "epoch": 0.2294087923193532, |
| "grad_norm": 0.2821512222290039, |
| "learning_rate": 4.726443483768195e-05, |
| "loss": 0.1682, |
| "step": 454 |
| }, |
| { |
| "epoch": 0.22991409802930773, |
| "grad_norm": 0.7871589064598083, |
| "learning_rate": 4.7250099596115985e-05, |
| "loss": 0.4115, |
| "step": 455 |
| }, |
| { |
| "epoch": 0.23041940373926226, |
| "grad_norm": 0.3980810344219208, |
| "learning_rate": 4.723572907763917e-05, |
| "loss": 0.2222, |
| "step": 456 |
| }, |
| { |
| "epoch": 0.23092470944921678, |
| "grad_norm": 0.34110260009765625, |
| "learning_rate": 4.722132330503558e-05, |
| "loss": 0.2064, |
| "step": 457 |
| }, |
| { |
| "epoch": 0.2314300151591713, |
| "grad_norm": 0.5213835835456848, |
| "learning_rate": 4.7206882301145164e-05, |
| "loss": 0.3346, |
| "step": 458 |
| }, |
| { |
| "epoch": 0.23193532086912583, |
| "grad_norm": 0.6506749987602234, |
| "learning_rate": 4.719240608886372e-05, |
| "loss": 0.4456, |
| "step": 459 |
| }, |
| { |
| "epoch": 0.23244062657908035, |
| "grad_norm": 0.7075578570365906, |
| "learning_rate": 4.7177894691142874e-05, |
| "loss": 0.3906, |
| "step": 460 |
| }, |
| { |
| "epoch": 0.23294593228903487, |
| "grad_norm": 0.6361711025238037, |
| "learning_rate": 4.7163348130990055e-05, |
| "loss": 0.497, |
| "step": 461 |
| }, |
| { |
| "epoch": 0.2334512379989894, |
| "grad_norm": 0.4896164536476135, |
| "learning_rate": 4.714876643146842e-05, |
| "loss": 0.2448, |
| "step": 462 |
| }, |
| { |
| "epoch": 0.23395654370894392, |
| "grad_norm": 0.36150890588760376, |
| "learning_rate": 4.7134149615696846e-05, |
| "loss": 0.2344, |
| "step": 463 |
| }, |
| { |
| "epoch": 0.23446184941889844, |
| "grad_norm": 0.5678621530532837, |
| "learning_rate": 4.711949770684989e-05, |
| "loss": 0.594, |
| "step": 464 |
| }, |
| { |
| "epoch": 0.23496715512885297, |
| "grad_norm": 0.42508170008659363, |
| "learning_rate": 4.7104810728157745e-05, |
| "loss": 0.2382, |
| "step": 465 |
| }, |
| { |
| "epoch": 0.2354724608388075, |
| "grad_norm": 0.6501306295394897, |
| "learning_rate": 4.709008870290619e-05, |
| "loss": 0.3541, |
| "step": 466 |
| }, |
| { |
| "epoch": 0.235977766548762, |
| "grad_norm": 0.6006211638450623, |
| "learning_rate": 4.7075331654436606e-05, |
| "loss": 0.4746, |
| "step": 467 |
| }, |
| { |
| "epoch": 0.23648307225871654, |
| "grad_norm": 0.349697083234787, |
| "learning_rate": 4.706053960614586e-05, |
| "loss": 0.1804, |
| "step": 468 |
| }, |
| { |
| "epoch": 0.23698837796867103, |
| "grad_norm": 0.4210861325263977, |
| "learning_rate": 4.7045712581486345e-05, |
| "loss": 0.3136, |
| "step": 469 |
| }, |
| { |
| "epoch": 0.23749368367862556, |
| "grad_norm": 0.42396318912506104, |
| "learning_rate": 4.703085060396589e-05, |
| "loss": 0.217, |
| "step": 470 |
| }, |
| { |
| "epoch": 0.23799898938858008, |
| "grad_norm": 0.4881645739078522, |
| "learning_rate": 4.701595369714775e-05, |
| "loss": 0.3285, |
| "step": 471 |
| }, |
| { |
| "epoch": 0.2385042950985346, |
| "grad_norm": 0.33498990535736084, |
| "learning_rate": 4.7001021884650545e-05, |
| "loss": 0.2067, |
| "step": 472 |
| }, |
| { |
| "epoch": 0.23900960080848913, |
| "grad_norm": 0.5688101649284363, |
| "learning_rate": 4.6986055190148255e-05, |
| "loss": 0.2357, |
| "step": 473 |
| }, |
| { |
| "epoch": 0.23951490651844365, |
| "grad_norm": 0.4370393753051758, |
| "learning_rate": 4.697105363737014e-05, |
| "loss": 0.213, |
| "step": 474 |
| }, |
| { |
| "epoch": 0.24002021222839817, |
| "grad_norm": 0.31808745861053467, |
| "learning_rate": 4.695601725010077e-05, |
| "loss": 0.2219, |
| "step": 475 |
| }, |
| { |
| "epoch": 0.2405255179383527, |
| "grad_norm": 0.533358633518219, |
| "learning_rate": 4.694094605217989e-05, |
| "loss": 0.2173, |
| "step": 476 |
| }, |
| { |
| "epoch": 0.24103082364830722, |
| "grad_norm": 0.4781337380409241, |
| "learning_rate": 4.6925840067502475e-05, |
| "loss": 0.3474, |
| "step": 477 |
| }, |
| { |
| "epoch": 0.24153612935826174, |
| "grad_norm": 0.43365997076034546, |
| "learning_rate": 4.691069932001864e-05, |
| "loss": 0.1846, |
| "step": 478 |
| }, |
| { |
| "epoch": 0.24204143506821627, |
| "grad_norm": 0.6059222221374512, |
| "learning_rate": 4.689552383373361e-05, |
| "loss": 0.3881, |
| "step": 479 |
| }, |
| { |
| "epoch": 0.2425467407781708, |
| "grad_norm": 0.5931808352470398, |
| "learning_rate": 4.6880313632707714e-05, |
| "loss": 0.2737, |
| "step": 480 |
| }, |
| { |
| "epoch": 0.2430520464881253, |
| "grad_norm": 0.4886901378631592, |
| "learning_rate": 4.6865068741056276e-05, |
| "loss": 0.2409, |
| "step": 481 |
| }, |
| { |
| "epoch": 0.24355735219807984, |
| "grad_norm": 0.8345271348953247, |
| "learning_rate": 4.684978918294967e-05, |
| "loss": 0.3453, |
| "step": 482 |
| }, |
| { |
| "epoch": 0.24406265790803436, |
| "grad_norm": 0.5799844861030579, |
| "learning_rate": 4.683447498261318e-05, |
| "loss": 0.403, |
| "step": 483 |
| }, |
| { |
| "epoch": 0.24456796361798888, |
| "grad_norm": 0.5229687690734863, |
| "learning_rate": 4.6819126164327067e-05, |
| "loss": 0.3206, |
| "step": 484 |
| }, |
| { |
| "epoch": 0.2450732693279434, |
| "grad_norm": 0.8277320861816406, |
| "learning_rate": 4.680374275242644e-05, |
| "loss": 0.5465, |
| "step": 485 |
| }, |
| { |
| "epoch": 0.24557857503789793, |
| "grad_norm": 0.6758053302764893, |
| "learning_rate": 4.678832477130127e-05, |
| "loss": 0.3294, |
| "step": 486 |
| }, |
| { |
| "epoch": 0.24608388074785245, |
| "grad_norm": 0.5581978559494019, |
| "learning_rate": 4.677287224539634e-05, |
| "loss": 0.2746, |
| "step": 487 |
| }, |
| { |
| "epoch": 0.24658918645780697, |
| "grad_norm": 0.21748855710029602, |
| "learning_rate": 4.675738519921119e-05, |
| "loss": 0.1429, |
| "step": 488 |
| }, |
| { |
| "epoch": 0.2470944921677615, |
| "grad_norm": 1.0185459852218628, |
| "learning_rate": 4.6741863657300114e-05, |
| "loss": 0.4322, |
| "step": 489 |
| }, |
| { |
| "epoch": 0.24759979787771602, |
| "grad_norm": 0.37968578934669495, |
| "learning_rate": 4.672630764427206e-05, |
| "loss": 0.2318, |
| "step": 490 |
| }, |
| { |
| "epoch": 0.24810510358767054, |
| "grad_norm": 0.32278138399124146, |
| "learning_rate": 4.671071718479067e-05, |
| "loss": 0.2341, |
| "step": 491 |
| }, |
| { |
| "epoch": 0.24861040929762507, |
| "grad_norm": 0.49912557005882263, |
| "learning_rate": 4.669509230357418e-05, |
| "loss": 0.3063, |
| "step": 492 |
| }, |
| { |
| "epoch": 0.2491157150075796, |
| "grad_norm": 0.6750683188438416, |
| "learning_rate": 4.667943302539539e-05, |
| "loss": 0.3489, |
| "step": 493 |
| }, |
| { |
| "epoch": 0.24962102071753411, |
| "grad_norm": 0.6013567447662354, |
| "learning_rate": 4.666373937508166e-05, |
| "loss": 0.3084, |
| "step": 494 |
| }, |
| { |
| "epoch": 0.2501263264274886, |
| "grad_norm": 0.4183938503265381, |
| "learning_rate": 4.6648011377514834e-05, |
| "loss": 0.2177, |
| "step": 495 |
| }, |
| { |
| "epoch": 0.25063163213744316, |
| "grad_norm": 0.5014682412147522, |
| "learning_rate": 4.6632249057631205e-05, |
| "loss": 0.189, |
| "step": 496 |
| }, |
| { |
| "epoch": 0.25113693784739766, |
| "grad_norm": 0.7215374708175659, |
| "learning_rate": 4.661645244042149e-05, |
| "loss": 0.4073, |
| "step": 497 |
| }, |
| { |
| "epoch": 0.2516422435573522, |
| "grad_norm": 0.5658249855041504, |
| "learning_rate": 4.660062155093079e-05, |
| "loss": 0.3558, |
| "step": 498 |
| }, |
| { |
| "epoch": 0.2521475492673067, |
| "grad_norm": 0.5246372818946838, |
| "learning_rate": 4.6584756414258534e-05, |
| "loss": 0.2615, |
| "step": 499 |
| }, |
| { |
| "epoch": 0.25265285497726125, |
| "grad_norm": 0.4146447777748108, |
| "learning_rate": 4.656885705555846e-05, |
| "loss": 0.1839, |
| "step": 500 |
| }, |
| { |
| "epoch": 0.25315816068721575, |
| "grad_norm": 0.7092597484588623, |
| "learning_rate": 4.6552923500038544e-05, |
| "loss": 0.4383, |
| "step": 501 |
| }, |
| { |
| "epoch": 0.2536634663971703, |
| "grad_norm": 0.44563567638397217, |
| "learning_rate": 4.6536955772961e-05, |
| "loss": 0.2642, |
| "step": 502 |
| }, |
| { |
| "epoch": 0.2541687721071248, |
| "grad_norm": 0.4488155245780945, |
| "learning_rate": 4.652095389964222e-05, |
| "loss": 0.3054, |
| "step": 503 |
| }, |
| { |
| "epoch": 0.25467407781707935, |
| "grad_norm": 0.828281044960022, |
| "learning_rate": 4.6504917905452704e-05, |
| "loss": 0.5113, |
| "step": 504 |
| }, |
| { |
| "epoch": 0.25517938352703384, |
| "grad_norm": 1.007767677307129, |
| "learning_rate": 4.6488847815817096e-05, |
| "loss": 0.6539, |
| "step": 505 |
| }, |
| { |
| "epoch": 0.2556846892369884, |
| "grad_norm": 0.3789096176624298, |
| "learning_rate": 4.647274365621407e-05, |
| "loss": 0.277, |
| "step": 506 |
| }, |
| { |
| "epoch": 0.2561899949469429, |
| "grad_norm": 0.37934887409210205, |
| "learning_rate": 4.6456605452176306e-05, |
| "loss": 0.2674, |
| "step": 507 |
| }, |
| { |
| "epoch": 0.25669530065689744, |
| "grad_norm": 0.39194759726524353, |
| "learning_rate": 4.644043322929049e-05, |
| "loss": 0.2938, |
| "step": 508 |
| }, |
| { |
| "epoch": 0.25720060636685194, |
| "grad_norm": 0.5336602926254272, |
| "learning_rate": 4.642422701319723e-05, |
| "loss": 0.4041, |
| "step": 509 |
| }, |
| { |
| "epoch": 0.2577059120768065, |
| "grad_norm": 0.3528728783130646, |
| "learning_rate": 4.640798682959101e-05, |
| "loss": 0.2254, |
| "step": 510 |
| }, |
| { |
| "epoch": 0.258211217786761, |
| "grad_norm": 0.42827707529067993, |
| "learning_rate": 4.6391712704220215e-05, |
| "loss": 0.1916, |
| "step": 511 |
| }, |
| { |
| "epoch": 0.25871652349671553, |
| "grad_norm": 0.4275967478752136, |
| "learning_rate": 4.637540466288699e-05, |
| "loss": 0.2075, |
| "step": 512 |
| }, |
| { |
| "epoch": 0.25922182920667003, |
| "grad_norm": 0.8210417032241821, |
| "learning_rate": 4.6359062731447296e-05, |
| "loss": 0.5352, |
| "step": 513 |
| }, |
| { |
| "epoch": 0.2597271349166246, |
| "grad_norm": 0.4613182544708252, |
| "learning_rate": 4.63426869358108e-05, |
| "loss": 0.2759, |
| "step": 514 |
| }, |
| { |
| "epoch": 0.2602324406265791, |
| "grad_norm": 0.3744739890098572, |
| "learning_rate": 4.632627730194087e-05, |
| "loss": 0.2091, |
| "step": 515 |
| }, |
| { |
| "epoch": 0.2607377463365336, |
| "grad_norm": 0.43345507979393005, |
| "learning_rate": 4.630983385585452e-05, |
| "loss": 0.3114, |
| "step": 516 |
| }, |
| { |
| "epoch": 0.2612430520464881, |
| "grad_norm": 1.5208643674850464, |
| "learning_rate": 4.6293356623622376e-05, |
| "loss": 0.5386, |
| "step": 517 |
| }, |
| { |
| "epoch": 0.2617483577564427, |
| "grad_norm": 0.3538508117198944, |
| "learning_rate": 4.627684563136863e-05, |
| "loss": 0.2279, |
| "step": 518 |
| }, |
| { |
| "epoch": 0.26225366346639717, |
| "grad_norm": 0.5201990008354187, |
| "learning_rate": 4.6260300905271e-05, |
| "loss": 0.2865, |
| "step": 519 |
| }, |
| { |
| "epoch": 0.2627589691763517, |
| "grad_norm": 0.8713021874427795, |
| "learning_rate": 4.624372247156068e-05, |
| "loss": 0.5142, |
| "step": 520 |
| }, |
| { |
| "epoch": 0.2632642748863062, |
| "grad_norm": 0.8699602484703064, |
| "learning_rate": 4.622711035652232e-05, |
| "loss": 0.6462, |
| "step": 521 |
| }, |
| { |
| "epoch": 0.2637695805962607, |
| "grad_norm": 0.3401387333869934, |
| "learning_rate": 4.6210464586493963e-05, |
| "loss": 0.1582, |
| "step": 522 |
| }, |
| { |
| "epoch": 0.26427488630621526, |
| "grad_norm": 0.5219199061393738, |
| "learning_rate": 4.6193785187867005e-05, |
| "loss": 0.3941, |
| "step": 523 |
| }, |
| { |
| "epoch": 0.26478019201616976, |
| "grad_norm": 0.4327852129936218, |
| "learning_rate": 4.617707218708617e-05, |
| "loss": 0.2176, |
| "step": 524 |
| }, |
| { |
| "epoch": 0.2652854977261243, |
| "grad_norm": 0.4247454106807709, |
| "learning_rate": 4.6160325610649465e-05, |
| "loss": 0.1823, |
| "step": 525 |
| }, |
| { |
| "epoch": 0.2657908034360788, |
| "grad_norm": 0.8779489398002625, |
| "learning_rate": 4.6143545485108094e-05, |
| "loss": 0.5002, |
| "step": 526 |
| }, |
| { |
| "epoch": 0.26629610914603336, |
| "grad_norm": 0.7940444946289062, |
| "learning_rate": 4.6126731837066505e-05, |
| "loss": 0.334, |
| "step": 527 |
| }, |
| { |
| "epoch": 0.26680141485598785, |
| "grad_norm": 0.7262102365493774, |
| "learning_rate": 4.610988469318225e-05, |
| "loss": 0.3101, |
| "step": 528 |
| }, |
| { |
| "epoch": 0.2673067205659424, |
| "grad_norm": 0.36386728286743164, |
| "learning_rate": 4.6093004080166e-05, |
| "loss": 0.2009, |
| "step": 529 |
| }, |
| { |
| "epoch": 0.2678120262758969, |
| "grad_norm": 0.31724250316619873, |
| "learning_rate": 4.607609002478152e-05, |
| "loss": 0.1564, |
| "step": 530 |
| }, |
| { |
| "epoch": 0.26831733198585145, |
| "grad_norm": 0.9002480506896973, |
| "learning_rate": 4.605914255384554e-05, |
| "loss": 0.3996, |
| "step": 531 |
| }, |
| { |
| "epoch": 0.26882263769580594, |
| "grad_norm": 0.28965526819229126, |
| "learning_rate": 4.604216169422783e-05, |
| "loss": 0.1917, |
| "step": 532 |
| }, |
| { |
| "epoch": 0.2693279434057605, |
| "grad_norm": 0.3934868574142456, |
| "learning_rate": 4.6025147472851036e-05, |
| "loss": 0.2175, |
| "step": 533 |
| }, |
| { |
| "epoch": 0.269833249115715, |
| "grad_norm": 1.457297682762146, |
| "learning_rate": 4.600809991669076e-05, |
| "loss": 0.5182, |
| "step": 534 |
| }, |
| { |
| "epoch": 0.27033855482566954, |
| "grad_norm": 0.6048038601875305, |
| "learning_rate": 4.599101905277541e-05, |
| "loss": 0.3971, |
| "step": 535 |
| }, |
| { |
| "epoch": 0.27084386053562404, |
| "grad_norm": 0.4743158519268036, |
| "learning_rate": 4.597390490818623e-05, |
| "loss": 0.5295, |
| "step": 536 |
| }, |
| { |
| "epoch": 0.2713491662455786, |
| "grad_norm": 0.7490699291229248, |
| "learning_rate": 4.59567575100572e-05, |
| "loss": 0.3392, |
| "step": 537 |
| }, |
| { |
| "epoch": 0.2718544719555331, |
| "grad_norm": 0.66379314661026, |
| "learning_rate": 4.5939576885575065e-05, |
| "loss": 0.2745, |
| "step": 538 |
| }, |
| { |
| "epoch": 0.27235977766548763, |
| "grad_norm": 0.3140435218811035, |
| "learning_rate": 4.59223630619792e-05, |
| "loss": 0.2245, |
| "step": 539 |
| }, |
| { |
| "epoch": 0.27286508337544213, |
| "grad_norm": 0.6745867133140564, |
| "learning_rate": 4.590511606656165e-05, |
| "loss": 0.3131, |
| "step": 540 |
| }, |
| { |
| "epoch": 0.2733703890853967, |
| "grad_norm": 0.7853817939758301, |
| "learning_rate": 4.588783592666704e-05, |
| "loss": 0.5628, |
| "step": 541 |
| }, |
| { |
| "epoch": 0.2738756947953512, |
| "grad_norm": 0.6165689826011658, |
| "learning_rate": 4.5870522669692574e-05, |
| "loss": 0.2666, |
| "step": 542 |
| }, |
| { |
| "epoch": 0.2743810005053057, |
| "grad_norm": 0.5359889268875122, |
| "learning_rate": 4.58531763230879e-05, |
| "loss": 0.3545, |
| "step": 543 |
| }, |
| { |
| "epoch": 0.2748863062152602, |
| "grad_norm": 0.6092820763587952, |
| "learning_rate": 4.5835796914355195e-05, |
| "loss": 0.3448, |
| "step": 544 |
| }, |
| { |
| "epoch": 0.2753916119252148, |
| "grad_norm": 0.6095251441001892, |
| "learning_rate": 4.5818384471049006e-05, |
| "loss": 0.2423, |
| "step": 545 |
| }, |
| { |
| "epoch": 0.27589691763516927, |
| "grad_norm": 0.5478048920631409, |
| "learning_rate": 4.5800939020776286e-05, |
| "loss": 0.3708, |
| "step": 546 |
| }, |
| { |
| "epoch": 0.2764022233451238, |
| "grad_norm": 0.3708425462245941, |
| "learning_rate": 4.5783460591196305e-05, |
| "loss": 0.228, |
| "step": 547 |
| }, |
| { |
| "epoch": 0.2769075290550783, |
| "grad_norm": 0.7903876304626465, |
| "learning_rate": 4.576594921002062e-05, |
| "loss": 0.3286, |
| "step": 548 |
| }, |
| { |
| "epoch": 0.27741283476503287, |
| "grad_norm": 0.40874460339546204, |
| "learning_rate": 4.5748404905013045e-05, |
| "loss": 0.3423, |
| "step": 549 |
| }, |
| { |
| "epoch": 0.27791814047498736, |
| "grad_norm": 0.2980653941631317, |
| "learning_rate": 4.5730827703989574e-05, |
| "loss": 0.2125, |
| "step": 550 |
| }, |
| { |
| "epoch": 0.2784234461849419, |
| "grad_norm": 0.6296504139900208, |
| "learning_rate": 4.5713217634818384e-05, |
| "loss": 0.4838, |
| "step": 551 |
| }, |
| { |
| "epoch": 0.2789287518948964, |
| "grad_norm": 0.3194442391395569, |
| "learning_rate": 4.569557472541973e-05, |
| "loss": 0.2049, |
| "step": 552 |
| }, |
| { |
| "epoch": 0.27943405760485096, |
| "grad_norm": 0.4441561698913574, |
| "learning_rate": 4.5677899003765965e-05, |
| "loss": 0.2307, |
| "step": 553 |
| }, |
| { |
| "epoch": 0.27993936331480546, |
| "grad_norm": 0.6459462642669678, |
| "learning_rate": 4.566019049788145e-05, |
| "loss": 0.4587, |
| "step": 554 |
| }, |
| { |
| "epoch": 0.28044466902476, |
| "grad_norm": 0.5939819812774658, |
| "learning_rate": 4.564244923584251e-05, |
| "loss": 0.3031, |
| "step": 555 |
| }, |
| { |
| "epoch": 0.2809499747347145, |
| "grad_norm": 0.5529703497886658, |
| "learning_rate": 4.562467524577744e-05, |
| "loss": 0.2677, |
| "step": 556 |
| }, |
| { |
| "epoch": 0.281455280444669, |
| "grad_norm": 0.8261821269989014, |
| "learning_rate": 4.56068685558664e-05, |
| "loss": 0.3718, |
| "step": 557 |
| }, |
| { |
| "epoch": 0.28196058615462355, |
| "grad_norm": 0.42143872380256653, |
| "learning_rate": 4.558902919434139e-05, |
| "loss": 0.3227, |
| "step": 558 |
| }, |
| { |
| "epoch": 0.28246589186457804, |
| "grad_norm": 0.5339514017105103, |
| "learning_rate": 4.557115718948622e-05, |
| "loss": 0.2646, |
| "step": 559 |
| }, |
| { |
| "epoch": 0.2829711975745326, |
| "grad_norm": 0.4582768380641937, |
| "learning_rate": 4.5553252569636466e-05, |
| "loss": 0.252, |
| "step": 560 |
| }, |
| { |
| "epoch": 0.2834765032844871, |
| "grad_norm": 0.4415362477302551, |
| "learning_rate": 4.5535315363179395e-05, |
| "loss": 0.2225, |
| "step": 561 |
| }, |
| { |
| "epoch": 0.28398180899444164, |
| "grad_norm": 0.6997858881950378, |
| "learning_rate": 4.551734559855394e-05, |
| "loss": 0.4024, |
| "step": 562 |
| }, |
| { |
| "epoch": 0.28448711470439614, |
| "grad_norm": 0.34175369143486023, |
| "learning_rate": 4.549934330425067e-05, |
| "loss": 0.2033, |
| "step": 563 |
| }, |
| { |
| "epoch": 0.2849924204143507, |
| "grad_norm": 0.589993953704834, |
| "learning_rate": 4.548130850881171e-05, |
| "loss": 0.3179, |
| "step": 564 |
| }, |
| { |
| "epoch": 0.2854977261243052, |
| "grad_norm": 0.4499538242816925, |
| "learning_rate": 4.546324124083073e-05, |
| "loss": 0.3204, |
| "step": 565 |
| }, |
| { |
| "epoch": 0.28600303183425974, |
| "grad_norm": 0.33767470717430115, |
| "learning_rate": 4.544514152895289e-05, |
| "loss": 0.1882, |
| "step": 566 |
| }, |
| { |
| "epoch": 0.28650833754421423, |
| "grad_norm": 0.2902340292930603, |
| "learning_rate": 4.542700940187476e-05, |
| "loss": 0.1537, |
| "step": 567 |
| }, |
| { |
| "epoch": 0.2870136432541688, |
| "grad_norm": 0.9272497892379761, |
| "learning_rate": 4.5408844888344315e-05, |
| "loss": 0.6278, |
| "step": 568 |
| }, |
| { |
| "epoch": 0.2875189489641233, |
| "grad_norm": 0.4656500816345215, |
| "learning_rate": 4.5390648017160904e-05, |
| "loss": 0.2201, |
| "step": 569 |
| }, |
| { |
| "epoch": 0.28802425467407783, |
| "grad_norm": 0.6928651928901672, |
| "learning_rate": 4.5372418817175146e-05, |
| "loss": 0.3212, |
| "step": 570 |
| }, |
| { |
| "epoch": 0.2885295603840323, |
| "grad_norm": 0.40516453981399536, |
| "learning_rate": 4.535415731728892e-05, |
| "loss": 0.2295, |
| "step": 571 |
| }, |
| { |
| "epoch": 0.2890348660939869, |
| "grad_norm": 0.8511061072349548, |
| "learning_rate": 4.533586354645534e-05, |
| "loss": 0.6501, |
| "step": 572 |
| }, |
| { |
| "epoch": 0.28954017180394137, |
| "grad_norm": 0.3017459809780121, |
| "learning_rate": 4.531753753367867e-05, |
| "loss": 0.1855, |
| "step": 573 |
| }, |
| { |
| "epoch": 0.2900454775138959, |
| "grad_norm": 0.6671580076217651, |
| "learning_rate": 4.5299179308014266e-05, |
| "loss": 0.5083, |
| "step": 574 |
| }, |
| { |
| "epoch": 0.2905507832238504, |
| "grad_norm": 0.323030561208725, |
| "learning_rate": 4.52807888985686e-05, |
| "loss": 0.228, |
| "step": 575 |
| }, |
| { |
| "epoch": 0.29105608893380497, |
| "grad_norm": 0.5347647666931152, |
| "learning_rate": 4.526236633449916e-05, |
| "loss": 0.2377, |
| "step": 576 |
| }, |
| { |
| "epoch": 0.29156139464375946, |
| "grad_norm": 0.4077557325363159, |
| "learning_rate": 4.524391164501439e-05, |
| "loss": 0.2062, |
| "step": 577 |
| }, |
| { |
| "epoch": 0.292066700353714, |
| "grad_norm": 0.2940428853034973, |
| "learning_rate": 4.522542485937369e-05, |
| "loss": 0.2001, |
| "step": 578 |
| }, |
| { |
| "epoch": 0.2925720060636685, |
| "grad_norm": 0.48638758063316345, |
| "learning_rate": 4.520690600688734e-05, |
| "loss": 0.4731, |
| "step": 579 |
| }, |
| { |
| "epoch": 0.29307731177362306, |
| "grad_norm": 0.6385740637779236, |
| "learning_rate": 4.5188355116916465e-05, |
| "loss": 0.3074, |
| "step": 580 |
| }, |
| { |
| "epoch": 0.29358261748357756, |
| "grad_norm": 0.6961050033569336, |
| "learning_rate": 4.516977221887299e-05, |
| "loss": 0.4279, |
| "step": 581 |
| }, |
| { |
| "epoch": 0.2940879231935321, |
| "grad_norm": 0.3082885444164276, |
| "learning_rate": 4.515115734221956e-05, |
| "loss": 0.1595, |
| "step": 582 |
| }, |
| { |
| "epoch": 0.2945932289034866, |
| "grad_norm": 0.27871477603912354, |
| "learning_rate": 4.5132510516469553e-05, |
| "loss": 0.3318, |
| "step": 583 |
| }, |
| { |
| "epoch": 0.29509853461344115, |
| "grad_norm": 0.464947909116745, |
| "learning_rate": 4.5113831771187e-05, |
| "loss": 0.2083, |
| "step": 584 |
| }, |
| { |
| "epoch": 0.29560384032339565, |
| "grad_norm": 0.6374766230583191, |
| "learning_rate": 4.509512113598652e-05, |
| "loss": 0.3364, |
| "step": 585 |
| }, |
| { |
| "epoch": 0.2961091460333502, |
| "grad_norm": 0.6303216814994812, |
| "learning_rate": 4.507637864053329e-05, |
| "loss": 0.4063, |
| "step": 586 |
| }, |
| { |
| "epoch": 0.2966144517433047, |
| "grad_norm": 0.7377685904502869, |
| "learning_rate": 4.5057604314543033e-05, |
| "loss": 0.3406, |
| "step": 587 |
| }, |
| { |
| "epoch": 0.29711975745325925, |
| "grad_norm": 0.6760829091072083, |
| "learning_rate": 4.503879818778191e-05, |
| "loss": 0.3499, |
| "step": 588 |
| }, |
| { |
| "epoch": 0.29762506316321374, |
| "grad_norm": 0.8320351839065552, |
| "learning_rate": 4.501996029006651e-05, |
| "loss": 0.3893, |
| "step": 589 |
| }, |
| { |
| "epoch": 0.2981303688731683, |
| "grad_norm": 0.657802402973175, |
| "learning_rate": 4.500109065126379e-05, |
| "loss": 0.4104, |
| "step": 590 |
| }, |
| { |
| "epoch": 0.2986356745831228, |
| "grad_norm": 0.4237852394580841, |
| "learning_rate": 4.4982189301291037e-05, |
| "loss": 0.2241, |
| "step": 591 |
| }, |
| { |
| "epoch": 0.2991409802930773, |
| "grad_norm": 0.4519661068916321, |
| "learning_rate": 4.496325627011581e-05, |
| "loss": 0.2499, |
| "step": 592 |
| }, |
| { |
| "epoch": 0.29964628600303184, |
| "grad_norm": 0.9270481467247009, |
| "learning_rate": 4.49442915877559e-05, |
| "loss": 0.3713, |
| "step": 593 |
| }, |
| { |
| "epoch": 0.30015159171298633, |
| "grad_norm": 1.016149640083313, |
| "learning_rate": 4.492529528427929e-05, |
| "loss": 0.8942, |
| "step": 594 |
| }, |
| { |
| "epoch": 0.3006568974229409, |
| "grad_norm": 0.617546558380127, |
| "learning_rate": 4.490626738980408e-05, |
| "loss": 0.361, |
| "step": 595 |
| }, |
| { |
| "epoch": 0.3011622031328954, |
| "grad_norm": 0.873954176902771, |
| "learning_rate": 4.488720793449847e-05, |
| "loss": 0.3791, |
| "step": 596 |
| }, |
| { |
| "epoch": 0.30166750884284993, |
| "grad_norm": 0.4339880347251892, |
| "learning_rate": 4.486811694858069e-05, |
| "loss": 0.197, |
| "step": 597 |
| }, |
| { |
| "epoch": 0.3021728145528044, |
| "grad_norm": 0.8034185171127319, |
| "learning_rate": 4.484899446231896e-05, |
| "loss": 0.3023, |
| "step": 598 |
| }, |
| { |
| "epoch": 0.302678120262759, |
| "grad_norm": 0.6054482460021973, |
| "learning_rate": 4.4829840506031454e-05, |
| "loss": 0.2276, |
| "step": 599 |
| }, |
| { |
| "epoch": 0.30318342597271347, |
| "grad_norm": 0.37860724329948425, |
| "learning_rate": 4.481065511008623e-05, |
| "loss": 0.2614, |
| "step": 600 |
| }, |
| { |
| "epoch": 0.303688731682668, |
| "grad_norm": 0.6753828525543213, |
| "learning_rate": 4.47914383049012e-05, |
| "loss": 0.4661, |
| "step": 601 |
| }, |
| { |
| "epoch": 0.3041940373926225, |
| "grad_norm": 0.8002330660820007, |
| "learning_rate": 4.477219012094407e-05, |
| "loss": 0.7009, |
| "step": 602 |
| }, |
| { |
| "epoch": 0.30469934310257707, |
| "grad_norm": 0.45732784271240234, |
| "learning_rate": 4.475291058873231e-05, |
| "loss": 0.2619, |
| "step": 603 |
| }, |
| { |
| "epoch": 0.30520464881253156, |
| "grad_norm": 0.6500813364982605, |
| "learning_rate": 4.473359973883305e-05, |
| "loss": 0.6237, |
| "step": 604 |
| }, |
| { |
| "epoch": 0.3057099545224861, |
| "grad_norm": 0.4476456642150879, |
| "learning_rate": 4.471425760186313e-05, |
| "loss": 0.2877, |
| "step": 605 |
| }, |
| { |
| "epoch": 0.3062152602324406, |
| "grad_norm": 0.2967967689037323, |
| "learning_rate": 4.4694884208488955e-05, |
| "loss": 0.2686, |
| "step": 606 |
| }, |
| { |
| "epoch": 0.30672056594239516, |
| "grad_norm": 0.320054829120636, |
| "learning_rate": 4.4675479589426496e-05, |
| "loss": 0.1553, |
| "step": 607 |
| }, |
| { |
| "epoch": 0.30722587165234966, |
| "grad_norm": 0.7721436619758606, |
| "learning_rate": 4.465604377544124e-05, |
| "loss": 0.3458, |
| "step": 608 |
| }, |
| { |
| "epoch": 0.3077311773623042, |
| "grad_norm": 0.25658470392227173, |
| "learning_rate": 4.463657679734813e-05, |
| "loss": 0.1976, |
| "step": 609 |
| }, |
| { |
| "epoch": 0.3082364830722587, |
| "grad_norm": 0.5959826707839966, |
| "learning_rate": 4.4617078686011506e-05, |
| "loss": 0.3751, |
| "step": 610 |
| }, |
| { |
| "epoch": 0.30874178878221326, |
| "grad_norm": 0.7740558981895447, |
| "learning_rate": 4.459754947234508e-05, |
| "loss": 0.5662, |
| "step": 611 |
| }, |
| { |
| "epoch": 0.30924709449216775, |
| "grad_norm": 0.4466153085231781, |
| "learning_rate": 4.4577989187311886e-05, |
| "loss": 0.2398, |
| "step": 612 |
| }, |
| { |
| "epoch": 0.3097524002021223, |
| "grad_norm": 0.40263572335243225, |
| "learning_rate": 4.4558397861924195e-05, |
| "loss": 0.2314, |
| "step": 613 |
| }, |
| { |
| "epoch": 0.3102577059120768, |
| "grad_norm": 0.5410189032554626, |
| "learning_rate": 4.453877552724352e-05, |
| "loss": 0.3845, |
| "step": 614 |
| }, |
| { |
| "epoch": 0.31076301162203135, |
| "grad_norm": 0.32013383507728577, |
| "learning_rate": 4.451912221438051e-05, |
| "loss": 0.1808, |
| "step": 615 |
| }, |
| { |
| "epoch": 0.31126831733198584, |
| "grad_norm": 0.8941938877105713, |
| "learning_rate": 4.449943795449494e-05, |
| "loss": 0.4481, |
| "step": 616 |
| }, |
| { |
| "epoch": 0.3117736230419404, |
| "grad_norm": 0.5344945788383484, |
| "learning_rate": 4.4479722778795675e-05, |
| "loss": 0.3771, |
| "step": 617 |
| }, |
| { |
| "epoch": 0.3122789287518949, |
| "grad_norm": 0.5631364583969116, |
| "learning_rate": 4.4459976718540555e-05, |
| "loss": 0.3271, |
| "step": 618 |
| }, |
| { |
| "epoch": 0.31278423446184944, |
| "grad_norm": 0.33273768424987793, |
| "learning_rate": 4.444019980503641e-05, |
| "loss": 0.1694, |
| "step": 619 |
| }, |
| { |
| "epoch": 0.31328954017180394, |
| "grad_norm": 0.7633798122406006, |
| "learning_rate": 4.4420392069638994e-05, |
| "loss": 0.3704, |
| "step": 620 |
| }, |
| { |
| "epoch": 0.3137948458817585, |
| "grad_norm": 0.45242956280708313, |
| "learning_rate": 4.44005535437529e-05, |
| "loss": 0.1935, |
| "step": 621 |
| }, |
| { |
| "epoch": 0.314300151591713, |
| "grad_norm": 0.8318801522254944, |
| "learning_rate": 4.438068425883158e-05, |
| "loss": 0.3171, |
| "step": 622 |
| }, |
| { |
| "epoch": 0.31480545730166754, |
| "grad_norm": 0.5340676307678223, |
| "learning_rate": 4.4360784246377205e-05, |
| "loss": 0.2594, |
| "step": 623 |
| }, |
| { |
| "epoch": 0.31531076301162203, |
| "grad_norm": 0.9593076705932617, |
| "learning_rate": 4.434085353794071e-05, |
| "loss": 0.813, |
| "step": 624 |
| }, |
| { |
| "epoch": 0.3158160687215765, |
| "grad_norm": 0.7513559460639954, |
| "learning_rate": 4.432089216512166e-05, |
| "loss": 0.3647, |
| "step": 625 |
| }, |
| { |
| "epoch": 0.3163213744315311, |
| "grad_norm": 0.5096583366394043, |
| "learning_rate": 4.430090015956826e-05, |
| "loss": 0.2323, |
| "step": 626 |
| }, |
| { |
| "epoch": 0.3168266801414856, |
| "grad_norm": 0.587281346321106, |
| "learning_rate": 4.4280877552977276e-05, |
| "loss": 0.2932, |
| "step": 627 |
| }, |
| { |
| "epoch": 0.3173319858514401, |
| "grad_norm": 0.575646698474884, |
| "learning_rate": 4.426082437709399e-05, |
| "loss": 0.532, |
| "step": 628 |
| }, |
| { |
| "epoch": 0.3178372915613946, |
| "grad_norm": 0.38853687047958374, |
| "learning_rate": 4.4240740663712155e-05, |
| "loss": 0.1897, |
| "step": 629 |
| }, |
| { |
| "epoch": 0.31834259727134917, |
| "grad_norm": 0.7989168167114258, |
| "learning_rate": 4.4220626444673926e-05, |
| "loss": 0.4079, |
| "step": 630 |
| }, |
| { |
| "epoch": 0.31884790298130367, |
| "grad_norm": 0.783780574798584, |
| "learning_rate": 4.420048175186985e-05, |
| "loss": 0.3934, |
| "step": 631 |
| }, |
| { |
| "epoch": 0.3193532086912582, |
| "grad_norm": 0.7664374113082886, |
| "learning_rate": 4.418030661723877e-05, |
| "loss": 0.2589, |
| "step": 632 |
| }, |
| { |
| "epoch": 0.3198585144012127, |
| "grad_norm": 0.9156386852264404, |
| "learning_rate": 4.416010107276779e-05, |
| "loss": 0.3592, |
| "step": 633 |
| }, |
| { |
| "epoch": 0.32036382011116726, |
| "grad_norm": 0.4596467912197113, |
| "learning_rate": 4.413986515049224e-05, |
| "loss": 0.3141, |
| "step": 634 |
| }, |
| { |
| "epoch": 0.32086912582112176, |
| "grad_norm": 0.5608931183815002, |
| "learning_rate": 4.4119598882495606e-05, |
| "loss": 0.2723, |
| "step": 635 |
| }, |
| { |
| "epoch": 0.3213744315310763, |
| "grad_norm": 0.6327331066131592, |
| "learning_rate": 4.40993023009095e-05, |
| "loss": 0.273, |
| "step": 636 |
| }, |
| { |
| "epoch": 0.3218797372410308, |
| "grad_norm": 0.46783167123794556, |
| "learning_rate": 4.4078975437913575e-05, |
| "loss": 0.3164, |
| "step": 637 |
| }, |
| { |
| "epoch": 0.32238504295098536, |
| "grad_norm": 0.9565727710723877, |
| "learning_rate": 4.4058618325735506e-05, |
| "loss": 0.5771, |
| "step": 638 |
| }, |
| { |
| "epoch": 0.32289034866093985, |
| "grad_norm": 0.8507360816001892, |
| "learning_rate": 4.403823099665093e-05, |
| "loss": 0.5183, |
| "step": 639 |
| }, |
| { |
| "epoch": 0.3233956543708944, |
| "grad_norm": 0.8383386135101318, |
| "learning_rate": 4.401781348298338e-05, |
| "loss": 0.4447, |
| "step": 640 |
| }, |
| { |
| "epoch": 0.3239009600808489, |
| "grad_norm": 0.4394550621509552, |
| "learning_rate": 4.3997365817104255e-05, |
| "loss": 0.2804, |
| "step": 641 |
| }, |
| { |
| "epoch": 0.32440626579080345, |
| "grad_norm": 0.3480008840560913, |
| "learning_rate": 4.397688803143276e-05, |
| "loss": 0.2214, |
| "step": 642 |
| }, |
| { |
| "epoch": 0.32491157150075795, |
| "grad_norm": 0.7126933336257935, |
| "learning_rate": 4.395638015843585e-05, |
| "loss": 0.5511, |
| "step": 643 |
| }, |
| { |
| "epoch": 0.3254168772107125, |
| "grad_norm": 0.681785523891449, |
| "learning_rate": 4.393584223062819e-05, |
| "loss": 0.5339, |
| "step": 644 |
| }, |
| { |
| "epoch": 0.325922182920667, |
| "grad_norm": 0.4552098214626312, |
| "learning_rate": 4.391527428057207e-05, |
| "loss": 0.3332, |
| "step": 645 |
| }, |
| { |
| "epoch": 0.32642748863062154, |
| "grad_norm": 0.3405364751815796, |
| "learning_rate": 4.3894676340877425e-05, |
| "loss": 0.1796, |
| "step": 646 |
| }, |
| { |
| "epoch": 0.32693279434057604, |
| "grad_norm": 0.3657936155796051, |
| "learning_rate": 4.387404844420171e-05, |
| "loss": 0.2149, |
| "step": 647 |
| }, |
| { |
| "epoch": 0.3274381000505306, |
| "grad_norm": 0.7833860516548157, |
| "learning_rate": 4.385339062324986e-05, |
| "loss": 0.5498, |
| "step": 648 |
| }, |
| { |
| "epoch": 0.3279434057604851, |
| "grad_norm": 0.7392014265060425, |
| "learning_rate": 4.3832702910774294e-05, |
| "loss": 0.424, |
| "step": 649 |
| }, |
| { |
| "epoch": 0.32844871147043964, |
| "grad_norm": 0.37213972210884094, |
| "learning_rate": 4.381198533957478e-05, |
| "loss": 0.2245, |
| "step": 650 |
| }, |
| { |
| "epoch": 0.32895401718039413, |
| "grad_norm": 0.7969143986701965, |
| "learning_rate": 4.379123794249846e-05, |
| "loss": 0.5756, |
| "step": 651 |
| }, |
| { |
| "epoch": 0.3294593228903487, |
| "grad_norm": 0.46545347571372986, |
| "learning_rate": 4.377046075243976e-05, |
| "loss": 0.2736, |
| "step": 652 |
| }, |
| { |
| "epoch": 0.3299646286003032, |
| "grad_norm": 0.7078908681869507, |
| "learning_rate": 4.3749653802340315e-05, |
| "loss": 0.454, |
| "step": 653 |
| }, |
| { |
| "epoch": 0.33046993431025773, |
| "grad_norm": 0.43309125304222107, |
| "learning_rate": 4.372881712518898e-05, |
| "loss": 0.2874, |
| "step": 654 |
| }, |
| { |
| "epoch": 0.3309752400202122, |
| "grad_norm": 0.8200904726982117, |
| "learning_rate": 4.3707950754021715e-05, |
| "loss": 0.4872, |
| "step": 655 |
| }, |
| { |
| "epoch": 0.3314805457301668, |
| "grad_norm": 0.46345481276512146, |
| "learning_rate": 4.368705472192157e-05, |
| "loss": 0.3902, |
| "step": 656 |
| }, |
| { |
| "epoch": 0.33198585144012127, |
| "grad_norm": 0.6028680205345154, |
| "learning_rate": 4.366612906201862e-05, |
| "loss": 0.3987, |
| "step": 657 |
| }, |
| { |
| "epoch": 0.3324911571500758, |
| "grad_norm": 1.018164873123169, |
| "learning_rate": 4.364517380748991e-05, |
| "loss": 0.5398, |
| "step": 658 |
| }, |
| { |
| "epoch": 0.3329964628600303, |
| "grad_norm": 0.5439718961715698, |
| "learning_rate": 4.362418899155941e-05, |
| "loss": 0.2593, |
| "step": 659 |
| }, |
| { |
| "epoch": 0.3335017685699848, |
| "grad_norm": 0.5061584115028381, |
| "learning_rate": 4.360317464749798e-05, |
| "loss": 0.1962, |
| "step": 660 |
| }, |
| { |
| "epoch": 0.33400707427993936, |
| "grad_norm": 0.5780636072158813, |
| "learning_rate": 4.358213080862324e-05, |
| "loss": 0.2549, |
| "step": 661 |
| }, |
| { |
| "epoch": 0.33451237998989386, |
| "grad_norm": 0.6662091016769409, |
| "learning_rate": 4.356105750829963e-05, |
| "loss": 0.4128, |
| "step": 662 |
| }, |
| { |
| "epoch": 0.3350176856998484, |
| "grad_norm": 0.473575234413147, |
| "learning_rate": 4.3539954779938276e-05, |
| "loss": 0.2602, |
| "step": 663 |
| }, |
| { |
| "epoch": 0.3355229914098029, |
| "grad_norm": 0.6012740135192871, |
| "learning_rate": 4.351882265699696e-05, |
| "loss": 0.5341, |
| "step": 664 |
| }, |
| { |
| "epoch": 0.33602829711975746, |
| "grad_norm": 0.48335346579551697, |
| "learning_rate": 4.3497661172980074e-05, |
| "loss": 0.2869, |
| "step": 665 |
| }, |
| { |
| "epoch": 0.33653360282971195, |
| "grad_norm": 0.3564821481704712, |
| "learning_rate": 4.347647036143856e-05, |
| "loss": 0.2183, |
| "step": 666 |
| }, |
| { |
| "epoch": 0.3370389085396665, |
| "grad_norm": 0.4883384704589844, |
| "learning_rate": 4.345525025596986e-05, |
| "loss": 0.5619, |
| "step": 667 |
| }, |
| { |
| "epoch": 0.337544214249621, |
| "grad_norm": 0.31306517124176025, |
| "learning_rate": 4.343400089021785e-05, |
| "loss": 0.1676, |
| "step": 668 |
| }, |
| { |
| "epoch": 0.33804951995957555, |
| "grad_norm": 0.7773547172546387, |
| "learning_rate": 4.341272229787281e-05, |
| "loss": 0.3485, |
| "step": 669 |
| }, |
| { |
| "epoch": 0.33855482566953005, |
| "grad_norm": 0.349680632352829, |
| "learning_rate": 4.339141451267136e-05, |
| "loss": 0.2042, |
| "step": 670 |
| }, |
| { |
| "epoch": 0.3390601313794846, |
| "grad_norm": 0.8842143416404724, |
| "learning_rate": 4.337007756839638e-05, |
| "loss": 0.4101, |
| "step": 671 |
| }, |
| { |
| "epoch": 0.3395654370894391, |
| "grad_norm": 0.4938321113586426, |
| "learning_rate": 4.334871149887701e-05, |
| "loss": 0.2784, |
| "step": 672 |
| }, |
| { |
| "epoch": 0.34007074279939364, |
| "grad_norm": 0.6494061350822449, |
| "learning_rate": 4.332731633798857e-05, |
| "loss": 0.4212, |
| "step": 673 |
| }, |
| { |
| "epoch": 0.34057604850934814, |
| "grad_norm": 0.33829325437545776, |
| "learning_rate": 4.3305892119652457e-05, |
| "loss": 0.2634, |
| "step": 674 |
| }, |
| { |
| "epoch": 0.3410813542193027, |
| "grad_norm": 0.5153347849845886, |
| "learning_rate": 4.32844388778362e-05, |
| "loss": 0.235, |
| "step": 675 |
| }, |
| { |
| "epoch": 0.3415866599292572, |
| "grad_norm": 0.4803626835346222, |
| "learning_rate": 4.326295664655329e-05, |
| "loss": 0.3195, |
| "step": 676 |
| }, |
| { |
| "epoch": 0.34209196563921174, |
| "grad_norm": 0.5081639289855957, |
| "learning_rate": 4.3241445459863225e-05, |
| "loss": 0.2381, |
| "step": 677 |
| }, |
| { |
| "epoch": 0.34259727134916623, |
| "grad_norm": 0.5200014710426331, |
| "learning_rate": 4.321990535187138e-05, |
| "loss": 0.3903, |
| "step": 678 |
| }, |
| { |
| "epoch": 0.3431025770591208, |
| "grad_norm": 0.5185570120811462, |
| "learning_rate": 4.319833635672899e-05, |
| "loss": 0.1852, |
| "step": 679 |
| }, |
| { |
| "epoch": 0.3436078827690753, |
| "grad_norm": 0.38153204321861267, |
| "learning_rate": 4.317673850863311e-05, |
| "loss": 0.1491, |
| "step": 680 |
| }, |
| { |
| "epoch": 0.34411318847902983, |
| "grad_norm": 0.4061324894428253, |
| "learning_rate": 4.31551118418265e-05, |
| "loss": 0.3238, |
| "step": 681 |
| }, |
| { |
| "epoch": 0.3446184941889843, |
| "grad_norm": 0.45140764117240906, |
| "learning_rate": 4.313345639059766e-05, |
| "loss": 0.2929, |
| "step": 682 |
| }, |
| { |
| "epoch": 0.3451237998989389, |
| "grad_norm": 0.5511850714683533, |
| "learning_rate": 4.3111772189280686e-05, |
| "loss": 0.2723, |
| "step": 683 |
| }, |
| { |
| "epoch": 0.34562910560889337, |
| "grad_norm": 0.5179259777069092, |
| "learning_rate": 4.309005927225528e-05, |
| "loss": 0.2211, |
| "step": 684 |
| }, |
| { |
| "epoch": 0.3461344113188479, |
| "grad_norm": 0.49272972345352173, |
| "learning_rate": 4.306831767394666e-05, |
| "loss": 0.2261, |
| "step": 685 |
| }, |
| { |
| "epoch": 0.3466397170288024, |
| "grad_norm": 0.6372472047805786, |
| "learning_rate": 4.3046547428825524e-05, |
| "loss": 0.3174, |
| "step": 686 |
| }, |
| { |
| "epoch": 0.34714502273875697, |
| "grad_norm": 1.0002518892288208, |
| "learning_rate": 4.302474857140799e-05, |
| "loss": 0.4862, |
| "step": 687 |
| }, |
| { |
| "epoch": 0.34765032844871147, |
| "grad_norm": 0.46908167004585266, |
| "learning_rate": 4.3002921136255535e-05, |
| "loss": 0.2217, |
| "step": 688 |
| }, |
| { |
| "epoch": 0.348155634158666, |
| "grad_norm": 0.31772932410240173, |
| "learning_rate": 4.298106515797496e-05, |
| "loss": 0.2601, |
| "step": 689 |
| }, |
| { |
| "epoch": 0.3486609398686205, |
| "grad_norm": 0.40609073638916016, |
| "learning_rate": 4.295918067121828e-05, |
| "loss": 0.2816, |
| "step": 690 |
| }, |
| { |
| "epoch": 0.34916624557857506, |
| "grad_norm": 0.5628025531768799, |
| "learning_rate": 4.293726771068275e-05, |
| "loss": 0.2602, |
| "step": 691 |
| }, |
| { |
| "epoch": 0.34967155128852956, |
| "grad_norm": 0.42692917585372925, |
| "learning_rate": 4.2915326311110766e-05, |
| "loss": 0.2569, |
| "step": 692 |
| }, |
| { |
| "epoch": 0.3501768569984841, |
| "grad_norm": 0.41802138090133667, |
| "learning_rate": 4.289335650728979e-05, |
| "loss": 0.3457, |
| "step": 693 |
| }, |
| { |
| "epoch": 0.3506821627084386, |
| "grad_norm": 0.5327426195144653, |
| "learning_rate": 4.287135833405235e-05, |
| "loss": 0.5488, |
| "step": 694 |
| }, |
| { |
| "epoch": 0.3511874684183931, |
| "grad_norm": 0.36314961314201355, |
| "learning_rate": 4.2849331826275905e-05, |
| "loss": 0.3411, |
| "step": 695 |
| }, |
| { |
| "epoch": 0.35169277412834765, |
| "grad_norm": 0.3764992356300354, |
| "learning_rate": 4.282727701888289e-05, |
| "loss": 0.2655, |
| "step": 696 |
| }, |
| { |
| "epoch": 0.35219807983830215, |
| "grad_norm": 0.4138118028640747, |
| "learning_rate": 4.280519394684059e-05, |
| "loss": 0.2747, |
| "step": 697 |
| }, |
| { |
| "epoch": 0.3527033855482567, |
| "grad_norm": 0.31555989384651184, |
| "learning_rate": 4.2783082645161073e-05, |
| "loss": 0.2322, |
| "step": 698 |
| }, |
| { |
| "epoch": 0.3532086912582112, |
| "grad_norm": 0.5712180733680725, |
| "learning_rate": 4.276094314890122e-05, |
| "loss": 0.3735, |
| "step": 699 |
| }, |
| { |
| "epoch": 0.35371399696816574, |
| "grad_norm": 0.764113187789917, |
| "learning_rate": 4.273877549316256e-05, |
| "loss": 0.4034, |
| "step": 700 |
| }, |
| { |
| "epoch": 0.35421930267812024, |
| "grad_norm": 0.8156772255897522, |
| "learning_rate": 4.27165797130913e-05, |
| "loss": 0.3585, |
| "step": 701 |
| }, |
| { |
| "epoch": 0.3547246083880748, |
| "grad_norm": 0.5966439843177795, |
| "learning_rate": 4.269435584387823e-05, |
| "loss": 0.413, |
| "step": 702 |
| }, |
| { |
| "epoch": 0.3552299140980293, |
| "grad_norm": 0.621375560760498, |
| "learning_rate": 4.267210392075867e-05, |
| "loss": 0.4242, |
| "step": 703 |
| }, |
| { |
| "epoch": 0.35573521980798384, |
| "grad_norm": 0.6661776304244995, |
| "learning_rate": 4.2649823979012426e-05, |
| "loss": 0.3709, |
| "step": 704 |
| }, |
| { |
| "epoch": 0.35624052551793833, |
| "grad_norm": 0.8590171933174133, |
| "learning_rate": 4.262751605396372e-05, |
| "loss": 0.4686, |
| "step": 705 |
| }, |
| { |
| "epoch": 0.3567458312278929, |
| "grad_norm": 0.5522347092628479, |
| "learning_rate": 4.260518018098114e-05, |
| "loss": 0.2676, |
| "step": 706 |
| }, |
| { |
| "epoch": 0.3572511369378474, |
| "grad_norm": 0.5579246282577515, |
| "learning_rate": 4.25828163954776e-05, |
| "loss": 0.3184, |
| "step": 707 |
| }, |
| { |
| "epoch": 0.35775644264780193, |
| "grad_norm": 0.6939714550971985, |
| "learning_rate": 4.2560424732910246e-05, |
| "loss": 0.3246, |
| "step": 708 |
| }, |
| { |
| "epoch": 0.3582617483577564, |
| "grad_norm": 0.5740989446640015, |
| "learning_rate": 4.2538005228780426e-05, |
| "loss": 0.261, |
| "step": 709 |
| }, |
| { |
| "epoch": 0.358767054067711, |
| "grad_norm": 0.5999382734298706, |
| "learning_rate": 4.251555791863365e-05, |
| "loss": 0.3403, |
| "step": 710 |
| }, |
| { |
| "epoch": 0.3592723597776655, |
| "grad_norm": 0.7810065746307373, |
| "learning_rate": 4.2493082838059506e-05, |
| "loss": 0.2779, |
| "step": 711 |
| }, |
| { |
| "epoch": 0.35977766548762, |
| "grad_norm": 0.6728970408439636, |
| "learning_rate": 4.2470580022691584e-05, |
| "loss": 0.3938, |
| "step": 712 |
| }, |
| { |
| "epoch": 0.3602829711975745, |
| "grad_norm": 0.42731353640556335, |
| "learning_rate": 4.2448049508207495e-05, |
| "loss": 0.3949, |
| "step": 713 |
| }, |
| { |
| "epoch": 0.36078827690752907, |
| "grad_norm": 0.5872007608413696, |
| "learning_rate": 4.242549133032872e-05, |
| "loss": 0.2941, |
| "step": 714 |
| }, |
| { |
| "epoch": 0.36129358261748357, |
| "grad_norm": 0.7026878595352173, |
| "learning_rate": 4.2402905524820625e-05, |
| "loss": 0.3886, |
| "step": 715 |
| }, |
| { |
| "epoch": 0.3617988883274381, |
| "grad_norm": 0.8333798050880432, |
| "learning_rate": 4.238029212749237e-05, |
| "loss": 0.4645, |
| "step": 716 |
| }, |
| { |
| "epoch": 0.3623041940373926, |
| "grad_norm": 0.4653269946575165, |
| "learning_rate": 4.235765117419688e-05, |
| "loss": 0.2918, |
| "step": 717 |
| }, |
| { |
| "epoch": 0.36280949974734716, |
| "grad_norm": 0.46014687418937683, |
| "learning_rate": 4.233498270083074e-05, |
| "loss": 0.1886, |
| "step": 718 |
| }, |
| { |
| "epoch": 0.36331480545730166, |
| "grad_norm": 0.5073797106742859, |
| "learning_rate": 4.231228674333417e-05, |
| "loss": 0.3901, |
| "step": 719 |
| }, |
| { |
| "epoch": 0.3638201111672562, |
| "grad_norm": 0.4070146679878235, |
| "learning_rate": 4.2289563337691004e-05, |
| "loss": 0.3156, |
| "step": 720 |
| }, |
| { |
| "epoch": 0.3643254168772107, |
| "grad_norm": 0.5377737283706665, |
| "learning_rate": 4.226681251992854e-05, |
| "loss": 0.3167, |
| "step": 721 |
| }, |
| { |
| "epoch": 0.36483072258716526, |
| "grad_norm": 0.590934693813324, |
| "learning_rate": 4.224403432611758e-05, |
| "loss": 0.2468, |
| "step": 722 |
| }, |
| { |
| "epoch": 0.36533602829711975, |
| "grad_norm": 0.47819361090660095, |
| "learning_rate": 4.22212287923723e-05, |
| "loss": 0.2129, |
| "step": 723 |
| }, |
| { |
| "epoch": 0.3658413340070743, |
| "grad_norm": 0.39465945959091187, |
| "learning_rate": 4.219839595485026e-05, |
| "loss": 0.1701, |
| "step": 724 |
| }, |
| { |
| "epoch": 0.3663466397170288, |
| "grad_norm": 0.5328222513198853, |
| "learning_rate": 4.2175535849752254e-05, |
| "loss": 0.2442, |
| "step": 725 |
| }, |
| { |
| "epoch": 0.36685194542698335, |
| "grad_norm": 0.4375799596309662, |
| "learning_rate": 4.215264851332236e-05, |
| "loss": 0.2072, |
| "step": 726 |
| }, |
| { |
| "epoch": 0.36735725113693785, |
| "grad_norm": 0.5246615409851074, |
| "learning_rate": 4.212973398184781e-05, |
| "loss": 0.2764, |
| "step": 727 |
| }, |
| { |
| "epoch": 0.3678625568468924, |
| "grad_norm": 0.5006076097488403, |
| "learning_rate": 4.210679229165894e-05, |
| "loss": 0.2225, |
| "step": 728 |
| }, |
| { |
| "epoch": 0.3683678625568469, |
| "grad_norm": 0.2846226990222931, |
| "learning_rate": 4.2083823479129175e-05, |
| "loss": 0.147, |
| "step": 729 |
| }, |
| { |
| "epoch": 0.3688731682668014, |
| "grad_norm": 0.4136352837085724, |
| "learning_rate": 4.206082758067491e-05, |
| "loss": 0.1903, |
| "step": 730 |
| }, |
| { |
| "epoch": 0.36937847397675594, |
| "grad_norm": 0.8793293833732605, |
| "learning_rate": 4.203780463275551e-05, |
| "loss": 0.4302, |
| "step": 731 |
| }, |
| { |
| "epoch": 0.36988377968671043, |
| "grad_norm": 0.45669475197792053, |
| "learning_rate": 4.2014754671873205e-05, |
| "loss": 0.2598, |
| "step": 732 |
| }, |
| { |
| "epoch": 0.370389085396665, |
| "grad_norm": 0.6066158413887024, |
| "learning_rate": 4.199167773457308e-05, |
| "loss": 0.2745, |
| "step": 733 |
| }, |
| { |
| "epoch": 0.3708943911066195, |
| "grad_norm": 0.6452160477638245, |
| "learning_rate": 4.196857385744295e-05, |
| "loss": 0.2735, |
| "step": 734 |
| }, |
| { |
| "epoch": 0.37139969681657403, |
| "grad_norm": 0.9064080715179443, |
| "learning_rate": 4.194544307711338e-05, |
| "loss": 0.5442, |
| "step": 735 |
| }, |
| { |
| "epoch": 0.3719050025265285, |
| "grad_norm": 0.5397379398345947, |
| "learning_rate": 4.192228543025759e-05, |
| "loss": 0.3049, |
| "step": 736 |
| }, |
| { |
| "epoch": 0.3724103082364831, |
| "grad_norm": 0.7189819812774658, |
| "learning_rate": 4.189910095359135e-05, |
| "loss": 0.4373, |
| "step": 737 |
| }, |
| { |
| "epoch": 0.3729156139464376, |
| "grad_norm": 0.4282380938529968, |
| "learning_rate": 4.187588968387303e-05, |
| "loss": 0.2361, |
| "step": 738 |
| }, |
| { |
| "epoch": 0.3734209196563921, |
| "grad_norm": 0.6697452068328857, |
| "learning_rate": 4.185265165790343e-05, |
| "loss": 0.7482, |
| "step": 739 |
| }, |
| { |
| "epoch": 0.3739262253663466, |
| "grad_norm": 0.8984920978546143, |
| "learning_rate": 4.182938691252579e-05, |
| "loss": 0.3867, |
| "step": 740 |
| }, |
| { |
| "epoch": 0.37443153107630117, |
| "grad_norm": 0.8725920915603638, |
| "learning_rate": 4.180609548462573e-05, |
| "loss": 0.3112, |
| "step": 741 |
| }, |
| { |
| "epoch": 0.37493683678625567, |
| "grad_norm": 0.548840343952179, |
| "learning_rate": 4.178277741113114e-05, |
| "loss": 0.2397, |
| "step": 742 |
| }, |
| { |
| "epoch": 0.3754421424962102, |
| "grad_norm": 0.38334935903549194, |
| "learning_rate": 4.175943272901218e-05, |
| "loss": 0.2286, |
| "step": 743 |
| }, |
| { |
| "epoch": 0.3759474482061647, |
| "grad_norm": 0.3433682918548584, |
| "learning_rate": 4.173606147528119e-05, |
| "loss": 0.2938, |
| "step": 744 |
| }, |
| { |
| "epoch": 0.37645275391611926, |
| "grad_norm": 0.3817785680294037, |
| "learning_rate": 4.171266368699265e-05, |
| "loss": 0.1541, |
| "step": 745 |
| }, |
| { |
| "epoch": 0.37695805962607376, |
| "grad_norm": 0.3005577325820923, |
| "learning_rate": 4.1689239401243086e-05, |
| "loss": 0.1874, |
| "step": 746 |
| }, |
| { |
| "epoch": 0.3774633653360283, |
| "grad_norm": 0.41856083273887634, |
| "learning_rate": 4.166578865517105e-05, |
| "loss": 0.3474, |
| "step": 747 |
| }, |
| { |
| "epoch": 0.3779686710459828, |
| "grad_norm": 0.6588180661201477, |
| "learning_rate": 4.164231148595705e-05, |
| "loss": 0.3855, |
| "step": 748 |
| }, |
| { |
| "epoch": 0.37847397675593736, |
| "grad_norm": 0.5970372557640076, |
| "learning_rate": 4.1618807930823474e-05, |
| "loss": 0.3084, |
| "step": 749 |
| }, |
| { |
| "epoch": 0.37897928246589185, |
| "grad_norm": 0.29243141412734985, |
| "learning_rate": 4.159527802703456e-05, |
| "loss": 0.1611, |
| "step": 750 |
| }, |
| { |
| "epoch": 0.3794845881758464, |
| "grad_norm": 0.5415188670158386, |
| "learning_rate": 4.1571721811896304e-05, |
| "loss": 0.2595, |
| "step": 751 |
| }, |
| { |
| "epoch": 0.3799898938858009, |
| "grad_norm": 0.33659324049949646, |
| "learning_rate": 4.154813932275643e-05, |
| "loss": 0.1616, |
| "step": 752 |
| }, |
| { |
| "epoch": 0.38049519959575545, |
| "grad_norm": 0.542412281036377, |
| "learning_rate": 4.152453059700431e-05, |
| "loss": 0.5679, |
| "step": 753 |
| }, |
| { |
| "epoch": 0.38100050530570995, |
| "grad_norm": 0.43469804525375366, |
| "learning_rate": 4.150089567207094e-05, |
| "loss": 0.1785, |
| "step": 754 |
| }, |
| { |
| "epoch": 0.3815058110156645, |
| "grad_norm": 0.27857232093811035, |
| "learning_rate": 4.14772345854288e-05, |
| "loss": 0.1647, |
| "step": 755 |
| }, |
| { |
| "epoch": 0.382011116725619, |
| "grad_norm": 0.45017367601394653, |
| "learning_rate": 4.145354737459191e-05, |
| "loss": 0.2412, |
| "step": 756 |
| }, |
| { |
| "epoch": 0.38251642243557354, |
| "grad_norm": 0.6093769669532776, |
| "learning_rate": 4.142983407711567e-05, |
| "loss": 0.3251, |
| "step": 757 |
| }, |
| { |
| "epoch": 0.38302172814552804, |
| "grad_norm": 0.9887945652008057, |
| "learning_rate": 4.1406094730596844e-05, |
| "loss": 0.4258, |
| "step": 758 |
| }, |
| { |
| "epoch": 0.3835270338554826, |
| "grad_norm": 0.3608369827270508, |
| "learning_rate": 4.138232937267351e-05, |
| "loss": 0.1477, |
| "step": 759 |
| }, |
| { |
| "epoch": 0.3840323395654371, |
| "grad_norm": 0.6515466570854187, |
| "learning_rate": 4.135853804102497e-05, |
| "loss": 0.346, |
| "step": 760 |
| }, |
| { |
| "epoch": 0.38453764527539164, |
| "grad_norm": 0.6651821732521057, |
| "learning_rate": 4.133472077337173e-05, |
| "loss": 0.4113, |
| "step": 761 |
| }, |
| { |
| "epoch": 0.38504295098534613, |
| "grad_norm": 0.43307211995124817, |
| "learning_rate": 4.131087760747538e-05, |
| "loss": 0.1571, |
| "step": 762 |
| }, |
| { |
| "epoch": 0.3855482566953007, |
| "grad_norm": 0.928577721118927, |
| "learning_rate": 4.128700858113862e-05, |
| "loss": 0.4672, |
| "step": 763 |
| }, |
| { |
| "epoch": 0.3860535624052552, |
| "grad_norm": 0.5100760459899902, |
| "learning_rate": 4.1263113732205106e-05, |
| "loss": 0.3695, |
| "step": 764 |
| }, |
| { |
| "epoch": 0.3865588681152097, |
| "grad_norm": 0.5143333673477173, |
| "learning_rate": 4.123919309855945e-05, |
| "loss": 0.2731, |
| "step": 765 |
| }, |
| { |
| "epoch": 0.3870641738251642, |
| "grad_norm": 0.705123245716095, |
| "learning_rate": 4.121524671812716e-05, |
| "loss": 0.2643, |
| "step": 766 |
| }, |
| { |
| "epoch": 0.3875694795351187, |
| "grad_norm": 0.44116538763046265, |
| "learning_rate": 4.1191274628874545e-05, |
| "loss": 0.2125, |
| "step": 767 |
| }, |
| { |
| "epoch": 0.3880747852450733, |
| "grad_norm": 1.2351047992706299, |
| "learning_rate": 4.116727686880869e-05, |
| "loss": 0.4035, |
| "step": 768 |
| }, |
| { |
| "epoch": 0.38858009095502777, |
| "grad_norm": 0.646878182888031, |
| "learning_rate": 4.114325347597736e-05, |
| "loss": 0.2416, |
| "step": 769 |
| }, |
| { |
| "epoch": 0.3890853966649823, |
| "grad_norm": 0.19294582307338715, |
| "learning_rate": 4.111920448846898e-05, |
| "loss": 0.0957, |
| "step": 770 |
| }, |
| { |
| "epoch": 0.3895907023749368, |
| "grad_norm": 0.45887112617492676, |
| "learning_rate": 4.1095129944412556e-05, |
| "loss": 0.4743, |
| "step": 771 |
| }, |
| { |
| "epoch": 0.39009600808489137, |
| "grad_norm": 0.3489443063735962, |
| "learning_rate": 4.107102988197758e-05, |
| "loss": 0.1996, |
| "step": 772 |
| }, |
| { |
| "epoch": 0.39060131379484586, |
| "grad_norm": 0.6400259137153625, |
| "learning_rate": 4.1046904339374056e-05, |
| "loss": 0.3377, |
| "step": 773 |
| }, |
| { |
| "epoch": 0.3911066195048004, |
| "grad_norm": 0.8612831830978394, |
| "learning_rate": 4.1022753354852345e-05, |
| "loss": 0.3707, |
| "step": 774 |
| }, |
| { |
| "epoch": 0.3916119252147549, |
| "grad_norm": 0.6197051405906677, |
| "learning_rate": 4.099857696670315e-05, |
| "loss": 0.3053, |
| "step": 775 |
| }, |
| { |
| "epoch": 0.39211723092470946, |
| "grad_norm": 0.5743224620819092, |
| "learning_rate": 4.0974375213257475e-05, |
| "loss": 0.3574, |
| "step": 776 |
| }, |
| { |
| "epoch": 0.39262253663466395, |
| "grad_norm": 0.5291123986244202, |
| "learning_rate": 4.0950148132886504e-05, |
| "loss": 0.2928, |
| "step": 777 |
| }, |
| { |
| "epoch": 0.3931278423446185, |
| "grad_norm": 0.9359875917434692, |
| "learning_rate": 4.0925895764001615e-05, |
| "loss": 0.5865, |
| "step": 778 |
| }, |
| { |
| "epoch": 0.393633148054573, |
| "grad_norm": 0.40809711813926697, |
| "learning_rate": 4.090161814505424e-05, |
| "loss": 0.2414, |
| "step": 779 |
| }, |
| { |
| "epoch": 0.39413845376452755, |
| "grad_norm": 0.3236285448074341, |
| "learning_rate": 4.087731531453588e-05, |
| "loss": 0.1779, |
| "step": 780 |
| }, |
| { |
| "epoch": 0.39464375947448205, |
| "grad_norm": 0.8675460815429688, |
| "learning_rate": 4.0852987310978e-05, |
| "loss": 0.4552, |
| "step": 781 |
| }, |
| { |
| "epoch": 0.3951490651844366, |
| "grad_norm": 0.3517732620239258, |
| "learning_rate": 4.082863417295195e-05, |
| "loss": 0.1672, |
| "step": 782 |
| }, |
| { |
| "epoch": 0.3956543708943911, |
| "grad_norm": 0.5657680630683899, |
| "learning_rate": 4.080425593906896e-05, |
| "loss": 0.3311, |
| "step": 783 |
| }, |
| { |
| "epoch": 0.39615967660434565, |
| "grad_norm": 0.7513840198516846, |
| "learning_rate": 4.077985264798003e-05, |
| "loss": 0.3397, |
| "step": 784 |
| }, |
| { |
| "epoch": 0.39666498231430014, |
| "grad_norm": 0.41284772753715515, |
| "learning_rate": 4.075542433837591e-05, |
| "loss": 0.1583, |
| "step": 785 |
| }, |
| { |
| "epoch": 0.3971702880242547, |
| "grad_norm": 0.41029801964759827, |
| "learning_rate": 4.0730971048987e-05, |
| "loss": 0.3041, |
| "step": 786 |
| }, |
| { |
| "epoch": 0.3976755937342092, |
| "grad_norm": 0.7941258549690247, |
| "learning_rate": 4.070649281858329e-05, |
| "loss": 0.3464, |
| "step": 787 |
| }, |
| { |
| "epoch": 0.39818089944416374, |
| "grad_norm": 1.075421929359436, |
| "learning_rate": 4.068198968597435e-05, |
| "loss": 0.5832, |
| "step": 788 |
| }, |
| { |
| "epoch": 0.39868620515411823, |
| "grad_norm": 0.46112877130508423, |
| "learning_rate": 4.0657461690009195e-05, |
| "loss": 0.322, |
| "step": 789 |
| }, |
| { |
| "epoch": 0.3991915108640728, |
| "grad_norm": 0.528905987739563, |
| "learning_rate": 4.063290886957629e-05, |
| "loss": 0.5137, |
| "step": 790 |
| }, |
| { |
| "epoch": 0.3996968165740273, |
| "grad_norm": 0.43017876148223877, |
| "learning_rate": 4.0608331263603437e-05, |
| "loss": 0.2687, |
| "step": 791 |
| }, |
| { |
| "epoch": 0.40020212228398183, |
| "grad_norm": 0.49231600761413574, |
| "learning_rate": 4.058372891105775e-05, |
| "loss": 0.2438, |
| "step": 792 |
| }, |
| { |
| "epoch": 0.4007074279939363, |
| "grad_norm": 0.7696442008018494, |
| "learning_rate": 4.055910185094557e-05, |
| "loss": 0.3306, |
| "step": 793 |
| }, |
| { |
| "epoch": 0.4012127337038909, |
| "grad_norm": 0.6930801272392273, |
| "learning_rate": 4.0534450122312404e-05, |
| "loss": 0.6515, |
| "step": 794 |
| }, |
| { |
| "epoch": 0.4017180394138454, |
| "grad_norm": 0.28239572048187256, |
| "learning_rate": 4.0509773764242886e-05, |
| "loss": 0.2526, |
| "step": 795 |
| }, |
| { |
| "epoch": 0.4022233451237999, |
| "grad_norm": 0.5681050419807434, |
| "learning_rate": 4.048507281586069e-05, |
| "loss": 0.2447, |
| "step": 796 |
| }, |
| { |
| "epoch": 0.4027286508337544, |
| "grad_norm": 0.3820132911205292, |
| "learning_rate": 4.046034731632847e-05, |
| "loss": 0.2624, |
| "step": 797 |
| }, |
| { |
| "epoch": 0.40323395654370897, |
| "grad_norm": 0.9789735674858093, |
| "learning_rate": 4.043559730484784e-05, |
| "loss": 0.5155, |
| "step": 798 |
| }, |
| { |
| "epoch": 0.40373926225366347, |
| "grad_norm": 0.4344816505908966, |
| "learning_rate": 4.0410822820659224e-05, |
| "loss": 0.2145, |
| "step": 799 |
| }, |
| { |
| "epoch": 0.40424456796361796, |
| "grad_norm": 0.33764201402664185, |
| "learning_rate": 4.0386023903041895e-05, |
| "loss": 0.2634, |
| "step": 800 |
| }, |
| { |
| "epoch": 0.4047498736735725, |
| "grad_norm": 0.2780141830444336, |
| "learning_rate": 4.036120059131382e-05, |
| "loss": 0.1559, |
| "step": 801 |
| }, |
| { |
| "epoch": 0.405255179383527, |
| "grad_norm": 0.5865265130996704, |
| "learning_rate": 4.0336352924831674e-05, |
| "loss": 0.3824, |
| "step": 802 |
| }, |
| { |
| "epoch": 0.40576048509348156, |
| "grad_norm": 0.5158589482307434, |
| "learning_rate": 4.0311480942990756e-05, |
| "loss": 0.3323, |
| "step": 803 |
| }, |
| { |
| "epoch": 0.40626579080343606, |
| "grad_norm": 0.5034775733947754, |
| "learning_rate": 4.0286584685224885e-05, |
| "loss": 0.2576, |
| "step": 804 |
| }, |
| { |
| "epoch": 0.4067710965133906, |
| "grad_norm": 0.5502609610557556, |
| "learning_rate": 4.026166419100638e-05, |
| "loss": 0.3489, |
| "step": 805 |
| }, |
| { |
| "epoch": 0.4072764022233451, |
| "grad_norm": 0.4883863031864166, |
| "learning_rate": 4.023671949984598e-05, |
| "loss": 0.2665, |
| "step": 806 |
| }, |
| { |
| "epoch": 0.40778170793329965, |
| "grad_norm": 0.40211907029151917, |
| "learning_rate": 4.02117506512928e-05, |
| "loss": 0.2291, |
| "step": 807 |
| }, |
| { |
| "epoch": 0.40828701364325415, |
| "grad_norm": 0.35028353333473206, |
| "learning_rate": 4.0186757684934275e-05, |
| "loss": 0.2617, |
| "step": 808 |
| }, |
| { |
| "epoch": 0.4087923193532087, |
| "grad_norm": 0.6691428422927856, |
| "learning_rate": 4.0161740640396015e-05, |
| "loss": 0.4747, |
| "step": 809 |
| }, |
| { |
| "epoch": 0.4092976250631632, |
| "grad_norm": 0.3790917694568634, |
| "learning_rate": 4.0136699557341874e-05, |
| "loss": 0.1726, |
| "step": 810 |
| }, |
| { |
| "epoch": 0.40980293077311775, |
| "grad_norm": 0.2536380887031555, |
| "learning_rate": 4.011163447547378e-05, |
| "loss": 0.1844, |
| "step": 811 |
| }, |
| { |
| "epoch": 0.41030823648307224, |
| "grad_norm": 0.7773703336715698, |
| "learning_rate": 4.0086545434531717e-05, |
| "loss": 0.2856, |
| "step": 812 |
| }, |
| { |
| "epoch": 0.4108135421930268, |
| "grad_norm": 0.36982572078704834, |
| "learning_rate": 4.0061432474293656e-05, |
| "loss": 0.1549, |
| "step": 813 |
| }, |
| { |
| "epoch": 0.4113188479029813, |
| "grad_norm": 0.3912838399410248, |
| "learning_rate": 4.00362956345755e-05, |
| "loss": 0.2821, |
| "step": 814 |
| }, |
| { |
| "epoch": 0.41182415361293584, |
| "grad_norm": 0.4090442359447479, |
| "learning_rate": 4.001113495523101e-05, |
| "loss": 0.2502, |
| "step": 815 |
| }, |
| { |
| "epoch": 0.41232945932289033, |
| "grad_norm": 0.860283374786377, |
| "learning_rate": 3.998595047615172e-05, |
| "loss": 0.3686, |
| "step": 816 |
| }, |
| { |
| "epoch": 0.4128347650328449, |
| "grad_norm": 0.3724400997161865, |
| "learning_rate": 3.9960742237266946e-05, |
| "loss": 0.1763, |
| "step": 817 |
| }, |
| { |
| "epoch": 0.4133400707427994, |
| "grad_norm": 0.2720755636692047, |
| "learning_rate": 3.993551027854362e-05, |
| "loss": 0.1704, |
| "step": 818 |
| }, |
| { |
| "epoch": 0.41384537645275393, |
| "grad_norm": 0.37592801451683044, |
| "learning_rate": 3.991025463998632e-05, |
| "loss": 0.2151, |
| "step": 819 |
| }, |
| { |
| "epoch": 0.41435068216270843, |
| "grad_norm": 0.5671641826629639, |
| "learning_rate": 3.9884975361637146e-05, |
| "loss": 0.5564, |
| "step": 820 |
| }, |
| { |
| "epoch": 0.414855987872663, |
| "grad_norm": 0.6280914545059204, |
| "learning_rate": 3.9859672483575694e-05, |
| "loss": 0.3185, |
| "step": 821 |
| }, |
| { |
| "epoch": 0.4153612935826175, |
| "grad_norm": 0.3018336892127991, |
| "learning_rate": 3.983434604591898e-05, |
| "loss": 0.235, |
| "step": 822 |
| }, |
| { |
| "epoch": 0.415866599292572, |
| "grad_norm": 0.7508260607719421, |
| "learning_rate": 3.980899608882134e-05, |
| "loss": 0.3689, |
| "step": 823 |
| }, |
| { |
| "epoch": 0.4163719050025265, |
| "grad_norm": 0.7853586077690125, |
| "learning_rate": 3.9783622652474436e-05, |
| "loss": 0.3986, |
| "step": 824 |
| }, |
| { |
| "epoch": 0.41687721071248107, |
| "grad_norm": 0.6880490779876709, |
| "learning_rate": 3.975822577710714e-05, |
| "loss": 0.3101, |
| "step": 825 |
| }, |
| { |
| "epoch": 0.41738251642243557, |
| "grad_norm": 0.686315655708313, |
| "learning_rate": 3.973280550298549e-05, |
| "loss": 0.3909, |
| "step": 826 |
| }, |
| { |
| "epoch": 0.4178878221323901, |
| "grad_norm": 0.22469963133335114, |
| "learning_rate": 3.970736187041262e-05, |
| "loss": 0.1319, |
| "step": 827 |
| }, |
| { |
| "epoch": 0.4183931278423446, |
| "grad_norm": 0.5535637736320496, |
| "learning_rate": 3.968189491972869e-05, |
| "loss": 0.3014, |
| "step": 828 |
| }, |
| { |
| "epoch": 0.41889843355229917, |
| "grad_norm": 0.5633406639099121, |
| "learning_rate": 3.9656404691310846e-05, |
| "loss": 0.3009, |
| "step": 829 |
| }, |
| { |
| "epoch": 0.41940373926225366, |
| "grad_norm": 0.6593132615089417, |
| "learning_rate": 3.963089122557313e-05, |
| "loss": 0.5059, |
| "step": 830 |
| }, |
| { |
| "epoch": 0.4199090449722082, |
| "grad_norm": 0.3774155378341675, |
| "learning_rate": 3.960535456296643e-05, |
| "loss": 0.3018, |
| "step": 831 |
| }, |
| { |
| "epoch": 0.4204143506821627, |
| "grad_norm": 0.39002594351768494, |
| "learning_rate": 3.957979474397841e-05, |
| "loss": 0.3974, |
| "step": 832 |
| }, |
| { |
| "epoch": 0.42091965639211726, |
| "grad_norm": 0.5461925268173218, |
| "learning_rate": 3.9554211809133446e-05, |
| "loss": 0.3468, |
| "step": 833 |
| }, |
| { |
| "epoch": 0.42142496210207175, |
| "grad_norm": 0.34177666902542114, |
| "learning_rate": 3.9528605798992566e-05, |
| "loss": 0.1479, |
| "step": 834 |
| }, |
| { |
| "epoch": 0.42193026781202625, |
| "grad_norm": 0.5321192741394043, |
| "learning_rate": 3.950297675415339e-05, |
| "loss": 0.3468, |
| "step": 835 |
| }, |
| { |
| "epoch": 0.4224355735219808, |
| "grad_norm": 0.4154065251350403, |
| "learning_rate": 3.9477324715250045e-05, |
| "loss": 0.3419, |
| "step": 836 |
| }, |
| { |
| "epoch": 0.4229408792319353, |
| "grad_norm": 0.4703141450881958, |
| "learning_rate": 3.945164972295313e-05, |
| "loss": 0.3884, |
| "step": 837 |
| }, |
| { |
| "epoch": 0.42344618494188985, |
| "grad_norm": 0.43752673268318176, |
| "learning_rate": 3.942595181796961e-05, |
| "loss": 0.2079, |
| "step": 838 |
| }, |
| { |
| "epoch": 0.42395149065184434, |
| "grad_norm": 0.3551587760448456, |
| "learning_rate": 3.940023104104281e-05, |
| "loss": 0.2126, |
| "step": 839 |
| }, |
| { |
| "epoch": 0.4244567963617989, |
| "grad_norm": 0.8521201610565186, |
| "learning_rate": 3.9374487432952304e-05, |
| "loss": 0.4272, |
| "step": 840 |
| }, |
| { |
| "epoch": 0.4249621020717534, |
| "grad_norm": 0.477527916431427, |
| "learning_rate": 3.934872103451386e-05, |
| "loss": 0.2675, |
| "step": 841 |
| }, |
| { |
| "epoch": 0.42546740778170794, |
| "grad_norm": 0.46821895241737366, |
| "learning_rate": 3.932293188657937e-05, |
| "loss": 0.2343, |
| "step": 842 |
| }, |
| { |
| "epoch": 0.42597271349166244, |
| "grad_norm": 0.7220411896705627, |
| "learning_rate": 3.929712003003681e-05, |
| "loss": 0.4072, |
| "step": 843 |
| }, |
| { |
| "epoch": 0.426478019201617, |
| "grad_norm": 0.7784841656684875, |
| "learning_rate": 3.927128550581019e-05, |
| "loss": 0.5188, |
| "step": 844 |
| }, |
| { |
| "epoch": 0.4269833249115715, |
| "grad_norm": 0.8209807872772217, |
| "learning_rate": 3.924542835485938e-05, |
| "loss": 0.6626, |
| "step": 845 |
| }, |
| { |
| "epoch": 0.42748863062152603, |
| "grad_norm": 0.4226566553115845, |
| "learning_rate": 3.9219548618180194e-05, |
| "loss": 0.2538, |
| "step": 846 |
| }, |
| { |
| "epoch": 0.42799393633148053, |
| "grad_norm": 0.45771926641464233, |
| "learning_rate": 3.919364633680423e-05, |
| "loss": 0.3554, |
| "step": 847 |
| }, |
| { |
| "epoch": 0.4284992420414351, |
| "grad_norm": 0.4026903808116913, |
| "learning_rate": 3.916772155179882e-05, |
| "loss": 0.2283, |
| "step": 848 |
| }, |
| { |
| "epoch": 0.4290045477513896, |
| "grad_norm": 0.474115252494812, |
| "learning_rate": 3.9141774304266995e-05, |
| "loss": 0.3667, |
| "step": 849 |
| }, |
| { |
| "epoch": 0.4295098534613441, |
| "grad_norm": 0.5068637132644653, |
| "learning_rate": 3.91158046353474e-05, |
| "loss": 0.3598, |
| "step": 850 |
| }, |
| { |
| "epoch": 0.4300151591712986, |
| "grad_norm": 0.63497394323349, |
| "learning_rate": 3.9089812586214195e-05, |
| "loss": 0.3532, |
| "step": 851 |
| }, |
| { |
| "epoch": 0.4305204648812532, |
| "grad_norm": 1.019968032836914, |
| "learning_rate": 3.906379819807707e-05, |
| "loss": 0.5262, |
| "step": 852 |
| }, |
| { |
| "epoch": 0.43102577059120767, |
| "grad_norm": 0.7702900171279907, |
| "learning_rate": 3.903776151218109e-05, |
| "loss": 0.4335, |
| "step": 853 |
| }, |
| { |
| "epoch": 0.4315310763011622, |
| "grad_norm": 0.4589867889881134, |
| "learning_rate": 3.901170256980671e-05, |
| "loss": 0.1948, |
| "step": 854 |
| }, |
| { |
| "epoch": 0.4320363820111167, |
| "grad_norm": 0.3241965174674988, |
| "learning_rate": 3.8985621412269644e-05, |
| "loss": 0.3025, |
| "step": 855 |
| }, |
| { |
| "epoch": 0.43254168772107127, |
| "grad_norm": 0.5903672575950623, |
| "learning_rate": 3.895951808092084e-05, |
| "loss": 0.2925, |
| "step": 856 |
| }, |
| { |
| "epoch": 0.43304699343102576, |
| "grad_norm": 0.42580774426460266, |
| "learning_rate": 3.893339261714642e-05, |
| "loss": 0.4694, |
| "step": 857 |
| }, |
| { |
| "epoch": 0.4335522991409803, |
| "grad_norm": 0.4376884698867798, |
| "learning_rate": 3.890724506236753e-05, |
| "loss": 0.2743, |
| "step": 858 |
| }, |
| { |
| "epoch": 0.4340576048509348, |
| "grad_norm": 0.5972741842269897, |
| "learning_rate": 3.8881075458040426e-05, |
| "loss": 0.3676, |
| "step": 859 |
| }, |
| { |
| "epoch": 0.43456291056088936, |
| "grad_norm": 0.6178309321403503, |
| "learning_rate": 3.8854883845656275e-05, |
| "loss": 0.3012, |
| "step": 860 |
| }, |
| { |
| "epoch": 0.43506821627084385, |
| "grad_norm": 0.8123751878738403, |
| "learning_rate": 3.882867026674114e-05, |
| "loss": 0.4476, |
| "step": 861 |
| }, |
| { |
| "epoch": 0.4355735219807984, |
| "grad_norm": 0.5609534978866577, |
| "learning_rate": 3.880243476285593e-05, |
| "loss": 0.3029, |
| "step": 862 |
| }, |
| { |
| "epoch": 0.4360788276907529, |
| "grad_norm": 0.5837926268577576, |
| "learning_rate": 3.877617737559629e-05, |
| "loss": 0.4239, |
| "step": 863 |
| }, |
| { |
| "epoch": 0.43658413340070745, |
| "grad_norm": 0.40057340264320374, |
| "learning_rate": 3.874989814659258e-05, |
| "loss": 0.1811, |
| "step": 864 |
| }, |
| { |
| "epoch": 0.43708943911066195, |
| "grad_norm": 0.45030128955841064, |
| "learning_rate": 3.872359711750978e-05, |
| "loss": 0.2334, |
| "step": 865 |
| }, |
| { |
| "epoch": 0.4375947448206165, |
| "grad_norm": 0.3968111574649811, |
| "learning_rate": 3.8697274330047427e-05, |
| "loss": 0.2225, |
| "step": 866 |
| }, |
| { |
| "epoch": 0.438100050530571, |
| "grad_norm": 0.6887713074684143, |
| "learning_rate": 3.8670929825939586e-05, |
| "loss": 0.5391, |
| "step": 867 |
| }, |
| { |
| "epoch": 0.4386053562405255, |
| "grad_norm": 1.0266462564468384, |
| "learning_rate": 3.864456364695472e-05, |
| "loss": 0.4051, |
| "step": 868 |
| }, |
| { |
| "epoch": 0.43911066195048004, |
| "grad_norm": 0.40764105319976807, |
| "learning_rate": 3.8618175834895664e-05, |
| "loss": 0.2342, |
| "step": 869 |
| }, |
| { |
| "epoch": 0.43961596766043454, |
| "grad_norm": 0.3224446475505829, |
| "learning_rate": 3.8591766431599566e-05, |
| "loss": 0.1845, |
| "step": 870 |
| }, |
| { |
| "epoch": 0.4401212733703891, |
| "grad_norm": 0.8195884227752686, |
| "learning_rate": 3.856533547893779e-05, |
| "loss": 0.4636, |
| "step": 871 |
| }, |
| { |
| "epoch": 0.4406265790803436, |
| "grad_norm": 0.4880749583244324, |
| "learning_rate": 3.853888301881587e-05, |
| "loss": 0.3212, |
| "step": 872 |
| }, |
| { |
| "epoch": 0.44113188479029813, |
| "grad_norm": 0.6332755088806152, |
| "learning_rate": 3.8512409093173453e-05, |
| "loss": 0.3923, |
| "step": 873 |
| }, |
| { |
| "epoch": 0.44163719050025263, |
| "grad_norm": 0.41636690497398376, |
| "learning_rate": 3.8485913743984206e-05, |
| "loss": 0.2354, |
| "step": 874 |
| }, |
| { |
| "epoch": 0.4421424962102072, |
| "grad_norm": 0.5678810477256775, |
| "learning_rate": 3.845939701325575e-05, |
| "loss": 0.2633, |
| "step": 875 |
| }, |
| { |
| "epoch": 0.4426478019201617, |
| "grad_norm": 0.3602360188961029, |
| "learning_rate": 3.8432858943029625e-05, |
| "loss": 0.2014, |
| "step": 876 |
| }, |
| { |
| "epoch": 0.4431531076301162, |
| "grad_norm": 0.48009565472602844, |
| "learning_rate": 3.840629957538121e-05, |
| "loss": 0.3181, |
| "step": 877 |
| }, |
| { |
| "epoch": 0.4436584133400707, |
| "grad_norm": 0.3176063001155853, |
| "learning_rate": 3.837971895241964e-05, |
| "loss": 0.1613, |
| "step": 878 |
| }, |
| { |
| "epoch": 0.4441637190500253, |
| "grad_norm": 0.5749449133872986, |
| "learning_rate": 3.835311711628774e-05, |
| "loss": 0.2763, |
| "step": 879 |
| }, |
| { |
| "epoch": 0.44466902475997977, |
| "grad_norm": 0.40219807624816895, |
| "learning_rate": 3.832649410916198e-05, |
| "loss": 0.1707, |
| "step": 880 |
| }, |
| { |
| "epoch": 0.4451743304699343, |
| "grad_norm": 0.6188439130783081, |
| "learning_rate": 3.8299849973252386e-05, |
| "loss": 0.2757, |
| "step": 881 |
| }, |
| { |
| "epoch": 0.4456796361798888, |
| "grad_norm": 0.6787897348403931, |
| "learning_rate": 3.827318475080252e-05, |
| "loss": 0.2632, |
| "step": 882 |
| }, |
| { |
| "epoch": 0.44618494188984337, |
| "grad_norm": 0.46739840507507324, |
| "learning_rate": 3.8246498484089306e-05, |
| "loss": 0.2511, |
| "step": 883 |
| }, |
| { |
| "epoch": 0.44669024759979786, |
| "grad_norm": 0.5028379559516907, |
| "learning_rate": 3.82197912154231e-05, |
| "loss": 0.3607, |
| "step": 884 |
| }, |
| { |
| "epoch": 0.4471955533097524, |
| "grad_norm": 0.45272600650787354, |
| "learning_rate": 3.819306298714752e-05, |
| "loss": 0.2706, |
| "step": 885 |
| }, |
| { |
| "epoch": 0.4477008590197069, |
| "grad_norm": 0.37481459975242615, |
| "learning_rate": 3.816631384163944e-05, |
| "loss": 0.1997, |
| "step": 886 |
| }, |
| { |
| "epoch": 0.44820616472966146, |
| "grad_norm": 0.7351203560829163, |
| "learning_rate": 3.813954382130886e-05, |
| "loss": 0.2668, |
| "step": 887 |
| }, |
| { |
| "epoch": 0.44871147043961596, |
| "grad_norm": 0.7236170768737793, |
| "learning_rate": 3.8112752968598907e-05, |
| "loss": 0.3759, |
| "step": 888 |
| }, |
| { |
| "epoch": 0.4492167761495705, |
| "grad_norm": 0.36974865198135376, |
| "learning_rate": 3.808594132598574e-05, |
| "loss": 0.2733, |
| "step": 889 |
| }, |
| { |
| "epoch": 0.449722081859525, |
| "grad_norm": 0.76992267370224, |
| "learning_rate": 3.805910893597845e-05, |
| "loss": 0.4098, |
| "step": 890 |
| }, |
| { |
| "epoch": 0.45022738756947955, |
| "grad_norm": 0.31903111934661865, |
| "learning_rate": 3.803225584111907e-05, |
| "loss": 0.1713, |
| "step": 891 |
| }, |
| { |
| "epoch": 0.45073269327943405, |
| "grad_norm": 0.7338394522666931, |
| "learning_rate": 3.8005382083982386e-05, |
| "loss": 0.7272, |
| "step": 892 |
| }, |
| { |
| "epoch": 0.4512379989893886, |
| "grad_norm": 0.9718958139419556, |
| "learning_rate": 3.797848770717603e-05, |
| "loss": 0.4103, |
| "step": 893 |
| }, |
| { |
| "epoch": 0.4517433046993431, |
| "grad_norm": 0.24202537536621094, |
| "learning_rate": 3.795157275334027e-05, |
| "loss": 0.2362, |
| "step": 894 |
| }, |
| { |
| "epoch": 0.45224861040929765, |
| "grad_norm": 1.0490413904190063, |
| "learning_rate": 3.7924637265148e-05, |
| "loss": 0.4398, |
| "step": 895 |
| }, |
| { |
| "epoch": 0.45275391611925214, |
| "grad_norm": 0.46118178963661194, |
| "learning_rate": 3.789768128530471e-05, |
| "loss": 0.3123, |
| "step": 896 |
| }, |
| { |
| "epoch": 0.4532592218292067, |
| "grad_norm": 0.3323919475078583, |
| "learning_rate": 3.787070485654833e-05, |
| "loss": 0.1714, |
| "step": 897 |
| }, |
| { |
| "epoch": 0.4537645275391612, |
| "grad_norm": 0.8963467478752136, |
| "learning_rate": 3.784370802164925e-05, |
| "loss": 0.5634, |
| "step": 898 |
| }, |
| { |
| "epoch": 0.45426983324911574, |
| "grad_norm": 0.9990172386169434, |
| "learning_rate": 3.781669082341018e-05, |
| "loss": 0.7088, |
| "step": 899 |
| }, |
| { |
| "epoch": 0.45477513895907024, |
| "grad_norm": 0.5156543850898743, |
| "learning_rate": 3.7789653304666136e-05, |
| "loss": 0.2425, |
| "step": 900 |
| }, |
| { |
| "epoch": 0.4552804446690248, |
| "grad_norm": 0.3596484363079071, |
| "learning_rate": 3.776259550828437e-05, |
| "loss": 0.1751, |
| "step": 901 |
| }, |
| { |
| "epoch": 0.4557857503789793, |
| "grad_norm": 0.7215254902839661, |
| "learning_rate": 3.773551747716423e-05, |
| "loss": 0.373, |
| "step": 902 |
| }, |
| { |
| "epoch": 0.4562910560889338, |
| "grad_norm": 0.36745965480804443, |
| "learning_rate": 3.77084192542372e-05, |
| "loss": 0.1979, |
| "step": 903 |
| }, |
| { |
| "epoch": 0.45679636179888833, |
| "grad_norm": 0.3110678791999817, |
| "learning_rate": 3.768130088246674e-05, |
| "loss": 0.1676, |
| "step": 904 |
| }, |
| { |
| "epoch": 0.4573016675088428, |
| "grad_norm": 0.6911036968231201, |
| "learning_rate": 3.765416240484828e-05, |
| "loss": 0.331, |
| "step": 905 |
| }, |
| { |
| "epoch": 0.4578069732187974, |
| "grad_norm": 0.48644575476646423, |
| "learning_rate": 3.7627003864409116e-05, |
| "loss": 0.4377, |
| "step": 906 |
| }, |
| { |
| "epoch": 0.45831227892875187, |
| "grad_norm": 0.5354007482528687, |
| "learning_rate": 3.759982530420835e-05, |
| "loss": 0.2489, |
| "step": 907 |
| }, |
| { |
| "epoch": 0.4588175846387064, |
| "grad_norm": 0.4465010464191437, |
| "learning_rate": 3.757262676733684e-05, |
| "loss": 0.2772, |
| "step": 908 |
| }, |
| { |
| "epoch": 0.4593228903486609, |
| "grad_norm": 0.47116124629974365, |
| "learning_rate": 3.754540829691708e-05, |
| "loss": 0.202, |
| "step": 909 |
| }, |
| { |
| "epoch": 0.45982819605861547, |
| "grad_norm": 0.635790228843689, |
| "learning_rate": 3.751816993610323e-05, |
| "loss": 0.2873, |
| "step": 910 |
| }, |
| { |
| "epoch": 0.46033350176856996, |
| "grad_norm": 0.3087238371372223, |
| "learning_rate": 3.749091172808093e-05, |
| "loss": 0.19, |
| "step": 911 |
| }, |
| { |
| "epoch": 0.4608388074785245, |
| "grad_norm": 0.3472057282924652, |
| "learning_rate": 3.74636337160673e-05, |
| "loss": 0.2691, |
| "step": 912 |
| }, |
| { |
| "epoch": 0.461344113188479, |
| "grad_norm": 0.5002533197402954, |
| "learning_rate": 3.743633594331089e-05, |
| "loss": 0.2269, |
| "step": 913 |
| }, |
| { |
| "epoch": 0.46184941889843356, |
| "grad_norm": 0.5435811877250671, |
| "learning_rate": 3.740901845309152e-05, |
| "loss": 0.2686, |
| "step": 914 |
| }, |
| { |
| "epoch": 0.46235472460838806, |
| "grad_norm": 0.5896182060241699, |
| "learning_rate": 3.738168128872033e-05, |
| "loss": 0.3024, |
| "step": 915 |
| }, |
| { |
| "epoch": 0.4628600303183426, |
| "grad_norm": 0.6780158281326294, |
| "learning_rate": 3.735432449353963e-05, |
| "loss": 0.3342, |
| "step": 916 |
| }, |
| { |
| "epoch": 0.4633653360282971, |
| "grad_norm": 0.30589649081230164, |
| "learning_rate": 3.7326948110922855e-05, |
| "loss": 0.1874, |
| "step": 917 |
| }, |
| { |
| "epoch": 0.46387064173825165, |
| "grad_norm": 0.729263424873352, |
| "learning_rate": 3.729955218427449e-05, |
| "loss": 0.2734, |
| "step": 918 |
| }, |
| { |
| "epoch": 0.46437594744820615, |
| "grad_norm": 0.4440087080001831, |
| "learning_rate": 3.7272136757029995e-05, |
| "loss": 0.2259, |
| "step": 919 |
| }, |
| { |
| "epoch": 0.4648812531581607, |
| "grad_norm": 0.9144408702850342, |
| "learning_rate": 3.724470187265579e-05, |
| "loss": 0.544, |
| "step": 920 |
| }, |
| { |
| "epoch": 0.4653865588681152, |
| "grad_norm": 0.6351197957992554, |
| "learning_rate": 3.721724757464911e-05, |
| "loss": 0.3172, |
| "step": 921 |
| }, |
| { |
| "epoch": 0.46589186457806975, |
| "grad_norm": 0.7643218040466309, |
| "learning_rate": 3.718977390653796e-05, |
| "loss": 0.5287, |
| "step": 922 |
| }, |
| { |
| "epoch": 0.46639717028802424, |
| "grad_norm": 0.5360459685325623, |
| "learning_rate": 3.7162280911881085e-05, |
| "loss": 0.1788, |
| "step": 923 |
| }, |
| { |
| "epoch": 0.4669024759979788, |
| "grad_norm": 0.4476550221443176, |
| "learning_rate": 3.713476863426787e-05, |
| "loss": 0.2565, |
| "step": 924 |
| }, |
| { |
| "epoch": 0.4674077817079333, |
| "grad_norm": 0.3940451443195343, |
| "learning_rate": 3.710723711731825e-05, |
| "loss": 0.2263, |
| "step": 925 |
| }, |
| { |
| "epoch": 0.46791308741788784, |
| "grad_norm": 0.5123595595359802, |
| "learning_rate": 3.7079686404682674e-05, |
| "loss": 0.2216, |
| "step": 926 |
| }, |
| { |
| "epoch": 0.46841839312784234, |
| "grad_norm": 0.36884805560112, |
| "learning_rate": 3.7052116540042025e-05, |
| "loss": 0.2212, |
| "step": 927 |
| }, |
| { |
| "epoch": 0.4689236988377969, |
| "grad_norm": 0.6213935017585754, |
| "learning_rate": 3.7024527567107564e-05, |
| "loss": 0.3156, |
| "step": 928 |
| }, |
| { |
| "epoch": 0.4694290045477514, |
| "grad_norm": 0.40270182490348816, |
| "learning_rate": 3.6996919529620825e-05, |
| "loss": 0.1813, |
| "step": 929 |
| }, |
| { |
| "epoch": 0.46993431025770593, |
| "grad_norm": 0.9014922976493835, |
| "learning_rate": 3.696929247135359e-05, |
| "loss": 0.46, |
| "step": 930 |
| }, |
| { |
| "epoch": 0.47043961596766043, |
| "grad_norm": 0.5056794881820679, |
| "learning_rate": 3.694164643610777e-05, |
| "loss": 0.2321, |
| "step": 931 |
| }, |
| { |
| "epoch": 0.470944921677615, |
| "grad_norm": 0.47349992394447327, |
| "learning_rate": 3.6913981467715406e-05, |
| "loss": 0.1869, |
| "step": 932 |
| }, |
| { |
| "epoch": 0.4714502273875695, |
| "grad_norm": 0.7985735535621643, |
| "learning_rate": 3.6886297610038514e-05, |
| "loss": 0.37, |
| "step": 933 |
| }, |
| { |
| "epoch": 0.471955533097524, |
| "grad_norm": 0.9785382151603699, |
| "learning_rate": 3.6858594906969077e-05, |
| "loss": 0.4063, |
| "step": 934 |
| }, |
| { |
| "epoch": 0.4724608388074785, |
| "grad_norm": 0.32149001955986023, |
| "learning_rate": 3.6830873402428964e-05, |
| "loss": 0.2423, |
| "step": 935 |
| }, |
| { |
| "epoch": 0.4729661445174331, |
| "grad_norm": 0.8392232060432434, |
| "learning_rate": 3.6803133140369836e-05, |
| "loss": 0.7566, |
| "step": 936 |
| }, |
| { |
| "epoch": 0.47347145022738757, |
| "grad_norm": 0.383240669965744, |
| "learning_rate": 3.6775374164773105e-05, |
| "loss": 0.1921, |
| "step": 937 |
| }, |
| { |
| "epoch": 0.47397675593734206, |
| "grad_norm": 0.6196987628936768, |
| "learning_rate": 3.6747596519649864e-05, |
| "loss": 0.4214, |
| "step": 938 |
| }, |
| { |
| "epoch": 0.4744820616472966, |
| "grad_norm": 0.5443540811538696, |
| "learning_rate": 3.671980024904078e-05, |
| "loss": 0.2819, |
| "step": 939 |
| }, |
| { |
| "epoch": 0.4749873673572511, |
| "grad_norm": 0.5839280486106873, |
| "learning_rate": 3.669198539701606e-05, |
| "loss": 0.345, |
| "step": 940 |
| }, |
| { |
| "epoch": 0.47549267306720566, |
| "grad_norm": 0.43209922313690186, |
| "learning_rate": 3.666415200767538e-05, |
| "loss": 0.2777, |
| "step": 941 |
| }, |
| { |
| "epoch": 0.47599797877716016, |
| "grad_norm": 0.4791796803474426, |
| "learning_rate": 3.6636300125147804e-05, |
| "loss": 0.2516, |
| "step": 942 |
| }, |
| { |
| "epoch": 0.4765032844871147, |
| "grad_norm": 0.5791419148445129, |
| "learning_rate": 3.660842979359171e-05, |
| "loss": 0.4067, |
| "step": 943 |
| }, |
| { |
| "epoch": 0.4770085901970692, |
| "grad_norm": 0.4313831329345703, |
| "learning_rate": 3.6580541057194726e-05, |
| "loss": 0.1971, |
| "step": 944 |
| }, |
| { |
| "epoch": 0.47751389590702376, |
| "grad_norm": 0.3102171719074249, |
| "learning_rate": 3.655263396017367e-05, |
| "loss": 0.2316, |
| "step": 945 |
| }, |
| { |
| "epoch": 0.47801920161697825, |
| "grad_norm": 0.31960973143577576, |
| "learning_rate": 3.652470854677446e-05, |
| "loss": 0.2716, |
| "step": 946 |
| }, |
| { |
| "epoch": 0.4785245073269328, |
| "grad_norm": 0.24606995284557343, |
| "learning_rate": 3.649676486127206e-05, |
| "loss": 0.1721, |
| "step": 947 |
| }, |
| { |
| "epoch": 0.4790298130368873, |
| "grad_norm": 0.5517093539237976, |
| "learning_rate": 3.64688029479704e-05, |
| "loss": 0.3408, |
| "step": 948 |
| }, |
| { |
| "epoch": 0.47953511874684185, |
| "grad_norm": 0.6657114028930664, |
| "learning_rate": 3.644082285120231e-05, |
| "loss": 0.5508, |
| "step": 949 |
| }, |
| { |
| "epoch": 0.48004042445679634, |
| "grad_norm": 0.39712589979171753, |
| "learning_rate": 3.641282461532946e-05, |
| "loss": 0.2008, |
| "step": 950 |
| }, |
| { |
| "epoch": 0.4805457301667509, |
| "grad_norm": 0.5426539182662964, |
| "learning_rate": 3.6384808284742254e-05, |
| "loss": 0.3242, |
| "step": 951 |
| }, |
| { |
| "epoch": 0.4810510358767054, |
| "grad_norm": 0.4150180518627167, |
| "learning_rate": 3.635677390385982e-05, |
| "loss": 0.2666, |
| "step": 952 |
| }, |
| { |
| "epoch": 0.48155634158665994, |
| "grad_norm": 0.3427567780017853, |
| "learning_rate": 3.632872151712988e-05, |
| "loss": 0.2593, |
| "step": 953 |
| }, |
| { |
| "epoch": 0.48206164729661444, |
| "grad_norm": 0.5839005708694458, |
| "learning_rate": 3.63006511690287e-05, |
| "loss": 0.4472, |
| "step": 954 |
| }, |
| { |
| "epoch": 0.482566953006569, |
| "grad_norm": 0.6474454998970032, |
| "learning_rate": 3.627256290406103e-05, |
| "loss": 0.3743, |
| "step": 955 |
| }, |
| { |
| "epoch": 0.4830722587165235, |
| "grad_norm": 0.361176073551178, |
| "learning_rate": 3.624445676676004e-05, |
| "loss": 0.2014, |
| "step": 956 |
| }, |
| { |
| "epoch": 0.48357756442647803, |
| "grad_norm": 0.6297400593757629, |
| "learning_rate": 3.621633280168723e-05, |
| "loss": 0.5208, |
| "step": 957 |
| }, |
| { |
| "epoch": 0.48408287013643253, |
| "grad_norm": 0.38691607117652893, |
| "learning_rate": 3.618819105343234e-05, |
| "loss": 0.3079, |
| "step": 958 |
| }, |
| { |
| "epoch": 0.4845881758463871, |
| "grad_norm": 0.48631709814071655, |
| "learning_rate": 3.6160031566613336e-05, |
| "loss": 0.3092, |
| "step": 959 |
| }, |
| { |
| "epoch": 0.4850934815563416, |
| "grad_norm": 0.30502668023109436, |
| "learning_rate": 3.6131854385876314e-05, |
| "loss": 0.2976, |
| "step": 960 |
| }, |
| { |
| "epoch": 0.48559878726629613, |
| "grad_norm": 0.6193214654922485, |
| "learning_rate": 3.6103659555895376e-05, |
| "loss": 0.3162, |
| "step": 961 |
| }, |
| { |
| "epoch": 0.4861040929762506, |
| "grad_norm": 0.5869472026824951, |
| "learning_rate": 3.607544712137265e-05, |
| "loss": 0.5059, |
| "step": 962 |
| }, |
| { |
| "epoch": 0.4866093986862052, |
| "grad_norm": 0.6832082867622375, |
| "learning_rate": 3.604721712703817e-05, |
| "loss": 0.2766, |
| "step": 963 |
| }, |
| { |
| "epoch": 0.48711470439615967, |
| "grad_norm": 0.8644431829452515, |
| "learning_rate": 3.601896961764978e-05, |
| "loss": 0.632, |
| "step": 964 |
| }, |
| { |
| "epoch": 0.4876200101061142, |
| "grad_norm": 0.7030072212219238, |
| "learning_rate": 3.599070463799315e-05, |
| "loss": 0.4698, |
| "step": 965 |
| }, |
| { |
| "epoch": 0.4881253158160687, |
| "grad_norm": 0.49395883083343506, |
| "learning_rate": 3.5962422232881585e-05, |
| "loss": 0.1997, |
| "step": 966 |
| }, |
| { |
| "epoch": 0.48863062152602327, |
| "grad_norm": 0.3058975338935852, |
| "learning_rate": 3.5934122447156064e-05, |
| "loss": 0.1957, |
| "step": 967 |
| }, |
| { |
| "epoch": 0.48913592723597776, |
| "grad_norm": 0.6280907392501831, |
| "learning_rate": 3.59058053256851e-05, |
| "loss": 0.2984, |
| "step": 968 |
| }, |
| { |
| "epoch": 0.4896412329459323, |
| "grad_norm": 0.3592299818992615, |
| "learning_rate": 3.5877470913364695e-05, |
| "loss": 0.1573, |
| "step": 969 |
| }, |
| { |
| "epoch": 0.4901465386558868, |
| "grad_norm": 0.3483614921569824, |
| "learning_rate": 3.5849119255118274e-05, |
| "loss": 0.1568, |
| "step": 970 |
| }, |
| { |
| "epoch": 0.49065184436584136, |
| "grad_norm": 0.2722894847393036, |
| "learning_rate": 3.58207503958966e-05, |
| "loss": 0.1799, |
| "step": 971 |
| }, |
| { |
| "epoch": 0.49115715007579586, |
| "grad_norm": 0.4917094111442566, |
| "learning_rate": 3.57923643806777e-05, |
| "loss": 0.2378, |
| "step": 972 |
| }, |
| { |
| "epoch": 0.49166245578575035, |
| "grad_norm": 0.36408787965774536, |
| "learning_rate": 3.57639612544668e-05, |
| "loss": 0.1882, |
| "step": 973 |
| }, |
| { |
| "epoch": 0.4921677614957049, |
| "grad_norm": 0.6604106426239014, |
| "learning_rate": 3.573554106229629e-05, |
| "loss": 0.5856, |
| "step": 974 |
| }, |
| { |
| "epoch": 0.4926730672056594, |
| "grad_norm": 0.6453742384910583, |
| "learning_rate": 3.5707103849225554e-05, |
| "loss": 0.5509, |
| "step": 975 |
| }, |
| { |
| "epoch": 0.49317837291561395, |
| "grad_norm": 0.431162565946579, |
| "learning_rate": 3.5678649660341026e-05, |
| "loss": 0.2175, |
| "step": 976 |
| }, |
| { |
| "epoch": 0.49368367862556845, |
| "grad_norm": 0.510044515132904, |
| "learning_rate": 3.565017854075602e-05, |
| "loss": 0.2819, |
| "step": 977 |
| }, |
| { |
| "epoch": 0.494188984335523, |
| "grad_norm": 0.6410518288612366, |
| "learning_rate": 3.5621690535610684e-05, |
| "loss": 0.3669, |
| "step": 978 |
| }, |
| { |
| "epoch": 0.4946942900454775, |
| "grad_norm": 0.2983573377132416, |
| "learning_rate": 3.559318569007198e-05, |
| "loss": 0.2469, |
| "step": 979 |
| }, |
| { |
| "epoch": 0.49519959575543204, |
| "grad_norm": 0.4974038898944855, |
| "learning_rate": 3.5564664049333527e-05, |
| "loss": 0.2096, |
| "step": 980 |
| }, |
| { |
| "epoch": 0.49570490146538654, |
| "grad_norm": 0.685854434967041, |
| "learning_rate": 3.5536125658615586e-05, |
| "loss": 0.2544, |
| "step": 981 |
| }, |
| { |
| "epoch": 0.4962102071753411, |
| "grad_norm": 0.5799931287765503, |
| "learning_rate": 3.5507570563164996e-05, |
| "loss": 0.3086, |
| "step": 982 |
| }, |
| { |
| "epoch": 0.4967155128852956, |
| "grad_norm": 0.2882770597934723, |
| "learning_rate": 3.5478998808255045e-05, |
| "loss": 0.2204, |
| "step": 983 |
| }, |
| { |
| "epoch": 0.49722081859525014, |
| "grad_norm": 0.40922459959983826, |
| "learning_rate": 3.545041043918546e-05, |
| "loss": 0.1892, |
| "step": 984 |
| }, |
| { |
| "epoch": 0.49772612430520463, |
| "grad_norm": 0.3756551146507263, |
| "learning_rate": 3.542180550128229e-05, |
| "loss": 0.2262, |
| "step": 985 |
| }, |
| { |
| "epoch": 0.4982314300151592, |
| "grad_norm": 0.8342236280441284, |
| "learning_rate": 3.5393184039897876e-05, |
| "loss": 0.4221, |
| "step": 986 |
| }, |
| { |
| "epoch": 0.4987367357251137, |
| "grad_norm": 0.7572706341743469, |
| "learning_rate": 3.5364546100410744e-05, |
| "loss": 0.3071, |
| "step": 987 |
| }, |
| { |
| "epoch": 0.49924204143506823, |
| "grad_norm": 0.44951027631759644, |
| "learning_rate": 3.533589172822554e-05, |
| "loss": 0.3021, |
| "step": 988 |
| }, |
| { |
| "epoch": 0.4997473471450227, |
| "grad_norm": 0.5929198265075684, |
| "learning_rate": 3.530722096877298e-05, |
| "loss": 0.3193, |
| "step": 989 |
| }, |
| { |
| "epoch": 0.5002526528549772, |
| "grad_norm": 0.6089531779289246, |
| "learning_rate": 3.527853386750974e-05, |
| "loss": 0.3985, |
| "step": 990 |
| }, |
| { |
| "epoch": 0.5007579585649318, |
| "grad_norm": 0.6241928935050964, |
| "learning_rate": 3.524983046991842e-05, |
| "loss": 0.4037, |
| "step": 991 |
| }, |
| { |
| "epoch": 0.5012632642748863, |
| "grad_norm": 0.5915480852127075, |
| "learning_rate": 3.522111082150746e-05, |
| "loss": 0.3821, |
| "step": 992 |
| }, |
| { |
| "epoch": 0.5017685699848409, |
| "grad_norm": 0.956665575504303, |
| "learning_rate": 3.519237496781106e-05, |
| "loss": 0.5615, |
| "step": 993 |
| }, |
| { |
| "epoch": 0.5022738756947953, |
| "grad_norm": 0.7047909498214722, |
| "learning_rate": 3.5163622954389105e-05, |
| "loss": 0.3062, |
| "step": 994 |
| }, |
| { |
| "epoch": 0.5027791814047499, |
| "grad_norm": 0.31413036584854126, |
| "learning_rate": 3.5134854826827114e-05, |
| "loss": 0.2965, |
| "step": 995 |
| }, |
| { |
| "epoch": 0.5032844871147044, |
| "grad_norm": 0.6101884245872498, |
| "learning_rate": 3.510607063073614e-05, |
| "loss": 0.4699, |
| "step": 996 |
| }, |
| { |
| "epoch": 0.503789792824659, |
| "grad_norm": 0.6464351415634155, |
| "learning_rate": 3.5077270411752746e-05, |
| "loss": 0.3672, |
| "step": 997 |
| }, |
| { |
| "epoch": 0.5042950985346134, |
| "grad_norm": 0.5930270552635193, |
| "learning_rate": 3.504845421553884e-05, |
| "loss": 0.1938, |
| "step": 998 |
| }, |
| { |
| "epoch": 0.504800404244568, |
| "grad_norm": 0.614264190196991, |
| "learning_rate": 3.501962208778172e-05, |
| "loss": 0.3049, |
| "step": 999 |
| }, |
| { |
| "epoch": 0.5053057099545225, |
| "grad_norm": 0.5080682635307312, |
| "learning_rate": 3.4990774074193906e-05, |
| "loss": 0.3748, |
| "step": 1000 |
| }, |
| { |
| "epoch": 0.5058110156644771, |
| "grad_norm": 0.30347660183906555, |
| "learning_rate": 3.496191022051312e-05, |
| "loss": 0.2212, |
| "step": 1001 |
| }, |
| { |
| "epoch": 0.5063163213744315, |
| "grad_norm": 0.2643427848815918, |
| "learning_rate": 3.49330305725022e-05, |
| "loss": 0.1181, |
| "step": 1002 |
| }, |
| { |
| "epoch": 0.506821627084386, |
| "grad_norm": 0.6040931940078735, |
| "learning_rate": 3.490413517594901e-05, |
| "loss": 0.4396, |
| "step": 1003 |
| }, |
| { |
| "epoch": 0.5073269327943406, |
| "grad_norm": 0.4380238652229309, |
| "learning_rate": 3.487522407666641e-05, |
| "loss": 0.2364, |
| "step": 1004 |
| }, |
| { |
| "epoch": 0.5078322385042952, |
| "grad_norm": 1.0693117380142212, |
| "learning_rate": 3.4846297320492124e-05, |
| "loss": 0.6679, |
| "step": 1005 |
| }, |
| { |
| "epoch": 0.5083375442142496, |
| "grad_norm": 0.4763444662094116, |
| "learning_rate": 3.4817354953288715e-05, |
| "loss": 0.3052, |
| "step": 1006 |
| }, |
| { |
| "epoch": 0.5088428499242041, |
| "grad_norm": 0.41242119669914246, |
| "learning_rate": 3.478839702094351e-05, |
| "loss": 0.2532, |
| "step": 1007 |
| }, |
| { |
| "epoch": 0.5093481556341587, |
| "grad_norm": 0.41044360399246216, |
| "learning_rate": 3.47594235693685e-05, |
| "loss": 0.4045, |
| "step": 1008 |
| }, |
| { |
| "epoch": 0.5098534613441131, |
| "grad_norm": 0.5892174243927002, |
| "learning_rate": 3.473043464450027e-05, |
| "loss": 0.3431, |
| "step": 1009 |
| }, |
| { |
| "epoch": 0.5103587670540677, |
| "grad_norm": 0.5369464755058289, |
| "learning_rate": 3.470143029229996e-05, |
| "loss": 0.2737, |
| "step": 1010 |
| }, |
| { |
| "epoch": 0.5108640727640222, |
| "grad_norm": 0.39442282915115356, |
| "learning_rate": 3.4672410558753155e-05, |
| "loss": 0.2475, |
| "step": 1011 |
| }, |
| { |
| "epoch": 0.5113693784739768, |
| "grad_norm": 0.4975070059299469, |
| "learning_rate": 3.464337548986983e-05, |
| "loss": 0.2419, |
| "step": 1012 |
| }, |
| { |
| "epoch": 0.5118746841839312, |
| "grad_norm": 0.6060254573822021, |
| "learning_rate": 3.4614325131684275e-05, |
| "loss": 0.387, |
| "step": 1013 |
| }, |
| { |
| "epoch": 0.5123799898938858, |
| "grad_norm": 0.3599434196949005, |
| "learning_rate": 3.458525953025503e-05, |
| "loss": 0.1805, |
| "step": 1014 |
| }, |
| { |
| "epoch": 0.5128852956038403, |
| "grad_norm": 0.457060843706131, |
| "learning_rate": 3.4556178731664776e-05, |
| "loss": 0.2104, |
| "step": 1015 |
| }, |
| { |
| "epoch": 0.5133906013137949, |
| "grad_norm": 0.6781561970710754, |
| "learning_rate": 3.4527082782020323e-05, |
| "loss": 0.4543, |
| "step": 1016 |
| }, |
| { |
| "epoch": 0.5138959070237493, |
| "grad_norm": 0.4415772259235382, |
| "learning_rate": 3.449797172745247e-05, |
| "loss": 0.2245, |
| "step": 1017 |
| }, |
| { |
| "epoch": 0.5144012127337039, |
| "grad_norm": 0.3077252507209778, |
| "learning_rate": 3.4468845614116e-05, |
| "loss": 0.2002, |
| "step": 1018 |
| }, |
| { |
| "epoch": 0.5149065184436584, |
| "grad_norm": 0.7845990657806396, |
| "learning_rate": 3.443970448818954e-05, |
| "loss": 0.5271, |
| "step": 1019 |
| }, |
| { |
| "epoch": 0.515411824153613, |
| "grad_norm": 0.44302085041999817, |
| "learning_rate": 3.441054839587553e-05, |
| "loss": 0.1878, |
| "step": 1020 |
| }, |
| { |
| "epoch": 0.5159171298635674, |
| "grad_norm": 1.1317225694656372, |
| "learning_rate": 3.438137738340014e-05, |
| "loss": 0.4688, |
| "step": 1021 |
| }, |
| { |
| "epoch": 0.516422435573522, |
| "grad_norm": 0.7646364569664001, |
| "learning_rate": 3.435219149701319e-05, |
| "loss": 0.4279, |
| "step": 1022 |
| }, |
| { |
| "epoch": 0.5169277412834765, |
| "grad_norm": 0.3174137771129608, |
| "learning_rate": 3.43229907829881e-05, |
| "loss": 0.207, |
| "step": 1023 |
| }, |
| { |
| "epoch": 0.5174330469934311, |
| "grad_norm": 0.25344839692115784, |
| "learning_rate": 3.429377528762177e-05, |
| "loss": 0.174, |
| "step": 1024 |
| }, |
| { |
| "epoch": 0.5179383527033855, |
| "grad_norm": 0.4643212854862213, |
| "learning_rate": 3.4264545057234565e-05, |
| "loss": 0.3102, |
| "step": 1025 |
| }, |
| { |
| "epoch": 0.5184436584133401, |
| "grad_norm": 0.3090815842151642, |
| "learning_rate": 3.423530013817019e-05, |
| "loss": 0.1688, |
| "step": 1026 |
| }, |
| { |
| "epoch": 0.5189489641232946, |
| "grad_norm": 0.7463130950927734, |
| "learning_rate": 3.4206040576795644e-05, |
| "loss": 0.6639, |
| "step": 1027 |
| }, |
| { |
| "epoch": 0.5194542698332492, |
| "grad_norm": 0.4260081648826599, |
| "learning_rate": 3.417676641950115e-05, |
| "loss": 0.1466, |
| "step": 1028 |
| }, |
| { |
| "epoch": 0.5199595755432036, |
| "grad_norm": 0.8411809802055359, |
| "learning_rate": 3.414747771270006e-05, |
| "loss": 0.5472, |
| "step": 1029 |
| }, |
| { |
| "epoch": 0.5204648812531582, |
| "grad_norm": 0.590641438961029, |
| "learning_rate": 3.411817450282881e-05, |
| "loss": 0.402, |
| "step": 1030 |
| }, |
| { |
| "epoch": 0.5209701869631127, |
| "grad_norm": 0.9213524460792542, |
| "learning_rate": 3.408885683634681e-05, |
| "loss": 0.5014, |
| "step": 1031 |
| }, |
| { |
| "epoch": 0.5214754926730673, |
| "grad_norm": 0.3281521201133728, |
| "learning_rate": 3.4059524759736404e-05, |
| "loss": 0.2153, |
| "step": 1032 |
| }, |
| { |
| "epoch": 0.5219807983830217, |
| "grad_norm": 0.4002426266670227, |
| "learning_rate": 3.4030178319502784e-05, |
| "loss": 0.2112, |
| "step": 1033 |
| }, |
| { |
| "epoch": 0.5224861040929762, |
| "grad_norm": 0.27855736017227173, |
| "learning_rate": 3.40008175621739e-05, |
| "loss": 0.1904, |
| "step": 1034 |
| }, |
| { |
| "epoch": 0.5229914098029308, |
| "grad_norm": 1.192247986793518, |
| "learning_rate": 3.3971442534300414e-05, |
| "loss": 0.5124, |
| "step": 1035 |
| }, |
| { |
| "epoch": 0.5234967155128853, |
| "grad_norm": 0.3181244730949402, |
| "learning_rate": 3.3942053282455625e-05, |
| "loss": 0.2413, |
| "step": 1036 |
| }, |
| { |
| "epoch": 0.5240020212228398, |
| "grad_norm": 0.5175408720970154, |
| "learning_rate": 3.3912649853235355e-05, |
| "loss": 0.2505, |
| "step": 1037 |
| }, |
| { |
| "epoch": 0.5245073269327943, |
| "grad_norm": 0.7706686854362488, |
| "learning_rate": 3.3883232293257926e-05, |
| "loss": 0.4528, |
| "step": 1038 |
| }, |
| { |
| "epoch": 0.5250126326427489, |
| "grad_norm": 0.6226242184638977, |
| "learning_rate": 3.385380064916405e-05, |
| "loss": 0.2895, |
| "step": 1039 |
| }, |
| { |
| "epoch": 0.5255179383527034, |
| "grad_norm": 0.6919043064117432, |
| "learning_rate": 3.382435496761679e-05, |
| "loss": 0.5499, |
| "step": 1040 |
| }, |
| { |
| "epoch": 0.5260232440626579, |
| "grad_norm": 0.7153246402740479, |
| "learning_rate": 3.379489529530144e-05, |
| "loss": 0.4498, |
| "step": 1041 |
| }, |
| { |
| "epoch": 0.5265285497726124, |
| "grad_norm": 0.3823365271091461, |
| "learning_rate": 3.376542167892548e-05, |
| "loss": 0.3495, |
| "step": 1042 |
| }, |
| { |
| "epoch": 0.527033855482567, |
| "grad_norm": 0.7256558537483215, |
| "learning_rate": 3.373593416521852e-05, |
| "loss": 0.4197, |
| "step": 1043 |
| }, |
| { |
| "epoch": 0.5275391611925214, |
| "grad_norm": 0.6454598903656006, |
| "learning_rate": 3.3706432800932184e-05, |
| "loss": 0.4144, |
| "step": 1044 |
| }, |
| { |
| "epoch": 0.528044466902476, |
| "grad_norm": 0.3100246489048004, |
| "learning_rate": 3.367691763284005e-05, |
| "loss": 0.1587, |
| "step": 1045 |
| }, |
| { |
| "epoch": 0.5285497726124305, |
| "grad_norm": 0.44669437408447266, |
| "learning_rate": 3.3647388707737606e-05, |
| "loss": 0.2715, |
| "step": 1046 |
| }, |
| { |
| "epoch": 0.5290550783223851, |
| "grad_norm": 0.31795749068260193, |
| "learning_rate": 3.3617846072442135e-05, |
| "loss": 0.2045, |
| "step": 1047 |
| }, |
| { |
| "epoch": 0.5295603840323395, |
| "grad_norm": 0.34143051505088806, |
| "learning_rate": 3.3588289773792646e-05, |
| "loss": 0.2189, |
| "step": 1048 |
| }, |
| { |
| "epoch": 0.5300656897422941, |
| "grad_norm": 0.5310137271881104, |
| "learning_rate": 3.3558719858649834e-05, |
| "loss": 0.2596, |
| "step": 1049 |
| }, |
| { |
| "epoch": 0.5305709954522486, |
| "grad_norm": 0.7323408722877502, |
| "learning_rate": 3.352913637389598e-05, |
| "loss": 0.3134, |
| "step": 1050 |
| }, |
| { |
| "epoch": 0.5310763011622032, |
| "grad_norm": 0.3827390968799591, |
| "learning_rate": 3.349953936643486e-05, |
| "loss": 0.2751, |
| "step": 1051 |
| }, |
| { |
| "epoch": 0.5315816068721576, |
| "grad_norm": 0.701563835144043, |
| "learning_rate": 3.3469928883191706e-05, |
| "loss": 0.3593, |
| "step": 1052 |
| }, |
| { |
| "epoch": 0.5320869125821122, |
| "grad_norm": 0.68122398853302, |
| "learning_rate": 3.344030497111312e-05, |
| "loss": 0.6736, |
| "step": 1053 |
| }, |
| { |
| "epoch": 0.5325922182920667, |
| "grad_norm": 0.47403842210769653, |
| "learning_rate": 3.3410667677166966e-05, |
| "loss": 0.3286, |
| "step": 1054 |
| }, |
| { |
| "epoch": 0.5330975240020213, |
| "grad_norm": 0.24394340813159943, |
| "learning_rate": 3.338101704834236e-05, |
| "loss": 0.161, |
| "step": 1055 |
| }, |
| { |
| "epoch": 0.5336028297119757, |
| "grad_norm": 0.6971885561943054, |
| "learning_rate": 3.3351353131649546e-05, |
| "loss": 0.2709, |
| "step": 1056 |
| }, |
| { |
| "epoch": 0.5341081354219303, |
| "grad_norm": 0.7203572988510132, |
| "learning_rate": 3.332167597411983e-05, |
| "loss": 0.3845, |
| "step": 1057 |
| }, |
| { |
| "epoch": 0.5346134411318848, |
| "grad_norm": 0.8475279211997986, |
| "learning_rate": 3.3291985622805516e-05, |
| "loss": 0.436, |
| "step": 1058 |
| }, |
| { |
| "epoch": 0.5351187468418394, |
| "grad_norm": 0.46801191568374634, |
| "learning_rate": 3.326228212477982e-05, |
| "loss": 0.2384, |
| "step": 1059 |
| }, |
| { |
| "epoch": 0.5356240525517938, |
| "grad_norm": 0.4841732680797577, |
| "learning_rate": 3.3232565527136815e-05, |
| "loss": 0.3124, |
| "step": 1060 |
| }, |
| { |
| "epoch": 0.5361293582617483, |
| "grad_norm": 0.2917931377887726, |
| "learning_rate": 3.3202835876991346e-05, |
| "loss": 0.178, |
| "step": 1061 |
| }, |
| { |
| "epoch": 0.5366346639717029, |
| "grad_norm": 0.5136047005653381, |
| "learning_rate": 3.317309322147891e-05, |
| "loss": 0.2269, |
| "step": 1062 |
| }, |
| { |
| "epoch": 0.5371399696816574, |
| "grad_norm": 0.504074215888977, |
| "learning_rate": 3.3143337607755694e-05, |
| "loss": 0.2895, |
| "step": 1063 |
| }, |
| { |
| "epoch": 0.5376452753916119, |
| "grad_norm": 0.44980528950691223, |
| "learning_rate": 3.311356908299836e-05, |
| "loss": 0.346, |
| "step": 1064 |
| }, |
| { |
| "epoch": 0.5381505811015664, |
| "grad_norm": 0.45925334095954895, |
| "learning_rate": 3.30837876944041e-05, |
| "loss": 0.2025, |
| "step": 1065 |
| }, |
| { |
| "epoch": 0.538655886811521, |
| "grad_norm": 0.48478057980537415, |
| "learning_rate": 3.305399348919045e-05, |
| "loss": 0.3748, |
| "step": 1066 |
| }, |
| { |
| "epoch": 0.5391611925214755, |
| "grad_norm": 0.376342236995697, |
| "learning_rate": 3.3024186514595294e-05, |
| "loss": 0.2637, |
| "step": 1067 |
| }, |
| { |
| "epoch": 0.53966649823143, |
| "grad_norm": 0.8223666548728943, |
| "learning_rate": 3.299436681787677e-05, |
| "loss": 0.3113, |
| "step": 1068 |
| }, |
| { |
| "epoch": 0.5401718039413845, |
| "grad_norm": 0.3819112181663513, |
| "learning_rate": 3.296453444631316e-05, |
| "loss": 0.2424, |
| "step": 1069 |
| }, |
| { |
| "epoch": 0.5406771096513391, |
| "grad_norm": 0.5420382022857666, |
| "learning_rate": 3.293468944720287e-05, |
| "loss": 0.2229, |
| "step": 1070 |
| }, |
| { |
| "epoch": 0.5411824153612936, |
| "grad_norm": 0.497595876455307, |
| "learning_rate": 3.290483186786428e-05, |
| "loss": 0.2529, |
| "step": 1071 |
| }, |
| { |
| "epoch": 0.5416877210712481, |
| "grad_norm": 0.7739394307136536, |
| "learning_rate": 3.287496175563578e-05, |
| "loss": 0.3402, |
| "step": 1072 |
| }, |
| { |
| "epoch": 0.5421930267812026, |
| "grad_norm": 0.5213684439659119, |
| "learning_rate": 3.284507915787559e-05, |
| "loss": 0.2469, |
| "step": 1073 |
| }, |
| { |
| "epoch": 0.5426983324911572, |
| "grad_norm": 0.45709890127182007, |
| "learning_rate": 3.281518412196172e-05, |
| "loss": 0.446, |
| "step": 1074 |
| }, |
| { |
| "epoch": 0.5432036382011117, |
| "grad_norm": 0.47826409339904785, |
| "learning_rate": 3.2785276695291935e-05, |
| "loss": 0.3853, |
| "step": 1075 |
| }, |
| { |
| "epoch": 0.5437089439110662, |
| "grad_norm": 0.5849752426147461, |
| "learning_rate": 3.27553569252836e-05, |
| "loss": 0.3097, |
| "step": 1076 |
| }, |
| { |
| "epoch": 0.5442142496210207, |
| "grad_norm": 0.3121519982814789, |
| "learning_rate": 3.272542485937369e-05, |
| "loss": 0.1677, |
| "step": 1077 |
| }, |
| { |
| "epoch": 0.5447195553309753, |
| "grad_norm": 0.5105435252189636, |
| "learning_rate": 3.269548054501865e-05, |
| "loss": 0.2058, |
| "step": 1078 |
| }, |
| { |
| "epoch": 0.5452248610409297, |
| "grad_norm": 0.5398941040039062, |
| "learning_rate": 3.2665524029694366e-05, |
| "loss": 0.3785, |
| "step": 1079 |
| }, |
| { |
| "epoch": 0.5457301667508843, |
| "grad_norm": 0.5803938508033752, |
| "learning_rate": 3.263555536089604e-05, |
| "loss": 0.4135, |
| "step": 1080 |
| }, |
| { |
| "epoch": 0.5462354724608388, |
| "grad_norm": 0.4731886088848114, |
| "learning_rate": 3.260557458613817e-05, |
| "loss": 0.3249, |
| "step": 1081 |
| }, |
| { |
| "epoch": 0.5467407781707934, |
| "grad_norm": 0.29429805278778076, |
| "learning_rate": 3.257558175295443e-05, |
| "loss": 0.1736, |
| "step": 1082 |
| }, |
| { |
| "epoch": 0.5472460838807478, |
| "grad_norm": 0.4236926734447479, |
| "learning_rate": 3.254557690889762e-05, |
| "loss": 0.2494, |
| "step": 1083 |
| }, |
| { |
| "epoch": 0.5477513895907024, |
| "grad_norm": 0.5632703900337219, |
| "learning_rate": 3.251556010153958e-05, |
| "loss": 0.3988, |
| "step": 1084 |
| }, |
| { |
| "epoch": 0.5482566953006569, |
| "grad_norm": 0.590407133102417, |
| "learning_rate": 3.248553137847112e-05, |
| "loss": 0.3471, |
| "step": 1085 |
| }, |
| { |
| "epoch": 0.5487620010106115, |
| "grad_norm": 0.5127763748168945, |
| "learning_rate": 3.245549078730195e-05, |
| "loss": 0.3066, |
| "step": 1086 |
| }, |
| { |
| "epoch": 0.5492673067205659, |
| "grad_norm": 0.5116364359855652, |
| "learning_rate": 3.242543837566057e-05, |
| "loss": 0.5509, |
| "step": 1087 |
| }, |
| { |
| "epoch": 0.5497726124305204, |
| "grad_norm": 0.40164387226104736, |
| "learning_rate": 3.239537419119425e-05, |
| "loss": 0.1977, |
| "step": 1088 |
| }, |
| { |
| "epoch": 0.550277918140475, |
| "grad_norm": 0.646152675151825, |
| "learning_rate": 3.236529828156891e-05, |
| "loss": 0.4706, |
| "step": 1089 |
| }, |
| { |
| "epoch": 0.5507832238504295, |
| "grad_norm": 0.5561092495918274, |
| "learning_rate": 3.233521069446907e-05, |
| "loss": 0.1772, |
| "step": 1090 |
| }, |
| { |
| "epoch": 0.551288529560384, |
| "grad_norm": 0.5761879682540894, |
| "learning_rate": 3.230511147759775e-05, |
| "loss": 0.2085, |
| "step": 1091 |
| }, |
| { |
| "epoch": 0.5517938352703385, |
| "grad_norm": 0.4727552533149719, |
| "learning_rate": 3.227500067867642e-05, |
| "loss": 0.2691, |
| "step": 1092 |
| }, |
| { |
| "epoch": 0.5522991409802931, |
| "grad_norm": 0.9213289618492126, |
| "learning_rate": 3.2244878345444905e-05, |
| "loss": 0.4643, |
| "step": 1093 |
| }, |
| { |
| "epoch": 0.5528044466902476, |
| "grad_norm": 0.8000922203063965, |
| "learning_rate": 3.221474452566133e-05, |
| "loss": 0.4779, |
| "step": 1094 |
| }, |
| { |
| "epoch": 0.5533097524002021, |
| "grad_norm": 0.3150307238101959, |
| "learning_rate": 3.218459926710202e-05, |
| "loss": 0.1698, |
| "step": 1095 |
| }, |
| { |
| "epoch": 0.5538150581101566, |
| "grad_norm": 0.5562626123428345, |
| "learning_rate": 3.215444261756144e-05, |
| "loss": 0.2622, |
| "step": 1096 |
| }, |
| { |
| "epoch": 0.5543203638201112, |
| "grad_norm": 0.576187014579773, |
| "learning_rate": 3.212427462485212e-05, |
| "loss": 0.2248, |
| "step": 1097 |
| }, |
| { |
| "epoch": 0.5548256695300657, |
| "grad_norm": 0.4368909001350403, |
| "learning_rate": 3.209409533680457e-05, |
| "loss": 0.2079, |
| "step": 1098 |
| }, |
| { |
| "epoch": 0.5553309752400202, |
| "grad_norm": 0.5242764353752136, |
| "learning_rate": 3.206390480126718e-05, |
| "loss": 0.3002, |
| "step": 1099 |
| }, |
| { |
| "epoch": 0.5558362809499747, |
| "grad_norm": 0.5973017811775208, |
| "learning_rate": 3.203370306610624e-05, |
| "loss": 0.2916, |
| "step": 1100 |
| }, |
| { |
| "epoch": 0.5563415866599293, |
| "grad_norm": 0.5519207119941711, |
| "learning_rate": 3.200349017920573e-05, |
| "loss": 0.2784, |
| "step": 1101 |
| }, |
| { |
| "epoch": 0.5568468923698838, |
| "grad_norm": 0.5289061069488525, |
| "learning_rate": 3.1973266188467346e-05, |
| "loss": 0.2497, |
| "step": 1102 |
| }, |
| { |
| "epoch": 0.5573521980798383, |
| "grad_norm": 0.5926082134246826, |
| "learning_rate": 3.1943031141810385e-05, |
| "loss": 0.3212, |
| "step": 1103 |
| }, |
| { |
| "epoch": 0.5578575037897928, |
| "grad_norm": 0.5896833539009094, |
| "learning_rate": 3.191278508717166e-05, |
| "loss": 0.3915, |
| "step": 1104 |
| }, |
| { |
| "epoch": 0.5583628094997474, |
| "grad_norm": 0.6870902180671692, |
| "learning_rate": 3.188252807250545e-05, |
| "loss": 0.3646, |
| "step": 1105 |
| }, |
| { |
| "epoch": 0.5588681152097019, |
| "grad_norm": 0.3682582676410675, |
| "learning_rate": 3.18522601457834e-05, |
| "loss": 0.2704, |
| "step": 1106 |
| }, |
| { |
| "epoch": 0.5593734209196564, |
| "grad_norm": 0.47527265548706055, |
| "learning_rate": 3.182198135499447e-05, |
| "loss": 0.2499, |
| "step": 1107 |
| }, |
| { |
| "epoch": 0.5598787266296109, |
| "grad_norm": 0.4023267924785614, |
| "learning_rate": 3.179169174814483e-05, |
| "loss": 0.1694, |
| "step": 1108 |
| }, |
| { |
| "epoch": 0.5603840323395655, |
| "grad_norm": 0.6233897805213928, |
| "learning_rate": 3.176139137325781e-05, |
| "loss": 0.4744, |
| "step": 1109 |
| }, |
| { |
| "epoch": 0.56088933804952, |
| "grad_norm": 0.2687748372554779, |
| "learning_rate": 3.173108027837379e-05, |
| "loss": 0.1979, |
| "step": 1110 |
| }, |
| { |
| "epoch": 0.5613946437594745, |
| "grad_norm": 0.4329420328140259, |
| "learning_rate": 3.1700758511550185e-05, |
| "loss": 0.2352, |
| "step": 1111 |
| }, |
| { |
| "epoch": 0.561899949469429, |
| "grad_norm": 0.36293160915374756, |
| "learning_rate": 3.1670426120861314e-05, |
| "loss": 0.2382, |
| "step": 1112 |
| }, |
| { |
| "epoch": 0.5624052551793836, |
| "grad_norm": 0.6872026324272156, |
| "learning_rate": 3.1640083154398306e-05, |
| "loss": 0.3837, |
| "step": 1113 |
| }, |
| { |
| "epoch": 0.562910560889338, |
| "grad_norm": 0.346775621175766, |
| "learning_rate": 3.160972966026911e-05, |
| "loss": 0.2396, |
| "step": 1114 |
| }, |
| { |
| "epoch": 0.5634158665992925, |
| "grad_norm": 0.6277110576629639, |
| "learning_rate": 3.1579365686598336e-05, |
| "loss": 0.3142, |
| "step": 1115 |
| }, |
| { |
| "epoch": 0.5639211723092471, |
| "grad_norm": 0.7550572156906128, |
| "learning_rate": 3.15489912815272e-05, |
| "loss": 0.2231, |
| "step": 1116 |
| }, |
| { |
| "epoch": 0.5644264780192016, |
| "grad_norm": 0.2056116759777069, |
| "learning_rate": 3.15186064932135e-05, |
| "loss": 0.1063, |
| "step": 1117 |
| }, |
| { |
| "epoch": 0.5649317837291561, |
| "grad_norm": 0.7212878465652466, |
| "learning_rate": 3.148821136983144e-05, |
| "loss": 0.3422, |
| "step": 1118 |
| }, |
| { |
| "epoch": 0.5654370894391106, |
| "grad_norm": 0.2987580895423889, |
| "learning_rate": 3.1457805959571665e-05, |
| "loss": 0.2378, |
| "step": 1119 |
| }, |
| { |
| "epoch": 0.5659423951490652, |
| "grad_norm": 0.45587587356567383, |
| "learning_rate": 3.142739031064108e-05, |
| "loss": 0.2602, |
| "step": 1120 |
| }, |
| { |
| "epoch": 0.5664477008590197, |
| "grad_norm": 0.943342387676239, |
| "learning_rate": 3.139696447126286e-05, |
| "loss": 0.3641, |
| "step": 1121 |
| }, |
| { |
| "epoch": 0.5669530065689742, |
| "grad_norm": 0.8414391279220581, |
| "learning_rate": 3.1366528489676315e-05, |
| "loss": 0.5864, |
| "step": 1122 |
| }, |
| { |
| "epoch": 0.5674583122789287, |
| "grad_norm": 0.5744806528091431, |
| "learning_rate": 3.133608241413685e-05, |
| "loss": 0.3196, |
| "step": 1123 |
| }, |
| { |
| "epoch": 0.5679636179888833, |
| "grad_norm": 0.7059041857719421, |
| "learning_rate": 3.130562629291586e-05, |
| "loss": 0.4094, |
| "step": 1124 |
| }, |
| { |
| "epoch": 0.5684689236988378, |
| "grad_norm": 0.4427710175514221, |
| "learning_rate": 3.127516017430067e-05, |
| "loss": 0.2116, |
| "step": 1125 |
| }, |
| { |
| "epoch": 0.5689742294087923, |
| "grad_norm": 0.8828384280204773, |
| "learning_rate": 3.124468410659448e-05, |
| "loss": 0.3982, |
| "step": 1126 |
| }, |
| { |
| "epoch": 0.5694795351187468, |
| "grad_norm": 0.9311108589172363, |
| "learning_rate": 3.121419813811622e-05, |
| "loss": 0.4192, |
| "step": 1127 |
| }, |
| { |
| "epoch": 0.5699848408287014, |
| "grad_norm": 0.8841239809989929, |
| "learning_rate": 3.118370231720055e-05, |
| "loss": 0.4229, |
| "step": 1128 |
| }, |
| { |
| "epoch": 0.5704901465386559, |
| "grad_norm": 0.549286425113678, |
| "learning_rate": 3.1153196692197745e-05, |
| "loss": 0.3589, |
| "step": 1129 |
| }, |
| { |
| "epoch": 0.5709954522486104, |
| "grad_norm": 0.4687441885471344, |
| "learning_rate": 3.112268131147361e-05, |
| "loss": 0.2813, |
| "step": 1130 |
| }, |
| { |
| "epoch": 0.5715007579585649, |
| "grad_norm": 0.5466412305831909, |
| "learning_rate": 3.109215622340944e-05, |
| "loss": 0.2345, |
| "step": 1131 |
| }, |
| { |
| "epoch": 0.5720060636685195, |
| "grad_norm": 0.39715632796287537, |
| "learning_rate": 3.10616214764019e-05, |
| "loss": 0.2707, |
| "step": 1132 |
| }, |
| { |
| "epoch": 0.572511369378474, |
| "grad_norm": 0.3774857521057129, |
| "learning_rate": 3.103107711886299e-05, |
| "loss": 0.3072, |
| "step": 1133 |
| }, |
| { |
| "epoch": 0.5730166750884285, |
| "grad_norm": 0.5280380845069885, |
| "learning_rate": 3.1000523199219925e-05, |
| "loss": 0.2819, |
| "step": 1134 |
| }, |
| { |
| "epoch": 0.573521980798383, |
| "grad_norm": 0.4964953064918518, |
| "learning_rate": 3.096995976591508e-05, |
| "loss": 0.2877, |
| "step": 1135 |
| }, |
| { |
| "epoch": 0.5740272865083376, |
| "grad_norm": 0.4315342903137207, |
| "learning_rate": 3.093938686740594e-05, |
| "loss": 0.2174, |
| "step": 1136 |
| }, |
| { |
| "epoch": 0.5745325922182921, |
| "grad_norm": 0.5171803832054138, |
| "learning_rate": 3.0908804552164985e-05, |
| "loss": 0.4131, |
| "step": 1137 |
| }, |
| { |
| "epoch": 0.5750378979282466, |
| "grad_norm": 0.43991976976394653, |
| "learning_rate": 3.08782128686796e-05, |
| "loss": 0.2688, |
| "step": 1138 |
| }, |
| { |
| "epoch": 0.5755432036382011, |
| "grad_norm": 0.45389074087142944, |
| "learning_rate": 3.084761186545206e-05, |
| "loss": 0.3958, |
| "step": 1139 |
| }, |
| { |
| "epoch": 0.5760485093481557, |
| "grad_norm": 0.4297245740890503, |
| "learning_rate": 3.081700159099939e-05, |
| "loss": 0.2264, |
| "step": 1140 |
| }, |
| { |
| "epoch": 0.5765538150581102, |
| "grad_norm": 0.6526983380317688, |
| "learning_rate": 3.0786382093853336e-05, |
| "loss": 0.5204, |
| "step": 1141 |
| }, |
| { |
| "epoch": 0.5770591207680646, |
| "grad_norm": 0.46042600274086, |
| "learning_rate": 3.0755753422560254e-05, |
| "loss": 0.2465, |
| "step": 1142 |
| }, |
| { |
| "epoch": 0.5775644264780192, |
| "grad_norm": 0.8101718425750732, |
| "learning_rate": 3.0725115625681035e-05, |
| "loss": 0.6224, |
| "step": 1143 |
| }, |
| { |
| "epoch": 0.5780697321879738, |
| "grad_norm": 0.6587077379226685, |
| "learning_rate": 3.069446875179106e-05, |
| "loss": 0.5301, |
| "step": 1144 |
| }, |
| { |
| "epoch": 0.5785750378979283, |
| "grad_norm": 0.49807339906692505, |
| "learning_rate": 3.0663812849480075e-05, |
| "loss": 0.2905, |
| "step": 1145 |
| }, |
| { |
| "epoch": 0.5790803436078827, |
| "grad_norm": 0.514011800289154, |
| "learning_rate": 3.063314796735218e-05, |
| "loss": 0.2377, |
| "step": 1146 |
| }, |
| { |
| "epoch": 0.5795856493178373, |
| "grad_norm": 0.5293000340461731, |
| "learning_rate": 3.060247415402567e-05, |
| "loss": 0.3341, |
| "step": 1147 |
| }, |
| { |
| "epoch": 0.5800909550277918, |
| "grad_norm": 0.705506443977356, |
| "learning_rate": 3.057179145813302e-05, |
| "loss": 0.3912, |
| "step": 1148 |
| }, |
| { |
| "epoch": 0.5805962607377463, |
| "grad_norm": 0.6125894784927368, |
| "learning_rate": 3.0541099928320804e-05, |
| "loss": 0.2844, |
| "step": 1149 |
| }, |
| { |
| "epoch": 0.5811015664477008, |
| "grad_norm": 0.6330406069755554, |
| "learning_rate": 3.0510399613249556e-05, |
| "loss": 0.3338, |
| "step": 1150 |
| }, |
| { |
| "epoch": 0.5816068721576554, |
| "grad_norm": 0.6614536046981812, |
| "learning_rate": 3.0479690561593793e-05, |
| "loss": 0.2598, |
| "step": 1151 |
| }, |
| { |
| "epoch": 0.5821121778676099, |
| "grad_norm": 0.37762120366096497, |
| "learning_rate": 3.0448972822041843e-05, |
| "loss": 0.2355, |
| "step": 1152 |
| }, |
| { |
| "epoch": 0.5826174835775644, |
| "grad_norm": 0.6358450055122375, |
| "learning_rate": 3.0418246443295823e-05, |
| "loss": 0.2811, |
| "step": 1153 |
| }, |
| { |
| "epoch": 0.5831227892875189, |
| "grad_norm": 0.3570258617401123, |
| "learning_rate": 3.0387511474071557e-05, |
| "loss": 0.172, |
| "step": 1154 |
| }, |
| { |
| "epoch": 0.5836280949974735, |
| "grad_norm": 0.3080599308013916, |
| "learning_rate": 3.0356767963098464e-05, |
| "loss": 0.166, |
| "step": 1155 |
| }, |
| { |
| "epoch": 0.584133400707428, |
| "grad_norm": 0.3466140031814575, |
| "learning_rate": 3.0326015959119535e-05, |
| "loss": 0.1429, |
| "step": 1156 |
| }, |
| { |
| "epoch": 0.5846387064173825, |
| "grad_norm": 0.4894466996192932, |
| "learning_rate": 3.0295255510891213e-05, |
| "loss": 0.4822, |
| "step": 1157 |
| }, |
| { |
| "epoch": 0.585144012127337, |
| "grad_norm": 0.4862903952598572, |
| "learning_rate": 3.0264486667183322e-05, |
| "loss": 0.2294, |
| "step": 1158 |
| }, |
| { |
| "epoch": 0.5856493178372916, |
| "grad_norm": 0.4425124228000641, |
| "learning_rate": 3.023370947677901e-05, |
| "loss": 0.235, |
| "step": 1159 |
| }, |
| { |
| "epoch": 0.5861546235472461, |
| "grad_norm": 0.4671436846256256, |
| "learning_rate": 3.020292398847464e-05, |
| "loss": 0.2426, |
| "step": 1160 |
| }, |
| { |
| "epoch": 0.5866599292572006, |
| "grad_norm": 1.0623973608016968, |
| "learning_rate": 3.0172130251079757e-05, |
| "loss": 0.5149, |
| "step": 1161 |
| }, |
| { |
| "epoch": 0.5871652349671551, |
| "grad_norm": 0.6807923913002014, |
| "learning_rate": 3.014132831341696e-05, |
| "loss": 0.3281, |
| "step": 1162 |
| }, |
| { |
| "epoch": 0.5876705406771097, |
| "grad_norm": 0.3466711640357971, |
| "learning_rate": 3.0110518224321865e-05, |
| "loss": 0.1801, |
| "step": 1163 |
| }, |
| { |
| "epoch": 0.5881758463870642, |
| "grad_norm": 0.6598278284072876, |
| "learning_rate": 3.007970003264301e-05, |
| "loss": 0.3268, |
| "step": 1164 |
| }, |
| { |
| "epoch": 0.5886811520970187, |
| "grad_norm": 0.5974249243736267, |
| "learning_rate": 3.0048873787241765e-05, |
| "loss": 0.2778, |
| "step": 1165 |
| }, |
| { |
| "epoch": 0.5891864578069732, |
| "grad_norm": 0.35448309779167175, |
| "learning_rate": 3.0018039536992298e-05, |
| "loss": 0.1175, |
| "step": 1166 |
| }, |
| { |
| "epoch": 0.5896917635169278, |
| "grad_norm": 0.6994403004646301, |
| "learning_rate": 2.9987197330781435e-05, |
| "loss": 0.2628, |
| "step": 1167 |
| }, |
| { |
| "epoch": 0.5901970692268823, |
| "grad_norm": 0.5090075731277466, |
| "learning_rate": 2.9956347217508633e-05, |
| "loss": 0.2562, |
| "step": 1168 |
| }, |
| { |
| "epoch": 0.5907023749368367, |
| "grad_norm": 0.29259657859802246, |
| "learning_rate": 2.9925489246085897e-05, |
| "loss": 0.1874, |
| "step": 1169 |
| }, |
| { |
| "epoch": 0.5912076806467913, |
| "grad_norm": 0.30072706937789917, |
| "learning_rate": 2.989462346543766e-05, |
| "loss": 0.1683, |
| "step": 1170 |
| }, |
| { |
| "epoch": 0.5917129863567459, |
| "grad_norm": 0.258619099855423, |
| "learning_rate": 2.9863749924500765e-05, |
| "loss": 0.1159, |
| "step": 1171 |
| }, |
| { |
| "epoch": 0.5922182920667004, |
| "grad_norm": 0.6549366116523743, |
| "learning_rate": 2.9832868672224347e-05, |
| "loss": 0.2978, |
| "step": 1172 |
| }, |
| { |
| "epoch": 0.5927235977766548, |
| "grad_norm": 0.9457119703292847, |
| "learning_rate": 2.9801979757569774e-05, |
| "loss": 0.3657, |
| "step": 1173 |
| }, |
| { |
| "epoch": 0.5932289034866094, |
| "grad_norm": 0.40999117493629456, |
| "learning_rate": 2.9771083229510544e-05, |
| "loss": 0.2076, |
| "step": 1174 |
| }, |
| { |
| "epoch": 0.593734209196564, |
| "grad_norm": 0.629391074180603, |
| "learning_rate": 2.974017913703224e-05, |
| "loss": 0.3435, |
| "step": 1175 |
| }, |
| { |
| "epoch": 0.5942395149065185, |
| "grad_norm": 0.42947840690612793, |
| "learning_rate": 2.970926752913245e-05, |
| "loss": 0.2741, |
| "step": 1176 |
| }, |
| { |
| "epoch": 0.5947448206164729, |
| "grad_norm": 0.7551778554916382, |
| "learning_rate": 2.9678348454820648e-05, |
| "loss": 0.2764, |
| "step": 1177 |
| }, |
| { |
| "epoch": 0.5952501263264275, |
| "grad_norm": 0.4660588800907135, |
| "learning_rate": 2.9647421963118177e-05, |
| "loss": 0.1889, |
| "step": 1178 |
| }, |
| { |
| "epoch": 0.595755432036382, |
| "grad_norm": 0.5166782140731812, |
| "learning_rate": 2.9616488103058116e-05, |
| "loss": 0.3303, |
| "step": 1179 |
| }, |
| { |
| "epoch": 0.5962607377463366, |
| "grad_norm": 0.6265774965286255, |
| "learning_rate": 2.9585546923685227e-05, |
| "loss": 0.2055, |
| "step": 1180 |
| }, |
| { |
| "epoch": 0.596766043456291, |
| "grad_norm": 0.4520041346549988, |
| "learning_rate": 2.9554598474055905e-05, |
| "loss": 0.2479, |
| "step": 1181 |
| }, |
| { |
| "epoch": 0.5972713491662456, |
| "grad_norm": 0.9512003064155579, |
| "learning_rate": 2.952364280323803e-05, |
| "loss": 0.4837, |
| "step": 1182 |
| }, |
| { |
| "epoch": 0.5977766548762001, |
| "grad_norm": 0.33608609437942505, |
| "learning_rate": 2.9492679960310975e-05, |
| "loss": 0.2544, |
| "step": 1183 |
| }, |
| { |
| "epoch": 0.5982819605861546, |
| "grad_norm": 0.49464377760887146, |
| "learning_rate": 2.9461709994365446e-05, |
| "loss": 0.1771, |
| "step": 1184 |
| }, |
| { |
| "epoch": 0.5987872662961091, |
| "grad_norm": 0.7408649921417236, |
| "learning_rate": 2.9430732954503454e-05, |
| "loss": 0.3388, |
| "step": 1185 |
| }, |
| { |
| "epoch": 0.5992925720060637, |
| "grad_norm": 0.6129613518714905, |
| "learning_rate": 2.9399748889838246e-05, |
| "loss": 0.3103, |
| "step": 1186 |
| }, |
| { |
| "epoch": 0.5997978777160182, |
| "grad_norm": 0.5622842311859131, |
| "learning_rate": 2.9368757849494183e-05, |
| "loss": 0.3837, |
| "step": 1187 |
| }, |
| { |
| "epoch": 0.6003031834259727, |
| "grad_norm": 0.7855788469314575, |
| "learning_rate": 2.9337759882606684e-05, |
| "loss": 0.3053, |
| "step": 1188 |
| }, |
| { |
| "epoch": 0.6008084891359272, |
| "grad_norm": 0.424996942281723, |
| "learning_rate": 2.930675503832217e-05, |
| "loss": 0.2624, |
| "step": 1189 |
| }, |
| { |
| "epoch": 0.6013137948458818, |
| "grad_norm": 0.748625636100769, |
| "learning_rate": 2.9275743365797954e-05, |
| "loss": 0.3661, |
| "step": 1190 |
| }, |
| { |
| "epoch": 0.6018191005558363, |
| "grad_norm": 0.47824960947036743, |
| "learning_rate": 2.9244724914202176e-05, |
| "loss": 0.2256, |
| "step": 1191 |
| }, |
| { |
| "epoch": 0.6023244062657908, |
| "grad_norm": 0.5377376079559326, |
| "learning_rate": 2.9213699732713717e-05, |
| "loss": 0.2619, |
| "step": 1192 |
| }, |
| { |
| "epoch": 0.6028297119757453, |
| "grad_norm": 0.5620574951171875, |
| "learning_rate": 2.9182667870522147e-05, |
| "loss": 0.3563, |
| "step": 1193 |
| }, |
| { |
| "epoch": 0.6033350176856999, |
| "grad_norm": 0.2533213198184967, |
| "learning_rate": 2.91516293768276e-05, |
| "loss": 0.1327, |
| "step": 1194 |
| }, |
| { |
| "epoch": 0.6038403233956544, |
| "grad_norm": 0.4888068437576294, |
| "learning_rate": 2.9120584300840746e-05, |
| "loss": 0.2132, |
| "step": 1195 |
| }, |
| { |
| "epoch": 0.6043456291056089, |
| "grad_norm": 0.357085645198822, |
| "learning_rate": 2.90895326917827e-05, |
| "loss": 0.1772, |
| "step": 1196 |
| }, |
| { |
| "epoch": 0.6048509348155634, |
| "grad_norm": 0.30169978737831116, |
| "learning_rate": 2.9058474598884893e-05, |
| "loss": 0.202, |
| "step": 1197 |
| }, |
| { |
| "epoch": 0.605356240525518, |
| "grad_norm": 0.3223772644996643, |
| "learning_rate": 2.902741007138909e-05, |
| "loss": 0.1932, |
| "step": 1198 |
| }, |
| { |
| "epoch": 0.6058615462354725, |
| "grad_norm": 0.42703449726104736, |
| "learning_rate": 2.899633915854721e-05, |
| "loss": 0.1927, |
| "step": 1199 |
| }, |
| { |
| "epoch": 0.6063668519454269, |
| "grad_norm": 0.39132627844810486, |
| "learning_rate": 2.896526190962132e-05, |
| "loss": 0.2409, |
| "step": 1200 |
| }, |
| { |
| "epoch": 0.6068721576553815, |
| "grad_norm": 0.6942405104637146, |
| "learning_rate": 2.8934178373883543e-05, |
| "loss": 0.4429, |
| "step": 1201 |
| }, |
| { |
| "epoch": 0.607377463365336, |
| "grad_norm": 0.6032945513725281, |
| "learning_rate": 2.890308860061594e-05, |
| "loss": 0.4111, |
| "step": 1202 |
| }, |
| { |
| "epoch": 0.6078827690752906, |
| "grad_norm": 0.679614782333374, |
| "learning_rate": 2.8871992639110484e-05, |
| "loss": 0.3658, |
| "step": 1203 |
| }, |
| { |
| "epoch": 0.608388074785245, |
| "grad_norm": 0.714556097984314, |
| "learning_rate": 2.8840890538668952e-05, |
| "loss": 0.3826, |
| "step": 1204 |
| }, |
| { |
| "epoch": 0.6088933804951996, |
| "grad_norm": 0.5705816745758057, |
| "learning_rate": 2.8809782348602864e-05, |
| "loss": 0.3086, |
| "step": 1205 |
| }, |
| { |
| "epoch": 0.6093986862051541, |
| "grad_norm": 0.7331538200378418, |
| "learning_rate": 2.877866811823337e-05, |
| "loss": 0.4354, |
| "step": 1206 |
| }, |
| { |
| "epoch": 0.6099039919151087, |
| "grad_norm": 0.5332103967666626, |
| "learning_rate": 2.8747547896891218e-05, |
| "loss": 0.254, |
| "step": 1207 |
| }, |
| { |
| "epoch": 0.6104092976250631, |
| "grad_norm": 0.3042926490306854, |
| "learning_rate": 2.871642173391666e-05, |
| "loss": 0.2578, |
| "step": 1208 |
| }, |
| { |
| "epoch": 0.6109146033350177, |
| "grad_norm": 0.5534253716468811, |
| "learning_rate": 2.868528967865934e-05, |
| "loss": 0.3656, |
| "step": 1209 |
| }, |
| { |
| "epoch": 0.6114199090449722, |
| "grad_norm": 0.6421956419944763, |
| "learning_rate": 2.8654151780478265e-05, |
| "loss": 0.2551, |
| "step": 1210 |
| }, |
| { |
| "epoch": 0.6119252147549268, |
| "grad_norm": 0.5455820560455322, |
| "learning_rate": 2.862300808874171e-05, |
| "loss": 0.3176, |
| "step": 1211 |
| }, |
| { |
| "epoch": 0.6124305204648812, |
| "grad_norm": 0.4438186585903168, |
| "learning_rate": 2.859185865282712e-05, |
| "loss": 0.2395, |
| "step": 1212 |
| }, |
| { |
| "epoch": 0.6129358261748358, |
| "grad_norm": 0.3606109917163849, |
| "learning_rate": 2.8560703522121064e-05, |
| "loss": 0.2739, |
| "step": 1213 |
| }, |
| { |
| "epoch": 0.6134411318847903, |
| "grad_norm": 0.4127027094364166, |
| "learning_rate": 2.8529542746019115e-05, |
| "loss": 0.2952, |
| "step": 1214 |
| }, |
| { |
| "epoch": 0.6139464375947449, |
| "grad_norm": 0.5099959969520569, |
| "learning_rate": 2.8498376373925834e-05, |
| "loss": 0.4231, |
| "step": 1215 |
| }, |
| { |
| "epoch": 0.6144517433046993, |
| "grad_norm": 0.68137127161026, |
| "learning_rate": 2.8467204455254614e-05, |
| "loss": 0.2562, |
| "step": 1216 |
| }, |
| { |
| "epoch": 0.6149570490146539, |
| "grad_norm": 0.8698126673698425, |
| "learning_rate": 2.8436027039427667e-05, |
| "loss": 0.4673, |
| "step": 1217 |
| }, |
| { |
| "epoch": 0.6154623547246084, |
| "grad_norm": 0.3290408253669739, |
| "learning_rate": 2.8404844175875912e-05, |
| "loss": 0.1761, |
| "step": 1218 |
| }, |
| { |
| "epoch": 0.6159676604345629, |
| "grad_norm": 0.6963320970535278, |
| "learning_rate": 2.8373655914038904e-05, |
| "loss": 0.3319, |
| "step": 1219 |
| }, |
| { |
| "epoch": 0.6164729661445174, |
| "grad_norm": 0.32454270124435425, |
| "learning_rate": 2.8342462303364777e-05, |
| "loss": 0.1979, |
| "step": 1220 |
| }, |
| { |
| "epoch": 0.616978271854472, |
| "grad_norm": 0.40933623909950256, |
| "learning_rate": 2.831126339331011e-05, |
| "loss": 0.1951, |
| "step": 1221 |
| }, |
| { |
| "epoch": 0.6174835775644265, |
| "grad_norm": 0.665077269077301, |
| "learning_rate": 2.8280059233339913e-05, |
| "loss": 0.5259, |
| "step": 1222 |
| }, |
| { |
| "epoch": 0.617988883274381, |
| "grad_norm": 0.48233264684677124, |
| "learning_rate": 2.8248849872927507e-05, |
| "loss": 0.2227, |
| "step": 1223 |
| }, |
| { |
| "epoch": 0.6184941889843355, |
| "grad_norm": 0.3461703062057495, |
| "learning_rate": 2.8217635361554456e-05, |
| "loss": 0.1607, |
| "step": 1224 |
| }, |
| { |
| "epoch": 0.61899949469429, |
| "grad_norm": 0.4308650493621826, |
| "learning_rate": 2.8186415748710504e-05, |
| "loss": 0.2759, |
| "step": 1225 |
| }, |
| { |
| "epoch": 0.6195048004042446, |
| "grad_norm": 0.8697786331176758, |
| "learning_rate": 2.8155191083893467e-05, |
| "loss": 0.4196, |
| "step": 1226 |
| }, |
| { |
| "epoch": 0.620010106114199, |
| "grad_norm": 0.5697680115699768, |
| "learning_rate": 2.8123961416609174e-05, |
| "loss": 0.3709, |
| "step": 1227 |
| }, |
| { |
| "epoch": 0.6205154118241536, |
| "grad_norm": 0.6422898769378662, |
| "learning_rate": 2.8092726796371406e-05, |
| "loss": 0.2012, |
| "step": 1228 |
| }, |
| { |
| "epoch": 0.6210207175341081, |
| "grad_norm": 0.271230012178421, |
| "learning_rate": 2.806148727270176e-05, |
| "loss": 0.136, |
| "step": 1229 |
| }, |
| { |
| "epoch": 0.6215260232440627, |
| "grad_norm": 0.3259721100330353, |
| "learning_rate": 2.8030242895129643e-05, |
| "loss": 0.239, |
| "step": 1230 |
| }, |
| { |
| "epoch": 0.6220313289540171, |
| "grad_norm": 0.49231839179992676, |
| "learning_rate": 2.7998993713192128e-05, |
| "loss": 0.2351, |
| "step": 1231 |
| }, |
| { |
| "epoch": 0.6225366346639717, |
| "grad_norm": 0.6262175440788269, |
| "learning_rate": 2.7967739776433933e-05, |
| "loss": 0.2764, |
| "step": 1232 |
| }, |
| { |
| "epoch": 0.6230419403739262, |
| "grad_norm": 0.4242885410785675, |
| "learning_rate": 2.79364811344073e-05, |
| "loss": 0.1978, |
| "step": 1233 |
| }, |
| { |
| "epoch": 0.6235472460838808, |
| "grad_norm": 0.5172401070594788, |
| "learning_rate": 2.7905217836671916e-05, |
| "loss": 0.3884, |
| "step": 1234 |
| }, |
| { |
| "epoch": 0.6240525517938352, |
| "grad_norm": 0.5422953963279724, |
| "learning_rate": 2.787394993279488e-05, |
| "loss": 0.318, |
| "step": 1235 |
| }, |
| { |
| "epoch": 0.6245578575037898, |
| "grad_norm": 0.8409627676010132, |
| "learning_rate": 2.7842677472350577e-05, |
| "loss": 0.3753, |
| "step": 1236 |
| }, |
| { |
| "epoch": 0.6250631632137443, |
| "grad_norm": 0.9036238789558411, |
| "learning_rate": 2.7811400504920622e-05, |
| "loss": 0.5445, |
| "step": 1237 |
| }, |
| { |
| "epoch": 0.6255684689236989, |
| "grad_norm": 0.5030844807624817, |
| "learning_rate": 2.778011908009376e-05, |
| "loss": 0.2084, |
| "step": 1238 |
| }, |
| { |
| "epoch": 0.6260737746336533, |
| "grad_norm": 0.544114351272583, |
| "learning_rate": 2.7748833247465827e-05, |
| "loss": 0.3824, |
| "step": 1239 |
| }, |
| { |
| "epoch": 0.6265790803436079, |
| "grad_norm": 0.5116641521453857, |
| "learning_rate": 2.7717543056639637e-05, |
| "loss": 0.4358, |
| "step": 1240 |
| }, |
| { |
| "epoch": 0.6270843860535624, |
| "grad_norm": 0.49113696813583374, |
| "learning_rate": 2.7686248557224904e-05, |
| "loss": 0.2644, |
| "step": 1241 |
| }, |
| { |
| "epoch": 0.627589691763517, |
| "grad_norm": 0.3701680600643158, |
| "learning_rate": 2.765494979883819e-05, |
| "loss": 0.1596, |
| "step": 1242 |
| }, |
| { |
| "epoch": 0.6280949974734714, |
| "grad_norm": 0.5240901708602905, |
| "learning_rate": 2.762364683110279e-05, |
| "loss": 0.3679, |
| "step": 1243 |
| }, |
| { |
| "epoch": 0.628600303183426, |
| "grad_norm": 0.49889007210731506, |
| "learning_rate": 2.7592339703648694e-05, |
| "loss": 0.6432, |
| "step": 1244 |
| }, |
| { |
| "epoch": 0.6291056088933805, |
| "grad_norm": 0.3745577037334442, |
| "learning_rate": 2.7561028466112482e-05, |
| "loss": 0.154, |
| "step": 1245 |
| }, |
| { |
| "epoch": 0.6296109146033351, |
| "grad_norm": 0.5030989050865173, |
| "learning_rate": 2.7529713168137233e-05, |
| "loss": 0.2932, |
| "step": 1246 |
| }, |
| { |
| "epoch": 0.6301162203132895, |
| "grad_norm": 1.0743045806884766, |
| "learning_rate": 2.7498393859372484e-05, |
| "loss": 0.2168, |
| "step": 1247 |
| }, |
| { |
| "epoch": 0.6306215260232441, |
| "grad_norm": 0.46885451674461365, |
| "learning_rate": 2.746707058947412e-05, |
| "loss": 0.2967, |
| "step": 1248 |
| }, |
| { |
| "epoch": 0.6311268317331986, |
| "grad_norm": 0.3801758885383606, |
| "learning_rate": 2.743574340810431e-05, |
| "loss": 0.2834, |
| "step": 1249 |
| }, |
| { |
| "epoch": 0.631632137443153, |
| "grad_norm": 0.700106143951416, |
| "learning_rate": 2.7404412364931427e-05, |
| "loss": 0.5024, |
| "step": 1250 |
| }, |
| { |
| "epoch": 0.6321374431531076, |
| "grad_norm": 0.8579809069633484, |
| "learning_rate": 2.7373077509629957e-05, |
| "loss": 0.313, |
| "step": 1251 |
| }, |
| { |
| "epoch": 0.6326427488630622, |
| "grad_norm": 0.3703233599662781, |
| "learning_rate": 2.7341738891880452e-05, |
| "loss": 0.2095, |
| "step": 1252 |
| }, |
| { |
| "epoch": 0.6331480545730167, |
| "grad_norm": 0.2917376160621643, |
| "learning_rate": 2.73103965613694e-05, |
| "loss": 0.1726, |
| "step": 1253 |
| }, |
| { |
| "epoch": 0.6336533602829711, |
| "grad_norm": 0.5064457058906555, |
| "learning_rate": 2.7279050567789194e-05, |
| "loss": 0.2515, |
| "step": 1254 |
| }, |
| { |
| "epoch": 0.6341586659929257, |
| "grad_norm": 0.47626200318336487, |
| "learning_rate": 2.7247700960838034e-05, |
| "loss": 0.4029, |
| "step": 1255 |
| }, |
| { |
| "epoch": 0.6346639717028802, |
| "grad_norm": 0.3616740107536316, |
| "learning_rate": 2.7216347790219843e-05, |
| "loss": 0.2887, |
| "step": 1256 |
| }, |
| { |
| "epoch": 0.6351692774128348, |
| "grad_norm": 0.2952771484851837, |
| "learning_rate": 2.7184991105644192e-05, |
| "loss": 0.2067, |
| "step": 1257 |
| }, |
| { |
| "epoch": 0.6356745831227892, |
| "grad_norm": 0.2758321166038513, |
| "learning_rate": 2.715363095682623e-05, |
| "loss": 0.1615, |
| "step": 1258 |
| }, |
| { |
| "epoch": 0.6361798888327438, |
| "grad_norm": 0.5239960551261902, |
| "learning_rate": 2.7122267393486607e-05, |
| "loss": 0.3267, |
| "step": 1259 |
| }, |
| { |
| "epoch": 0.6366851945426983, |
| "grad_norm": 0.4696899652481079, |
| "learning_rate": 2.7090900465351355e-05, |
| "loss": 0.3621, |
| "step": 1260 |
| }, |
| { |
| "epoch": 0.6371905002526529, |
| "grad_norm": 0.40326637029647827, |
| "learning_rate": 2.7059530222151873e-05, |
| "loss": 0.1739, |
| "step": 1261 |
| }, |
| { |
| "epoch": 0.6376958059626073, |
| "grad_norm": 0.5829006433486938, |
| "learning_rate": 2.7028156713624814e-05, |
| "loss": 0.3423, |
| "step": 1262 |
| }, |
| { |
| "epoch": 0.6382011116725619, |
| "grad_norm": 0.3605864644050598, |
| "learning_rate": 2.6996779989511985e-05, |
| "loss": 0.1829, |
| "step": 1263 |
| }, |
| { |
| "epoch": 0.6387064173825164, |
| "grad_norm": 0.38949474692344666, |
| "learning_rate": 2.6965400099560308e-05, |
| "loss": 0.1814, |
| "step": 1264 |
| }, |
| { |
| "epoch": 0.639211723092471, |
| "grad_norm": 0.835013747215271, |
| "learning_rate": 2.6934017093521717e-05, |
| "loss": 0.5331, |
| "step": 1265 |
| }, |
| { |
| "epoch": 0.6397170288024254, |
| "grad_norm": 0.3474362790584564, |
| "learning_rate": 2.6902631021153096e-05, |
| "loss": 0.1773, |
| "step": 1266 |
| }, |
| { |
| "epoch": 0.64022233451238, |
| "grad_norm": 0.7530576586723328, |
| "learning_rate": 2.6871241932216195e-05, |
| "loss": 0.6231, |
| "step": 1267 |
| }, |
| { |
| "epoch": 0.6407276402223345, |
| "grad_norm": 0.46543627977371216, |
| "learning_rate": 2.6839849876477518e-05, |
| "loss": 0.2082, |
| "step": 1268 |
| }, |
| { |
| "epoch": 0.6412329459322891, |
| "grad_norm": 0.6920028328895569, |
| "learning_rate": 2.6808454903708315e-05, |
| "loss": 0.3675, |
| "step": 1269 |
| }, |
| { |
| "epoch": 0.6417382516422435, |
| "grad_norm": 0.639060378074646, |
| "learning_rate": 2.6777057063684406e-05, |
| "loss": 0.4416, |
| "step": 1270 |
| }, |
| { |
| "epoch": 0.6422435573521981, |
| "grad_norm": 0.8826596736907959, |
| "learning_rate": 2.674565640618621e-05, |
| "loss": 0.3633, |
| "step": 1271 |
| }, |
| { |
| "epoch": 0.6427488630621526, |
| "grad_norm": 0.6439775824546814, |
| "learning_rate": 2.6714252980998595e-05, |
| "loss": 0.3737, |
| "step": 1272 |
| }, |
| { |
| "epoch": 0.6432541687721072, |
| "grad_norm": 0.3467325270175934, |
| "learning_rate": 2.6682846837910796e-05, |
| "loss": 0.1777, |
| "step": 1273 |
| }, |
| { |
| "epoch": 0.6437594744820616, |
| "grad_norm": 1.040595293045044, |
| "learning_rate": 2.6651438026716396e-05, |
| "loss": 0.4716, |
| "step": 1274 |
| }, |
| { |
| "epoch": 0.6442647801920162, |
| "grad_norm": 0.6756473779678345, |
| "learning_rate": 2.6620026597213167e-05, |
| "loss": 0.3154, |
| "step": 1275 |
| }, |
| { |
| "epoch": 0.6447700859019707, |
| "grad_norm": 0.6702762246131897, |
| "learning_rate": 2.658861259920306e-05, |
| "loss": 0.366, |
| "step": 1276 |
| }, |
| { |
| "epoch": 0.6452753916119253, |
| "grad_norm": 0.34285977482795715, |
| "learning_rate": 2.6557196082492105e-05, |
| "loss": 0.1714, |
| "step": 1277 |
| }, |
| { |
| "epoch": 0.6457806973218797, |
| "grad_norm": 0.5991812348365784, |
| "learning_rate": 2.6525777096890286e-05, |
| "loss": 0.3714, |
| "step": 1278 |
| }, |
| { |
| "epoch": 0.6462860030318343, |
| "grad_norm": 0.39086848497390747, |
| "learning_rate": 2.6494355692211535e-05, |
| "loss": 0.1841, |
| "step": 1279 |
| }, |
| { |
| "epoch": 0.6467913087417888, |
| "grad_norm": 0.3882160186767578, |
| "learning_rate": 2.6462931918273624e-05, |
| "loss": 0.2613, |
| "step": 1280 |
| }, |
| { |
| "epoch": 0.6472966144517434, |
| "grad_norm": 0.7259517908096313, |
| "learning_rate": 2.643150582489805e-05, |
| "loss": 0.3005, |
| "step": 1281 |
| }, |
| { |
| "epoch": 0.6478019201616978, |
| "grad_norm": 0.6918402314186096, |
| "learning_rate": 2.6400077461910013e-05, |
| "loss": 0.4195, |
| "step": 1282 |
| }, |
| { |
| "epoch": 0.6483072258716523, |
| "grad_norm": 0.6692409515380859, |
| "learning_rate": 2.6368646879138304e-05, |
| "loss": 0.4622, |
| "step": 1283 |
| }, |
| { |
| "epoch": 0.6488125315816069, |
| "grad_norm": 0.6194551587104797, |
| "learning_rate": 2.633721412641524e-05, |
| "loss": 0.4571, |
| "step": 1284 |
| }, |
| { |
| "epoch": 0.6493178372915613, |
| "grad_norm": 0.4386284351348877, |
| "learning_rate": 2.6305779253576556e-05, |
| "loss": 0.1687, |
| "step": 1285 |
| }, |
| { |
| "epoch": 0.6498231430015159, |
| "grad_norm": 0.43226057291030884, |
| "learning_rate": 2.6274342310461382e-05, |
| "loss": 0.3734, |
| "step": 1286 |
| }, |
| { |
| "epoch": 0.6503284487114704, |
| "grad_norm": 0.3043833076953888, |
| "learning_rate": 2.624290334691209e-05, |
| "loss": 0.1776, |
| "step": 1287 |
| }, |
| { |
| "epoch": 0.650833754421425, |
| "grad_norm": 0.4461258053779602, |
| "learning_rate": 2.6211462412774298e-05, |
| "loss": 0.2532, |
| "step": 1288 |
| }, |
| { |
| "epoch": 0.6513390601313794, |
| "grad_norm": 0.5978369116783142, |
| "learning_rate": 2.6180019557896725e-05, |
| "loss": 0.4937, |
| "step": 1289 |
| }, |
| { |
| "epoch": 0.651844365841334, |
| "grad_norm": 0.4691097140312195, |
| "learning_rate": 2.614857483213113e-05, |
| "loss": 0.3146, |
| "step": 1290 |
| }, |
| { |
| "epoch": 0.6523496715512885, |
| "grad_norm": 0.48737433552742004, |
| "learning_rate": 2.6117128285332248e-05, |
| "loss": 0.2174, |
| "step": 1291 |
| }, |
| { |
| "epoch": 0.6528549772612431, |
| "grad_norm": 0.692764937877655, |
| "learning_rate": 2.6085679967357707e-05, |
| "loss": 0.4083, |
| "step": 1292 |
| }, |
| { |
| "epoch": 0.6533602829711975, |
| "grad_norm": 0.6832363605499268, |
| "learning_rate": 2.6054229928067923e-05, |
| "loss": 0.7104, |
| "step": 1293 |
| }, |
| { |
| "epoch": 0.6538655886811521, |
| "grad_norm": 0.2708096504211426, |
| "learning_rate": 2.6022778217326077e-05, |
| "loss": 0.1944, |
| "step": 1294 |
| }, |
| { |
| "epoch": 0.6543708943911066, |
| "grad_norm": 0.2859404385089874, |
| "learning_rate": 2.5991324884997952e-05, |
| "loss": 0.1835, |
| "step": 1295 |
| }, |
| { |
| "epoch": 0.6548762001010612, |
| "grad_norm": 0.6921172142028809, |
| "learning_rate": 2.595986998095194e-05, |
| "loss": 0.3649, |
| "step": 1296 |
| }, |
| { |
| "epoch": 0.6553815058110156, |
| "grad_norm": 0.270704984664917, |
| "learning_rate": 2.5928413555058904e-05, |
| "loss": 0.1674, |
| "step": 1297 |
| }, |
| { |
| "epoch": 0.6558868115209702, |
| "grad_norm": 0.39965078234672546, |
| "learning_rate": 2.5896955657192142e-05, |
| "loss": 0.29, |
| "step": 1298 |
| }, |
| { |
| "epoch": 0.6563921172309247, |
| "grad_norm": 0.6206263303756714, |
| "learning_rate": 2.5865496337227256e-05, |
| "loss": 0.2713, |
| "step": 1299 |
| }, |
| { |
| "epoch": 0.6568974229408793, |
| "grad_norm": 0.4320612847805023, |
| "learning_rate": 2.583403564504213e-05, |
| "loss": 0.2148, |
| "step": 1300 |
| }, |
| { |
| "epoch": 0.6574027286508337, |
| "grad_norm": 0.4375782012939453, |
| "learning_rate": 2.5802573630516806e-05, |
| "loss": 0.481, |
| "step": 1301 |
| }, |
| { |
| "epoch": 0.6579080343607883, |
| "grad_norm": 0.40101978182792664, |
| "learning_rate": 2.5771110343533428e-05, |
| "loss": 0.2242, |
| "step": 1302 |
| }, |
| { |
| "epoch": 0.6584133400707428, |
| "grad_norm": 0.7650478482246399, |
| "learning_rate": 2.5739645833976154e-05, |
| "loss": 0.337, |
| "step": 1303 |
| }, |
| { |
| "epoch": 0.6589186457806974, |
| "grad_norm": 0.6947590708732605, |
| "learning_rate": 2.5708180151731103e-05, |
| "loss": 0.4345, |
| "step": 1304 |
| }, |
| { |
| "epoch": 0.6594239514906518, |
| "grad_norm": 0.42338618636131287, |
| "learning_rate": 2.567671334668621e-05, |
| "loss": 0.2952, |
| "step": 1305 |
| }, |
| { |
| "epoch": 0.6599292572006064, |
| "grad_norm": 0.748132586479187, |
| "learning_rate": 2.5645245468731234e-05, |
| "loss": 0.8043, |
| "step": 1306 |
| }, |
| { |
| "epoch": 0.6604345629105609, |
| "grad_norm": 0.40081891417503357, |
| "learning_rate": 2.5613776567757595e-05, |
| "loss": 0.4187, |
| "step": 1307 |
| }, |
| { |
| "epoch": 0.6609398686205155, |
| "grad_norm": 0.5815805792808533, |
| "learning_rate": 2.5582306693658374e-05, |
| "loss": 0.251, |
| "step": 1308 |
| }, |
| { |
| "epoch": 0.6614451743304699, |
| "grad_norm": 0.4176938533782959, |
| "learning_rate": 2.555083589632818e-05, |
| "loss": 0.2369, |
| "step": 1309 |
| }, |
| { |
| "epoch": 0.6619504800404244, |
| "grad_norm": 0.2541157007217407, |
| "learning_rate": 2.5519364225663055e-05, |
| "loss": 0.1507, |
| "step": 1310 |
| }, |
| { |
| "epoch": 0.662455785750379, |
| "grad_norm": 0.5441336631774902, |
| "learning_rate": 2.5487891731560482e-05, |
| "loss": 0.2521, |
| "step": 1311 |
| }, |
| { |
| "epoch": 0.6629610914603336, |
| "grad_norm": 0.4862014055252075, |
| "learning_rate": 2.54564184639192e-05, |
| "loss": 0.2766, |
| "step": 1312 |
| }, |
| { |
| "epoch": 0.663466397170288, |
| "grad_norm": 0.7492696046829224, |
| "learning_rate": 2.5424944472639216e-05, |
| "loss": 0.4431, |
| "step": 1313 |
| }, |
| { |
| "epoch": 0.6639717028802425, |
| "grad_norm": 0.45778077840805054, |
| "learning_rate": 2.5393469807621645e-05, |
| "loss": 0.2535, |
| "step": 1314 |
| }, |
| { |
| "epoch": 0.6644770085901971, |
| "grad_norm": 0.8111942410469055, |
| "learning_rate": 2.5361994518768695e-05, |
| "loss": 0.3406, |
| "step": 1315 |
| }, |
| { |
| "epoch": 0.6649823143001516, |
| "grad_norm": 0.48901939392089844, |
| "learning_rate": 2.5330518655983558e-05, |
| "loss": 0.2384, |
| "step": 1316 |
| }, |
| { |
| "epoch": 0.6654876200101061, |
| "grad_norm": 0.67337566614151, |
| "learning_rate": 2.529904226917033e-05, |
| "loss": 0.4046, |
| "step": 1317 |
| }, |
| { |
| "epoch": 0.6659929257200606, |
| "grad_norm": 0.5460554957389832, |
| "learning_rate": 2.5267565408233952e-05, |
| "loss": 0.286, |
| "step": 1318 |
| }, |
| { |
| "epoch": 0.6664982314300152, |
| "grad_norm": 0.34170493483543396, |
| "learning_rate": 2.5236088123080092e-05, |
| "loss": 0.2063, |
| "step": 1319 |
| }, |
| { |
| "epoch": 0.6670035371399696, |
| "grad_norm": 0.47587305307388306, |
| "learning_rate": 2.5204610463615115e-05, |
| "loss": 0.2657, |
| "step": 1320 |
| }, |
| { |
| "epoch": 0.6675088428499242, |
| "grad_norm": 0.4068621098995209, |
| "learning_rate": 2.5173132479745975e-05, |
| "loss": 0.3323, |
| "step": 1321 |
| }, |
| { |
| "epoch": 0.6680141485598787, |
| "grad_norm": 0.4757383167743683, |
| "learning_rate": 2.514165422138012e-05, |
| "loss": 0.3589, |
| "step": 1322 |
| }, |
| { |
| "epoch": 0.6685194542698333, |
| "grad_norm": 0.3725772202014923, |
| "learning_rate": 2.5110175738425462e-05, |
| "loss": 0.1664, |
| "step": 1323 |
| }, |
| { |
| "epoch": 0.6690247599797877, |
| "grad_norm": 0.6302722692489624, |
| "learning_rate": 2.507869708079025e-05, |
| "loss": 0.4999, |
| "step": 1324 |
| }, |
| { |
| "epoch": 0.6695300656897423, |
| "grad_norm": 0.44832542538642883, |
| "learning_rate": 2.5047218298383015e-05, |
| "loss": 0.3016, |
| "step": 1325 |
| }, |
| { |
| "epoch": 0.6700353713996968, |
| "grad_norm": 0.4571584463119507, |
| "learning_rate": 2.5015739441112484e-05, |
| "loss": 0.1797, |
| "step": 1326 |
| }, |
| { |
| "epoch": 0.6705406771096514, |
| "grad_norm": 0.6637290120124817, |
| "learning_rate": 2.4984260558887522e-05, |
| "loss": 0.5544, |
| "step": 1327 |
| }, |
| { |
| "epoch": 0.6710459828196058, |
| "grad_norm": 0.337741494178772, |
| "learning_rate": 2.495278170161699e-05, |
| "loss": 0.1652, |
| "step": 1328 |
| }, |
| { |
| "epoch": 0.6715512885295604, |
| "grad_norm": 0.6732521653175354, |
| "learning_rate": 2.4921302919209763e-05, |
| "loss": 0.5967, |
| "step": 1329 |
| }, |
| { |
| "epoch": 0.6720565942395149, |
| "grad_norm": 0.8650082945823669, |
| "learning_rate": 2.4889824261574547e-05, |
| "loss": 0.4235, |
| "step": 1330 |
| }, |
| { |
| "epoch": 0.6725618999494695, |
| "grad_norm": 0.7857891321182251, |
| "learning_rate": 2.4858345778619887e-05, |
| "loss": 0.5156, |
| "step": 1331 |
| }, |
| { |
| "epoch": 0.6730672056594239, |
| "grad_norm": 0.42698702216148376, |
| "learning_rate": 2.4826867520254034e-05, |
| "loss": 0.2091, |
| "step": 1332 |
| }, |
| { |
| "epoch": 0.6735725113693785, |
| "grad_norm": 0.32361331582069397, |
| "learning_rate": 2.4795389536384887e-05, |
| "loss": 0.1365, |
| "step": 1333 |
| }, |
| { |
| "epoch": 0.674077817079333, |
| "grad_norm": 0.7567768096923828, |
| "learning_rate": 2.4763911876919917e-05, |
| "loss": 0.4576, |
| "step": 1334 |
| }, |
| { |
| "epoch": 0.6745831227892876, |
| "grad_norm": 0.2475138008594513, |
| "learning_rate": 2.4732434591766057e-05, |
| "loss": 0.1624, |
| "step": 1335 |
| }, |
| { |
| "epoch": 0.675088428499242, |
| "grad_norm": 0.8214117288589478, |
| "learning_rate": 2.4700957730829675e-05, |
| "loss": 0.3756, |
| "step": 1336 |
| }, |
| { |
| "epoch": 0.6755937342091966, |
| "grad_norm": 0.6251058578491211, |
| "learning_rate": 2.4669481344016444e-05, |
| "loss": 0.4255, |
| "step": 1337 |
| }, |
| { |
| "epoch": 0.6760990399191511, |
| "grad_norm": 0.45437926054000854, |
| "learning_rate": 2.4638005481231304e-05, |
| "loss": 0.5109, |
| "step": 1338 |
| }, |
| { |
| "epoch": 0.6766043456291057, |
| "grad_norm": 0.7901365756988525, |
| "learning_rate": 2.4606530192378358e-05, |
| "loss": 0.4083, |
| "step": 1339 |
| }, |
| { |
| "epoch": 0.6771096513390601, |
| "grad_norm": 0.47076261043548584, |
| "learning_rate": 2.4575055527360793e-05, |
| "loss": 0.33, |
| "step": 1340 |
| }, |
| { |
| "epoch": 0.6776149570490146, |
| "grad_norm": 0.7028003334999084, |
| "learning_rate": 2.45435815360808e-05, |
| "loss": 0.4038, |
| "step": 1341 |
| }, |
| { |
| "epoch": 0.6781202627589692, |
| "grad_norm": 0.3465520441532135, |
| "learning_rate": 2.4512108268439524e-05, |
| "loss": 0.2881, |
| "step": 1342 |
| }, |
| { |
| "epoch": 0.6786255684689237, |
| "grad_norm": 0.6936810612678528, |
| "learning_rate": 2.4480635774336947e-05, |
| "loss": 0.4991, |
| "step": 1343 |
| }, |
| { |
| "epoch": 0.6791308741788782, |
| "grad_norm": 0.264642596244812, |
| "learning_rate": 2.4449164103671833e-05, |
| "loss": 0.1572, |
| "step": 1344 |
| }, |
| { |
| "epoch": 0.6796361798888327, |
| "grad_norm": 0.6691703200340271, |
| "learning_rate": 2.441769330634163e-05, |
| "loss": 0.4967, |
| "step": 1345 |
| }, |
| { |
| "epoch": 0.6801414855987873, |
| "grad_norm": 0.8646639585494995, |
| "learning_rate": 2.4386223432242407e-05, |
| "loss": 0.3253, |
| "step": 1346 |
| }, |
| { |
| "epoch": 0.6806467913087418, |
| "grad_norm": 0.618078887462616, |
| "learning_rate": 2.435475453126877e-05, |
| "loss": 0.1929, |
| "step": 1347 |
| }, |
| { |
| "epoch": 0.6811520970186963, |
| "grad_norm": 0.8526009917259216, |
| "learning_rate": 2.432328665331379e-05, |
| "loss": 0.4779, |
| "step": 1348 |
| }, |
| { |
| "epoch": 0.6816574027286508, |
| "grad_norm": 0.8699708580970764, |
| "learning_rate": 2.4291819848268906e-05, |
| "loss": 0.3817, |
| "step": 1349 |
| }, |
| { |
| "epoch": 0.6821627084386054, |
| "grad_norm": 0.4946329891681671, |
| "learning_rate": 2.4260354166023848e-05, |
| "loss": 0.2458, |
| "step": 1350 |
| }, |
| { |
| "epoch": 0.6826680141485599, |
| "grad_norm": 0.5863744020462036, |
| "learning_rate": 2.4228889656466578e-05, |
| "loss": 0.5891, |
| "step": 1351 |
| }, |
| { |
| "epoch": 0.6831733198585144, |
| "grad_norm": 0.5861146450042725, |
| "learning_rate": 2.41974263694832e-05, |
| "loss": 0.3156, |
| "step": 1352 |
| }, |
| { |
| "epoch": 0.6836786255684689, |
| "grad_norm": 0.590474009513855, |
| "learning_rate": 2.416596435495787e-05, |
| "loss": 0.3012, |
| "step": 1353 |
| }, |
| { |
| "epoch": 0.6841839312784235, |
| "grad_norm": 0.4421933591365814, |
| "learning_rate": 2.4134503662772753e-05, |
| "loss": 0.3528, |
| "step": 1354 |
| }, |
| { |
| "epoch": 0.6846892369883779, |
| "grad_norm": 0.3189122974872589, |
| "learning_rate": 2.4103044342807867e-05, |
| "loss": 0.1986, |
| "step": 1355 |
| }, |
| { |
| "epoch": 0.6851945426983325, |
| "grad_norm": 0.6435018181800842, |
| "learning_rate": 2.4071586444941098e-05, |
| "loss": 0.3458, |
| "step": 1356 |
| }, |
| { |
| "epoch": 0.685699848408287, |
| "grad_norm": 0.8088643550872803, |
| "learning_rate": 2.404013001904807e-05, |
| "loss": 0.5646, |
| "step": 1357 |
| }, |
| { |
| "epoch": 0.6862051541182416, |
| "grad_norm": 0.4556143879890442, |
| "learning_rate": 2.4008675115002054e-05, |
| "loss": 0.3147, |
| "step": 1358 |
| }, |
| { |
| "epoch": 0.686710459828196, |
| "grad_norm": 0.4295537769794464, |
| "learning_rate": 2.3977221782673933e-05, |
| "loss": 0.2078, |
| "step": 1359 |
| }, |
| { |
| "epoch": 0.6872157655381506, |
| "grad_norm": 0.5436996817588806, |
| "learning_rate": 2.394577007193208e-05, |
| "loss": 0.3047, |
| "step": 1360 |
| }, |
| { |
| "epoch": 0.6877210712481051, |
| "grad_norm": 0.6986174583435059, |
| "learning_rate": 2.39143200326423e-05, |
| "loss": 0.4117, |
| "step": 1361 |
| }, |
| { |
| "epoch": 0.6882263769580597, |
| "grad_norm": 0.5140484571456909, |
| "learning_rate": 2.3882871714667754e-05, |
| "loss": 0.2067, |
| "step": 1362 |
| }, |
| { |
| "epoch": 0.6887316826680141, |
| "grad_norm": 0.9210074543952942, |
| "learning_rate": 2.3851425167868874e-05, |
| "loss": 0.4738, |
| "step": 1363 |
| }, |
| { |
| "epoch": 0.6892369883779687, |
| "grad_norm": 0.5711868405342102, |
| "learning_rate": 2.3819980442103288e-05, |
| "loss": 0.2715, |
| "step": 1364 |
| }, |
| { |
| "epoch": 0.6897422940879232, |
| "grad_norm": 0.691596508026123, |
| "learning_rate": 2.3788537587225705e-05, |
| "loss": 0.4291, |
| "step": 1365 |
| }, |
| { |
| "epoch": 0.6902475997978778, |
| "grad_norm": 0.6653650999069214, |
| "learning_rate": 2.3757096653087914e-05, |
| "loss": 0.2889, |
| "step": 1366 |
| }, |
| { |
| "epoch": 0.6907529055078322, |
| "grad_norm": 0.5367448329925537, |
| "learning_rate": 2.3725657689538627e-05, |
| "loss": 0.3765, |
| "step": 1367 |
| }, |
| { |
| "epoch": 0.6912582112177867, |
| "grad_norm": 0.4669603705406189, |
| "learning_rate": 2.3694220746423447e-05, |
| "loss": 0.2137, |
| "step": 1368 |
| }, |
| { |
| "epoch": 0.6917635169277413, |
| "grad_norm": 0.5713473558425903, |
| "learning_rate": 2.3662785873584773e-05, |
| "loss": 0.3873, |
| "step": 1369 |
| }, |
| { |
| "epoch": 0.6922688226376958, |
| "grad_norm": 0.444577693939209, |
| "learning_rate": 2.36313531208617e-05, |
| "loss": 0.257, |
| "step": 1370 |
| }, |
| { |
| "epoch": 0.6927741283476503, |
| "grad_norm": 0.44812366366386414, |
| "learning_rate": 2.3599922538089993e-05, |
| "loss": 0.2485, |
| "step": 1371 |
| }, |
| { |
| "epoch": 0.6932794340576048, |
| "grad_norm": 0.39372605085372925, |
| "learning_rate": 2.3568494175101958e-05, |
| "loss": 0.2552, |
| "step": 1372 |
| }, |
| { |
| "epoch": 0.6937847397675594, |
| "grad_norm": 0.8668807744979858, |
| "learning_rate": 2.353706808172638e-05, |
| "loss": 0.4462, |
| "step": 1373 |
| }, |
| { |
| "epoch": 0.6942900454775139, |
| "grad_norm": 0.37225598096847534, |
| "learning_rate": 2.3505644307788467e-05, |
| "loss": 0.2313, |
| "step": 1374 |
| }, |
| { |
| "epoch": 0.6947953511874684, |
| "grad_norm": 0.2703179717063904, |
| "learning_rate": 2.3474222903109723e-05, |
| "loss": 0.1954, |
| "step": 1375 |
| }, |
| { |
| "epoch": 0.6953006568974229, |
| "grad_norm": 0.46796905994415283, |
| "learning_rate": 2.34428039175079e-05, |
| "loss": 0.2335, |
| "step": 1376 |
| }, |
| { |
| "epoch": 0.6958059626073775, |
| "grad_norm": 0.4360145926475525, |
| "learning_rate": 2.3411387400796934e-05, |
| "loss": 0.2707, |
| "step": 1377 |
| }, |
| { |
| "epoch": 0.696311268317332, |
| "grad_norm": 0.4478427767753601, |
| "learning_rate": 2.3379973402786832e-05, |
| "loss": 0.2095, |
| "step": 1378 |
| }, |
| { |
| "epoch": 0.6968165740272865, |
| "grad_norm": 0.544204831123352, |
| "learning_rate": 2.3348561973283613e-05, |
| "loss": 0.2305, |
| "step": 1379 |
| }, |
| { |
| "epoch": 0.697321879737241, |
| "grad_norm": 0.7342060208320618, |
| "learning_rate": 2.3317153162089206e-05, |
| "loss": 0.3866, |
| "step": 1380 |
| }, |
| { |
| "epoch": 0.6978271854471956, |
| "grad_norm": 0.33234989643096924, |
| "learning_rate": 2.328574701900141e-05, |
| "loss": 0.1789, |
| "step": 1381 |
| }, |
| { |
| "epoch": 0.6983324911571501, |
| "grad_norm": 0.541576623916626, |
| "learning_rate": 2.325434359381379e-05, |
| "loss": 0.2606, |
| "step": 1382 |
| }, |
| { |
| "epoch": 0.6988377968671046, |
| "grad_norm": 0.5520817041397095, |
| "learning_rate": 2.3222942936315606e-05, |
| "loss": 0.2925, |
| "step": 1383 |
| }, |
| { |
| "epoch": 0.6993431025770591, |
| "grad_norm": 0.5592846870422363, |
| "learning_rate": 2.31915450962917e-05, |
| "loss": 0.2995, |
| "step": 1384 |
| }, |
| { |
| "epoch": 0.6998484082870137, |
| "grad_norm": 0.18375256657600403, |
| "learning_rate": 2.3160150123522485e-05, |
| "loss": 0.0958, |
| "step": 1385 |
| }, |
| { |
| "epoch": 0.7003537139969682, |
| "grad_norm": 0.5363619923591614, |
| "learning_rate": 2.3128758067783808e-05, |
| "loss": 0.2511, |
| "step": 1386 |
| }, |
| { |
| "epoch": 0.7008590197069227, |
| "grad_norm": 0.4795052111148834, |
| "learning_rate": 2.30973689788469e-05, |
| "loss": 0.2263, |
| "step": 1387 |
| }, |
| { |
| "epoch": 0.7013643254168772, |
| "grad_norm": 0.6192831993103027, |
| "learning_rate": 2.306598290647829e-05, |
| "loss": 0.3247, |
| "step": 1388 |
| }, |
| { |
| "epoch": 0.7018696311268318, |
| "grad_norm": 0.4603809416294098, |
| "learning_rate": 2.30345999004397e-05, |
| "loss": 0.2794, |
| "step": 1389 |
| }, |
| { |
| "epoch": 0.7023749368367862, |
| "grad_norm": 0.5347074270248413, |
| "learning_rate": 2.300322001048802e-05, |
| "loss": 0.3136, |
| "step": 1390 |
| }, |
| { |
| "epoch": 0.7028802425467408, |
| "grad_norm": 0.6828907132148743, |
| "learning_rate": 2.2971843286375192e-05, |
| "loss": 0.2847, |
| "step": 1391 |
| }, |
| { |
| "epoch": 0.7033855482566953, |
| "grad_norm": 0.6386706233024597, |
| "learning_rate": 2.2940469777848123e-05, |
| "loss": 0.4388, |
| "step": 1392 |
| }, |
| { |
| "epoch": 0.7038908539666499, |
| "grad_norm": 0.4837972819805145, |
| "learning_rate": 2.2909099534648658e-05, |
| "loss": 0.2637, |
| "step": 1393 |
| }, |
| { |
| "epoch": 0.7043961596766043, |
| "grad_norm": 0.4311538338661194, |
| "learning_rate": 2.2877732606513406e-05, |
| "loss": 0.2201, |
| "step": 1394 |
| }, |
| { |
| "epoch": 0.7049014653865588, |
| "grad_norm": 0.7682383060455322, |
| "learning_rate": 2.2846369043173775e-05, |
| "loss": 0.3609, |
| "step": 1395 |
| }, |
| { |
| "epoch": 0.7054067710965134, |
| "grad_norm": 0.4595930576324463, |
| "learning_rate": 2.281500889435581e-05, |
| "loss": 0.3006, |
| "step": 1396 |
| }, |
| { |
| "epoch": 0.705912076806468, |
| "grad_norm": 0.45893973112106323, |
| "learning_rate": 2.278365220978016e-05, |
| "loss": 0.2403, |
| "step": 1397 |
| }, |
| { |
| "epoch": 0.7064173825164224, |
| "grad_norm": 0.24530524015426636, |
| "learning_rate": 2.2752299039161975e-05, |
| "loss": 0.1777, |
| "step": 1398 |
| }, |
| { |
| "epoch": 0.7069226882263769, |
| "grad_norm": 0.6683110594749451, |
| "learning_rate": 2.272094943221081e-05, |
| "loss": 0.3115, |
| "step": 1399 |
| }, |
| { |
| "epoch": 0.7074279939363315, |
| "grad_norm": 0.5461668372154236, |
| "learning_rate": 2.2689603438630606e-05, |
| "loss": 0.3772, |
| "step": 1400 |
| }, |
| { |
| "epoch": 0.707933299646286, |
| "grad_norm": 0.2557353079319, |
| "learning_rate": 2.2658261108119554e-05, |
| "loss": 0.1629, |
| "step": 1401 |
| }, |
| { |
| "epoch": 0.7084386053562405, |
| "grad_norm": 0.43094396591186523, |
| "learning_rate": 2.262692249037004e-05, |
| "loss": 0.1927, |
| "step": 1402 |
| }, |
| { |
| "epoch": 0.708943911066195, |
| "grad_norm": 0.43190810084342957, |
| "learning_rate": 2.259558763506858e-05, |
| "loss": 0.3081, |
| "step": 1403 |
| }, |
| { |
| "epoch": 0.7094492167761496, |
| "grad_norm": 0.32105064392089844, |
| "learning_rate": 2.2564256591895693e-05, |
| "loss": 0.1161, |
| "step": 1404 |
| }, |
| { |
| "epoch": 0.7099545224861041, |
| "grad_norm": 0.45100855827331543, |
| "learning_rate": 2.2532929410525887e-05, |
| "loss": 0.2658, |
| "step": 1405 |
| }, |
| { |
| "epoch": 0.7104598281960586, |
| "grad_norm": 0.41769468784332275, |
| "learning_rate": 2.2501606140627518e-05, |
| "loss": 0.3101, |
| "step": 1406 |
| }, |
| { |
| "epoch": 0.7109651339060131, |
| "grad_norm": 0.5544657111167908, |
| "learning_rate": 2.2470286831862766e-05, |
| "loss": 0.38, |
| "step": 1407 |
| }, |
| { |
| "epoch": 0.7114704396159677, |
| "grad_norm": 0.45244771242141724, |
| "learning_rate": 2.2438971533887527e-05, |
| "loss": 0.3448, |
| "step": 1408 |
| }, |
| { |
| "epoch": 0.7119757453259222, |
| "grad_norm": 0.3420441150665283, |
| "learning_rate": 2.240766029635131e-05, |
| "loss": 0.2201, |
| "step": 1409 |
| }, |
| { |
| "epoch": 0.7124810510358767, |
| "grad_norm": 0.6040711998939514, |
| "learning_rate": 2.2376353168897214e-05, |
| "loss": 0.4204, |
| "step": 1410 |
| }, |
| { |
| "epoch": 0.7129863567458312, |
| "grad_norm": 0.6188685894012451, |
| "learning_rate": 2.2345050201161814e-05, |
| "loss": 0.2601, |
| "step": 1411 |
| }, |
| { |
| "epoch": 0.7134916624557858, |
| "grad_norm": 0.4009005129337311, |
| "learning_rate": 2.2313751442775098e-05, |
| "loss": 0.2132, |
| "step": 1412 |
| }, |
| { |
| "epoch": 0.7139969681657403, |
| "grad_norm": 1.1570433378219604, |
| "learning_rate": 2.2282456943360375e-05, |
| "loss": 0.7633, |
| "step": 1413 |
| }, |
| { |
| "epoch": 0.7145022738756948, |
| "grad_norm": 0.5114752054214478, |
| "learning_rate": 2.225116675253418e-05, |
| "loss": 0.2895, |
| "step": 1414 |
| }, |
| { |
| "epoch": 0.7150075795856493, |
| "grad_norm": 0.3823153078556061, |
| "learning_rate": 2.2219880919906244e-05, |
| "loss": 0.2226, |
| "step": 1415 |
| }, |
| { |
| "epoch": 0.7155128852956039, |
| "grad_norm": 0.41869547963142395, |
| "learning_rate": 2.2188599495079384e-05, |
| "loss": 0.2079, |
| "step": 1416 |
| }, |
| { |
| "epoch": 0.7160181910055584, |
| "grad_norm": 0.43806976079940796, |
| "learning_rate": 2.2157322527649422e-05, |
| "loss": 0.1898, |
| "step": 1417 |
| }, |
| { |
| "epoch": 0.7165234967155129, |
| "grad_norm": 0.8781391382217407, |
| "learning_rate": 2.2126050067205127e-05, |
| "loss": 0.3187, |
| "step": 1418 |
| }, |
| { |
| "epoch": 0.7170288024254674, |
| "grad_norm": 0.5207377672195435, |
| "learning_rate": 2.209478216332809e-05, |
| "loss": 0.2541, |
| "step": 1419 |
| }, |
| { |
| "epoch": 0.717534108135422, |
| "grad_norm": 0.7016043663024902, |
| "learning_rate": 2.206351886559271e-05, |
| "loss": 0.2743, |
| "step": 1420 |
| }, |
| { |
| "epoch": 0.7180394138453765, |
| "grad_norm": 0.24266977608203888, |
| "learning_rate": 2.2032260223566072e-05, |
| "loss": 0.1328, |
| "step": 1421 |
| }, |
| { |
| "epoch": 0.718544719555331, |
| "grad_norm": 0.9864704608917236, |
| "learning_rate": 2.2001006286807875e-05, |
| "loss": 0.3871, |
| "step": 1422 |
| }, |
| { |
| "epoch": 0.7190500252652855, |
| "grad_norm": 0.3588007092475891, |
| "learning_rate": 2.1969757104870367e-05, |
| "loss": 0.143, |
| "step": 1423 |
| }, |
| { |
| "epoch": 0.71955533097524, |
| "grad_norm": 0.716530978679657, |
| "learning_rate": 2.1938512727298246e-05, |
| "loss": 0.3186, |
| "step": 1424 |
| }, |
| { |
| "epoch": 0.7200606366851945, |
| "grad_norm": 0.5047246813774109, |
| "learning_rate": 2.19072732036286e-05, |
| "loss": 0.1932, |
| "step": 1425 |
| }, |
| { |
| "epoch": 0.720565942395149, |
| "grad_norm": 0.6216627955436707, |
| "learning_rate": 2.1876038583390825e-05, |
| "loss": 0.4384, |
| "step": 1426 |
| }, |
| { |
| "epoch": 0.7210712481051036, |
| "grad_norm": 0.5591406226158142, |
| "learning_rate": 2.1844808916106536e-05, |
| "loss": 0.2485, |
| "step": 1427 |
| }, |
| { |
| "epoch": 0.7215765538150581, |
| "grad_norm": 0.6369313597679138, |
| "learning_rate": 2.1813584251289505e-05, |
| "loss": 0.4893, |
| "step": 1428 |
| }, |
| { |
| "epoch": 0.7220818595250126, |
| "grad_norm": 0.8026990294456482, |
| "learning_rate": 2.1782364638445546e-05, |
| "loss": 0.6091, |
| "step": 1429 |
| }, |
| { |
| "epoch": 0.7225871652349671, |
| "grad_norm": 0.45339179039001465, |
| "learning_rate": 2.17511501270725e-05, |
| "loss": 0.3557, |
| "step": 1430 |
| }, |
| { |
| "epoch": 0.7230924709449217, |
| "grad_norm": 0.5372389554977417, |
| "learning_rate": 2.171994076666009e-05, |
| "loss": 0.5554, |
| "step": 1431 |
| }, |
| { |
| "epoch": 0.7235977766548762, |
| "grad_norm": 0.45178845524787903, |
| "learning_rate": 2.168873660668989e-05, |
| "loss": 0.2379, |
| "step": 1432 |
| }, |
| { |
| "epoch": 0.7241030823648307, |
| "grad_norm": 0.4662405550479889, |
| "learning_rate": 2.1657537696635226e-05, |
| "loss": 0.2246, |
| "step": 1433 |
| }, |
| { |
| "epoch": 0.7246083880747852, |
| "grad_norm": 0.7474414706230164, |
| "learning_rate": 2.16263440859611e-05, |
| "loss": 0.4195, |
| "step": 1434 |
| }, |
| { |
| "epoch": 0.7251136937847398, |
| "grad_norm": 0.6440469622612, |
| "learning_rate": 2.1595155824124097e-05, |
| "loss": 0.3275, |
| "step": 1435 |
| }, |
| { |
| "epoch": 0.7256189994946943, |
| "grad_norm": 0.40439724922180176, |
| "learning_rate": 2.1563972960572343e-05, |
| "loss": 0.1541, |
| "step": 1436 |
| }, |
| { |
| "epoch": 0.7261243052046488, |
| "grad_norm": 0.7414180636405945, |
| "learning_rate": 2.1532795544745392e-05, |
| "loss": 0.3458, |
| "step": 1437 |
| }, |
| { |
| "epoch": 0.7266296109146033, |
| "grad_norm": 0.5826343894004822, |
| "learning_rate": 2.1501623626074175e-05, |
| "loss": 0.2742, |
| "step": 1438 |
| }, |
| { |
| "epoch": 0.7271349166245579, |
| "grad_norm": 0.38583678007125854, |
| "learning_rate": 2.1470457253980887e-05, |
| "loss": 0.1891, |
| "step": 1439 |
| }, |
| { |
| "epoch": 0.7276402223345124, |
| "grad_norm": 0.4571583867073059, |
| "learning_rate": 2.143929647787894e-05, |
| "loss": 0.2069, |
| "step": 1440 |
| }, |
| { |
| "epoch": 0.7281455280444669, |
| "grad_norm": 0.37782159447669983, |
| "learning_rate": 2.1408141347172877e-05, |
| "loss": 0.2598, |
| "step": 1441 |
| }, |
| { |
| "epoch": 0.7286508337544214, |
| "grad_norm": 1.0491501092910767, |
| "learning_rate": 2.13769919112583e-05, |
| "loss": 0.4662, |
| "step": 1442 |
| }, |
| { |
| "epoch": 0.729156139464376, |
| "grad_norm": 0.4999569058418274, |
| "learning_rate": 2.134584821952174e-05, |
| "loss": 0.2569, |
| "step": 1443 |
| }, |
| { |
| "epoch": 0.7296614451743305, |
| "grad_norm": 0.422539621591568, |
| "learning_rate": 2.131471032134067e-05, |
| "loss": 0.2263, |
| "step": 1444 |
| }, |
| { |
| "epoch": 0.730166750884285, |
| "grad_norm": 0.7593914866447449, |
| "learning_rate": 2.128357826608335e-05, |
| "loss": 0.3716, |
| "step": 1445 |
| }, |
| { |
| "epoch": 0.7306720565942395, |
| "grad_norm": 0.6366856098175049, |
| "learning_rate": 2.125245210310878e-05, |
| "loss": 0.2571, |
| "step": 1446 |
| }, |
| { |
| "epoch": 0.7311773623041941, |
| "grad_norm": 0.46671852469444275, |
| "learning_rate": 2.122133188176664e-05, |
| "loss": 0.4696, |
| "step": 1447 |
| }, |
| { |
| "epoch": 0.7316826680141486, |
| "grad_norm": 0.37286749482154846, |
| "learning_rate": 2.1190217651397145e-05, |
| "loss": 0.2091, |
| "step": 1448 |
| }, |
| { |
| "epoch": 0.732187973724103, |
| "grad_norm": 0.8244792819023132, |
| "learning_rate": 2.115910946133105e-05, |
| "loss": 0.2842, |
| "step": 1449 |
| }, |
| { |
| "epoch": 0.7326932794340576, |
| "grad_norm": 0.9506634473800659, |
| "learning_rate": 2.112800736088952e-05, |
| "loss": 0.365, |
| "step": 1450 |
| }, |
| { |
| "epoch": 0.7331985851440121, |
| "grad_norm": 0.9866954684257507, |
| "learning_rate": 2.1096911399384066e-05, |
| "loss": 0.3483, |
| "step": 1451 |
| }, |
| { |
| "epoch": 0.7337038908539667, |
| "grad_norm": 0.4005601406097412, |
| "learning_rate": 2.1065821626116466e-05, |
| "loss": 0.2033, |
| "step": 1452 |
| }, |
| { |
| "epoch": 0.7342091965639211, |
| "grad_norm": 0.43610894680023193, |
| "learning_rate": 2.1034738090378688e-05, |
| "loss": 0.2345, |
| "step": 1453 |
| }, |
| { |
| "epoch": 0.7347145022738757, |
| "grad_norm": 0.2879337668418884, |
| "learning_rate": 2.10036608414528e-05, |
| "loss": 0.1507, |
| "step": 1454 |
| }, |
| { |
| "epoch": 0.7352198079838302, |
| "grad_norm": 0.8920745849609375, |
| "learning_rate": 2.0972589928610917e-05, |
| "loss": 0.3903, |
| "step": 1455 |
| }, |
| { |
| "epoch": 0.7357251136937848, |
| "grad_norm": 0.8228932619094849, |
| "learning_rate": 2.0941525401115106e-05, |
| "loss": 0.3689, |
| "step": 1456 |
| }, |
| { |
| "epoch": 0.7362304194037392, |
| "grad_norm": 0.3942200839519501, |
| "learning_rate": 2.0910467308217314e-05, |
| "loss": 0.1604, |
| "step": 1457 |
| }, |
| { |
| "epoch": 0.7367357251136938, |
| "grad_norm": 0.47732532024383545, |
| "learning_rate": 2.0879415699159256e-05, |
| "loss": 0.3561, |
| "step": 1458 |
| }, |
| { |
| "epoch": 0.7372410308236483, |
| "grad_norm": 0.6839754581451416, |
| "learning_rate": 2.0848370623172405e-05, |
| "loss": 0.3294, |
| "step": 1459 |
| }, |
| { |
| "epoch": 0.7377463365336028, |
| "grad_norm": 0.35987260937690735, |
| "learning_rate": 2.081733212947786e-05, |
| "loss": 0.2124, |
| "step": 1460 |
| }, |
| { |
| "epoch": 0.7382516422435573, |
| "grad_norm": 0.5114133954048157, |
| "learning_rate": 2.078630026728628e-05, |
| "loss": 0.3365, |
| "step": 1461 |
| }, |
| { |
| "epoch": 0.7387569479535119, |
| "grad_norm": 0.6798128485679626, |
| "learning_rate": 2.0755275085797833e-05, |
| "loss": 0.3135, |
| "step": 1462 |
| }, |
| { |
| "epoch": 0.7392622536634664, |
| "grad_norm": 0.7099087238311768, |
| "learning_rate": 2.0724256634202052e-05, |
| "loss": 0.2584, |
| "step": 1463 |
| }, |
| { |
| "epoch": 0.7397675593734209, |
| "grad_norm": 0.6906381845474243, |
| "learning_rate": 2.069324496167783e-05, |
| "loss": 0.2981, |
| "step": 1464 |
| }, |
| { |
| "epoch": 0.7402728650833754, |
| "grad_norm": 0.2697136402130127, |
| "learning_rate": 2.0662240117393318e-05, |
| "loss": 0.174, |
| "step": 1465 |
| }, |
| { |
| "epoch": 0.74077817079333, |
| "grad_norm": 0.5975183248519897, |
| "learning_rate": 2.0631242150505827e-05, |
| "loss": 0.3088, |
| "step": 1466 |
| }, |
| { |
| "epoch": 0.7412834765032845, |
| "grad_norm": 0.4744568467140198, |
| "learning_rate": 2.060025111016176e-05, |
| "loss": 0.1892, |
| "step": 1467 |
| }, |
| { |
| "epoch": 0.741788782213239, |
| "grad_norm": 0.7941866517066956, |
| "learning_rate": 2.056926704549655e-05, |
| "loss": 0.3482, |
| "step": 1468 |
| }, |
| { |
| "epoch": 0.7422940879231935, |
| "grad_norm": 0.827198326587677, |
| "learning_rate": 2.0538290005634564e-05, |
| "loss": 0.3751, |
| "step": 1469 |
| }, |
| { |
| "epoch": 0.7427993936331481, |
| "grad_norm": 0.38413000106811523, |
| "learning_rate": 2.050732003968903e-05, |
| "loss": 0.3045, |
| "step": 1470 |
| }, |
| { |
| "epoch": 0.7433046993431026, |
| "grad_norm": 0.4320742189884186, |
| "learning_rate": 2.0476357196761967e-05, |
| "loss": 0.2605, |
| "step": 1471 |
| }, |
| { |
| "epoch": 0.743810005053057, |
| "grad_norm": 0.6644576191902161, |
| "learning_rate": 2.0445401525944107e-05, |
| "loss": 0.2997, |
| "step": 1472 |
| }, |
| { |
| "epoch": 0.7443153107630116, |
| "grad_norm": 0.6894829869270325, |
| "learning_rate": 2.0414453076314776e-05, |
| "loss": 0.4918, |
| "step": 1473 |
| }, |
| { |
| "epoch": 0.7448206164729662, |
| "grad_norm": 0.35164880752563477, |
| "learning_rate": 2.0383511896941893e-05, |
| "loss": 0.3007, |
| "step": 1474 |
| }, |
| { |
| "epoch": 0.7453259221829207, |
| "grad_norm": 0.6786766648292542, |
| "learning_rate": 2.0352578036881826e-05, |
| "loss": 0.4865, |
| "step": 1475 |
| }, |
| { |
| "epoch": 0.7458312278928751, |
| "grad_norm": 0.4968150556087494, |
| "learning_rate": 2.0321651545179348e-05, |
| "loss": 0.2479, |
| "step": 1476 |
| }, |
| { |
| "epoch": 0.7463365336028297, |
| "grad_norm": 0.8302887678146362, |
| "learning_rate": 2.029073247086756e-05, |
| "loss": 0.2688, |
| "step": 1477 |
| }, |
| { |
| "epoch": 0.7468418393127843, |
| "grad_norm": 0.44390469789505005, |
| "learning_rate": 2.0259820862967765e-05, |
| "loss": 0.3654, |
| "step": 1478 |
| }, |
| { |
| "epoch": 0.7473471450227388, |
| "grad_norm": 0.41298776865005493, |
| "learning_rate": 2.0228916770489466e-05, |
| "loss": 0.2194, |
| "step": 1479 |
| }, |
| { |
| "epoch": 0.7478524507326932, |
| "grad_norm": 0.46970030665397644, |
| "learning_rate": 2.019802024243023e-05, |
| "loss": 0.2061, |
| "step": 1480 |
| }, |
| { |
| "epoch": 0.7483577564426478, |
| "grad_norm": 0.510265052318573, |
| "learning_rate": 2.0167131327775652e-05, |
| "loss": 0.237, |
| "step": 1481 |
| }, |
| { |
| "epoch": 0.7488630621526023, |
| "grad_norm": 0.2790643870830536, |
| "learning_rate": 2.013625007549924e-05, |
| "loss": 0.2037, |
| "step": 1482 |
| }, |
| { |
| "epoch": 0.7493683678625569, |
| "grad_norm": 0.5583669543266296, |
| "learning_rate": 2.0105376534562344e-05, |
| "loss": 0.1887, |
| "step": 1483 |
| }, |
| { |
| "epoch": 0.7498736735725113, |
| "grad_norm": 0.4619552493095398, |
| "learning_rate": 2.0074510753914112e-05, |
| "loss": 0.1662, |
| "step": 1484 |
| }, |
| { |
| "epoch": 0.7503789792824659, |
| "grad_norm": 0.4680952727794647, |
| "learning_rate": 2.004365278249137e-05, |
| "loss": 0.3742, |
| "step": 1485 |
| }, |
| { |
| "epoch": 0.7508842849924204, |
| "grad_norm": 0.524354875087738, |
| "learning_rate": 2.0012802669218568e-05, |
| "loss": 0.2715, |
| "step": 1486 |
| }, |
| { |
| "epoch": 0.751389590702375, |
| "grad_norm": 0.21866445243358612, |
| "learning_rate": 1.998196046300771e-05, |
| "loss": 0.1283, |
| "step": 1487 |
| }, |
| { |
| "epoch": 0.7518948964123294, |
| "grad_norm": 0.3985232412815094, |
| "learning_rate": 1.9951126212758238e-05, |
| "loss": 0.2006, |
| "step": 1488 |
| }, |
| { |
| "epoch": 0.752400202122284, |
| "grad_norm": 0.4596607983112335, |
| "learning_rate": 1.9920299967356997e-05, |
| "loss": 0.4051, |
| "step": 1489 |
| }, |
| { |
| "epoch": 0.7529055078322385, |
| "grad_norm": 0.3508613705635071, |
| "learning_rate": 1.9889481775678134e-05, |
| "loss": 0.2223, |
| "step": 1490 |
| }, |
| { |
| "epoch": 0.7534108135421931, |
| "grad_norm": 0.2925485372543335, |
| "learning_rate": 1.9858671686583037e-05, |
| "loss": 0.1573, |
| "step": 1491 |
| }, |
| { |
| "epoch": 0.7539161192521475, |
| "grad_norm": 0.6288982033729553, |
| "learning_rate": 1.9827869748920252e-05, |
| "loss": 0.4522, |
| "step": 1492 |
| }, |
| { |
| "epoch": 0.7544214249621021, |
| "grad_norm": 0.6666999459266663, |
| "learning_rate": 1.9797076011525365e-05, |
| "loss": 0.5193, |
| "step": 1493 |
| }, |
| { |
| "epoch": 0.7549267306720566, |
| "grad_norm": 0.5883360505104065, |
| "learning_rate": 1.9766290523221e-05, |
| "loss": 0.3678, |
| "step": 1494 |
| }, |
| { |
| "epoch": 0.7554320363820111, |
| "grad_norm": 1.0901826620101929, |
| "learning_rate": 1.973551333281668e-05, |
| "loss": 0.638, |
| "step": 1495 |
| }, |
| { |
| "epoch": 0.7559373420919656, |
| "grad_norm": 0.41034024953842163, |
| "learning_rate": 1.9704744489108792e-05, |
| "loss": 0.1508, |
| "step": 1496 |
| }, |
| { |
| "epoch": 0.7564426478019202, |
| "grad_norm": 0.7889742851257324, |
| "learning_rate": 1.9673984040880468e-05, |
| "loss": 0.44, |
| "step": 1497 |
| }, |
| { |
| "epoch": 0.7569479535118747, |
| "grad_norm": 0.4004971385002136, |
| "learning_rate": 1.964323203690154e-05, |
| "loss": 0.1833, |
| "step": 1498 |
| }, |
| { |
| "epoch": 0.7574532592218292, |
| "grad_norm": 1.0051130056381226, |
| "learning_rate": 1.9612488525928452e-05, |
| "loss": 0.2733, |
| "step": 1499 |
| }, |
| { |
| "epoch": 0.7579585649317837, |
| "grad_norm": 0.49123916029930115, |
| "learning_rate": 1.958175355670418e-05, |
| "loss": 0.2622, |
| "step": 1500 |
| }, |
| { |
| "epoch": 0.7584638706417383, |
| "grad_norm": 0.581283688545227, |
| "learning_rate": 1.955102717795817e-05, |
| "loss": 0.1906, |
| "step": 1501 |
| }, |
| { |
| "epoch": 0.7589691763516928, |
| "grad_norm": 0.7873732447624207, |
| "learning_rate": 1.9520309438406217e-05, |
| "loss": 0.2679, |
| "step": 1502 |
| }, |
| { |
| "epoch": 0.7594744820616472, |
| "grad_norm": 0.5490037798881531, |
| "learning_rate": 1.9489600386750447e-05, |
| "loss": 0.2416, |
| "step": 1503 |
| }, |
| { |
| "epoch": 0.7599797877716018, |
| "grad_norm": 0.5317832231521606, |
| "learning_rate": 1.9458900071679202e-05, |
| "loss": 0.2828, |
| "step": 1504 |
| }, |
| { |
| "epoch": 0.7604850934815564, |
| "grad_norm": 0.6812729835510254, |
| "learning_rate": 1.9428208541866973e-05, |
| "loss": 0.3149, |
| "step": 1505 |
| }, |
| { |
| "epoch": 0.7609903991915109, |
| "grad_norm": 0.6650682091712952, |
| "learning_rate": 1.9397525845974336e-05, |
| "loss": 0.4121, |
| "step": 1506 |
| }, |
| { |
| "epoch": 0.7614957049014653, |
| "grad_norm": 0.5870944261550903, |
| "learning_rate": 1.9366852032647826e-05, |
| "loss": 0.3445, |
| "step": 1507 |
| }, |
| { |
| "epoch": 0.7620010106114199, |
| "grad_norm": 0.5600792765617371, |
| "learning_rate": 1.9336187150519924e-05, |
| "loss": 0.2882, |
| "step": 1508 |
| }, |
| { |
| "epoch": 0.7625063163213744, |
| "grad_norm": 0.45185530185699463, |
| "learning_rate": 1.9305531248208946e-05, |
| "loss": 0.1826, |
| "step": 1509 |
| }, |
| { |
| "epoch": 0.763011622031329, |
| "grad_norm": 0.7576361894607544, |
| "learning_rate": 1.9274884374318967e-05, |
| "loss": 0.3394, |
| "step": 1510 |
| }, |
| { |
| "epoch": 0.7635169277412834, |
| "grad_norm": 0.3762153089046478, |
| "learning_rate": 1.9244246577439752e-05, |
| "loss": 0.1961, |
| "step": 1511 |
| }, |
| { |
| "epoch": 0.764022233451238, |
| "grad_norm": 0.6146969199180603, |
| "learning_rate": 1.9213617906146663e-05, |
| "loss": 0.3267, |
| "step": 1512 |
| }, |
| { |
| "epoch": 0.7645275391611925, |
| "grad_norm": 0.5639180541038513, |
| "learning_rate": 1.9182998409000606e-05, |
| "loss": 0.2919, |
| "step": 1513 |
| }, |
| { |
| "epoch": 0.7650328448711471, |
| "grad_norm": 0.5925596952438354, |
| "learning_rate": 1.9152388134547945e-05, |
| "loss": 0.2764, |
| "step": 1514 |
| }, |
| { |
| "epoch": 0.7655381505811015, |
| "grad_norm": 0.41065713763237, |
| "learning_rate": 1.91217871313204e-05, |
| "loss": 0.188, |
| "step": 1515 |
| }, |
| { |
| "epoch": 0.7660434562910561, |
| "grad_norm": 0.38348203897476196, |
| "learning_rate": 1.909119544783503e-05, |
| "loss": 0.2152, |
| "step": 1516 |
| }, |
| { |
| "epoch": 0.7665487620010106, |
| "grad_norm": 0.4948267340660095, |
| "learning_rate": 1.9060613132594067e-05, |
| "loss": 0.2266, |
| "step": 1517 |
| }, |
| { |
| "epoch": 0.7670540677109652, |
| "grad_norm": 0.5641569495201111, |
| "learning_rate": 1.9030040234084926e-05, |
| "loss": 0.2611, |
| "step": 1518 |
| }, |
| { |
| "epoch": 0.7675593734209196, |
| "grad_norm": 0.4439907670021057, |
| "learning_rate": 1.8999476800780087e-05, |
| "loss": 0.2271, |
| "step": 1519 |
| }, |
| { |
| "epoch": 0.7680646791308742, |
| "grad_norm": 0.7098090648651123, |
| "learning_rate": 1.8968922881137012e-05, |
| "loss": 0.4192, |
| "step": 1520 |
| }, |
| { |
| "epoch": 0.7685699848408287, |
| "grad_norm": 0.6672466397285461, |
| "learning_rate": 1.8938378523598103e-05, |
| "loss": 0.3129, |
| "step": 1521 |
| }, |
| { |
| "epoch": 0.7690752905507833, |
| "grad_norm": 0.672419011592865, |
| "learning_rate": 1.8907843776590567e-05, |
| "loss": 0.3302, |
| "step": 1522 |
| }, |
| { |
| "epoch": 0.7695805962607377, |
| "grad_norm": 0.5676461458206177, |
| "learning_rate": 1.8877318688526393e-05, |
| "loss": 0.2522, |
| "step": 1523 |
| }, |
| { |
| "epoch": 0.7700859019706923, |
| "grad_norm": 0.4994024932384491, |
| "learning_rate": 1.8846803307802265e-05, |
| "loss": 0.1974, |
| "step": 1524 |
| }, |
| { |
| "epoch": 0.7705912076806468, |
| "grad_norm": 0.4182925820350647, |
| "learning_rate": 1.8816297682799452e-05, |
| "loss": 0.1786, |
| "step": 1525 |
| }, |
| { |
| "epoch": 0.7710965133906014, |
| "grad_norm": 0.6781833171844482, |
| "learning_rate": 1.8785801861883788e-05, |
| "loss": 0.4861, |
| "step": 1526 |
| }, |
| { |
| "epoch": 0.7716018191005558, |
| "grad_norm": 0.6011713147163391, |
| "learning_rate": 1.875531589340553e-05, |
| "loss": 0.2655, |
| "step": 1527 |
| }, |
| { |
| "epoch": 0.7721071248105104, |
| "grad_norm": 0.6764652729034424, |
| "learning_rate": 1.8724839825699332e-05, |
| "loss": 0.5056, |
| "step": 1528 |
| }, |
| { |
| "epoch": 0.7726124305204649, |
| "grad_norm": 0.5193274021148682, |
| "learning_rate": 1.869437370708415e-05, |
| "loss": 0.2291, |
| "step": 1529 |
| }, |
| { |
| "epoch": 0.7731177362304194, |
| "grad_norm": 0.5534316301345825, |
| "learning_rate": 1.8663917585863154e-05, |
| "loss": 0.2526, |
| "step": 1530 |
| }, |
| { |
| "epoch": 0.7736230419403739, |
| "grad_norm": 0.4299106299877167, |
| "learning_rate": 1.8633471510323694e-05, |
| "loss": 0.1945, |
| "step": 1531 |
| }, |
| { |
| "epoch": 0.7741283476503285, |
| "grad_norm": 0.5962615013122559, |
| "learning_rate": 1.8603035528737146e-05, |
| "loss": 0.3837, |
| "step": 1532 |
| }, |
| { |
| "epoch": 0.774633653360283, |
| "grad_norm": 0.6587054133415222, |
| "learning_rate": 1.8572609689358923e-05, |
| "loss": 0.3066, |
| "step": 1533 |
| }, |
| { |
| "epoch": 0.7751389590702374, |
| "grad_norm": 0.919040858745575, |
| "learning_rate": 1.854219404042834e-05, |
| "loss": 0.369, |
| "step": 1534 |
| }, |
| { |
| "epoch": 0.775644264780192, |
| "grad_norm": 0.4253031611442566, |
| "learning_rate": 1.8511788630168557e-05, |
| "loss": 0.199, |
| "step": 1535 |
| }, |
| { |
| "epoch": 0.7761495704901465, |
| "grad_norm": 0.6377377510070801, |
| "learning_rate": 1.848139350678651e-05, |
| "loss": 0.3452, |
| "step": 1536 |
| }, |
| { |
| "epoch": 0.7766548762001011, |
| "grad_norm": 0.9110857248306274, |
| "learning_rate": 1.8451008718472802e-05, |
| "loss": 0.3778, |
| "step": 1537 |
| }, |
| { |
| "epoch": 0.7771601819100555, |
| "grad_norm": 0.42227810621261597, |
| "learning_rate": 1.8420634313401673e-05, |
| "loss": 0.1541, |
| "step": 1538 |
| }, |
| { |
| "epoch": 0.7776654876200101, |
| "grad_norm": 0.5211955904960632, |
| "learning_rate": 1.8390270339730892e-05, |
| "loss": 0.3227, |
| "step": 1539 |
| }, |
| { |
| "epoch": 0.7781707933299646, |
| "grad_norm": 0.366298645734787, |
| "learning_rate": 1.835991684560169e-05, |
| "loss": 0.1747, |
| "step": 1540 |
| }, |
| { |
| "epoch": 0.7786760990399192, |
| "grad_norm": 0.4732970893383026, |
| "learning_rate": 1.8329573879138695e-05, |
| "loss": 0.2065, |
| "step": 1541 |
| }, |
| { |
| "epoch": 0.7791814047498736, |
| "grad_norm": 0.43378695845603943, |
| "learning_rate": 1.8299241488449814e-05, |
| "loss": 0.1849, |
| "step": 1542 |
| }, |
| { |
| "epoch": 0.7796867104598282, |
| "grad_norm": 0.6272542476654053, |
| "learning_rate": 1.826891972162621e-05, |
| "loss": 0.2282, |
| "step": 1543 |
| }, |
| { |
| "epoch": 0.7801920161697827, |
| "grad_norm": 0.46285495162010193, |
| "learning_rate": 1.82386086267422e-05, |
| "loss": 0.2114, |
| "step": 1544 |
| }, |
| { |
| "epoch": 0.7806973218797373, |
| "grad_norm": 0.5049265623092651, |
| "learning_rate": 1.8208308251855176e-05, |
| "loss": 0.5401, |
| "step": 1545 |
| } |
| ], |
| "logging_steps": 1, |
| "max_steps": 2573, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 2, |
| "save_steps": 515, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": true, |
| "should_training_stop": false |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 2.4075086284361073e+18, |
| "train_batch_size": 2, |
| "trial_name": null, |
| "trial_params": null |
| } |