| { |
| "best_global_step": null, |
| "best_metric": null, |
| "best_model_checkpoint": null, |
| "epoch": 0.9995676610462603, |
| "eval_steps": 500, |
| "global_step": 578, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.0017293558149589277, |
| "grad_norm": 14.538986412992182, |
| "learning_rate": 0.0, |
| "loss": 1.6313, |
| "step": 1 |
| }, |
| { |
| "epoch": 0.0034587116299178555, |
| "grad_norm": 14.326584895058424, |
| "learning_rate": 5.555555555555555e-07, |
| "loss": 1.5796, |
| "step": 2 |
| }, |
| { |
| "epoch": 0.005188067444876783, |
| "grad_norm": 14.303713845230615, |
| "learning_rate": 1.111111111111111e-06, |
| "loss": 1.6146, |
| "step": 3 |
| }, |
| { |
| "epoch": 0.006917423259835711, |
| "grad_norm": 14.270354087741836, |
| "learning_rate": 1.6666666666666667e-06, |
| "loss": 1.601, |
| "step": 4 |
| }, |
| { |
| "epoch": 0.00864677907479464, |
| "grad_norm": 13.056520461824615, |
| "learning_rate": 2.222222222222222e-06, |
| "loss": 1.5589, |
| "step": 5 |
| }, |
| { |
| "epoch": 0.010376134889753566, |
| "grad_norm": 10.770002519875254, |
| "learning_rate": 2.7777777777777783e-06, |
| "loss": 1.4667, |
| "step": 6 |
| }, |
| { |
| "epoch": 0.012105490704712495, |
| "grad_norm": 7.95324664209596, |
| "learning_rate": 3.3333333333333333e-06, |
| "loss": 1.2753, |
| "step": 7 |
| }, |
| { |
| "epoch": 0.013834846519671422, |
| "grad_norm": 7.532832624580702, |
| "learning_rate": 3.88888888888889e-06, |
| "loss": 1.2647, |
| "step": 8 |
| }, |
| { |
| "epoch": 0.01556420233463035, |
| "grad_norm": 5.2620835279629645, |
| "learning_rate": 4.444444444444444e-06, |
| "loss": 1.1498, |
| "step": 9 |
| }, |
| { |
| "epoch": 0.01729355814958928, |
| "grad_norm": 5.262371985501062, |
| "learning_rate": 5e-06, |
| "loss": 1.0606, |
| "step": 10 |
| }, |
| { |
| "epoch": 0.019022913964548204, |
| "grad_norm": 4.75552495213558, |
| "learning_rate": 5.555555555555557e-06, |
| "loss": 1.039, |
| "step": 11 |
| }, |
| { |
| "epoch": 0.020752269779507133, |
| "grad_norm": 4.109629839647021, |
| "learning_rate": 6.111111111111112e-06, |
| "loss": 0.9838, |
| "step": 12 |
| }, |
| { |
| "epoch": 0.02248162559446606, |
| "grad_norm": 3.2895830782411433, |
| "learning_rate": 6.666666666666667e-06, |
| "loss": 0.9485, |
| "step": 13 |
| }, |
| { |
| "epoch": 0.02421098140942499, |
| "grad_norm": 3.4515341118841554, |
| "learning_rate": 7.222222222222223e-06, |
| "loss": 0.8929, |
| "step": 14 |
| }, |
| { |
| "epoch": 0.02594033722438392, |
| "grad_norm": 3.5675009192020917, |
| "learning_rate": 7.77777777777778e-06, |
| "loss": 0.8761, |
| "step": 15 |
| }, |
| { |
| "epoch": 0.027669693039342844, |
| "grad_norm": 3.1238221221969846, |
| "learning_rate": 8.333333333333334e-06, |
| "loss": 0.8579, |
| "step": 16 |
| }, |
| { |
| "epoch": 0.029399048854301772, |
| "grad_norm": 2.730324810433143, |
| "learning_rate": 8.888888888888888e-06, |
| "loss": 0.825, |
| "step": 17 |
| }, |
| { |
| "epoch": 0.0311284046692607, |
| "grad_norm": 2.142543702234471, |
| "learning_rate": 9.444444444444445e-06, |
| "loss": 0.8275, |
| "step": 18 |
| }, |
| { |
| "epoch": 0.03285776048421963, |
| "grad_norm": 2.2407490739694618, |
| "learning_rate": 1e-05, |
| "loss": 0.798, |
| "step": 19 |
| }, |
| { |
| "epoch": 0.03458711629917856, |
| "grad_norm": 2.084673650668219, |
| "learning_rate": 9.999921320324328e-06, |
| "loss": 0.7791, |
| "step": 20 |
| }, |
| { |
| "epoch": 0.03631647211413749, |
| "grad_norm": 2.2050652721508817, |
| "learning_rate": 9.999685283773504e-06, |
| "loss": 0.801, |
| "step": 21 |
| }, |
| { |
| "epoch": 0.03804582792909641, |
| "grad_norm": 1.9949942374964489, |
| "learning_rate": 9.999291897776043e-06, |
| "loss": 0.7673, |
| "step": 22 |
| }, |
| { |
| "epoch": 0.03977518374405534, |
| "grad_norm": 1.9222200778544485, |
| "learning_rate": 9.998741174712534e-06, |
| "loss": 0.7546, |
| "step": 23 |
| }, |
| { |
| "epoch": 0.041504539559014265, |
| "grad_norm": 2.085559689480793, |
| "learning_rate": 9.998033131915266e-06, |
| "loss": 0.7733, |
| "step": 24 |
| }, |
| { |
| "epoch": 0.043233895373973194, |
| "grad_norm": 1.731301007495722, |
| "learning_rate": 9.997167791667668e-06, |
| "loss": 0.7406, |
| "step": 25 |
| }, |
| { |
| "epoch": 0.04496325118893212, |
| "grad_norm": 1.9090014318422763, |
| "learning_rate": 9.996145181203616e-06, |
| "loss": 0.7478, |
| "step": 26 |
| }, |
| { |
| "epoch": 0.04669260700389105, |
| "grad_norm": 1.6716462018458187, |
| "learning_rate": 9.994965332706574e-06, |
| "loss": 0.7293, |
| "step": 27 |
| }, |
| { |
| "epoch": 0.04842196281884998, |
| "grad_norm": 1.641744923224004, |
| "learning_rate": 9.993628283308582e-06, |
| "loss": 0.7157, |
| "step": 28 |
| }, |
| { |
| "epoch": 0.05015131863380891, |
| "grad_norm": 1.5639464733995954, |
| "learning_rate": 9.992134075089085e-06, |
| "loss": 0.7337, |
| "step": 29 |
| }, |
| { |
| "epoch": 0.05188067444876784, |
| "grad_norm": 1.5670396520195686, |
| "learning_rate": 9.990482755073607e-06, |
| "loss": 0.6944, |
| "step": 30 |
| }, |
| { |
| "epoch": 0.05361003026372676, |
| "grad_norm": 1.5501858401764228, |
| "learning_rate": 9.98867437523228e-06, |
| "loss": 0.7382, |
| "step": 31 |
| }, |
| { |
| "epoch": 0.05533938607868569, |
| "grad_norm": 1.5475730980812552, |
| "learning_rate": 9.986708992478202e-06, |
| "loss": 0.6836, |
| "step": 32 |
| }, |
| { |
| "epoch": 0.057068741893644616, |
| "grad_norm": 1.5368459860394041, |
| "learning_rate": 9.984586668665641e-06, |
| "loss": 0.7309, |
| "step": 33 |
| }, |
| { |
| "epoch": 0.058798097708603544, |
| "grad_norm": 1.573756977286856, |
| "learning_rate": 9.982307470588097e-06, |
| "loss": 0.6921, |
| "step": 34 |
| }, |
| { |
| "epoch": 0.06052745352356247, |
| "grad_norm": 1.4598653582851544, |
| "learning_rate": 9.979871469976197e-06, |
| "loss": 0.6824, |
| "step": 35 |
| }, |
| { |
| "epoch": 0.0622568093385214, |
| "grad_norm": 1.3407147196823341, |
| "learning_rate": 9.977278743495434e-06, |
| "loss": 0.695, |
| "step": 36 |
| }, |
| { |
| "epoch": 0.06398616515348032, |
| "grad_norm": 1.3427046109275345, |
| "learning_rate": 9.974529372743762e-06, |
| "loss": 0.6849, |
| "step": 37 |
| }, |
| { |
| "epoch": 0.06571552096843926, |
| "grad_norm": 1.385016607008538, |
| "learning_rate": 9.97162344424902e-06, |
| "loss": 0.704, |
| "step": 38 |
| }, |
| { |
| "epoch": 0.06744487678339818, |
| "grad_norm": 1.3624925480173673, |
| "learning_rate": 9.968561049466214e-06, |
| "loss": 0.6648, |
| "step": 39 |
| }, |
| { |
| "epoch": 0.06917423259835712, |
| "grad_norm": 1.6070208118878249, |
| "learning_rate": 9.965342284774633e-06, |
| "loss": 0.7004, |
| "step": 40 |
| }, |
| { |
| "epoch": 0.07090358841331604, |
| "grad_norm": 1.4348520811083416, |
| "learning_rate": 9.961967251474823e-06, |
| "loss": 0.6625, |
| "step": 41 |
| }, |
| { |
| "epoch": 0.07263294422827497, |
| "grad_norm": 1.3921519261612385, |
| "learning_rate": 9.958436055785391e-06, |
| "loss": 0.6709, |
| "step": 42 |
| }, |
| { |
| "epoch": 0.0743623000432339, |
| "grad_norm": 1.3214257024283271, |
| "learning_rate": 9.954748808839675e-06, |
| "loss": 0.6526, |
| "step": 43 |
| }, |
| { |
| "epoch": 0.07609165585819282, |
| "grad_norm": 1.3703861516781, |
| "learning_rate": 9.950905626682229e-06, |
| "loss": 0.6485, |
| "step": 44 |
| }, |
| { |
| "epoch": 0.07782101167315175, |
| "grad_norm": 1.3610898546796686, |
| "learning_rate": 9.946906630265184e-06, |
| "loss": 0.6509, |
| "step": 45 |
| }, |
| { |
| "epoch": 0.07955036748811067, |
| "grad_norm": 1.4666639612285957, |
| "learning_rate": 9.942751945444437e-06, |
| "loss": 0.6575, |
| "step": 46 |
| }, |
| { |
| "epoch": 0.08127972330306961, |
| "grad_norm": 1.2396616063563637, |
| "learning_rate": 9.938441702975689e-06, |
| "loss": 0.6395, |
| "step": 47 |
| }, |
| { |
| "epoch": 0.08300907911802853, |
| "grad_norm": 1.4092903953844345, |
| "learning_rate": 9.933976038510334e-06, |
| "loss": 0.6554, |
| "step": 48 |
| }, |
| { |
| "epoch": 0.08473843493298747, |
| "grad_norm": 1.7100974038421335, |
| "learning_rate": 9.92935509259118e-06, |
| "loss": 0.6528, |
| "step": 49 |
| }, |
| { |
| "epoch": 0.08646779074794639, |
| "grad_norm": 1.4242700514926885, |
| "learning_rate": 9.924579010648042e-06, |
| "loss": 0.6486, |
| "step": 50 |
| }, |
| { |
| "epoch": 0.08819714656290532, |
| "grad_norm": 1.5209362334431213, |
| "learning_rate": 9.91964794299315e-06, |
| "loss": 0.6773, |
| "step": 51 |
| }, |
| { |
| "epoch": 0.08992650237786425, |
| "grad_norm": 1.4185285804137782, |
| "learning_rate": 9.914562044816424e-06, |
| "loss": 0.6488, |
| "step": 52 |
| }, |
| { |
| "epoch": 0.09165585819282317, |
| "grad_norm": 1.3378741936637544, |
| "learning_rate": 9.909321476180594e-06, |
| "loss": 0.6517, |
| "step": 53 |
| }, |
| { |
| "epoch": 0.0933852140077821, |
| "grad_norm": 1.3516713188326783, |
| "learning_rate": 9.903926402016153e-06, |
| "loss": 0.6396, |
| "step": 54 |
| }, |
| { |
| "epoch": 0.09511456982274102, |
| "grad_norm": 1.5410717485635828, |
| "learning_rate": 9.898376992116179e-06, |
| "loss": 0.6447, |
| "step": 55 |
| }, |
| { |
| "epoch": 0.09684392563769996, |
| "grad_norm": 1.5098400169635777, |
| "learning_rate": 9.892673421130979e-06, |
| "loss": 0.6342, |
| "step": 56 |
| }, |
| { |
| "epoch": 0.09857328145265888, |
| "grad_norm": 1.4229655753415995, |
| "learning_rate": 9.886815868562596e-06, |
| "loss": 0.623, |
| "step": 57 |
| }, |
| { |
| "epoch": 0.10030263726761782, |
| "grad_norm": 1.3977945588526572, |
| "learning_rate": 9.88080451875917e-06, |
| "loss": 0.6397, |
| "step": 58 |
| }, |
| { |
| "epoch": 0.10203199308257674, |
| "grad_norm": 1.2877794451825768, |
| "learning_rate": 9.874639560909118e-06, |
| "loss": 0.6697, |
| "step": 59 |
| }, |
| { |
| "epoch": 0.10376134889753567, |
| "grad_norm": 1.4091354007797192, |
| "learning_rate": 9.868321189035196e-06, |
| "loss": 0.6359, |
| "step": 60 |
| }, |
| { |
| "epoch": 0.1054907047124946, |
| "grad_norm": 1.362969728293516, |
| "learning_rate": 9.861849601988384e-06, |
| "loss": 0.6418, |
| "step": 61 |
| }, |
| { |
| "epoch": 0.10722006052745352, |
| "grad_norm": 1.453550759696413, |
| "learning_rate": 9.855225003441629e-06, |
| "loss": 0.6469, |
| "step": 62 |
| }, |
| { |
| "epoch": 0.10894941634241245, |
| "grad_norm": 1.4051406418213799, |
| "learning_rate": 9.848447601883436e-06, |
| "loss": 0.6257, |
| "step": 63 |
| }, |
| { |
| "epoch": 0.11067877215737137, |
| "grad_norm": 1.3211083898505493, |
| "learning_rate": 9.841517610611309e-06, |
| "loss": 0.6396, |
| "step": 64 |
| }, |
| { |
| "epoch": 0.11240812797233031, |
| "grad_norm": 1.3740318481622646, |
| "learning_rate": 9.834435247725032e-06, |
| "loss": 0.6259, |
| "step": 65 |
| }, |
| { |
| "epoch": 0.11413748378728923, |
| "grad_norm": 1.484320978899618, |
| "learning_rate": 9.827200736119815e-06, |
| "loss": 0.6219, |
| "step": 66 |
| }, |
| { |
| "epoch": 0.11586683960224817, |
| "grad_norm": 1.3380643332618636, |
| "learning_rate": 9.819814303479268e-06, |
| "loss": 0.6192, |
| "step": 67 |
| }, |
| { |
| "epoch": 0.11759619541720709, |
| "grad_norm": 1.4153726476019632, |
| "learning_rate": 9.812276182268236e-06, |
| "loss": 0.617, |
| "step": 68 |
| }, |
| { |
| "epoch": 0.11932555123216602, |
| "grad_norm": 1.435579910730408, |
| "learning_rate": 9.804586609725499e-06, |
| "loss": 0.6082, |
| "step": 69 |
| }, |
| { |
| "epoch": 0.12105490704712495, |
| "grad_norm": 1.3611927229844167, |
| "learning_rate": 9.79674582785628e-06, |
| "loss": 0.6234, |
| "step": 70 |
| }, |
| { |
| "epoch": 0.12278426286208387, |
| "grad_norm": 1.4203295780177876, |
| "learning_rate": 9.788754083424654e-06, |
| "loss": 0.6333, |
| "step": 71 |
| }, |
| { |
| "epoch": 0.1245136186770428, |
| "grad_norm": 1.29595870212252, |
| "learning_rate": 9.78061162794576e-06, |
| "loss": 0.6267, |
| "step": 72 |
| }, |
| { |
| "epoch": 0.12624297449200172, |
| "grad_norm": 1.5861725748430446, |
| "learning_rate": 9.772318717677905e-06, |
| "loss": 0.6008, |
| "step": 73 |
| }, |
| { |
| "epoch": 0.12797233030696065, |
| "grad_norm": 1.3871875221331877, |
| "learning_rate": 9.763875613614482e-06, |
| "loss": 0.6099, |
| "step": 74 |
| }, |
| { |
| "epoch": 0.1297016861219196, |
| "grad_norm": 1.3701024745992456, |
| "learning_rate": 9.755282581475769e-06, |
| "loss": 0.6165, |
| "step": 75 |
| }, |
| { |
| "epoch": 0.13143104193687852, |
| "grad_norm": 1.3461257221248066, |
| "learning_rate": 9.746539891700558e-06, |
| "loss": 0.6329, |
| "step": 76 |
| }, |
| { |
| "epoch": 0.13316039775183744, |
| "grad_norm": 1.3814830311022952, |
| "learning_rate": 9.737647819437645e-06, |
| "loss": 0.6238, |
| "step": 77 |
| }, |
| { |
| "epoch": 0.13488975356679636, |
| "grad_norm": 1.3317024763543674, |
| "learning_rate": 9.728606644537177e-06, |
| "loss": 0.6436, |
| "step": 78 |
| }, |
| { |
| "epoch": 0.13661910938175528, |
| "grad_norm": 1.3559610943853933, |
| "learning_rate": 9.719416651541839e-06, |
| "loss": 0.6093, |
| "step": 79 |
| }, |
| { |
| "epoch": 0.13834846519671423, |
| "grad_norm": 1.3142606469258882, |
| "learning_rate": 9.710078129677895e-06, |
| "loss": 0.6265, |
| "step": 80 |
| }, |
| { |
| "epoch": 0.14007782101167315, |
| "grad_norm": 1.4252162273974214, |
| "learning_rate": 9.700591372846096e-06, |
| "loss": 0.62, |
| "step": 81 |
| }, |
| { |
| "epoch": 0.14180717682663208, |
| "grad_norm": 1.284483722976347, |
| "learning_rate": 9.690956679612422e-06, |
| "loss": 0.6387, |
| "step": 82 |
| }, |
| { |
| "epoch": 0.143536532641591, |
| "grad_norm": 1.4287308609026501, |
| "learning_rate": 9.681174353198687e-06, |
| "loss": 0.6155, |
| "step": 83 |
| }, |
| { |
| "epoch": 0.14526588845654995, |
| "grad_norm": 1.3659377539854851, |
| "learning_rate": 9.671244701472999e-06, |
| "loss": 0.5813, |
| "step": 84 |
| }, |
| { |
| "epoch": 0.14699524427150887, |
| "grad_norm": 1.3254239612936913, |
| "learning_rate": 9.661168036940071e-06, |
| "loss": 0.607, |
| "step": 85 |
| }, |
| { |
| "epoch": 0.1487246000864678, |
| "grad_norm": 1.3325242903071612, |
| "learning_rate": 9.650944676731383e-06, |
| "loss": 0.5811, |
| "step": 86 |
| }, |
| { |
| "epoch": 0.1504539559014267, |
| "grad_norm": 1.3721199226780008, |
| "learning_rate": 9.640574942595195e-06, |
| "loss": 0.6213, |
| "step": 87 |
| }, |
| { |
| "epoch": 0.15218331171638563, |
| "grad_norm": 1.3252154289940847, |
| "learning_rate": 9.63005916088644e-06, |
| "loss": 0.598, |
| "step": 88 |
| }, |
| { |
| "epoch": 0.15391266753134458, |
| "grad_norm": 1.341549271858989, |
| "learning_rate": 9.619397662556434e-06, |
| "loss": 0.6159, |
| "step": 89 |
| }, |
| { |
| "epoch": 0.1556420233463035, |
| "grad_norm": 1.306931095537237, |
| "learning_rate": 9.608590783142471e-06, |
| "loss": 0.5866, |
| "step": 90 |
| }, |
| { |
| "epoch": 0.15737137916126243, |
| "grad_norm": 1.3395574538440298, |
| "learning_rate": 9.597638862757255e-06, |
| "loss": 0.6132, |
| "step": 91 |
| }, |
| { |
| "epoch": 0.15910073497622135, |
| "grad_norm": 1.4247495939885102, |
| "learning_rate": 9.586542246078203e-06, |
| "loss": 0.6033, |
| "step": 92 |
| }, |
| { |
| "epoch": 0.1608300907911803, |
| "grad_norm": 1.4103034043856166, |
| "learning_rate": 9.5753012823366e-06, |
| "loss": 0.5958, |
| "step": 93 |
| }, |
| { |
| "epoch": 0.16255944660613922, |
| "grad_norm": 1.2779807076086858, |
| "learning_rate": 9.563916325306595e-06, |
| "loss": 0.5999, |
| "step": 94 |
| }, |
| { |
| "epoch": 0.16428880242109814, |
| "grad_norm": 1.3678860405045183, |
| "learning_rate": 9.552387733294081e-06, |
| "loss": 0.6047, |
| "step": 95 |
| }, |
| { |
| "epoch": 0.16601815823605706, |
| "grad_norm": 1.4177812307852564, |
| "learning_rate": 9.540715869125407e-06, |
| "loss": 0.6032, |
| "step": 96 |
| }, |
| { |
| "epoch": 0.16774751405101598, |
| "grad_norm": 1.3929063773953412, |
| "learning_rate": 9.528901100135971e-06, |
| "loss": 0.5914, |
| "step": 97 |
| }, |
| { |
| "epoch": 0.16947686986597493, |
| "grad_norm": 1.4452904668325175, |
| "learning_rate": 9.51694379815865e-06, |
| "loss": 0.591, |
| "step": 98 |
| }, |
| { |
| "epoch": 0.17120622568093385, |
| "grad_norm": 1.384558049529939, |
| "learning_rate": 9.504844339512096e-06, |
| "loss": 0.5935, |
| "step": 99 |
| }, |
| { |
| "epoch": 0.17293558149589278, |
| "grad_norm": 1.3744626820811583, |
| "learning_rate": 9.492603104988907e-06, |
| "loss": 0.6093, |
| "step": 100 |
| }, |
| { |
| "epoch": 0.1746649373108517, |
| "grad_norm": 1.4015343739030506, |
| "learning_rate": 9.480220479843627e-06, |
| "loss": 0.5864, |
| "step": 101 |
| }, |
| { |
| "epoch": 0.17639429312581065, |
| "grad_norm": 1.3434145487030242, |
| "learning_rate": 9.467696853780625e-06, |
| "loss": 0.5907, |
| "step": 102 |
| }, |
| { |
| "epoch": 0.17812364894076957, |
| "grad_norm": 1.3106599461212003, |
| "learning_rate": 9.45503262094184e-06, |
| "loss": 0.6035, |
| "step": 103 |
| }, |
| { |
| "epoch": 0.1798530047557285, |
| "grad_norm": 1.2960137141000876, |
| "learning_rate": 9.442228179894362e-06, |
| "loss": 0.58, |
| "step": 104 |
| }, |
| { |
| "epoch": 0.1815823605706874, |
| "grad_norm": 1.339610530818961, |
| "learning_rate": 9.4292839336179e-06, |
| "loss": 0.6019, |
| "step": 105 |
| }, |
| { |
| "epoch": 0.18331171638564633, |
| "grad_norm": 1.437718437019135, |
| "learning_rate": 9.416200289492092e-06, |
| "loss": 0.6098, |
| "step": 106 |
| }, |
| { |
| "epoch": 0.18504107220060528, |
| "grad_norm": 1.4523767804936614, |
| "learning_rate": 9.40297765928369e-06, |
| "loss": 0.6018, |
| "step": 107 |
| }, |
| { |
| "epoch": 0.1867704280155642, |
| "grad_norm": 1.2828185912784842, |
| "learning_rate": 9.389616459133597e-06, |
| "loss": 0.5764, |
| "step": 108 |
| }, |
| { |
| "epoch": 0.18849978383052313, |
| "grad_norm": 1.4256098510891433, |
| "learning_rate": 9.376117109543769e-06, |
| "loss": 0.6159, |
| "step": 109 |
| }, |
| { |
| "epoch": 0.19022913964548205, |
| "grad_norm": 1.3501440943806944, |
| "learning_rate": 9.362480035363987e-06, |
| "loss": 0.5793, |
| "step": 110 |
| }, |
| { |
| "epoch": 0.191958495460441, |
| "grad_norm": 1.3361244720373389, |
| "learning_rate": 9.348705665778479e-06, |
| "loss": 0.5712, |
| "step": 111 |
| }, |
| { |
| "epoch": 0.19368785127539992, |
| "grad_norm": 1.2782083486564007, |
| "learning_rate": 9.334794434292416e-06, |
| "loss": 0.5825, |
| "step": 112 |
| }, |
| { |
| "epoch": 0.19541720709035884, |
| "grad_norm": 1.2823908295358897, |
| "learning_rate": 9.320746778718274e-06, |
| "loss": 0.6332, |
| "step": 113 |
| }, |
| { |
| "epoch": 0.19714656290531776, |
| "grad_norm": 1.4254059120553437, |
| "learning_rate": 9.306563141162046e-06, |
| "loss": 0.6021, |
| "step": 114 |
| }, |
| { |
| "epoch": 0.19887591872027668, |
| "grad_norm": 1.3615562993020114, |
| "learning_rate": 9.292243968009332e-06, |
| "loss": 0.5919, |
| "step": 115 |
| }, |
| { |
| "epoch": 0.20060527453523563, |
| "grad_norm": 1.4003633053761828, |
| "learning_rate": 9.27778970991129e-06, |
| "loss": 0.6122, |
| "step": 116 |
| }, |
| { |
| "epoch": 0.20233463035019456, |
| "grad_norm": 1.4139482327306883, |
| "learning_rate": 9.263200821770462e-06, |
| "loss": 0.5731, |
| "step": 117 |
| }, |
| { |
| "epoch": 0.20406398616515348, |
| "grad_norm": 1.4647997165826316, |
| "learning_rate": 9.248477762726438e-06, |
| "loss": 0.5797, |
| "step": 118 |
| }, |
| { |
| "epoch": 0.2057933419801124, |
| "grad_norm": 1.3922473806972735, |
| "learning_rate": 9.233620996141421e-06, |
| "loss": 0.5598, |
| "step": 119 |
| }, |
| { |
| "epoch": 0.20752269779507135, |
| "grad_norm": 1.3498874982995888, |
| "learning_rate": 9.218630989585647e-06, |
| "loss": 0.5824, |
| "step": 120 |
| }, |
| { |
| "epoch": 0.20925205361003027, |
| "grad_norm": 1.4059303390475026, |
| "learning_rate": 9.203508214822652e-06, |
| "loss": 0.6209, |
| "step": 121 |
| }, |
| { |
| "epoch": 0.2109814094249892, |
| "grad_norm": 1.4908066800771167, |
| "learning_rate": 9.188253147794443e-06, |
| "loss": 0.5784, |
| "step": 122 |
| }, |
| { |
| "epoch": 0.2127107652399481, |
| "grad_norm": 1.3718507338485877, |
| "learning_rate": 9.172866268606514e-06, |
| "loss": 0.5962, |
| "step": 123 |
| }, |
| { |
| "epoch": 0.21444012105490703, |
| "grad_norm": 1.402469081391288, |
| "learning_rate": 9.157348061512728e-06, |
| "loss": 0.5929, |
| "step": 124 |
| }, |
| { |
| "epoch": 0.21616947686986598, |
| "grad_norm": 1.3206199086660828, |
| "learning_rate": 9.141699014900084e-06, |
| "loss": 0.5903, |
| "step": 125 |
| }, |
| { |
| "epoch": 0.2178988326848249, |
| "grad_norm": 1.3281498552638782, |
| "learning_rate": 9.125919621273348e-06, |
| "loss": 0.5946, |
| "step": 126 |
| }, |
| { |
| "epoch": 0.21962818849978383, |
| "grad_norm": 1.3209410288937773, |
| "learning_rate": 9.110010377239552e-06, |
| "loss": 0.5626, |
| "step": 127 |
| }, |
| { |
| "epoch": 0.22135754431474275, |
| "grad_norm": 1.3329308119803185, |
| "learning_rate": 9.093971783492354e-06, |
| "loss": 0.5918, |
| "step": 128 |
| }, |
| { |
| "epoch": 0.2230869001297017, |
| "grad_norm": 1.347489334146743, |
| "learning_rate": 9.077804344796302e-06, |
| "loss": 0.5878, |
| "step": 129 |
| }, |
| { |
| "epoch": 0.22481625594466062, |
| "grad_norm": 1.4072905998791443, |
| "learning_rate": 9.061508569970926e-06, |
| "loss": 0.5895, |
| "step": 130 |
| }, |
| { |
| "epoch": 0.22654561175961954, |
| "grad_norm": 1.4623429864651023, |
| "learning_rate": 9.045084971874738e-06, |
| "loss": 0.5935, |
| "step": 131 |
| }, |
| { |
| "epoch": 0.22827496757457846, |
| "grad_norm": 1.3789532216954916, |
| "learning_rate": 9.028534067389087e-06, |
| "loss": 0.5886, |
| "step": 132 |
| }, |
| { |
| "epoch": 0.23000432338953739, |
| "grad_norm": 1.3644289851875127, |
| "learning_rate": 9.011856377401891e-06, |
| "loss": 0.5682, |
| "step": 133 |
| }, |
| { |
| "epoch": 0.23173367920449633, |
| "grad_norm": 1.2442423854348275, |
| "learning_rate": 8.995052426791247e-06, |
| "loss": 0.5684, |
| "step": 134 |
| }, |
| { |
| "epoch": 0.23346303501945526, |
| "grad_norm": 1.3245488152280875, |
| "learning_rate": 8.978122744408905e-06, |
| "loss": 0.5643, |
| "step": 135 |
| }, |
| { |
| "epoch": 0.23519239083441418, |
| "grad_norm": 1.3934642875348537, |
| "learning_rate": 8.961067863063638e-06, |
| "loss": 0.5707, |
| "step": 136 |
| }, |
| { |
| "epoch": 0.2369217466493731, |
| "grad_norm": 1.3586108466135645, |
| "learning_rate": 8.943888319504456e-06, |
| "loss": 0.5936, |
| "step": 137 |
| }, |
| { |
| "epoch": 0.23865110246433205, |
| "grad_norm": 1.2787757325817652, |
| "learning_rate": 8.926584654403725e-06, |
| "loss": 0.5743, |
| "step": 138 |
| }, |
| { |
| "epoch": 0.24038045827929097, |
| "grad_norm": 1.2401520618532709, |
| "learning_rate": 8.90915741234015e-06, |
| "loss": 0.5738, |
| "step": 139 |
| }, |
| { |
| "epoch": 0.2421098140942499, |
| "grad_norm": 1.3715834375196405, |
| "learning_rate": 8.89160714178163e-06, |
| "loss": 0.594, |
| "step": 140 |
| }, |
| { |
| "epoch": 0.2438391699092088, |
| "grad_norm": 1.3920239192472779, |
| "learning_rate": 8.873934395068006e-06, |
| "loss": 0.5891, |
| "step": 141 |
| }, |
| { |
| "epoch": 0.24556852572416774, |
| "grad_norm": 1.280071307258292, |
| "learning_rate": 8.856139728393667e-06, |
| "loss": 0.586, |
| "step": 142 |
| }, |
| { |
| "epoch": 0.24729788153912668, |
| "grad_norm": 1.4348874555698314, |
| "learning_rate": 8.838223701790057e-06, |
| "loss": 0.6031, |
| "step": 143 |
| }, |
| { |
| "epoch": 0.2490272373540856, |
| "grad_norm": 1.2605512084319181, |
| "learning_rate": 8.820186879108038e-06, |
| "loss": 0.579, |
| "step": 144 |
| }, |
| { |
| "epoch": 0.25075659316904453, |
| "grad_norm": 1.3303160437037416, |
| "learning_rate": 8.802029828000157e-06, |
| "loss": 0.5848, |
| "step": 145 |
| }, |
| { |
| "epoch": 0.25248594898400345, |
| "grad_norm": 1.4582040782143946, |
| "learning_rate": 8.783753119902766e-06, |
| "loss": 0.5758, |
| "step": 146 |
| }, |
| { |
| "epoch": 0.25421530479896237, |
| "grad_norm": 1.322898639608662, |
| "learning_rate": 8.765357330018056e-06, |
| "loss": 0.5869, |
| "step": 147 |
| }, |
| { |
| "epoch": 0.2559446606139213, |
| "grad_norm": 1.384126692032235, |
| "learning_rate": 8.746843037295936e-06, |
| "loss": 0.5588, |
| "step": 148 |
| }, |
| { |
| "epoch": 0.2576740164288802, |
| "grad_norm": 1.3286178990420618, |
| "learning_rate": 8.728210824415829e-06, |
| "loss": 0.5688, |
| "step": 149 |
| }, |
| { |
| "epoch": 0.2594033722438392, |
| "grad_norm": 1.255847642552832, |
| "learning_rate": 8.70946127776832e-06, |
| "loss": 0.5436, |
| "step": 150 |
| }, |
| { |
| "epoch": 0.2611327280587981, |
| "grad_norm": 1.5446363806642398, |
| "learning_rate": 8.690594987436705e-06, |
| "loss": 0.5669, |
| "step": 151 |
| }, |
| { |
| "epoch": 0.26286208387375704, |
| "grad_norm": 1.3185596223777625, |
| "learning_rate": 8.671612547178428e-06, |
| "loss": 0.5652, |
| "step": 152 |
| }, |
| { |
| "epoch": 0.26459143968871596, |
| "grad_norm": 1.3558175344108856, |
| "learning_rate": 8.652514554406388e-06, |
| "loss": 0.5872, |
| "step": 153 |
| }, |
| { |
| "epoch": 0.2663207955036749, |
| "grad_norm": 1.4068740291750244, |
| "learning_rate": 8.633301610170136e-06, |
| "loss": 0.5683, |
| "step": 154 |
| }, |
| { |
| "epoch": 0.2680501513186338, |
| "grad_norm": 1.3133616699361563, |
| "learning_rate": 8.613974319136959e-06, |
| "loss": 0.5693, |
| "step": 155 |
| }, |
| { |
| "epoch": 0.2697795071335927, |
| "grad_norm": 1.2872738406840478, |
| "learning_rate": 8.594533289572852e-06, |
| "loss": 0.5787, |
| "step": 156 |
| }, |
| { |
| "epoch": 0.27150886294855164, |
| "grad_norm": 1.3034688709492834, |
| "learning_rate": 8.574979133323378e-06, |
| "loss": 0.5644, |
| "step": 157 |
| }, |
| { |
| "epoch": 0.27323821876351057, |
| "grad_norm": 1.2605674494333527, |
| "learning_rate": 8.555312465794402e-06, |
| "loss": 0.5758, |
| "step": 158 |
| }, |
| { |
| "epoch": 0.27496757457846954, |
| "grad_norm": 1.3396122302856128, |
| "learning_rate": 8.535533905932739e-06, |
| "loss": 0.6039, |
| "step": 159 |
| }, |
| { |
| "epoch": 0.27669693039342846, |
| "grad_norm": 1.2966490137544022, |
| "learning_rate": 8.515644076206652e-06, |
| "loss": 0.5645, |
| "step": 160 |
| }, |
| { |
| "epoch": 0.2784262862083874, |
| "grad_norm": 1.4311476322477343, |
| "learning_rate": 8.495643602586287e-06, |
| "loss": 0.5705, |
| "step": 161 |
| }, |
| { |
| "epoch": 0.2801556420233463, |
| "grad_norm": 1.3422910724323573, |
| "learning_rate": 8.475533114523954e-06, |
| "loss": 0.5656, |
| "step": 162 |
| }, |
| { |
| "epoch": 0.28188499783830523, |
| "grad_norm": 1.2772032094100403, |
| "learning_rate": 8.455313244934324e-06, |
| "loss": 0.5907, |
| "step": 163 |
| }, |
| { |
| "epoch": 0.28361435365326415, |
| "grad_norm": 1.3072283691035707, |
| "learning_rate": 8.43498463017451e-06, |
| "loss": 0.5696, |
| "step": 164 |
| }, |
| { |
| "epoch": 0.2853437094682231, |
| "grad_norm": 1.3046935804589632, |
| "learning_rate": 8.414547910024035e-06, |
| "loss": 0.5775, |
| "step": 165 |
| }, |
| { |
| "epoch": 0.287073065283182, |
| "grad_norm": 1.2976841761251101, |
| "learning_rate": 8.39400372766471e-06, |
| "loss": 0.5714, |
| "step": 166 |
| }, |
| { |
| "epoch": 0.2888024210981409, |
| "grad_norm": 1.3247225670276452, |
| "learning_rate": 8.373352729660373e-06, |
| "loss": 0.5527, |
| "step": 167 |
| }, |
| { |
| "epoch": 0.2905317769130999, |
| "grad_norm": 1.3304518084464831, |
| "learning_rate": 8.352595565936554e-06, |
| "loss": 0.5772, |
| "step": 168 |
| }, |
| { |
| "epoch": 0.2922611327280588, |
| "grad_norm": 1.3567530464099045, |
| "learning_rate": 8.331732889760021e-06, |
| "loss": 0.5724, |
| "step": 169 |
| }, |
| { |
| "epoch": 0.29399048854301774, |
| "grad_norm": 1.3864092503101992, |
| "learning_rate": 8.310765357718207e-06, |
| "loss": 0.5785, |
| "step": 170 |
| }, |
| { |
| "epoch": 0.29571984435797666, |
| "grad_norm": 1.3431891176626012, |
| "learning_rate": 8.289693629698564e-06, |
| "loss": 0.5907, |
| "step": 171 |
| }, |
| { |
| "epoch": 0.2974492001729356, |
| "grad_norm": 1.3269529964125137, |
| "learning_rate": 8.268518368867781e-06, |
| "loss": 0.5794, |
| "step": 172 |
| }, |
| { |
| "epoch": 0.2991785559878945, |
| "grad_norm": 1.3677645716695777, |
| "learning_rate": 8.247240241650918e-06, |
| "loss": 0.5889, |
| "step": 173 |
| }, |
| { |
| "epoch": 0.3009079118028534, |
| "grad_norm": 1.4344791877726926, |
| "learning_rate": 8.22585991771044e-06, |
| "loss": 0.5501, |
| "step": 174 |
| }, |
| { |
| "epoch": 0.30263726761781234, |
| "grad_norm": 1.3773781206385498, |
| "learning_rate": 8.204378069925121e-06, |
| "loss": 0.5435, |
| "step": 175 |
| }, |
| { |
| "epoch": 0.30436662343277127, |
| "grad_norm": 1.3415343046809804, |
| "learning_rate": 8.182795374368893e-06, |
| "loss": 0.5658, |
| "step": 176 |
| }, |
| { |
| "epoch": 0.30609597924773024, |
| "grad_norm": 1.3607519825023242, |
| "learning_rate": 8.16111251028955e-06, |
| "loss": 0.5726, |
| "step": 177 |
| }, |
| { |
| "epoch": 0.30782533506268916, |
| "grad_norm": 1.3372886670110973, |
| "learning_rate": 8.139330160087374e-06, |
| "loss": 0.5801, |
| "step": 178 |
| }, |
| { |
| "epoch": 0.3095546908776481, |
| "grad_norm": 1.45444571694518, |
| "learning_rate": 8.117449009293668e-06, |
| "loss": 0.5675, |
| "step": 179 |
| }, |
| { |
| "epoch": 0.311284046692607, |
| "grad_norm": 1.4232508965452562, |
| "learning_rate": 8.095469746549172e-06, |
| "loss": 0.5502, |
| "step": 180 |
| }, |
| { |
| "epoch": 0.31301340250756593, |
| "grad_norm": 1.3513827085112067, |
| "learning_rate": 8.073393063582386e-06, |
| "loss": 0.5855, |
| "step": 181 |
| }, |
| { |
| "epoch": 0.31474275832252485, |
| "grad_norm": 1.3052608382865691, |
| "learning_rate": 8.051219655187818e-06, |
| "loss": 0.5618, |
| "step": 182 |
| }, |
| { |
| "epoch": 0.3164721141374838, |
| "grad_norm": 1.306514474737617, |
| "learning_rate": 8.0289502192041e-06, |
| "loss": 0.5617, |
| "step": 183 |
| }, |
| { |
| "epoch": 0.3182014699524427, |
| "grad_norm": 1.2967196879064988, |
| "learning_rate": 8.00658545649203e-06, |
| "loss": 0.5976, |
| "step": 184 |
| }, |
| { |
| "epoch": 0.3199308257674016, |
| "grad_norm": 1.4576437100632567, |
| "learning_rate": 7.984126070912519e-06, |
| "loss": 0.5688, |
| "step": 185 |
| }, |
| { |
| "epoch": 0.3216601815823606, |
| "grad_norm": 1.3704399401870944, |
| "learning_rate": 7.961572769304437e-06, |
| "loss": 0.5631, |
| "step": 186 |
| }, |
| { |
| "epoch": 0.3233895373973195, |
| "grad_norm": 1.319375380678081, |
| "learning_rate": 7.938926261462366e-06, |
| "loss": 0.5498, |
| "step": 187 |
| }, |
| { |
| "epoch": 0.32511889321227844, |
| "grad_norm": 1.3529344720904868, |
| "learning_rate": 7.916187260114264e-06, |
| "loss": 0.5423, |
| "step": 188 |
| }, |
| { |
| "epoch": 0.32684824902723736, |
| "grad_norm": 1.3027982855324163, |
| "learning_rate": 7.89335648089903e-06, |
| "loss": 0.582, |
| "step": 189 |
| }, |
| { |
| "epoch": 0.3285776048421963, |
| "grad_norm": 1.4050936006586736, |
| "learning_rate": 7.870434642343984e-06, |
| "loss": 0.5636, |
| "step": 190 |
| }, |
| { |
| "epoch": 0.3303069606571552, |
| "grad_norm": 1.2865924125836552, |
| "learning_rate": 7.84742246584226e-06, |
| "loss": 0.5494, |
| "step": 191 |
| }, |
| { |
| "epoch": 0.3320363164721141, |
| "grad_norm": 1.3715611689044482, |
| "learning_rate": 7.82432067563009e-06, |
| "loss": 0.566, |
| "step": 192 |
| }, |
| { |
| "epoch": 0.33376567228707305, |
| "grad_norm": 1.3718978175645358, |
| "learning_rate": 7.801129998764014e-06, |
| "loss": 0.5657, |
| "step": 193 |
| }, |
| { |
| "epoch": 0.33549502810203197, |
| "grad_norm": 1.2502241262211298, |
| "learning_rate": 7.777851165098012e-06, |
| "loss": 0.5656, |
| "step": 194 |
| }, |
| { |
| "epoch": 0.33722438391699094, |
| "grad_norm": 1.3852373692132978, |
| "learning_rate": 7.754484907260513e-06, |
| "loss": 0.5658, |
| "step": 195 |
| }, |
| { |
| "epoch": 0.33895373973194987, |
| "grad_norm": 1.3703136522991974, |
| "learning_rate": 7.731031960631354e-06, |
| "loss": 0.5716, |
| "step": 196 |
| }, |
| { |
| "epoch": 0.3406830955469088, |
| "grad_norm": 1.2486585135623867, |
| "learning_rate": 7.70749306331863e-06, |
| "loss": 0.5651, |
| "step": 197 |
| }, |
| { |
| "epoch": 0.3424124513618677, |
| "grad_norm": 1.2601495096730775, |
| "learning_rate": 7.68386895613546e-06, |
| "loss": 0.5546, |
| "step": 198 |
| }, |
| { |
| "epoch": 0.34414180717682663, |
| "grad_norm": 1.2502364746593353, |
| "learning_rate": 7.660160382576683e-06, |
| "loss": 0.5483, |
| "step": 199 |
| }, |
| { |
| "epoch": 0.34587116299178555, |
| "grad_norm": 1.3477149756990934, |
| "learning_rate": 7.636368088795451e-06, |
| "loss": 0.5472, |
| "step": 200 |
| }, |
| { |
| "epoch": 0.3476005188067445, |
| "grad_norm": 1.2901176761284996, |
| "learning_rate": 7.612492823579744e-06, |
| "loss": 0.5421, |
| "step": 201 |
| }, |
| { |
| "epoch": 0.3493298746217034, |
| "grad_norm": 1.4658846157690446, |
| "learning_rate": 7.588535338328816e-06, |
| "loss": 0.5906, |
| "step": 202 |
| }, |
| { |
| "epoch": 0.3510592304366623, |
| "grad_norm": 1.2964558327514957, |
| "learning_rate": 7.564496387029532e-06, |
| "loss": 0.5772, |
| "step": 203 |
| }, |
| { |
| "epoch": 0.3527885862516213, |
| "grad_norm": 1.2170162880503597, |
| "learning_rate": 7.540376726232648e-06, |
| "loss": 0.5808, |
| "step": 204 |
| }, |
| { |
| "epoch": 0.3545179420665802, |
| "grad_norm": 1.3354147357012909, |
| "learning_rate": 7.516177115029002e-06, |
| "loss": 0.5496, |
| "step": 205 |
| }, |
| { |
| "epoch": 0.35624729788153914, |
| "grad_norm": 1.3770402354419473, |
| "learning_rate": 7.491898315025615e-06, |
| "loss": 0.5718, |
| "step": 206 |
| }, |
| { |
| "epoch": 0.35797665369649806, |
| "grad_norm": 1.4004748528087516, |
| "learning_rate": 7.467541090321735e-06, |
| "loss": 0.5696, |
| "step": 207 |
| }, |
| { |
| "epoch": 0.359706009511457, |
| "grad_norm": 1.3103570302853653, |
| "learning_rate": 7.443106207484776e-06, |
| "loss": 0.5817, |
| "step": 208 |
| }, |
| { |
| "epoch": 0.3614353653264159, |
| "grad_norm": 1.4334519797119796, |
| "learning_rate": 7.4185944355261996e-06, |
| "loss": 0.5549, |
| "step": 209 |
| }, |
| { |
| "epoch": 0.3631647211413748, |
| "grad_norm": 1.397522237730983, |
| "learning_rate": 7.3940065458773146e-06, |
| "loss": 0.5436, |
| "step": 210 |
| }, |
| { |
| "epoch": 0.36489407695633375, |
| "grad_norm": 1.2965013000965513, |
| "learning_rate": 7.369343312364994e-06, |
| "loss": 0.5648, |
| "step": 211 |
| }, |
| { |
| "epoch": 0.36662343277129267, |
| "grad_norm": 1.3115268564578664, |
| "learning_rate": 7.344605511187322e-06, |
| "loss": 0.5663, |
| "step": 212 |
| }, |
| { |
| "epoch": 0.36835278858625164, |
| "grad_norm": 1.2857253578787209, |
| "learning_rate": 7.319793920889171e-06, |
| "loss": 0.5465, |
| "step": 213 |
| }, |
| { |
| "epoch": 0.37008214440121057, |
| "grad_norm": 1.2254130773211296, |
| "learning_rate": 7.294909322337689e-06, |
| "loss": 0.5332, |
| "step": 214 |
| }, |
| { |
| "epoch": 0.3718115002161695, |
| "grad_norm": 1.2505477215695469, |
| "learning_rate": 7.269952498697734e-06, |
| "loss": 0.5519, |
| "step": 215 |
| }, |
| { |
| "epoch": 0.3735408560311284, |
| "grad_norm": 1.2309151575793142, |
| "learning_rate": 7.244924235407224e-06, |
| "loss": 0.556, |
| "step": 216 |
| }, |
| { |
| "epoch": 0.37527021184608733, |
| "grad_norm": 1.2736490892998513, |
| "learning_rate": 7.219825320152411e-06, |
| "loss": 0.5753, |
| "step": 217 |
| }, |
| { |
| "epoch": 0.37699956766104625, |
| "grad_norm": 1.188067487294118, |
| "learning_rate": 7.194656542843103e-06, |
| "loss": 0.5457, |
| "step": 218 |
| }, |
| { |
| "epoch": 0.3787289234760052, |
| "grad_norm": 1.3573649976256121, |
| "learning_rate": 7.169418695587791e-06, |
| "loss": 0.5317, |
| "step": 219 |
| }, |
| { |
| "epoch": 0.3804582792909641, |
| "grad_norm": 1.3104765075971507, |
| "learning_rate": 7.1441125726687336e-06, |
| "loss": 0.5416, |
| "step": 220 |
| }, |
| { |
| "epoch": 0.382187635105923, |
| "grad_norm": 1.2524746976217112, |
| "learning_rate": 7.118738970516944e-06, |
| "loss": 0.5231, |
| "step": 221 |
| }, |
| { |
| "epoch": 0.383916990920882, |
| "grad_norm": 1.2912855193020514, |
| "learning_rate": 7.093298687687141e-06, |
| "loss": 0.577, |
| "step": 222 |
| }, |
| { |
| "epoch": 0.3856463467358409, |
| "grad_norm": 1.277812278463467, |
| "learning_rate": 7.067792524832604e-06, |
| "loss": 0.5627, |
| "step": 223 |
| }, |
| { |
| "epoch": 0.38737570255079984, |
| "grad_norm": 1.3047580618945316, |
| "learning_rate": 7.042221284679982e-06, |
| "loss": 0.5385, |
| "step": 224 |
| }, |
| { |
| "epoch": 0.38910505836575876, |
| "grad_norm": 1.1904223056651926, |
| "learning_rate": 7.016585772004026e-06, |
| "loss": 0.5504, |
| "step": 225 |
| }, |
| { |
| "epoch": 0.3908344141807177, |
| "grad_norm": 1.2398301118878279, |
| "learning_rate": 6.990886793602268e-06, |
| "loss": 0.5463, |
| "step": 226 |
| }, |
| { |
| "epoch": 0.3925637699956766, |
| "grad_norm": 1.2249280934558917, |
| "learning_rate": 6.965125158269619e-06, |
| "loss": 0.5524, |
| "step": 227 |
| }, |
| { |
| "epoch": 0.3942931258106355, |
| "grad_norm": 1.3151361816315885, |
| "learning_rate": 6.939301676772927e-06, |
| "loss": 0.5643, |
| "step": 228 |
| }, |
| { |
| "epoch": 0.39602248162559445, |
| "grad_norm": 1.4155270180747608, |
| "learning_rate": 6.913417161825449e-06, |
| "loss": 0.5396, |
| "step": 229 |
| }, |
| { |
| "epoch": 0.39775183744055337, |
| "grad_norm": 1.3122619817083747, |
| "learning_rate": 6.887472428061285e-06, |
| "loss": 0.5533, |
| "step": 230 |
| }, |
| { |
| "epoch": 0.39948119325551235, |
| "grad_norm": 1.2806770091938722, |
| "learning_rate": 6.8614682920097265e-06, |
| "loss": 0.5403, |
| "step": 231 |
| }, |
| { |
| "epoch": 0.40121054907047127, |
| "grad_norm": 1.3470173683287574, |
| "learning_rate": 6.835405572069572e-06, |
| "loss": 0.5395, |
| "step": 232 |
| }, |
| { |
| "epoch": 0.4029399048854302, |
| "grad_norm": 1.3345925151371414, |
| "learning_rate": 6.809285088483361e-06, |
| "loss": 0.5488, |
| "step": 233 |
| }, |
| { |
| "epoch": 0.4046692607003891, |
| "grad_norm": 1.364191950346444, |
| "learning_rate": 6.783107663311566e-06, |
| "loss": 0.5597, |
| "step": 234 |
| }, |
| { |
| "epoch": 0.40639861651534803, |
| "grad_norm": 1.229870751967945, |
| "learning_rate": 6.7568741204067145e-06, |
| "loss": 0.5532, |
| "step": 235 |
| }, |
| { |
| "epoch": 0.40812797233030695, |
| "grad_norm": 1.2511626652001453, |
| "learning_rate": 6.730585285387465e-06, |
| "loss": 0.5599, |
| "step": 236 |
| }, |
| { |
| "epoch": 0.4098573281452659, |
| "grad_norm": 1.1963629945054264, |
| "learning_rate": 6.704241985612625e-06, |
| "loss": 0.5662, |
| "step": 237 |
| }, |
| { |
| "epoch": 0.4115866839602248, |
| "grad_norm": 1.28804770356803, |
| "learning_rate": 6.6778450501551065e-06, |
| "loss": 0.5801, |
| "step": 238 |
| }, |
| { |
| "epoch": 0.4133160397751837, |
| "grad_norm": 1.2590013900847816, |
| "learning_rate": 6.651395309775837e-06, |
| "loss": 0.5644, |
| "step": 239 |
| }, |
| { |
| "epoch": 0.4150453955901427, |
| "grad_norm": 1.2551864064194094, |
| "learning_rate": 6.6248935968976135e-06, |
| "loss": 0.5394, |
| "step": 240 |
| }, |
| { |
| "epoch": 0.4167747514051016, |
| "grad_norm": 1.324809116558963, |
| "learning_rate": 6.598340745578908e-06, |
| "loss": 0.5698, |
| "step": 241 |
| }, |
| { |
| "epoch": 0.41850410722006054, |
| "grad_norm": 1.3179110819475957, |
| "learning_rate": 6.571737591487611e-06, |
| "loss": 0.5438, |
| "step": 242 |
| }, |
| { |
| "epoch": 0.42023346303501946, |
| "grad_norm": 1.2492716164423538, |
| "learning_rate": 6.545084971874738e-06, |
| "loss": 0.5534, |
| "step": 243 |
| }, |
| { |
| "epoch": 0.4219628188499784, |
| "grad_norm": 1.3793013860663719, |
| "learning_rate": 6.518383725548074e-06, |
| "loss": 0.5507, |
| "step": 244 |
| }, |
| { |
| "epoch": 0.4236921746649373, |
| "grad_norm": 1.2552437595758494, |
| "learning_rate": 6.491634692845781e-06, |
| "loss": 0.5283, |
| "step": 245 |
| }, |
| { |
| "epoch": 0.4254215304798962, |
| "grad_norm": 1.320285724366206, |
| "learning_rate": 6.464838715609945e-06, |
| "loss": 0.5381, |
| "step": 246 |
| }, |
| { |
| "epoch": 0.42715088629485515, |
| "grad_norm": 1.321244687463225, |
| "learning_rate": 6.437996637160086e-06, |
| "loss": 0.5632, |
| "step": 247 |
| }, |
| { |
| "epoch": 0.42888024210981407, |
| "grad_norm": 1.298650328971777, |
| "learning_rate": 6.411109302266616e-06, |
| "loss": 0.5436, |
| "step": 248 |
| }, |
| { |
| "epoch": 0.43060959792477305, |
| "grad_norm": 1.2466524194529058, |
| "learning_rate": 6.384177557124247e-06, |
| "loss": 0.536, |
| "step": 249 |
| }, |
| { |
| "epoch": 0.43233895373973197, |
| "grad_norm": 1.2659973914064022, |
| "learning_rate": 6.3572022493253715e-06, |
| "loss": 0.5666, |
| "step": 250 |
| }, |
| { |
| "epoch": 0.4340683095546909, |
| "grad_norm": 1.3125673782338234, |
| "learning_rate": 6.330184227833376e-06, |
| "loss": 0.5269, |
| "step": 251 |
| }, |
| { |
| "epoch": 0.4357976653696498, |
| "grad_norm": 1.2471832027915164, |
| "learning_rate": 6.303124342955928e-06, |
| "loss": 0.5277, |
| "step": 252 |
| }, |
| { |
| "epoch": 0.43752702118460873, |
| "grad_norm": 1.3130769407311655, |
| "learning_rate": 6.276023446318214e-06, |
| "loss": 0.5628, |
| "step": 253 |
| }, |
| { |
| "epoch": 0.43925637699956765, |
| "grad_norm": 1.2407127444236052, |
| "learning_rate": 6.248882390836135e-06, |
| "loss": 0.5583, |
| "step": 254 |
| }, |
| { |
| "epoch": 0.4409857328145266, |
| "grad_norm": 1.374714499529323, |
| "learning_rate": 6.2217020306894705e-06, |
| "loss": 0.5298, |
| "step": 255 |
| }, |
| { |
| "epoch": 0.4427150886294855, |
| "grad_norm": 1.4800838058998909, |
| "learning_rate": 6.194483221294989e-06, |
| "loss": 0.5469, |
| "step": 256 |
| }, |
| { |
| "epoch": 0.4444444444444444, |
| "grad_norm": 1.1817922709588438, |
| "learning_rate": 6.1672268192795285e-06, |
| "loss": 0.5425, |
| "step": 257 |
| }, |
| { |
| "epoch": 0.4461738002594034, |
| "grad_norm": 1.2263700782728304, |
| "learning_rate": 6.139933682453035e-06, |
| "loss": 0.5598, |
| "step": 258 |
| }, |
| { |
| "epoch": 0.4479031560743623, |
| "grad_norm": 1.3081432662421466, |
| "learning_rate": 6.112604669781572e-06, |
| "loss": 0.5451, |
| "step": 259 |
| }, |
| { |
| "epoch": 0.44963251188932124, |
| "grad_norm": 1.3868536914950849, |
| "learning_rate": 6.085240641360281e-06, |
| "loss": 0.5424, |
| "step": 260 |
| }, |
| { |
| "epoch": 0.45136186770428016, |
| "grad_norm": 1.2274166600226197, |
| "learning_rate": 6.057842458386315e-06, |
| "loss": 0.537, |
| "step": 261 |
| }, |
| { |
| "epoch": 0.4530912235192391, |
| "grad_norm": 1.272644249773423, |
| "learning_rate": 6.030410983131733e-06, |
| "loss": 0.56, |
| "step": 262 |
| }, |
| { |
| "epoch": 0.454820579334198, |
| "grad_norm": 1.2893096518593115, |
| "learning_rate": 6.002947078916365e-06, |
| "loss": 0.541, |
| "step": 263 |
| }, |
| { |
| "epoch": 0.4565499351491569, |
| "grad_norm": 1.2447414895886646, |
| "learning_rate": 5.975451610080643e-06, |
| "loss": 0.5439, |
| "step": 264 |
| }, |
| { |
| "epoch": 0.45827929096411585, |
| "grad_norm": 1.217222966677454, |
| "learning_rate": 5.947925441958393e-06, |
| "loss": 0.542, |
| "step": 265 |
| }, |
| { |
| "epoch": 0.46000864677907477, |
| "grad_norm": 1.2886825234554569, |
| "learning_rate": 5.920369440849609e-06, |
| "loss": 0.5543, |
| "step": 266 |
| }, |
| { |
| "epoch": 0.46173800259403375, |
| "grad_norm": 1.3251969210640597, |
| "learning_rate": 5.892784473993184e-06, |
| "loss": 0.5399, |
| "step": 267 |
| }, |
| { |
| "epoch": 0.46346735840899267, |
| "grad_norm": 1.2268752980398028, |
| "learning_rate": 5.865171409539614e-06, |
| "loss": 0.5467, |
| "step": 268 |
| }, |
| { |
| "epoch": 0.4651967142239516, |
| "grad_norm": 1.3068892898030797, |
| "learning_rate": 5.837531116523683e-06, |
| "loss": 0.5401, |
| "step": 269 |
| }, |
| { |
| "epoch": 0.4669260700389105, |
| "grad_norm": 1.2594383194818888, |
| "learning_rate": 5.809864464837105e-06, |
| "loss": 0.5584, |
| "step": 270 |
| }, |
| { |
| "epoch": 0.46865542585386943, |
| "grad_norm": 1.2235669424052997, |
| "learning_rate": 5.782172325201155e-06, |
| "loss": 0.5311, |
| "step": 271 |
| }, |
| { |
| "epoch": 0.47038478166882836, |
| "grad_norm": 1.3219449360910378, |
| "learning_rate": 5.754455569139258e-06, |
| "loss": 0.5299, |
| "step": 272 |
| }, |
| { |
| "epoch": 0.4721141374837873, |
| "grad_norm": 1.2887549822531428, |
| "learning_rate": 5.726715068949564e-06, |
| "loss": 0.5451, |
| "step": 273 |
| }, |
| { |
| "epoch": 0.4738434932987462, |
| "grad_norm": 1.2358733083826365, |
| "learning_rate": 5.698951697677498e-06, |
| "loss": 0.5379, |
| "step": 274 |
| }, |
| { |
| "epoch": 0.4755728491137051, |
| "grad_norm": 1.3090051655251154, |
| "learning_rate": 5.671166329088278e-06, |
| "loss": 0.5213, |
| "step": 275 |
| }, |
| { |
| "epoch": 0.4773022049286641, |
| "grad_norm": 1.3042917976396255, |
| "learning_rate": 5.643359837639419e-06, |
| "loss": 0.5235, |
| "step": 276 |
| }, |
| { |
| "epoch": 0.479031560743623, |
| "grad_norm": 1.298269280525944, |
| "learning_rate": 5.615533098453215e-06, |
| "loss": 0.5545, |
| "step": 277 |
| }, |
| { |
| "epoch": 0.48076091655858194, |
| "grad_norm": 1.2012345750779942, |
| "learning_rate": 5.587686987289189e-06, |
| "loss": 0.5544, |
| "step": 278 |
| }, |
| { |
| "epoch": 0.48249027237354086, |
| "grad_norm": 1.3007588033367934, |
| "learning_rate": 5.559822380516539e-06, |
| "loss": 0.5271, |
| "step": 279 |
| }, |
| { |
| "epoch": 0.4842196281884998, |
| "grad_norm": 1.3534538377292569, |
| "learning_rate": 5.531940155086557e-06, |
| "loss": 0.5688, |
| "step": 280 |
| }, |
| { |
| "epoch": 0.4859489840034587, |
| "grad_norm": 1.243334314478317, |
| "learning_rate": 5.504041188505022e-06, |
| "loss": 0.5473, |
| "step": 281 |
| }, |
| { |
| "epoch": 0.4876783398184176, |
| "grad_norm": 1.306645781202671, |
| "learning_rate": 5.476126358804594e-06, |
| "loss": 0.5422, |
| "step": 282 |
| }, |
| { |
| "epoch": 0.48940769563337655, |
| "grad_norm": 1.280897747736695, |
| "learning_rate": 5.448196544517168e-06, |
| "loss": 0.5516, |
| "step": 283 |
| }, |
| { |
| "epoch": 0.49113705144833547, |
| "grad_norm": 1.4064375573811547, |
| "learning_rate": 5.420252624646238e-06, |
| "loss": 0.5467, |
| "step": 284 |
| }, |
| { |
| "epoch": 0.49286640726329445, |
| "grad_norm": 1.4119469280433727, |
| "learning_rate": 5.392295478639226e-06, |
| "loss": 0.5712, |
| "step": 285 |
| }, |
| { |
| "epoch": 0.49459576307825337, |
| "grad_norm": 1.3832811952775954, |
| "learning_rate": 5.3643259863598015e-06, |
| "loss": 0.5311, |
| "step": 286 |
| }, |
| { |
| "epoch": 0.4963251188932123, |
| "grad_norm": 1.2793380120458782, |
| "learning_rate": 5.336345028060199e-06, |
| "loss": 0.5464, |
| "step": 287 |
| }, |
| { |
| "epoch": 0.4980544747081712, |
| "grad_norm": 1.325075817941503, |
| "learning_rate": 5.308353484353508e-06, |
| "loss": 0.5472, |
| "step": 288 |
| }, |
| { |
| "epoch": 0.49978383052313013, |
| "grad_norm": 1.323776420208779, |
| "learning_rate": 5.2803522361859596e-06, |
| "loss": 0.5162, |
| "step": 289 |
| }, |
| { |
| "epoch": 0.5015131863380891, |
| "grad_norm": 1.2724751142456727, |
| "learning_rate": 5.252342164809204e-06, |
| "loss": 0.5419, |
| "step": 290 |
| }, |
| { |
| "epoch": 0.503242542153048, |
| "grad_norm": 1.3155061940618022, |
| "learning_rate": 5.224324151752575e-06, |
| "loss": 0.5108, |
| "step": 291 |
| }, |
| { |
| "epoch": 0.5049718979680069, |
| "grad_norm": 1.325463563646962, |
| "learning_rate": 5.1962990787953436e-06, |
| "loss": 0.556, |
| "step": 292 |
| }, |
| { |
| "epoch": 0.5067012537829658, |
| "grad_norm": 1.3557284931566147, |
| "learning_rate": 5.168267827938971e-06, |
| "loss": 0.5205, |
| "step": 293 |
| }, |
| { |
| "epoch": 0.5084306095979247, |
| "grad_norm": 1.4690183640639547, |
| "learning_rate": 5.140231281379345e-06, |
| "loss": 0.5314, |
| "step": 294 |
| }, |
| { |
| "epoch": 0.5101599654128837, |
| "grad_norm": 1.3889196264518369, |
| "learning_rate": 5.112190321479026e-06, |
| "loss": 0.5421, |
| "step": 295 |
| }, |
| { |
| "epoch": 0.5118893212278426, |
| "grad_norm": 1.235942882014356, |
| "learning_rate": 5.084145830739462e-06, |
| "loss": 0.5311, |
| "step": 296 |
| }, |
| { |
| "epoch": 0.5136186770428015, |
| "grad_norm": 1.306512327093981, |
| "learning_rate": 5.05609869177323e-06, |
| "loss": 0.5273, |
| "step": 297 |
| }, |
| { |
| "epoch": 0.5153480328577604, |
| "grad_norm": 1.5917366093172325, |
| "learning_rate": 5.028049787276249e-06, |
| "loss": 0.5129, |
| "step": 298 |
| }, |
| { |
| "epoch": 0.5170773886727195, |
| "grad_norm": 1.40594572028163, |
| "learning_rate": 5e-06, |
| "loss": 0.5203, |
| "step": 299 |
| }, |
| { |
| "epoch": 0.5188067444876784, |
| "grad_norm": 1.262588520269215, |
| "learning_rate": 4.971950212723753e-06, |
| "loss": 0.5055, |
| "step": 300 |
| }, |
| { |
| "epoch": 0.5205361003026373, |
| "grad_norm": 1.4092188833304773, |
| "learning_rate": 4.943901308226771e-06, |
| "loss": 0.5434, |
| "step": 301 |
| }, |
| { |
| "epoch": 0.5222654561175962, |
| "grad_norm": 1.382690963218948, |
| "learning_rate": 4.91585416926054e-06, |
| "loss": 0.5449, |
| "step": 302 |
| }, |
| { |
| "epoch": 0.5239948119325551, |
| "grad_norm": 1.3495688952134626, |
| "learning_rate": 4.887809678520976e-06, |
| "loss": 0.5277, |
| "step": 303 |
| }, |
| { |
| "epoch": 0.5257241677475141, |
| "grad_norm": 1.2421088006172942, |
| "learning_rate": 4.859768718620656e-06, |
| "loss": 0.5393, |
| "step": 304 |
| }, |
| { |
| "epoch": 0.527453523562473, |
| "grad_norm": 1.2488445572106857, |
| "learning_rate": 4.831732172061032e-06, |
| "loss": 0.5225, |
| "step": 305 |
| }, |
| { |
| "epoch": 0.5291828793774319, |
| "grad_norm": 1.3844825536152099, |
| "learning_rate": 4.803700921204659e-06, |
| "loss": 0.5448, |
| "step": 306 |
| }, |
| { |
| "epoch": 0.5309122351923908, |
| "grad_norm": 1.2550069336336056, |
| "learning_rate": 4.775675848247427e-06, |
| "loss": 0.5326, |
| "step": 307 |
| }, |
| { |
| "epoch": 0.5326415910073498, |
| "grad_norm": 1.3042371941510982, |
| "learning_rate": 4.747657835190796e-06, |
| "loss": 0.5485, |
| "step": 308 |
| }, |
| { |
| "epoch": 0.5343709468223087, |
| "grad_norm": 1.439469833400696, |
| "learning_rate": 4.719647763814041e-06, |
| "loss": 0.5235, |
| "step": 309 |
| }, |
| { |
| "epoch": 0.5361003026372676, |
| "grad_norm": 1.258005734371576, |
| "learning_rate": 4.691646515646492e-06, |
| "loss": 0.5049, |
| "step": 310 |
| }, |
| { |
| "epoch": 0.5378296584522265, |
| "grad_norm": 1.2574136262278794, |
| "learning_rate": 4.663654971939802e-06, |
| "loss": 0.5306, |
| "step": 311 |
| }, |
| { |
| "epoch": 0.5395590142671854, |
| "grad_norm": 1.211366769644713, |
| "learning_rate": 4.6356740136402e-06, |
| "loss": 0.5158, |
| "step": 312 |
| }, |
| { |
| "epoch": 0.5412883700821444, |
| "grad_norm": 1.404490080536933, |
| "learning_rate": 4.6077045213607765e-06, |
| "loss": 0.5106, |
| "step": 313 |
| }, |
| { |
| "epoch": 0.5430177258971033, |
| "grad_norm": 1.3565090769351222, |
| "learning_rate": 4.579747375353763e-06, |
| "loss": 0.5115, |
| "step": 314 |
| }, |
| { |
| "epoch": 0.5447470817120622, |
| "grad_norm": 1.3745499639927738, |
| "learning_rate": 4.551803455482833e-06, |
| "loss": 0.5352, |
| "step": 315 |
| }, |
| { |
| "epoch": 0.5464764375270211, |
| "grad_norm": 1.3375963306559595, |
| "learning_rate": 4.5238736411954075e-06, |
| "loss": 0.5149, |
| "step": 316 |
| }, |
| { |
| "epoch": 0.5482057933419802, |
| "grad_norm": 1.2416690865187434, |
| "learning_rate": 4.4959588114949785e-06, |
| "loss": 0.5183, |
| "step": 317 |
| }, |
| { |
| "epoch": 0.5499351491569391, |
| "grad_norm": 1.2969100557148994, |
| "learning_rate": 4.468059844913444e-06, |
| "loss": 0.5305, |
| "step": 318 |
| }, |
| { |
| "epoch": 0.551664504971898, |
| "grad_norm": 1.3028745121039649, |
| "learning_rate": 4.4401776194834615e-06, |
| "loss": 0.53, |
| "step": 319 |
| }, |
| { |
| "epoch": 0.5533938607868569, |
| "grad_norm": 1.2529196452246425, |
| "learning_rate": 4.4123130127108125e-06, |
| "loss": 0.5631, |
| "step": 320 |
| }, |
| { |
| "epoch": 0.5551232166018158, |
| "grad_norm": 1.2914281020735483, |
| "learning_rate": 4.384466901546786e-06, |
| "loss": 0.5029, |
| "step": 321 |
| }, |
| { |
| "epoch": 0.5568525724167748, |
| "grad_norm": 1.3030244428041313, |
| "learning_rate": 4.356640162360582e-06, |
| "loss": 0.5547, |
| "step": 322 |
| }, |
| { |
| "epoch": 0.5585819282317337, |
| "grad_norm": 1.310975612444778, |
| "learning_rate": 4.3288336709117246e-06, |
| "loss": 0.5476, |
| "step": 323 |
| }, |
| { |
| "epoch": 0.5603112840466926, |
| "grad_norm": 1.3585233059927981, |
| "learning_rate": 4.3010483023225045e-06, |
| "loss": 0.5096, |
| "step": 324 |
| }, |
| { |
| "epoch": 0.5620406398616515, |
| "grad_norm": 1.3858533161552185, |
| "learning_rate": 4.273284931050438e-06, |
| "loss": 0.5432, |
| "step": 325 |
| }, |
| { |
| "epoch": 0.5637699956766105, |
| "grad_norm": 1.3005876771208573, |
| "learning_rate": 4.245544430860743e-06, |
| "loss": 0.5339, |
| "step": 326 |
| }, |
| { |
| "epoch": 0.5654993514915694, |
| "grad_norm": 1.3987689407306876, |
| "learning_rate": 4.217827674798845e-06, |
| "loss": 0.5096, |
| "step": 327 |
| }, |
| { |
| "epoch": 0.5672287073065283, |
| "grad_norm": 1.309864053992572, |
| "learning_rate": 4.190135535162894e-06, |
| "loss": 0.5074, |
| "step": 328 |
| }, |
| { |
| "epoch": 0.5689580631214872, |
| "grad_norm": 1.3268629259565734, |
| "learning_rate": 4.162468883476319e-06, |
| "loss": 0.5386, |
| "step": 329 |
| }, |
| { |
| "epoch": 0.5706874189364461, |
| "grad_norm": 1.3349966799384403, |
| "learning_rate": 4.1348285904603865e-06, |
| "loss": 0.5208, |
| "step": 330 |
| }, |
| { |
| "epoch": 0.5724167747514051, |
| "grad_norm": 1.2661602913195409, |
| "learning_rate": 4.107215526006818e-06, |
| "loss": 0.5107, |
| "step": 331 |
| }, |
| { |
| "epoch": 0.574146130566364, |
| "grad_norm": 1.2797920680965094, |
| "learning_rate": 4.079630559150392e-06, |
| "loss": 0.513, |
| "step": 332 |
| }, |
| { |
| "epoch": 0.5758754863813229, |
| "grad_norm": 1.3077982663455536, |
| "learning_rate": 4.052074558041608e-06, |
| "loss": 0.5154, |
| "step": 333 |
| }, |
| { |
| "epoch": 0.5776048421962818, |
| "grad_norm": 1.3760657710228763, |
| "learning_rate": 4.02454838991936e-06, |
| "loss": 0.5294, |
| "step": 334 |
| }, |
| { |
| "epoch": 0.5793341980112409, |
| "grad_norm": 1.184811772801038, |
| "learning_rate": 3.997052921083637e-06, |
| "loss": 0.4896, |
| "step": 335 |
| }, |
| { |
| "epoch": 0.5810635538261998, |
| "grad_norm": 1.4503676384261956, |
| "learning_rate": 3.969589016868269e-06, |
| "loss": 0.5131, |
| "step": 336 |
| }, |
| { |
| "epoch": 0.5827929096411587, |
| "grad_norm": 1.2572496861000189, |
| "learning_rate": 3.9421575416136866e-06, |
| "loss": 0.5356, |
| "step": 337 |
| }, |
| { |
| "epoch": 0.5845222654561176, |
| "grad_norm": 1.2168932626954965, |
| "learning_rate": 3.9147593586397195e-06, |
| "loss": 0.5012, |
| "step": 338 |
| }, |
| { |
| "epoch": 0.5862516212710766, |
| "grad_norm": 1.335579085108439, |
| "learning_rate": 3.887395330218429e-06, |
| "loss": 0.5073, |
| "step": 339 |
| }, |
| { |
| "epoch": 0.5879809770860355, |
| "grad_norm": 1.2148641410862266, |
| "learning_rate": 3.8600663175469675e-06, |
| "loss": 0.5216, |
| "step": 340 |
| }, |
| { |
| "epoch": 0.5897103329009944, |
| "grad_norm": 1.2477985045133986, |
| "learning_rate": 3.832773180720475e-06, |
| "loss": 0.509, |
| "step": 341 |
| }, |
| { |
| "epoch": 0.5914396887159533, |
| "grad_norm": 1.3416766428515066, |
| "learning_rate": 3.8055167787050134e-06, |
| "loss": 0.5163, |
| "step": 342 |
| }, |
| { |
| "epoch": 0.5931690445309122, |
| "grad_norm": 1.2682767103868227, |
| "learning_rate": 3.778297969310529e-06, |
| "loss": 0.527, |
| "step": 343 |
| }, |
| { |
| "epoch": 0.5948984003458712, |
| "grad_norm": 1.2414041985446462, |
| "learning_rate": 3.751117609163865e-06, |
| "loss": 0.5001, |
| "step": 344 |
| }, |
| { |
| "epoch": 0.5966277561608301, |
| "grad_norm": 1.375054008335257, |
| "learning_rate": 3.723976553681787e-06, |
| "loss": 0.5342, |
| "step": 345 |
| }, |
| { |
| "epoch": 0.598357111975789, |
| "grad_norm": 1.2480963236384548, |
| "learning_rate": 3.6968756570440735e-06, |
| "loss": 0.5028, |
| "step": 346 |
| }, |
| { |
| "epoch": 0.6000864677907479, |
| "grad_norm": 1.2159599196486677, |
| "learning_rate": 3.669815772166625e-06, |
| "loss": 0.5125, |
| "step": 347 |
| }, |
| { |
| "epoch": 0.6018158236057068, |
| "grad_norm": 1.4104148060749142, |
| "learning_rate": 3.6427977506746293e-06, |
| "loss": 0.5207, |
| "step": 348 |
| }, |
| { |
| "epoch": 0.6035451794206658, |
| "grad_norm": 1.1950066056646578, |
| "learning_rate": 3.6158224428757538e-06, |
| "loss": 0.4943, |
| "step": 349 |
| }, |
| { |
| "epoch": 0.6052745352356247, |
| "grad_norm": 1.147453355609145, |
| "learning_rate": 3.5888906977333858e-06, |
| "loss": 0.4992, |
| "step": 350 |
| }, |
| { |
| "epoch": 0.6070038910505836, |
| "grad_norm": 1.4493578720570028, |
| "learning_rate": 3.562003362839914e-06, |
| "loss": 0.5213, |
| "step": 351 |
| }, |
| { |
| "epoch": 0.6087332468655425, |
| "grad_norm": 1.2658733112800329, |
| "learning_rate": 3.5351612843900555e-06, |
| "loss": 0.5056, |
| "step": 352 |
| }, |
| { |
| "epoch": 0.6104626026805016, |
| "grad_norm": 1.1757360378271826, |
| "learning_rate": 3.50836530715422e-06, |
| "loss": 0.4856, |
| "step": 353 |
| }, |
| { |
| "epoch": 0.6121919584954605, |
| "grad_norm": 1.3055141846929021, |
| "learning_rate": 3.4816162744519266e-06, |
| "loss": 0.5317, |
| "step": 354 |
| }, |
| { |
| "epoch": 0.6139213143104194, |
| "grad_norm": 1.37730807773082, |
| "learning_rate": 3.4549150281252635e-06, |
| "loss": 0.5159, |
| "step": 355 |
| }, |
| { |
| "epoch": 0.6156506701253783, |
| "grad_norm": 1.1947534769598933, |
| "learning_rate": 3.42826240851239e-06, |
| "loss": 0.5214, |
| "step": 356 |
| }, |
| { |
| "epoch": 0.6173800259403373, |
| "grad_norm": 1.3226134189067087, |
| "learning_rate": 3.4016592544210937e-06, |
| "loss": 0.5264, |
| "step": 357 |
| }, |
| { |
| "epoch": 0.6191093817552962, |
| "grad_norm": 1.41629808960968, |
| "learning_rate": 3.375106403102389e-06, |
| "loss": 0.5379, |
| "step": 358 |
| }, |
| { |
| "epoch": 0.6208387375702551, |
| "grad_norm": 1.3266049262461792, |
| "learning_rate": 3.3486046902241663e-06, |
| "loss": 0.5017, |
| "step": 359 |
| }, |
| { |
| "epoch": 0.622568093385214, |
| "grad_norm": 1.3117139861674612, |
| "learning_rate": 3.322154949844897e-06, |
| "loss": 0.5066, |
| "step": 360 |
| }, |
| { |
| "epoch": 0.6242974492001729, |
| "grad_norm": 1.275223937661878, |
| "learning_rate": 3.295758014387375e-06, |
| "loss": 0.5107, |
| "step": 361 |
| }, |
| { |
| "epoch": 0.6260268050151319, |
| "grad_norm": 1.3609897446280972, |
| "learning_rate": 3.269414714612534e-06, |
| "loss": 0.5211, |
| "step": 362 |
| }, |
| { |
| "epoch": 0.6277561608300908, |
| "grad_norm": 1.4136951954746158, |
| "learning_rate": 3.2431258795932863e-06, |
| "loss": 0.5299, |
| "step": 363 |
| }, |
| { |
| "epoch": 0.6294855166450497, |
| "grad_norm": 1.3164541206983948, |
| "learning_rate": 3.216892336688435e-06, |
| "loss": 0.5251, |
| "step": 364 |
| }, |
| { |
| "epoch": 0.6312148724600086, |
| "grad_norm": 1.2729230339815254, |
| "learning_rate": 3.1907149115166403e-06, |
| "loss": 0.5052, |
| "step": 365 |
| }, |
| { |
| "epoch": 0.6329442282749675, |
| "grad_norm": 1.3720577760394352, |
| "learning_rate": 3.1645944279304296e-06, |
| "loss": 0.5142, |
| "step": 366 |
| }, |
| { |
| "epoch": 0.6346735840899265, |
| "grad_norm": 1.3838881292780676, |
| "learning_rate": 3.1385317079902743e-06, |
| "loss": 0.5301, |
| "step": 367 |
| }, |
| { |
| "epoch": 0.6364029399048854, |
| "grad_norm": 1.1823914759517473, |
| "learning_rate": 3.112527571938717e-06, |
| "loss": 0.5038, |
| "step": 368 |
| }, |
| { |
| "epoch": 0.6381322957198443, |
| "grad_norm": 1.4852396956399265, |
| "learning_rate": 3.0865828381745515e-06, |
| "loss": 0.5383, |
| "step": 369 |
| }, |
| { |
| "epoch": 0.6398616515348032, |
| "grad_norm": 1.3537270664670102, |
| "learning_rate": 3.060698323227075e-06, |
| "loss": 0.507, |
| "step": 370 |
| }, |
| { |
| "epoch": 0.6415910073497623, |
| "grad_norm": 1.313357691607948, |
| "learning_rate": 3.0348748417303826e-06, |
| "loss": 0.5076, |
| "step": 371 |
| }, |
| { |
| "epoch": 0.6433203631647212, |
| "grad_norm": 1.3130162160688268, |
| "learning_rate": 3.0091132063977344e-06, |
| "loss": 0.5267, |
| "step": 372 |
| }, |
| { |
| "epoch": 0.6450497189796801, |
| "grad_norm": 1.1920662735224292, |
| "learning_rate": 2.9834142279959754e-06, |
| "loss": 0.5155, |
| "step": 373 |
| }, |
| { |
| "epoch": 0.646779074794639, |
| "grad_norm": 1.2025632883759985, |
| "learning_rate": 2.95777871532002e-06, |
| "loss": 0.5096, |
| "step": 374 |
| }, |
| { |
| "epoch": 0.648508430609598, |
| "grad_norm": 1.3113109709173287, |
| "learning_rate": 2.932207475167398e-06, |
| "loss": 0.5002, |
| "step": 375 |
| }, |
| { |
| "epoch": 0.6502377864245569, |
| "grad_norm": 1.2461184315574925, |
| "learning_rate": 2.906701312312861e-06, |
| "loss": 0.5169, |
| "step": 376 |
| }, |
| { |
| "epoch": 0.6519671422395158, |
| "grad_norm": 1.2608068635952254, |
| "learning_rate": 2.8812610294830568e-06, |
| "loss": 0.5043, |
| "step": 377 |
| }, |
| { |
| "epoch": 0.6536964980544747, |
| "grad_norm": 1.322071977690957, |
| "learning_rate": 2.8558874273312677e-06, |
| "loss": 0.5039, |
| "step": 378 |
| }, |
| { |
| "epoch": 0.6554258538694336, |
| "grad_norm": 1.2333609191041428, |
| "learning_rate": 2.83058130441221e-06, |
| "loss": 0.5066, |
| "step": 379 |
| }, |
| { |
| "epoch": 0.6571552096843926, |
| "grad_norm": 1.2541631476872825, |
| "learning_rate": 2.805343457156898e-06, |
| "loss": 0.5191, |
| "step": 380 |
| }, |
| { |
| "epoch": 0.6588845654993515, |
| "grad_norm": 1.279925954820817, |
| "learning_rate": 2.7801746798475905e-06, |
| "loss": 0.4903, |
| "step": 381 |
| }, |
| { |
| "epoch": 0.6606139213143104, |
| "grad_norm": 1.2393630411242251, |
| "learning_rate": 2.755075764592777e-06, |
| "loss": 0.5007, |
| "step": 382 |
| }, |
| { |
| "epoch": 0.6623432771292693, |
| "grad_norm": 1.2113955627282111, |
| "learning_rate": 2.7300475013022666e-06, |
| "loss": 0.5187, |
| "step": 383 |
| }, |
| { |
| "epoch": 0.6640726329442282, |
| "grad_norm": 1.332875296702971, |
| "learning_rate": 2.705090677662311e-06, |
| "loss": 0.5116, |
| "step": 384 |
| }, |
| { |
| "epoch": 0.6658019887591872, |
| "grad_norm": 1.1776968966832972, |
| "learning_rate": 2.6802060791108304e-06, |
| "loss": 0.4994, |
| "step": 385 |
| }, |
| { |
| "epoch": 0.6675313445741461, |
| "grad_norm": 1.2110870121335493, |
| "learning_rate": 2.6553944888126772e-06, |
| "loss": 0.5184, |
| "step": 386 |
| }, |
| { |
| "epoch": 0.669260700389105, |
| "grad_norm": 1.3343623955630721, |
| "learning_rate": 2.6306566876350072e-06, |
| "loss": 0.5315, |
| "step": 387 |
| }, |
| { |
| "epoch": 0.6709900562040639, |
| "grad_norm": 1.238593700529844, |
| "learning_rate": 2.605993454122687e-06, |
| "loss": 0.5211, |
| "step": 388 |
| }, |
| { |
| "epoch": 0.672719412019023, |
| "grad_norm": 1.2623050551820758, |
| "learning_rate": 2.5814055644738013e-06, |
| "loss": 0.4985, |
| "step": 389 |
| }, |
| { |
| "epoch": 0.6744487678339819, |
| "grad_norm": 1.313293854036044, |
| "learning_rate": 2.5568937925152272e-06, |
| "loss": 0.524, |
| "step": 390 |
| }, |
| { |
| "epoch": 0.6761781236489408, |
| "grad_norm": 1.2517836743413833, |
| "learning_rate": 2.532458909678266e-06, |
| "loss": 0.5098, |
| "step": 391 |
| }, |
| { |
| "epoch": 0.6779074794638997, |
| "grad_norm": 1.1520446930274513, |
| "learning_rate": 2.508101684974387e-06, |
| "loss": 0.5015, |
| "step": 392 |
| }, |
| { |
| "epoch": 0.6796368352788587, |
| "grad_norm": 1.3005504514213648, |
| "learning_rate": 2.483822884971e-06, |
| "loss": 0.4803, |
| "step": 393 |
| }, |
| { |
| "epoch": 0.6813661910938176, |
| "grad_norm": 1.234117732958065, |
| "learning_rate": 2.4596232737673544e-06, |
| "loss": 0.5196, |
| "step": 394 |
| }, |
| { |
| "epoch": 0.6830955469087765, |
| "grad_norm": 1.1981906278111187, |
| "learning_rate": 2.43550361297047e-06, |
| "loss": 0.5059, |
| "step": 395 |
| }, |
| { |
| "epoch": 0.6848249027237354, |
| "grad_norm": 1.2997430500346672, |
| "learning_rate": 2.4114646616711844e-06, |
| "loss": 0.5008, |
| "step": 396 |
| }, |
| { |
| "epoch": 0.6865542585386943, |
| "grad_norm": 1.2614162559191038, |
| "learning_rate": 2.387507176420256e-06, |
| "loss": 0.502, |
| "step": 397 |
| }, |
| { |
| "epoch": 0.6882836143536533, |
| "grad_norm": 1.188230979407375, |
| "learning_rate": 2.3636319112045495e-06, |
| "loss": 0.5164, |
| "step": 398 |
| }, |
| { |
| "epoch": 0.6900129701686122, |
| "grad_norm": 1.334435091915391, |
| "learning_rate": 2.339839617423318e-06, |
| "loss": 0.5069, |
| "step": 399 |
| }, |
| { |
| "epoch": 0.6917423259835711, |
| "grad_norm": 1.2428049634796243, |
| "learning_rate": 2.31613104386454e-06, |
| "loss": 0.4934, |
| "step": 400 |
| }, |
| { |
| "epoch": 0.69347168179853, |
| "grad_norm": 1.283895066008161, |
| "learning_rate": 2.2925069366813718e-06, |
| "loss": 0.5075, |
| "step": 401 |
| }, |
| { |
| "epoch": 0.695201037613489, |
| "grad_norm": 1.276327425480895, |
| "learning_rate": 2.2689680393686457e-06, |
| "loss": 0.5138, |
| "step": 402 |
| }, |
| { |
| "epoch": 0.6969303934284479, |
| "grad_norm": 1.2630101628882442, |
| "learning_rate": 2.245515092739488e-06, |
| "loss": 0.5116, |
| "step": 403 |
| }, |
| { |
| "epoch": 0.6986597492434068, |
| "grad_norm": 1.1913216900412702, |
| "learning_rate": 2.2221488349019903e-06, |
| "loss": 0.5139, |
| "step": 404 |
| }, |
| { |
| "epoch": 0.7003891050583657, |
| "grad_norm": 1.3575890195381133, |
| "learning_rate": 2.1988700012359865e-06, |
| "loss": 0.4975, |
| "step": 405 |
| }, |
| { |
| "epoch": 0.7021184608733246, |
| "grad_norm": 1.205731465743301, |
| "learning_rate": 2.175679324369913e-06, |
| "loss": 0.5196, |
| "step": 406 |
| }, |
| { |
| "epoch": 0.7038478166882837, |
| "grad_norm": 1.2794680777002916, |
| "learning_rate": 2.1525775341577404e-06, |
| "loss": 0.4966, |
| "step": 407 |
| }, |
| { |
| "epoch": 0.7055771725032426, |
| "grad_norm": 1.2296036184996322, |
| "learning_rate": 2.1295653576560168e-06, |
| "loss": 0.4951, |
| "step": 408 |
| }, |
| { |
| "epoch": 0.7073065283182015, |
| "grad_norm": 1.2044790286403084, |
| "learning_rate": 2.1066435191009717e-06, |
| "loss": 0.4853, |
| "step": 409 |
| }, |
| { |
| "epoch": 0.7090358841331604, |
| "grad_norm": 1.2218236581464923, |
| "learning_rate": 2.083812739885738e-06, |
| "loss": 0.4934, |
| "step": 410 |
| }, |
| { |
| "epoch": 0.7107652399481194, |
| "grad_norm": 1.4009155350987372, |
| "learning_rate": 2.061073738537635e-06, |
| "loss": 0.4946, |
| "step": 411 |
| }, |
| { |
| "epoch": 0.7124945957630783, |
| "grad_norm": 1.1812811747354344, |
| "learning_rate": 2.038427230695565e-06, |
| "loss": 0.4951, |
| "step": 412 |
| }, |
| { |
| "epoch": 0.7142239515780372, |
| "grad_norm": 1.2191387291107492, |
| "learning_rate": 2.0158739290874822e-06, |
| "loss": 0.4986, |
| "step": 413 |
| }, |
| { |
| "epoch": 0.7159533073929961, |
| "grad_norm": 1.329295095070758, |
| "learning_rate": 1.9934145435079705e-06, |
| "loss": 0.519, |
| "step": 414 |
| }, |
| { |
| "epoch": 0.717682663207955, |
| "grad_norm": 1.2281812463093098, |
| "learning_rate": 1.971049780795901e-06, |
| "loss": 0.5497, |
| "step": 415 |
| }, |
| { |
| "epoch": 0.719412019022914, |
| "grad_norm": 1.179567921331548, |
| "learning_rate": 1.948780344812181e-06, |
| "loss": 0.4875, |
| "step": 416 |
| }, |
| { |
| "epoch": 0.7211413748378729, |
| "grad_norm": 1.3229803278267123, |
| "learning_rate": 1.9266069364176144e-06, |
| "loss": 0.513, |
| "step": 417 |
| }, |
| { |
| "epoch": 0.7228707306528318, |
| "grad_norm": 1.207231556517107, |
| "learning_rate": 1.9045302534508298e-06, |
| "loss": 0.5077, |
| "step": 418 |
| }, |
| { |
| "epoch": 0.7246000864677907, |
| "grad_norm": 1.239432725326481, |
| "learning_rate": 1.8825509907063328e-06, |
| "loss": 0.5081, |
| "step": 419 |
| }, |
| { |
| "epoch": 0.7263294422827496, |
| "grad_norm": 1.2768017670747227, |
| "learning_rate": 1.8606698399126261e-06, |
| "loss": 0.5047, |
| "step": 420 |
| }, |
| { |
| "epoch": 0.7280587980977086, |
| "grad_norm": 1.2443081672127343, |
| "learning_rate": 1.838887489710452e-06, |
| "loss": 0.5093, |
| "step": 421 |
| }, |
| { |
| "epoch": 0.7297881539126675, |
| "grad_norm": 1.3248575565441225, |
| "learning_rate": 1.8172046256311087e-06, |
| "loss": 0.5096, |
| "step": 422 |
| }, |
| { |
| "epoch": 0.7315175097276264, |
| "grad_norm": 1.2312772110980421, |
| "learning_rate": 1.7956219300748796e-06, |
| "loss": 0.4949, |
| "step": 423 |
| }, |
| { |
| "epoch": 0.7332468655425853, |
| "grad_norm": 1.2725397947766917, |
| "learning_rate": 1.7741400822895633e-06, |
| "loss": 0.4913, |
| "step": 424 |
| }, |
| { |
| "epoch": 0.7349762213575444, |
| "grad_norm": 1.2701112344747274, |
| "learning_rate": 1.7527597583490825e-06, |
| "loss": 0.5107, |
| "step": 425 |
| }, |
| { |
| "epoch": 0.7367055771725033, |
| "grad_norm": 1.22988252225655, |
| "learning_rate": 1.7314816311322219e-06, |
| "loss": 0.487, |
| "step": 426 |
| }, |
| { |
| "epoch": 0.7384349329874622, |
| "grad_norm": 1.23287252087033, |
| "learning_rate": 1.7103063703014372e-06, |
| "loss": 0.533, |
| "step": 427 |
| }, |
| { |
| "epoch": 0.7401642888024211, |
| "grad_norm": 1.184456040905248, |
| "learning_rate": 1.6892346422817945e-06, |
| "loss": 0.4993, |
| "step": 428 |
| }, |
| { |
| "epoch": 0.74189364461738, |
| "grad_norm": 1.220805415373837, |
| "learning_rate": 1.6682671102399806e-06, |
| "loss": 0.4865, |
| "step": 429 |
| }, |
| { |
| "epoch": 0.743623000432339, |
| "grad_norm": 1.1994409780411663, |
| "learning_rate": 1.647404434063447e-06, |
| "loss": 0.4992, |
| "step": 430 |
| }, |
| { |
| "epoch": 0.7453523562472979, |
| "grad_norm": 1.2631533307258702, |
| "learning_rate": 1.6266472703396286e-06, |
| "loss": 0.5236, |
| "step": 431 |
| }, |
| { |
| "epoch": 0.7470817120622568, |
| "grad_norm": 1.1929507809675688, |
| "learning_rate": 1.6059962723352912e-06, |
| "loss": 0.4921, |
| "step": 432 |
| }, |
| { |
| "epoch": 0.7488110678772157, |
| "grad_norm": 1.3115162066864332, |
| "learning_rate": 1.5854520899759656e-06, |
| "loss": 0.5123, |
| "step": 433 |
| }, |
| { |
| "epoch": 0.7505404236921747, |
| "grad_norm": 1.204903938948009, |
| "learning_rate": 1.5650153698254916e-06, |
| "loss": 0.4761, |
| "step": 434 |
| }, |
| { |
| "epoch": 0.7522697795071336, |
| "grad_norm": 1.3927511118217388, |
| "learning_rate": 1.544686755065677e-06, |
| "loss": 0.5209, |
| "step": 435 |
| }, |
| { |
| "epoch": 0.7539991353220925, |
| "grad_norm": 1.1622881435387544, |
| "learning_rate": 1.5244668854760459e-06, |
| "loss": 0.5054, |
| "step": 436 |
| }, |
| { |
| "epoch": 0.7557284911370514, |
| "grad_norm": 1.2474163957236213, |
| "learning_rate": 1.5043563974137132e-06, |
| "loss": 0.5037, |
| "step": 437 |
| }, |
| { |
| "epoch": 0.7574578469520103, |
| "grad_norm": 1.2604380979516712, |
| "learning_rate": 1.4843559237933474e-06, |
| "loss": 0.4891, |
| "step": 438 |
| }, |
| { |
| "epoch": 0.7591872027669693, |
| "grad_norm": 1.2530851990162881, |
| "learning_rate": 1.4644660940672628e-06, |
| "loss": 0.4903, |
| "step": 439 |
| }, |
| { |
| "epoch": 0.7609165585819282, |
| "grad_norm": 1.2300568194662433, |
| "learning_rate": 1.4446875342055988e-06, |
| "loss": 0.5059, |
| "step": 440 |
| }, |
| { |
| "epoch": 0.7626459143968871, |
| "grad_norm": 1.2621446251787094, |
| "learning_rate": 1.4250208666766235e-06, |
| "loss": 0.5084, |
| "step": 441 |
| }, |
| { |
| "epoch": 0.764375270211846, |
| "grad_norm": 1.1714965845789171, |
| "learning_rate": 1.4054667104271497e-06, |
| "loss": 0.4939, |
| "step": 442 |
| }, |
| { |
| "epoch": 0.7661046260268051, |
| "grad_norm": 1.150326828517619, |
| "learning_rate": 1.3860256808630429e-06, |
| "loss": 0.4998, |
| "step": 443 |
| }, |
| { |
| "epoch": 0.767833981841764, |
| "grad_norm": 1.4388728760063987, |
| "learning_rate": 1.3666983898298659e-06, |
| "loss": 0.506, |
| "step": 444 |
| }, |
| { |
| "epoch": 0.7695633376567229, |
| "grad_norm": 1.211792558367054, |
| "learning_rate": 1.3474854455936126e-06, |
| "loss": 0.4908, |
| "step": 445 |
| }, |
| { |
| "epoch": 0.7712926934716818, |
| "grad_norm": 1.1773193686251473, |
| "learning_rate": 1.3283874528215735e-06, |
| "loss": 0.4838, |
| "step": 446 |
| }, |
| { |
| "epoch": 0.7730220492866408, |
| "grad_norm": 1.2910457142537168, |
| "learning_rate": 1.3094050125632973e-06, |
| "loss": 0.5221, |
| "step": 447 |
| }, |
| { |
| "epoch": 0.7747514051015997, |
| "grad_norm": 1.2455298854301133, |
| "learning_rate": 1.2905387222316824e-06, |
| "loss": 0.5098, |
| "step": 448 |
| }, |
| { |
| "epoch": 0.7764807609165586, |
| "grad_norm": 1.2221925176741943, |
| "learning_rate": 1.2717891755841722e-06, |
| "loss": 0.4968, |
| "step": 449 |
| }, |
| { |
| "epoch": 0.7782101167315175, |
| "grad_norm": 1.2690867828669088, |
| "learning_rate": 1.2531569627040636e-06, |
| "loss": 0.497, |
| "step": 450 |
| }, |
| { |
| "epoch": 0.7799394725464764, |
| "grad_norm": 1.1942118517525946, |
| "learning_rate": 1.234642669981946e-06, |
| "loss": 0.524, |
| "step": 451 |
| }, |
| { |
| "epoch": 0.7816688283614354, |
| "grad_norm": 1.188395242204491, |
| "learning_rate": 1.2162468800972344e-06, |
| "loss": 0.4797, |
| "step": 452 |
| }, |
| { |
| "epoch": 0.7833981841763943, |
| "grad_norm": 1.239090182793657, |
| "learning_rate": 1.1979701719998454e-06, |
| "loss": 0.4852, |
| "step": 453 |
| }, |
| { |
| "epoch": 0.7851275399913532, |
| "grad_norm": 1.2443125589753226, |
| "learning_rate": 1.1798131208919628e-06, |
| "loss": 0.4869, |
| "step": 454 |
| }, |
| { |
| "epoch": 0.7868568958063121, |
| "grad_norm": 1.2094210133113774, |
| "learning_rate": 1.1617762982099446e-06, |
| "loss": 0.4977, |
| "step": 455 |
| }, |
| { |
| "epoch": 0.788586251621271, |
| "grad_norm": 1.2353201091140928, |
| "learning_rate": 1.143860271606333e-06, |
| "loss": 0.4867, |
| "step": 456 |
| }, |
| { |
| "epoch": 0.79031560743623, |
| "grad_norm": 1.2334222843362082, |
| "learning_rate": 1.1260656049319957e-06, |
| "loss": 0.4934, |
| "step": 457 |
| }, |
| { |
| "epoch": 0.7920449632511889, |
| "grad_norm": 0.9697687189599682, |
| "learning_rate": 1.1083928582183712e-06, |
| "loss": 0.5128, |
| "step": 458 |
| }, |
| { |
| "epoch": 0.7937743190661478, |
| "grad_norm": 0.9095178347207882, |
| "learning_rate": 1.0908425876598512e-06, |
| "loss": 0.5031, |
| "step": 459 |
| }, |
| { |
| "epoch": 0.7955036748811067, |
| "grad_norm": 0.9310196051126621, |
| "learning_rate": 1.0734153455962765e-06, |
| "loss": 0.5117, |
| "step": 460 |
| }, |
| { |
| "epoch": 0.7972330306960658, |
| "grad_norm": 0.9730539710942893, |
| "learning_rate": 1.0561116804955451e-06, |
| "loss": 0.5113, |
| "step": 461 |
| }, |
| { |
| "epoch": 0.7989623865110247, |
| "grad_norm": 0.894786526061026, |
| "learning_rate": 1.0389321369363636e-06, |
| "loss": 0.5018, |
| "step": 462 |
| }, |
| { |
| "epoch": 0.8006917423259836, |
| "grad_norm": 0.9674498638331304, |
| "learning_rate": 1.0218772555910955e-06, |
| "loss": 0.5042, |
| "step": 463 |
| }, |
| { |
| "epoch": 0.8024210981409425, |
| "grad_norm": 0.9438765948616291, |
| "learning_rate": 1.004947573208756e-06, |
| "loss": 0.5198, |
| "step": 464 |
| }, |
| { |
| "epoch": 0.8041504539559015, |
| "grad_norm": 0.9763518960839491, |
| "learning_rate": 9.881436225981107e-07, |
| "loss": 0.5058, |
| "step": 465 |
| }, |
| { |
| "epoch": 0.8058798097708604, |
| "grad_norm": 1.0177200696682724, |
| "learning_rate": 9.714659326109138e-07, |
| "loss": 0.5146, |
| "step": 466 |
| }, |
| { |
| "epoch": 0.8076091655858193, |
| "grad_norm": 0.9855108276847996, |
| "learning_rate": 9.549150281252633e-07, |
| "loss": 0.5047, |
| "step": 467 |
| }, |
| { |
| "epoch": 0.8093385214007782, |
| "grad_norm": 0.9820568110711639, |
| "learning_rate": 9.384914300290749e-07, |
| "loss": 0.5105, |
| "step": 468 |
| }, |
| { |
| "epoch": 0.8110678772157371, |
| "grad_norm": 0.9543734782446319, |
| "learning_rate": 9.221956552036992e-07, |
| "loss": 0.51, |
| "step": 469 |
| }, |
| { |
| "epoch": 0.8127972330306961, |
| "grad_norm": 1.0542711582531745, |
| "learning_rate": 9.060282165076462e-07, |
| "loss": 0.5025, |
| "step": 470 |
| }, |
| { |
| "epoch": 0.814526588845655, |
| "grad_norm": 0.8575237177652391, |
| "learning_rate": 8.899896227604509e-07, |
| "loss": 0.4907, |
| "step": 471 |
| }, |
| { |
| "epoch": 0.8162559446606139, |
| "grad_norm": 0.9537736768650427, |
| "learning_rate": 8.740803787266522e-07, |
| "loss": 0.502, |
| "step": 472 |
| }, |
| { |
| "epoch": 0.8179853004755728, |
| "grad_norm": 0.940840984310344, |
| "learning_rate": 8.58300985099918e-07, |
| "loss": 0.5079, |
| "step": 473 |
| }, |
| { |
| "epoch": 0.8197146562905318, |
| "grad_norm": 0.9855222060260419, |
| "learning_rate": 8.426519384872733e-07, |
| "loss": 0.5101, |
| "step": 474 |
| }, |
| { |
| "epoch": 0.8214440121054907, |
| "grad_norm": 1.0112701653651899, |
| "learning_rate": 8.271337313934869e-07, |
| "loss": 0.5122, |
| "step": 475 |
| }, |
| { |
| "epoch": 0.8231733679204496, |
| "grad_norm": 0.9202094573581726, |
| "learning_rate": 8.117468522055578e-07, |
| "loss": 0.5083, |
| "step": 476 |
| }, |
| { |
| "epoch": 0.8249027237354085, |
| "grad_norm": 0.9312213063787499, |
| "learning_rate": 7.964917851773496e-07, |
| "loss": 0.5058, |
| "step": 477 |
| }, |
| { |
| "epoch": 0.8266320795503674, |
| "grad_norm": 0.871339607306761, |
| "learning_rate": 7.813690104143557e-07, |
| "loss": 0.4966, |
| "step": 478 |
| }, |
| { |
| "epoch": 0.8283614353653265, |
| "grad_norm": 0.8812181772889855, |
| "learning_rate": 7.663790038585794e-07, |
| "loss": 0.5021, |
| "step": 479 |
| }, |
| { |
| "epoch": 0.8300907911802854, |
| "grad_norm": 0.9210803401855143, |
| "learning_rate": 7.515222372735648e-07, |
| "loss": 0.5084, |
| "step": 480 |
| }, |
| { |
| "epoch": 0.8318201469952443, |
| "grad_norm": 0.9105295776120536, |
| "learning_rate": 7.367991782295392e-07, |
| "loss": 0.5066, |
| "step": 481 |
| }, |
| { |
| "epoch": 0.8335495028102032, |
| "grad_norm": 0.9553285353198891, |
| "learning_rate": 7.222102900887102e-07, |
| "loss": 0.5137, |
| "step": 482 |
| }, |
| { |
| "epoch": 0.8352788586251622, |
| "grad_norm": 0.8726021151068502, |
| "learning_rate": 7.077560319906696e-07, |
| "loss": 0.5172, |
| "step": 483 |
| }, |
| { |
| "epoch": 0.8370082144401211, |
| "grad_norm": 0.907090839860992, |
| "learning_rate": 6.934368588379553e-07, |
| "loss": 0.507, |
| "step": 484 |
| }, |
| { |
| "epoch": 0.83873757025508, |
| "grad_norm": 0.9601616142220535, |
| "learning_rate": 6.792532212817271e-07, |
| "loss": 0.4932, |
| "step": 485 |
| }, |
| { |
| "epoch": 0.8404669260700389, |
| "grad_norm": 0.9038178937915357, |
| "learning_rate": 6.652055657075845e-07, |
| "loss": 0.5158, |
| "step": 486 |
| }, |
| { |
| "epoch": 0.8421962818849978, |
| "grad_norm": 0.8811166725350369, |
| "learning_rate": 6.512943342215234e-07, |
| "loss": 0.4991, |
| "step": 487 |
| }, |
| { |
| "epoch": 0.8439256376999568, |
| "grad_norm": 0.910468026382921, |
| "learning_rate": 6.375199646360142e-07, |
| "loss": 0.4942, |
| "step": 488 |
| }, |
| { |
| "epoch": 0.8456549935149157, |
| "grad_norm": 0.9415463030818443, |
| "learning_rate": 6.238828904562316e-07, |
| "loss": 0.4988, |
| "step": 489 |
| }, |
| { |
| "epoch": 0.8473843493298746, |
| "grad_norm": 0.9306562448394409, |
| "learning_rate": 6.103835408664032e-07, |
| "loss": 0.5218, |
| "step": 490 |
| }, |
| { |
| "epoch": 0.8491137051448335, |
| "grad_norm": 0.9306072629397343, |
| "learning_rate": 5.9702234071631e-07, |
| "loss": 0.5018, |
| "step": 491 |
| }, |
| { |
| "epoch": 0.8508430609597925, |
| "grad_norm": 0.9978545888527431, |
| "learning_rate": 5.83799710507909e-07, |
| "loss": 0.5015, |
| "step": 492 |
| }, |
| { |
| "epoch": 0.8525724167747514, |
| "grad_norm": 0.9281144531309704, |
| "learning_rate": 5.707160663821009e-07, |
| "loss": 0.5154, |
| "step": 493 |
| }, |
| { |
| "epoch": 0.8543017725897103, |
| "grad_norm": 0.9648920267544575, |
| "learning_rate": 5.577718201056392e-07, |
| "loss": 0.5024, |
| "step": 494 |
| }, |
| { |
| "epoch": 0.8560311284046692, |
| "grad_norm": 0.9518063416993736, |
| "learning_rate": 5.449673790581611e-07, |
| "loss": 0.5026, |
| "step": 495 |
| }, |
| { |
| "epoch": 0.8577604842196281, |
| "grad_norm": 0.9090864949437945, |
| "learning_rate": 5.323031462193757e-07, |
| "loss": 0.4975, |
| "step": 496 |
| }, |
| { |
| "epoch": 0.8594898400345872, |
| "grad_norm": 0.9027482960362793, |
| "learning_rate": 5.197795201563744e-07, |
| "loss": 0.5077, |
| "step": 497 |
| }, |
| { |
| "epoch": 0.8612191958495461, |
| "grad_norm": 0.9825856982352978, |
| "learning_rate": 5.073968950110941e-07, |
| "loss": 0.5175, |
| "step": 498 |
| }, |
| { |
| "epoch": 0.862948551664505, |
| "grad_norm": 1.0033645469552896, |
| "learning_rate": 4.951556604879049e-07, |
| "loss": 0.5103, |
| "step": 499 |
| }, |
| { |
| "epoch": 0.8646779074794639, |
| "grad_norm": 1.11450811759059, |
| "learning_rate": 4.830562018413532e-07, |
| "loss": 0.5135, |
| "step": 500 |
| }, |
| { |
| "epoch": 0.8664072632944229, |
| "grad_norm": 0.9286119478176571, |
| "learning_rate": 4.710988998640298e-07, |
| "loss": 0.4853, |
| "step": 501 |
| }, |
| { |
| "epoch": 0.8681366191093818, |
| "grad_norm": 0.9352342724589618, |
| "learning_rate": 4.5928413087459325e-07, |
| "loss": 0.5027, |
| "step": 502 |
| }, |
| { |
| "epoch": 0.8698659749243407, |
| "grad_norm": 0.9742580013974624, |
| "learning_rate": 4.4761226670592074e-07, |
| "loss": 0.5007, |
| "step": 503 |
| }, |
| { |
| "epoch": 0.8715953307392996, |
| "grad_norm": 0.9536093124243782, |
| "learning_rate": 4.3608367469340553e-07, |
| "loss": 0.4894, |
| "step": 504 |
| }, |
| { |
| "epoch": 0.8733246865542585, |
| "grad_norm": 0.9739686798504419, |
| "learning_rate": 4.2469871766340096e-07, |
| "loss": 0.5024, |
| "step": 505 |
| }, |
| { |
| "epoch": 0.8750540423692175, |
| "grad_norm": 0.9379743999219975, |
| "learning_rate": 4.134577539217965e-07, |
| "loss": 0.4924, |
| "step": 506 |
| }, |
| { |
| "epoch": 0.8767833981841764, |
| "grad_norm": 1.003766963981924, |
| "learning_rate": 4.0236113724274716e-07, |
| "loss": 0.4878, |
| "step": 507 |
| }, |
| { |
| "epoch": 0.8785127539991353, |
| "grad_norm": 0.9524223981180386, |
| "learning_rate": 3.9140921685753065e-07, |
| "loss": 0.5068, |
| "step": 508 |
| }, |
| { |
| "epoch": 0.8802421098140942, |
| "grad_norm": 0.8535490006491876, |
| "learning_rate": 3.8060233744356634e-07, |
| "loss": 0.4838, |
| "step": 509 |
| }, |
| { |
| "epoch": 0.8819714656290532, |
| "grad_norm": 0.9082342591220833, |
| "learning_rate": 3.6994083911356114e-07, |
| "loss": 0.5176, |
| "step": 510 |
| }, |
| { |
| "epoch": 0.8837008214440121, |
| "grad_norm": 0.9478217951483088, |
| "learning_rate": 3.5942505740480583e-07, |
| "loss": 0.4883, |
| "step": 511 |
| }, |
| { |
| "epoch": 0.885430177258971, |
| "grad_norm": 0.9157515033348981, |
| "learning_rate": 3.4905532326861946e-07, |
| "loss": 0.4856, |
| "step": 512 |
| }, |
| { |
| "epoch": 0.8871595330739299, |
| "grad_norm": 0.8748310499093214, |
| "learning_rate": 3.3883196305992906e-07, |
| "loss": 0.501, |
| "step": 513 |
| }, |
| { |
| "epoch": 0.8888888888888888, |
| "grad_norm": 0.9579844131883266, |
| "learning_rate": 3.287552985270015e-07, |
| "loss": 0.5008, |
| "step": 514 |
| }, |
| { |
| "epoch": 0.8906182447038478, |
| "grad_norm": 0.9866794087631354, |
| "learning_rate": 3.18825646801314e-07, |
| "loss": 0.5056, |
| "step": 515 |
| }, |
| { |
| "epoch": 0.8923476005188068, |
| "grad_norm": 0.9637431191512927, |
| "learning_rate": 3.0904332038757977e-07, |
| "loss": 0.5165, |
| "step": 516 |
| }, |
| { |
| "epoch": 0.8940769563337657, |
| "grad_norm": 0.9332323606841268, |
| "learning_rate": 2.9940862715390483e-07, |
| "loss": 0.5105, |
| "step": 517 |
| }, |
| { |
| "epoch": 0.8958063121487246, |
| "grad_norm": 1.009400230071864, |
| "learning_rate": 2.899218703221052e-07, |
| "loss": 0.5128, |
| "step": 518 |
| }, |
| { |
| "epoch": 0.8975356679636836, |
| "grad_norm": 0.8996826766855536, |
| "learning_rate": 2.8058334845816214e-07, |
| "loss": 0.5001, |
| "step": 519 |
| }, |
| { |
| "epoch": 0.8992650237786425, |
| "grad_norm": 0.8529312049842479, |
| "learning_rate": 2.7139335546282287e-07, |
| "loss": 0.4989, |
| "step": 520 |
| }, |
| { |
| "epoch": 0.9009943795936014, |
| "grad_norm": 0.9524051149908734, |
| "learning_rate": 2.6235218056235633e-07, |
| "loss": 0.5085, |
| "step": 521 |
| }, |
| { |
| "epoch": 0.9027237354085603, |
| "grad_norm": 0.9450098346365634, |
| "learning_rate": 2.534601082994437e-07, |
| "loss": 0.5041, |
| "step": 522 |
| }, |
| { |
| "epoch": 0.9044530912235192, |
| "grad_norm": 0.9688024639636753, |
| "learning_rate": 2.447174185242324e-07, |
| "loss": 0.5139, |
| "step": 523 |
| }, |
| { |
| "epoch": 0.9061824470384782, |
| "grad_norm": 0.9362455552180009, |
| "learning_rate": 2.3612438638551837e-07, |
| "loss": 0.4956, |
| "step": 524 |
| }, |
| { |
| "epoch": 0.9079118028534371, |
| "grad_norm": 0.8598614937743414, |
| "learning_rate": 2.276812823220964e-07, |
| "loss": 0.4875, |
| "step": 525 |
| }, |
| { |
| "epoch": 0.909641158668396, |
| "grad_norm": 0.909540077460528, |
| "learning_rate": 2.1938837205424002e-07, |
| "loss": 0.5117, |
| "step": 526 |
| }, |
| { |
| "epoch": 0.9113705144833549, |
| "grad_norm": 0.9096506960895155, |
| "learning_rate": 2.1124591657534776e-07, |
| "loss": 0.4955, |
| "step": 527 |
| }, |
| { |
| "epoch": 0.9130998702983139, |
| "grad_norm": 1.0142474014859533, |
| "learning_rate": 2.032541721437209e-07, |
| "loss": 0.507, |
| "step": 528 |
| }, |
| { |
| "epoch": 0.9148292261132728, |
| "grad_norm": 0.9131442997723532, |
| "learning_rate": 1.9541339027450256e-07, |
| "loss": 0.4895, |
| "step": 529 |
| }, |
| { |
| "epoch": 0.9165585819282317, |
| "grad_norm": 0.9853142920778307, |
| "learning_rate": 1.8772381773176417e-07, |
| "loss": 0.5211, |
| "step": 530 |
| }, |
| { |
| "epoch": 0.9182879377431906, |
| "grad_norm": 0.8974474768112111, |
| "learning_rate": 1.801856965207338e-07, |
| "loss": 0.4976, |
| "step": 531 |
| }, |
| { |
| "epoch": 0.9200172935581495, |
| "grad_norm": 0.9550109239119172, |
| "learning_rate": 1.7279926388018564e-07, |
| "loss": 0.5151, |
| "step": 532 |
| }, |
| { |
| "epoch": 0.9217466493731085, |
| "grad_norm": 0.9072672003349709, |
| "learning_rate": 1.6556475227496816e-07, |
| "loss": 0.516, |
| "step": 533 |
| }, |
| { |
| "epoch": 0.9234760051880675, |
| "grad_norm": 0.9399754149330305, |
| "learning_rate": 1.5848238938869332e-07, |
| "loss": 0.4909, |
| "step": 534 |
| }, |
| { |
| "epoch": 0.9252053610030264, |
| "grad_norm": 0.9437594133784243, |
| "learning_rate": 1.5155239811656562e-07, |
| "loss": 0.498, |
| "step": 535 |
| }, |
| { |
| "epoch": 0.9269347168179853, |
| "grad_norm": 0.9357434836371697, |
| "learning_rate": 1.4477499655837278e-07, |
| "loss": 0.4969, |
| "step": 536 |
| }, |
| { |
| "epoch": 0.9286640726329443, |
| "grad_norm": 0.8936101786239388, |
| "learning_rate": 1.3815039801161723e-07, |
| "loss": 0.4913, |
| "step": 537 |
| }, |
| { |
| "epoch": 0.9303934284479032, |
| "grad_norm": 0.9484054108177385, |
| "learning_rate": 1.3167881096480372e-07, |
| "loss": 0.4988, |
| "step": 538 |
| }, |
| { |
| "epoch": 0.9321227842628621, |
| "grad_norm": 0.9563796786443849, |
| "learning_rate": 1.253604390908819e-07, |
| "loss": 0.4925, |
| "step": 539 |
| }, |
| { |
| "epoch": 0.933852140077821, |
| "grad_norm": 0.8950613019917822, |
| "learning_rate": 1.191954812408308e-07, |
| "loss": 0.5052, |
| "step": 540 |
| }, |
| { |
| "epoch": 0.93558149589278, |
| "grad_norm": 0.9125279893925492, |
| "learning_rate": 1.1318413143740436e-07, |
| "loss": 0.5041, |
| "step": 541 |
| }, |
| { |
| "epoch": 0.9373108517077389, |
| "grad_norm": 0.9141300635124516, |
| "learning_rate": 1.0732657886902308e-07, |
| "loss": 0.5096, |
| "step": 542 |
| }, |
| { |
| "epoch": 0.9390402075226978, |
| "grad_norm": 0.9096167507969538, |
| "learning_rate": 1.0162300788382263e-07, |
| "loss": 0.4882, |
| "step": 543 |
| }, |
| { |
| "epoch": 0.9407695633376567, |
| "grad_norm": 0.9753044785021144, |
| "learning_rate": 9.607359798384785e-08, |
| "loss": 0.5103, |
| "step": 544 |
| }, |
| { |
| "epoch": 0.9424989191526156, |
| "grad_norm": 0.8747544695243777, |
| "learning_rate": 9.0678523819408e-08, |
| "loss": 0.504, |
| "step": 545 |
| }, |
| { |
| "epoch": 0.9442282749675746, |
| "grad_norm": 0.8448600329999801, |
| "learning_rate": 8.543795518357767e-08, |
| "loss": 0.5049, |
| "step": 546 |
| }, |
| { |
| "epoch": 0.9459576307825335, |
| "grad_norm": 0.9445491960031619, |
| "learning_rate": 8.035205700685167e-08, |
| "loss": 0.5033, |
| "step": 547 |
| }, |
| { |
| "epoch": 0.9476869865974924, |
| "grad_norm": 0.9108140075113013, |
| "learning_rate": 7.542098935195918e-08, |
| "loss": 0.5138, |
| "step": 548 |
| }, |
| { |
| "epoch": 0.9494163424124513, |
| "grad_norm": 0.9014111637252965, |
| "learning_rate": 7.064490740882057e-08, |
| "loss": 0.505, |
| "step": 549 |
| }, |
| { |
| "epoch": 0.9511456982274102, |
| "grad_norm": 0.8618772410756893, |
| "learning_rate": 6.602396148966795e-08, |
| "loss": 0.4939, |
| "step": 550 |
| }, |
| { |
| "epoch": 0.9528750540423692, |
| "grad_norm": 0.9034604742193665, |
| "learning_rate": 6.15582970243117e-08, |
| "loss": 0.4856, |
| "step": 551 |
| }, |
| { |
| "epoch": 0.9546044098573282, |
| "grad_norm": 0.8858115334722476, |
| "learning_rate": 5.7248054555563704e-08, |
| "loss": 0.4943, |
| "step": 552 |
| }, |
| { |
| "epoch": 0.9563337656722871, |
| "grad_norm": 0.9413567454097705, |
| "learning_rate": 5.3093369734816824e-08, |
| "loss": 0.4983, |
| "step": 553 |
| }, |
| { |
| "epoch": 0.958063121487246, |
| "grad_norm": 0.8631416544061653, |
| "learning_rate": 4.909437331777178e-08, |
| "loss": 0.4991, |
| "step": 554 |
| }, |
| { |
| "epoch": 0.959792477302205, |
| "grad_norm": 0.8972216946376435, |
| "learning_rate": 4.52511911603265e-08, |
| "loss": 0.4979, |
| "step": 555 |
| }, |
| { |
| "epoch": 0.9615218331171639, |
| "grad_norm": 0.9017046495091536, |
| "learning_rate": 4.15639442146093e-08, |
| "loss": 0.4968, |
| "step": 556 |
| }, |
| { |
| "epoch": 0.9632511889321228, |
| "grad_norm": 0.8883293939758925, |
| "learning_rate": 3.8032748525179684e-08, |
| "loss": 0.4959, |
| "step": 557 |
| }, |
| { |
| "epoch": 0.9649805447470817, |
| "grad_norm": 0.9234481550882554, |
| "learning_rate": 3.465771522536854e-08, |
| "loss": 0.5218, |
| "step": 558 |
| }, |
| { |
| "epoch": 0.9667099005620406, |
| "grad_norm": 0.914125086466903, |
| "learning_rate": 3.143895053378698e-08, |
| "loss": 0.5087, |
| "step": 559 |
| }, |
| { |
| "epoch": 0.9684392563769996, |
| "grad_norm": 0.8985893082923397, |
| "learning_rate": 2.837655575097964e-08, |
| "loss": 0.5053, |
| "step": 560 |
| }, |
| { |
| "epoch": 0.9701686121919585, |
| "grad_norm": 0.9385196676766113, |
| "learning_rate": 2.547062725623828e-08, |
| "loss": 0.5116, |
| "step": 561 |
| }, |
| { |
| "epoch": 0.9718979680069174, |
| "grad_norm": 0.9646239444487571, |
| "learning_rate": 2.2721256504567026e-08, |
| "loss": 0.5072, |
| "step": 562 |
| }, |
| { |
| "epoch": 0.9736273238218763, |
| "grad_norm": 0.8747663830607033, |
| "learning_rate": 2.012853002380466e-08, |
| "loss": 0.5022, |
| "step": 563 |
| }, |
| { |
| "epoch": 0.9753566796368353, |
| "grad_norm": 0.9334336705435727, |
| "learning_rate": 1.769252941190458e-08, |
| "loss": 0.5105, |
| "step": 564 |
| }, |
| { |
| "epoch": 0.9770860354517942, |
| "grad_norm": 0.9194588921972738, |
| "learning_rate": 1.541333133436018e-08, |
| "loss": 0.4985, |
| "step": 565 |
| }, |
| { |
| "epoch": 0.9788153912667531, |
| "grad_norm": 0.9520800525639294, |
| "learning_rate": 1.3291007521799015e-08, |
| "loss": 0.4977, |
| "step": 566 |
| }, |
| { |
| "epoch": 0.980544747081712, |
| "grad_norm": 0.9058630469142649, |
| "learning_rate": 1.132562476771959e-08, |
| "loss": 0.4738, |
| "step": 567 |
| }, |
| { |
| "epoch": 0.9822741028966709, |
| "grad_norm": 0.8879281633731727, |
| "learning_rate": 9.51724492639361e-09, |
| "loss": 0.5048, |
| "step": 568 |
| }, |
| { |
| "epoch": 0.9840034587116299, |
| "grad_norm": 0.8984029908537556, |
| "learning_rate": 7.865924910916977e-09, |
| "loss": 0.5024, |
| "step": 569 |
| }, |
| { |
| "epoch": 0.9857328145265889, |
| "grad_norm": 0.9121742433892018, |
| "learning_rate": 6.371716691419005e-09, |
| "loss": 0.5082, |
| "step": 570 |
| }, |
| { |
| "epoch": 0.9874621703415478, |
| "grad_norm": 0.9237443595386216, |
| "learning_rate": 5.034667293427053e-09, |
| "loss": 0.5097, |
| "step": 571 |
| }, |
| { |
| "epoch": 0.9891915261565067, |
| "grad_norm": 0.9117600237724628, |
| "learning_rate": 3.854818796385495e-09, |
| "loss": 0.5129, |
| "step": 572 |
| }, |
| { |
| "epoch": 0.9909208819714657, |
| "grad_norm": 0.9527043115093743, |
| "learning_rate": 2.8322083323334417e-09, |
| "loss": 0.5024, |
| "step": 573 |
| }, |
| { |
| "epoch": 0.9926502377864246, |
| "grad_norm": 0.9724377615202221, |
| "learning_rate": 1.9668680847356735e-09, |
| "loss": 0.4966, |
| "step": 574 |
| }, |
| { |
| "epoch": 0.9943795936013835, |
| "grad_norm": 0.9737523393148931, |
| "learning_rate": 1.2588252874673469e-09, |
| "loss": 0.4976, |
| "step": 575 |
| }, |
| { |
| "epoch": 0.9961089494163424, |
| "grad_norm": 0.8474552307580481, |
| "learning_rate": 7.081022239591173e-10, |
| "loss": 0.4907, |
| "step": 576 |
| }, |
| { |
| "epoch": 0.9978383052313013, |
| "grad_norm": 0.8918148859611189, |
| "learning_rate": 3.147162264971471e-10, |
| "loss": 0.4957, |
| "step": 577 |
| }, |
| { |
| "epoch": 0.9995676610462603, |
| "grad_norm": 0.8738364123206687, |
| "learning_rate": 7.867967567354306e-11, |
| "loss": 0.4892, |
| "step": 578 |
| }, |
| { |
| "epoch": 0.9995676610462603, |
| "step": 578, |
| "total_flos": 6.685487298542305e+17, |
| "train_loss": 0.5686832449324816, |
| "train_runtime": 23812.8313, |
| "train_samples_per_second": 7.141, |
| "train_steps_per_second": 0.024 |
| } |
| ], |
| "logging_steps": 1.0, |
| "max_steps": 578, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 1, |
| "save_steps": 50, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": true, |
| "should_training_stop": true |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 6.685487298542305e+17, |
| "train_batch_size": 4, |
| "trial_name": null, |
| "trial_params": null |
| } |