{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 0.11466574934067195,
"eval_steps": 500,
"global_step": 500,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0,
"grad_norm": 0.0,
"learning_rate": 0.0,
"loss": 7.2451,
"step": 1
},
{
"epoch": 0.0,
"grad_norm": 0.0,
"learning_rate": 0.0,
"loss": 7.3192,
"step": 2
},
{
"epoch": 0.0,
"grad_norm": 14.819024134660662,
"learning_rate": 1.3333333333333334e-06,
"loss": 7.2901,
"step": 3
},
{
"epoch": 0.0,
"grad_norm": 14.819024134660662,
"learning_rate": 1.3333333333333334e-06,
"loss": 7.4127,
"step": 4
},
{
"epoch": 0.0,
"grad_norm": 14.199093979418544,
"learning_rate": 2.666666666666667e-06,
"loss": 7.4321,
"step": 5
},
{
"epoch": 0.0,
"grad_norm": 10.185841979858534,
"learning_rate": 4.000000000000001e-06,
"loss": 7.1568,
"step": 6
},
{
"epoch": 0.0,
"grad_norm": 39.91704954913313,
"learning_rate": 5.333333333333334e-06,
"loss": 7.2837,
"step": 7
},
{
"epoch": 0.0,
"grad_norm": 18.930408398058322,
"learning_rate": 6.666666666666667e-06,
"loss": 7.2329,
"step": 8
},
{
"epoch": 0.0,
"grad_norm": 8.865495224195184,
"learning_rate": 8.000000000000001e-06,
"loss": 7.1651,
"step": 9
},
{
"epoch": 0.0,
"grad_norm": 35.166574749663724,
"learning_rate": 9.333333333333334e-06,
"loss": 7.1455,
"step": 10
},
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 14.929376279495122, |
|
"learning_rate": 1.0666666666666667e-05, |
|
"loss": 7.2938, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 14.323589623470497, |
|
"learning_rate": 1.2e-05, |
|
"loss": 7.136, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 12.033497318446193, |
|
"learning_rate": 1.3333333333333333e-05, |
|
"loss": 7.2873, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 12.713515670882014, |
|
"learning_rate": 1.4666666666666666e-05, |
|
"loss": 7.1778, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 16.40476702849706, |
|
"learning_rate": 1.6000000000000003e-05, |
|
"loss": 7.3031, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 16.463860488428647, |
|
"learning_rate": 1.7333333333333336e-05, |
|
"loss": 6.958, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 11.114739607650908, |
|
"learning_rate": 1.866666666666667e-05, |
|
"loss": 7.328, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 8.560556168903155, |
|
"learning_rate": 2e-05, |
|
"loss": 6.8535, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 11.543963578227485, |
|
"learning_rate": 1.999979021001399e-05, |
|
"loss": 7.1039, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 13.024267824258278, |
|
"learning_rate": 1.999916084885832e-05, |
|
"loss": 7.03, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 24.347656967541983, |
|
"learning_rate": 1.9998111942939727e-05, |
|
"loss": 7.1369, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 13.978477853833892, |
|
"learning_rate": 1.9996643536268202e-05, |
|
"loss": 7.1052, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 9.01222616499256, |
|
"learning_rate": 1.9994755690455154e-05, |
|
"loss": 7.2016, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 8.338853855265398, |
|
"learning_rate": 1.99924484847108e-05, |
|
"loss": 7.0447, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 14.599634725835617, |
|
"learning_rate": 1.998972201584088e-05, |
|
"loss": 7.0037, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 8.332198683157735, |
|
"learning_rate": 1.9986576398242566e-05, |
|
"loss": 7.1784, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 14.23890995116397, |
|
"learning_rate": 1.9983011763899674e-05, |
|
"loss": 6.8922, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 8.923950492978154, |
|
"learning_rate": 1.997902826237712e-05, |
|
"loss": 6.9407, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 8.06378020962777, |
|
"learning_rate": 1.997462606081465e-05, |
|
"loss": 7.1025, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 11.24345210949927, |
|
"learning_rate": 1.9969805343919822e-05, |
|
"loss": 6.9762, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 8.092214633553768, |
|
"learning_rate": 1.9964566313960265e-05, |
|
"loss": 7.0578, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 12.87228727478449, |
|
"learning_rate": 1.995890919075519e-05, |
|
"loss": 6.8295, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 7.111093086376143, |
|
"learning_rate": 1.995283421166614e-05, |
|
"loss": 6.7973, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 8.632087862294554, |
|
"learning_rate": 1.9946341631587086e-05, |
|
"loss": 6.7752, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 8.994822315110934, |
|
"learning_rate": 1.9939431722933678e-05, |
|
"loss": 7.0197, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 7.486951908393647, |
|
"learning_rate": 1.9932104775631847e-05, |
|
"loss": 6.8283, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 7.4584761693908375, |
|
"learning_rate": 1.9924361097105624e-05, |
|
"loss": 6.8002, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 6.261343208572681, |
|
"learning_rate": 1.9916201012264255e-05, |
|
"loss": 6.6381, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 18.79112809499115, |
|
"learning_rate": 1.990762486348855e-05, |
|
"loss": 7.01, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 7.150915500939425, |
|
"learning_rate": 1.989863301061654e-05, |
|
"loss": 6.9012, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 6.738925101776216, |
|
"learning_rate": 1.9889225830928365e-05, |
|
"loss": 6.7694, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 6.944529789995812, |
|
"learning_rate": 1.987940371913044e-05, |
|
"loss": 6.799, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 7.556349878373683, |
|
"learning_rate": 1.9869167087338908e-05, |
|
"loss": 6.6428, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 6.611474483622349, |
|
"learning_rate": 1.9858516365062334e-05, |
|
"loss": 6.6243, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 6.916298787571706, |
|
"learning_rate": 1.9847451999183692e-05, |
|
"loss": 6.6087, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 8.457565509913838, |
|
"learning_rate": 1.9835974453941623e-05, |
|
"loss": 6.7551, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 8.540062078386342, |
|
"learning_rate": 1.9824084210910924e-05, |
|
"loss": 6.6972, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 7.4270052163791185, |
|
"learning_rate": 1.9811781768982392e-05, |
|
"loss": 6.9304, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 7.31190391936824, |
|
"learning_rate": 1.9799067644341844e-05, |
|
"loss": 6.7148, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 6.181290414236328, |
|
"learning_rate": 1.978594237044849e-05, |
|
"loss": 6.6086, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 6.059093545289046, |
|
"learning_rate": 1.977240649801253e-05, |
|
"loss": 6.5689, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 7.376855166703994, |
|
"learning_rate": 1.9758460594972068e-05, |
|
"loss": 6.5767, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 9.315509966882654, |
|
"learning_rate": 1.9744105246469264e-05, |
|
"loss": 6.601, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 7.110322978482923, |
|
"learning_rate": 1.9729341054825783e-05, |
|
"loss": 6.5206, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 8.348814181636438, |
|
"learning_rate": 1.9714168639517543e-05, |
|
"loss": 6.6781, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 7.889124746527842, |
|
"learning_rate": 1.9698588637148705e-05, |
|
"loss": 6.363, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 6.048373264000878, |
|
"learning_rate": 1.9682601701424958e-05, |
|
"loss": 6.5605, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 6.666923562210865, |
|
"learning_rate": 1.9666208503126115e-05, |
|
"loss": 6.5264, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 6.215144195698015, |
|
"learning_rate": 1.9649409730077934e-05, |
|
"loss": 6.7055, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 5.851763399842508, |
|
"learning_rate": 1.9632206087123296e-05, |
|
"loss": 6.3156, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 6.899818690987695, |
|
"learning_rate": 1.9614598296092603e-05, |
|
"loss": 6.5751, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 6.233556175977335, |
|
"learning_rate": 1.9596587095773496e-05, |
|
"loss": 6.3861, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 6.677665311671007, |
|
"learning_rate": 1.957817324187987e-05, |
|
"loss": 6.5699, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 6.655226097949778, |
|
"learning_rate": 1.9559357507020163e-05, |
|
"loss": 6.5021, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 7.47281811823693, |
|
"learning_rate": 1.9540140680664915e-05, |
|
"loss": 6.5324, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 5.6360351804666475, |
|
"learning_rate": 1.952052356911368e-05, |
|
"loss": 6.6096, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 5.4238841079756615, |
|
"learning_rate": 1.950050699546116e-05, |
|
"loss": 6.2913, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 6.691271836321062, |
|
"learning_rate": 1.9480091799562706e-05, |
|
"loss": 6.6404, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 5.779952770675111, |
|
"learning_rate": 1.9459278837999048e-05, |
|
"loss": 6.5131, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 5.75641781132488, |
|
"learning_rate": 1.9438068984040366e-05, |
|
"loss": 6.4102, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 5.473240945440776, |
|
"learning_rate": 1.9416463127609655e-05, |
|
"loss": 6.1943, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 5.639227300435135, |
|
"learning_rate": 1.9394462175245382e-05, |
|
"loss": 6.2661, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 6.359397311850186, |
|
"learning_rate": 1.937206705006344e-05, |
|
"loss": 6.3749, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 5.9168823793145835, |
|
"learning_rate": 1.9349278691718426e-05, |
|
"loss": 6.4857, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 5.934252295974776, |
|
"learning_rate": 1.9326098056364224e-05, |
|
"loss": 6.4401, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 5.184345891936555, |
|
"learning_rate": 1.9302526116613863e-05, |
|
"loss": 6.272, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 6.42618267064823, |
|
"learning_rate": 1.9278563861498726e-05, |
|
"loss": 6.4017, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 5.698764300135597, |
|
"learning_rate": 1.9254212296427043e-05, |
|
"loss": 6.422, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 5.515269918748001, |
|
"learning_rate": 1.922947244314172e-05, |
|
"loss": 6.4018, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 5.186220316136429, |
|
"learning_rate": 1.9204345339677442e-05, |
|
"loss": 6.4008, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 6.071075169984412, |
|
"learning_rate": 1.9178832040317153e-05, |
|
"loss": 6.236, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 5.211523270674776, |
|
"learning_rate": 1.91529336155478e-05, |
|
"loss": 6.2431, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 4.654030406820622, |
|
"learning_rate": 1.9126651152015404e-05, |
|
"loss": 6.1377, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 5.542618960137867, |
|
"learning_rate": 1.9099985752479505e-05, |
|
"loss": 6.3531, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 4.807919282457549, |
|
"learning_rate": 1.9072938535766864e-05, |
|
"loss": 6.2508, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 4.852730067283944, |
|
"learning_rate": 1.904551063672452e-05, |
|
"loss": 6.2348, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 4.925798203153842, |
|
"learning_rate": 1.9017703206172187e-05, |
|
"loss": 6.2765, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 5.0975569371666865, |
|
"learning_rate": 1.8989517410853956e-05, |
|
"loss": 6.2451, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 4.699489385610056, |
|
"learning_rate": 1.896095443338935e-05, |
|
"loss": 6.2578, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 5.660080834025456, |
|
"learning_rate": 1.8932015472223692e-05, |
|
"loss": 6.2843, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 4.9661954295884225, |
|
"learning_rate": 1.8902701741577844e-05, |
|
"loss": 6.1715, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 4.888648288627258, |
|
"learning_rate": 1.8873014471397225e-05, |
|
"loss": 6.2495, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 4.710120588181978, |
|
"learning_rate": 1.8842954907300236e-05, |
|
"loss": 5.9913, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 5.155284987315298, |
|
"learning_rate": 1.881252431052599e-05, |
|
"loss": 6.3151, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 5.013674756768704, |
|
"learning_rate": 1.8781723957881374e-05, |
|
"loss": 6.2286, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 4.011825376784628, |
|
"learning_rate": 1.87505551416875e-05, |
|
"loss": 6.2459, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 4.835636123417131, |
|
"learning_rate": 1.871901916972547e-05, |
|
"loss": 6.2033, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 5.171704259527532, |
|
"learning_rate": 1.8687117365181514e-05, |
|
"loss": 6.2608, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 5.181750952717401, |
|
"learning_rate": 1.865485106659145e-05, |
|
"loss": 6.0338, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 4.687033388755913, |
|
"learning_rate": 1.862222162778454e-05, |
|
"loss": 6.1161, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 5.061452310176261, |
|
"learning_rate": 1.85892304178267e-05, |
|
"loss": 6.1018, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 4.742767764167209, |
|
"learning_rate": 1.8555878820963014e-05, |
|
"loss": 6.2337, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 5.294660524784012, |
|
"learning_rate": 1.8522168236559693e-05, |
|
"loss": 6.2453, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 4.1375219719939915, |
|
"learning_rate": 1.8488100079045345e-05, |
|
"loss": 6.1595, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 4.537495039210601, |
|
"learning_rate": 1.8453675777851627e-05, |
|
"loss": 6.1449, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 5.402914196313115, |
|
"learning_rate": 1.8418896777353272e-05, |
|
"loss": 6.2237, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 4.1771424603623615, |
|
"learning_rate": 1.8383764536807486e-05, |
|
"loss": 6.1728, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 4.198851233644863, |
|
"learning_rate": 1.8348280530292712e-05, |
|
"loss": 6.0628, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 4.559214073625153, |
|
"learning_rate": 1.831244624664681e-05, |
|
"loss": 6.2743, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 3.97241616226669, |
|
"learning_rate": 1.827626318940454e-05, |
|
"loss": 6.1513, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 4.029464327696206, |
|
"learning_rate": 1.8239732876734525e-05, |
|
"loss": 6.1743, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 4.9362284691818905, |
|
"learning_rate": 1.8202856841375517e-05, |
|
"loss": 6.0945, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 5.525591589138074, |
|
"learning_rate": 1.816563663057211e-05, |
|
"loss": 6.2648, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 3.913078134221513, |
|
"learning_rate": 1.81280738060098e-05, |
|
"loss": 6.0767, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 4.539776653253226, |
|
"learning_rate": 1.8090169943749477e-05, |
|
"loss": 6.2008, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 7.07561003185471, |
|
"learning_rate": 1.8051926634161282e-05, |
|
"loss": 6.1817, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 4.4932679628420376, |
|
"learning_rate": 1.8013345481857903e-05, |
|
"loss": 6.0112, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 4.728104224718154, |
|
"learning_rate": 1.797442810562721e-05, |
|
"loss": 6.1642, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 5.010653662783029, |
|
"learning_rate": 1.793517613836437e-05, |
|
"loss": 6.0345, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 4.58293874507086, |
|
"learning_rate": 1.7895591227003316e-05, |
|
"loss": 6.0006, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 4.357889872637869, |
|
"learning_rate": 1.7855675032447648e-05, |
|
"loss": 6.0577, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 4.095191120284703, |
|
"learning_rate": 1.7815429229500946e-05, |
|
"loss": 5.9336, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 4.736888424569404, |
|
"learning_rate": 1.7774855506796497e-05, |
|
"loss": 5.961, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 9.32933189803368, |
|
"learning_rate": 1.7733955566726438e-05, |
|
"loss": 5.8729, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 4.352013234575603, |
|
"learning_rate": 1.7692731125370355e-05, |
|
"loss": 5.9631, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 4.0044514035329986, |
|
"learning_rate": 1.7651183912423228e-05, |
|
"loss": 6.0128, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 4.158183905120493, |
|
"learning_rate": 1.7609315671122912e-05, |
|
"loss": 6.0247, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 4.257943967689872, |
|
"learning_rate": 1.7567128158176955e-05, |
|
"loss": 6.2873, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 4.1763697860525655, |
|
"learning_rate": 1.7524623143688905e-05, |
|
"loss": 5.9392, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 3.9607955760999056, |
|
"learning_rate": 1.748180241108404e-05, |
|
"loss": 5.8487, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 4.0896919573576875, |
|
"learning_rate": 1.7438667757034547e-05, |
|
"loss": 6.0042, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 4.282944803020933, |
|
"learning_rate": 1.739522099138411e-05, |
|
"loss": 6.1652, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 4.13081574380951, |
|
"learning_rate": 1.7351463937072008e-05, |
|
"loss": 6.0233, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 4.96960839892131, |
|
"learning_rate": 1.7307398430056595e-05, |
|
"loss": 5.7853, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 4.459520148451556, |
|
"learning_rate": 1.72630263192383e-05, |
|
"loss": 5.9414, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 3.7882596771040893, |
|
"learning_rate": 1.7218349466382024e-05, |
|
"loss": 5.8131, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 4.207888044625232, |
|
"learning_rate": 1.7173369746039026e-05, |
|
"loss": 5.8724, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 3.6658140789192695, |
|
"learning_rate": 1.7128089045468294e-05, |
|
"loss": 5.9003, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 3.7722042701323493, |
|
"learning_rate": 1.7082509264557333e-05, |
|
"loss": 5.8756, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 4.8972909846842185, |
|
"learning_rate": 1.7036632315742464e-05, |
|
"loss": 6.0623, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 3.703243911637958, |
|
"learning_rate": 1.6990460123928577e-05, |
|
"loss": 5.9731, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 4.040351904365242, |
|
"learning_rate": 1.6943994626408365e-05, |
|
"loss": 6.0629, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 7.5014912670407865, |
|
"learning_rate": 1.6897237772781046e-05, |
|
"loss": 5.945, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 4.183607320000293, |
|
"learning_rate": 1.6850191524870548e-05, |
|
"loss": 5.906, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 4.441179437405616, |
|
"learning_rate": 1.6802857856643214e-05, |
|
"loss": 5.9791, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 3.5946652188893187, |
|
"learning_rate": 1.6755238754124965e-05, |
|
"loss": 5.8481, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 5.030328438406491, |
|
"learning_rate": 1.6707336215317968e-05, |
|
"loss": 6.047, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 4.048177117370896, |
|
"learning_rate": 1.665915225011681e-05, |
|
"loss": 5.9285, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 4.299948019967649, |
|
"learning_rate": 1.6610688880224178e-05, |
|
"loss": 5.8888, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 4.095923631849489, |
|
"learning_rate": 1.6561948139065997e-05, |
|
"loss": 6.2453, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 3.481920812397553, |
|
"learning_rate": 1.6512932071706153e-05, |
|
"loss": 6.0019, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 4.3296848974625135, |
|
"learning_rate": 1.646364273476067e-05, |
|
"loss": 5.8587, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 3.947321316567111, |
|
"learning_rate": 1.6414082196311402e-05, |
|
"loss": 5.928, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 4.7243925766736705, |
|
"learning_rate": 1.6364252535819284e-05, |
|
"loss": 5.9007, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 13.974458344598341, |
|
"learning_rate": 1.6314155844037074e-05, |
|
"loss": 5.9754, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 4.17123392978259, |
|
"learning_rate": 1.626379422292162e-05, |
|
"loss": 5.8421, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 4.043820315204433, |
|
"learning_rate": 1.6213169785545688e-05, |
|
"loss": 5.9519, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 3.526011441047756, |
|
"learning_rate": 1.6162284656009276e-05, |
|
"loss": 5.8076, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 3.8459147008292724, |
|
"learning_rate": 1.6111140969350504e-05, |
|
"loss": 5.7672, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 3.562828950103298, |
|
"learning_rate": 1.6059740871456035e-05, |
|
"loss": 5.898, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 4.431960095480538, |
|
"learning_rate": 1.6008086518971037e-05, |
|
"loss": 5.7521, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 3.6428660471557737, |
|
"learning_rate": 1.5956180079208684e-05, |
|
"loss": 5.953, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 3.767661352257826, |
|
"learning_rate": 1.5904023730059227e-05, |
|
"loss": 6.0916, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 3.660701500822884, |
|
"learning_rate": 1.5851619659898623e-05, |
|
"loss": 5.8085, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 3.6310726747437143, |
|
"learning_rate": 1.57989700674967e-05, |
|
"loss": 5.8102, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 4.0023266213419495, |
|
"learning_rate": 1.5746077161924905e-05, |
|
"loss": 5.8879, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 3.99237095864865, |
|
"learning_rate": 1.5692943162463628e-05, |
|
"loss": 5.8366, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 3.5149347856437703, |
|
"learning_rate": 1.5639570298509067e-05, |
|
"loss": 5.7649, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 3.301730499203444, |
|
"learning_rate": 1.5585960809479698e-05, |
|
"loss": 5.9754, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 3.462115038986119, |
|
"learning_rate": 1.5532116944722308e-05, |
|
"loss": 5.6883, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 4.004970215448472, |
|
"learning_rate": 1.547804096341763e-05, |
|
"loss": 5.8396, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 3.944647631447267, |
|
"learning_rate": 1.5423735134485537e-05, |
|
"loss": 5.9988, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 3.5900984593685723, |
|
"learning_rate": 1.536920173648984e-05, |
|
"loss": 5.7239, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 3.6285904810079574, |
|
"learning_rate": 1.5314443057542703e-05, |
|
"loss": 5.8721, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 3.8894726151488843, |
|
"learning_rate": 1.5259461395208628e-05, |
|
"loss": 5.8299, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 3.5924238969723072, |
|
"learning_rate": 1.5204259056408046e-05, |
|
"loss": 5.9516, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 5.538554836605968, |
|
"learning_rate": 1.5148838357320537e-05, |
|
"loss": 5.8963, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 3.560612079271743, |
|
"learning_rate": 1.5093201623287631e-05, |
|
"loss": 5.888, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 3.467820770014584, |
|
"learning_rate": 1.5037351188715265e-05, |
|
"loss": 5.7864, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 4.172673845567331, |
|
"learning_rate": 1.4981289396975818e-05, |
|
"loss": 5.8498, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 3.1779411060929816, |
|
"learning_rate": 1.4925018600309784e-05, |
|
"loss": 5.6926, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 3.782675685637412, |
|
"learning_rate": 1.4868541159727097e-05, |
|
"loss": 5.6674, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 3.7799393833784296, |
|
"learning_rate": 1.4811859444908053e-05, |
|
"loss": 5.8661, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 4.487305708943541, |
|
"learning_rate": 1.4754975834103877e-05, |
|
"loss": 5.6999, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 6.107952099039134, |
|
"learning_rate": 1.4697892714036959e-05, |
|
"loss": 5.9596, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 3.897845235029772, |
|
"learning_rate": 1.4640612479800686e-05, |
|
"loss": 5.8336, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 3.574618642064676, |
|
"learning_rate": 1.4583137534758968e-05, |
|
"loss": 5.678, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 3.2767605854680717, |
|
"learning_rate": 1.4525470290445392e-05, |
|
"loss": 5.7779, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 3.3494797594069343, |
|
"learning_rate": 1.4467613166462024e-05, |
|
"loss": 5.6117, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 3.4560187160140114, |
|
"learning_rate": 1.4409568590377918e-05, |
|
"loss": 5.9744, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 5.124637343157441, |
|
"learning_rate": 1.4351338997627233e-05, |
|
"loss": 5.8884, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 4.802869734676735, |
|
"learning_rate": 1.429292683140706e-05, |
|
"loss": 5.8834, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 4.830747863900574, |
|
"learning_rate": 1.4234334542574906e-05, |
|
"loss": 5.8741, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 3.0222266779531313, |
|
"learning_rate": 1.4175564589545853e-05, |
|
"loss": 5.7957, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 3.7517853962161003, |
|
"learning_rate": 1.411661943818944e-05, |
|
"loss": 5.773, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 3.481751429815316, |
|
"learning_rate": 1.4057501561726157e-05, |
|
"loss": 5.79, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 4.032738430652106, |
|
"learning_rate": 1.3998213440623691e-05, |
|
"loss": 5.8233, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 4.3048433576019045, |
|
"learning_rate": 1.3938757562492873e-05, |
|
"loss": 5.7213, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 3.42093480136207, |
|
"learning_rate": 1.3879136421983265e-05, |
|
"loss": 5.6734, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 3.372940690507083, |
|
"learning_rate": 1.3819352520678519e-05, |
|
"loss": 5.8204, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 3.866414604706453, |
|
"learning_rate": 1.3759408366991391e-05, |
|
"loss": 5.7685, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 3.0437497252782015, |
|
"learning_rate": 1.3699306476058523e-05, |
|
"loss": 5.9321, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 3.5806152918043153, |
|
"learning_rate": 1.3639049369634878e-05, |
|
"loss": 5.6817, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 3.652624509453939, |
|
"learning_rate": 1.357863957598796e-05, |
|
"loss": 5.6123, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 3.2444221493972063, |
|
"learning_rate": 1.3518079629791725e-05, |
|
"loss": 5.5523, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 7.46063417126853, |
|
"learning_rate": 1.345737207202023e-05, |
|
"loss": 5.6855, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 3.3218441227109787, |
|
"learning_rate": 1.3396519449841006e-05, |
|
"loss": 5.7366, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 4.5944315284742725, |
|
"learning_rate": 1.3335524316508208e-05, |
|
"loss": 5.7278, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 3.0069349104091603, |
|
"learning_rate": 1.3274389231255466e-05, |
|
"loss": 5.7355, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 3.9317190820703756, |
|
"learning_rate": 1.3213116759188525e-05, |
|
"loss": 5.6806, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 3.33285104772104, |
|
"learning_rate": 1.3151709471177589e-05, |
|
"loss": 5.7708, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 3.6380751570471332, |
|
"learning_rate": 1.3090169943749475e-05, |
|
"loss": 5.8772, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 3.742526748849025, |
|
"learning_rate": 1.3028500758979507e-05, |
|
"loss": 5.6913, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 3.3393134329516196, |
|
"learning_rate": 1.296670450438317e-05, |
|
"loss": 5.5817, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 3.887935011194972, |
|
"learning_rate": 1.2904783772807534e-05, |
|
"loss": 5.6364, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 3.5742955487496175, |
|
"learning_rate": 1.2842741162322487e-05, |
|
"loss": 5.6486, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 3.2697917373453245, |
|
"learning_rate": 1.2780579276111702e-05, |
|
"loss": 5.7063, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 3.401421083396212, |
|
"learning_rate": 1.2718300722363431e-05, |
|
"loss": 5.7316, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 3.6121121716149887, |
|
"learning_rate": 1.2655908114161053e-05, |
|
"loss": 5.7758, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 3.2222903292023406, |
|
"learning_rate": 1.2593404069373452e-05, |
|
"loss": 5.7089, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 3.025802281634462, |
|
"learning_rate": 1.2530791210545163e-05, |
|
"loss": 5.6953, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 3.4673520664847075, |
|
"learning_rate": 1.2468072164786342e-05, |
|
"loss": 5.8728, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 3.6806603608994735, |
|
"learning_rate": 1.2405249563662539e-05, |
|
"loss": 5.669, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 4.64700420741913, |
|
"learning_rate": 1.2342326043084268e-05, |
|
"loss": 5.7352, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 3.0790982186864504, |
|
"learning_rate": 1.2279304243196438e-05, |
|
"loss": 5.5999, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 3.24646862661049, |
|
"learning_rate": 1.2216186808267544e-05, |
|
"loss": 5.668, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 3.1831196277858864, |
|
"learning_rate": 1.215297638657875e-05, |
|
"loss": 5.7959, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 3.4421555634435035, |
|
"learning_rate": 1.2089675630312755e-05, |
|
"loss": 5.6636, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 2.833448990477623, |
|
"learning_rate": 1.2026287195442503e-05, |
|
"loss": 5.5742, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 3.221649072609624, |
|
"learning_rate": 1.1962813741619777e-05, |
|
"loss": 5.7345, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 4.5576181124301, |
|
"learning_rate": 1.189925793206357e-05, |
|
"loss": 5.8758, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 3.1198515608939252, |
|
"learning_rate": 1.1835622433448361e-05, |
|
"loss": 5.7026, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 3.2391737753303063, |
|
"learning_rate": 1.177190991579223e-05, |
|
"loss": 5.8245, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 3.0415855629042006, |
|
"learning_rate": 1.1708123052344803e-05, |
|
"loss": 5.7423, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 3.417436664351836, |
|
"learning_rate": 1.164426451947513e-05, |
|
"loss": 5.7224, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 3.0428028654941963, |
|
"learning_rate": 1.1580336996559343e-05, |
|
"loss": 5.5215, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 3.2412878897803843, |
|
"learning_rate": 1.151634316586828e-05, |
|
"loss": 5.6753, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 2.9715981798900173, |
|
"learning_rate": 1.1452285712454905e-05, |
|
"loss": 5.6633, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 3.3507364284363375, |
|
"learning_rate": 1.138816732404167e-05, |
|
"loss": 5.7531, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 3.695763982565934, |
|
"learning_rate": 1.1323990690907734e-05, |
|
"loss": 5.6431, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 3.442076843360669, |
|
"learning_rate": 1.1259758505776092e-05, |
|
"loss": 5.763, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 3.4221454818185797, |
|
"learning_rate": 1.119547346370059e-05, |
|
"loss": 5.7121, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 32.81366795375805, |
|
"learning_rate": 1.1131138261952845e-05, |
|
"loss": 5.7642, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 2.824713348661609, |
|
"learning_rate": 1.1066755599909065e-05, |
|
"loss": 5.5872, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 3.2122840787719356, |
|
"learning_rate": 1.1002328178936813e-05, |
|
"loss": 5.6909, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 2.914139469492162, |
|
"learning_rate": 1.0937858702281631e-05, |
|
"loss": 5.5502, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 3.372671623706637, |
|
"learning_rate": 1.087334987495364e-05, |
|
"loss": 5.638, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 2.9794994417982705, |
|
"learning_rate": 1.0808804403614044e-05, |
|
"loss": 5.7327, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 3.545219321353326, |
|
"learning_rate": 1.0744224996461541e-05, |
|
"loss": 5.773, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 3.0509122712537904, |
|
"learning_rate": 1.0679614363118718e-05, |
|
"loss": 5.4833, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 3.1328946212391573, |
|
"learning_rate": 1.061497521451835e-05, |
|
"loss": 5.5846, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 2.912951012800422, |
|
"learning_rate": 1.055031026278965e-05, |
|
"loss": 5.7325, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 3.190063789344824, |
|
"learning_rate": 1.0485622221144485e-05, |
|
"loss": 5.6504, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 4.059074907434253, |
|
"learning_rate": 1.0420913803763522e-05, |
|
"loss": 5.5256, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 3.2665977095269763, |
|
"learning_rate": 1.0356187725682359e-05, |
|
"loss": 5.6545, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 2.8731971309590723, |
|
"learning_rate": 1.0291446702677598e-05, |
|
"loss": 5.4238, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 5.960836799262505, |
|
"learning_rate": 1.02266934511529e-05, |
|
"loss": 5.601, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 3.479663130288781, |
|
"learning_rate": 1.0161930688025018e-05, |
|
"loss": 5.49, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 3.458943691824299, |
|
"learning_rate": 1.0097161130609774e-05, |
|
"loss": 5.7461, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 3.279222709074338, |
|
"learning_rate": 1.003238749650809e-05, |
|
"loss": 5.5439, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 5.311427762294207, |
|
"learning_rate": 9.967612503491915e-06, |
|
"loss": 5.5148, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 2.887421444069423, |
|
"learning_rate": 9.90283886939023e-06, |
|
"loss": 5.4186, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 4.401386334484841, |
|
"learning_rate": 9.838069311974986e-06, |
|
"loss": 5.6778, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 2.6023206995102135, |
|
"learning_rate": 9.773306548847102e-06, |
|
"loss": 5.6155, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 3.593734709591131, |
|
"learning_rate": 9.708553297322407e-06, |
|
"loss": 5.6777, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 3.477325665085784, |
|
"learning_rate": 9.643812274317644e-06, |
|
"loss": 5.5781, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 2.8887444330887266, |
|
"learning_rate": 9.579086196236483e-06, |
|
"loss": 5.7172, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 3.8360288485375484, |
|
"learning_rate": 9.514377778855521e-06, |
|
"loss": 5.5803, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 3.135597210733372, |
|
"learning_rate": 9.449689737210352e-06, |
|
"loss": 5.7742, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 3.0578215352523745, |
|
"learning_rate": 9.385024785481653e-06, |
|
"loss": 5.5618, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 2.925737489172214, |
|
"learning_rate": 9.320385636881283e-06, |
|
"loss": 5.6728, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 3.429796358511194, |
|
"learning_rate": 9.255775003538462e-06, |
|
"loss": 5.7857, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 3.1866968840925742, |
|
"learning_rate": 9.19119559638596e-06, |
|
"loss": 5.504, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 3.199919489689139, |
|
"learning_rate": 9.126650125046361e-06, |
|
"loss": 5.6548, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 6.644972124408221, |
|
"learning_rate": 9.062141297718372e-06, |
|
"loss": 5.7688, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 2.840251387463206, |
|
"learning_rate": 8.99767182106319e-06, |
|
"loss": 5.5356, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 2.847024195497787, |
|
"learning_rate": 8.933244400090937e-06, |
|
"loss": 5.538, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 4.300138770028974, |
|
"learning_rate": 8.868861738047158e-06, |
|
"loss": 5.5466, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 3.2598992072099806, |
|
"learning_rate": 8.804526536299413e-06, |
|
"loss": 5.5438, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 2.9223921302516205, |
|
"learning_rate": 8.740241494223911e-06, |
|
"loss": 5.5199, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 2.7228943907903944, |
|
"learning_rate": 8.676009309092273e-06, |
|
"loss": 5.316, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 3.151284881347976, |
|
"learning_rate": 8.611832675958335e-06, |
|
"loss": 5.605, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 2.971020945163525, |
|
"learning_rate": 8.5477142875451e-06, |
|
"loss": 5.5074, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 3.383598237834473, |
|
"learning_rate": 8.48365683413172e-06, |
|
"loss": 5.6454, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 3.1708795210472065, |
|
"learning_rate": 8.419663003440657e-06, |
|
"loss": 5.6293, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 3.0060809846941523, |
|
"learning_rate": 8.355735480524874e-06, |
|
"loss": 5.7011, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 3.082555191182687, |
|
"learning_rate": 8.291876947655197e-06, |
|
"loss": 5.5058, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 3.1290655800212988, |
|
"learning_rate": 8.228090084207773e-06, |
|
"loss": 5.6645, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 3.6512062285821107, |
|
"learning_rate": 8.16437756655164e-06, |
|
"loss": 5.632, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 5.580032474883606, |
|
"learning_rate": 8.100742067936432e-06, |
|
"loss": 5.6529, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 2.8279735010912828, |
|
"learning_rate": 8.037186258380226e-06, |
|
"loss": 5.653, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 3.0485365662315185, |
|
"learning_rate": 7.9737128045575e-06, |
|
"loss": 5.6271, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 2.9000686534385065, |
|
"learning_rate": 7.91032436968725e-06, |
|
"loss": 5.497, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 3.7477894288698823, |
|
"learning_rate": 7.847023613421251e-06, |
|
"loss": 5.6454, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 3.1406780416239988, |
|
"learning_rate": 7.78381319173246e-06, |
|
"loss": 5.702, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 2.8819040313896664, |
|
"learning_rate": 7.720695756803569e-06, |
|
"loss": 5.5487, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 3.1093053507455433, |
|
"learning_rate": 7.657673956915735e-06, |
|
"loss": 5.6263, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 3.532080902497016, |
|
"learning_rate": 7.594750436337467e-06, |
|
"loss": 5.7086, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 2.9271604989314417, |
|
"learning_rate": 7.531927835213657e-06, |
|
"loss": 5.5651, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 3.9573677159990344, |
|
"learning_rate": 7.469208789454838e-06, |
|
"loss": 5.6534, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 2.813452933446664, |
|
"learning_rate": 7.40659593062655e-06, |
|
"loss": 5.5948, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 3.64918124403021, |
|
"learning_rate": 7.344091885838949e-06, |
|
"loss": 5.833, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 8.447026127108531, |
|
"learning_rate": 7.2816992776365714e-06, |
|
"loss": 5.6345, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 3.487783377440865, |
|
"learning_rate": 7.219420723888301e-06, |
|
"loss": 5.5796, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 3.3746557421600545, |
|
"learning_rate": 7.157258837677514e-06, |
|
"loss": 5.4475, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 3.109170816456849, |
|
"learning_rate": 7.095216227192467e-06, |
|
"loss": 5.4312, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 3.5209993320764634, |
|
"learning_rate": 7.033295495616834e-06, |
|
"loss": 5.6655, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 3.7885132212833215, |
|
"learning_rate": 6.971499241020495e-06, |
|
"loss": 5.5506, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 2.8035280155793902, |
|
"learning_rate": 6.909830056250527e-06, |
|
"loss": 5.5899, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 3.3096990722045665, |
|
"learning_rate": 6.848290528822417e-06, |
|
"loss": 5.5894, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 3.1035770416615245, |
|
"learning_rate": 6.786883240811479e-06, |
|
"loss": 5.5137, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 2.934891742489131, |
|
"learning_rate": 6.725610768744535e-06, |
|
"loss": 5.5028, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 3.371896210185273, |
|
"learning_rate": 6.664475683491797e-06, |
|
"loss": 5.5001, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 2.8035512763165222, |
|
"learning_rate": 6.603480550158995e-06, |
|
"loss": 5.6237, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 4.14158466178092, |
|
"learning_rate": 6.542627927979772e-06, |
|
"loss": 5.6736, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 2.972559988613375, |
|
"learning_rate": 6.481920370208274e-06, |
|
"loss": 5.6447, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 3.411613112123063, |
|
"learning_rate": 6.421360424012039e-06, |
|
"loss": 5.7263, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 2.8412153634814308, |
|
"learning_rate": 6.360950630365126e-06, |
|
"loss": 5.5072, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 3.376692187351766, |
|
"learning_rate": 6.300693523941481e-06, |
|
"loss": 5.6328, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 2.950706203004661, |
|
"learning_rate": 6.2405916330086106e-06, |
|
"loss": 5.606, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 3.6164997018204152, |
|
"learning_rate": 6.180647479321484e-06, |
|
"loss": 5.7159, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 2.6776792490163492, |
|
"learning_rate": 6.120863578016736e-06, |
|
"loss": 5.4034, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 2.8823500476883783, |
|
"learning_rate": 6.061242437507131e-06, |
|
"loss": 5.5799, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 2.7310153497627305, |
|
"learning_rate": 6.00178655937631e-06, |
|
"loss": 5.4403, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 2.8043161109289105, |
|
"learning_rate": 5.942498438273849e-06, |
|
"loss": 5.5201, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 2.6257376922078577, |
|
"learning_rate": 5.8833805618105635e-06, |
|
"loss": 5.5161, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 3.100365890406591, |
|
"learning_rate": 5.82443541045415e-06, |
|
"loss": 5.4591, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 3.164224570414931, |
|
"learning_rate": 5.765665457425102e-06, |
|
"loss": 5.5032, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 2.8688071534260713, |
|
"learning_rate": 5.707073168592943e-06, |
|
"loss": 5.6875, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 2.9334355868792925, |
|
"learning_rate": 5.648661002372769e-06, |
|
"loss": 5.4918, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 2.618203487365493, |
|
"learning_rate": 5.590431409622081e-06, |
|
"loss": 5.6324, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 2.7468055145560473, |
|
"learning_rate": 5.5323868335379775e-06, |
|
"loss": 5.5696, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 2.945148993510563, |
|
"learning_rate": 5.4745297095546125e-06, |
|
"loss": 5.6865, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 4.27286874096374, |
|
"learning_rate": 5.416862465241033e-06, |
|
"loss": 5.574, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 3.075467269510141, |
|
"learning_rate": 5.359387520199317e-06, |
|
"loss": 5.4386, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 12.979333973830173, |
|
"learning_rate": 5.302107285963045e-06, |
|
"loss": 5.5171, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 2.946796474047621, |
|
"learning_rate": 5.245024165896126e-06, |
|
"loss": 5.4337, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 47.491660622837905, |
|
"learning_rate": 5.18814055509195e-06, |
|
"loss": 5.3093, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 2.6684688936423306, |
|
"learning_rate": 5.131458840272905e-06, |
|
"loss": 5.4504, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 2.9524813775023664, |
|
"learning_rate": 5.074981399690219e-06, |
|
"loss": 5.6183, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 3.071692317071865, |
|
"learning_rate": 5.018710603024187e-06, |
|
"loss": 5.8018, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 2.8943426237790604, |
|
"learning_rate": 4.9626488112847384e-06, |
|
"loss": 5.5124, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 3.643486862482141, |
|
"learning_rate": 4.9067983767123736e-06, |
|
"loss": 5.6324, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 3.0446879911014793, |
|
"learning_rate": 4.851161642679466e-06, |
|
"loss": 5.5722, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 3.0223614188577583, |
|
"learning_rate": 4.795740943591955e-06, |
|
"loss": 5.6997, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 3.139884095364713, |
|
"learning_rate": 4.740538604791371e-06, |
|
"loss": 5.6098, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 3.0355695744448905, |
|
"learning_rate": 4.685556942457296e-06, |
|
"loss": 5.5642, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 3.2148124769621145, |
|
"learning_rate": 4.630798263510162e-06, |
|
"loss": 5.4686, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 6.599100924296892, |
|
"learning_rate": 4.576264865514467e-06, |
|
"loss": 5.6283, |
|
"step": 349 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 3.176903096047862, |
|
"learning_rate": 4.521959036582372e-06, |
|
"loss": 5.6207, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 2.7882598761964217, |
|
"learning_rate": 4.467883055277696e-06, |
|
"loss": 5.4235, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 3.4794420663108894, |
|
"learning_rate": 4.414039190520308e-06, |
|
"loss": 5.5631, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 2.903622461402484, |
|
"learning_rate": 4.360429701490935e-06, |
|
"loss": 5.6318, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 7.699211582040187, |
|
"learning_rate": 4.307056837536373e-06, |
|
"loss": 5.7193, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 3.637017761567808, |
|
"learning_rate": 4.2539228380750955e-06, |
|
"loss": 5.6718, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 3.871928827914661, |
|
"learning_rate": 4.201029932503303e-06, |
|
"loss": 5.5911, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 3.373623961352284, |
|
"learning_rate": 4.14838034010138e-06, |
|
"loss": 5.6983, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 2.7039462572520025, |
|
"learning_rate": 4.095976269940777e-06, |
|
"loss": 5.4657, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 2.906259270274048, |
|
"learning_rate": 4.043819920791322e-06, |
|
"loss": 5.6059, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 2.80215901884808, |
|
"learning_rate": 3.991913481028965e-06, |
|
"loss": 5.3867, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 2.7611740391342763, |
|
"learning_rate": 3.940259128543967e-06, |
|
"loss": 5.6265, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 3.496220876703193, |
|
"learning_rate": 3.888859030649498e-06, |
|
"loss": 5.7369, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 3.069627191877829, |
|
"learning_rate": 3.837715343990727e-06, |
|
"loss": 5.6878, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 2.8861264690211628, |
|
"learning_rate": 3.7868302144543146e-06, |
|
"loss": 5.6452, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 2.973826787518843, |
|
"learning_rate": 3.736205777078381e-06, |
|
"loss": 5.4028, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 2.9173591380715704, |
|
"learning_rate": 3.685844155962931e-06, |
|
"loss": 5.5656, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 3.147705831696598, |
|
"learning_rate": 3.63574746418072e-06, |
|
"loss": 5.5783, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 3.072248624553244, |
|
"learning_rate": 3.585917803688603e-06, |
|
"loss": 5.539, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 2.7322222965861336, |
|
"learning_rate": 3.536357265239333e-06, |
|
"loss": 5.4976, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 2.658079903087022, |
|
"learning_rate": 3.487067928293848e-06, |
|
"loss": 5.4221, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 2.680370794084422, |
|
"learning_rate": 3.4380518609340076e-06, |
|
"loss": 5.5785, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 2.89315062272381, |
|
"learning_rate": 3.3893111197758276e-06, |
|
"loss": 5.4699, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 3.088114743299729, |
|
"learning_rate": 3.3408477498831917e-06, |
|
"loss": 5.5848, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 3.3744405750175783, |
|
"learning_rate": 3.2926637846820366e-06, |
|
"loss": 5.5855, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 3.0949682435479815, |
|
"learning_rate": 3.2447612458750365e-06, |
|
"loss": 5.5665, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 3.5298918745049064, |
|
"learning_rate": 3.197142143356787e-06, |
|
"loss": 5.4282, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 2.3415078853618483, |
|
"learning_rate": 3.1498084751294523e-06, |
|
"loss": 5.548, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 2.6530303000947377, |
|
"learning_rate": 3.1027622272189572e-06, |
|
"loss": 5.4614, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 2.6198793225005095, |
|
"learning_rate": 3.0560053735916372e-06, |
|
"loss": 5.3387, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 3.253733908459485, |
|
"learning_rate": 3.009539876071427e-06, |
|
"loss": 5.5892, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 2.893078721591448, |
|
"learning_rate": 2.9633676842575386e-06, |
|
"loss": 5.5098, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 3.2332247538663927, |
|
"learning_rate": 2.9174907354426696e-06, |
|
"loss": 5.611, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 3.717694710480861, |
|
"learning_rate": 2.8719109545317102e-06, |
|
"loss": 5.7531, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 5.973350020162763, |
|
"learning_rate": 2.8266302539609747e-06, |
|
"loss": 5.5053, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 3.106939977808064, |
|
"learning_rate": 2.78165053361798e-06, |
|
"loss": 5.5299, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 3.743314132309298, |
|
"learning_rate": 2.736973680761702e-06, |
|
"loss": 5.6108, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 3.020284676653917, |
|
"learning_rate": 2.692601569943407e-06, |
|
"loss": 5.5949, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 2.8688896218347466, |
|
"learning_rate": 2.648536062927999e-06, |
|
"loss": 5.6542, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 2.8774214420614292, |
|
"learning_rate": 2.604779008615895e-06, |
|
"loss": 5.6006, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 2.5974922903934443, |
|
"learning_rate": 2.5613322429654573e-06, |
|
"loss": 5.4603, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 3.1182016556730034, |
|
"learning_rate": 2.5181975889159615e-06, |
|
"loss": 5.4617, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 3.02296875164515, |
|
"learning_rate": 2.475376856311097e-06, |
|
"loss": 5.5758, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 3.1443470043442066, |
|
"learning_rate": 2.432871841823047e-06, |
|
"loss": 5.4938, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 5.21437085093465, |
|
"learning_rate": 2.390684328877089e-06, |
|
"loss": 5.543, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 3.0664362463556603, |
|
"learning_rate": 2.3488160875767717e-06, |
|
"loss": 5.6436, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 2.793841886787081, |
|
"learning_rate": 2.307268874629649e-06, |
|
"loss": 5.3901, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 2.6568460536060137, |
|
"learning_rate": 2.266044433273562e-06, |
|
"loss": 5.5585, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 3.3923148492013504, |
|
"learning_rate": 2.2251444932035094e-06, |
|
"loss": 5.3515, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 4.187296832849823, |
|
"learning_rate": 2.184570770499056e-06, |
|
"loss": 5.4722, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 2.8884881509409555, |
|
"learning_rate": 2.1443249675523536e-06, |
|
"loss": 5.537, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 3.051680492849398, |
|
"learning_rate": 2.1044087729966856e-06, |
|
"loss": 5.4369, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 2.8280027110122608, |
|
"learning_rate": 2.064823861635633e-06, |
|
"loss": 5.5705, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 3.4037253520135216, |
|
"learning_rate": 2.025571894372794e-06, |
|
"loss": 5.5584, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 4.319540102434969, |
|
"learning_rate": 1.9866545181421016e-06, |
|
"loss": 5.4076, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 2.808044157619595, |
|
"learning_rate": 1.9480733658387175e-06, |
|
"loss": 5.4129, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 2.7351469976881306, |
|
"learning_rate": 1.9098300562505266e-06, |
|
"loss": 5.6271, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 2.7165917157682737, |
|
"learning_rate": 1.8719261939902023e-06, |
|
"loss": 5.4525, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 3.040380201846354, |
|
"learning_rate": 1.8343633694278895e-06, |
|
"loss": 5.5268, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 2.8167171485840035, |
|
"learning_rate": 1.7971431586244814e-06, |
|
"loss": 5.4247, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 2.8317598164442632, |
|
"learning_rate": 1.7602671232654755e-06, |
|
"loss": 5.4478, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 2.8994461696949276, |
|
"learning_rate": 1.723736810595461e-06, |
|
"loss": 5.6048, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 2.848027250263409, |
|
"learning_rate": 1.687553753353195e-06, |
|
"loss": 5.6236, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 2.7979223820102126, |
|
"learning_rate": 1.6517194697072903e-06, |
|
"loss": 5.5997, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 4.406985290020591, |
|
"learning_rate": 1.6162354631925203e-06, |
|
"loss": 5.6047, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 3.2716585943989362, |
|
"learning_rate": 1.5811032226467304e-06, |
|
"loss": 5.5763, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 3.2119530719491967, |
|
"learning_rate": 1.5463242221483742e-06, |
|
"loss": 5.5576, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 2.7525960595562, |
|
"learning_rate": 1.511899920954656e-06, |
|
"loss": 5.6124, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 2.575626661151134, |
|
"learning_rate": 1.4778317634403082e-06, |
|
"loss": 5.5585, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 3.109772746844713, |
|
"learning_rate": 1.4441211790369892e-06, |
|
"loss": 5.6023, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 3.48770216871105, |
|
"learning_rate": 1.4107695821733026e-06, |
|
"loss": 5.6905, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 2.776399817120803, |
|
"learning_rate": 1.3777783722154603e-06, |
|
"loss": 5.5346, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 2.689995264190314, |
|
"learning_rate": 1.3451489334085555e-06, |
|
"loss": 5.776, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 2.9589020357519855, |
|
"learning_rate": 1.3128826348184886e-06, |
|
"loss": 5.5889, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 3.926701278155943, |
|
"learning_rate": 1.2809808302745298e-06, |
|
"loss": 5.4377, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 8.614907788515115, |
|
"learning_rate": 1.249444858312502e-06, |
|
"loss": 5.3811, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 2.844919169807355, |
|
"learning_rate": 1.218276042118629e-06, |
|
"loss": 5.3662, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 3.200788157807446, |
|
"learning_rate": 1.1874756894740137e-06, |
|
"loss": 5.5491, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 2.8493126527965655, |
|
"learning_rate": 1.1570450926997657e-06, |
|
"loss": 5.6191, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 5.199235991842511, |
|
"learning_rate": 1.1269855286027798e-06, |
|
"loss": 5.6003, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 2.577166179252903, |
|
"learning_rate": 1.0972982584221592e-06, |
|
"loss": 5.5332, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 2.5080571141871837, |
|
"learning_rate": 1.067984527776309e-06, |
|
"loss": 5.5226, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 2.9156280169840563, |
|
"learning_rate": 1.0390455666106547e-06, |
|
"loss": 5.4879, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 3.3439938592575094, |
|
"learning_rate": 1.010482589146048e-06, |
|
"loss": 5.5742, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 2.70223178081651, |
|
"learning_rate": 9.822967938278172e-07, |
|
"loss": 5.53, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 2.910070747579829, |
|
"learning_rate": 9.544893632754816e-07, |
|
"loss": 5.5037, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 2.859773242243526, |
|
"learning_rate": 9.270614642331377e-07, |
|
"loss": 5.5309, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 2.990603251521546, |
|
"learning_rate": 9.000142475204965e-07, |
|
"loss": 5.4596, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 11.13467016828935, |
|
"learning_rate": 8.733488479845997e-07, |
|
"loss": 5.4875, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 3.499523656417884, |
|
"learning_rate": 8.470663844522053e-07, |
|
"loss": 5.5061, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 3.2234243851466418, |
|
"learning_rate": 8.211679596828481e-07, |
|
"loss": 5.5508, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 3.689783836568065, |
|
"learning_rate": 7.956546603225601e-07, |
|
"loss": 5.4979, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 3.550751273965273, |
|
"learning_rate": 7.705275568582848e-07, |
|
"loss": 5.6264, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 2.721521824889205, |
|
"learning_rate": 7.457877035729588e-07, |
|
"loss": 5.5113, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 2.768024715912084, |
|
"learning_rate": 7.21436138501278e-07, |
|
"loss": 5.5389, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 2.6088969137232456, |
|
"learning_rate": 6.974738833861383e-07, |
|
"loss": 5.4611, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 2.693525018419924, |
|
"learning_rate": 6.739019436357774e-07, |
|
"loss": 5.5317, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 2.5671066935530367, |
|
"learning_rate": 6.507213082815745e-07, |
|
"loss": 5.5331, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 3.4210107522987996, |
|
"learning_rate": 6.279329499365649e-07, |
|
"loss": 5.6906, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 2.772985182537546, |
|
"learning_rate": 6.055378247546217e-07, |
|
"loss": 5.6865, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 3.298986027174084, |
|
"learning_rate": 5.835368723903456e-07, |
|
"loss": 5.5719, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 2.715794146885051, |
|
"learning_rate": 5.619310159596358e-07, |
|
"loss": 5.6766, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 3.0933470216619243, |
|
"learning_rate": 5.407211620009545e-07, |
|
"loss": 5.5423, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 4.143643936396307, |
|
"learning_rate": 5.199082004372958e-07, |
|
"loss": 5.3755, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 2.868440457281347, |
|
"learning_rate": 4.994930045388414e-07, |
|
"loss": 5.5415, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 2.9560966126847372, |
|
"learning_rate": 4.794764308863242e-07, |
|
"loss": 5.636, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 3.658181273470064, |
|
"learning_rate": 4.5985931933508757e-07, |
|
"loss": 5.6803, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 3.423368722688064, |
|
"learning_rate": 4.406424929798403e-07, |
|
"loss": 5.4089, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 3.0534636300165188, |
|
"learning_rate": 4.218267581201296e-07, |
|
"loss": 5.5947, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 4.345779139508837, |
|
"learning_rate": 4.034129042265067e-07, |
|
"loss": 5.5494, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 2.733287289238342, |
|
"learning_rate": 3.8540170390740097e-07, |
|
"loss": 5.3952, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 2.578794557649385, |
|
"learning_rate": 3.67793912876705e-07, |
|
"loss": 5.4544, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 6.462067447086064, |
|
"learning_rate": 3.5059026992206645e-07, |
|
"loss": 5.4354, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 2.6125800075217755, |
|
"learning_rate": 3.3379149687388866e-07, |
|
"loss": 5.3488, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 2.7049049281222506, |
|
"learning_rate": 3.1739829857504235e-07, |
|
"loss": 5.5727, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 3.620043130272865, |
|
"learning_rate": 3.0141136285129825e-07, |
|
"loss": 5.6706, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 3.0809997678670187, |
|
"learning_rate": 2.8583136048245697e-07, |
|
"loss": 5.7061, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 3.1616078894179482, |
|
"learning_rate": 2.706589451742181e-07, |
|
"loss": 5.6098, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 2.6779003366529275, |
|
"learning_rate": 2.5589475353073987e-07, |
|
"loss": 5.4707, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 6.707485770015231, |
|
"learning_rate": 2.4153940502793185e-07, |
|
"loss": 5.4335, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 3.1605981177662383, |
|
"learning_rate": 2.2759350198746978e-07, |
|
"loss": 5.5259, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 3.014186395148381, |
|
"learning_rate": 2.1405762955151178e-07, |
|
"loss": 5.6467, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 2.889242425776768, |
|
"learning_rate": 2.009323556581566e-07, |
|
"loss": 5.4662, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 3.095182172474526, |
|
"learning_rate": 1.8821823101760949e-07, |
|
"loss": 5.5991, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 2.8025822861133, |
|
"learning_rate": 1.7591578908907724e-07, |
|
"loss": 5.6508, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 3.052158040260495, |
|
"learning_rate": 1.6402554605838173e-07, |
|
"loss": 5.6078, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 3.0334351565612603, |
|
"learning_rate": 1.5254800081630828e-07, |
|
"loss": 5.6521, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 3.2390067488864203, |
|
"learning_rate": 1.4148363493766803e-07, |
|
"loss": 5.5932, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 2.6513576443759246, |
|
"learning_rate": 1.30832912661093e-07, |
|
"loss": 5.4863, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 2.8858856433117337, |
|
"learning_rate": 1.2059628086956044e-07, |
|
"loss": 5.6502, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 6.026494893009913, |
|
"learning_rate": 1.1077416907163573e-07, |
|
"loss": 5.6161, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 3.269750470433105, |
|
"learning_rate": 1.0136698938346012e-07, |
|
"loss": 5.6146, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 2.915176554260305, |
|
"learning_rate": 9.237513651145224e-08, |
|
"loss": 5.4934, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 2.703171232414342, |
|
"learning_rate": 8.379898773574924e-08, |
|
"loss": 5.4725, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 2.955963983229779, |
|
"learning_rate": 7.563890289437825e-08, |
|
"loss": 5.7339, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 3.089224348103812, |
|
"learning_rate": 6.78952243681541e-08, |
|
"loss": 5.5206, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 3.1319980991851524, |
|
"learning_rate": 6.056827706632185e-08, |
|
"loss": 5.3931, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 3.2761777556196376, |
|
"learning_rate": 5.365836841291439e-08, |
|
"loss": 5.4341, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 2.8804231914855665, |
|
"learning_rate": 4.716578833386054e-08, |
|
"loss": 5.3863, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 2.2873446601496052, |
|
"learning_rate": 4.109080924481479e-08, |
|
"loss": 5.4484, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 3.110318580535298, |
|
"learning_rate": 3.543368603973529e-08, |
|
"loss": 5.5471, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 3.791270347470137, |
|
"learning_rate": 3.019465608018024e-08, |
|
"loss": 5.5891, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 2.6716828750765056, |
|
"learning_rate": 2.537393918535358e-08, |
|
"loss": 5.5102, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 4.590128972359705, |
|
"learning_rate": 2.0971737622883515e-08, |
|
"loss": 5.6497, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 2.491730237188739, |
|
"learning_rate": 1.698823610032929e-08, |
|
"loss": 5.5464, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 2.541793206348526, |
|
"learning_rate": 1.3423601757436289e-08, |
|
"loss": 5.5446, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 4.43180997277326, |
|
"learning_rate": 1.0277984159122734e-08, |
|
"loss": 5.5588, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 2.7657573951340706, |
|
"learning_rate": 7.551515289203615e-09, |
|
"loss": 5.6594, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 2.8695545506323783, |
|
"learning_rate": 5.2443095448506674e-09, |
|
"loss": 5.625, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 3.165811767433061, |
|
"learning_rate": 3.3564637317984318e-09, |
|
"loss": 5.4195, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 2.9299117263430903, |
|
"learning_rate": 1.888057060274173e-09, |
|
"loss": 5.6383, |
|
"step": 500 |
|
} |
|
], |
|
"logging_steps": 1.0, |
|
"max_steps": 500, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 25, |
|
"total_flos": 2968550903808.0, |
|
"train_batch_size": 32, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |