|
{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.9980806142034548,
  "eval_steps": 130,
  "global_step": 260,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.003838771593090211,
      "grad_norm": 41.942832946777344,
      "learning_rate": 5e-06,
      "loss": 29.5342,
      "step": 1
    },
    {
      "epoch": 0.003838771593090211,
      "eval_loss": 1.869257926940918,
      "eval_runtime": 31.5851,
      "eval_samples_per_second": 9.372,
      "eval_steps_per_second": 4.686,
      "step": 1
    },
    {
      "epoch": 0.007677543186180422,
      "grad_norm": 37.62504577636719,
      "learning_rate": 1e-05,
      "loss": 29.602,
      "step": 2
    },
    {
      "epoch": 0.011516314779270634,
      "grad_norm": 27.579742431640625,
      "learning_rate": 1.5e-05,
      "loss": 28.6884,
      "step": 3
    },
    {
      "epoch": 0.015355086372360844,
      "grad_norm": 36.99763870239258,
      "learning_rate": 2e-05,
      "loss": 29.9575,
      "step": 4
    },
    {
      "epoch": 0.019193857965451054,
      "grad_norm": 37.934635162353516,
      "learning_rate": 2.5e-05,
      "loss": 28.0644,
      "step": 5
    },
    {
      "epoch": 0.023032629558541268,
      "grad_norm": 45.45661163330078,
      "learning_rate": 3e-05,
      "loss": 29.0135,
      "step": 6
    },
    {
      "epoch": 0.026871401151631478,
      "grad_norm": 31.74431800842285,
      "learning_rate": 3.5e-05,
      "loss": 29.0581,
      "step": 7
    },
    {
      "epoch": 0.030710172744721688,
      "grad_norm": 32.59209060668945,
      "learning_rate": 4e-05,
      "loss": 28.5253,
      "step": 8
    },
    {
      "epoch": 0.0345489443378119,
      "grad_norm": 26.19696617126465,
      "learning_rate": 4.5e-05,
      "loss": 27.7504,
      "step": 9
    },
    {
      "epoch": 0.03838771593090211,
      "grad_norm": 30.916200637817383,
      "learning_rate": 5e-05,
      "loss": 29.3754,
      "step": 10
    },
    {
      "epoch": 0.04222648752399232,
      "grad_norm": 25.58551597595215,
      "learning_rate": 4.9999525683718174e-05,
      "loss": 29.0332,
      "step": 11
    },
    {
      "epoch": 0.046065259117082535,
      "grad_norm": 30.068437576293945,
      "learning_rate": 4.9998102752870765e-05,
      "loss": 29.2469,
      "step": 12
    },
    {
      "epoch": 0.04990403071017274,
      "grad_norm": 25.17665672302246,
      "learning_rate": 4.999573126145132e-05,
      "loss": 28.2097,
      "step": 13
    },
    {
      "epoch": 0.053742802303262956,
      "grad_norm": 27.615007400512695,
      "learning_rate": 4.99924112994468e-05,
      "loss": 29.7609,
      "step": 14
    },
    {
      "epoch": 0.05758157389635317,
      "grad_norm": 26.473073959350586,
      "learning_rate": 4.998814299283415e-05,
      "loss": 29.3846,
      "step": 15
    },
    {
      "epoch": 0.061420345489443376,
      "grad_norm": 22.732168197631836,
      "learning_rate": 4.998292650357558e-05,
      "loss": 28.2741,
      "step": 16
    },
    {
      "epoch": 0.06525911708253358,
      "grad_norm": 26.874069213867188,
      "learning_rate": 4.9976762029612335e-05,
      "loss": 30.2697,
      "step": 17
    },
    {
      "epoch": 0.0690978886756238,
      "grad_norm": 22.940404891967773,
      "learning_rate": 4.9969649804857257e-05,
      "loss": 29.0416,
      "step": 18
    },
    {
      "epoch": 0.07293666026871401,
      "grad_norm": 22.93003273010254,
      "learning_rate": 4.996159009918585e-05,
      "loss": 30.0236,
      "step": 19
    },
    {
      "epoch": 0.07677543186180422,
      "grad_norm": 21.159198760986328,
      "learning_rate": 4.995258321842611e-05,
      "loss": 29.995,
      "step": 20
    },
    {
      "epoch": 0.08061420345489444,
      "grad_norm": 23.158321380615234,
      "learning_rate": 4.9942629504346827e-05,
      "loss": 30.1427,
      "step": 21
    },
    {
      "epoch": 0.08445297504798464,
      "grad_norm": 22.745277404785156,
      "learning_rate": 4.993172933464471e-05,
      "loss": 29.0171,
      "step": 22
    },
    {
      "epoch": 0.08829174664107485,
      "grad_norm": 21.473155975341797,
      "learning_rate": 4.9919883122929976e-05,
      "loss": 29.1784,
      "step": 23
    },
    {
      "epoch": 0.09213051823416507,
      "grad_norm": 21.882991790771484,
      "learning_rate": 4.990709131871074e-05,
      "loss": 27.7198,
      "step": 24
    },
    {
      "epoch": 0.09596928982725528,
      "grad_norm": 20.5384464263916,
      "learning_rate": 4.989335440737586e-05,
      "loss": 29.3608,
      "step": 25
    },
    {
      "epoch": 0.09980806142034548,
      "grad_norm": 21.028078079223633,
      "learning_rate": 4.9878672910176615e-05,
      "loss": 30.7017,
      "step": 26
    },
    {
      "epoch": 0.1036468330134357,
      "grad_norm": 20.912294387817383,
      "learning_rate": 4.9863047384206835e-05,
      "loss": 29.2454,
      "step": 27
    },
    {
      "epoch": 0.10748560460652591,
      "grad_norm": 22.990535736083984,
      "learning_rate": 4.984647842238185e-05,
      "loss": 30.5442,
      "step": 28
    },
    {
      "epoch": 0.11132437619961612,
      "grad_norm": 20.818967819213867,
      "learning_rate": 4.982896665341591e-05,
      "loss": 28.0971,
      "step": 29
    },
    {
      "epoch": 0.11516314779270634,
      "grad_norm": 22.41204071044922,
      "learning_rate": 4.98105127417984e-05,
      "loss": 30.0213,
      "step": 30
    },
    {
      "epoch": 0.11900191938579655,
      "grad_norm": 20.42460823059082,
      "learning_rate": 4.979111738776857e-05,
      "loss": 29.0625,
      "step": 31
    },
    {
      "epoch": 0.12284069097888675,
      "grad_norm": 23.12067413330078,
      "learning_rate": 4.977078132728901e-05,
      "loss": 30.4828,
      "step": 32
    },
    {
      "epoch": 0.12667946257197696,
      "grad_norm": 19.101734161376953,
      "learning_rate": 4.974950533201767e-05,
      "loss": 27.8432,
      "step": 33
    },
    {
      "epoch": 0.13051823416506717,
      "grad_norm": 18.871788024902344,
      "learning_rate": 4.972729020927865e-05,
      "loss": 28.2289,
      "step": 34
    },
    {
      "epoch": 0.1343570057581574,
      "grad_norm": 19.23995590209961,
      "learning_rate": 4.9704136802031485e-05,
      "loss": 28.7979,
      "step": 35
    },
    {
      "epoch": 0.1381957773512476,
      "grad_norm": 19.988222122192383,
      "learning_rate": 4.968004598883923e-05,
      "loss": 29.0405,
      "step": 36
    },
    {
      "epoch": 0.1420345489443378,
      "grad_norm": 17.621200561523438,
      "learning_rate": 4.965501868383506e-05,
      "loss": 28.8017,
      "step": 37
    },
    {
      "epoch": 0.14587332053742802,
      "grad_norm": 19.57996368408203,
      "learning_rate": 4.9629055836687665e-05,
      "loss": 28.5596,
      "step": 38
    },
    {
      "epoch": 0.14971209213051823,
      "grad_norm": 19.62120819091797,
      "learning_rate": 4.960215843256511e-05,
      "loss": 28.6677,
      "step": 39
    },
    {
      "epoch": 0.15355086372360843,
      "grad_norm": 18.79466438293457,
      "learning_rate": 4.957432749209755e-05,
      "loss": 30.2908,
      "step": 40
    },
    {
      "epoch": 0.15738963531669867,
      "grad_norm": 21.75792694091797,
      "learning_rate": 4.9545564071338426e-05,
      "loss": 28.7169,
      "step": 41
    },
    {
      "epoch": 0.16122840690978887,
      "grad_norm": 21.684478759765625,
      "learning_rate": 4.9515869261724446e-05,
      "loss": 28.8571,
      "step": 42
    },
    {
      "epoch": 0.16506717850287908,
      "grad_norm": 20.90673065185547,
      "learning_rate": 4.948524419003415e-05,
      "loss": 28.5535,
      "step": 43
    },
    {
      "epoch": 0.1689059500959693,
      "grad_norm": 20.5564022064209,
      "learning_rate": 4.9453690018345144e-05,
      "loss": 29.5709,
      "step": 44
    },
    {
      "epoch": 0.1727447216890595,
      "grad_norm": 21.395124435424805,
      "learning_rate": 4.942120794399002e-05,
      "loss": 28.016,
      "step": 45
    },
    {
      "epoch": 0.1765834932821497,
      "grad_norm": 18.09257698059082,
      "learning_rate": 4.938779919951092e-05,
      "loss": 29.6793,
      "step": 46
    },
    {
      "epoch": 0.18042226487523993,
      "grad_norm": 19.043846130371094,
      "learning_rate": 4.935346505261276e-05,
      "loss": 30.1972,
      "step": 47
    },
    {
      "epoch": 0.18426103646833014,
      "grad_norm": 20.1807804107666,
      "learning_rate": 4.931820680611512e-05,
      "loss": 28.7825,
      "step": 48
    },
    {
      "epoch": 0.18809980806142035,
      "grad_norm": 18.534324645996094,
      "learning_rate": 4.928202579790285e-05,
      "loss": 27.895,
      "step": 49
    },
    {
      "epoch": 0.19193857965451055,
      "grad_norm": 19.662391662597656,
      "learning_rate": 4.9244923400875245e-05,
      "loss": 30.0118,
      "step": 50
    },
    {
      "epoch": 0.19577735124760076,
      "grad_norm": 19.72050666809082,
      "learning_rate": 4.920690102289397e-05,
      "loss": 28.6979,
      "step": 51
    },
    {
      "epoch": 0.19961612284069097,
      "grad_norm": 20.052833557128906,
      "learning_rate": 4.916796010672969e-05,
      "loss": 29.5174,
      "step": 52
    },
    {
      "epoch": 0.2034548944337812,
      "grad_norm": 18.85672378540039,
      "learning_rate": 4.9128102130007225e-05,
      "loss": 28.3002,
      "step": 53
    },
    {
      "epoch": 0.2072936660268714,
      "grad_norm": 18.159709930419922,
      "learning_rate": 4.908732860514958e-05,
      "loss": 28.7818,
      "step": 54
    },
    {
      "epoch": 0.21113243761996162,
      "grad_norm": 20.378021240234375,
      "learning_rate": 4.9045641079320484e-05,
      "loss": 29.458,
      "step": 55
    },
    {
      "epoch": 0.21497120921305182,
      "grad_norm": 21.459836959838867,
      "learning_rate": 4.900304113436571e-05,
      "loss": 27.3894,
      "step": 56
    },
    {
      "epoch": 0.21880998080614203,
      "grad_norm": 19.784387588500977,
      "learning_rate": 4.895953038675306e-05,
      "loss": 29.3604,
      "step": 57
    },
    {
      "epoch": 0.22264875239923224,
      "grad_norm": 19.705406188964844,
      "learning_rate": 4.891511048751102e-05,
      "loss": 27.4372,
      "step": 58
    },
    {
      "epoch": 0.22648752399232247,
      "grad_norm": 19.9382381439209,
      "learning_rate": 4.886978312216611e-05,
      "loss": 28.2482,
      "step": 59
    },
    {
      "epoch": 0.23032629558541268,
      "grad_norm": 19.08539581298828,
      "learning_rate": 4.882355001067892e-05,
      "loss": 28.4456,
      "step": 60
    },
    {
      "epoch": 0.23416506717850288,
      "grad_norm": 18.285186767578125,
      "learning_rate": 4.877641290737884e-05,
      "loss": 28.745,
      "step": 61
    },
    {
      "epoch": 0.2380038387715931,
      "grad_norm": 19.981107711791992,
      "learning_rate": 4.872837360089754e-05,
      "loss": 27.6915,
      "step": 62
    },
    {
      "epoch": 0.2418426103646833,
      "grad_norm": 18.624874114990234,
      "learning_rate": 4.8679433914101006e-05,
      "loss": 28.0039,
      "step": 63
    },
    {
      "epoch": 0.2456813819577735,
      "grad_norm": 22.298728942871094,
      "learning_rate": 4.862959570402049e-05,
      "loss": 28.3383,
      "step": 64
    },
    {
      "epoch": 0.2495201535508637,
      "grad_norm": 20.383586883544922,
      "learning_rate": 4.857886086178194e-05,
      "loss": 28.918,
      "step": 65
    },
    {
      "epoch": 0.2533589251439539,
      "grad_norm": 18.7249755859375,
      "learning_rate": 4.85272313125343e-05,
      "loss": 29.0783,
      "step": 66
    },
    {
      "epoch": 0.2571976967370441,
      "grad_norm": 18.3210391998291,
      "learning_rate": 4.8474709015376416e-05,
      "loss": 27.3239,
      "step": 67
    },
    {
      "epoch": 0.26103646833013433,
      "grad_norm": 19.459135055541992,
      "learning_rate": 4.842129596328277e-05,
      "loss": 29.2664,
      "step": 68
    },
    {
      "epoch": 0.2648752399232246,
      "grad_norm": 18.80237579345703,
      "learning_rate": 4.836699418302777e-05,
      "loss": 28.8449,
      "step": 69
    },
    {
      "epoch": 0.2687140115163148,
      "grad_norm": 18.05824851989746,
      "learning_rate": 4.8311805735108894e-05,
      "loss": 27.5182,
      "step": 70
    },
    {
      "epoch": 0.272552783109405,
      "grad_norm": 18.784578323364258,
      "learning_rate": 4.825573271366851e-05,
      "loss": 28.4831,
      "step": 71
    },
    {
      "epoch": 0.2763915547024952,
      "grad_norm": 18.89769172668457,
      "learning_rate": 4.8198777246414373e-05,
      "loss": 27.7881,
      "step": 72
    },
    {
      "epoch": 0.2802303262955854,
      "grad_norm": 17.64800453186035,
      "learning_rate": 4.814094149453891e-05,
      "loss": 27.7358,
      "step": 73
    },
    {
      "epoch": 0.2840690978886756,
      "grad_norm": 19.012086868286133,
      "learning_rate": 4.808222765263724e-05,
      "loss": 29.8711,
      "step": 74
    },
    {
      "epoch": 0.28790786948176583,
      "grad_norm": 18.49270248413086,
      "learning_rate": 4.802263794862385e-05,
      "loss": 28.3761,
      "step": 75
    },
    {
      "epoch": 0.29174664107485604,
      "grad_norm": 21.752199172973633,
      "learning_rate": 4.796217464364808e-05,
      "loss": 28.3796,
      "step": 76
    },
    {
      "epoch": 0.29558541266794625,
      "grad_norm": 19.340524673461914,
      "learning_rate": 4.790084003200834e-05,
      "loss": 29.4554,
      "step": 77
    },
    {
      "epoch": 0.29942418426103645,
      "grad_norm": 21.60015869140625,
      "learning_rate": 4.783863644106502e-05,
      "loss": 28.0435,
      "step": 78
    },
    {
      "epoch": 0.30326295585412666,
      "grad_norm": 20.29178237915039,
      "learning_rate": 4.777556623115221e-05,
      "loss": 27.563,
      "step": 79
    },
    {
      "epoch": 0.30710172744721687,
      "grad_norm": 18.13334846496582,
      "learning_rate": 4.7711631795488096e-05,
      "loss": 27.4931,
      "step": 80
    },
    {
      "epoch": 0.31094049904030713,
      "grad_norm": 18.14622688293457,
      "learning_rate": 4.764683556008418e-05,
      "loss": 28.0506,
      "step": 81
    },
    {
      "epoch": 0.31477927063339733,
      "grad_norm": 19.75128936767578,
      "learning_rate": 4.758117998365322e-05,
      "loss": 27.2963,
      "step": 82
    },
    {
      "epoch": 0.31861804222648754,
      "grad_norm": 17.486196517944336,
      "learning_rate": 4.751466755751594e-05,
      "loss": 28.7996,
      "step": 83
    },
    {
      "epoch": 0.32245681381957775,
      "grad_norm": 19.39826011657715,
      "learning_rate": 4.7447300805506455e-05,
      "loss": 27.6073,
      "step": 84
    },
    {
      "epoch": 0.32629558541266795,
      "grad_norm": 17.352880477905273,
      "learning_rate": 4.7379082283876566e-05,
      "loss": 28.5087,
      "step": 85
    },
    {
      "epoch": 0.33013435700575816,
      "grad_norm": 20.071483612060547,
      "learning_rate": 4.7310014581198695e-05,
      "loss": 27.1697,
      "step": 86
    },
    {
      "epoch": 0.33397312859884837,
      "grad_norm": 19.04390525817871,
      "learning_rate": 4.7240100318267746e-05,
      "loss": 27.9563,
      "step": 87
    },
    {
      "epoch": 0.3378119001919386,
      "grad_norm": 20.634225845336914,
      "learning_rate": 4.716934214800155e-05,
      "loss": 27.6816,
      "step": 88
    },
    {
      "epoch": 0.3416506717850288,
      "grad_norm": 19.746788024902344,
      "learning_rate": 4.70977427553403e-05,
      "loss": 27.3921,
      "step": 89
    },
    {
      "epoch": 0.345489443378119,
      "grad_norm": 19.04279899597168,
      "learning_rate": 4.702530485714461e-05,
      "loss": 27.2845,
      "step": 90
    },
    {
      "epoch": 0.3493282149712092,
      "grad_norm": 18.288244247436523,
      "learning_rate": 4.695203120209245e-05,
      "loss": 28.8947,
      "step": 91
    },
    {
      "epoch": 0.3531669865642994,
      "grad_norm": 18.553823471069336,
      "learning_rate": 4.6877924570574817e-05,
      "loss": 27.7966,
      "step": 92
    },
    {
      "epoch": 0.3570057581573896,
      "grad_norm": 18.615053176879883,
      "learning_rate": 4.6802987774590275e-05,
      "loss": 27.8787,
      "step": 93
    },
    {
      "epoch": 0.36084452975047987,
      "grad_norm": 18.30453109741211,
      "learning_rate": 4.672722365763821e-05,
      "loss": 29.0792,
      "step": 94
    },
    {
      "epoch": 0.3646833013435701,
      "grad_norm": 18.134958267211914,
      "learning_rate": 4.665063509461097e-05,
      "loss": 29.07,
      "step": 95
    },
    {
      "epoch": 0.3685220729366603,
      "grad_norm": 19.019006729125977,
      "learning_rate": 4.6573224991684744e-05,
      "loss": 27.708,
      "step": 96
    },
    {
      "epoch": 0.3723608445297505,
      "grad_norm": 18.863723754882812,
      "learning_rate": 4.649499628620931e-05,
      "loss": 27.4551,
      "step": 97
    },
    {
      "epoch": 0.3761996161228407,
      "grad_norm": 18.62997055053711,
      "learning_rate": 4.641595194659657e-05,
      "loss": 26.7616,
      "step": 98
    },
    {
      "epoch": 0.3800383877159309,
      "grad_norm": 18.19691276550293,
      "learning_rate": 4.63360949722079e-05,
      "loss": 27.6041,
      "step": 99
    },
    {
      "epoch": 0.3838771593090211,
      "grad_norm": 19.37999153137207,
      "learning_rate": 4.625542839324036e-05,
      "loss": 26.9613,
      "step": 100
    },
    {
      "epoch": 0.3877159309021113,
      "grad_norm": 18.335630416870117,
      "learning_rate": 4.617395527061168e-05,
      "loss": 27.4261,
      "step": 101
    },
    {
      "epoch": 0.3915547024952015,
      "grad_norm": 19.16864585876465,
      "learning_rate": 4.6091678695844164e-05,
      "loss": 28.8591,
      "step": 102
    },
    {
      "epoch": 0.39539347408829173,
      "grad_norm": 20.256633758544922,
      "learning_rate": 4.600860179094732e-05,
      "loss": 26.8054,
      "step": 103
    },
    {
      "epoch": 0.39923224568138194,
      "grad_norm": 18.64192008972168,
      "learning_rate": 4.5924727708299445e-05,
      "loss": 27.3249,
      "step": 104
    },
    {
      "epoch": 0.40307101727447214,
      "grad_norm": 17.976713180541992,
      "learning_rate": 4.584005963052799e-05,
      "loss": 29.1977,
      "step": 105
    },
    {
      "epoch": 0.4069097888675624,
      "grad_norm": 17.99713706970215,
      "learning_rate": 4.575460077038877e-05,
      "loss": 29.0127,
      "step": 106
    },
    {
      "epoch": 0.4107485604606526,
      "grad_norm": 18.080854415893555,
      "learning_rate": 4.5668354370644086e-05,
      "loss": 29.716,
      "step": 107
    },
    {
      "epoch": 0.4145873320537428,
      "grad_norm": 17.458669662475586,
      "learning_rate": 4.558132370393968e-05,
      "loss": 29.7465,
      "step": 108
    },
    {
      "epoch": 0.418426103646833,
      "grad_norm": 18.444486618041992,
      "learning_rate": 4.5493512072680536e-05,
      "loss": 30.3556,
      "step": 109
    },
    {
      "epoch": 0.42226487523992323,
      "grad_norm": 17.931936264038086,
      "learning_rate": 4.540492280890555e-05,
      "loss": 27.0296,
      "step": 110
    },
    {
      "epoch": 0.42610364683301344,
      "grad_norm": 17.657825469970703,
      "learning_rate": 4.5315559274161144e-05,
      "loss": 28.3838,
      "step": 111
    },
    {
      "epoch": 0.42994241842610365,
      "grad_norm": 17.2442626953125,
      "learning_rate": 4.522542485937369e-05,
      "loss": 26.963,
      "step": 112
    },
    {
      "epoch": 0.43378119001919385,
      "grad_norm": 16.4385929107666,
      "learning_rate": 4.5134522984720816e-05,
      "loss": 28.0953,
      "step": 113
    },
    {
      "epoch": 0.43761996161228406,
      "grad_norm": 16.927474975585938,
      "learning_rate": 4.504285709950167e-05,
      "loss": 29.0116,
      "step": 114
    },
    {
      "epoch": 0.44145873320537427,
      "grad_norm": 18.508386611938477,
      "learning_rate": 4.4950430682006e-05,
      "loss": 27.5212,
      "step": 115
    },
    {
      "epoch": 0.44529750479846447,
      "grad_norm": 17.00088882446289,
      "learning_rate": 4.485724723938215e-05,
      "loss": 27.2209,
      "step": 116
    },
    {
      "epoch": 0.4491362763915547,
      "grad_norm": 17.35956573486328,
      "learning_rate": 4.476331030750408e-05,
      "loss": 28.9031,
      "step": 117
    },
    {
      "epoch": 0.45297504798464494,
      "grad_norm": 17.835237503051758,
      "learning_rate": 4.4668623450837085e-05,
      "loss": 25.8466,
      "step": 118
    },
    {
      "epoch": 0.45681381957773515,
      "grad_norm": 18.052587509155273,
      "learning_rate": 4.457319026230257e-05,
      "loss": 27.7288,
      "step": 119
    },
    {
      "epoch": 0.46065259117082535,
      "grad_norm": 21.071945190429688,
      "learning_rate": 4.447701436314176e-05,
      "loss": 27.2769,
      "step": 120
    },
    {
      "epoch": 0.46449136276391556,
      "grad_norm": 17.07927703857422,
      "learning_rate": 4.4380099402778244e-05,
      "loss": 27.3407,
      "step": 121
    },
    {
      "epoch": 0.46833013435700577,
      "grad_norm": 17.430988311767578,
      "learning_rate": 4.428244905867952e-05,
      "loss": 27.7631,
      "step": 122
    },
    {
      "epoch": 0.472168905950096,
      "grad_norm": 18.140281677246094,
      "learning_rate": 4.418406703621743e-05,
      "loss": 27.4634,
      "step": 123
    },
    {
      "epoch": 0.4760076775431862,
      "grad_norm": 18.464765548706055,
      "learning_rate": 4.408495706852758e-05,
      "loss": 27.1883,
      "step": 124
    },
    {
      "epoch": 0.4798464491362764,
      "grad_norm": 17.243370056152344,
      "learning_rate": 4.398512291636768e-05,
      "loss": 29.4247,
      "step": 125
    },
    {
      "epoch": 0.4836852207293666,
      "grad_norm": 18.10504722595215,
      "learning_rate": 4.3884568367974845e-05,
      "loss": 27.3026,
      "step": 126
    },
    {
      "epoch": 0.4875239923224568,
      "grad_norm": 18.39180564880371,
      "learning_rate": 4.378329723892184e-05,
      "loss": 28.0279,
      "step": 127
    },
    {
      "epoch": 0.491362763915547,
      "grad_norm": 16.66558265686035,
      "learning_rate": 4.3681313371972276e-05,
      "loss": 27.5308,
      "step": 128
    },
    {
      "epoch": 0.4952015355086372,
      "grad_norm": 16.756423950195312,
      "learning_rate": 4.357862063693486e-05,
      "loss": 28.4946,
      "step": 129
    },
    {
      "epoch": 0.4990403071017274,
      "grad_norm": 17.056476593017578,
      "learning_rate": 4.347522293051648e-05,
      "loss": 27.8562,
      "step": 130
    },
    {
      "epoch": 0.4990403071017274,
      "eval_loss": 1.7600558996200562,
      "eval_runtime": 31.0188,
      "eval_samples_per_second": 9.543,
      "eval_steps_per_second": 4.771,
      "step": 130
    },
    {
      "epoch": 0.5028790786948176,
      "grad_norm": 17.216785430908203,
      "learning_rate": 4.337112417617439e-05,
      "loss": 27.1427,
      "step": 131
    },
    {
      "epoch": 0.5067178502879078,
      "grad_norm": 16.08450698852539,
      "learning_rate": 4.3266328323967333e-05,
      "loss": 27.4919,
      "step": 132
    },
    {
      "epoch": 0.510556621880998,
      "grad_norm": 17.361270904541016,
      "learning_rate": 4.3160839350405606e-05,
      "loss": 26.1541,
      "step": 133
    },
    {
      "epoch": 0.5143953934740882,
      "grad_norm": 17.303585052490234,
      "learning_rate": 4.305466125830023e-05,
      "loss": 26.0347,
      "step": 134
    },
    {
      "epoch": 0.5182341650671785,
      "grad_norm": 17.440980911254883,
      "learning_rate": 4.294779807661105e-05,
      "loss": 27.2826,
      "step": 135
    },
    {
      "epoch": 0.5220729366602687,
      "grad_norm": 19.435768127441406,
      "learning_rate": 4.284025386029381e-05,
      "loss": 27.5317,
      "step": 136
    },
    {
      "epoch": 0.525911708253359,
      "grad_norm": 17.796663284301758,
      "learning_rate": 4.273203269014634e-05,
      "loss": 27.8947,
      "step": 137
    },
    {
      "epoch": 0.5297504798464492,
      "grad_norm": 17.049524307250977,
      "learning_rate": 4.2623138672653684e-05,
      "loss": 29.3983,
      "step": 138
    },
    {
      "epoch": 0.5335892514395394,
      "grad_norm": 17.124839782714844,
      "learning_rate": 4.2513575939832275e-05,
      "loss": 28.4112,
      "step": 139
    },
    {
      "epoch": 0.5374280230326296,
      "grad_norm": 17.724750518798828,
      "learning_rate": 4.2403348649073174e-05,
      "loss": 29.1517,
      "step": 140
    },
    {
      "epoch": 0.5412667946257198,
      "grad_norm": 17.679073333740234,
      "learning_rate": 4.2292460982984255e-05,
      "loss": 27.1786,
      "step": 141
    },
    {
      "epoch": 0.54510556621881,
      "grad_norm": 18.939489364624023,
      "learning_rate": 4.218091714923157e-05,
      "loss": 28.6373,
      "step": 142
    },
    {
      "epoch": 0.5489443378119002,
      "grad_norm": 17.122732162475586,
      "learning_rate": 4.206872138037964e-05,
      "loss": 27.252,
      "step": 143
    },
    {
      "epoch": 0.5527831094049904,
      "grad_norm": 18.2526912689209,
      "learning_rate": 4.1955877933730855e-05,
      "loss": 29.9347,
      "step": 144
    },
    {
      "epoch": 0.5566218809980806,
      "grad_norm": 18.54884147644043,
      "learning_rate": 4.184239109116393e-05,
      "loss": 27.988,
      "step": 145
    },
    {
      "epoch": 0.5604606525911708,
      "grad_norm": 18.026559829711914,
      "learning_rate": 4.172826515897146e-05,
      "loss": 28.1473,
      "step": 146
    },
    {
      "epoch": 0.564299424184261,
      "grad_norm": 17.47412109375,
      "learning_rate": 4.161350446769645e-05,
      "loss": 26.7597,
      "step": 147
    },
    {
      "epoch": 0.5681381957773513,
      "grad_norm": 19.019777297973633,
      "learning_rate": 4.149811337196807e-05,
      "loss": 27.5097,
      "step": 148
    },
    {
      "epoch": 0.5719769673704415,
      "grad_norm": 18.873889923095703,
      "learning_rate": 4.1382096250336346e-05,
      "loss": 26.3739,
      "step": 149
    },
    {
      "epoch": 0.5758157389635317,
      "grad_norm": 17.524677276611328,
      "learning_rate": 4.126545750510605e-05,
      "loss": 26.7282,
      "step": 150
    },
    {
      "epoch": 0.5796545105566219,
      "grad_norm": 17.283926010131836,
      "learning_rate": 4.1148201562169685e-05,
      "loss": 26.9749,
      "step": 151
    },
    {
      "epoch": 0.5834932821497121,
      "grad_norm": 17.261751174926758,
      "learning_rate": 4.103033287083946e-05,
      "loss": 27.2926,
      "step": 152
    },
    {
      "epoch": 0.5873320537428023,
      "grad_norm": 16.952129364013672,
      "learning_rate": 4.0911855903678534e-05,
      "loss": 29.1231,
      "step": 153
    },
    {
      "epoch": 0.5911708253358925,
      "grad_norm": 16.523609161376953,
      "learning_rate": 4.079277515633127e-05,
      "loss": 27.9168,
      "step": 154
    },
    {
      "epoch": 0.5950095969289827,
      "grad_norm": 17.648927688598633,
      "learning_rate": 4.067309514735267e-05,
      "loss": 26.9956,
      "step": 155
    },
    {
      "epoch": 0.5988483685220729,
      "grad_norm": 18.750659942626953,
      "learning_rate": 4.055282041803685e-05,
      "loss": 27.5584,
      "step": 156
    },
    {
      "epoch": 0.6026871401151631,
      "grad_norm": 17.183300018310547,
      "learning_rate": 4.0431955532244827e-05,
      "loss": 27.9265,
      "step": 157
    },
    {
      "epoch": 0.6065259117082533,
      "grad_norm": 17.270681381225586,
      "learning_rate": 4.031050507623125e-05,
      "loss": 28.9735,
      "step": 158
    },
    {
      "epoch": 0.6103646833013435,
      "grad_norm": 19.801315307617188,
      "learning_rate": 4.0188473658470426e-05,
      "loss": 26.7811,
      "step": 159
    },
    {
      "epoch": 0.6142034548944337,
      "grad_norm": 16.823190689086914,
      "learning_rate": 4.0065865909481417e-05,
      "loss": 26.9071,
      "step": 160
    },
    {
      "epoch": 0.6180422264875239,
      "grad_norm": 17.074491500854492,
      "learning_rate": 3.9942686481652344e-05,
      "loss": 27.9241,
      "step": 161
    },
    {
      "epoch": 0.6218809980806143,
      "grad_norm": 18.242040634155273,
      "learning_rate": 3.981894004906388e-05,
      "loss": 28.2823,
      "step": 162
    },
    {
      "epoch": 0.6257197696737045,
      "grad_norm": 16.200469970703125,
      "learning_rate": 3.969463130731183e-05,
      "loss": 27.2651,
      "step": 163
    },
    {
      "epoch": 0.6295585412667947,
      "grad_norm": 17.807695388793945,
      "learning_rate": 3.9569764973329026e-05,
      "loss": 28.572,
      "step": 164
    },
    {
      "epoch": 0.6333973128598849,
      "grad_norm": 16.356901168823242,
      "learning_rate": 3.9444345785206285e-05,
      "loss": 27.4942,
      "step": 165
    },
    {
      "epoch": 0.6372360844529751,
      "grad_norm": 18.459415435791016,
      "learning_rate": 3.931837850201263e-05,
      "loss": 27.4934,
      "step": 166
    },
    {
      "epoch": 0.6410748560460653,
      "grad_norm": 18.079626083374023,
      "learning_rate": 3.919186790361475e-05,
      "loss": 28.0261,
      "step": 167
    },
    {
      "epoch": 0.6449136276391555,
      "grad_norm": 18.058738708496094,
      "learning_rate": 3.906481879049558e-05,
      "loss": 27.2611,
      "step": 168
    },
    {
      "epoch": 0.6487523992322457,
      "grad_norm": 17.553970336914062,
      "learning_rate": 3.893723598357214e-05,
      "loss": 27.5768,
      "step": 169
    },
    {
      "epoch": 0.6525911708253359,
      "grad_norm": 18.69593620300293,
      "learning_rate": 3.880912432401265e-05,
      "loss": 27.5784,
      "step": 170
    },
    {
      "epoch": 0.6564299424184261,
      "grad_norm": 17.491533279418945,
      "learning_rate": 3.8680488673052784e-05,
      "loss": 29.1586,
      "step": 171
    },
    {
      "epoch": 0.6602687140115163,
      "grad_norm": 18.12783432006836,
      "learning_rate": 3.855133391181124e-05,
      "loss": 27.3099,
      "step": 172
    },
    {
      "epoch": 0.6641074856046065,
      "grad_norm": 18.322479248046875,
      "learning_rate": 3.842166494110451e-05,
      "loss": 29.5131,
      "step": 173
    },
    {
      "epoch": 0.6679462571976967,
      "grad_norm": 17.041065216064453,
      "learning_rate": 3.82914866812609e-05,
      "loss": 28.5525,
      "step": 174
    },
    {
      "epoch": 0.6717850287907869,
      "grad_norm": 17.609098434448242,
      "learning_rate": 3.81608040719339e-05,
      "loss": 28.8543,
      "step": 175
    },
    {
      "epoch": 0.6756238003838771,
      "grad_norm": 17.111724853515625,
      "learning_rate": 3.802962207191463e-05,
      "loss": 25.6639,
      "step": 176
    },
    {
      "epoch": 0.6794625719769674,
      "grad_norm": 17.328916549682617,
      "learning_rate": 3.789794565894378e-05,
      "loss": 26.0536,
      "step": 177
    },
    {
      "epoch": 0.6833013435700576,
      "grad_norm": 17.837627410888672,
      "learning_rate": 3.7765779829522675e-05,
      "loss": 27.5278,
      "step": 178
    },
    {
      "epoch": 0.6871401151631478,
      "grad_norm": 15.394762992858887,
      "learning_rate": 3.7633129598723704e-05,
      "loss": 27.7244,
      "step": 179
    },
    {
      "epoch": 0.690978886756238,
      "grad_norm": 19.647441864013672,
      "learning_rate": 3.7500000000000003e-05,
      "loss": 25.729,
      "step": 180
    },
    {
      "epoch": 0.6948176583493282,
      "grad_norm": 17.077699661254883,
      "learning_rate": 3.7366396084994475e-05,
      "loss": 26.2709,
      "step": 181
    },
    {
      "epoch": 0.6986564299424184,
      "grad_norm": 16.858604431152344,
      "learning_rate": 3.723232292334809e-05,
      "loss": 27.8621,
      "step": 182
    },
    {
      "epoch": 0.7024952015355086,
      "grad_norm": 17.455453872680664,
      "learning_rate": 3.709778560250754e-05,
      "loss": 27.0243,
      "step": 183
    },
    {
      "epoch": 0.7063339731285988,
      "grad_norm": 16.5456600189209,
      "learning_rate": 3.696278922753216e-05,
      "loss": 27.6667,
      "step": 184
    },
    {
      "epoch": 0.710172744721689,
      "grad_norm": 17.52065086364746,
      "learning_rate": 3.6827338920900254e-05,
      "loss": 27.4649,
      "step": 185
    },
    {
      "epoch": 0.7140115163147792,
      "grad_norm": 15.55224323272705,
      "learning_rate": 3.669143982231467e-05,
      "loss": 26.4226,
      "step": 186
    },
    {
      "epoch": 0.7178502879078695,
      "grad_norm": 16.189666748046875,
      "learning_rate": 3.655509708850783e-05,
      "loss": 26.3024,
      "step": 187
    },
    {
      "epoch": 0.7216890595009597,
      "grad_norm": 17.70796775817871,
      "learning_rate": 3.641831589304602e-05,
      "loss": 27.1759,
      "step": 188
    },
    {
      "epoch": 0.72552783109405,
      "grad_norm": 16.10495376586914,
      "learning_rate": 3.628110142613308e-05,
      "loss": 26.5754,
      "step": 189
    },
    {
      "epoch": 0.7293666026871402,
      "grad_norm": 17.34556007385254,
      "learning_rate": 3.6143458894413465e-05,
      "loss": 27.8653,
      "step": 190
    },
    {
      "epoch": 0.7332053742802304,
      "grad_norm": 18.321557998657227,
      "learning_rate": 3.600539352077469e-05,
      "loss": 27.7326,
      "step": 191
    },
    {
      "epoch": 0.7370441458733206,
      "grad_norm": 15.874064445495605,
      "learning_rate": 3.586691054414913e-05,
      "loss": 26.683,
      "step": 192
    },
    {
      "epoch": 0.7408829174664108,
      "grad_norm": 16.701417922973633,
      "learning_rate": 3.572801521931522e-05,
      "loss": 25.815,
      "step": 193
    },
    {
      "epoch": 0.744721689059501,
      "grad_norm": 17.957345962524414,
      "learning_rate": 3.558871281669811e-05,
      "loss": 26.726,
      "step": 194
    },
    {
      "epoch": 0.7485604606525912,
      "grad_norm": 17.00701141357422,
      "learning_rate": 3.544900862216959e-05,
      "loss": 26.8739,
      "step": 195
    },
    {
      "epoch": 0.7523992322456814,
      "grad_norm": 19.22977638244629,
      "learning_rate": 3.5308907936847594e-05,
      "loss": 26.1091,
      "step": 196
    },
    {
      "epoch": 0.7562380038387716,
      "grad_norm": 18.498205184936523,
      "learning_rate": 3.516841607689501e-05,
      "loss": 25.4755,
      "step": 197
    },
    {
      "epoch": 0.7600767754318618,
      "grad_norm": 18.955081939697266,
      "learning_rate": 3.502753837331797e-05,
      "loss": 26.3684,
      "step": 198
    },
    {
      "epoch": 0.763915547024952,
      "grad_norm": 18.34401512145996,
      "learning_rate": 3.488628017176356e-05,
      "loss": 28.767,
      "step": 199
    },
    {
      "epoch": 0.7677543186180422,
      "grad_norm": 18.423044204711914,
      "learning_rate": 3.474464683231698e-05,
      "loss": 27.2117,
      "step": 200
    },
    {
      "epoch": 0.7715930902111324,
      "grad_norm": 18.909847259521484,
      "learning_rate": 3.460264372929815e-05,
      "loss": 26.408,
      "step": 201
    },
    {
      "epoch": 0.7754318618042226,
      "grad_norm": 16.31786346435547,
      "learning_rate": 3.446027625105776e-05,
      "loss": 26.7065,
      "step": 202
    },
    {
      "epoch": 0.7792706333973128,
      "grad_norm": 18.030500411987305,
      "learning_rate": 3.431754979977285e-05,
      "loss": 27.028,
      "step": 203
    },
    {
      "epoch": 0.783109404990403,
      "grad_norm": 16.70123863220215,
      "learning_rate": 3.4174469791241806e-05,
      "loss": 27.5544,
      "step": 204
    },
    {
      "epoch": 0.7869481765834933,
      "grad_norm": 16.9814453125,
      "learning_rate": 3.403104165467883e-05,
      "loss": 27.5372,
      "step": 205
    },
    {
      "epoch": 0.7907869481765835,
      "grad_norm": 17.28445053100586,
      "learning_rate": 3.388727083250795e-05,
      "loss": 27.4625,
      "step": 206
    },
    {
      "epoch": 0.7946257197696737,
      "grad_norm": 17.412431716918945,
      "learning_rate": 3.374316278015653e-05,
      "loss": 27.897,
      "step": 207
    },
    {
      "epoch": 0.7984644913627639,
      "grad_norm": 16.263408660888672,
      "learning_rate": 3.3598722965848204e-05,
      "loss": 27.2817,
      "step": 208
    },
    {
      "epoch": 0.8023032629558541,
      "grad_norm": 16.9997615814209,
      "learning_rate": 3.345395687039543e-05,
      "loss": 26.4631,
      "step": 209
    },
    {
      "epoch": 0.8061420345489443,
      "grad_norm": 17.816619873046875,
      "learning_rate": 3.330886998699149e-05,
      "loss": 25.6059,
      "step": 210
    },
    {
      "epoch": 0.8099808061420346,
      "grad_norm": 17.235605239868164,
      "learning_rate": 3.316346782100208e-05,
      "loss": 27.4531,
      "step": 211
    },
    {
      "epoch": 0.8138195777351248,
      "grad_norm": 15.711729049682617,
      "learning_rate": 3.301775588975638e-05,
      "loss": 27.0936,
      "step": 212
    },
    {
      "epoch": 0.817658349328215,
      "grad_norm": 16.274784088134766,
      "learning_rate": 3.28717397223377e-05,
      "loss": 25.6661,
      "step": 213
    },
    {
      "epoch": 0.8214971209213052,
      "grad_norm": 15.97848129272461,
      "learning_rate": 3.272542485937369e-05,
      "loss": 26.772,
      "step": 214
    },
    {
      "epoch": 0.8253358925143954,
      "grad_norm": 16.191444396972656,
      "learning_rate": 3.257881685282609e-05,
      "loss": 25.8711,
      "step": 215
    },
    {
      "epoch": 0.8291746641074856,
      "grad_norm": 16.814664840698242,
      "learning_rate": 3.243192126578007e-05,
      "loss": 25.5683,
      "step": 216
    },
    {
      "epoch": 0.8330134357005758,
      "grad_norm": 15.929566383361816,
      "learning_rate": 3.228474367223312e-05,
      "loss": 27.5315,
      "step": 217
    },
    {
      "epoch": 0.836852207293666,
      "grad_norm": 16.453189849853516,
      "learning_rate": 3.2137289656883556e-05,
      "loss": 27.1406,
      "step": 218
    },
    {
      "epoch": 0.8406909788867563,
      "grad_norm": 15.205329895019531,
      "learning_rate": 3.19895648149186e-05,
      "loss": 26.2816,
      "step": 219
    },
    {
      "epoch": 0.8445297504798465,
      "grad_norm": 16.378446578979492,
      "learning_rate": 3.1841574751802076e-05,
      "loss": 26.6438,
      "step": 220
    },
    {
      "epoch": 0.8483685220729367,
      "grad_norm": 16.06751251220703,
      "learning_rate": 3.16933250830617e-05,
      "loss": 27.0177,
      "step": 221
    },
    {
      "epoch": 0.8522072936660269,
      "grad_norm": 15.032289505004883,
      "learning_rate": 3.1544821434076014e-05,
      "loss": 27.5912,
      "step": 222
    },
    {
      "epoch": 0.8560460652591171,
      "grad_norm": 16.624408721923828,
      "learning_rate": 3.1396069439860894e-05,
      "loss": 28.211,
      "step": 223
    },
    {
      "epoch": 0.8598848368522073,
      "grad_norm": 17.585723876953125,
      "learning_rate": 3.124707474485577e-05,
      "loss": 26.8424,
      "step": 224
    },
    {
      "epoch": 0.8637236084452975,
      "grad_norm": 15.791353225708008,
      "learning_rate": 3.109784300270943e-05,
      "loss": 26.7544,
      "step": 225
    },
    {
      "epoch": 0.8675623800383877,
      "grad_norm": 16.265228271484375,
      "learning_rate": 3.094837987606547e-05,
      "loss": 26.9071,
      "step": 226
    },
    {
      "epoch": 0.8714011516314779,
      "grad_norm": 15.219544410705566,
      "learning_rate": 3.0798691036347453e-05,
      "loss": 25.127,
      "step": 227
    },
    {
      "epoch": 0.8752399232245681,
      "grad_norm": 16.0206241607666,
      "learning_rate": 3.064878216354369e-05,
      "loss": 27.3355,
      "step": 228
    },
    {
      "epoch": 0.8790786948176583,
      "grad_norm": 15.717583656311035,
      "learning_rate": 3.049865894599172e-05,
      "loss": 27.7224,
      "step": 229
    },
    {
      "epoch": 0.8829174664107485,
      "grad_norm": 16.433361053466797,
      "learning_rate": 3.0348327080162435e-05,
      "loss": 27.4405,
      "step": 230
    },
    {
      "epoch": 0.8867562380038387,
      "grad_norm": 16.457277297973633,
      "learning_rate": 3.0197792270443982e-05,
      "loss": 26.5396,
      "step": 231
    },
    {
      "epoch": 0.8905950095969289,
      "grad_norm": 15.61976146697998,
      "learning_rate": 3.0047060228925256e-05,
      "loss": 27.3457,
      "step": 232
    },
    {
      "epoch": 0.8944337811900192,
      "grad_norm": 16.303308486938477,
      "learning_rate": 2.9896136675179176e-05,
      "loss": 26.2405,
      "step": 233
    },
    {
      "epoch": 0.8982725527831094,
      "grad_norm": 15.62144660949707,
      "learning_rate": 2.974502733604565e-05,
      "loss": 26.1536,
      "step": 234
    },
    {
      "epoch": 0.9021113243761996,
      "grad_norm": 16.30591583251953,
      "learning_rate": 2.9593737945414264e-05,
      "loss": 26.1949,
      "step": 235
    },
    {
      "epoch": 0.9059500959692899,
      "grad_norm": 16.510095596313477,
      "learning_rate": 2.9442274244006722e-05,
      "loss": 26.6893,
      "step": 236
    },
    {
      "epoch": 0.9097888675623801,
      "grad_norm": 16.378929138183594,
      "learning_rate": 2.9290641979158993e-05,
      "loss": 28.0321,
      "step": 237
    },
    {
      "epoch": 0.9136276391554703,
      "grad_norm": 15.976604461669922,
      "learning_rate": 2.913884690460325e-05,
      "loss": 27.17,
      "step": 238
    },
    {
      "epoch": 0.9174664107485605,
      "grad_norm": 16.334959030151367,
      "learning_rate": 2.8986894780249524e-05,
      "loss": 26.3522,
      "step": 239
    },
    {
      "epoch": 0.9213051823416507,
      "grad_norm": 16.02887725830078,
      "learning_rate": 2.8834791371967142e-05,
      "loss": 28.4084,
      "step": 240
    },
    {
      "epoch": 0.9251439539347409,
      "grad_norm": 17.058687210083008,
      "learning_rate": 2.868254245136594e-05,
      "loss": 26.801,
      "step": 241
    },
    {
      "epoch": 0.9289827255278311,
      "grad_norm": 15.894112586975098,
      "learning_rate": 2.8530153795577286e-05,
      "loss": 25.7278,
      "step": 242
    },
    {
      "epoch": 0.9328214971209213,
      "grad_norm": 16.269180297851562,
      "learning_rate": 2.8377631187034825e-05,
      "loss": 26.3214,
      "step": 243
    },
    {
      "epoch": 0.9366602687140115,
      "grad_norm": 15.76980209350586,
      "learning_rate": 2.8224980413255086e-05,
      "loss": 27.0605,
      "step": 244
    },
    {
      "epoch": 0.9404990403071017,
      "grad_norm": 15.396162986755371,
      "learning_rate": 2.8072207266617855e-05,
      "loss": 26.77,
      "step": 245
    },
    {
      "epoch": 0.944337811900192,
      "grad_norm": 16.089946746826172,
      "learning_rate": 2.7919317544146405e-05,
      "loss": 26.3307,
      "step": 246
    },
    {
      "epoch": 0.9481765834932822,
      "grad_norm": 15.087519645690918,
      "learning_rate": 2.776631704728752e-05,
      "loss": 25.4705,
      "step": 247
    },
    {
      "epoch": 0.9520153550863724,
      "grad_norm": 16.0484619140625,
      "learning_rate": 2.761321158169134e-05,
      "loss": 25.8905,
      "step": 248
    },
    {
      "epoch": 0.9558541266794626,
      "grad_norm": 16.0047607421875,
      "learning_rate": 2.746000695699107e-05,
      "loss": 26.0246,
      "step": 249
    },
    {
      "epoch": 0.9596928982725528,
      "grad_norm": 16.781606674194336,
      "learning_rate": 2.7306708986582553e-05,
      "loss": 25.6385,
      "step": 250
    },
    {
      "epoch": 0.963531669865643,
      "grad_norm": 15.778640747070312,
      "learning_rate": 2.7153323487403655e-05,
      "loss": 26.7195,
      "step": 251
    },
    {
      "epoch": 0.9673704414587332,
      "grad_norm": 17.09006118774414,
      "learning_rate": 2.6999856279713544e-05,
      "loss": 25.475,
      "step": 252
    },
    {
      "epoch": 0.9712092130518234,
      "grad_norm": 16.174684524536133,
      "learning_rate": 2.6846313186871853e-05,
      "loss": 24.9302,
      "step": 253
    },
    {
      "epoch": 0.9750479846449136,
      "grad_norm": 15.83430290222168,
      "learning_rate": 2.669270003511769e-05,
      "loss": 25.4569,
      "step": 254
    },
    {
      "epoch": 0.9788867562380038,
      "grad_norm": 16.559917449951172,
      "learning_rate": 2.653902265334858e-05,
      "loss": 26.9698,
      "step": 255
    },
    {
      "epoch": 0.982725527831094,
      "grad_norm": 15.691744804382324,
      "learning_rate": 2.638528687289925e-05,
      "loss": 25.7697,
      "step": 256
    },
    {
      "epoch": 0.9865642994241842,
      "grad_norm": 15.908937454223633,
      "learning_rate": 2.6231498527320425e-05,
      "loss": 26.0816,
      "step": 257
    },
    {
      "epoch": 0.9904030710172744,
      "grad_norm": 15.256813049316406,
      "learning_rate": 2.6077663452157397e-05,
      "loss": 27.2372,
      "step": 258
    },
    {
      "epoch": 0.9942418426103646,
      "grad_norm": 16.329204559326172,
      "learning_rate": 2.592378748472863e-05,
      "loss": 26.6932,
      "step": 259
    },
    {
      "epoch": 0.9980806142034548,
      "grad_norm": 15.60101318359375,
      "learning_rate": 2.5769876463904265e-05,
      "loss": 26.632,
      "step": 260
    },
    {
      "epoch": 0.9980806142034548,
      "eval_loss": 1.698987364768982,
      "eval_runtime": 31.0703,
      "eval_samples_per_second": 9.527,
      "eval_steps_per_second": 4.763,
      "step": 260
    }
  ],
  "logging_steps": 1,
  "max_steps": 520,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 2,
  "save_steps": 260,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 2.4952457875972424e+18,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
}
|
|