|
{
  "best_metric": 1.3558998107910156,
  "best_model_checkpoint": "miner_id_24/checkpoint-150",
  "epoch": 0.06536659766859135,
  "eval_steps": 50,
  "global_step": 150,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.000435777317790609,
      "grad_norm": 2.0851054191589355,
      "learning_rate": 5e-06,
      "loss": 1.6817,
      "step": 1
    },
    {
      "epoch": 0.000435777317790609,
      "eval_loss": 2.0230047702789307,
      "eval_runtime": 315.559,
      "eval_samples_per_second": 12.248,
      "eval_steps_per_second": 6.126,
      "step": 1
    },
    {
      "epoch": 0.000871554635581218,
      "grad_norm": 2.216548204421997,
      "learning_rate": 1e-05,
      "loss": 1.8021,
      "step": 2
    },
    {
      "epoch": 0.001307331953371827,
      "grad_norm": 2.3968324661254883,
      "learning_rate": 1.5e-05,
      "loss": 1.9165,
      "step": 3
    },
    {
      "epoch": 0.001743109271162436,
      "grad_norm": 2.045064926147461,
      "learning_rate": 2e-05,
      "loss": 1.8139,
      "step": 4
    },
    {
      "epoch": 0.002178886588953045,
      "grad_norm": 2.1230273246765137,
      "learning_rate": 2.5e-05,
      "loss": 1.8931,
      "step": 5
    },
    {
      "epoch": 0.002614663906743654,
      "grad_norm": 1.675313949584961,
      "learning_rate": 3e-05,
      "loss": 1.762,
      "step": 6
    },
    {
      "epoch": 0.003050441224534263,
      "grad_norm": 1.3971757888793945,
      "learning_rate": 3.5e-05,
      "loss": 1.5999,
      "step": 7
    },
    {
      "epoch": 0.003486218542324872,
      "grad_norm": 1.214509129524231,
      "learning_rate": 4e-05,
      "loss": 1.663,
      "step": 8
    },
    {
      "epoch": 0.003921995860115481,
      "grad_norm": 1.1938960552215576,
      "learning_rate": 4.5e-05,
      "loss": 1.6785,
      "step": 9
    },
    {
      "epoch": 0.00435777317790609,
      "grad_norm": 1.2489292621612549,
      "learning_rate": 5e-05,
      "loss": 1.5493,
      "step": 10
    },
    {
      "epoch": 0.004793550495696699,
      "grad_norm": 1.2138938903808594,
      "learning_rate": 5.500000000000001e-05,
      "loss": 1.622,
      "step": 11
    },
    {
      "epoch": 0.005229327813487308,
      "grad_norm": 1.2661142349243164,
      "learning_rate": 6e-05,
      "loss": 1.5328,
      "step": 12
    },
    {
      "epoch": 0.005665105131277917,
      "grad_norm": 1.047982931137085,
      "learning_rate": 6.500000000000001e-05,
      "loss": 1.5021,
      "step": 13
    },
    {
      "epoch": 0.006100882449068526,
      "grad_norm": 0.9978450536727905,
      "learning_rate": 7e-05,
      "loss": 1.6194,
      "step": 14
    },
    {
      "epoch": 0.006536659766859135,
      "grad_norm": 0.8680281043052673,
      "learning_rate": 7.500000000000001e-05,
      "loss": 1.3816,
      "step": 15
    },
    {
      "epoch": 0.006972437084649744,
      "grad_norm": 0.9196011424064636,
      "learning_rate": 8e-05,
      "loss": 1.5334,
      "step": 16
    },
    {
      "epoch": 0.007408214402440353,
      "grad_norm": 0.9667453765869141,
      "learning_rate": 8.5e-05,
      "loss": 1.4741,
      "step": 17
    },
    {
      "epoch": 0.007843991720230961,
      "grad_norm": 0.8727541565895081,
      "learning_rate": 9e-05,
      "loss": 1.4547,
      "step": 18
    },
    {
      "epoch": 0.008279769038021571,
      "grad_norm": 0.9195423722267151,
      "learning_rate": 9.5e-05,
      "loss": 1.5097,
      "step": 19
    },
    {
      "epoch": 0.00871554635581218,
      "grad_norm": 1.05409574508667,
      "learning_rate": 0.0001,
      "loss": 1.4515,
      "step": 20
    },
    {
      "epoch": 0.00915132367360279,
      "grad_norm": 0.8618441820144653,
      "learning_rate": 9.999238475781957e-05,
      "loss": 1.3947,
      "step": 21
    },
    {
      "epoch": 0.009587100991393397,
      "grad_norm": 0.8809842467308044,
      "learning_rate": 9.99695413509548e-05,
      "loss": 1.4563,
      "step": 22
    },
    {
      "epoch": 0.010022878309184007,
      "grad_norm": 0.8538223505020142,
      "learning_rate": 9.99314767377287e-05,
      "loss": 1.4483,
      "step": 23
    },
    {
      "epoch": 0.010458655626974615,
      "grad_norm": 0.8714299201965332,
      "learning_rate": 9.987820251299122e-05,
      "loss": 1.438,
      "step": 24
    },
    {
      "epoch": 0.010894432944765225,
      "grad_norm": 0.8516822457313538,
      "learning_rate": 9.980973490458728e-05,
      "loss": 1.382,
      "step": 25
    },
    {
      "epoch": 0.011330210262555833,
      "grad_norm": 0.9048829078674316,
      "learning_rate": 9.972609476841367e-05,
      "loss": 1.469,
      "step": 26
    },
    {
      "epoch": 0.011765987580346443,
      "grad_norm": 0.8362917900085449,
      "learning_rate": 9.962730758206611e-05,
      "loss": 1.4376,
      "step": 27
    },
    {
      "epoch": 0.012201764898137051,
      "grad_norm": 0.8152568936347961,
      "learning_rate": 9.951340343707852e-05,
      "loss": 1.4729,
      "step": 28
    },
    {
      "epoch": 0.012637542215927661,
      "grad_norm": 0.8475168943405151,
      "learning_rate": 9.938441702975689e-05,
      "loss": 1.4389,
      "step": 29
    },
    {
      "epoch": 0.01307331953371827,
      "grad_norm": 0.8305416703224182,
      "learning_rate": 9.924038765061042e-05,
      "loss": 1.444,
      "step": 30
    },
    {
      "epoch": 0.013509096851508879,
      "grad_norm": 0.8335140943527222,
      "learning_rate": 9.908135917238321e-05,
      "loss": 1.3876,
      "step": 31
    },
    {
      "epoch": 0.013944874169299489,
      "grad_norm": 0.8393346667289734,
      "learning_rate": 9.890738003669029e-05,
      "loss": 1.4717,
      "step": 32
    },
    {
      "epoch": 0.014380651487090097,
      "grad_norm": 0.8358327150344849,
      "learning_rate": 9.871850323926177e-05,
      "loss": 1.3911,
      "step": 33
    },
    {
      "epoch": 0.014816428804880707,
      "grad_norm": 0.8287831544876099,
      "learning_rate": 9.851478631379982e-05,
      "loss": 1.3734,
      "step": 34
    },
    {
      "epoch": 0.015252206122671315,
      "grad_norm": 0.7427653670310974,
      "learning_rate": 9.829629131445342e-05,
      "loss": 1.3105,
      "step": 35
    },
    {
      "epoch": 0.015687983440461923,
      "grad_norm": 0.8388314843177795,
      "learning_rate": 9.806308479691595e-05,
      "loss": 1.4373,
      "step": 36
    },
    {
      "epoch": 0.016123760758252535,
      "grad_norm": 0.7671462297439575,
      "learning_rate": 9.781523779815179e-05,
      "loss": 1.3883,
      "step": 37
    },
    {
      "epoch": 0.016559538076043143,
      "grad_norm": 0.8673655986785889,
      "learning_rate": 9.755282581475769e-05,
      "loss": 1.3485,
      "step": 38
    },
    {
      "epoch": 0.01699531539383375,
      "grad_norm": 0.8480393290519714,
      "learning_rate": 9.727592877996585e-05,
      "loss": 1.4058,
      "step": 39
    },
    {
      "epoch": 0.01743109271162436,
      "grad_norm": 0.8280643820762634,
      "learning_rate": 9.698463103929542e-05,
      "loss": 1.47,
      "step": 40
    },
    {
      "epoch": 0.01786687002941497,
      "grad_norm": 0.7347378730773926,
      "learning_rate": 9.667902132486009e-05,
      "loss": 1.2069,
      "step": 41
    },
    {
      "epoch": 0.01830264734720558,
      "grad_norm": 0.8081456422805786,
      "learning_rate": 9.635919272833938e-05,
      "loss": 1.4003,
      "step": 42
    },
    {
      "epoch": 0.018738424664996187,
      "grad_norm": 0.7185221314430237,
      "learning_rate": 9.602524267262203e-05,
      "loss": 1.2813,
      "step": 43
    },
    {
      "epoch": 0.019174201982786795,
      "grad_norm": 0.7656445503234863,
      "learning_rate": 9.567727288213005e-05,
      "loss": 1.4089,
      "step": 44
    },
    {
      "epoch": 0.019609979300577406,
      "grad_norm": 0.936586320400238,
      "learning_rate": 9.53153893518325e-05,
      "loss": 1.3603,
      "step": 45
    },
    {
      "epoch": 0.020045756618368014,
      "grad_norm": 0.7969225645065308,
      "learning_rate": 9.493970231495835e-05,
      "loss": 1.3352,
      "step": 46
    },
    {
      "epoch": 0.020481533936158623,
      "grad_norm": 0.8012515306472778,
      "learning_rate": 9.45503262094184e-05,
      "loss": 1.3508,
      "step": 47
    },
    {
      "epoch": 0.02091731125394923,
      "grad_norm": 0.7480721473693848,
      "learning_rate": 9.414737964294636e-05,
      "loss": 1.3603,
      "step": 48
    },
    {
      "epoch": 0.021353088571739842,
      "grad_norm": 0.814703643321991,
      "learning_rate": 9.373098535696979e-05,
      "loss": 1.3205,
      "step": 49
    },
    {
      "epoch": 0.02178886588953045,
      "grad_norm": 0.9260087609291077,
      "learning_rate": 9.330127018922194e-05,
      "loss": 1.3108,
      "step": 50
    },
    {
      "epoch": 0.02178886588953045,
      "eval_loss": 1.3878036737442017,
      "eval_runtime": 317.8565,
      "eval_samples_per_second": 12.16,
      "eval_steps_per_second": 6.081,
      "step": 50
    },
    {
      "epoch": 0.02222464320732106,
      "grad_norm": 0.7175142168998718,
      "learning_rate": 9.285836503510562e-05,
      "loss": 1.3437,
      "step": 51
    },
    {
      "epoch": 0.022660420525111667,
      "grad_norm": 0.7583256363868713,
      "learning_rate": 9.24024048078213e-05,
      "loss": 1.3406,
      "step": 52
    },
    {
      "epoch": 0.023096197842902278,
      "grad_norm": 0.677870512008667,
      "learning_rate": 9.193352839727121e-05,
      "loss": 1.467,
      "step": 53
    },
    {
      "epoch": 0.023531975160692886,
      "grad_norm": 0.725477397441864,
      "learning_rate": 9.145187862775209e-05,
      "loss": 1.3124,
      "step": 54
    },
    {
      "epoch": 0.023967752478483494,
      "grad_norm": 0.683293342590332,
      "learning_rate": 9.09576022144496e-05,
      "loss": 1.4094,
      "step": 55
    },
    {
      "epoch": 0.024403529796274102,
      "grad_norm": 0.7447341084480286,
      "learning_rate": 9.045084971874738e-05,
      "loss": 1.3858,
      "step": 56
    },
    {
      "epoch": 0.024839307114064714,
      "grad_norm": 0.759089469909668,
      "learning_rate": 8.993177550236464e-05,
      "loss": 1.3845,
      "step": 57
    },
    {
      "epoch": 0.025275084431855322,
      "grad_norm": 0.6921642422676086,
      "learning_rate": 8.940053768033609e-05,
      "loss": 1.4491,
      "step": 58
    },
    {
      "epoch": 0.02571086174964593,
      "grad_norm": 0.6369518637657166,
      "learning_rate": 8.885729807284856e-05,
      "loss": 1.3408,
      "step": 59
    },
    {
      "epoch": 0.02614663906743654,
      "grad_norm": 0.7366051077842712,
      "learning_rate": 8.83022221559489e-05,
      "loss": 1.4087,
      "step": 60
    },
    {
      "epoch": 0.02658241638522715,
      "grad_norm": 0.7187365293502808,
      "learning_rate": 8.773547901113862e-05,
      "loss": 1.3983,
      "step": 61
    },
    {
      "epoch": 0.027018193703017758,
      "grad_norm": 0.7734277844429016,
      "learning_rate": 8.715724127386972e-05,
      "loss": 1.5799,
      "step": 62
    },
    {
      "epoch": 0.027453971020808366,
      "grad_norm": 0.6761085391044617,
      "learning_rate": 8.656768508095853e-05,
      "loss": 1.3576,
      "step": 63
    },
    {
      "epoch": 0.027889748338598978,
      "grad_norm": 0.783863365650177,
      "learning_rate": 8.596699001693255e-05,
      "loss": 1.4574,
      "step": 64
    },
    {
      "epoch": 0.028325525656389586,
      "grad_norm": 0.7435303330421448,
      "learning_rate": 8.535533905932738e-05,
      "loss": 1.4734,
      "step": 65
    },
    {
      "epoch": 0.028761302974180194,
      "grad_norm": 0.7461963891983032,
      "learning_rate": 8.473291852294987e-05,
      "loss": 1.4228,
      "step": 66
    },
    {
      "epoch": 0.029197080291970802,
      "grad_norm": 0.7791002988815308,
      "learning_rate": 8.409991800312493e-05,
      "loss": 1.4499,
      "step": 67
    },
    {
      "epoch": 0.029632857609761414,
      "grad_norm": 0.692180335521698,
      "learning_rate": 8.345653031794292e-05,
      "loss": 1.4522,
      "step": 68
    },
    {
      "epoch": 0.03006863492755202,
      "grad_norm": 0.7467637658119202,
      "learning_rate": 8.280295144952536e-05,
      "loss": 1.3698,
      "step": 69
    },
    {
      "epoch": 0.03050441224534263,
      "grad_norm": 0.7586157321929932,
      "learning_rate": 8.213938048432697e-05,
      "loss": 1.454,
      "step": 70
    },
    {
      "epoch": 0.030940189563133238,
      "grad_norm": 0.6296893358230591,
      "learning_rate": 8.146601955249188e-05,
      "loss": 1.34,
      "step": 71
    },
    {
      "epoch": 0.031375966880923846,
      "grad_norm": 0.8413500189781189,
      "learning_rate": 8.07830737662829e-05,
      "loss": 1.3662,
      "step": 72
    },
    {
      "epoch": 0.03181174419871446,
      "grad_norm": 0.672438383102417,
      "learning_rate": 8.009075115760243e-05,
      "loss": 1.3177,
      "step": 73
    },
    {
      "epoch": 0.03224752151650507,
      "grad_norm": 0.7362287044525146,
      "learning_rate": 7.938926261462366e-05,
      "loss": 1.3813,
      "step": 74
    },
    {
      "epoch": 0.032683298834295674,
      "grad_norm": 0.7228344082832336,
      "learning_rate": 7.86788218175523e-05,
      "loss": 1.4777,
      "step": 75
    },
    {
      "epoch": 0.033119076152086285,
      "grad_norm": 0.6640066504478455,
      "learning_rate": 7.795964517353735e-05,
      "loss": 1.3769,
      "step": 76
    },
    {
      "epoch": 0.03355485346987689,
      "grad_norm": 0.6730275750160217,
      "learning_rate": 7.723195175075136e-05,
      "loss": 1.3875,
      "step": 77
    },
    {
      "epoch": 0.0339906307876675,
      "grad_norm": 0.7028450965881348,
      "learning_rate": 7.649596321166024e-05,
      "loss": 1.4002,
      "step": 78
    },
    {
      "epoch": 0.03442640810545811,
      "grad_norm": 0.6365458369255066,
      "learning_rate": 7.575190374550272e-05,
      "loss": 1.2605,
      "step": 79
    },
    {
      "epoch": 0.03486218542324872,
      "grad_norm": 0.6803222894668579,
      "learning_rate": 7.500000000000001e-05,
      "loss": 1.38,
      "step": 80
    },
    {
      "epoch": 0.03529796274103933,
      "grad_norm": 0.6870620250701904,
      "learning_rate": 7.424048101231686e-05,
      "loss": 1.3394,
      "step": 81
    },
    {
      "epoch": 0.03573374005882994,
      "grad_norm": 0.7038113474845886,
      "learning_rate": 7.347357813929454e-05,
      "loss": 1.4702,
      "step": 82
    },
    {
      "epoch": 0.036169517376620546,
      "grad_norm": 0.637474775314331,
      "learning_rate": 7.269952498697734e-05,
      "loss": 1.3277,
      "step": 83
    },
    {
      "epoch": 0.03660529469441116,
      "grad_norm": 0.7014281153678894,
      "learning_rate": 7.191855733945387e-05,
      "loss": 1.405,
      "step": 84
    },
    {
      "epoch": 0.03704107201220176,
      "grad_norm": 0.6626453399658203,
      "learning_rate": 7.113091308703498e-05,
      "loss": 1.4336,
      "step": 85
    },
    {
      "epoch": 0.03747684932999237,
      "grad_norm": 0.6772466897964478,
      "learning_rate": 7.033683215379002e-05,
      "loss": 1.3221,
      "step": 86
    },
    {
      "epoch": 0.037912626647782985,
      "grad_norm": 0.7135924100875854,
      "learning_rate": 6.953655642446368e-05,
      "loss": 1.3905,
      "step": 87
    },
    {
      "epoch": 0.03834840396557359,
      "grad_norm": 0.7085152864456177,
      "learning_rate": 6.873032967079561e-05,
      "loss": 1.3089,
      "step": 88
    },
    {
      "epoch": 0.0387841812833642,
      "grad_norm": 0.7906420826911926,
      "learning_rate": 6.7918397477265e-05,
      "loss": 1.3709,
      "step": 89
    },
    {
      "epoch": 0.03921995860115481,
      "grad_norm": 0.6890827417373657,
      "learning_rate": 6.710100716628344e-05,
      "loss": 1.2915,
      "step": 90
    },
    {
      "epoch": 0.03965573591894542,
      "grad_norm": 0.6224659085273743,
      "learning_rate": 6.627840772285784e-05,
      "loss": 1.2344,
      "step": 91
    },
    {
      "epoch": 0.04009151323673603,
      "grad_norm": 0.6973520517349243,
      "learning_rate": 6.545084971874738e-05,
      "loss": 1.3544,
      "step": 92
    },
    {
      "epoch": 0.040527290554526633,
      "grad_norm": 0.6966316103935242,
      "learning_rate": 6.461858523613684e-05,
      "loss": 1.2701,
      "step": 93
    },
    {
      "epoch": 0.040963067872317245,
      "grad_norm": 0.7485948801040649,
      "learning_rate": 6.378186779084995e-05,
      "loss": 1.4688,
      "step": 94
    },
    {
      "epoch": 0.04139884519010786,
      "grad_norm": 0.6849566698074341,
      "learning_rate": 6.294095225512603e-05,
      "loss": 1.2395,
      "step": 95
    },
    {
      "epoch": 0.04183462250789846,
      "grad_norm": 0.7672956585884094,
      "learning_rate": 6.209609477998338e-05,
      "loss": 1.2718,
      "step": 96
    },
    {
      "epoch": 0.04227039982568907,
      "grad_norm": 0.7076390385627747,
      "learning_rate": 6.124755271719325e-05,
      "loss": 1.3193,
      "step": 97
    },
    {
      "epoch": 0.042706177143479684,
      "grad_norm": 0.7060564756393433,
      "learning_rate": 6.0395584540887963e-05,
      "loss": 1.2844,
      "step": 98
    },
    {
      "epoch": 0.04314195446127029,
      "grad_norm": 0.7379676699638367,
      "learning_rate": 5.9540449768827246e-05,
      "loss": 1.2361,
      "step": 99
    },
    {
      "epoch": 0.0435777317790609,
      "grad_norm": 0.851557731628418,
      "learning_rate": 5.868240888334653e-05,
      "loss": 1.3533,
      "step": 100
    },
    {
      "epoch": 0.0435777317790609,
      "eval_loss": 1.3684649467468262,
      "eval_runtime": 318.0205,
      "eval_samples_per_second": 12.153,
      "eval_steps_per_second": 6.078,
      "step": 100
    },
    {
      "epoch": 0.04401350909685151,
      "grad_norm": 0.6644337177276611,
      "learning_rate": 5.782172325201155e-05,
      "loss": 1.43,
      "step": 101
    },
    {
      "epoch": 0.04444928641464212,
      "grad_norm": 0.6406741142272949,
      "learning_rate": 5.695865504800327e-05,
      "loss": 1.4389,
      "step": 102
    },
    {
      "epoch": 0.04488506373243273,
      "grad_norm": 0.6088762283325195,
      "learning_rate": 5.6093467170257374e-05,
      "loss": 1.3575,
      "step": 103
    },
    {
      "epoch": 0.04532084105022333,
      "grad_norm": 0.5756306052207947,
      "learning_rate": 5.522642316338268e-05,
      "loss": 1.2714,
      "step": 104
    },
    {
      "epoch": 0.045756618368013945,
      "grad_norm": 0.5706756711006165,
      "learning_rate": 5.435778713738292e-05,
      "loss": 1.3047,
      "step": 105
    },
    {
      "epoch": 0.046192395685804556,
      "grad_norm": 0.6538943648338318,
      "learning_rate": 5.348782368720626e-05,
      "loss": 1.4369,
      "step": 106
    },
    {
      "epoch": 0.04662817300359516,
      "grad_norm": 0.6861059665679932,
      "learning_rate": 5.26167978121472e-05,
      "loss": 1.386,
      "step": 107
    },
    {
      "epoch": 0.04706395032138577,
      "grad_norm": 0.667604386806488,
      "learning_rate": 5.174497483512506e-05,
      "loss": 1.3972,
      "step": 108
    },
    {
      "epoch": 0.047499727639176384,
      "grad_norm": 0.5741658210754395,
      "learning_rate": 5.0872620321864185e-05,
      "loss": 1.3382,
      "step": 109
    },
    {
      "epoch": 0.04793550495696699,
      "grad_norm": 0.6127682328224182,
      "learning_rate": 5e-05,
      "loss": 1.3597,
      "step": 110
    },
    {
      "epoch": 0.0483712822747576,
      "grad_norm": 0.6162969470024109,
      "learning_rate": 4.912737967813583e-05,
      "loss": 1.3884,
      "step": 111
    },
    {
      "epoch": 0.048807059592548205,
      "grad_norm": 0.6853535771369934,
      "learning_rate": 4.825502516487497e-05,
      "loss": 1.5718,
      "step": 112
    },
    {
      "epoch": 0.049242836910338816,
      "grad_norm": 0.6162100434303284,
      "learning_rate": 4.738320218785281e-05,
      "loss": 1.4271,
      "step": 113
    },
    {
      "epoch": 0.04967861422812943,
      "grad_norm": 0.5969780087471008,
      "learning_rate": 4.6512176312793736e-05,
      "loss": 1.3138,
      "step": 114
    },
    {
      "epoch": 0.05011439154592003,
      "grad_norm": 0.563022792339325,
      "learning_rate": 4.564221286261709e-05,
      "loss": 1.2623,
      "step": 115
    },
    {
      "epoch": 0.050550168863710644,
      "grad_norm": 0.658589780330658,
      "learning_rate": 4.477357683661734e-05,
      "loss": 1.4113,
      "step": 116
    },
    {
      "epoch": 0.050985946181501256,
      "grad_norm": 0.6488298773765564,
      "learning_rate": 4.390653282974264e-05,
      "loss": 1.4764,
      "step": 117
    },
    {
      "epoch": 0.05142172349929186,
      "grad_norm": 0.6470029950141907,
      "learning_rate": 4.3041344951996746e-05,
      "loss": 1.3366,
      "step": 118
    },
    {
      "epoch": 0.05185750081708247,
      "grad_norm": 0.6214532852172852,
      "learning_rate": 4.2178276747988446e-05,
      "loss": 1.3507,
      "step": 119
    },
    {
      "epoch": 0.05229327813487308,
      "grad_norm": 0.6346096396446228,
      "learning_rate": 4.131759111665349e-05,
      "loss": 1.36,
      "step": 120
    },
    {
      "epoch": 0.05272905545266369,
      "grad_norm": 0.6477368474006653,
      "learning_rate": 4.045955023117276e-05,
      "loss": 1.4248,
      "step": 121
    },
    {
      "epoch": 0.0531648327704543,
      "grad_norm": 0.6995759606361389,
      "learning_rate": 3.960441545911204e-05,
      "loss": 1.2993,
      "step": 122
    },
    {
      "epoch": 0.053600610088244904,
      "grad_norm": 0.6448193788528442,
      "learning_rate": 3.875244728280676e-05,
      "loss": 1.4006,
      "step": 123
    },
    {
      "epoch": 0.054036387406035516,
      "grad_norm": 0.5889584422111511,
      "learning_rate": 3.790390522001662e-05,
      "loss": 1.3996,
      "step": 124
    },
    {
      "epoch": 0.05447216472382613,
      "grad_norm": 0.6430240869522095,
      "learning_rate": 3.705904774487396e-05,
      "loss": 1.3608,
      "step": 125
    },
    {
      "epoch": 0.05490794204161673,
      "grad_norm": 0.6376746296882629,
      "learning_rate": 3.6218132209150045e-05,
      "loss": 1.3421,
      "step": 126
    },
    {
      "epoch": 0.055343719359407344,
      "grad_norm": 0.6999002695083618,
      "learning_rate": 3.5381414763863166e-05,
      "loss": 1.4008,
      "step": 127
    },
    {
      "epoch": 0.055779496677197955,
      "grad_norm": 0.622456431388855,
      "learning_rate": 3.4549150281252636e-05,
      "loss": 1.3804,
      "step": 128
    },
    {
      "epoch": 0.05621527399498856,
      "grad_norm": 0.6279011368751526,
      "learning_rate": 3.372159227714218e-05,
      "loss": 1.3072,
      "step": 129
    },
    {
      "epoch": 0.05665105131277917,
      "grad_norm": 0.612434446811676,
      "learning_rate": 3.289899283371657e-05,
      "loss": 1.2956,
      "step": 130
    },
    {
      "epoch": 0.057086828630569776,
      "grad_norm": 0.6098705530166626,
      "learning_rate": 3.2081602522734986e-05,
      "loss": 1.3117,
      "step": 131
    },
    {
      "epoch": 0.05752260594836039,
      "grad_norm": 0.6360723376274109,
      "learning_rate": 3.12696703292044e-05,
      "loss": 1.279,
      "step": 132
    },
    {
      "epoch": 0.057958383266151,
      "grad_norm": 0.6856507062911987,
      "learning_rate": 3.046344357553632e-05,
      "loss": 1.3226,
      "step": 133
    },
    {
      "epoch": 0.058394160583941604,
      "grad_norm": 0.6899363994598389,
      "learning_rate": 2.9663167846209998e-05,
      "loss": 1.3294,
      "step": 134
    },
    {
      "epoch": 0.058829937901732215,
      "grad_norm": 0.6617586016654968,
      "learning_rate": 2.886908691296504e-05,
      "loss": 1.4432,
      "step": 135
    },
    {
      "epoch": 0.05926571521952283,
      "grad_norm": 0.712164580821991,
      "learning_rate": 2.8081442660546125e-05,
      "loss": 1.4219,
      "step": 136
    },
    {
      "epoch": 0.05970149253731343,
      "grad_norm": 0.6878612637519836,
      "learning_rate": 2.7300475013022663e-05,
      "loss": 1.3542,
      "step": 137
    },
    {
      "epoch": 0.06013726985510404,
      "grad_norm": 0.690792977809906,
      "learning_rate": 2.6526421860705473e-05,
      "loss": 1.319,
      "step": 138
    },
    {
      "epoch": 0.06057304717289465,
      "grad_norm": 0.7101471424102783,
      "learning_rate": 2.575951898768315e-05,
      "loss": 1.3666,
      "step": 139
    },
    {
      "epoch": 0.06100882449068526,
      "grad_norm": 0.7041705250740051,
      "learning_rate": 2.500000000000001e-05,
      "loss": 1.3118,
      "step": 140
    },
    {
      "epoch": 0.06144460180847587,
      "grad_norm": 0.6758811473846436,
      "learning_rate": 2.4248096254497288e-05,
      "loss": 1.2953,
      "step": 141
    },
    {
      "epoch": 0.061880379126266476,
      "grad_norm": 0.7130976319313049,
      "learning_rate": 2.350403678833976e-05,
      "loss": 1.3082,
      "step": 142
    },
    {
      "epoch": 0.06231615644405709,
      "grad_norm": 0.6844936013221741,
      "learning_rate": 2.2768048249248648e-05,
      "loss": 1.2318,
      "step": 143
    },
    {
      "epoch": 0.06275193376184769,
      "grad_norm": 0.7062429785728455,
      "learning_rate": 2.2040354826462668e-05,
      "loss": 1.2312,
      "step": 144
    },
    {
      "epoch": 0.0631877110796383,
      "grad_norm": 0.6911125779151917,
      "learning_rate": 2.132117818244771e-05,
      "loss": 1.288,
      "step": 145
    },
    {
      "epoch": 0.06362348839742892,
      "grad_norm": 0.6440110802650452,
      "learning_rate": 2.061073738537635e-05,
      "loss": 1.2698,
      "step": 146
    },
    {
      "epoch": 0.06405926571521953,
      "grad_norm": 0.6771082878112793,
      "learning_rate": 1.9909248842397584e-05,
      "loss": 1.2934,
      "step": 147
    },
    {
      "epoch": 0.06449504303301014,
      "grad_norm": 0.6761582493782043,
      "learning_rate": 1.9216926233717085e-05,
      "loss": 1.189,
      "step": 148
    },
    {
      "epoch": 0.06493082035080074,
      "grad_norm": 0.722388505935669,
      "learning_rate": 1.8533980447508137e-05,
      "loss": 1.3512,
      "step": 149
    },
    {
      "epoch": 0.06536659766859135,
      "grad_norm": 0.7161824107170105,
      "learning_rate": 1.7860619515673033e-05,
      "loss": 1.3205,
      "step": 150
    },
    {
      "epoch": 0.06536659766859135,
      "eval_loss": 1.3558998107910156,
      "eval_runtime": 318.2564,
      "eval_samples_per_second": 12.144,
      "eval_steps_per_second": 6.074,
      "step": 150
    }
  ],
  "logging_steps": 1,
  "max_steps": 200,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 50,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 5,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 2.1443053581080986e+17,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}