|
{
  "best_metric": 1.000538945198059,
  "best_model_checkpoint": "miner_id_24/checkpoint-100",
  "epoch": 0.33783783783783783,
  "eval_steps": 50,
  "global_step": 100,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0033783783783783786,
      "grad_norm": 3.826683282852173,
      "learning_rate": 5e-06,
      "loss": 1.4948,
      "step": 1
    },
    {
      "epoch": 0.0033783783783783786,
      "eval_loss": 2.366645336151123,
      "eval_runtime": 45.2663,
      "eval_samples_per_second": 11.024,
      "eval_steps_per_second": 5.523,
      "step": 1
    },
    {
      "epoch": 0.006756756756756757,
      "grad_norm": 4.3141398429870605,
      "learning_rate": 1e-05,
      "loss": 1.6839,
      "step": 2
    },
    {
      "epoch": 0.010135135135135136,
      "grad_norm": 4.097876071929932,
      "learning_rate": 1.5e-05,
      "loss": 1.5752,
      "step": 3
    },
    {
      "epoch": 0.013513513513513514,
      "grad_norm": 4.010673522949219,
      "learning_rate": 2e-05,
      "loss": 1.5405,
      "step": 4
    },
    {
      "epoch": 0.016891891891891893,
      "grad_norm": 3.4538493156433105,
      "learning_rate": 2.5e-05,
      "loss": 1.5342,
      "step": 5
    },
    {
      "epoch": 0.02027027027027027,
      "grad_norm": 2.8183417320251465,
      "learning_rate": 3e-05,
      "loss": 1.3748,
      "step": 6
    },
    {
      "epoch": 0.02364864864864865,
      "grad_norm": 2.11971378326416,
      "learning_rate": 3.5e-05,
      "loss": 1.2566,
      "step": 7
    },
    {
      "epoch": 0.02702702702702703,
      "grad_norm": 1.5489553213119507,
      "learning_rate": 4e-05,
      "loss": 1.2766,
      "step": 8
    },
    {
      "epoch": 0.030405405405405407,
      "grad_norm": 1.3374286890029907,
      "learning_rate": 4.5e-05,
      "loss": 1.1642,
      "step": 9
    },
    {
      "epoch": 0.033783783783783786,
      "grad_norm": 1.4814637899398804,
      "learning_rate": 5e-05,
      "loss": 1.1382,
      "step": 10
    },
    {
      "epoch": 0.037162162162162164,
      "grad_norm": 1.3145912885665894,
      "learning_rate": 5.500000000000001e-05,
      "loss": 1.0861,
      "step": 11
    },
    {
      "epoch": 0.04054054054054054,
      "grad_norm": 1.5343668460845947,
      "learning_rate": 6e-05,
      "loss": 1.1041,
      "step": 12
    },
    {
      "epoch": 0.04391891891891892,
      "grad_norm": 1.3800190687179565,
      "learning_rate": 6.500000000000001e-05,
      "loss": 1.0442,
      "step": 13
    },
    {
      "epoch": 0.0472972972972973,
      "grad_norm": 1.1731736660003662,
      "learning_rate": 7e-05,
      "loss": 1.025,
      "step": 14
    },
    {
      "epoch": 0.05067567567567568,
      "grad_norm": 1.0756690502166748,
      "learning_rate": 7.500000000000001e-05,
      "loss": 1.077,
      "step": 15
    },
    {
      "epoch": 0.05405405405405406,
      "grad_norm": 1.126670479774475,
      "learning_rate": 8e-05,
      "loss": 1.0699,
      "step": 16
    },
    {
      "epoch": 0.057432432432432436,
      "grad_norm": 1.2052531242370605,
      "learning_rate": 8.5e-05,
      "loss": 1.0481,
      "step": 17
    },
    {
      "epoch": 0.060810810810810814,
      "grad_norm": 1.0971605777740479,
      "learning_rate": 9e-05,
      "loss": 1.1747,
      "step": 18
    },
    {
      "epoch": 0.06418918918918919,
      "grad_norm": 1.2357207536697388,
      "learning_rate": 9.5e-05,
      "loss": 1.2298,
      "step": 19
    },
    {
      "epoch": 0.06756756756756757,
      "grad_norm": 1.0170912742614746,
      "learning_rate": 0.0001,
      "loss": 0.963,
      "step": 20
    },
    {
      "epoch": 0.07094594594594594,
      "grad_norm": 1.040585994720459,
      "learning_rate": 9.999238475781957e-05,
      "loss": 1.0414,
      "step": 21
    },
    {
      "epoch": 0.07432432432432433,
      "grad_norm": 1.0849173069000244,
      "learning_rate": 9.99695413509548e-05,
      "loss": 0.9838,
      "step": 22
    },
    {
      "epoch": 0.0777027027027027,
      "grad_norm": 1.2504794597625732,
      "learning_rate": 9.99314767377287e-05,
      "loss": 1.132,
      "step": 23
    },
    {
      "epoch": 0.08108108108108109,
      "grad_norm": 1.1654671430587769,
      "learning_rate": 9.987820251299122e-05,
      "loss": 1.1147,
      "step": 24
    },
    {
      "epoch": 0.08445945945945946,
      "grad_norm": 1.2212613821029663,
      "learning_rate": 9.980973490458728e-05,
      "loss": 1.0757,
      "step": 25
    },
    {
      "epoch": 0.08783783783783784,
      "grad_norm": 1.6377768516540527,
      "learning_rate": 9.972609476841367e-05,
      "loss": 1.1149,
      "step": 26
    },
    {
      "epoch": 0.09121621621621621,
      "grad_norm": 1.4654461145401,
      "learning_rate": 9.962730758206611e-05,
      "loss": 1.0161,
      "step": 27
    },
    {
      "epoch": 0.0945945945945946,
      "grad_norm": 1.5125370025634766,
      "learning_rate": 9.951340343707852e-05,
      "loss": 1.1866,
      "step": 28
    },
    {
      "epoch": 0.09797297297297297,
      "grad_norm": 1.3907262086868286,
      "learning_rate": 9.938441702975689e-05,
      "loss": 1.0566,
      "step": 29
    },
    {
      "epoch": 0.10135135135135136,
      "grad_norm": 1.5802035331726074,
      "learning_rate": 9.924038765061042e-05,
      "loss": 1.2932,
      "step": 30
    },
    {
      "epoch": 0.10472972972972973,
      "grad_norm": 1.4079865217208862,
      "learning_rate": 9.908135917238321e-05,
      "loss": 1.1993,
      "step": 31
    },
    {
      "epoch": 0.10810810810810811,
      "grad_norm": 1.471291184425354,
      "learning_rate": 9.890738003669029e-05,
      "loss": 1.2195,
      "step": 32
    },
    {
      "epoch": 0.11148648648648649,
      "grad_norm": 1.8197413682937622,
      "learning_rate": 9.871850323926177e-05,
      "loss": 1.1169,
      "step": 33
    },
    {
      "epoch": 0.11486486486486487,
      "grad_norm": 1.589645504951477,
      "learning_rate": 9.851478631379982e-05,
      "loss": 1.2959,
      "step": 34
    },
    {
      "epoch": 0.11824324324324324,
      "grad_norm": 1.9382315874099731,
      "learning_rate": 9.829629131445342e-05,
      "loss": 1.1117,
      "step": 35
    },
    {
      "epoch": 0.12162162162162163,
      "grad_norm": 1.9313946962356567,
      "learning_rate": 9.806308479691595e-05,
      "loss": 1.1987,
      "step": 36
    },
    {
      "epoch": 0.125,
      "grad_norm": 1.8206257820129395,
      "learning_rate": 9.781523779815179e-05,
      "loss": 1.1417,
      "step": 37
    },
    {
      "epoch": 0.12837837837837837,
      "grad_norm": 1.890135645866394,
      "learning_rate": 9.755282581475769e-05,
      "loss": 1.094,
      "step": 38
    },
    {
      "epoch": 0.13175675675675674,
      "grad_norm": 2.088564872741699,
      "learning_rate": 9.727592877996585e-05,
      "loss": 1.2143,
      "step": 39
    },
    {
      "epoch": 0.13513513513513514,
      "grad_norm": 1.9415384531021118,
      "learning_rate": 9.698463103929542e-05,
      "loss": 1.197,
      "step": 40
    },
    {
      "epoch": 0.13851351351351351,
      "grad_norm": 1.7540833950042725,
      "learning_rate": 9.667902132486009e-05,
      "loss": 1.0637,
      "step": 41
    },
    {
      "epoch": 0.14189189189189189,
      "grad_norm": 1.8551878929138184,
      "learning_rate": 9.635919272833938e-05,
      "loss": 1.0685,
      "step": 42
    },
    {
      "epoch": 0.14527027027027026,
      "grad_norm": 1.9881539344787598,
      "learning_rate": 9.602524267262203e-05,
      "loss": 1.081,
      "step": 43
    },
    {
      "epoch": 0.14864864864864866,
      "grad_norm": 2.670828104019165,
      "learning_rate": 9.567727288213005e-05,
      "loss": 1.2052,
      "step": 44
    },
    {
      "epoch": 0.15202702702702703,
      "grad_norm": 2.176063060760498,
      "learning_rate": 9.53153893518325e-05,
      "loss": 0.856,
      "step": 45
    },
    {
      "epoch": 0.1554054054054054,
      "grad_norm": 2.3070170879364014,
      "learning_rate": 9.493970231495835e-05,
      "loss": 0.9129,
      "step": 46
    },
    {
      "epoch": 0.15878378378378377,
      "grad_norm": 2.7001495361328125,
      "learning_rate": 9.45503262094184e-05,
      "loss": 0.9219,
      "step": 47
    },
    {
      "epoch": 0.16216216216216217,
      "grad_norm": 2.964418888092041,
      "learning_rate": 9.414737964294636e-05,
      "loss": 0.9442,
      "step": 48
    },
    {
      "epoch": 0.16554054054054054,
      "grad_norm": 3.5293803215026855,
      "learning_rate": 9.373098535696979e-05,
      "loss": 1.1814,
      "step": 49
    },
    {
      "epoch": 0.16891891891891891,
      "grad_norm": 3.7675538063049316,
      "learning_rate": 9.330127018922194e-05,
      "loss": 0.9654,
      "step": 50
    },
    {
      "epoch": 0.16891891891891891,
      "eval_loss": 1.1298441886901855,
      "eval_runtime": 46.0078,
      "eval_samples_per_second": 10.846,
      "eval_steps_per_second": 5.434,
      "step": 50
    },
    {
      "epoch": 0.17229729729729729,
      "grad_norm": 1.6099838018417358,
      "learning_rate": 9.285836503510562e-05,
      "loss": 0.9751,
      "step": 51
    },
    {
      "epoch": 0.17567567567567569,
      "grad_norm": 1.5371800661087036,
      "learning_rate": 9.24024048078213e-05,
      "loss": 1.0266,
      "step": 52
    },
    {
      "epoch": 0.17905405405405406,
      "grad_norm": 0.87901771068573,
      "learning_rate": 9.193352839727121e-05,
      "loss": 0.918,
      "step": 53
    },
    {
      "epoch": 0.18243243243243243,
      "grad_norm": 0.703105628490448,
      "learning_rate": 9.145187862775209e-05,
      "loss": 0.7213,
      "step": 54
    },
    {
      "epoch": 0.1858108108108108,
      "grad_norm": 0.8392472863197327,
      "learning_rate": 9.09576022144496e-05,
      "loss": 0.99,
      "step": 55
    },
    {
      "epoch": 0.1891891891891892,
      "grad_norm": 0.962077260017395,
      "learning_rate": 9.045084971874738e-05,
      "loss": 0.9932,
      "step": 56
    },
    {
      "epoch": 0.19256756756756757,
      "grad_norm": 1.064950942993164,
      "learning_rate": 8.993177550236464e-05,
      "loss": 0.9774,
      "step": 57
    },
    {
      "epoch": 0.19594594594594594,
      "grad_norm": 1.1014493703842163,
      "learning_rate": 8.940053768033609e-05,
      "loss": 0.8928,
      "step": 58
    },
    {
      "epoch": 0.19932432432432431,
      "grad_norm": 0.8882946372032166,
      "learning_rate": 8.885729807284856e-05,
      "loss": 0.8685,
      "step": 59
    },
    {
      "epoch": 0.20270270270270271,
      "grad_norm": 0.7933914661407471,
      "learning_rate": 8.83022221559489e-05,
      "loss": 0.9081,
      "step": 60
    },
    {
      "epoch": 0.20608108108108109,
      "grad_norm": 0.8522499203681946,
      "learning_rate": 8.773547901113862e-05,
      "loss": 0.966,
      "step": 61
    },
    {
      "epoch": 0.20945945945945946,
      "grad_norm": 0.7208542823791504,
      "learning_rate": 8.715724127386972e-05,
      "loss": 0.8446,
      "step": 62
    },
    {
      "epoch": 0.21283783783783783,
      "grad_norm": 0.8307358026504517,
      "learning_rate": 8.656768508095853e-05,
      "loss": 0.9462,
      "step": 63
    },
    {
      "epoch": 0.21621621621621623,
      "grad_norm": 0.9621148109436035,
      "learning_rate": 8.596699001693255e-05,
      "loss": 0.9179,
      "step": 64
    },
    {
      "epoch": 0.2195945945945946,
      "grad_norm": 0.836197018623352,
      "learning_rate": 8.535533905932738e-05,
      "loss": 0.8942,
      "step": 65
    },
    {
      "epoch": 0.22297297297297297,
      "grad_norm": 0.8846750259399414,
      "learning_rate": 8.473291852294987e-05,
      "loss": 0.8464,
      "step": 66
    },
    {
      "epoch": 0.22635135135135134,
      "grad_norm": 1.0545690059661865,
      "learning_rate": 8.409991800312493e-05,
      "loss": 0.9341,
      "step": 67
    },
    {
      "epoch": 0.22972972972972974,
      "grad_norm": 0.8369587659835815,
      "learning_rate": 8.345653031794292e-05,
      "loss": 0.9228,
      "step": 68
    },
    {
      "epoch": 0.23310810810810811,
      "grad_norm": 0.8992530703544617,
      "learning_rate": 8.280295144952536e-05,
      "loss": 0.933,
      "step": 69
    },
    {
      "epoch": 0.23648648648648649,
      "grad_norm": 0.958665132522583,
      "learning_rate": 8.213938048432697e-05,
      "loss": 0.9219,
      "step": 70
    },
    {
      "epoch": 0.23986486486486486,
      "grad_norm": 0.9970949292182922,
      "learning_rate": 8.146601955249188e-05,
      "loss": 1.0344,
      "step": 71
    },
    {
      "epoch": 0.24324324324324326,
      "grad_norm": 0.9001803398132324,
      "learning_rate": 8.07830737662829e-05,
      "loss": 0.9763,
      "step": 72
    },
    {
      "epoch": 0.24662162162162163,
      "grad_norm": 0.9444774985313416,
      "learning_rate": 8.009075115760243e-05,
      "loss": 0.9402,
      "step": 73
    },
    {
      "epoch": 0.25,
      "grad_norm": 1.1478818655014038,
      "learning_rate": 7.938926261462366e-05,
      "loss": 0.9338,
      "step": 74
    },
    {
      "epoch": 0.2533783783783784,
      "grad_norm": 1.1998094320297241,
      "learning_rate": 7.86788218175523e-05,
      "loss": 1.1677,
      "step": 75
    },
    {
      "epoch": 0.25675675675675674,
      "grad_norm": 1.059282660484314,
      "learning_rate": 7.795964517353735e-05,
      "loss": 0.8556,
      "step": 76
    },
    {
      "epoch": 0.26013513513513514,
      "grad_norm": 1.0620920658111572,
      "learning_rate": 7.723195175075136e-05,
      "loss": 0.9813,
      "step": 77
    },
    {
      "epoch": 0.2635135135135135,
      "grad_norm": 1.2135953903198242,
      "learning_rate": 7.649596321166024e-05,
      "loss": 1.035,
      "step": 78
    },
    {
      "epoch": 0.2668918918918919,
      "grad_norm": 1.229359745979309,
      "learning_rate": 7.575190374550272e-05,
      "loss": 1.0898,
      "step": 79
    },
    {
      "epoch": 0.2702702702702703,
      "grad_norm": 1.1784802675247192,
      "learning_rate": 7.500000000000001e-05,
      "loss": 1.1225,
      "step": 80
    },
    {
      "epoch": 0.27364864864864863,
      "grad_norm": 1.334152102470398,
      "learning_rate": 7.424048101231686e-05,
      "loss": 1.1676,
      "step": 81
    },
    {
      "epoch": 0.27702702702702703,
      "grad_norm": 1.5334789752960205,
      "learning_rate": 7.347357813929454e-05,
      "loss": 1.2391,
      "step": 82
    },
    {
      "epoch": 0.28040540540540543,
      "grad_norm": 1.2976105213165283,
      "learning_rate": 7.269952498697734e-05,
      "loss": 1.0586,
      "step": 83
    },
    {
      "epoch": 0.28378378378378377,
      "grad_norm": 1.3772521018981934,
      "learning_rate": 7.191855733945387e-05,
      "loss": 1.0095,
      "step": 84
    },
    {
      "epoch": 0.28716216216216217,
      "grad_norm": 1.498609185218811,
      "learning_rate": 7.113091308703498e-05,
      "loss": 1.0406,
      "step": 85
    },
    {
      "epoch": 0.2905405405405405,
      "grad_norm": 1.2996132373809814,
      "learning_rate": 7.033683215379002e-05,
      "loss": 0.9952,
      "step": 86
    },
    {
      "epoch": 0.2939189189189189,
      "grad_norm": 1.400256872177124,
      "learning_rate": 6.953655642446368e-05,
      "loss": 0.9913,
      "step": 87
    },
    {
      "epoch": 0.2972972972972973,
      "grad_norm": 1.5387816429138184,
      "learning_rate": 6.873032967079561e-05,
      "loss": 1.0511,
      "step": 88
    },
    {
      "epoch": 0.30067567567567566,
      "grad_norm": 1.6207197904586792,
      "learning_rate": 6.7918397477265e-05,
      "loss": 1.1016,
      "step": 89
    },
    {
      "epoch": 0.30405405405405406,
      "grad_norm": 1.9668850898742676,
      "learning_rate": 6.710100716628344e-05,
      "loss": 1.1591,
      "step": 90
    },
    {
      "epoch": 0.30743243243243246,
      "grad_norm": 1.7193620204925537,
      "learning_rate": 6.627840772285784e-05,
      "loss": 0.8671,
      "step": 91
    },
    {
      "epoch": 0.3108108108108108,
      "grad_norm": 1.715350866317749,
      "learning_rate": 6.545084971874738e-05,
      "loss": 1.0375,
      "step": 92
    },
    {
      "epoch": 0.3141891891891892,
      "grad_norm": 1.7534435987472534,
      "learning_rate": 6.461858523613684e-05,
      "loss": 0.9131,
      "step": 93
    },
    {
      "epoch": 0.31756756756756754,
      "grad_norm": 1.7787106037139893,
      "learning_rate": 6.378186779084995e-05,
      "loss": 0.9613,
      "step": 94
    },
    {
      "epoch": 0.32094594594594594,
      "grad_norm": 1.7678383588790894,
      "learning_rate": 6.294095225512603e-05,
      "loss": 0.9469,
      "step": 95
    },
    {
      "epoch": 0.32432432432432434,
      "grad_norm": 2.5767292976379395,
      "learning_rate": 6.209609477998338e-05,
      "loss": 0.8733,
      "step": 96
    },
    {
      "epoch": 0.3277027027027027,
      "grad_norm": 2.0000483989715576,
      "learning_rate": 6.124755271719325e-05,
      "loss": 0.7498,
      "step": 97
    },
    {
      "epoch": 0.3310810810810811,
      "grad_norm": 2.2624642848968506,
      "learning_rate": 6.0395584540887963e-05,
      "loss": 0.7368,
      "step": 98
    },
    {
      "epoch": 0.3344594594594595,
      "grad_norm": 2.4766733646392822,
      "learning_rate": 5.9540449768827246e-05,
      "loss": 0.8139,
      "step": 99
    },
    {
      "epoch": 0.33783783783783783,
      "grad_norm": 3.698124885559082,
      "learning_rate": 5.868240888334653e-05,
      "loss": 0.9995,
      "step": 100
    },
    {
      "epoch": 0.33783783783783783,
      "eval_loss": 1.000538945198059,
      "eval_runtime": 46.0594,
      "eval_samples_per_second": 10.834,
      "eval_steps_per_second": 5.428,
      "step": 100
    }
  ],
  "logging_steps": 1,
  "max_steps": 200,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 50,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 5,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 1.5710986748244787e+17,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}
|
|