{
  "best_metric": 0.9018179774284363,
  "best_model_checkpoint": "miner_id_24/checkpoint-150",
  "epoch": 0.2579535683576956,
  "eval_steps": 50,
  "global_step": 150,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0017196904557179708,
      "grad_norm": 0.04371219500899315,
      "learning_rate": 5e-06,
      "loss": 0.5013,
      "step": 1
    },
    {
      "epoch": 0.0017196904557179708,
      "eval_loss": 1.1150983572006226,
      "eval_runtime": 26.0196,
      "eval_samples_per_second": 37.664,
      "eval_steps_per_second": 18.832,
      "step": 1
    },
    {
      "epoch": 0.0034393809114359416,
      "grad_norm": 0.05066497623920441,
      "learning_rate": 1e-05,
      "loss": 0.6556,
      "step": 2
    },
    {
      "epoch": 0.005159071367153913,
      "grad_norm": 0.05329379439353943,
      "learning_rate": 1.5e-05,
      "loss": 0.6847,
      "step": 3
    },
    {
      "epoch": 0.006878761822871883,
      "grad_norm": 0.051895491778850555,
      "learning_rate": 2e-05,
      "loss": 0.5981,
      "step": 4
    },
    {
      "epoch": 0.008598452278589854,
      "grad_norm": 0.06517327576875687,
      "learning_rate": 2.5e-05,
      "loss": 0.6962,
      "step": 5
    },
    {
      "epoch": 0.010318142734307825,
      "grad_norm": 0.05350270867347717,
      "learning_rate": 3e-05,
      "loss": 0.6678,
      "step": 6
    },
    {
      "epoch": 0.012037833190025795,
      "grad_norm": 0.062394555658102036,
      "learning_rate": 3.5e-05,
      "loss": 0.706,
      "step": 7
    },
    {
      "epoch": 0.013757523645743766,
      "grad_norm": 0.058834996074438095,
      "learning_rate": 4e-05,
      "loss": 0.7226,
      "step": 8
    },
    {
      "epoch": 0.015477214101461736,
      "grad_norm": 0.058221857994794846,
      "learning_rate": 4.5e-05,
      "loss": 0.6948,
      "step": 9
    },
    {
      "epoch": 0.017196904557179708,
      "grad_norm": 0.05588304251432419,
      "learning_rate": 5e-05,
      "loss": 0.6798,
      "step": 10
    },
    {
      "epoch": 0.018916595012897677,
      "grad_norm": 0.0636599138379097,
      "learning_rate": 5.500000000000001e-05,
      "loss": 0.7023,
      "step": 11
    },
    {
      "epoch": 0.02063628546861565,
      "grad_norm": 0.06145975738763809,
      "learning_rate": 6e-05,
      "loss": 0.6432,
      "step": 12
    },
    {
      "epoch": 0.02235597592433362,
      "grad_norm": 0.06953848898410797,
      "learning_rate": 6.500000000000001e-05,
      "loss": 0.7669,
      "step": 13
    },
    {
      "epoch": 0.02407566638005159,
      "grad_norm": 0.07056722790002823,
      "learning_rate": 7e-05,
      "loss": 0.8379,
      "step": 14
    },
    {
      "epoch": 0.025795356835769563,
      "grad_norm": 0.06908471137285233,
      "learning_rate": 7.500000000000001e-05,
      "loss": 0.8114,
      "step": 15
    },
    {
      "epoch": 0.027515047291487533,
      "grad_norm": 0.06860286742448807,
      "learning_rate": 8e-05,
      "loss": 0.765,
      "step": 16
    },
    {
      "epoch": 0.029234737747205503,
      "grad_norm": 0.06991557031869888,
      "learning_rate": 8.5e-05,
      "loss": 0.8112,
      "step": 17
    },
    {
      "epoch": 0.030954428202923472,
      "grad_norm": 0.07742810249328613,
      "learning_rate": 9e-05,
      "loss": 0.8295,
      "step": 18
    },
    {
      "epoch": 0.03267411865864144,
      "grad_norm": 0.08711075782775879,
      "learning_rate": 9.5e-05,
      "loss": 0.8386,
      "step": 19
    },
    {
      "epoch": 0.034393809114359415,
      "grad_norm": 0.08179605007171631,
      "learning_rate": 0.0001,
      "loss": 0.9093,
      "step": 20
    },
    {
      "epoch": 0.03611349957007739,
      "grad_norm": 0.08795295655727386,
      "learning_rate": 9.999238475781957e-05,
      "loss": 0.9145,
      "step": 21
    },
    {
      "epoch": 0.037833190025795355,
      "grad_norm": 0.08331762999296188,
      "learning_rate": 9.99695413509548e-05,
      "loss": 0.9265,
      "step": 22
    },
    {
      "epoch": 0.03955288048151333,
      "grad_norm": 0.07908172160387039,
      "learning_rate": 9.99314767377287e-05,
      "loss": 0.9195,
      "step": 23
    },
    {
      "epoch": 0.0412725709372313,
      "grad_norm": 0.09061425179243088,
      "learning_rate": 9.987820251299122e-05,
      "loss": 0.8748,
      "step": 24
    },
    {
      "epoch": 0.04299226139294927,
      "grad_norm": 0.0851019099354744,
      "learning_rate": 9.980973490458728e-05,
      "loss": 0.7701,
      "step": 25
    },
    {
      "epoch": 0.04471195184866724,
      "grad_norm": 0.09329686313867569,
      "learning_rate": 9.972609476841367e-05,
      "loss": 0.8657,
      "step": 26
    },
    {
      "epoch": 0.046431642304385214,
      "grad_norm": 0.09710376709699631,
      "learning_rate": 9.962730758206611e-05,
      "loss": 0.9217,
      "step": 27
    },
    {
      "epoch": 0.04815133276010318,
      "grad_norm": 0.10121080279350281,
      "learning_rate": 9.951340343707852e-05,
      "loss": 0.8995,
      "step": 28
    },
    {
      "epoch": 0.04987102321582115,
      "grad_norm": 0.1065475270152092,
      "learning_rate": 9.938441702975689e-05,
      "loss": 0.8886,
      "step": 29
    },
    {
      "epoch": 0.051590713671539126,
      "grad_norm": 0.11212456971406937,
      "learning_rate": 9.924038765061042e-05,
      "loss": 0.867,
      "step": 30
    },
    {
      "epoch": 0.05331040412725709,
      "grad_norm": 0.0965503603219986,
      "learning_rate": 9.908135917238321e-05,
      "loss": 0.806,
      "step": 31
    },
    {
      "epoch": 0.055030094582975066,
      "grad_norm": 0.10261840373277664,
      "learning_rate": 9.890738003669029e-05,
      "loss": 0.9247,
      "step": 32
    },
    {
      "epoch": 0.05674978503869303,
      "grad_norm": 0.11739654093980789,
      "learning_rate": 9.871850323926177e-05,
      "loss": 0.927,
      "step": 33
    },
    {
      "epoch": 0.058469475494411005,
      "grad_norm": 0.12067130953073502,
      "learning_rate": 9.851478631379982e-05,
      "loss": 0.9822,
      "step": 34
    },
    {
      "epoch": 0.06018916595012898,
      "grad_norm": 0.11332730948925018,
      "learning_rate": 9.829629131445342e-05,
      "loss": 0.8538,
      "step": 35
    },
    {
      "epoch": 0.061908856405846945,
      "grad_norm": 0.12802854180335999,
      "learning_rate": 9.806308479691595e-05,
      "loss": 0.9749,
      "step": 36
    },
    {
      "epoch": 0.06362854686156492,
      "grad_norm": 0.13656459748744965,
      "learning_rate": 9.781523779815179e-05,
      "loss": 0.9501,
      "step": 37
    },
    {
      "epoch": 0.06534823731728288,
      "grad_norm": 0.1332613080739975,
      "learning_rate": 9.755282581475769e-05,
      "loss": 1.0124,
      "step": 38
    },
    {
      "epoch": 0.06706792777300086,
      "grad_norm": 0.168551504611969,
      "learning_rate": 9.727592877996585e-05,
      "loss": 1.1085,
      "step": 39
    },
    {
      "epoch": 0.06878761822871883,
      "grad_norm": 0.16357570886611938,
      "learning_rate": 9.698463103929542e-05,
      "loss": 1.0838,
      "step": 40
    },
    {
      "epoch": 0.0705073086844368,
      "grad_norm": 0.18186220526695251,
      "learning_rate": 9.667902132486009e-05,
      "loss": 1.2419,
      "step": 41
    },
    {
      "epoch": 0.07222699914015478,
      "grad_norm": 0.25837743282318115,
      "learning_rate": 9.635919272833938e-05,
      "loss": 1.5677,
      "step": 42
    },
    {
      "epoch": 0.07394668959587274,
      "grad_norm": 0.2586553692817688,
      "learning_rate": 9.602524267262203e-05,
      "loss": 1.3784,
      "step": 43
    },
    {
      "epoch": 0.07566638005159071,
      "grad_norm": 0.2590441107749939,
      "learning_rate": 9.567727288213005e-05,
      "loss": 1.5176,
      "step": 44
    },
    {
      "epoch": 0.07738607050730868,
      "grad_norm": 0.240685373544693,
      "learning_rate": 9.53153893518325e-05,
      "loss": 1.5495,
      "step": 45
    },
    {
      "epoch": 0.07910576096302666,
      "grad_norm": 0.2492353618144989,
      "learning_rate": 9.493970231495835e-05,
      "loss": 1.3381,
      "step": 46
    },
    {
      "epoch": 0.08082545141874463,
      "grad_norm": 0.2738623023033142,
      "learning_rate": 9.45503262094184e-05,
      "loss": 1.5593,
      "step": 47
    },
    {
      "epoch": 0.0825451418744626,
      "grad_norm": 0.362883061170578,
      "learning_rate": 9.414737964294636e-05,
      "loss": 1.6339,
      "step": 48
    },
    {
      "epoch": 0.08426483233018056,
      "grad_norm": 0.4202761650085449,
      "learning_rate": 9.373098535696979e-05,
      "loss": 1.6848,
      "step": 49
    },
    {
      "epoch": 0.08598452278589853,
      "grad_norm": 0.9379512667655945,
      "learning_rate": 9.330127018922194e-05,
      "loss": 2.2938,
      "step": 50
    },
    {
      "epoch": 0.08598452278589853,
      "eval_loss": 0.9957740902900696,
      "eval_runtime": 26.1033,
      "eval_samples_per_second": 37.543,
      "eval_steps_per_second": 18.772,
      "step": 50
    },
    {
      "epoch": 0.08770421324161651,
      "grad_norm": 0.15448810160160065,
      "learning_rate": 9.285836503510562e-05,
      "loss": 0.6494,
      "step": 51
    },
    {
      "epoch": 0.08942390369733448,
      "grad_norm": 0.19541579484939575,
      "learning_rate": 9.24024048078213e-05,
      "loss": 0.6789,
      "step": 52
    },
    {
      "epoch": 0.09114359415305245,
      "grad_norm": 0.22956673800945282,
      "learning_rate": 9.193352839727121e-05,
      "loss": 0.6399,
      "step": 53
    },
    {
      "epoch": 0.09286328460877043,
      "grad_norm": 0.20343832671642303,
      "learning_rate": 9.145187862775209e-05,
      "loss": 0.7056,
      "step": 54
    },
    {
      "epoch": 0.09458297506448839,
      "grad_norm": 0.20558185875415802,
      "learning_rate": 9.09576022144496e-05,
      "loss": 0.695,
      "step": 55
    },
    {
      "epoch": 0.09630266552020636,
      "grad_norm": 0.20323364436626434,
      "learning_rate": 9.045084971874738e-05,
      "loss": 0.7068,
      "step": 56
    },
    {
      "epoch": 0.09802235597592433,
      "grad_norm": 0.18025411665439606,
      "learning_rate": 8.993177550236464e-05,
      "loss": 0.6984,
      "step": 57
    },
    {
      "epoch": 0.0997420464316423,
      "grad_norm": 0.15938173234462738,
      "learning_rate": 8.940053768033609e-05,
      "loss": 0.6744,
      "step": 58
    },
    {
      "epoch": 0.10146173688736028,
      "grad_norm": 0.1582622081041336,
      "learning_rate": 8.885729807284856e-05,
      "loss": 0.7612,
      "step": 59
    },
    {
      "epoch": 0.10318142734307825,
      "grad_norm": 0.12570132315158844,
      "learning_rate": 8.83022221559489e-05,
      "loss": 0.7241,
      "step": 60
    },
    {
      "epoch": 0.10490111779879621,
      "grad_norm": 0.10931485146284103,
      "learning_rate": 8.773547901113862e-05,
      "loss": 0.7559,
      "step": 61
    },
    {
      "epoch": 0.10662080825451418,
      "grad_norm": 0.09841756522655487,
      "learning_rate": 8.715724127386972e-05,
      "loss": 0.6782,
      "step": 62
    },
    {
      "epoch": 0.10834049871023216,
      "grad_norm": 0.09882379323244095,
      "learning_rate": 8.656768508095853e-05,
      "loss": 0.7601,
      "step": 63
    },
    {
      "epoch": 0.11006018916595013,
      "grad_norm": 0.10225917398929596,
      "learning_rate": 8.596699001693255e-05,
      "loss": 0.7911,
      "step": 64
    },
    {
      "epoch": 0.1117798796216681,
      "grad_norm": 0.10474953055381775,
      "learning_rate": 8.535533905932738e-05,
      "loss": 0.7669,
      "step": 65
    },
    {
      "epoch": 0.11349957007738606,
      "grad_norm": 0.10357484966516495,
      "learning_rate": 8.473291852294987e-05,
      "loss": 0.7644,
      "step": 66
    },
    {
      "epoch": 0.11521926053310404,
      "grad_norm": 0.11238912492990494,
      "learning_rate": 8.409991800312493e-05,
      "loss": 0.8878,
      "step": 67
    },
    {
      "epoch": 0.11693895098882201,
      "grad_norm": 0.11020366847515106,
      "learning_rate": 8.345653031794292e-05,
      "loss": 0.8643,
      "step": 68
    },
    {
      "epoch": 0.11865864144453998,
      "grad_norm": 0.10074474662542343,
      "learning_rate": 8.280295144952536e-05,
      "loss": 0.8067,
      "step": 69
    },
    {
      "epoch": 0.12037833190025796,
      "grad_norm": 0.09557520598173141,
      "learning_rate": 8.213938048432697e-05,
      "loss": 0.7716,
      "step": 70
    },
    {
      "epoch": 0.12209802235597593,
      "grad_norm": 0.10227945446968079,
      "learning_rate": 8.146601955249188e-05,
      "loss": 0.923,
      "step": 71
    },
    {
      "epoch": 0.12381771281169389,
      "grad_norm": 0.10992958396673203,
      "learning_rate": 8.07830737662829e-05,
      "loss": 0.8756,
      "step": 72
    },
    {
      "epoch": 0.12553740326741186,
      "grad_norm": 0.10675280541181564,
      "learning_rate": 8.009075115760243e-05,
      "loss": 0.7982,
      "step": 73
    },
    {
      "epoch": 0.12725709372312985,
      "grad_norm": 0.1144600510597229,
      "learning_rate": 7.938926261462366e-05,
      "loss": 0.7169,
      "step": 74
    },
    {
      "epoch": 0.1289767841788478,
      "grad_norm": 0.10181998461484909,
      "learning_rate": 7.86788218175523e-05,
      "loss": 0.736,
      "step": 75
    },
    {
      "epoch": 0.13069647463456577,
      "grad_norm": 0.10681930184364319,
      "learning_rate": 7.795964517353735e-05,
      "loss": 0.8836,
      "step": 76
    },
    {
      "epoch": 0.13241616509028376,
      "grad_norm": 0.11621927469968796,
      "learning_rate": 7.723195175075136e-05,
      "loss": 0.8581,
      "step": 77
    },
    {
      "epoch": 0.13413585554600171,
      "grad_norm": 0.09889838099479675,
      "learning_rate": 7.649596321166024e-05,
      "loss": 0.7897,
      "step": 78
    },
    {
      "epoch": 0.1358555460017197,
      "grad_norm": 0.09543070942163467,
      "learning_rate": 7.575190374550272e-05,
      "loss": 0.8192,
      "step": 79
    },
    {
      "epoch": 0.13757523645743766,
      "grad_norm": 0.11200384050607681,
      "learning_rate": 7.500000000000001e-05,
      "loss": 0.7359,
      "step": 80
    },
    {
      "epoch": 0.13929492691315562,
      "grad_norm": 0.10140563547611237,
      "learning_rate": 7.424048101231686e-05,
      "loss": 0.824,
      "step": 81
    },
    {
      "epoch": 0.1410146173688736,
      "grad_norm": 0.102725550532341,
      "learning_rate": 7.347357813929454e-05,
      "loss": 0.8458,
      "step": 82
    },
    {
      "epoch": 0.14273430782459157,
      "grad_norm": 0.10617943853139877,
      "learning_rate": 7.269952498697734e-05,
      "loss": 0.7649,
      "step": 83
    },
    {
      "epoch": 0.14445399828030955,
      "grad_norm": 0.10800634324550629,
      "learning_rate": 7.191855733945387e-05,
      "loss": 0.7318,
      "step": 84
    },
    {
      "epoch": 0.1461736887360275,
      "grad_norm": 0.1077365055680275,
      "learning_rate": 7.113091308703498e-05,
      "loss": 0.8679,
      "step": 85
    },
    {
      "epoch": 0.14789337919174547,
      "grad_norm": 0.11147390305995941,
      "learning_rate": 7.033683215379002e-05,
      "loss": 0.8635,
      "step": 86
    },
    {
      "epoch": 0.14961306964746346,
      "grad_norm": 0.13498784601688385,
      "learning_rate": 6.953655642446368e-05,
      "loss": 0.9565,
      "step": 87
    },
    {
      "epoch": 0.15133276010318142,
      "grad_norm": 0.1289157271385193,
      "learning_rate": 6.873032967079561e-05,
      "loss": 1.041,
      "step": 88
    },
    {
      "epoch": 0.1530524505588994,
      "grad_norm": 0.1310902088880539,
      "learning_rate": 6.7918397477265e-05,
      "loss": 0.8777,
      "step": 89
    },
    {
      "epoch": 0.15477214101461736,
      "grad_norm": 0.160873144865036,
      "learning_rate": 6.710100716628344e-05,
      "loss": 1.1091,
      "step": 90
    },
    {
      "epoch": 0.15649183147033535,
      "grad_norm": 0.15653099119663239,
      "learning_rate": 6.627840772285784e-05,
      "loss": 1.1321,
      "step": 91
    },
    {
      "epoch": 0.1582115219260533,
      "grad_norm": 0.19784240424633026,
      "learning_rate": 6.545084971874738e-05,
      "loss": 1.1846,
      "step": 92
    },
    {
      "epoch": 0.15993121238177127,
      "grad_norm": 0.19561642408370972,
      "learning_rate": 6.461858523613684e-05,
      "loss": 1.3051,
      "step": 93
    },
    {
      "epoch": 0.16165090283748926,
      "grad_norm": 0.19523312151432037,
      "learning_rate": 6.378186779084995e-05,
      "loss": 1.3682,
      "step": 94
    },
    {
      "epoch": 0.16337059329320722,
      "grad_norm": 0.21068979799747467,
      "learning_rate": 6.294095225512603e-05,
      "loss": 1.2489,
      "step": 95
    },
    {
      "epoch": 0.1650902837489252,
      "grad_norm": 0.20077933371067047,
      "learning_rate": 6.209609477998338e-05,
      "loss": 1.3355,
      "step": 96
    },
    {
      "epoch": 0.16680997420464316,
      "grad_norm": 0.21723942458629608,
      "learning_rate": 6.124755271719325e-05,
      "loss": 1.6464,
      "step": 97
    },
    {
      "epoch": 0.16852966466036112,
      "grad_norm": 0.28459852933883667,
      "learning_rate": 6.0395584540887963e-05,
      "loss": 1.7727,
      "step": 98
    },
    {
      "epoch": 0.1702493551160791,
      "grad_norm": 0.37411239743232727,
      "learning_rate": 5.9540449768827246e-05,
      "loss": 2.0809,
      "step": 99
    },
    {
      "epoch": 0.17196904557179707,
      "grad_norm": 0.6213313937187195,
      "learning_rate": 5.868240888334653e-05,
      "loss": 2.2677,
      "step": 100
    },
    {
      "epoch": 0.17196904557179707,
      "eval_loss": 0.9143250584602356,
      "eval_runtime": 26.1292,
      "eval_samples_per_second": 37.506,
      "eval_steps_per_second": 18.753,
      "step": 100
    },
    {
      "epoch": 0.17368873602751506,
      "grad_norm": 0.0647941306233406,
      "learning_rate": 5.782172325201155e-05,
      "loss": 0.5759,
      "step": 101
    },
    {
      "epoch": 0.17540842648323302,
      "grad_norm": 0.07381158322095871,
      "learning_rate": 5.695865504800327e-05,
      "loss": 0.5957,
      "step": 102
    },
    {
      "epoch": 0.177128116938951,
      "grad_norm": 0.07971695810556412,
      "learning_rate": 5.6093467170257374e-05,
      "loss": 0.6366,
      "step": 103
    },
    {
      "epoch": 0.17884780739466896,
      "grad_norm": 0.073166623711586,
      "learning_rate": 5.522642316338268e-05,
      "loss": 0.5878,
      "step": 104
    },
    {
      "epoch": 0.18056749785038692,
      "grad_norm": 0.08162125200033188,
      "learning_rate": 5.435778713738292e-05,
      "loss": 0.6366,
      "step": 105
    },
    {
      "epoch": 0.1822871883061049,
      "grad_norm": 0.0808592289686203,
      "learning_rate": 5.348782368720626e-05,
      "loss": 0.6407,
      "step": 106
    },
    {
      "epoch": 0.18400687876182287,
      "grad_norm": 0.09443782269954681,
      "learning_rate": 5.26167978121472e-05,
      "loss": 0.6592,
      "step": 107
    },
    {
      "epoch": 0.18572656921754085,
      "grad_norm": 0.08486565947532654,
      "learning_rate": 5.174497483512506e-05,
      "loss": 0.6987,
      "step": 108
    },
    {
      "epoch": 0.1874462596732588,
      "grad_norm": 0.08635884523391724,
      "learning_rate": 5.0872620321864185e-05,
      "loss": 0.6941,
      "step": 109
    },
    {
      "epoch": 0.18916595012897677,
      "grad_norm": 0.0893407091498375,
      "learning_rate": 5e-05,
      "loss": 0.7226,
      "step": 110
    },
    {
      "epoch": 0.19088564058469476,
      "grad_norm": 0.09018551558256149,
      "learning_rate": 4.912737967813583e-05,
      "loss": 0.7208,
      "step": 111
    },
    {
      "epoch": 0.19260533104041272,
      "grad_norm": 0.07536663860082626,
      "learning_rate": 4.825502516487497e-05,
      "loss": 0.5846,
      "step": 112
    },
    {
      "epoch": 0.1943250214961307,
      "grad_norm": 0.08711609989404678,
      "learning_rate": 4.738320218785281e-05,
      "loss": 0.6862,
      "step": 113
    },
    {
      "epoch": 0.19604471195184867,
      "grad_norm": 0.06564276665449142,
      "learning_rate": 4.6512176312793736e-05,
      "loss": 0.6931,
      "step": 114
    },
    {
      "epoch": 0.19776440240756663,
      "grad_norm": 0.08047173917293549,
      "learning_rate": 4.564221286261709e-05,
      "loss": 0.7913,
      "step": 115
    },
    {
      "epoch": 0.1994840928632846,
      "grad_norm": 0.08029168099164963,
      "learning_rate": 4.477357683661734e-05,
      "loss": 0.6937,
      "step": 116
    },
    {
      "epoch": 0.20120378331900257,
      "grad_norm": 0.07913923263549805,
      "learning_rate": 4.390653282974264e-05,
      "loss": 0.7687,
      "step": 117
    },
    {
      "epoch": 0.20292347377472056,
      "grad_norm": 0.0775957927107811,
      "learning_rate": 4.3041344951996746e-05,
      "loss": 0.7796,
      "step": 118
    },
    {
      "epoch": 0.20464316423043852,
      "grad_norm": 0.07749712467193604,
      "learning_rate": 4.2178276747988446e-05,
      "loss": 0.7374,
      "step": 119
    },
    {
      "epoch": 0.2063628546861565,
      "grad_norm": 0.07898925989866257,
      "learning_rate": 4.131759111665349e-05,
      "loss": 0.7796,
      "step": 120
    },
    {
      "epoch": 0.20808254514187446,
      "grad_norm": 0.07860884070396423,
      "learning_rate": 4.045955023117276e-05,
      "loss": 0.7887,
      "step": 121
    },
    {
      "epoch": 0.20980223559759242,
      "grad_norm": 0.077889084815979,
      "learning_rate": 3.960441545911204e-05,
      "loss": 0.8293,
      "step": 122
    },
    {
      "epoch": 0.2115219260533104,
      "grad_norm": 0.07636513561010361,
      "learning_rate": 3.875244728280676e-05,
      "loss": 0.7912,
      "step": 123
    },
    {
      "epoch": 0.21324161650902837,
      "grad_norm": 0.07946857064962387,
      "learning_rate": 3.790390522001662e-05,
      "loss": 0.8765,
      "step": 124
    },
    {
      "epoch": 0.21496130696474636,
      "grad_norm": 0.07615929841995239,
      "learning_rate": 3.705904774487396e-05,
      "loss": 0.6885,
      "step": 125
    },
    {
      "epoch": 0.21668099742046432,
      "grad_norm": 0.07800007611513138,
      "learning_rate": 3.6218132209150045e-05,
      "loss": 0.7583,
      "step": 126
    },
    {
      "epoch": 0.21840068787618228,
      "grad_norm": 0.0879930779337883,
      "learning_rate": 3.5381414763863166e-05,
      "loss": 0.7962,
      "step": 127
    },
    {
      "epoch": 0.22012037833190026,
      "grad_norm": 0.08408991992473602,
      "learning_rate": 3.4549150281252636e-05,
      "loss": 0.7565,
      "step": 128
    },
    {
      "epoch": 0.22184006878761822,
      "grad_norm": 0.08975850045681,
      "learning_rate": 3.372159227714218e-05,
      "loss": 0.8412,
      "step": 129
    },
    {
      "epoch": 0.2235597592433362,
      "grad_norm": 0.08965165913105011,
      "learning_rate": 3.289899283371657e-05,
      "loss": 0.8464,
      "step": 130
    },
    {
      "epoch": 0.22527944969905417,
      "grad_norm": 0.09056264162063599,
      "learning_rate": 3.2081602522734986e-05,
      "loss": 0.663,
      "step": 131
    },
    {
      "epoch": 0.22699914015477213,
      "grad_norm": 0.0920715406537056,
      "learning_rate": 3.12696703292044e-05,
      "loss": 0.8896,
      "step": 132
    },
    {
      "epoch": 0.22871883061049011,
      "grad_norm": 0.10946381837129593,
      "learning_rate": 3.046344357553632e-05,
      "loss": 0.7847,
      "step": 133
    },
    {
      "epoch": 0.23043852106620807,
      "grad_norm": 0.10407907515764236,
      "learning_rate": 2.9663167846209998e-05,
      "loss": 0.8615,
      "step": 134
    },
    {
      "epoch": 0.23215821152192606,
      "grad_norm": 0.09890785813331604,
      "learning_rate": 2.886908691296504e-05,
      "loss": 0.8844,
      "step": 135
    },
    {
      "epoch": 0.23387790197764402,
      "grad_norm": 0.12885335087776184,
      "learning_rate": 2.8081442660546125e-05,
      "loss": 0.7755,
      "step": 136
    },
    {
      "epoch": 0.235597592433362,
      "grad_norm": 0.10378793627023697,
      "learning_rate": 2.7300475013022663e-05,
      "loss": 0.7874,
      "step": 137
    },
    {
      "epoch": 0.23731728288907997,
      "grad_norm": 0.12850679457187653,
      "learning_rate": 2.6526421860705473e-05,
      "loss": 0.7773,
      "step": 138
    },
    {
      "epoch": 0.23903697334479793,
      "grad_norm": 0.1387593150138855,
      "learning_rate": 2.575951898768315e-05,
      "loss": 0.8046,
      "step": 139
    },
    {
      "epoch": 0.2407566638005159,
      "grad_norm": 0.14513392746448517,
      "learning_rate": 2.500000000000001e-05,
      "loss": 0.9875,
      "step": 140
    },
    {
      "epoch": 0.24247635425623387,
      "grad_norm": 0.14788322150707245,
      "learning_rate": 2.4248096254497288e-05,
      "loss": 0.9912,
      "step": 141
    },
    {
      "epoch": 0.24419604471195186,
      "grad_norm": 0.16071070730686188,
      "learning_rate": 2.350403678833976e-05,
      "loss": 1.2427,
      "step": 142
    },
    {
      "epoch": 0.24591573516766982,
      "grad_norm": 0.18315501511096954,
      "learning_rate": 2.2768048249248648e-05,
      "loss": 1.3647,
      "step": 143
    },
    {
      "epoch": 0.24763542562338778,
      "grad_norm": 0.2040652483701706,
      "learning_rate": 2.2040354826462668e-05,
      "loss": 1.3373,
      "step": 144
    },
    {
      "epoch": 0.24935511607910577,
      "grad_norm": 0.20529639720916748,
      "learning_rate": 2.132117818244771e-05,
      "loss": 1.421,
      "step": 145
    },
    {
      "epoch": 0.2510748065348237,
      "grad_norm": 0.18773475289344788,
      "learning_rate": 2.061073738537635e-05,
      "loss": 1.3484,
      "step": 146
    },
    {
      "epoch": 0.2527944969905417,
      "grad_norm": 0.20955699682235718,
      "learning_rate": 1.9909248842397584e-05,
      "loss": 1.3517,
      "step": 147
    },
    {
      "epoch": 0.2545141874462597,
      "grad_norm": 0.22871288657188416,
      "learning_rate": 1.9216926233717085e-05,
      "loss": 1.4738,
      "step": 148
    },
    {
      "epoch": 0.25623387790197766,
      "grad_norm": 0.27887439727783203,
      "learning_rate": 1.8533980447508137e-05,
      "loss": 1.8532,
      "step": 149
    },
    {
      "epoch": 0.2579535683576956,
      "grad_norm": 0.40441471338272095,
      "learning_rate": 1.7860619515673033e-05,
      "loss": 2.2748,
      "step": 150
    },
    {
      "epoch": 0.2579535683576956,
      "eval_loss": 0.9018179774284363,
      "eval_runtime": 26.0633,
      "eval_samples_per_second": 37.601,
      "eval_steps_per_second": 18.8,
      "step": 150
    }
  ],
  "logging_steps": 1,
  "max_steps": 200,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 50,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 5,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 1.066393548619776e+16,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}