{
  "best_metric": 1.542808175086975,
  "best_model_checkpoint": "miner_id_24/checkpoint-100",
  "epoch": 0.0052082316100597,
  "eval_steps": 50,
  "global_step": 100,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 5.208231610059699e-05,
      "grad_norm": 0.7850510478019714,
      "learning_rate": 5.000000000000001e-07,
      "loss": 0.4988,
      "step": 1
    },
    {
      "epoch": 5.208231610059699e-05,
      "eval_loss": 2.3925089836120605,
      "eval_runtime": 1165.5151,
      "eval_samples_per_second": 27.746,
      "eval_steps_per_second": 6.937,
      "step": 1
    },
    {
      "epoch": 0.00010416463220119398,
      "grad_norm": 1.204688310623169,
      "learning_rate": 1.0000000000000002e-06,
      "loss": 0.641,
      "step": 2
    },
    {
      "epoch": 0.00015624694830179098,
      "grad_norm": 1.1601476669311523,
      "learning_rate": 1.5e-06,
      "loss": 0.6859,
      "step": 3
    },
    {
      "epoch": 0.00020832926440238797,
      "grad_norm": 1.301094651222229,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 0.6955,
      "step": 4
    },
    {
      "epoch": 0.000260411580502985,
      "grad_norm": 1.086885929107666,
      "learning_rate": 2.5e-06,
      "loss": 0.81,
      "step": 5
    },
    {
      "epoch": 0.00031249389660358197,
      "grad_norm": 1.0889768600463867,
      "learning_rate": 3e-06,
      "loss": 0.7875,
      "step": 6
    },
    {
      "epoch": 0.00036457621270417895,
      "grad_norm": 1.2988945245742798,
      "learning_rate": 3.5e-06,
      "loss": 0.8043,
      "step": 7
    },
    {
      "epoch": 0.00041665852880477594,
      "grad_norm": 1.2984793186187744,
      "learning_rate": 4.000000000000001e-06,
      "loss": 0.8566,
      "step": 8
    },
    {
      "epoch": 0.0004687408449053729,
      "grad_norm": 1.8114720582962036,
      "learning_rate": 4.5e-06,
      "loss": 0.8709,
      "step": 9
    },
    {
      "epoch": 0.00052082316100597,
      "grad_norm": 1.5189226865768433,
      "learning_rate": 5e-06,
      "loss": 0.9123,
      "step": 10
    },
    {
      "epoch": 0.0005729054771065669,
      "grad_norm": 1.3734829425811768,
      "learning_rate": 4.99847706754774e-06,
      "loss": 0.8988,
      "step": 11
    },
    {
      "epoch": 0.0006249877932071639,
      "grad_norm": 1.8695842027664185,
      "learning_rate": 4.993910125649561e-06,
      "loss": 0.9392,
      "step": 12
    },
    {
      "epoch": 0.000677070109307761,
      "grad_norm": 1.7758443355560303,
      "learning_rate": 4.986304738420684e-06,
      "loss": 0.948,
      "step": 13
    },
    {
      "epoch": 0.0007291524254083579,
      "grad_norm": 1.7251638174057007,
      "learning_rate": 4.975670171853926e-06,
      "loss": 1.0075,
      "step": 14
    },
    {
      "epoch": 0.0007812347415089549,
      "grad_norm": 1.9016642570495605,
      "learning_rate": 4.962019382530521e-06,
      "loss": 1.0206,
      "step": 15
    },
    {
      "epoch": 0.0008333170576095519,
      "grad_norm": 2.261992931365967,
      "learning_rate": 4.9453690018345144e-06,
      "loss": 1.0041,
      "step": 16
    },
    {
      "epoch": 0.0008853993737101489,
      "grad_norm": 1.9689737558364868,
      "learning_rate": 4.925739315689991e-06,
      "loss": 0.9991,
      "step": 17
    },
    {
      "epoch": 0.0009374816898107458,
      "grad_norm": 2.2839338779449463,
      "learning_rate": 4.903154239845798e-06,
      "loss": 1.0233,
      "step": 18
    },
    {
      "epoch": 0.0009895640059113429,
      "grad_norm": 2.3685903549194336,
      "learning_rate": 4.8776412907378845e-06,
      "loss": 1.0451,
      "step": 19
    },
    {
      "epoch": 0.00104164632201194,
      "grad_norm": 2.4562370777130127,
      "learning_rate": 4.849231551964771e-06,
      "loss": 1.1865,
      "step": 20
    },
    {
      "epoch": 0.001093728638112537,
      "grad_norm": 2.2600936889648438,
      "learning_rate": 4.817959636416969e-06,
      "loss": 1.2595,
      "step": 21
    },
    {
      "epoch": 0.0011458109542131338,
      "grad_norm": 2.520331859588623,
      "learning_rate": 4.783863644106502e-06,
      "loss": 1.1546,
      "step": 22
    },
    {
      "epoch": 0.0011978932703137308,
      "grad_norm": 2.7857682704925537,
      "learning_rate": 4.746985115747918e-06,
      "loss": 1.1655,
      "step": 23
    },
    {
      "epoch": 0.0012499755864143279,
      "grad_norm": 2.817095994949341,
      "learning_rate": 4.707368982147318e-06,
      "loss": 1.3266,
      "step": 24
    },
    {
      "epoch": 0.001302057902514925,
      "grad_norm": 2.6866202354431152,
      "learning_rate": 4.665063509461098e-06,
      "loss": 1.3189,
      "step": 25
    },
    {
      "epoch": 0.001354140218615522,
      "grad_norm": 3.191648244857788,
      "learning_rate": 4.620120240391065e-06,
      "loss": 1.3855,
      "step": 26
    },
    {
      "epoch": 0.0014062225347161188,
      "grad_norm": 3.362218141555786,
      "learning_rate": 4.572593931387604e-06,
      "loss": 1.4789,
      "step": 27
    },
    {
      "epoch": 0.0014583048508167158,
      "grad_norm": 3.0076942443847656,
      "learning_rate": 4.522542485937369e-06,
      "loss": 1.4305,
      "step": 28
    },
    {
      "epoch": 0.0015103871669173128,
      "grad_norm": 3.998861312866211,
      "learning_rate": 4.470026884016805e-06,
      "loss": 1.5949,
      "step": 29
    },
    {
      "epoch": 0.0015624694830179099,
      "grad_norm": 4.050288200378418,
      "learning_rate": 4.415111107797445e-06,
      "loss": 1.6032,
      "step": 30
    },
    {
      "epoch": 0.0016145517991185067,
      "grad_norm": 5.438502788543701,
      "learning_rate": 4.357862063693486e-06,
      "loss": 1.8929,
      "step": 31
    },
    {
      "epoch": 0.0016666341152191037,
      "grad_norm": 4.320279121398926,
      "learning_rate": 4.2983495008466285e-06,
      "loss": 1.7818,
      "step": 32
    },
    {
      "epoch": 0.0017187164313197008,
      "grad_norm": 5.677204132080078,
      "learning_rate": 4.236645926147493e-06,
      "loss": 1.9708,
      "step": 33
    },
    {
      "epoch": 0.0017707987474202978,
      "grad_norm": 4.794656276702881,
      "learning_rate": 4.172826515897146e-06,
      "loss": 1.9317,
      "step": 34
    },
    {
      "epoch": 0.0018228810635208949,
      "grad_norm": 4.484780311584473,
      "learning_rate": 4.106969024216348e-06,
      "loss": 1.8344,
      "step": 35
    },
    {
      "epoch": 0.0018749633796214917,
      "grad_norm": 5.888218879699707,
      "learning_rate": 4.039153688314146e-06,
      "loss": 2.0714,
      "step": 36
    },
    {
      "epoch": 0.0019270456957220887,
      "grad_norm": 6.086559772491455,
      "learning_rate": 3.969463130731183e-06,
      "loss": 2.2107,
      "step": 37
    },
    {
      "epoch": 0.0019791280118226858,
      "grad_norm": 5.582540512084961,
      "learning_rate": 3.897982258676867e-06,
      "loss": 2.2159,
      "step": 38
    },
    {
      "epoch": 0.0020312103279232826,
      "grad_norm": 6.883957862854004,
      "learning_rate": 3.824798160583012e-06,
      "loss": 2.5386,
      "step": 39
    },
    {
      "epoch": 0.00208329264402388,
      "grad_norm": 6.9683098793029785,
      "learning_rate": 3.7500000000000005e-06,
      "loss": 2.7286,
      "step": 40
    },
    {
      "epoch": 0.0021353749601244767,
      "grad_norm": 7.439619541168213,
      "learning_rate": 3.6736789069647273e-06,
      "loss": 2.9706,
      "step": 41
    },
    {
      "epoch": 0.002187457276225074,
      "grad_norm": 8.695789337158203,
      "learning_rate": 3.595927866972694e-06,
      "loss": 3.1264,
      "step": 42
    },
    {
      "epoch": 0.0022395395923256707,
      "grad_norm": 8.851489067077637,
      "learning_rate": 3.516841607689501e-06,
      "loss": 3.2198,
      "step": 43
    },
    {
      "epoch": 0.0022916219084262676,
      "grad_norm": 11.42151165008545,
      "learning_rate": 3.436516483539781e-06,
      "loss": 3.5362,
      "step": 44
    },
    {
      "epoch": 0.002343704224526865,
      "grad_norm": 9.101286888122559,
      "learning_rate": 3.3550503583141726e-06,
      "loss": 3.4341,
      "step": 45
    },
    {
      "epoch": 0.0023957865406274616,
      "grad_norm": 9.989834785461426,
      "learning_rate": 3.272542485937369e-06,
      "loss": 3.8915,
      "step": 46
    },
    {
      "epoch": 0.002447868856728059,
      "grad_norm": 12.451299667358398,
      "learning_rate": 3.189093389542498e-06,
      "loss": 4.4809,
      "step": 47
    },
    {
      "epoch": 0.0024999511728286557,
      "grad_norm": 15.89113998413086,
      "learning_rate": 3.1048047389991693e-06,
      "loss": 5.9077,
      "step": 48
    },
    {
      "epoch": 0.0025520334889292525,
      "grad_norm": 22.119922637939453,
      "learning_rate": 3.019779227044398e-06,
      "loss": 7.5703,
      "step": 49
    },
    {
      "epoch": 0.00260411580502985,
      "grad_norm": 23.528160095214844,
      "learning_rate": 2.9341204441673267e-06,
      "loss": 8.9154,
      "step": 50
    },
    {
      "epoch": 0.00260411580502985,
      "eval_loss": 1.7300430536270142,
      "eval_runtime": 1166.7865,
      "eval_samples_per_second": 27.715,
      "eval_steps_per_second": 6.929,
      "step": 50
    },
    {
      "epoch": 0.0026561981211304466,
      "grad_norm": 1.0742809772491455,
      "learning_rate": 2.847932752400164e-06,
      "loss": 0.3675,
      "step": 51
    },
    {
      "epoch": 0.002708280437231044,
      "grad_norm": 1.0545445680618286,
      "learning_rate": 2.761321158169134e-06,
      "loss": 0.401,
      "step": 52
    },
    {
      "epoch": 0.0027603627533316407,
      "grad_norm": 1.108581304550171,
      "learning_rate": 2.6743911843603134e-06,
      "loss": 0.4626,
      "step": 53
    },
    {
      "epoch": 0.0028124450694322375,
      "grad_norm": 1.2265070676803589,
      "learning_rate": 2.587248741756253e-06,
      "loss": 0.5719,
      "step": 54
    },
    {
      "epoch": 0.0028645273855328348,
      "grad_norm": 1.1157441139221191,
      "learning_rate": 2.5e-06,
      "loss": 0.5564,
      "step": 55
    },
    {
      "epoch": 0.0029166097016334316,
      "grad_norm": 1.1121352910995483,
      "learning_rate": 2.4127512582437486e-06,
      "loss": 0.5619,
      "step": 56
    },
    {
      "epoch": 0.0029686920177340284,
      "grad_norm": 1.0687549114227295,
      "learning_rate": 2.325608815639687e-06,
      "loss": 0.591,
      "step": 57
    },
    {
      "epoch": 0.0030207743338346257,
      "grad_norm": 1.1351648569107056,
      "learning_rate": 2.238678841830867e-06,
      "loss": 0.6076,
      "step": 58
    },
    {
      "epoch": 0.0030728566499352225,
      "grad_norm": 1.1356217861175537,
      "learning_rate": 2.1520672475998374e-06,
      "loss": 0.6677,
      "step": 59
    },
    {
      "epoch": 0.0031249389660358198,
      "grad_norm": 1.1470251083374023,
      "learning_rate": 2.0658795558326745e-06,
      "loss": 0.664,
      "step": 60
    },
    {
      "epoch": 0.0031770212821364166,
      "grad_norm": 1.2547721862792969,
      "learning_rate": 1.9802207729556023e-06,
      "loss": 0.6278,
      "step": 61
    },
    {
      "epoch": 0.0032291035982370134,
      "grad_norm": 1.313974142074585,
      "learning_rate": 1.895195261000831e-06,
      "loss": 0.6576,
      "step": 62
    },
    {
      "epoch": 0.0032811859143376107,
      "grad_norm": 1.2617135047912598,
      "learning_rate": 1.8109066104575023e-06,
      "loss": 0.6497,
      "step": 63
    },
    {
      "epoch": 0.0033332682304382075,
      "grad_norm": 1.3057128190994263,
      "learning_rate": 1.7274575140626318e-06,
      "loss": 0.6896,
      "step": 64
    },
    {
      "epoch": 0.0033853505465388047,
      "grad_norm": 1.3191314935684204,
      "learning_rate": 1.6449496416858285e-06,
      "loss": 0.679,
      "step": 65
    },
    {
      "epoch": 0.0034374328626394016,
      "grad_norm": 1.597451090812683,
      "learning_rate": 1.56348351646022e-06,
      "loss": 0.7526,
      "step": 66
    },
    {
      "epoch": 0.0034895151787399984,
      "grad_norm": 1.440009355545044,
      "learning_rate": 1.4831583923105e-06,
      "loss": 0.7096,
      "step": 67
    },
    {
      "epoch": 0.0035415974948405956,
      "grad_norm": 1.5627026557922363,
      "learning_rate": 1.4040721330273063e-06,
      "loss": 0.724,
      "step": 68
    },
    {
      "epoch": 0.0035936798109411925,
      "grad_norm": 1.503731608390808,
      "learning_rate": 1.3263210930352737e-06,
      "loss": 0.652,
      "step": 69
    },
    {
      "epoch": 0.0036457621270417897,
      "grad_norm": 1.6435801982879639,
      "learning_rate": 1.2500000000000007e-06,
      "loss": 0.8539,
      "step": 70
    },
    {
      "epoch": 0.0036978444431423865,
      "grad_norm": 1.7257816791534424,
      "learning_rate": 1.1752018394169882e-06,
      "loss": 0.761,
      "step": 71
    },
    {
      "epoch": 0.0037499267592429834,
      "grad_norm": 1.9037182331085205,
      "learning_rate": 1.1020177413231334e-06,
      "loss": 0.7582,
      "step": 72
    },
    {
      "epoch": 0.0038020090753435806,
      "grad_norm": 1.8669556379318237,
      "learning_rate": 1.0305368692688175e-06,
      "loss": 0.8375,
      "step": 73
    },
    {
      "epoch": 0.0038540913914441774,
      "grad_norm": 2.0030980110168457,
      "learning_rate": 9.608463116858544e-07,
      "loss": 0.9134,
      "step": 74
    },
    {
      "epoch": 0.0039061737075447747,
      "grad_norm": 1.8826111555099487,
      "learning_rate": 8.930309757836517e-07,
      "loss": 0.7892,
      "step": 75
    },
    {
      "epoch": 0.0039582560236453715,
      "grad_norm": 2.3302550315856934,
      "learning_rate": 8.271734841028553e-07,
      "loss": 0.864,
      "step": 76
    },
    {
      "epoch": 0.004010338339745969,
      "grad_norm": 2.2271835803985596,
      "learning_rate": 7.633540738525066e-07,
      "loss": 0.9407,
      "step": 77
    },
    {
      "epoch": 0.004062420655846565,
      "grad_norm": 2.2922518253326416,
      "learning_rate": 7.016504991533727e-07,
      "loss": 0.9515,
      "step": 78
    },
    {
      "epoch": 0.004114502971947162,
      "grad_norm": 2.4218201637268066,
      "learning_rate": 6.421379363065142e-07,
      "loss": 0.9807,
      "step": 79
    },
    {
      "epoch": 0.00416658528804776,
      "grad_norm": 2.886337995529175,
      "learning_rate": 5.848888922025553e-07,
      "loss": 1.0902,
      "step": 80
    },
    {
      "epoch": 0.004218667604148356,
      "grad_norm": 2.7967350482940674,
      "learning_rate": 5.299731159831953e-07,
      "loss": 1.2078,
      "step": 81
    },
    {
      "epoch": 0.004270749920248953,
      "grad_norm": 3.2428112030029297,
      "learning_rate": 4.774575140626317e-07,
      "loss": 1.1927,
      "step": 82
    },
    {
      "epoch": 0.004322832236349551,
      "grad_norm": 3.46563458442688,
      "learning_rate": 4.27406068612396e-07,
      "loss": 1.3252,
      "step": 83
    },
    {
      "epoch": 0.004374914552450148,
      "grad_norm": 3.441267490386963,
      "learning_rate": 3.798797596089351e-07,
      "loss": 1.3008,
      "step": 84
    },
    {
      "epoch": 0.004426996868550744,
      "grad_norm": 4.186681270599365,
      "learning_rate": 3.3493649053890325e-07,
      "loss": 1.5252,
      "step": 85
    },
    {
      "epoch": 0.0044790791846513415,
      "grad_norm": 3.94539737701416,
      "learning_rate": 2.9263101785268253e-07,
      "loss": 1.4843,
      "step": 86
    },
    {
      "epoch": 0.004531161500751939,
      "grad_norm": 4.773882865905762,
      "learning_rate": 2.53014884252083e-07,
      "loss": 1.7177,
      "step": 87
    },
    {
      "epoch": 0.004583243816852535,
      "grad_norm": 5.422726631164551,
      "learning_rate": 2.1613635589349756e-07,
      "loss": 1.7822,
      "step": 88
    },
    {
      "epoch": 0.004635326132953132,
      "grad_norm": 5.55891227722168,
      "learning_rate": 1.8204036358303173e-07,
      "loss": 1.9766,
      "step": 89
    },
    {
      "epoch": 0.00468740844905373,
      "grad_norm": 5.845249652862549,
      "learning_rate": 1.507684480352292e-07,
      "loss": 1.9855,
      "step": 90
    },
    {
      "epoch": 0.004739490765154326,
      "grad_norm": 5.987824440002441,
      "learning_rate": 1.223587092621162e-07,
      "loss": 2.0612,
      "step": 91
    },
    {
      "epoch": 0.004791573081254923,
      "grad_norm": 6.048609256744385,
      "learning_rate": 9.684576015420277e-08,
      "loss": 2.1425,
      "step": 92
    },
    {
      "epoch": 0.0048436553973555205,
      "grad_norm": 7.93657112121582,
      "learning_rate": 7.426068431000883e-08,
      "loss": 2.7068,
      "step": 93
    },
    {
      "epoch": 0.004895737713456118,
      "grad_norm": 8.366839408874512,
      "learning_rate": 5.463099816548578e-08,
      "loss": 2.9492,
      "step": 94
    },
    {
      "epoch": 0.004947820029556714,
      "grad_norm": 9.729937553405762,
      "learning_rate": 3.798061746947995e-08,
      "loss": 3.1672,
      "step": 95
    },
    {
      "epoch": 0.0049999023456573114,
      "grad_norm": 10.509390830993652,
      "learning_rate": 2.4329828146074096e-08,
      "loss": 3.2586,
      "step": 96
    },
    {
      "epoch": 0.005051984661757909,
      "grad_norm": 13.249284744262695,
      "learning_rate": 1.3695261579316776e-08,
      "loss": 3.8453,
      "step": 97
    },
    {
      "epoch": 0.005104066977858505,
      "grad_norm": 18.896095275878906,
      "learning_rate": 6.089874350439507e-09,
      "loss": 5.4556,
      "step": 98
    },
    {
      "epoch": 0.005156149293959102,
      "grad_norm": 24.21651268005371,
      "learning_rate": 1.5229324522605949e-09,
      "loss": 7.5273,
      "step": 99
    },
    {
      "epoch": 0.0052082316100597,
      "grad_norm": 27.522985458374023,
      "learning_rate": 0.0,
      "loss": 8.2395,
      "step": 100
    },
    {
      "epoch": 0.0052082316100597,
      "eval_loss": 1.542808175086975,
      "eval_runtime": 1166.5968,
      "eval_samples_per_second": 27.72,
      "eval_steps_per_second": 6.93,
      "step": 100
    }
  ],
  "logging_steps": 1,
  "max_steps": 100,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 50,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 5,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 3.949858011689779e+16,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}