{
  "best_metric": 1.62236487865448,
  "best_model_checkpoint": "miner_id_24/checkpoint-100",
  "epoch": 0.0017317741593318816,
  "eval_steps": 25,
  "global_step": 100,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 1.7317741593318815e-05,
      "grad_norm": 1.3246665000915527,
      "learning_rate": 4e-05,
      "loss": 1.6209,
      "step": 1
    },
    {
      "epoch": 1.7317741593318815e-05,
      "eval_loss": 2.6184682846069336,
      "eval_runtime": 4891.6495,
      "eval_samples_per_second": 4.971,
      "eval_steps_per_second": 2.485,
      "step": 1
    },
    {
      "epoch": 3.463548318663763e-05,
      "grad_norm": 1.7559422254562378,
      "learning_rate": 8e-05,
      "loss": 2.6328,
      "step": 2
    },
    {
      "epoch": 5.1953224779956444e-05,
      "grad_norm": 1.3109291791915894,
      "learning_rate": 0.00012,
      "loss": 1.8847,
      "step": 3
    },
    {
      "epoch": 6.927096637327526e-05,
      "grad_norm": 1.6149224042892456,
      "learning_rate": 0.00016,
      "loss": 2.4206,
      "step": 4
    },
    {
      "epoch": 8.658870796659407e-05,
      "grad_norm": 1.6735433340072632,
      "learning_rate": 0.0002,
      "loss": 2.4259,
      "step": 5
    },
    {
      "epoch": 0.00010390644955991289,
      "grad_norm": 1.4547559022903442,
      "learning_rate": 0.00019994532573409262,
      "loss": 1.7482,
      "step": 6
    },
    {
      "epoch": 0.0001212241911532317,
      "grad_norm": 1.628714919090271,
      "learning_rate": 0.00019978136272187747,
      "loss": 2.2129,
      "step": 7
    },
    {
      "epoch": 0.00013854193274655052,
      "grad_norm": 1.8027219772338867,
      "learning_rate": 0.00019950829025450114,
      "loss": 1.8543,
      "step": 8
    },
    {
      "epoch": 0.00015585967433986934,
      "grad_norm": 2.021082639694214,
      "learning_rate": 0.00019912640693269752,
      "loss": 2.0592,
      "step": 9
    },
    {
      "epoch": 0.00017317741593318815,
      "grad_norm": 2.312199115753174,
      "learning_rate": 0.00019863613034027224,
      "loss": 2.055,
      "step": 10
    },
    {
      "epoch": 0.00019049515752650697,
      "grad_norm": 1.8753467798233032,
      "learning_rate": 0.00019803799658748094,
      "loss": 1.7057,
      "step": 11
    },
    {
      "epoch": 0.00020781289911982577,
      "grad_norm": 3.184194803237915,
      "learning_rate": 0.0001973326597248006,
      "loss": 2.1195,
      "step": 12
    },
    {
      "epoch": 0.0002251306407131446,
      "grad_norm": 2.2157609462738037,
      "learning_rate": 0.00019652089102773488,
      "loss": 1.6142,
      "step": 13
    },
    {
      "epoch": 0.0002424483823064634,
      "grad_norm": 1.8846286535263062,
      "learning_rate": 0.00019560357815343577,
      "loss": 1.6909,
      "step": 14
    },
    {
      "epoch": 0.00025976612389978223,
      "grad_norm": 1.8501865863800049,
      "learning_rate": 0.00019458172417006347,
      "loss": 1.5635,
      "step": 15
    },
    {
      "epoch": 0.00027708386549310103,
      "grad_norm": 1.9967668056488037,
      "learning_rate": 0.0001934564464599461,
      "loss": 1.7519,
      "step": 16
    },
    {
      "epoch": 0.00029440160708641983,
      "grad_norm": 1.8826665878295898,
      "learning_rate": 0.00019222897549773848,
      "loss": 1.6652,
      "step": 17
    },
    {
      "epoch": 0.0003117193486797387,
      "grad_norm": 2.2450180053710938,
      "learning_rate": 0.00019090065350491626,
      "loss": 1.8715,
      "step": 18
    },
    {
      "epoch": 0.0003290370902730575,
      "grad_norm": 2.0638725757598877,
      "learning_rate": 0.00018947293298207635,
      "loss": 1.7423,
      "step": 19
    },
    {
      "epoch": 0.0003463548318663763,
      "grad_norm": 3.073434829711914,
      "learning_rate": 0.0001879473751206489,
      "loss": 2.1458,
      "step": 20
    },
    {
      "epoch": 0.00036367257345969515,
      "grad_norm": 1.9750235080718994,
      "learning_rate": 0.00018632564809575742,
      "loss": 1.6923,
      "step": 21
    },
    {
      "epoch": 0.00038099031505301395,
      "grad_norm": 2.1666667461395264,
      "learning_rate": 0.00018460952524209355,
      "loss": 1.6162,
      "step": 22
    },
    {
      "epoch": 0.00039830805664633275,
      "grad_norm": 1.9802206754684448,
      "learning_rate": 0.00018280088311480201,
      "loss": 1.6125,
      "step": 23
    },
    {
      "epoch": 0.00041562579823965155,
      "grad_norm": 1.9258074760437012,
      "learning_rate": 0.00018090169943749476,
      "loss": 1.6174,
      "step": 24
    },
    {
      "epoch": 0.0004329435398329704,
      "grad_norm": 2.670494556427002,
      "learning_rate": 0.00017891405093963938,
      "loss": 2.1219,
      "step": 25
    },
    {
      "epoch": 0.0004329435398329704,
      "eval_loss": 1.7579680681228638,
      "eval_runtime": 4921.5904,
      "eval_samples_per_second": 4.94,
      "eval_steps_per_second": 2.47,
      "step": 25
    },
    {
      "epoch": 0.0004502612814262892,
      "grad_norm": 2.315001964569092,
      "learning_rate": 0.00017684011108568592,
      "loss": 1.9471,
      "step": 26
    },
    {
      "epoch": 0.000467579023019608,
      "grad_norm": 4.8373613357543945,
      "learning_rate": 0.0001746821476984154,
      "loss": 1.4014,
      "step": 27
    },
    {
      "epoch": 0.0004848967646129268,
      "grad_norm": 2.2539186477661133,
      "learning_rate": 0.00017244252047910892,
      "loss": 1.8659,
      "step": 28
    },
    {
      "epoch": 0.0005022145062062456,
      "grad_norm": 2.21065616607666,
      "learning_rate": 0.00017012367842724887,
      "loss": 1.8552,
      "step": 29
    },
    {
      "epoch": 0.0005195322477995645,
      "grad_norm": 2.3214337825775146,
      "learning_rate": 0.00016772815716257412,
      "loss": 2.0893,
      "step": 30
    },
    {
      "epoch": 0.0005368499893928833,
      "grad_norm": 2.827920436859131,
      "learning_rate": 0.00016525857615241687,
      "loss": 1.7269,
      "step": 31
    },
    {
      "epoch": 0.0005541677309862021,
      "grad_norm": 2.3385794162750244,
      "learning_rate": 0.0001627176358473537,
      "loss": 1.4844,
      "step": 32
    },
    {
      "epoch": 0.0005714854725795209,
      "grad_norm": 2.1411702632904053,
      "learning_rate": 0.00016010811472830252,
      "loss": 1.6424,
      "step": 33
    },
    {
      "epoch": 0.0005888032141728397,
      "grad_norm": 1.8949635028839111,
      "learning_rate": 0.00015743286626829437,
      "loss": 1.8416,
      "step": 34
    },
    {
      "epoch": 0.0006061209557661585,
      "grad_norm": 2.325913667678833,
      "learning_rate": 0.00015469481581224272,
      "loss": 1.9681,
      "step": 35
    },
    {
      "epoch": 0.0006234386973594774,
      "grad_norm": 2.5775182247161865,
      "learning_rate": 0.00015189695737812152,
      "loss": 2.0605,
      "step": 36
    },
    {
      "epoch": 0.0006407564389527961,
      "grad_norm": 2.1500771045684814,
      "learning_rate": 0.00014904235038305083,
      "loss": 1.7005,
      "step": 37
    },
    {
      "epoch": 0.000658074180546115,
      "grad_norm": 1.9176790714263916,
      "learning_rate": 0.0001461341162978688,
      "loss": 1.544,
      "step": 38
    },
    {
      "epoch": 0.0006753919221394338,
      "grad_norm": 5.9821248054504395,
      "learning_rate": 0.00014317543523384928,
      "loss": 1.5349,
      "step": 39
    },
    {
      "epoch": 0.0006927096637327526,
      "grad_norm": 1.723404049873352,
      "learning_rate": 0.00014016954246529696,
      "loss": 1.4033,
      "step": 40
    },
    {
      "epoch": 0.0007100274053260714,
      "grad_norm": 2.131654739379883,
      "learning_rate": 0.00013711972489182208,
      "loss": 1.8816,
      "step": 41
    },
    {
      "epoch": 0.0007273451469193903,
      "grad_norm": 2.1149301528930664,
      "learning_rate": 0.00013402931744416433,
      "loss": 1.7547,
      "step": 42
    },
    {
      "epoch": 0.000744662888512709,
      "grad_norm": 2.1402599811553955,
      "learning_rate": 0.00013090169943749476,
      "loss": 1.4692,
      "step": 43
    },
    {
      "epoch": 0.0007619806301060279,
      "grad_norm": 2.189884662628174,
      "learning_rate": 0.00012774029087618446,
      "loss": 1.6887,
      "step": 44
    },
    {
      "epoch": 0.0007792983716993466,
      "grad_norm": 2.3986988067626953,
      "learning_rate": 0.00012454854871407994,
      "loss": 1.5637,
      "step": 45
    },
    {
      "epoch": 0.0007966161132926655,
      "grad_norm": 2.0756654739379883,
      "learning_rate": 0.0001213299630743747,
      "loss": 1.3412,
      "step": 46
    },
    {
      "epoch": 0.0008139338548859844,
      "grad_norm": 2.2220942974090576,
      "learning_rate": 0.000118088053433211,
      "loss": 1.9958,
      "step": 47
    },
    {
      "epoch": 0.0008312515964793031,
      "grad_norm": 2.2260262966156006,
      "learning_rate": 0.0001148263647711842,
      "loss": 1.8054,
      "step": 48
    },
    {
      "epoch": 0.000848569338072622,
      "grad_norm": 2.96877121925354,
      "learning_rate": 0.00011154846369695863,
      "loss": 1.7525,
      "step": 49
    },
    {
      "epoch": 0.0008658870796659408,
      "grad_norm": 2.6540238857269287,
      "learning_rate": 0.00010825793454723325,
      "loss": 1.731,
      "step": 50
    },
    {
      "epoch": 0.0008658870796659408,
      "eval_loss": 1.665256142616272,
      "eval_runtime": 4923.0054,
      "eval_samples_per_second": 4.939,
      "eval_steps_per_second": 2.469,
      "step": 50
    },
    {
      "epoch": 0.0008832048212592596,
      "grad_norm": 1.3105987310409546,
      "learning_rate": 0.00010495837546732224,
      "loss": 1.4402,
      "step": 51
    },
    {
      "epoch": 0.0009005225628525784,
      "grad_norm": 1.4101057052612305,
      "learning_rate": 0.00010165339447663587,
      "loss": 1.4313,
      "step": 52
    },
    {
      "epoch": 0.0009178403044458972,
      "grad_norm": 1.5237852334976196,
      "learning_rate": 9.834660552336415e-05,
      "loss": 1.7096,
      "step": 53
    },
    {
      "epoch": 0.000935158046039216,
      "grad_norm": 1.6052296161651611,
      "learning_rate": 9.504162453267777e-05,
      "loss": 1.4336,
      "step": 54
    },
    {
      "epoch": 0.0009524757876325349,
      "grad_norm": 1.7279136180877686,
      "learning_rate": 9.174206545276677e-05,
      "loss": 1.7388,
      "step": 55
    },
    {
      "epoch": 0.0009697935292258536,
      "grad_norm": 1.3830236196517944,
      "learning_rate": 8.845153630304139e-05,
      "loss": 1.2797,
      "step": 56
    },
    {
      "epoch": 0.0009871112708191726,
      "grad_norm": 2.031076669692993,
      "learning_rate": 8.517363522881579e-05,
      "loss": 1.9524,
      "step": 57
    },
    {
      "epoch": 0.0010044290124124912,
      "grad_norm": 1.437910556793213,
      "learning_rate": 8.191194656678904e-05,
      "loss": 1.4775,
      "step": 58
    },
    {
      "epoch": 0.00102174675400581,
      "grad_norm": 1.4608979225158691,
      "learning_rate": 7.867003692562534e-05,
      "loss": 1.4998,
      "step": 59
    },
    {
      "epoch": 0.001039064495599129,
      "grad_norm": 1.6951931715011597,
      "learning_rate": 7.54514512859201e-05,
      "loss": 1.8601,
      "step": 60
    },
    {
      "epoch": 0.0010563822371924478,
      "grad_norm": 1.654209852218628,
      "learning_rate": 7.225970912381556e-05,
      "loss": 1.8643,
      "step": 61
    },
    {
      "epoch": 0.0010736999787857666,
      "grad_norm": 1.837178349494934,
      "learning_rate": 6.909830056250527e-05,
      "loss": 1.7543,
      "step": 62
    },
    {
      "epoch": 0.0010910177203790853,
      "grad_norm": 1.8673418760299683,
      "learning_rate": 6.59706825558357e-05,
      "loss": 1.408,
      "step": 63
    },
    {
      "epoch": 0.0011083354619724041,
      "grad_norm": 1.660733938217163,
      "learning_rate": 6.28802751081779e-05,
      "loss": 1.6586,
      "step": 64
    },
    {
      "epoch": 0.001125653203565723,
      "grad_norm": 1.6716222763061523,
      "learning_rate": 5.983045753470308e-05,
      "loss": 1.7785,
      "step": 65
    },
    {
      "epoch": 0.0011429709451590418,
      "grad_norm": 1.4793193340301514,
      "learning_rate": 5.6824564766150726e-05,
      "loss": 1.524,
      "step": 66
    },
    {
      "epoch": 0.0011602886867523607,
      "grad_norm": 1.4535232782363892,
      "learning_rate": 5.386588370213124e-05,
      "loss": 1.3191,
      "step": 67
    },
    {
      "epoch": 0.0011776064283456793,
      "grad_norm": 1.9624608755111694,
      "learning_rate": 5.095764961694922e-05,
      "loss": 1.5482,
      "step": 68
    },
    {
      "epoch": 0.0011949241699389982,
      "grad_norm": 1.7577117681503296,
      "learning_rate": 4.810304262187852e-05,
      "loss": 1.5874,
      "step": 69
    },
    {
      "epoch": 0.001212241911532317,
      "grad_norm": 1.5105639696121216,
      "learning_rate": 4.530518418775733e-05,
      "loss": 1.5703,
      "step": 70
    },
    {
      "epoch": 0.001229559653125636,
      "grad_norm": 1.529349446296692,
      "learning_rate": 4.256713373170564e-05,
      "loss": 1.1905,
      "step": 71
    },
    {
      "epoch": 0.0012468773947189548,
      "grad_norm": 1.8508775234222412,
      "learning_rate": 3.9891885271697496e-05,
      "loss": 1.8208,
      "step": 72
    },
    {
      "epoch": 0.0012641951363122736,
      "grad_norm": 1.593199610710144,
      "learning_rate": 3.7282364152646297e-05,
      "loss": 1.3999,
      "step": 73
    },
    {
      "epoch": 0.0012815128779055922,
      "grad_norm": 1.6738864183425903,
      "learning_rate": 3.4741423847583134e-05,
      "loss": 1.4956,
      "step": 74
    },
    {
      "epoch": 0.001298830619498911,
      "grad_norm": 1.758865475654602,
      "learning_rate": 3.227184283742591e-05,
      "loss": 1.631,
      "step": 75
    },
    {
      "epoch": 0.001298830619498911,
      "eval_loss": 1.6363804340362549,
      "eval_runtime": 4921.7849,
      "eval_samples_per_second": 4.94,
      "eval_steps_per_second": 2.47,
      "step": 75
    },
    {
      "epoch": 0.00131614836109223,
      "grad_norm": 2.4752883911132812,
      "learning_rate": 2.9876321572751144e-05,
      "loss": 2.0203,
      "step": 76
    },
    {
      "epoch": 0.0013334661026855488,
      "grad_norm": 2.1483538150787354,
      "learning_rate": 2.7557479520891104e-05,
      "loss": 1.5806,
      "step": 77
    },
    {
      "epoch": 0.0013507838442788677,
      "grad_norm": 1.9293116331100464,
      "learning_rate": 2.5317852301584643e-05,
      "loss": 2.0804,
      "step": 78
    },
    {
      "epoch": 0.0013681015858721863,
      "grad_norm": 1.519399881362915,
      "learning_rate": 2.315988891431412e-05,
      "loss": 1.4393,
      "step": 79
    },
    {
      "epoch": 0.0013854193274655052,
      "grad_norm": 1.9969561100006104,
      "learning_rate": 2.1085949060360654e-05,
      "loss": 1.5912,
      "step": 80
    },
    {
      "epoch": 0.001402737069058824,
      "grad_norm": 1.9334006309509277,
      "learning_rate": 1.9098300562505266e-05,
      "loss": 1.5095,
      "step": 81
    },
    {
      "epoch": 0.0014200548106521429,
      "grad_norm": 1.8917192220687866,
      "learning_rate": 1.7199116885197995e-05,
      "loss": 1.8712,
      "step": 82
    },
    {
      "epoch": 0.0014373725522454617,
      "grad_norm": 1.6807430982589722,
      "learning_rate": 1.5390474757906446e-05,
      "loss": 1.6938,
      "step": 83
    },
    {
      "epoch": 0.0014546902938387806,
      "grad_norm": 1.641627550125122,
      "learning_rate": 1.3674351904242611e-05,
      "loss": 1.5532,
      "step": 84
    },
    {
      "epoch": 0.0014720080354320992,
      "grad_norm": 2.6051013469696045,
      "learning_rate": 1.2052624879351104e-05,
      "loss": 1.8125,
      "step": 85
    },
    {
      "epoch": 0.001489325777025418,
      "grad_norm": 1.6829463243484497,
      "learning_rate": 1.0527067017923654e-05,
      "loss": 1.5045,
      "step": 86
    },
    {
      "epoch": 0.001506643518618737,
      "grad_norm": 1.6704628467559814,
      "learning_rate": 9.09934649508375e-06,
      "loss": 1.5594,
      "step": 87
    },
    {
      "epoch": 0.0015239612602120558,
      "grad_norm": 2.3541147708892822,
      "learning_rate": 7.771024502261526e-06,
      "loss": 2.2284,
      "step": 88
    },
    {
      "epoch": 0.0015412790018053746,
      "grad_norm": 2.744718074798584,
      "learning_rate": 6.543553540053926e-06,
      "loss": 1.2474,
      "step": 89
    },
    {
      "epoch": 0.0015585967433986933,
      "grad_norm": 2.3282785415649414,
      "learning_rate": 5.418275829936537e-06,
      "loss": 1.9081,
      "step": 90
    },
    {
      "epoch": 0.0015759144849920121,
      "grad_norm": 1.7221317291259766,
      "learning_rate": 4.3964218465642355e-06,
      "loss": 1.5467,
      "step": 91
    },
    {
      "epoch": 0.001593232226585331,
      "grad_norm": 2.2455320358276367,
      "learning_rate": 3.4791089722651436e-06,
      "loss": 1.8693,
      "step": 92
    },
    {
      "epoch": 0.0016105499681786498,
      "grad_norm": 1.896304726600647,
      "learning_rate": 2.667340275199426e-06,
      "loss": 1.3848,
      "step": 93
    },
    {
      "epoch": 0.0016278677097719687,
      "grad_norm": 2.8605098724365234,
      "learning_rate": 1.9620034125190644e-06,
      "loss": 1.6698,
      "step": 94
    },
    {
      "epoch": 0.0016451854513652876,
      "grad_norm": 1.6910480260849,
      "learning_rate": 1.3638696597277679e-06,
      "loss": 1.4249,
      "step": 95
    },
    {
      "epoch": 0.0016625031929586062,
      "grad_norm": 3.1670644283294678,
      "learning_rate": 8.735930673024806e-07,
      "loss": 1.7864,
      "step": 96
    },
    {
      "epoch": 0.001679820934551925,
      "grad_norm": 2.209742784500122,
      "learning_rate": 4.917097454988584e-07,
      "loss": 1.7708,
      "step": 97
    },
    {
      "epoch": 0.001697138676145244,
      "grad_norm": 2.015029191970825,
      "learning_rate": 2.1863727812254653e-07,
      "loss": 1.471,
      "step": 98
    },
    {
      "epoch": 0.0017144564177385628,
      "grad_norm": 2.405651569366455,
      "learning_rate": 5.467426590739511e-08,
      "loss": 1.6094,
      "step": 99
    },
    {
      "epoch": 0.0017317741593318816,
      "grad_norm": 4.6894025802612305,
      "learning_rate": 0.0,
      "loss": 2.0444,
      "step": 100
    },
    {
      "epoch": 0.0017317741593318816,
      "eval_loss": 1.62236487865448,
      "eval_runtime": 4922.5195,
      "eval_samples_per_second": 4.939,
      "eval_steps_per_second": 2.47,
      "step": 100
    }
  ],
  "logging_steps": 1,
  "max_steps": 100,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 25,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 1,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 5.51427898343424e+16,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
}