|
{ |
|
"best_metric": 11.5, |
|
"best_model_checkpoint": "miner_id_24/checkpoint-50", |
|
"epoch": 0.06754474839581223, |
|
"eval_steps": 50, |
|
"global_step": 200, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.00033772374197906115, |
|
"grad_norm": 6.999200559221208e-05, |
|
"learning_rate": 5e-06, |
|
"loss": 46.0, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.00033772374197906115, |
|
"eval_loss": 11.5, |
|
"eval_runtime": 38.0491, |
|
"eval_samples_per_second": 131.067, |
|
"eval_steps_per_second": 65.547, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.0006754474839581223, |
|
"grad_norm": 5.612642053165473e-05, |
|
"learning_rate": 1e-05, |
|
"loss": 46.0, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.0010131712259371835, |
|
"grad_norm": 0.00011171244841534644, |
|
"learning_rate": 1.5e-05, |
|
"loss": 46.0, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.0013508949679162446, |
|
"grad_norm": 6.527295772684738e-05, |
|
"learning_rate": 2e-05, |
|
"loss": 46.0, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.0016886187098953055, |
|
"grad_norm": 0.00010032207501353696, |
|
"learning_rate": 2.5e-05, |
|
"loss": 46.0, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.002026342451874367, |
|
"grad_norm": 6.297726213233545e-05, |
|
"learning_rate": 3e-05, |
|
"loss": 46.0, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.002364066193853428, |
|
"grad_norm": 0.00010159278463106602, |
|
"learning_rate": 3.5e-05, |
|
"loss": 46.0, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.002701789935832489, |
|
"grad_norm": 6.908881186973304e-05, |
|
"learning_rate": 4e-05, |
|
"loss": 46.0, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.00303951367781155, |
|
"grad_norm": 0.00010548412683419883, |
|
"learning_rate": 4.5e-05, |
|
"loss": 46.0, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.003377237419790611, |
|
"grad_norm": 0.00010045844101114199, |
|
"learning_rate": 5e-05, |
|
"loss": 46.0, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.0037149611617696724, |
|
"grad_norm": 0.00010918975749518722, |
|
"learning_rate": 5.500000000000001e-05, |
|
"loss": 46.0, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.004052684903748734, |
|
"grad_norm": 0.00010634872887749225, |
|
"learning_rate": 6e-05, |
|
"loss": 46.0, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.004390408645727794, |
|
"grad_norm": 6.084080450818874e-05, |
|
"learning_rate": 6.500000000000001e-05, |
|
"loss": 46.0, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.004728132387706856, |
|
"grad_norm": 8.961093408288434e-05, |
|
"learning_rate": 7e-05, |
|
"loss": 46.0, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.005065856129685917, |
|
"grad_norm": 7.892983558122069e-05, |
|
"learning_rate": 7.500000000000001e-05, |
|
"loss": 46.0, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.005403579871664978, |
|
"grad_norm": 7.510028081014752e-05, |
|
"learning_rate": 8e-05, |
|
"loss": 46.0, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.005741303613644039, |
|
"grad_norm": 0.00010316520638298243, |
|
"learning_rate": 8.5e-05, |
|
"loss": 46.0, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.0060790273556231, |
|
"grad_norm": 0.00015889042697381228, |
|
"learning_rate": 9e-05, |
|
"loss": 46.0, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.006416751097602162, |
|
"grad_norm": 9.389503247803077e-05, |
|
"learning_rate": 9.5e-05, |
|
"loss": 46.0, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.006754474839581222, |
|
"grad_norm": 0.00012303520634304732, |
|
"learning_rate": 0.0001, |
|
"loss": 46.0, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.0070921985815602835, |
|
"grad_norm": 0.00016235452494584024, |
|
"learning_rate": 9.999238475781957e-05, |
|
"loss": 46.0, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.007429922323539345, |
|
"grad_norm": 0.00012188320397399366, |
|
"learning_rate": 9.99695413509548e-05, |
|
"loss": 46.0, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.007767646065518406, |
|
"grad_norm": 0.00015171366976574063, |
|
"learning_rate": 9.99314767377287e-05, |
|
"loss": 46.0, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.008105369807497468, |
|
"grad_norm": 9.316073555964977e-05, |
|
"learning_rate": 9.987820251299122e-05, |
|
"loss": 46.0, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.008443093549476529, |
|
"grad_norm": 0.00014290536637417972, |
|
"learning_rate": 9.980973490458728e-05, |
|
"loss": 46.0, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.008780817291455589, |
|
"grad_norm": 0.00015599915059283376, |
|
"learning_rate": 9.972609476841367e-05, |
|
"loss": 46.0, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.00911854103343465, |
|
"grad_norm": 0.00015680905198678374, |
|
"learning_rate": 9.962730758206611e-05, |
|
"loss": 46.0, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.009456264775413711, |
|
"grad_norm": 0.00016664821305312216, |
|
"learning_rate": 9.951340343707852e-05, |
|
"loss": 46.0, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.009793988517392773, |
|
"grad_norm": 0.0001463688095100224, |
|
"learning_rate": 9.938441702975689e-05, |
|
"loss": 46.0, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.010131712259371834, |
|
"grad_norm": 0.0001783478946890682, |
|
"learning_rate": 9.924038765061042e-05, |
|
"loss": 46.0, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.010469436001350895, |
|
"grad_norm": 0.0001655457599554211, |
|
"learning_rate": 9.908135917238321e-05, |
|
"loss": 46.0, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.010807159743329957, |
|
"grad_norm": 0.00018240793724544346, |
|
"learning_rate": 9.890738003669029e-05, |
|
"loss": 46.0, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.011144883485309016, |
|
"grad_norm": 0.00016705192683730274, |
|
"learning_rate": 9.871850323926177e-05, |
|
"loss": 46.0, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.011482607227288078, |
|
"grad_norm": 0.0002768947451841086, |
|
"learning_rate": 9.851478631379982e-05, |
|
"loss": 46.0, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.01182033096926714, |
|
"grad_norm": 0.00024317330098710954, |
|
"learning_rate": 9.829629131445342e-05, |
|
"loss": 46.0, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.0121580547112462, |
|
"grad_norm": 0.00013411836698651314, |
|
"learning_rate": 9.806308479691595e-05, |
|
"loss": 46.0, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.012495778453225262, |
|
"grad_norm": 0.00028809363720938563, |
|
"learning_rate": 9.781523779815179e-05, |
|
"loss": 46.0, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.012833502195204323, |
|
"grad_norm": 0.00029307673685252666, |
|
"learning_rate": 9.755282581475769e-05, |
|
"loss": 46.0, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.013171225937183385, |
|
"grad_norm": 0.0002233909472124651, |
|
"learning_rate": 9.727592877996585e-05, |
|
"loss": 46.0, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.013508949679162444, |
|
"grad_norm": 0.0002125003666151315, |
|
"learning_rate": 9.698463103929542e-05, |
|
"loss": 46.0, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.013846673421141506, |
|
"grad_norm": 0.00015506868658121675, |
|
"learning_rate": 9.667902132486009e-05, |
|
"loss": 46.0, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.014184397163120567, |
|
"grad_norm": 0.00023671003873459995, |
|
"learning_rate": 9.635919272833938e-05, |
|
"loss": 46.0, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.014522120905099628, |
|
"grad_norm": 0.00023820622300263494, |
|
"learning_rate": 9.602524267262203e-05, |
|
"loss": 46.0, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.01485984464707869, |
|
"grad_norm": 0.00013906476669944823, |
|
"learning_rate": 9.567727288213005e-05, |
|
"loss": 46.0, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.015197568389057751, |
|
"grad_norm": 0.00024479327839799225, |
|
"learning_rate": 9.53153893518325e-05, |
|
"loss": 46.0, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.015535292131036813, |
|
"grad_norm": 0.0002795088803395629, |
|
"learning_rate": 9.493970231495835e-05, |
|
"loss": 46.0, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.015873015873015872, |
|
"grad_norm": 0.00026209637871943414, |
|
"learning_rate": 9.45503262094184e-05, |
|
"loss": 46.0, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.016210739614994935, |
|
"grad_norm": 0.00022261285630520433, |
|
"learning_rate": 9.414737964294636e-05, |
|
"loss": 46.0, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.016548463356973995, |
|
"grad_norm": 0.0002231644029961899, |
|
"learning_rate": 9.373098535696979e-05, |
|
"loss": 46.0, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.016886187098953058, |
|
"grad_norm": 0.00023359505576081574, |
|
"learning_rate": 9.330127018922194e-05, |
|
"loss": 46.0, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.016886187098953058, |
|
"eval_loss": 11.5, |
|
"eval_runtime": 38.1051, |
|
"eval_samples_per_second": 130.875, |
|
"eval_steps_per_second": 65.451, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.017223910840932118, |
|
"grad_norm": 0.0004388255765661597, |
|
"learning_rate": 9.285836503510562e-05, |
|
"loss": 46.0, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.017561634582911177, |
|
"grad_norm": 0.00034231445170007646, |
|
"learning_rate": 9.24024048078213e-05, |
|
"loss": 46.0, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.01789935832489024, |
|
"grad_norm": 0.00040279701352119446, |
|
"learning_rate": 9.193352839727121e-05, |
|
"loss": 46.0, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.0182370820668693, |
|
"grad_norm": 0.00048692570999264717, |
|
"learning_rate": 9.145187862775209e-05, |
|
"loss": 46.0, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.018574805808848363, |
|
"grad_norm": 0.0004796187567990273, |
|
"learning_rate": 9.09576022144496e-05, |
|
"loss": 46.0, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.018912529550827423, |
|
"grad_norm": 0.0004256981483194977, |
|
"learning_rate": 9.045084971874738e-05, |
|
"loss": 46.0, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.019250253292806486, |
|
"grad_norm": 0.0003702337562572211, |
|
"learning_rate": 8.993177550236464e-05, |
|
"loss": 46.0, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.019587977034785545, |
|
"grad_norm": 0.00028856948483735323, |
|
"learning_rate": 8.940053768033609e-05, |
|
"loss": 46.0, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.019925700776764605, |
|
"grad_norm": 0.00043020248995162547, |
|
"learning_rate": 8.885729807284856e-05, |
|
"loss": 46.0, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.020263424518743668, |
|
"grad_norm": 0.00032656811526976526, |
|
"learning_rate": 8.83022221559489e-05, |
|
"loss": 46.0, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.020601148260722728, |
|
"grad_norm": 0.00044006985262967646, |
|
"learning_rate": 8.773547901113862e-05, |
|
"loss": 46.0, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.02093887200270179, |
|
"grad_norm": 0.0005264987121336162, |
|
"learning_rate": 8.715724127386972e-05, |
|
"loss": 46.0, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.02127659574468085, |
|
"grad_norm": 0.0005930354818701744, |
|
"learning_rate": 8.656768508095853e-05, |
|
"loss": 46.0, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.021614319486659914, |
|
"grad_norm": 0.0004631902265828103, |
|
"learning_rate": 8.596699001693255e-05, |
|
"loss": 46.0, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.021952043228638973, |
|
"grad_norm": 0.0004996751667931676, |
|
"learning_rate": 8.535533905932738e-05, |
|
"loss": 46.0, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.022289766970618033, |
|
"grad_norm": 0.0005587644409388304, |
|
"learning_rate": 8.473291852294987e-05, |
|
"loss": 46.0, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.022627490712597096, |
|
"grad_norm": 0.0005852883914485574, |
|
"learning_rate": 8.409991800312493e-05, |
|
"loss": 46.0, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.022965214454576156, |
|
"grad_norm": 0.0007204718422144651, |
|
"learning_rate": 8.345653031794292e-05, |
|
"loss": 46.0, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.02330293819655522, |
|
"grad_norm": 0.0005952971987426281, |
|
"learning_rate": 8.280295144952536e-05, |
|
"loss": 46.0, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.02364066193853428, |
|
"grad_norm": 0.0005659193848259747, |
|
"learning_rate": 8.213938048432697e-05, |
|
"loss": 46.0, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.02397838568051334, |
|
"grad_norm": 0.0004821221227757633, |
|
"learning_rate": 8.146601955249188e-05, |
|
"loss": 46.0, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.0243161094224924, |
|
"grad_norm": 0.00043129047844558954, |
|
"learning_rate": 8.07830737662829e-05, |
|
"loss": 46.0, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.02465383316447146, |
|
"grad_norm": 0.0007432058919221163, |
|
"learning_rate": 8.009075115760243e-05, |
|
"loss": 46.0, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.024991556906450524, |
|
"grad_norm": 0.0005778361228294671, |
|
"learning_rate": 7.938926261462366e-05, |
|
"loss": 46.0, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.025329280648429583, |
|
"grad_norm": 0.0006119939498603344, |
|
"learning_rate": 7.86788218175523e-05, |
|
"loss": 46.0, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.025667004390408647, |
|
"grad_norm": 0.0006990509573370218, |
|
"learning_rate": 7.795964517353735e-05, |
|
"loss": 46.0, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.026004728132387706, |
|
"grad_norm": 0.000691780005581677, |
|
"learning_rate": 7.723195175075136e-05, |
|
"loss": 46.0, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.02634245187436677, |
|
"grad_norm": 0.0007424818468280137, |
|
"learning_rate": 7.649596321166024e-05, |
|
"loss": 46.0, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.02668017561634583, |
|
"grad_norm": 0.0009253322496078908, |
|
"learning_rate": 7.575190374550272e-05, |
|
"loss": 46.0, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.02701789935832489, |
|
"grad_norm": 0.0006558927707374096, |
|
"learning_rate": 7.500000000000001e-05, |
|
"loss": 46.0, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.02735562310030395, |
|
"grad_norm": 0.000801301037427038, |
|
"learning_rate": 7.424048101231686e-05, |
|
"loss": 46.0, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.02769334684228301, |
|
"grad_norm": 0.0007340286974795163, |
|
"learning_rate": 7.347357813929454e-05, |
|
"loss": 46.0, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.028031070584262074, |
|
"grad_norm": 0.0005543944425880909, |
|
"learning_rate": 7.269952498697734e-05, |
|
"loss": 46.0, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.028368794326241134, |
|
"grad_norm": 0.0004185195139143616, |
|
"learning_rate": 7.191855733945387e-05, |
|
"loss": 46.0, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.028706518068220197, |
|
"grad_norm": 0.0008723873761482537, |
|
"learning_rate": 7.113091308703498e-05, |
|
"loss": 46.0, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.029044241810199257, |
|
"grad_norm": 0.0006252967286854982, |
|
"learning_rate": 7.033683215379002e-05, |
|
"loss": 46.0, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.029381965552178316, |
|
"grad_norm": 0.0008132215589284897, |
|
"learning_rate": 6.953655642446368e-05, |
|
"loss": 46.0, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.02971968929415738, |
|
"grad_norm": 0.0006520528695546091, |
|
"learning_rate": 6.873032967079561e-05, |
|
"loss": 46.0, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.03005741303613644, |
|
"grad_norm": 0.0006046611815690994, |
|
"learning_rate": 6.7918397477265e-05, |
|
"loss": 46.0, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.030395136778115502, |
|
"grad_norm": 0.0007351114763878286, |
|
"learning_rate": 6.710100716628344e-05, |
|
"loss": 46.0, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.030732860520094562, |
|
"grad_norm": 0.0005443833651952446, |
|
"learning_rate": 6.627840772285784e-05, |
|
"loss": 46.0, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.031070584262073625, |
|
"grad_norm": 0.0007517460617236793, |
|
"learning_rate": 6.545084971874738e-05, |
|
"loss": 46.0, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.031408308004052685, |
|
"grad_norm": 0.0006298055523075163, |
|
"learning_rate": 6.461858523613684e-05, |
|
"loss": 46.0, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.031746031746031744, |
|
"grad_norm": 0.0006449134671129286, |
|
"learning_rate": 6.378186779084995e-05, |
|
"loss": 46.0, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.032083755488010804, |
|
"grad_norm": 0.0005613227258436382, |
|
"learning_rate": 6.294095225512603e-05, |
|
"loss": 46.0, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.03242147922998987, |
|
"grad_norm": 0.0006533685373142362, |
|
"learning_rate": 6.209609477998338e-05, |
|
"loss": 46.0, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.03275920297196893, |
|
"grad_norm": 0.000610053539276123, |
|
"learning_rate": 6.124755271719325e-05, |
|
"loss": 46.0, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.03309692671394799, |
|
"grad_norm": 0.0006236035260371864, |
|
"learning_rate": 6.0395584540887963e-05, |
|
"loss": 46.0, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.03343465045592705, |
|
"grad_norm": 0.0006816457607783377, |
|
"learning_rate": 5.9540449768827246e-05, |
|
"loss": 46.0, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.033772374197906116, |
|
"grad_norm": 0.0008262648480013013, |
|
"learning_rate": 5.868240888334653e-05, |
|
"loss": 46.0, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.033772374197906116, |
|
"eval_loss": 11.5, |
|
"eval_runtime": 38.395, |
|
"eval_samples_per_second": 129.887, |
|
"eval_steps_per_second": 64.956, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.034110097939885176, |
|
"grad_norm": 0.0007305089384317398, |
|
"learning_rate": 5.782172325201155e-05, |
|
"loss": 46.0, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.034447821681864235, |
|
"grad_norm": 0.0010143495164811611, |
|
"learning_rate": 5.695865504800327e-05, |
|
"loss": 46.0, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.034785545423843295, |
|
"grad_norm": 0.000756265944801271, |
|
"learning_rate": 5.6093467170257374e-05, |
|
"loss": 46.0, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.035123269165822354, |
|
"grad_norm": 0.0008156715193763375, |
|
"learning_rate": 5.522642316338268e-05, |
|
"loss": 46.0, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.03546099290780142, |
|
"grad_norm": 0.0009573604329489172, |
|
"learning_rate": 5.435778713738292e-05, |
|
"loss": 46.0, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.03579871664978048, |
|
"grad_norm": 0.0008011315367184579, |
|
"learning_rate": 5.348782368720626e-05, |
|
"loss": 46.0, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.03613644039175954, |
|
"grad_norm": 0.0010019487235695124, |
|
"learning_rate": 5.26167978121472e-05, |
|
"loss": 46.0, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.0364741641337386, |
|
"grad_norm": 0.0008329348056577146, |
|
"learning_rate": 5.174497483512506e-05, |
|
"loss": 46.0, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.03681188787571766, |
|
"grad_norm": 0.0009144492214545608, |
|
"learning_rate": 5.0872620321864185e-05, |
|
"loss": 46.0, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.037149611617696726, |
|
"grad_norm": 0.0006390439230017364, |
|
"learning_rate": 5e-05, |
|
"loss": 46.0, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.037487335359675786, |
|
"grad_norm": 0.001059593167155981, |
|
"learning_rate": 4.912737967813583e-05, |
|
"loss": 46.0, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.037825059101654845, |
|
"grad_norm": 0.0006486097117885947, |
|
"learning_rate": 4.825502516487497e-05, |
|
"loss": 46.0, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.038162782843633905, |
|
"grad_norm": 0.000764590164180845, |
|
"learning_rate": 4.738320218785281e-05, |
|
"loss": 46.0, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.03850050658561297, |
|
"grad_norm": 0.0013067929539829493, |
|
"learning_rate": 4.6512176312793736e-05, |
|
"loss": 46.0, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.03883823032759203, |
|
"grad_norm": 0.0011828240239992738, |
|
"learning_rate": 4.564221286261709e-05, |
|
"loss": 46.0, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.03917595406957109, |
|
"grad_norm": 0.0008867242140695453, |
|
"learning_rate": 4.477357683661734e-05, |
|
"loss": 46.0, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.03951367781155015, |
|
"grad_norm": 0.000849198317155242, |
|
"learning_rate": 4.390653282974264e-05, |
|
"loss": 46.0, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.03985140155352921, |
|
"grad_norm": 0.0011799026979133487, |
|
"learning_rate": 4.3041344951996746e-05, |
|
"loss": 46.0, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.04018912529550828, |
|
"grad_norm": 0.0015928293578326702, |
|
"learning_rate": 4.2178276747988446e-05, |
|
"loss": 46.0, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.040526849037487336, |
|
"grad_norm": 0.0006499377777799964, |
|
"learning_rate": 4.131759111665349e-05, |
|
"loss": 46.0, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.040864572779466396, |
|
"grad_norm": 0.0010604766430333257, |
|
"learning_rate": 4.045955023117276e-05, |
|
"loss": 46.0, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.041202296521445456, |
|
"grad_norm": 0.0006666944245807827, |
|
"learning_rate": 3.960441545911204e-05, |
|
"loss": 46.0, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.04154002026342452, |
|
"grad_norm": 0.001197998528368771, |
|
"learning_rate": 3.875244728280676e-05, |
|
"loss": 46.0, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.04187774400540358, |
|
"grad_norm": 0.0011294663418084383, |
|
"learning_rate": 3.790390522001662e-05, |
|
"loss": 46.0, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.04221546774738264, |
|
"grad_norm": 0.0009281404782086611, |
|
"learning_rate": 3.705904774487396e-05, |
|
"loss": 46.0, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.0425531914893617, |
|
"grad_norm": 0.0008012927719391882, |
|
"learning_rate": 3.6218132209150045e-05, |
|
"loss": 46.0, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.04289091523134076, |
|
"grad_norm": 0.0012234264286234975, |
|
"learning_rate": 3.5381414763863166e-05, |
|
"loss": 46.0, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.04322863897331983, |
|
"grad_norm": 0.0011153658851981163, |
|
"learning_rate": 3.4549150281252636e-05, |
|
"loss": 46.0, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.04356636271529889, |
|
"grad_norm": 0.0008695648284628987, |
|
"learning_rate": 3.372159227714218e-05, |
|
"loss": 46.0, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.04390408645727795, |
|
"grad_norm": 0.0010590883903205395, |
|
"learning_rate": 3.289899283371657e-05, |
|
"loss": 46.0, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.044241810199257006, |
|
"grad_norm": 0.0011277355952188373, |
|
"learning_rate": 3.2081602522734986e-05, |
|
"loss": 46.0, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.044579533941236066, |
|
"grad_norm": 0.0008733632857911289, |
|
"learning_rate": 3.12696703292044e-05, |
|
"loss": 46.0, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.04491725768321513, |
|
"grad_norm": 0.0009897883282974362, |
|
"learning_rate": 3.046344357553632e-05, |
|
"loss": 46.0, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.04525498142519419, |
|
"grad_norm": 0.0009033156675286591, |
|
"learning_rate": 2.9663167846209998e-05, |
|
"loss": 46.0, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.04559270516717325, |
|
"grad_norm": 0.0009513600380159914, |
|
"learning_rate": 2.886908691296504e-05, |
|
"loss": 46.0, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.04593042890915231, |
|
"grad_norm": 0.0009552270639687777, |
|
"learning_rate": 2.8081442660546125e-05, |
|
"loss": 46.0, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.04626815265113138, |
|
"grad_norm": 0.0007903517107479274, |
|
"learning_rate": 2.7300475013022663e-05, |
|
"loss": 46.0, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.04660587639311044, |
|
"grad_norm": 0.001167718437500298, |
|
"learning_rate": 2.6526421860705473e-05, |
|
"loss": 46.0, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.0469436001350895, |
|
"grad_norm": 0.0008934563957154751, |
|
"learning_rate": 2.575951898768315e-05, |
|
"loss": 46.0, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.04728132387706856, |
|
"grad_norm": 0.0007390851387754083, |
|
"learning_rate": 2.500000000000001e-05, |
|
"loss": 46.0, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.047619047619047616, |
|
"grad_norm": 0.001171390525996685, |
|
"learning_rate": 2.4248096254497288e-05, |
|
"loss": 46.0, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.04795677136102668, |
|
"grad_norm": 0.0009869023924693465, |
|
"learning_rate": 2.350403678833976e-05, |
|
"loss": 46.0, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.04829449510300574, |
|
"grad_norm": 0.00077177828643471, |
|
"learning_rate": 2.2768048249248648e-05, |
|
"loss": 46.0, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.0486322188449848, |
|
"grad_norm": 0.0006903486209921539, |
|
"learning_rate": 2.2040354826462668e-05, |
|
"loss": 46.0, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.04896994258696386, |
|
"grad_norm": 0.0008814368047751486, |
|
"learning_rate": 2.132117818244771e-05, |
|
"loss": 46.0, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.04930766632894292, |
|
"grad_norm": 0.001011370331980288, |
|
"learning_rate": 2.061073738537635e-05, |
|
"loss": 46.0, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.04964539007092199, |
|
"grad_norm": 0.0010025115916505456, |
|
"learning_rate": 1.9909248842397584e-05, |
|
"loss": 46.0, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.04998311381290105, |
|
"grad_norm": 0.0007225372246466577, |
|
"learning_rate": 1.9216926233717085e-05, |
|
"loss": 46.0, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.05032083755488011, |
|
"grad_norm": 0.0006715228664688766, |
|
"learning_rate": 1.8533980447508137e-05, |
|
"loss": 46.0, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.05065856129685917, |
|
"grad_norm": 0.0009384732111357152, |
|
"learning_rate": 1.7860619515673033e-05, |
|
"loss": 46.0, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.05065856129685917, |
|
"eval_loss": 11.5, |
|
"eval_runtime": 38.1688, |
|
"eval_samples_per_second": 130.656, |
|
"eval_steps_per_second": 65.341, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.050996285038838234, |
|
"grad_norm": 0.0008989636553451419, |
|
"learning_rate": 1.7197048550474643e-05, |
|
"loss": 46.0, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.05133400878081729, |
|
"grad_norm": 0.0011067570885643363, |
|
"learning_rate": 1.6543469682057106e-05, |
|
"loss": 46.0, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.05167173252279635, |
|
"grad_norm": 0.0007851620321162045, |
|
"learning_rate": 1.5900081996875083e-05, |
|
"loss": 46.0, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.05200945626477541, |
|
"grad_norm": 0.001054355758242309, |
|
"learning_rate": 1.526708147705013e-05, |
|
"loss": 46.0, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.05234718000675447, |
|
"grad_norm": 0.0011224844492971897, |
|
"learning_rate": 1.4644660940672627e-05, |
|
"loss": 46.0, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.05268490374873354, |
|
"grad_norm": 0.0006852549267932773, |
|
"learning_rate": 1.4033009983067452e-05, |
|
"loss": 46.0, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.0530226274907126, |
|
"grad_norm": 0.0009039295255206525, |
|
"learning_rate": 1.3432314919041478e-05, |
|
"loss": 46.0, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.05336035123269166, |
|
"grad_norm": 0.000982744968496263, |
|
"learning_rate": 1.2842758726130283e-05, |
|
"loss": 46.0, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.05369807497467072, |
|
"grad_norm": 0.0011061737313866615, |
|
"learning_rate": 1.22645209888614e-05, |
|
"loss": 46.0, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.05403579871664978, |
|
"grad_norm": 0.0010985388653352857, |
|
"learning_rate": 1.1697777844051105e-05, |
|
"loss": 46.0, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.054373522458628844, |
|
"grad_norm": 0.0011543540749698877, |
|
"learning_rate": 1.1142701927151456e-05, |
|
"loss": 46.0, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.0547112462006079, |
|
"grad_norm": 0.0010955431498587132, |
|
"learning_rate": 1.0599462319663905e-05, |
|
"loss": 46.0, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.05504896994258696, |
|
"grad_norm": 0.0009253566386178136, |
|
"learning_rate": 1.006822449763537e-05, |
|
"loss": 46.0, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.05538669368456602, |
|
"grad_norm": 0.0011247299844399095, |
|
"learning_rate": 9.549150281252633e-06, |
|
"loss": 46.0, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.05572441742654509, |
|
"grad_norm": 0.0009286398999392986, |
|
"learning_rate": 9.042397785550405e-06, |
|
"loss": 46.0, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.05606214116852415, |
|
"grad_norm": 0.0014445815468207002, |
|
"learning_rate": 8.548121372247918e-06, |
|
"loss": 46.0, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.05639986491050321, |
|
"grad_norm": 0.0010441597551107407, |
|
"learning_rate": 8.066471602728803e-06, |
|
"loss": 46.0, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.05673758865248227, |
|
"grad_norm": 0.001206619548611343, |
|
"learning_rate": 7.597595192178702e-06, |
|
"loss": 46.0, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.05707531239446133, |
|
"grad_norm": 0.0011592115042731166, |
|
"learning_rate": 7.1416349648943894e-06, |
|
"loss": 46.0, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.057413036136440394, |
|
"grad_norm": 0.0009685884579084814, |
|
"learning_rate": 6.698729810778065e-06, |
|
"loss": 46.0, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.057750759878419454, |
|
"grad_norm": 0.0010882606729865074, |
|
"learning_rate": 6.269014643030213e-06, |
|
"loss": 46.0, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.058088483620398514, |
|
"grad_norm": 0.0008229081286117435, |
|
"learning_rate": 5.852620357053651e-06, |
|
"loss": 46.0, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.05842620736237757, |
|
"grad_norm": 0.0009584272629581392, |
|
"learning_rate": 5.449673790581611e-06, |
|
"loss": 46.0, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.05876393110435663, |
|
"grad_norm": 0.001024397206492722, |
|
"learning_rate": 5.060297685041659e-06, |
|
"loss": 46.0, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.0591016548463357, |
|
"grad_norm": 0.0010323076276108623, |
|
"learning_rate": 4.684610648167503e-06, |
|
"loss": 46.0, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.05943937858831476, |
|
"grad_norm": 0.0008894839556887746, |
|
"learning_rate": 4.322727117869951e-06, |
|
"loss": 46.0, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.05977710233029382, |
|
"grad_norm": 0.0008847026037983596, |
|
"learning_rate": 3.974757327377981e-06, |
|
"loss": 46.0, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.06011482607227288, |
|
"grad_norm": 0.000998921343125403, |
|
"learning_rate": 3.6408072716606346e-06, |
|
"loss": 46.0, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.060452549814251945, |
|
"grad_norm": 0.0008716392330825329, |
|
"learning_rate": 3.3209786751399187e-06, |
|
"loss": 46.0, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.060790273556231005, |
|
"grad_norm": 0.0013428525999188423, |
|
"learning_rate": 3.0153689607045845e-06, |
|
"loss": 46.0, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.061127997298210064, |
|
"grad_norm": 0.001158152474090457, |
|
"learning_rate": 2.724071220034158e-06, |
|
"loss": 46.0, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.061465721040189124, |
|
"grad_norm": 0.0012302974937483668, |
|
"learning_rate": 2.4471741852423237e-06, |
|
"loss": 46.0, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.06180344478216818, |
|
"grad_norm": 0.0008794470340944827, |
|
"learning_rate": 2.1847622018482283e-06, |
|
"loss": 46.0, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.06214116852414725, |
|
"grad_norm": 0.0013389174127951264, |
|
"learning_rate": 1.9369152030840556e-06, |
|
"loss": 46.0, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.06247889226612631, |
|
"grad_norm": 0.0008797872578725219, |
|
"learning_rate": 1.70370868554659e-06, |
|
"loss": 46.0, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.06281661600810537, |
|
"grad_norm": 0.0009669195278547704, |
|
"learning_rate": 1.4852136862001764e-06, |
|
"loss": 46.0, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.06315433975008443, |
|
"grad_norm": 0.0011810335563495755, |
|
"learning_rate": 1.2814967607382432e-06, |
|
"loss": 46.0, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.06349206349206349, |
|
"grad_norm": 0.0011306331725791097, |
|
"learning_rate": 1.0926199633097157e-06, |
|
"loss": 46.0, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.06382978723404255, |
|
"grad_norm": 0.000884878565557301, |
|
"learning_rate": 9.186408276168013e-07, |
|
"loss": 46.0, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 0.06416751097602161, |
|
"grad_norm": 0.0013046838575974107, |
|
"learning_rate": 7.596123493895991e-07, |
|
"loss": 46.0, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.06450523471800068, |
|
"grad_norm": 0.0008018773514777422, |
|
"learning_rate": 6.15582970243117e-07, |
|
"loss": 46.0, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 0.06484295845997974, |
|
"grad_norm": 0.000674662587698549, |
|
"learning_rate": 4.865965629214819e-07, |
|
"loss": 46.0, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.0651806822019588, |
|
"grad_norm": 0.000894166121724993, |
|
"learning_rate": 3.7269241793390085e-07, |
|
"loss": 46.0, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 0.06551840594393786, |
|
"grad_norm": 0.0007813581614755094, |
|
"learning_rate": 2.7390523158633554e-07, |
|
"loss": 46.0, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.06585612968591692, |
|
"grad_norm": 0.0007691160426475108, |
|
"learning_rate": 1.9026509541272275e-07, |
|
"loss": 46.0, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.06619385342789598, |
|
"grad_norm": 0.0009770386386662722, |
|
"learning_rate": 1.2179748700879012e-07, |
|
"loss": 46.0, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.06653157716987504, |
|
"grad_norm": 0.0008022322435863316, |
|
"learning_rate": 6.852326227130834e-08, |
|
"loss": 46.0, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 0.0668693009118541, |
|
"grad_norm": 0.0009312187903560698, |
|
"learning_rate": 3.04586490452119e-08, |
|
"loss": 46.0, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.06720702465383316, |
|
"grad_norm": 0.0011881066020578146, |
|
"learning_rate": 7.615242180436522e-09, |
|
"loss": 46.0, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 0.06754474839581223, |
|
"grad_norm": 0.001179297105409205, |
|
"learning_rate": 0.0, |
|
"loss": 46.0, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.06754474839581223, |
|
"eval_loss": 11.5, |
|
"eval_runtime": 38.3354, |
|
"eval_samples_per_second": 130.089, |
|
"eval_steps_per_second": 65.057, |
|
"step": 200 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 200, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 50, |
|
"stateful_callbacks": { |
|
"EarlyStoppingCallback": { |
|
"args": { |
|
"early_stopping_patience": 5, |
|
"early_stopping_threshold": 0.0 |
|
}, |
|
"attributes": { |
|
"early_stopping_patience_counter": 3 |
|
} |
|
}, |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 46026799841280.0, |
|
"train_batch_size": 8, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |