|
{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.4298328355080438,
  "eval_steps": 1000000,
  "global_step": 49215,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0043668885046027,
      "grad_norm": 1.499011516571045,
      "learning_rate": 9.99956331114954e-06,
      "loss": 3.0752,
      "step": 500
    },
    {
      "epoch": 0.0087337770092054,
      "grad_norm": 1.530051350593567,
      "learning_rate": 9.99912662229908e-06,
      "loss": 3.0792,
      "step": 1000
    },
    {
      "epoch": 0.013100665513808101,
      "grad_norm": 1.5715222358703613,
      "learning_rate": 9.99868993344862e-06,
      "loss": 3.0762,
      "step": 1500
    },
    {
      "epoch": 0.0174675540184108,
      "grad_norm": 1.519605040550232,
      "learning_rate": 9.99825324459816e-06,
      "loss": 3.0838,
      "step": 2000
    },
    {
      "epoch": 0.021834442523013503,
      "grad_norm": 1.5175423622131348,
      "learning_rate": 9.9978165557477e-06,
      "loss": 3.0795,
      "step": 2500
    },
    {
      "epoch": 0.026201331027616202,
      "grad_norm": 1.503777027130127,
      "learning_rate": 9.997379866897238e-06,
      "loss": 3.0768,
      "step": 3000
    },
    {
      "epoch": 0.030568219532218905,
      "grad_norm": 1.493525743484497,
      "learning_rate": 9.996943178046778e-06,
      "loss": 3.0818,
      "step": 3500
    },
    {
      "epoch": 0.0349351080368216,
      "grad_norm": 1.5032507181167603,
      "learning_rate": 9.996506489196318e-06,
      "loss": 3.0838,
      "step": 4000
    },
    {
      "epoch": 0.03930199654142431,
      "grad_norm": 1.5484732389450073,
      "learning_rate": 9.996069800345858e-06,
      "loss": 3.0798,
      "step": 4500
    },
    {
      "epoch": 0.043668885046027006,
      "grad_norm": 1.5356857776641846,
      "learning_rate": 9.995633111495399e-06,
      "loss": 3.08,
      "step": 5000
    },
    {
      "epoch": 0.048035773550629705,
      "grad_norm": 1.5570900440216064,
      "learning_rate": 9.995196422644939e-06,
      "loss": 3.0784,
      "step": 5500
    },
    {
      "epoch": 0.052402662055232405,
      "grad_norm": 1.5267707109451294,
      "learning_rate": 9.994759733794479e-06,
      "loss": 3.0775,
      "step": 6000
    },
    {
      "epoch": 0.056769550559835104,
      "grad_norm": 1.5429790019989014,
      "learning_rate": 9.994323044944017e-06,
      "loss": 3.0695,
      "step": 6500
    },
    {
      "epoch": 0.06113643906443781,
      "grad_norm": 1.8057048320770264,
      "learning_rate": 9.993886356093557e-06,
      "loss": 3.0802,
      "step": 7000
    },
    {
      "epoch": 0.06550332756904051,
      "grad_norm": 1.5101138353347778,
      "learning_rate": 9.993449667243097e-06,
      "loss": 3.0753,
      "step": 7500
    },
    {
      "epoch": 0.0698702160736432,
      "grad_norm": 1.5532760620117188,
      "learning_rate": 9.993012978392636e-06,
      "loss": 3.0767,
      "step": 8000
    },
    {
      "epoch": 0.07423710457824591,
      "grad_norm": 1.5693330764770508,
      "learning_rate": 9.992576289542176e-06,
      "loss": 3.0792,
      "step": 8500
    },
    {
      "epoch": 0.07860399308284861,
      "grad_norm": 1.5401012897491455,
      "learning_rate": 9.992139600691716e-06,
      "loss": 3.069,
      "step": 9000
    },
    {
      "epoch": 0.0829708815874513,
      "grad_norm": 1.5863893032073975,
      "learning_rate": 9.991702911841256e-06,
      "loss": 3.0701,
      "step": 9500
    },
    {
      "epoch": 0.08733777009205401,
      "grad_norm": 1.588409185409546,
      "learning_rate": 9.991266222990796e-06,
      "loss": 3.0735,
      "step": 10000
    },
    {
      "epoch": 0.0917046585966567,
      "grad_norm": 1.5264829397201538,
      "learning_rate": 9.990829534140335e-06,
      "loss": 3.0825,
      "step": 10500
    },
    {
      "epoch": 0.09607154710125941,
      "grad_norm": 1.5645649433135986,
      "learning_rate": 9.990392845289875e-06,
      "loss": 3.0802,
      "step": 11000
    },
    {
      "epoch": 0.10043843560586212,
      "grad_norm": 1.536049485206604,
      "learning_rate": 9.989956156439415e-06,
      "loss": 3.0739,
      "step": 11500
    },
    {
      "epoch": 0.10480532411046481,
      "grad_norm": 1.5308794975280762,
      "learning_rate": 9.989519467588953e-06,
      "loss": 3.0606,
      "step": 12000
    },
    {
      "epoch": 0.10917221261506752,
      "grad_norm": 1.5116829872131348,
      "learning_rate": 9.989082778738493e-06,
      "loss": 3.0734,
      "step": 12500
    },
    {
      "epoch": 0.11353910111967021,
      "grad_norm": 1.5693246126174927,
      "learning_rate": 9.988646089888033e-06,
      "loss": 3.0732,
      "step": 13000
    },
    {
      "epoch": 0.11790598962427291,
      "grad_norm": 1.524153709411621,
      "learning_rate": 9.988209401037573e-06,
      "loss": 3.0743,
      "step": 13500
    },
    {
      "epoch": 0.12227287812887562,
      "grad_norm": 1.5216927528381348,
      "learning_rate": 9.987772712187114e-06,
      "loss": 3.0668,
      "step": 14000
    },
    {
      "epoch": 0.1266397666334783,
      "grad_norm": 1.5442315340042114,
      "learning_rate": 9.987336023336654e-06,
      "loss": 3.0755,
      "step": 14500
    },
    {
      "epoch": 0.13100665513808102,
      "grad_norm": 1.555234670639038,
      "learning_rate": 9.986899334486194e-06,
      "loss": 3.0704,
      "step": 15000
    },
    {
      "epoch": 0.13537354364268372,
      "grad_norm": 1.5696550607681274,
      "learning_rate": 9.986462645635732e-06,
      "loss": 3.0757,
      "step": 15500
    },
    {
      "epoch": 0.1397404321472864,
      "grad_norm": 1.531101107597351,
      "learning_rate": 9.986025956785272e-06,
      "loss": 3.0655,
      "step": 16000
    },
    {
      "epoch": 0.1441073206518891,
      "grad_norm": 1.544026494026184,
      "learning_rate": 9.985589267934812e-06,
      "loss": 3.0747,
      "step": 16500
    },
    {
      "epoch": 0.14847420915649182,
      "grad_norm": 1.5147799253463745,
      "learning_rate": 9.985152579084352e-06,
      "loss": 3.0649,
      "step": 17000
    },
    {
      "epoch": 0.15284109766109452,
      "grad_norm": 1.5733822584152222,
      "learning_rate": 9.984715890233891e-06,
      "loss": 3.0663,
      "step": 17500
    },
    {
      "epoch": 0.15720798616569723,
      "grad_norm": 1.590242862701416,
      "learning_rate": 9.984279201383431e-06,
      "loss": 3.0663,
      "step": 18000
    },
    {
      "epoch": 0.1615748746702999,
      "grad_norm": 1.5203492641448975,
      "learning_rate": 9.983842512532971e-06,
      "loss": 3.0747,
      "step": 18500
    },
    {
      "epoch": 0.1659417631749026,
      "grad_norm": 1.5461294651031494,
      "learning_rate": 9.98340582368251e-06,
      "loss": 3.0689,
      "step": 19000
    },
    {
      "epoch": 0.17030865167950532,
      "grad_norm": 1.5351060628890991,
      "learning_rate": 9.98296913483205e-06,
      "loss": 3.0689,
      "step": 19500
    },
    {
      "epoch": 0.17467554018410802,
      "grad_norm": 1.5561175346374512,
      "learning_rate": 9.98253244598159e-06,
      "loss": 3.0756,
      "step": 20000
    },
    {
      "epoch": 0.17904242868871073,
      "grad_norm": 1.51571524143219,
      "learning_rate": 9.982095757131129e-06,
      "loss": 3.0644,
      "step": 20500
    },
    {
      "epoch": 0.1834093171933134,
      "grad_norm": 1.5357000827789307,
      "learning_rate": 9.981659068280668e-06,
      "loss": 3.0745,
      "step": 21000
    },
    {
      "epoch": 0.18777620569791612,
      "grad_norm": 1.5208464860916138,
      "learning_rate": 9.981222379430208e-06,
      "loss": 3.0661,
      "step": 21500
    },
    {
      "epoch": 0.19214309420251882,
      "grad_norm": 1.4807689189910889,
      "learning_rate": 9.980785690579748e-06,
      "loss": 3.0718,
      "step": 22000
    },
    {
      "epoch": 0.19650998270712153,
      "grad_norm": 1.5423370599746704,
      "learning_rate": 9.980349001729288e-06,
      "loss": 3.0672,
      "step": 22500
    },
    {
      "epoch": 0.20087687121172423,
      "grad_norm": 1.5475423336029053,
      "learning_rate": 9.97991231287883e-06,
      "loss": 3.0707,
      "step": 23000
    },
    {
      "epoch": 0.2052437597163269,
      "grad_norm": 1.5928481817245483,
      "learning_rate": 9.97947562402837e-06,
      "loss": 3.0704,
      "step": 23500
    },
    {
      "epoch": 0.20961064822092962,
      "grad_norm": 1.5184266567230225,
      "learning_rate": 9.979038935177907e-06,
      "loss": 3.0668,
      "step": 24000
    },
    {
      "epoch": 0.21397753672553232,
      "grad_norm": 1.6194623708724976,
      "learning_rate": 9.978602246327447e-06,
      "loss": 3.0604,
      "step": 24500
    },
    {
      "epoch": 0.21834442523013503,
      "grad_norm": 1.5114210844039917,
      "learning_rate": 9.978165557476987e-06,
      "loss": 3.0643,
      "step": 25000
    },
    {
      "epoch": 0.22271131373473774,
      "grad_norm": 1.4907346963882446,
      "learning_rate": 9.977728868626527e-06,
      "loss": 3.0627,
      "step": 25500
    },
    {
      "epoch": 0.22707820223934042,
      "grad_norm": 1.55216383934021,
      "learning_rate": 9.977292179776067e-06,
      "loss": 3.0705,
      "step": 26000
    },
    {
      "epoch": 0.23144509074394312,
      "grad_norm": 1.5280052423477173,
      "learning_rate": 9.976855490925607e-06,
      "loss": 3.0642,
      "step": 26500
    },
    {
      "epoch": 0.23581197924854583,
      "grad_norm": 1.6076959371566772,
      "learning_rate": 9.976418802075146e-06,
      "loss": 3.0607,
      "step": 27000
    },
    {
      "epoch": 0.24017886775314853,
      "grad_norm": 1.5258777141571045,
      "learning_rate": 9.975982113224686e-06,
      "loss": 3.0711,
      "step": 27500
    },
    {
      "epoch": 0.24454575625775124,
      "grad_norm": 1.5106350183486938,
      "learning_rate": 9.975545424374226e-06,
      "loss": 3.0659,
      "step": 28000
    },
    {
      "epoch": 0.24891264476235392,
      "grad_norm": 1.5026884078979492,
      "learning_rate": 9.975108735523766e-06,
      "loss": 3.0629,
      "step": 28500
    },
    {
      "epoch": 0.2532795332669566,
      "grad_norm": 1.4955607652664185,
      "learning_rate": 9.974672046673306e-06,
      "loss": 3.0642,
      "step": 29000
    },
    {
      "epoch": 0.25764642177155933,
      "grad_norm": 1.5555437803268433,
      "learning_rate": 9.974235357822844e-06,
      "loss": 3.0525,
      "step": 29500
    },
    {
      "epoch": 0.26201331027616204,
      "grad_norm": 1.5264055728912354,
      "learning_rate": 9.973798668972384e-06,
      "loss": 3.0656,
      "step": 30000
    },
    {
      "epoch": 0.26638019878076474,
      "grad_norm": 1.5161739587783813,
      "learning_rate": 9.973361980121923e-06,
      "loss": 3.067,
      "step": 30500
    },
    {
      "epoch": 0.27074708728536745,
      "grad_norm": 1.640349268913269,
      "learning_rate": 9.972925291271463e-06,
      "loss": 3.0617,
      "step": 31000
    },
    {
      "epoch": 0.27511397578997016,
      "grad_norm": 1.5388609170913696,
      "learning_rate": 9.972488602421003e-06,
      "loss": 3.0584,
      "step": 31500
    },
    {
      "epoch": 0.2794808642945728,
      "grad_norm": 1.5143171548843384,
      "learning_rate": 9.972051913570545e-06,
      "loss": 3.0563,
      "step": 32000
    },
    {
      "epoch": 0.2838477527991755,
      "grad_norm": 1.5235260725021362,
      "learning_rate": 9.971615224720084e-06,
      "loss": 3.0601,
      "step": 32500
    },
    {
      "epoch": 0.2882146413037782,
      "grad_norm": 1.4914684295654297,
      "learning_rate": 9.971178535869623e-06,
      "loss": 3.052,
      "step": 33000
    },
    {
      "epoch": 0.2925815298083809,
      "grad_norm": 1.4766864776611328,
      "learning_rate": 9.970741847019162e-06,
      "loss": 3.0549,
      "step": 33500
    },
    {
      "epoch": 0.29694841831298363,
      "grad_norm": 1.5566754341125488,
      "learning_rate": 9.970305158168702e-06,
      "loss": 3.055,
      "step": 34000
    },
    {
      "epoch": 0.30131530681758634,
      "grad_norm": 1.546927809715271,
      "learning_rate": 9.969868469318242e-06,
      "loss": 3.063,
      "step": 34500
    },
    {
      "epoch": 0.30568219532218904,
      "grad_norm": 1.5629053115844727,
      "learning_rate": 9.969431780467782e-06,
      "loss": 3.059,
      "step": 35000
    },
    {
      "epoch": 0.31004908382679175,
      "grad_norm": 1.538415789604187,
      "learning_rate": 9.968995091617322e-06,
      "loss": 3.054,
      "step": 35500
    },
    {
      "epoch": 0.31441597233139446,
      "grad_norm": 1.541513442993164,
      "learning_rate": 9.968558402766862e-06,
      "loss": 3.0538,
      "step": 36000
    },
    {
      "epoch": 0.31878286083599716,
      "grad_norm": 1.5637692213058472,
      "learning_rate": 9.968121713916401e-06,
      "loss": 3.0538,
      "step": 36500
    },
    {
      "epoch": 0.3231497493405998,
      "grad_norm": 1.5472180843353271,
      "learning_rate": 9.967685025065941e-06,
      "loss": 3.0539,
      "step": 37000
    },
    {
      "epoch": 0.3275166378452025,
      "grad_norm": 1.4945605993270874,
      "learning_rate": 9.967248336215481e-06,
      "loss": 3.0576,
      "step": 37500
    },
    {
      "epoch": 0.3318835263498052,
      "grad_norm": 1.4775537252426147,
      "learning_rate": 9.96681164736502e-06,
      "loss": 3.0553,
      "step": 38000
    },
    {
      "epoch": 0.33625041485440793,
      "grad_norm": 1.487927794456482,
      "learning_rate": 9.966374958514559e-06,
      "loss": 3.0577,
      "step": 38500
    },
    {
      "epoch": 0.34061730335901064,
      "grad_norm": 1.538345456123352,
      "learning_rate": 9.965938269664099e-06,
      "loss": 3.0541,
      "step": 39000
    },
    {
      "epoch": 0.34498419186361334,
      "grad_norm": 1.543130874633789,
      "learning_rate": 9.965501580813639e-06,
      "loss": 3.0559,
      "step": 39500
    },
    {
      "epoch": 0.34935108036821605,
      "grad_norm": 1.5487135648727417,
      "learning_rate": 9.965064891963178e-06,
      "loss": 3.0514,
      "step": 40000
    },
    {
      "epoch": 0.35371796887281876,
      "grad_norm": 1.5321663618087769,
      "learning_rate": 9.964628203112718e-06,
      "loss": 3.0524,
      "step": 40500
    },
    {
      "epoch": 0.35808485737742146,
      "grad_norm": 1.5257816314697266,
      "learning_rate": 9.96419151426226e-06,
      "loss": 3.056,
      "step": 41000
    },
    {
      "epoch": 0.36245174588202417,
      "grad_norm": 1.478402018547058,
      "learning_rate": 9.9637548254118e-06,
      "loss": 3.0518,
      "step": 41500
    },
    {
      "epoch": 0.3668186343866268,
      "grad_norm": 1.5002671480178833,
      "learning_rate": 9.963318136561338e-06,
      "loss": 3.0531,
      "step": 42000
    },
    {
      "epoch": 0.3711855228912295,
      "grad_norm": 1.5446417331695557,
      "learning_rate": 9.962881447710878e-06,
      "loss": 3.0483,
      "step": 42500
    },
    {
      "epoch": 0.37555241139583223,
      "grad_norm": 1.4993116855621338,
      "learning_rate": 9.962444758860417e-06,
      "loss": 3.0522,
      "step": 43000
    },
    {
      "epoch": 0.37991929990043494,
      "grad_norm": 1.576460838317871,
      "learning_rate": 9.962008070009957e-06,
      "loss": 3.0547,
      "step": 43500
    },
    {
      "epoch": 0.38428618840503764,
      "grad_norm": 1.5173957347869873,
      "learning_rate": 9.961571381159497e-06,
      "loss": 3.0484,
      "step": 44000
    },
    {
      "epoch": 0.38865307690964035,
      "grad_norm": 1.5036677122116089,
      "learning_rate": 9.961134692309037e-06,
      "loss": 3.0432,
      "step": 44500
    },
    {
      "epoch": 0.39301996541424306,
      "grad_norm": 1.4855741262435913,
      "learning_rate": 9.960698003458577e-06,
      "loss": 3.0552,
      "step": 45000
    },
    {
      "epoch": 0.39738685391884576,
      "grad_norm": 1.4877578020095825,
      "learning_rate": 9.960261314608117e-06,
      "loss": 3.077,
      "step": 45500
    },
    {
      "epoch": 0.40175374242344847,
      "grad_norm": 1.5206501483917236,
      "learning_rate": 9.959824625757656e-06,
      "loss": 3.0821,
      "step": 46000
    },
    {
      "epoch": 0.4061206309280511,
      "grad_norm": 1.4510574340820312,
      "learning_rate": 9.959387936907196e-06,
      "loss": 3.0795,
      "step": 46500
    },
    {
      "epoch": 0.4104875194326538,
      "grad_norm": 1.5214266777038574,
      "learning_rate": 9.958951248056736e-06,
      "loss": 3.076,
      "step": 47000
    },
    {
      "epoch": 0.41485440793725653,
      "grad_norm": 1.553749442100525,
      "learning_rate": 9.958514559206274e-06,
      "loss": 3.0868,
      "step": 47500
    },
    {
      "epoch": 0.41922129644185924,
      "grad_norm": 1.5479093790054321,
      "learning_rate": 9.958077870355814e-06,
      "loss": 3.0771,
      "step": 48000
    },
    {
      "epoch": 0.42358818494646194,
      "grad_norm": 1.4554359912872314,
      "learning_rate": 9.957641181505354e-06,
      "loss": 3.0783,
      "step": 48500
    },
    {
      "epoch": 0.42795507345106465,
      "grad_norm": 1.482832908630371,
      "learning_rate": 9.957204492654894e-06,
      "loss": 3.074,
      "step": 49000
    },
    {
      "epoch": 0.4298328355080438,
      "step": 49215,
      "total_flos": 1.2345107794414797e+18,
      "train_loss": 3.067183213959159,
      "train_runtime": 46797.6916,
      "train_samples_per_second": 11743.883,
      "train_steps_per_second": 244.666
    }
  ],
  "logging_steps": 500,
  "max_steps": 11449800,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 100,
  "save_steps": 1000000,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": false,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 1.2345107794414797e+18,
  "train_batch_size": 48,
  "trial_name": null,
  "trial_params": null
}
|
|