{
  "best_metric": 6.290847301483154,
  "best_model_checkpoint": "miner_id_24/checkpoint-50",
  "epoch": 0.009231479344564966,
  "eval_steps": 50,
  "global_step": 200,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 4.615739672282483e-05,
      "grad_norm": 22445.78125,
      "learning_rate": 1e-05,
      "loss": 25.784,
      "step": 1
    },
    {
      "epoch": 4.615739672282483e-05,
      "eval_loss": 6.5567145347595215,
      "eval_runtime": 116.9938,
      "eval_samples_per_second": 311.888,
      "eval_steps_per_second": 77.978,
      "step": 1
    },
    {
      "epoch": 9.231479344564966e-05,
      "grad_norm": 12910.5078125,
      "learning_rate": 2e-05,
      "loss": 27.8226,
      "step": 2
    },
    {
      "epoch": 0.0001384721901684745,
      "grad_norm": 17848.23046875,
      "learning_rate": 3e-05,
      "loss": 27.2563,
      "step": 3
    },
    {
      "epoch": 0.00018462958689129932,
      "grad_norm": 12322.72265625,
      "learning_rate": 4e-05,
      "loss": 27.3904,
      "step": 4
    },
    {
      "epoch": 0.00023078698361412417,
      "grad_norm": 10741.056640625,
      "learning_rate": 5e-05,
      "loss": 28.2983,
      "step": 5
    },
    {
      "epoch": 0.000276944380336949,
      "grad_norm": 13121.9970703125,
      "learning_rate": 6e-05,
      "loss": 26.237,
      "step": 6
    },
    {
      "epoch": 0.00032310177705977385,
      "grad_norm": 9293.3037109375,
      "learning_rate": 7e-05,
      "loss": 25.5588,
      "step": 7
    },
    {
      "epoch": 0.00036925917378259865,
      "grad_norm": 10365.8857421875,
      "learning_rate": 8e-05,
      "loss": 26.4811,
      "step": 8
    },
    {
      "epoch": 0.0004154165705054235,
      "grad_norm": 12928.04296875,
      "learning_rate": 9e-05,
      "loss": 25.8373,
      "step": 9
    },
    {
      "epoch": 0.00046157396722824835,
      "grad_norm": 14451.408203125,
      "learning_rate": 0.0001,
      "loss": 26.4935,
      "step": 10
    },
    {
      "epoch": 0.0005077313639510731,
      "grad_norm": 19713.658203125,
      "learning_rate": 9.999316524962345e-05,
      "loss": 25.1195,
      "step": 11
    },
    {
      "epoch": 0.000553888760673898,
      "grad_norm": 15381.8095703125,
      "learning_rate": 9.997266286704631e-05,
      "loss": 24.9361,
      "step": 12
    },
    {
      "epoch": 0.0006000461573967228,
      "grad_norm": 24253.5546875,
      "learning_rate": 9.993849845741524e-05,
      "loss": 25.9728,
      "step": 13
    },
    {
      "epoch": 0.0006462035541195477,
      "grad_norm": 14783.53125,
      "learning_rate": 9.989068136093873e-05,
      "loss": 23.7536,
      "step": 14
    },
    {
      "epoch": 0.0006923609508423724,
      "grad_norm": 15313.6494140625,
      "learning_rate": 9.98292246503335e-05,
      "loss": 25.9306,
      "step": 15
    },
    {
      "epoch": 0.0007385183475651973,
      "grad_norm": 22333.41015625,
      "learning_rate": 9.975414512725057e-05,
      "loss": 22.6163,
      "step": 16
    },
    {
      "epoch": 0.0007846757442880221,
      "grad_norm": 20572.08984375,
      "learning_rate": 9.966546331768191e-05,
      "loss": 23.9031,
      "step": 17
    },
    {
      "epoch": 0.000830833141010847,
      "grad_norm": 19698.630859375,
      "learning_rate": 9.956320346634876e-05,
      "loss": 23.5744,
      "step": 18
    },
    {
      "epoch": 0.0008769905377336718,
      "grad_norm": 13384.6240234375,
      "learning_rate": 9.944739353007344e-05,
      "loss": 23.5693,
      "step": 19
    },
    {
      "epoch": 0.0009231479344564967,
      "grad_norm": 26537.767578125,
      "learning_rate": 9.931806517013612e-05,
      "loss": 23.3416,
      "step": 20
    },
    {
      "epoch": 0.0009693053311793214,
      "grad_norm": 19650.4921875,
      "learning_rate": 9.917525374361912e-05,
      "loss": 23.3517,
      "step": 21
    },
    {
      "epoch": 0.0010154627279021463,
      "grad_norm": 18629.486328125,
      "learning_rate": 9.901899829374047e-05,
      "loss": 20.4999,
      "step": 22
    },
    {
      "epoch": 0.0010616201246249712,
      "grad_norm": 22304.865234375,
      "learning_rate": 9.884934153917997e-05,
      "loss": 22.6462,
      "step": 23
    },
    {
      "epoch": 0.001107777521347796,
      "grad_norm": 14506.77734375,
      "learning_rate": 9.86663298624003e-05,
      "loss": 23.4764,
      "step": 24
    },
    {
      "epoch": 0.0011539349180706207,
      "grad_norm": 14228.169921875,
      "learning_rate": 9.847001329696653e-05,
      "loss": 23.762,
      "step": 25
    },
    {
      "epoch": 0.0012000923147934457,
      "grad_norm": 30771.2109375,
      "learning_rate": 9.826044551386744e-05,
      "loss": 22.8363,
      "step": 26
    },
    {
      "epoch": 0.0012462497115162704,
      "grad_norm": 56593.390625,
      "learning_rate": 9.803768380684242e-05,
      "loss": 21.8793,
      "step": 27
    },
    {
      "epoch": 0.0012924071082390954,
      "grad_norm": 20859.037109375,
      "learning_rate": 9.780178907671789e-05,
      "loss": 22.4923,
      "step": 28
    },
    {
      "epoch": 0.0013385645049619201,
      "grad_norm": 27690.1875,
      "learning_rate": 9.755282581475769e-05,
      "loss": 21.5724,
      "step": 29
    },
    {
      "epoch": 0.0013847219016847449,
      "grad_norm": 16696.064453125,
      "learning_rate": 9.729086208503174e-05,
      "loss": 19.0704,
      "step": 30
    },
    {
      "epoch": 0.0014308792984075698,
      "grad_norm": 31018.873046875,
      "learning_rate": 9.701596950580806e-05,
      "loss": 20.8645,
      "step": 31
    },
    {
      "epoch": 0.0014770366951303946,
      "grad_norm": 31504.15625,
      "learning_rate": 9.672822322997305e-05,
      "loss": 20.111,
      "step": 32
    },
    {
      "epoch": 0.0015231940918532195,
      "grad_norm": 32905.55859375,
      "learning_rate": 9.642770192448536e-05,
      "loss": 21.8384,
      "step": 33
    },
    {
      "epoch": 0.0015693514885760443,
      "grad_norm": 25131.814453125,
      "learning_rate": 9.611448774886924e-05,
      "loss": 23.6569,
      "step": 34
    },
    {
      "epoch": 0.0016155088852988692,
      "grad_norm": 22027.71484375,
      "learning_rate": 9.578866633275288e-05,
      "loss": 22.2301,
      "step": 35
    },
    {
      "epoch": 0.001661666282021694,
      "grad_norm": 27400.876953125,
      "learning_rate": 9.545032675245813e-05,
      "loss": 21.5273,
      "step": 36
    },
    {
      "epoch": 0.0017078236787445187,
      "grad_norm": 24637.80859375,
      "learning_rate": 9.509956150664796e-05,
      "loss": 22.5958,
      "step": 37
    },
    {
      "epoch": 0.0017539810754673437,
      "grad_norm": 20805.537109375,
      "learning_rate": 9.473646649103818e-05,
      "loss": 21.467,
      "step": 38
    },
    {
      "epoch": 0.0018001384721901684,
      "grad_norm": 36721.640625,
      "learning_rate": 9.43611409721806e-05,
      "loss": 23.575,
      "step": 39
    },
    {
      "epoch": 0.0018462958689129934,
      "grad_norm": 24539.470703125,
      "learning_rate": 9.397368756032445e-05,
      "loss": 23.2596,
      "step": 40
    },
    {
      "epoch": 0.0018924532656358181,
      "grad_norm": 32397.0703125,
      "learning_rate": 9.357421218136386e-05,
      "loss": 20.8464,
      "step": 41
    },
    {
      "epoch": 0.0019386106623586429,
      "grad_norm": 22713.24609375,
      "learning_rate": 9.316282404787871e-05,
      "loss": 19.6454,
      "step": 42
    },
    {
      "epoch": 0.0019847680590814676,
      "grad_norm": 15757.3203125,
      "learning_rate": 9.273963562927695e-05,
      "loss": 21.6769,
      "step": 43
    },
    {
      "epoch": 0.0020309254558042926,
      "grad_norm": 23368.83203125,
      "learning_rate": 9.230476262104677e-05,
      "loss": 20.7625,
      "step": 44
    },
    {
      "epoch": 0.0020770828525271175,
      "grad_norm": 22798.123046875,
      "learning_rate": 9.185832391312644e-05,
      "loss": 20.2746,
      "step": 45
    },
    {
      "epoch": 0.0021232402492499425,
      "grad_norm": 27159.142578125,
      "learning_rate": 9.140044155740101e-05,
      "loss": 23.0228,
      "step": 46
    },
    {
      "epoch": 0.002169397645972767,
      "grad_norm": 43737.3984375,
      "learning_rate": 9.093124073433463e-05,
      "loss": 20.1612,
      "step": 47
    },
    {
      "epoch": 0.002215555042695592,
      "grad_norm": 38735.39453125,
      "learning_rate": 9.045084971874738e-05,
      "loss": 22.1198,
      "step": 48
    },
    {
      "epoch": 0.002261712439418417,
      "grad_norm": 24909.6484375,
      "learning_rate": 8.995939984474624e-05,
      "loss": 23.5771,
      "step": 49
    },
    {
      "epoch": 0.0023078698361412415,
      "grad_norm": 46400.6328125,
      "learning_rate": 8.945702546981969e-05,
      "loss": 24.5076,
      "step": 50
    },
    {
      "epoch": 0.0023078698361412415,
      "eval_loss": 6.290847301483154,
      "eval_runtime": 117.0237,
      "eval_samples_per_second": 311.809,
      "eval_steps_per_second": 77.959,
      "step": 50
    },
    {
      "epoch": 0.0023540272328640664,
      "grad_norm": 13528.9306640625,
      "learning_rate": 8.894386393810563e-05,
      "loss": 29.2787,
      "step": 51
    },
    {
      "epoch": 0.0024001846295868914,
      "grad_norm": 14640.6796875,
      "learning_rate": 8.842005554284296e-05,
      "loss": 29.954,
      "step": 52
    },
    {
      "epoch": 0.0024463420263097163,
      "grad_norm": 19368.150390625,
      "learning_rate": 8.788574348801675e-05,
      "loss": 28.7966,
      "step": 53
    },
    {
      "epoch": 0.002492499423032541,
      "grad_norm": 17936.0078125,
      "learning_rate": 8.73410738492077e-05,
      "loss": 28.7941,
      "step": 54
    },
    {
      "epoch": 0.002538656819755366,
      "grad_norm": 23861.60546875,
      "learning_rate": 8.678619553365659e-05,
      "loss": 29.3609,
      "step": 55
    },
    {
      "epoch": 0.002584814216478191,
      "grad_norm": 15433.3095703125,
      "learning_rate": 8.622126023955446e-05,
      "loss": 27.1609,
      "step": 56
    },
    {
      "epoch": 0.0026309716132010153,
      "grad_norm": 33403.921875,
      "learning_rate": 8.564642241456986e-05,
      "loss": 29.2413,
      "step": 57
    },
    {
      "epoch": 0.0026771290099238403,
      "grad_norm": 24673.494140625,
      "learning_rate": 8.506183921362443e-05,
      "loss": 27.6385,
      "step": 58
    },
    {
      "epoch": 0.0027232864066466652,
      "grad_norm": 14325.19140625,
      "learning_rate": 8.44676704559283e-05,
      "loss": 29.6801,
      "step": 59
    },
    {
      "epoch": 0.0027694438033694898,
      "grad_norm": 17081.51171875,
      "learning_rate": 8.386407858128706e-05,
      "loss": 28.7645,
      "step": 60
    },
    {
      "epoch": 0.0028156012000923147,
      "grad_norm": 29413.771484375,
      "learning_rate": 8.32512286056924e-05,
      "loss": 28.8996,
      "step": 61
    },
    {
      "epoch": 0.0028617585968151397,
      "grad_norm": 16099.470703125,
      "learning_rate": 8.262928807620843e-05,
      "loss": 28.155,
      "step": 62
    },
    {
      "epoch": 0.0029079159935379646,
      "grad_norm": 22059.09765625,
      "learning_rate": 8.199842702516583e-05,
      "loss": 29.3335,
      "step": 63
    },
    {
      "epoch": 0.002954073390260789,
      "grad_norm": 17140.3203125,
      "learning_rate": 8.135881792367686e-05,
      "loss": 27.0672,
      "step": 64
    },
    {
      "epoch": 0.003000230786983614,
      "grad_norm": 28841.115234375,
      "learning_rate": 8.07106356344834e-05,
      "loss": 28.5047,
      "step": 65
    },
    {
      "epoch": 0.003046388183706439,
      "grad_norm": 18518.37109375,
      "learning_rate": 8.005405736415126e-05,
      "loss": 27.5791,
      "step": 66
    },
    {
      "epoch": 0.0030925455804292636,
      "grad_norm": 19345.005859375,
      "learning_rate": 7.938926261462366e-05,
      "loss": 28.3685,
      "step": 67
    },
    {
      "epoch": 0.0031387029771520886,
      "grad_norm": 29759.44921875,
      "learning_rate": 7.871643313414718e-05,
      "loss": 26.2761,
      "step": 68
    },
    {
      "epoch": 0.0031848603738749135,
      "grad_norm": 18335.62109375,
      "learning_rate": 7.803575286758364e-05,
      "loss": 26.6666,
      "step": 69
    },
    {
      "epoch": 0.0032310177705977385,
      "grad_norm": 30440.87890625,
      "learning_rate": 7.734740790612136e-05,
      "loss": 26.5062,
      "step": 70
    },
    {
      "epoch": 0.003277175167320563,
      "grad_norm": 19112.134765625,
      "learning_rate": 7.66515864363997e-05,
      "loss": 25.7603,
      "step": 71
    },
    {
      "epoch": 0.003323332564043388,
      "grad_norm": 24852.76171875,
      "learning_rate": 7.594847868906076e-05,
      "loss": 27.9856,
      "step": 72
    },
    {
      "epoch": 0.003369489960766213,
      "grad_norm": 46107.21484375,
      "learning_rate": 7.52382768867422e-05,
      "loss": 25.6559,
      "step": 73
    },
    {
      "epoch": 0.0034156473574890375,
      "grad_norm": 24512.013671875,
      "learning_rate": 7.452117519152542e-05,
      "loss": 27.834,
      "step": 74
    },
    {
      "epoch": 0.0034618047542118624,
      "grad_norm": 29530.43359375,
      "learning_rate": 7.379736965185368e-05,
      "loss": 26.7404,
      "step": 75
    },
    {
      "epoch": 0.0035079621509346874,
      "grad_norm": 27868.212890625,
      "learning_rate": 7.30670581489344e-05,
      "loss": 25.2077,
      "step": 76
    },
    {
      "epoch": 0.003554119547657512,
      "grad_norm": 18155.494140625,
      "learning_rate": 7.233044034264034e-05,
      "loss": 26.284,
      "step": 77
    },
    {
      "epoch": 0.003600276944380337,
      "grad_norm": 21550.98046875,
      "learning_rate": 7.158771761692464e-05,
      "loss": 25.5477,
      "step": 78
    },
    {
      "epoch": 0.003646434341103162,
      "grad_norm": 31559.13671875,
      "learning_rate": 7.083909302476453e-05,
      "loss": 25.403,
      "step": 79
    },
    {
      "epoch": 0.0036925917378259868,
      "grad_norm": 18903.1328125,
      "learning_rate": 7.008477123264848e-05,
      "loss": 23.5741,
      "step": 80
    },
    {
      "epoch": 0.0037387491345488113,
      "grad_norm": 48414.99609375,
      "learning_rate": 6.932495846462261e-05,
      "loss": 24.3138,
      "step": 81
    },
    {
      "epoch": 0.0037849065312716363,
      "grad_norm": 21682.453125,
      "learning_rate": 6.855986244591104e-05,
      "loss": 23.5924,
      "step": 82
    },
    {
      "epoch": 0.0038310639279944612,
      "grad_norm": 21422.771484375,
      "learning_rate": 6.778969234612584e-05,
      "loss": 23.3001,
      "step": 83
    },
    {
      "epoch": 0.0038772213247172858,
      "grad_norm": 22182.619140625,
      "learning_rate": 6.701465872208216e-05,
      "loss": 23.6324,
      "step": 84
    },
    {
      "epoch": 0.003923378721440111,
      "grad_norm": 24638.53515625,
      "learning_rate": 6.623497346023418e-05,
      "loss": 24.0123,
      "step": 85
    },
    {
      "epoch": 0.003969536118162935,
      "grad_norm": 42181.08203125,
      "learning_rate": 6.545084971874738e-05,
      "loss": 24.0208,
      "step": 86
    },
    {
      "epoch": 0.004015693514885761,
      "grad_norm": 17223.33203125,
      "learning_rate": 6.466250186922325e-05,
      "loss": 23.8072,
      "step": 87
    },
    {
      "epoch": 0.004061850911608585,
      "grad_norm": 37419.80078125,
      "learning_rate": 6.387014543809223e-05,
      "loss": 21.9256,
      "step": 88
    },
    {
      "epoch": 0.00410800830833141,
      "grad_norm": 18106.880859375,
      "learning_rate": 6.307399704769099e-05,
      "loss": 22.0732,
      "step": 89
    },
    {
      "epoch": 0.004154165705054235,
      "grad_norm": 27022.27734375,
      "learning_rate": 6.227427435703997e-05,
      "loss": 23.5882,
      "step": 90
    },
    {
      "epoch": 0.00420032310177706,
      "grad_norm": 24592.55859375,
      "learning_rate": 6.147119600233758e-05,
      "loss": 22.5416,
      "step": 91
    },
    {
      "epoch": 0.004246480498499885,
      "grad_norm": 26019.220703125,
      "learning_rate": 6.066498153718735e-05,
      "loss": 22.1786,
      "step": 92
    },
    {
      "epoch": 0.0042926378952227095,
      "grad_norm": 19021.7421875,
      "learning_rate": 5.985585137257401e-05,
      "loss": 21.6818,
      "step": 93
    },
    {
      "epoch": 0.004338795291945534,
      "grad_norm": 44170.796875,
      "learning_rate": 5.90440267166055e-05,
      "loss": 23.2813,
      "step": 94
    },
    {
      "epoch": 0.0043849526886683594,
      "grad_norm": 22554.763671875,
      "learning_rate": 5.8229729514036705e-05,
      "loss": 24.2983,
      "step": 95
    },
    {
      "epoch": 0.004431110085391184,
      "grad_norm": 37566.48046875,
      "learning_rate": 5.74131823855921e-05,
      "loss": 22.8671,
      "step": 96
    },
    {
      "epoch": 0.0044772674821140085,
      "grad_norm": 46563.9453125,
      "learning_rate": 5.6594608567103456e-05,
      "loss": 22.6329,
      "step": 97
    },
    {
      "epoch": 0.004523424878836834,
      "grad_norm": 11167.861328125,
      "learning_rate": 5.577423184847932e-05,
      "loss": 20.1984,
      "step": 98
    },
    {
      "epoch": 0.004569582275559658,
      "grad_norm": 65252.296875,
      "learning_rate": 5.495227651252315e-05,
      "loss": 23.824,
      "step": 99
    },
    {
      "epoch": 0.004615739672282483,
      "grad_norm": 43028.50390625,
      "learning_rate": 5.4128967273616625e-05,
      "loss": 21.3231,
      "step": 100
    },
    {
      "epoch": 0.004615739672282483,
      "eval_loss": 6.653355121612549,
      "eval_runtime": 116.8995,
      "eval_samples_per_second": 312.14,
      "eval_steps_per_second": 78.041,
      "step": 100
    },
    {
      "epoch": 0.004661897069005308,
      "grad_norm": 13000.0380859375,
      "learning_rate": 5.330452921628497e-05,
      "loss": 32.0844,
      "step": 101
    },
    {
      "epoch": 0.004708054465728133,
      "grad_norm": 12532.7470703125,
      "learning_rate": 5.247918773366112e-05,
      "loss": 31.0504,
      "step": 102
    },
    {
      "epoch": 0.004754211862450957,
      "grad_norm": 21590.57421875,
      "learning_rate": 5.165316846586541e-05,
      "loss": 30.8403,
      "step": 103
    },
    {
      "epoch": 0.004800369259173783,
      "grad_norm": 16138.33203125,
      "learning_rate": 5.0826697238317935e-05,
      "loss": 29.4348,
      "step": 104
    },
    {
      "epoch": 0.004846526655896607,
      "grad_norm": 12663.6875,
      "learning_rate": 5e-05,
      "loss": 29.8385,
      "step": 105
    },
    {
      "epoch": 0.004892684052619433,
      "grad_norm": 17485.61328125,
      "learning_rate": 4.917330276168208e-05,
      "loss": 29.5445,
      "step": 106
    },
    {
      "epoch": 0.004938841449342257,
      "grad_norm": 12701.091796875,
      "learning_rate": 4.834683153413459e-05,
      "loss": 28.8904,
      "step": 107
    },
    {
      "epoch": 0.004984998846065082,
      "grad_norm": 17760.87109375,
      "learning_rate": 4.7520812266338885e-05,
      "loss": 29.5446,
      "step": 108
    },
    {
      "epoch": 0.005031156242787907,
      "grad_norm": 15029.7021484375,
      "learning_rate": 4.669547078371504e-05,
      "loss": 28.3419,
      "step": 109
    },
    {
      "epoch": 0.005077313639510732,
      "grad_norm": 17356.201171875,
      "learning_rate": 4.5871032726383386e-05,
      "loss": 29.5726,
      "step": 110
    },
    {
      "epoch": 0.005123471036233556,
      "grad_norm": 18930.1796875,
      "learning_rate": 4.504772348747687e-05,
      "loss": 30.3205,
      "step": 111
    },
    {
      "epoch": 0.005169628432956382,
      "grad_norm": 33717.44140625,
      "learning_rate": 4.4225768151520694e-05,
      "loss": 29.4096,
      "step": 112
    },
    {
      "epoch": 0.005215785829679206,
      "grad_norm": 12564.3232421875,
      "learning_rate": 4.3405391432896555e-05,
      "loss": 27.909,
      "step": 113
    },
    {
      "epoch": 0.005261943226402031,
      "grad_norm": 15485.4404296875,
      "learning_rate": 4.2586817614407895e-05,
      "loss": 27.2055,
      "step": 114
    },
    {
      "epoch": 0.005308100623124856,
      "grad_norm": 14564.55859375,
      "learning_rate": 4.17702704859633e-05,
      "loss": 27.1391,
      "step": 115
    },
    {
      "epoch": 0.0053542580198476806,
      "grad_norm": 28163.482421875,
      "learning_rate": 4.095597328339452e-05,
      "loss": 27.5595,
      "step": 116
    },
    {
      "epoch": 0.005400415416570505,
      "grad_norm": 15327.2060546875,
      "learning_rate": 4.0144148627425993e-05,
      "loss": 27.7242,
      "step": 117
    },
    {
      "epoch": 0.0054465728132933305,
      "grad_norm": 12743.4599609375,
      "learning_rate": 3.933501846281267e-05,
      "loss": 29.3176,
      "step": 118
    },
    {
      "epoch": 0.005492730210016155,
      "grad_norm": 17723.337890625,
      "learning_rate": 3.852880399766243e-05,
      "loss": 27.6727,
      "step": 119
    },
    {
      "epoch": 0.0055388876067389795,
      "grad_norm": 11900.8056640625,
      "learning_rate": 3.772572564296005e-05,
      "loss": 27.0558,
      "step": 120
    },
    {
      "epoch": 0.005585045003461805,
      "grad_norm": 30333.73828125,
      "learning_rate": 3.6926002952309016e-05,
      "loss": 25.7016,
      "step": 121
    },
    {
      "epoch": 0.0056312024001846294,
      "grad_norm": 14563.4892578125,
      "learning_rate": 3.612985456190778e-05,
      "loss": 24.8602,
      "step": 122
    },
    {
      "epoch": 0.005677359796907455,
      "grad_norm": 18867.26953125,
      "learning_rate": 3.533749813077677e-05,
      "loss": 25.5498,
      "step": 123
    },
    {
      "epoch": 0.005723517193630279,
      "grad_norm": 17261.439453125,
      "learning_rate": 3.4549150281252636e-05,
      "loss": 24.7577,
      "step": 124
    },
    {
      "epoch": 0.005769674590353104,
      "grad_norm": 14557.5341796875,
      "learning_rate": 3.3765026539765834e-05,
      "loss": 24.8685,
      "step": 125
    },
    {
      "epoch": 0.005815831987075929,
      "grad_norm": 55429.828125,
      "learning_rate": 3.298534127791785e-05,
      "loss": 24.7386,
      "step": 126
    },
    {
      "epoch": 0.005861989383798754,
      "grad_norm": 37720.05859375,
      "learning_rate": 3.221030765387417e-05,
      "loss": 25.7687,
      "step": 127
    },
    {
      "epoch": 0.005908146780521578,
      "grad_norm": 13499.6328125,
      "learning_rate": 3.144013755408895e-05,
      "loss": 25.2399,
      "step": 128
    },
    {
      "epoch": 0.005954304177244404,
      "grad_norm": 21841.525390625,
      "learning_rate": 3.0675041535377405e-05,
      "loss": 24.0868,
      "step": 129
    },
    {
      "epoch": 0.006000461573967228,
      "grad_norm": 18137.44921875,
      "learning_rate": 2.991522876735154e-05,
      "loss": 25.1236,
      "step": 130
    },
    {
      "epoch": 0.006046618970690053,
      "grad_norm": 18170.18359375,
      "learning_rate": 2.916090697523549e-05,
      "loss": 24.8144,
      "step": 131
    },
    {
      "epoch": 0.006092776367412878,
      "grad_norm": 12616.4169921875,
      "learning_rate": 2.8412282383075363e-05,
      "loss": 26.8521,
      "step": 132
    },
    {
      "epoch": 0.006138933764135703,
      "grad_norm": 14475.0224609375,
      "learning_rate": 2.766955965735968e-05,
      "loss": 24.1187,
      "step": 133
    },
    {
      "epoch": 0.006185091160858527,
      "grad_norm": 13354.6005859375,
      "learning_rate": 2.693294185106562e-05,
      "loss": 24.8671,
      "step": 134
    },
    {
      "epoch": 0.006231248557581353,
      "grad_norm": 34306.61328125,
      "learning_rate": 2.6202630348146324e-05,
      "loss": 24.901,
      "step": 135
    },
    {
      "epoch": 0.006277405954304177,
      "grad_norm": 14065.1328125,
      "learning_rate": 2.547882480847461e-05,
      "loss": 23.7815,
      "step": 136
    },
    {
      "epoch": 0.006323563351027002,
      "grad_norm": 13215.8115234375,
      "learning_rate": 2.476172311325783e-05,
      "loss": 22.6957,
      "step": 137
    },
    {
      "epoch": 0.006369720747749827,
      "grad_norm": 32365.15234375,
      "learning_rate": 2.405152131093926e-05,
      "loss": 22.3192,
      "step": 138
    },
    {
      "epoch": 0.006415878144472652,
      "grad_norm": 21075.822265625,
      "learning_rate": 2.3348413563600325e-05,
      "loss": 23.3173,
      "step": 139
    },
    {
      "epoch": 0.006462035541195477,
      "grad_norm": 17104.109375,
      "learning_rate": 2.2652592093878666e-05,
      "loss": 25.3204,
      "step": 140
    },
    {
      "epoch": 0.0065081929379183015,
      "grad_norm": 23697.595703125,
      "learning_rate": 2.196424713241637e-05,
      "loss": 22.6917,
      "step": 141
    },
    {
      "epoch": 0.006554350334641126,
      "grad_norm": 20324.15625,
      "learning_rate": 2.128356686585282e-05,
      "loss": 22.6145,
      "step": 142
    },
    {
      "epoch": 0.006600507731363951,
      "grad_norm": 20542.833984375,
      "learning_rate": 2.061073738537635e-05,
      "loss": 22.4909,
      "step": 143
    },
    {
      "epoch": 0.006646665128086776,
      "grad_norm": 43104.8125,
      "learning_rate": 1.9945942635848748e-05,
      "loss": 24.1033,
      "step": 144
    },
    {
      "epoch": 0.0066928225248096005,
      "grad_norm": 16736.55078125,
      "learning_rate": 1.928936436551661e-05,
      "loss": 23.1356,
      "step": 145
    },
    {
      "epoch": 0.006738979921532426,
      "grad_norm": 26106.3359375,
      "learning_rate": 1.8641182076323148e-05,
      "loss": 22.9239,
      "step": 146
    },
    {
      "epoch": 0.00678513731825525,
      "grad_norm": 16372.2353515625,
      "learning_rate": 1.800157297483417e-05,
      "loss": 22.2321,
      "step": 147
    },
    {
      "epoch": 0.006831294714978075,
      "grad_norm": 27215.10546875,
      "learning_rate": 1.7370711923791567e-05,
      "loss": 22.7279,
      "step": 148
    },
    {
      "epoch": 0.0068774521117009,
      "grad_norm": 16365.025390625,
      "learning_rate": 1.6748771394307585e-05,
      "loss": 23.7236,
      "step": 149
    },
    {
      "epoch": 0.006923609508423725,
      "grad_norm": 39346.94140625,
      "learning_rate": 1.6135921418712956e-05,
      "loss": 19.913,
      "step": 150
    },
    {
      "epoch": 0.006923609508423725,
      "eval_loss": 6.5524163246154785,
      "eval_runtime": 117.0961,
      "eval_samples_per_second": 311.616,
      "eval_steps_per_second": 77.91,
      "step": 150
    },
    {
      "epoch": 0.006969766905146549,
      "grad_norm": 13133.1728515625,
      "learning_rate": 1.553232954407171e-05,
      "loss": 30.7831,
      "step": 151
    },
    {
      "epoch": 0.007015924301869375,
      "grad_norm": 14437.8154296875,
      "learning_rate": 1.4938160786375572e-05,
      "loss": 32.2855,
      "step": 152
    },
    {
      "epoch": 0.007062081698592199,
      "grad_norm": 12261.015625,
      "learning_rate": 1.435357758543015e-05,
      "loss": 30.5466,
      "step": 153
    },
    {
      "epoch": 0.007108239095315024,
      "grad_norm": 14204.9453125,
      "learning_rate": 1.3778739760445552e-05,
      "loss": 29.4702,
      "step": 154
    },
    {
      "epoch": 0.007154396492037849,
      "grad_norm": 35599.56640625,
      "learning_rate": 1.3213804466343421e-05,
      "loss": 30.0853,
      "step": 155
    },
    {
      "epoch": 0.007200553888760674,
      "grad_norm": 15908.6923828125,
      "learning_rate": 1.2658926150792322e-05,
      "loss": 30.2363,
      "step": 156
    },
    {
      "epoch": 0.007246711285483499,
      "grad_norm": 21062.623046875,
      "learning_rate": 1.2114256511983274e-05,
      "loss": 29.9957,
      "step": 157
    },
    {
      "epoch": 0.007292868682206324,
      "grad_norm": 13865.328125,
      "learning_rate": 1.157994445715706e-05,
      "loss": 29.3194,
      "step": 158
    },
    {
      "epoch": 0.007339026078929148,
      "grad_norm": 18418.775390625,
      "learning_rate": 1.1056136061894384e-05,
      "loss": 30.0034,
      "step": 159
    },
    {
      "epoch": 0.0073851834756519736,
      "grad_norm": 17927.546875,
      "learning_rate": 1.0542974530180327e-05,
      "loss": 27.3433,
      "step": 160
    },
    {
      "epoch": 0.007431340872374798,
      "grad_norm": 16150.69140625,
      "learning_rate": 1.0040600155253765e-05,
      "loss": 29.3834,
      "step": 161
    },
    {
      "epoch": 0.007477498269097623,
      "grad_norm": 53190.5546875,
      "learning_rate": 9.549150281252633e-06,
      "loss": 28.7297,
      "step": 162
    },
    {
      "epoch": 0.007523655665820448,
      "grad_norm": 13157.083984375,
      "learning_rate": 9.068759265665384e-06,
      "loss": 29.4415,
      "step": 163
    },
    {
      "epoch": 0.0075698130625432725,
      "grad_norm": 18151.259765625,
      "learning_rate": 8.599558442598998e-06,
      "loss": 26.6588,
      "step": 164
    },
    {
      "epoch": 0.007615970459266097,
      "grad_norm": 14146.330078125,
      "learning_rate": 8.141676086873572e-06,
      "loss": 28.4469,
      "step": 165
    },
    {
      "epoch": 0.0076621278559889225,
      "grad_norm": 54733.3046875,
      "learning_rate": 7.695237378953223e-06,
      "loss": 26.0468,
      "step": 166
    },
    {
      "epoch": 0.007708285252711747,
      "grad_norm": 18280.759765625,
      "learning_rate": 7.260364370723044e-06,
      "loss": 29.2003,
      "step": 167
    },
    {
      "epoch": 0.0077544426494345715,
      "grad_norm": 22410.99609375,
      "learning_rate": 6.837175952121306e-06,
      "loss": 27.8878,
      "step": 168
    },
    {
      "epoch": 0.007800600046157397,
      "grad_norm": 11329.37109375,
      "learning_rate": 6.425787818636131e-06,
      "loss": 26.4854,
      "step": 169
    },
    {
      "epoch": 0.007846757442880221,
      "grad_norm": 20874.708984375,
      "learning_rate": 6.026312439675552e-06,
      "loss": 26.5411,
      "step": 170
    },
    {
      "epoch": 0.007892914839603046,
      "grad_norm": 19707.91796875,
      "learning_rate": 5.6388590278194096e-06,
      "loss": 26.1978,
      "step": 171
    },
    {
      "epoch": 0.00793907223632587,
      "grad_norm": 78664.015625,
      "learning_rate": 5.263533508961827e-06,
      "loss": 26.7962,
      "step": 172
    },
    {
      "epoch": 0.007985229633048697,
      "grad_norm": 17584.62890625,
      "learning_rate": 4.900438493352055e-06,
      "loss": 25.9352,
      "step": 173
    },
    {
      "epoch": 0.008031387029771521,
      "grad_norm": 19179.896484375,
      "learning_rate": 4.549673247541875e-06,
      "loss": 26.8773,
      "step": 174
    },
    {
      "epoch": 0.008077544426494346,
      "grad_norm": 17171.498046875,
      "learning_rate": 4.2113336672471245e-06,
      "loss": 25.3397,
      "step": 175
    },
    {
      "epoch": 0.00812370182321717,
      "grad_norm": 17196.4609375,
      "learning_rate": 3.885512251130763e-06,
      "loss": 25.0458,
      "step": 176
    },
    {
      "epoch": 0.008169859219939995,
      "grad_norm": 20215.6796875,
      "learning_rate": 3.5722980755146517e-06,
      "loss": 26.9744,
      "step": 177
    },
    {
      "epoch": 0.00821601661666282,
      "grad_norm": 18055.56640625,
      "learning_rate": 3.271776770026963e-06,
      "loss": 25.2058,
      "step": 178
    },
    {
      "epoch": 0.008262174013385646,
      "grad_norm": 17127.599609375,
      "learning_rate": 2.9840304941919415e-06,
      "loss": 23.8571,
      "step": 179
    },
    {
      "epoch": 0.00830833141010847,
      "grad_norm": 21535.5078125,
      "learning_rate": 2.7091379149682685e-06,
      "loss": 26.9367,
      "step": 180
    },
    {
      "epoch": 0.008354488806831295,
      "grad_norm": 20788.958984375,
      "learning_rate": 2.4471741852423237e-06,
      "loss": 22.6921,
      "step": 181
    },
    {
      "epoch": 0.00840064620355412,
      "grad_norm": 17802.25,
      "learning_rate": 2.1982109232821178e-06,
      "loss": 25.2687,
      "step": 182
    },
    {
      "epoch": 0.008446803600276944,
      "grad_norm": 15224.77734375,
      "learning_rate": 1.962316193157593e-06,
      "loss": 25.1923,
      "step": 183
    },
    {
      "epoch": 0.00849296099699977,
      "grad_norm": 25008.404296875,
      "learning_rate": 1.7395544861325718e-06,
      "loss": 23.9257,
      "step": 184
    },
    {
      "epoch": 0.008539118393722595,
      "grad_norm": 19298.392578125,
      "learning_rate": 1.5299867030334814e-06,
      "loss": 23.0269,
      "step": 185
    },
    {
      "epoch": 0.008585275790445419,
      "grad_norm": 16348.4912109375,
      "learning_rate": 1.333670137599713e-06,
      "loss": 23.2064,
      "step": 186
    },
    {
      "epoch": 0.008631433187168244,
      "grad_norm": 32499.119140625,
      "learning_rate": 1.1506584608200367e-06,
      "loss": 23.963,
      "step": 187
    },
    {
      "epoch": 0.008677590583891068,
      "grad_norm": 19804.919921875,
      "learning_rate": 9.810017062595322e-07,
      "loss": 23.3042,
      "step": 188
    },
    {
      "epoch": 0.008723747980613893,
      "grad_norm": 13096.90625,
      "learning_rate": 8.247462563808817e-07,
      "loss": 23.024,
      "step": 189
    },
    {
      "epoch": 0.008769905377336719,
      "grad_norm": 13534.07421875,
      "learning_rate": 6.819348298638839e-07,
      "loss": 24.1031,
      "step": 190
    },
    {
      "epoch": 0.008816062774059543,
      "grad_norm": 9464.7373046875,
      "learning_rate": 5.526064699265753e-07,
      "loss": 22.4278,
      "step": 191
    },
    {
      "epoch": 0.008862220170782368,
      "grad_norm": 15782.4638671875,
      "learning_rate": 4.367965336512403e-07,
      "loss": 22.7746,
      "step": 192
    },
    {
      "epoch": 0.008908377567505192,
      "grad_norm": 19455.8515625,
      "learning_rate": 3.3453668231809286e-07,
      "loss": 23.3504,
      "step": 193
    },
    {
      "epoch": 0.008954534964228017,
      "grad_norm": 25072.66796875,
      "learning_rate": 2.458548727494292e-07,
      "loss": 21.0983,
      "step": 194
    },
    {
      "epoch": 0.009000692360950842,
      "grad_norm": 17993.9296875,
      "learning_rate": 1.7077534966650766e-07,
      "loss": 23.3598,
      "step": 195
    },
    {
      "epoch": 0.009046849757673668,
      "grad_norm": 23557.642578125,
      "learning_rate": 1.0931863906127327e-07,
      "loss": 22.9573,
      "step": 196
    },
    {
      "epoch": 0.009093007154396492,
      "grad_norm": 19041.326171875,
      "learning_rate": 6.150154258476315e-08,
      "loss": 23.1464,
      "step": 197
    },
    {
      "epoch": 0.009139164551119317,
      "grad_norm": 26971.62890625,
      "learning_rate": 2.7337132953697554e-08,
      "loss": 20.0905,
      "step": 198
    },
    {
      "epoch": 0.009185321947842141,
      "grad_norm": 14283.2109375,
      "learning_rate": 6.834750376549792e-09,
      "loss": 21.5863,
      "step": 199
    },
    {
      "epoch": 0.009231479344564966,
      "grad_norm": 17224.2734375,
      "learning_rate": 0.0,
      "loss": 20.5303,
      "step": 200
    },
    {
      "epoch": 0.009231479344564966,
      "eval_loss": 6.4810471534729,
      "eval_runtime": 117.0685,
      "eval_samples_per_second": 311.689,
      "eval_steps_per_second": 77.929,
      "step": 200
    }
  ],
  "logging_steps": 1,
  "max_steps": 200,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 50,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 5,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 3
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 330900386611200.0,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}