|
{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 396,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0025252525252525255,
      "grad_norm": 32.43943786621094,
      "learning_rate": 5.000000000000001e-07,
      "loss": 0.9259,
      "step": 1
    },
    {
      "epoch": 0.012626262626262626,
      "grad_norm": 18.802860260009766,
      "learning_rate": 2.5e-06,
      "loss": 0.8543,
      "step": 5
    },
    {
      "epoch": 0.025252525252525252,
      "grad_norm": 11.966745376586914,
      "learning_rate": 5e-06,
      "loss": 0.6558,
      "step": 10
    },
    {
      "epoch": 0.03787878787878788,
      "grad_norm": 4.041635990142822,
      "learning_rate": 7.500000000000001e-06,
      "loss": 0.5411,
      "step": 15
    },
    {
      "epoch": 0.050505050505050504,
      "grad_norm": 2.9634244441986084,
      "learning_rate": 1e-05,
      "loss": 0.5026,
      "step": 20
    },
    {
      "epoch": 0.06313131313131314,
      "grad_norm": 4.437589168548584,
      "learning_rate": 1.25e-05,
      "loss": 0.4873,
      "step": 25
    },
    {
      "epoch": 0.07575757575757576,
      "grad_norm": 3.071434497833252,
      "learning_rate": 1.5000000000000002e-05,
      "loss": 0.4655,
      "step": 30
    },
    {
      "epoch": 0.08838383838383838,
      "grad_norm": 2.649683713912964,
      "learning_rate": 1.7500000000000002e-05,
      "loss": 0.4277,
      "step": 35
    },
    {
      "epoch": 0.10101010101010101,
      "grad_norm": 1.9353753328323364,
      "learning_rate": 2e-05,
      "loss": 0.4123,
      "step": 40
    },
    {
      "epoch": 0.11363636363636363,
      "grad_norm": 1.7575881481170654,
      "learning_rate": 1.9990267166335665e-05,
      "loss": 0.3856,
      "step": 45
    },
    {
      "epoch": 0.12626262626262627,
      "grad_norm": 1.685341715812683,
      "learning_rate": 1.9961087610952893e-05,
      "loss": 0.3652,
      "step": 50
    },
    {
      "epoch": 0.1388888888888889,
      "grad_norm": 1.361409068107605,
      "learning_rate": 1.9912518133803466e-05,
      "loss": 0.3402,
      "step": 55
    },
    {
      "epoch": 0.15151515151515152,
      "grad_norm": 1.4926209449768066,
      "learning_rate": 1.9844653278615836e-05,
      "loss": 0.3274,
      "step": 60
    },
    {
      "epoch": 0.16414141414141414,
      "grad_norm": 1.358234167098999,
      "learning_rate": 1.9757625148859444e-05,
      "loss": 0.3158,
      "step": 65
    },
    {
      "epoch": 0.17676767676767677,
      "grad_norm": 1.3026068210601807,
      "learning_rate": 1.9651603150596497e-05,
      "loss": 0.3039,
      "step": 70
    },
    {
      "epoch": 0.1893939393939394,
      "grad_norm": 1.4417624473571777,
      "learning_rate": 1.9526793662721768e-05,
      "loss": 0.2947,
      "step": 75
    },
    {
      "epoch": 0.20202020202020202,
      "grad_norm": 1.306173324584961,
      "learning_rate": 1.9383439635232296e-05,
      "loss": 0.2866,
      "step": 80
    },
    {
      "epoch": 0.21464646464646464,
      "grad_norm": 1.3218271732330322,
      "learning_rate": 1.922182011630902e-05,
      "loss": 0.2807,
      "step": 85
    },
    {
      "epoch": 0.22727272727272727,
      "grad_norm": 1.3361605405807495,
      "learning_rate": 1.904224970913085e-05,
      "loss": 0.2705,
      "step": 90
    },
    {
      "epoch": 0.2398989898989899,
      "grad_norm": 1.2049956321716309,
      "learning_rate": 1.8845077959478615e-05,
      "loss": 0.2647,
      "step": 95
    },
    {
      "epoch": 0.25252525252525254,
      "grad_norm": 1.0850019454956055,
      "learning_rate": 1.8630688675320844e-05,
      "loss": 0.2558,
      "step": 100
    },
    {
      "epoch": 0.26515151515151514,
      "grad_norm": 1.0417327880859375,
      "learning_rate": 1.839949917970596e-05,
      "loss": 0.2519,
      "step": 105
    },
    {
      "epoch": 0.2777777777777778,
      "grad_norm": 1.1492786407470703,
      "learning_rate": 1.815195949841512e-05,
      "loss": 0.2491,
      "step": 110
    },
    {
      "epoch": 0.2904040404040404,
      "grad_norm": 1.007467269897461,
      "learning_rate": 1.788855148395699e-05,
      "loss": 0.2399,
      "step": 115
    },
    {
      "epoch": 0.30303030303030304,
      "grad_norm": 0.9345642924308777,
      "learning_rate": 1.7609787877609678e-05,
      "loss": 0.2389,
      "step": 120
    },
    {
      "epoch": 0.31565656565656564,
      "grad_norm": 0.907543957233429,
      "learning_rate": 1.731621131133564e-05,
      "loss": 0.2398,
      "step": 125
    },
    {
      "epoch": 0.3282828282828283,
      "grad_norm": 0.9041042923927307,
      "learning_rate": 1.700839325151233e-05,
      "loss": 0.2342,
      "step": 130
    },
    {
      "epoch": 0.3409090909090909,
      "grad_norm": 0.9225403070449829,
      "learning_rate": 1.668693288653478e-05,
      "loss": 0.2311,
      "step": 135
    },
    {
      "epoch": 0.35353535353535354,
      "grad_norm": 0.8411688208580017,
      "learning_rate": 1.6352455960455385e-05,
      "loss": 0.2281,
      "step": 140
    },
    {
      "epoch": 0.3661616161616162,
      "grad_norm": 0.8488685488700867,
      "learning_rate": 1.600561355493137e-05,
      "loss": 0.2218,
      "step": 145
    },
    {
      "epoch": 0.3787878787878788,
      "grad_norm": 0.8601275086402893,
      "learning_rate": 1.5647080821850868e-05,
      "loss": 0.2223,
      "step": 150
    },
    {
      "epoch": 0.39141414141414144,
      "grad_norm": 0.8070423007011414,
      "learning_rate": 1.5277555669104743e-05,
      "loss": 0.2153,
      "step": 155
    },
    {
      "epoch": 0.40404040404040403,
      "grad_norm": 0.8097870945930481,
      "learning_rate": 1.4897757402062285e-05,
      "loss": 0.2162,
      "step": 160
    },
    {
      "epoch": 0.4166666666666667,
      "grad_norm": 0.8055990934371948,
      "learning_rate": 1.4508425323395319e-05,
      "loss": 0.2134,
      "step": 165
    },
    {
      "epoch": 0.4292929292929293,
      "grad_norm": 0.8727415800094604,
      "learning_rate": 1.411031729397622e-05,
      "loss": 0.2115,
      "step": 170
    },
    {
      "epoch": 0.44191919191919193,
      "grad_norm": 0.79546058177948,
      "learning_rate": 1.370420825765114e-05,
      "loss": 0.2123,
      "step": 175
    },
    {
      "epoch": 0.45454545454545453,
      "grad_norm": 0.7648718953132629,
      "learning_rate": 1.32908887327601e-05,
      "loss": 0.2101,
      "step": 180
    },
    {
      "epoch": 0.4671717171717172,
      "grad_norm": 0.7550036311149597,
      "learning_rate": 1.2871163273340309e-05,
      "loss": 0.2027,
      "step": 185
    },
    {
      "epoch": 0.4797979797979798,
      "grad_norm": 0.7257375121116638,
      "learning_rate": 1.2445848903008001e-05,
      "loss": 0.202,
      "step": 190
    },
    {
      "epoch": 0.49242424242424243,
      "grad_norm": 0.7294561266899109,
      "learning_rate": 1.201577352456748e-05,
      "loss": 0.2022,
      "step": 195
    },
    {
      "epoch": 0.5050505050505051,
      "grad_norm": 0.7660893797874451,
      "learning_rate": 1.1581774308443042e-05,
      "loss": 0.1997,
      "step": 200
    },
    {
      "epoch": 0.5176767676767676,
      "grad_norm": 0.7221814393997192,
      "learning_rate": 1.1144696063070884e-05,
      "loss": 0.1963,
      "step": 205
    },
    {
      "epoch": 0.5303030303030303,
      "grad_norm": 0.6848293542861938,
      "learning_rate": 1.070538959042311e-05,
      "loss": 0.1957,
      "step": 210
    },
    {
      "epoch": 0.5429292929292929,
      "grad_norm": 0.6825312972068787,
      "learning_rate": 1.026471002986491e-05,
      "loss": 0.1951,
      "step": 215
    },
    {
      "epoch": 0.5555555555555556,
      "grad_norm": 0.6754021644592285,
      "learning_rate": 9.823515193568715e-06,
      "loss": 0.1902,
      "step": 220
    },
    {
      "epoch": 0.5681818181818182,
      "grad_norm": 0.6493006348609924,
      "learning_rate": 9.382663896725578e-06,
      "loss": 0.1892,
      "step": 225
    },
    {
      "epoch": 0.5808080808080808,
      "grad_norm": 0.6259664297103882,
      "learning_rate": 8.943014285804072e-06,
      "loss": 0.1919,
      "step": 230
    },
    {
      "epoch": 0.5934343434343434,
      "grad_norm": 0.6426423192024231,
      "learning_rate": 8.505422168110936e-06,
      "loss": 0.1871,
      "step": 235
    },
    {
      "epoch": 0.6060606060606061,
      "grad_norm": 0.6354442834854126,
      "learning_rate": 8.070739345905032e-06,
      "loss": 0.1852,
      "step": 240
    },
    {
      "epoch": 0.6186868686868687,
      "grad_norm": 0.6016388535499573,
      "learning_rate": 7.639811958307421e-06,
      "loss": 0.1856,
      "step": 245
    },
    {
      "epoch": 0.6313131313131313,
      "grad_norm": 0.6031273007392883,
      "learning_rate": 7.213478834235079e-06,
      "loss": 0.1854,
      "step": 250
    },
    {
      "epoch": 0.6439393939393939,
      "grad_norm": 0.6175743937492371,
      "learning_rate": 6.792569859564445e-06,
      "loss": 0.183,
      "step": 255
    },
    {
      "epoch": 0.6565656565656566,
      "grad_norm": 0.6094260215759277,
      "learning_rate": 6.3779043617031775e-06,
      "loss": 0.1829,
      "step": 260
    },
    {
      "epoch": 0.6691919191919192,
      "grad_norm": 0.5978714227676392,
      "learning_rate": 5.9702895147146765e-06,
      "loss": 0.1824,
      "step": 265
    },
    {
      "epoch": 0.6818181818181818,
      "grad_norm": 0.5708032846450806,
      "learning_rate": 5.570518768099918e-06,
      "loss": 0.1785,
      "step": 270
    },
    {
      "epoch": 0.6944444444444444,
      "grad_norm": 0.5738463401794434,
      "learning_rate": 5.179370302295037e-06,
      "loss": 0.1816,
      "step": 275
    },
    {
      "epoch": 0.7070707070707071,
      "grad_norm": 0.5719105005264282,
      "learning_rate": 4.797605513891179e-06,
      "loss": 0.1758,
      "step": 280
    },
    {
      "epoch": 0.7196969696969697,
      "grad_norm": 0.5968112945556641,
      "learning_rate": 4.425967533525229e-06,
      "loss": 0.1779,
      "step": 285
    },
    {
      "epoch": 0.7323232323232324,
      "grad_norm": 0.5471134781837463,
      "learning_rate": 4.0651797793264356e-06,
      "loss": 0.1751,
      "step": 290
    },
    {
      "epoch": 0.7449494949494949,
      "grad_norm": 0.5474812984466553,
      "learning_rate": 3.7159445487347546e-06,
      "loss": 0.1769,
      "step": 295
    },
    {
      "epoch": 0.7575757575757576,
      "grad_norm": 0.5562990307807922,
      "learning_rate": 3.378941651431996e-06,
      "loss": 0.1744,
      "step": 300
    },
    {
      "epoch": 0.7702020202020202,
      "grad_norm": 0.5516139268875122,
      "learning_rate": 3.054827086046931e-06,
      "loss": 0.1718,
      "step": 305
    },
    {
      "epoch": 0.7828282828282829,
      "grad_norm": 0.5370557308197021,
      "learning_rate": 2.7442317632101743e-06,
      "loss": 0.171,
      "step": 310
    },
    {
      "epoch": 0.7954545454545454,
      "grad_norm": 0.5318647623062134,
      "learning_rate": 2.447760277444543e-06,
      "loss": 0.1714,
      "step": 315
    },
    {
      "epoch": 0.8080808080808081,
      "grad_norm": 0.5430074334144592,
      "learning_rate": 2.165989730281475e-06,
      "loss": 0.1727,
      "step": 320
    },
    {
      "epoch": 0.8207070707070707,
      "grad_norm": 0.575107216835022,
      "learning_rate": 1.8994686068943792e-06,
      "loss": 0.1695,
      "step": 325
    },
    {
      "epoch": 0.8333333333333334,
      "grad_norm": 0.566429615020752,
      "learning_rate": 1.6487157084356454e-06,
      "loss": 0.1718,
      "step": 330
    },
    {
      "epoch": 0.8459595959595959,
      "grad_norm": 0.5522480607032776,
      "learning_rate": 1.414219142155585e-06,
      "loss": 0.1694,
      "step": 335
    },
    {
      "epoch": 0.8585858585858586,
      "grad_norm": 0.5301287174224854,
      "learning_rate": 1.196435371269089e-06,
      "loss": 0.1695,
      "step": 340
    },
    {
      "epoch": 0.8712121212121212,
      "grad_norm": 0.5381254553794861,
      "learning_rate": 9.957883264195224e-07,
      "loss": 0.1709,
      "step": 345
    },
    {
      "epoch": 0.8838383838383839,
      "grad_norm": 0.52545166015625,
      "learning_rate": 8.126685804694401e-07,
      "loss": 0.1686,
      "step": 350
    },
    {
      "epoch": 0.8964646464646465,
      "grad_norm": 0.5226617455482483,
      "learning_rate": 6.47432588224437e-07,
      "loss": 0.1674,
      "step": 355
    },
    {
      "epoch": 0.9090909090909091,
      "grad_norm": 0.5346973538398743,
      "learning_rate": 5.004019925700921e-07,
      "loss": 0.1692,
      "step": 360
    },
    {
      "epoch": 0.9217171717171717,
      "grad_norm": 0.5276767015457153,
      "learning_rate": 3.7186299837261855e-07,
      "loss": 0.1699,
      "step": 365
    },
    {
      "epoch": 0.9343434343434344,
      "grad_norm": 0.5401973128318787,
      "learning_rate": 2.620658153619959e-07,
      "loss": 0.1684,
      "step": 370
    },
    {
      "epoch": 0.946969696969697,
      "grad_norm": 0.5313433408737183,
      "learning_rate": 1.7122417108203727e-07,
      "loss": 0.1691,
      "step": 375
    },
    {
      "epoch": 0.9595959595959596,
      "grad_norm": 0.5390282273292542,
      "learning_rate": 9.951489485545696e-08,
      "loss": 0.169,
      "step": 380
    },
    {
      "epoch": 0.9722222222222222,
      "grad_norm": 0.5103330016136169,
      "learning_rate": 4.7077573573793836e-08,
      "loss": 0.1653,
      "step": 385
    },
    {
      "epoch": 0.9848484848484849,
      "grad_norm": 0.5155799388885498,
      "learning_rate": 1.4014279982216272e-08,
      "loss": 0.1668,
      "step": 390
    },
    {
      "epoch": 0.9974747474747475,
      "grad_norm": 0.5293468832969666,
      "learning_rate": 3.893739881088987e-10,
      "loss": 0.1711,
      "step": 395
    },
    {
      "epoch": 1.0,
      "eval_loss": 0.12780985236167908,
      "eval_runtime": 1.3224,
      "eval_samples_per_second": 0.756,
      "eval_steps_per_second": 0.756,
      "step": 396
    },
    {
      "epoch": 1.0,
      "step": 396,
      "total_flos": 165828687298560.0,
      "train_loss": 0.24546745942548068,
      "train_runtime": 6726.335,
      "train_samples_per_second": 7.533,
      "train_steps_per_second": 0.059
    }
  ],
  "logging_steps": 5,
  "max_steps": 396,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 100,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 165828687298560.0,
  "train_batch_size": 16,
  "trial_name": null,
  "trial_params": null
}