|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 3.0, |
|
"eval_steps": 500, |
|
"global_step": 297, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.010101010101010102, |
|
"grad_norm": 1.4315768480300903, |
|
"learning_rate": 2.98989898989899e-05, |
|
"loss": 10.0726, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.020202020202020204, |
|
"grad_norm": 1.5613890886306763, |
|
"learning_rate": 2.97979797979798e-05, |
|
"loss": 10.1289, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.030303030303030304, |
|
"grad_norm": 1.4086546897888184, |
|
"learning_rate": 2.96969696969697e-05, |
|
"loss": 10.0402, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.04040404040404041, |
|
"grad_norm": 1.508208155632019, |
|
"learning_rate": 2.9595959595959595e-05, |
|
"loss": 10.0206, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.050505050505050504, |
|
"grad_norm": 1.6154247522354126, |
|
"learning_rate": 2.9494949494949495e-05, |
|
"loss": 10.0272, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.06060606060606061, |
|
"grad_norm": 1.8789210319519043, |
|
"learning_rate": 2.9393939393939394e-05, |
|
"loss": 10.0711, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.0707070707070707, |
|
"grad_norm": 1.879401683807373, |
|
"learning_rate": 2.9292929292929294e-05, |
|
"loss": 9.9488, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.08080808080808081, |
|
"grad_norm": 1.9335699081420898, |
|
"learning_rate": 2.9191919191919193e-05, |
|
"loss": 9.8084, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.09090909090909091, |
|
"grad_norm": 1.9985203742980957, |
|
"learning_rate": 2.9090909090909093e-05, |
|
"loss": 9.7694, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.10101010101010101, |
|
"grad_norm": 2.0941152572631836, |
|
"learning_rate": 2.8989898989898992e-05, |
|
"loss": 9.7268, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.1111111111111111, |
|
"grad_norm": 2.344831943511963, |
|
"learning_rate": 2.8888888888888888e-05, |
|
"loss": 9.7139, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.12121212121212122, |
|
"grad_norm": 2.27956485748291, |
|
"learning_rate": 2.8787878787878788e-05, |
|
"loss": 9.5774, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.13131313131313133, |
|
"grad_norm": 2.440272331237793, |
|
"learning_rate": 2.8686868686868687e-05, |
|
"loss": 9.4884, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.1414141414141414, |
|
"grad_norm": 2.7465505599975586, |
|
"learning_rate": 2.8585858585858587e-05, |
|
"loss": 9.4327, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.15151515151515152, |
|
"grad_norm": 2.5827443599700928, |
|
"learning_rate": 2.8484848484848486e-05, |
|
"loss": 9.2545, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.16161616161616163, |
|
"grad_norm": 2.7065556049346924, |
|
"learning_rate": 2.8383838383838386e-05, |
|
"loss": 9.2304, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.1717171717171717, |
|
"grad_norm": 2.944704294204712, |
|
"learning_rate": 2.8282828282828285e-05, |
|
"loss": 9.1568, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.18181818181818182, |
|
"grad_norm": 2.667189598083496, |
|
"learning_rate": 2.8181818181818185e-05, |
|
"loss": 9.0014, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.1919191919191919, |
|
"grad_norm": 2.928550958633423, |
|
"learning_rate": 2.808080808080808e-05, |
|
"loss": 8.9312, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.20202020202020202, |
|
"grad_norm": 2.930938482284546, |
|
"learning_rate": 2.797979797979798e-05, |
|
"loss": 8.8656, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.21212121212121213, |
|
"grad_norm": 2.750476121902466, |
|
"learning_rate": 2.787878787878788e-05, |
|
"loss": 8.7607, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.2222222222222222, |
|
"grad_norm": 2.8917646408081055, |
|
"learning_rate": 2.777777777777778e-05, |
|
"loss": 8.699, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.23232323232323232, |
|
"grad_norm": 2.8639166355133057, |
|
"learning_rate": 2.767676767676768e-05, |
|
"loss": 8.5703, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.24242424242424243, |
|
"grad_norm": 2.9358019828796387, |
|
"learning_rate": 2.7575757575757578e-05, |
|
"loss": 8.519, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.25252525252525254, |
|
"grad_norm": 3.063692092895508, |
|
"learning_rate": 2.7474747474747478e-05, |
|
"loss": 8.3564, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.26262626262626265, |
|
"grad_norm": 2.702965259552002, |
|
"learning_rate": 2.7373737373737374e-05, |
|
"loss": 8.3403, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.2727272727272727, |
|
"grad_norm": 2.8974392414093018, |
|
"learning_rate": 2.7272727272727273e-05, |
|
"loss": 8.217, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.2828282828282828, |
|
"grad_norm": 3.0662784576416016, |
|
"learning_rate": 2.7171717171717173e-05, |
|
"loss": 8.1575, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.29292929292929293, |
|
"grad_norm": 2.8070247173309326, |
|
"learning_rate": 2.7070707070707072e-05, |
|
"loss": 8.0814, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.30303030303030304, |
|
"grad_norm": 2.507457971572876, |
|
"learning_rate": 2.696969696969697e-05, |
|
"loss": 8.0387, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.31313131313131315, |
|
"grad_norm": 2.7476532459259033, |
|
"learning_rate": 2.686868686868687e-05, |
|
"loss": 7.96, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.32323232323232326, |
|
"grad_norm": 2.3512086868286133, |
|
"learning_rate": 2.676767676767677e-05, |
|
"loss": 7.8822, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.3333333333333333, |
|
"grad_norm": 2.1365387439727783, |
|
"learning_rate": 2.6666666666666667e-05, |
|
"loss": 7.8282, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.3434343434343434, |
|
"grad_norm": 2.2421772480010986, |
|
"learning_rate": 2.6565656565656566e-05, |
|
"loss": 7.7613, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.35353535353535354, |
|
"grad_norm": 2.111851930618286, |
|
"learning_rate": 2.6464646464646466e-05, |
|
"loss": 7.7516, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.36363636363636365, |
|
"grad_norm": 2.2756099700927734, |
|
"learning_rate": 2.6363636363636365e-05, |
|
"loss": 7.6066, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.37373737373737376, |
|
"grad_norm": 1.5493675470352173, |
|
"learning_rate": 2.6262626262626265e-05, |
|
"loss": 7.6497, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.3838383838383838, |
|
"grad_norm": 1.7187950611114502, |
|
"learning_rate": 2.6161616161616164e-05, |
|
"loss": 7.5753, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.3939393939393939, |
|
"grad_norm": 1.5378119945526123, |
|
"learning_rate": 2.6060606060606063e-05, |
|
"loss": 7.591, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.40404040404040403, |
|
"grad_norm": 1.3718913793563843, |
|
"learning_rate": 2.595959595959596e-05, |
|
"loss": 7.6196, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.41414141414141414, |
|
"grad_norm": 1.9546277523040771, |
|
"learning_rate": 2.585858585858586e-05, |
|
"loss": 7.3466, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.42424242424242425, |
|
"grad_norm": 1.3100489377975464, |
|
"learning_rate": 2.575757575757576e-05, |
|
"loss": 7.588, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.43434343434343436, |
|
"grad_norm": 1.1814428567886353, |
|
"learning_rate": 2.5656565656565658e-05, |
|
"loss": 7.392, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.4444444444444444, |
|
"grad_norm": 1.3818390369415283, |
|
"learning_rate": 2.5555555555555557e-05, |
|
"loss": 7.413, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.45454545454545453, |
|
"grad_norm": 1.725190281867981, |
|
"learning_rate": 2.5454545454545457e-05, |
|
"loss": 7.1846, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.46464646464646464, |
|
"grad_norm": 1.098010778427124, |
|
"learning_rate": 2.5353535353535356e-05, |
|
"loss": 7.384, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.47474747474747475, |
|
"grad_norm": 1.1586623191833496, |
|
"learning_rate": 2.5252525252525256e-05, |
|
"loss": 7.2481, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.48484848484848486, |
|
"grad_norm": 1.3093281984329224, |
|
"learning_rate": 2.5151515151515152e-05, |
|
"loss": 7.1453, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.494949494949495, |
|
"grad_norm": 1.2143803834915161, |
|
"learning_rate": 2.505050505050505e-05, |
|
"loss": 7.1706, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.5050505050505051, |
|
"grad_norm": 1.4759756326675415, |
|
"learning_rate": 2.494949494949495e-05, |
|
"loss": 7.1918, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.5151515151515151, |
|
"grad_norm": 1.170322060585022, |
|
"learning_rate": 2.484848484848485e-05, |
|
"loss": 7.1158, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.5252525252525253, |
|
"grad_norm": 1.025293231010437, |
|
"learning_rate": 2.474747474747475e-05, |
|
"loss": 7.085, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.5353535353535354, |
|
"grad_norm": 1.673163890838623, |
|
"learning_rate": 2.464646464646465e-05, |
|
"loss": 7.4093, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.5454545454545454, |
|
"grad_norm": 0.892270565032959, |
|
"learning_rate": 2.454545454545455e-05, |
|
"loss": 7.1903, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.5555555555555556, |
|
"grad_norm": 1.1452521085739136, |
|
"learning_rate": 2.4444444444444445e-05, |
|
"loss": 7.2127, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.5656565656565656, |
|
"grad_norm": 1.4096879959106445, |
|
"learning_rate": 2.4343434343434344e-05, |
|
"loss": 7.2507, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.5757575757575758, |
|
"grad_norm": 1.3830610513687134, |
|
"learning_rate": 2.4242424242424244e-05, |
|
"loss": 7.2779, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.5858585858585859, |
|
"grad_norm": 0.88773512840271, |
|
"learning_rate": 2.4141414141414143e-05, |
|
"loss": 7.1313, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.5959595959595959, |
|
"grad_norm": 1.5023548603057861, |
|
"learning_rate": 2.4040404040404043e-05, |
|
"loss": 7.3029, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.6060606060606061, |
|
"grad_norm": 0.9497267603874207, |
|
"learning_rate": 2.3939393939393942e-05, |
|
"loss": 7.1915, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.6161616161616161, |
|
"grad_norm": 1.035706639289856, |
|
"learning_rate": 2.3838383838383842e-05, |
|
"loss": 7.2144, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.6262626262626263, |
|
"grad_norm": 0.7967200875282288, |
|
"learning_rate": 2.3737373737373738e-05, |
|
"loss": 7.1566, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.6363636363636364, |
|
"grad_norm": 0.7237263917922974, |
|
"learning_rate": 2.3636363636363637e-05, |
|
"loss": 7.1931, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.6464646464646465, |
|
"grad_norm": 1.0147182941436768, |
|
"learning_rate": 2.3535353535353537e-05, |
|
"loss": 7.2676, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.6565656565656566, |
|
"grad_norm": 0.8428776264190674, |
|
"learning_rate": 2.3434343434343436e-05, |
|
"loss": 7.0818, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.6666666666666666, |
|
"grad_norm": 0.9503577947616577, |
|
"learning_rate": 2.3333333333333336e-05, |
|
"loss": 7.0711, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.6767676767676768, |
|
"grad_norm": 0.8867577314376831, |
|
"learning_rate": 2.3232323232323235e-05, |
|
"loss": 7.0556, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.6868686868686869, |
|
"grad_norm": 0.8943458795547485, |
|
"learning_rate": 2.3131313131313135e-05, |
|
"loss": 7.2772, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.696969696969697, |
|
"grad_norm": 0.8544779419898987, |
|
"learning_rate": 2.303030303030303e-05, |
|
"loss": 7.0846, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.7070707070707071, |
|
"grad_norm": 0.8547456860542297, |
|
"learning_rate": 2.292929292929293e-05, |
|
"loss": 7.092, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.7171717171717171, |
|
"grad_norm": 0.924784779548645, |
|
"learning_rate": 2.282828282828283e-05, |
|
"loss": 7.0622, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.7272727272727273, |
|
"grad_norm": 1.1316595077514648, |
|
"learning_rate": 2.272727272727273e-05, |
|
"loss": 7.0545, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.7373737373737373, |
|
"grad_norm": NaN, |
|
"learning_rate": 2.272727272727273e-05, |
|
"loss": 0.0, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.7474747474747475, |
|
"grad_norm": 0.7388734221458435, |
|
"learning_rate": 2.262626262626263e-05, |
|
"loss": 7.0892, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.7575757575757576, |
|
"grad_norm": 1.9678359031677246, |
|
"learning_rate": 2.2525252525252525e-05, |
|
"loss": 7.0092, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.7676767676767676, |
|
"grad_norm": 0.736301064491272, |
|
"learning_rate": 2.2424242424242424e-05, |
|
"loss": 7.1827, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.7777777777777778, |
|
"grad_norm": 0.7673182487487793, |
|
"learning_rate": 2.2323232323232324e-05, |
|
"loss": 7.0768, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.7878787878787878, |
|
"grad_norm": 1.4001637697219849, |
|
"learning_rate": 2.222222222222222e-05, |
|
"loss": 6.9754, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.797979797979798, |
|
"grad_norm": 0.8279094696044922, |
|
"learning_rate": 2.212121212121212e-05, |
|
"loss": 7.1222, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.8080808080808081, |
|
"grad_norm": 0.6914481520652771, |
|
"learning_rate": 2.202020202020202e-05, |
|
"loss": 7.0812, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.8181818181818182, |
|
"grad_norm": 0.7313191294670105, |
|
"learning_rate": 2.191919191919192e-05, |
|
"loss": 7.05, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.8282828282828283, |
|
"grad_norm": 1.4300901889801025, |
|
"learning_rate": 2.1818181818181818e-05, |
|
"loss": 7.2278, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.8383838383838383, |
|
"grad_norm": 1.2713080644607544, |
|
"learning_rate": 2.1717171717171717e-05, |
|
"loss": 6.9029, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.8484848484848485, |
|
"grad_norm": 0.814030110836029, |
|
"learning_rate": 2.1616161616161617e-05, |
|
"loss": 7.1241, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.8585858585858586, |
|
"grad_norm": 1.1318764686584473, |
|
"learning_rate": 2.1515151515151513e-05, |
|
"loss": 7.1621, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.8686868686868687, |
|
"grad_norm": 1.0205698013305664, |
|
"learning_rate": 2.1414141414141412e-05, |
|
"loss": 7.0894, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.8787878787878788, |
|
"grad_norm": 0.5895187854766846, |
|
"learning_rate": 2.1313131313131312e-05, |
|
"loss": 6.9731, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.8888888888888888, |
|
"grad_norm": 0.5949335098266602, |
|
"learning_rate": 2.121212121212121e-05, |
|
"loss": 6.9764, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.898989898989899, |
|
"grad_norm": 0.909782886505127, |
|
"learning_rate": 2.111111111111111e-05, |
|
"loss": 6.8954, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.9090909090909091, |
|
"grad_norm": 0.6604152321815491, |
|
"learning_rate": 2.101010101010101e-05, |
|
"loss": 7.015, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.9191919191919192, |
|
"grad_norm": 1.1579092741012573, |
|
"learning_rate": 2.090909090909091e-05, |
|
"loss": 7.1838, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.9292929292929293, |
|
"grad_norm": 1.1899914741516113, |
|
"learning_rate": 2.0808080808080806e-05, |
|
"loss": 7.196, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.9393939393939394, |
|
"grad_norm": 1.013912558555603, |
|
"learning_rate": 2.0707070707070705e-05, |
|
"loss": 7.1402, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.9494949494949495, |
|
"grad_norm": 0.907071053981781, |
|
"learning_rate": 2.0606060606060605e-05, |
|
"loss": 7.129, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.9595959595959596, |
|
"grad_norm": 0.7546154260635376, |
|
"learning_rate": 2.0505050505050504e-05, |
|
"loss": 7.0195, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.9696969696969697, |
|
"grad_norm": 0.662983775138855, |
|
"learning_rate": 2.0404040404040404e-05, |
|
"loss": 7.0075, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.9797979797979798, |
|
"grad_norm": 0.673218846321106, |
|
"learning_rate": 2.0303030303030303e-05, |
|
"loss": 7.0797, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.98989898989899, |
|
"grad_norm": 1.617126703262329, |
|
"learning_rate": 2.0202020202020203e-05, |
|
"loss": 6.8117, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 1.0646132230758667, |
|
"learning_rate": 2.01010101010101e-05, |
|
"loss": 7.174, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"eval_audio_cosine_sim": 0.383101224899292, |
|
"eval_loss": 4.921389102935791, |
|
"eval_runtime": 153.7637, |
|
"eval_samples_per_second": 0.059, |
|
"eval_steps_per_second": 0.02, |
|
"eval_text_cosine_sim": 0.16359001398086548, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 1.0101010101010102, |
|
"grad_norm": 0.7128923535346985, |
|
"learning_rate": 1.9999999999999998e-05, |
|
"loss": 7.0139, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 1.02020202020202, |
|
"grad_norm": 0.8200396299362183, |
|
"learning_rate": 1.9898989898989898e-05, |
|
"loss": 7.2107, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 1.0303030303030303, |
|
"grad_norm": 0.6839468479156494, |
|
"learning_rate": 1.9797979797979797e-05, |
|
"loss": 7.1309, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 1.0404040404040404, |
|
"grad_norm": 1.4797368049621582, |
|
"learning_rate": 1.9696969696969697e-05, |
|
"loss": 6.9112, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 1.0505050505050506, |
|
"grad_norm": 0.7093661427497864, |
|
"learning_rate": 1.9595959595959596e-05, |
|
"loss": 7.1014, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 1.0606060606060606, |
|
"grad_norm": 1.2138644456863403, |
|
"learning_rate": 1.9494949494949496e-05, |
|
"loss": 6.9265, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 1.0707070707070707, |
|
"grad_norm": 0.8460476994514465, |
|
"learning_rate": 1.9393939393939395e-05, |
|
"loss": 7.2191, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 1.0808080808080809, |
|
"grad_norm": 0.7066294550895691, |
|
"learning_rate": 1.929292929292929e-05, |
|
"loss": 7.1066, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 1.0909090909090908, |
|
"grad_norm": 1.7321295738220215, |
|
"learning_rate": 1.919191919191919e-05, |
|
"loss": 6.8371, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 1.101010101010101, |
|
"grad_norm": 0.9022343158721924, |
|
"learning_rate": 1.909090909090909e-05, |
|
"loss": 6.9655, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 1.1111111111111112, |
|
"grad_norm": 0.6070654988288879, |
|
"learning_rate": 1.898989898989899e-05, |
|
"loss": 6.9956, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 1.121212121212121, |
|
"grad_norm": 0.6426708698272705, |
|
"learning_rate": 1.888888888888889e-05, |
|
"loss": 6.964, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 1.1313131313131313, |
|
"grad_norm": 2.1155552864074707, |
|
"learning_rate": 1.878787878787879e-05, |
|
"loss": 6.7698, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 1.1414141414141414, |
|
"grad_norm": 0.9715363383293152, |
|
"learning_rate": 1.8686868686868688e-05, |
|
"loss": 7.1096, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 1.1515151515151516, |
|
"grad_norm": 1.2896918058395386, |
|
"learning_rate": 1.8585858585858584e-05, |
|
"loss": 6.7545, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 1.1616161616161615, |
|
"grad_norm": 0.6836603283882141, |
|
"learning_rate": 1.8484848484848484e-05, |
|
"loss": 7.042, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 1.1717171717171717, |
|
"grad_norm": 0.6922496557235718, |
|
"learning_rate": 1.8383838383838383e-05, |
|
"loss": 6.9593, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 1.1818181818181819, |
|
"grad_norm": 1.5327054262161255, |
|
"learning_rate": 1.8282828282828283e-05, |
|
"loss": 6.7356, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 1.1919191919191918, |
|
"grad_norm": 0.686898410320282, |
|
"learning_rate": 1.8181818181818182e-05, |
|
"loss": 6.8844, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 1.202020202020202, |
|
"grad_norm": 2.4868946075439453, |
|
"learning_rate": 1.808080808080808e-05, |
|
"loss": 7.2632, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 1.2121212121212122, |
|
"grad_norm": 0.705142080783844, |
|
"learning_rate": 1.797979797979798e-05, |
|
"loss": 6.9367, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 1.2222222222222223, |
|
"grad_norm": 0.6701662540435791, |
|
"learning_rate": 1.7878787878787877e-05, |
|
"loss": 6.8656, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 1.2323232323232323, |
|
"grad_norm": 2.3600728511810303, |
|
"learning_rate": 1.7777777777777777e-05, |
|
"loss": 7.2908, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 1.2424242424242424, |
|
"grad_norm": 0.729280948638916, |
|
"learning_rate": 1.7676767676767676e-05, |
|
"loss": 6.9778, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 1.2525252525252526, |
|
"grad_norm": 0.9563952684402466, |
|
"learning_rate": 1.7575757575757576e-05, |
|
"loss": 6.8367, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 1.2626262626262625, |
|
"grad_norm": 0.679764449596405, |
|
"learning_rate": 1.7474747474747475e-05, |
|
"loss": 6.9984, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 1.2727272727272727, |
|
"grad_norm": 1.1830710172653198, |
|
"learning_rate": 1.7373737373737375e-05, |
|
"loss": 7.0285, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 1.2828282828282829, |
|
"grad_norm": 0.7950251698493958, |
|
"learning_rate": 1.7272727272727274e-05, |
|
"loss": 7.0085, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 1.2929292929292928, |
|
"grad_norm": 0.8154851794242859, |
|
"learning_rate": 1.717171717171717e-05, |
|
"loss": 6.8398, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 1.303030303030303, |
|
"grad_norm": 1.1023356914520264, |
|
"learning_rate": 1.707070707070707e-05, |
|
"loss": 7.1022, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 1.3131313131313131, |
|
"grad_norm": 1.013553500175476, |
|
"learning_rate": 1.696969696969697e-05, |
|
"loss": 6.8513, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 1.3232323232323233, |
|
"grad_norm": 1.4370501041412354, |
|
"learning_rate": 1.686868686868687e-05, |
|
"loss": 7.208, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 1.3333333333333333, |
|
"grad_norm": 0.7258216738700867, |
|
"learning_rate": 1.6767676767676768e-05, |
|
"loss": 6.954, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 1.3434343434343434, |
|
"grad_norm": 0.7258408069610596, |
|
"learning_rate": 1.6666666666666667e-05, |
|
"loss": 7.0258, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 1.3535353535353536, |
|
"grad_norm": 0.8000563383102417, |
|
"learning_rate": 1.6565656565656567e-05, |
|
"loss": 7.0238, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 1.3636363636363638, |
|
"grad_norm": 0.9891355633735657, |
|
"learning_rate": 1.6464646464646466e-05, |
|
"loss": 6.9668, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 1.3737373737373737, |
|
"grad_norm": 0.7241990566253662, |
|
"learning_rate": 1.6363636363636363e-05, |
|
"loss": 7.0859, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 1.3838383838383839, |
|
"grad_norm": 0.5333958268165588, |
|
"learning_rate": 1.6262626262626262e-05, |
|
"loss": 7.0339, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 1.393939393939394, |
|
"grad_norm": 0.7480002641677856, |
|
"learning_rate": 1.616161616161616e-05, |
|
"loss": 6.9745, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 1.404040404040404, |
|
"grad_norm": 0.8260339498519897, |
|
"learning_rate": 1.606060606060606e-05, |
|
"loss": 7.0461, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 1.4141414141414141, |
|
"grad_norm": 0.7007152438163757, |
|
"learning_rate": 1.595959595959596e-05, |
|
"loss": 7.046, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 1.4242424242424243, |
|
"grad_norm": 1.532183051109314, |
|
"learning_rate": 1.585858585858586e-05, |
|
"loss": 6.8063, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 1.4343434343434343, |
|
"grad_norm": 0.6525304317474365, |
|
"learning_rate": 1.575757575757576e-05, |
|
"loss": 6.9673, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 1.4444444444444444, |
|
"grad_norm": 0.875150203704834, |
|
"learning_rate": 1.5656565656565655e-05, |
|
"loss": 6.9106, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 1.4545454545454546, |
|
"grad_norm": 0.7586594223976135, |
|
"learning_rate": 1.5555555555555555e-05, |
|
"loss": 6.9954, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 1.4646464646464645, |
|
"grad_norm": 1.1310893297195435, |
|
"learning_rate": 1.5454545454545454e-05, |
|
"loss": 7.1989, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 1.4747474747474747, |
|
"grad_norm": 1.3009579181671143, |
|
"learning_rate": 1.5353535353535354e-05, |
|
"loss": 7.1353, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 1.4848484848484849, |
|
"grad_norm": 0.6209822297096252, |
|
"learning_rate": 1.5252525252525253e-05, |
|
"loss": 6.9722, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 1.494949494949495, |
|
"grad_norm": 0.7535139322280884, |
|
"learning_rate": 1.5151515151515153e-05, |
|
"loss": 7.0278, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 1.5050505050505052, |
|
"grad_norm": 0.6947258114814758, |
|
"learning_rate": 1.5050505050505052e-05, |
|
"loss": 7.0663, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 1.5151515151515151, |
|
"grad_norm": 0.7372407913208008, |
|
"learning_rate": 1.494949494949495e-05, |
|
"loss": 6.9891, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 1.5252525252525253, |
|
"grad_norm": 0.8788199424743652, |
|
"learning_rate": 1.484848484848485e-05, |
|
"loss": 6.9849, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 1.5353535353535355, |
|
"grad_norm": 0.9445679187774658, |
|
"learning_rate": 1.4747474747474747e-05, |
|
"loss": 6.976, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 1.5454545454545454, |
|
"grad_norm": 1.7923697233200073, |
|
"learning_rate": 1.4646464646464647e-05, |
|
"loss": 6.7889, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 1.5555555555555556, |
|
"grad_norm": 0.6855000257492065, |
|
"learning_rate": 1.4545454545454546e-05, |
|
"loss": 6.9884, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 1.5656565656565657, |
|
"grad_norm": 0.8418505191802979, |
|
"learning_rate": 1.4444444444444444e-05, |
|
"loss": 6.9349, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 1.5757575757575757, |
|
"grad_norm": 0.7767158150672913, |
|
"learning_rate": 1.4343434343434344e-05, |
|
"loss": 6.9518, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 1.5858585858585859, |
|
"grad_norm": 0.9679882526397705, |
|
"learning_rate": 1.4242424242424243e-05, |
|
"loss": 6.8211, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 1.595959595959596, |
|
"grad_norm": 0.9250569343566895, |
|
"learning_rate": 1.4141414141414143e-05, |
|
"loss": 6.9044, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 1.606060606060606, |
|
"grad_norm": 0.8403336405754089, |
|
"learning_rate": 1.404040404040404e-05, |
|
"loss": 6.8393, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 1.6161616161616161, |
|
"grad_norm": 0.9166443347930908, |
|
"learning_rate": 1.393939393939394e-05, |
|
"loss": 6.8174, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 1.6262626262626263, |
|
"grad_norm": 0.5435317754745483, |
|
"learning_rate": 1.383838383838384e-05, |
|
"loss": 6.9202, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 1.6363636363636362, |
|
"grad_norm": 1.0361636877059937, |
|
"learning_rate": 1.3737373737373739e-05, |
|
"loss": 6.9836, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 1.6464646464646466, |
|
"grad_norm": 1.709415316581726, |
|
"learning_rate": 1.3636363636363637e-05, |
|
"loss": 7.1939, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 1.6565656565656566, |
|
"grad_norm": 1.3511853218078613, |
|
"learning_rate": 1.3535353535353536e-05, |
|
"loss": 7.0895, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 1.6666666666666665, |
|
"grad_norm": 0.6736574172973633, |
|
"learning_rate": 1.3434343434343436e-05, |
|
"loss": 6.8909, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 1.676767676767677, |
|
"grad_norm": 0.6862733364105225, |
|
"learning_rate": 1.3333333333333333e-05, |
|
"loss": 6.9872, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 1.6868686868686869, |
|
"grad_norm": 1.0613605976104736, |
|
"learning_rate": 1.3232323232323233e-05, |
|
"loss": 6.9787, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 1.696969696969697, |
|
"grad_norm": 0.9397658705711365, |
|
"learning_rate": 1.3131313131313132e-05, |
|
"loss": 6.85, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 1.7070707070707072, |
|
"grad_norm": 0.6116166114807129, |
|
"learning_rate": 1.3030303030303032e-05, |
|
"loss": 6.9257, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 1.7171717171717171, |
|
"grad_norm": 1.0234121084213257, |
|
"learning_rate": 1.292929292929293e-05, |
|
"loss": 7.0316, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 1.7272727272727273, |
|
"grad_norm": 1.68846595287323, |
|
"learning_rate": 1.2828282828282829e-05, |
|
"loss": 7.1854, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 1.7373737373737375, |
|
"grad_norm": 0.686038076877594, |
|
"learning_rate": 1.2727272727272728e-05, |
|
"loss": 6.9311, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 1.7474747474747474, |
|
"grad_norm": 1.3312983512878418, |
|
"learning_rate": 1.2626262626262628e-05, |
|
"loss": 7.1633, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 1.7575757575757576, |
|
"grad_norm": 0.688218355178833, |
|
"learning_rate": 1.2525252525252526e-05, |
|
"loss": 7.0336, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 1.7676767676767677, |
|
"grad_norm": 0.768675684928894, |
|
"learning_rate": 1.2424242424242425e-05, |
|
"loss": 6.9246, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 1.7777777777777777, |
|
"grad_norm": 0.6171760559082031, |
|
"learning_rate": 1.2323232323232325e-05, |
|
"loss": 7.0127, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 1.7878787878787878, |
|
"grad_norm": 0.79632967710495, |
|
"learning_rate": 1.2222222222222222e-05, |
|
"loss": 7.0955, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 1.797979797979798, |
|
"grad_norm": 0.5743001699447632, |
|
"learning_rate": 1.2121212121212122e-05, |
|
"loss": 7.0305, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 1.808080808080808, |
|
"grad_norm": 0.648186445236206, |
|
"learning_rate": 1.2020202020202021e-05, |
|
"loss": 7.0791, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 1.8181818181818183, |
|
"grad_norm": 0.9388221502304077, |
|
"learning_rate": 1.1919191919191921e-05, |
|
"loss": 6.8963, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 1.8282828282828283, |
|
"grad_norm": 0.6248198747634888, |
|
"learning_rate": 1.1818181818181819e-05, |
|
"loss": 7.1456, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 1.8383838383838382, |
|
"grad_norm": 0.7853441834449768, |
|
"learning_rate": 1.1717171717171718e-05, |
|
"loss": 7.1302, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 1.8484848484848486, |
|
"grad_norm": 0.7538427710533142, |
|
"learning_rate": 1.1616161616161618e-05, |
|
"loss": 7.0855, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 1.8585858585858586, |
|
"grad_norm": 1.3595892190933228, |
|
"learning_rate": 1.1515151515151515e-05, |
|
"loss": 6.8817, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 1.8686868686868687, |
|
"grad_norm": 1.2711068391799927, |
|
"learning_rate": 1.1414141414141415e-05, |
|
"loss": 6.9406, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 1.878787878787879, |
|
"grad_norm": 0.7485206127166748, |
|
"learning_rate": 1.1313131313131314e-05, |
|
"loss": 7.0214, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 1.8888888888888888, |
|
"grad_norm": 1.068521499633789, |
|
"learning_rate": 1.1212121212121212e-05, |
|
"loss": 6.9544, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 1.898989898989899, |
|
"grad_norm": 0.6432831287384033, |
|
"learning_rate": 1.111111111111111e-05, |
|
"loss": 7.0268, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 1.9090909090909092, |
|
"grad_norm": 1.0608913898468018, |
|
"learning_rate": 1.101010101010101e-05, |
|
"loss": 6.9549, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 1.9191919191919191, |
|
"grad_norm": 0.7320489883422852, |
|
"learning_rate": 1.0909090909090909e-05, |
|
"loss": 6.9866, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 1.9292929292929293, |
|
"grad_norm": 1.776832103729248, |
|
"learning_rate": 1.0808080808080808e-05, |
|
"loss": 6.8107, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 1.9393939393939394, |
|
"grad_norm": 0.8337501883506775, |
|
"learning_rate": 1.0707070707070706e-05, |
|
"loss": 6.9117, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 1.9494949494949494, |
|
"grad_norm": 0.9856388568878174, |
|
"learning_rate": 1.0606060606060606e-05, |
|
"loss": 6.8311, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 1.9595959595959596, |
|
"grad_norm": 1.3855446577072144, |
|
"learning_rate": 1.0505050505050505e-05, |
|
"loss": 7.1336, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 1.9696969696969697, |
|
"grad_norm": 1.5992196798324585, |
|
"learning_rate": 1.0404040404040403e-05, |
|
"loss": 6.6699, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 1.9797979797979797, |
|
"grad_norm": 2.3153018951416016, |
|
"learning_rate": 1.0303030303030302e-05, |
|
"loss": 7.2326, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 1.98989898989899, |
|
"grad_norm": 0.5802290439605713, |
|
"learning_rate": 1.0202020202020202e-05, |
|
"loss": 6.9314, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"grad_norm": 0.8023201823234558, |
|
"learning_rate": 1.0101010101010101e-05, |
|
"loss": 6.7947, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"eval_audio_cosine_sim": 0.37472105026245117, |
|
"eval_loss": 5.077270030975342, |
|
"eval_runtime": 152.9953, |
|
"eval_samples_per_second": 0.059, |
|
"eval_steps_per_second": 0.02, |
|
"eval_text_cosine_sim": 0.1508849412202835, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 2.01010101010101, |
|
"grad_norm": 0.8875887989997864, |
|
"learning_rate": 9.999999999999999e-06, |
|
"loss": 6.9461, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 2.0202020202020203, |
|
"grad_norm": 0.9611765742301941, |
|
"learning_rate": 9.898989898989899e-06, |
|
"loss": 6.9659, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 2.0303030303030303, |
|
"grad_norm": 1.539200782775879, |
|
"learning_rate": 9.797979797979798e-06, |
|
"loss": 6.6096, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 2.04040404040404, |
|
"grad_norm": 0.8153275847434998, |
|
"learning_rate": 9.696969696969698e-06, |
|
"loss": 6.8189, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 2.0505050505050506, |
|
"grad_norm": 0.8883192539215088, |
|
"learning_rate": 9.595959595959595e-06, |
|
"loss": 6.8387, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 2.0606060606060606, |
|
"grad_norm": 1.5102068185806274, |
|
"learning_rate": 9.494949494949495e-06, |
|
"loss": 6.5686, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 2.0707070707070705, |
|
"grad_norm": 0.86061692237854, |
|
"learning_rate": 9.393939393939394e-06, |
|
"loss": 6.8586, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 2.080808080808081, |
|
"grad_norm": 1.1872936487197876, |
|
"learning_rate": 9.292929292929292e-06, |
|
"loss": 6.9541, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 2.090909090909091, |
|
"grad_norm": 0.6017976999282837, |
|
"learning_rate": 9.191919191919192e-06, |
|
"loss": 6.7969, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 2.101010101010101, |
|
"grad_norm": 2.1137189865112305, |
|
"learning_rate": 9.090909090909091e-06, |
|
"loss": 7.2209, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 2.111111111111111, |
|
"grad_norm": 0.7061384916305542, |
|
"learning_rate": 8.98989898989899e-06, |
|
"loss": 6.7687, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 2.121212121212121, |
|
"grad_norm": 0.7401531934738159, |
|
"learning_rate": 8.888888888888888e-06, |
|
"loss": 6.8944, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 2.1313131313131315, |
|
"grad_norm": 1.1122565269470215, |
|
"learning_rate": 8.787878787878788e-06, |
|
"loss": 6.6628, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 2.1414141414141414, |
|
"grad_norm": 0.6532828211784363, |
|
"learning_rate": 8.686868686868687e-06, |
|
"loss": 6.8288, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 2.1515151515151514, |
|
"grad_norm": 1.3121882677078247, |
|
"learning_rate": 8.585858585858585e-06, |
|
"loss": 7.0173, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 2.1616161616161618, |
|
"grad_norm": 1.4856034517288208, |
|
"learning_rate": 8.484848484848485e-06, |
|
"loss": 7.0473, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 2.1717171717171717, |
|
"grad_norm": 1.0639023780822754, |
|
"learning_rate": 8.383838383838384e-06, |
|
"loss": 6.9259, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 2.1818181818181817, |
|
"grad_norm": 0.9515113830566406, |
|
"learning_rate": 8.282828282828283e-06, |
|
"loss": 6.9661, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 2.191919191919192, |
|
"grad_norm": 0.6862073540687561, |
|
"learning_rate": 8.181818181818181e-06, |
|
"loss": 6.7838, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 2.202020202020202, |
|
"grad_norm": 0.6430158615112305, |
|
"learning_rate": 8.08080808080808e-06, |
|
"loss": 6.9264, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 2.212121212121212, |
|
"grad_norm": 0.9119675159454346, |
|
"learning_rate": 7.97979797979798e-06, |
|
"loss": 6.9994, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 2.2222222222222223, |
|
"grad_norm": 1.3677645921707153, |
|
"learning_rate": 7.87878787878788e-06, |
|
"loss": 6.6887, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 2.2323232323232323, |
|
"grad_norm": 1.3011837005615234, |
|
"learning_rate": 7.777777777777777e-06, |
|
"loss": 6.6735, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 2.242424242424242, |
|
"grad_norm": 0.8368801474571228, |
|
"learning_rate": 7.676767676767677e-06, |
|
"loss": 6.9868, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 2.2525252525252526, |
|
"grad_norm": 1.2124541997909546, |
|
"learning_rate": 7.5757575757575764e-06, |
|
"loss": 7.0907, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 2.2626262626262625, |
|
"grad_norm": 0.5335760116577148, |
|
"learning_rate": 7.474747474747475e-06, |
|
"loss": 6.9152, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 2.2727272727272725, |
|
"grad_norm": 1.2292662858963013, |
|
"learning_rate": 7.373737373737374e-06, |
|
"loss": 7.0845, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 2.282828282828283, |
|
"grad_norm": 1.1617155075073242, |
|
"learning_rate": 7.272727272727273e-06, |
|
"loss": 6.8596, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 2.292929292929293, |
|
"grad_norm": 1.7857162952423096, |
|
"learning_rate": 7.171717171717172e-06, |
|
"loss": 6.7016, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 2.303030303030303, |
|
"grad_norm": 0.7340604066848755, |
|
"learning_rate": 7.070707070707071e-06, |
|
"loss": 6.924, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 2.313131313131313, |
|
"grad_norm": 1.880449652671814, |
|
"learning_rate": 6.96969696969697e-06, |
|
"loss": 7.1343, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 2.323232323232323, |
|
"grad_norm": 0.9435675144195557, |
|
"learning_rate": 6.868686868686869e-06, |
|
"loss": 6.8979, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 2.3333333333333335, |
|
"grad_norm": 0.877120316028595, |
|
"learning_rate": 6.767676767676768e-06, |
|
"loss": 6.8256, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 2.3434343434343434, |
|
"grad_norm": 0.9317502975463867, |
|
"learning_rate": 6.666666666666667e-06, |
|
"loss": 6.8409, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 2.3535353535353534, |
|
"grad_norm": 1.0060955286026, |
|
"learning_rate": 6.565656565656566e-06, |
|
"loss": 7.0845, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 2.3636363636363638, |
|
"grad_norm": 0.8277117609977722, |
|
"learning_rate": 6.464646464646465e-06, |
|
"loss": 7.092, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 2.3737373737373737, |
|
"grad_norm": 0.755837619304657, |
|
"learning_rate": 6.363636363636364e-06, |
|
"loss": 6.8859, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 2.3838383838383836, |
|
"grad_norm": 0.6454436182975769, |
|
"learning_rate": 6.262626262626263e-06, |
|
"loss": 7.0516, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 2.393939393939394, |
|
"grad_norm": 1.4383716583251953, |
|
"learning_rate": 6.161616161616162e-06, |
|
"loss": 6.7176, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 2.404040404040404, |
|
"grad_norm": 1.2009567022323608, |
|
"learning_rate": 6.060606060606061e-06, |
|
"loss": 6.9433, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 2.4141414141414144, |
|
"grad_norm": 0.7479004859924316, |
|
"learning_rate": 5.9595959595959605e-06, |
|
"loss": 6.9019, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 2.4242424242424243, |
|
"grad_norm": 0.6840262413024902, |
|
"learning_rate": 5.858585858585859e-06, |
|
"loss": 6.9329, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 2.4343434343434343, |
|
"grad_norm": 0.6570662260055542, |
|
"learning_rate": 5.757575757575758e-06, |
|
"loss": 6.9839, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 2.4444444444444446, |
|
"grad_norm": 1.0435160398483276, |
|
"learning_rate": 5.656565656565657e-06, |
|
"loss": 6.9886, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 2.4545454545454546, |
|
"grad_norm": 1.2880451679229736, |
|
"learning_rate": 5.555555555555555e-06, |
|
"loss": 7.0024, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 2.4646464646464645, |
|
"grad_norm": 1.2956496477127075, |
|
"learning_rate": 5.4545454545454545e-06, |
|
"loss": 7.1377, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 2.474747474747475, |
|
"grad_norm": 1.3433921337127686, |
|
"learning_rate": 5.353535353535353e-06, |
|
"loss": 7.0797, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 2.484848484848485, |
|
"grad_norm": 0.6715502738952637, |
|
"learning_rate": 5.2525252525252526e-06, |
|
"loss": 6.8772, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 2.494949494949495, |
|
"grad_norm": 0.8352294564247131, |
|
"learning_rate": 5.151515151515151e-06, |
|
"loss": 7.0575, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 2.505050505050505, |
|
"grad_norm": 1.1144355535507202, |
|
"learning_rate": 5.050505050505051e-06, |
|
"loss": 6.8229, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 2.515151515151515, |
|
"grad_norm": 0.6648267507553101, |
|
"learning_rate": 4.949494949494949e-06, |
|
"loss": 7.0238, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 2.525252525252525, |
|
"grad_norm": 0.7869075536727905, |
|
"learning_rate": 4.848484848484849e-06, |
|
"loss": 7.0076, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 2.5353535353535355, |
|
"grad_norm": 0.6390742659568787, |
|
"learning_rate": 4.747474747474747e-06, |
|
"loss": 7.0491, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 2.5454545454545454, |
|
"grad_norm": 0.7328326106071472, |
|
"learning_rate": 4.646464646464646e-06, |
|
"loss": 7.0393, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 2.5555555555555554, |
|
"grad_norm": 0.8217814564704895, |
|
"learning_rate": 4.5454545454545455e-06, |
|
"loss": 6.9342, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 2.5656565656565657, |
|
"grad_norm": 1.3768435716629028, |
|
"learning_rate": 4.444444444444444e-06, |
|
"loss": 7.124, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 2.5757575757575757, |
|
"grad_norm": 1.0091509819030762, |
|
"learning_rate": 4.343434343434344e-06, |
|
"loss": 6.8746, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 2.5858585858585856, |
|
"grad_norm": 1.0249191522598267, |
|
"learning_rate": 4.242424242424242e-06, |
|
"loss": 6.8636, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 2.595959595959596, |
|
"grad_norm": 0.9580966234207153, |
|
"learning_rate": 4.141414141414142e-06, |
|
"loss": 6.9923, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 2.606060606060606, |
|
"grad_norm": 1.5487310886383057, |
|
"learning_rate": 4.04040404040404e-06, |
|
"loss": 6.8229, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 2.616161616161616, |
|
"grad_norm": 0.6319442987442017, |
|
"learning_rate": 3.93939393939394e-06, |
|
"loss": 7.0224, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 2.6262626262626263, |
|
"grad_norm": 0.7614464163780212, |
|
"learning_rate": 3.8383838383838385e-06, |
|
"loss": 6.9499, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 2.6363636363636362, |
|
"grad_norm": 0.9656277298927307, |
|
"learning_rate": 3.7373737373737375e-06, |
|
"loss": 6.881, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 2.6464646464646466, |
|
"grad_norm": 2.4072742462158203, |
|
"learning_rate": 3.6363636363636366e-06, |
|
"loss": 6.579, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 2.6565656565656566, |
|
"grad_norm": 1.361374855041504, |
|
"learning_rate": 3.5353535353535356e-06, |
|
"loss": 7.1662, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 2.6666666666666665, |
|
"grad_norm": 0.7471557855606079, |
|
"learning_rate": 3.4343434343434347e-06, |
|
"loss": 7.026, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 2.676767676767677, |
|
"grad_norm": 0.9399821758270264, |
|
"learning_rate": 3.3333333333333333e-06, |
|
"loss": 6.8437, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 2.686868686868687, |
|
"grad_norm": 0.960325300693512, |
|
"learning_rate": 3.2323232323232324e-06, |
|
"loss": 7.1221, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 2.6969696969696972, |
|
"grad_norm": 0.7455516457557678, |
|
"learning_rate": 3.1313131313131314e-06, |
|
"loss": 6.8993, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 2.707070707070707, |
|
"grad_norm": 0.7760592103004456, |
|
"learning_rate": 3.0303030303030305e-06, |
|
"loss": 6.9543, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 2.717171717171717, |
|
"grad_norm": 0.7888494729995728, |
|
"learning_rate": 2.9292929292929295e-06, |
|
"loss": 7.0662, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 2.7272727272727275, |
|
"grad_norm": 1.3352677822113037, |
|
"learning_rate": 2.8282828282828286e-06, |
|
"loss": 6.801, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 2.7373737373737375, |
|
"grad_norm": 0.7915419936180115, |
|
"learning_rate": 2.7272727272727272e-06, |
|
"loss": 7.0028, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 2.7474747474747474, |
|
"grad_norm": 0.5938275456428528, |
|
"learning_rate": 2.6262626262626263e-06, |
|
"loss": 6.9275, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 2.757575757575758, |
|
"grad_norm": 1.24167001247406, |
|
"learning_rate": 2.5252525252525253e-06, |
|
"loss": 6.7646, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 2.7676767676767677, |
|
"grad_norm": 1.3691089153289795, |
|
"learning_rate": 2.4242424242424244e-06, |
|
"loss": 7.0922, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 2.7777777777777777, |
|
"grad_norm": 1.1850454807281494, |
|
"learning_rate": 2.323232323232323e-06, |
|
"loss": 7.0915, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 2.787878787878788, |
|
"grad_norm": 1.693428874015808, |
|
"learning_rate": 2.222222222222222e-06, |
|
"loss": 6.7004, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 2.797979797979798, |
|
"grad_norm": 0.5935156345367432, |
|
"learning_rate": 2.121212121212121e-06, |
|
"loss": 6.9309, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 2.808080808080808, |
|
"grad_norm": 0.6563141942024231, |
|
"learning_rate": 2.02020202020202e-06, |
|
"loss": 7.0097, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 2.8181818181818183, |
|
"grad_norm": 0.9683242440223694, |
|
"learning_rate": 1.9191919191919192e-06, |
|
"loss": 7.0719, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 2.8282828282828283, |
|
"grad_norm": 1.0628868341445923, |
|
"learning_rate": 1.8181818181818183e-06, |
|
"loss": 7.0084, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 2.8383838383838382, |
|
"grad_norm": 0.7137049436569214, |
|
"learning_rate": 1.7171717171717173e-06, |
|
"loss": 6.9748, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 2.8484848484848486, |
|
"grad_norm": 1.1555520296096802, |
|
"learning_rate": 1.6161616161616162e-06, |
|
"loss": 6.8182, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 2.8585858585858586, |
|
"grad_norm": 1.3852341175079346, |
|
"learning_rate": 1.5151515151515152e-06, |
|
"loss": 7.1022, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 2.8686868686868685, |
|
"grad_norm": 0.6663222908973694, |
|
"learning_rate": 1.4141414141414143e-06, |
|
"loss": 7.0444, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 2.878787878787879, |
|
"grad_norm": 0.9903120994567871, |
|
"learning_rate": 1.3131313131313131e-06, |
|
"loss": 6.7819, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 2.888888888888889, |
|
"grad_norm": 1.510622262954712, |
|
"learning_rate": 1.2121212121212122e-06, |
|
"loss": 7.1841, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 2.898989898989899, |
|
"grad_norm": 1.2110823392868042, |
|
"learning_rate": 1.111111111111111e-06, |
|
"loss": 7.0199, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 2.909090909090909, |
|
"grad_norm": 1.0056835412979126, |
|
"learning_rate": 1.01010101010101e-06, |
|
"loss": 6.9958, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 2.919191919191919, |
|
"grad_norm": 0.8939002156257629, |
|
"learning_rate": 9.090909090909091e-07, |
|
"loss": 7.0829, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 2.929292929292929, |
|
"grad_norm": 0.6934316158294678, |
|
"learning_rate": 8.080808080808081e-07, |
|
"loss": 7.015, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 2.9393939393939394, |
|
"grad_norm": 0.7522878050804138, |
|
"learning_rate": 7.070707070707071e-07, |
|
"loss": 7.0191, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 2.9494949494949494, |
|
"grad_norm": 1.2762123346328735, |
|
"learning_rate": 6.060606060606061e-07, |
|
"loss": 6.7045, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 2.9595959595959593, |
|
"grad_norm": 1.0797396898269653, |
|
"learning_rate": 5.05050505050505e-07, |
|
"loss": 7.0543, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 2.9696969696969697, |
|
"grad_norm": 1.322259783744812, |
|
"learning_rate": 4.0404040404040405e-07, |
|
"loss": 6.7815, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 2.9797979797979797, |
|
"grad_norm": 0.63960200548172, |
|
"learning_rate": 3.0303030303030305e-07, |
|
"loss": 6.9333, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 2.98989898989899, |
|
"grad_norm": 0.8398324847221375, |
|
"learning_rate": 2.0202020202020202e-07, |
|
"loss": 7.0499, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"grad_norm": 1.3194384574890137, |
|
"learning_rate": 1.0101010101010101e-07, |
|
"loss": 7.1508, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"eval_audio_cosine_sim": 0.32165560126304626, |
|
"eval_loss": 5.101603984832764, |
|
"eval_runtime": 153.0389, |
|
"eval_samples_per_second": 0.059, |
|
"eval_steps_per_second": 0.02, |
|
"eval_text_cosine_sim": 0.11808153986930847, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"step": 297, |
|
"total_flos": 6235958582540448.0, |
|
"train_loss": 7.227042491989907, |
|
"train_runtime": 3530.6424, |
|
"train_samples_per_second": 1.343, |
|
"train_steps_per_second": 0.084 |
|
} |
|
], |
|
"logging_steps": 1.0, |
|
"max_steps": 297, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 3, |
|
"save_steps": 500, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": false, |
|
"should_training_stop": false |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 6235958582540448.0, |
|
"train_batch_size": 16, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |