|
{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 5.333333333333333,
  "eval_steps": 500,
  "global_step": 600,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {"epoch": 0.008888888888888889, "grad_norm": 0.5841383934020996, "learning_rate": 8.928571428571428e-07, "loss": 0.4865, "step": 1},
    {"epoch": 0.017777777777777778, "grad_norm": 0.37317243218421936, "learning_rate": 1.7857142857142857e-06, "loss": 0.4116, "step": 2},
    {"epoch": 0.02666666666666667, "grad_norm": 0.16240684688091278, "learning_rate": 2.6785714285714285e-06, "loss": 0.3085, "step": 3},
    {"epoch": 0.035555555555555556, "grad_norm": 0.22618648409843445, "learning_rate": 3.5714285714285714e-06, "loss": 0.3213, "step": 4},
    {"epoch": 0.044444444444444446, "grad_norm": 0.33735740184783936, "learning_rate": 4.464285714285715e-06, "loss": 0.2869, "step": 5},
    {"epoch": 0.05333333333333334, "grad_norm": 0.20259538292884827, "learning_rate": 5.357142857142857e-06, "loss": 0.3825, "step": 6},
    {"epoch": 0.06222222222222222, "grad_norm": 0.3431294560432434, "learning_rate": 6.25e-06, "loss": 0.3896, "step": 7},
    {"epoch": 0.07111111111111111, "grad_norm": 0.5504677891731262, "learning_rate": 7.142857142857143e-06, "loss": 0.3175, "step": 8},
    {"epoch": 0.08, "grad_norm": 0.1282545030117035, "learning_rate": 8.035714285714286e-06, "loss": 0.2901, "step": 9},
    {"epoch": 0.08888888888888889, "grad_norm": 0.18529950082302094, "learning_rate": 8.92857142857143e-06, "loss": 0.3735, "step": 10},
    {"epoch": 0.09777777777777778, "grad_norm": 0.2145024836063385, "learning_rate": 9.821428571428573e-06, "loss": 0.3525, "step": 11},
    {"epoch": 0.10666666666666667, "grad_norm": 0.9437930583953857, "learning_rate": 1.0714285714285714e-05, "loss": 0.444, "step": 12},
    {"epoch": 0.11555555555555555, "grad_norm": 0.24417199194431305, "learning_rate": 1.1607142857142857e-05, "loss": 0.2967, "step": 13},
    {"epoch": 0.12444444444444444, "grad_norm": 0.2694835662841797, "learning_rate": 1.25e-05, "loss": 0.2967, "step": 14},
    {"epoch": 0.13333333333333333, "grad_norm": 0.20757225155830383, "learning_rate": 1.3392857142857144e-05, "loss": 0.3487, "step": 15},
    {"epoch": 0.14222222222222222, "grad_norm": 0.2921523153781891, "learning_rate": 1.4285714285714285e-05, "loss": 0.4247, "step": 16},
    {"epoch": 0.1511111111111111, "grad_norm": 0.20908255875110626, "learning_rate": 1.5178571428571429e-05, "loss": 0.3009, "step": 17},
    {"epoch": 0.16, "grad_norm": 0.15087872743606567, "learning_rate": 1.6071428571428572e-05, "loss": 0.3145, "step": 18},
    {"epoch": 0.1688888888888889, "grad_norm": 0.8311613202095032, "learning_rate": 1.6964285714285715e-05, "loss": 0.3868, "step": 19},
    {"epoch": 0.17777777777777778, "grad_norm": 0.27965763211250305, "learning_rate": 1.785714285714286e-05, "loss": 0.351, "step": 20},
    {"epoch": 0.18666666666666668, "grad_norm": 0.23372279107570648, "learning_rate": 1.8750000000000002e-05, "loss": 0.3025, "step": 21},
    {"epoch": 0.19555555555555557, "grad_norm": 0.3673437237739563, "learning_rate": 1.9642857142857145e-05, "loss": 0.388, "step": 22},
    {"epoch": 0.20444444444444446, "grad_norm": 0.39919161796569824, "learning_rate": 2.0535714285714285e-05, "loss": 0.3611, "step": 23},
    {"epoch": 0.21333333333333335, "grad_norm": 0.20096033811569214, "learning_rate": 2.1428571428571428e-05, "loss": 0.3244, "step": 24},
    {"epoch": 0.2222222222222222, "grad_norm": 0.27874326705932617, "learning_rate": 2.2321428571428575e-05, "loss": 0.359, "step": 25},
    {"epoch": 0.2311111111111111, "grad_norm": 0.31668320298194885, "learning_rate": 2.3214285714285715e-05, "loss": 0.3062, "step": 26},
    {"epoch": 0.24, "grad_norm": 0.4131496548652649, "learning_rate": 2.4107142857142858e-05, "loss": 0.4351, "step": 27},
    {"epoch": 0.24888888888888888, "grad_norm": 0.4470804035663605, "learning_rate": 2.5e-05, "loss": 0.4332, "step": 28},
    {"epoch": 0.2577777777777778, "grad_norm": 0.3746657967567444, "learning_rate": 2.5892857142857148e-05, "loss": 0.3811, "step": 29},
    {"epoch": 0.26666666666666666, "grad_norm": 0.8335658311843872, "learning_rate": 2.6785714285714288e-05, "loss": 0.4618, "step": 30},
    {"epoch": 0.27555555555555555, "grad_norm": 0.8326655626296997, "learning_rate": 2.767857142857143e-05, "loss": 0.3872, "step": 31},
    {"epoch": 0.28444444444444444, "grad_norm": 0.7701701521873474, "learning_rate": 2.857142857142857e-05, "loss": 0.3255, "step": 32},
    {"epoch": 0.29333333333333333, "grad_norm": 0.40850526094436646, "learning_rate": 2.9464285714285718e-05, "loss": 0.3167, "step": 33},
    {"epoch": 0.3022222222222222, "grad_norm": 0.35397693514823914, "learning_rate": 3.0357142857142857e-05, "loss": 0.2987, "step": 34},
    {"epoch": 0.3111111111111111, "grad_norm": 0.7541699409484863, "learning_rate": 3.125e-05, "loss": 0.3413, "step": 35},
    {"epoch": 0.32, "grad_norm": 0.3483797609806061, "learning_rate": 3.2142857142857144e-05, "loss": 0.2858, "step": 36},
    {"epoch": 0.3288888888888889, "grad_norm": 0.33964604139328003, "learning_rate": 3.303571428571429e-05, "loss": 0.3036, "step": 37},
    {"epoch": 0.3377777777777778, "grad_norm": 0.45213809609413147, "learning_rate": 3.392857142857143e-05, "loss": 0.3437, "step": 38},
    {"epoch": 0.3466666666666667, "grad_norm": 0.4597315192222595, "learning_rate": 3.4821428571428574e-05, "loss": 0.3318, "step": 39},
    {"epoch": 0.35555555555555557, "grad_norm": 0.44681742787361145, "learning_rate": 3.571428571428572e-05, "loss": 0.3107, "step": 40},
    {"epoch": 0.36444444444444446, "grad_norm": 0.9226369857788086, "learning_rate": 3.6607142857142853e-05, "loss": 0.3231, "step": 41},
    {"epoch": 0.37333333333333335, "grad_norm": 0.27465879917144775, "learning_rate": 3.7500000000000003e-05, "loss": 0.3301, "step": 42},
    {"epoch": 0.38222222222222224, "grad_norm": 0.4791021943092346, "learning_rate": 3.839285714285715e-05, "loss": 0.3527, "step": 43},
    {"epoch": 0.39111111111111113, "grad_norm": 0.586669385433197, "learning_rate": 3.928571428571429e-05, "loss": 0.3359, "step": 44},
    {"epoch": 0.4, "grad_norm": 0.34871557354927063, "learning_rate": 4.017857142857143e-05, "loss": 0.3268, "step": 45},
    {"epoch": 0.4088888888888889, "grad_norm": 0.4256209135055542, "learning_rate": 4.107142857142857e-05, "loss": 0.2978, "step": 46},
    {"epoch": 0.4177777777777778, "grad_norm": 0.30336323380470276, "learning_rate": 4.196428571428572e-05, "loss": 0.2898, "step": 47},
    {"epoch": 0.4266666666666667, "grad_norm": 0.5795422792434692, "learning_rate": 4.2857142857142856e-05, "loss": 0.3481, "step": 48},
    {"epoch": 0.43555555555555553, "grad_norm": 0.38410332798957825, "learning_rate": 4.375e-05, "loss": 0.3414, "step": 49},
    {"epoch": 0.4444444444444444, "grad_norm": 0.3348947763442993, "learning_rate": 4.464285714285715e-05, "loss": 0.336, "step": 50},
    {"epoch": 0.4533333333333333, "grad_norm": 0.42829787731170654, "learning_rate": 4.5535714285714286e-05, "loss": 0.2913, "step": 51},
    {"epoch": 0.4622222222222222, "grad_norm": 4.1896653175354, "learning_rate": 4.642857142857143e-05, "loss": 0.8937, "step": 52},
    {"epoch": 0.4711111111111111, "grad_norm": 0.7059090733528137, "learning_rate": 4.732142857142857e-05, "loss": 0.4011, "step": 53},
    {"epoch": 0.48, "grad_norm": 0.41015371680259705, "learning_rate": 4.8214285714285716e-05, "loss": 0.3351, "step": 54},
    {"epoch": 0.4888888888888889, "grad_norm": 0.43993642926216125, "learning_rate": 4.910714285714286e-05, "loss": 0.3313, "step": 55},
    {"epoch": 0.49777777777777776, "grad_norm": 1.0636693239212036, "learning_rate": 5e-05, "loss": 0.4636, "step": 56},
    {"epoch": 0.5066666666666667, "grad_norm": 1.5080366134643555, "learning_rate": 5.089285714285714e-05, "loss": 0.5489, "step": 57},
    {"epoch": 0.5155555555555555, "grad_norm": 0.5345658659934998, "learning_rate": 5.1785714285714296e-05, "loss": 0.3057, "step": 58},
    {"epoch": 0.5244444444444445, "grad_norm": 0.7976881861686707, "learning_rate": 5.267857142857143e-05, "loss": 0.3581, "step": 59},
    {"epoch": 0.5333333333333333, "grad_norm": 0.726458728313446, "learning_rate": 5.3571428571428575e-05, "loss": 0.3279, "step": 60},
    {"epoch": 0.5422222222222223, "grad_norm": 0.5178459286689758, "learning_rate": 5.446428571428571e-05, "loss": 0.3217, "step": 61},
    {"epoch": 0.5511111111111111, "grad_norm": 0.6377764940261841, "learning_rate": 5.535714285714286e-05, "loss": 0.321, "step": 62},
    {"epoch": 0.56, "grad_norm": 0.3174062967300415, "learning_rate": 5.6250000000000005e-05, "loss": 0.3154, "step": 63},
    {"epoch": 0.5688888888888889, "grad_norm": 0.31623443961143494, "learning_rate": 5.714285714285714e-05, "loss": 0.2735, "step": 64},
    {"epoch": 0.5777777777777777, "grad_norm": 0.3521466851234436, "learning_rate": 5.803571428571429e-05, "loss": 0.2878, "step": 65},
    {"epoch": 0.5866666666666667, "grad_norm": 0.8923875689506531, "learning_rate": 5.8928571428571435e-05, "loss": 0.3906, "step": 66},
    {"epoch": 0.5955555555555555, "grad_norm": 0.6803852915763855, "learning_rate": 5.982142857142857e-05, "loss": 0.3573, "step": 67},
    {"epoch": 0.6044444444444445, "grad_norm": 0.5237946510314941, "learning_rate": 6.0714285714285715e-05, "loss": 0.3068, "step": 68},
    {"epoch": 0.6133333333333333, "grad_norm": 0.6161757111549377, "learning_rate": 6.160714285714286e-05, "loss": 0.2944, "step": 69},
    {"epoch": 0.6222222222222222, "grad_norm": 0.3687132000923157, "learning_rate": 6.25e-05, "loss": 0.2901, "step": 70},
    {"epoch": 0.6311111111111111, "grad_norm": 0.39233818650245667, "learning_rate": 6.339285714285714e-05, "loss": 0.2821, "step": 71},
    {"epoch": 0.64, "grad_norm": 0.7180721759796143, "learning_rate": 6.428571428571429e-05, "loss": 0.3509, "step": 72},
    {"epoch": 0.6488888888888888, "grad_norm": 0.9132435917854309, "learning_rate": 6.517857142857143e-05, "loss": 0.4027, "step": 73},
    {"epoch": 0.6577777777777778, "grad_norm": 0.2931051552295685, "learning_rate": 6.607142857142857e-05, "loss": 0.3303, "step": 74},
    {"epoch": 0.6666666666666666, "grad_norm": 0.4739736318588257, "learning_rate": 6.696428571428572e-05, "loss": 0.286, "step": 75},
    {"epoch": 0.6755555555555556, "grad_norm": 0.645233154296875, "learning_rate": 6.785714285714286e-05, "loss": 0.3673, "step": 76},
    {"epoch": 0.6844444444444444, "grad_norm": 0.6568748354911804, "learning_rate": 6.875e-05, "loss": 0.3451, "step": 77},
    {"epoch": 0.6933333333333334, "grad_norm": 0.330121785402298, "learning_rate": 6.964285714285715e-05, "loss": 0.2916, "step": 78},
    {"epoch": 0.7022222222222222, "grad_norm": 0.6969891786575317, "learning_rate": 7.053571428571429e-05, "loss": 0.3799, "step": 79},
    {"epoch": 0.7111111111111111, "grad_norm": 0.3836056590080261, "learning_rate": 7.142857142857143e-05, "loss": 0.2785, "step": 80},
    {"epoch": 0.72, "grad_norm": 0.6308532357215881, "learning_rate": 7.232142857142858e-05, "loss": 0.3365, "step": 81},
    {"epoch": 0.7288888888888889, "grad_norm": 1.3300080299377441, "learning_rate": 7.321428571428571e-05, "loss": 0.4452, "step": 82},
    {"epoch": 0.7377777777777778, "grad_norm": 0.4857744872570038, "learning_rate": 7.410714285714286e-05, "loss": 0.3408, "step": 83},
    {"epoch": 0.7466666666666667, "grad_norm": 0.2752129137516022, "learning_rate": 7.500000000000001e-05, "loss": 0.2481, "step": 84},
    {"epoch": 0.7555555555555555, "grad_norm": 0.37411218881607056, "learning_rate": 7.589285714285714e-05, "loss": 0.2714, "step": 85},
    {"epoch": 0.7644444444444445, "grad_norm": 0.34179869294166565, "learning_rate": 7.67857142857143e-05, "loss": 0.322, "step": 86},
    {"epoch": 0.7733333333333333, "grad_norm": 0.5072541236877441, "learning_rate": 7.767857142857144e-05, "loss": 0.3442, "step": 87},
    {"epoch": 0.7822222222222223, "grad_norm": 0.3834559917449951, "learning_rate": 7.857142857142858e-05, "loss": 0.2904, "step": 88},
    {"epoch": 0.7911111111111111, "grad_norm": 0.37922006845474243, "learning_rate": 7.946428571428571e-05, "loss": 0.3116, "step": 89},
    {"epoch": 0.8, "grad_norm": 0.41435790061950684, "learning_rate": 8.035714285714287e-05, "loss": 0.2949, "step": 90},
    {"epoch": 0.8088888888888889, "grad_norm": 0.5537578463554382, "learning_rate": 8.125000000000001e-05, "loss": 0.3337, "step": 91},
    {"epoch": 0.8177777777777778, "grad_norm": 0.24957779049873352, "learning_rate": 8.214285714285714e-05, "loss": 0.2709, "step": 92},
    {"epoch": 0.8266666666666667, "grad_norm": 0.43642184138298035, "learning_rate": 8.30357142857143e-05, "loss": 0.3033, "step": 93},
    {"epoch": 0.8355555555555556, "grad_norm": 0.835472583770752, "learning_rate": 8.392857142857144e-05, "loss": 0.3828, "step": 94},
    {"epoch": 0.8444444444444444, "grad_norm": 0.6168670654296875, "learning_rate": 8.482142857142857e-05, "loss": 0.3455, "step": 95},
    {"epoch": 0.8533333333333334, "grad_norm": 0.46005958318710327, "learning_rate": 8.571428571428571e-05, "loss": 0.3489, "step": 96},
    {"epoch": 0.8622222222222222, "grad_norm": 0.34383633732795715, "learning_rate": 8.660714285714287e-05, "loss": 0.2866, "step": 97},
    {"epoch": 0.8711111111111111, "grad_norm": 0.4366074204444885, "learning_rate": 8.75e-05, "loss": 0.3239, "step": 98},
    {"epoch": 0.88, "grad_norm": 0.33174213767051697, "learning_rate": 8.839285714285714e-05, "loss": 0.2783, "step": 99},
    {"epoch": 0.8888888888888888, "grad_norm": 0.26910072565078735, "learning_rate": 8.92857142857143e-05, "loss": 0.2807, "step": 100},
    {"epoch": 0.8977777777777778, "grad_norm": 0.9884425401687622, "learning_rate": 9.017857142857143e-05, "loss": 0.4379, "step": 101},
    {"epoch": 0.9066666666666666, "grad_norm": 0.24256423115730286, "learning_rate": 9.107142857142857e-05, "loss": 0.231, "step": 102},
    {"epoch": 0.9155555555555556, "grad_norm": 0.36811211705207825, "learning_rate": 9.196428571428572e-05, "loss": 0.2668, "step": 103},
    {"epoch": 0.9244444444444444, "grad_norm": 0.45584559440612793, "learning_rate": 9.285714285714286e-05, "loss": 0.3285, "step": 104},
    {"epoch": 0.9333333333333333, "grad_norm": 0.4034405052661896, "learning_rate": 9.375e-05, "loss": 0.2823, "step": 105},
    {"epoch": 0.9422222222222222, "grad_norm": 0.48256492614746094, "learning_rate": 9.464285714285715e-05, "loss": 0.3389, "step": 106},
    {"epoch": 0.9511111111111111, "grad_norm": 0.4109070897102356, "learning_rate": 9.553571428571429e-05, "loss": 0.3515, "step": 107},
    {"epoch": 0.96, "grad_norm": 1.8162788152694702, "learning_rate": 9.642857142857143e-05, "loss": 0.5591, "step": 108},
    {"epoch": 0.9688888888888889, "grad_norm": 0.33671724796295166, "learning_rate": 9.732142857142858e-05, "loss": 0.3512, "step": 109},
    {"epoch": 0.9777777777777777, "grad_norm": 0.35684165358543396, "learning_rate": 9.821428571428572e-05, "loss": 0.3087, "step": 110},
    {"epoch": 0.9866666666666667, "grad_norm": 0.34859076142311096, "learning_rate": 9.910714285714286e-05, "loss": 0.2894, "step": 111},
    {"epoch": 0.9955555555555555, "grad_norm": 0.3526354432106018, "learning_rate": 0.0001, "loss": 0.3052, "step": 112},
    {"epoch": 1.0044444444444445, "grad_norm": 0.2897067368030548, "learning_rate": 9.999975716105452e-05, "loss": 0.3058, "step": 113},
    {"epoch": 1.0133333333333334, "grad_norm": 0.4105049967765808, "learning_rate": 9.999902864657691e-05, "loss": 0.2635, "step": 114},
    {"epoch": 1.0222222222222221, "grad_norm": 0.36226218938827515, "learning_rate": 9.999781446364365e-05, "loss": 0.3257, "step": 115},
    {"epoch": 1.031111111111111, "grad_norm": 0.31135883927345276, "learning_rate": 9.999611462404875e-05, "loss": 0.2646, "step": 116},
    {"epoch": 1.04, "grad_norm": 2.0171165466308594, "learning_rate": 9.999392914430371e-05, "loss": 0.5321, "step": 117},
    {"epoch": 1.048888888888889, "grad_norm": 0.3250499665737152, "learning_rate": 9.999125804563732e-05, "loss": 0.3208, "step": 118},
    {"epoch": 1.0577777777777777, "grad_norm": 0.2886015474796295, "learning_rate": 9.998810135399546e-05, "loss": 0.2932, "step": 119},
    {"epoch": 1.0666666666666667, "grad_norm": 0.3275207281112671, "learning_rate": 9.998445910004082e-05, "loss": 0.2971, "step": 120},
    {"epoch": 1.0755555555555556, "grad_norm": 0.5577352643013, "learning_rate": 9.998033131915266e-05, "loss": 0.3561, "step": 121},
    {"epoch": 1.0844444444444445, "grad_norm": 1.0877000093460083, "learning_rate": 9.997571805142639e-05, "loss": 0.4113, "step": 122},
    {"epoch": 1.0933333333333333, "grad_norm": 0.5185580253601074, "learning_rate": 9.997061934167328e-05, "loss": 0.3085, "step": 123},
    {"epoch": 1.1022222222222222, "grad_norm": 0.4035893678665161, "learning_rate": 9.996503523941994e-05, "loss": 0.271, "step": 124},
    {"epoch": 1.1111111111111112, "grad_norm": 0.3941110074520111, "learning_rate": 9.995896579890784e-05, "loss": 0.2875, "step": 125},
    {"epoch": 1.12, "grad_norm": 0.3415586054325104, "learning_rate": 9.99524110790929e-05, "loss": 0.2711, "step": 126},
    {"epoch": 1.1288888888888888, "grad_norm": 0.3006160259246826, "learning_rate": 9.99453711436447e-05, "loss": 0.2718, "step": 127},
    {"epoch": 1.1377777777777778, "grad_norm": 0.522278368473053, "learning_rate": 9.993784606094612e-05, "loss": 0.3306, "step": 128},
    {"epoch": 1.1466666666666667, "grad_norm": 0.5858255624771118, "learning_rate": 9.992983590409246e-05, "loss": 0.3475, "step": 129},
    {"epoch": 1.1555555555555554, "grad_norm": 0.5486164689064026, "learning_rate": 9.992134075089084e-05, "loss": 0.3259, "step": 130},
    {"epoch": 1.1644444444444444, "grad_norm": 0.2975933253765106, "learning_rate": 9.991236068385941e-05, "loss": 0.2588, "step": 131},
    {"epoch": 1.1733333333333333, "grad_norm": 0.6659825444221497, "learning_rate": 9.99028957902266e-05, "loss": 0.3266, "step": 132},
    {"epoch": 1.1822222222222223, "grad_norm": 0.2541256248950958, "learning_rate": 9.989294616193017e-05, "loss": 0.2649, "step": 133},
    {"epoch": 1.1911111111111112, "grad_norm": 0.8277371525764465, "learning_rate": 9.988251189561645e-05, "loss": 0.4076, "step": 134},
    {"epoch": 1.2, "grad_norm": 0.3177882432937622, "learning_rate": 9.987159309263924e-05, "loss": 0.304, "step": 135},
    {"epoch": 1.208888888888889, "grad_norm": 0.36816540360450745, "learning_rate": 9.986018985905901e-05, "loss": 0.3187, "step": 136},
    {"epoch": 1.2177777777777778, "grad_norm": 0.4456408619880676, "learning_rate": 9.984830230564171e-05, "loss": 0.2769, "step": 137},
    {"epoch": 1.2266666666666666, "grad_norm": 0.7157383561134338, "learning_rate": 9.983593054785776e-05, "loss": 0.382, "step": 138},
    {"epoch": 1.2355555555555555, "grad_norm": 0.5327372550964355, "learning_rate": 9.982307470588098e-05, "loss": 0.2732, "step": 139},
    {"epoch": 1.2444444444444445, "grad_norm": 0.581408679485321, "learning_rate": 9.980973490458728e-05, "loss": 0.3956, "step": 140},
    {"epoch": 1.2533333333333334, "grad_norm": 0.32390910387039185, "learning_rate": 9.979591127355365e-05, "loss": 0.2905, "step": 141},
    {"epoch": 1.2622222222222224, "grad_norm": 0.4703962802886963, "learning_rate": 9.978160394705668e-05, "loss": 0.2897, "step": 142},
    {"epoch": 1.271111111111111, "grad_norm": 0.5023928284645081, "learning_rate": 9.976681306407148e-05, "loss": 0.328, "step": 143},
    {"epoch": 1.28, "grad_norm": 0.3573106825351715, "learning_rate": 9.975153876827008e-05, "loss": 0.2687, "step": 144},
    {"epoch": 1.2888888888888888, "grad_norm": 0.6152392625808716, "learning_rate": 9.973578120802025e-05, "loss": 0.3375, "step": 145},
    {"epoch": 1.2977777777777777, "grad_norm": 0.40894556045532227, "learning_rate": 9.971954053638399e-05, "loss": 0.2888, "step": 146},
    {"epoch": 1.3066666666666666, "grad_norm": 0.8339890241622925, "learning_rate": 9.970281691111598e-05, "loss": 0.4384, "step": 147},
    {"epoch": 1.3155555555555556, "grad_norm": 0.41529226303100586, "learning_rate": 9.968561049466214e-05, "loss": 0.2831, "step": 148},
    {"epoch": 1.3244444444444445, "grad_norm": 0.28021934628486633, "learning_rate": 9.966792145415795e-05, "loss": 0.2671, "step": 149},
    {"epoch": 1.3333333333333333, "grad_norm": 0.22352413833141327, "learning_rate": 9.964974996142698e-05, "loss": 0.2839, "step": 150},
    {"epoch": 1.3422222222222222, "grad_norm": 0.2703256905078888, "learning_rate": 9.963109619297905e-05, "loss": 0.2675, "step": 151},
    {"epoch": 1.3511111111111112, "grad_norm": 0.2645833194255829, "learning_rate": 9.961196033000861e-05, "loss": 0.2708, "step": 152},
    {"epoch": 1.3599999999999999, "grad_norm": 0.40951216220855713, "learning_rate": 9.959234255839298e-05, "loss": 0.3015, "step": 153},
    {"epoch": 1.3688888888888888, "grad_norm": 0.3288329839706421, "learning_rate": 9.957224306869053e-05, "loss": 0.325, "step": 154},
    {"epoch": 1.3777777777777778, "grad_norm": 0.3932753801345825, "learning_rate": 9.955166205613879e-05, "loss": 0.2716, "step": 155},
    {"epoch": 1.3866666666666667, "grad_norm": 0.46717700362205505, "learning_rate": 9.953059972065265e-05, "loss": 0.3444, "step": 156},
    {"epoch": 1.3955555555555557, "grad_norm": 0.20313459634780884, "learning_rate": 9.950905626682228e-05, "loss": 0.2584, "step": 157},
    {"epoch": 1.4044444444444444, "grad_norm": 0.47062796354293823, "learning_rate": 9.948703190391131e-05, "loss": 0.372, "step": 158},
    {"epoch": 1.4133333333333333, "grad_norm": 0.3500126898288727, "learning_rate": 9.946452684585463e-05, "loss": 0.2737, "step": 159},
    {"epoch": 1.4222222222222223, "grad_norm": 0.5783170461654663, "learning_rate": 9.944154131125642e-05, "loss": 0.2938, "step": 160},
    {"epoch": 1.431111111111111, "grad_norm": 0.27551746368408203, "learning_rate": 9.941807552338804e-05, "loss": 0.3139, "step": 161},
    {"epoch": 1.44, "grad_norm": 0.2641688287258148, "learning_rate": 9.939412971018574e-05, "loss": 0.3009, "step": 162},
    {"epoch": 1.448888888888889, "grad_norm": 0.4430491030216217, "learning_rate": 9.936970410424857e-05, "loss": 0.2779, "step": 163},
    {"epoch": 1.4577777777777778, "grad_norm": 0.44353199005126953, "learning_rate": 9.934479894283606e-05, "loss": 0.2694, "step": 164},
    {"epoch": 1.4666666666666668, "grad_norm": 0.3297507166862488, "learning_rate": 9.931941446786594e-05, "loss": 0.2638, "step": 165},
    {"epoch": 1.4755555555555555, "grad_norm": 0.5766128897666931, "learning_rate": 9.92935509259118e-05, "loss": 0.3052, "step": 166},
    {"epoch": 1.4844444444444445, "grad_norm": 0.3493499755859375, "learning_rate": 9.92672085682006e-05, "loss": 0.2728, "step": 167},
    {"epoch": 1.4933333333333334, "grad_norm": 0.34138888120651245, "learning_rate": 9.924038765061042e-05, "loss": 0.2679, "step": 168},
    {"epoch": 1.5022222222222221, "grad_norm": 0.40943869948387146, "learning_rate": 9.921308843366772e-05, "loss": 0.2556, "step": 169},
    {"epoch": 1.511111111111111, "grad_norm": 0.4275529384613037, "learning_rate": 9.918531118254507e-05, "loss": 0.3012, "step": 170},
    {"epoch": 1.52, "grad_norm": 0.3822483718395233, "learning_rate": 9.915705616705839e-05, "loss": 0.2984, "step": 171},
    {"epoch": 1.528888888888889, "grad_norm": 0.3507990837097168, "learning_rate": 9.912832366166442e-05, "loss": 0.2839, "step": 172},
    {"epoch": 1.537777777777778, "grad_norm": 0.3176634907722473, "learning_rate": 9.909911394545799e-05, "loss": 0.2715, "step": 173},
    {"epoch": 1.5466666666666666, "grad_norm": 0.7413046956062317, "learning_rate": 9.906942730216939e-05, "loss": 0.2995, "step": 174},
    {"epoch": 1.5555555555555556, "grad_norm": 0.5602743625640869, "learning_rate": 9.903926402016153e-05, "loss": 0.303, "step": 175},
    {"epoch": 1.5644444444444443, "grad_norm": 0.3049962818622589, "learning_rate": 9.900862439242719e-05, "loss": 0.2866, "step": 176},
    {"epoch": 1.5733333333333333, "grad_norm": 0.20894083380699158, "learning_rate": 9.89775087165862e-05, "loss": 0.2801, "step": 177},
    {"epoch": 1.5822222222222222, "grad_norm": 0.4999159574508667, "learning_rate": 9.894591729488242e-05, "loss": 0.3153, "step": 178},
    {"epoch": 1.5911111111111111, "grad_norm": 0.6849189400672913, "learning_rate": 9.8913850434181e-05, "loss": 0.2794, "step": 179},
    {"epoch": 1.6, "grad_norm": 0.44084635376930237, "learning_rate": 9.888130844596524e-05, "loss": 0.2953, "step": 180},
    {"epoch": 1.608888888888889, "grad_norm": 0.2744970917701721, "learning_rate": 9.884829164633359e-05, "loss": 0.2654, "step": 181},
    {"epoch": 1.6177777777777778, "grad_norm": 0.7441728711128235, "learning_rate": 9.881480035599667e-05, "loss": 0.4128, "step": 182},
    {"epoch": 1.6266666666666667, "grad_norm": 0.2983834147453308, "learning_rate": 9.878083490027406e-05, "loss": 0.3103, "step": 183},
    {"epoch": 1.6355555555555554, "grad_norm": 0.2417658269405365, "learning_rate": 9.874639560909117e-05, "loss": 0.248, "step": 184},
    {"epoch": 1.6444444444444444, "grad_norm": 0.4583745002746582, "learning_rate": 9.871148281697608e-05, "loss": 0.2747, "step": 185},
    {"epoch": 1.6533333333333333, "grad_norm": 0.42793506383895874, "learning_rate": 9.867609686305617e-05, "loss": 0.282, "step": 186},
    {"epoch": 1.6622222222222223, "grad_norm": 0.32263195514678955, "learning_rate": 9.864023809105497e-05, "loss": 0.2709, "step": 187},
    {"epoch": 1.6711111111111112, "grad_norm": 0.35320043563842773, "learning_rate": 9.860390684928873e-05, "loss": 0.3429, "step": 188},
    {"epoch": 1.6800000000000002, "grad_norm": 0.6125680804252625, "learning_rate": 9.856710349066307e-05, "loss": 0.2844, "step": 189},
    {"epoch": 1.6888888888888889, "grad_norm": 0.36522263288497925, "learning_rate": 9.852982837266955e-05, "loss": 0.2413, "step": 190},
    {"epoch": 1.6977777777777778, "grad_norm": 0.3167021870613098, "learning_rate": 9.849208185738217e-05, "loss": 0.2682, "step": 191},
    {"epoch": 1.7066666666666666, "grad_norm": 0.46384674310684204, "learning_rate": 9.84538643114539e-05, "loss": 0.2671, "step": 192},
    {"epoch": 1.7155555555555555, "grad_norm": 0.27667102217674255, "learning_rate": 9.841517610611309e-05, "loss": 0.2929, "step": 193},
    {"epoch": 1.7244444444444444, "grad_norm": 0.34263694286346436, "learning_rate": 9.837601761715983e-05, "loss": 0.2837, "step": 194},
    {"epoch": 1.7333333333333334, "grad_norm": 0.5394869446754456, "learning_rate": 9.833638922496238e-05, "loss": 0.2535, "step": 195},
    {"epoch": 1.7422222222222223, "grad_norm": 0.30996885895729065, "learning_rate": 9.829629131445342e-05, "loss": 0.2845, "step": 196},
    {"epoch": 1.751111111111111, "grad_norm": 0.3415825664997101, "learning_rate": 9.825572427512632e-05, "loss": 0.2525, "step": 197},
    {"epoch": 1.76, "grad_norm": 0.23367656767368317, "learning_rate": 9.82146885010314e-05, "loss": 0.295, "step": 198},
    {"epoch": 1.7688888888888887, "grad_norm": 0.32408076524734497, "learning_rate": 9.817318439077195e-05, "loss": 0.3182, "step": 199},
    {"epoch": 1.7777777777777777, "grad_norm": 0.4190412759780884, "learning_rate": 9.81312123475006e-05, "loss": 0.2723, "step": 200},
    {"epoch": 1.7866666666666666, "grad_norm": 0.28320616483688354, "learning_rate": 9.808877277891521e-05, "loss": 0.2618, "step": 201},
    {"epoch": 1.7955555555555556, "grad_norm": 0.3757404386997223, "learning_rate": 9.804586609725499e-05, "loss": 0.3327, "step": 202},
    {"epoch": 1.8044444444444445, "grad_norm": 0.2790829539299011, "learning_rate": 9.800249271929645e-05, "loss": 0.2746, "step": 203},
    {"epoch": 1.8133333333333335, "grad_norm": 0.5675872564315796, "learning_rate": 9.79586530663494e-05, "loss": 0.3178, "step": 204},
    {"epoch": 1.8222222222222222, "grad_norm": 0.7035036683082581, "learning_rate": 9.791434756425288e-05, "loss": 0.283, "step": 205},
    {"epoch": 1.8311111111111111, "grad_norm": 0.22361284494400024, "learning_rate": 9.78695766433709e-05, "loss": 0.2674, "step": 206},
    {"epoch": 1.8399999999999999, "grad_norm": 0.39389535784721375, "learning_rate": 9.782434073858844e-05, "loss": 0.3173, "step": 207},
    {"epoch": 1.8488888888888888, "grad_norm": 0.38015905022621155, "learning_rate": 9.777864028930705e-05, "loss": 0.2277, "step": 208},
    {"epoch": 1.8577777777777778, "grad_norm": 0.44615599513053894, "learning_rate": 9.773247573944066e-05, "loss": 0.2951, "step": 209},
    {"epoch": 1.8666666666666667, "grad_norm": 0.19754816591739655, "learning_rate": 9.768584753741134e-05, "loss": 0.2519, "step": 210},
    {"epoch": 1.8755555555555556, "grad_norm": 0.5915124416351318, "learning_rate": 9.763875613614482e-05, "loss": 0.3013, "step": 211},
    {"epoch": 1.8844444444444446, "grad_norm": 0.4793195426464081, "learning_rate": 9.759120199306613e-05, "loss": 0.2667, "step": 212},
    {"epoch": 1.8933333333333333, "grad_norm": 0.3155132830142975, "learning_rate": 9.754318557009519e-05, "loss": 0.2911, "step": 213},
    {"epoch": 1.9022222222222223, "grad_norm": 0.3596380949020386, "learning_rate": 9.74947073336423e-05, "loss": 0.3004, "step": 214},
    {"epoch": 1.911111111111111, "grad_norm": 0.30630800127983093, "learning_rate": 9.744576775460364e-05, "loss": 0.2379, "step": 215},
    {"epoch": 1.92, "grad_norm": 0.3115442097187042, "learning_rate": 9.73963673083566e-05, "loss": 0.2837, "step": 216},
    {"epoch": 1.9288888888888889, "grad_norm": 0.36604344844818115, "learning_rate": 9.73465064747553e-05, "loss": 0.3264, "step": 217},
    {"epoch": 1.9377777777777778, "grad_norm": 0.29043278098106384, "learning_rate": 9.72961857381258e-05, "loss": 0.3202, "step": 218},
    {"epoch": 1.9466666666666668, "grad_norm": 0.31544229388237, "learning_rate": 9.724540558726151e-05, "loss": 0.2623, "step": 219},
    {"epoch": 1.9555555555555557, "grad_norm": 0.30620795488357544, "learning_rate": 9.719416651541839e-05, "loss": 0.2421, "step": 220},
    {"epoch": 1.9644444444444444, "grad_norm": 0.38000237941741943, "learning_rate": 9.714246902031006e-05, "loss": 0.237, "step": 221},
    {"epoch": 1.9733333333333334, "grad_norm": 0.4513542950153351, "learning_rate": 9.709031360410318e-05, "loss": 0.2942, "step": 222},
    {"epoch": 1.982222222222222, "grad_norm": 0.28410232067108154, "learning_rate": 9.703770077341236e-05, "loss": 0.2918, "step": 223},
    {"epoch": 1.991111111111111, "grad_norm": 0.29834455251693726, "learning_rate": 9.698463103929542e-05, "loss": 0.2444, "step": 224},
    {"epoch": 2.0, "grad_norm": 0.304579496383667, "learning_rate": 9.693110491724827e-05, "loss": 0.3, "step": 225},
    {"epoch": 2.008888888888889, "grad_norm": 0.49522674083709717, "learning_rate": 9.687712292719997e-05, "loss": 0.317, "step": 226},
    {"epoch": 2.017777777777778, "grad_norm": 0.2522561252117157, "learning_rate": 9.682268559350771e-05, "loss": 0.2269, "step": 227},
    {"epoch": 2.026666666666667, "grad_norm": 0.48555663228034973, "learning_rate": 9.67677934449517e-05, "loss": 0.2827, "step": 228},
    {"epoch": 2.0355555555555553, "grad_norm": 0.3875712752342224, "learning_rate": 9.671244701472999e-05, "loss": 0.2813, "step": 229},
    {"epoch": 2.0444444444444443, "grad_norm": 0.2848590910434723, "learning_rate": 9.665664684045333e-05, "loss": 0.2451, "step": 230},
    {"epoch": 2.0533333333333332, "grad_norm": 0.30722254514694214, "learning_rate": 9.660039346413994e-05, "loss": 0.1899, "step": 231},
    {"epoch": 2.062222222222222, "grad_norm": 0.22592511773109436, "learning_rate": 9.654368743221022e-05, "loss": 0.2078, "step": 232},
    {"epoch": 2.071111111111111, "grad_norm": 0.5311526656150818, "learning_rate": 9.648652929548152e-05, "loss": 0.2834, "step": 233},
    {"epoch": 2.08, "grad_norm": 0.28005415201187134, "learning_rate": 9.642891960916268e-05, "loss": 0.2628, "step": 234},
    {"epoch": 2.088888888888889, "grad_norm": 0.29537031054496765, "learning_rate": 9.637085893284876e-05, "loss": 0.2429, "step": 235},
    {"epoch": 2.097777777777778, "grad_norm": 0.5397939682006836, "learning_rate": 9.631234783051544e-05, "loss": 0.3032, "step": 236},
    {"epoch": 2.1066666666666665, "grad_norm": 0.36687228083610535, "learning_rate": 9.625338687051375e-05, "loss": 0.2908, "step": 237},
    {"epoch": 2.1155555555555554, "grad_norm": 0.33907392621040344, "learning_rate": 9.619397662556435e-05, "loss": 0.2333, "step": 238},
    {"epoch": 2.1244444444444444, "grad_norm": 0.43153664469718933, "learning_rate": 9.613411767275209e-05, "loss": 0.2341, "step": 239},
    {"epoch": 2.1333333333333333, "grad_norm": 0.30483999848365784, "learning_rate": 9.607381059352038e-05, "loss": 0.2655, "step": 240},
    {"epoch": 2.1422222222222222, "grad_norm": 1.9127657413482666, "learning_rate": 9.601305597366554e-05, "loss": 0.4065, "step": 241},
    {"epoch": 2.151111111111111, "grad_norm": 0.35035258531570435, "learning_rate": 9.595185440333103e-05, "loss": 0.1855, "step": 242},
    {"epoch": 2.16, "grad_norm": 0.5800159573554993, "learning_rate": 9.589020647700191e-05, "loss": 0.2758, "step": 243},
    {"epoch": 2.168888888888889, "grad_norm": 0.6490089297294617, "learning_rate": 9.582811279349882e-05, "loss": 0.2891, "step": 244},
    {"epoch": 2.1777777777777776, "grad_norm": 0.46835729479789734, "learning_rate": 9.576557395597236e-05, "loss": 0.2762, "step": 245},
    {"epoch": 2.1866666666666665, "grad_norm": 0.3493160605430603, "learning_rate": 9.570259057189717e-05, "loss": 0.2785, "step": 246},
    {"epoch": 2.1955555555555555, "grad_norm": 0.2647961378097534, "learning_rate": 9.563916325306594e-05, "loss": 0.2389, "step": 247},
    {"epoch": 2.2044444444444444, "grad_norm": 0.3641429543495178, "learning_rate": 9.557529261558367e-05, "loss": 0.2625, "step": 248},
    {"epoch": 2.2133333333333334, "grad_norm": 0.3188803195953369, "learning_rate": 9.551097927986144e-05, "loss": 0.2501, "step": 249},
    {"epoch": 2.2222222222222223, "grad_norm": 0.32517799735069275, "learning_rate": 9.544622387061055e-05, "loss": 0.2333, "step": 250},
    {"epoch": 2.2311111111111113, "grad_norm": 0.36616072058677673, "learning_rate": 9.538102701683643e-05, "loss": 0.2942, "step": 251},
    {"epoch": 2.24, "grad_norm": 0.34478914737701416, "learning_rate": 9.53153893518325e-05, "loss": 0.2435, "step": 252},
    {"epoch": 2.2488888888888887, "grad_norm": 0.563099205493927, "learning_rate": 9.5249311513174e-05, "loss": 0.2787, "step": 253},
    {"epoch": 2.2577777777777777, "grad_norm": 0.4205755293369293, "learning_rate": 9.518279414271183e-05, "loss": 0.2974, "step": 254},
    {"epoch": 2.2666666666666666, "grad_norm": 0.32124075293540955, "learning_rate": 9.511583788656632e-05, "loss": 0.3255, "step": 255},
    {"epoch": 2.2755555555555556, "grad_norm": 0.34654173254966736, "learning_rate": 9.504844339512095e-05, "loss": 0.272, "step": 256},
    {"epoch": 2.2844444444444445, "grad_norm": 0.3165460526943207, "learning_rate": 9.498061132301601e-05, "loss": 0.2266, "step": 257},
    {"epoch": 2.2933333333333334, "grad_norm": 0.3656767010688782, "learning_rate": 9.491234232914221e-05, "loss": 0.3019, "step": 258},
    {"epoch": 2.3022222222222224, "grad_norm": 0.28812313079833984, "learning_rate": 9.484363707663442e-05, "loss": 0.2631, "step": 259},
    {"epoch": 2.311111111111111, "grad_norm": 0.5799005031585693, "learning_rate": 9.477449623286505e-05, "loss": 0.2721, "step": 260},
    {"epoch": 2.32, "grad_norm": 0.332253098487854, "learning_rate": 9.470492046943771e-05, "loss": 0.2335, "step": 261},
    {"epoch": 2.328888888888889, "grad_norm": 0.39480510354042053, "learning_rate": 9.463491046218058e-05, "loss": 0.2565, "step": 262},
    {"epoch": 2.3377777777777777, "grad_norm": 0.5974012017250061, "learning_rate": 9.456446689113992e-05, "loss": 0.286, "step": 263},
    {"epoch": 2.3466666666666667, "grad_norm": 0.47316139936447144, "learning_rate": 9.449359044057345e-05, "loss": 0.2111, "step": 264},
    {"epoch": 2.3555555555555556, "grad_norm": 0.5243270993232727, "learning_rate": 9.442228179894362e-05, "loss": 0.2256, "step": 265},
    {"epoch": 2.3644444444444446, "grad_norm": 0.6339777112007141, "learning_rate": 9.435054165891109e-05, "loss": 0.2635, "step": 266},
    {"epoch": 2.3733333333333335, "grad_norm": 0.31498321890830994, "learning_rate": 9.427837071732783e-05, "loss": 0.2504, "step": 267},
    {"epoch": 2.3822222222222225, "grad_norm": 1.1357451677322388, "learning_rate": 9.420576967523049e-05, "loss": 0.3394, "step": 268},
    {"epoch": 2.391111111111111, "grad_norm": 0.3476477265357971, "learning_rate": 9.413273923783346e-05, "loss": 0.241, "step": 269},
    {"epoch": 2.4, "grad_norm": 0.3083588778972626, "learning_rate": 9.405928011452211e-05, "loss": 0.2366, "step": 270},
    {"epoch": 2.408888888888889, "grad_norm": 0.45807912945747375, "learning_rate": 9.398539301884592e-05, "loss": 0.2271, "step": 271},
    {"epoch": 2.417777777777778, "grad_norm": 0.28123828768730164, "learning_rate": 9.391107866851143e-05, "loss": 0.2591, "step": 272},
    {"epoch": 2.4266666666666667, "grad_norm": 0.45137813687324524, "learning_rate": 9.38363377853754e-05, "loss": 0.2254, "step": 273},
    {"epoch": 2.4355555555555557, "grad_norm": 0.38635390996932983, "learning_rate": 9.376117109543769e-05, "loss": 0.2498, "step": 274},
    {"epoch": 2.4444444444444446, "grad_norm": 0.36426419019699097, "learning_rate": 9.368557932883432e-05, "loss": 0.2346, "step": 275},
    {"epoch": 2.453333333333333, "grad_norm": 0.5773187875747681, "learning_rate": 9.360956321983028e-05, "loss": 0.2479, "step": 276},
    {"epoch": 2.462222222222222, "grad_norm": 0.4950138330459595, "learning_rate": 9.353312350681242e-05, "loss": 0.2108, "step": 277},
    {"epoch": 2.471111111111111, "grad_norm": 0.599851131439209, "learning_rate": 9.345626093228233e-05, "loss": 0.2419, "step": 278},
    {"epoch": 2.48, "grad_norm": 0.6381746530532837, "learning_rate": 9.337897624284906e-05, "loss": 0.2769, "step": 279},
    {"epoch": 2.488888888888889, "grad_norm": 0.28884226083755493, "learning_rate": 9.330127018922194e-05, "loss": 0.2746, "step": 280},
    {"epoch": 2.497777777777778, "grad_norm": 0.3661234378814697, "learning_rate": 9.322314352620318e-05, "loss": 0.2438, "step": 281},
    {"epoch": 2.506666666666667, "grad_norm": 0.4173062741756439, "learning_rate": 9.314459701268065e-05, "loss": 0.2864, "step": 282},
    {"epoch": 2.5155555555555553, "grad_norm": 0.6054656505584717, "learning_rate": 9.306563141162046e-05, "loss": 0.2368, "step": 283},
    {"epoch": 2.5244444444444447, "grad_norm": 0.3535010814666748, "learning_rate": 9.298624749005951e-05, "loss": 0.2807, "step": 284},
    {"epoch": 2.533333333333333, "grad_norm": 0.3705323338508606, "learning_rate": 9.290644601909815e-05, "loss": 0.2116, "step": 285},
    {"epoch": 2.542222222222222, "grad_norm": 0.3306547701358795, "learning_rate": 9.282622777389258e-05, "loss": 0.2345, "step": 286},
    {"epoch": 2.551111111111111, "grad_norm": 0.40624353289604187, "learning_rate": 9.274559353364734e-05, "loss": 0.268, "step": 287},
    {"epoch": 2.56, "grad_norm": 0.4179963767528534, "learning_rate": 9.266454408160779e-05, "loss": 0.2457, "step": 288},
    {"epoch": 2.568888888888889, "grad_norm": 0.3766705393791199, "learning_rate": 9.258308020505247e-05, "loss": 0.2422, "step": 289},
    {"epoch": 2.5777777777777775, "grad_norm": 0.26529139280319214, "learning_rate": 9.250120269528546e-05, "loss": 0.2562, "step": 290},
    {"epoch": 2.586666666666667, "grad_norm": 0.5525755286216736, "learning_rate": 9.241891234762869e-05, "loss": 0.2836, "step": 291},
    {"epoch": 2.5955555555555554, "grad_norm": 0.40089136362075806, "learning_rate": 9.233620996141421e-05, "loss": 0.2623, "step": 292},
    {"epoch": 2.6044444444444443, "grad_norm": 0.3758887052536011, "learning_rate": 9.225309633997641e-05, "loss": 0.2118, "step": 293},
    {"epoch": 2.6133333333333333, "grad_norm": 0.2941136062145233, "learning_rate": 9.21695722906443e-05, "loss": 0.2449, "step": 294},
    {"epoch": 2.6222222222222222, "grad_norm": 0.32856640219688416, "learning_rate": 9.208563862473351e-05, "loss": 0.2308, "step": 295},
    {"epoch": 2.631111111111111, "grad_norm": 0.3801966905593872, "learning_rate": 9.200129615753859e-05, "loss": 0.2727, "step": 296},
    {"epoch": 2.64, "grad_norm": 0.5766463875770569, "learning_rate": 9.191654570832496e-05, "loss": 0.2068, "step": 297},
    {"epoch": 2.648888888888889, "grad_norm": 0.2755817174911499, "learning_rate": 9.183138810032099e-05, "loss": 0.2436, "step": 298},
    {"epoch": 2.6577777777777776, "grad_norm": 0.292676717042923, "learning_rate": 9.174582416071007e-05, "loss": 0.2379, "step": 299},
    {"epoch": 2.6666666666666665, "grad_norm": 0.5588552951812744, "learning_rate": 9.165985472062246e-05, "loss": 0.3178, "step": 300},
    {"epoch": 2.6755555555555555, "grad_norm": 0.3965974748134613, "learning_rate": 9.157348061512727e-05, "loss": 0.2575, "step": 301},
    {"epoch": 2.6844444444444444, "grad_norm": 0.45277366042137146, "learning_rate": 9.148670268322438e-05, "loss": 0.2235, "step": 302},
    {"epoch": 2.6933333333333334, "grad_norm": 0.34740129113197327, "learning_rate": 9.139952176783626e-05, "loss": 0.2199, "step": 303},
    {"epoch": 2.7022222222222223, "grad_norm": 0.280239075422287, "learning_rate": 9.131193871579975e-05, "loss": 0.2442, "step": 304},
    {"epoch": 2.7111111111111112, "grad_norm": 0.3178425133228302, "learning_rate": 9.12239543778579e-05, "loss": 0.2262, "step": 305},
    {"epoch": 2.7199999999999998, "grad_norm": 0.421599417924881, "learning_rate": 9.113556960865167e-05, "loss": 0.2326, "step": 306},
    {"epoch": 2.728888888888889, "grad_norm": 0.25511476397514343, "learning_rate": 9.104678526671162e-05, "loss": 0.2261, "step": 307},
    {"epoch": 2.7377777777777776, "grad_norm": 0.48630788922309875, "learning_rate": 9.09576022144496e-05, "loss": 0.2077, "step": 308},
    {"epoch": 2.7466666666666666, "grad_norm": 0.38153597712516785, "learning_rate": 9.086802131815031e-05, "loss": 0.2797, "step": 309},
    {"epoch": 2.7555555555555555, "grad_norm": 0.5679035186767578, "learning_rate": 9.077804344796302e-05, "loss": 0.2584, "step": 310},
    {"epoch": 2.7644444444444445, "grad_norm": 0.5588884949684143, "learning_rate": 9.068766947789292e-05, "loss": 0.2838, "step": 311},
    {"epoch": 2.7733333333333334, "grad_norm": 0.40492427349090576, "learning_rate": 9.059690028579284e-05, "loss": 0.2037, "step": 312},
    {"epoch": 2.7822222222222224, "grad_norm": 0.3332842290401459, "learning_rate": 9.050573675335453e-05, "loss": 0.2567, "step": 313},
    {"epoch": 2.7911111111111113, "grad_norm": 0.29908373951911926, "learning_rate": 9.041417976610027e-05, "loss": 0.2315, "step": 314},
    {"epoch": 2.8, "grad_norm": 0.31926652789115906, "learning_rate": 9.032223021337414e-05, "loss": 0.2138, "step": 315},
    {"epoch": 2.8088888888888888, "grad_norm": 0.31083905696868896, "learning_rate": 9.022988898833342e-05, "loss": 0.211, "step": 316},
    {"epoch": 2.8177777777777777, "grad_norm": 0.3902362287044525, "learning_rate": 9.013715698793996e-05, "loss": 0.2503, "step": 317},
    {"epoch": 2.8266666666666667, "grad_norm": 0.4482881724834442, "learning_rate": 9.004403511295141e-05, "loss": 0.2204, "step": 318},
    {"epoch": 2.8355555555555556, "grad_norm": 1.0134060382843018, "learning_rate": 8.995052426791247e-05, "loss": 0.2454, "step": 319},
    {"epoch": 2.8444444444444446, "grad_norm": 0.4079667329788208, "learning_rate": 8.985662536114613e-05, "loss": 0.2865, "step": 320},
    {"epoch": 2.8533333333333335, "grad_norm": 0.34580424427986145, "learning_rate": 8.976233930474486e-05, "loss": 0.2193, "step": 321},
    {"epoch": 2.862222222222222, "grad_norm": 0.35853755474090576, "learning_rate": 8.966766701456177e-05, "loss": 0.2596, "step": 322},
    {"epoch": 2.871111111111111, "grad_norm": 0.6029608249664307, "learning_rate": 8.957260941020154e-05, "loss": 0.264, "step": 323},
    {"epoch": 2.88, "grad_norm": 0.5026920437812805, "learning_rate": 8.947716741501177e-05, "loss": 0.2566, "step": 324},
    {"epoch": 2.888888888888889, "grad_norm": 0.5039583444595337, "learning_rate": 8.938134195607377e-05, "loss": 0.2266, "step": 325},
    {"epoch": 2.897777777777778, "grad_norm": 0.4172382652759552, "learning_rate": 8.928513396419368e-05, "loss": 0.251, "step": 326},
    {"epoch": 2.9066666666666667, "grad_norm": 0.48238590359687805, "learning_rate": 8.918854437389342e-05, "loss": 0.2583, "step": 327},
    {"epoch": 2.9155555555555557, "grad_norm": 0.37107643485069275, "learning_rate": 8.90915741234015e-05, "loss": 0.2403, "step": 328},
    {"epoch": 2.924444444444444, "grad_norm": 0.26561880111694336, "learning_rate": 8.899422415464409e-05, "loss": 0.233, "step": 329},
    {"epoch": 2.9333333333333336, "grad_norm": 0.43001478910446167, "learning_rate": 8.889649541323574e-05, "loss": 0.2598, "step": 330},
    {"epoch": 2.942222222222222, "grad_norm": 0.3992827832698822, "learning_rate": 8.879838884847025e-05, "loss": 0.2151, "step": 331},
    {"epoch": 2.951111111111111, "grad_norm": 0.44580769538879395, "learning_rate": 8.869990541331138e-05, "loss": 0.2296, "step": 332},
    {"epoch": 2.96, "grad_norm": 0.3314950168132782, "learning_rate": 8.860104606438369e-05, "loss": 0.2331, "step": 333},
    {"epoch": 2.968888888888889, "grad_norm": 0.6843854784965515, "learning_rate": 8.850181176196315e-05, "loss": 0.3505, "step": 334},
    {"epoch": 2.977777777777778, "grad_norm": 0.2698711156845093, "learning_rate": 8.840220346996792e-05, "loss": 0.2272, "step": 335},
    {"epoch": 2.986666666666667, "grad_norm": 0.26144105195999146, "learning_rate": 8.83022221559489e-05, "loss": 0.2438, "step": 336},
    {"epoch": 2.9955555555555557, "grad_norm": 0.26964858174324036, "learning_rate": 8.820186879108038e-05, "loss": 0.2434, "step": 337},
    {"epoch": 3.0044444444444443, "grad_norm": 0.2074783891439438, "learning_rate": 8.810114435015054e-05, "loss": 0.2438, "step": 338},
    {"epoch": 3.013333333333333, "grad_norm": 0.5041800737380981, "learning_rate": 8.800004981155208e-05, "loss": 0.2566, "step": 339},
    {"epoch": 3.022222222222222, "grad_norm": 0.24667270481586456, "learning_rate": 8.789858615727265e-05, "loss": 0.2366, "step": 340},
    {"epoch": 3.031111111111111, "grad_norm": 0.3841206729412079, "learning_rate": 8.779675437288532e-05, "loss": 0.2371, "step": 341},
    {"epoch": 3.04, "grad_norm": 0.23301474750041962, "learning_rate": 8.7694555447539e-05, "loss": 0.1969, "step": 342},
    {"epoch": 3.048888888888889, "grad_norm": 0.26055586338043213, "learning_rate": 8.759199037394887e-05, "loss": 0.2064, "step": 343},
    {"epoch": 3.057777777777778, "grad_norm": 0.4642762243747711, "learning_rate": 8.748906014838672e-05, "loss": 0.1877, "step": 344},
    {"epoch": 3.066666666666667, "grad_norm": 0.28164124488830566, "learning_rate": 8.738576577067122e-05, "loss": 0.2482, "step": 345},
    {"epoch": 3.0755555555555554, "grad_norm": 0.6319877505302429, "learning_rate": 8.728210824415827e-05, "loss": 0.196, "step": 346},
    {"epoch": 3.0844444444444443, "grad_norm": 0.3142436146736145, "learning_rate": 8.717808857573131e-05, "loss": 0.1907, "step": 347},
    {"epoch": 3.0933333333333333, "grad_norm": 0.3424491882324219, "learning_rate": 8.707370777579133e-05, "loss": 0.228, "step": 348},
    {"epoch": 3.102222222222222, "grad_norm": 0.34540361166000366, "learning_rate": 8.696896685824731e-05, "loss": 0.2292, "step": 349},
    {"epoch": 3.111111111111111, "grad_norm": 0.38283613324165344, "learning_rate": 8.68638668405062e-05, "loss": 0.1662, "step": 350},
    {"epoch": 3.12, "grad_norm": 0.4507913887500763, "learning_rate": 8.67584087434631e-05, "loss": 0.2408, "step": 351},
    {"epoch": 3.128888888888889, "grad_norm": 1.011723518371582, "learning_rate": 8.665259359149132e-05, "loss": 0.2772, "step": 352},
    {"epoch": 3.137777777777778, "grad_norm": 0.5270500779151917, "learning_rate": 8.654642241243247e-05, "loss": 0.2017, "step": 353},
    {"epoch": 3.1466666666666665, "grad_norm": 0.8714268207550049, "learning_rate": 8.643989623758643e-05, "loss": 0.2311, "step": 354},
    {"epoch": 3.1555555555555554, "grad_norm": 0.2898414134979248, "learning_rate": 8.633301610170135e-05, "loss": 0.2443, "step": 355},
    {"epoch": 3.1644444444444444, "grad_norm": 0.6308351159095764, "learning_rate": 8.622578304296364e-05, "loss": 0.2517, "step": 356},
    {"epoch": 3.1733333333333333, "grad_norm": 0.5109901428222656, "learning_rate": 8.611819810298778e-05, "loss": 0.2269, "step": 357},
    {"epoch": 3.1822222222222223, "grad_norm": 0.432049959897995, "learning_rate": 8.601026232680634e-05, "loss": 0.1978, "step": 358},
    {"epoch": 3.1911111111111112, "grad_norm": 0.3804490566253662, "learning_rate": 8.59019767628597e-05, "loss": 0.2082, "step": 359},
    {"epoch": 3.2, "grad_norm": 0.31701037287712097, "learning_rate": 8.579334246298593e-05, "loss": 0.2356, "step": 360},
    {"epoch": 3.2088888888888887, "grad_norm": 0.25454384088516235, "learning_rate": 8.56843604824106e-05, "loss": 0.1967, "step": 361},
    {"epoch": 3.2177777777777776, "grad_norm": 0.3782944977283478, "learning_rate": 8.557503187973651e-05, "loss": 0.239, "step": 362},
    {"epoch": 3.2266666666666666, "grad_norm": 0.6260755062103271, "learning_rate": 8.546535771693334e-05, "loss": 0.2061, "step": 363},
    {"epoch": 3.2355555555555555, "grad_norm": 0.3981928825378418, "learning_rate": 8.535533905932738e-05, "loss": 0.2094, "step": 364},
    {"epoch": 3.2444444444444445, "grad_norm": 0.5317679643630981, "learning_rate": 8.524497697559126e-05, "loss": 0.2448, "step": 365},
    {"epoch": 3.2533333333333334, "grad_norm": 0.45160210132598877, "learning_rate": 8.513427253773346e-05, "loss": 0.2451, "step": 366},
    {"epoch": 3.2622222222222224, "grad_norm": 0.5067645907402039, "learning_rate": 8.502322682108792e-05, "loss": 0.2036, "step": 367},
    {"epoch": 3.2711111111111113, "grad_norm": 0.3842930495738983, "learning_rate": 8.491184090430364e-05, "loss": 0.2056, "step": 368},
    {"epoch": 3.2800000000000002, "grad_norm": 0.33578169345855713, "learning_rate": 8.480011586933418e-05, "loss": 0.2292, "step": 369},
    {"epoch": 3.2888888888888888, "grad_norm": 0.4589829444885254, "learning_rate": 8.468805280142709e-05, "loss": 0.2026, "step": 370},
    {"epoch": 3.2977777777777777, "grad_norm": 0.666786789894104, "learning_rate": 8.457565278911348e-05, "loss": 0.2031, "step": 371},
    {"epoch": 3.3066666666666666, "grad_norm": 0.3466854393482208, "learning_rate": 8.446291692419736e-05, "loss": 0.2024, "step": 372},
    {"epoch": 3.3155555555555556, "grad_norm": 0.4151984453201294, "learning_rate": 8.434984630174509e-05, "loss": 0.2071, "step": 373},
    {"epoch": 3.3244444444444445, "grad_norm": 0.3907124102115631, "learning_rate": 8.423644202007467e-05,
|
"loss": 0.199, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 3.3333333333333335, |
|
"grad_norm": 0.7014510035514832, |
|
"learning_rate": 8.412270518074518e-05, |
|
"loss": 0.2345, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 3.3422222222222224, |
|
"grad_norm": 0.3714005649089813, |
|
"learning_rate": 8.400863688854597e-05, |
|
"loss": 0.2272, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 3.351111111111111, |
|
"grad_norm": 0.9573063254356384, |
|
"learning_rate": 8.389423825148598e-05, |
|
"loss": 0.2531, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 3.36, |
|
"grad_norm": 0.5541326999664307, |
|
"learning_rate": 8.377951038078302e-05, |
|
"loss": 0.2667, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 3.368888888888889, |
|
"grad_norm": 0.5363870859146118, |
|
"learning_rate": 8.366445439085286e-05, |
|
"loss": 0.2028, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 3.3777777777777778, |
|
"grad_norm": 0.4728504717350006, |
|
"learning_rate": 8.354907139929851e-05, |
|
"loss": 0.1955, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 3.3866666666666667, |
|
"grad_norm": 0.46801096200942993, |
|
"learning_rate": 8.343336252689935e-05, |
|
"loss": 0.1981, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 3.3955555555555557, |
|
"grad_norm": 0.528943657875061, |
|
"learning_rate": 8.33173288976002e-05, |
|
"loss": 0.2185, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 3.4044444444444446, |
|
"grad_norm": 0.7669292092323303, |
|
"learning_rate": 8.320097163850043e-05, |
|
"loss": 0.264, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 3.413333333333333, |
|
"grad_norm": 0.46919432282447815, |
|
"learning_rate": 8.308429187984297e-05, |
|
"loss": 0.1932, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 3.422222222222222, |
|
"grad_norm": 0.5307047367095947, |
|
"learning_rate": 8.296729075500344e-05, |
|
"loss": 0.1948, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 3.431111111111111, |
|
"grad_norm": 0.4287688136100769, |
|
"learning_rate": 8.284996940047903e-05, |
|
"loss": 0.2424, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 3.44, |
|
"grad_norm": 0.44700077176094055, |
|
"learning_rate": 8.273232895587748e-05, |
|
"loss": 0.2041, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 3.448888888888889, |
|
"grad_norm": 0.5288101434707642, |
|
"learning_rate": 8.261437056390606e-05, |
|
"loss": 0.2205, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 3.457777777777778, |
|
"grad_norm": 0.4757843613624573, |
|
"learning_rate": 8.249609537036043e-05, |
|
"loss": 0.2398, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 3.466666666666667, |
|
"grad_norm": 0.2801768481731415, |
|
"learning_rate": 8.237750452411353e-05, |
|
"loss": 0.187, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 3.4755555555555557, |
|
"grad_norm": 0.35519644618034363, |
|
"learning_rate": 8.225859917710439e-05, |
|
"loss": 0.2045, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 3.4844444444444447, |
|
"grad_norm": 0.4472619891166687, |
|
"learning_rate": 8.213938048432697e-05, |
|
"loss": 0.21, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 3.493333333333333, |
|
"grad_norm": 0.7717186808586121, |
|
"learning_rate": 8.201984960381894e-05, |
|
"loss": 0.2442, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 3.502222222222222, |
|
"grad_norm": 0.5858293771743774, |
|
"learning_rate": 8.190000769665044e-05, |
|
"loss": 0.2072, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 3.511111111111111, |
|
"grad_norm": 0.4433729648590088, |
|
"learning_rate": 8.177985592691272e-05, |
|
"loss": 0.2933, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 3.52, |
|
"grad_norm": 0.47395846247673035, |
|
"learning_rate": 8.1659395461707e-05, |
|
"loss": 0.218, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 3.528888888888889, |
|
"grad_norm": 0.4193160831928253, |
|
"learning_rate": 8.153862747113292e-05, |
|
"loss": 0.2046, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 3.537777777777778, |
|
"grad_norm": 0.40366363525390625, |
|
"learning_rate": 8.141755312827736e-05, |
|
"loss": 0.2022, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 3.546666666666667, |
|
"grad_norm": 0.543988823890686, |
|
"learning_rate": 8.129617360920296e-05, |
|
"loss": 0.1818, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 3.5555555555555554, |
|
"grad_norm": 0.3089783191680908, |
|
"learning_rate": 8.117449009293668e-05, |
|
"loss": 0.2196, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 3.5644444444444443, |
|
"grad_norm": 0.399312287569046, |
|
"learning_rate": 8.10525037614584e-05, |
|
"loss": 0.1889, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 3.5733333333333333, |
|
"grad_norm": 0.3407880663871765, |
|
"learning_rate": 8.093021579968941e-05, |
|
"loss": 0.2562, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 3.582222222222222, |
|
"grad_norm": 0.8019405603408813, |
|
"learning_rate": 8.080762739548089e-05, |
|
"loss": 0.2453, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 3.591111111111111, |
|
"grad_norm": 0.308278352022171, |
|
"learning_rate": 8.068473973960238e-05, |
|
"loss": 0.2129, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 3.6, |
|
"grad_norm": 0.5972309112548828, |
|
"learning_rate": 8.056155402573024e-05, |
|
"loss": 0.2369, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 3.608888888888889, |
|
"grad_norm": 0.40940365195274353, |
|
"learning_rate": 8.043807145043604e-05, |
|
"loss": 0.2144, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 3.6177777777777775, |
|
"grad_norm": 0.3109559714794159, |
|
"learning_rate": 8.03142932131749e-05, |
|
"loss": 0.2189, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 3.626666666666667, |
|
"grad_norm": 0.3607446551322937, |
|
"learning_rate": 8.019022051627388e-05, |
|
"loss": 0.2197, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 3.6355555555555554, |
|
"grad_norm": 0.4839872419834137, |
|
"learning_rate": 8.006585456492029e-05, |
|
"loss": 0.2797, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 3.6444444444444444, |
|
"grad_norm": 0.33101025223731995, |
|
"learning_rate": 7.994119656715002e-05, |
|
"loss": 0.2042, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 3.6533333333333333, |
|
"grad_norm": 0.558871865272522, |
|
"learning_rate": 7.981624773383572e-05, |
|
"loss": 0.2372, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 3.6622222222222223, |
|
"grad_norm": 0.29825037717819214, |
|
"learning_rate": 7.969100927867507e-05, |
|
"loss": 0.2156, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 3.671111111111111, |
|
"grad_norm": 0.4881444573402405, |
|
"learning_rate": 7.956548241817912e-05, |
|
"loss": 0.177, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 3.68, |
|
"grad_norm": 0.3237267732620239, |
|
"learning_rate": 7.943966837166023e-05, |
|
"loss": 0.2089, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 3.688888888888889, |
|
"grad_norm": 0.32327884435653687, |
|
"learning_rate": 7.931356836122046e-05, |
|
"loss": 0.2082, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 3.6977777777777776, |
|
"grad_norm": 0.4469964802265167, |
|
"learning_rate": 7.91871836117395e-05, |
|
"loss": 0.1687, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 3.7066666666666666, |
|
"grad_norm": 0.29894670844078064, |
|
"learning_rate": 7.906051535086296e-05, |
|
"loss": 0.1816, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 3.7155555555555555, |
|
"grad_norm": 0.3552427291870117, |
|
"learning_rate": 7.89335648089903e-05, |
|
"loss": 0.2157, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 3.7244444444444444, |
|
"grad_norm": 0.6162396669387817, |
|
"learning_rate": 7.880633321926294e-05, |
|
"loss": 0.1988, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 3.7333333333333334, |
|
"grad_norm": 0.4064711332321167, |
|
"learning_rate": 7.86788218175523e-05, |
|
"loss": 0.2438, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 3.7422222222222223, |
|
"grad_norm": 0.634732723236084, |
|
"learning_rate": 7.855103184244776e-05, |
|
"loss": 0.266, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 3.7511111111111113, |
|
"grad_norm": 1.4341474771499634, |
|
"learning_rate": 7.842296453524463e-05, |
|
"loss": 0.3107, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 3.76, |
|
"grad_norm": 0.49320435523986816, |
|
"learning_rate": 7.829462113993207e-05, |
|
"loss": 0.2334, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 3.7688888888888887, |
|
"grad_norm": 0.39640945196151733, |
|
"learning_rate": 7.81660029031811e-05, |
|
"loss": 0.2418, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 3.7777777777777777, |
|
"grad_norm": 0.6148217916488647, |
|
"learning_rate": 7.80371110743324e-05, |
|
"loss": 0.2023, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 3.7866666666666666, |
|
"grad_norm": 0.3857884109020233, |
|
"learning_rate": 7.79079469053842e-05, |
|
"loss": 0.2146, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 3.7955555555555556, |
|
"grad_norm": 0.38891321420669556, |
|
"learning_rate": 7.777851165098012e-05, |
|
"loss": 0.1895, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 3.8044444444444445, |
|
"grad_norm": 0.3949791491031647, |
|
"learning_rate": 7.764880656839696e-05, |
|
"loss": 0.1818, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 3.8133333333333335, |
|
"grad_norm": 0.36369502544403076, |
|
"learning_rate": 7.751883291753262e-05, |
|
"loss": 0.2442, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 3.822222222222222, |
|
"grad_norm": 0.39478230476379395, |
|
"learning_rate": 7.738859196089358e-05, |
|
"loss": 0.1618, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 3.8311111111111114, |
|
"grad_norm": 0.34515222907066345, |
|
"learning_rate": 7.725808496358295e-05, |
|
"loss": 0.225, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 3.84, |
|
"grad_norm": 0.5892112851142883, |
|
"learning_rate": 7.712731319328798e-05, |
|
"loss": 0.2617, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 3.848888888888889, |
|
"grad_norm": 0.44167014956474304, |
|
"learning_rate": 7.699627792026783e-05, |
|
"loss": 0.2044, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 3.8577777777777778, |
|
"grad_norm": 0.3467167615890503, |
|
"learning_rate": 7.68649804173412e-05, |
|
"loss": 0.2429, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 3.8666666666666667, |
|
"grad_norm": 0.5901280045509338, |
|
"learning_rate": 7.673342195987397e-05, |
|
"loss": 0.1852, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 3.8755555555555556, |
|
"grad_norm": 0.47540798783302307, |
|
"learning_rate": 7.660160382576683e-05, |
|
"loss": 0.1873, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 3.8844444444444446, |
|
"grad_norm": 0.546599268913269, |
|
"learning_rate": 7.646952729544284e-05, |
|
"loss": 0.2246, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 3.8933333333333335, |
|
"grad_norm": 0.3890751898288727, |
|
"learning_rate": 7.633719365183504e-05, |
|
"loss": 0.2376, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 3.902222222222222, |
|
"grad_norm": 0.5732626914978027, |
|
"learning_rate": 7.620460418037388e-05, |
|
"loss": 0.2012, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 3.911111111111111, |
|
"grad_norm": 0.5902274250984192, |
|
"learning_rate": 7.60717601689749e-05, |
|
"loss": 0.2548, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 3.92, |
|
"grad_norm": 0.5472671985626221, |
|
"learning_rate": 7.593866290802608e-05, |
|
"loss": 0.203, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 3.928888888888889, |
|
"grad_norm": 1.1805832386016846, |
|
"learning_rate": 7.580531369037533e-05, |
|
"loss": 0.2373, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 3.937777777777778, |
|
"grad_norm": 0.4900795817375183, |
|
"learning_rate": 7.567171381131802e-05, |
|
"loss": 0.2181, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 3.9466666666666668, |
|
"grad_norm": 0.35954034328460693, |
|
"learning_rate": 7.553786456858429e-05, |
|
"loss": 0.205, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 3.9555555555555557, |
|
"grad_norm": 0.6151618361473083, |
|
"learning_rate": 7.540376726232648e-05, |
|
"loss": 0.216, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 3.964444444444444, |
|
"grad_norm": 0.6646876335144043, |
|
"learning_rate": 7.526942319510655e-05, |
|
"loss": 0.2201, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 3.9733333333333336, |
|
"grad_norm": 0.3377375602722168, |
|
"learning_rate": 7.513483367188335e-05, |
|
"loss": 0.2397, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 3.982222222222222, |
|
"grad_norm": 0.44786104559898376, |
|
"learning_rate": 7.500000000000001e-05, |
|
"loss": 0.2128, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 3.991111111111111, |
|
"grad_norm": 0.8776792287826538, |
|
"learning_rate": 7.48649234891712e-05, |
|
"loss": 0.2508, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 4.0, |
|
"grad_norm": 0.3274805247783661, |
|
"learning_rate": 7.472960545147038e-05, |
|
"loss": 0.2039, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 4.0088888888888885, |
|
"grad_norm": 0.29022541642189026, |
|
"learning_rate": 7.459404720131718e-05, |
|
"loss": 0.2154, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 4.017777777777778, |
|
"grad_norm": 0.27060315012931824, |
|
"learning_rate": 7.445825005546448e-05, |
|
"loss": 0.1818, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 4.026666666666666, |
|
"grad_norm": 0.37638726830482483, |
|
"learning_rate": 7.432221533298569e-05, |
|
"loss": 0.1706, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 4.035555555555556, |
|
"grad_norm": 0.506497323513031, |
|
"learning_rate": 7.4185944355262e-05, |
|
"loss": 0.188, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 4.044444444444444, |
|
"grad_norm": 0.3924923241138458, |
|
"learning_rate": 7.404943844596939e-05, |
|
"loss": 0.234, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 4.053333333333334, |
|
"grad_norm": 0.4585055112838745, |
|
"learning_rate": 7.391269893106592e-05, |
|
"loss": 0.1821, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 4.062222222222222, |
|
"grad_norm": 0.41875752806663513, |
|
"learning_rate": 7.377572713877877e-05, |
|
"loss": 0.1349, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 4.071111111111111, |
|
"grad_norm": 0.33695676922798157, |
|
"learning_rate": 7.363852439959135e-05, |
|
"loss": 0.1754, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 4.08, |
|
"grad_norm": 0.739496648311615, |
|
"learning_rate": 7.350109204623043e-05, |
|
"loss": 0.1928, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 4.088888888888889, |
|
"grad_norm": 0.7458643317222595, |
|
"learning_rate": 7.33634314136531e-05, |
|
"loss": 0.2129, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 4.097777777777778, |
|
"grad_norm": 0.6029854416847229, |
|
"learning_rate": 7.322554383903388e-05, |
|
"loss": 0.1585, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 4.1066666666666665, |
|
"grad_norm": 0.5195834636688232, |
|
"learning_rate": 7.308743066175172e-05, |
|
"loss": 0.1809, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 4.115555555555556, |
|
"grad_norm": 0.5776547193527222, |
|
"learning_rate": 7.294909322337689e-05, |
|
"loss": 0.1918, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 4.124444444444444, |
|
"grad_norm": 0.394884318113327, |
|
"learning_rate": 7.281053286765815e-05, |
|
"loss": 0.2107, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 4.133333333333334, |
|
"grad_norm": 0.46265098452568054, |
|
"learning_rate": 7.267175094050952e-05, |
|
"loss": 0.2289, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 4.142222222222222, |
|
"grad_norm": 0.7043814063072205, |
|
"learning_rate": 7.253274878999727e-05, |
|
"loss": 0.185, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 4.151111111111111, |
|
"grad_norm": 0.6173180341720581, |
|
"learning_rate": 7.239352776632681e-05, |
|
"loss": 0.1913, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 4.16, |
|
"grad_norm": 0.6549043655395508, |
|
"learning_rate": 7.225408922182961e-05, |
|
"loss": 0.2055, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 4.168888888888889, |
|
"grad_norm": 0.488744854927063, |
|
"learning_rate": 7.211443451095007e-05, |
|
"loss": 0.1934, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 4.177777777777778, |
|
"grad_norm": 0.8572131395339966, |
|
"learning_rate": 7.197456499023225e-05, |
|
"loss": 0.166, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 4.1866666666666665, |
|
"grad_norm": 0.4277536869049072, |
|
"learning_rate": 7.183448201830685e-05, |
|
"loss": 0.1799, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 4.195555555555556, |
|
"grad_norm": 0.37053030729293823, |
|
"learning_rate": 7.169418695587791e-05, |
|
"loss": 0.1943, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 4.204444444444444, |
|
"grad_norm": 0.5450714826583862, |
|
"learning_rate": 7.155368116570962e-05, |
|
"loss": 0.1985, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 4.213333333333333, |
|
"grad_norm": 0.5906587243080139, |
|
"learning_rate": 7.141296601261314e-05, |
|
"loss": 0.2086, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 4.222222222222222, |
|
"grad_norm": 0.601839542388916, |
|
"learning_rate": 7.127204286343321e-05, |
|
"loss": 0.1699, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 4.231111111111111, |
|
"grad_norm": 0.4745982587337494, |
|
"learning_rate": 7.113091308703498e-05, |
|
"loss": 0.1757, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 4.24, |
|
"grad_norm": 0.5195015072822571, |
|
"learning_rate": 7.098957805429072e-05, |
|
"loss": 0.196, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 4.248888888888889, |
|
"grad_norm": 0.34185004234313965, |
|
"learning_rate": 7.084803913806641e-05, |
|
"loss": 0.1876, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 4.257777777777778, |
|
"grad_norm": 0.8930533528327942, |
|
"learning_rate": 7.070629771320852e-05, |
|
"loss": 0.1915, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 4.266666666666667, |
|
"grad_norm": 0.5491169095039368, |
|
"learning_rate": 7.056435515653059e-05, |
|
"loss": 0.221, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 4.275555555555556, |
|
"grad_norm": 0.36910513043403625, |
|
"learning_rate": 7.042221284679982e-05, |
|
"loss": 0.183, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 4.2844444444444445, |
|
"grad_norm": 0.961035430431366, |
|
"learning_rate": 7.027987216472377e-05, |
|
"loss": 0.1886, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 4.293333333333333, |
|
"grad_norm": 0.7092379927635193, |
|
"learning_rate": 7.013733449293687e-05, |
|
"loss": 0.1477, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 4.302222222222222, |
|
"grad_norm": 0.45441004633903503, |
|
"learning_rate": 6.999460121598704e-05, |
|
"loss": 0.1962, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 4.311111111111111, |
|
"grad_norm": 0.43963754177093506, |
|
"learning_rate": 6.985167372032225e-05, |
|
"loss": 0.1689, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 4.32, |
|
"grad_norm": 1.1201061010360718, |
|
"learning_rate": 6.970855339427698e-05, |
|
"loss": 0.176, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 4.328888888888889, |
|
"grad_norm": 0.7198124527931213, |
|
"learning_rate": 6.956524162805875e-05, |
|
"loss": 0.2368, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 4.337777777777778, |
|
"grad_norm": 0.7982850670814514, |
|
"learning_rate": 6.942173981373474e-05, |
|
"loss": 0.1913, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 4.346666666666667, |
|
"grad_norm": 0.6484464406967163, |
|
"learning_rate": 6.92780493452181e-05, |
|
"loss": 0.175, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 4.355555555555555, |
|
"grad_norm": 0.594021737575531, |
|
"learning_rate": 6.91341716182545e-05, |
|
"loss": 0.1777, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 4.364444444444445, |
|
"grad_norm": 0.6100602149963379, |
|
"learning_rate": 6.899010803040857e-05, |
|
"loss": 0.1492, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 4.373333333333333, |
|
"grad_norm": 0.5219585299491882, |
|
"learning_rate": 6.884585998105026e-05, |
|
"loss": 0.2231, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 4.3822222222222225, |
|
"grad_norm": 0.790718138217926, |
|
"learning_rate": 6.870142887134141e-05, |
|
"loss": 0.1844, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 4.391111111111111, |
|
"grad_norm": 0.44537198543548584, |
|
"learning_rate": 6.855681610422189e-05, |
|
"loss": 0.1866, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 4.4, |
|
"grad_norm": 0.3384419083595276, |
|
"learning_rate": 6.841202308439623e-05, |
|
"loss": 0.1788, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 4.408888888888889, |
|
"grad_norm": 0.4067346453666687, |
|
"learning_rate": 6.826705121831976e-05, |
|
"loss": 0.2291, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 4.417777777777777, |
|
"grad_norm": 0.9824737310409546, |
|
"learning_rate": 6.812190191418508e-05, |
|
"loss": 0.1777, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 4.426666666666667, |
|
"grad_norm": 0.963668704032898, |
|
"learning_rate": 6.797657658190839e-05, |
|
"loss": 0.1765, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 4.435555555555555, |
|
"grad_norm": 0.3285558223724365, |
|
"learning_rate": 6.783107663311565e-05, |
|
"loss": 0.1971, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 4.444444444444445, |
|
"grad_norm": 0.38868242502212524, |
|
"learning_rate": 6.768540348112907e-05, |
|
"loss": 0.2064, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 4.444444444444445, |
|
"eval_loss": 0.3316148519515991, |
|
"eval_runtime": 44.4048, |
|
"eval_samples_per_second": 2.252, |
|
"eval_steps_per_second": 2.252, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 4.453333333333333, |
|
"grad_norm": 0.8600643873214722, |
|
"learning_rate": 6.753955854095323e-05, |
|
"loss": 0.1599, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 4.4622222222222225, |
|
"grad_norm": 0.6059608459472656, |
|
"learning_rate": 6.739354322926136e-05, |
|
"loss": 0.1915, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 4.471111111111111, |
|
"grad_norm": 0.8490309715270996, |
|
"learning_rate": 6.724735896438167e-05, |
|
"loss": 0.2769, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 4.48, |
|
"grad_norm": 0.3413046896457672, |
|
"learning_rate": 6.710100716628344e-05, |
|
"loss": 0.1927, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 4.488888888888889, |
|
"grad_norm": 0.5042988657951355, |
|
"learning_rate": 6.695448925656333e-05, |
|
"loss": 0.146, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 4.497777777777777, |
|
"grad_norm": 0.46430540084838867, |
|
"learning_rate": 6.680780665843155e-05, |
|
"loss": 0.1989, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 4.506666666666667, |
|
"grad_norm": 0.45499563217163086, |
|
"learning_rate": 6.666096079669797e-05, |
|
"loss": 0.2019, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 4.515555555555555, |
|
"grad_norm": 0.33445480465888977, |
|
"learning_rate": 6.651395309775837e-05, |
|
"loss": 0.2001, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 4.524444444444445, |
|
"grad_norm": 0.6296830773353577, |
|
"learning_rate": 6.636678498958052e-05, |
|
"loss": 0.1525, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 4.533333333333333, |
|
"grad_norm": 0.49194592237472534, |
|
"learning_rate": 6.621945790169036e-05, |
|
"loss": 0.1748, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 4.542222222222223, |
|
"grad_norm": 0.3655872344970703, |
|
"learning_rate": 6.607197326515808e-05, |
|
"loss": 0.1935, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 4.551111111111111, |
|
"grad_norm": 0.6454001069068909, |
|
"learning_rate": 6.592433251258423e-05, |
|
"loss": 0.1625, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 4.5600000000000005, |
|
"grad_norm": 0.5047568082809448, |
|
"learning_rate": 6.577653707808577e-05, |
|
"loss": 0.1964, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 4.568888888888889, |
|
"grad_norm": 0.8591883778572083, |
|
"learning_rate": 6.562858839728223e-05, |
|
"loss": 0.2091, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 4.5777777777777775, |
|
"grad_norm": 0.3702365756034851, |
|
"learning_rate": 6.548048790728165e-05, |
|
"loss": 0.2029, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 4.586666666666667, |
|
"grad_norm": 0.4821636378765106, |
|
"learning_rate": 6.533223704666672e-05, |
|
"loss": 0.1799, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 4.595555555555555, |
|
"grad_norm": 0.4567244350910187, |
|
"learning_rate": 6.518383725548074e-05, |
|
"loss": 0.1867, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 4.604444444444445, |
|
"grad_norm": 0.613053560256958, |
|
"learning_rate": 6.503528997521366e-05, |
|
"loss": 0.1594, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 4.613333333333333, |
|
"grad_norm": 0.34238895773887634, |
|
"learning_rate": 6.488659664878808e-05, |
|
"loss": 0.1931, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 4.622222222222222, |
|
"grad_norm": 0.3325413465499878, |
|
"learning_rate": 6.473775872054521e-05, |
|
"loss": 0.1665, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 4.631111111111111, |
|
"grad_norm": 0.518052875995636, |
|
"learning_rate": 6.458877763623089e-05, |
|
"loss": 0.1491, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 4.64, |
|
"grad_norm": 0.398858904838562, |
|
"learning_rate": 6.44396548429815e-05, |
|
"loss": 0.1651, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 4.648888888888889, |
|
"grad_norm": 0.44876357913017273, |
|
"learning_rate": 6.42903917893099e-05, |
|
"loss": 0.1933, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 4.657777777777778, |
|
"grad_norm": 0.58448725938797, |
|
"learning_rate": 6.414098992509138e-05, |
|
"loss": 0.1924, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 4.666666666666667, |
|
"grad_norm": 0.5056392550468445, |
|
"learning_rate": 6.399145070154961e-05, |
|
"loss": 0.1667, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 4.6755555555555555, |
|
"grad_norm": 0.6365034580230713, |
|
"learning_rate": 6.384177557124247e-05, |
|
"loss": 0.182, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 4.684444444444445, |
|
"grad_norm": 0.3645959496498108, |
|
"learning_rate": 6.369196598804801e-05, |
|
"loss": 0.2032, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 4.693333333333333, |
|
"grad_norm": 0.42011165618896484, |
|
"learning_rate": 6.354202340715026e-05, |
|
"loss": 0.2085, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 4.702222222222222, |
|
"grad_norm": 0.4557260274887085, |
|
"learning_rate": 6.339194928502517e-05, |
|
"loss": 0.2283, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 4.711111111111111, |
|
"grad_norm": 0.41336071491241455, |
|
"learning_rate": 6.324174507942637e-05, |
|
"loss": 0.1633, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 4.72, |
|
"grad_norm": 0.3430007994174957, |
|
"learning_rate": 6.309141224937111e-05, |
|
"loss": 0.179, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 4.728888888888889, |
|
"grad_norm": 0.5025080442428589, |
|
"learning_rate": 6.294095225512603e-05, |
|
"loss": 0.1826, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 4.737777777777778, |
|
"grad_norm": 0.6088576316833496, |
|
"learning_rate": 6.279036655819302e-05, |
|
"loss": 0.2157, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 4.746666666666667, |
|
"grad_norm": 0.4910157322883606, |
|
"learning_rate": 6.263965662129487e-05, |
|
"loss": 0.1971, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 4.7555555555555555, |
|
"grad_norm": 0.3384090065956116, |
|
"learning_rate": 6.248882390836135e-05, |
|
"loss": 0.1443, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 4.764444444444445, |
|
"grad_norm": 0.40888798236846924, |
|
"learning_rate": 6.233786988451468e-05, |
|
"loss": 0.134, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 4.773333333333333, |
|
"grad_norm": 0.411403626203537, |
|
"learning_rate": 6.218679601605554e-05, |
|
"loss": 0.2239, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 4.782222222222222, |
|
"grad_norm": 0.5049718618392944, |
|
"learning_rate": 6.203560377044866e-05, |
|
"loss": 0.1874, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 4.791111111111111, |
|
"grad_norm": 0.35621222853660583, |
|
"learning_rate": 6.188429461630866e-05, |
|
"loss": 0.1915, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 4.8, |
|
"grad_norm": 0.7713626027107239, |
|
"learning_rate": 6.173287002338577e-05, |
|
"loss": 0.2097, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 4.808888888888889, |
|
"grad_norm": 1.0501298904418945, |
|
"learning_rate": 6.158133146255153e-05, |
|
"loss": 0.2026, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 4.817777777777778, |
|
"grad_norm": 0.7429870367050171, |
|
"learning_rate": 6.142968040578449e-05, |
|
"loss": 0.2041, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 4.826666666666666, |
|
"grad_norm": 0.4614238739013672, |
|
"learning_rate": 6.127791832615598e-05, |
|
"loss": 0.1685, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 4.835555555555556, |
|
"grad_norm": 0.63481205701828, |
|
"learning_rate": 6.112604669781572e-05, |
|
"loss": 0.1836, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 4.844444444444444, |
|
"grad_norm": 0.5263459086418152, |
|
"learning_rate": 6.097406699597759e-05, |
|
"loss": 0.162, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 4.8533333333333335, |
|
"grad_norm": 0.4739071726799011, |
|
"learning_rate": 6.0821980696905146e-05, |
|
"loss": 0.2085, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 4.862222222222222, |
|
"grad_norm": 0.4817952811717987, |
|
"learning_rate": 6.0669789277897507e-05, |
|
"loss": 0.1907, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 4.871111111111111, |
|
"grad_norm": 0.540302574634552, |
|
"learning_rate": 6.0517494217274794e-05, |
|
"loss": 0.1743, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 4.88, |
|
"grad_norm": 0.48975828289985657, |
|
"learning_rate": 6.036509699436389e-05, |
|
"loss": 0.1769, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 4.888888888888889, |
|
"grad_norm": 0.4697459042072296, |
|
"learning_rate": 6.021259908948402e-05, |
|
"loss": 0.1811, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 4.897777777777778, |
|
"grad_norm": 0.8186047077178955, |
|
"learning_rate": 6.0060001983932425e-05, |
|
"loss": 0.225, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 4.906666666666666, |
|
"grad_norm": 0.3741462528705597, |
|
"learning_rate": 5.9907307159969884e-05, |
|
"loss": 0.1601, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 4.915555555555556, |
|
"grad_norm": 0.46077340841293335, |
|
"learning_rate": 5.9754516100806423e-05, |
|
"loss": 0.1887, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 4.924444444444444, |
|
"grad_norm": 0.5414716601371765, |
|
"learning_rate": 5.960163029058682e-05, |
|
"loss": 0.1665, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 4.933333333333334, |
|
"grad_norm": 0.5361833572387695, |
|
"learning_rate": 5.944865121437622e-05, |
|
"loss": 0.1925, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 4.942222222222222, |
|
"grad_norm": 0.35004904866218567, |
|
"learning_rate": 5.9295580358145744e-05, |
|
"loss": 0.1933, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 4.9511111111111115, |
|
"grad_norm": 1.0410363674163818, |
|
"learning_rate": 5.914241920875798e-05, |
|
"loss": 0.2299, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 4.96, |
|
"grad_norm": 0.5200708508491516, |
|
"learning_rate": 5.898916925395264e-05, |
|
"loss": 0.1673, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 4.968888888888889, |
|
"grad_norm": 0.534602165222168, |
|
"learning_rate": 5.8835831982332015e-05, |
|
"loss": 0.1815, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 4.977777777777778, |
|
"grad_norm": 0.4768090546131134, |
|
"learning_rate": 5.868240888334653e-05, |
|
"loss": 0.2138, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 4.986666666666666, |
|
"grad_norm": 0.38369208574295044, |
|
"learning_rate": 5.852890144728034e-05, |
|
"loss": 0.1509, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 4.995555555555556, |
|
"grad_norm": 0.35721951723098755, |
|
"learning_rate": 5.837531116523682e-05, |
|
"loss": 0.1485, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 5.004444444444444, |
|
"grad_norm": 0.44299712777137756, |
|
"learning_rate": 5.822163952912404e-05, |
|
"loss": 0.1504, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 5.013333333333334, |
|
"grad_norm": 0.47305387258529663, |
|
"learning_rate": 5.806788803164034e-05, |
|
"loss": 0.1497, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 5.022222222222222, |
|
"grad_norm": 0.5253143906593323, |
|
"learning_rate": 5.791405816625975e-05, |
|
"loss": 0.1726, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 5.0311111111111115, |
|
"grad_norm": 0.5787915587425232, |
|
"learning_rate": 5.7760151427217576e-05, |
|
"loss": 0.1603, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 5.04, |
|
"grad_norm": 0.7631460428237915, |
|
"learning_rate": 5.7606169309495836e-05, |
|
"loss": 0.2153, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 5.0488888888888885, |
|
"grad_norm": 0.472397118806839, |
|
"learning_rate": 5.745211330880872e-05, |
|
"loss": 0.1887, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 5.057777777777778, |
|
"grad_norm": 0.4452255368232727, |
|
"learning_rate": 5.729798492158811e-05, |
|
"loss": 0.2073, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 5.066666666666666, |
|
"grad_norm": 0.4919035732746124, |
|
"learning_rate": 5.714378564496901e-05, |
|
"loss": 0.1449, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 5.075555555555556, |
|
"grad_norm": 0.6700975894927979, |
|
"learning_rate": 5.698951697677498e-05, |
|
"loss": 0.1024, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 5.084444444444444, |
|
"grad_norm": 0.6321210265159607, |
|
"learning_rate": 5.683518041550368e-05, |
|
"loss": 0.1584, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 5.093333333333334, |
|
"grad_norm": 0.7015148997306824, |
|
"learning_rate": 5.668077746031219e-05, |
|
"loss": 0.1387, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 5.102222222222222, |
|
"grad_norm": 0.6382136344909668, |
|
"learning_rate": 5.6526309611002594e-05, |
|
"loss": 0.1706, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 5.111111111111111, |
|
"grad_norm": 0.45012837648391724, |
|
"learning_rate": 5.637177836800722e-05, |
|
"loss": 0.155, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 5.12, |
|
"grad_norm": 0.47716137766838074, |
|
"learning_rate": 5.621718523237427e-05, |
|
"loss": 0.1303, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 5.128888888888889, |
|
"grad_norm": 0.7658069729804993, |
|
"learning_rate": 5.6062531705753075e-05, |
|
"loss": 0.1448, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 5.137777777777778, |
|
"grad_norm": 0.43680229783058167, |
|
"learning_rate": 5.590781929037965e-05, |
|
"loss": 0.1443, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 5.1466666666666665, |
|
"grad_norm": 0.834297239780426, |
|
"learning_rate": 5.575304948906194e-05, |
|
"loss": 0.0925, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 5.155555555555556, |
|
"grad_norm": 0.405964195728302, |
|
"learning_rate": 5.559822380516539e-05, |
|
"loss": 0.1427, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 5.164444444444444, |
|
"grad_norm": 0.49563100934028625, |
|
"learning_rate": 5.544334374259823e-05, |
|
"loss": 0.1492, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 5.173333333333334, |
|
"grad_norm": 0.5021004676818848, |
|
"learning_rate": 5.5288410805796895e-05, |
|
"loss": 0.1556, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 5.182222222222222, |
|
"grad_norm": 0.5862144827842712, |
|
"learning_rate": 5.5133426499711425e-05, |
|
"loss": 0.142, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 5.191111111111111, |
|
"grad_norm": 0.43614262342453003, |
|
"learning_rate": 5.497839232979084e-05, |
|
"loss": 0.1626, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 5.2, |
|
"grad_norm": 0.5935210585594177, |
|
"learning_rate": 5.4823309801968516e-05, |
|
"loss": 0.1267, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 5.208888888888889, |
|
"grad_norm": 0.4941434860229492, |
|
"learning_rate": 5.466818042264753e-05, |
|
"loss": 0.1144, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 5.217777777777778, |
|
"grad_norm": 0.6197287440299988, |
|
"learning_rate": 5.451300569868612e-05, |
|
"loss": 0.1654, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 5.226666666666667, |
|
"grad_norm": 0.38996899127960205, |
|
"learning_rate": 5.435778713738292e-05, |
|
"loss": 0.1925, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 5.235555555555556, |
|
"grad_norm": 1.012840986251831, |
|
"learning_rate": 5.420252624646238e-05, |
|
"loss": 0.1575, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 5.2444444444444445, |
|
"grad_norm": 0.33670493960380554, |
|
"learning_rate": 5.404722453406017e-05, |
|
"loss": 0.1635, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 5.253333333333333, |
|
"grad_norm": 0.3880348205566406, |
|
"learning_rate": 5.3891883508708444e-05, |
|
"loss": 0.1301, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 5.262222222222222, |
|
"grad_norm": 0.8689696192741394, |
|
"learning_rate": 5.373650467932122e-05, |
|
"loss": 0.1621, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 5.271111111111111, |
|
"grad_norm": 0.4196907877922058, |
|
"learning_rate": 5.3581089555179754e-05, |
|
"loss": 0.1501, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 5.28, |
|
"grad_norm": 0.9811832904815674, |
|
"learning_rate": 5.3425639645917834e-05, |
|
"loss": 0.1124, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 5.288888888888889, |
|
"grad_norm": 0.7768471240997314, |
|
"learning_rate": 5.327015646150716e-05, |
|
"loss": 0.1264, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 5.297777777777778, |
|
"grad_norm": 0.5367645025253296, |
|
"learning_rate": 5.311464151224261e-05, |
|
"loss": 0.1095, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 5.306666666666667, |
|
"grad_norm": 0.5028023719787598, |
|
"learning_rate": 5.295909630872764e-05, |
|
"loss": 0.1205, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 5.315555555555555, |
|
"grad_norm": 0.39689093828201294, |
|
"learning_rate": 5.2803522361859594e-05, |
|
"loss": 0.1511, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 5.3244444444444445, |
|
"grad_norm": 0.7014448642730713, |
|
"learning_rate": 5.264792118281498e-05, |
|
"loss": 0.1379, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 5.333333333333333, |
|
"grad_norm": 0.7733895778656006, |
|
"learning_rate": 5.249229428303486e-05, |
|
"loss": 0.1538, |
|
"step": 600 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 1120, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 10, |
|
"save_steps": 100, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": false |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 4.8639927296065536e+17, |
|
"train_batch_size": 2, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|