|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 9.955555555555556, |
|
"eval_steps": 500, |
|
"global_step": 1120, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.008888888888888889, |
|
"grad_norm": 0.5841383934020996, |
|
"learning_rate": 8.928571428571428e-07, |
|
"loss": 0.4865, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.017777777777777778, |
|
"grad_norm": 0.37317243218421936, |
|
"learning_rate": 1.7857142857142857e-06, |
|
"loss": 0.4116, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.02666666666666667, |
|
"grad_norm": 0.16240684688091278, |
|
"learning_rate": 2.6785714285714285e-06, |
|
"loss": 0.3085, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.035555555555555556, |
|
"grad_norm": 0.22618648409843445, |
|
"learning_rate": 3.5714285714285714e-06, |
|
"loss": 0.3213, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.044444444444444446, |
|
"grad_norm": 0.33735740184783936, |
|
"learning_rate": 4.464285714285715e-06, |
|
"loss": 0.2869, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.05333333333333334, |
|
"grad_norm": 0.20259538292884827, |
|
"learning_rate": 5.357142857142857e-06, |
|
"loss": 0.3825, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.06222222222222222, |
|
"grad_norm": 0.3431294560432434, |
|
"learning_rate": 6.25e-06, |
|
"loss": 0.3896, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.07111111111111111, |
|
"grad_norm": 0.5504677891731262, |
|
"learning_rate": 7.142857142857143e-06, |
|
"loss": 0.3175, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 0.1282545030117035, |
|
"learning_rate": 8.035714285714286e-06, |
|
"loss": 0.2901, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.08888888888888889, |
|
"grad_norm": 0.18529950082302094, |
|
"learning_rate": 8.92857142857143e-06, |
|
"loss": 0.3735, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.09777777777777778, |
|
"grad_norm": 0.2145024836063385, |
|
"learning_rate": 9.821428571428573e-06, |
|
"loss": 0.3525, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.10666666666666667, |
|
"grad_norm": 0.9437930583953857, |
|
"learning_rate": 1.0714285714285714e-05, |
|
"loss": 0.444, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.11555555555555555, |
|
"grad_norm": 0.24417199194431305, |
|
"learning_rate": 1.1607142857142857e-05, |
|
"loss": 0.2967, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.12444444444444444, |
|
"grad_norm": 0.2694835662841797, |
|
"learning_rate": 1.25e-05, |
|
"loss": 0.2967, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.13333333333333333, |
|
"grad_norm": 0.20757225155830383, |
|
"learning_rate": 1.3392857142857144e-05, |
|
"loss": 0.3487, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.14222222222222222, |
|
"grad_norm": 0.2921523153781891, |
|
"learning_rate": 1.4285714285714285e-05, |
|
"loss": 0.4247, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.1511111111111111, |
|
"grad_norm": 0.20908255875110626, |
|
"learning_rate": 1.5178571428571429e-05, |
|
"loss": 0.3009, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 0.15087872743606567, |
|
"learning_rate": 1.6071428571428572e-05, |
|
"loss": 0.3145, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.1688888888888889, |
|
"grad_norm": 0.8311613202095032, |
|
"learning_rate": 1.6964285714285715e-05, |
|
"loss": 0.3868, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.17777777777777778, |
|
"grad_norm": 0.27965763211250305, |
|
"learning_rate": 1.785714285714286e-05, |
|
"loss": 0.351, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.18666666666666668, |
|
"grad_norm": 0.23372279107570648, |
|
"learning_rate": 1.8750000000000002e-05, |
|
"loss": 0.3025, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.19555555555555557, |
|
"grad_norm": 0.3673437237739563, |
|
"learning_rate": 1.9642857142857145e-05, |
|
"loss": 0.388, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.20444444444444446, |
|
"grad_norm": 0.39919161796569824, |
|
"learning_rate": 2.0535714285714285e-05, |
|
"loss": 0.3611, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.21333333333333335, |
|
"grad_norm": 0.20096033811569214, |
|
"learning_rate": 2.1428571428571428e-05, |
|
"loss": 0.3244, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.2222222222222222, |
|
"grad_norm": 0.27874326705932617, |
|
"learning_rate": 2.2321428571428575e-05, |
|
"loss": 0.359, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.2311111111111111, |
|
"grad_norm": 0.31668320298194885, |
|
"learning_rate": 2.3214285714285715e-05, |
|
"loss": 0.3062, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 0.4131496548652649, |
|
"learning_rate": 2.4107142857142858e-05, |
|
"loss": 0.4351, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.24888888888888888, |
|
"grad_norm": 0.4470804035663605, |
|
"learning_rate": 2.5e-05, |
|
"loss": 0.4332, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.2577777777777778, |
|
"grad_norm": 0.3746657967567444, |
|
"learning_rate": 2.5892857142857148e-05, |
|
"loss": 0.3811, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.26666666666666666, |
|
"grad_norm": 0.8335658311843872, |
|
"learning_rate": 2.6785714285714288e-05, |
|
"loss": 0.4618, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.27555555555555555, |
|
"grad_norm": 0.8326655626296997, |
|
"learning_rate": 2.767857142857143e-05, |
|
"loss": 0.3872, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.28444444444444444, |
|
"grad_norm": 0.7701701521873474, |
|
"learning_rate": 2.857142857142857e-05, |
|
"loss": 0.3255, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.29333333333333333, |
|
"grad_norm": 0.40850526094436646, |
|
"learning_rate": 2.9464285714285718e-05, |
|
"loss": 0.3167, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.3022222222222222, |
|
"grad_norm": 0.35397693514823914, |
|
"learning_rate": 3.0357142857142857e-05, |
|
"loss": 0.2987, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.3111111111111111, |
|
"grad_norm": 0.7541699409484863, |
|
"learning_rate": 3.125e-05, |
|
"loss": 0.3413, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 0.3483797609806061, |
|
"learning_rate": 3.2142857142857144e-05, |
|
"loss": 0.2858, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.3288888888888889, |
|
"grad_norm": 0.33964604139328003, |
|
"learning_rate": 3.303571428571429e-05, |
|
"loss": 0.3036, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.3377777777777778, |
|
"grad_norm": 0.45213809609413147, |
|
"learning_rate": 3.392857142857143e-05, |
|
"loss": 0.3437, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.3466666666666667, |
|
"grad_norm": 0.4597315192222595, |
|
"learning_rate": 3.4821428571428574e-05, |
|
"loss": 0.3318, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.35555555555555557, |
|
"grad_norm": 0.44681742787361145, |
|
"learning_rate": 3.571428571428572e-05, |
|
"loss": 0.3107, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.36444444444444446, |
|
"grad_norm": 0.9226369857788086, |
|
"learning_rate": 3.6607142857142853e-05, |
|
"loss": 0.3231, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.37333333333333335, |
|
"grad_norm": 0.27465879917144775, |
|
"learning_rate": 3.7500000000000003e-05, |
|
"loss": 0.3301, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.38222222222222224, |
|
"grad_norm": 0.4791021943092346, |
|
"learning_rate": 3.839285714285715e-05, |
|
"loss": 0.3527, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.39111111111111113, |
|
"grad_norm": 0.586669385433197, |
|
"learning_rate": 3.928571428571429e-05, |
|
"loss": 0.3359, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.34871557354927063, |
|
"learning_rate": 4.017857142857143e-05, |
|
"loss": 0.3268, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.4088888888888889, |
|
"grad_norm": 0.4256209135055542, |
|
"learning_rate": 4.107142857142857e-05, |
|
"loss": 0.2978, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.4177777777777778, |
|
"grad_norm": 0.30336323380470276, |
|
"learning_rate": 4.196428571428572e-05, |
|
"loss": 0.2898, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.4266666666666667, |
|
"grad_norm": 0.5795422792434692, |
|
"learning_rate": 4.2857142857142856e-05, |
|
"loss": 0.3481, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.43555555555555553, |
|
"grad_norm": 0.38410332798957825, |
|
"learning_rate": 4.375e-05, |
|
"loss": 0.3414, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.4444444444444444, |
|
"grad_norm": 0.3348947763442993, |
|
"learning_rate": 4.464285714285715e-05, |
|
"loss": 0.336, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.4533333333333333, |
|
"grad_norm": 0.42829787731170654, |
|
"learning_rate": 4.5535714285714286e-05, |
|
"loss": 0.2913, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.4622222222222222, |
|
"grad_norm": 4.1896653175354, |
|
"learning_rate": 4.642857142857143e-05, |
|
"loss": 0.8937, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.4711111111111111, |
|
"grad_norm": 0.7059090733528137, |
|
"learning_rate": 4.732142857142857e-05, |
|
"loss": 0.4011, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.41015371680259705, |
|
"learning_rate": 4.8214285714285716e-05, |
|
"loss": 0.3351, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.4888888888888889, |
|
"grad_norm": 0.43993642926216125, |
|
"learning_rate": 4.910714285714286e-05, |
|
"loss": 0.3313, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.49777777777777776, |
|
"grad_norm": 1.0636693239212036, |
|
"learning_rate": 5e-05, |
|
"loss": 0.4636, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.5066666666666667, |
|
"grad_norm": 1.5080366134643555, |
|
"learning_rate": 5.089285714285714e-05, |
|
"loss": 0.5489, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.5155555555555555, |
|
"grad_norm": 0.5345658659934998, |
|
"learning_rate": 5.1785714285714296e-05, |
|
"loss": 0.3057, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.5244444444444445, |
|
"grad_norm": 0.7976881861686707, |
|
"learning_rate": 5.267857142857143e-05, |
|
"loss": 0.3581, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.5333333333333333, |
|
"grad_norm": 0.726458728313446, |
|
"learning_rate": 5.3571428571428575e-05, |
|
"loss": 0.3279, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.5422222222222223, |
|
"grad_norm": 0.5178459286689758, |
|
"learning_rate": 5.446428571428571e-05, |
|
"loss": 0.3217, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.5511111111111111, |
|
"grad_norm": 0.6377764940261841, |
|
"learning_rate": 5.535714285714286e-05, |
|
"loss": 0.321, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.3174062967300415, |
|
"learning_rate": 5.6250000000000005e-05, |
|
"loss": 0.3154, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.5688888888888889, |
|
"grad_norm": 0.31623443961143494, |
|
"learning_rate": 5.714285714285714e-05, |
|
"loss": 0.2735, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.5777777777777777, |
|
"grad_norm": 0.3521466851234436, |
|
"learning_rate": 5.803571428571429e-05, |
|
"loss": 0.2878, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.5866666666666667, |
|
"grad_norm": 0.8923875689506531, |
|
"learning_rate": 5.8928571428571435e-05, |
|
"loss": 0.3906, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.5955555555555555, |
|
"grad_norm": 0.6803852915763855, |
|
"learning_rate": 5.982142857142857e-05, |
|
"loss": 0.3573, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.6044444444444445, |
|
"grad_norm": 0.5237946510314941, |
|
"learning_rate": 6.0714285714285715e-05, |
|
"loss": 0.3068, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.6133333333333333, |
|
"grad_norm": 0.6161757111549377, |
|
"learning_rate": 6.160714285714286e-05, |
|
"loss": 0.2944, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.6222222222222222, |
|
"grad_norm": 0.3687132000923157, |
|
"learning_rate": 6.25e-05, |
|
"loss": 0.2901, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.6311111111111111, |
|
"grad_norm": 0.39233818650245667, |
|
"learning_rate": 6.339285714285714e-05, |
|
"loss": 0.2821, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.7180721759796143, |
|
"learning_rate": 6.428571428571429e-05, |
|
"loss": 0.3509, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.6488888888888888, |
|
"grad_norm": 0.9132435917854309, |
|
"learning_rate": 6.517857142857143e-05, |
|
"loss": 0.4027, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.6577777777777778, |
|
"grad_norm": 0.2931051552295685, |
|
"learning_rate": 6.607142857142857e-05, |
|
"loss": 0.3303, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.6666666666666666, |
|
"grad_norm": 0.4739736318588257, |
|
"learning_rate": 6.696428571428572e-05, |
|
"loss": 0.286, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.6755555555555556, |
|
"grad_norm": 0.645233154296875, |
|
"learning_rate": 6.785714285714286e-05, |
|
"loss": 0.3673, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.6844444444444444, |
|
"grad_norm": 0.6568748354911804, |
|
"learning_rate": 6.875e-05, |
|
"loss": 0.3451, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.6933333333333334, |
|
"grad_norm": 0.330121785402298, |
|
"learning_rate": 6.964285714285715e-05, |
|
"loss": 0.2916, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.7022222222222222, |
|
"grad_norm": 0.6969891786575317, |
|
"learning_rate": 7.053571428571429e-05, |
|
"loss": 0.3799, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.7111111111111111, |
|
"grad_norm": 0.3836056590080261, |
|
"learning_rate": 7.142857142857143e-05, |
|
"loss": 0.2785, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.6308532357215881, |
|
"learning_rate": 7.232142857142858e-05, |
|
"loss": 0.3365, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.7288888888888889, |
|
"grad_norm": 1.3300080299377441, |
|
"learning_rate": 7.321428571428571e-05, |
|
"loss": 0.4452, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.7377777777777778, |
|
"grad_norm": 0.4857744872570038, |
|
"learning_rate": 7.410714285714286e-05, |
|
"loss": 0.3408, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.7466666666666667, |
|
"grad_norm": 0.2752129137516022, |
|
"learning_rate": 7.500000000000001e-05, |
|
"loss": 0.2481, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.7555555555555555, |
|
"grad_norm": 0.37411218881607056, |
|
"learning_rate": 7.589285714285714e-05, |
|
"loss": 0.2714, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.7644444444444445, |
|
"grad_norm": 0.34179869294166565, |
|
"learning_rate": 7.67857142857143e-05, |
|
"loss": 0.322, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.7733333333333333, |
|
"grad_norm": 0.5072541236877441, |
|
"learning_rate": 7.767857142857144e-05, |
|
"loss": 0.3442, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.7822222222222223, |
|
"grad_norm": 0.3834559917449951, |
|
"learning_rate": 7.857142857142858e-05, |
|
"loss": 0.2904, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.7911111111111111, |
|
"grad_norm": 0.37922006845474243, |
|
"learning_rate": 7.946428571428571e-05, |
|
"loss": 0.3116, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.41435790061950684, |
|
"learning_rate": 8.035714285714287e-05, |
|
"loss": 0.2949, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.8088888888888889, |
|
"grad_norm": 0.5537578463554382, |
|
"learning_rate": 8.125000000000001e-05, |
|
"loss": 0.3337, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.8177777777777778, |
|
"grad_norm": 0.24957779049873352, |
|
"learning_rate": 8.214285714285714e-05, |
|
"loss": 0.2709, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.8266666666666667, |
|
"grad_norm": 0.43642184138298035, |
|
"learning_rate": 8.30357142857143e-05, |
|
"loss": 0.3033, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.8355555555555556, |
|
"grad_norm": 0.835472583770752, |
|
"learning_rate": 8.392857142857144e-05, |
|
"loss": 0.3828, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.8444444444444444, |
|
"grad_norm": 0.6168670654296875, |
|
"learning_rate": 8.482142857142857e-05, |
|
"loss": 0.3455, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.8533333333333334, |
|
"grad_norm": 0.46005958318710327, |
|
"learning_rate": 8.571428571428571e-05, |
|
"loss": 0.3489, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.8622222222222222, |
|
"grad_norm": 0.34383633732795715, |
|
"learning_rate": 8.660714285714287e-05, |
|
"loss": 0.2866, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.8711111111111111, |
|
"grad_norm": 0.4366074204444885, |
|
"learning_rate": 8.75e-05, |
|
"loss": 0.3239, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 0.33174213767051697, |
|
"learning_rate": 8.839285714285714e-05, |
|
"loss": 0.2783, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.8888888888888888, |
|
"grad_norm": 0.26910072565078735, |
|
"learning_rate": 8.92857142857143e-05, |
|
"loss": 0.2807, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.8977777777777778, |
|
"grad_norm": 0.9884425401687622, |
|
"learning_rate": 9.017857142857143e-05, |
|
"loss": 0.4379, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.9066666666666666, |
|
"grad_norm": 0.24256423115730286, |
|
"learning_rate": 9.107142857142857e-05, |
|
"loss": 0.231, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.9155555555555556, |
|
"grad_norm": 0.36811211705207825, |
|
"learning_rate": 9.196428571428572e-05, |
|
"loss": 0.2668, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.9244444444444444, |
|
"grad_norm": 0.45584559440612793, |
|
"learning_rate": 9.285714285714286e-05, |
|
"loss": 0.3285, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.9333333333333333, |
|
"grad_norm": 0.4034405052661896, |
|
"learning_rate": 9.375e-05, |
|
"loss": 0.2823, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.9422222222222222, |
|
"grad_norm": 0.48256492614746094, |
|
"learning_rate": 9.464285714285715e-05, |
|
"loss": 0.3389, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.9511111111111111, |
|
"grad_norm": 0.4109070897102356, |
|
"learning_rate": 9.553571428571429e-05, |
|
"loss": 0.3515, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 1.8162788152694702, |
|
"learning_rate": 9.642857142857143e-05, |
|
"loss": 0.5591, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.9688888888888889, |
|
"grad_norm": 0.33671724796295166, |
|
"learning_rate": 9.732142857142858e-05, |
|
"loss": 0.3512, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.9777777777777777, |
|
"grad_norm": 0.35684165358543396, |
|
"learning_rate": 9.821428571428572e-05, |
|
"loss": 0.3087, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.9866666666666667, |
|
"grad_norm": 0.34859076142311096, |
|
"learning_rate": 9.910714285714286e-05, |
|
"loss": 0.2894, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.9955555555555555, |
|
"grad_norm": 0.3526354432106018, |
|
"learning_rate": 0.0001, |
|
"loss": 0.3052, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 1.0044444444444445, |
|
"grad_norm": 0.2897067368030548, |
|
"learning_rate": 9.999975716105452e-05, |
|
"loss": 0.3058, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 1.0133333333333334, |
|
"grad_norm": 0.4105049967765808, |
|
"learning_rate": 9.999902864657691e-05, |
|
"loss": 0.2635, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 1.0222222222222221, |
|
"grad_norm": 0.36226218938827515, |
|
"learning_rate": 9.999781446364365e-05, |
|
"loss": 0.3257, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 1.031111111111111, |
|
"grad_norm": 0.31135883927345276, |
|
"learning_rate": 9.999611462404875e-05, |
|
"loss": 0.2646, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"grad_norm": 2.0171165466308594, |
|
"learning_rate": 9.999392914430371e-05, |
|
"loss": 0.5321, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 1.048888888888889, |
|
"grad_norm": 0.3250499665737152, |
|
"learning_rate": 9.999125804563732e-05, |
|
"loss": 0.3208, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 1.0577777777777777, |
|
"grad_norm": 0.2886015474796295, |
|
"learning_rate": 9.998810135399546e-05, |
|
"loss": 0.2932, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 1.0666666666666667, |
|
"grad_norm": 0.3275207281112671, |
|
"learning_rate": 9.998445910004082e-05, |
|
"loss": 0.2971, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 1.0755555555555556, |
|
"grad_norm": 0.5577352643013, |
|
"learning_rate": 9.998033131915266e-05, |
|
"loss": 0.3561, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 1.0844444444444445, |
|
"grad_norm": 1.0877000093460083, |
|
"learning_rate": 9.997571805142639e-05, |
|
"loss": 0.4113, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 1.0933333333333333, |
|
"grad_norm": 0.5185580253601074, |
|
"learning_rate": 9.997061934167328e-05, |
|
"loss": 0.3085, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 1.1022222222222222, |
|
"grad_norm": 0.4035893678665161, |
|
"learning_rate": 9.996503523941994e-05, |
|
"loss": 0.271, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 1.1111111111111112, |
|
"grad_norm": 0.3941110074520111, |
|
"learning_rate": 9.995896579890784e-05, |
|
"loss": 0.2875, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"grad_norm": 0.3415586054325104, |
|
"learning_rate": 9.99524110790929e-05, |
|
"loss": 0.2711, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 1.1288888888888888, |
|
"grad_norm": 0.3006160259246826, |
|
"learning_rate": 9.99453711436447e-05, |
|
"loss": 0.2718, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 1.1377777777777778, |
|
"grad_norm": 0.522278368473053, |
|
"learning_rate": 9.993784606094612e-05, |
|
"loss": 0.3306, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 1.1466666666666667, |
|
"grad_norm": 0.5858255624771118, |
|
"learning_rate": 9.992983590409246e-05, |
|
"loss": 0.3475, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 1.1555555555555554, |
|
"grad_norm": 0.5486164689064026, |
|
"learning_rate": 9.992134075089084e-05, |
|
"loss": 0.3259, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 1.1644444444444444, |
|
"grad_norm": 0.2975933253765106, |
|
"learning_rate": 9.991236068385941e-05, |
|
"loss": 0.2588, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 1.1733333333333333, |
|
"grad_norm": 0.6659825444221497, |
|
"learning_rate": 9.99028957902266e-05, |
|
"loss": 0.3266, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 1.1822222222222223, |
|
"grad_norm": 0.2541256248950958, |
|
"learning_rate": 9.989294616193017e-05, |
|
"loss": 0.2649, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 1.1911111111111112, |
|
"grad_norm": 0.8277371525764465, |
|
"learning_rate": 9.988251189561645e-05, |
|
"loss": 0.4076, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"grad_norm": 0.3177882432937622, |
|
"learning_rate": 9.987159309263924e-05, |
|
"loss": 0.304, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 1.208888888888889, |
|
"grad_norm": 0.36816540360450745, |
|
"learning_rate": 9.986018985905901e-05, |
|
"loss": 0.3187, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 1.2177777777777778, |
|
"grad_norm": 0.4456408619880676, |
|
"learning_rate": 9.984830230564171e-05, |
|
"loss": 0.2769, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 1.2266666666666666, |
|
"grad_norm": 0.7157383561134338, |
|
"learning_rate": 9.983593054785776e-05, |
|
"loss": 0.382, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 1.2355555555555555, |
|
"grad_norm": 0.5327372550964355, |
|
"learning_rate": 9.982307470588098e-05, |
|
"loss": 0.2732, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 1.2444444444444445, |
|
"grad_norm": 0.581408679485321, |
|
"learning_rate": 9.980973490458728e-05, |
|
"loss": 0.3956, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 1.2533333333333334, |
|
"grad_norm": 0.32390910387039185, |
|
"learning_rate": 9.979591127355365e-05, |
|
"loss": 0.2905, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 1.2622222222222224, |
|
"grad_norm": 0.4703962802886963, |
|
"learning_rate": 9.978160394705668e-05, |
|
"loss": 0.2897, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 1.271111111111111, |
|
"grad_norm": 0.5023928284645081, |
|
"learning_rate": 9.976681306407148e-05, |
|
"loss": 0.328, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"grad_norm": 0.3573106825351715, |
|
"learning_rate": 9.975153876827008e-05, |
|
"loss": 0.2687, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 1.2888888888888888, |
|
"grad_norm": 0.6152392625808716, |
|
"learning_rate": 9.973578120802025e-05, |
|
"loss": 0.3375, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 1.2977777777777777, |
|
"grad_norm": 0.40894556045532227, |
|
"learning_rate": 9.971954053638399e-05, |
|
"loss": 0.2888, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 1.3066666666666666, |
|
"grad_norm": 0.8339890241622925, |
|
"learning_rate": 9.970281691111598e-05, |
|
"loss": 0.4384, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 1.3155555555555556, |
|
"grad_norm": 0.41529226303100586, |
|
"learning_rate": 9.968561049466214e-05, |
|
"loss": 0.2831, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 1.3244444444444445, |
|
"grad_norm": 0.28021934628486633, |
|
"learning_rate": 9.966792145415795e-05, |
|
"loss": 0.2671, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 1.3333333333333333, |
|
"grad_norm": 0.22352413833141327, |
|
"learning_rate": 9.964974996142698e-05, |
|
"loss": 0.2839, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 1.3422222222222222, |
|
"grad_norm": 0.2703256905078888, |
|
"learning_rate": 9.963109619297905e-05, |
|
"loss": 0.2675, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 1.3511111111111112, |
|
"grad_norm": 0.2645833194255829, |
|
"learning_rate": 9.961196033000861e-05, |
|
"loss": 0.2708, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 1.3599999999999999, |
|
"grad_norm": 0.40951216220855713, |
|
"learning_rate": 9.959234255839298e-05, |
|
"loss": 0.3015, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 1.3688888888888888, |
|
"grad_norm": 0.3288329839706421, |
|
"learning_rate": 9.957224306869053e-05, |
|
"loss": 0.325, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 1.3777777777777778, |
|
"grad_norm": 0.3932753801345825, |
|
"learning_rate": 9.955166205613879e-05, |
|
"loss": 0.2716, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 1.3866666666666667, |
|
"grad_norm": 0.46717700362205505, |
|
"learning_rate": 9.953059972065265e-05, |
|
"loss": 0.3444, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 1.3955555555555557, |
|
"grad_norm": 0.20313459634780884, |
|
"learning_rate": 9.950905626682228e-05, |
|
"loss": 0.2584, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 1.4044444444444444, |
|
"grad_norm": 0.47062796354293823, |
|
"learning_rate": 9.948703190391131e-05, |
|
"loss": 0.372, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 1.4133333333333333, |
|
"grad_norm": 0.3500126898288727, |
|
"learning_rate": 9.946452684585463e-05, |
|
"loss": 0.2737, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 1.4222222222222223, |
|
"grad_norm": 0.5783170461654663, |
|
"learning_rate": 9.944154131125642e-05, |
|
"loss": 0.2938, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 1.431111111111111, |
|
"grad_norm": 0.27551746368408203, |
|
"learning_rate": 9.941807552338804e-05, |
|
"loss": 0.3139, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"grad_norm": 0.2641688287258148, |
|
"learning_rate": 9.939412971018574e-05, |
|
"loss": 0.3009, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 1.448888888888889, |
|
"grad_norm": 0.4430491030216217, |
|
"learning_rate": 9.936970410424857e-05, |
|
"loss": 0.2779, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 1.4577777777777778, |
|
"grad_norm": 0.44353199005126953, |
|
"learning_rate": 9.934479894283606e-05, |
|
"loss": 0.2694, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 1.4666666666666668, |
|
"grad_norm": 0.3297507166862488, |
|
"learning_rate": 9.931941446786594e-05, |
|
"loss": 0.2638, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 1.4755555555555555, |
|
"grad_norm": 0.5766128897666931, |
|
"learning_rate": 9.92935509259118e-05, |
|
"loss": 0.3052, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 1.4844444444444445, |
|
"grad_norm": 0.3493499755859375, |
|
"learning_rate": 9.92672085682006e-05, |
|
"loss": 0.2728, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 1.4933333333333334, |
|
"grad_norm": 0.34138888120651245, |
|
"learning_rate": 9.924038765061042e-05, |
|
"loss": 0.2679, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 1.5022222222222221, |
|
"grad_norm": 0.40943869948387146, |
|
"learning_rate": 9.921308843366772e-05, |
|
"loss": 0.2556, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 1.511111111111111, |
|
"grad_norm": 0.4275529384613037, |
|
"learning_rate": 9.918531118254507e-05, |
|
"loss": 0.3012, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"grad_norm": 0.3822483718395233, |
|
"learning_rate": 9.915705616705839e-05, |
|
"loss": 0.2984, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 1.528888888888889, |
|
"grad_norm": 0.3507990837097168, |
|
"learning_rate": 9.912832366166442e-05, |
|
"loss": 0.2839, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 1.537777777777778, |
|
"grad_norm": 0.3176634907722473, |
|
"learning_rate": 9.909911394545799e-05, |
|
"loss": 0.2715, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 1.5466666666666666, |
|
"grad_norm": 0.7413046956062317, |
|
"learning_rate": 9.906942730216939e-05, |
|
"loss": 0.2995, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 1.5555555555555556, |
|
"grad_norm": 0.5602743625640869, |
|
"learning_rate": 9.903926402016153e-05, |
|
"loss": 0.303, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 1.5644444444444443, |
|
"grad_norm": 0.3049962818622589, |
|
"learning_rate": 9.900862439242719e-05, |
|
"loss": 0.2866, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 1.5733333333333333, |
|
"grad_norm": 0.20894083380699158, |
|
"learning_rate": 9.89775087165862e-05, |
|
"loss": 0.2801, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 1.5822222222222222, |
|
"grad_norm": 0.4999159574508667, |
|
"learning_rate": 9.894591729488242e-05, |
|
"loss": 0.3153, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 1.5911111111111111, |
|
"grad_norm": 0.6849189400672913, |
|
"learning_rate": 9.8913850434181e-05, |
|
"loss": 0.2794, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"grad_norm": 0.44084635376930237, |
|
"learning_rate": 9.888130844596524e-05, |
|
"loss": 0.2953, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 1.608888888888889, |
|
"grad_norm": 0.2744970917701721, |
|
"learning_rate": 9.884829164633359e-05, |
|
"loss": 0.2654, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 1.6177777777777778, |
|
"grad_norm": 0.7441728711128235, |
|
"learning_rate": 9.881480035599667e-05, |
|
"loss": 0.4128, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 1.6266666666666667, |
|
"grad_norm": 0.2983834147453308, |
|
"learning_rate": 9.878083490027406e-05, |
|
"loss": 0.3103, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 1.6355555555555554, |
|
"grad_norm": 0.2417658269405365, |
|
"learning_rate": 9.874639560909117e-05, |
|
"loss": 0.248, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 1.6444444444444444, |
|
"grad_norm": 0.4583745002746582, |
|
"learning_rate": 9.871148281697608e-05, |
|
"loss": 0.2747, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 1.6533333333333333, |
|
"grad_norm": 0.42793506383895874, |
|
"learning_rate": 9.867609686305617e-05, |
|
"loss": 0.282, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 1.6622222222222223, |
|
"grad_norm": 0.32263195514678955, |
|
"learning_rate": 9.864023809105497e-05, |
|
"loss": 0.2709, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 1.6711111111111112, |
|
"grad_norm": 0.35320043563842773, |
|
"learning_rate": 9.860390684928873e-05, |
|
"loss": 0.3429, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 1.6800000000000002, |
|
"grad_norm": 0.6125680804252625, |
|
"learning_rate": 9.856710349066307e-05, |
|
"loss": 0.2844, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 1.6888888888888889, |
|
"grad_norm": 0.36522263288497925, |
|
"learning_rate": 9.852982837266955e-05, |
|
"loss": 0.2413, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 1.6977777777777778, |
|
"grad_norm": 0.3167021870613098, |
|
"learning_rate": 9.849208185738217e-05, |
|
"loss": 0.2682, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 1.7066666666666666, |
|
"grad_norm": 0.46384674310684204, |
|
"learning_rate": 9.84538643114539e-05, |
|
"loss": 0.2671, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 1.7155555555555555, |
|
"grad_norm": 0.27667102217674255, |
|
"learning_rate": 9.841517610611309e-05, |
|
"loss": 0.2929, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 1.7244444444444444, |
|
"grad_norm": 0.34263694286346436, |
|
"learning_rate": 9.837601761715983e-05, |
|
"loss": 0.2837, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 1.7333333333333334, |
|
"grad_norm": 0.5394869446754456, |
|
"learning_rate": 9.833638922496238e-05, |
|
"loss": 0.2535, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 1.7422222222222223, |
|
"grad_norm": 0.30996885895729065, |
|
"learning_rate": 9.829629131445342e-05, |
|
"loss": 0.2845, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 1.751111111111111, |
|
"grad_norm": 0.3415825664997101, |
|
"learning_rate": 9.825572427512632e-05, |
|
"loss": 0.2525, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"grad_norm": 0.23367656767368317, |
|
"learning_rate": 9.82146885010314e-05, |
|
"loss": 0.295, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 1.7688888888888887, |
|
"grad_norm": 0.32408076524734497, |
|
"learning_rate": 9.817318439077195e-05, |
|
"loss": 0.3182, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 1.7777777777777777, |
|
"grad_norm": 0.4190412759780884, |
|
"learning_rate": 9.81312123475006e-05, |
|
"loss": 0.2723, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 1.7866666666666666, |
|
"grad_norm": 0.28320616483688354, |
|
"learning_rate": 9.808877277891521e-05, |
|
"loss": 0.2618, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 1.7955555555555556, |
|
"grad_norm": 0.3757404386997223, |
|
"learning_rate": 9.804586609725499e-05, |
|
"loss": 0.3327, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 1.8044444444444445, |
|
"grad_norm": 0.2790829539299011, |
|
"learning_rate": 9.800249271929645e-05, |
|
"loss": 0.2746, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 1.8133333333333335, |
|
"grad_norm": 0.5675872564315796, |
|
"learning_rate": 9.79586530663494e-05, |
|
"loss": 0.3178, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 1.8222222222222222, |
|
"grad_norm": 0.7035036683082581, |
|
"learning_rate": 9.791434756425288e-05, |
|
"loss": 0.283, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 1.8311111111111111, |
|
"grad_norm": 0.22361284494400024, |
|
"learning_rate": 9.78695766433709e-05, |
|
"loss": 0.2674, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 1.8399999999999999, |
|
"grad_norm": 0.39389535784721375, |
|
"learning_rate": 9.782434073858844e-05, |
|
"loss": 0.3173, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 1.8488888888888888, |
|
"grad_norm": 0.38015905022621155, |
|
"learning_rate": 9.777864028930705e-05, |
|
"loss": 0.2277, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 1.8577777777777778, |
|
"grad_norm": 0.44615599513053894, |
|
"learning_rate": 9.773247573944066e-05, |
|
"loss": 0.2951, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 1.8666666666666667, |
|
"grad_norm": 0.19754816591739655, |
|
"learning_rate": 9.768584753741134e-05, |
|
"loss": 0.2519, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 1.8755555555555556, |
|
"grad_norm": 0.5915124416351318, |
|
"learning_rate": 9.763875613614482e-05, |
|
"loss": 0.3013, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 1.8844444444444446, |
|
"grad_norm": 0.4793195426464081, |
|
"learning_rate": 9.759120199306613e-05, |
|
"loss": 0.2667, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 1.8933333333333333, |
|
"grad_norm": 0.3155132830142975, |
|
"learning_rate": 9.754318557009519e-05, |
|
"loss": 0.2911, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 1.9022222222222223, |
|
"grad_norm": 0.3596380949020386, |
|
"learning_rate": 9.74947073336423e-05, |
|
"loss": 0.3004, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 1.911111111111111, |
|
"grad_norm": 0.30630800127983093, |
|
"learning_rate": 9.744576775460364e-05, |
|
"loss": 0.2379, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"grad_norm": 0.3115442097187042, |
|
"learning_rate": 9.73963673083566e-05, |
|
"loss": 0.2837, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 1.9288888888888889, |
|
"grad_norm": 0.36604344844818115, |
|
"learning_rate": 9.73465064747553e-05, |
|
"loss": 0.3264, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 1.9377777777777778, |
|
"grad_norm": 0.29043278098106384, |
|
"learning_rate": 9.72961857381258e-05, |
|
"loss": 0.3202, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 1.9466666666666668, |
|
"grad_norm": 0.31544229388237, |
|
"learning_rate": 9.724540558726151e-05, |
|
"loss": 0.2623, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 1.9555555555555557, |
|
"grad_norm": 0.30620795488357544, |
|
"learning_rate": 9.719416651541839e-05, |
|
"loss": 0.2421, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 1.9644444444444444, |
|
"grad_norm": 0.38000237941741943, |
|
"learning_rate": 9.714246902031006e-05, |
|
"loss": 0.237, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 1.9733333333333334, |
|
"grad_norm": 0.4513542950153351, |
|
"learning_rate": 9.709031360410318e-05, |
|
"loss": 0.2942, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 1.982222222222222, |
|
"grad_norm": 0.28410232067108154, |
|
"learning_rate": 9.703770077341236e-05, |
|
"loss": 0.2918, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 1.991111111111111, |
|
"grad_norm": 0.29834455251693726, |
|
"learning_rate": 9.698463103929542e-05, |
|
"loss": 0.2444, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"grad_norm": 0.304579496383667, |
|
"learning_rate": 9.693110491724827e-05, |
|
"loss": 0.3, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 2.008888888888889, |
|
"grad_norm": 0.49522674083709717, |
|
"learning_rate": 9.687712292719997e-05, |
|
"loss": 0.317, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 2.017777777777778, |
|
"grad_norm": 0.2522561252117157, |
|
"learning_rate": 9.682268559350771e-05, |
|
"loss": 0.2269, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 2.026666666666667, |
|
"grad_norm": 0.48555663228034973, |
|
"learning_rate": 9.67677934449517e-05, |
|
"loss": 0.2827, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 2.0355555555555553, |
|
"grad_norm": 0.3875712752342224, |
|
"learning_rate": 9.671244701472999e-05, |
|
"loss": 0.2813, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 2.0444444444444443, |
|
"grad_norm": 0.2848590910434723, |
|
"learning_rate": 9.665664684045333e-05, |
|
"loss": 0.2451, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 2.0533333333333332, |
|
"grad_norm": 0.30722254514694214, |
|
"learning_rate": 9.660039346413994e-05, |
|
"loss": 0.1899, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 2.062222222222222, |
|
"grad_norm": 0.22592511773109436, |
|
"learning_rate": 9.654368743221022e-05, |
|
"loss": 0.2078, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 2.071111111111111, |
|
"grad_norm": 0.5311526656150818, |
|
"learning_rate": 9.648652929548152e-05, |
|
"loss": 0.2834, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"grad_norm": 0.28005415201187134, |
|
"learning_rate": 9.642891960916268e-05, |
|
"loss": 0.2628, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 2.088888888888889, |
|
"grad_norm": 0.29537031054496765, |
|
"learning_rate": 9.637085893284876e-05, |
|
"loss": 0.2429, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 2.097777777777778, |
|
"grad_norm": 0.5397939682006836, |
|
"learning_rate": 9.631234783051544e-05, |
|
"loss": 0.3032, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 2.1066666666666665, |
|
"grad_norm": 0.36687228083610535, |
|
"learning_rate": 9.625338687051375e-05, |
|
"loss": 0.2908, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 2.1155555555555554, |
|
"grad_norm": 0.33907392621040344, |
|
"learning_rate": 9.619397662556435e-05, |
|
"loss": 0.2333, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 2.1244444444444444, |
|
"grad_norm": 0.43153664469718933, |
|
"learning_rate": 9.613411767275209e-05, |
|
"loss": 0.2341, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 2.1333333333333333, |
|
"grad_norm": 0.30483999848365784, |
|
"learning_rate": 9.607381059352038e-05, |
|
"loss": 0.2655, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 2.1422222222222222, |
|
"grad_norm": 1.9127657413482666, |
|
"learning_rate": 9.601305597366554e-05, |
|
"loss": 0.4065, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 2.151111111111111, |
|
"grad_norm": 0.35035258531570435, |
|
"learning_rate": 9.595185440333103e-05, |
|
"loss": 0.1855, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"grad_norm": 0.5800159573554993, |
|
"learning_rate": 9.589020647700191e-05, |
|
"loss": 0.2758, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 2.168888888888889, |
|
"grad_norm": 0.6490089297294617, |
|
"learning_rate": 9.582811279349882e-05, |
|
"loss": 0.2891, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 2.1777777777777776, |
|
"grad_norm": 0.46835729479789734, |
|
"learning_rate": 9.576557395597236e-05, |
|
"loss": 0.2762, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 2.1866666666666665, |
|
"grad_norm": 0.3493160605430603, |
|
"learning_rate": 9.570259057189717e-05, |
|
"loss": 0.2785, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 2.1955555555555555, |
|
"grad_norm": 0.2647961378097534, |
|
"learning_rate": 9.563916325306594e-05, |
|
"loss": 0.2389, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 2.2044444444444444, |
|
"grad_norm": 0.3641429543495178, |
|
"learning_rate": 9.557529261558367e-05, |
|
"loss": 0.2625, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 2.2133333333333334, |
|
"grad_norm": 0.3188803195953369, |
|
"learning_rate": 9.551097927986144e-05, |
|
"loss": 0.2501, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 2.2222222222222223, |
|
"grad_norm": 0.32517799735069275, |
|
"learning_rate": 9.544622387061055e-05, |
|
"loss": 0.2333, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 2.2311111111111113, |
|
"grad_norm": 0.36616072058677673, |
|
"learning_rate": 9.538102701683643e-05, |
|
"loss": 0.2942, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"grad_norm": 0.34478914737701416, |
|
"learning_rate": 9.53153893518325e-05, |
|
"loss": 0.2435, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 2.2488888888888887, |
|
"grad_norm": 0.563099205493927, |
|
"learning_rate": 9.5249311513174e-05, |
|
"loss": 0.2787, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 2.2577777777777777, |
|
"grad_norm": 0.4205755293369293, |
|
"learning_rate": 9.518279414271183e-05, |
|
"loss": 0.2974, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 2.2666666666666666, |
|
"grad_norm": 0.32124075293540955, |
|
"learning_rate": 9.511583788656632e-05, |
|
"loss": 0.3255, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 2.2755555555555556, |
|
"grad_norm": 0.34654173254966736, |
|
"learning_rate": 9.504844339512095e-05, |
|
"loss": 0.272, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 2.2844444444444445, |
|
"grad_norm": 0.3165460526943207, |
|
"learning_rate": 9.498061132301601e-05, |
|
"loss": 0.2266, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 2.2933333333333334, |
|
"grad_norm": 0.3656767010688782, |
|
"learning_rate": 9.491234232914221e-05, |
|
"loss": 0.3019, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 2.3022222222222224, |
|
"grad_norm": 0.28812313079833984, |
|
"learning_rate": 9.484363707663442e-05, |
|
"loss": 0.2631, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 2.311111111111111, |
|
"grad_norm": 0.5799005031585693, |
|
"learning_rate": 9.477449623286505e-05, |
|
"loss": 0.2721, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"grad_norm": 0.332253098487854, |
|
"learning_rate": 9.470492046943771e-05, |
|
"loss": 0.2335, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 2.328888888888889, |
|
"grad_norm": 0.39480510354042053, |
|
"learning_rate": 9.463491046218058e-05, |
|
"loss": 0.2565, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 2.3377777777777777, |
|
"grad_norm": 0.5974012017250061, |
|
"learning_rate": 9.456446689113992e-05, |
|
"loss": 0.286, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 2.3466666666666667, |
|
"grad_norm": 0.47316139936447144, |
|
"learning_rate": 9.449359044057345e-05, |
|
"loss": 0.2111, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 2.3555555555555556, |
|
"grad_norm": 0.5243270993232727, |
|
"learning_rate": 9.442228179894362e-05, |
|
"loss": 0.2256, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 2.3644444444444446, |
|
"grad_norm": 0.6339777112007141, |
|
"learning_rate": 9.435054165891109e-05, |
|
"loss": 0.2635, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 2.3733333333333335, |
|
"grad_norm": 0.31498321890830994, |
|
"learning_rate": 9.427837071732783e-05, |
|
"loss": 0.2504, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 2.3822222222222225, |
|
"grad_norm": 1.1357451677322388, |
|
"learning_rate": 9.420576967523049e-05, |
|
"loss": 0.3394, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 2.391111111111111, |
|
"grad_norm": 0.3476477265357971, |
|
"learning_rate": 9.413273923783346e-05, |
|
"loss": 0.241, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"grad_norm": 0.3083588778972626, |
|
"learning_rate": 9.405928011452211e-05, |
|
"loss": 0.2366, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 2.408888888888889, |
|
"grad_norm": 0.45807912945747375, |
|
"learning_rate": 9.398539301884592e-05, |
|
"loss": 0.2271, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 2.417777777777778, |
|
"grad_norm": 0.28123828768730164, |
|
"learning_rate": 9.391107866851143e-05, |
|
"loss": 0.2591, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 2.4266666666666667, |
|
"grad_norm": 0.45137813687324524, |
|
"learning_rate": 9.38363377853754e-05, |
|
"loss": 0.2254, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 2.4355555555555557, |
|
"grad_norm": 0.38635390996932983, |
|
"learning_rate": 9.376117109543769e-05, |
|
"loss": 0.2498, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 2.4444444444444446, |
|
"grad_norm": 0.36426419019699097, |
|
"learning_rate": 9.368557932883432e-05, |
|
"loss": 0.2346, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 2.453333333333333, |
|
"grad_norm": 0.5773187875747681, |
|
"learning_rate": 9.360956321983028e-05, |
|
"loss": 0.2479, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 2.462222222222222, |
|
"grad_norm": 0.4950138330459595, |
|
"learning_rate": 9.353312350681242e-05, |
|
"loss": 0.2108, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 2.471111111111111, |
|
"grad_norm": 0.599851131439209, |
|
"learning_rate": 9.345626093228233e-05, |
|
"loss": 0.2419, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"grad_norm": 0.6381746530532837, |
|
"learning_rate": 9.337897624284906e-05, |
|
"loss": 0.2769, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 2.488888888888889, |
|
"grad_norm": 0.28884226083755493, |
|
"learning_rate": 9.330127018922194e-05, |
|
"loss": 0.2746, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 2.497777777777778, |
|
"grad_norm": 0.3661234378814697, |
|
"learning_rate": 9.322314352620318e-05, |
|
"loss": 0.2438, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 2.506666666666667, |
|
"grad_norm": 0.4173062741756439, |
|
"learning_rate": 9.314459701268065e-05, |
|
"loss": 0.2864, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 2.5155555555555553, |
|
"grad_norm": 0.6054656505584717, |
|
"learning_rate": 9.306563141162046e-05, |
|
"loss": 0.2368, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 2.5244444444444447, |
|
"grad_norm": 0.3535010814666748, |
|
"learning_rate": 9.298624749005951e-05, |
|
"loss": 0.2807, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 2.533333333333333, |
|
"grad_norm": 0.3705323338508606, |
|
"learning_rate": 9.290644601909815e-05, |
|
"loss": 0.2116, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 2.542222222222222, |
|
"grad_norm": 0.3306547701358795, |
|
"learning_rate": 9.282622777389258e-05, |
|
"loss": 0.2345, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 2.551111111111111, |
|
"grad_norm": 0.40624353289604187, |
|
"learning_rate": 9.274559353364734e-05, |
|
"loss": 0.268, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"grad_norm": 0.4179963767528534, |
|
"learning_rate": 9.266454408160779e-05, |
|
"loss": 0.2457, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 2.568888888888889, |
|
"grad_norm": 0.3766705393791199, |
|
"learning_rate": 9.258308020505247e-05, |
|
"loss": 0.2422, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 2.5777777777777775, |
|
"grad_norm": 0.26529139280319214, |
|
"learning_rate": 9.250120269528546e-05, |
|
"loss": 0.2562, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 2.586666666666667, |
|
"grad_norm": 0.5525755286216736, |
|
"learning_rate": 9.241891234762869e-05, |
|
"loss": 0.2836, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 2.5955555555555554, |
|
"grad_norm": 0.40089136362075806, |
|
"learning_rate": 9.233620996141421e-05, |
|
"loss": 0.2623, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 2.6044444444444443, |
|
"grad_norm": 0.3758887052536011, |
|
"learning_rate": 9.225309633997641e-05, |
|
"loss": 0.2118, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 2.6133333333333333, |
|
"grad_norm": 0.2941136062145233, |
|
"learning_rate": 9.21695722906443e-05, |
|
"loss": 0.2449, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 2.6222222222222222, |
|
"grad_norm": 0.32856640219688416, |
|
"learning_rate": 9.208563862473351e-05, |
|
"loss": 0.2308, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 2.631111111111111, |
|
"grad_norm": 0.3801966905593872, |
|
"learning_rate": 9.200129615753859e-05, |
|
"loss": 0.2727, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"grad_norm": 0.5766463875770569, |
|
"learning_rate": 9.191654570832496e-05, |
|
"loss": 0.2068, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 2.648888888888889, |
|
"grad_norm": 0.2755817174911499, |
|
"learning_rate": 9.183138810032099e-05, |
|
"loss": 0.2436, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 2.6577777777777776, |
|
"grad_norm": 0.292676717042923, |
|
"learning_rate": 9.174582416071007e-05, |
|
"loss": 0.2379, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 2.6666666666666665, |
|
"grad_norm": 0.5588552951812744, |
|
"learning_rate": 9.165985472062246e-05, |
|
"loss": 0.3178, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 2.6755555555555555, |
|
"grad_norm": 0.3965974748134613, |
|
"learning_rate": 9.157348061512727e-05, |
|
"loss": 0.2575, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 2.6844444444444444, |
|
"grad_norm": 0.45277366042137146, |
|
"learning_rate": 9.148670268322438e-05, |
|
"loss": 0.2235, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 2.6933333333333334, |
|
"grad_norm": 0.34740129113197327, |
|
"learning_rate": 9.139952176783626e-05, |
|
"loss": 0.2199, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 2.7022222222222223, |
|
"grad_norm": 0.280239075422287, |
|
"learning_rate": 9.131193871579975e-05, |
|
"loss": 0.2442, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 2.7111111111111112, |
|
"grad_norm": 0.3178425133228302, |
|
"learning_rate": 9.12239543778579e-05, |
|
"loss": 0.2262, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 2.7199999999999998, |
|
"grad_norm": 0.421599417924881, |
|
"learning_rate": 9.113556960865167e-05, |
|
"loss": 0.2326, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 2.728888888888889, |
|
"grad_norm": 0.25511476397514343, |
|
"learning_rate": 9.104678526671162e-05, |
|
"loss": 0.2261, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 2.7377777777777776, |
|
"grad_norm": 0.48630788922309875, |
|
"learning_rate": 9.09576022144496e-05, |
|
"loss": 0.2077, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 2.7466666666666666, |
|
"grad_norm": 0.38153597712516785, |
|
"learning_rate": 9.086802131815031e-05, |
|
"loss": 0.2797, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 2.7555555555555555, |
|
"grad_norm": 0.5679035186767578, |
|
"learning_rate": 9.077804344796302e-05, |
|
"loss": 0.2584, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 2.7644444444444445, |
|
"grad_norm": 0.5588884949684143, |
|
"learning_rate": 9.068766947789292e-05, |
|
"loss": 0.2838, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 2.7733333333333334, |
|
"grad_norm": 0.40492427349090576, |
|
"learning_rate": 9.059690028579284e-05, |
|
"loss": 0.2037, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 2.7822222222222224, |
|
"grad_norm": 0.3332842290401459, |
|
"learning_rate": 9.050573675335453e-05, |
|
"loss": 0.2567, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 2.7911111111111113, |
|
"grad_norm": 0.29908373951911926, |
|
"learning_rate": 9.041417976610027e-05, |
|
"loss": 0.2315, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"grad_norm": 0.31926652789115906, |
|
"learning_rate": 9.032223021337414e-05, |
|
"loss": 0.2138, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 2.8088888888888888, |
|
"grad_norm": 0.31083905696868896, |
|
"learning_rate": 9.022988898833342e-05, |
|
"loss": 0.211, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 2.8177777777777777, |
|
"grad_norm": 0.3902362287044525, |
|
"learning_rate": 9.013715698793996e-05, |
|
"loss": 0.2503, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 2.8266666666666667, |
|
"grad_norm": 0.4482881724834442, |
|
"learning_rate": 9.004403511295141e-05, |
|
"loss": 0.2204, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 2.8355555555555556, |
|
"grad_norm": 1.0134060382843018, |
|
"learning_rate": 8.995052426791247e-05, |
|
"loss": 0.2454, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 2.8444444444444446, |
|
"grad_norm": 0.4079667329788208, |
|
"learning_rate": 8.985662536114613e-05, |
|
"loss": 0.2865, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 2.8533333333333335, |
|
"grad_norm": 0.34580424427986145, |
|
"learning_rate": 8.976233930474486e-05, |
|
"loss": 0.2193, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 2.862222222222222, |
|
"grad_norm": 0.35853755474090576, |
|
"learning_rate": 8.966766701456177e-05, |
|
"loss": 0.2596, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 2.871111111111111, |
|
"grad_norm": 0.6029608249664307, |
|
"learning_rate": 8.957260941020154e-05, |
|
"loss": 0.264, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"grad_norm": 0.5026920437812805, |
|
"learning_rate": 8.947716741501177e-05, |
|
"loss": 0.2566, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 2.888888888888889, |
|
"grad_norm": 0.5039583444595337, |
|
"learning_rate": 8.938134195607377e-05, |
|
"loss": 0.2266, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 2.897777777777778, |
|
"grad_norm": 0.4172382652759552, |
|
"learning_rate": 8.928513396419368e-05, |
|
"loss": 0.251, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 2.9066666666666667, |
|
"grad_norm": 0.48238590359687805, |
|
"learning_rate": 8.918854437389342e-05, |
|
"loss": 0.2583, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 2.9155555555555557, |
|
"grad_norm": 0.37107643485069275, |
|
"learning_rate": 8.90915741234015e-05, |
|
"loss": 0.2403, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 2.924444444444444, |
|
"grad_norm": 0.26561880111694336, |
|
"learning_rate": 8.899422415464409e-05, |
|
"loss": 0.233, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 2.9333333333333336, |
|
"grad_norm": 0.43001478910446167, |
|
"learning_rate": 8.889649541323574e-05, |
|
"loss": 0.2598, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 2.942222222222222, |
|
"grad_norm": 0.3992827832698822, |
|
"learning_rate": 8.879838884847025e-05, |
|
"loss": 0.2151, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 2.951111111111111, |
|
"grad_norm": 0.44580769538879395, |
|
"learning_rate": 8.869990541331138e-05, |
|
"loss": 0.2296, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"grad_norm": 0.3314950168132782, |
|
"learning_rate": 8.860104606438369e-05, |
|
"loss": 0.2331, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 2.968888888888889, |
|
"grad_norm": 0.6843854784965515, |
|
"learning_rate": 8.850181176196315e-05, |
|
"loss": 0.3505, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 2.977777777777778, |
|
"grad_norm": 0.2698711156845093, |
|
"learning_rate": 8.840220346996792e-05, |
|
"loss": 0.2272, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 2.986666666666667, |
|
"grad_norm": 0.26144105195999146, |
|
"learning_rate": 8.83022221559489e-05, |
|
"loss": 0.2438, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 2.9955555555555557, |
|
"grad_norm": 0.26964858174324036, |
|
"learning_rate": 8.820186879108038e-05, |
|
"loss": 0.2434, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 3.0044444444444443, |
|
"grad_norm": 0.2074783891439438, |
|
"learning_rate": 8.810114435015054e-05, |
|
"loss": 0.2438, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 3.013333333333333, |
|
"grad_norm": 0.5041800737380981, |
|
"learning_rate": 8.800004981155208e-05, |
|
"loss": 0.2566, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 3.022222222222222, |
|
"grad_norm": 0.24667270481586456, |
|
"learning_rate": 8.789858615727265e-05, |
|
"loss": 0.2366, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 3.031111111111111, |
|
"grad_norm": 0.3841206729412079, |
|
"learning_rate": 8.779675437288532e-05, |
|
"loss": 0.2371, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 3.04, |
|
"grad_norm": 0.23301474750041962, |
|
"learning_rate": 8.7694555447539e-05, |
|
"loss": 0.1969, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 3.048888888888889, |
|
"grad_norm": 0.26055586338043213, |
|
"learning_rate": 8.759199037394887e-05, |
|
"loss": 0.2064, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 3.057777777777778, |
|
"grad_norm": 0.4642762243747711, |
|
"learning_rate": 8.748906014838672e-05, |
|
"loss": 0.1877, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 3.066666666666667, |
|
"grad_norm": 0.28164124488830566, |
|
"learning_rate": 8.738576577067122e-05, |
|
"loss": 0.2482, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 3.0755555555555554, |
|
"grad_norm": 0.6319877505302429, |
|
"learning_rate": 8.728210824415827e-05, |
|
"loss": 0.196, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 3.0844444444444443, |
|
"grad_norm": 0.3142436146736145, |
|
"learning_rate": 8.717808857573131e-05, |
|
"loss": 0.1907, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 3.0933333333333333, |
|
"grad_norm": 0.3424491882324219, |
|
"learning_rate": 8.707370777579133e-05, |
|
"loss": 0.228, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 3.102222222222222, |
|
"grad_norm": 0.34540361166000366, |
|
"learning_rate": 8.696896685824731e-05, |
|
"loss": 0.2292, |
|
"step": 349 |
|
}, |
|
{ |
|
"epoch": 3.111111111111111, |
|
"grad_norm": 0.38283613324165344, |
|
"learning_rate": 8.68638668405062e-05, |
|
"loss": 0.1662, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 3.12, |
|
"grad_norm": 0.4507913887500763, |
|
"learning_rate": 8.67584087434631e-05, |
|
"loss": 0.2408, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 3.128888888888889, |
|
"grad_norm": 1.011723518371582, |
|
"learning_rate": 8.665259359149132e-05, |
|
"loss": 0.2772, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 3.137777777777778, |
|
"grad_norm": 0.5270500779151917, |
|
"learning_rate": 8.654642241243247e-05, |
|
"loss": 0.2017, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 3.1466666666666665, |
|
"grad_norm": 0.8714268207550049, |
|
"learning_rate": 8.643989623758643e-05, |
|
"loss": 0.2311, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 3.1555555555555554, |
|
"grad_norm": 0.2898414134979248, |
|
"learning_rate": 8.633301610170135e-05, |
|
"loss": 0.2443, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 3.1644444444444444, |
|
"grad_norm": 0.6308351159095764, |
|
"learning_rate": 8.622578304296364e-05, |
|
"loss": 0.2517, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 3.1733333333333333, |
|
"grad_norm": 0.5109901428222656, |
|
"learning_rate": 8.611819810298778e-05, |
|
"loss": 0.2269, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 3.1822222222222223, |
|
"grad_norm": 0.432049959897995, |
|
"learning_rate": 8.601026232680634e-05, |
|
"loss": 0.1978, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 3.1911111111111112, |
|
"grad_norm": 0.3804490566253662, |
|
"learning_rate": 8.59019767628597e-05, |
|
"loss": 0.2082, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 3.2, |
|
"grad_norm": 0.31701037287712097, |
|
"learning_rate": 8.579334246298593e-05, |
|
"loss": 0.2356, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 3.2088888888888887, |
|
"grad_norm": 0.25454384088516235, |
|
"learning_rate": 8.56843604824106e-05, |
|
"loss": 0.1967, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 3.2177777777777776, |
|
"grad_norm": 0.3782944977283478, |
|
"learning_rate": 8.557503187973651e-05, |
|
"loss": 0.239, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 3.2266666666666666, |
|
"grad_norm": 0.6260755062103271, |
|
"learning_rate": 8.546535771693334e-05, |
|
"loss": 0.2061, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 3.2355555555555555, |
|
"grad_norm": 0.3981928825378418, |
|
"learning_rate": 8.535533905932738e-05, |
|
"loss": 0.2094, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 3.2444444444444445, |
|
"grad_norm": 0.5317679643630981, |
|
"learning_rate": 8.524497697559126e-05, |
|
"loss": 0.2448, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 3.2533333333333334, |
|
"grad_norm": 0.45160210132598877, |
|
"learning_rate": 8.513427253773346e-05, |
|
"loss": 0.2451, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 3.2622222222222224, |
|
"grad_norm": 0.5067645907402039, |
|
"learning_rate": 8.502322682108792e-05, |
|
"loss": 0.2036, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 3.2711111111111113, |
|
"grad_norm": 0.3842930495738983, |
|
"learning_rate": 8.491184090430364e-05, |
|
"loss": 0.2056, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 3.2800000000000002, |
|
"grad_norm": 0.33578169345855713, |
|
"learning_rate": 8.480011586933418e-05, |
|
"loss": 0.2292, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 3.2888888888888888, |
|
"grad_norm": 0.4589829444885254, |
|
"learning_rate": 8.468805280142709e-05, |
|
"loss": 0.2026, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 3.2977777777777777, |
|
"grad_norm": 0.666786789894104, |
|
"learning_rate": 8.457565278911348e-05, |
|
"loss": 0.2031, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 3.3066666666666666, |
|
"grad_norm": 0.3466854393482208, |
|
"learning_rate": 8.446291692419736e-05, |
|
"loss": 0.2024, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 3.3155555555555556, |
|
"grad_norm": 0.4151984453201294, |
|
"learning_rate": 8.434984630174509e-05, |
|
"loss": 0.2071, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 3.3244444444444445, |
|
"grad_norm": 0.3907124102115631, |
|
"learning_rate": 8.423644202007467e-05, |
|
"loss": 0.199, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 3.3333333333333335, |
|
"grad_norm": 0.7014510035514832, |
|
"learning_rate": 8.412270518074518e-05, |
|
"loss": 0.2345, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 3.3422222222222224, |
|
"grad_norm": 0.3714005649089813, |
|
"learning_rate": 8.400863688854597e-05, |
|
"loss": 0.2272, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 3.351111111111111, |
|
"grad_norm": 0.9573063254356384, |
|
"learning_rate": 8.389423825148598e-05, |
|
"loss": 0.2531, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 3.36, |
|
"grad_norm": 0.5541326999664307, |
|
"learning_rate": 8.377951038078302e-05, |
|
"loss": 0.2667, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 3.368888888888889, |
|
"grad_norm": 0.5363870859146118, |
|
"learning_rate": 8.366445439085286e-05, |
|
"loss": 0.2028, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 3.3777777777777778, |
|
"grad_norm": 0.4728504717350006, |
|
"learning_rate": 8.354907139929851e-05, |
|
"loss": 0.1955, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 3.3866666666666667, |
|
"grad_norm": 0.46801096200942993, |
|
"learning_rate": 8.343336252689935e-05, |
|
"loss": 0.1981, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 3.3955555555555557, |
|
"grad_norm": 0.528943657875061, |
|
"learning_rate": 8.33173288976002e-05, |
|
"loss": 0.2185, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 3.4044444444444446, |
|
"grad_norm": 0.7669292092323303, |
|
"learning_rate": 8.320097163850043e-05, |
|
"loss": 0.264, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 3.413333333333333, |
|
"grad_norm": 0.46919432282447815, |
|
"learning_rate": 8.308429187984297e-05, |
|
"loss": 0.1932, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 3.422222222222222, |
|
"grad_norm": 0.5307047367095947, |
|
"learning_rate": 8.296729075500344e-05, |
|
"loss": 0.1948, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 3.431111111111111, |
|
"grad_norm": 0.4287688136100769, |
|
"learning_rate": 8.284996940047903e-05, |
|
"loss": 0.2424, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 3.44, |
|
"grad_norm": 0.44700077176094055, |
|
"learning_rate": 8.273232895587748e-05, |
|
"loss": 0.2041, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 3.448888888888889, |
|
"grad_norm": 0.5288101434707642, |
|
"learning_rate": 8.261437056390606e-05, |
|
"loss": 0.2205, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 3.457777777777778, |
|
"grad_norm": 0.4757843613624573, |
|
"learning_rate": 8.249609537036043e-05, |
|
"loss": 0.2398, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 3.466666666666667, |
|
"grad_norm": 0.2801768481731415, |
|
"learning_rate": 8.237750452411353e-05, |
|
"loss": 0.187, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 3.4755555555555557, |
|
"grad_norm": 0.35519644618034363, |
|
"learning_rate": 8.225859917710439e-05, |
|
"loss": 0.2045, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 3.4844444444444447, |
|
"grad_norm": 0.4472619891166687, |
|
"learning_rate": 8.213938048432697e-05, |
|
"loss": 0.21, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 3.493333333333333, |
|
"grad_norm": 0.7717186808586121, |
|
"learning_rate": 8.201984960381894e-05, |
|
"loss": 0.2442, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 3.502222222222222, |
|
"grad_norm": 0.5858293771743774, |
|
"learning_rate": 8.190000769665044e-05, |
|
"loss": 0.2072, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 3.511111111111111, |
|
"grad_norm": 0.4433729648590088, |
|
"learning_rate": 8.177985592691272e-05, |
|
"loss": 0.2933, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 3.52, |
|
"grad_norm": 0.47395846247673035, |
|
"learning_rate": 8.1659395461707e-05, |
|
"loss": 0.218, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 3.528888888888889, |
|
"grad_norm": 0.4193160831928253, |
|
"learning_rate": 8.153862747113292e-05, |
|
"loss": 0.2046, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 3.537777777777778, |
|
"grad_norm": 0.40366363525390625, |
|
"learning_rate": 8.141755312827736e-05, |
|
"loss": 0.2022, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 3.546666666666667, |
|
"grad_norm": 0.543988823890686, |
|
"learning_rate": 8.129617360920296e-05, |
|
"loss": 0.1818, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 3.5555555555555554, |
|
"grad_norm": 0.3089783191680908, |
|
"learning_rate": 8.117449009293668e-05, |
|
"loss": 0.2196, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 3.5644444444444443, |
|
"grad_norm": 0.399312287569046, |
|
"learning_rate": 8.10525037614584e-05, |
|
"loss": 0.1889, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 3.5733333333333333, |
|
"grad_norm": 0.3407880663871765, |
|
"learning_rate": 8.093021579968941e-05, |
|
"loss": 0.2562, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 3.582222222222222, |
|
"grad_norm": 0.8019405603408813, |
|
"learning_rate": 8.080762739548089e-05, |
|
"loss": 0.2453, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 3.591111111111111, |
|
"grad_norm": 0.308278352022171, |
|
"learning_rate": 8.068473973960238e-05, |
|
"loss": 0.2129, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 3.6, |
|
"grad_norm": 0.5972309112548828, |
|
"learning_rate": 8.056155402573024e-05, |
|
"loss": 0.2369, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 3.608888888888889, |
|
"grad_norm": 0.40940365195274353, |
|
"learning_rate": 8.043807145043604e-05, |
|
"loss": 0.2144, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 3.6177777777777775, |
|
"grad_norm": 0.3109559714794159, |
|
"learning_rate": 8.03142932131749e-05, |
|
"loss": 0.2189, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 3.626666666666667, |
|
"grad_norm": 0.3607446551322937, |
|
"learning_rate": 8.019022051627388e-05, |
|
"loss": 0.2197, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 3.6355555555555554, |
|
"grad_norm": 0.4839872419834137, |
|
"learning_rate": 8.006585456492029e-05, |
|
"loss": 0.2797, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 3.6444444444444444, |
|
"grad_norm": 0.33101025223731995, |
|
"learning_rate": 7.994119656715002e-05, |
|
"loss": 0.2042, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 3.6533333333333333, |
|
"grad_norm": 0.558871865272522, |
|
"learning_rate": 7.981624773383572e-05, |
|
"loss": 0.2372, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 3.6622222222222223, |
|
"grad_norm": 0.29825037717819214, |
|
"learning_rate": 7.969100927867507e-05, |
|
"loss": 0.2156, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 3.671111111111111, |
|
"grad_norm": 0.4881444573402405, |
|
"learning_rate": 7.956548241817912e-05, |
|
"loss": 0.177, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 3.68, |
|
"grad_norm": 0.3237267732620239, |
|
"learning_rate": 7.943966837166023e-05, |
|
"loss": 0.2089, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 3.688888888888889, |
|
"grad_norm": 0.32327884435653687, |
|
"learning_rate": 7.931356836122046e-05, |
|
"loss": 0.2082, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 3.6977777777777776, |
|
"grad_norm": 0.4469964802265167, |
|
"learning_rate": 7.91871836117395e-05, |
|
"loss": 0.1687, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 3.7066666666666666, |
|
"grad_norm": 0.29894670844078064, |
|
"learning_rate": 7.906051535086296e-05, |
|
"loss": 0.1816, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 3.7155555555555555, |
|
"grad_norm": 0.3552427291870117, |
|
"learning_rate": 7.89335648089903e-05, |
|
"loss": 0.2157, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 3.7244444444444444, |
|
"grad_norm": 0.6162396669387817, |
|
"learning_rate": 7.880633321926294e-05, |
|
"loss": 0.1988, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 3.7333333333333334, |
|
"grad_norm": 0.4064711332321167, |
|
"learning_rate": 7.86788218175523e-05, |
|
"loss": 0.2438, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 3.7422222222222223, |
|
"grad_norm": 0.634732723236084, |
|
"learning_rate": 7.855103184244776e-05, |
|
"loss": 0.266, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 3.7511111111111113, |
|
"grad_norm": 1.4341474771499634, |
|
"learning_rate": 7.842296453524463e-05, |
|
"loss": 0.3107, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 3.76, |
|
"grad_norm": 0.49320435523986816, |
|
"learning_rate": 7.829462113993207e-05, |
|
"loss": 0.2334, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 3.7688888888888887, |
|
"grad_norm": 0.39640945196151733, |
|
"learning_rate": 7.81660029031811e-05, |
|
"loss": 0.2418, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 3.7777777777777777, |
|
"grad_norm": 0.6148217916488647, |
|
"learning_rate": 7.80371110743324e-05, |
|
"loss": 0.2023, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 3.7866666666666666, |
|
"grad_norm": 0.3857884109020233, |
|
"learning_rate": 7.79079469053842e-05, |
|
"loss": 0.2146, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 3.7955555555555556, |
|
"grad_norm": 0.38891321420669556, |
|
"learning_rate": 7.777851165098012e-05, |
|
"loss": 0.1895, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 3.8044444444444445, |
|
"grad_norm": 0.3949791491031647, |
|
"learning_rate": 7.764880656839696e-05, |
|
"loss": 0.1818, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 3.8133333333333335, |
|
"grad_norm": 0.36369502544403076, |
|
"learning_rate": 7.751883291753262e-05, |
|
"loss": 0.2442, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 3.822222222222222, |
|
"grad_norm": 0.39478230476379395, |
|
"learning_rate": 7.738859196089358e-05, |
|
"loss": 0.1618, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 3.8311111111111114, |
|
"grad_norm": 0.34515222907066345, |
|
"learning_rate": 7.725808496358295e-05, |
|
"loss": 0.225, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 3.84, |
|
"grad_norm": 0.5892112851142883, |
|
"learning_rate": 7.712731319328798e-05, |
|
"loss": 0.2617, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 3.848888888888889, |
|
"grad_norm": 0.44167014956474304, |
|
"learning_rate": 7.699627792026783e-05, |
|
"loss": 0.2044, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 3.8577777777777778, |
|
"grad_norm": 0.3467167615890503, |
|
"learning_rate": 7.68649804173412e-05, |
|
"loss": 0.2429, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 3.8666666666666667, |
|
"grad_norm": 0.5901280045509338, |
|
"learning_rate": 7.673342195987397e-05, |
|
"loss": 0.1852, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 3.8755555555555556, |
|
"grad_norm": 0.47540798783302307, |
|
"learning_rate": 7.660160382576683e-05, |
|
"loss": 0.1873, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 3.8844444444444446, |
|
"grad_norm": 0.546599268913269, |
|
"learning_rate": 7.646952729544284e-05, |
|
"loss": 0.2246, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 3.8933333333333335, |
|
"grad_norm": 0.3890751898288727, |
|
"learning_rate": 7.633719365183504e-05, |
|
"loss": 0.2376, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 3.902222222222222, |
|
"grad_norm": 0.5732626914978027, |
|
"learning_rate": 7.620460418037388e-05, |
|
"loss": 0.2012, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 3.911111111111111, |
|
"grad_norm": 0.5902274250984192, |
|
"learning_rate": 7.60717601689749e-05, |
|
"loss": 0.2548, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 3.92, |
|
"grad_norm": 0.5472671985626221, |
|
"learning_rate": 7.593866290802608e-05, |
|
"loss": 0.203, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 3.928888888888889, |
|
"grad_norm": 1.1805832386016846, |
|
"learning_rate": 7.580531369037533e-05, |
|
"loss": 0.2373, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 3.937777777777778, |
|
"grad_norm": 0.4900795817375183, |
|
"learning_rate": 7.567171381131802e-05, |
|
"loss": 0.2181, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 3.9466666666666668, |
|
"grad_norm": 0.35954034328460693, |
|
"learning_rate": 7.553786456858429e-05, |
|
"loss": 0.205, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 3.9555555555555557, |
|
"grad_norm": 0.6151618361473083, |
|
"learning_rate": 7.540376726232648e-05, |
|
"loss": 0.216, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 3.964444444444444, |
|
"grad_norm": 0.6646876335144043, |
|
"learning_rate": 7.526942319510655e-05, |
|
"loss": 0.2201, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 3.9733333333333336, |
|
"grad_norm": 0.3377375602722168, |
|
"learning_rate": 7.513483367188335e-05, |
|
"loss": 0.2397, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 3.982222222222222, |
|
"grad_norm": 0.44786104559898376, |
|
"learning_rate": 7.500000000000001e-05, |
|
"loss": 0.2128, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 3.991111111111111, |
|
"grad_norm": 0.8776792287826538, |
|
"learning_rate": 7.48649234891712e-05, |
|
"loss": 0.2508, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 4.0, |
|
"grad_norm": 0.3274805247783661, |
|
"learning_rate": 7.472960545147038e-05, |
|
"loss": 0.2039, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 4.0088888888888885, |
|
"grad_norm": 0.29022541642189026, |
|
"learning_rate": 7.459404720131718e-05, |
|
"loss": 0.2154, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 4.017777777777778, |
|
"grad_norm": 0.27060315012931824, |
|
"learning_rate": 7.445825005546448e-05, |
|
"loss": 0.1818, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 4.026666666666666, |
|
"grad_norm": 0.37638726830482483, |
|
"learning_rate": 7.432221533298569e-05, |
|
"loss": 0.1706, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 4.035555555555556, |
|
"grad_norm": 0.506497323513031, |
|
"learning_rate": 7.4185944355262e-05, |
|
"loss": 0.188, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 4.044444444444444, |
|
"grad_norm": 0.3924923241138458, |
|
"learning_rate": 7.404943844596939e-05, |
|
"loss": 0.234, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 4.053333333333334, |
|
"grad_norm": 0.4585055112838745, |
|
"learning_rate": 7.391269893106592e-05, |
|
"loss": 0.1821, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 4.062222222222222, |
|
"grad_norm": 0.41875752806663513, |
|
"learning_rate": 7.377572713877877e-05, |
|
"loss": 0.1349, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 4.071111111111111, |
|
"grad_norm": 0.33695676922798157, |
|
"learning_rate": 7.363852439959135e-05, |
|
"loss": 0.1754, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 4.08, |
|
"grad_norm": 0.739496648311615, |
|
"learning_rate": 7.350109204623043e-05, |
|
"loss": 0.1928, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 4.088888888888889, |
|
"grad_norm": 0.7458643317222595, |
|
"learning_rate": 7.33634314136531e-05, |
|
"loss": 0.2129, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 4.097777777777778, |
|
"grad_norm": 0.6029854416847229, |
|
"learning_rate": 7.322554383903388e-05, |
|
"loss": 0.1585, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 4.1066666666666665, |
|
"grad_norm": 0.5195834636688232, |
|
"learning_rate": 7.308743066175172e-05, |
|
"loss": 0.1809, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 4.115555555555556, |
|
"grad_norm": 0.5776547193527222, |
|
"learning_rate": 7.294909322337689e-05, |
|
"loss": 0.1918, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 4.124444444444444, |
|
"grad_norm": 0.394884318113327, |
|
"learning_rate": 7.281053286765815e-05, |
|
"loss": 0.2107, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 4.133333333333334, |
|
"grad_norm": 0.46265098452568054, |
|
"learning_rate": 7.267175094050952e-05, |
|
"loss": 0.2289, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 4.142222222222222, |
|
"grad_norm": 0.7043814063072205, |
|
"learning_rate": 7.253274878999727e-05, |
|
"loss": 0.185, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 4.151111111111111, |
|
"grad_norm": 0.6173180341720581, |
|
"learning_rate": 7.239352776632681e-05, |
|
"loss": 0.1913, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 4.16, |
|
"grad_norm": 0.6549043655395508, |
|
"learning_rate": 7.225408922182961e-05, |
|
"loss": 0.2055, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 4.168888888888889, |
|
"grad_norm": 0.488744854927063, |
|
"learning_rate": 7.211443451095007e-05, |
|
"loss": 0.1934, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 4.177777777777778, |
|
"grad_norm": 0.8572131395339966, |
|
"learning_rate": 7.197456499023225e-05, |
|
"loss": 0.166, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 4.1866666666666665, |
|
"grad_norm": 0.4277536869049072, |
|
"learning_rate": 7.183448201830685e-05, |
|
"loss": 0.1799, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 4.195555555555556, |
|
"grad_norm": 0.37053030729293823, |
|
"learning_rate": 7.169418695587791e-05, |
|
"loss": 0.1943, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 4.204444444444444, |
|
"grad_norm": 0.5450714826583862, |
|
"learning_rate": 7.155368116570962e-05, |
|
"loss": 0.1985, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 4.213333333333333, |
|
"grad_norm": 0.5906587243080139, |
|
"learning_rate": 7.141296601261314e-05, |
|
"loss": 0.2086, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 4.222222222222222, |
|
"grad_norm": 0.601839542388916, |
|
"learning_rate": 7.127204286343321e-05, |
|
"loss": 0.1699, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 4.231111111111111, |
|
"grad_norm": 0.4745982587337494, |
|
"learning_rate": 7.113091308703498e-05, |
|
"loss": 0.1757, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 4.24, |
|
"grad_norm": 0.5195015072822571, |
|
"learning_rate": 7.098957805429072e-05, |
|
"loss": 0.196, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 4.248888888888889, |
|
"grad_norm": 0.34185004234313965, |
|
"learning_rate": 7.084803913806641e-05, |
|
"loss": 0.1876, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 4.257777777777778, |
|
"grad_norm": 0.8930533528327942, |
|
"learning_rate": 7.070629771320852e-05, |
|
"loss": 0.1915, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 4.266666666666667, |
|
"grad_norm": 0.5491169095039368, |
|
"learning_rate": 7.056435515653059e-05, |
|
"loss": 0.221, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 4.275555555555556, |
|
"grad_norm": 0.36910513043403625, |
|
"learning_rate": 7.042221284679982e-05, |
|
"loss": 0.183, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 4.2844444444444445, |
|
"grad_norm": 0.961035430431366, |
|
"learning_rate": 7.027987216472377e-05, |
|
"loss": 0.1886, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 4.293333333333333, |
|
"grad_norm": 0.7092379927635193, |
|
"learning_rate": 7.013733449293687e-05, |
|
"loss": 0.1477, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 4.302222222222222, |
|
"grad_norm": 0.45441004633903503, |
|
"learning_rate": 6.999460121598704e-05, |
|
"loss": 0.1962, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 4.311111111111111, |
|
"grad_norm": 0.43963754177093506, |
|
"learning_rate": 6.985167372032225e-05, |
|
"loss": 0.1689, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 4.32, |
|
"grad_norm": 1.1201061010360718, |
|
"learning_rate": 6.970855339427698e-05, |
|
"loss": 0.176, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 4.328888888888889, |
|
"grad_norm": 0.7198124527931213, |
|
"learning_rate": 6.956524162805875e-05, |
|
"loss": 0.2368, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 4.337777777777778, |
|
"grad_norm": 0.7982850670814514, |
|
"learning_rate": 6.942173981373474e-05, |
|
"loss": 0.1913, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 4.346666666666667, |
|
"grad_norm": 0.6484464406967163, |
|
"learning_rate": 6.92780493452181e-05, |
|
"loss": 0.175, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 4.355555555555555, |
|
"grad_norm": 0.594021737575531, |
|
"learning_rate": 6.91341716182545e-05, |
|
"loss": 0.1777, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 4.364444444444445, |
|
"grad_norm": 0.6100602149963379, |
|
"learning_rate": 6.899010803040857e-05, |
|
"loss": 0.1492, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 4.373333333333333, |
|
"grad_norm": 0.5219585299491882, |
|
"learning_rate": 6.884585998105026e-05, |
|
"loss": 0.2231, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 4.3822222222222225, |
|
"grad_norm": 0.790718138217926, |
|
"learning_rate": 6.870142887134141e-05, |
|
"loss": 0.1844, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 4.391111111111111, |
|
"grad_norm": 0.44537198543548584, |
|
"learning_rate": 6.855681610422189e-05, |
|
"loss": 0.1866, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 4.4, |
|
"grad_norm": 0.3384419083595276, |
|
"learning_rate": 6.841202308439623e-05, |
|
"loss": 0.1788, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 4.408888888888889, |
|
"grad_norm": 0.4067346453666687, |
|
"learning_rate": 6.826705121831976e-05, |
|
"loss": 0.2291, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 4.417777777777777, |
|
"grad_norm": 0.9824737310409546, |
|
"learning_rate": 6.812190191418508e-05, |
|
"loss": 0.1777, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 4.426666666666667, |
|
"grad_norm": 0.963668704032898, |
|
"learning_rate": 6.797657658190839e-05, |
|
"loss": 0.1765, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 4.435555555555555, |
|
"grad_norm": 0.3285558223724365, |
|
"learning_rate": 6.783107663311565e-05, |
|
"loss": 0.1971, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 4.444444444444445, |
|
"grad_norm": 0.38868242502212524, |
|
"learning_rate": 6.768540348112907e-05, |
|
"loss": 0.2064, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 4.444444444444445, |
|
"eval_loss": 0.3316148519515991, |
|
"eval_runtime": 44.4048, |
|
"eval_samples_per_second": 2.252, |
|
"eval_steps_per_second": 2.252, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 4.453333333333333, |
|
"grad_norm": 0.8600643873214722, |
|
"learning_rate": 6.753955854095323e-05, |
|
"loss": 0.1599, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 4.4622222222222225, |
|
"grad_norm": 0.6059608459472656, |
|
"learning_rate": 6.739354322926136e-05, |
|
"loss": 0.1915, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 4.471111111111111, |
|
"grad_norm": 0.8490309715270996, |
|
"learning_rate": 6.724735896438167e-05, |
|
"loss": 0.2769, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 4.48, |
|
"grad_norm": 0.3413046896457672, |
|
"learning_rate": 6.710100716628344e-05, |
|
"loss": 0.1927, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 4.488888888888889, |
|
"grad_norm": 0.5042988657951355, |
|
"learning_rate": 6.695448925656333e-05, |
|
"loss": 0.146, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 4.497777777777777, |
|
"grad_norm": 0.46430540084838867, |
|
"learning_rate": 6.680780665843155e-05, |
|
"loss": 0.1989, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 4.506666666666667, |
|
"grad_norm": 0.45499563217163086, |
|
"learning_rate": 6.666096079669797e-05, |
|
"loss": 0.2019, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 4.515555555555555, |
|
"grad_norm": 0.33445480465888977, |
|
"learning_rate": 6.651395309775837e-05, |
|
"loss": 0.2001, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 4.524444444444445, |
|
"grad_norm": 0.6296830773353577, |
|
"learning_rate": 6.636678498958052e-05, |
|
"loss": 0.1525, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 4.533333333333333, |
|
"grad_norm": 0.49194592237472534, |
|
"learning_rate": 6.621945790169036e-05, |
|
"loss": 0.1748, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 4.542222222222223, |
|
"grad_norm": 0.3655872344970703, |
|
"learning_rate": 6.607197326515808e-05, |
|
"loss": 0.1935, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 4.551111111111111, |
|
"grad_norm": 0.6454001069068909, |
|
"learning_rate": 6.592433251258423e-05, |
|
"loss": 0.1625, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 4.5600000000000005, |
|
"grad_norm": 0.5047568082809448, |
|
"learning_rate": 6.577653707808577e-05, |
|
"loss": 0.1964, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 4.568888888888889, |
|
"grad_norm": 0.8591883778572083, |
|
"learning_rate": 6.562858839728223e-05, |
|
"loss": 0.2091, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 4.5777777777777775, |
|
"grad_norm": 0.3702365756034851, |
|
"learning_rate": 6.548048790728165e-05, |
|
"loss": 0.2029, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 4.586666666666667, |
|
"grad_norm": 0.4821636378765106, |
|
"learning_rate": 6.533223704666672e-05, |
|
"loss": 0.1799, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 4.595555555555555, |
|
"grad_norm": 0.4567244350910187, |
|
"learning_rate": 6.518383725548074e-05, |
|
"loss": 0.1867, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 4.604444444444445, |
|
"grad_norm": 0.613053560256958, |
|
"learning_rate": 6.503528997521366e-05, |
|
"loss": 0.1594, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 4.613333333333333, |
|
"grad_norm": 0.34238895773887634, |
|
"learning_rate": 6.488659664878808e-05, |
|
"loss": 0.1931, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 4.622222222222222, |
|
"grad_norm": 0.3325413465499878, |
|
"learning_rate": 6.473775872054521e-05, |
|
"loss": 0.1665, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 4.631111111111111, |
|
"grad_norm": 0.518052875995636, |
|
"learning_rate": 6.458877763623089e-05, |
|
"loss": 0.1491, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 4.64, |
|
"grad_norm": 0.398858904838562, |
|
"learning_rate": 6.44396548429815e-05, |
|
"loss": 0.1651, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 4.648888888888889, |
|
"grad_norm": 0.44876357913017273, |
|
"learning_rate": 6.42903917893099e-05, |
|
"loss": 0.1933, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 4.657777777777778, |
|
"grad_norm": 0.58448725938797, |
|
"learning_rate": 6.414098992509138e-05, |
|
"loss": 0.1924, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 4.666666666666667, |
|
"grad_norm": 0.5056392550468445, |
|
"learning_rate": 6.399145070154961e-05, |
|
"loss": 0.1667, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 4.6755555555555555, |
|
"grad_norm": 0.6365034580230713, |
|
"learning_rate": 6.384177557124247e-05, |
|
"loss": 0.182, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 4.684444444444445, |
|
"grad_norm": 0.3645959496498108, |
|
"learning_rate": 6.369196598804801e-05, |
|
"loss": 0.2032, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 4.693333333333333, |
|
"grad_norm": 0.42011165618896484, |
|
"learning_rate": 6.354202340715026e-05, |
|
"loss": 0.2085, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 4.702222222222222, |
|
"grad_norm": 0.4557260274887085, |
|
"learning_rate": 6.339194928502517e-05, |
|
"loss": 0.2283, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 4.711111111111111, |
|
"grad_norm": 0.41336071491241455, |
|
"learning_rate": 6.324174507942637e-05, |
|
"loss": 0.1633, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 4.72, |
|
"grad_norm": 0.3430007994174957, |
|
"learning_rate": 6.309141224937111e-05, |
|
"loss": 0.179, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 4.728888888888889, |
|
"grad_norm": 0.5025080442428589, |
|
"learning_rate": 6.294095225512603e-05, |
|
"loss": 0.1826, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 4.737777777777778, |
|
"grad_norm": 0.6088576316833496, |
|
"learning_rate": 6.279036655819302e-05, |
|
"loss": 0.2157, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 4.746666666666667, |
|
"grad_norm": 0.4910157322883606, |
|
"learning_rate": 6.263965662129487e-05, |
|
"loss": 0.1971, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 4.7555555555555555, |
|
"grad_norm": 0.3384090065956116, |
|
"learning_rate": 6.248882390836135e-05, |
|
"loss": 0.1443, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 4.764444444444445, |
|
"grad_norm": 0.40888798236846924, |
|
"learning_rate": 6.233786988451468e-05, |
|
"loss": 0.134, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 4.773333333333333, |
|
"grad_norm": 0.411403626203537, |
|
"learning_rate": 6.218679601605554e-05, |
|
"loss": 0.2239, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 4.782222222222222, |
|
"grad_norm": 0.5049718618392944, |
|
"learning_rate": 6.203560377044866e-05, |
|
"loss": 0.1874, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 4.791111111111111, |
|
"grad_norm": 0.35621222853660583, |
|
"learning_rate": 6.188429461630866e-05, |
|
"loss": 0.1915, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 4.8, |
|
"grad_norm": 0.7713626027107239, |
|
"learning_rate": 6.173287002338577e-05, |
|
"loss": 0.2097, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 4.808888888888889, |
|
"grad_norm": 1.0501298904418945, |
|
"learning_rate": 6.158133146255153e-05, |
|
"loss": 0.2026, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 4.817777777777778, |
|
"grad_norm": 0.7429870367050171, |
|
"learning_rate": 6.142968040578449e-05, |
|
"loss": 0.2041, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 4.826666666666666, |
|
"grad_norm": 0.4614238739013672, |
|
"learning_rate": 6.127791832615598e-05, |
|
"loss": 0.1685, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 4.835555555555556, |
|
"grad_norm": 0.63481205701828, |
|
"learning_rate": 6.112604669781572e-05, |
|
"loss": 0.1836, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 4.844444444444444, |
|
"grad_norm": 0.5263459086418152, |
|
"learning_rate": 6.097406699597759e-05, |
|
"loss": 0.162, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 4.8533333333333335, |
|
"grad_norm": 0.4739071726799011, |
|
"learning_rate": 6.0821980696905146e-05, |
|
"loss": 0.2085, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 4.862222222222222, |
|
"grad_norm": 0.4817952811717987, |
|
"learning_rate": 6.0669789277897507e-05, |
|
"loss": 0.1907, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 4.871111111111111, |
|
"grad_norm": 0.540302574634552, |
|
"learning_rate": 6.0517494217274794e-05, |
|
"loss": 0.1743, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 4.88, |
|
"grad_norm": 0.48975828289985657, |
|
"learning_rate": 6.036509699436389e-05, |
|
"loss": 0.1769, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 4.888888888888889, |
|
"grad_norm": 0.4697459042072296, |
|
"learning_rate": 6.021259908948402e-05, |
|
"loss": 0.1811, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 4.897777777777778, |
|
"grad_norm": 0.8186047077178955, |
|
"learning_rate": 6.0060001983932425e-05, |
|
"loss": 0.225, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 4.906666666666666, |
|
"grad_norm": 0.3741462528705597, |
|
"learning_rate": 5.9907307159969884e-05, |
|
"loss": 0.1601, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 4.915555555555556, |
|
"grad_norm": 0.46077340841293335, |
|
"learning_rate": 5.9754516100806423e-05, |
|
"loss": 0.1887, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 4.924444444444444, |
|
"grad_norm": 0.5414716601371765, |
|
"learning_rate": 5.960163029058682e-05, |
|
"loss": 0.1665, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 4.933333333333334, |
|
"grad_norm": 0.5361833572387695, |
|
"learning_rate": 5.944865121437622e-05, |
|
"loss": 0.1925, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 4.942222222222222, |
|
"grad_norm": 0.35004904866218567, |
|
"learning_rate": 5.9295580358145744e-05, |
|
"loss": 0.1933, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 4.9511111111111115, |
|
"grad_norm": 1.0410363674163818, |
|
"learning_rate": 5.914241920875798e-05, |
|
"loss": 0.2299, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 4.96, |
|
"grad_norm": 0.5200708508491516, |
|
"learning_rate": 5.898916925395264e-05, |
|
"loss": 0.1673, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 4.968888888888889, |
|
"grad_norm": 0.534602165222168, |
|
"learning_rate": 5.8835831982332015e-05, |
|
"loss": 0.1815, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 4.977777777777778, |
|
"grad_norm": 0.4768090546131134, |
|
"learning_rate": 5.868240888334653e-05, |
|
"loss": 0.2138, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 4.986666666666666, |
|
"grad_norm": 0.38369208574295044, |
|
"learning_rate": 5.852890144728034e-05, |
|
"loss": 0.1509, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 4.995555555555556, |
|
"grad_norm": 0.35721951723098755, |
|
"learning_rate": 5.837531116523682e-05, |
|
"loss": 0.1485, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 5.004444444444444, |
|
"grad_norm": 0.44299712777137756, |
|
"learning_rate": 5.822163952912404e-05, |
|
"loss": 0.1504, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 5.013333333333334, |
|
"grad_norm": 0.47305387258529663, |
|
"learning_rate": 5.806788803164034e-05, |
|
"loss": 0.1497, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 5.022222222222222, |
|
"grad_norm": 0.5253143906593323, |
|
"learning_rate": 5.791405816625975e-05, |
|
"loss": 0.1726, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 5.0311111111111115, |
|
"grad_norm": 0.5787915587425232, |
|
"learning_rate": 5.7760151427217576e-05, |
|
"loss": 0.1603, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 5.04, |
|
"grad_norm": 0.7631460428237915, |
|
"learning_rate": 5.7606169309495836e-05, |
|
"loss": 0.2153, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 5.0488888888888885, |
|
"grad_norm": 0.472397118806839, |
|
"learning_rate": 5.745211330880872e-05, |
|
"loss": 0.1887, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 5.057777777777778, |
|
"grad_norm": 0.4452255368232727, |
|
"learning_rate": 5.729798492158811e-05, |
|
"loss": 0.2073, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 5.066666666666666, |
|
"grad_norm": 0.4919035732746124, |
|
"learning_rate": 5.714378564496901e-05, |
|
"loss": 0.1449, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 5.075555555555556, |
|
"grad_norm": 0.6700975894927979, |
|
"learning_rate": 5.698951697677498e-05, |
|
"loss": 0.1024, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 5.084444444444444, |
|
"grad_norm": 0.6321210265159607, |
|
"learning_rate": 5.683518041550368e-05, |
|
"loss": 0.1584, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 5.093333333333334, |
|
"grad_norm": 0.7015148997306824, |
|
"learning_rate": 5.668077746031219e-05, |
|
"loss": 0.1387, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 5.102222222222222, |
|
"grad_norm": 0.6382136344909668, |
|
"learning_rate": 5.6526309611002594e-05, |
|
"loss": 0.1706, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 5.111111111111111, |
|
"grad_norm": 0.45012837648391724, |
|
"learning_rate": 5.637177836800722e-05, |
|
"loss": 0.155, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 5.12, |
|
"grad_norm": 0.47716137766838074, |
|
"learning_rate": 5.621718523237427e-05, |
|
"loss": 0.1303, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 5.128888888888889, |
|
"grad_norm": 0.7658069729804993, |
|
"learning_rate": 5.6062531705753075e-05, |
|
"loss": 0.1448, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 5.137777777777778, |
|
"grad_norm": 0.43680229783058167, |
|
"learning_rate": 5.590781929037965e-05, |
|
"loss": 0.1443, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 5.1466666666666665, |
|
"grad_norm": 0.834297239780426, |
|
"learning_rate": 5.575304948906194e-05, |
|
"loss": 0.0925, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 5.155555555555556, |
|
"grad_norm": 0.405964195728302, |
|
"learning_rate": 5.559822380516539e-05, |
|
"loss": 0.1427, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 5.164444444444444, |
|
"grad_norm": 0.49563100934028625, |
|
"learning_rate": 5.544334374259823e-05, |
|
"loss": 0.1492, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 5.173333333333334, |
|
"grad_norm": 0.5021004676818848, |
|
"learning_rate": 5.5288410805796895e-05, |
|
"loss": 0.1556, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 5.182222222222222, |
|
"grad_norm": 0.5862144827842712, |
|
"learning_rate": 5.5133426499711425e-05, |
|
"loss": 0.142, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 5.191111111111111, |
|
"grad_norm": 0.43614262342453003, |
|
"learning_rate": 5.497839232979084e-05, |
|
"loss": 0.1626, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 5.2, |
|
"grad_norm": 0.5935210585594177, |
|
"learning_rate": 5.4823309801968516e-05, |
|
"loss": 0.1267, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 5.208888888888889, |
|
"grad_norm": 0.4941434860229492, |
|
"learning_rate": 5.466818042264753e-05, |
|
"loss": 0.1144, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 5.217777777777778, |
|
"grad_norm": 0.6197287440299988, |
|
"learning_rate": 5.451300569868612e-05, |
|
"loss": 0.1654, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 5.226666666666667, |
|
"grad_norm": 0.38996899127960205, |
|
"learning_rate": 5.435778713738292e-05, |
|
"loss": 0.1925, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 5.235555555555556, |
|
"grad_norm": 1.012840986251831, |
|
"learning_rate": 5.420252624646238e-05, |
|
"loss": 0.1575, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 5.2444444444444445, |
|
"grad_norm": 0.33670493960380554, |
|
"learning_rate": 5.404722453406017e-05, |
|
"loss": 0.1635, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 5.253333333333333, |
|
"grad_norm": 0.3880348205566406, |
|
"learning_rate": 5.3891883508708444e-05, |
|
"loss": 0.1301, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 5.262222222222222, |
|
"grad_norm": 0.8689696192741394, |
|
"learning_rate": 5.373650467932122e-05, |
|
"loss": 0.1621, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 5.271111111111111, |
|
"grad_norm": 0.4196907877922058, |
|
"learning_rate": 5.3581089555179754e-05, |
|
"loss": 0.1501, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 5.28, |
|
"grad_norm": 0.9811832904815674, |
|
"learning_rate": 5.3425639645917834e-05, |
|
"loss": 0.1124, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 5.288888888888889, |
|
"grad_norm": 0.7768471240997314, |
|
"learning_rate": 5.327015646150716e-05, |
|
"loss": 0.1264, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 5.297777777777778, |
|
"grad_norm": 0.5367645025253296, |
|
"learning_rate": 5.311464151224261e-05, |
|
"loss": 0.1095, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 5.306666666666667, |
|
"grad_norm": 0.5028023719787598, |
|
"learning_rate": 5.295909630872764e-05, |
|
"loss": 0.1205, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 5.315555555555555, |
|
"grad_norm": 0.39689093828201294, |
|
"learning_rate": 5.2803522361859594e-05, |
|
"loss": 0.1511, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 5.3244444444444445, |
|
"grad_norm": 0.7014448642730713, |
|
"learning_rate": 5.264792118281498e-05, |
|
"loss": 0.1379, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 5.333333333333333, |
|
"grad_norm": 0.7733895778656006, |
|
"learning_rate": 5.249229428303486e-05, |
|
"loss": 0.1538, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 5.342222222222222, |
|
"grad_norm": 0.46061837673187256, |
|
"learning_rate": 5.233664317421012e-05, |
|
"loss": 0.188, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 5.351111111111111, |
|
"grad_norm": 0.45391765236854553, |
|
"learning_rate": 5.218096936826681e-05, |
|
"loss": 0.2143, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 5.36, |
|
"grad_norm": 0.4227662682533264, |
|
"learning_rate": 5.202527437735143e-05, |
|
"loss": 0.1974, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 5.368888888888889, |
|
"grad_norm": 0.5953728556632996, |
|
"learning_rate": 5.18695597138163e-05, |
|
"loss": 0.131, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 5.377777777777778, |
|
"grad_norm": 0.7897362112998962, |
|
"learning_rate": 5.1713826890204764e-05, |
|
"loss": 0.151, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 5.386666666666667, |
|
"grad_norm": 0.611005425453186, |
|
"learning_rate": 5.155807741923666e-05, |
|
"loss": 0.1771, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 5.395555555555555, |
|
"grad_norm": 0.746476411819458, |
|
"learning_rate": 5.140231281379345e-05, |
|
"loss": 0.1676, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 5.404444444444445, |
|
"grad_norm": 0.4701192378997803, |
|
"learning_rate": 5.124653458690365e-05, |
|
"loss": 0.1661, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 5.413333333333333, |
|
"grad_norm": 0.6008636355400085, |
|
"learning_rate": 5.1090744251728064e-05, |
|
"loss": 0.1462, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 5.4222222222222225, |
|
"grad_norm": 0.5137926340103149, |
|
"learning_rate": 5.0934943321545115e-05, |
|
"loss": 0.1682, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 5.431111111111111, |
|
"grad_norm": 0.42210161685943604, |
|
"learning_rate": 5.077913330973616e-05, |
|
"loss": 0.1599, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 5.44, |
|
"grad_norm": 0.805438220500946, |
|
"learning_rate": 5.062331572977076e-05, |
|
"loss": 0.1416, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 5.448888888888889, |
|
"grad_norm": 0.696622371673584, |
|
"learning_rate": 5.046749209519197e-05, |
|
"loss": 0.1785, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 5.457777777777777, |
|
"grad_norm": 0.4268178939819336, |
|
"learning_rate": 5.031166391960168e-05, |
|
"loss": 0.183, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 5.466666666666667, |
|
"grad_norm": 0.8230905532836914, |
|
"learning_rate": 5.015583271664589e-05, |
|
"loss": 0.1309, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 5.475555555555555, |
|
"grad_norm": 0.4906931221485138, |
|
"learning_rate": 5e-05, |
|
"loss": 0.1683, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 5.484444444444445, |
|
"grad_norm": 0.6042024493217468, |
|
"learning_rate": 4.984416728335412e-05, |
|
"loss": 0.1486, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 5.493333333333333, |
|
"grad_norm": 0.39481180906295776, |
|
"learning_rate": 4.968833608039832e-05, |
|
"loss": 0.1548, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 5.502222222222223, |
|
"grad_norm": 0.5813857913017273, |
|
"learning_rate": 4.953250790480805e-05, |
|
"loss": 0.1513, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 5.511111111111111, |
|
"grad_norm": 0.3986724019050598, |
|
"learning_rate": 4.9376684270229254e-05, |
|
"loss": 0.1728, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 5.52, |
|
"grad_norm": 0.5897936224937439, |
|
"learning_rate": 4.922086669026385e-05, |
|
"loss": 0.1607, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 5.528888888888889, |
|
"grad_norm": 0.4015739858150482, |
|
"learning_rate": 4.9065056678454904e-05, |
|
"loss": 0.1542, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 5.5377777777777775, |
|
"grad_norm": 0.5246159434318542, |
|
"learning_rate": 4.890925574827195e-05, |
|
"loss": 0.1894, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 5.546666666666667, |
|
"grad_norm": 0.6899661421775818, |
|
"learning_rate": 4.875346541309637e-05, |
|
"loss": 0.1172, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 5.555555555555555, |
|
"grad_norm": 0.3335443139076233, |
|
"learning_rate": 4.859768718620656e-05, |
|
"loss": 0.1272, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 5.564444444444445, |
|
"grad_norm": 0.5370382070541382, |
|
"learning_rate": 4.844192258076336e-05, |
|
"loss": 0.1597, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 5.573333333333333, |
|
"grad_norm": 0.7130039930343628, |
|
"learning_rate": 4.8286173109795255e-05, |
|
"loss": 0.1795, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 5.582222222222223, |
|
"grad_norm": 0.6044110059738159, |
|
"learning_rate": 4.813044028618373e-05, |
|
"loss": 0.1402, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 5.591111111111111, |
|
"grad_norm": 0.5665392875671387, |
|
"learning_rate": 4.7974725622648595e-05, |
|
"loss": 0.1339, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 5.6, |
|
"grad_norm": 0.49484479427337646, |
|
"learning_rate": 4.781903063173321e-05, |
|
"loss": 0.2101, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 5.608888888888889, |
|
"grad_norm": 0.39191365242004395, |
|
"learning_rate": 4.76633568257899e-05, |
|
"loss": 0.1823, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 5.6177777777777775, |
|
"grad_norm": 0.5061699151992798, |
|
"learning_rate": 4.750770571696514e-05, |
|
"loss": 0.127, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 5.626666666666667, |
|
"grad_norm": 0.5720147490501404, |
|
"learning_rate": 4.735207881718502e-05, |
|
"loss": 0.1226, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 5.635555555555555, |
|
"grad_norm": 0.5010076761245728, |
|
"learning_rate": 4.7196477638140404e-05, |
|
"loss": 0.1503, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 5.644444444444445, |
|
"grad_norm": 0.6409832239151001, |
|
"learning_rate": 4.704090369127236e-05, |
|
"loss": 0.1718, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 5.653333333333333, |
|
"grad_norm": 0.5019122958183289, |
|
"learning_rate": 4.68853584877574e-05, |
|
"loss": 0.1898, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 5.662222222222223, |
|
"grad_norm": 0.4418942630290985, |
|
"learning_rate": 4.6729843538492847e-05, |
|
"loss": 0.1961, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 5.671111111111111, |
|
"grad_norm": 0.4697255790233612, |
|
"learning_rate": 4.657436035408217e-05, |
|
"loss": 0.1161, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 5.68, |
|
"grad_norm": 0.6954605579376221, |
|
"learning_rate": 4.6418910444820244e-05, |
|
"loss": 0.1298, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 5.688888888888889, |
|
"grad_norm": 0.7173737287521362, |
|
"learning_rate": 4.626349532067879e-05, |
|
"loss": 0.1488, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 5.697777777777778, |
|
"grad_norm": 0.4419032633304596, |
|
"learning_rate": 4.6108116491291575e-05, |
|
"loss": 0.1844, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 5.706666666666667, |
|
"grad_norm": 0.4070740044116974, |
|
"learning_rate": 4.595277546593984e-05, |
|
"loss": 0.1796, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 5.7155555555555555, |
|
"grad_norm": 0.40745702385902405, |
|
"learning_rate": 4.579747375353763e-05, |
|
"loss": 0.1655, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 5.724444444444444, |
|
"grad_norm": 0.7508057951927185, |
|
"learning_rate": 4.564221286261709e-05, |
|
"loss": 0.1376, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 5.733333333333333, |
|
"grad_norm": 0.7357051968574524, |
|
"learning_rate": 4.548699430131388e-05, |
|
"loss": 0.1365, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 5.742222222222222, |
|
"grad_norm": 1.2087818384170532, |
|
"learning_rate": 4.5331819577352474e-05, |
|
"loss": 0.1381, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 5.751111111111111, |
|
"grad_norm": 0.690025269985199, |
|
"learning_rate": 4.51766901980315e-05, |
|
"loss": 0.1401, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 5.76, |
|
"grad_norm": 0.41388073563575745, |
|
"learning_rate": 4.502160767020918e-05, |
|
"loss": 0.1285, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 5.768888888888889, |
|
"grad_norm": 0.5141646862030029, |
|
"learning_rate": 4.486657350028859e-05, |
|
"loss": 0.1308, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 5.777777777777778, |
|
"grad_norm": 1.4173223972320557, |
|
"learning_rate": 4.471158919420312e-05, |
|
"loss": 0.1514, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 5.786666666666667, |
|
"grad_norm": 0.5400106906890869, |
|
"learning_rate": 4.4556656257401786e-05, |
|
"loss": 0.1632, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 5.795555555555556, |
|
"grad_norm": 0.8285782337188721, |
|
"learning_rate": 4.4401776194834613e-05, |
|
"loss": 0.1529, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 5.804444444444444, |
|
"grad_norm": 0.49138960242271423, |
|
"learning_rate": 4.424695051093807e-05, |
|
"loss": 0.1557, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 5.8133333333333335, |
|
"grad_norm": 0.4443909525871277, |
|
"learning_rate": 4.409218070962036e-05, |
|
"loss": 0.1424, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 5.822222222222222, |
|
"grad_norm": 0.5284453630447388, |
|
"learning_rate": 4.393746829424693e-05, |
|
"loss": 0.1889, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 5.831111111111111, |
|
"grad_norm": 0.5384455919265747, |
|
"learning_rate": 4.378281476762576e-05, |
|
"loss": 0.1727, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 5.84, |
|
"grad_norm": 0.6882079839706421, |
|
"learning_rate": 4.362822163199279e-05, |
|
"loss": 0.1511, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 5.848888888888889, |
|
"grad_norm": 0.4634445309638977, |
|
"learning_rate": 4.347369038899744e-05, |
|
"loss": 0.1261, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 5.857777777777778, |
|
"grad_norm": 0.5075255036354065, |
|
"learning_rate": 4.331922253968781e-05, |
|
"loss": 0.1919, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 5.866666666666667, |
|
"grad_norm": 1.4190889596939087, |
|
"learning_rate": 4.316481958449634e-05, |
|
"loss": 0.2282, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 5.875555555555556, |
|
"grad_norm": 0.6407334804534912, |
|
"learning_rate": 4.3010483023225045e-05, |
|
"loss": 0.1497, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 5.884444444444444, |
|
"grad_norm": 0.5419467687606812, |
|
"learning_rate": 4.285621435503101e-05, |
|
"loss": 0.1311, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 5.8933333333333335, |
|
"grad_norm": 0.8229005336761475, |
|
"learning_rate": 4.2702015078411906e-05, |
|
"loss": 0.128, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 5.902222222222222, |
|
"grad_norm": 0.33451828360557556, |
|
"learning_rate": 4.254788669119127e-05, |
|
"loss": 0.15, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 5.911111111111111, |
|
"grad_norm": 0.5506479740142822, |
|
"learning_rate": 4.239383069050417e-05, |
|
"loss": 0.1371, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 5.92, |
|
"grad_norm": 0.41086164116859436, |
|
"learning_rate": 4.223984857278242e-05, |
|
"loss": 0.1652, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 5.928888888888888, |
|
"grad_norm": 0.41804587841033936, |
|
"learning_rate": 4.208594183374026e-05, |
|
"loss": 0.1461, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 5.937777777777778, |
|
"grad_norm": 0.7241559028625488, |
|
"learning_rate": 4.1932111968359664e-05, |
|
"loss": 0.1429, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 5.946666666666666, |
|
"grad_norm": 0.7180150747299194, |
|
"learning_rate": 4.177836047087595e-05, |
|
"loss": 0.147, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 5.955555555555556, |
|
"grad_norm": 0.668580174446106, |
|
"learning_rate": 4.162468883476319e-05, |
|
"loss": 0.1562, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 5.964444444444444, |
|
"grad_norm": 0.46259158849716187, |
|
"learning_rate": 4.147109855271966e-05, |
|
"loss": 0.1453, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 5.973333333333334, |
|
"grad_norm": 0.5375617146492004, |
|
"learning_rate": 4.131759111665349e-05, |
|
"loss": 0.1581, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 5.982222222222222, |
|
"grad_norm": 0.39930492639541626, |
|
"learning_rate": 4.1164168017668e-05, |
|
"loss": 0.1394, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 5.9911111111111115, |
|
"grad_norm": 0.400290846824646, |
|
"learning_rate": 4.101083074604737e-05, |
|
"loss": 0.153, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 6.0, |
|
"grad_norm": 0.504225492477417, |
|
"learning_rate": 4.085758079124203e-05, |
|
"loss": 0.1532, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 6.0088888888888885, |
|
"grad_norm": 0.47197291254997253, |
|
"learning_rate": 4.0704419641854274e-05, |
|
"loss": 0.0869, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 6.017777777777778, |
|
"grad_norm": 1.4398659467697144, |
|
"learning_rate": 4.05513487856238e-05, |
|
"loss": 0.1247, |
|
"step": 677 |
|
}, |
|
{ |
|
"epoch": 6.026666666666666, |
|
"grad_norm": 0.4351118803024292, |
|
"learning_rate": 4.03983697094132e-05, |
|
"loss": 0.1621, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 6.035555555555556, |
|
"grad_norm": 0.4250251352787018, |
|
"learning_rate": 4.0245483899193595e-05, |
|
"loss": 0.1137, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 6.044444444444444, |
|
"grad_norm": 0.5068423748016357, |
|
"learning_rate": 4.0092692840030134e-05, |
|
"loss": 0.1503, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 6.053333333333334, |
|
"grad_norm": 0.8082426190376282, |
|
"learning_rate": 3.993999801606759e-05, |
|
"loss": 0.1423, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 6.062222222222222, |
|
"grad_norm": 0.4672413170337677, |
|
"learning_rate": 3.978740091051599e-05, |
|
"loss": 0.12, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 6.071111111111111, |
|
"grad_norm": 0.8545836806297302, |
|
"learning_rate": 3.963490300563613e-05, |
|
"loss": 0.1151, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 6.08, |
|
"grad_norm": 0.4437183141708374, |
|
"learning_rate": 3.9482505782725224e-05, |
|
"loss": 0.1586, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 6.088888888888889, |
|
"grad_norm": 0.6166926622390747, |
|
"learning_rate": 3.933021072210251e-05, |
|
"loss": 0.1711, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 6.097777777777778, |
|
"grad_norm": 0.8207690715789795, |
|
"learning_rate": 3.917801930309486e-05, |
|
"loss": 0.1137, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 6.1066666666666665, |
|
"grad_norm": 1.1031180620193481, |
|
"learning_rate": 3.902593300402244e-05, |
|
"loss": 0.1194, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 6.115555555555556, |
|
"grad_norm": 0.5387362837791443, |
|
"learning_rate": 3.887395330218429e-05, |
|
"loss": 0.1407, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 6.124444444444444, |
|
"grad_norm": 0.48524364829063416, |
|
"learning_rate": 3.872208167384404e-05, |
|
"loss": 0.0863, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 6.133333333333334, |
|
"grad_norm": 0.5386446714401245, |
|
"learning_rate": 3.857031959421553e-05, |
|
"loss": 0.1376, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 6.142222222222222, |
|
"grad_norm": 1.6886932849884033, |
|
"learning_rate": 3.8418668537448495e-05, |
|
"loss": 0.1503, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 6.151111111111111, |
|
"grad_norm": 0.4677202105522156, |
|
"learning_rate": 3.8267129976614254e-05, |
|
"loss": 0.1255, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 6.16, |
|
"grad_norm": 0.4135400652885437, |
|
"learning_rate": 3.8115705383691355e-05, |
|
"loss": 0.1174, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 6.168888888888889, |
|
"grad_norm": 0.6193229556083679, |
|
"learning_rate": 3.7964396229551364e-05, |
|
"loss": 0.1114, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 6.177777777777778, |
|
"grad_norm": 0.6773757338523865, |
|
"learning_rate": 3.781320398394446e-05, |
|
"loss": 0.1079, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 6.1866666666666665, |
|
"grad_norm": 0.6738713979721069, |
|
"learning_rate": 3.7662130115485314e-05, |
|
"loss": 0.1335, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 6.195555555555556, |
|
"grad_norm": 0.7145228385925293, |
|
"learning_rate": 3.7511176091638653e-05, |
|
"loss": 0.1191, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 6.204444444444444, |
|
"grad_norm": 0.5776643753051758, |
|
"learning_rate": 3.7360343378705124e-05, |
|
"loss": 0.1195, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 6.213333333333333, |
|
"grad_norm": 0.5871068239212036, |
|
"learning_rate": 3.7209633441807e-05, |
|
"loss": 0.1329, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 6.222222222222222, |
|
"grad_norm": 0.5175588130950928, |
|
"learning_rate": 3.705904774487396e-05, |
|
"loss": 0.124, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 6.231111111111111, |
|
"grad_norm": 0.4751562774181366, |
|
"learning_rate": 3.69085877506289e-05, |
|
"loss": 0.1393, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 6.24, |
|
"grad_norm": 0.48501744866371155, |
|
"learning_rate": 3.675825492057364e-05, |
|
"loss": 0.1205, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 6.248888888888889, |
|
"grad_norm": 0.43550392985343933, |
|
"learning_rate": 3.660805071497485e-05, |
|
"loss": 0.156, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 6.257777777777778, |
|
"grad_norm": 0.761576235294342, |
|
"learning_rate": 3.6457976592849754e-05, |
|
"loss": 0.1583, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 6.266666666666667, |
|
"grad_norm": 0.6814683079719543, |
|
"learning_rate": 3.6308034011952e-05, |
|
"loss": 0.1124, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 6.275555555555556, |
|
"grad_norm": 0.5146603584289551, |
|
"learning_rate": 3.6158224428757535e-05, |
|
"loss": 0.1126, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 6.2844444444444445, |
|
"grad_norm": 0.5021145343780518, |
|
"learning_rate": 3.60085492984504e-05, |
|
"loss": 0.0965, |
|
"step": 707 |
|
}, |
|
{ |
|
"epoch": 6.293333333333333, |
|
"grad_norm": 0.5636239647865295, |
|
"learning_rate": 3.585901007490863e-05, |
|
"loss": 0.1003, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 6.302222222222222, |
|
"grad_norm": 0.783494770526886, |
|
"learning_rate": 3.5709608210690125e-05, |
|
"loss": 0.0869, |
|
"step": 709 |
|
}, |
|
{ |
|
"epoch": 6.311111111111111, |
|
"grad_norm": 0.5414121150970459, |
|
"learning_rate": 3.556034515701852e-05, |
|
"loss": 0.1572, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 6.32, |
|
"grad_norm": 0.7618036270141602, |
|
"learning_rate": 3.541122236376911e-05, |
|
"loss": 0.1396, |
|
"step": 711 |
|
}, |
|
{ |
|
"epoch": 6.328888888888889, |
|
"grad_norm": 0.48708009719848633, |
|
"learning_rate": 3.5262241279454785e-05, |
|
"loss": 0.1151, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 6.337777777777778, |
|
"grad_norm": 0.4765779376029968, |
|
"learning_rate": 3.5113403351211935e-05, |
|
"loss": 0.1119, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 6.346666666666667, |
|
"grad_norm": 0.5376085042953491, |
|
"learning_rate": 3.4964710024786354e-05, |
|
"loss": 0.1194, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 6.355555555555555, |
|
"grad_norm": 0.7036638259887695, |
|
"learning_rate": 3.4816162744519263e-05, |
|
"loss": 0.1572, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 6.364444444444445, |
|
"grad_norm": 0.6036682724952698, |
|
"learning_rate": 3.4667762953333295e-05, |
|
"loss": 0.107, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 6.373333333333333, |
|
"grad_norm": 0.843076229095459, |
|
"learning_rate": 3.4519512092718354e-05, |
|
"loss": 0.1489, |
|
"step": 717 |
|
}, |
|
{ |
|
"epoch": 6.3822222222222225, |
|
"grad_norm": 0.9905326962471008, |
|
"learning_rate": 3.4371411602717784e-05, |
|
"loss": 0.1203, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 6.391111111111111, |
|
"grad_norm": 0.4549235999584198, |
|
"learning_rate": 3.422346292191424e-05, |
|
"loss": 0.1171, |
|
"step": 719 |
|
}, |
|
{ |
|
"epoch": 6.4, |
|
"grad_norm": 0.5710920095443726, |
|
"learning_rate": 3.4075667487415785e-05, |
|
"loss": 0.162, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 6.408888888888889, |
|
"grad_norm": 0.6092873215675354, |
|
"learning_rate": 3.392802673484193e-05, |
|
"loss": 0.1448, |
|
"step": 721 |
|
}, |
|
{ |
|
"epoch": 6.417777777777777, |
|
"grad_norm": 0.5983030796051025, |
|
"learning_rate": 3.3780542098309654e-05, |
|
"loss": 0.1172, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 6.426666666666667, |
|
"grad_norm": 0.6922889351844788, |
|
"learning_rate": 3.36332150104195e-05, |
|
"loss": 0.1098, |
|
"step": 723 |
|
}, |
|
{ |
|
"epoch": 6.435555555555555, |
|
"grad_norm": 0.6339572072029114, |
|
"learning_rate": 3.3486046902241664e-05, |
|
"loss": 0.1398, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 6.444444444444445, |
|
"grad_norm": 0.5848474502563477, |
|
"learning_rate": 3.333903920330205e-05, |
|
"loss": 0.1581, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 6.453333333333333, |
|
"grad_norm": 0.48497921228408813, |
|
"learning_rate": 3.319219334156847e-05, |
|
"loss": 0.1454, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 6.4622222222222225, |
|
"grad_norm": 0.5115544199943542, |
|
"learning_rate": 3.3045510743436665e-05, |
|
"loss": 0.1105, |
|
"step": 727 |
|
}, |
|
{ |
|
"epoch": 6.471111111111111, |
|
"grad_norm": 0.7270033359527588, |
|
"learning_rate": 3.289899283371657e-05, |
|
"loss": 0.1041, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 6.48, |
|
"grad_norm": 0.6745510697364807, |
|
"learning_rate": 3.275264103561834e-05, |
|
"loss": 0.1349, |
|
"step": 729 |
|
}, |
|
{ |
|
"epoch": 6.488888888888889, |
|
"grad_norm": 0.4949975609779358, |
|
"learning_rate": 3.2606456770738636e-05, |
|
"loss": 0.1741, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 6.497777777777777, |
|
"grad_norm": 0.5432770252227783, |
|
"learning_rate": 3.246044145904677e-05, |
|
"loss": 0.1235, |
|
"step": 731 |
|
}, |
|
{ |
|
"epoch": 6.506666666666667, |
|
"grad_norm": 0.583692729473114, |
|
"learning_rate": 3.231459651887093e-05, |
|
"loss": 0.1151, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 6.515555555555555, |
|
"grad_norm": 0.4701627194881439, |
|
"learning_rate": 3.216892336688435e-05, |
|
"loss": 0.0989, |
|
"step": 733 |
|
}, |
|
{ |
|
"epoch": 6.524444444444445, |
|
"grad_norm": 0.6878033876419067, |
|
"learning_rate": 3.2023423418091626e-05, |
|
"loss": 0.1207, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 6.533333333333333, |
|
"grad_norm": 0.7487357258796692, |
|
"learning_rate": 3.1878098085814924e-05, |
|
"loss": 0.1652, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 6.542222222222223, |
|
"grad_norm": 0.8228577971458435, |
|
"learning_rate": 3.173294878168025e-05, |
|
"loss": 0.097, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 6.551111111111111, |
|
"grad_norm": 0.9791572690010071, |
|
"learning_rate": 3.158797691560378e-05, |
|
"loss": 0.1128, |
|
"step": 737 |
|
}, |
|
{ |
|
"epoch": 6.5600000000000005, |
|
"grad_norm": 0.4378013610839844, |
|
"learning_rate": 3.1443183895778105e-05, |
|
"loss": 0.101, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 6.568888888888889, |
|
"grad_norm": 0.5853977799415588, |
|
"learning_rate": 3.129857112865859e-05, |
|
"loss": 0.1425, |
|
"step": 739 |
|
}, |
|
{ |
|
"epoch": 6.5777777777777775, |
|
"grad_norm": 0.5795565247535706, |
|
"learning_rate": 3.115414001894974e-05, |
|
"loss": 0.1255, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 6.586666666666667, |
|
"grad_norm": 0.5457404851913452, |
|
"learning_rate": 3.100989196959145e-05, |
|
"loss": 0.1008, |
|
"step": 741 |
|
}, |
|
{ |
|
"epoch": 6.595555555555555, |
|
"grad_norm": 0.7424979209899902, |
|
"learning_rate": 3.086582838174551e-05, |
|
"loss": 0.1573, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 6.604444444444445, |
|
"grad_norm": 0.7952689528465271, |
|
"learning_rate": 3.072195065478192e-05, |
|
"loss": 0.1342, |
|
"step": 743 |
|
}, |
|
{ |
|
"epoch": 6.613333333333333, |
|
"grad_norm": 0.5833268761634827, |
|
"learning_rate": 3.0578260186265265e-05, |
|
"loss": 0.0916, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 6.622222222222222, |
|
"grad_norm": 0.6129670739173889, |
|
"learning_rate": 3.0434758371941258e-05, |
|
"loss": 0.0802, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 6.631111111111111, |
|
"grad_norm": 0.6553269028663635, |
|
"learning_rate": 3.029144660572304e-05, |
|
"loss": 0.1206, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 6.64, |
|
"grad_norm": 0.7755371928215027, |
|
"learning_rate": 3.014832627967775e-05, |
|
"loss": 0.1356, |
|
"step": 747 |
|
}, |
|
{ |
|
"epoch": 6.648888888888889, |
|
"grad_norm": 0.5356115102767944, |
|
"learning_rate": 3.000539878401296e-05, |
|
"loss": 0.1344, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 6.657777777777778, |
|
"grad_norm": 0.6588380932807922, |
|
"learning_rate": 2.9862665507063147e-05, |
|
"loss": 0.1449, |
|
"step": 749 |
|
}, |
|
{ |
|
"epoch": 6.666666666666667, |
|
"grad_norm": 0.5725284218788147, |
|
"learning_rate": 2.9720127835276256e-05, |
|
"loss": 0.1202, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 6.6755555555555555, |
|
"grad_norm": 0.6672924160957336, |
|
"learning_rate": 2.9577787153200197e-05, |
|
"loss": 0.0947, |
|
"step": 751 |
|
}, |
|
{ |
|
"epoch": 6.684444444444445, |
|
"grad_norm": 0.6932110786437988, |
|
"learning_rate": 2.9435644843469436e-05, |
|
"loss": 0.1466, |
|
"step": 752 |
|
}, |
|
{ |
|
"epoch": 6.693333333333333, |
|
"grad_norm": 0.807898998260498, |
|
"learning_rate": 2.929370228679149e-05, |
|
"loss": 0.1349, |
|
"step": 753 |
|
}, |
|
{ |
|
"epoch": 6.702222222222222, |
|
"grad_norm": 0.4961835443973541, |
|
"learning_rate": 2.9151960861933614e-05, |
|
"loss": 0.1696, |
|
"step": 754 |
|
}, |
|
{ |
|
"epoch": 6.711111111111111, |
|
"grad_norm": 1.539179801940918, |
|
"learning_rate": 2.901042194570931e-05, |
|
"loss": 0.129, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 6.72, |
|
"grad_norm": 0.46866491436958313, |
|
"learning_rate": 2.886908691296504e-05, |
|
"loss": 0.1299, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 6.728888888888889, |
|
"grad_norm": 0.5181779265403748, |
|
"learning_rate": 2.8727957136566823e-05, |
|
"loss": 0.1131, |
|
"step": 757 |
|
}, |
|
{ |
|
"epoch": 6.737777777777778, |
|
"grad_norm": 0.785124659538269, |
|
"learning_rate": 2.858703398738686e-05, |
|
"loss": 0.1055, |
|
"step": 758 |
|
}, |
|
{ |
|
"epoch": 6.746666666666667, |
|
"grad_norm": 0.4711576998233795, |
|
"learning_rate": 2.8446318834290358e-05, |
|
"loss": 0.1572, |
|
"step": 759 |
|
}, |
|
{ |
|
"epoch": 6.7555555555555555, |
|
"grad_norm": 0.5323825478553772, |
|
"learning_rate": 2.8305813044122097e-05, |
|
"loss": 0.1385, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 6.764444444444445, |
|
"grad_norm": 0.6163938641548157, |
|
"learning_rate": 2.8165517981693157e-05, |
|
"loss": 0.1445, |
|
"step": 761 |
|
}, |
|
{ |
|
"epoch": 6.773333333333333, |
|
"grad_norm": 0.6116064190864563, |
|
"learning_rate": 2.8025435009767747e-05, |
|
"loss": 0.0994, |
|
"step": 762 |
|
}, |
|
{ |
|
"epoch": 6.782222222222222, |
|
"grad_norm": 0.6490322947502136, |
|
"learning_rate": 2.7885565489049946e-05, |
|
"loss": 0.1374, |
|
"step": 763 |
|
}, |
|
{ |
|
"epoch": 6.791111111111111, |
|
"grad_norm": 0.8776857852935791, |
|
"learning_rate": 2.774591077817038e-05, |
|
"loss": 0.1, |
|
"step": 764 |
|
}, |
|
{ |
|
"epoch": 6.8, |
|
"grad_norm": 0.44012898206710815, |
|
"learning_rate": 2.7606472233673186e-05, |
|
"loss": 0.1526, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 6.808888888888889, |
|
"grad_norm": 0.5889332890510559, |
|
"learning_rate": 2.746725121000273e-05, |
|
"loss": 0.1418, |
|
"step": 766 |
|
}, |
|
{ |
|
"epoch": 6.817777777777778, |
|
"grad_norm": 0.5039485692977905, |
|
"learning_rate": 2.732824905949049e-05, |
|
"loss": 0.175, |
|
"step": 767 |
|
}, |
|
{ |
|
"epoch": 6.826666666666666, |
|
"grad_norm": 0.5044254660606384, |
|
"learning_rate": 2.718946713234185e-05, |
|
"loss": 0.1538, |
|
"step": 768 |
|
}, |
|
{ |
|
"epoch": 6.835555555555556, |
|
"grad_norm": 0.43058761954307556, |
|
"learning_rate": 2.705090677662311e-05, |
|
"loss": 0.1376, |
|
"step": 769 |
|
}, |
|
{ |
|
"epoch": 6.844444444444444, |
|
"grad_norm": 0.7214236259460449, |
|
"learning_rate": 2.6912569338248315e-05, |
|
"loss": 0.1195, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 6.8533333333333335, |
|
"grad_norm": 1.1327557563781738, |
|
"learning_rate": 2.6774456160966122e-05, |
|
"loss": 0.1503, |
|
"step": 771 |
|
}, |
|
{ |
|
"epoch": 6.862222222222222, |
|
"grad_norm": 0.45260992646217346, |
|
"learning_rate": 2.66365685863469e-05, |
|
"loss": 0.1429, |
|
"step": 772 |
|
}, |
|
{ |
|
"epoch": 6.871111111111111, |
|
"grad_norm": 0.46228495240211487, |
|
"learning_rate": 2.6498907953769582e-05, |
|
"loss": 0.0988, |
|
"step": 773 |
|
}, |
|
{ |
|
"epoch": 6.88, |
|
"grad_norm": 0.5020700097084045, |
|
"learning_rate": 2.636147560040866e-05, |
|
"loss": 0.1107, |
|
"step": 774 |
|
}, |
|
{ |
|
"epoch": 6.888888888888889, |
|
"grad_norm": 0.45958662033081055, |
|
"learning_rate": 2.6224272861221243e-05, |
|
"loss": 0.1518, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 6.897777777777778, |
|
"grad_norm": 0.5302269458770752, |
|
"learning_rate": 2.6087301068934106e-05, |
|
"loss": 0.1464, |
|
"step": 776 |
|
}, |
|
{ |
|
"epoch": 6.906666666666666, |
|
"grad_norm": 0.432018518447876, |
|
"learning_rate": 2.595056155403063e-05, |
|
"loss": 0.1781, |
|
"step": 777 |
|
}, |
|
{ |
|
"epoch": 6.915555555555556, |
|
"grad_norm": 0.48671501874923706, |
|
"learning_rate": 2.581405564473801e-05, |
|
"loss": 0.1276, |
|
"step": 778 |
|
}, |
|
{ |
|
"epoch": 6.924444444444444, |
|
"grad_norm": 0.4488458037376404, |
|
"learning_rate": 2.567778466701431e-05, |
|
"loss": 0.1344, |
|
"step": 779 |
|
}, |
|
{ |
|
"epoch": 6.933333333333334, |
|
"grad_norm": 0.47056058049201965, |
|
"learning_rate": 2.5541749944535554e-05, |
|
"loss": 0.1317, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 6.942222222222222, |
|
"grad_norm": 0.6379369497299194, |
|
"learning_rate": 2.5405952798682842e-05, |
|
"loss": 0.1521, |
|
"step": 781 |
|
}, |
|
{ |
|
"epoch": 6.9511111111111115, |
|
"grad_norm": 0.8954217433929443, |
|
"learning_rate": 2.527039454852963e-05, |
|
"loss": 0.1355, |
|
"step": 782 |
|
}, |
|
{ |
|
"epoch": 6.96, |
|
"grad_norm": 0.6195422410964966, |
|
"learning_rate": 2.5135076510828837e-05, |
|
"loss": 0.1059, |
|
"step": 783 |
|
}, |
|
{ |
|
"epoch": 6.968888888888889, |
|
"grad_norm": 0.6319138407707214, |
|
"learning_rate": 2.500000000000001e-05, |
|
"loss": 0.1139, |
|
"step": 784 |
|
}, |
|
{ |
|
"epoch": 6.977777777777778, |
|
"grad_norm": 0.49685609340667725, |
|
"learning_rate": 2.486516632811666e-05, |
|
"loss": 0.1271, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 6.986666666666666, |
|
"grad_norm": 0.7997812628746033, |
|
"learning_rate": 2.473057680489348e-05, |
|
"loss": 0.1291, |
|
"step": 786 |
|
}, |
|
{ |
|
"epoch": 6.995555555555556, |
|
"grad_norm": 0.609612226486206, |
|
"learning_rate": 2.459623273767354e-05, |
|
"loss": 0.1428, |
|
"step": 787 |
|
}, |
|
{ |
|
"epoch": 7.004444444444444, |
|
"grad_norm": 0.4185393452644348, |
|
"learning_rate": 2.4462135431415733e-05, |
|
"loss": 0.1231, |
|
"step": 788 |
|
}, |
|
{ |
|
"epoch": 7.013333333333334, |
|
"grad_norm": 0.6450554132461548, |
|
"learning_rate": 2.4328286188681987e-05, |
|
"loss": 0.101, |
|
"step": 789 |
|
}, |
|
{ |
|
"epoch": 7.022222222222222, |
|
"grad_norm": 0.4551238417625427, |
|
"learning_rate": 2.4194686309624663e-05, |
|
"loss": 0.1042, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 7.0311111111111115, |
|
"grad_norm": 0.7651455998420715, |
|
"learning_rate": 2.4061337091973918e-05, |
|
"loss": 0.1206, |
|
"step": 791 |
|
}, |
|
{ |
|
"epoch": 7.04, |
|
"grad_norm": 0.46599751710891724, |
|
"learning_rate": 2.39282398310251e-05, |
|
"loss": 0.1477, |
|
"step": 792 |
|
}, |
|
{ |
|
"epoch": 7.0488888888888885, |
|
"grad_norm": 0.46619459986686707, |
|
"learning_rate": 2.3795395819626114e-05, |
|
"loss": 0.1203, |
|
"step": 793 |
|
}, |
|
{ |
|
"epoch": 7.057777777777778, |
|
"grad_norm": 1.243935465812683, |
|
"learning_rate": 2.366280634816496e-05, |
|
"loss": 0.1135, |
|
"step": 794 |
|
}, |
|
{ |
|
"epoch": 7.066666666666666, |
|
"grad_norm": 0.5829083919525146, |
|
"learning_rate": 2.3530472704557145e-05, |
|
"loss": 0.0728, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 7.075555555555556, |
|
"grad_norm": 0.5312824845314026, |
|
"learning_rate": 2.3398396174233178e-05, |
|
"loss": 0.0761, |
|
"step": 796 |
|
}, |
|
{ |
|
"epoch": 7.084444444444444, |
|
"grad_norm": 0.6193990111351013, |
|
"learning_rate": 2.3266578040126036e-05, |
|
"loss": 0.0875, |
|
"step": 797 |
|
}, |
|
{ |
|
"epoch": 7.093333333333334, |
|
"grad_norm": 0.4868319034576416, |
|
"learning_rate": 2.3135019582658802e-05, |
|
"loss": 0.0876, |
|
"step": 798 |
|
}, |
|
{ |
|
"epoch": 7.102222222222222, |
|
"grad_norm": 0.4497816860675812, |
|
"learning_rate": 2.300372207973219e-05, |
|
"loss": 0.0989, |
|
"step": 799 |
|
}, |
|
{ |
|
"epoch": 7.111111111111111, |
|
"grad_norm": 0.4699987769126892, |
|
"learning_rate": 2.2872686806712035e-05, |
|
"loss": 0.1211, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 7.12, |
|
"grad_norm": 0.7330834865570068, |
|
"learning_rate": 2.274191503641706e-05, |
|
"loss": 0.0784, |
|
"step": 801 |
|
}, |
|
{ |
|
"epoch": 7.128888888888889, |
|
"grad_norm": 0.3836953938007355, |
|
"learning_rate": 2.261140803910644e-05, |
|
"loss": 0.0889, |
|
"step": 802 |
|
}, |
|
{ |
|
"epoch": 7.137777777777778, |
|
"grad_norm": 0.528725266456604, |
|
"learning_rate": 2.2481167082467403e-05, |
|
"loss": 0.1309, |
|
"step": 803 |
|
}, |
|
{ |
|
"epoch": 7.1466666666666665, |
|
"grad_norm": 0.5461883544921875, |
|
"learning_rate": 2.235119343160303e-05, |
|
"loss": 0.1064, |
|
"step": 804 |
|
}, |
|
{ |
|
"epoch": 7.155555555555556, |
|
"grad_norm": 0.47903427481651306, |
|
"learning_rate": 2.2221488349019903e-05, |
|
"loss": 0.1347, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 7.164444444444444, |
|
"grad_norm": 0.43539464473724365, |
|
"learning_rate": 2.2092053094615813e-05, |
|
"loss": 0.0761, |
|
"step": 806 |
|
}, |
|
{ |
|
"epoch": 7.173333333333334, |
|
"grad_norm": 0.5986665487289429, |
|
"learning_rate": 2.1962888925667603e-05, |
|
"loss": 0.0729, |
|
"step": 807 |
|
}, |
|
{ |
|
"epoch": 7.182222222222222, |
|
"grad_norm": 0.5698485970497131, |
|
"learning_rate": 2.1833997096818898e-05, |
|
"loss": 0.1272, |
|
"step": 808 |
|
}, |
|
{ |
|
"epoch": 7.191111111111111, |
|
"grad_norm": 0.5316697359085083, |
|
"learning_rate": 2.1705378860067944e-05, |
|
"loss": 0.1396, |
|
"step": 809 |
|
}, |
|
{ |
|
"epoch": 7.2, |
|
"grad_norm": 0.5823459029197693, |
|
"learning_rate": 2.157703546475539e-05, |
|
"loss": 0.1466, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 7.208888888888889, |
|
"grad_norm": 0.4261781573295593, |
|
"learning_rate": 2.144896815755224e-05, |
|
"loss": 0.0868, |
|
"step": 811 |
|
}, |
|
{ |
|
"epoch": 7.217777777777778, |
|
"grad_norm": 0.47086384892463684, |
|
"learning_rate": 2.132117818244771e-05, |
|
"loss": 0.15, |
|
"step": 812 |
|
}, |
|
{ |
|
"epoch": 7.226666666666667, |
|
"grad_norm": 0.43166688084602356, |
|
"learning_rate": 2.119366678073707e-05, |
|
"loss": 0.0989, |
|
"step": 813 |
|
}, |
|
{ |
|
"epoch": 7.235555555555556, |
|
"grad_norm": 0.7396053075790405, |
|
"learning_rate": 2.1066435191009715e-05, |
|
"loss": 0.0756, |
|
"step": 814 |
|
}, |
|
{ |
|
"epoch": 7.2444444444444445, |
|
"grad_norm": 0.4421260952949524, |
|
"learning_rate": 2.093948464913707e-05, |
|
"loss": 0.1058, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 7.253333333333333, |
|
"grad_norm": 0.9845119714736938, |
|
"learning_rate": 2.0812816388260518e-05, |
|
"loss": 0.1217, |
|
"step": 816 |
|
}, |
|
{ |
|
"epoch": 7.262222222222222, |
|
"grad_norm": 0.5246585607528687, |
|
"learning_rate": 2.0686431638779564e-05, |
|
"loss": 0.0857, |
|
"step": 817 |
|
}, |
|
{ |
|
"epoch": 7.271111111111111, |
|
"grad_norm": 0.4285733997821808, |
|
"learning_rate": 2.056033162833977e-05, |
|
"loss": 0.0885, |
|
"step": 818 |
|
}, |
|
{ |
|
"epoch": 7.28, |
|
"grad_norm": 1.0954898595809937, |
|
"learning_rate": 2.0434517581820896e-05, |
|
"loss": 0.1127, |
|
"step": 819 |
|
}, |
|
{ |
|
"epoch": 7.288888888888889, |
|
"grad_norm": 0.6235244274139404, |
|
"learning_rate": 2.0308990721324927e-05, |
|
"loss": 0.1219, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 7.297777777777778, |
|
"grad_norm": 1.4151173830032349, |
|
"learning_rate": 2.01837522661643e-05, |
|
"loss": 0.1308, |
|
"step": 821 |
|
}, |
|
{ |
|
"epoch": 7.306666666666667, |
|
"grad_norm": 0.6299641132354736, |
|
"learning_rate": 2.0058803432849987e-05, |
|
"loss": 0.1403, |
|
"step": 822 |
|
}, |
|
{ |
|
"epoch": 7.315555555555555, |
|
"grad_norm": 0.5858526825904846, |
|
"learning_rate": 1.9934145435079702e-05, |
|
"loss": 0.0698, |
|
"step": 823 |
|
}, |
|
{ |
|
"epoch": 7.3244444444444445, |
|
"grad_norm": 0.43147486448287964, |
|
"learning_rate": 1.980977948372612e-05, |
|
"loss": 0.1261, |
|
"step": 824 |
|
}, |
|
{ |
|
"epoch": 7.333333333333333, |
|
"grad_norm": 0.6104679703712463, |
|
"learning_rate": 1.9685706786825114e-05, |
|
"loss": 0.0826, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 7.342222222222222, |
|
"grad_norm": 0.48661118745803833, |
|
"learning_rate": 1.9561928549563968e-05, |
|
"loss": 0.1047, |
|
"step": 826 |
|
}, |
|
{ |
|
"epoch": 7.351111111111111, |
|
"grad_norm": 0.5598762035369873, |
|
"learning_rate": 1.9438445974269754e-05, |
|
"loss": 0.1111, |
|
"step": 827 |
|
}, |
|
{ |
|
"epoch": 7.36, |
|
"grad_norm": 0.6354761719703674, |
|
"learning_rate": 1.931526026039764e-05, |
|
"loss": 0.1158, |
|
"step": 828 |
|
}, |
|
{ |
|
"epoch": 7.368888888888889, |
|
"grad_norm": 0.59492427110672, |
|
"learning_rate": 1.9192372604519127e-05, |
|
"loss": 0.1495, |
|
"step": 829 |
|
}, |
|
{ |
|
"epoch": 7.377777777777778, |
|
"grad_norm": 0.5921106934547424, |
|
"learning_rate": 1.906978420031059e-05, |
|
"loss": 0.1477, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 7.386666666666667, |
|
"grad_norm": 0.5231952667236328, |
|
"learning_rate": 1.8947496238541612e-05, |
|
"loss": 0.072, |
|
"step": 831 |
|
}, |
|
{ |
|
"epoch": 7.395555555555555, |
|
"grad_norm": 0.7299399971961975, |
|
"learning_rate": 1.8825509907063327e-05, |
|
"loss": 0.0799, |
|
"step": 832 |
|
}, |
|
{ |
|
"epoch": 7.404444444444445, |
|
"grad_norm": 0.545674204826355, |
|
"learning_rate": 1.8703826390797048e-05, |
|
"loss": 0.1354, |
|
"step": 833 |
|
}, |
|
{ |
|
"epoch": 7.413333333333333, |
|
"grad_norm": 0.5333763360977173, |
|
"learning_rate": 1.8582446871722636e-05, |
|
"loss": 0.0966, |
|
"step": 834 |
|
}, |
|
{ |
|
"epoch": 7.4222222222222225, |
|
"grad_norm": 0.6203194260597229, |
|
"learning_rate": 1.8461372528867093e-05, |
|
"loss": 0.1415, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 7.431111111111111, |
|
"grad_norm": 0.39000070095062256, |
|
"learning_rate": 1.8340604538293015e-05, |
|
"loss": 0.1476, |
|
"step": 836 |
|
}, |
|
{ |
|
"epoch": 7.44, |
|
"grad_norm": 0.8264181613922119, |
|
"learning_rate": 1.822014407308727e-05, |
|
"loss": 0.1139, |
|
"step": 837 |
|
}, |
|
{ |
|
"epoch": 7.448888888888889, |
|
"grad_norm": 0.4161231815814972, |
|
"learning_rate": 1.8099992303349577e-05, |
|
"loss": 0.1079, |
|
"step": 838 |
|
}, |
|
{ |
|
"epoch": 7.457777777777777, |
|
"grad_norm": 0.48988258838653564, |
|
"learning_rate": 1.798015039618106e-05, |
|
"loss": 0.0761, |
|
"step": 839 |
|
}, |
|
{ |
|
"epoch": 7.466666666666667, |
|
"grad_norm": 0.573543906211853, |
|
"learning_rate": 1.7860619515673033e-05, |
|
"loss": 0.1233, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 7.475555555555555, |
|
"grad_norm": 1.0791929960250854, |
|
"learning_rate": 1.774140082289563e-05, |
|
"loss": 0.0833, |
|
"step": 841 |
|
}, |
|
{ |
|
"epoch": 7.484444444444445, |
|
"grad_norm": 0.7512285113334656, |
|
"learning_rate": 1.7622495475886487e-05, |
|
"loss": 0.1107, |
|
"step": 842 |
|
}, |
|
{ |
|
"epoch": 7.493333333333333, |
|
"grad_norm": 0.4891441762447357, |
|
"learning_rate": 1.7503904629639574e-05, |
|
"loss": 0.0675, |
|
"step": 843 |
|
}, |
|
{ |
|
"epoch": 7.502222222222223, |
|
"grad_norm": 0.3364730775356293, |
|
"learning_rate": 1.738562943609396e-05, |
|
"loss": 0.0884, |
|
"step": 844 |
|
}, |
|
{ |
|
"epoch": 7.511111111111111, |
|
"grad_norm": 1.6392979621887207, |
|
"learning_rate": 1.7267671044122534e-05, |
|
"loss": 0.1564, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 7.52, |
|
"grad_norm": 0.6125866770744324, |
|
"learning_rate": 1.7150030599520984e-05, |
|
"loss": 0.1495, |
|
"step": 846 |
|
}, |
|
{ |
|
"epoch": 7.528888888888889, |
|
"grad_norm": 0.6755143404006958, |
|
"learning_rate": 1.703270924499656e-05, |
|
"loss": 0.0962, |
|
"step": 847 |
|
}, |
|
{ |
|
"epoch": 7.5377777777777775, |
|
"grad_norm": 0.5426997542381287, |
|
"learning_rate": 1.691570812015704e-05, |
|
"loss": 0.0945, |
|
"step": 848 |
|
}, |
|
{ |
|
"epoch": 7.546666666666667, |
|
"grad_norm": 0.6936843395233154, |
|
"learning_rate": 1.679902836149959e-05, |
|
"loss": 0.1192, |
|
"step": 849 |
|
}, |
|
{ |
|
"epoch": 7.555555555555555, |
|
"grad_norm": 0.5444396138191223, |
|
"learning_rate": 1.6682671102399805e-05, |
|
"loss": 0.0844, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 7.564444444444445, |
|
"grad_norm": 0.5248525142669678, |
|
"learning_rate": 1.6566637473100667e-05, |
|
"loss": 0.1279, |
|
"step": 851 |
|
}, |
|
{ |
|
"epoch": 7.573333333333333, |
|
"grad_norm": 0.488179087638855, |
|
"learning_rate": 1.6450928600701504e-05, |
|
"loss": 0.1259, |
|
"step": 852 |
|
}, |
|
{ |
|
"epoch": 7.582222222222223, |
|
"grad_norm": 0.4678577184677124, |
|
"learning_rate": 1.6335545609147142e-05, |
|
"loss": 0.0997, |
|
"step": 853 |
|
}, |
|
{ |
|
"epoch": 7.591111111111111, |
|
"grad_norm": 0.5950666666030884, |
|
"learning_rate": 1.622048961921699e-05, |
|
"loss": 0.105, |
|
"step": 854 |
|
}, |
|
{ |
|
"epoch": 7.6, |
|
"grad_norm": 0.7520263195037842, |
|
"learning_rate": 1.6105761748514007e-05, |
|
"loss": 0.143, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 7.608888888888889, |
|
"grad_norm": 0.45088163018226624, |
|
"learning_rate": 1.599136311145402e-05, |
|
"loss": 0.1162, |
|
"step": 856 |
|
}, |
|
{ |
|
"epoch": 7.6177777777777775, |
|
"grad_norm": 0.46655556559562683, |
|
"learning_rate": 1.5877294819254822e-05, |
|
"loss": 0.1449, |
|
"step": 857 |
|
}, |
|
{ |
|
"epoch": 7.626666666666667, |
|
"grad_norm": 0.5721983313560486, |
|
"learning_rate": 1.5763557979925324e-05, |
|
"loss": 0.1131, |
|
"step": 858 |
|
}, |
|
{ |
|
"epoch": 7.635555555555555, |
|
"grad_norm": 0.5537592172622681, |
|
"learning_rate": 1.5650153698254916e-05, |
|
"loss": 0.1206, |
|
"step": 859 |
|
}, |
|
{ |
|
"epoch": 7.644444444444445, |
|
"grad_norm": 0.4676898419857025, |
|
"learning_rate": 1.553708307580265e-05, |
|
"loss": 0.1245, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 7.653333333333333, |
|
"grad_norm": 0.9704411625862122, |
|
"learning_rate": 1.5424347210886538e-05, |
|
"loss": 0.0839, |
|
"step": 861 |
|
}, |
|
{ |
|
"epoch": 7.662222222222223, |
|
"grad_norm": 0.5594210624694824, |
|
"learning_rate": 1.531194719857292e-05, |
|
"loss": 0.0942, |
|
"step": 862 |
|
}, |
|
{ |
|
"epoch": 7.671111111111111, |
|
"grad_norm": 0.8229500651359558, |
|
"learning_rate": 1.5199884130665831e-05, |
|
"loss": 0.0732, |
|
"step": 863 |
|
}, |
|
{ |
|
"epoch": 7.68, |
|
"grad_norm": 0.5628749132156372, |
|
"learning_rate": 1.5088159095696363e-05, |
|
"loss": 0.1224, |
|
"step": 864 |
|
}, |
|
{ |
|
"epoch": 7.688888888888889, |
|
"grad_norm": 0.6322095394134521, |
|
"learning_rate": 1.4976773178912084e-05, |
|
"loss": 0.0854, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 7.697777777777778, |
|
"grad_norm": 0.7371559143066406, |
|
"learning_rate": 1.4865727462266543e-05, |
|
"loss": 0.1024, |
|
"step": 866 |
|
}, |
|
{ |
|
"epoch": 7.706666666666667, |
|
"grad_norm": 1.2312216758728027, |
|
"learning_rate": 1.4755023024408748e-05, |
|
"loss": 0.14, |
|
"step": 867 |
|
}, |
|
{ |
|
"epoch": 7.7155555555555555, |
|
"grad_norm": 0.7259622812271118, |
|
"learning_rate": 1.4644660940672627e-05, |
|
"loss": 0.0879, |
|
"step": 868 |
|
}, |
|
{ |
|
"epoch": 7.724444444444444, |
|
"grad_norm": 0.6174317002296448, |
|
"learning_rate": 1.453464228306668e-05, |
|
"loss": 0.0905, |
|
"step": 869 |
|
}, |
|
{ |
|
"epoch": 7.733333333333333, |
|
"grad_norm": 0.46135640144348145, |
|
"learning_rate": 1.4424968120263504e-05, |
|
"loss": 0.1019, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 7.742222222222222, |
|
"grad_norm": 0.983392596244812, |
|
"learning_rate": 1.4315639517589397e-05, |
|
"loss": 0.1439, |
|
"step": 871 |
|
}, |
|
{ |
|
"epoch": 7.751111111111111, |
|
"grad_norm": 0.5525241494178772, |
|
"learning_rate": 1.4206657537014079e-05, |
|
"loss": 0.1483, |
|
"step": 872 |
|
}, |
|
{ |
|
"epoch": 7.76, |
|
"grad_norm": 0.4846337139606476, |
|
"learning_rate": 1.4098023237140334e-05, |
|
"loss": 0.145, |
|
"step": 873 |
|
}, |
|
{ |
|
"epoch": 7.768888888888889, |
|
"grad_norm": 0.5754955410957336, |
|
"learning_rate": 1.398973767319368e-05, |
|
"loss": 0.0999, |
|
"step": 874 |
|
}, |
|
{ |
|
"epoch": 7.777777777777778, |
|
"grad_norm": 0.567977786064148, |
|
"learning_rate": 1.3881801897012225e-05, |
|
"loss": 0.1267, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 7.786666666666667, |
|
"grad_norm": 0.6116244792938232, |
|
"learning_rate": 1.3774216957036367e-05, |
|
"loss": 0.0912, |
|
"step": 876 |
|
}, |
|
{ |
|
"epoch": 7.795555555555556, |
|
"grad_norm": 0.5989839434623718, |
|
"learning_rate": 1.3666983898298657e-05, |
|
"loss": 0.097, |
|
"step": 877 |
|
}, |
|
{ |
|
"epoch": 7.804444444444444, |
|
"grad_norm": 0.6098581552505493, |
|
"learning_rate": 1.3560103762413584e-05, |
|
"loss": 0.0789, |
|
"step": 878 |
|
}, |
|
{ |
|
"epoch": 7.8133333333333335, |
|
"grad_norm": 0.4235369563102722, |
|
"learning_rate": 1.345357758756754e-05, |
|
"loss": 0.1303, |
|
"step": 879 |
|
}, |
|
{ |
|
"epoch": 7.822222222222222, |
|
"grad_norm": 0.5483813881874084, |
|
"learning_rate": 1.3347406408508695e-05, |
|
"loss": 0.0967, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 7.831111111111111, |
|
"grad_norm": 0.5754316449165344, |
|
"learning_rate": 1.3241591256536917e-05, |
|
"loss": 0.0806, |
|
"step": 881 |
|
}, |
|
{ |
|
"epoch": 7.84, |
|
"grad_norm": 1.0643495321273804, |
|
"learning_rate": 1.3136133159493802e-05, |
|
"loss": 0.0715, |
|
"step": 882 |
|
}, |
|
{ |
|
"epoch": 7.848888888888889, |
|
"grad_norm": 0.6817154884338379, |
|
"learning_rate": 1.3031033141752702e-05, |
|
"loss": 0.1125, |
|
"step": 883 |
|
}, |
|
{ |
|
"epoch": 7.857777777777778, |
|
"grad_norm": 0.5241005420684814, |
|
"learning_rate": 1.2926292224208664e-05, |
|
"loss": 0.0945, |
|
"step": 884 |
|
}, |
|
{ |
|
"epoch": 7.866666666666667, |
|
"grad_norm": 0.5982441902160645, |
|
"learning_rate": 1.282191142426869e-05, |
|
"loss": 0.1098, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 7.875555555555556, |
|
"grad_norm": 0.49455299973487854, |
|
"learning_rate": 1.2717891755841722e-05, |
|
"loss": 0.0987, |
|
"step": 886 |
|
}, |
|
{ |
|
"epoch": 7.884444444444444, |
|
"grad_norm": 0.5417470335960388, |
|
"learning_rate": 1.2614234229328792e-05, |
|
"loss": 0.0931, |
|
"step": 887 |
|
}, |
|
{ |
|
"epoch": 7.8933333333333335, |
|
"grad_norm": 0.6845139265060425, |
|
"learning_rate": 1.2510939851613285e-05, |
|
"loss": 0.1042, |
|
"step": 888 |
|
}, |
|
{ |
|
"epoch": 7.902222222222222, |
|
"grad_norm": 0.585252583026886, |
|
"learning_rate": 1.2408009626051137e-05, |
|
"loss": 0.1561, |
|
"step": 889 |
|
}, |
|
{ |
|
"epoch": 7.911111111111111, |
|
"grad_norm": 1.172561764717102, |
|
"learning_rate": 1.230544455246101e-05, |
|
"loss": 0.1128, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 7.92, |
|
"grad_norm": 0.5593766570091248, |
|
"learning_rate": 1.2203245627114685e-05, |
|
"loss": 0.1115, |
|
"step": 891 |
|
}, |
|
{ |
|
"epoch": 7.928888888888888, |
|
"grad_norm": 0.7291374802589417, |
|
"learning_rate": 1.2101413842727345e-05, |
|
"loss": 0.0952, |
|
"step": 892 |
|
}, |
|
{ |
|
"epoch": 7.937777777777778, |
|
"grad_norm": 0.4604465961456299, |
|
"learning_rate": 1.1999950188447922e-05, |
|
"loss": 0.1461, |
|
"step": 893 |
|
}, |
|
{ |
|
"epoch": 7.946666666666666, |
|
"grad_norm": 0.43182215094566345, |
|
"learning_rate": 1.1898855649849461e-05, |
|
"loss": 0.1466, |
|
"step": 894 |
|
}, |
|
{ |
|
"epoch": 7.955555555555556, |
|
"grad_norm": 0.4441966116428375, |
|
"learning_rate": 1.1798131208919627e-05, |
|
"loss": 0.1075, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 7.964444444444444, |
|
"grad_norm": 0.5411422252655029, |
|
"learning_rate": 1.1697777844051105e-05, |
|
"loss": 0.1224, |
|
"step": 896 |
|
}, |
|
{ |
|
"epoch": 7.973333333333334, |
|
"grad_norm": 0.4580567479133606, |
|
"learning_rate": 1.1597796530032085e-05, |
|
"loss": 0.1341, |
|
"step": 897 |
|
}, |
|
{ |
|
"epoch": 7.982222222222222, |
|
"grad_norm": 0.5973615646362305, |
|
"learning_rate": 1.1498188238036861e-05, |
|
"loss": 0.0929, |
|
"step": 898 |
|
}, |
|
{ |
|
"epoch": 7.9911111111111115, |
|
"grad_norm": 0.6847777366638184, |
|
"learning_rate": 1.1398953935616336e-05, |
|
"loss": 0.0985, |
|
"step": 899 |
|
}, |
|
{ |
|
"epoch": 8.0, |
|
"grad_norm": 0.6161903142929077, |
|
"learning_rate": 1.130009458668863e-05, |
|
"loss": 0.1104, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 8.008888888888889, |
|
"grad_norm": 0.46788787841796875, |
|
"learning_rate": 1.1201611151529756e-05, |
|
"loss": 0.0999, |
|
"step": 901 |
|
}, |
|
{ |
|
"epoch": 8.017777777777777, |
|
"grad_norm": 0.3665621876716614, |
|
"learning_rate": 1.1103504586764263e-05, |
|
"loss": 0.0823, |
|
"step": 902 |
|
}, |
|
{ |
|
"epoch": 8.026666666666667, |
|
"grad_norm": 0.4475909173488617, |
|
"learning_rate": 1.100577584535592e-05, |
|
"loss": 0.1003, |
|
"step": 903 |
|
}, |
|
{ |
|
"epoch": 8.035555555555556, |
|
"grad_norm": 0.37525415420532227, |
|
"learning_rate": 1.090842587659851e-05, |
|
"loss": 0.1119, |
|
"step": 904 |
|
}, |
|
{ |
|
"epoch": 8.044444444444444, |
|
"grad_norm": 0.4087243676185608, |
|
"learning_rate": 1.0811455626106599e-05, |
|
"loss": 0.0782, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 8.053333333333333, |
|
"grad_norm": 0.3830251693725586, |
|
"learning_rate": 1.0714866035806326e-05, |
|
"loss": 0.1224, |
|
"step": 906 |
|
}, |
|
{ |
|
"epoch": 8.062222222222223, |
|
"grad_norm": 0.49408161640167236, |
|
"learning_rate": 1.0618658043926233e-05, |
|
"loss": 0.0887, |
|
"step": 907 |
|
}, |
|
{ |
|
"epoch": 8.071111111111112, |
|
"grad_norm": 0.4445248246192932, |
|
"learning_rate": 1.0522832584988234e-05, |
|
"loss": 0.1233, |
|
"step": 908 |
|
}, |
|
{ |
|
"epoch": 8.08, |
|
"grad_norm": 0.7690616250038147, |
|
"learning_rate": 1.0427390589798469e-05, |
|
"loss": 0.1152, |
|
"step": 909 |
|
}, |
|
{ |
|
"epoch": 8.088888888888889, |
|
"grad_norm": 0.4401998221874237, |
|
"learning_rate": 1.0332332985438248e-05, |
|
"loss": 0.1095, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 8.097777777777777, |
|
"grad_norm": 0.4737964868545532, |
|
"learning_rate": 1.023766069525513e-05, |
|
"loss": 0.1122, |
|
"step": 911 |
|
}, |
|
{ |
|
"epoch": 8.106666666666667, |
|
"grad_norm": 0.464690625667572, |
|
"learning_rate": 1.0143374638853891e-05, |
|
"loss": 0.0811, |
|
"step": 912 |
|
}, |
|
{ |
|
"epoch": 8.115555555555556, |
|
"grad_norm": 0.5359342694282532, |
|
"learning_rate": 1.004947573208756e-05, |
|
"loss": 0.076, |
|
"step": 913 |
|
}, |
|
{ |
|
"epoch": 8.124444444444444, |
|
"grad_norm": 0.641705334186554, |
|
"learning_rate": 9.955964887048607e-06, |
|
"loss": 0.1052, |
|
"step": 914 |
|
}, |
|
{ |
|
"epoch": 8.133333333333333, |
|
"grad_norm": 0.4826803505420685, |
|
"learning_rate": 9.862843012060047e-06, |
|
"loss": 0.0885, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 8.142222222222221, |
|
"grad_norm": 0.41360554099082947, |
|
"learning_rate": 9.770111011666583e-06, |
|
"loss": 0.0934, |
|
"step": 916 |
|
}, |
|
{ |
|
"epoch": 8.151111111111112, |
|
"grad_norm": 0.44290822744369507, |
|
"learning_rate": 9.677769786625867e-06, |
|
"loss": 0.1115, |
|
"step": 917 |
|
}, |
|
{ |
|
"epoch": 8.16, |
|
"grad_norm": 1.0914057493209839, |
|
"learning_rate": 9.58582023389974e-06, |
|
"loss": 0.0933, |
|
"step": 918 |
|
}, |
|
{ |
|
"epoch": 8.168888888888889, |
|
"grad_norm": 1.4697805643081665, |
|
"learning_rate": 9.494263246645474e-06, |
|
"loss": 0.154, |
|
"step": 919 |
|
}, |
|
{ |
|
"epoch": 8.177777777777777, |
|
"grad_norm": 0.4320299029350281, |
|
"learning_rate": 9.403099714207175e-06, |
|
"loss": 0.0527, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 8.186666666666667, |
|
"grad_norm": 0.5250918865203857, |
|
"learning_rate": 9.312330522107076e-06, |
|
"loss": 0.1327, |
|
"step": 921 |
|
}, |
|
{ |
|
"epoch": 8.195555555555556, |
|
"grad_norm": 0.49150344729423523, |
|
"learning_rate": 9.221956552036992e-06, |
|
"loss": 0.06, |
|
"step": 922 |
|
}, |
|
{ |
|
"epoch": 8.204444444444444, |
|
"grad_norm": 0.4893438518047333, |
|
"learning_rate": 9.131978681849684e-06, |
|
"loss": 0.1078, |
|
"step": 923 |
|
}, |
|
{ |
|
"epoch": 8.213333333333333, |
|
"grad_norm": 0.6123852133750916, |
|
"learning_rate": 9.042397785550405e-06, |
|
"loss": 0.0889, |
|
"step": 924 |
|
}, |
|
{ |
|
"epoch": 8.222222222222221, |
|
"grad_norm": 0.4057047963142395, |
|
"learning_rate": 8.953214733288383e-06, |
|
"loss": 0.0751, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 8.231111111111112, |
|
"grad_norm": 0.5074282288551331, |
|
"learning_rate": 8.864430391348332e-06, |
|
"loss": 0.092, |
|
"step": 926 |
|
}, |
|
{ |
|
"epoch": 8.24, |
|
"grad_norm": 0.6459361910820007, |
|
"learning_rate": 8.776045622142098e-06, |
|
"loss": 0.0532, |
|
"step": 927 |
|
}, |
|
{ |
|
"epoch": 8.248888888888889, |
|
"grad_norm": 0.4968688189983368, |
|
"learning_rate": 8.688061284200266e-06, |
|
"loss": 0.0794, |
|
"step": 928 |
|
}, |
|
{ |
|
"epoch": 8.257777777777777, |
|
"grad_norm": 0.5141646265983582, |
|
"learning_rate": 8.600478232163755e-06, |
|
"loss": 0.1339, |
|
"step": 929 |
|
}, |
|
{ |
|
"epoch": 8.266666666666667, |
|
"grad_norm": 0.864693820476532, |
|
"learning_rate": 8.513297316775625e-06, |
|
"loss": 0.0959, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 8.275555555555556, |
|
"grad_norm": 0.42371752858161926, |
|
"learning_rate": 8.426519384872733e-06, |
|
"loss": 0.1039, |
|
"step": 931 |
|
}, |
|
{ |
|
"epoch": 8.284444444444444, |
|
"grad_norm": 0.6697008609771729, |
|
"learning_rate": 8.34014527937756e-06, |
|
"loss": 0.0884, |
|
"step": 932 |
|
}, |
|
{ |
|
"epoch": 8.293333333333333, |
|
"grad_norm": 0.47926008701324463, |
|
"learning_rate": 8.254175839289941e-06, |
|
"loss": 0.0839, |
|
"step": 933 |
|
}, |
|
{ |
|
"epoch": 8.302222222222222, |
|
"grad_norm": 0.5035369396209717, |
|
"learning_rate": 8.168611899679013e-06, |
|
"loss": 0.1043, |
|
"step": 934 |
|
}, |
|
{ |
|
"epoch": 8.311111111111112, |
|
"grad_norm": 0.5710785984992981, |
|
"learning_rate": 8.08345429167507e-06, |
|
"loss": 0.0937, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 8.32, |
|
"grad_norm": 0.5553144216537476, |
|
"learning_rate": 7.998703842461431e-06, |
|
"loss": 0.0776, |
|
"step": 936 |
|
}, |
|
{ |
|
"epoch": 8.328888888888889, |
|
"grad_norm": 0.8414843678474426, |
|
"learning_rate": 7.914361375266504e-06, |
|
"loss": 0.0924, |
|
"step": 937 |
|
}, |
|
{ |
|
"epoch": 8.337777777777777, |
|
"grad_norm": 0.4151730537414551, |
|
"learning_rate": 7.830427709355725e-06, |
|
"loss": 0.1119, |
|
"step": 938 |
|
}, |
|
{ |
|
"epoch": 8.346666666666668, |
|
"grad_norm": 0.7383652329444885, |
|
"learning_rate": 7.74690366002359e-06, |
|
"loss": 0.0704, |
|
"step": 939 |
|
}, |
|
{ |
|
"epoch": 8.355555555555556, |
|
"grad_norm": 0.5272123217582703, |
|
"learning_rate": 7.663790038585793e-06, |
|
"loss": 0.0665, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 8.364444444444445, |
|
"grad_norm": 0.4467993378639221, |
|
"learning_rate": 7.581087652371316e-06, |
|
"loss": 0.0703, |
|
"step": 941 |
|
}, |
|
{ |
|
"epoch": 8.373333333333333, |
|
"grad_norm": 0.45421063899993896, |
|
"learning_rate": 7.498797304714544e-06, |
|
"loss": 0.1272, |
|
"step": 942 |
|
}, |
|
{ |
|
"epoch": 8.382222222222222, |
|
"grad_norm": 0.5808837413787842, |
|
"learning_rate": 7.416919794947536e-06, |
|
"loss": 0.094, |
|
"step": 943 |
|
}, |
|
{ |
|
"epoch": 8.391111111111112, |
|
"grad_norm": 0.4749268889427185, |
|
"learning_rate": 7.33545591839222e-06, |
|
"loss": 0.079, |
|
"step": 944 |
|
}, |
|
{ |
|
"epoch": 8.4, |
|
"grad_norm": 0.6306861042976379, |
|
"learning_rate": 7.2544064663526815e-06, |
|
"loss": 0.1138, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 8.408888888888889, |
|
"grad_norm": 0.4660092890262604, |
|
"learning_rate": 7.173772226107434e-06, |
|
"loss": 0.0533, |
|
"step": 946 |
|
}, |
|
{ |
|
"epoch": 8.417777777777777, |
|
"grad_norm": 0.3973066210746765, |
|
"learning_rate": 7.093553980901852e-06, |
|
"loss": 0.0845, |
|
"step": 947 |
|
}, |
|
{ |
|
"epoch": 8.426666666666666, |
|
"grad_norm": 0.5049480199813843, |
|
"learning_rate": 7.013752509940485e-06, |
|
"loss": 0.1119, |
|
"step": 948 |
|
}, |
|
{ |
|
"epoch": 8.435555555555556, |
|
"grad_norm": 0.4741290211677551, |
|
"learning_rate": 6.934368588379553e-06, |
|
"loss": 0.0845, |
|
"step": 949 |
|
}, |
|
{ |
|
"epoch": 8.444444444444445, |
|
"grad_norm": 0.6338300108909607, |
|
"learning_rate": 6.855402987319348e-06, |
|
"loss": 0.1058, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 8.453333333333333, |
|
"grad_norm": 0.8661510944366455, |
|
"learning_rate": 6.776856473796828e-06, |
|
"loss": 0.1208, |
|
"step": 951 |
|
}, |
|
{ |
|
"epoch": 8.462222222222222, |
|
"grad_norm": 0.40579918026924133, |
|
"learning_rate": 6.698729810778065e-06, |
|
"loss": 0.0766, |
|
"step": 952 |
|
}, |
|
{ |
|
"epoch": 8.471111111111112, |
|
"grad_norm": 0.5100427269935608, |
|
"learning_rate": 6.621023757150929e-06, |
|
"loss": 0.0978, |
|
"step": 953 |
|
}, |
|
{ |
|
"epoch": 8.48, |
|
"grad_norm": 0.6719834804534912, |
|
"learning_rate": 6.54373906771768e-06, |
|
"loss": 0.0954, |
|
"step": 954 |
|
}, |
|
{ |
|
"epoch": 8.488888888888889, |
|
"grad_norm": 0.9201704263687134, |
|
"learning_rate": 6.46687649318759e-06, |
|
"loss": 0.0886, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 8.497777777777777, |
|
"grad_norm": 0.5468865633010864, |
|
"learning_rate": 6.390436780169734e-06, |
|
"loss": 0.0887, |
|
"step": 956 |
|
}, |
|
{ |
|
"epoch": 8.506666666666666, |
|
"grad_norm": 0.35717347264289856, |
|
"learning_rate": 6.314420671165694e-06, |
|
"loss": 0.0997, |
|
"step": 957 |
|
}, |
|
{ |
|
"epoch": 8.515555555555556, |
|
"grad_norm": 0.5313848257064819, |
|
"learning_rate": 6.238828904562316e-06, |
|
"loss": 0.1077, |
|
"step": 958 |
|
}, |
|
{ |
|
"epoch": 8.524444444444445, |
|
"grad_norm": 0.5072668194770813, |
|
"learning_rate": 6.163662214624616e-06, |
|
"loss": 0.0988, |
|
"step": 959 |
|
}, |
|
{ |
|
"epoch": 8.533333333333333, |
|
"grad_norm": 0.7123374938964844, |
|
"learning_rate": 6.088921331488568e-06, |
|
"loss": 0.0783, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 8.542222222222222, |
|
"grad_norm": 0.584987223148346, |
|
"learning_rate": 6.014606981154086e-06, |
|
"loss": 0.0501, |
|
"step": 961 |
|
}, |
|
{ |
|
"epoch": 8.551111111111112, |
|
"grad_norm": 0.5454912185668945, |
|
"learning_rate": 5.94071988547788e-06, |
|
"loss": 0.0881, |
|
"step": 962 |
|
}, |
|
{ |
|
"epoch": 8.56, |
|
"grad_norm": 0.6294575333595276, |
|
"learning_rate": 5.867260762166543e-06, |
|
"loss": 0.1615, |
|
"step": 963 |
|
}, |
|
{ |
|
"epoch": 8.568888888888889, |
|
"grad_norm": 0.440383642911911, |
|
"learning_rate": 5.794230324769517e-06, |
|
"loss": 0.0822, |
|
"step": 964 |
|
}, |
|
{ |
|
"epoch": 8.577777777777778, |
|
"grad_norm": 0.49135321378707886, |
|
"learning_rate": 5.721629282672164e-06, |
|
"loss": 0.1226, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 8.586666666666666, |
|
"grad_norm": 0.3769822120666504, |
|
"learning_rate": 5.649458341088915e-06, |
|
"loss": 0.1169, |
|
"step": 966 |
|
}, |
|
{ |
|
"epoch": 8.595555555555556, |
|
"grad_norm": 0.45154932141304016, |
|
"learning_rate": 5.577718201056392e-06, |
|
"loss": 0.1008, |
|
"step": 967 |
|
}, |
|
{ |
|
"epoch": 8.604444444444445, |
|
"grad_norm": 0.48603206872940063, |
|
"learning_rate": 5.506409559426573e-06, |
|
"loss": 0.0974, |
|
"step": 968 |
|
}, |
|
{ |
|
"epoch": 8.613333333333333, |
|
"grad_norm": 0.9174118638038635, |
|
"learning_rate": 5.435533108860086e-06, |
|
"loss": 0.0739, |
|
"step": 969 |
|
}, |
|
{ |
|
"epoch": 8.622222222222222, |
|
"grad_norm": 0.7738699913024902, |
|
"learning_rate": 5.365089537819434e-06, |
|
"loss": 0.1017, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 8.63111111111111, |
|
"grad_norm": 0.4519662857055664, |
|
"learning_rate": 5.295079530562302e-06, |
|
"loss": 0.058, |
|
"step": 971 |
|
}, |
|
{ |
|
"epoch": 8.64, |
|
"grad_norm": 0.5471189618110657, |
|
"learning_rate": 5.2255037671349535e-06, |
|
"loss": 0.089, |
|
"step": 972 |
|
}, |
|
{ |
|
"epoch": 8.648888888888889, |
|
"grad_norm": 0.40861138701438904, |
|
"learning_rate": 5.156362923365588e-06, |
|
"loss": 0.1123, |
|
"step": 973 |
|
}, |
|
{ |
|
"epoch": 8.657777777777778, |
|
"grad_norm": 0.5626519322395325, |
|
"learning_rate": 5.087657670857798e-06, |
|
"loss": 0.0654, |
|
"step": 974 |
|
}, |
|
{ |
|
"epoch": 8.666666666666666, |
|
"grad_norm": 0.7317439317703247, |
|
"learning_rate": 5.019388676984005e-06, |
|
"loss": 0.0452, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 8.675555555555556, |
|
"grad_norm": 0.454049289226532, |
|
"learning_rate": 4.951556604879048e-06, |
|
"loss": 0.0782, |
|
"step": 976 |
|
}, |
|
{ |
|
"epoch": 8.684444444444445, |
|
"grad_norm": 0.6130763292312622, |
|
"learning_rate": 4.8841621134336765e-06, |
|
"loss": 0.0874, |
|
"step": 977 |
|
}, |
|
{ |
|
"epoch": 8.693333333333333, |
|
"grad_norm": 0.5617997646331787, |
|
"learning_rate": 4.8172058572881765e-06, |
|
"loss": 0.065, |
|
"step": 978 |
|
}, |
|
{ |
|
"epoch": 8.702222222222222, |
|
"grad_norm": 0.5074090957641602, |
|
"learning_rate": 4.7506884868259995e-06, |
|
"loss": 0.0944, |
|
"step": 979 |
|
}, |
|
{ |
|
"epoch": 8.71111111111111, |
|
"grad_norm": 2.4399478435516357, |
|
"learning_rate": 4.684610648167503e-06, |
|
"loss": 0.118, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 8.72, |
|
"grad_norm": 0.47364744544029236, |
|
"learning_rate": 4.618972983163566e-06, |
|
"loss": 0.1199, |
|
"step": 981 |
|
}, |
|
{ |
|
"epoch": 8.72888888888889, |
|
"grad_norm": 0.5814787149429321, |
|
"learning_rate": 4.5537761293894535e-06, |
|
"loss": 0.1464, |
|
"step": 982 |
|
}, |
|
{ |
|
"epoch": 8.737777777777778, |
|
"grad_norm": 0.4782449007034302, |
|
"learning_rate": 4.489020720138582e-06, |
|
"loss": 0.1058, |
|
"step": 983 |
|
}, |
|
{ |
|
"epoch": 8.746666666666666, |
|
"grad_norm": 0.4726293981075287, |
|
"learning_rate": 4.424707384416344e-06, |
|
"loss": 0.0855, |
|
"step": 984 |
|
}, |
|
{ |
|
"epoch": 8.755555555555556, |
|
"grad_norm": 0.8018205761909485, |
|
"learning_rate": 4.360836746934055e-06, |
|
"loss": 0.0956, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 8.764444444444445, |
|
"grad_norm": 0.44194820523262024, |
|
"learning_rate": 4.29740942810285e-06, |
|
"loss": 0.1166, |
|
"step": 986 |
|
}, |
|
{ |
|
"epoch": 8.773333333333333, |
|
"grad_norm": 0.5051525235176086, |
|
"learning_rate": 4.234426044027645e-06, |
|
"loss": 0.1071, |
|
"step": 987 |
|
}, |
|
{ |
|
"epoch": 8.782222222222222, |
|
"grad_norm": 0.539904773235321, |
|
"learning_rate": 4.1718872065011904e-06, |
|
"loss": 0.0965, |
|
"step": 988 |
|
}, |
|
{ |
|
"epoch": 8.79111111111111, |
|
"grad_norm": 0.5747020840644836, |
|
"learning_rate": 4.109793522998101e-06, |
|
"loss": 0.0666, |
|
"step": 989 |
|
}, |
|
{ |
|
"epoch": 8.8, |
|
"grad_norm": 0.4732227027416229, |
|
"learning_rate": 4.048145596668967e-06, |
|
"loss": 0.1377, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 8.80888888888889, |
|
"grad_norm": 0.6295973658561707, |
|
"learning_rate": 3.9869440263344714e-06, |
|
"loss": 0.0956, |
|
"step": 991 |
|
}, |
|
{ |
|
"epoch": 8.817777777777778, |
|
"grad_norm": 0.46836766600608826, |
|
"learning_rate": 3.9261894064796135e-06, |
|
"loss": 0.1131, |
|
"step": 992 |
|
}, |
|
{ |
|
"epoch": 8.826666666666666, |
|
"grad_norm": 0.47983092069625854, |
|
"learning_rate": 3.865882327247916e-06, |
|
"loss": 0.1138, |
|
"step": 993 |
|
}, |
|
{ |
|
"epoch": 8.835555555555555, |
|
"grad_norm": 0.5129906535148621, |
|
"learning_rate": 3.8060233744356633e-06, |
|
"loss": 0.0959, |
|
"step": 994 |
|
}, |
|
{ |
|
"epoch": 8.844444444444445, |
|
"grad_norm": 0.4921208322048187, |
|
"learning_rate": 3.7466131294862595e-06, |
|
"loss": 0.1029, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 8.853333333333333, |
|
"grad_norm": 0.4332411289215088, |
|
"learning_rate": 3.687652169484568e-06, |
|
"loss": 0.1143, |
|
"step": 996 |
|
}, |
|
{ |
|
"epoch": 8.862222222222222, |
|
"grad_norm": 0.4102513790130615, |
|
"learning_rate": 3.6291410671512594e-06, |
|
"loss": 0.0641, |
|
"step": 997 |
|
}, |
|
{ |
|
"epoch": 8.87111111111111, |
|
"grad_norm": 0.5347101092338562, |
|
"learning_rate": 3.5710803908373224e-06, |
|
"loss": 0.102, |
|
"step": 998 |
|
}, |
|
{ |
|
"epoch": 8.88, |
|
"grad_norm": 0.5151996612548828, |
|
"learning_rate": 3.5134707045184955e-06, |
|
"loss": 0.0896, |
|
"step": 999 |
|
}, |
|
{ |
|
"epoch": 8.88888888888889, |
|
"grad_norm": 0.5282700061798096, |
|
"learning_rate": 3.4563125677897932e-06, |
|
"loss": 0.0743, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 8.88888888888889, |
|
"eval_loss": 0.4198705554008484, |
|
"eval_runtime": 44.3807, |
|
"eval_samples_per_second": 2.253, |
|
"eval_steps_per_second": 2.253, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 8.897777777777778, |
|
"grad_norm": 0.8092765808105469, |
|
"learning_rate": 3.3996065358600782e-06, |
|
"loss": 0.1038, |
|
"step": 1001 |
|
}, |
|
{ |
|
"epoch": 8.906666666666666, |
|
"grad_norm": 0.559906005859375, |
|
"learning_rate": 3.343353159546675e-06, |
|
"loss": 0.0894, |
|
"step": 1002 |
|
}, |
|
{ |
|
"epoch": 8.915555555555555, |
|
"grad_norm": 3.0559465885162354, |
|
"learning_rate": 3.2875529852700147e-06, |
|
"loss": 0.1378, |
|
"step": 1003 |
|
}, |
|
{ |
|
"epoch": 8.924444444444445, |
|
"grad_norm": 0.46687355637550354, |
|
"learning_rate": 3.2322065550483007e-06, |
|
"loss": 0.0736, |
|
"step": 1004 |
|
}, |
|
{ |
|
"epoch": 8.933333333333334, |
|
"grad_norm": 0.5981876254081726, |
|
"learning_rate": 3.177314406492288e-06, |
|
"loss": 0.0788, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 8.942222222222222, |
|
"grad_norm": 0.7355525493621826, |
|
"learning_rate": 3.1228770728000455e-06, |
|
"loss": 0.0883, |
|
"step": 1006 |
|
}, |
|
{ |
|
"epoch": 8.95111111111111, |
|
"grad_norm": 0.5845444202423096, |
|
"learning_rate": 3.0688950827517495e-06, |
|
"loss": 0.0556, |
|
"step": 1007 |
|
}, |
|
{ |
|
"epoch": 8.96, |
|
"grad_norm": 0.7546665668487549, |
|
"learning_rate": 3.0153689607045845e-06, |
|
"loss": 0.0977, |
|
"step": 1008 |
|
}, |
|
{ |
|
"epoch": 8.96888888888889, |
|
"grad_norm": 0.415771484375, |
|
"learning_rate": 2.9622992265876392e-06, |
|
"loss": 0.0817, |
|
"step": 1009 |
|
}, |
|
{ |
|
"epoch": 8.977777777777778, |
|
"grad_norm": 0.5274441838264465, |
|
"learning_rate": 2.9096863958968268e-06, |
|
"loss": 0.1598, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 8.986666666666666, |
|
"grad_norm": 0.5578910112380981, |
|
"learning_rate": 2.8575309796899376e-06, |
|
"loss": 0.1199, |
|
"step": 1011 |
|
}, |
|
{ |
|
"epoch": 8.995555555555555, |
|
"grad_norm": 0.4636129140853882, |
|
"learning_rate": 2.8058334845816213e-06, |
|
"loss": 0.127, |
|
"step": 1012 |
|
}, |
|
{ |
|
"epoch": 9.004444444444445, |
|
"grad_norm": 0.4034081995487213, |
|
"learning_rate": 2.7545944127384813e-06, |
|
"loss": 0.0583, |
|
"step": 1013 |
|
}, |
|
{ |
|
"epoch": 9.013333333333334, |
|
"grad_norm": 0.5779898762702942, |
|
"learning_rate": 2.7038142618741992e-06, |
|
"loss": 0.1332, |
|
"step": 1014 |
|
}, |
|
{ |
|
"epoch": 9.022222222222222, |
|
"grad_norm": 0.5441192984580994, |
|
"learning_rate": 2.653493525244721e-06, |
|
"loss": 0.0642, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 9.03111111111111, |
|
"grad_norm": 0.4402835965156555, |
|
"learning_rate": 2.603632691643415e-06, |
|
"loss": 0.0959, |
|
"step": 1016 |
|
}, |
|
{ |
|
"epoch": 9.04, |
|
"grad_norm": 0.9990886449813843, |
|
"learning_rate": 2.5542322453963763e-06, |
|
"loss": 0.0993, |
|
"step": 1017 |
|
}, |
|
{ |
|
"epoch": 9.04888888888889, |
|
"grad_norm": 0.36182594299316406, |
|
"learning_rate": 2.5052926663577e-06, |
|
"loss": 0.0925, |
|
"step": 1018 |
|
}, |
|
{ |
|
"epoch": 9.057777777777778, |
|
"grad_norm": 0.43276774883270264, |
|
"learning_rate": 2.456814429904819e-06, |
|
"loss": 0.0692, |
|
"step": 1019 |
|
}, |
|
{ |
|
"epoch": 9.066666666666666, |
|
"grad_norm": 0.43336784839630127, |
|
"learning_rate": 2.408798006933882e-06, |
|
"loss": 0.0883, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 9.075555555555555, |
|
"grad_norm": 0.5268573760986328, |
|
"learning_rate": 2.361243863855184e-06, |
|
"loss": 0.0742, |
|
"step": 1021 |
|
}, |
|
{ |
|
"epoch": 9.084444444444445, |
|
"grad_norm": 0.5564923286437988, |
|
"learning_rate": 2.314152462588659e-06, |
|
"loss": 0.12, |
|
"step": 1022 |
|
}, |
|
{ |
|
"epoch": 9.093333333333334, |
|
"grad_norm": 0.46045705676078796, |
|
"learning_rate": 2.2675242605593338e-06, |
|
"loss": 0.098, |
|
"step": 1023 |
|
}, |
|
{ |
|
"epoch": 9.102222222222222, |
|
"grad_norm": 0.5564160346984863, |
|
"learning_rate": 2.221359710692961e-06, |
|
"loss": 0.1237, |
|
"step": 1024 |
|
}, |
|
{ |
|
"epoch": 9.11111111111111, |
|
"grad_norm": 0.4042016565799713, |
|
"learning_rate": 2.1756592614115666e-06, |
|
"loss": 0.083, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 9.12, |
|
"grad_norm": 0.32778361439704895, |
|
"learning_rate": 2.1304233566290964e-06, |
|
"loss": 0.0668, |
|
"step": 1026 |
|
}, |
|
{ |
|
"epoch": 9.12888888888889, |
|
"grad_norm": 0.5222166776657104, |
|
"learning_rate": 2.085652435747132e-06, |
|
"loss": 0.0879, |
|
"step": 1027 |
|
}, |
|
{ |
|
"epoch": 9.137777777777778, |
|
"grad_norm": 0.6078011393547058, |
|
"learning_rate": 2.041346933650612e-06, |
|
"loss": 0.0913, |
|
"step": 1028 |
|
}, |
|
{ |
|
"epoch": 9.146666666666667, |
|
"grad_norm": 0.47403645515441895, |
|
"learning_rate": 1.99750728070357e-06, |
|
"loss": 0.0702, |
|
"step": 1029 |
|
}, |
|
{ |
|
"epoch": 9.155555555555555, |
|
"grad_norm": 0.58065265417099, |
|
"learning_rate": 1.9541339027450256e-06, |
|
"loss": 0.0748, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 9.164444444444445, |
|
"grad_norm": 0.4831164479255676, |
|
"learning_rate": 1.911227221084788e-06, |
|
"loss": 0.0542, |
|
"step": 1031 |
|
}, |
|
{ |
|
"epoch": 9.173333333333334, |
|
"grad_norm": 0.404856413602829, |
|
"learning_rate": 1.8687876524993987e-06, |
|
"loss": 0.0922, |
|
"step": 1032 |
|
}, |
|
{ |
|
"epoch": 9.182222222222222, |
|
"grad_norm": 0.7939493060112, |
|
"learning_rate": 1.8268156092280496e-06, |
|
"loss": 0.0739, |
|
"step": 1033 |
|
}, |
|
{ |
|
"epoch": 9.19111111111111, |
|
"grad_norm": 0.4974342882633209, |
|
"learning_rate": 1.785311498968617e-06, |
|
"loss": 0.0704, |
|
"step": 1034 |
|
}, |
|
{ |
|
"epoch": 9.2, |
|
"grad_norm": 0.6182239651679993, |
|
"learning_rate": 1.7442757248736785e-06, |
|
"loss": 0.0746, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 9.20888888888889, |
|
"grad_norm": 0.5401434302330017, |
|
"learning_rate": 1.70370868554659e-06, |
|
"loss": 0.1197, |
|
"step": 1036 |
|
}, |
|
{ |
|
"epoch": 9.217777777777778, |
|
"grad_norm": 0.7422550320625305, |
|
"learning_rate": 1.6636107750376329e-06, |
|
"loss": 0.0921, |
|
"step": 1037 |
|
}, |
|
{ |
|
"epoch": 9.226666666666667, |
|
"grad_norm": 0.4298858046531677, |
|
"learning_rate": 1.6239823828401945e-06, |
|
"loss": 0.0789, |
|
"step": 1038 |
|
}, |
|
{ |
|
"epoch": 9.235555555555555, |
|
"grad_norm": 0.49326783418655396, |
|
"learning_rate": 1.584823893886933e-06, |
|
"loss": 0.0804, |
|
"step": 1039 |
|
}, |
|
{ |
|
"epoch": 9.244444444444444, |
|
"grad_norm": 0.4212936758995056, |
|
"learning_rate": 1.5461356885461075e-06, |
|
"loss": 0.0727, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 9.253333333333334, |
|
"grad_norm": 0.4835444390773773, |
|
"learning_rate": 1.5079181426178313e-06, |
|
"loss": 0.108, |
|
"step": 1041 |
|
}, |
|
{ |
|
"epoch": 9.262222222222222, |
|
"grad_norm": 0.48927512764930725, |
|
"learning_rate": 1.4701716273304521e-06, |
|
"loss": 0.0825, |
|
"step": 1042 |
|
}, |
|
{ |
|
"epoch": 9.27111111111111, |
|
"grad_norm": 0.3901284635066986, |
|
"learning_rate": 1.4328965093369283e-06, |
|
"loss": 0.0873, |
|
"step": 1043 |
|
}, |
|
{ |
|
"epoch": 9.28, |
|
"grad_norm": 0.36725980043411255, |
|
"learning_rate": 1.3960931507112752e-06, |
|
"loss": 0.0868, |
|
"step": 1044 |
|
}, |
|
{ |
|
"epoch": 9.28888888888889, |
|
"grad_norm": 0.6572950482368469, |
|
"learning_rate": 1.3597619089450342e-06, |
|
"loss": 0.0757, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 9.297777777777778, |
|
"grad_norm": 0.6254395842552185, |
|
"learning_rate": 1.3239031369438326e-06, |
|
"loss": 0.0756, |
|
"step": 1046 |
|
}, |
|
{ |
|
"epoch": 9.306666666666667, |
|
"grad_norm": 0.5352001190185547, |
|
"learning_rate": 1.288517183023924e-06, |
|
"loss": 0.0987, |
|
"step": 1047 |
|
}, |
|
{ |
|
"epoch": 9.315555555555555, |
|
"grad_norm": 0.5086984038352966, |
|
"learning_rate": 1.2536043909088191e-06, |
|
"loss": 0.0582, |
|
"step": 1048 |
|
}, |
|
{ |
|
"epoch": 9.324444444444444, |
|
"grad_norm": 0.4248650074005127, |
|
"learning_rate": 1.2191650997259384e-06, |
|
"loss": 0.1289, |
|
"step": 1049 |
|
}, |
|
{ |
|
"epoch": 9.333333333333334, |
|
"grad_norm": 0.44240283966064453, |
|
"learning_rate": 1.1851996440033319e-06, |
|
"loss": 0.1087, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 9.342222222222222, |
|
"grad_norm": 0.42735180258750916, |
|
"learning_rate": 1.1517083536664142e-06, |
|
"loss": 0.0652, |
|
"step": 1051 |
|
}, |
|
{ |
|
"epoch": 9.351111111111111, |
|
"grad_norm": 0.5311141014099121, |
|
"learning_rate": 1.118691554034773e-06, |
|
"loss": 0.1254, |
|
"step": 1052 |
|
}, |
|
{ |
|
"epoch": 9.36, |
|
"grad_norm": 0.4787999391555786, |
|
"learning_rate": 1.0861495658190002e-06, |
|
"loss": 0.0717, |
|
"step": 1053 |
|
}, |
|
{ |
|
"epoch": 9.36888888888889, |
|
"grad_norm": 0.4565508961677551, |
|
"learning_rate": 1.0540827051175818e-06, |
|
"loss": 0.0907, |
|
"step": 1054 |
|
}, |
|
{ |
|
"epoch": 9.377777777777778, |
|
"grad_norm": 0.62436443567276, |
|
"learning_rate": 1.0224912834138122e-06, |
|
"loss": 0.1027, |
|
"step": 1055 |
|
}, |
|
{ |
|
"epoch": 9.386666666666667, |
|
"grad_norm": 0.43678709864616394, |
|
"learning_rate": 9.913756075728087e-07, |
|
"loss": 0.0934, |
|
"step": 1056 |
|
}, |
|
{ |
|
"epoch": 9.395555555555555, |
|
"grad_norm": 0.371412068605423, |
|
"learning_rate": 9.607359798384785e-07, |
|
"loss": 0.0735, |
|
"step": 1057 |
|
}, |
|
{ |
|
"epoch": 9.404444444444444, |
|
"grad_norm": 0.43636369705200195, |
|
"learning_rate": 9.305726978306173e-07, |
|
"loss": 0.0725, |
|
"step": 1058 |
|
}, |
|
{ |
|
"epoch": 9.413333333333334, |
|
"grad_norm": 0.8233289122581482, |
|
"learning_rate": 9.008860545420161e-07, |
|
"loss": 0.0986, |
|
"step": 1059 |
|
}, |
|
{ |
|
"epoch": 9.422222222222222, |
|
"grad_norm": 0.47502434253692627, |
|
"learning_rate": 8.716763383355864e-07, |
|
"loss": 0.1169, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 9.431111111111111, |
|
"grad_norm": 0.48122385144233704, |
|
"learning_rate": 8.429438329416117e-07, |
|
"loss": 0.0942, |
|
"step": 1061 |
|
}, |
|
{ |
|
"epoch": 9.44, |
|
"grad_norm": 0.6257894039154053, |
|
"learning_rate": 8.146888174549339e-07, |
|
"loss": 0.1429, |
|
"step": 1062 |
|
}, |
|
{ |
|
"epoch": 9.448888888888888, |
|
"grad_norm": 0.4403574764728546, |
|
"learning_rate": 7.869115663322879e-07, |
|
"loss": 0.0877, |
|
"step": 1063 |
|
}, |
|
{ |
|
"epoch": 9.457777777777778, |
|
"grad_norm": 0.5642544031143188, |
|
"learning_rate": 7.596123493895991e-07, |
|
"loss": 0.0884, |
|
"step": 1064 |
|
}, |
|
{ |
|
"epoch": 9.466666666666667, |
|
"grad_norm": 0.5571393966674805, |
|
"learning_rate": 7.327914317993955e-07, |
|
"loss": 0.0866, |
|
"step": 1065 |
|
}, |
|
{ |
|
"epoch": 9.475555555555555, |
|
"grad_norm": 0.4795849621295929, |
|
"learning_rate": 7.064490740882057e-07, |
|
"loss": 0.1407, |
|
"step": 1066 |
|
}, |
|
{ |
|
"epoch": 9.484444444444444, |
|
"grad_norm": 0.556771993637085, |
|
"learning_rate": 6.805855321340538e-07, |
|
"loss": 0.0918, |
|
"step": 1067 |
|
}, |
|
{ |
|
"epoch": 9.493333333333334, |
|
"grad_norm": 0.40080004930496216, |
|
"learning_rate": 6.552010571639456e-07, |
|
"loss": 0.0715, |
|
"step": 1068 |
|
}, |
|
{ |
|
"epoch": 9.502222222222223, |
|
"grad_norm": 0.3855873942375183, |
|
"learning_rate": 6.302958957514371e-07, |
|
"loss": 0.1082, |
|
"step": 1069 |
|
}, |
|
{ |
|
"epoch": 9.511111111111111, |
|
"grad_norm": 0.509375274181366, |
|
"learning_rate": 6.058702898142643e-07, |
|
"loss": 0.0986, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 9.52, |
|
"grad_norm": 0.41145873069763184, |
|
"learning_rate": 5.81924476611967e-07, |
|
"loss": 0.1064, |
|
"step": 1071 |
|
}, |
|
{ |
|
"epoch": 9.528888888888888, |
|
"grad_norm": 0.4494861960411072, |
|
"learning_rate": 5.584586887435739e-07, |
|
"loss": 0.0856, |
|
"step": 1072 |
|
}, |
|
{ |
|
"epoch": 9.537777777777778, |
|
"grad_norm": 0.48923179507255554, |
|
"learning_rate": 5.354731541453772e-07, |
|
"loss": 0.1022, |
|
"step": 1073 |
|
}, |
|
{ |
|
"epoch": 9.546666666666667, |
|
"grad_norm": 0.44903436303138733, |
|
"learning_rate": 5.129680960887007e-07, |
|
"loss": 0.0858, |
|
"step": 1074 |
|
}, |
|
{ |
|
"epoch": 9.555555555555555, |
|
"grad_norm": 0.4996289312839508, |
|
"learning_rate": 4.909437331777179e-07, |
|
"loss": 0.1038, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 9.564444444444444, |
|
"grad_norm": 0.44402775168418884, |
|
"learning_rate": 4.6940027934735954e-07, |
|
"loss": 0.0746, |
|
"step": 1076 |
|
}, |
|
{ |
|
"epoch": 9.573333333333334, |
|
"grad_norm": 0.4440934360027313, |
|
"learning_rate": 4.4833794386121006e-07, |
|
"loss": 0.0769, |
|
"step": 1077 |
|
}, |
|
{ |
|
"epoch": 9.582222222222223, |
|
"grad_norm": 0.38661956787109375, |
|
"learning_rate": 4.277569313094809e-07, |
|
"loss": 0.0688, |
|
"step": 1078 |
|
}, |
|
{ |
|
"epoch": 9.591111111111111, |
|
"grad_norm": 0.456836074590683, |
|
"learning_rate": 4.076574416070233e-07, |
|
"loss": 0.0915, |
|
"step": 1079 |
|
}, |
|
{ |
|
"epoch": 9.6, |
|
"grad_norm": 0.49599242210388184, |
|
"learning_rate": 3.8803966999139684e-07, |
|
"loss": 0.0433, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 9.608888888888888, |
|
"grad_norm": 0.5112136006355286, |
|
"learning_rate": 3.689038070209594e-07, |
|
"loss": 0.073, |
|
"step": 1081 |
|
}, |
|
{ |
|
"epoch": 9.617777777777778, |
|
"grad_norm": 0.4848357141017914, |
|
"learning_rate": 3.50250038573019e-07, |
|
"loss": 0.0902, |
|
"step": 1082 |
|
}, |
|
{ |
|
"epoch": 9.626666666666667, |
|
"grad_norm": 0.4244151711463928, |
|
"learning_rate": 3.320785458420461e-07, |
|
"loss": 0.0831, |
|
"step": 1083 |
|
}, |
|
{ |
|
"epoch": 9.635555555555555, |
|
"grad_norm": 0.42129454016685486, |
|
"learning_rate": 3.143895053378698e-07, |
|
"loss": 0.0878, |
|
"step": 1084 |
|
}, |
|
{ |
|
"epoch": 9.644444444444444, |
|
"grad_norm": 0.3875948190689087, |
|
"learning_rate": 2.971830888840177e-07, |
|
"loss": 0.0607, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 9.653333333333332, |
|
"grad_norm": 0.49087822437286377, |
|
"learning_rate": 2.8045946361601183e-07, |
|
"loss": 0.1056, |
|
"step": 1086 |
|
}, |
|
{ |
|
"epoch": 9.662222222222223, |
|
"grad_norm": 0.528295636177063, |
|
"learning_rate": 2.6421879197974784e-07, |
|
"loss": 0.0964, |
|
"step": 1087 |
|
}, |
|
{ |
|
"epoch": 9.671111111111111, |
|
"grad_norm": 0.43908149003982544, |
|
"learning_rate": 2.4846123172992954e-07, |
|
"loss": 0.1033, |
|
"step": 1088 |
|
}, |
|
{ |
|
"epoch": 9.68, |
|
"grad_norm": 1.0525627136230469, |
|
"learning_rate": 2.3318693592853103e-07, |
|
"loss": 0.115, |
|
"step": 1089 |
|
}, |
|
{ |
|
"epoch": 9.688888888888888, |
|
"grad_norm": 0.46659278869628906, |
|
"learning_rate": 2.1839605294330933e-07, |
|
"loss": 0.1169, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 9.697777777777778, |
|
"grad_norm": 0.48134076595306396, |
|
"learning_rate": 2.040887264463609e-07, |
|
"loss": 0.117, |
|
"step": 1091 |
|
}, |
|
{ |
|
"epoch": 9.706666666666667, |
|
"grad_norm": 0.3311064541339874, |
|
"learning_rate": 1.9026509541272275e-07, |
|
"loss": 0.0474, |
|
"step": 1092 |
|
}, |
|
{ |
|
"epoch": 9.715555555555556, |
|
"grad_norm": 0.5371410846710205, |
|
"learning_rate": 1.7692529411904578e-07, |
|
"loss": 0.0503, |
|
"step": 1093 |
|
}, |
|
{ |
|
"epoch": 9.724444444444444, |
|
"grad_norm": 0.6801145076751709, |
|
"learning_rate": 1.640694521422459e-07, |
|
"loss": 0.0785, |
|
"step": 1094 |
|
}, |
|
{ |
|
"epoch": 9.733333333333333, |
|
"grad_norm": 0.5717996954917908, |
|
"learning_rate": 1.5169769435830482e-07, |
|
"loss": 0.0691, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 9.742222222222223, |
|
"grad_norm": 0.4943947494029999, |
|
"learning_rate": 1.3981014094099353e-07, |
|
"loss": 0.1084, |
|
"step": 1096 |
|
}, |
|
{ |
|
"epoch": 9.751111111111111, |
|
"grad_norm": 0.6030212044715881, |
|
"learning_rate": 1.2840690736075633e-07, |
|
"loss": 0.1068, |
|
"step": 1097 |
|
}, |
|
{ |
|
"epoch": 9.76, |
|
"grad_norm": 0.5841107964515686, |
|
"learning_rate": 1.1748810438355628e-07, |
|
"loss": 0.0877, |
|
"step": 1098 |
|
}, |
|
{ |
|
"epoch": 9.768888888888888, |
|
"grad_norm": 0.5031399130821228, |
|
"learning_rate": 1.0705383806982606e-07, |
|
"loss": 0.0649, |
|
"step": 1099 |
|
}, |
|
{ |
|
"epoch": 9.777777777777779, |
|
"grad_norm": 0.533540666103363, |
|
"learning_rate": 9.710420977340762e-08, |
|
"loss": 0.1214, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 9.786666666666667, |
|
"grad_norm": 0.4822670817375183, |
|
"learning_rate": 8.763931614059195e-08, |
|
"loss": 0.1195, |
|
"step": 1101 |
|
}, |
|
{ |
|
"epoch": 9.795555555555556, |
|
"grad_norm": 0.42520344257354736, |
|
"learning_rate": 7.865924910916977e-08, |
|
"loss": 0.0753, |
|
"step": 1102 |
|
}, |
|
{ |
|
"epoch": 9.804444444444444, |
|
"grad_norm": 0.46582895517349243, |
|
"learning_rate": 7.01640959075489e-08, |
|
"loss": 0.1232, |
|
"step": 1103 |
|
}, |
|
{ |
|
"epoch": 9.813333333333333, |
|
"grad_norm": 0.5701122283935547, |
|
"learning_rate": 6.215393905388278e-08, |
|
"loss": 0.0946, |
|
"step": 1104 |
|
}, |
|
{ |
|
"epoch": 9.822222222222223, |
|
"grad_norm": 0.5732688307762146, |
|
"learning_rate": 5.462885635529324e-08, |
|
"loss": 0.0671, |
|
"step": 1105 |
|
}, |
|
{ |
|
"epoch": 9.831111111111111, |
|
"grad_norm": 0.44558167457580566, |
|
"learning_rate": 4.7588920907110094e-08, |
|
"loss": 0.0811, |
|
"step": 1106 |
|
}, |
|
{ |
|
"epoch": 9.84, |
|
"grad_norm": 0.47383779287338257, |
|
"learning_rate": 4.1034201092154945e-08, |
|
"loss": 0.0976, |
|
"step": 1107 |
|
}, |
|
{ |
|
"epoch": 9.848888888888888, |
|
"grad_norm": 0.562429666519165, |
|
"learning_rate": 3.496476058006959e-08, |
|
"loss": 0.1271, |
|
"step": 1108 |
|
}, |
|
{ |
|
"epoch": 9.857777777777777, |
|
"grad_norm": 0.5050589442253113, |
|
"learning_rate": 2.9380658326727538e-08, |
|
"loss": 0.0731, |
|
"step": 1109 |
|
}, |
|
{ |
|
"epoch": 9.866666666666667, |
|
"grad_norm": 0.48343461751937866, |
|
"learning_rate": 2.4281948573617874e-08, |
|
"loss": 0.077, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 9.875555555555556, |
|
"grad_norm": 0.5251277685165405, |
|
"learning_rate": 1.9668680847356735e-08, |
|
"loss": 0.0776, |
|
"step": 1111 |
|
}, |
|
{ |
|
"epoch": 9.884444444444444, |
|
"grad_norm": 0.4849095940589905, |
|
"learning_rate": 1.5540899959187727e-08, |
|
"loss": 0.0632, |
|
"step": 1112 |
|
}, |
|
{ |
|
"epoch": 9.893333333333333, |
|
"grad_norm": 0.5705631375312805, |
|
"learning_rate": 1.189864600454338e-08, |
|
"loss": 0.1093, |
|
"step": 1113 |
|
}, |
|
{ |
|
"epoch": 9.902222222222223, |
|
"grad_norm": 0.645310640335083, |
|
"learning_rate": 8.741954362678772e-09, |
|
"loss": 0.102, |
|
"step": 1114 |
|
}, |
|
{ |
|
"epoch": 9.911111111111111, |
|
"grad_norm": 0.6024972796440125, |
|
"learning_rate": 6.070855696294064e-09, |
|
"loss": 0.0915, |
|
"step": 1115 |
|
}, |
|
{ |
|
"epoch": 9.92, |
|
"grad_norm": 0.4722750782966614, |
|
"learning_rate": 3.885375951256931e-09, |
|
"loss": 0.0726, |
|
"step": 1116 |
|
}, |
|
{ |
|
"epoch": 9.928888888888888, |
|
"grad_norm": 0.6939518451690674, |
|
"learning_rate": 2.185536356363871e-09, |
|
"loss": 0.1166, |
|
"step": 1117 |
|
}, |
|
{ |
|
"epoch": 9.937777777777779, |
|
"grad_norm": 0.5158998966217041, |
|
"learning_rate": 9.713534230904041e-10, |
|
"loss": 0.0538, |
|
"step": 1118 |
|
}, |
|
{ |
|
"epoch": 9.946666666666667, |
|
"grad_norm": 0.5253598690032959, |
|
"learning_rate": 2.428389454800506e-10, |
|
"loss": 0.0969, |
|
"step": 1119 |
|
}, |
|
{ |
|
"epoch": 9.955555555555556, |
|
"grad_norm": 0.4952881932258606, |
|
"learning_rate": 0.0, |
|
"loss": 0.077, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 9.955555555555556, |
|
"step": 1120, |
|
"total_flos": 9.116617776876749e+17, |
|
"train_loss": 0.18832961371434587, |
|
"train_runtime": 17993.216, |
|
"train_samples_per_second": 0.5, |
|
"train_steps_per_second": 0.062 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 1120, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 10, |
|
"save_steps": 100, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 9.116617776876749e+17, |
|
"train_batch_size": 2, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |