{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.9994810586403736,
  "eval_steps": 500,
  "global_step": 963,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0,
      "grad_norm": 0.1219773218035698,
      "learning_rate": 1.0000000000000002e-06,
      "loss": 1.3759,
      "step": 1
    },
    {
      "epoch": 0.0,
      "grad_norm": 0.1272401511669159,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 1.3538,
      "step": 2
    },
    {
      "epoch": 0.0,
      "grad_norm": 0.13286344707012177,
      "learning_rate": 3e-06,
      "loss": 1.3886,
      "step": 3
    },
    {
      "epoch": 0.0,
      "grad_norm": 0.1291491985321045,
      "learning_rate": 4.000000000000001e-06,
      "loss": 1.3143,
      "step": 4
    },
    {
      "epoch": 0.01,
      "grad_norm": 0.12544086575508118,
      "learning_rate": 5e-06,
      "loss": 1.317,
      "step": 5
    },
    {
      "epoch": 0.01,
      "grad_norm": 0.15034854412078857,
      "learning_rate": 6e-06,
      "loss": 1.3537,
      "step": 6
    },
    {
      "epoch": 0.01,
      "grad_norm": 0.14342203736305237,
      "learning_rate": 7.000000000000001e-06,
      "loss": 1.3083,
      "step": 7
    },
    {
      "epoch": 0.01,
      "grad_norm": 0.1392456442117691,
      "learning_rate": 8.000000000000001e-06,
      "loss": 1.3501,
      "step": 8
    },
    {
      "epoch": 0.01,
      "grad_norm": 0.13647185266017914,
      "learning_rate": 9e-06,
      "loss": 1.1859,
      "step": 9
    },
    {
      "epoch": 0.01,
      "grad_norm": 0.12714740633964539,
      "learning_rate": 1e-05,
      "loss": 1.3746,
      "step": 10
    },
    {
      "epoch": 0.01,
      "grad_norm": 0.12918396294116974,
      "learning_rate": 1.1000000000000001e-05,
      "loss": 1.3672,
      "step": 11
    },
    {
      "epoch": 0.01,
      "grad_norm": 0.2055857628583908,
      "learning_rate": 1.2e-05,
      "loss": 1.3692,
      "step": 12
    },
    {
      "epoch": 0.01,
      "grad_norm": 0.1577647179365158,
      "learning_rate": 1.3000000000000001e-05,
      "loss": 1.2316,
      "step": 13
    },
    {
      "epoch": 0.01,
      "grad_norm": 0.1306232064962387,
      "learning_rate": 1.4000000000000001e-05,
      "loss": 1.2453,
      "step": 14
    },
    {
      "epoch": 0.02,
      "grad_norm": 0.1246161013841629,
      "learning_rate": 1.5e-05,
      "loss": 1.3613,
      "step": 15
    },
    {
      "epoch": 0.02,
      "grad_norm": 0.1408628672361374,
      "learning_rate": 1.6000000000000003e-05,
      "loss": 1.3905,
      "step": 16
    },
    {
      "epoch": 0.02,
      "grad_norm": 0.1561778485774994,
      "learning_rate": 1.7000000000000003e-05,
      "loss": 1.2617,
      "step": 17
    },
    {
      "epoch": 0.02,
      "grad_norm": 0.13680391013622284,
      "learning_rate": 1.8e-05,
      "loss": 1.3604,
      "step": 18
    },
    {
      "epoch": 0.02,
      "grad_norm": 0.12442106008529663,
      "learning_rate": 1.9e-05,
      "loss": 1.3812,
      "step": 19
    },
    {
      "epoch": 0.02,
      "grad_norm": 0.13730080425739288,
      "learning_rate": 2e-05,
      "loss": 1.2252,
      "step": 20
    },
    {
      "epoch": 0.02,
      "grad_norm": 0.11723917722702026,
      "learning_rate": 2.1e-05,
      "loss": 1.1512,
      "step": 21
    },
    {
      "epoch": 0.02,
      "grad_norm": 0.13034303486347198,
      "learning_rate": 2.2000000000000003e-05,
      "loss": 1.2698,
      "step": 22
    },
    {
      "epoch": 0.02,
      "grad_norm": 0.15671290457248688,
      "learning_rate": 2.3000000000000003e-05,
      "loss": 1.2823,
      "step": 23
    },
    {
      "epoch": 0.02,
      "grad_norm": 0.11263088881969452,
      "learning_rate": 2.4e-05,
      "loss": 1.375,
      "step": 24
    },
    {
      "epoch": 0.03,
      "grad_norm": 0.13272984325885773,
      "learning_rate": 2.5e-05,
      "loss": 1.2721,
      "step": 25
    },
    {
      "epoch": 0.03,
      "grad_norm": 0.13560321927070618,
      "learning_rate": 2.6000000000000002e-05,
      "loss": 1.3223,
      "step": 26
    },
    {
      "epoch": 0.03,
      "grad_norm": 0.13235026597976685,
      "learning_rate": 2.7000000000000002e-05,
      "loss": 1.2181,
      "step": 27
    },
    {
      "epoch": 0.03,
      "grad_norm": 0.12842555344104767,
      "learning_rate": 2.8000000000000003e-05,
      "loss": 1.3783,
      "step": 28
    },
    {
      "epoch": 0.03,
      "grad_norm": 0.13284699618816376,
      "learning_rate": 2.9e-05,
      "loss": 1.3668,
      "step": 29
    },
    {
      "epoch": 0.03,
      "grad_norm": 0.12673886120319366,
      "learning_rate": 3e-05,
      "loss": 1.3657,
      "step": 30
    },
    {
      "epoch": 0.03,
      "grad_norm": 0.1453860104084015,
      "learning_rate": 3.1e-05,
      "loss": 1.2646,
      "step": 31
    },
    {
      "epoch": 0.03,
      "grad_norm": 0.17890802025794983,
      "learning_rate": 3.2000000000000005e-05,
      "loss": 1.1986,
      "step": 32
    },
    {
      "epoch": 0.03,
      "grad_norm": 0.1185929998755455,
      "learning_rate": 3.3e-05,
      "loss": 1.1962,
      "step": 33
    },
    {
      "epoch": 0.04,
      "grad_norm": 0.12994253635406494,
      "learning_rate": 3.4000000000000007e-05,
      "loss": 1.2893,
      "step": 34
    },
    {
      "epoch": 0.04,
      "grad_norm": 0.1479964405298233,
      "learning_rate": 3.5e-05,
      "loss": 1.1221,
      "step": 35
    },
    {
      "epoch": 0.04,
      "grad_norm": 0.13566361367702484,
      "learning_rate": 3.6e-05,
      "loss": 1.2425,
      "step": 36
    },
    {
      "epoch": 0.04,
      "grad_norm": 0.1559300720691681,
      "learning_rate": 3.7e-05,
      "loss": 1.2995,
      "step": 37
    },
    {
      "epoch": 0.04,
      "grad_norm": 0.14187201857566833,
      "learning_rate": 3.8e-05,
      "loss": 1.3666,
      "step": 38
    },
    {
      "epoch": 0.04,
      "grad_norm": 0.13634175062179565,
      "learning_rate": 3.9000000000000006e-05,
      "loss": 1.3111,
      "step": 39
    },
    {
      "epoch": 0.04,
      "grad_norm": 0.1550116240978241,
      "learning_rate": 4e-05,
      "loss": 1.3729,
      "step": 40
    },
    {
      "epoch": 0.04,
      "grad_norm": 0.14702185988426208,
      "learning_rate": 4.1e-05,
      "loss": 1.2447,
      "step": 41
    },
    {
      "epoch": 0.04,
      "grad_norm": 0.1325961798429489,
      "learning_rate": 4.2e-05,
      "loss": 1.174,
      "step": 42
    },
    {
      "epoch": 0.04,
      "grad_norm": 0.1384323686361313,
      "learning_rate": 4.3e-05,
      "loss": 1.2333,
      "step": 43
    },
    {
      "epoch": 0.05,
      "grad_norm": 0.13662421703338623,
      "learning_rate": 4.4000000000000006e-05,
      "loss": 1.1815,
      "step": 44
    },
    {
      "epoch": 0.05,
      "grad_norm": 0.13229218125343323,
      "learning_rate": 4.5e-05,
      "loss": 1.2125,
      "step": 45
    },
    {
      "epoch": 0.05,
      "grad_norm": 0.14347675442695618,
      "learning_rate": 4.600000000000001e-05,
      "loss": 1.1498,
      "step": 46
    },
    {
      "epoch": 0.05,
      "grad_norm": 0.18536897003650665,
      "learning_rate": 4.7e-05,
      "loss": 1.2404,
      "step": 47
    },
    {
      "epoch": 0.05,
      "grad_norm": 0.13089048862457275,
      "learning_rate": 4.8e-05,
      "loss": 1.2904,
      "step": 48
    },
    {
      "epoch": 0.05,
      "grad_norm": 0.12752604484558105,
      "learning_rate": 4.9e-05,
      "loss": 1.2702,
      "step": 49
    },
    {
      "epoch": 0.05,
      "grad_norm": 0.15005581080913544,
      "learning_rate": 5e-05,
      "loss": 1.2866,
      "step": 50
    },
    {
      "epoch": 0.05,
      "grad_norm": 0.16374410688877106,
      "learning_rate": 5.1000000000000006e-05,
      "loss": 1.333,
      "step": 51
    },
    {
      "epoch": 0.05,
      "grad_norm": 0.1684991866350174,
      "learning_rate": 5.2000000000000004e-05,
      "loss": 1.4278,
      "step": 52
    },
    {
      "epoch": 0.06,
      "grad_norm": 0.1903846263885498,
      "learning_rate": 5.300000000000001e-05,
      "loss": 1.3035,
      "step": 53
    },
    {
      "epoch": 0.06,
      "grad_norm": 0.16056384146213531,
      "learning_rate": 5.4000000000000005e-05,
      "loss": 1.2323,
      "step": 54
    },
    {
      "epoch": 0.06,
      "grad_norm": 0.14987166225910187,
      "learning_rate": 5.500000000000001e-05,
      "loss": 1.0263,
      "step": 55
    },
    {
      "epoch": 0.06,
      "grad_norm": 0.13468973338603973,
      "learning_rate": 5.6000000000000006e-05,
      "loss": 1.2534,
      "step": 56
    },
    {
      "epoch": 0.06,
      "grad_norm": 0.1467704474925995,
      "learning_rate": 5.6999999999999996e-05,
      "loss": 1.3025,
      "step": 57
    },
    {
      "epoch": 0.06,
      "grad_norm": 0.17560411989688873,
      "learning_rate": 5.8e-05,
      "loss": 1.2307,
      "step": 58
    },
    {
      "epoch": 0.06,
      "grad_norm": 0.1423458606004715,
      "learning_rate": 5.9e-05,
      "loss": 1.1628,
      "step": 59
    },
    {
      "epoch": 0.06,
      "grad_norm": 0.12863807380199432,
      "learning_rate": 6e-05,
      "loss": 1.1174,
      "step": 60
    },
    {
      "epoch": 0.06,
      "grad_norm": 0.13459883630275726,
      "learning_rate": 6.1e-05,
      "loss": 1.2223,
      "step": 61
    },
    {
      "epoch": 0.06,
      "grad_norm": 0.13762715458869934,
      "learning_rate": 6.2e-05,
      "loss": 1.2393,
      "step": 62
    },
    {
      "epoch": 0.07,
      "grad_norm": 0.1310354322195053,
      "learning_rate": 6.3e-05,
      "loss": 1.2487,
      "step": 63
    },
    {
      "epoch": 0.07,
      "grad_norm": 0.12477163225412369,
      "learning_rate": 6.400000000000001e-05,
      "loss": 1.2632,
      "step": 64
    },
    {
      "epoch": 0.07,
      "grad_norm": 0.15380260348320007,
      "learning_rate": 6.500000000000001e-05,
      "loss": 1.3762,
      "step": 65
    },
    {
      "epoch": 0.07,
      "grad_norm": 0.1248747706413269,
      "learning_rate": 6.6e-05,
      "loss": 1.1336,
      "step": 66
    },
    {
      "epoch": 0.07,
      "grad_norm": 0.12641173601150513,
      "learning_rate": 6.7e-05,
      "loss": 1.2674,
      "step": 67
    },
    {
      "epoch": 0.07,
      "grad_norm": 0.13872478902339935,
      "learning_rate": 6.800000000000001e-05,
      "loss": 1.3014,
      "step": 68
    },
    {
      "epoch": 0.07,
      "grad_norm": 0.1377810686826706,
      "learning_rate": 6.9e-05,
      "loss": 1.2816,
      "step": 69
    },
    {
      "epoch": 0.07,
      "grad_norm": 0.13459351658821106,
      "learning_rate": 7e-05,
      "loss": 1.2872,
      "step": 70
    },
    {
      "epoch": 0.07,
      "grad_norm": 0.13774238526821136,
      "learning_rate": 7.1e-05,
      "loss": 1.1481,
      "step": 71
    },
    {
      "epoch": 0.07,
      "grad_norm": 0.1276445835828781,
      "learning_rate": 7.2e-05,
      "loss": 1.1924,
      "step": 72
    },
    {
      "epoch": 0.08,
      "grad_norm": 0.12415210157632828,
      "learning_rate": 7.3e-05,
      "loss": 1.3093,
      "step": 73
    },
    {
      "epoch": 0.08,
      "grad_norm": 0.11668894439935684,
      "learning_rate": 7.4e-05,
      "loss": 1.1941,
      "step": 74
    },
    {
      "epoch": 0.08,
      "grad_norm": 0.16712161898612976,
      "learning_rate": 7.500000000000001e-05,
      "loss": 1.2581,
      "step": 75
    },
    {
      "epoch": 0.08,
      "grad_norm": 0.1783616989850998,
      "learning_rate": 7.6e-05,
      "loss": 1.0885,
      "step": 76
    },
    {
      "epoch": 0.08,
      "grad_norm": 0.1388709843158722,
      "learning_rate": 7.7e-05,
      "loss": 1.1999,
      "step": 77
    },
    {
      "epoch": 0.08,
      "grad_norm": 0.13248670101165771,
      "learning_rate": 7.800000000000001e-05,
      "loss": 1.1312,
      "step": 78
    },
    {
      "epoch": 0.08,
      "grad_norm": 0.8402003645896912,
      "learning_rate": 7.900000000000001e-05,
      "loss": 1.281,
      "step": 79
    },
    {
      "epoch": 0.08,
      "grad_norm": 0.1560419201850891,
      "learning_rate": 8e-05,
      "loss": 1.3314,
      "step": 80
    },
    {
      "epoch": 0.08,
      "grad_norm": 0.13449999690055847,
      "learning_rate": 8.1e-05,
      "loss": 1.1742,
      "step": 81
    },
    {
      "epoch": 0.09,
      "grad_norm": 0.14383117854595184,
      "learning_rate": 8.2e-05,
      "loss": 1.1958,
      "step": 82
    },
    {
      "epoch": 0.09,
      "grad_norm": 0.15058472752571106,
      "learning_rate": 8.3e-05,
      "loss": 1.2992,
      "step": 83
    },
    {
      "epoch": 0.09,
      "grad_norm": 0.14776362478733063,
      "learning_rate": 8.4e-05,
      "loss": 1.3611,
      "step": 84
    },
    {
      "epoch": 0.09,
      "grad_norm": 0.14592403173446655,
      "learning_rate": 8.5e-05,
      "loss": 1.1165,
      "step": 85
    },
    {
      "epoch": 0.09,
      "grad_norm": 0.1357845813035965,
      "learning_rate": 8.6e-05,
      "loss": 1.2509,
      "step": 86
    },
    {
      "epoch": 0.09,
      "grad_norm": 0.15681371092796326,
      "learning_rate": 8.7e-05,
      "loss": 1.0857,
      "step": 87
    },
    {
      "epoch": 0.09,
      "grad_norm": 0.14496199786663055,
      "learning_rate": 8.800000000000001e-05,
      "loss": 1.1111,
      "step": 88
    },
    {
      "epoch": 0.09,
      "grad_norm": 0.1501457244157791,
      "learning_rate": 8.900000000000001e-05,
      "loss": 1.2556,
      "step": 89
    },
    {
      "epoch": 0.09,
      "grad_norm": 0.14578907191753387,
      "learning_rate": 9e-05,
      "loss": 1.1509,
      "step": 90
    },
    {
      "epoch": 0.09,
      "grad_norm": 0.13890138268470764,
      "learning_rate": 9.1e-05,
      "loss": 1.1687,
      "step": 91
    },
    {
      "epoch": 0.1,
      "grad_norm": 0.14948885142803192,
      "learning_rate": 9.200000000000001e-05,
      "loss": 1.0968,
      "step": 92
    },
    {
      "epoch": 0.1,
      "grad_norm": 0.13117392361164093,
      "learning_rate": 9.300000000000001e-05,
      "loss": 1.3073,
      "step": 93
    },
    {
      "epoch": 0.1,
      "grad_norm": 0.13922806084156036,
      "learning_rate": 9.4e-05,
      "loss": 1.2502,
      "step": 94
    },
    {
      "epoch": 0.1,
      "grad_norm": 0.16392332315444946,
      "learning_rate": 9.5e-05,
      "loss": 1.3148,
      "step": 95
    },
    {
      "epoch": 0.1,
      "grad_norm": 0.11756494641304016,
      "learning_rate": 9.6e-05,
      "loss": 1.3215,
      "step": 96
    },
    {
      "epoch": 0.1,
      "grad_norm": 0.11909925937652588,
      "learning_rate": 9.7e-05,
      "loss": 1.2593,
      "step": 97
    },
    {
      "epoch": 0.1,
      "grad_norm": 0.1371874213218689,
      "learning_rate": 9.8e-05,
      "loss": 1.1537,
      "step": 98
    },
    {
      "epoch": 0.1,
      "grad_norm": 0.14141380786895752,
      "learning_rate": 9.900000000000001e-05,
      "loss": 1.221,
      "step": 99
    },
    {
      "epoch": 0.1,
      "grad_norm": 0.13502179086208344,
      "learning_rate": 0.0001,
      "loss": 1.3492,
      "step": 100
    },
    {
      "epoch": 0.1,
      "grad_norm": 0.15468691289424896,
      "learning_rate": 0.000101,
      "loss": 1.2592,
      "step": 101
    },
    {
      "epoch": 0.11,
      "grad_norm": 0.13041648268699646,
      "learning_rate": 0.00010200000000000001,
      "loss": 1.3792,
      "step": 102
    },
    {
      "epoch": 0.11,
      "grad_norm": 0.11658702045679092,
      "learning_rate": 0.00010300000000000001,
      "loss": 1.1845,
      "step": 103
    },
    {
      "epoch": 0.11,
      "grad_norm": 0.13084186613559723,
      "learning_rate": 0.00010400000000000001,
      "loss": 1.1808,
      "step": 104
    },
    {
      "epoch": 0.11,
      "grad_norm": 0.13180844485759735,
      "learning_rate": 0.000105,
      "loss": 1.1721,
      "step": 105
    },
    {
      "epoch": 0.11,
      "grad_norm": 0.1375647485256195,
      "learning_rate": 0.00010600000000000002,
      "loss": 1.3622,
      "step": 106
    },
    {
      "epoch": 0.11,
      "grad_norm": 0.11864122748374939,
      "learning_rate": 0.00010700000000000001,
      "loss": 1.238,
      "step": 107
    },
    {
      "epoch": 0.11,
      "grad_norm": 0.11780542880296707,
      "learning_rate": 0.00010800000000000001,
      "loss": 1.1581,
      "step": 108
    },
    {
      "epoch": 0.11,
      "grad_norm": 0.11449900269508362,
      "learning_rate": 0.000109,
      "loss": 1.3076,
      "step": 109
    },
    {
      "epoch": 0.11,
      "grad_norm": 0.11585865169763565,
      "learning_rate": 0.00011000000000000002,
      "loss": 1.3553,
      "step": 110
    },
    {
      "epoch": 0.12,
      "grad_norm": 0.12288644164800644,
      "learning_rate": 0.00011100000000000001,
      "loss": 1.1448,
      "step": 111
    },
    {
      "epoch": 0.12,
      "grad_norm": 0.12131894379854202,
      "learning_rate": 0.00011200000000000001,
      "loss": 1.3456,
      "step": 112
    },
    {
      "epoch": 0.12,
      "grad_norm": 0.13827624917030334,
      "learning_rate": 0.000113,
      "loss": 1.3183,
      "step": 113
    },
    {
      "epoch": 0.12,
      "grad_norm": 0.13708753883838654,
      "learning_rate": 0.00011399999999999999,
      "loss": 1.3293,
      "step": 114
    },
    {
      "epoch": 0.12,
      "grad_norm": 0.11893680691719055,
      "learning_rate": 0.00011499999999999999,
      "loss": 1.1954,
      "step": 115
    },
    {
      "epoch": 0.12,
      "grad_norm": 0.12022116035223007,
      "learning_rate": 0.000116,
      "loss": 1.1822,
      "step": 116
    },
    {
      "epoch": 0.12,
      "grad_norm": 0.11215274035930634,
      "learning_rate": 0.000117,
      "loss": 1.2453,
      "step": 117
    },
    {
      "epoch": 0.12,
      "grad_norm": 0.11778578907251358,
      "learning_rate": 0.000118,
      "loss": 1.1939,
      "step": 118
    },
    {
      "epoch": 0.12,
      "grad_norm": 0.12714551389217377,
      "learning_rate": 0.000119,
      "loss": 1.1917,
      "step": 119
    },
    {
      "epoch": 0.12,
      "grad_norm": 0.11972431093454361,
      "learning_rate": 0.00012,
      "loss": 1.2769,
      "step": 120
    },
    {
      "epoch": 0.13,
      "grad_norm": 0.12945646047592163,
      "learning_rate": 0.000121,
      "loss": 1.2929,
      "step": 121
    },
    {
      "epoch": 0.13,
      "grad_norm": 0.11721435189247131,
      "learning_rate": 0.000122,
      "loss": 1.3606,
      "step": 122
    },
    {
      "epoch": 0.13,
      "grad_norm": 0.117381751537323,
      "learning_rate": 0.000123,
      "loss": 1.3187,
      "step": 123
    },
    {
      "epoch": 0.13,
      "grad_norm": 0.13110613822937012,
      "learning_rate": 0.000124,
      "loss": 1.3786,
      "step": 124
    },
    {
      "epoch": 0.13,
      "grad_norm": 0.12571324408054352,
      "learning_rate": 0.000125,
      "loss": 1.0165,
      "step": 125
    },
    {
      "epoch": 0.13,
      "grad_norm": 0.12401648610830307,
      "learning_rate": 0.000126,
      "loss": 1.1858,
      "step": 126
    },
    {
      "epoch": 0.13,
      "grad_norm": 0.11960897594690323,
      "learning_rate": 0.000127,
      "loss": 1.2638,
      "step": 127
    },
    {
      "epoch": 0.13,
      "grad_norm": 0.11195061355829239,
      "learning_rate": 0.00012800000000000002,
      "loss": 1.2469,
      "step": 128
    },
    {
      "epoch": 0.13,
      "grad_norm": 0.1292635053396225,
      "learning_rate": 0.00012900000000000002,
      "loss": 1.3642,
      "step": 129
    },
    {
      "epoch": 0.13,
      "grad_norm": 0.11140226572751999,
      "learning_rate": 0.00013000000000000002,
      "loss": 1.2238,
      "step": 130
    },
    {
      "epoch": 0.14,
      "grad_norm": 0.10454066097736359,
      "learning_rate": 0.000131,
      "loss": 1.2418,
      "step": 131
    },
    {
      "epoch": 0.14,
      "grad_norm": 0.11138935387134552,
      "learning_rate": 0.000132,
      "loss": 1.1964,
      "step": 132
    },
    {
      "epoch": 0.14,
      "grad_norm": 0.12519143521785736,
      "learning_rate": 0.000133,
      "loss": 1.1889,
      "step": 133
    },
    {
      "epoch": 0.14,
      "grad_norm": 0.1129814088344574,
      "learning_rate": 0.000134,
      "loss": 1.3425,
      "step": 134
    },
    {
      "epoch": 0.14,
      "grad_norm": 0.11608937382698059,
      "learning_rate": 0.00013500000000000003,
      "loss": 1.0762,
      "step": 135
    },
    {
      "epoch": 0.14,
      "grad_norm": 0.1410205215215683,
      "learning_rate": 0.00013600000000000003,
      "loss": 1.2502,
      "step": 136
    },
    {
      "epoch": 0.14,
      "grad_norm": 0.11246364563703537,
      "learning_rate": 0.00013700000000000002,
      "loss": 1.3605,
      "step": 137
    },
    {
      "epoch": 0.14,
      "grad_norm": 0.1213245838880539,
      "learning_rate": 0.000138,
      "loss": 1.2632,
      "step": 138
    },
    {
      "epoch": 0.14,
      "grad_norm": 0.11144573241472244,
      "learning_rate": 0.000139,
      "loss": 1.1771,
      "step": 139
    },
    {
      "epoch": 0.15,
      "grad_norm": 0.1700059324502945,
      "learning_rate": 0.00014,
      "loss": 1.3396,
      "step": 140
    },
    {
      "epoch": 0.15,
      "grad_norm": 0.11950355768203735,
      "learning_rate": 0.000141,
      "loss": 1.1329,
      "step": 141
    },
    {
      "epoch": 0.15,
      "grad_norm": 0.11367864161729813,
      "learning_rate": 0.000142,
      "loss": 1.3323,
      "step": 142
    },
    {
      "epoch": 0.15,
      "grad_norm": 0.22367842495441437,
      "learning_rate": 0.000143,
      "loss": 1.201,
      "step": 143
    },
    {
      "epoch": 0.15,
      "grad_norm": 0.11639752984046936,
      "learning_rate": 0.000144,
      "loss": 1.2507,
      "step": 144
    },
    {
      "epoch": 0.15,
      "grad_norm": 0.12356352061033249,
      "learning_rate": 0.000145,
      "loss": 1.3437,
      "step": 145
    },
    {
      "epoch": 0.15,
      "grad_norm": 0.10859037935733795,
      "learning_rate": 0.000146,
      "loss": 1.1677,
      "step": 146
    },
    {
      "epoch": 0.15,
      "grad_norm": 0.11901260167360306,
      "learning_rate": 0.000147,
      "loss": 1.1125,
      "step": 147
    },
    {
      "epoch": 0.15,
      "grad_norm": 0.1092861220240593,
      "learning_rate": 0.000148,
      "loss": 1.1939,
      "step": 148
    },
    {
      "epoch": 0.15,
      "grad_norm": 0.10728005319833755,
      "learning_rate": 0.00014900000000000002,
      "loss": 1.1974,
      "step": 149
    },
    {
      "epoch": 0.16,
      "grad_norm": 0.11113261431455612,
      "learning_rate": 0.00015000000000000001,
      "loss": 1.1107,
      "step": 150
    },
    {
      "epoch": 0.16,
      "grad_norm": 0.14674580097198486,
      "learning_rate": 0.000151,
      "loss": 1.2339,
      "step": 151
    },
    {
      "epoch": 0.16,
      "grad_norm": 0.10521555691957474,
      "learning_rate": 0.000152,
      "loss": 1.2917,
      "step": 152
    },
    {
      "epoch": 0.16,
      "grad_norm": 0.12526080012321472,
      "learning_rate": 0.000153,
      "loss": 1.3247,
      "step": 153
    },
    {
      "epoch": 0.16,
      "grad_norm": 0.2366744726896286,
      "learning_rate": 0.000154,
      "loss": 1.3637,
      "step": 154
    },
    {
      "epoch": 0.16,
      "grad_norm": 0.11178407818078995,
      "learning_rate": 0.000155,
      "loss": 1.2649,
      "step": 155
    },
    {
      "epoch": 0.16,
      "grad_norm": 0.10156022757291794,
      "learning_rate": 0.00015600000000000002,
      "loss": 1.1637,
      "step": 156
    },
    {
      "epoch": 0.16,
      "grad_norm": 0.09755547344684601,
      "learning_rate": 0.00015700000000000002,
      "loss": 1.241,
      "step": 157
    },
    {
      "epoch": 0.16,
      "grad_norm": 0.11344686150550842,
      "learning_rate": 0.00015800000000000002,
      "loss": 1.2208,
      "step": 158
    },
    {
      "epoch": 0.17,
      "grad_norm": 0.1021842285990715,
      "learning_rate": 0.00015900000000000002,
      "loss": 1.1936,
      "step": 159
    },
    {
      "epoch": 0.17,
      "grad_norm": 0.11096473783254623,
      "learning_rate": 0.00016,
      "loss": 1.2908,
      "step": 160
    },
    {
      "epoch": 0.17,
      "grad_norm": 0.11365038901567459,
      "learning_rate": 0.000161,
      "loss": 1.1754,
      "step": 161
    },
    {
      "epoch": 0.17,
      "grad_norm": 0.1159452423453331,
      "learning_rate": 0.000162,
      "loss": 1.2411,
      "step": 162
    },
    {
      "epoch": 0.17,
      "grad_norm": 0.1240570917725563,
      "learning_rate": 0.000163,
      "loss": 1.2655,
      "step": 163
    },
    {
      "epoch": 0.17,
      "grad_norm": 0.11343025416135788,
      "learning_rate": 0.000164,
      "loss": 1.3486,
      "step": 164
    },
    {
      "epoch": 0.17,
      "grad_norm": 0.10576006770133972,
      "learning_rate": 0.000165,
      "loss": 1.2787,
      "step": 165
    },
    {
      "epoch": 0.17,
      "grad_norm": 0.182003453373909,
      "learning_rate": 0.000166,
      "loss": 1.2045,
      "step": 166
    },
    {
      "epoch": 0.17,
      "grad_norm": 0.10536637157201767,
      "learning_rate": 0.000167,
      "loss": 1.1388,
      "step": 167
    },
    {
      "epoch": 0.17,
      "grad_norm": 0.10617152601480484,
      "learning_rate": 0.000168,
      "loss": 1.2686,
      "step": 168
    },
    {
      "epoch": 0.18,
      "grad_norm": 0.11899720877408981,
      "learning_rate": 0.00016900000000000002,
      "loss": 1.1732,
      "step": 169
    },
    {
      "epoch": 0.18,
      "grad_norm": 0.11736088991165161,
      "learning_rate": 0.00017,
      "loss": 1.2772,
      "step": 170
    },
    {
      "epoch": 0.18,
      "grad_norm": 0.10500625520944595,
      "learning_rate": 0.000171,
      "loss": 1.2193,
      "step": 171
    },
    {
      "epoch": 0.18,
      "grad_norm": 0.10299772769212723,
      "learning_rate": 0.000172,
      "loss": 1.2021,
      "step": 172
    },
    {
      "epoch": 0.18,
      "grad_norm": 0.10550544410943985,
      "learning_rate": 0.000173,
      "loss": 1.2393,
      "step": 173
    },
    {
      "epoch": 0.18,
      "grad_norm": 0.1132335364818573,
      "learning_rate": 0.000174,
      "loss": 1.2713,
      "step": 174
    },
    {
      "epoch": 0.18,
      "grad_norm": 0.11581739038228989,
      "learning_rate": 0.000175,
      "loss": 1.3282,
      "step": 175
    },
    {
      "epoch": 0.18,
      "grad_norm": 0.11211756616830826,
      "learning_rate": 0.00017600000000000002,
      "loss": 1.2374,
      "step": 176
    },
    {
      "epoch": 0.18,
      "grad_norm": 0.10848426073789597,
      "learning_rate": 0.00017700000000000002,
      "loss": 1.2916,
      "step": 177
    },
    {
      "epoch": 0.18,
      "grad_norm": 0.11354674398899078,
      "learning_rate": 0.00017800000000000002,
      "loss": 1.1914,
      "step": 178
    },
    {
      "epoch": 0.19,
      "grad_norm": 0.12108771502971649,
      "learning_rate": 0.00017900000000000001,
      "loss": 1.3693,
      "step": 179
    },
    {
      "epoch": 0.19,
      "grad_norm": 0.11068333685398102,
      "learning_rate": 0.00018,
      "loss": 1.086,
      "step": 180
    },
    {
      "epoch": 0.19,
      "grad_norm": 0.11752796918153763,
      "learning_rate": 0.000181,
      "loss": 1.1508,
      "step": 181
    },
    {
      "epoch": 0.19,
      "grad_norm": 0.10650038719177246,
      "learning_rate": 0.000182,
      "loss": 1.204,
      "step": 182
    },
    {
      "epoch": 0.19,
      "grad_norm": 0.12248033285140991,
      "learning_rate": 0.000183,
      "loss": 1.2288,
      "step": 183
    },
    {
      "epoch": 0.19,
      "grad_norm": 0.10320232063531876,
      "learning_rate": 0.00018400000000000003,
      "loss": 1.1309,
      "step": 184
    },
    {
      "epoch": 0.19,
      "grad_norm": 0.1450732797384262,
      "learning_rate": 0.00018500000000000002,
      "loss": 1.2696,
      "step": 185
    },
    {
      "epoch": 0.19,
      "grad_norm": 0.10568010061979294,
      "learning_rate": 0.00018600000000000002,
      "loss": 1.2643,
      "step": 186
    },
    {
      "epoch": 0.19,
      "grad_norm": 0.12027282267808914,
      "learning_rate": 0.00018700000000000002,
      "loss": 1.2455,
      "step": 187
    },
    {
      "epoch": 0.2,
      "grad_norm": 0.09627476334571838,
      "learning_rate": 0.000188,
      "loss": 1.1999,
      "step": 188
    },
    {
      "epoch": 0.2,
      "grad_norm": 0.1297677904367447,
      "learning_rate": 0.00018899999999999999,
      "loss": 1.0843,
      "step": 189
    },
    {
      "epoch": 0.2,
      "grad_norm": 0.1251845359802246,
      "learning_rate": 0.00019,
      "loss": 1.2957,
      "step": 190
    },
    {
      "epoch": 0.2,
      "grad_norm": 0.11355076730251312,
      "learning_rate": 0.000191,
      "loss": 1.2705,
      "step": 191
    },
    {
      "epoch": 0.2,
      "grad_norm": 0.11948814988136292,
      "learning_rate": 0.000192,
      "loss": 1.2352,
      "step": 192
    },
    {
      "epoch": 0.2,
      "grad_norm": 0.12006771564483643,
      "learning_rate": 0.000193,
      "loss": 1.2489,
      "step": 193
    },
    {
      "epoch": 0.2,
      "grad_norm": 0.11943700909614563,
      "learning_rate": 0.000194,
      "loss": 1.0768,
      "step": 194
    },
    {
      "epoch": 0.2,
      "grad_norm": 0.12115556001663208,
      "learning_rate": 0.000195,
      "loss": 1.2614,
      "step": 195
    },
    {
      "epoch": 0.2,
      "grad_norm": 0.1361999660730362,
      "learning_rate": 0.000196,
      "loss": 1.1418,
      "step": 196
    },
    {
      "epoch": 0.2,
      "grad_norm": 0.10292702168226242,
      "learning_rate": 0.00019700000000000002,
      "loss": 1.1649,
      "step": 197
    },
    {
      "epoch": 0.21,
      "grad_norm": 0.12569741904735565,
      "learning_rate": 0.00019800000000000002,
      "loss": 1.2054,
      "step": 198
    },
    {
      "epoch": 0.21,
      "grad_norm": 0.09684847295284271,
      "learning_rate": 0.000199,
      "loss": 1.0854,
      "step": 199
    },
    {
      "epoch": 0.21,
      "grad_norm": 0.10697442293167114,
      "learning_rate": 0.0002,
      "loss": 1.1895,
      "step": 200
    },
    {
      "epoch": 0.21,
      "grad_norm": 0.10017522424459457,
      "learning_rate": 0.00019999915234318063,
      "loss": 1.2775,
      "step": 201
    },
    {
      "epoch": 0.21,
      "grad_norm": 0.0991068184375763,
      "learning_rate": 0.0001999966093870929,
      "loss": 1.3151,
      "step": 202
    },
    {
      "epoch": 0.21,
      "grad_norm": 0.10074901580810547,
      "learning_rate": 0.00019999237117484796,
      "loss": 1.2666,
      "step": 203
    },
    {
      "epoch": 0.21,
      "grad_norm": 0.10549337416887283,
      "learning_rate": 0.00019998643777829672,
      "loss": 1.2483,
      "step": 204
    },
    {
      "epoch": 0.21,
      "grad_norm": 0.10364524275064468,
      "learning_rate": 0.00019997880929802894,
      "loss": 1.0562,
      "step": 205
    },
    {
      "epoch": 0.21,
      "grad_norm": 0.21587559580802917,
      "learning_rate": 0.00019996948586337125,
      "loss": 1.3881,
      "step": 206
    },
    {
      "epoch": 0.21,
      "grad_norm": 0.10044362396001816,
      "learning_rate": 0.00019995846763238512,
      "loss": 1.2114,
      "step": 207
    },
    {
      "epoch": 0.22,
      "grad_norm": 0.1037297397851944,
      "learning_rate": 0.0001999457547918641,
      "loss": 1.2611,
      "step": 208
    },
    {
      "epoch": 0.22,
      "grad_norm": 0.11380340903997421,
      "learning_rate": 0.00019993134755733074,
      "loss": 1.3762,
      "step": 209
    },
    {
      "epoch": 0.22,
      "grad_norm": 0.11318037658929825,
      "learning_rate": 0.0001999152461730328,
      "loss": 1.2906,
      "step": 210
    },
    {
      "epoch": 0.22,
      "grad_norm": 0.1097273975610733,
      "learning_rate": 0.0001998974509119393,
      "loss": 1.0855,
      "step": 211
    },
    {
      "epoch": 0.22,
      "grad_norm": 0.11358966678380966,
      "learning_rate": 0.00019987796207573573,
      "loss": 1.2061,
      "step": 212
    },
    {
      "epoch": 0.22,
      "grad_norm": 0.11139972507953644,
      "learning_rate": 0.00019985677999481894,
      "loss": 1.2453,
      "step": 213
    },
    {
      "epoch": 0.22,
      "grad_norm": 0.1275399923324585,
      "learning_rate": 0.00019983390502829166,
      "loss": 1.1917,
      "step": 214
    },
    {
      "epoch": 0.22,
      "grad_norm": 0.1333334743976593,
      "learning_rate": 0.00019980933756395634,
      "loss": 1.2102,
      "step": 215
    },
    {
      "epoch": 0.22,
      "grad_norm": 0.10892786085605621,
      "learning_rate": 0.00019978307801830854,
      "loss": 1.1177,
      "step": 216
    },
    {
      "epoch": 0.23,
      "grad_norm": 0.10664794594049454,
      "learning_rate": 0.00019975512683652986,
      "loss": 1.1484,
      "step": 217
    },
    {
      "epoch": 0.23,
      "grad_norm": 0.20084381103515625,
      "learning_rate": 0.0001997254844924806,
      "loss": 1.0399,
      "step": 218
    },
    {
      "epoch": 0.23,
      "grad_norm": 0.11178454756736755,
      "learning_rate": 0.0001996941514886914,
      "loss": 1.3401,
      "step": 219
    },
    {
      "epoch": 0.23,
      "grad_norm": 0.10734831541776657,
      "learning_rate": 0.00019966112835635493,
      "loss": 1.1964,
      "step": 220
    },
    {
      "epoch": 0.23,
      "grad_norm": 0.12020622938871384,
      "learning_rate": 0.00019962641565531692,
      "loss": 1.1535,
      "step": 221
    },
    {
      "epoch": 0.23,
      "grad_norm": 0.10945051908493042,
      "learning_rate": 0.00019959001397406646,
      "loss": 1.2086,
      "step": 222
    },
    {
      "epoch": 0.23,
      "grad_norm": 0.11263303458690643,
      "learning_rate": 0.00019955192392972628,
      "loss": 1.1488,
      "step": 223
    },
    {
      "epoch": 0.23,
      "grad_norm": 0.119475819170475,
      "learning_rate": 0.00019951214616804202,
      "loss": 1.3071,
      "step": 224
    },
    {
      "epoch": 0.23,
      "grad_norm": 0.10934551060199738,
      "learning_rate": 0.00019947068136337158,
      "loss": 1.2639,
      "step": 225
    },
    {
      "epoch": 0.23,
      "grad_norm": 0.11675988882780075,
      "learning_rate": 0.0001994275302186734,
      "loss": 1.142,
      "step": 226
    },
    {
      "epoch": 0.24,
      "grad_norm": 0.28856417536735535,
      "learning_rate": 0.00019938269346549475,
      "loss": 1.2437,
      "step": 227
    },
    {
      "epoch": 0.24,
      "grad_norm": 0.11922767013311386,
      "learning_rate": 0.00019933617186395917,
      "loss": 1.1668,
      "step": 228
    },
    {
      "epoch": 0.24,
      "grad_norm": 0.12038363516330719,
      "learning_rate": 0.00019928796620275377,
      "loss": 1.0465,
      "step": 229
    },
    {
      "epoch": 0.24,
      "grad_norm": 0.10866128653287888,
      "learning_rate": 0.00019923807729911567,
      "loss": 1.1417,
      "step": 230
    },
    {
      "epoch": 0.24,
      "grad_norm": 0.134980708360672,
      "learning_rate": 0.00019918650599881827,
      "loss": 1.3574,
      "step": 231
    },
    {
      "epoch": 0.24,
      "grad_norm": 0.11622115224599838,
      "learning_rate": 0.00019913325317615684,
      "loss": 1.2457,
      "step": 232
    },
    {
      "epoch": 0.24,
      "grad_norm": 0.11134631186723709,
      "learning_rate": 0.0001990783197339338,
      "loss": 1.2194,
      "step": 233
    },
    {
      "epoch": 0.24,
      "grad_norm": 0.11312945932149887,
      "learning_rate": 0.0001990217066034432,
      "loss": 1.179,
      "step": 234
    },
    {
      "epoch": 0.24,
      "grad_norm": 0.10070719569921494,
      "learning_rate": 0.00019896341474445525,
      "loss": 1.299,
      "step": 235
    },
    {
      "epoch": 0.24,
      "grad_norm": 0.11468905210494995,
      "learning_rate": 0.00019890344514519973,
      "loss": 1.2141,
      "step": 236
    },
    {
      "epoch": 0.25,
      "grad_norm": 0.12275680154561996,
      "learning_rate": 0.00019884179882234944,
      "loss": 1.2581,
      "step": 237
    },
    {
      "epoch": 0.25,
      "grad_norm": 0.1063394546508789,
      "learning_rate": 0.00019877847682100294,
      "loss": 1.3027,
      "step": 238
    },
    {
      "epoch": 0.25,
      "grad_norm": 0.10849407315254211,
      "learning_rate": 0.00019871348021466672,
      "loss": 1.1914,
      "step": 239
    },
    {
      "epoch": 0.25,
      "grad_norm": 0.1102900579571724,
      "learning_rate": 0.0001986468101052371,
      "loss": 1.2589,
      "step": 240
    },
    {
      "epoch": 0.25,
      "grad_norm": 0.11001666635274887,
      "learning_rate": 0.00019857846762298155,
      "loss": 1.141,
      "step": 241
    },
    {
      "epoch": 0.25,
      "grad_norm": 0.10907820612192154,
      "learning_rate": 0.0001985084539265195,
      "loss": 1.1139,
      "step": 242
    },
    {
      "epoch": 0.25,
      "grad_norm": 0.1067519262433052,
      "learning_rate": 0.0001984367702028027,
      "loss": 1.2389,
      "step": 243
    },
    {
      "epoch": 0.25,
      "grad_norm": 0.11710456013679504,
      "learning_rate": 0.0001983634176670951,
      "loss": 1.2421,
      "step": 244
    },
    {
      "epoch": 0.25,
      "grad_norm": 0.10886801034212112,
      "learning_rate": 0.00019828839756295223,
      "loss": 1.1696,
      "step": 245
    },
    {
      "epoch": 0.26,
      "grad_norm": 0.1404796689748764,
      "learning_rate": 0.00019821171116220012,
      "loss": 1.29,
      "step": 246
    },
    {
      "epoch": 0.26,
      "grad_norm": 0.1096196323633194,
      "learning_rate": 0.00019813335976491385,
      "loss": 1.3536,
      "step": 247
    },
    {
      "epoch": 0.26,
      "grad_norm": 0.11487864702939987,
      "learning_rate": 0.00019805334469939528,
      "loss": 1.1371,
      "step": 248
    },
    {
      "epoch": 0.26,
      "grad_norm": 0.1555541455745697,
      "learning_rate": 0.00019797166732215076,
      "loss": 1.1812,
      "step": 249
    },
    {
      "epoch": 0.26,
      "grad_norm": 0.10709488391876221,
      "learning_rate": 0.000197888329017868,
      "loss": 1.101,
      "step": 250
    },
    {
      "epoch": 0.26,
      "grad_norm": 0.10238959640264511,
      "learning_rate": 0.00019780333119939262,
      "loss": 1.2363,
      "step": 251
    },
    {
      "epoch": 0.26,
      "grad_norm": 0.12807561457157135,
      "learning_rate": 0.00019771667530770425,
      "loss": 1.217,
      "step": 252
    },
    {
      "epoch": 0.26,
      "grad_norm": 0.10418405383825302,
      "learning_rate": 0.00019762836281189206,
      "loss": 1.36,
      "step": 253
    },
    {
      "epoch": 0.26,
      "grad_norm": 0.10400766879320145,
      "learning_rate": 0.00019753839520912983,
      "loss": 1.2368,
      "step": 254
    },
    {
      "epoch": 0.26,
      "grad_norm": 0.1629195362329483,
      "learning_rate": 0.00019744677402465052,
      "loss": 1.2236,
      "step": 255
    },
    {
      "epoch": 0.27,
      "grad_norm": 0.11335594952106476,
      "learning_rate": 0.00019735350081172067,
      "loss": 1.2062,
      "step": 256
    },
    {
      "epoch": 0.27,
      "grad_norm": 0.18272048234939575,
      "learning_rate": 0.00019725857715161373,
      "loss": 1.3138,
      "step": 257
    },
    {
      "epoch": 0.27,
      "grad_norm": 0.10922593623399734,
      "learning_rate": 0.0001971620046535835,
      "loss": 1.1697,
      "step": 258
    },
    {
      "epoch": 0.27,
      "grad_norm": 0.10762567073106766,
      "learning_rate": 0.00019706378495483665,
      "loss": 1.1303,
      "step": 259
    },
    {
      "epoch": 0.27,
      "grad_norm": 0.11802787333726883,
      "learning_rate": 0.00019696391972050514,
      "loss": 1.2909,
      "step": 260
    },
    {
      "epoch": 0.27,
      "grad_norm": 0.11715908348560333,
      "learning_rate": 0.00019686241064361792,
      "loss": 1.1456,
      "step": 261
    },
    {
      "epoch": 0.27,
      "grad_norm": 0.11491627991199493,
      "learning_rate": 0.00019675925944507224,
      "loss": 1.2083,
      "step": 262
    },
    {
      "epoch": 0.27,
      "grad_norm": 0.11131855100393295,
      "learning_rate": 0.0001966544678736044,
      "loss": 1.2579,
      "step": 263
    },
    {
      "epoch": 0.27,
      "grad_norm": 0.12063883244991302,
      "learning_rate": 0.00019654803770576026,
      "loss": 1.3887,
      "step": 264
    },
    {
      "epoch": 0.28,
      "grad_norm": 0.14915111660957336,
      "learning_rate": 0.0001964399707458649,
      "loss": 1.2351,
      "step": 265
    },
    {
      "epoch": 0.28,
      "grad_norm": 0.11454611271619797,
      "learning_rate": 0.00019633026882599227,
      "loss": 1.3325,
      "step": 266
    },
    {
      "epoch": 0.28,
      "grad_norm": 0.10960078984498978,
      "learning_rate": 0.00019621893380593396,
      "loss": 1.3,
      "step": 267
    },
    {
      "epoch": 0.28,
      "grad_norm": 0.11405951529741287,
      "learning_rate": 0.0001961059675731678,
      "loss": 1.0641,
      "step": 268
    },
    {
      "epoch": 0.28,
      "grad_norm": 0.10570972412824631,
      "learning_rate": 0.00019599137204282565,
      "loss": 1.1543,
      "step": 269
    },
    {
      "epoch": 0.28,
      "grad_norm": 0.11385346204042435,
      "learning_rate": 0.00019587514915766124,
      "loss": 1.1546,
      "step": 270
    },
    {
      "epoch": 0.28,
      "grad_norm": 0.10122133791446686,
      "learning_rate": 0.00019575730088801697,
      "loss": 1.1946,
      "step": 271
    },
    {
      "epoch": 0.28,
      "grad_norm": 0.14278005063533783,
      "learning_rate": 0.00019563782923179063,
      "loss": 1.1853,
      "step": 272
    },
    {
      "epoch": 0.28,
      "grad_norm": 0.11730815470218658,
      "learning_rate": 0.00019551673621440147,
      "loss": 1.4478,
      "step": 273
    },
    {
      "epoch": 0.28,
      "grad_norm": 0.12385374307632446,
      "learning_rate": 0.00019539402388875598,
      "loss": 1.4392,
      "step": 274
    },
    {
      "epoch": 0.29,
      "grad_norm": 0.10802686959505081,
      "learning_rate": 0.00019526969433521297,
      "loss": 1.3126,
      "step": 275
    },
    {
      "epoch": 0.29,
      "grad_norm": 0.12256834656000137,
      "learning_rate": 0.00019514374966154825,
      "loss": 1.1855,
      "step": 276
    },
    {
      "epoch": 0.29,
      "grad_norm": 0.10675772279500961,
      "learning_rate": 0.00019501619200291908,
      "loss": 1.2578,
      "step": 277
    },
    {
      "epoch": 0.29,
      "grad_norm": 0.1070014238357544,
      "learning_rate": 0.00019488702352182786,
      "loss": 1.1426,
      "step": 278
    },
    {
      "epoch": 0.29,
      "grad_norm": 0.11141553521156311,
      "learning_rate": 0.00019475624640808543,
      "loss": 1.1046,
      "step": 279
    },
    {
      "epoch": 0.29,
      "grad_norm": 0.10741408169269562,
      "learning_rate": 0.00019462386287877407,
      "loss": 1.1991,
      "step": 280
    },
    {
      "epoch": 0.29,
      "grad_norm": 0.11526710540056229,
      "learning_rate": 0.00019448987517820982,
      "loss": 1.3507,
      "step": 281
    },
    {
      "epoch": 0.29,
      "grad_norm": 0.1025758609175682,
      "learning_rate": 0.0001943542855779044,
      "loss": 1.3,
      "step": 282
    },
    {
      "epoch": 0.29,
      "grad_norm": 0.11231940984725952,
      "learning_rate": 0.0001942170963765268,
      "loss": 1.2408,
      "step": 283
    },
    {
      "epoch": 0.29,
      "grad_norm": 0.1058710590004921,
      "learning_rate": 0.0001940783098998643,
      "loss": 1.1668,
      "step": 284
    },
    {
      "epoch": 0.3,
      "grad_norm": 0.1024676039814949,
      "learning_rate": 0.00019393792850078292,
      "loss": 1.2192,
      "step": 285
    },
    {
      "epoch": 0.3,
      "grad_norm": 0.10104867815971375,
      "learning_rate": 0.00019379595455918774,
      "loss": 1.2881,
      "step": 286
    },
    {
      "epoch": 0.3,
      "grad_norm": 0.1211661621928215,
      "learning_rate": 0.00019365239048198225,
      "loss": 1.3614,
      "step": 287
    },
    {
      "epoch": 0.3,
      "grad_norm": 0.1058136522769928,
      "learning_rate": 0.0001935072387030279,
      "loss": 1.2043,
      "step": 288
    },
    {
      "epoch": 0.3,
      "grad_norm": 0.3414977490901947,
      "learning_rate": 0.0001933605016831026,
      "loss": 1.1638,
      "step": 289
    },
    {
      "epoch": 0.3,
      "grad_norm": 0.09908904135227203,
      "learning_rate": 0.00019321218190985904,
      "loss": 1.0565,
      "step": 290
    },
    {
      "epoch": 0.3,
      "grad_norm": 0.11039788275957108,
      "learning_rate": 0.00019306228189778254,
      "loss": 1.1324,
      "step": 291
    },
    {
      "epoch": 0.3,
      "grad_norm": 0.1092277467250824,
      "learning_rate": 0.0001929108041881485,
      "loss": 1.0857,
      "step": 292
    },
    {
      "epoch": 0.3,
      "grad_norm": 0.11304416507482529,
      "learning_rate": 0.00019275775134897918,
      "loss": 1.3365,
      "step": 293
    },
    {
      "epoch": 0.31,
      "grad_norm": 0.1239667758345604,
      "learning_rate": 0.00019260312597500022,
      "loss": 1.2251,
      "step": 294
    },
    {
      "epoch": 0.31,
      "grad_norm": 0.1069445088505745,
      "learning_rate": 0.00019244693068759666,
      "loss": 1.1466,
      "step": 295
    },
    {
      "epoch": 0.31,
      "grad_norm": 0.11692642420530319,
      "learning_rate": 0.00019228916813476853,
      "loss": 0.947,
      "step": 296
    },
    {
      "epoch": 0.31,
      "grad_norm": 0.11571913212537766,
      "learning_rate": 0.00019212984099108594,
      "loss": 1.1865,
      "step": 297
    },
    {
      "epoch": 0.31,
      "grad_norm": 0.14679546654224396,
      "learning_rate": 0.00019196895195764362,
      "loss": 1.2747,
      "step": 298
    },
    {
      "epoch": 0.31,
      "grad_norm": 0.11377564072608948,
      "learning_rate": 0.00019180650376201535,
      "loss": 1.2149,
      "step": 299
    },
    {
      "epoch": 0.31,
      "grad_norm": 0.14246807992458344,
      "learning_rate": 0.0001916424991582075,
      "loss": 1.3234,
      "step": 300
    },
    {
      "epoch": 0.31,
      "grad_norm": 0.11343812942504883,
      "learning_rate": 0.00019147694092661255,
      "loss": 1.2125,
      "step": 301
    },
    {
      "epoch": 0.31,
      "grad_norm": 0.49394461512565613,
      "learning_rate": 0.00019130983187396172,
      "loss": 1.2545,
      "step": 302
    },
    {
      "epoch": 0.31,
      "grad_norm": 0.12038710713386536,
      "learning_rate": 0.0001911411748332776,
      "loss": 1.2441,
      "step": 303
    },
    {
      "epoch": 0.32,
      "grad_norm": 0.112415611743927,
      "learning_rate": 0.00019097097266382597,
      "loss": 1.1986,
      "step": 304
    },
    {
      "epoch": 0.32,
      "grad_norm": 0.1328456848859787,
      "learning_rate": 0.0001907992282510675,
      "loss": 1.0427,
      "step": 305
    },
    {
      "epoch": 0.32,
      "grad_norm": 0.12180749326944351,
      "learning_rate": 0.00019062594450660857,
      "loss": 1.2805,
      "step": 306
    },
    {
      "epoch": 0.32,
      "grad_norm": 0.16928140819072723,
      "learning_rate": 0.00019045112436815215,
      "loss": 1.1842,
      "step": 307
    },
    {
      "epoch": 0.32,
      "grad_norm": 0.15169909596443176,
      "learning_rate": 0.00019027477079944788,
      "loss": 1.1867,
      "step": 308
    },
    {
      "epoch": 0.32,
      "grad_norm": 0.11884856224060059,
      "learning_rate": 0.0001900968867902419,
      "loss": 1.2883,
      "step": 309
    },
    {
      "epoch": 0.32,
      "grad_norm": 0.10564276576042175,
      "learning_rate": 0.0001899174753562261,
      "loss": 1.264,
      "step": 310
    },
    {
      "epoch": 0.32,
      "grad_norm": 0.12126502394676208,
      "learning_rate": 0.0001897365395389869,
      "loss": 1.2866,
      "step": 311
    },
    {
      "epoch": 0.32,
      "grad_norm": 0.11135692894458771,
      "learning_rate": 0.00018955408240595395,
      "loss": 1.3224,
      "step": 312
    },
    {
      "epoch": 0.32,
      "grad_norm": 0.11942031234502792,
      "learning_rate": 0.00018937010705034787,
      "loss": 1.2606,
      "step": 313
    },
    {
      "epoch": 0.33,
      "grad_norm": 0.11544723808765411,
      "learning_rate": 0.00018918461659112802,
      "loss": 1.2025,
      "step": 314
    },
    {
      "epoch": 0.33,
      "grad_norm": 0.12025148421525955,
      "learning_rate": 0.00018899761417293941,
      "loss": 1.4092,
      "step": 315
    },
    {
      "epoch": 0.33,
      "grad_norm": 0.126939594745636,
      "learning_rate": 0.00018880910296605955,
      "loss": 1.1811,
      "step": 316
    },
    {
      "epoch": 0.33,
      "grad_norm": 0.29961127042770386,
      "learning_rate": 0.00018861908616634465,
      "loss": 1.1998,
      "step": 317
    },
    {
      "epoch": 0.33,
      "grad_norm": 0.12669076025485992,
      "learning_rate": 0.00018842756699517538,
      "loss": 1.1652,
      "step": 318
    },
    {
      "epoch": 0.33,
      "grad_norm": 0.12721942365169525,
      "learning_rate": 0.00018823454869940242,
      "loss": 1.2145,
      "step": 319
    },
    {
      "epoch": 0.33,
      "grad_norm": 0.1199335977435112,
      "learning_rate": 0.0001880400345512913,
      "loss": 1.2953,
      "step": 320
    },
    {
      "epoch": 0.33,
      "grad_norm": 0.13686604797840118,
      "learning_rate": 0.00018784402784846683,
      "loss": 1.2455,
      "step": 321
    },
    {
      "epoch": 0.33,
      "grad_norm": 0.1259993016719818,
      "learning_rate": 0.00018764653191385736,
      "loss": 0.9901,
      "step": 322
    },
    {
      "epoch": 0.34,
      "grad_norm": 0.13265901803970337,
      "learning_rate": 0.00018744755009563848,
      "loss": 1.2294,
      "step": 323
    },
    {
      "epoch": 0.34,
      "grad_norm": 0.11366987973451614,
      "learning_rate": 0.00018724708576717605,
      "loss": 1.3416,
      "step": 324
    },
    {
      "epoch": 0.34,
      "grad_norm": 0.11126185208559036,
      "learning_rate": 0.0001870451423269692,
      "loss": 1.1254,
      "step": 325
    },
    {
      "epoch": 0.34,
      "grad_norm": 0.11551456153392792,
      "learning_rate": 0.0001868417231985926,
      "loss": 1.2113,
      "step": 326
    },
    {
      "epoch": 0.34,
      "grad_norm": 0.11155180633068085,
      "learning_rate": 0.00018663683183063845,
      "loss": 1.1587,
      "step": 327
    },
    {
      "epoch": 0.34,
      "grad_norm": 0.1066342443227768,
      "learning_rate": 0.0001864304716966581,
      "loss": 1.3596,
      "step": 328
    },
    {
      "epoch": 0.34,
      "grad_norm": 0.11637596786022186,
      "learning_rate": 0.00018622264629510299,
      "loss": 1.2117,
      "step": 329
    },
    {
      "epoch": 0.34,
      "grad_norm": 0.10947410017251968,
      "learning_rate": 0.00018601335914926557,
      "loss": 0.9963,
      "step": 330
    },
    {
      "epoch": 0.34,
      "grad_norm": 0.1826743334531784,
      "learning_rate": 0.0001858026138072193,
      "loss": 1.1548,
      "step": 331
    },
    {
      "epoch": 0.34,
      "grad_norm": 0.12238079309463501,
      "learning_rate": 0.00018559041384175876,
      "loss": 1.0507,
      "step": 332
    },
    {
      "epoch": 0.35,
      "grad_norm": 0.1196766272187233,
      "learning_rate": 0.00018537676285033887,
      "loss": 1.1443,
      "step": 333
    },
    {
      "epoch": 0.35,
      "grad_norm": 0.1195516511797905,
      "learning_rate": 0.00018516166445501405,
      "loss": 1.083,
      "step": 334
    },
    {
      "epoch": 0.35,
      "grad_norm": 0.111576147377491,
      "learning_rate": 0.0001849451223023767,
      "loss": 1.192,
      "step": 335
    },
    {
      "epoch": 0.35,
      "grad_norm": 0.12735474109649658,
      "learning_rate": 0.0001847271400634955,
      "loss": 1.2692,
      "step": 336
    },
    {
      "epoch": 0.35,
      "grad_norm": 0.10551782697439194,
      "learning_rate": 0.00018450772143385307,
      "loss": 1.2476,
      "step": 337
    },
    {
      "epoch": 0.35,
      "grad_norm": 0.11405996978282928,
      "learning_rate": 0.00018428687013328336,
      "loss": 1.1888,
      "step": 338
    },
    {
      "epoch": 0.35,
      "grad_norm": 0.13341975212097168,
      "learning_rate": 0.00018406458990590858,
      "loss": 1.2129,
      "step": 339
    },
    {
      "epoch": 0.35,
      "grad_norm": 0.13987426459789276,
      "learning_rate": 0.00018384088452007578,
      "loss": 1.2114,
      "step": 340
    },
    {
      "epoch": 0.35,
      "grad_norm": 0.1929798275232315,
      "learning_rate": 0.0001836157577682928,
      "loss": 1.1411,
      "step": 341
    },
    {
      "epoch": 0.35,
      "grad_norm": 0.11936288326978683,
      "learning_rate": 0.00018338921346716425,
      "loss": 1.2696,
      "step": 342
    },
    {
      "epoch": 0.36,
      "grad_norm": 0.10896290838718414,
      "learning_rate": 0.0001831612554573265,
      "loss": 1.2438,
      "step": 343
    },
    {
      "epoch": 0.36,
      "grad_norm": 0.11335362493991852,
      "learning_rate": 0.00018293188760338286,
      "loss": 1.0288,
      "step": 344
    },
    {
      "epoch": 0.36,
      "grad_norm": 0.11729809641838074,
      "learning_rate": 0.00018270111379383775,
      "loss": 1.1118,
      "step": 345
    },
    {
      "epoch": 0.36,
      "grad_norm": 0.11622367799282074,
      "learning_rate": 0.0001824689379410311,
      "loss": 1.1515,
      "step": 346
    },
    {
      "epoch": 0.36,
      "grad_norm": 0.11727786809206009,
      "learning_rate": 0.00018223536398107176,
      "loss": 1.2468,
      "step": 347
    },
    {
      "epoch": 0.36,
      "grad_norm": 0.11702138185501099,
      "learning_rate": 0.00018200039587377097,
      "loss": 1.2647,
      "step": 348
    },
    {
      "epoch": 0.36,
      "grad_norm": 0.12109760195016861,
      "learning_rate": 0.0001817640376025751,
      "loss": 1.2282,
      "step": 349
    },
    {
      "epoch": 0.36,
      "grad_norm": 0.11537209153175354,
      "learning_rate": 0.00018152629317449813,
      "loss": 1.178,
      "step": 350
    },
    {
      "epoch": 0.36,
      "grad_norm": 0.1228351965546608,
      "learning_rate": 0.00018128716662005383,
      "loss": 1.2099,
      "step": 351
    },
    {
      "epoch": 0.37,
      "grad_norm": 0.15511314570903778,
      "learning_rate": 0.0001810466619931872,
      "loss": 1.2881,
      "step": 352
    },
    {
      "epoch": 0.37,
      "grad_norm": 0.1076040267944336,
      "learning_rate": 0.00018080478337120605,
      "loss": 1.264,
      "step": 353
    },
    {
      "epoch": 0.37,
      "grad_norm": 0.14106139540672302,
      "learning_rate": 0.00018056153485471166,
      "loss": 1.2821,
      "step": 354
    },
    {
      "epoch": 0.37,
      "grad_norm": 0.11988777667284012,
      "learning_rate": 0.00018031692056752924,
      "loss": 1.2771,
      "step": 355
    },
    {
      "epoch": 0.37,
      "grad_norm": 0.10703491419553757,
      "learning_rate": 0.0001800709446566382,
      "loss": 1.2525,
      "step": 356
    },
    {
      "epoch": 0.37,
      "grad_norm": 0.11741764843463898,
      "learning_rate": 0.00017982361129210172,
      "loss": 1.2151,
      "step": 357
    },
    {
      "epoch": 0.37,
      "grad_norm": 0.19867832958698273,
      "learning_rate": 0.00017957492466699605,
      "loss": 1.2559,
      "step": 358
    },
    {
      "epoch": 0.37,
      "grad_norm": 0.12436340004205704,
      "learning_rate": 0.00017932488899733947,
      "loss": 1.3145,
      "step": 359
    },
    {
      "epoch": 0.37,
      "grad_norm": 0.12323563545942307,
      "learning_rate": 0.00017907350852202077,
      "loss": 1.2145,
      "step": 360
    },
    {
      "epoch": 0.37,
      "grad_norm": 0.126682311296463,
      "learning_rate": 0.00017882078750272742,
      "loss": 1.2805,
      "step": 361
    },
    {
      "epoch": 0.38,
      "grad_norm": 0.10707955062389374,
      "learning_rate": 0.00017856673022387338,
      "loss": 1.1415,
      "step": 362
    },
    {
      "epoch": 0.38,
      "grad_norm": 0.1281125843524933,
      "learning_rate": 0.0001783113409925263,
      "loss": 1.3117,
      "step": 363
    },
    {
      "epoch": 0.38,
      "grad_norm": 0.11295267939567566,
      "learning_rate": 0.00017805462413833468,
      "loss": 1.1453,
      "step": 364
    },
    {
      "epoch": 0.38,
      "grad_norm": 0.12421952188014984,
      "learning_rate": 0.00017779658401345438,
      "loss": 1.2293,
      "step": 365
    },
    {
      "epoch": 0.38,
      "grad_norm": 0.11468549817800522,
      "learning_rate": 0.00017753722499247478,
      "loss": 1.1391,
      "step": 366
    },
    {
      "epoch": 0.38,
      "grad_norm": 0.12964022159576416,
      "learning_rate": 0.00017727655147234476,
      "loss": 1.3975,
      "step": 367
    },
    {
      "epoch": 0.38,
      "grad_norm": 0.11601359397172928,
      "learning_rate": 0.00017701456787229804,
      "loss": 1.2622,
      "step": 368
    },
    {
      "epoch": 0.38,
      "grad_norm": 0.12835174798965454,
      "learning_rate": 0.00017675127863377838,
      "loss": 1.1703,
      "step": 369
    },
    {
      "epoch": 0.38,
      "grad_norm": 0.11938933283090591,
      "learning_rate": 0.00017648668822036408,
      "loss": 1.2702,
      "step": 370
    },
    {
      "epoch": 0.39,
      "grad_norm": 0.12092038989067078,
      "learning_rate": 0.00017622080111769257,
      "loss": 1.3653,
      "step": 371
    },
    {
      "epoch": 0.39,
      "grad_norm": 0.1410731077194214,
      "learning_rate": 0.00017595362183338412,
      "loss": 1.2465,
      "step": 372
    },
    {
      "epoch": 0.39,
      "grad_norm": 0.11769446730613708,
      "learning_rate": 0.00017568515489696558,
      "loss": 1.235,
      "step": 373
    },
    {
      "epoch": 0.39,
      "grad_norm": 0.10854797065258026,
      "learning_rate": 0.00017541540485979359,
      "loss": 1.1758,
      "step": 374
    },
    {
      "epoch": 0.39,
      "grad_norm": 0.10094323009252548,
      "learning_rate": 0.0001751443762949772,
      "loss": 1.0939,
      "step": 375
    },
    {
      "epoch": 0.39,
      "grad_norm": 0.10836099833250046,
      "learning_rate": 0.00017487207379730078,
      "loss": 1.1618,
      "step": 376
    },
    {
      "epoch": 0.39,
      "grad_norm": 0.1138240173459053,
      "learning_rate": 0.00017459850198314563,
      "loss": 1.1423,
      "step": 377
    },
    {
      "epoch": 0.39,
      "grad_norm": 0.11237498372793198,
      "learning_rate": 0.000174323665490412,
      "loss": 1.0702,
      "step": 378
    },
    {
      "epoch": 0.39,
      "grad_norm": 0.1078115925192833,
      "learning_rate": 0.00017404756897844052,
      "loss": 1.2523,
      "step": 379
    },
    {
      "epoch": 0.39,
      "grad_norm": 0.11401957273483276,
      "learning_rate": 0.00017377021712793297,
      "loss": 1.2443,
      "step": 380
    },
    {
      "epoch": 0.4,
      "grad_norm": 0.1109926849603653,
      "learning_rate": 0.00017349161464087312,
      "loss": 1.264,
      "step": 381
    },
    {
      "epoch": 0.4,
      "grad_norm": 0.10931116342544556,
      "learning_rate": 0.00017321176624044687,
      "loss": 1.294,
      "step": 382
    },
    {
      "epoch": 0.4,
      "grad_norm": 0.10655161738395691,
      "learning_rate": 0.00017293067667096239,
      "loss": 1.2629,
      "step": 383
    },
    {
      "epoch": 0.4,
      "grad_norm": 0.11112368851900101,
      "learning_rate": 0.00017264835069776945,
      "loss": 1.25,
      "step": 384
    },
    {
      "epoch": 0.4,
      "grad_norm": 0.1312362551689148,
      "learning_rate": 0.0001723647931071788,
      "loss": 1.1807,
      "step": 385
    },
    {
      "epoch": 0.4,
      "grad_norm": 0.1174694374203682,
      "learning_rate": 0.00017208000870638093,
      "loss": 1.1719,
      "step": 386
    },
    {
      "epoch": 0.4,
      "grad_norm": 0.11656120419502258,
      "learning_rate": 0.00017179400232336463,
      "loss": 1.4266,
      "step": 387
    },
    {
      "epoch": 0.4,
      "grad_norm": 0.11490406095981598,
      "learning_rate": 0.00017150677880683514,
      "loss": 1.3041,
      "step": 388
    },
    {
      "epoch": 0.4,
      "grad_norm": 0.1138778105378151,
      "learning_rate": 0.00017121834302613186,
      "loss": 1.3364,
      "step": 389
    },
    {
      "epoch": 0.4,
      "grad_norm": 0.11219751089811325,
      "learning_rate": 0.000170928699871146,
      "loss": 1.3409,
      "step": 390
    },
    {
      "epoch": 0.41,
      "grad_norm": 0.10369167476892471,
      "learning_rate": 0.0001706378542522374,
      "loss": 1.1775,
      "step": 391
    },
    {
      "epoch": 0.41,
      "grad_norm": 0.11245618760585785,
      "learning_rate": 0.00017034581110015156,
      "loss": 1.1841,
      "step": 392
    },
    {
      "epoch": 0.41,
      "grad_norm": 0.10895467549562454,
      "learning_rate": 0.00017005257536593577,
      "loss": 1.1855,
      "step": 393
    },
    {
      "epoch": 0.41,
      "grad_norm": 0.10433463007211685,
      "learning_rate": 0.00016975815202085554,
      "loss": 1.2853,
      "step": 394
    },
    {
      "epoch": 0.41,
      "grad_norm": 0.10486618429422379,
      "learning_rate": 0.00016946254605630992,
      "loss": 1.2029,
      "step": 395
    },
    {
      "epoch": 0.41,
      "grad_norm": 0.12566353380680084,
      "learning_rate": 0.00016916576248374719,
      "loss": 1.187,
      "step": 396
    },
    {
      "epoch": 0.41,
      "grad_norm": 0.11196866631507874,
      "learning_rate": 0.00016886780633457973,
      "loss": 1.1956,
      "step": 397
    },
    {
      "epoch": 0.41,
      "grad_norm": 0.12952035665512085,
      "learning_rate": 0.00016856868266009874,
      "loss": 1.24,
      "step": 398
    },
    {
      "epoch": 0.41,
      "grad_norm": 0.1032567098736763,
      "learning_rate": 0.0001682683965313887,
      "loss": 1.207,
      "step": 399
    },
    {
      "epoch": 0.42,
      "grad_norm": 0.10673535615205765,
      "learning_rate": 0.00016796695303924127,
      "loss": 1.23,
      "step": 400
    },
    {
      "epoch": 0.42,
      "grad_norm": 0.1103532612323761,
      "learning_rate": 0.00016766435729406913,
      "loss": 1.2994,
      "step": 401
    },
    {
      "epoch": 0.42,
      "grad_norm": 0.11205995827913284,
      "learning_rate": 0.0001673606144258192,
      "loss": 1.1168,
      "step": 402
    },
    {
      "epoch": 0.42,
      "grad_norm": 0.11950290203094482,
      "learning_rate": 0.00016705572958388576,
      "loss": 1.2152,
      "step": 403
    },
    {
      "epoch": 0.42,
      "grad_norm": 0.17785818874835968,
      "learning_rate": 0.0001667497079370231,
      "loss": 1.2523,
      "step": 404
    },
    {
      "epoch": 0.42,
      "grad_norm": 0.10789604485034943,
      "learning_rate": 0.00016644255467325794,
      "loss": 1.289,
      "step": 405
    },
    {
      "epoch": 0.42,
      "grad_norm": 0.12129693478345871,
      "learning_rate": 0.00016613427499980143,
      "loss": 1.257,
      "step": 406
    },
    {
      "epoch": 0.42,
      "grad_norm": 0.11083897203207016,
      "learning_rate": 0.00016582487414296097,
      "loss": 1.3349,
      "step": 407
    },
    {
      "epoch": 0.42,
      "grad_norm": 0.09888826310634613,
      "learning_rate": 0.0001655143573480515,
      "loss": 1.1737,
      "step": 408
    },
    {
      "epoch": 0.42,
      "grad_norm": 0.11880184710025787,
      "learning_rate": 0.00016520272987930652,
      "loss": 1.0338,
      "step": 409
    },
    {
      "epoch": 0.43,
      "grad_norm": 0.11628952622413635,
      "learning_rate": 0.00016488999701978903,
      "loss": 1.2898,
      "step": 410
    },
    {
      "epoch": 0.43,
      "grad_norm": 0.113735631108284,
      "learning_rate": 0.0001645761640713019,
      "loss": 1.1826,
      "step": 411
    },
    {
      "epoch": 0.43,
      "grad_norm": 0.10678713023662567,
      "learning_rate": 0.00016426123635429786,
      "loss": 1.2231,
      "step": 412
    },
    {
      "epoch": 0.43,
      "grad_norm": 0.11631446331739426,
      "learning_rate": 0.00016394521920778948,
      "loss": 1.0241,
      "step": 413
    },
    {
      "epoch": 0.43,
      "grad_norm": 0.11213452368974686,
      "learning_rate": 0.00016362811798925853,
|
"loss": 1.4935, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.1304715871810913, |
|
"learning_rate": 0.0001633099380745652, |
|
"loss": 1.1686, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.10429395735263824, |
|
"learning_rate": 0.000162990684857857, |
|
"loss": 1.0118, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.10091353952884674, |
|
"learning_rate": 0.00016267036375147725, |
|
"loss": 1.1401, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.10777878761291504, |
|
"learning_rate": 0.00016234898018587337, |
|
"loss": 1.33, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.11149658262729645, |
|
"learning_rate": 0.00016202653960950474, |
|
"loss": 1.2201, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.10549131035804749, |
|
"learning_rate": 0.0001617030474887505, |
|
"loss": 1.0758, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.12649282813072205, |
|
"learning_rate": 0.0001613785093078166, |
|
"loss": 1.2241, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.11720848083496094, |
|
"learning_rate": 0.00016105293056864315, |
|
"loss": 1.3001, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.12630730867385864, |
|
"learning_rate": 0.0001607263167908109, |
|
"loss": 0.986, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.11931440979242325, |
|
"learning_rate": 0.00016039867351144778, |
|
"loss": 1.2037, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.1198689192533493, |
|
"learning_rate": 0.000160070006285135, |
|
"loss": 1.2556, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.11019983142614365, |
|
"learning_rate": 0.0001597403206838128, |
|
"loss": 1.004, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.11280027031898499, |
|
"learning_rate": 0.00015940962229668625, |
|
"loss": 1.3164, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.12924431264400482, |
|
"learning_rate": 0.00015907791673013016, |
|
"loss": 1.1612, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.1206149235367775, |
|
"learning_rate": 0.00015874520960759423, |
|
"loss": 1.2171, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.12173935025930405, |
|
"learning_rate": 0.0001584115065695077, |
|
"loss": 1.2215, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.126984640955925, |
|
"learning_rate": 0.0001580768132731837, |
|
"loss": 1.2379, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.12142551690340042, |
|
"learning_rate": 0.0001577411353927233, |
|
"loss": 1.1171, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.11669143289327621, |
|
"learning_rate": 0.00015740447861891944, |
|
"loss": 1.1148, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.12235064059495926, |
|
"learning_rate": 0.00015706684865916026, |
|
"loss": 1.2705, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.11695750802755356, |
|
"learning_rate": 0.00015672825123733258, |
|
"loss": 1.1602, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.10883978754281998, |
|
"learning_rate": 0.00015638869209372457, |
|
"loss": 1.3629, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.09961367398500443, |
|
"learning_rate": 0.00015604817698492885, |
|
"loss": 1.1844, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.10849742591381073, |
|
"learning_rate": 0.00015570671168374438, |
|
"loss": 1.2453, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.13158155977725983, |
|
"learning_rate": 0.00015536430197907904, |
|
"loss": 1.0024, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.11819962412118912, |
|
"learning_rate": 0.00015502095367585122, |
|
"loss": 1.2432, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.10652721673250198, |
|
"learning_rate": 0.0001546766725948916, |
|
"loss": 1.2042, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.10861877351999283, |
|
"learning_rate": 0.0001543314645728442, |
|
"loss": 1.4347, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.11992961913347244, |
|
"learning_rate": 0.00015398533546206778, |
|
"loss": 1.2897, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.11120472103357315, |
|
"learning_rate": 0.00015363829113053634, |
|
"loss": 1.1729, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.11962945014238358, |
|
"learning_rate": 0.00015329033746173975, |
|
"loss": 1.0863, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.10950727015733719, |
|
"learning_rate": 0.00015294148035458404, |
|
"loss": 1.2127, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.11483786255121231, |
|
"learning_rate": 0.0001525917257232913, |
|
"loss": 1.1465, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.11853177845478058, |
|
"learning_rate": 0.0001522410794972995, |
|
"loss": 1.2527, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.10667990148067474, |
|
"learning_rate": 0.00015188954762116195, |
|
"loss": 1.2347, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.1277850717306137, |
|
"learning_rate": 0.0001515371360544465, |
|
"loss": 1.2875, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.10839621722698212, |
|
"learning_rate": 0.00015118385077163445, |
|
"loss": 1.3415, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.11405637860298157, |
|
"learning_rate": 0.00015082969776201947, |
|
"loss": 1.1112, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.1116739809513092, |
|
"learning_rate": 0.00015047468302960577, |
|
"loss": 1.2876, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.10504814237356186, |
|
"learning_rate": 0.00015011881259300653, |
|
"loss": 1.2027, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.10286784917116165, |
|
"learning_rate": 0.00014976209248534182, |
|
"loss": 1.1372, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.1141451969742775, |
|
"learning_rate": 0.00014940452875413627, |
|
"loss": 1.0566, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.12613342702388763, |
|
"learning_rate": 0.00014904612746121659, |
|
"loss": 0.9762, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.11521808058023453, |
|
"learning_rate": 0.00014868689468260874, |
|
"loss": 1.144, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.10090262442827225, |
|
"learning_rate": 0.0001483268365084351, |
|
"loss": 1.0197, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.11367885768413544, |
|
"learning_rate": 0.0001479659590428109, |
|
"loss": 1.3636, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.11813089996576309, |
|
"learning_rate": 0.0001476042684037411, |
|
"loss": 1.314, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.10709206759929657, |
|
"learning_rate": 0.0001472417707230164, |
|
"loss": 1.2899, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.11429055780172348, |
|
"learning_rate": 0.00014687847214610943, |
|
"loss": 1.2237, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.10949011892080307, |
|
"learning_rate": 0.00014651437883207054, |
|
"loss": 1.3596, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.1138731986284256, |
|
"learning_rate": 0.00014614949695342336, |
|
"loss": 1.0538, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.10718726366758347, |
|
"learning_rate": 0.00014578383269606005, |
|
"loss": 1.2639, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.10555477440357208, |
|
"learning_rate": 0.0001454173922591367, |
|
"loss": 1.2917, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.10175728797912598, |
|
"learning_rate": 0.00014505018185496802, |
|
"loss": 1.111, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.113253653049469, |
|
"learning_rate": 0.00014468220770892206, |
|
"loss": 1.0426, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.11308235675096512, |
|
"learning_rate": 0.0001443134760593147, |
|
"loss": 1.2584, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.10383269935846329, |
|
"learning_rate": 0.0001439439931573039, |
|
"loss": 1.0795, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.10998781770467758, |
|
"learning_rate": 0.00014357376526678367, |
|
"loss": 1.2224, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.12233158946037292, |
|
"learning_rate": 0.00014320279866427796, |
|
"loss": 1.3125, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.11626295000314713, |
|
"learning_rate": 0.00014283109963883419, |
|
"loss": 1.3173, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.12676700949668884, |
|
"learning_rate": 0.0001424586744919166, |
|
"loss": 1.3502, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.11780429631471634, |
|
"learning_rate": 0.00014208552953729947, |
|
"loss": 1.3161, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.11618795245885849, |
|
"learning_rate": 0.00014171167110096016, |
|
"loss": 1.2574, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.10930506885051727, |
|
"learning_rate": 0.00014133710552097174, |
|
"loss": 1.243, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.11596094816923141, |
|
"learning_rate": 0.00014096183914739553, |
|
"loss": 1.2908, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.13881249725818634, |
|
"learning_rate": 0.00014058587834217355, |
|
"loss": 1.1379, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.09807305783033371, |
|
"learning_rate": 0.00014020922947902066, |
|
"loss": 1.1799, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.10549982637166977, |
|
"learning_rate": 0.00013983189894331636, |
|
"loss": 1.2467, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.7992545366287231, |
|
"learning_rate": 0.00013945389313199669, |
|
"loss": 1.2711, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.18441331386566162, |
|
"learning_rate": 0.00013907521845344571, |
|
"loss": 1.0915, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.12779419124126434, |
|
"learning_rate": 0.00013869588132738695, |
|
"loss": 1.2633, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.13042038679122925, |
|
"learning_rate": 0.00013831588818477436, |
|
"loss": 1.2233, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.14113464951515198, |
|
"learning_rate": 0.00013793524546768356, |
|
"loss": 1.0653, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.12155605107545853, |
|
"learning_rate": 0.0001375539596292025, |
|
"loss": 1.2181, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.12651774287223816, |
|
"learning_rate": 0.00013717203713332188, |
|
"loss": 1.3338, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.2244076430797577, |
|
"learning_rate": 0.00013678948445482598, |
|
"loss": 1.3459, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.1257290095090866, |
|
"learning_rate": 0.00013640630807918247, |
|
"loss": 1.0061, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.11858661472797394, |
|
"learning_rate": 0.00013602251450243273, |
|
"loss": 1.1775, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.1279936581850052, |
|
"learning_rate": 0.00013563811023108158, |
|
"loss": 1.3702, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.13254624605178833, |
|
"learning_rate": 0.00013525310178198705, |
|
"loss": 1.3612, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.1243981122970581, |
|
"learning_rate": 0.00013486749568225001, |
|
"loss": 1.1652, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.1259343922138214, |
|
"learning_rate": 0.0001344812984691031, |
|
"loss": 1.2998, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.12421772629022598, |
|
"learning_rate": 0.00013409451668980048, |
|
"loss": 1.2688, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.11533673852682114, |
|
"learning_rate": 0.0001337071569015063, |
|
"loss": 1.2123, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.2135774940252304, |
|
"learning_rate": 0.00013331922567118393, |
|
"loss": 1.173, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.1305667608976364, |
|
"learning_rate": 0.00013293072957548443, |
|
"loss": 1.2795, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.11869475990533829, |
|
"learning_rate": 0.00013254167520063509, |
|
"loss": 1.1984, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.13584646582603455, |
|
"learning_rate": 0.0001321520691423278, |
|
"loss": 1.2018, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.12778714299201965, |
|
"learning_rate": 0.0001317619180056072, |
|
"loss": 1.2817, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.12326094508171082, |
|
"learning_rate": 0.0001313712284047587, |
|
"loss": 1.3434, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.11255846172571182, |
|
"learning_rate": 0.0001309800069631964, |
|
"loss": 1.1986, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.11870656907558441, |
|
"learning_rate": 0.0001305882603133508, |
|
"loss": 1.2409, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.11277451366186142, |
|
"learning_rate": 0.0001301959950965562, |
|
"loss": 1.4011, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.11900844424962997, |
|
"learning_rate": 0.00012980321796293836, |
|
"loss": 1.2379, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.14404135942459106, |
|
"learning_rate": 0.00012940993557130164, |
|
"loss": 1.3902, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.12229623645544052, |
|
"learning_rate": 0.000129016154589016, |
|
"loss": 1.2842, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.1197381243109703, |
|
"learning_rate": 0.00012862188169190418, |
|
"loss": 1.3836, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.12395986169576645, |
|
"learning_rate": 0.00012822712356412838, |
|
"loss": 1.3508, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.12418900430202484, |
|
"learning_rate": 0.00012783188689807697, |
|
"loss": 1.1969, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.1138986125588417, |
|
"learning_rate": 0.00012743617839425098, |
|
"loss": 1.3654, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.11219038814306259, |
|
"learning_rate": 0.0001270400047611508, |
|
"loss": 1.2621, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.11220891773700714, |
|
"learning_rate": 0.00012664337271516194, |
|
"loss": 1.2384, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.11203695833683014, |
|
"learning_rate": 0.0001262462889804416, |
|
"loss": 1.1551, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.10459273308515549, |
|
"learning_rate": 0.00012584876028880453, |
|
"loss": 1.2276, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.10843029618263245, |
|
"learning_rate": 0.00012545079337960882, |
|
"loss": 1.2414, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.10928460955619812, |
|
"learning_rate": 0.00012505239499964178, |
|
"loss": 1.0391, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.1170789897441864, |
|
"learning_rate": 0.0001246535719030055, |
|
"loss": 1.2558, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.118638776242733, |
|
"learning_rate": 0.00012425433085100224, |
|
"loss": 1.2906, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.12077479064464569, |
|
"learning_rate": 0.0001238546786120201, |
|
"loss": 1.3222, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.12042129039764404, |
|
"learning_rate": 0.00012345462196141797, |
|
"loss": 1.2399, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.11980915814638138, |
|
"learning_rate": 0.00012305416768141082, |
|
"loss": 1.2426, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.10789747536182404, |
|
"learning_rate": 0.00012265332256095464, |
|
"loss": 1.3437, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.10967132449150085, |
|
"learning_rate": 0.00012225209339563145, |
|
"loss": 1.1329, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.11903247982263565, |
|
"learning_rate": 0.00012185048698753403, |
|
"loss": 1.2107, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.10917498171329498, |
|
"learning_rate": 0.00012144851014515055, |
|
"loss": 1.1689, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.11308782547712326, |
|
"learning_rate": 0.00012104616968324927, |
|
"loss": 1.2487, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.10030585527420044, |
|
"learning_rate": 0.00012064347242276293, |
|
"loss": 1.1451, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.1195712685585022, |
|
"learning_rate": 0.00012024042519067309, |
|
"loss": 1.2525, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.12115343660116196, |
|
"learning_rate": 0.00011983703481989443, |
|
"loss": 1.2348, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.1060243546962738, |
|
"learning_rate": 0.00011943330814915896, |
|
"loss": 1.0223, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.2178022414445877, |
|
"learning_rate": 0.00011902925202289996, |
|
"loss": 1.1158, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.13022154569625854, |
|
"learning_rate": 0.00011862487329113606, |
|
"loss": 1.2742, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.11495082825422287, |
|
"learning_rate": 0.00011822017880935505, |
|
"loss": 1.2562, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.11310116201639175, |
|
"learning_rate": 0.00011781517543839768, |
|
"loss": 1.2719, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.13280071318149567, |
|
"learning_rate": 0.00011740987004434137, |
|
"loss": 1.1481, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.12752439081668854, |
|
"learning_rate": 0.00011700426949838363, |
|
"loss": 1.3024, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.11650800704956055, |
|
"learning_rate": 0.00011659838067672599, |
|
"loss": 1.2154, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.11279530823230743, |
|
"learning_rate": 0.00011619221046045687, |
|
"loss": 1.1627, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.12641020119190216, |
|
"learning_rate": 0.0001157857657354354, |
|
"loss": 1.0148, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.10685007274150848, |
|
"learning_rate": 0.00011537905339217448, |
|
"loss": 1.288, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.10929278284311295, |
|
"learning_rate": 0.00011497208032572384, |
|
"loss": 1.123, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.11334774643182755, |
|
"learning_rate": 0.00011456485343555344, |
|
"loss": 1.2894, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.10330837219953537, |
|
"learning_rate": 0.0001141573796254363, |
|
"loss": 1.1484, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.09870249778032303, |
|
"learning_rate": 0.00011374966580333147, |
|
"loss": 1.0223, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.12955228984355927, |
|
"learning_rate": 0.00011334171888126697, |
|
"loss": 1.2058, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.11018647998571396, |
|
"learning_rate": 0.00011293354577522263, |
|
"loss": 1.1589, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.12055356800556183, |
|
"learning_rate": 0.00011252515340501281, |
|
"loss": 1.091, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.10831710696220398, |
|
"learning_rate": 0.00011211654869416901, |
|
"loss": 1.2076, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.11912648379802704, |
|
"learning_rate": 0.00011170773856982268, |
|
"loss": 1.2953, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.11210989207029343, |
|
"learning_rate": 0.00011129872996258757, |
|
"loss": 1.0549, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.10992615669965744, |
|
"learning_rate": 0.0001108895298064424, |
|
"loss": 1.1818, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.1124982088804245, |
|
"learning_rate": 0.00011048014503861321, |
|
"loss": 1.3218, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.12512408196926117, |
|
"learning_rate": 0.00011007058259945584, |
|
"loss": 1.2262, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.1207667887210846, |
|
"learning_rate": 0.00010966084943233817, |
|
"loss": 1.1636, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.10861886292695999, |
|
"learning_rate": 0.00010925095248352239, |
|
"loss": 1.1363, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.11505457758903503, |
|
"learning_rate": 0.0001088408987020475, |
|
"loss": 1.1823, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.10509267449378967, |
|
"learning_rate": 0.00010843069503961111, |
|
"loss": 1.2269, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.10376826673746109, |
|
"learning_rate": 0.00010802034845045188, |
|
"loss": 1.0244, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.10977223515510559, |
|
"learning_rate": 0.00010760986589123146, |
|
"loss": 1.2713, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.11492714285850525, |
|
"learning_rate": 0.0001071992543209167, |
|
"loss": 1.2242, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.10544589161872864, |
|
"learning_rate": 0.00010678852070066159, |
|
"loss": 1.2959, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.11260811984539032, |
|
"learning_rate": 0.00010637767199368912, |
|
"loss": 1.3398, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.10535457730293274, |
|
"learning_rate": 0.00010596671516517356, |
|
"loss": 1.3178, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.10959797352552414, |
|
"learning_rate": 0.00010555565718212197, |
|
"loss": 1.0378, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.10014626383781433, |
|
"learning_rate": 0.00010514450501325645, |
|
"loss": 1.2384, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.1097472757101059, |
|
"learning_rate": 0.00010473326562889582, |
|
"loss": 1.1366, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.10998505353927612, |
|
"learning_rate": 0.0001043219460008374, |
|
"loss": 1.1083, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.11129113286733627, |
|
"learning_rate": 0.00010391055310223899, |
|
"loss": 1.1951, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.10348080098628998, |
|
"learning_rate": 0.00010349909390750046, |
|
"loss": 1.1871, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.10865020006895065, |
|
"learning_rate": 0.00010308757539214572, |
|
"loss": 1.2269, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.1128375232219696, |
|
"learning_rate": 0.00010267600453270421, |
|
"loss": 1.2358, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.13256721198558807, |
|
"learning_rate": 0.0001022643883065929, |
|
"loss": 1.3445, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.10738494992256165, |
|
"learning_rate": 0.00010185273369199781, |
|
"loss": 1.1042, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.10427848994731903, |
|
"learning_rate": 0.00010144104766775572, |
|
"loss": 1.0901, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.11618473380804062, |
|
"learning_rate": 0.000101029337213236, |
|
"loss": 0.781, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.20147624611854553, |
|
"learning_rate": 0.00010061760930822211, |
|
"loss": 1.2707, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.11436042189598083, |
|
"learning_rate": 0.0001002058709327934, |
|
"loss": 1.193, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.12396758794784546, |
|
"learning_rate": 9.979412906720663e-05, |
|
"loss": 1.0342, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.11723118275403976, |
|
"learning_rate": 9.938239069177791e-05, |
|
"loss": 1.1791, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.12710683047771454, |
|
"learning_rate": 9.897066278676404e-05, |
|
"loss": 1.023, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.10067608207464218, |
|
"learning_rate": 9.85589523322443e-05, |
|
"loss": 1.1753, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.11074398458003998, |
|
"learning_rate": 9.81472663080022e-05, |
|
"loss": 1.2567, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.16948086023330688, |
|
"learning_rate": 9.773561169340712e-05, |
|
"loss": 1.3789, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.10852295160293579, |
|
"learning_rate": 9.732399546729578e-05, |
|
"loss": 1.3642, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.10131103545427322, |
|
"learning_rate": 9.691242460785433e-05, |
|
"loss": 1.129, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.11037011444568634, |
|
"learning_rate": 9.650090609249956e-05, |
|
"loss": 1.1974, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.12829488515853882, |
|
"learning_rate": 9.608944689776103e-05, |
|
"loss": 1.2126, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.16550996899604797, |
|
"learning_rate": 9.567805399916259e-05, |
|
"loss": 1.228, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.1353956162929535, |
|
"learning_rate": 9.526673437110419e-05, |
|
"loss": 1.2877, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.17490524053573608, |
|
"learning_rate": 9.485549498674358e-05, |
|
"loss": 1.212, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.10544750839471817, |
|
"learning_rate": 9.444434281787807e-05, |
|
"loss": 1.3147, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.10780143737792969, |
|
"learning_rate": 9.403328483482646e-05, |
|
"loss": 1.1851, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.12775209546089172, |
|
"learning_rate": 9.362232800631088e-05, |
|
"loss": 1.1219, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.11476773023605347, |
|
"learning_rate": 9.321147929933847e-05, |
|
"loss": 1.2289, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.11137551069259644, |
|
"learning_rate": 9.28007456790833e-05, |
|
"loss": 1.2016, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.13261277973651886, |
|
"learning_rate": 9.239013410876857e-05, |
|
"loss": 1.389, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.10396253317594528, |
|
"learning_rate": 9.197965154954815e-05, |
|
"loss": 1.3316, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.11388034373521805, |
|
"learning_rate": 9.15693049603889e-05, |
|
"loss": 1.2501, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.1430211067199707, |
|
"learning_rate": 9.115910129795251e-05, |
|
"loss": 1.2259, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.12369144707918167, |
|
"learning_rate": 9.074904751647762e-05, |
|
"loss": 1.138, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.11555325239896774, |
|
"learning_rate": 9.033915056766187e-05, |
|
"loss": 1.1845, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.15159161388874054, |
|
"learning_rate": 8.992941740054418e-05, |
|
"loss": 1.3823, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.12028995901346207, |
|
"learning_rate": 8.951985496138678e-05, |
|
"loss": 1.0985, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.12002217024564743, |
|
"learning_rate": 8.911047019355764e-05, |
|
"loss": 1.2431, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.11859162151813507, |
|
"learning_rate": 8.870127003741244e-05, |
|
"loss": 1.1669, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.10495835542678833, |
|
"learning_rate": 8.829226143017735e-05, |
|
"loss": 1.1424, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.11981993168592453, |
|
"learning_rate": 8.788345130583099e-05, |
|
"loss": 1.0444, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.11924871802330017, |
|
"learning_rate": 8.747484659498724e-05, |
|
"loss": 1.1574, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.15531030297279358, |
|
"learning_rate": 8.706645422477739e-05, |
|
"loss": 1.1786, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.11026681959629059, |
|
"learning_rate": 8.665828111873307e-05, |
|
"loss": 1.2112, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.12397878617048264, |
|
"learning_rate": 8.625033419666857e-05, |
|
"loss": 1.2931, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.10594267398118973, |
|
"learning_rate": 8.584262037456373e-05, |
|
"loss": 1.1422, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.11834708601236343, |
|
"learning_rate": 8.54351465644466e-05, |
|
"loss": 1.16, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.12605226039886475, |
|
"learning_rate": 8.502791967427618e-05, |
|
"loss": 1.3575, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.12767426669597626, |
|
"learning_rate": 8.462094660782556e-05, |
|
"loss": 1.3343, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.12377568334341049, |
|
"learning_rate": 8.42142342645646e-05, |
|
"loss": 1.2027, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.11611881852149963, |
|
"learning_rate": 8.380778953954313e-05, |
|
"loss": 1.1748, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.10847889631986618, |
|
"learning_rate": 8.340161932327404e-05, |
|
"loss": 1.138, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.11104707419872284, |
|
"learning_rate": 8.299573050161638e-05, |
|
"loss": 1.2403, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.10397801548242569, |
|
"learning_rate": 8.259012995565867e-05, |
|
"loss": 1.2159, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.10560380667448044, |
|
"learning_rate": 8.21848245616023e-05, |
|
"loss": 1.0932, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.10818043351173401, |
|
"learning_rate": 8.177982119064496e-05, |
|
"loss": 1.2667, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.1053985133767128, |
|
"learning_rate": 8.137512670886397e-05, |
|
"loss": 1.3267, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.10604721307754517, |
|
"learning_rate": 8.097074797710006e-05, |
|
"loss": 1.2254, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.1083521693944931, |
|
"learning_rate": 8.056669185084108e-05, |
|
"loss": 1.389, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.12045227736234665, |
|
"learning_rate": 8.016296518010558e-05, |
|
"loss": 1.245, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.11613083630800247, |
|
"learning_rate": 7.975957480932695e-05, |
|
"loss": 1.2618, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.11401872336864471, |
|
"learning_rate": 7.935652757723708e-05, |
|
"loss": 1.2073, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.1841176450252533, |
|
"learning_rate": 7.895383031675074e-05, |
|
"loss": 1.2132, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.10668601095676422, |
|
"learning_rate": 7.855148985484946e-05, |
|
"loss": 1.2177, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.10970567911863327, |
|
"learning_rate": 7.814951301246598e-05, |
|
"loss": 1.2306, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.10744346678256989, |
|
"learning_rate": 7.774790660436858e-05, |
|
"loss": 1.2155, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.09847363084554672, |
|
"learning_rate": 7.734667743904539e-05, |
|
"loss": 1.1383, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.12585873901844025, |
|
"learning_rate": 7.69458323185892e-05, |
|
"loss": 1.2411, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.13609972596168518, |
|
"learning_rate": 7.654537803858204e-05, |
|
"loss": 1.1972, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.10885506868362427, |
|
"learning_rate": 7.614532138797993e-05, |
|
"loss": 1.2197, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.10706409066915512, |
|
"learning_rate": 7.57456691489978e-05, |
|
"loss": 1.0704, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.11232727020978928, |
|
"learning_rate": 7.534642809699455e-05, |
|
"loss": 1.2443, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.1036246195435524, |
|
"learning_rate": 7.494760500035823e-05, |
|
"loss": 1.1623, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.11857923120260239, |
|
"learning_rate": 7.454920662039117e-05, |
|
"loss": 1.3161, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.1063128188252449, |
|
"learning_rate": 7.415123971119549e-05, |
|
"loss": 1.37, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.11207219213247299, |
|
"learning_rate": 7.375371101955841e-05, |
|
"loss": 1.1433, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.1198432594537735, |
|
"learning_rate": 7.335662728483808e-05, |
|
"loss": 1.2341, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.14110444486141205, |
|
"learning_rate": 7.295999523884921e-05, |
|
"loss": 1.2201, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.10053621232509613, |
|
"learning_rate": 7.256382160574901e-05, |
|
"loss": 1.1276, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.1087542399764061, |
|
"learning_rate": 7.216811310192308e-05, |
|
"loss": 1.3124, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.10470487177371979, |
|
"learning_rate": 7.177287643587163e-05, |
|
"loss": 1.0677, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.12348546087741852, |
|
"learning_rate": 7.137811830809583e-05, |
|
"loss": 1.0808, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.12709330022335052, |
|
"learning_rate": 7.0983845410984e-05, |
|
"loss": 1.1746, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.0996909812092781, |
|
"learning_rate": 7.059006442869839e-05, |
|
"loss": 1.2121, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.12972918152809143, |
|
"learning_rate": 7.019678203706163e-05, |
|
"loss": 1.149, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.10252442955970764, |
|
"learning_rate": 6.980400490344383e-05, |
|
"loss": 1.2643, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.11937206983566284, |
|
"learning_rate": 6.941173968664923e-05, |
|
"loss": 1.2484, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.11440838873386383, |
|
"learning_rate": 6.901999303680358e-05, |
|
"loss": 1.3256, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.10103975981473923, |
|
"learning_rate": 6.862877159524132e-05, |
|
"loss": 1.3196, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.10081090033054352, |
|
"learning_rate": 6.823808199439284e-05, |
|
"loss": 1.1371, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.16644109785556793, |
|
"learning_rate": 6.784793085767221e-05, |
|
"loss": 1.1239, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.1086033284664154, |
|
"learning_rate": 6.745832479936491e-05, |
|
"loss": 1.3244, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.11194080859422684, |
|
"learning_rate": 6.70692704245156e-05, |
|
"loss": 1.3153, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.1124996691942215, |
|
"learning_rate": 6.668077432881609e-05, |
|
"loss": 1.2942, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.3689382076263428, |
|
"learning_rate": 6.629284309849372e-05, |
|
"loss": 1.2606, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.11704660952091217, |
|
"learning_rate": 6.590548331019957e-05, |
|
"loss": 1.3214, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.11636264622211456, |
|
"learning_rate": 6.55187015308969e-05, |
|
"loss": 1.3366, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.10623035579919815, |
|
"learning_rate": 6.513250431775004e-05, |
|
"loss": 1.1485, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.10182057321071625, |
|
"learning_rate": 6.474689821801295e-05, |
|
"loss": 1.1521, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.11016492545604706, |
|
"learning_rate": 6.436188976891846e-05, |
|
"loss": 1.2067, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.11019518971443176, |
|
"learning_rate": 6.39774854975673e-05, |
|
"loss": 1.2559, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.11215123534202576, |
|
"learning_rate": 6.359369192081756e-05, |
|
"loss": 1.2557, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.1162608414888382, |
|
"learning_rate": 6.321051554517407e-05, |
|
"loss": 1.1967, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.1348588615655899, |
|
"learning_rate": 6.282796286667814e-05, |
|
"loss": 1.1944, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.12534910440444946, |
|
"learning_rate": 6.244604037079753e-05, |
|
"loss": 1.1354, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.11349747329950333, |
|
"learning_rate": 6.206475453231644e-05, |
|
"loss": 1.3001, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.11473750323057175, |
|
"learning_rate": 6.168411181522569e-05, |
|
"loss": 1.3084, |
|
"step": 677 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.11263560503721237, |
|
"learning_rate": 6.13041186726131e-05, |
|
"loss": 1.0361, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.11515972018241882, |
|
"learning_rate": 6.092478154655431e-05, |
|
"loss": 1.1342, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.10681987553834915, |
|
"learning_rate": 6.0546106868003324e-05, |
|
"loss": 1.2221, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.10119231045246124, |
|
"learning_rate": 6.016810105668365e-05, |
|
"loss": 1.1237, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.10480265319347382, |
|
"learning_rate": 5.979077052097936e-05, |
|
"loss": 1.2506, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.11073455959558487, |
|
"learning_rate": 5.941412165782645e-05, |
|
"loss": 1.0603, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.11075324565172195, |
|
"learning_rate": 5.9038160852604476e-05, |
|
"loss": 1.1351, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.10219030827283859, |
|
"learning_rate": 5.866289447902828e-05, |
|
"loss": 1.2163, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.10446413606405258, |
|
"learning_rate": 5.8288328899039834e-05, |
|
"loss": 1.2578, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.10746556520462036, |
|
"learning_rate": 5.791447046270056e-05, |
|
"loss": 1.2526, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.12443575263023376, |
|
"learning_rate": 5.754132550808344e-05, |
|
"loss": 1.3376, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.12434745579957962, |
|
"learning_rate": 5.7168900361165824e-05, |
|
"loss": 1.369, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.11743592470884323, |
|
"learning_rate": 5.679720133572206e-05, |
|
"loss": 1.2044, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.12590321898460388, |
|
"learning_rate": 5.6426234733216376e-05, |
|
"loss": 1.0729, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.10635906457901001, |
|
"learning_rate": 5.605600684269614e-05, |
|
"loss": 1.2448, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.10504303872585297, |
|
"learning_rate": 5.568652394068532e-05, |
|
"loss": 1.0663, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.10200861841440201, |
|
"learning_rate": 5.5317792291077965e-05, |
|
"loss": 1.0939, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.11620642244815826, |
|
"learning_rate": 5.4949818145031986e-05, |
|
"loss": 1.1002, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.10879629105329514, |
|
"learning_rate": 5.4582607740863324e-05, |
|
"loss": 1.1695, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.11083561182022095, |
|
"learning_rate": 5.421616730394e-05, |
|
"loss": 1.2967, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.11363320797681808, |
|
"learning_rate": 5.385050304657669e-05, |
|
"loss": 1.0099, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.10837159305810928, |
|
"learning_rate": 5.348562116792946e-05, |
|
"loss": 1.1575, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.7101715207099915, |
|
"learning_rate": 5.312152785389055e-05, |
|
"loss": 1.3229, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.11040350794792175, |
|
"learning_rate": 5.2758229276983614e-05, |
|
"loss": 1.183, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.10885446518659592, |
|
"learning_rate": 5.2395731596258925e-05, |
|
"loss": 1.139, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.10934137552976608, |
|
"learning_rate": 5.20340409571891e-05, |
|
"loss": 1.291, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.11229011416435242, |
|
"learning_rate": 5.167316349156495e-05, |
|
"loss": 1.1406, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.12293226271867752, |
|
"learning_rate": 5.1313105317391284e-05, |
|
"loss": 1.1102, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.10751207172870636, |
|
"learning_rate": 5.095387253878345e-05, |
|
"loss": 1.1296, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.10970565676689148, |
|
"learning_rate": 5.059547124586375e-05, |
|
"loss": 1.3482, |
|
"step": 707 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.10699622333049774, |
|
"learning_rate": 5.0237907514658176e-05, |
|
"loss": 1.3642, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.1132645532488823, |
|
"learning_rate": 4.988118740699346e-05, |
|
"loss": 1.1504, |
|
"step": 709 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.12392119318246841, |
|
"learning_rate": 4.9525316970394245e-05, |
|
"loss": 1.3906, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.10416343808174133, |
|
"learning_rate": 4.917030223798057e-05, |
|
"loss": 1.3419, |
|
"step": 711 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.11010722070932388, |
|
"learning_rate": 4.881614922836555e-05, |
|
"loss": 1.1175, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.11078814417123795, |
|
"learning_rate": 4.8462863945553516e-05, |
|
"loss": 1.3513, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.12069610506296158, |
|
"learning_rate": 4.8110452378838035e-05, |
|
"loss": 1.3913, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.11017792671918869, |
|
"learning_rate": 4.775892050270051e-05, |
|
"loss": 1.2217, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.10235986858606339, |
|
"learning_rate": 4.740827427670871e-05, |
|
"loss": 1.1375, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.11740509420633316, |
|
"learning_rate": 4.7058519645416e-05, |
|
"loss": 1.1649, |
|
"step": 717 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.16506601870059967, |
|
"learning_rate": 4.6709662538260267e-05, |
|
"loss": 1.0392, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.22638647258281708, |
|
"learning_rate": 4.63617088694637e-05, |
|
"loss": 1.21, |
|
"step": 719 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.11359802633523941, |
|
"learning_rate": 4.601466453793224e-05, |
|
"loss": 1.2481, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.10808828473091125, |
|
"learning_rate": 4.566853542715581e-05, |
|
"loss": 1.2008, |
|
"step": 721 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.09867287427186966, |
|
"learning_rate": 4.5323327405108416e-05, |
|
"loss": 1.0815, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.11430750787258148, |
|
"learning_rate": 4.4979046324148785e-05, |
|
"loss": 1.3082, |
|
"step": 723 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.1082565113902092, |
|
"learning_rate": 4.4635698020921014e-05, |
|
"loss": 1.0386, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.11467121541500092, |
|
"learning_rate": 4.4293288316255653e-05, |
|
"loss": 1.299, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.10791204124689102, |
|
"learning_rate": 4.395182301507118e-05, |
|
"loss": 1.2306, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.11149678379297256, |
|
"learning_rate": 4.3611307906275414e-05, |
|
"loss": 1.3074, |
|
"step": 727 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.11212314665317535, |
|
"learning_rate": 4.327174876266743e-05, |
|
"loss": 1.1512, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.1070462167263031, |
|
"learning_rate": 4.293315134083975e-05, |
|
"loss": 1.2378, |
|
"step": 729 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.11427627503871918, |
|
"learning_rate": 4.2595521381080604e-05, |
|
"loss": 1.2729, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.1135973185300827, |
|
"learning_rate": 4.2258864607276705e-05, |
|
"loss": 1.1399, |
|
"step": 731 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.11445244401693344, |
|
"learning_rate": 4.192318672681631e-05, |
|
"loss": 1.1549, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.10163676738739014, |
|
"learning_rate": 4.158849343049232e-05, |
|
"loss": 1.2751, |
|
"step": 733 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.11175056546926498, |
|
"learning_rate": 4.1254790392405797e-05, |
|
"loss": 1.2371, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.11040692776441574, |
|
"learning_rate": 4.092208326986986e-05, |
|
"loss": 1.3218, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.10620775073766708, |
|
"learning_rate": 4.0590377703313785e-05, |
|
"loss": 1.1667, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.10938204824924469, |
|
"learning_rate": 4.0259679316187214e-05, |
|
"loss": 1.0897, |
|
"step": 737 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.10027039796113968, |
|
"learning_rate": 3.992999371486508e-05, |
|
"loss": 1.1646, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.13585035502910614, |
|
"learning_rate": 3.960132648855226e-05, |
|
"loss": 1.1759, |
|
"step": 739 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.12253659218549728, |
|
"learning_rate": 3.9273683209189115e-05, |
|
"loss": 1.3585, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.14041317999362946, |
|
"learning_rate": 3.8947069431356855e-05, |
|
"loss": 1.2736, |
|
"step": 741 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.10640061646699905, |
|
"learning_rate": 3.862149069218343e-05, |
|
"loss": 1.2859, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.10962343961000443, |
|
"learning_rate": 3.829695251124953e-05, |
|
"loss": 1.2581, |
|
"step": 743 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.12311571091413498, |
|
"learning_rate": 3.797346039049528e-05, |
|
"loss": 1.3115, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.12078052759170532, |
|
"learning_rate": 3.7651019814126654e-05, |
|
"loss": 1.03, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.10591412335634232, |
|
"learning_rate": 3.732963624852275e-05, |
|
"loss": 1.0906, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.1085178554058075, |
|
"learning_rate": 3.700931514214303e-05, |
|
"loss": 1.1478, |
|
"step": 747 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.10265811532735825, |
|
"learning_rate": 3.6690061925434814e-05, |
|
"loss": 1.1375, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.10623796284198761, |
|
"learning_rate": 3.637188201074149e-05, |
|
"loss": 1.2655, |
|
"step": 749 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.1077788770198822, |
|
"learning_rate": 3.605478079221054e-05, |
|
"loss": 1.2805, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.1078350618481636, |
|
"learning_rate": 3.573876364570214e-05, |
|
"loss": 1.356, |
|
"step": 751 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.10767360776662827, |
|
"learning_rate": 3.5423835928698125e-05, |
|
"loss": 1.2346, |
|
"step": 752 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.10546786338090897, |
|
"learning_rate": 3.5110002980210975e-05, |
|
"loss": 1.2541, |
|
"step": 753 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.09910236299037933, |
|
"learning_rate": 3.479727012069349e-05, |
|
"loss": 1.2015, |
|
"step": 754 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.10072822123765945, |
|
"learning_rate": 3.4485642651948516e-05, |
|
"loss": 1.3005, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.11063572019338608, |
|
"learning_rate": 3.417512585703903e-05, |
|
"loss": 1.0739, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.09937085956335068, |
|
"learning_rate": 3.38657250001986e-05, |
|
"loss": 1.3106, |
|
"step": 757 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.10743856430053711, |
|
"learning_rate": 3.355744532674211e-05, |
|
"loss": 1.1492, |
|
"step": 758 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.11000777781009674, |
|
"learning_rate": 3.325029206297694e-05, |
|
"loss": 1.1876, |
|
"step": 759 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.11820725351572037, |
|
"learning_rate": 3.294427041611425e-05, |
|
"loss": 1.2457, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.10308333486318588, |
|
"learning_rate": 3.263938557418082e-05, |
|
"loss": 1.2801, |
|
"step": 761 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.1042330265045166, |
|
"learning_rate": 3.233564270593088e-05, |
|
"loss": 1.2237, |
|
"step": 762 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.10803329199552536, |
|
"learning_rate": 3.2033046960758764e-05, |
|
"loss": 1.2841, |
|
"step": 763 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.11426287144422531, |
|
"learning_rate": 3.1731603468611337e-05, |
|
"loss": 1.355, |
|
"step": 764 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.10273044556379318, |
|
"learning_rate": 3.143131733990127e-05, |
|
"loss": 1.2975, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.11743076890707016, |
|
"learning_rate": 3.113219366542031e-05, |
|
"loss": 1.2532, |
|
"step": 766 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.1157737672328949, |
|
"learning_rate": 3.083423751625282e-05, |
|
"loss": 1.4476, |
|
"step": 767 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.10297975689172745, |
|
"learning_rate": 3.0537453943690074e-05, |
|
"loss": 1.1543, |
|
"step": 768 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.10176219791173935, |
|
"learning_rate": 3.0241847979144487e-05, |
|
"loss": 1.0901, |
|
"step": 769 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.11567193269729614, |
|
"learning_rate": 2.994742463406427e-05, |
|
"loss": 1.25, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.12137153744697571, |
|
"learning_rate": 2.96541888998485e-05, |
|
"loss": 1.3956, |
|
"step": 771 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.10772846639156342, |
|
"learning_rate": 2.9362145747762625e-05, |
|
"loss": 1.4177, |
|
"step": 772 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.10832470655441284, |
|
"learning_rate": 2.9071300128854007e-05, |
|
"loss": 1.1614, |
|
"step": 773 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.09898839145898819, |
|
"learning_rate": 2.878165697386812e-05, |
|
"loss": 1.2994, |
|
"step": 774 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.1113181784749031, |
|
"learning_rate": 2.849322119316489e-05, |
|
"loss": 1.1914, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 0.10062463581562042, |
|
"learning_rate": 2.82059976766354e-05, |
|
"loss": 1.1165, |
|
"step": 776 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 0.10819394886493683, |
|
"learning_rate": 2.7919991293619098e-05, |
|
"loss": 1.283, |
|
"step": 777 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 0.1365766078233719, |
|
"learning_rate": 2.763520689282122e-05, |
|
"loss": 1.1963, |
|
"step": 778 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 0.11200159043073654, |
|
"learning_rate": 2.7351649302230553e-05, |
|
"loss": 1.1885, |
|
"step": 779 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 0.11218269914388657, |
|
"learning_rate": 2.7069323329037634e-05, |
|
"loss": 1.2944, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 0.12175281345844269, |
|
"learning_rate": 2.678823375955314e-05, |
|
"loss": 1.2379, |
|
"step": 781 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 0.125010147690773, |
|
"learning_rate": 2.650838535912692e-05, |
|
"loss": 1.2309, |
|
"step": 782 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 0.12338108569383621, |
|
"learning_rate": 2.6229782872067045e-05, |
|
"loss": 1.1489, |
|
"step": 783 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 0.11083382368087769, |
|
"learning_rate": 2.595243102155951e-05, |
|
"loss": 1.1595, |
|
"step": 784 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 0.11741047352552414, |
|
"learning_rate": 2.5676334509588008e-05, |
|
"loss": 1.2438, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.10495740175247192, |
|
"learning_rate": 2.540149801685441e-05, |
|
"loss": 1.299, |
|
"step": 786 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.11125294119119644, |
|
"learning_rate": 2.5127926202699238e-05, |
|
"loss": 1.1244, |
|
"step": 787 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.10036681592464447, |
|
"learning_rate": 2.485562370502279e-05, |
|
"loss": 1.2892, |
|
"step": 788 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.11835762113332748, |
|
"learning_rate": 2.4584595140206458e-05, |
|
"loss": 1.1257, |
|
"step": 789 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.11104938387870789, |
|
"learning_rate": 2.4314845103034456e-05, |
|
"loss": 1.1601, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.10659179091453552, |
|
"learning_rate": 2.40463781666159e-05, |
|
"loss": 1.2066, |
|
"step": 791 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.10941512882709503, |
|
"learning_rate": 2.377919888230744e-05, |
|
"loss": 1.3887, |
|
"step": 792 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.18816524744033813, |
|
"learning_rate": 2.3513311779635905e-05, |
|
"loss": 1.2191, |
|
"step": 793 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.1119375005364418, |
|
"learning_rate": 2.3248721366221638e-05, |
|
"loss": 1.2773, |
|
"step": 794 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 0.10966973006725311, |
|
"learning_rate": 2.2985432127701946e-05, |
|
"loss": 1.2283, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 0.10751962661743164, |
|
"learning_rate": 2.2723448527655266e-05, |
|
"loss": 1.375, |
|
"step": 796 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 0.19195051491260529, |
|
"learning_rate": 2.246277500752524e-05, |
|
"loss": 1.3717, |
|
"step": 797 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 0.4861750900745392, |
|
"learning_rate": 2.220341598654565e-05, |
|
"loss": 1.1786, |
|
"step": 798 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 0.10594101250171661, |
|
"learning_rate": 2.194537586166532e-05, |
|
"loss": 1.2528, |
|
"step": 799 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 0.11542479693889618, |
|
"learning_rate": 2.16886590074737e-05, |
|
"loss": 1.2277, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 0.10653986781835556, |
|
"learning_rate": 2.143326977612662e-05, |
|
"loss": 1.1785, |
|
"step": 801 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 0.11760350316762924, |
|
"learning_rate": 2.1179212497272584e-05, |
|
"loss": 1.2573, |
|
"step": 802 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 0.1091405376791954, |
|
"learning_rate": 2.092649147797927e-05, |
|
"loss": 1.0923, |
|
"step": 803 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 0.10867162048816681, |
|
"learning_rate": 2.0675111002660562e-05, |
|
"loss": 1.2358, |
|
"step": 804 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 0.10017253458499908, |
|
"learning_rate": 2.0425075333003953e-05, |
|
"loss": 1.2741, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 0.11259835958480835, |
|
"learning_rate": 2.0176388707898274e-05, |
|
"loss": 1.0486, |
|
"step": 806 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 0.1280423402786255, |
|
"learning_rate": 1.992905534336178e-05, |
|
"loss": 1.3091, |
|
"step": 807 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 0.10603731125593185, |
|
"learning_rate": 1.9683079432470775e-05, |
|
"loss": 1.2541, |
|
"step": 808 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 0.10933735966682434, |
|
"learning_rate": 1.9438465145288374e-05, |
|
"loss": 1.2186, |
|
"step": 809 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 0.11166153848171234, |
|
"learning_rate": 1.9195216628793956e-05, |
|
"loss": 1.2563, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 0.11184462904930115, |
|
"learning_rate": 1.8953338006812805e-05, |
|
"loss": 1.3052, |
|
"step": 811 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 0.11681969463825226, |
|
"learning_rate": 1.8712833379946215e-05, |
|
"loss": 1.1702, |
|
"step": 812 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 0.11982112377882004, |
|
"learning_rate": 1.847370682550187e-05, |
|
"loss": 1.3025, |
|
"step": 813 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 0.11721136420965195, |
|
"learning_rate": 1.82359623974249e-05, |
|
"loss": 1.1182, |
|
"step": 814 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 0.11441758275032043, |
|
"learning_rate": 1.799960412622904e-05, |
|
"loss": 1.1366, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 0.11492711305618286, |
|
"learning_rate": 1.776463601892825e-05, |
|
"loss": 1.1784, |
|
"step": 816 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 0.11135811358690262, |
|
"learning_rate": 1.753106205896895e-05, |
|
"loss": 1.2411, |
|
"step": 817 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 0.0985221266746521, |
|
"learning_rate": 1.729888620616228e-05, |
|
"loss": 1.283, |
|
"step": 818 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 0.10769841074943542, |
|
"learning_rate": 1.7068112396617163e-05, |
|
"loss": 1.2442, |
|
"step": 819 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 0.09943833947181702, |
|
"learning_rate": 1.683874454267349e-05, |
|
"loss": 1.1121, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 0.11267413944005966, |
|
"learning_rate": 1.6610786532835774e-05, |
|
"loss": 1.2912, |
|
"step": 821 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 0.10905832797288895, |
|
"learning_rate": 1.6384242231707204e-05, |
|
"loss": 1.1264, |
|
"step": 822 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 0.11152827739715576, |
|
"learning_rate": 1.6159115479924257e-05, |
|
"loss": 1.0625, |
|
"step": 823 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 0.10507339239120483, |
|
"learning_rate": 1.593541009409143e-05, |
|
"loss": 1.282, |
|
"step": 824 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 0.11833300441503525, |
|
"learning_rate": 1.5713129866716648e-05, |
|
"loss": 1.0683, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 0.10734719038009644, |
|
"learning_rate": 1.5492278566146946e-05, |
|
"loss": 1.2312, |
|
"step": 826 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 0.09920140355825424, |
|
"learning_rate": 1.5272859936504514e-05, |
|
"loss": 1.3253, |
|
"step": 827 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 0.10309410840272903, |
|
"learning_rate": 1.5054877697623304e-05, |
|
"loss": 1.3389, |
|
"step": 828 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 0.09966228157281876, |
|
"learning_rate": 1.4838335544985981e-05, |
|
"loss": 1.1592, |
|
"step": 829 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 0.10500723123550415, |
|
"learning_rate": 1.462323714966114e-05, |
|
"loss": 1.2318, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 0.10912671685218811, |
|
"learning_rate": 1.4409586158241272e-05, |
|
"loss": 1.2195, |
|
"step": 831 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 0.1132998913526535, |
|
"learning_rate": 1.4197386192780715e-05, |
|
"loss": 1.2476, |
|
"step": 832 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 0.11414579302072525, |
|
"learning_rate": 1.3986640850734444e-05, |
|
"loss": 1.229, |
|
"step": 833 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 0.10963235795497894, |
|
"learning_rate": 1.3777353704897e-05, |
|
"loss": 1.1468, |
|
"step": 834 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 0.11412128806114197, |
|
"learning_rate": 1.3569528303341927e-05, |
|
"loss": 1.141, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 0.11492784321308136, |
|
"learning_rate": 1.3363168169361573e-05, |
|
"loss": 1.0967, |
|
"step": 836 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 0.11600396037101746, |
|
"learning_rate": 1.3158276801407431e-05, |
|
"loss": 1.2414, |
|
"step": 837 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 0.11931241303682327, |
|
"learning_rate": 1.2954857673030807e-05, |
|
"loss": 1.2318, |
|
"step": 838 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 0.10490193963050842, |
|
"learning_rate": 1.2752914232823942e-05, |
|
"loss": 1.2481, |
|
"step": 839 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 0.11982220411300659, |
|
"learning_rate": 1.2552449904361529e-05, |
|
"loss": 1.0291, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 0.0949595496058464, |
|
"learning_rate": 1.235346808614264e-05, |
|
"loss": 1.0835, |
|
"step": 841 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 0.11206669360399246, |
|
"learning_rate": 1.2155972151533224e-05, |
|
"loss": 1.2174, |
|
"step": 842 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 0.10925093293190002, |
|
"learning_rate": 1.1959965448708733e-05, |
|
"loss": 1.359, |
|
"step": 843 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 0.10597091913223267, |
|
"learning_rate": 1.1765451300597573e-05, |
|
"loss": 1.2567, |
|
"step": 844 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 0.12670493125915527, |
|
"learning_rate": 1.1572433004824635e-05, |
|
"loss": 1.1798, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 0.10978806763887405, |
|
"learning_rate": 1.1380913833655382e-05, |
|
"loss": 1.2303, |
|
"step": 846 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 0.1855391412973404, |
|
"learning_rate": 1.1190897033940461e-05, |
|
"loss": 1.2306, |
|
"step": 847 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 0.11130934208631516, |
|
"learning_rate": 1.1002385827060602e-05, |
|
"loss": 1.373, |
|
"step": 848 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 0.10244546085596085, |
|
"learning_rate": 1.0815383408871982e-05, |
|
"loss": 1.3044, |
|
"step": 849 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 0.11533805727958679, |
|
"learning_rate": 1.0629892949652132e-05, |
|
"loss": 1.0704, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 0.12121516466140747, |
|
"learning_rate": 1.0445917594046072e-05, |
|
"loss": 1.265, |
|
"step": 851 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 0.1047421246767044, |
|
"learning_rate": 1.026346046101312e-05, |
|
"loss": 1.1294, |
|
"step": 852 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 0.10979347676038742, |
|
"learning_rate": 1.0082524643773916e-05, |
|
"loss": 0.9608, |
|
"step": 853 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 0.12287425249814987, |
|
"learning_rate": 9.903113209758096e-06, |
|
"loss": 0.8795, |
|
"step": 854 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 0.11486369371414185, |
|
"learning_rate": 9.725229200552122e-06, |
|
"loss": 1.3881, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 0.11395483464002609, |
|
"learning_rate": 9.548875631847875e-06, |
|
"loss": 1.1552, |
|
"step": 856 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 0.10814516991376877, |
|
"learning_rate": 9.374055493391455e-06, |
|
"loss": 1.2155, |
|
"step": 857 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 0.10679764300584793, |
|
"learning_rate": 9.200771748932513e-06, |
|
"loss": 1.1449, |
|
"step": 858 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 0.1089312732219696, |
|
"learning_rate": 9.029027336174022e-06, |
|
"loss": 1.2221, |
|
"step": 859 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 0.11471953988075256, |
|
"learning_rate": 8.858825166722417e-06, |
|
"loss": 0.9955, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 0.11357836425304413, |
|
"learning_rate": 8.6901681260383e-06, |
|
"loss": 1.0855, |
|
"step": 861 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 0.11274585127830505, |
|
"learning_rate": 8.523059073387474e-06, |
|
"loss": 1.1812, |
|
"step": 862 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 0.10156439989805222, |
|
"learning_rate": 8.35750084179251e-06, |
|
"loss": 1.1702, |
|
"step": 863 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 0.10595791786909103, |
|
"learning_rate": 8.193496237984677e-06, |
|
"loss": 1.1719, |
|
"step": 864 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 0.10934492200613022, |
|
"learning_rate": 8.031048042356392e-06, |
|
"loss": 1.1735, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 0.10104786604642868, |
|
"learning_rate": 7.870159008914069e-06, |
|
"loss": 1.1967, |
|
"step": 866 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 0.10521814227104187, |
|
"learning_rate": 7.710831865231461e-06, |
|
"loss": 1.3094, |
|
"step": 867 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 0.1166355162858963, |
|
"learning_rate": 7.553069312403349e-06, |
|
"loss": 1.2912, |
|
"step": 868 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 0.09746871888637543, |
|
"learning_rate": 7.3968740249998115e-06, |
|
"loss": 1.1133, |
|
"step": 869 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 0.10336558520793915, |
|
"learning_rate": 7.242248651020844e-06, |
|
"loss": 1.1893, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 0.11591080576181412, |
|
"learning_rate": 7.089195811851501e-06, |
|
"loss": 1.1395, |
|
"step": 871 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 0.22089900076389313, |
|
"learning_rate": 6.937718102217461e-06, |
|
"loss": 1.1993, |
|
"step": 872 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 0.108499675989151, |
|
"learning_rate": 6.787818090140985e-06, |
|
"loss": 1.1195, |
|
"step": 873 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 0.10180932283401489, |
|
"learning_rate": 6.639498316897419e-06, |
|
"loss": 1.3188, |
|
"step": 874 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 0.10950213670730591, |
|
"learning_rate": 6.492761296972116e-06, |
|
"loss": 1.1373, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 0.09740964323282242, |
|
"learning_rate": 6.347609518017761e-06, |
|
"loss": 1.1551, |
|
"step": 876 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 0.10599476099014282, |
|
"learning_rate": 6.2040454408123e-06, |
|
"loss": 1.2161, |
|
"step": 877 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 0.11540497094392776, |
|
"learning_rate": 6.062071499217081e-06, |
|
"loss": 1.256, |
|
"step": 878 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 0.10363679379224777, |
|
"learning_rate": 5.921690100135713e-06, |
|
"loss": 1.2462, |
|
"step": 879 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 0.10643389075994492, |
|
"learning_rate": 5.782903623473201e-06, |
|
"loss": 1.34, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 0.14184726774692535, |
|
"learning_rate": 5.645714422095627e-06, |
|
"loss": 1.0978, |
|
"step": 881 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 0.11375614255666733, |
|
"learning_rate": 5.510124821790208e-06, |
|
"loss": 1.2334, |
|
"step": 882 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 0.11842206120491028, |
|
"learning_rate": 5.376137121225933e-06, |
|
"loss": 1.2043, |
|
"step": 883 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 0.1027233749628067, |
|
"learning_rate": 5.24375359191458e-06, |
|
"loss": 1.2038, |
|
"step": 884 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 0.09779934585094452, |
|
"learning_rate": 5.11297647817216e-06, |
|
"loss": 1.2799, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 0.28459811210632324, |
|
"learning_rate": 4.983807997080925e-06, |
|
"loss": 1.2972, |
|
"step": 886 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 0.12350057065486908, |
|
"learning_rate": 4.856250338451762e-06, |
|
"loss": 1.1445, |
|
"step": 887 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 0.10094188153743744, |
|
"learning_rate": 4.73030566478706e-06, |
|
"loss": 1.0144, |
|
"step": 888 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 0.10270640254020691, |
|
"learning_rate": 4.605976111244015e-06, |
|
"loss": 1.1673, |
|
"step": 889 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 0.12842753529548645, |
|
"learning_rate": 4.483263785598524e-06, |
|
"loss": 1.1681, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 0.10717935115098953, |
|
"learning_rate": 4.362170768209406e-06, |
|
"loss": 1.2661, |
|
"step": 891 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 0.20119836926460266, |
|
"learning_rate": 4.24269911198305e-06, |
|
"loss": 1.1492, |
|
"step": 892 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 0.11531922966241837, |
|
"learning_rate": 4.124850842338779e-06, |
|
"loss": 1.2629, |
|
"step": 893 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 0.10086618363857269, |
|
"learning_rate": 4.008627957174372e-06, |
|
"loss": 1.1223, |
|
"step": 894 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 0.10228842496871948, |
|
"learning_rate": 3.8940324268322285e-06, |
|
"loss": 1.2285, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 0.1152658760547638, |
|
"learning_rate": 3.7810661940660517e-06, |
|
"loss": 1.3102, |
|
"step": 896 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 0.09905364364385605, |
|
"learning_rate": 3.66973117400774e-06, |
|
"loss": 1.2029, |
|
"step": 897 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 0.11741360276937485, |
|
"learning_rate": 3.5600292541351e-06, |
|
"loss": 1.0504, |
|
"step": 898 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 0.10663048177957535, |
|
"learning_rate": 3.451962294239741e-06, |
|
"loss": 1.0434, |
|
"step": 899 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 0.13997888565063477, |
|
"learning_rate": 3.3455321263955786e-06, |
|
"loss": 1.1909, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 0.10722199827432632, |
|
"learning_rate": 3.240740554927768e-06, |
|
"loss": 1.1066, |
|
"step": 901 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 0.10431613773107529, |
|
"learning_rate": 3.1375893563820756e-06, |
|
"loss": 1.1097, |
|
"step": 902 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 0.10055112838745117, |
|
"learning_rate": 3.0360802794948686e-06, |
|
"loss": 1.119, |
|
"step": 903 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 0.14287303388118744, |
|
"learning_rate": 2.936215045163371e-06, |
|
"loss": 1.1977, |
|
"step": 904 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 0.11945986747741699, |
|
"learning_rate": 2.837995346416533e-06, |
|
"loss": 1.0618, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 0.11288944631814957, |
|
"learning_rate": 2.7414228483862657e-06, |
|
"loss": 1.1562, |
|
"step": 906 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 0.11274345964193344, |
|
"learning_rate": 2.6464991882793278e-06, |
|
"loss": 1.2794, |
|
"step": 907 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 0.10257246345281601, |
|
"learning_rate": 2.5532259753494827e-06, |
|
"loss": 1.1981, |
|
"step": 908 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 0.11306274682283401, |
|
"learning_rate": 2.4616047908702087e-06, |
|
"loss": 1.2817, |
|
"step": 909 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 0.1063089668750763, |
|
"learning_rate": 2.3716371881079557e-06, |
|
"loss": 1.2429, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 0.11371003836393356, |
|
"learning_rate": 2.2833246922957407e-06, |
|
"loss": 1.2017, |
|
"step": 911 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 0.10964678227901459, |
|
"learning_rate": 2.1966688006073823e-06, |
|
"loss": 1.0438, |
|
"step": 912 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 0.10942777991294861, |
|
"learning_rate": 2.111670982132008e-06, |
|
"loss": 1.1411, |
|
"step": 913 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 0.20406045019626617, |
|
"learning_rate": 2.028332677849254e-06, |
|
"loss": 1.26, |
|
"step": 914 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 0.1080511286854744, |
|
"learning_rate": 1.9466553006047383e-06, |
|
"loss": 1.2078, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 0.10461273789405823, |
|
"learning_rate": 1.86664023508617e-06, |
|
"loss": 1.0991, |
|
"step": 916 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 0.14060625433921814, |
|
"learning_rate": 1.7882888377998786e-06, |
|
"loss": 1.1999, |
|
"step": 917 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 0.10702642798423767, |
|
"learning_rate": 1.7116024370477878e-06, |
|
"loss": 1.1649, |
|
"step": 918 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 0.12004446983337402, |
|
"learning_rate": 1.6365823329049124e-06, |
|
"loss": 1.1923, |
|
"step": 919 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 0.12491834163665771, |
|
"learning_rate": 1.5632297971972965e-06, |
|
"loss": 1.1329, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 0.11271212995052338, |
|
"learning_rate": 1.4915460734805097e-06, |
|
"loss": 1.1198, |
|
"step": 921 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 0.11066664755344391, |
|
"learning_rate": 1.4215323770184641e-06, |
|
"loss": 1.2727, |
|
"step": 922 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 0.12216173112392426, |
|
"learning_rate": 1.3531898947629296e-06, |
|
"loss": 1.1179, |
|
"step": 923 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 0.10309845954179764, |
|
"learning_rate": 1.2865197853333178e-06, |
|
"loss": 1.328, |
|
"step": 924 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 0.1231243684887886, |
|
"learning_rate": 1.221523178997075e-06, |
|
"loss": 1.1911, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 0.12335013598203659, |
|
"learning_rate": 1.158201177650553e-06, |
|
"loss": 1.2257, |
|
"step": 926 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 0.11128874868154526, |
|
"learning_rate": 1.0965548548002803e-06, |
|
"loss": 1.2306, |
|
"step": 927 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 0.10876341909170151, |
|
"learning_rate": 1.036585255544764e-06, |
|
"loss": 1.2044, |
|
"step": 928 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 0.12024974822998047, |
|
"learning_rate": 9.782933965567954e-07, |
|
"loss": 1.1953, |
|
"step": 929 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 0.11236795783042908, |
|
"learning_rate": 9.21680266066216e-07, |
|
"loss": 1.3033, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 0.11030973494052887, |
|
"learning_rate": 8.667468238431453e-07, |
|
"loss": 1.2, |
|
"step": 931 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 0.10306066274642944, |
|
"learning_rate": 8.13494001181725e-07, |
|
"loss": 1.3141, |
|
"step": 932 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 0.10356069356203079, |
|
"learning_rate": 7.619227008843322e-07, |
|
"loss": 1.2244, |
|
"step": 933 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 0.10682324320077896, |
|
"learning_rate": 7.120337972462365e-07, |
|
"loss": 1.1282, |
|
"step": 934 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 0.11171166598796844, |
|
"learning_rate": 6.638281360408339e-07, |
|
"loss": 1.1534, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 0.09517676383256912, |
|
"learning_rate": 6.173065345052687e-07, |
|
"loss": 1.1292, |
|
"step": 936 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 0.11539721488952637, |
|
"learning_rate": 5.724697813265901e-07, |
|
"loss": 1.1716, |
|
"step": 937 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 0.11124927550554276, |
|
"learning_rate": 5.293186366284176e-07, |
|
"loss": 1.1894, |
|
"step": 938 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 0.261902779340744, |
|
"learning_rate": 4.878538319579629e-07, |
|
"loss": 1.2252, |
|
"step": 939 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 0.11507264524698257, |
|
"learning_rate": 4.4807607027372855e-07, |
|
"loss": 1.2918, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 0.1210726797580719, |
|
"learning_rate": 4.0998602593352863e-07, |
|
"loss": 1.1893, |
|
"step": 941 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 0.11546792089939117, |
|
"learning_rate": 3.7358434468308667e-07, |
|
"loss": 1.1486, |
|
"step": 942 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 0.12196337431669235, |
|
"learning_rate": 3.388716436450667e-07, |
|
"loss": 1.3495, |
|
"step": 943 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 0.10631462186574936, |
|
"learning_rate": 3.0584851130861513e-07, |
|
"loss": 1.1866, |
|
"step": 944 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 0.14002542197704315, |
|
"learning_rate": 2.745155075194128e-07, |
|
"loss": 0.9632, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 0.15344469249248505, |
|
"learning_rate": 2.448731634701384e-07, |
|
"loss": 1.3407, |
|
"step": 946 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 0.11114881932735443, |
|
"learning_rate": 2.169219816914869e-07, |
|
"loss": 1.1723, |
|
"step": 947 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 0.09970024973154068, |
|
"learning_rate": 1.9066243604367594e-07, |
|
"loss": 1.3012, |
|
"step": 948 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 0.10021547228097916, |
|
"learning_rate": 1.6609497170834155e-07, |
|
"loss": 1.1621, |
|
"step": 949 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 0.11028821766376495, |
|
"learning_rate": 1.4322000518106616e-07, |
|
"loss": 1.092, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 0.10497645288705826, |
|
"learning_rate": 1.220379242642844e-07, |
|
"loss": 1.1194, |
|
"step": 951 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 0.10754618048667908, |
|
"learning_rate": 1.025490880606883e-07, |
|
"loss": 1.0586, |
|
"step": 952 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 0.1118793711066246, |
|
"learning_rate": 8.475382696719881e-08, |
|
"loss": 1.1977, |
|
"step": 953 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 0.11205057799816132, |
|
"learning_rate": 6.865244266928139e-08, |
|
"loss": 0.944, |
|
"step": 954 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 0.10550331324338913, |
|
"learning_rate": 5.424520813590572e-08, |
|
"loss": 1.1463, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 0.10324594378471375, |
|
"learning_rate": 4.153236761488266e-08, |
|
"loss": 1.1325, |
|
"step": 956 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 0.10522052645683289, |
|
"learning_rate": 3.051413662874536e-08, |
|
"loss": 1.1922, |
|
"step": 957 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 0.12036368250846863, |
|
"learning_rate": 2.1190701971052218e-08, |
|
"loss": 1.123, |
|
"step": 958 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 0.11026199162006378, |
|
"learning_rate": 1.3562221703267152e-08, |
|
"loss": 1.261, |
|
"step": 959 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 0.28202083706855774, |
|
"learning_rate": 7.628825152050657e-09, |
|
"loss": 1.2547, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 0.10787337273359299, |
|
"learning_rate": 3.390612907094859e-09, |
|
"loss": 1.2832, |
|
"step": 961 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 0.10484518110752106, |
|
"learning_rate": 8.476568193804824e-10, |
|
"loss": 1.2884, |
|
"step": 962 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 0.11853493005037308, |
|
"learning_rate": 0.0, |
|
"loss": 1.2526, |
|
"step": 963 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"step": 963, |
|
"total_flos": 1.3034774175330337e+19, |
|
"train_loss": 1.2195818047285822, |
|
"train_runtime": 50845.1793, |
|
"train_samples_per_second": 0.152, |
|
"train_steps_per_second": 0.019 |
|
} |
|
], |
|
"logging_steps": 1.0, |
|
"max_steps": 963, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 100, |
|
"total_flos": 1.3034774175330337e+19, |
|
"train_batch_size": 4, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |