{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 33.982300884955755,
  "global_step": 600,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.06, "learning_rate": 2.0000000000000003e-06, "loss": 2.2431, "step": 1 },
    { "epoch": 0.11, "learning_rate": 4.000000000000001e-06, "loss": 2.5758, "step": 2 },
    { "epoch": 0.17, "learning_rate": 6e-06, "loss": 2.3017, "step": 3 },
    { "epoch": 0.23, "learning_rate": 8.000000000000001e-06, "loss": 2.3675, "step": 4 },
    { "epoch": 0.28, "learning_rate": 1e-05, "loss": 2.3984, "step": 5 },
    { "epoch": 0.34, "learning_rate": 1.2e-05, "loss": 2.2344, "step": 6 },
    { "epoch": 0.4, "learning_rate": 1.4000000000000001e-05, "loss": 2.3358, "step": 7 },
    { "epoch": 0.45, "learning_rate": 1.6000000000000003e-05, "loss": 2.183, "step": 8 },
    { "epoch": 0.51, "learning_rate": 1.8e-05, "loss": 2.2045, "step": 9 },
    { "epoch": 0.57, "learning_rate": 2e-05, "loss": 2.146, "step": 10 },
    { "epoch": 0.62, "learning_rate": 2.2000000000000003e-05, "loss": 2.0256, "step": 11 },
    { "epoch": 0.68, "learning_rate": 2.4e-05, "loss": 1.961, "step": 12 },
    { "epoch": 0.74, "learning_rate": 2.6000000000000002e-05, "loss": 2.121, "step": 13 },
    { "epoch": 0.79, "learning_rate": 2.8000000000000003e-05, "loss": 2.0502, "step": 14 },
    { "epoch": 0.85, "learning_rate": 3e-05, "loss": 1.9754, "step": 15 },
    { "epoch": 0.91, "learning_rate": 3.2000000000000005e-05, "loss": 1.8381, "step": 16 },
    { "epoch": 0.96, "learning_rate": 3.4000000000000007e-05, "loss": 1.8665, "step": 17 },
    { "epoch": 1.02, "learning_rate": 3.6e-05, "loss": 2.0878, "step": 18 },
    { "epoch": 1.08, "learning_rate": 3.8e-05, "loss": 1.7907, "step": 19 },
    { "epoch": 1.13, "learning_rate": 4e-05, "loss": 2.0663, "step": 20 },
    { "epoch": 1.19, "learning_rate": 4.2e-05, "loss": 1.7996, "step": 21 },
    { "epoch": 1.25, "learning_rate": 4.4000000000000006e-05, "loss": 2.0797, "step": 22 },
    { "epoch": 1.3, "learning_rate": 4.600000000000001e-05, "loss": 1.8656, "step": 23 },
    { "epoch": 1.36, "learning_rate": 4.8e-05, "loss": 1.8085, "step": 24 },
    { "epoch": 1.42, "learning_rate": 5e-05, "loss": 1.8279, "step": 25 },
    { "epoch": 1.47, "learning_rate": 5.2000000000000004e-05, "loss": 1.7922, "step": 26 },
    { "epoch": 1.53, "learning_rate": 5.4000000000000005e-05, "loss": 1.8851, "step": 27 },
    { "epoch": 1.59, "learning_rate": 5.6000000000000006e-05, "loss": 1.6355, "step": 28 },
    { "epoch": 1.64, "learning_rate": 5.8e-05, "loss": 1.7099, "step": 29 },
    { "epoch": 1.7, "learning_rate": 6e-05, "loss": 1.7664, "step": 30 },
    { "epoch": 1.76, "learning_rate": 6.2e-05, "loss": 1.7728, "step": 31 },
    { "epoch": 1.81, "learning_rate": 6.400000000000001e-05, "loss": 1.8335, "step": 32 },
    { "epoch": 1.87, "learning_rate": 6.6e-05, "loss": 1.7173, "step": 33 },
    { "epoch": 1.93, "learning_rate": 6.800000000000001e-05, "loss": 1.5278, "step": 34 },
    { "epoch": 1.98, "learning_rate": 7e-05, "loss": 1.8614, "step": 35 },
    { "epoch": 2.04, "learning_rate": 7.2e-05, "loss": 1.7847, "step": 36 },
    { "epoch": 2.1, "learning_rate": 7.4e-05, "loss": 1.7366, "step": 37 },
    { "epoch": 2.15, "learning_rate": 7.6e-05, "loss": 1.5379, "step": 38 },
    { "epoch": 2.21, "learning_rate": 7.800000000000001e-05, "loss": 1.6856, "step": 39 },
    { "epoch": 2.27, "learning_rate": 8e-05, "loss": 1.6452, "step": 40 },
    { "epoch": 2.32, "learning_rate": 8.2e-05, "loss": 1.8233, "step": 41 },
    { "epoch": 2.38, "learning_rate": 8.4e-05, "loss": 1.8079, "step": 42 },
    { "epoch": 2.44, "learning_rate": 8.6e-05, "loss": 1.7128, "step": 43 },
    { "epoch": 2.49, "learning_rate": 8.800000000000001e-05, "loss": 1.8746, "step": 44 },
    { "epoch": 2.55, "learning_rate": 9e-05, "loss": 1.6117, "step": 45 },
    { "epoch": 2.61, "learning_rate": 9.200000000000001e-05, "loss": 1.6552, "step": 46 },
    { "epoch": 2.66, "learning_rate": 9.4e-05, "loss": 1.6429, "step": 47 },
    { "epoch": 2.72, "learning_rate": 9.6e-05, "loss": 1.5429, "step": 48 },
    { "epoch": 2.78, "learning_rate": 9.8e-05, "loss": 1.6358, "step": 49 },
    { "epoch": 2.83, "learning_rate": 0.0001, "loss": 1.8581, "step": 50 },
    { "epoch": 2.89, "learning_rate": 0.00010200000000000001, "loss": 1.6794, "step": 51 },
    { "epoch": 2.95, "learning_rate": 0.00010400000000000001, "loss": 1.683, "step": 52 },
    { "epoch": 3.0, "learning_rate": 0.00010600000000000002, "loss": 1.7204, "step": 53 },
    { "epoch": 3.06, "learning_rate": 0.00010800000000000001, "loss": 1.7068, "step": 54 },
    { "epoch": 3.12, "learning_rate": 0.00011000000000000002, "loss": 1.5245, "step": 55 },
    { "epoch": 3.17, "learning_rate": 0.00011200000000000001, "loss": 1.5683, "step": 56 },
    { "epoch": 3.23, "learning_rate": 0.00011399999999999999, "loss": 1.8291, "step": 57 },
    { "epoch": 3.28, "learning_rate": 0.000116, "loss": 1.625, "step": 58 },
    { "epoch": 3.34, "learning_rate": 0.000118, "loss": 1.7945, "step": 59 },
    { "epoch": 3.4, "learning_rate": 0.00012, "loss": 1.5244, "step": 60 },
    { "epoch": 3.45, "learning_rate": 0.000122, "loss": 1.6436, "step": 61 },
    { "epoch": 3.51, "learning_rate": 0.000124, "loss": 1.6679, "step": 62 },
    { "epoch": 3.57, "learning_rate": 0.000126, "loss": 1.7644, "step": 63 },
    { "epoch": 3.62, "learning_rate": 0.00012800000000000002, "loss": 1.6776, "step": 64 },
    { "epoch": 3.68, "learning_rate": 0.00013000000000000002, "loss": 1.627, "step": 65 },
    { "epoch": 3.74, "learning_rate": 0.000132, "loss": 1.5084, "step": 66 },
    { "epoch": 3.79, "learning_rate": 0.000134, "loss": 1.6967, "step": 67 },
    { "epoch": 3.85, "learning_rate": 0.00013600000000000003, "loss": 1.5249, "step": 68 },
    { "epoch": 3.91, "learning_rate": 0.000138, "loss": 1.6821, "step": 69 },
    { "epoch": 3.96, "learning_rate": 0.00014, "loss": 1.5071, "step": 70 },
    { "epoch": 4.02, "learning_rate": 0.000142, "loss": 1.7624, "step": 71 },
    { "epoch": 4.08, "learning_rate": 0.000144, "loss": 1.6826, "step": 72 },
    { "epoch": 4.13, "learning_rate": 0.000146, "loss": 1.6488, "step": 73 },
    { "epoch": 4.19, "learning_rate": 0.000148, "loss": 1.5138, "step": 74 },
    { "epoch": 4.25, "learning_rate": 0.00015000000000000001, "loss": 1.6149, "step": 75 },
    { "epoch": 4.3, "learning_rate": 0.000152, "loss": 1.4464, "step": 76 },
    { "epoch": 4.36, "learning_rate": 0.000154, "loss": 1.4529, "step": 77 },
    { "epoch": 4.42, "learning_rate": 0.00015600000000000002, "loss": 1.3768, "step": 78 },
    { "epoch": 4.47, "learning_rate": 0.00015800000000000002, "loss": 1.4852, "step": 79 },
    { "epoch": 4.53, "learning_rate": 0.00016, "loss": 1.6161, "step": 80 },
    { "epoch": 4.59, "learning_rate": 0.000162, "loss": 1.749, "step": 81 },
    { "epoch": 4.64, "learning_rate": 0.000164, "loss": 1.7293, "step": 82 },
    { "epoch": 4.7, "learning_rate": 0.000166, "loss": 1.6095, "step": 83 },
    { "epoch": 4.76, "learning_rate": 0.000168, "loss": 1.6558, "step": 84 },
    { "epoch": 4.81, "learning_rate": 0.00017, "loss": 1.7317, "step": 85 },
    { "epoch": 4.87, "learning_rate": 0.000172, "loss": 1.6435, "step": 86 },
    { "epoch": 4.93, "learning_rate": 0.000174, "loss": 1.8289, "step": 87 },
    { "epoch": 4.98, "learning_rate": 0.00017600000000000002, "loss": 1.4689, "step": 88 },
    { "epoch": 5.04, "learning_rate": 0.00017800000000000002, "loss": 1.728, "step": 89 },
    { "epoch": 5.1, "learning_rate": 0.00018, "loss": 1.5481, "step": 90 },
    { "epoch": 5.15, "learning_rate": 0.000182, "loss": 1.6498, "step": 91 },
    { "epoch": 5.21, "learning_rate": 0.00018400000000000003, "loss": 1.5319, "step": 92 },
    { "epoch": 5.27, "learning_rate": 0.00018600000000000002, "loss": 1.6678, "step": 93 },
    { "epoch": 5.32, "learning_rate": 0.000188, "loss": 1.4058, "step": 94 },
    { "epoch": 5.38, "learning_rate": 0.00019, "loss": 1.6551, "step": 95 },
    { "epoch": 5.44, "learning_rate": 0.000192, "loss": 1.5329, "step": 96 },
    { "epoch": 5.49, "learning_rate": 0.000194, "loss": 1.4911, "step": 97 },
    { "epoch": 5.55, "learning_rate": 0.000196, "loss": 1.796, "step": 98 },
    { "epoch": 5.61, "learning_rate": 0.00019800000000000002, "loss": 1.6878, "step": 99 },
    { "epoch": 5.66, "learning_rate": 0.0002, "loss": 1.6483, "step": 100 },
    { "epoch": 5.72, "learning_rate": 0.00019987500000000002, "loss": 1.4458, "step": 101 },
    { "epoch": 5.78, "learning_rate": 0.00019975, "loss": 1.4612, "step": 102 },
    { "epoch": 5.83, "learning_rate": 0.00019962500000000001, "loss": 1.6, "step": 103 },
    { "epoch": 5.89, "learning_rate": 0.00019950000000000002, "loss": 1.5416, "step": 104 },
    { "epoch": 5.95, "learning_rate": 0.000199375, "loss": 1.614, "step": 105 },
    { "epoch": 6.0, "learning_rate": 0.00019925, "loss": 1.5744, "step": 106 },
    { "epoch": 6.06, "learning_rate": 0.000199125, "loss": 1.4321, "step": 107 },
    { "epoch": 6.12, "learning_rate": 0.000199, "loss": 1.5335, "step": 108 },
    { "epoch": 6.17, "learning_rate": 0.00019887500000000002, "loss": 1.4739, "step": 109 },
    { "epoch": 6.23, "learning_rate": 0.00019875, "loss": 1.4072, "step": 110 },
    { "epoch": 6.29, "learning_rate": 0.00019862500000000002, "loss": 1.3836, "step": 111 },
    { "epoch": 6.34, "learning_rate": 0.00019850000000000003, "loss": 1.5784, "step": 112 },
    { "epoch": 6.4, "learning_rate": 0.000198375, "loss": 1.7039, "step": 113 },
    { "epoch": 6.46, "learning_rate": 0.00019825, "loss": 1.587, "step": 114 },
    { "epoch": 6.51, "learning_rate": 0.000198125, "loss": 1.4426, "step": 115 },
    { "epoch": 6.57, "learning_rate": 0.00019800000000000002, "loss": 1.514, "step": 116 },
    { "epoch": 6.63, "learning_rate": 0.000197875, "loss": 1.4913, "step": 117 },
    { "epoch": 6.68, "learning_rate": 0.00019775, "loss": 1.5138, "step": 118 },
    { "epoch": 6.74, "learning_rate": 0.00019762500000000002, "loss": 1.4991, "step": 119 },
    { "epoch": 6.8, "learning_rate": 0.00019750000000000003, "loss": 1.6112, "step": 120 },
    { "epoch": 6.85, "learning_rate": 0.00019737499999999999, "loss": 1.6294, "step": 121 },
    { "epoch": 6.91, "learning_rate": 0.00019725, "loss": 1.6799, "step": 122 },
    { "epoch": 6.97, "learning_rate": 0.000197125, "loss": 1.6058, "step": 123 },
    { "epoch": 7.02, "learning_rate": 0.00019700000000000002, "loss": 1.7086, "step": 124 },
    { "epoch": 7.08, "learning_rate": 0.000196875, "loss": 1.659, "step": 125 },
    { "epoch": 7.14, "learning_rate": 0.00019675, "loss": 1.4066, "step": 126 },
    { "epoch": 7.19, "learning_rate": 0.00019662500000000002, "loss": 1.4265, "step": 127 },
    { "epoch": 7.25, "learning_rate": 0.0001965, "loss": 1.5891, "step": 128 },
    { "epoch": 7.31, "learning_rate": 0.00019637500000000002, "loss": 1.6088, "step": 129 },
    { "epoch": 7.36, "learning_rate": 0.00019625, "loss": 1.5274, "step": 130 },
    { "epoch": 7.42, "learning_rate": 0.000196125, "loss": 1.5988, "step": 131 },
    { "epoch": 7.48, "learning_rate": 0.000196, "loss": 1.6922, "step": 132 },
    { "epoch": 7.53, "learning_rate": 0.000195875, "loss": 1.4708, "step": 133 },
    { "epoch": 7.59, "learning_rate": 0.00019575000000000001, "loss": 1.3907, "step": 134 },
    { "epoch": 7.65, "learning_rate": 0.00019562500000000003, "loss": 1.5097, "step": 135 },
    { "epoch": 7.7, "learning_rate": 0.0001955, "loss": 1.5843, "step": 136 },
    { "epoch": 7.76, "learning_rate": 0.00019537500000000002, "loss": 1.5949, "step": 137 },
    { "epoch": 7.82, "learning_rate": 0.00019525, "loss": 1.5275, "step": 138 },
    { "epoch": 7.87, "learning_rate": 0.000195125, "loss": 1.3971, "step": 139 },
    { "epoch": 7.93, "learning_rate": 0.000195, "loss": 1.6457, "step": 140 },
    { "epoch": 7.99, "learning_rate": 0.000194875, "loss": 1.4983, "step": 141 },
    { "epoch": 8.04, "learning_rate": 0.00019475000000000002, "loss": 1.3942, "step": 142 },
    { "epoch": 8.1, "learning_rate": 0.000194625, "loss": 1.3633, "step": 143 },
    { "epoch": 8.16, "learning_rate": 0.0001945, "loss": 1.3908, "step": 144 },
    { "epoch": 8.21, "learning_rate": 0.00019437500000000002, "loss": 1.4746, "step": 145 },
    { "epoch": 8.27, "learning_rate": 0.00019425, "loss": 1.4776, "step": 146 },
    { "epoch": 8.33, "learning_rate": 0.000194125, "loss": 1.5559, "step": 147 },
    { "epoch": 8.38, "learning_rate": 0.000194, "loss": 1.5798, "step": 148 },
    { "epoch": 8.44, "learning_rate": 0.000193875, "loss": 1.5173, "step": 149 },
    { "epoch": 8.5, "learning_rate": 0.00019375000000000002, "loss": 1.4286, "step": 150 },
    { "epoch": 8.55, "learning_rate": 0.000193625, "loss": 1.4609, "step": 151 },
    { "epoch": 8.61, "learning_rate": 0.00019350000000000001, "loss": 1.361, "step": 152 },
    { "epoch": 8.67, "learning_rate": 0.00019337500000000002, "loss": 1.6866, "step": 153 },
    { "epoch": 8.72, "learning_rate": 0.00019325, "loss": 1.5571, "step": 154 },
    { "epoch": 8.78, "learning_rate": 0.000193125, "loss": 1.4351, "step": 155 },
    { "epoch": 8.84, "learning_rate": 0.000193, "loss": 1.444, "step": 156 },
    { "epoch": 8.89, "learning_rate": 0.000192875, "loss": 1.5379, "step": 157 },
    { "epoch": 8.95, "learning_rate": 0.00019275, "loss": 1.4775, "step": 158 },
    { "epoch": 9.01, "learning_rate": 0.000192625, "loss": 1.5141, "step": 159 },
    { "epoch": 9.06, "learning_rate": 0.00019250000000000002, "loss": 1.491, "step": 160 },
    { "epoch": 9.12, "learning_rate": 0.00019237500000000003, "loss": 1.6459, "step": 161 },
    { "epoch": 9.18, "learning_rate": 0.00019225, "loss": 1.4853, "step": 162 },
    { "epoch": 9.23, "learning_rate": 0.000192125, "loss": 1.5958, "step": 163 },
    { "epoch": 9.29, "learning_rate": 0.000192, "loss": 1.228, "step": 164 },
    { "epoch": 9.35, "learning_rate": 0.00019187500000000002, "loss": 1.3625, "step": 165 },
    { "epoch": 9.4, "learning_rate": 0.00019175, "loss": 1.3645, "step": 166 },
    { "epoch": 9.46, "learning_rate": 0.000191625, "loss": 1.4625, "step": 167 },
    { "epoch": 9.52, "learning_rate": 0.00019150000000000002, "loss": 1.4005, "step": 168 },
    { "epoch": 9.57, "learning_rate": 0.000191375, "loss": 1.4099, "step": 169 },
    { "epoch": 9.63, "learning_rate": 0.00019125000000000001, "loss": 1.5099, "step": 170 },
    { "epoch": 9.68, "learning_rate": 0.000191125, "loss": 1.5183, "step": 171 },
    { "epoch": 9.74, "learning_rate": 0.000191, "loss": 1.5149, "step": 172 },
    { "epoch": 9.8, "learning_rate": 0.000190875, "loss": 1.2841, "step": 173 },
    { "epoch": 9.85, "learning_rate": 0.00019075, "loss": 1.6436, "step": 174 },
    { "epoch": 9.91, "learning_rate": 0.000190625, "loss": 1.4587, "step": 175 },
    { "epoch": 9.97, "learning_rate": 0.00019050000000000002, "loss": 1.7131, "step": 176 },
    { "epoch": 10.02, "learning_rate": 0.000190375, "loss": 1.5204, "step": 177 },
    { "epoch": 10.08, "learning_rate": 0.00019025000000000002, "loss": 1.2174, "step": 178 },
    { "epoch": 10.14, "learning_rate": 0.00019012500000000003, "loss": 1.3831, "step": 179 },
    { "epoch": 10.19, "learning_rate": 0.00019, "loss": 1.1915, "step": 180 },
    { "epoch": 10.25, "learning_rate": 0.000189875, "loss": 1.4282, "step": 181 },
    { "epoch": 10.31, "learning_rate": 0.00018975, "loss": 1.4675, "step": 182 },
    { "epoch": 10.36, "learning_rate": 0.00018962500000000001, "loss": 1.4183, "step": 183 },
    { "epoch": 10.42, "learning_rate": 0.0001895, "loss": 1.6309, "step": 184 },
    { "epoch": 10.48, "learning_rate": 0.000189375, "loss": 1.4368, "step": 185 },
    { "epoch": 10.53, "learning_rate": 0.00018925000000000002, "loss": 1.507, "step": 186 },
    { "epoch": 10.59, "learning_rate": 0.00018912500000000003, "loss": 1.4642, "step": 187 },
    { "epoch": 10.65, "learning_rate": 0.00018899999999999999, "loss": 1.5397, "step": 188 },
    { "epoch": 10.7, "learning_rate": 0.000188875, "loss": 1.4003, "step": 189 },
    { "epoch": 10.76, "learning_rate": 0.00018875, "loss": 1.5798, "step": 190 },
    { "epoch": 10.82, "learning_rate": 0.00018862500000000002, "loss": 1.4797, "step": 191 },
    { "epoch": 10.87, "learning_rate": 0.0001885, "loss": 1.429, "step": 192 },
    { "epoch": 10.93, "learning_rate": 0.000188375, "loss": 1.49, "step": 193 },
    { "epoch": 10.99, "learning_rate": 0.00018825000000000002, "loss": 1.5075, "step": 194 },
    { "epoch": 11.04, "learning_rate": 0.000188125, "loss": 1.4662, "step": 195 },
    { "epoch": 11.1, "learning_rate": 0.000188, "loss": 1.4103, "step": 196 },
    { "epoch": 11.16, "learning_rate": 0.000187875, "loss": 1.4858, "step": 197 },
    { "epoch": 11.21, "learning_rate": 0.00018775, "loss": 1.4841, "step": 198 },
    { "epoch": 11.27, "learning_rate": 0.000187625, "loss": 1.4102, "step": 199 },
    { "epoch": 11.33, "learning_rate": 0.0001875, "loss": 1.3401, "step": 200 },
    { "epoch": 11.33, "eval_loss": 2.023402452468872, "eval_runtime": 59.5382, "eval_samples_per_second": 4.216, "eval_steps_per_second": 1.058, "step": 200 },
    { "epoch": 11.38, "learning_rate": 0.00018737500000000001, "loss": 1.3293, "step": 201 },
    { "epoch": 11.44, "learning_rate": 0.00018725000000000002, "loss": 1.5936, "step": 202 },
    { "epoch": 11.5, "learning_rate": 0.000187125, "loss": 1.4509, "step": 203 },
    { "epoch": 11.55, "learning_rate": 0.00018700000000000002, "loss": 1.4002, "step": 204 },
    { "epoch": 11.61, "learning_rate": 0.000186875, "loss": 1.4654, "step": 205 },
    { "epoch": 11.67, "learning_rate": 0.00018675, "loss": 1.456, "step": 206 },
    { "epoch": 11.72, "learning_rate": 0.000186625, "loss": 1.5051, "step": 207 },
    { "epoch": 11.78, "learning_rate": 0.0001865, "loss": 1.4447, "step": 208 },
    { "epoch": 11.84, "learning_rate": 0.00018637500000000002, "loss": 1.4817, "step": 209 },
    { "epoch": 11.89, "learning_rate": 0.00018625, "loss": 1.3649, "step": 210 },
    { "epoch": 11.95, "learning_rate": 0.000186125, "loss": 1.4798, "step": 211 },
    { "epoch": 12.01, "learning_rate": 0.00018600000000000002, "loss": 1.469, "step": 212 },
    { "epoch": 12.06, "learning_rate": 0.000185875, "loss": 1.4963, "step": 213 },
    { "epoch": 12.12, "learning_rate": 0.00018575, "loss": 1.377, "step": 214 },
    { "epoch": 12.18, "learning_rate": 0.000185625, "loss": 1.6133, "step": 215 },
    { "epoch": 12.23, "learning_rate": 0.0001855, "loss": 1.3288, "step": 216 },
    { "epoch": 12.29, "learning_rate": 0.00018537500000000002, "loss": 1.514, "step": 217 },
    { "epoch": 12.35, "learning_rate": 0.00018525, "loss": 1.4412, "step": 218 },
    { "epoch": 12.4, "learning_rate": 0.00018512500000000001, "loss": 1.4004, "step": 219 },
    { "epoch": 12.46, "learning_rate": 0.00018500000000000002, "loss": 1.359, "step": 220 },
    { "epoch": 12.52, "learning_rate": 0.000184875, "loss": 1.3539, "step": 221 },
    { "epoch": 12.57, "learning_rate": 0.00018475, "loss": 1.3255, "step": 222 },
    { "epoch": 12.63, "learning_rate": 0.000184625, "loss": 1.4911, "step": 223 },
    { "epoch": 12.69, "learning_rate": 0.0001845, "loss": 1.4146, "step": 224 },
    { "epoch": 12.74, "learning_rate": 0.000184375, "loss": 1.3291, "step": 225 },
    { "epoch": 12.8, "learning_rate": 0.00018425, "loss": 1.356, "step": 226 },
    { "epoch": 12.86, "learning_rate": 0.00018412500000000002, "loss": 1.4216, "step": 227 },
    { "epoch": 12.91, "learning_rate": 0.00018400000000000003, "loss": 1.4282, "step": 228 },
    { "epoch": 12.97, "learning_rate": 0.000183875, "loss": 1.354, "step": 229 },
    { "epoch": 13.03, "learning_rate": 0.00018375, "loss": 1.4358, "step": 230 },
    { "epoch": 13.08, "learning_rate": 0.000183625, "loss": 1.5088, "step": 231 },
    { "epoch": 13.14, "learning_rate": 0.00018350000000000002, "loss": 1.3588, "step": 232 },
    { "epoch": 13.2, "learning_rate": 0.000183375, "loss": 1.3718, "step": 233 },
    { "epoch": 13.25, "learning_rate": 0.00018325, "loss": 1.4119, "step": 234 },
    { "epoch": 13.31, "learning_rate": 0.00018312500000000002, "loss": 1.2867, "step": 235 },
    { "epoch": 13.37, "learning_rate": 0.000183, "loss": 1.2827, "step": 236 },
    { "epoch": 13.42, "learning_rate": 0.000182875, "loss": 1.4267, "step": 237 },
    { "epoch": 13.48, "learning_rate": 0.00018275, "loss": 1.3174, "step": 238 },
    { "epoch": 13.54, "learning_rate": 0.000182625, "loss": 1.4199, "step": 239 },
    { "epoch": 13.59, "learning_rate": 0.0001825, "loss": 1.4644, "step": 240 },
    { "epoch": 13.65, "learning_rate": 0.000182375, "loss": 1.3819, "step": 241 },
    { "epoch": 13.71, "learning_rate": 0.00018225, "loss": 1.3101, "step": 242 },
    { "epoch": 13.76, "learning_rate": 0.00018212500000000002, "loss": 1.353, "step": 243 },
    { "epoch": 13.82, "learning_rate": 0.000182, "loss": 1.3191, "step": 244 },
    { "epoch": 13.88, "learning_rate": 0.00018187500000000002, "loss": 1.3851, "step": 245 },
    { "epoch": 13.93, "learning_rate": 0.00018175, "loss": 1.3599, "step": 246 },
    { "epoch": 13.99, "learning_rate": 0.000181625, "loss": 1.4739, "step": 247 },
    { "epoch": 14.05, "learning_rate": 0.0001815, "loss": 1.3944, "step": 248 },
    { "epoch": 14.1, "learning_rate": 0.000181375, "loss": 1.208, "step": 249 },
    { "epoch": 14.16, "learning_rate": 0.00018125000000000001, "loss": 1.4049, "step": 250 },
    { "epoch": 14.22, "learning_rate": 0.000181125, "loss": 1.2871, "step": 251 },
    { "epoch": 14.27, "learning_rate": 0.000181, "loss": 1.4616, "step": 252 },
    { "epoch": 14.33, "learning_rate": 0.00018087500000000002, "loss": 1.3318, "step": 253 },
    { "epoch": 14.39, "learning_rate": 0.00018075000000000003, "loss": 1.328, "step": 254 },
    { "epoch": 14.44, "learning_rate": 0.000180625, "loss": 1.2645, "step": 255 },
    { "epoch": 14.5, "learning_rate": 0.0001805, "loss": 1.2477, "step": 256 },
    { "epoch": 14.56, "learning_rate": 0.000180375, "loss": 1.4021, "step": 257 },
    { "epoch": 14.61, "learning_rate": 0.00018025000000000002, "loss": 1.424, "step": 258 },
    { "epoch": 14.67, "learning_rate": 0.000180125, "loss": 1.3124, "step": 259 },
    { "epoch": 14.73, "learning_rate": 0.00018, "loss": 1.3239, "step": 260 },
    { "epoch": 14.78, "learning_rate": 0.00017987500000000002, "loss": 1.3568, "step": 261 },
    { "epoch": 14.84, "learning_rate": 0.00017975, "loss": 1.1723, "step": 262 },
    { "epoch": 14.9, "learning_rate": 0.000179625, "loss": 1.3399, "step": 263 },
    { "epoch": 14.95, "learning_rate": 0.0001795, "loss": 1.3437, "step": 264 },
    { "epoch": 15.01, "learning_rate": 0.000179375, "loss": 1.3229, "step": 265 },
    { "epoch": 15.07, "learning_rate": 0.00017925000000000002, "loss": 1.2831, "step": 266 },
    { "epoch": 15.12, "learning_rate": 0.000179125, "loss": 1.1986, "step": 267 },
    { "epoch": 15.18, "learning_rate": 0.00017900000000000001, "loss": 1.236, "step": 268 },
    { "epoch": 15.24, "learning_rate": 0.00017887500000000002, "loss": 1.3338, "step": 269 },
    { "epoch": 15.29, "learning_rate": 0.00017875, "loss": 1.2406, "step": 270 },
    { "epoch": 15.35, "learning_rate": 0.000178625, "loss": 1.1528, "step": 271 },
    { "epoch": 15.41, "learning_rate": 0.0001785, "loss": 1.3401, "step": 272 },
    { "epoch": 15.46, "learning_rate": 0.000178375, "loss": 1.3493, "step": 273 },
    { "epoch": 15.52, "learning_rate": 0.00017825, "loss": 1.2721, "step": 274 },
    { "epoch": 15.58, "learning_rate": 0.000178125, "loss": 1.3501, "step": 275 },
    { "epoch": 15.63, "learning_rate": 0.00017800000000000002, "loss": 1.3662, "step": 276 },
    { "epoch": 15.69, "learning_rate": 0.000177875, "loss": 1.2117, "step": 277 },
    { "epoch": 15.75, "learning_rate": 0.00017775, "loss": 1.3798, "step": 278 },
    { "epoch": 15.8, "learning_rate": 0.00017762500000000002, "loss": 1.3108, "step": 279 },
    { "epoch": 15.86, "learning_rate": 0.0001775, "loss": 1.3565, "step": 280 },
    { "epoch": 15.92, "learning_rate": 0.00017737500000000002, "loss": 1.3961, "step": 281 },
    { "epoch": 15.97, "learning_rate": 0.00017725, "loss": 1.216, "step": 282 },
    { "epoch": 16.03, "learning_rate": 0.000177125, "loss": 1.3027, "step": 283 },
    { "epoch": 16.08, "learning_rate": 0.00017700000000000002, "loss": 1.1798, "step": 284 },
    { "epoch": 16.14, "learning_rate": 0.000176875, "loss": 1.2375, "step": 285 },
    { "epoch": 16.2, "learning_rate": 0.00017675000000000001, "loss": 1.2179, "step": 286 },
    { "epoch": 16.25, "learning_rate": 0.00017662500000000002, "loss": 1.1218, "step": 287 },
    { "epoch": 16.31, "learning_rate": 0.0001765, "loss": 1.2144, "step": 288 },
    { "epoch": 16.37, "learning_rate": 0.000176375, "loss": 1.1878, "step": 289 },
    { "epoch": 16.42, "learning_rate": 0.00017625, "loss": 1.2809, "step": 290 },
    { "epoch": 16.48, "learning_rate": 0.000176125, "loss": 1.1862, "step": 291 },
    { "epoch": 16.54, "learning_rate": 0.00017600000000000002, "loss": 1.2139, "step": 292 },
    { "epoch": 16.59, "learning_rate": 0.000175875, "loss": 1.2991, "step": 293 },
    { "epoch": 16.65, "learning_rate": 0.00017575000000000002, "loss": 1.3304, "step": 294 },
    { "epoch": 16.71, "learning_rate": 0.00017562500000000003, "loss": 1.4349, "step": 295 },
    { "epoch": 16.76, "learning_rate": 0.0001755, "loss": 1.2817, "step": 296 },
    { "epoch": 16.82, "learning_rate": 0.000175375, "loss": 1.2263, "step": 297 },
    { "epoch": 16.88, "learning_rate": 0.00017525, "loss": 1.3584, "step": 298 },
    { "epoch": 16.93, "learning_rate": 0.00017512500000000001, "loss": 1.4542, "step": 299 },
    { "epoch": 16.99, "learning_rate": 0.000175, "loss": 1.392, "step": 300 },
    { "epoch": 17.05, "learning_rate": 0.000174875, "loss": 1.2685, "step": 301 },
    { "epoch": 17.1, "learning_rate": 0.00017475000000000002, "loss": 1.2337, "step": 302 },
    { "epoch": 17.16, "learning_rate": 0.00017462500000000003, "loss": 1.3299, "step": 303 },
    { "epoch": 17.22, "learning_rate": 0.0001745, "loss": 1.2579, "step": 304 },
    { "epoch": 17.27, "learning_rate": 0.000174375, "loss": 1.234, "step": 305 },
    { "epoch": 17.33, "learning_rate": 0.00017425, "loss": 1.2273, "step": 306 },
    { "epoch": 17.39, "learning_rate": 0.00017412500000000002, "loss": 1.3174, "step": 307 },
    { "epoch": 17.44, "learning_rate": 0.000174, "loss": 1.1738, "step": 308 },
    { "epoch": 17.5, "learning_rate": 0.000173875, "loss": 1.0985, "step": 309 },
    { "epoch": 17.56, "learning_rate": 0.00017375000000000002, "loss": 1.283, "step": 310 },
    { "epoch": 17.61, "learning_rate": 0.000173625, "loss": 1.318, "step": 311 },
    { "epoch": 17.67, "learning_rate": 0.00017350000000000002, "loss": 1.3006, "step": 312 },
    { "epoch": 17.73, "learning_rate": 0.000173375, "loss": 1.3629, "step": 313 },
    { "epoch": 17.78, "learning_rate": 0.00017325, "loss": 1.3144, "step": 314 },
    { "epoch": 17.84, "learning_rate": 0.000173125, "loss": 1.3476, "step": 315 },
    { "epoch": 17.9, "learning_rate": 0.000173, "loss": 1.1403, "step": 316 },
    { "epoch": 17.95, "learning_rate": 0.00017287500000000001, "loss": 1.1434, "step": 317 },
    { "epoch": 18.01, "learning_rate": 0.00017275000000000002, "loss": 1.1653, "step": 318 },
    { "epoch": 18.07, "learning_rate": 0.000172625, "loss": 1.1411, "step": 319 },
    { "epoch": 18.12, "learning_rate": 0.00017250000000000002, "loss": 1.2393, "step": 320 },
    { "epoch": 18.18, "learning_rate": 0.000172375, "loss": 1.2092, "step": 321 },
    { "epoch": 18.24, "learning_rate": 0.00017225, "loss": 1.1437, "step": 322 },
    { "epoch": 18.29, "learning_rate": 0.000172125, "loss": 1.1145, "step": 323 },
    { "epoch": 18.35, "learning_rate": 0.000172, "loss": 1.2723, "step": 324 },
    { "epoch": 18.41, "learning_rate": 0.00017187500000000002, "loss": 1.2471, "step": 325 },
    { "epoch": 18.46, "learning_rate": 0.00017175, "loss": 1.1047, "step": 326 },
    { "epoch": 18.52, "learning_rate": 0.000171625, "loss": 1.0908, "step": 327 },
    { "epoch": 18.58, "learning_rate": 0.00017150000000000002, "loss": 1.3261, "step": 328 },
    { "epoch": 18.63, "learning_rate": 0.00017137500000000003, "loss": 1.2138, "step": 329 },
    { "epoch": 18.69, "learning_rate": 0.00017125, "loss": 1.253, "step": 330 },
    { "epoch": 18.75, "learning_rate": 0.000171125, "loss": 1.2831, "step": 331 },
    { "epoch": 18.8, "learning_rate": 0.000171, "loss": 1.1795, "step": 332 },
    { "epoch": 18.86, "learning_rate": 0.00017087500000000002, "loss": 1.2454, "step": 333 },
    { "epoch": 18.92, "learning_rate": 0.00017075, "loss": 1.2997, "step": 334 },
    { "epoch": 18.97, "learning_rate": 0.00017062500000000001, "loss": 1.2243, "step": 335 },
    { "epoch": 19.03, "learning_rate": 0.00017050000000000002, "loss": 1.1705, "step": 336 },
    { "epoch": 19.09, "learning_rate": 0.000170375, "loss": 1.133, "step": 337 },
    { "epoch": 19.14, "learning_rate": 0.00017025, "loss": 1.1997, "step": 338 },
    { "epoch": 19.2, "learning_rate": 0.000170125, "loss": 1.1572, "step": 339 },
    { "epoch": 19.26, "learning_rate": 0.00017, "loss": 1.2115, "step": 340 },
    { "epoch": 19.31, "learning_rate": 0.000169875, "loss": 1.0557, "step": 341 },
    { "epoch": 19.37, "learning_rate": 0.00016975, "loss": 1.2455, "step": 342 },
    { "epoch": 19.43, "learning_rate": 0.00016962500000000002, "loss": 1.1917, "step": 343 },
    { "epoch": 19.48, "learning_rate": 0.00016950000000000003, "loss": 1.2846, "step": 344 },
    { "epoch": 19.54, "learning_rate": 0.000169375, "loss": 1.2735, "step": 345 },
    { "epoch": 19.6, "learning_rate": 0.00016925, "loss": 1.1168, "step": 346 },
    { "epoch": 19.65, "learning_rate": 0.000169125, "loss": 1.2314, "step": 347 },
    { "epoch": 19.71, "learning_rate": 0.00016900000000000002, "loss": 1.1641, "step": 348 },
    { "epoch": 19.77, "learning_rate": 0.000168875, "loss": 1.2631, "step": 349 },
    { "epoch": 19.82, "learning_rate": 0.00016875, "loss": 1.0431, "step": 350 },
    { "epoch": 19.88, "learning_rate": 0.00016862500000000002, "loss": 1.0931, "step": 351 },
    { "epoch": 19.94, "learning_rate": 0.0001685, "loss": 1.2183, "step": 352 },
    { "epoch": 19.99, "learning_rate": 0.000168375, "loss": 1.0584, "step": 353 },
    { "epoch": 20.05, "learning_rate": 0.00016825000000000002, "loss": 1.1146, "step": 354 },
    { "epoch": 20.11, "learning_rate": 0.000168125, "loss": 1.0702, "step": 355 },
    { "epoch": 20.16, "learning_rate": 0.000168, "loss": 1.0612, "step": 356 },
    { "epoch": 20.22, "learning_rate": 0.000167875, "loss": 1.2609, "step": 357 },
    { "epoch": 20.28, "learning_rate": 0.00016775, "loss": 0.9842, "step": 358 },
    { "epoch": 20.33, "learning_rate": 0.00016762500000000002, "loss": 1.0986, "step": 359 },
    { "epoch": 20.39, "learning_rate": 0.0001675, "loss": 1.2615, "step": 360 },
    { "epoch": 20.45, "learning_rate": 0.00016737500000000002, "loss": 1.187, "step": 361 },
    { "epoch": 20.5, "learning_rate": 0.00016725000000000003, "loss": 1.1115, "step": 362 },
    { "epoch": 20.56, "learning_rate": 0.000167125, "loss": 1.1215, "step": 363 },
    { "epoch": 20.62, "learning_rate": 0.000167, "loss": 1.0814, "step": 364 },
    { "epoch": 20.67, "learning_rate": 0.000166875, "loss": 1.2158, "step": 365 },
    { "epoch": 20.73, "learning_rate": 0.00016675000000000001, "loss": 1.1993, "step": 366 },
    { "epoch": 20.79, "learning_rate": 0.000166625, "loss": 1.2018, "step": 367 },
    { "epoch": 20.84, "learning_rate": 0.0001665, "loss": 1.1376, "step": 368 },
    { "epoch": 20.9, "learning_rate": 0.00016637500000000002, "loss": 1.1453, "step": 369 },
    { "epoch": 20.96, "learning_rate": 0.00016625000000000003, "loss": 1.2199, "step": 370 },
    { "epoch": 21.01, "learning_rate": 0.00016612499999999999, "loss": 1.2592, "step": 371 },
    { "epoch": 21.07, "learning_rate": 0.000166, "loss": 1.1196, "step": 372 },
    { "epoch": 21.13, "learning_rate": 0.000165875, "loss": 1.2162, "step": 373 },
    { "epoch": 21.18, "learning_rate": 0.00016575000000000002, "loss": 1.2218, "step": 374 },
    { "epoch": 21.24, "learning_rate": 0.000165625, "loss": 1.2069, "step": 375 },
    { "epoch": 21.3, "learning_rate": 0.0001655, "loss": 1.0723, "step": 376 },
    { "epoch": 21.35, "learning_rate": 0.00016537500000000002, "loss": 1.0855, "step": 377 },
    { "epoch": 21.41, "learning_rate": 0.00016525, "loss": 1.0705, "step": 378 },
    { "epoch": 21.47, "learning_rate": 0.00016512500000000002, "loss": 1.1985, "step": 379 },
    { "epoch": 21.52, "learning_rate": 0.000165, "loss": 1.1375, "step": 380 },
    { "epoch": 21.58, "learning_rate": 0.000164875, "loss": 1.0706, "step": 381 },
    { "epoch": 21.64, "learning_rate": 0.00016475, "loss": 1.1144, "step": 382 },
    { "epoch": 21.69, "learning_rate": 0.000164625, "loss": 1.1604, "step": 383 },
    { "epoch": 21.75, "learning_rate": 0.00016450000000000001, "loss": 1.0285, "step": 384 },
    { "epoch": 21.81, "learning_rate": 0.00016437500000000002, "loss": 1.2177, "step": 385 },
    { "epoch": 21.86, "learning_rate": 0.00016425, "loss": 1.1429, "step": 386 },
    { "epoch": 21.92, "learning_rate": 0.00016412500000000002, "loss": 1.067, "step": 387 },
    { "epoch": 21.98, "learning_rate": 0.000164, "loss": 1.1697, "step": 388 },
    { "epoch": 22.03, "learning_rate": 0.000163875, "loss": 1.0991, "step": 389 },
    { "epoch": 22.09, "learning_rate": 0.00016375, "loss": 1.0132, "step": 390 },
    { "epoch": 22.15, "learning_rate": 0.000163625, "loss": 1.0528, "step": 391 },
    { "epoch": 22.2, "learning_rate": 0.00016350000000000002, "loss": 1.1334, "step": 392 },
    { "epoch": 22.26, "learning_rate": 0.000163375, "loss": 1.0935, "step": 393 },
    { "epoch": 22.32, "learning_rate": 0.00016325, "loss": 0.9794, "step": 394 },
    { "epoch": 22.37, "learning_rate": 0.00016312500000000002, "loss": 1.1197, "step": 395 },
    { "epoch": 22.43, "learning_rate": 0.000163, "loss": 1.0824, "step": 396 },
    { "epoch": 22.48, "learning_rate": 0.000162875, "loss": 1.1128, "step": 397 },
    { "epoch": 22.54, "learning_rate": 0.00016275, "loss": 1.1092, "step": 398 },
    { "epoch": 22.6, "learning_rate": 0.000162625, "loss": 1.1914, "step": 399 },
    { "epoch": 22.65, "learning_rate": 0.00016250000000000002, "loss": 1.1871, "step": 400 },
    { "epoch": 22.65, "eval_loss": 2.3080484867095947, "eval_runtime": 67.9996, "eval_samples_per_second": 3.691, "eval_steps_per_second": 0.926, "step": 400 },
    { "epoch": 22.71, "learning_rate": 0.000162375, "loss": 1.1742, "step": 401 },
    { "epoch": 22.77, "learning_rate": 0.00016225000000000001, "loss": 1.3617, "step": 402 },
    { "epoch": 22.82, "learning_rate": 0.00016212500000000002, "loss": 1.0186, "step": 403 },
    { "epoch": 22.88, "learning_rate": 0.000162, "loss": 1.1819, "step": 404 },
    { "epoch": 22.94, "learning_rate": 0.000161875, "loss": 1.0858, "step": 405 },
    { "epoch": 22.99, "learning_rate": 0.00016175, "loss": 1.084, "step": 406 },
    { "epoch": 23.05, "learning_rate": 0.000161625, "loss": 1.0896, "step": 407 },
    { "epoch": 23.11, "learning_rate": 0.0001615, "loss": 0.9239, "step": 408 },
    { "epoch": 23.16, "learning_rate": 0.000161375, "loss": 1.16, "step": 409 },
    { "epoch": 23.22, "learning_rate": 0.00016125000000000002, "loss": 1.0874, "step": 410 },
    { "epoch": 23.28, "learning_rate": 0.00016112500000000003, "loss": 1.0956, "step": 411 },
    { "epoch": 23.33, "learning_rate": 0.000161, "loss": 1.1366, "step": 412 },
    { "epoch": 23.39, "learning_rate": 0.000160875, "loss": 1.1507, "step": 413 },
    { "epoch": 23.45, "learning_rate": 0.00016075, "loss": 1.0451, "step": 414 },
    { "epoch": 23.5, "learning_rate": 0.00016062500000000001, "loss": 1.1593, "step": 415 },
    { "epoch": 23.56, "learning_rate": 0.0001605, "loss": 1.0692, "step": 416 },
    { "epoch": 23.62, "learning_rate": 0.000160375, "loss": 1.0252, "step": 417 },
    { "epoch": 23.67, "learning_rate": 0.00016025000000000002, "loss": 1.0877, "step": 418 },
    { "epoch": 23.73, "learning_rate": 0.000160125, "loss": 1.2211, "step": 419 },
    { "epoch": 23.79, "learning_rate": 0.00016, "loss": 1.1834, "step": 420 },
    { "epoch": 23.84, "learning_rate": 0.000159875, "loss": 1.0965, "step": 421 },
    { "epoch": 23.9, "learning_rate": 0.00015975, "loss": 1.0839, "step": 422 },
    { "epoch": 23.96, "learning_rate": 0.000159625, "loss": 1.0622, "step": 423 },
    { "epoch": 24.01, "learning_rate": 0.0001595, "loss": 1.1252, "step": 424 },
    { "epoch": 24.07, "learning_rate": 0.000159375, "loss": 1.1057, "step": 425 },
    { "epoch": 24.13, "learning_rate": 0.00015925000000000002, "loss": 1.0499, "step": 426 },
    { "epoch": 24.18, "learning_rate": 0.000159125, "loss": 1.2239, "step": 427 },
    { "epoch": 24.24, "learning_rate": 0.00015900000000000002, "loss": 1.0456, "step": 428 },
    { "epoch": 24.3, "learning_rate": 0.00015887500000000003, "loss": 1.1966, "step": 429 },
    { "epoch": 24.35, "learning_rate": 0.00015875, "loss": 1.0116, "step": 430 },
    { "epoch": 24.41, "learning_rate": 0.000158625, "loss": 1.0544, "step": 431 },
    { "epoch": 24.47, "learning_rate": 0.0001585, "loss": 0.9815, "step": 432 },
    { "epoch": 24.52, "learning_rate": 0.00015837500000000001, "loss": 1.0492, "step": 433 },
    { "epoch": 24.58, "learning_rate": 0.00015825, "loss": 1.067, "step": 434 },
    { "epoch": 24.64, "learning_rate": 0.000158125, "loss": 1.0854, "step": 435 },
    { "epoch": 24.69, "learning_rate": 0.00015800000000000002, "loss": 1.1032, "step": 436 },
    { "epoch": 24.75, "learning_rate": 0.00015787500000000003, "loss": 1.0008, "step": 437 },
    { "epoch": 24.81, "learning_rate": 0.00015774999999999999, "loss": 1.0489, "step": 438 },
    { "epoch": 24.86, "learning_rate": 0.000157625, "loss": 1.0176, "step": 439 },
    { "epoch": 24.92, "learning_rate": 0.0001575, "loss": 0.9086, "step": 440 },
    { "epoch": 24.98, "learning_rate": 0.00015737500000000002, "loss": 1.105, "step": 441 },
    { "epoch": 25.03, "learning_rate": 0.00015725, "loss": 1.163, "step": 442 },
    { "epoch": 25.09, "learning_rate": 0.000157125, "loss": 1.0763, "step": 443 },
    { "epoch": 25.15, "learning_rate": 0.00015700000000000002, "loss": 0.9489, "step": 444 },
    { "epoch": 25.2, "learning_rate": 0.000156875, "loss": 1.0961, "step": 445 },
    { "epoch": 25.26, "learning_rate": 0.00015675, "loss": 1.1935, "step": 446 },
    { "epoch": 25.32, "learning_rate": 0.000156625, "loss": 1.0047, "step": 447 },
    { "epoch": 25.37, "learning_rate": 0.0001565, "loss": 1.0807, "step": 448 },
    { "epoch": 25.43, "learning_rate": 0.000156375, "loss": 1.0766, "step": 449 },
    { "epoch": 25.49, "learning_rate": 0.00015625, "loss": 0.9833, "step": 450 },
    { "epoch": 25.54, "learning_rate": 0.00015612500000000001, "loss": 1.032, "step": 451 },
    { "epoch": 25.6, "learning_rate": 0.00015600000000000002, "loss": 1.0264, "step": 452 },
    { "epoch": 25.66, "learning_rate": 0.000155875, "loss": 1.1064, "step": 453 },
    { "epoch": 25.71, "learning_rate": 0.00015575000000000002, "loss": 0.9957, "step": 454 },
    { "epoch": 25.77, "learning_rate": 0.000155625, "loss": 1.1988, "step": 455 },
    { "epoch": 25.83, "learning_rate": 0.0001555, "loss": 1.0918, "step": 456 },
    { "epoch": 25.88, "learning_rate": 0.000155375, "loss": 1.0202, "step": 457 },
    { "epoch": 25.94, "learning_rate": 0.00015525, "loss": 1.0904, "step": 458 },
    { "epoch": 26.0, "learning_rate": 0.00015512500000000002, "loss": 1.0169, "step": 459 },
    { "epoch": 26.05, "learning_rate": 0.000155, "loss": 1.0173, "step": 460 },
    { "epoch": 26.11, "learning_rate": 0.000154875, "loss": 1.1198, "step": 461 },
    { "epoch": 26.17, "learning_rate": 0.00015475000000000002, "loss": 1.1392, "step": 462 },
    { "epoch": 26.22, "learning_rate": 0.000154625, "loss": 1.014, "step": 463 },
    { "epoch": 26.28, "learning_rate": 0.0001545, "loss": 0.9519, "step": 464 },
    { "epoch": 26.34, "learning_rate": 0.000154375, "loss": 0.9909, "step": 465 },
    { "epoch": 26.39, "learning_rate": 0.00015425, "loss": 0.9521, "step": 466 },
    { "epoch": 26.45, "learning_rate": 0.00015412500000000002, "loss": 1.1342, "step": 467 },
    { "epoch": 26.51, "learning_rate": 0.000154, "loss": 1.0784, "step": 468 },
    { "epoch": 26.56, "learning_rate": 0.000153875, "loss": 1.0328, "step": 469 },
    { "epoch": 26.62, "learning_rate": 0.00015375000000000002, "loss": 1.0087, "step": 470 },
    { "epoch": 26.68, "learning_rate": 0.000153625, "loss": 1.0398, "step": 471 },
    { "epoch": 26.73, "learning_rate": 0.0001535, "loss": 0.9858, "step": 472 },
    { "epoch": 26.79, "learning_rate": 0.000153375, "loss": 1.2076, "step": 473 },
    { "epoch": 26.85, "learning_rate": 0.00015325, "loss": 1.1085, "step": 474 },
    { "epoch": 26.9, "learning_rate": 0.000153125, "loss": 1.1035, "step": 475 },
    { "epoch": 26.96, "learning_rate": 0.000153, "loss": 0.9399, "step": 476 },
    { "epoch": 27.02, "learning_rate": 0.00015287500000000002, "loss": 1.0872, "step": 477 },
    { "epoch": 27.07, "learning_rate": 0.00015275000000000003, "loss": 1.0331, "step": 478 },
    { "epoch": 27.13, "learning_rate": 0.000152625, "loss": 0.9109, "step": 479 },
    { "epoch": 27.19, "learning_rate": 0.0001525, "loss": 0.9697, "step": 480 },
    { "epoch": 27.24, "learning_rate": 0.000152375, "loss": 1.0549, "step": 481 },
    { "epoch": 27.3, "learning_rate": 0.00015225000000000001, "loss": 0.998, "step": 482 },
    { "epoch": 27.36, "learning_rate": 0.000152125, "loss": 1.0075, "step": 483 },
    { "epoch": 27.41, "learning_rate": 0.000152, "loss": 1.0581, "step": 484 },
    { "epoch": 27.47, "learning_rate": 0.00015187500000000002, "loss": 1.0142, "step": 485 },
    { "epoch": 27.53, "learning_rate": 0.00015175, "loss": 0.9666, "step": 486 },
    { "epoch": 27.58, "learning_rate": 0.000151625, "loss": 1.0657, "step": 487 },
    { "epoch": 27.64, "learning_rate": 0.0001515, "loss": 1.0518, "step": 488 },
    { "epoch": 27.7, "learning_rate": 0.000151375, "loss": 1.0108, "step": 489 },
    { "epoch": 27.75, "learning_rate": 0.00015125, "loss": 1.0609, "step": 490 },
    { "epoch": 27.81, "learning_rate": 0.000151125, "loss": 0.9527, "step": 491 },
    { "epoch": 27.87, "learning_rate": 0.000151, "loss": 1.0935, "step": 492 },
    { "epoch": 27.92, "learning_rate": 0.00015087500000000002, "loss": 1.0846, "step": 493 },
    { "epoch": 27.98, "learning_rate": 0.00015075, "loss": 1.155, "step": 494 },
    { "epoch": 28.04, "learning_rate": 0.00015062500000000002, "loss": 1.0016, "step": 495 },
    { "epoch": 28.09, "learning_rate": 0.0001505, "loss": 0.9649, "step": 496 },
    { "epoch": 28.15, "learning_rate": 0.000150375, "loss": 0.8216, "step": 497 },
    { "epoch": 28.21, "learning_rate": 0.00015025, "loss": 0.8122, "step": 498 },
    { "epoch": 28.26, "learning_rate": 0.000150125, "loss": 0.9406, "step": 499 },
    { "epoch": 28.32, "learning_rate": 0.00015000000000000001, "loss": 0.9773, "step": 500 },
    { "epoch": 28.38, "learning_rate": 0.000149875, "loss": 1.0644, "step": 501 },
    { "epoch": 28.43, "learning_rate": 0.00014975, "loss": 0.94, "step": 502 },
    { "epoch": 28.49, "learning_rate": 0.00014962500000000002, "loss": 0.9985, "step": 503 },
    { "epoch": 28.55, "learning_rate": 0.00014950000000000003, "loss": 0.9291, "step": 504 },
    { "epoch": 28.6, "learning_rate": 0.00014937499999999999, "loss": 1.105, "step": 505 },
    { "epoch": 28.66, "learning_rate": 0.00014925, "loss": 1.0298, "step": 506 },
    { "epoch": 28.72, "learning_rate": 0.000149125, "loss": 0.8352, "step": 507 },
    { "epoch": 28.77, "learning_rate": 0.00014900000000000002, "loss": 0.9752, "step": 508 },
    { "epoch": 28.83, "learning_rate": 0.000148875, "loss": 1.0227, "step": 509 },
    { "epoch": 28.88, "learning_rate": 0.00014875, "loss": 1.0159, "step": 510 },
    { "epoch": 28.94, "learning_rate": 0.00014862500000000002, "loss": 0.9519, "step": 511 },
    { "epoch": 29.0, "learning_rate": 0.0001485, "loss": 1.0509, "step": 512 },
    { "epoch": 29.05, "learning_rate": 0.000148375, "loss": 0.9518, "step": 513 },
    { "epoch": 29.11, "learning_rate": 0.00014825, "loss": 0.916, "step": 514 },
    { "epoch": 29.17, "learning_rate": 0.000148125, "loss": 0.9861, "step": 515 },
    { "epoch": 29.22, "learning_rate": 0.000148, "loss": 0.8708, "step": 516 },
    { "epoch": 29.28, "learning_rate": 0.000147875, "loss": 0.9761, "step": 517 },
    { "epoch": 29.34, "learning_rate": 0.00014775, "loss": 0.9873, "step": 518 },
    { "epoch": 29.39, "learning_rate": 0.00014762500000000002, "loss": 0.9831, "step": 519 },
    { "epoch": 29.45, "learning_rate": 0.0001475, "loss": 0.9699, "step": 520 },
    { "epoch": 29.51, "learning_rate": 0.000147375, "loss": 1.0476, "step": 521 },
    { "epoch": 29.56, "learning_rate": 0.00014725, "loss": 0.9194, "step": 522 },
    { "epoch": 29.62, "learning_rate": 0.000147125, "loss": 0.8511, "step": 523 },
    { "epoch": 29.68, "learning_rate": 0.000147, "loss": 0.9279, "step": 524 },
    { "epoch": 29.73, "learning_rate": 0.000146875, "loss": 0.9263, "step": 525 },
    { "epoch": 29.79, "learning_rate": 0.00014675000000000002, "loss": 1.0193, "step": 526 },
    { "epoch": 29.85, "learning_rate": 0.000146625, "loss": 0.855, "step": 527 },
    { "epoch": 29.9, "learning_rate": 0.0001465, "loss": 1.0262, "step": 528 },
    { "epoch": 29.96, "learning_rate": 0.00014637500000000002, "loss": 0.9289, "step": 529 },
    { "epoch": 30.02, "learning_rate": 0.00014625, "loss": 0.9726, "step": 530 },
    { "epoch": 30.07, "learning_rate": 0.000146125, "loss": 0.9372, "step": 531 },
    { "epoch": 30.13, "learning_rate": 0.000146, "loss": 0.9264, "step": 532 },
    { "epoch": 30.19, "learning_rate": 0.000145875, "loss": 1.082, "step": 533 },
    { "epoch": 30.24, "learning_rate": 0.00014575000000000002, "loss": 0.8726, "step": 534 },
    { "epoch": 30.3, "learning_rate": 0.000145625, "loss": 0.946, "step": 535 },
    { "epoch": 30.36, "learning_rate": 0.0001455, "loss": 0.9745, "step": 536 },
    { "epoch": 30.41, "learning_rate": 0.00014537500000000002, "loss": 0.9029, "step": 537 },
    { "epoch": 30.47, "learning_rate": 0.00014525, "loss": 0.9419, "step": 538 },
    { "epoch": 30.53, "learning_rate": 0.000145125, "loss": 0.9483, "step": 539 },
    { "epoch": 30.58, "learning_rate": 0.000145, "loss": 0.931, "step": 540 },
    { "epoch": 30.64, "learning_rate": 0.000144875, "loss": 0.8506, "step": 541 },
    { "epoch": 30.7, "learning_rate": 0.00014475, "loss": 0.9794, "step": 542 },
    { "epoch": 30.75, "learning_rate": 0.000144625, "loss": 0.9518, "step": 543 },
    { "epoch": 30.81, "learning_rate": 0.00014450000000000002, "loss": 1.0354, "step": 544 },
    { "epoch": 30.87, "learning_rate": 0.00014437500000000003, "loss": 0.942, "step": 545 },
    { "epoch": 30.92, "learning_rate": 0.00014425, "loss": 1.0193, "step": 546 },
    { "epoch": 30.98, "learning_rate": 0.000144125, "loss": 1.0498, "step": 547 },
    { "epoch": 31.04, "learning_rate": 0.000144, "loss": 0.8734, "step": 548 },
    { "epoch": 31.09, "learning_rate": 0.00014387500000000001, "loss": 0.8525, "step": 549 },
    { "epoch": 31.15, "learning_rate": 0.00014375, "loss": 0.8724, "step": 550 },
    { "epoch": 31.21, "learning_rate": 0.000143625, "loss": 0.8856, "step": 551 },
    { "epoch": 31.26, "learning_rate": 0.00014350000000000002, "loss": 0.9098, "step": 552 },
    { "epoch": 31.32, "learning_rate": 0.000143375, "loss": 0.8638, "step": 553 },
    { "epoch": 31.38, "learning_rate": 0.00014325, "loss": 0.869, "step": 554 },
    { "epoch": 31.43, "learning_rate": 0.000143125, "loss": 0.9197, "step": 555 },
    { "epoch": 31.49, "learning_rate": 0.000143, "loss": 0.9562, "step": 556 },
    { "epoch": 31.55, "learning_rate": 0.000142875, "loss": 0.9259, "step": 557 },
    { "epoch": 31.6, "learning_rate": 0.00014275, "loss": 0.9913, "step": 558 },
    { "epoch": 31.66, "learning_rate": 0.000142625, "loss": 0.8817, "step": 559 },
    { "epoch": 31.72, "learning_rate": 0.00014250000000000002, "loss": 0.9828, "step": 560 },
    { "epoch": 31.77, "learning_rate": 0.000142375, "loss": 1.019, "step": 561 },
    { "epoch": 31.83, "learning_rate": 0.00014225000000000002, "loss": 1.0316, "step": 562 },
    { "epoch": 31.89, "learning_rate": 0.000142125, "loss": 0.8991, "step": 563 },
    { "epoch": 31.94, "learning_rate": 0.000142, "loss": 0.9081, "step": 564 },
    { "epoch": 32.0, "learning_rate": 0.000141875, "loss": 0.9126, "step": 565 },
    { "epoch": 32.06, "learning_rate": 0.00014175, "loss": 0.8007, "step": 566 },
    { "epoch": 32.11, "learning_rate": 0.00014162500000000001, "loss": 0.9289, "step": 567 },
    { "epoch": 32.17, "learning_rate": 0.0001415, "loss": 1.001, "step": 568 },
    { "epoch": 32.23, "learning_rate": 0.000141375, "loss": 0.896, "step": 569 },
    { "epoch": 32.28, "learning_rate": 0.00014125000000000002, "loss": 0.9181, "step": 570 },
    { "epoch": 32.34, "learning_rate": 0.000141125, "loss": 0.9645, "step": 571 },
    { "epoch": 32.4, "learning_rate": 0.000141, "loss": 0.8713, "step": 572 },
    { "epoch": 32.45, "learning_rate": 0.000140875, "loss": 0.8495, "step": 573 },
    { "epoch": 32.51, "learning_rate": 0.00014075, "loss": 0.8962, "step": 574 },
    { "epoch": 32.57, "learning_rate": 0.00014062500000000002, "loss": 0.8542, "step": 575 },
    { "epoch": 32.62, "learning_rate": 0.0001405, "loss": 0.8794, "step": 576 },
    { "epoch": 32.68, "learning_rate": 0.000140375, "loss": 0.9115, "step": 577 },
    { "epoch": 32.74, "learning_rate": 0.00014025000000000002, "loss": 0.815, "step": 578 },
    { "epoch": 32.79, "learning_rate": 0.000140125, "loss": 0.8095, "step": 579 },
    { "epoch": 32.85, "learning_rate": 0.00014, "loss": 0.8478, "step": 580 },
    { "epoch": 32.91, "learning_rate": 0.000139875, "loss": 0.8056, "step": 581 },
    { "epoch": 32.96, "learning_rate": 0.00013975, "loss": 0.9301, "step": 582 },
    { "epoch": 33.02, "learning_rate": 0.00013962500000000002, "loss": 0.8053, "step": 583 },
    { "epoch": 33.08, "learning_rate": 0.0001395, "loss": 0.8094, "step": 584 },
    { "epoch": 33.13, "learning_rate": 0.000139375, "loss": 0.8191, "step": 585 },
    { "epoch": 33.19, "learning_rate": 0.00013925000000000002, "loss": 0.8934, "step": 586 },
    { "epoch": 33.25, "learning_rate": 0.000139125, "loss": 0.88, "step": 587 },
    { "epoch": 33.3, "learning_rate": 0.000139, "loss": 0.9598, "step": 588 },
    { "epoch": 33.36, "learning_rate": 0.000138875, "loss": 0.8177, "step": 589 },
    { "epoch": 33.42, "learning_rate": 0.00013875, "loss": 0.7988, "step": 590 },
    { "epoch": 33.47, "learning_rate": 0.000138625, "loss": 0.976, "step": 591 },
    { "epoch": 33.53, "learning_rate": 0.0001385, "loss": 0.7895, "step": 592 },
    { "epoch": 33.59, "learning_rate": 0.00013837500000000002, "loss": 0.8736, "step": 593 },
    { "epoch": 33.64, "learning_rate": 0.00013825, "loss": 0.821, "step": 594 },
    { "epoch": 33.7, "learning_rate": 0.000138125, "loss": 0.8448, "step": 595 },
    { "epoch": 33.76, "learning_rate": 0.000138, "loss": 0.8763, "step": 596 },
    { "epoch": 33.81, "learning_rate": 0.000137875, "loss": 0.9005, "step": 597 },
    { "epoch": 33.87, "learning_rate": 0.00013775000000000001, "loss": 0.7962, "step": 598 },
    { "epoch": 33.93, "learning_rate": 0.000137625, "loss": 0.9145, "step": 599 },
    { "epoch": 33.98, "learning_rate": 0.0001375, "loss": 0.8612, "step": 600 },
    { "epoch": 33.98, "eval_loss": 2.6225430965423584, "eval_runtime": 90.4931, "eval_samples_per_second": 2.774, "eval_steps_per_second": 0.696, "step": 600 }
  ],
  "max_steps": 1700,
  "num_train_epochs": 100,
  "total_flos": 4.163924540462285e+17,
  "trial_name": null,
  "trial_params": null
}