|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 22.900763358778626, |
|
"eval_steps": 100, |
|
"global_step": 3000, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.998727735368957e-05, |
|
"loss": 13.0747, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 4.998727735368957e-05, |
|
"loss": 10.7662, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 4.9961832061068705e-05, |
|
"loss": 7.722, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 4.993638676844784e-05, |
|
"loss": 3.4523, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 4.9910941475826974e-05, |
|
"loss": 2.5227, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 4.988549618320611e-05, |
|
"loss": 0.8054, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 4.9860050890585244e-05, |
|
"loss": 0.2351, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 4.983460559796438e-05, |
|
"loss": 0.2044, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 4.9809160305343514e-05, |
|
"loss": 0.5828, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 4.978371501272265e-05, |
|
"loss": 0.6078, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 4.975826972010178e-05, |
|
"loss": 0.8824, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 4.973282442748092e-05, |
|
"loss": 0.3526, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 4.970737913486005e-05, |
|
"loss": 0.1536, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 4.968193384223919e-05, |
|
"loss": 0.3102, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 4.965648854961832e-05, |
|
"loss": 0.8505, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 4.963104325699746e-05, |
|
"loss": 0.2302, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 4.960559796437659e-05, |
|
"loss": 0.1172, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 4.958015267175573e-05, |
|
"loss": 0.4224, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 4.955470737913486e-05, |
|
"loss": 0.2523, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 4.9529262086514e-05, |
|
"loss": 0.4299, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 4.950381679389313e-05, |
|
"loss": 0.5534, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 4.9478371501272266e-05, |
|
"loss": 0.5437, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 4.94529262086514e-05, |
|
"loss": 0.7888, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 4.9427480916030536e-05, |
|
"loss": 0.3401, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 4.940203562340967e-05, |
|
"loss": 0.442, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 4.9376590330788806e-05, |
|
"loss": 0.5163, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 4.935114503816794e-05, |
|
"loss": 0.5436, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 4.9325699745547075e-05, |
|
"loss": 0.5621, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 4.930025445292621e-05, |
|
"loss": 0.2313, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 4.9274809160305345e-05, |
|
"loss": 0.1979, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 4.924936386768448e-05, |
|
"loss": 0.3287, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 4.9223918575063615e-05, |
|
"loss": 0.0833, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 4.9198473282442756e-05, |
|
"loss": 0.7452, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 4.9173027989821884e-05, |
|
"loss": 0.3994, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 4.914758269720102e-05, |
|
"loss": 0.413, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 4.9122137404580154e-05, |
|
"loss": 0.4078, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 4.909669211195929e-05, |
|
"loss": 0.1743, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 4.9071246819338424e-05, |
|
"loss": 0.3877, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 4.904580152671756e-05, |
|
"loss": 0.4874, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 4.902035623409669e-05, |
|
"loss": 0.1321, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 4.899491094147583e-05, |
|
"loss": 0.2902, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 4.896946564885496e-05, |
|
"loss": 0.0824, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 4.89440203562341e-05, |
|
"loss": 0.086, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 4.891857506361323e-05, |
|
"loss": 0.2476, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.889312977099237e-05, |
|
"loss": 0.4408, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.886768447837151e-05, |
|
"loss": 0.1607, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 4.884223918575064e-05, |
|
"loss": 0.5495, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 4.881679389312977e-05, |
|
"loss": 0.4271, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 4.879134860050891e-05, |
|
"loss": 0.1442, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 4.876590330788804e-05, |
|
"loss": 0.2471, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"eval_cer": 0.06833333333333333, |
|
"eval_loss": 0.19153563678264618, |
|
"eval_runtime": 9.1932, |
|
"eval_samples_per_second": 28.391, |
|
"eval_steps_per_second": 3.59, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 4.8740458015267176e-05, |
|
"loss": 0.2194, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 4.871501272264631e-05, |
|
"loss": 0.1366, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 4.8689567430025446e-05, |
|
"loss": 0.0784, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 4.866412213740458e-05, |
|
"loss": 0.5527, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 4.8638676844783716e-05, |
|
"loss": 0.2214, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 4.861323155216285e-05, |
|
"loss": 0.3578, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 4.858778625954199e-05, |
|
"loss": 0.1092, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 4.856234096692112e-05, |
|
"loss": 0.2762, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 4.853689567430026e-05, |
|
"loss": 0.1063, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 4.851145038167939e-05, |
|
"loss": 0.3029, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 4.848600508905853e-05, |
|
"loss": 0.482, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 4.846055979643766e-05, |
|
"loss": 0.5909, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 4.8435114503816794e-05, |
|
"loss": 0.6219, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 4.840966921119593e-05, |
|
"loss": 0.6025, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 4.8384223918575064e-05, |
|
"loss": 0.5285, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.83587786259542e-05, |
|
"loss": 0.1767, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.8333333333333334e-05, |
|
"loss": 0.5803, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 4.830788804071247e-05, |
|
"loss": 0.1041, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 4.82824427480916e-05, |
|
"loss": 0.3941, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 4.8256997455470745e-05, |
|
"loss": 0.2762, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 4.823155216284987e-05, |
|
"loss": 0.1117, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 4.8206106870229015e-05, |
|
"loss": 0.13, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 4.818066157760814e-05, |
|
"loss": 0.5239, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 4.8155216284987284e-05, |
|
"loss": 0.2415, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 4.812977099236641e-05, |
|
"loss": 0.1007, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 4.8104325699745554e-05, |
|
"loss": 0.1092, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 4.807888040712468e-05, |
|
"loss": 0.1845, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 4.805343511450382e-05, |
|
"loss": 0.0917, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 4.802798982188295e-05, |
|
"loss": 0.1885, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 4.8002544529262086e-05, |
|
"loss": 0.2213, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 4.797709923664123e-05, |
|
"loss": 0.1772, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 4.7951653944020356e-05, |
|
"loss": 0.1693, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 4.79262086513995e-05, |
|
"loss": 0.1783, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 4.7900763358778626e-05, |
|
"loss": 0.7147, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 4.787531806615777e-05, |
|
"loss": 0.0766, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 4.7849872773536895e-05, |
|
"loss": 0.2119, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 4.782442748091604e-05, |
|
"loss": 0.0908, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 4.7798982188295165e-05, |
|
"loss": 0.4874, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 4.7773536895674307e-05, |
|
"loss": 0.0256, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 4.7748091603053435e-05, |
|
"loss": 0.2564, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 4.772264631043257e-05, |
|
"loss": 0.1598, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 4.7697201017811704e-05, |
|
"loss": 0.163, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 4.767175572519084e-05, |
|
"loss": 0.1982, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 4.764631043256998e-05, |
|
"loss": 0.0347, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 4.762086513994911e-05, |
|
"loss": 0.3642, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 4.759541984732825e-05, |
|
"loss": 0.2434, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 4.756997455470738e-05, |
|
"loss": 0.7607, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 4.754452926208652e-05, |
|
"loss": 0.245, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 4.751908396946565e-05, |
|
"loss": 0.1778, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 4.749363867684479e-05, |
|
"loss": 0.1461, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"eval_cer": 0.09, |
|
"eval_loss": 0.2961411774158478, |
|
"eval_runtime": 9.3409, |
|
"eval_samples_per_second": 27.942, |
|
"eval_steps_per_second": 3.533, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 4.746819338422392e-05, |
|
"loss": 0.2253, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 4.744274809160306e-05, |
|
"loss": 0.5202, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 4.741730279898219e-05, |
|
"loss": 0.6376, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 4.739185750636133e-05, |
|
"loss": 0.0297, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 4.736641221374046e-05, |
|
"loss": 0.158, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 4.734096692111959e-05, |
|
"loss": 0.5359, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 4.7315521628498734e-05, |
|
"loss": 0.1949, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 4.729007633587786e-05, |
|
"loss": 0.0096, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 4.7264631043257e-05, |
|
"loss": 0.1996, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 4.723918575063613e-05, |
|
"loss": 0.2097, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 4.721374045801527e-05, |
|
"loss": 0.2805, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 4.71882951653944e-05, |
|
"loss": 0.5995, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 4.716284987277354e-05, |
|
"loss": 0.0177, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 4.713740458015267e-05, |
|
"loss": 0.3505, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 4.711195928753181e-05, |
|
"loss": 0.4022, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 4.708651399491094e-05, |
|
"loss": 0.2403, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 4.706106870229008e-05, |
|
"loss": 0.0289, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 4.7035623409669217e-05, |
|
"loss": 0.1095, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 4.7010178117048345e-05, |
|
"loss": 0.5152, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 4.6984732824427486e-05, |
|
"loss": 0.0785, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 4.6959287531806614e-05, |
|
"loss": 0.025, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 4.6933842239185756e-05, |
|
"loss": 0.0908, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 4.6908396946564884e-05, |
|
"loss": 0.236, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 4.6882951653944026e-05, |
|
"loss": 0.2633, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 4.6857506361323154e-05, |
|
"loss": 0.0273, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 4.6832061068702295e-05, |
|
"loss": 0.0652, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 4.680661577608142e-05, |
|
"loss": 0.2345, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 4.6781170483460565e-05, |
|
"loss": 0.1733, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 4.675572519083969e-05, |
|
"loss": 0.1278, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 4.6730279898218835e-05, |
|
"loss": 0.0199, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 4.670483460559797e-05, |
|
"loss": 0.0073, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.6679389312977104e-05, |
|
"loss": 0.0656, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 4.665394402035624e-05, |
|
"loss": 0.0056, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 4.662849872773537e-05, |
|
"loss": 0.2293, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 4.660305343511451e-05, |
|
"loss": 0.005, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 4.657760814249364e-05, |
|
"loss": 0.2217, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 4.655216284987278e-05, |
|
"loss": 0.1531, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 4.6526717557251906e-05, |
|
"loss": 0.0001, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 4.650127226463105e-05, |
|
"loss": 0.0067, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 4.6475826972010176e-05, |
|
"loss": 0.1709, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 4.645038167938932e-05, |
|
"loss": 0.1888, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 4.642493638676845e-05, |
|
"loss": 0.0052, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 4.639949109414759e-05, |
|
"loss": 0.321, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 4.637404580152672e-05, |
|
"loss": 0.0088, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 4.634860050890586e-05, |
|
"loss": 0.024, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 4.632315521628499e-05, |
|
"loss": 0.3312, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 4.629770992366412e-05, |
|
"loss": 0.5438, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 4.6284987277353694e-05, |
|
"loss": 1.0535, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 4.625954198473283e-05, |
|
"loss": 0.8874, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 4.6234096692111964e-05, |
|
"loss": 0.1959, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"eval_cer": 0.06666666666666667, |
|
"eval_loss": 0.21715956926345825, |
|
"eval_runtime": 9.1608, |
|
"eval_samples_per_second": 28.491, |
|
"eval_steps_per_second": 3.602, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 4.62086513994911e-05, |
|
"loss": 0.029, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 4.618320610687023e-05, |
|
"loss": 0.592, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 4.615776081424937e-05, |
|
"loss": 0.1281, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 4.61323155216285e-05, |
|
"loss": 0.1893, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 4.610687022900764e-05, |
|
"loss": 0.0201, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 4.6081424936386766e-05, |
|
"loss": 0.04, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 4.605597964376591e-05, |
|
"loss": 0.0411, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 4.6030534351145036e-05, |
|
"loss": 1.5439, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 4.600508905852418e-05, |
|
"loss": 0.0094, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 4.5979643765903305e-05, |
|
"loss": 0.006, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 4.595419847328245e-05, |
|
"loss": 5.4483, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 4.592875318066158e-05, |
|
"loss": 0.2359, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 4.5903307888040716e-05, |
|
"loss": 0.0957, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 4.587786259541985e-05, |
|
"loss": 3.4057, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 4.5852417302798986e-05, |
|
"loss": 4.2062, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 4.582697201017812e-05, |
|
"loss": 2.477, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 4.5801526717557256e-05, |
|
"loss": 1.9953, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 4.577608142493639e-05, |
|
"loss": 4.1759, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 4.575063613231552e-05, |
|
"loss": 4.7143, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 4.572519083969466e-05, |
|
"loss": 1.8038, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 4.569974554707379e-05, |
|
"loss": 1.516, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 4.567430025445293e-05, |
|
"loss": 3.2261, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 4.5648854961832065e-05, |
|
"loss": 2.0044, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 4.56234096692112e-05, |
|
"loss": 1.2701, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 4.5597964376590334e-05, |
|
"loss": 1.3727, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 4.557251908396947e-05, |
|
"loss": 1.2656, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 4.5547073791348604e-05, |
|
"loss": 1.1259, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 4.552162849872774e-05, |
|
"loss": 1.3548, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 4.5496183206106874e-05, |
|
"loss": 1.2812, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 4.547073791348601e-05, |
|
"loss": 1.0272, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 4.544529262086514e-05, |
|
"loss": 1.0934, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 4.541984732824428e-05, |
|
"loss": 1.2292, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 4.539440203562341e-05, |
|
"loss": 1.1085, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 4.536895674300254e-05, |
|
"loss": 1.0788, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 4.534351145038168e-05, |
|
"loss": 0.9239, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 4.531806615776082e-05, |
|
"loss": 1.0381, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 4.529262086513995e-05, |
|
"loss": 1.3701, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 4.526717557251909e-05, |
|
"loss": 0.8537, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 4.524173027989822e-05, |
|
"loss": 0.9638, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 4.521628498727736e-05, |
|
"loss": 0.8149, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 4.519083969465649e-05, |
|
"loss": 1.1826, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 4.5165394402035626e-05, |
|
"loss": 1.1008, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 4.513994910941476e-05, |
|
"loss": 0.9321, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 4.5114503816793896e-05, |
|
"loss": 0.8994, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 4.508905852417303e-05, |
|
"loss": 0.9649, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 4.5063613231552166e-05, |
|
"loss": 1.1236, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 3.01, |
|
"learning_rate": 4.5038167938931294e-05, |
|
"loss": 0.915, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 3.02, |
|
"learning_rate": 4.5012722646310435e-05, |
|
"loss": 1.2086, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 3.04, |
|
"learning_rate": 4.498727735368957e-05, |
|
"loss": 1.1753, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 3.05, |
|
"learning_rate": 4.4961832061068705e-05, |
|
"loss": 1.4127, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 3.05, |
|
"eval_cer": 0.105, |
|
"eval_loss": 1.2658765316009521, |
|
"eval_runtime": 9.0281, |
|
"eval_samples_per_second": 28.91, |
|
"eval_steps_per_second": 3.655, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 3.07, |
|
"learning_rate": 4.493638676844784e-05, |
|
"loss": 1.1454, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 3.08, |
|
"learning_rate": 4.4910941475826975e-05, |
|
"loss": 1.0668, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 3.1, |
|
"learning_rate": 4.488549618320611e-05, |
|
"loss": 1.1689, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 3.11, |
|
"learning_rate": 4.4860050890585244e-05, |
|
"loss": 0.8271, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 3.13, |
|
"learning_rate": 4.483460559796438e-05, |
|
"loss": 3.0489, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 3.15, |
|
"learning_rate": 4.4809160305343514e-05, |
|
"loss": 4.085, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 3.16, |
|
"learning_rate": 4.478371501272265e-05, |
|
"loss": 2.5191, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 3.18, |
|
"learning_rate": 4.4758269720101784e-05, |
|
"loss": 2.7557, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 3.19, |
|
"learning_rate": 4.473282442748092e-05, |
|
"loss": 2.4464, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 3.21, |
|
"learning_rate": 4.470737913486005e-05, |
|
"loss": 2.3328, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 3.22, |
|
"learning_rate": 4.468193384223919e-05, |
|
"loss": 2.1459, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 3.24, |
|
"learning_rate": 4.465648854961832e-05, |
|
"loss": 2.0175, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 3.25, |
|
"learning_rate": 4.463104325699746e-05, |
|
"loss": 1.8922, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 3.27, |
|
"learning_rate": 4.460559796437659e-05, |
|
"loss": 1.5296, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 3.28, |
|
"learning_rate": 4.458015267175573e-05, |
|
"loss": 1.6281, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 3.3, |
|
"learning_rate": 4.455470737913486e-05, |
|
"loss": 1.8866, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 3.31, |
|
"learning_rate": 4.4529262086514e-05, |
|
"loss": 1.7002, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 3.33, |
|
"learning_rate": 4.450381679389313e-05, |
|
"loss": 1.6532, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 3.34, |
|
"learning_rate": 4.447837150127227e-05, |
|
"loss": 1.5812, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 3.36, |
|
"learning_rate": 4.44529262086514e-05, |
|
"loss": 1.5947, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 3.37, |
|
"learning_rate": 4.4427480916030536e-05, |
|
"loss": 1.6216, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 3.39, |
|
"learning_rate": 4.440203562340967e-05, |
|
"loss": 1.7792, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 3.4, |
|
"learning_rate": 4.4376590330788806e-05, |
|
"loss": 1.6228, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 3.42, |
|
"learning_rate": 4.435114503816794e-05, |
|
"loss": 1.6892, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 3.44, |
|
"learning_rate": 4.4325699745547076e-05, |
|
"loss": 1.6365, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 3.45, |
|
"learning_rate": 4.430025445292621e-05, |
|
"loss": 1.6137, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 3.47, |
|
"learning_rate": 4.4274809160305345e-05, |
|
"loss": 1.5058, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 3.48, |
|
"learning_rate": 4.424936386768448e-05, |
|
"loss": 1.5183, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 3.5, |
|
"learning_rate": 4.4223918575063615e-05, |
|
"loss": 1.7357, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 3.51, |
|
"learning_rate": 4.419847328244275e-05, |
|
"loss": 1.4155, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 3.53, |
|
"learning_rate": 4.4173027989821885e-05, |
|
"loss": 1.4845, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 3.54, |
|
"learning_rate": 4.414758269720102e-05, |
|
"loss": 1.5083, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 3.56, |
|
"learning_rate": 4.4122137404580154e-05, |
|
"loss": 1.5875, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 3.57, |
|
"learning_rate": 4.409669211195929e-05, |
|
"loss": 1.6002, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 3.59, |
|
"learning_rate": 4.4071246819338424e-05, |
|
"loss": 1.3794, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 3.6, |
|
"learning_rate": 4.404580152671756e-05, |
|
"loss": 1.519, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 3.62, |
|
"learning_rate": 4.4020356234096694e-05, |
|
"loss": 1.6349, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 3.63, |
|
"learning_rate": 4.399491094147583e-05, |
|
"loss": 1.5777, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 3.65, |
|
"learning_rate": 4.396946564885496e-05, |
|
"loss": 1.79, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 3.66, |
|
"learning_rate": 4.39440203562341e-05, |
|
"loss": 1.7578, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 3.68, |
|
"learning_rate": 4.391857506361323e-05, |
|
"loss": 1.7805, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 3.69, |
|
"learning_rate": 4.389312977099237e-05, |
|
"loss": 1.6965, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 3.71, |
|
"learning_rate": 4.38676844783715e-05, |
|
"loss": 1.4909, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 3.73, |
|
"learning_rate": 4.384223918575064e-05, |
|
"loss": 1.4643, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 3.74, |
|
"learning_rate": 4.381679389312977e-05, |
|
"loss": 1.4647, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 3.76, |
|
"learning_rate": 4.379134860050891e-05, |
|
"loss": 1.4439, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 3.77, |
|
"learning_rate": 4.376590330788805e-05, |
|
"loss": 1.4828, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 3.79, |
|
"learning_rate": 4.374045801526718e-05, |
|
"loss": 1.418, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 3.8, |
|
"learning_rate": 4.371501272264631e-05, |
|
"loss": 1.4172, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 3.82, |
|
"learning_rate": 4.3689567430025446e-05, |
|
"loss": 1.6268, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 3.82, |
|
"eval_cer": 1.4616666666666667, |
|
"eval_loss": 1.5408730506896973, |
|
"eval_runtime": 12.3957, |
|
"eval_samples_per_second": 21.056, |
|
"eval_steps_per_second": 2.662, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 3.83, |
|
"learning_rate": 4.366412213740458e-05, |
|
"loss": 1.4824, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 3.85, |
|
"learning_rate": 4.3638676844783716e-05, |
|
"loss": 1.5388, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 3.86, |
|
"learning_rate": 4.361323155216285e-05, |
|
"loss": 1.4597, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 3.88, |
|
"learning_rate": 4.3587786259541986e-05, |
|
"loss": 1.5521, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 3.89, |
|
"learning_rate": 4.356234096692112e-05, |
|
"loss": 1.6686, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 3.91, |
|
"learning_rate": 4.3536895674300255e-05, |
|
"loss": 1.4682, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 3.92, |
|
"learning_rate": 4.351145038167939e-05, |
|
"loss": 1.9896, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 3.94, |
|
"learning_rate": 4.3486005089058525e-05, |
|
"loss": 1.9093, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 3.95, |
|
"learning_rate": 4.346055979643766e-05, |
|
"loss": 1.4429, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 3.97, |
|
"learning_rate": 4.34351145038168e-05, |
|
"loss": 1.3953, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 3.98, |
|
"learning_rate": 4.340966921119593e-05, |
|
"loss": 1.598, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 4.0, |
|
"learning_rate": 4.3384223918575064e-05, |
|
"loss": 1.8409, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 4.02, |
|
"learning_rate": 4.33587786259542e-05, |
|
"loss": 1.6732, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 4.03, |
|
"learning_rate": 4.3333333333333334e-05, |
|
"loss": 1.7087, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 4.05, |
|
"learning_rate": 4.330788804071247e-05, |
|
"loss": 1.4471, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 4.06, |
|
"learning_rate": 4.3282442748091604e-05, |
|
"loss": 1.505, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 4.08, |
|
"learning_rate": 4.325699745547074e-05, |
|
"loss": 1.5021, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 4.09, |
|
"learning_rate": 4.323155216284987e-05, |
|
"loss": 1.5206, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 4.11, |
|
"learning_rate": 4.320610687022901e-05, |
|
"loss": 1.6, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 4.12, |
|
"learning_rate": 4.318066157760814e-05, |
|
"loss": 1.5293, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 4.14, |
|
"learning_rate": 4.3155216284987285e-05, |
|
"loss": 1.4451, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 4.15, |
|
"learning_rate": 4.312977099236641e-05, |
|
"loss": 1.4265, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 4.17, |
|
"learning_rate": 4.3104325699745554e-05, |
|
"loss": 1.3854, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 4.18, |
|
"learning_rate": 4.307888040712468e-05, |
|
"loss": 1.373, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 4.2, |
|
"learning_rate": 4.3053435114503824e-05, |
|
"loss": 1.4177, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 4.21, |
|
"learning_rate": 4.302798982188295e-05, |
|
"loss": 1.4468, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 4.23, |
|
"learning_rate": 4.300254452926209e-05, |
|
"loss": 1.5429, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 4.24, |
|
"learning_rate": 4.297709923664122e-05, |
|
"loss": 1.308, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 4.26, |
|
"learning_rate": 4.2951653944020356e-05, |
|
"loss": 1.4762, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 4.27, |
|
"learning_rate": 4.292620865139949e-05, |
|
"loss": 1.3894, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 4.29, |
|
"learning_rate": 4.2900763358778626e-05, |
|
"loss": 1.4224, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 4.31, |
|
"learning_rate": 4.287531806615776e-05, |
|
"loss": 1.2713, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 4.32, |
|
"learning_rate": 4.2849872773536896e-05, |
|
"loss": 1.4202, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 4.34, |
|
"learning_rate": 4.282442748091604e-05, |
|
"loss": 1.3968, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 4.35, |
|
"learning_rate": 4.2798982188295165e-05, |
|
"loss": 1.283, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 4.37, |
|
"learning_rate": 4.277353689567431e-05, |
|
"loss": 1.409, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 4.38, |
|
"learning_rate": 4.2748091603053435e-05, |
|
"loss": 1.4048, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 4.4, |
|
"learning_rate": 4.272264631043258e-05, |
|
"loss": 1.5048, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 4.41, |
|
"learning_rate": 4.2697201017811705e-05, |
|
"loss": 1.2965, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 4.43, |
|
"learning_rate": 4.267175572519084e-05, |
|
"loss": 1.4138, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 4.44, |
|
"learning_rate": 4.2646310432569974e-05, |
|
"loss": 1.2395, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 4.46, |
|
"learning_rate": 4.262086513994911e-05, |
|
"loss": 1.4204, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 4.47, |
|
"learning_rate": 4.2595419847328244e-05, |
|
"loss": 1.4236, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 4.49, |
|
"learning_rate": 4.256997455470738e-05, |
|
"loss": 1.3041, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 4.5, |
|
"learning_rate": 4.254452926208652e-05, |
|
"loss": 1.4454, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 4.52, |
|
"learning_rate": 4.251908396946565e-05, |
|
"loss": 1.4345, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 4.53, |
|
"learning_rate": 4.249363867684479e-05, |
|
"loss": 1.3708, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 4.55, |
|
"learning_rate": 4.246819338422392e-05, |
|
"loss": 1.4303, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 4.56, |
|
"learning_rate": 4.244274809160306e-05, |
|
"loss": 1.4196, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 4.58, |
|
"learning_rate": 4.241730279898219e-05, |
|
"loss": 1.3672, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 4.58, |
|
"eval_cer": 1.4533333333333334, |
|
"eval_loss": 1.5275843143463135, |
|
"eval_runtime": 11.8906, |
|
"eval_samples_per_second": 21.95, |
|
"eval_steps_per_second": 2.775, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 4.6, |
|
"learning_rate": 4.239185750636133e-05, |
|
"loss": 1.4312, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 4.61, |
|
"learning_rate": 4.236641221374046e-05, |
|
"loss": 1.4523, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 4.63, |
|
"learning_rate": 4.23409669211196e-05, |
|
"loss": 1.555, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 4.64, |
|
"learning_rate": 4.231552162849873e-05, |
|
"loss": 1.2982, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 4.66, |
|
"learning_rate": 4.229007633587786e-05, |
|
"loss": 1.2546, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 4.67, |
|
"learning_rate": 4.2264631043257e-05, |
|
"loss": 1.4486, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 4.69, |
|
"learning_rate": 4.223918575063613e-05, |
|
"loss": 1.5046, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 4.7, |
|
"learning_rate": 4.221374045801527e-05, |
|
"loss": 1.4805, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 4.72, |
|
"learning_rate": 4.21882951653944e-05, |
|
"loss": 1.4282, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 4.73, |
|
"learning_rate": 4.216284987277354e-05, |
|
"loss": 1.4071, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 4.75, |
|
"learning_rate": 4.213740458015267e-05, |
|
"loss": 1.4593, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 4.76, |
|
"learning_rate": 4.211195928753181e-05, |
|
"loss": 1.5556, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 4.78, |
|
"learning_rate": 4.208651399491094e-05, |
|
"loss": 1.3197, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 4.79, |
|
"learning_rate": 4.206106870229008e-05, |
|
"loss": 1.1628, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 4.81, |
|
"learning_rate": 4.203562340966921e-05, |
|
"loss": 1.5496, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 4.82, |
|
"learning_rate": 4.201017811704835e-05, |
|
"loss": 1.4464, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 4.84, |
|
"learning_rate": 4.198473282442748e-05, |
|
"loss": 1.2623, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 4.85, |
|
"learning_rate": 4.1959287531806615e-05, |
|
"loss": 1.4, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 4.87, |
|
"learning_rate": 4.193384223918575e-05, |
|
"loss": 1.3183, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 4.89, |
|
"learning_rate": 4.1908396946564884e-05, |
|
"loss": 1.4607, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 4.9, |
|
"learning_rate": 4.1882951653944026e-05, |
|
"loss": 1.4597, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 4.92, |
|
"learning_rate": 4.1857506361323154e-05, |
|
"loss": 1.3958, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 4.93, |
|
"learning_rate": 4.1832061068702296e-05, |
|
"loss": 1.4509, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 4.95, |
|
"learning_rate": 4.1806615776081424e-05, |
|
"loss": 1.4918, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 4.96, |
|
"learning_rate": 4.1781170483460565e-05, |
|
"loss": 1.3633, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 4.98, |
|
"learning_rate": 4.175572519083969e-05, |
|
"loss": 1.4143, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 4.99, |
|
"learning_rate": 4.1730279898218835e-05, |
|
"loss": 1.3187, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 5.01, |
|
"learning_rate": 4.170483460559796e-05, |
|
"loss": 1.4254, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 5.02, |
|
"learning_rate": 4.1679389312977105e-05, |
|
"loss": 1.4741, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 5.04, |
|
"learning_rate": 4.165394402035623e-05, |
|
"loss": 1.2889, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 5.05, |
|
"learning_rate": 4.1628498727735374e-05, |
|
"loss": 1.4764, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 5.07, |
|
"learning_rate": 4.160305343511451e-05, |
|
"loss": 1.513, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 5.08, |
|
"learning_rate": 4.157760814249364e-05, |
|
"loss": 1.2969, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 5.1, |
|
"learning_rate": 4.155216284987278e-05, |
|
"loss": 1.4052, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 5.11, |
|
"learning_rate": 4.152671755725191e-05, |
|
"loss": 1.4238, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 5.13, |
|
"learning_rate": 4.150127226463105e-05, |
|
"loss": 1.5067, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 5.15, |
|
"learning_rate": 4.1475826972010176e-05, |
|
"loss": 1.3132, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 5.16, |
|
"learning_rate": 4.145038167938932e-05, |
|
"loss": 1.4175, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 5.18, |
|
"learning_rate": 4.1424936386768446e-05, |
|
"loss": 1.1908, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 5.19, |
|
"learning_rate": 4.139949109414759e-05, |
|
"loss": 1.4047, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 5.21, |
|
"learning_rate": 4.1374045801526716e-05, |
|
"loss": 1.2117, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 5.22, |
|
"learning_rate": 4.134860050890586e-05, |
|
"loss": 1.3999, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 5.24, |
|
"learning_rate": 4.1323155216284985e-05, |
|
"loss": 1.3811, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 5.25, |
|
"learning_rate": 4.129770992366413e-05, |
|
"loss": 1.3761, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 5.27, |
|
"learning_rate": 4.127226463104326e-05, |
|
"loss": 1.269, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 5.28, |
|
"learning_rate": 4.124681933842239e-05, |
|
"loss": 1.2983, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 5.3, |
|
"learning_rate": 4.122137404580153e-05, |
|
"loss": 1.2791, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 5.31, |
|
"learning_rate": 4.119592875318066e-05, |
|
"loss": 1.4508, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 5.33, |
|
"learning_rate": 4.11704834605598e-05, |
|
"loss": 1.2673, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 5.34, |
|
"learning_rate": 4.114503816793893e-05, |
|
"loss": 1.3251, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 5.34, |
|
"eval_cer": 2.08, |
|
"eval_loss": 1.4844698905944824, |
|
"eval_runtime": 13.3113, |
|
"eval_samples_per_second": 19.607, |
|
"eval_steps_per_second": 2.479, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 5.36, |
|
"learning_rate": 4.111959287531807e-05, |
|
"loss": 1.2549, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 5.37, |
|
"learning_rate": 4.10941475826972e-05, |
|
"loss": 1.4543, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 5.39, |
|
"learning_rate": 4.106870229007634e-05, |
|
"loss": 1.4231, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 5.4, |
|
"learning_rate": 4.104325699745547e-05, |
|
"loss": 1.5034, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 5.42, |
|
"learning_rate": 4.101781170483461e-05, |
|
"loss": 1.405, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 5.44, |
|
"learning_rate": 4.0992366412213745e-05, |
|
"loss": 1.3133, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 5.45, |
|
"learning_rate": 4.096692111959288e-05, |
|
"loss": 1.3101, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 5.47, |
|
"learning_rate": 4.0941475826972015e-05, |
|
"loss": 1.5093, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 5.48, |
|
"learning_rate": 4.091603053435115e-05, |
|
"loss": 1.3983, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 5.5, |
|
"learning_rate": 4.0890585241730284e-05, |
|
"loss": 1.3322, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 5.51, |
|
"learning_rate": 4.086513994910941e-05, |
|
"loss": 1.1801, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 5.53, |
|
"learning_rate": 4.0839694656488554e-05, |
|
"loss": 1.2332, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 5.54, |
|
"learning_rate": 4.081424936386768e-05, |
|
"loss": 1.1875, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 5.56, |
|
"learning_rate": 4.0788804071246824e-05, |
|
"loss": 1.3077, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 5.57, |
|
"learning_rate": 4.076335877862595e-05, |
|
"loss": 1.2804, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 5.59, |
|
"learning_rate": 4.073791348600509e-05, |
|
"loss": 1.1942, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 5.6, |
|
"learning_rate": 4.071246819338422e-05, |
|
"loss": 1.3138, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 5.62, |
|
"learning_rate": 4.068702290076336e-05, |
|
"loss": 1.2047, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 5.63, |
|
"learning_rate": 4.06615776081425e-05, |
|
"loss": 1.1966, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 5.65, |
|
"learning_rate": 4.063613231552163e-05, |
|
"loss": 1.3232, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 5.66, |
|
"learning_rate": 4.061068702290077e-05, |
|
"loss": 1.2763, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 5.68, |
|
"learning_rate": 4.05852417302799e-05, |
|
"loss": 1.1923, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 5.69, |
|
"learning_rate": 4.055979643765904e-05, |
|
"loss": 1.2184, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 5.71, |
|
"learning_rate": 4.053435114503817e-05, |
|
"loss": 1.3541, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 5.73, |
|
"learning_rate": 4.050890585241731e-05, |
|
"loss": 1.1989, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 5.74, |
|
"learning_rate": 4.0483460559796435e-05, |
|
"loss": 1.2263, |
|
"step": 752 |
|
}, |
|
{ |
|
"epoch": 5.76, |
|
"learning_rate": 4.0458015267175576e-05, |
|
"loss": 1.2974, |
|
"step": 754 |
|
}, |
|
{ |
|
"epoch": 5.77, |
|
"learning_rate": 4.0432569974554704e-05, |
|
"loss": 1.2148, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 5.79, |
|
"learning_rate": 4.0407124681933846e-05, |
|
"loss": 1.3899, |
|
"step": 758 |
|
}, |
|
{ |
|
"epoch": 5.8, |
|
"learning_rate": 4.0381679389312974e-05, |
|
"loss": 1.2513, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 5.82, |
|
"learning_rate": 4.0356234096692116e-05, |
|
"loss": 1.163, |
|
"step": 762 |
|
}, |
|
{ |
|
"epoch": 5.83, |
|
"learning_rate": 4.033078880407125e-05, |
|
"loss": 1.3232, |
|
"step": 764 |
|
}, |
|
{ |
|
"epoch": 5.85, |
|
"learning_rate": 4.0305343511450385e-05, |
|
"loss": 1.3787, |
|
"step": 766 |
|
}, |
|
{ |
|
"epoch": 5.86, |
|
"learning_rate": 4.027989821882952e-05, |
|
"loss": 1.2339, |
|
"step": 768 |
|
}, |
|
{ |
|
"epoch": 5.88, |
|
"learning_rate": 4.0254452926208655e-05, |
|
"loss": 1.3441, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 5.89, |
|
"learning_rate": 4.022900763358779e-05, |
|
"loss": 1.1968, |
|
"step": 772 |
|
}, |
|
{ |
|
"epoch": 5.91, |
|
"learning_rate": 4.0203562340966925e-05, |
|
"loss": 1.2653, |
|
"step": 774 |
|
}, |
|
{ |
|
"epoch": 5.92, |
|
"learning_rate": 4.017811704834606e-05, |
|
"loss": 1.1534, |
|
"step": 776 |
|
}, |
|
{ |
|
"epoch": 5.94, |
|
"learning_rate": 4.015267175572519e-05, |
|
"loss": 1.367, |
|
"step": 778 |
|
}, |
|
{ |
|
"epoch": 5.95, |
|
"learning_rate": 4.012722646310433e-05, |
|
"loss": 1.3714, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 5.97, |
|
"learning_rate": 4.010178117048346e-05, |
|
"loss": 1.3147, |
|
"step": 782 |
|
}, |
|
{ |
|
"epoch": 5.98, |
|
"learning_rate": 4.00763358778626e-05, |
|
"loss": 1.3468, |
|
"step": 784 |
|
}, |
|
{ |
|
"epoch": 6.0, |
|
"learning_rate": 4.0050890585241734e-05, |
|
"loss": 1.1087, |
|
"step": 786 |
|
}, |
|
{ |
|
"epoch": 6.02, |
|
"learning_rate": 4.002544529262087e-05, |
|
"loss": 1.4632, |
|
"step": 788 |
|
}, |
|
{ |
|
"epoch": 6.03, |
|
"learning_rate": 4e-05, |
|
"loss": 1.2951, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 6.05, |
|
"learning_rate": 3.997455470737914e-05, |
|
"loss": 1.1655, |
|
"step": 792 |
|
}, |
|
{ |
|
"epoch": 6.06, |
|
"learning_rate": 3.994910941475827e-05, |
|
"loss": 1.3331, |
|
"step": 794 |
|
}, |
|
{ |
|
"epoch": 6.08, |
|
"learning_rate": 3.992366412213741e-05, |
|
"loss": 1.1769, |
|
"step": 796 |
|
}, |
|
{ |
|
"epoch": 6.09, |
|
"learning_rate": 3.989821882951654e-05, |
|
"loss": 1.1607, |
|
"step": 798 |
|
}, |
|
{ |
|
"epoch": 6.11, |
|
"learning_rate": 3.987277353689568e-05, |
|
"loss": 1.2477, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 6.11, |
|
"eval_cer": 1.2383333333333333, |
|
"eval_loss": 1.4819015264511108, |
|
"eval_runtime": 12.5927, |
|
"eval_samples_per_second": 20.726, |
|
"eval_steps_per_second": 2.621, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 6.12, |
|
"learning_rate": 3.984732824427481e-05, |
|
"loss": 1.3543, |
|
"step": 802 |
|
}, |
|
{ |
|
"epoch": 6.14, |
|
"learning_rate": 3.982188295165395e-05, |
|
"loss": 1.32, |
|
"step": 804 |
|
}, |
|
{ |
|
"epoch": 6.15, |
|
"learning_rate": 3.979643765903308e-05, |
|
"loss": 1.1779, |
|
"step": 806 |
|
}, |
|
{ |
|
"epoch": 6.17, |
|
"learning_rate": 3.977099236641221e-05, |
|
"loss": 1.2045, |
|
"step": 808 |
|
}, |
|
{ |
|
"epoch": 6.18, |
|
"learning_rate": 3.974554707379135e-05, |
|
"loss": 1.1947, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 6.2, |
|
"learning_rate": 3.9720101781170486e-05, |
|
"loss": 1.0608, |
|
"step": 812 |
|
}, |
|
{ |
|
"epoch": 6.21, |
|
"learning_rate": 3.969465648854962e-05, |
|
"loss": 1.3838, |
|
"step": 814 |
|
}, |
|
{ |
|
"epoch": 6.23, |
|
"learning_rate": 3.9669211195928756e-05, |
|
"loss": 1.2513, |
|
"step": 816 |
|
}, |
|
{ |
|
"epoch": 6.24, |
|
"learning_rate": 3.964376590330789e-05, |
|
"loss": 1.2486, |
|
"step": 818 |
|
}, |
|
{ |
|
"epoch": 6.26, |
|
"learning_rate": 3.9618320610687026e-05, |
|
"loss": 1.0845, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 6.27, |
|
"learning_rate": 3.959287531806616e-05, |
|
"loss": 0.9877, |
|
"step": 822 |
|
}, |
|
{ |
|
"epoch": 6.29, |
|
"learning_rate": 3.9567430025445295e-05, |
|
"loss": 1.2315, |
|
"step": 824 |
|
}, |
|
{ |
|
"epoch": 6.31, |
|
"learning_rate": 3.954198473282443e-05, |
|
"loss": 1.2069, |
|
"step": 826 |
|
}, |
|
{ |
|
"epoch": 6.32, |
|
"learning_rate": 3.9516539440203565e-05, |
|
"loss": 1.1374, |
|
"step": 828 |
|
}, |
|
{ |
|
"epoch": 6.34, |
|
"learning_rate": 3.94910941475827e-05, |
|
"loss": 1.2448, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 6.35, |
|
"learning_rate": 3.9465648854961835e-05, |
|
"loss": 1.1527, |
|
"step": 832 |
|
}, |
|
{ |
|
"epoch": 6.37, |
|
"learning_rate": 3.944020356234097e-05, |
|
"loss": 1.3363, |
|
"step": 834 |
|
}, |
|
{ |
|
"epoch": 6.38, |
|
"learning_rate": 3.9414758269720104e-05, |
|
"loss": 1.1996, |
|
"step": 836 |
|
}, |
|
{ |
|
"epoch": 6.4, |
|
"learning_rate": 3.938931297709924e-05, |
|
"loss": 0.9644, |
|
"step": 838 |
|
}, |
|
{ |
|
"epoch": 6.41, |
|
"learning_rate": 3.9363867684478374e-05, |
|
"loss": 1.3315, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 6.43, |
|
"learning_rate": 3.933842239185751e-05, |
|
"loss": 1.1744, |
|
"step": 842 |
|
}, |
|
{ |
|
"epoch": 6.44, |
|
"learning_rate": 3.9312977099236644e-05, |
|
"loss": 1.3106, |
|
"step": 844 |
|
}, |
|
{ |
|
"epoch": 6.46, |
|
"learning_rate": 3.928753180661578e-05, |
|
"loss": 1.1695, |
|
"step": 846 |
|
}, |
|
{ |
|
"epoch": 6.47, |
|
"learning_rate": 3.926208651399491e-05, |
|
"loss": 0.986, |
|
"step": 848 |
|
}, |
|
{ |
|
"epoch": 6.49, |
|
"learning_rate": 3.923664122137405e-05, |
|
"loss": 1.22, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 6.5, |
|
"learning_rate": 3.921119592875318e-05, |
|
"loss": 1.2478, |
|
"step": 852 |
|
}, |
|
{ |
|
"epoch": 6.52, |
|
"learning_rate": 3.918575063613232e-05, |
|
"loss": 1.2913, |
|
"step": 854 |
|
}, |
|
{ |
|
"epoch": 6.53, |
|
"learning_rate": 3.916030534351145e-05, |
|
"loss": 1.078, |
|
"step": 856 |
|
}, |
|
{ |
|
"epoch": 6.55, |
|
"learning_rate": 3.913486005089059e-05, |
|
"loss": 1.155, |
|
"step": 858 |
|
}, |
|
{ |
|
"epoch": 6.56, |
|
"learning_rate": 3.910941475826972e-05, |
|
"loss": 1.2021, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 6.58, |
|
"learning_rate": 3.908396946564886e-05, |
|
"loss": 1.2817, |
|
"step": 862 |
|
}, |
|
{ |
|
"epoch": 6.6, |
|
"learning_rate": 3.905852417302799e-05, |
|
"loss": 1.5194, |
|
"step": 864 |
|
}, |
|
{ |
|
"epoch": 6.61, |
|
"learning_rate": 3.903307888040713e-05, |
|
"loss": 1.1239, |
|
"step": 866 |
|
}, |
|
{ |
|
"epoch": 6.63, |
|
"learning_rate": 3.900763358778626e-05, |
|
"loss": 1.1291, |
|
"step": 868 |
|
}, |
|
{ |
|
"epoch": 6.64, |
|
"learning_rate": 3.8982188295165396e-05, |
|
"loss": 1.1058, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 6.66, |
|
"learning_rate": 3.895674300254453e-05, |
|
"loss": 1.338, |
|
"step": 872 |
|
}, |
|
{ |
|
"epoch": 6.67, |
|
"learning_rate": 3.8931297709923666e-05, |
|
"loss": 1.4552, |
|
"step": 874 |
|
}, |
|
{ |
|
"epoch": 6.69, |
|
"learning_rate": 3.89058524173028e-05, |
|
"loss": 1.1117, |
|
"step": 876 |
|
}, |
|
{ |
|
"epoch": 6.7, |
|
"learning_rate": 3.8880407124681936e-05, |
|
"loss": 1.329, |
|
"step": 878 |
|
}, |
|
{ |
|
"epoch": 6.72, |
|
"learning_rate": 3.885496183206107e-05, |
|
"loss": 1.0848, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 6.73, |
|
"learning_rate": 3.8829516539440205e-05, |
|
"loss": 1.141, |
|
"step": 882 |
|
}, |
|
{ |
|
"epoch": 6.75, |
|
"learning_rate": 3.880407124681934e-05, |
|
"loss": 1.4024, |
|
"step": 884 |
|
}, |
|
{ |
|
"epoch": 6.76, |
|
"learning_rate": 3.8778625954198475e-05, |
|
"loss": 1.3653, |
|
"step": 886 |
|
}, |
|
{ |
|
"epoch": 6.78, |
|
"learning_rate": 3.875318066157761e-05, |
|
"loss": 1.1886, |
|
"step": 888 |
|
}, |
|
{ |
|
"epoch": 6.79, |
|
"learning_rate": 3.8727735368956745e-05, |
|
"loss": 1.103, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 6.81, |
|
"learning_rate": 3.870229007633588e-05, |
|
"loss": 1.0959, |
|
"step": 892 |
|
}, |
|
{ |
|
"epoch": 6.82, |
|
"learning_rate": 3.8676844783715014e-05, |
|
"loss": 1.2043, |
|
"step": 894 |
|
}, |
|
{ |
|
"epoch": 6.84, |
|
"learning_rate": 3.865139949109415e-05, |
|
"loss": 1.2143, |
|
"step": 896 |
|
}, |
|
{ |
|
"epoch": 6.85, |
|
"learning_rate": 3.8625954198473284e-05, |
|
"loss": 1.1641, |
|
"step": 898 |
|
}, |
|
{ |
|
"epoch": 6.87, |
|
"learning_rate": 3.860050890585242e-05, |
|
"loss": 1.1487, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 6.87, |
|
"eval_cer": 1.6783333333333332, |
|
"eval_loss": 1.4798498153686523, |
|
"eval_runtime": 13.2397, |
|
"eval_samples_per_second": 19.713, |
|
"eval_steps_per_second": 2.493, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 6.89, |
|
"learning_rate": 3.8575063613231554e-05, |
|
"loss": 1.5181, |
|
"step": 902 |
|
}, |
|
{ |
|
"epoch": 6.9, |
|
"learning_rate": 3.854961832061069e-05, |
|
"loss": 1.4416, |
|
"step": 904 |
|
}, |
|
{ |
|
"epoch": 6.92, |
|
"learning_rate": 3.852417302798982e-05, |
|
"loss": 1.183, |
|
"step": 906 |
|
}, |
|
{ |
|
"epoch": 6.93, |
|
"learning_rate": 3.849872773536896e-05, |
|
"loss": 0.9592, |
|
"step": 908 |
|
}, |
|
{ |
|
"epoch": 6.95, |
|
"learning_rate": 3.847328244274809e-05, |
|
"loss": 1.1535, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 6.96, |
|
"learning_rate": 3.844783715012723e-05, |
|
"loss": 1.1211, |
|
"step": 912 |
|
}, |
|
{ |
|
"epoch": 6.98, |
|
"learning_rate": 3.842239185750636e-05, |
|
"loss": 0.8703, |
|
"step": 914 |
|
}, |
|
{ |
|
"epoch": 6.99, |
|
"learning_rate": 3.83969465648855e-05, |
|
"loss": 1.1494, |
|
"step": 916 |
|
}, |
|
{ |
|
"epoch": 7.01, |
|
"learning_rate": 3.837150127226463e-05, |
|
"loss": 1.0239, |
|
"step": 918 |
|
}, |
|
{ |
|
"epoch": 7.02, |
|
"learning_rate": 3.834605597964377e-05, |
|
"loss": 1.2222, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 7.04, |
|
"learning_rate": 3.83206106870229e-05, |
|
"loss": 1.2261, |
|
"step": 922 |
|
}, |
|
{ |
|
"epoch": 7.05, |
|
"learning_rate": 3.829516539440204e-05, |
|
"loss": 1.2203, |
|
"step": 924 |
|
}, |
|
{ |
|
"epoch": 7.07, |
|
"learning_rate": 3.826972010178117e-05, |
|
"loss": 1.1816, |
|
"step": 926 |
|
}, |
|
{ |
|
"epoch": 7.08, |
|
"learning_rate": 3.8244274809160306e-05, |
|
"loss": 1.1933, |
|
"step": 928 |
|
}, |
|
{ |
|
"epoch": 7.1, |
|
"learning_rate": 3.821882951653944e-05, |
|
"loss": 1.0713, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 7.11, |
|
"learning_rate": 3.8193384223918576e-05, |
|
"loss": 1.2109, |
|
"step": 932 |
|
}, |
|
{ |
|
"epoch": 7.13, |
|
"learning_rate": 3.816793893129771e-05, |
|
"loss": 1.1673, |
|
"step": 934 |
|
}, |
|
{ |
|
"epoch": 7.15, |
|
"learning_rate": 3.8142493638676846e-05, |
|
"loss": 1.167, |
|
"step": 936 |
|
}, |
|
{ |
|
"epoch": 7.16, |
|
"learning_rate": 3.811704834605598e-05, |
|
"loss": 1.2627, |
|
"step": 938 |
|
}, |
|
{ |
|
"epoch": 7.18, |
|
"learning_rate": 3.8091603053435115e-05, |
|
"loss": 1.1286, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 7.19, |
|
"learning_rate": 3.806615776081425e-05, |
|
"loss": 1.0552, |
|
"step": 942 |
|
}, |
|
{ |
|
"epoch": 7.21, |
|
"learning_rate": 3.8040712468193385e-05, |
|
"loss": 1.1211, |
|
"step": 944 |
|
}, |
|
{ |
|
"epoch": 7.22, |
|
"learning_rate": 3.801526717557252e-05, |
|
"loss": 1.2985, |
|
"step": 946 |
|
}, |
|
{ |
|
"epoch": 7.24, |
|
"learning_rate": 3.7989821882951655e-05, |
|
"loss": 1.2846, |
|
"step": 948 |
|
}, |
|
{ |
|
"epoch": 7.25, |
|
"learning_rate": 3.796437659033079e-05, |
|
"loss": 1.0098, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 7.27, |
|
"learning_rate": 3.7938931297709924e-05, |
|
"loss": 1.0775, |
|
"step": 952 |
|
}, |
|
{ |
|
"epoch": 7.28, |
|
"learning_rate": 3.791348600508906e-05, |
|
"loss": 1.1962, |
|
"step": 954 |
|
}, |
|
{ |
|
"epoch": 7.3, |
|
"learning_rate": 3.78880407124682e-05, |
|
"loss": 1.0683, |
|
"step": 956 |
|
}, |
|
{ |
|
"epoch": 7.31, |
|
"learning_rate": 3.786259541984733e-05, |
|
"loss": 1.0593, |
|
"step": 958 |
|
}, |
|
{ |
|
"epoch": 7.33, |
|
"learning_rate": 3.783715012722647e-05, |
|
"loss": 1.1983, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 7.34, |
|
"learning_rate": 3.78117048346056e-05, |
|
"loss": 1.0894, |
|
"step": 962 |
|
}, |
|
{ |
|
"epoch": 7.36, |
|
"learning_rate": 3.778625954198473e-05, |
|
"loss": 1.141, |
|
"step": 964 |
|
}, |
|
{ |
|
"epoch": 7.37, |
|
"learning_rate": 3.776081424936387e-05, |
|
"loss": 1.1035, |
|
"step": 966 |
|
}, |
|
{ |
|
"epoch": 7.39, |
|
"learning_rate": 3.7735368956743e-05, |
|
"loss": 1.2687, |
|
"step": 968 |
|
}, |
|
{ |
|
"epoch": 7.4, |
|
"learning_rate": 3.770992366412214e-05, |
|
"loss": 1.1224, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 7.42, |
|
"learning_rate": 3.768447837150127e-05, |
|
"loss": 1.0745, |
|
"step": 972 |
|
}, |
|
{ |
|
"epoch": 7.44, |
|
"learning_rate": 3.765903307888041e-05, |
|
"loss": 0.9383, |
|
"step": 974 |
|
}, |
|
{ |
|
"epoch": 7.45, |
|
"learning_rate": 3.763358778625954e-05, |
|
"loss": 1.1412, |
|
"step": 976 |
|
}, |
|
{ |
|
"epoch": 7.47, |
|
"learning_rate": 3.760814249363868e-05, |
|
"loss": 0.9986, |
|
"step": 978 |
|
}, |
|
{ |
|
"epoch": 7.48, |
|
"learning_rate": 3.758269720101781e-05, |
|
"loss": 1.1864, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 7.5, |
|
"learning_rate": 3.7557251908396954e-05, |
|
"loss": 1.1357, |
|
"step": 982 |
|
}, |
|
{ |
|
"epoch": 7.51, |
|
"learning_rate": 3.753180661577608e-05, |
|
"loss": 0.9112, |
|
"step": 984 |
|
}, |
|
{ |
|
"epoch": 7.53, |
|
"learning_rate": 3.750636132315522e-05, |
|
"loss": 1.0888, |
|
"step": 986 |
|
}, |
|
{ |
|
"epoch": 7.54, |
|
"learning_rate": 3.748091603053435e-05, |
|
"loss": 1.246, |
|
"step": 988 |
|
}, |
|
{ |
|
"epoch": 7.56, |
|
"learning_rate": 3.745547073791349e-05, |
|
"loss": 1.0594, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 7.57, |
|
"learning_rate": 3.743002544529262e-05, |
|
"loss": 0.9861, |
|
"step": 992 |
|
}, |
|
{ |
|
"epoch": 7.59, |
|
"learning_rate": 3.7404580152671756e-05, |
|
"loss": 1.3891, |
|
"step": 994 |
|
}, |
|
{ |
|
"epoch": 7.6, |
|
"learning_rate": 3.737913486005089e-05, |
|
"loss": 1.1963, |
|
"step": 996 |
|
}, |
|
{ |
|
"epoch": 7.62, |
|
"learning_rate": 3.7353689567430025e-05, |
|
"loss": 1.0723, |
|
"step": 998 |
|
}, |
|
{ |
|
"epoch": 7.63, |
|
"learning_rate": 3.732824427480916e-05, |
|
"loss": 0.879, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 7.63, |
|
"eval_cer": 1.0783333333333334, |
|
"eval_loss": 1.5497722625732422, |
|
"eval_runtime": 12.6235, |
|
"eval_samples_per_second": 20.676, |
|
"eval_steps_per_second": 2.614, |
|
"step": 1000 |
|
}, |
|
{
"epoch": 7.65,
"learning_rate": 3.7302798982188295e-05,
"loss": 1.1869,
"step": 1002
},
{
"epoch": 7.66,
"learning_rate": 3.727735368956743e-05,
"loss": 0.912,
"step": 1004
},
{
"epoch": 7.68,
"learning_rate": 3.7251908396946565e-05,
"loss": 1.1385,
"step": 1006
},
{
"epoch": 7.69,
"learning_rate": 3.7226463104325706e-05,
"loss": 1.0039,
"step": 1008
},
{
"epoch": 7.71,
"learning_rate": 3.7201017811704834e-05,
"loss": 0.9042,
"step": 1010
},
{
"epoch": 7.73,
"learning_rate": 3.7175572519083976e-05,
"loss": 1.108,
"step": 1012
},
{
"epoch": 7.74,
"learning_rate": 3.7150127226463104e-05,
"loss": 1.1434,
"step": 1014
},
{
"epoch": 7.76,
"learning_rate": 3.7124681933842246e-05,
"loss": 1.0162,
"step": 1016
},
{
"epoch": 7.77,
"learning_rate": 3.7099236641221374e-05,
"loss": 0.9161,
"step": 1018
},
{
"epoch": 7.79,
"learning_rate": 3.707379134860051e-05,
"loss": 1.1173,
"step": 1020
},
{
"epoch": 7.8,
"learning_rate": 3.704834605597964e-05,
"loss": 1.0893,
"step": 1022
},
{
"epoch": 7.82,
"learning_rate": 3.702290076335878e-05,
"loss": 0.9602,
"step": 1024
},
{
"epoch": 7.83,
"learning_rate": 3.699745547073791e-05,
"loss": 0.9314,
"step": 1026
},
{
"epoch": 7.85,
"learning_rate": 3.697201017811705e-05,
"loss": 0.9972,
"step": 1028
},
{
"epoch": 7.86,
"learning_rate": 3.694656488549619e-05,
"loss": 1.0091,
"step": 1030
},
{
"epoch": 7.88,
"learning_rate": 3.692111959287532e-05,
"loss": 1.1803,
"step": 1032
},
{
"epoch": 7.89,
"learning_rate": 3.689567430025446e-05,
"loss": 1.1259,
"step": 1034
},
{
"epoch": 7.91,
"learning_rate": 3.687022900763359e-05,
"loss": 1.1641,
"step": 1036
},
{
"epoch": 7.92,
"learning_rate": 3.684478371501273e-05,
"loss": 0.9592,
"step": 1038
},
{
"epoch": 7.94,
"learning_rate": 3.681933842239186e-05,
"loss": 1.3188,
"step": 1040
},
{
"epoch": 7.95,
"learning_rate": 3.6793893129771e-05,
"loss": 1.1046,
"step": 1042
},
{
"epoch": 7.97,
"learning_rate": 3.6768447837150126e-05,
"loss": 1.0569,
"step": 1044
},
{
"epoch": 7.98,
"learning_rate": 3.674300254452927e-05,
"loss": 1.0293,
"step": 1046
},
{
"epoch": 8.0,
"learning_rate": 3.6717557251908396e-05,
"loss": 0.9867,
"step": 1048
},
{
"epoch": 8.02,
"learning_rate": 3.669211195928753e-05,
"loss": 1.0242,
"step": 1050
},
{
"epoch": 8.03,
"learning_rate": 3.6666666666666666e-05,
"loss": 1.0203,
"step": 1052
},
{
"epoch": 8.05,
"learning_rate": 3.66412213740458e-05,
"loss": 1.2455,
"step": 1054
},
{
"epoch": 8.06,
"learning_rate": 3.661577608142494e-05,
"loss": 1.1691,
"step": 1056
},
{
"epoch": 8.08,
"learning_rate": 3.659033078880407e-05,
"loss": 1.0646,
"step": 1058
},
{
"epoch": 8.09,
"learning_rate": 3.656488549618321e-05,
"loss": 1.2154,
"step": 1060
},
{
"epoch": 8.11,
"learning_rate": 3.653944020356234e-05,
"loss": 0.9653,
"step": 1062
},
{
"epoch": 8.12,
"learning_rate": 3.651399491094148e-05,
"loss": 1.1375,
"step": 1064
},
{
"epoch": 8.14,
"learning_rate": 3.648854961832061e-05,
"loss": 1.0571,
"step": 1066
},
{
"epoch": 8.15,
"learning_rate": 3.646310432569975e-05,
"loss": 0.9675,
"step": 1068
},
{
"epoch": 8.17,
"learning_rate": 3.643765903307888e-05,
"loss": 0.9511,
"step": 1070
},
{
"epoch": 8.18,
"learning_rate": 3.641221374045802e-05,
"loss": 1.2214,
"step": 1072
},
{
"epoch": 8.2,
"learning_rate": 3.638676844783715e-05,
"loss": 1.0093,
"step": 1074
},
{
"epoch": 8.21,
"learning_rate": 3.6361323155216284e-05,
"loss": 0.9248,
"step": 1076
},
{
"epoch": 8.23,
"learning_rate": 3.633587786259542e-05,
"loss": 0.9676,
"step": 1078
},
{
"epoch": 8.24,
"learning_rate": 3.631043256997455e-05,
"loss": 0.9328,
"step": 1080
},
{
"epoch": 8.26,
"learning_rate": 3.6284987277353695e-05,
"loss": 0.9463,
"step": 1082
},
{
"epoch": 8.27,
"learning_rate": 3.625954198473282e-05,
"loss": 1.0143,
"step": 1084
},
{
"epoch": 8.29,
"learning_rate": 3.6234096692111965e-05,
"loss": 0.8562,
"step": 1086
},
{
"epoch": 8.31,
"learning_rate": 3.620865139949109e-05,
"loss": 0.9518,
"step": 1088
},
{
"epoch": 8.32,
"learning_rate": 3.6183206106870234e-05,
"loss": 1.0586,
"step": 1090
},
{
"epoch": 8.34,
"learning_rate": 3.615776081424936e-05,
"loss": 0.8875,
"step": 1092
},
{
"epoch": 8.35,
"learning_rate": 3.6132315521628504e-05,
"loss": 0.8863,
"step": 1094
},
{
"epoch": 8.37,
"learning_rate": 3.610687022900763e-05,
"loss": 0.882,
"step": 1096
},
{
"epoch": 8.38,
"learning_rate": 3.6081424936386774e-05,
"loss": 0.8965,
"step": 1098
},
{
"epoch": 8.4,
"learning_rate": 3.60559796437659e-05,
"loss": 0.9638,
"step": 1100
},
{
"epoch": 8.4,
"eval_cer": 2.155,
"eval_loss": 1.631682276725769,
"eval_runtime": 13.7528,
"eval_samples_per_second": 18.978,
"eval_steps_per_second": 2.4,
"step": 1100
},
{
"epoch": 8.41,
"learning_rate": 3.603053435114504e-05,
"loss": 1.1097,
"step": 1102
},
{
"epoch": 8.43,
"learning_rate": 3.600508905852418e-05,
"loss": 0.9083,
"step": 1104
},
{
"epoch": 8.44,
"learning_rate": 3.5979643765903306e-05,
"loss": 0.96,
"step": 1106
},
{
"epoch": 8.46,
"learning_rate": 3.595419847328245e-05,
"loss": 0.9702,
"step": 1108
},
{
"epoch": 8.47,
"learning_rate": 3.5928753180661576e-05,
"loss": 1.0194,
"step": 1110
},
{
"epoch": 8.49,
"learning_rate": 3.590330788804072e-05,
"loss": 0.9183,
"step": 1112
},
{
"epoch": 8.5,
"learning_rate": 3.5877862595419845e-05,
"loss": 0.9661,
"step": 1114
},
{
"epoch": 8.52,
"learning_rate": 3.585241730279899e-05,
"loss": 1.1686,
"step": 1116
},
{
"epoch": 8.53,
"learning_rate": 3.5826972010178115e-05,
"loss": 0.7814,
"step": 1118
},
{
"epoch": 8.55,
"learning_rate": 3.580152671755726e-05,
"loss": 0.8574,
"step": 1120
},
{
"epoch": 8.56,
"learning_rate": 3.5776081424936385e-05,
"loss": 0.8797,
"step": 1122
},
{
"epoch": 8.58,
"learning_rate": 3.5750636132315526e-05,
"loss": 0.9171,
"step": 1124
},
{
"epoch": 8.6,
"learning_rate": 3.5725190839694654e-05,
"loss": 0.9354,
"step": 1126
},
{
"epoch": 8.61,
"learning_rate": 3.5699745547073796e-05,
"loss": 1.018,
"step": 1128
},
{
"epoch": 8.63,
"learning_rate": 3.567430025445293e-05,
"loss": 0.9923,
"step": 1130
},
{
"epoch": 8.64,
"learning_rate": 3.564885496183206e-05,
"loss": 0.9084,
"step": 1132
},
{
"epoch": 8.66,
"learning_rate": 3.56234096692112e-05,
"loss": 1.002,
"step": 1134
},
{
"epoch": 8.67,
"learning_rate": 3.559796437659033e-05,
"loss": 0.8637,
"step": 1136
},
{
"epoch": 8.69,
"learning_rate": 3.557251908396947e-05,
"loss": 0.8584,
"step": 1138
},
{
"epoch": 8.7,
"learning_rate": 3.55470737913486e-05,
"loss": 0.819,
"step": 1140
},
{
"epoch": 8.72,
"learning_rate": 3.552162849872774e-05,
"loss": 0.9845,
"step": 1142
},
{
"epoch": 8.73,
"learning_rate": 3.549618320610687e-05,
"loss": 0.9483,
"step": 1144
},
{
"epoch": 8.75,
"learning_rate": 3.547073791348601e-05,
"loss": 0.7486,
"step": 1146
},
{
"epoch": 8.76,
"learning_rate": 3.544529262086514e-05,
"loss": 0.863,
"step": 1148
},
{
"epoch": 8.78,
"learning_rate": 3.541984732824428e-05,
"loss": 0.9741,
"step": 1150
},
{
"epoch": 8.79,
"learning_rate": 3.5394402035623414e-05,
"loss": 1.0203,
"step": 1152
},
{
"epoch": 8.81,
"learning_rate": 3.536895674300255e-05,
"loss": 1.056,
"step": 1154
},
{
"epoch": 8.82,
"learning_rate": 3.5343511450381684e-05,
"loss": 0.9974,
"step": 1156
},
{
"epoch": 8.84,
"learning_rate": 3.531806615776082e-05,
"loss": 0.8415,
"step": 1158
},
{
"epoch": 8.85,
"learning_rate": 3.529262086513995e-05,
"loss": 1.1325,
"step": 1160
},
{
"epoch": 8.87,
"learning_rate": 3.526717557251908e-05,
"loss": 0.7591,
"step": 1162
},
{
"epoch": 8.89,
"learning_rate": 3.524173027989822e-05,
"loss": 1.0119,
"step": 1164
},
{
"epoch": 8.9,
"learning_rate": 3.521628498727735e-05,
"loss": 0.8473,
"step": 1166
},
{
"epoch": 8.92,
"learning_rate": 3.519083969465649e-05,
"loss": 0.8779,
"step": 1168
},
{
"epoch": 8.93,
"learning_rate": 3.516539440203562e-05,
"loss": 0.7857,
"step": 1170
},
{
"epoch": 8.95,
"learning_rate": 3.513994910941476e-05,
"loss": 0.9987,
"step": 1172
},
{
"epoch": 8.96,
"learning_rate": 3.511450381679389e-05,
"loss": 1.1549,
"step": 1174
},
{
"epoch": 8.98,
"learning_rate": 3.508905852417303e-05,
"loss": 0.8328,
"step": 1176
},
{
"epoch": 8.99,
"learning_rate": 3.506361323155217e-05,
"loss": 0.8897,
"step": 1178
},
{
"epoch": 9.01,
"learning_rate": 3.50381679389313e-05,
"loss": 0.8221,
"step": 1180
},
{
"epoch": 9.02,
"learning_rate": 3.5012722646310436e-05,
"loss": 0.8064,
"step": 1182
},
{
"epoch": 9.04,
"learning_rate": 3.498727735368957e-05,
"loss": 0.6959,
"step": 1184
},
{
"epoch": 9.05,
"learning_rate": 3.4961832061068706e-05,
"loss": 0.8445,
"step": 1186
},
{
"epoch": 9.07,
"learning_rate": 3.493638676844784e-05,
"loss": 0.8955,
"step": 1188
},
{
"epoch": 9.08,
"learning_rate": 3.4910941475826976e-05,
"loss": 0.5805,
"step": 1190
},
{
"epoch": 9.1,
"learning_rate": 3.4885496183206104e-05,
"loss": 0.8472,
"step": 1192
},
{
"epoch": 9.11,
"learning_rate": 3.4860050890585245e-05,
"loss": 0.88,
"step": 1194
},
{
"epoch": 9.13,
"learning_rate": 3.483460559796437e-05,
"loss": 0.8396,
"step": 1196
},
{
"epoch": 9.15,
"learning_rate": 3.4809160305343515e-05,
"loss": 0.8501,
"step": 1198
},
{
"epoch": 9.16,
"learning_rate": 3.478371501272264e-05,
"loss": 0.8663,
"step": 1200
},
{
"epoch": 9.16,
"eval_cer": 0.785,
"eval_loss": 1.9156314134597778,
"eval_runtime": 12.5379,
"eval_samples_per_second": 20.817,
"eval_steps_per_second": 2.632,
"step": 1200
},
{
"epoch": 9.18,
"learning_rate": 3.4758269720101785e-05,
"loss": 0.8473,
"step": 1202
},
{
"epoch": 9.19,
"learning_rate": 3.473282442748092e-05,
"loss": 1.0161,
"step": 1204
},
{
"epoch": 9.21,
"learning_rate": 3.4707379134860054e-05,
"loss": 0.7519,
"step": 1206
},
{
"epoch": 9.22,
"learning_rate": 3.468193384223919e-05,
"loss": 0.7459,
"step": 1208
},
{
"epoch": 9.24,
"learning_rate": 3.4656488549618324e-05,
"loss": 0.7898,
"step": 1210
},
{
"epoch": 9.25,
"learning_rate": 3.463104325699746e-05,
"loss": 0.6242,
"step": 1212
},
{
"epoch": 9.27,
"learning_rate": 3.4605597964376594e-05,
"loss": 0.8839,
"step": 1214
},
{
"epoch": 9.28,
"learning_rate": 3.458015267175573e-05,
"loss": 0.9561,
"step": 1216
},
{
"epoch": 9.3,
"learning_rate": 3.4554707379134856e-05,
"loss": 1.0197,
"step": 1218
},
{
"epoch": 9.31,
"learning_rate": 3.4529262086514e-05,
"loss": 0.996,
"step": 1220
},
{
"epoch": 9.33,
"learning_rate": 3.4503816793893126e-05,
"loss": 0.7677,
"step": 1222
},
{
"epoch": 9.34,
"learning_rate": 3.447837150127227e-05,
"loss": 0.7711,
"step": 1224
},
{
"epoch": 9.36,
"learning_rate": 3.44529262086514e-05,
"loss": 0.9012,
"step": 1226
},
{
"epoch": 9.37,
"learning_rate": 3.442748091603054e-05,
"loss": 0.8905,
"step": 1228
},
{
"epoch": 9.39,
"learning_rate": 3.440203562340967e-05,
"loss": 0.8825,
"step": 1230
},
{
"epoch": 9.4,
"learning_rate": 3.437659033078881e-05,
"loss": 0.6976,
"step": 1232
},
{
"epoch": 9.42,
"learning_rate": 3.435114503816794e-05,
"loss": 0.7713,
"step": 1234
},
{
"epoch": 9.44,
"learning_rate": 3.432569974554708e-05,
"loss": 0.7792,
"step": 1236
},
{
"epoch": 9.45,
"learning_rate": 3.430025445292621e-05,
"loss": 0.7397,
"step": 1238
},
{
"epoch": 9.47,
"learning_rate": 3.4274809160305346e-05,
"loss": 0.7844,
"step": 1240
},
{
"epoch": 9.48,
"learning_rate": 3.424936386768448e-05,
"loss": 0.8553,
"step": 1242
},
{
"epoch": 9.5,
"learning_rate": 3.4223918575063616e-05,
"loss": 0.8848,
"step": 1244
},
{
"epoch": 9.51,
"learning_rate": 3.419847328244275e-05,
"loss": 0.698,
"step": 1246
},
{
"epoch": 9.53,
"learning_rate": 3.417302798982188e-05,
"loss": 0.7525,
"step": 1248
},
{
"epoch": 9.54,
"learning_rate": 3.414758269720102e-05,
"loss": 0.6092,
"step": 1250
},
{
"epoch": 9.56,
"learning_rate": 3.4122137404580155e-05,
"loss": 0.9465,
"step": 1252
},
{
"epoch": 9.57,
"learning_rate": 3.409669211195929e-05,
"loss": 0.7059,
"step": 1254
},
{
"epoch": 9.59,
"learning_rate": 3.4071246819338425e-05,
"loss": 0.8609,
"step": 1256
},
{
"epoch": 9.6,
"learning_rate": 3.404580152671756e-05,
"loss": 0.579,
"step": 1258
},
{
"epoch": 9.62,
"learning_rate": 3.4020356234096695e-05,
"loss": 0.8543,
"step": 1260
},
{
"epoch": 9.63,
"learning_rate": 3.399491094147583e-05,
"loss": 1.0405,
"step": 1262
},
{
"epoch": 9.65,
"learning_rate": 3.3969465648854964e-05,
"loss": 1.0546,
"step": 1264
},
{
"epoch": 9.66,
"learning_rate": 3.39440203562341e-05,
"loss": 0.8783,
"step": 1266
},
{
"epoch": 9.68,
"learning_rate": 3.3918575063613234e-05,
"loss": 0.7796,
"step": 1268
},
{
"epoch": 9.69,
"learning_rate": 3.389312977099237e-05,
"loss": 0.6612,
"step": 1270
},
{
"epoch": 9.71,
"learning_rate": 3.3867684478371504e-05,
"loss": 0.7545,
"step": 1272
},
{
"epoch": 9.73,
"learning_rate": 3.384223918575064e-05,
"loss": 0.8692,
"step": 1274
},
{
"epoch": 9.74,
"learning_rate": 3.381679389312977e-05,
"loss": 0.7158,
"step": 1276
},
{
"epoch": 9.76,
"learning_rate": 3.379134860050891e-05,
"loss": 0.8735,
"step": 1278
},
{
"epoch": 9.77,
"learning_rate": 3.376590330788804e-05,
"loss": 0.7916,
"step": 1280
},
{
"epoch": 9.79,
"learning_rate": 3.374045801526718e-05,
"loss": 0.802,
"step": 1282
},
{
"epoch": 9.8,
"learning_rate": 3.371501272264631e-05,
"loss": 0.8425,
"step": 1284
},
{
"epoch": 9.82,
"learning_rate": 3.368956743002545e-05,
"loss": 0.7276,
"step": 1286
},
{
"epoch": 9.83,
"learning_rate": 3.366412213740458e-05,
"loss": 0.891,
"step": 1288
},
{
"epoch": 9.85,
"learning_rate": 3.363867684478372e-05,
"loss": 0.7913,
"step": 1290
},
{
"epoch": 9.86,
"learning_rate": 3.361323155216285e-05,
"loss": 0.7408,
"step": 1292
},
{
"epoch": 9.88,
"learning_rate": 3.358778625954199e-05,
"loss": 0.8176,
"step": 1294
},
{
"epoch": 9.89,
"learning_rate": 3.356234096692112e-05,
"loss": 0.7871,
"step": 1296
},
{
"epoch": 9.91,
"learning_rate": 3.3536895674300256e-05,
"loss": 0.7954,
"step": 1298
},
{
"epoch": 9.92,
"learning_rate": 3.351145038167939e-05,
"loss": 0.9755,
"step": 1300
},
{
"epoch": 9.92,
"eval_cer": 0.485,
"eval_loss": 2.777585506439209,
"eval_runtime": 12.8482,
"eval_samples_per_second": 20.314,
"eval_steps_per_second": 2.568,
"step": 1300
},
{
"epoch": 9.94,
"learning_rate": 3.3486005089058526e-05,
"loss": 0.8733,
"step": 1302
},
{
"epoch": 9.95,
"learning_rate": 3.346055979643766e-05,
"loss": 0.9032,
"step": 1304
},
{
"epoch": 9.97,
"learning_rate": 3.3435114503816796e-05,
"loss": 0.6854,
"step": 1306
},
{
"epoch": 9.98,
"learning_rate": 3.340966921119593e-05,
"loss": 0.9199,
"step": 1308
},
{
"epoch": 10.0,
"learning_rate": 3.3384223918575065e-05,
"loss": 0.8205,
"step": 1310
},
{
"epoch": 10.02,
"learning_rate": 3.33587786259542e-05,
"loss": 0.9542,
"step": 1312
},
{
"epoch": 10.03,
"learning_rate": 3.3333333333333335e-05,
"loss": 0.6914,
"step": 1314
},
{
"epoch": 10.05,
"learning_rate": 3.330788804071247e-05,
"loss": 0.7872,
"step": 1316
},
{
"epoch": 10.06,
"learning_rate": 3.3282442748091605e-05,
"loss": 0.744,
"step": 1318
},
{
"epoch": 10.08,
"learning_rate": 3.325699745547074e-05,
"loss": 0.8267,
"step": 1320
},
{
"epoch": 10.09,
"learning_rate": 3.3231552162849874e-05,
"loss": 0.9622,
"step": 1322
},
{
"epoch": 10.11,
"learning_rate": 3.320610687022901e-05,
"loss": 0.9869,
"step": 1324
},
{
"epoch": 10.12,
"learning_rate": 3.3180661577608144e-05,
"loss": 0.883,
"step": 1326
},
{
"epoch": 10.14,
"learning_rate": 3.315521628498728e-05,
"loss": 0.7592,
"step": 1328
},
{
"epoch": 10.15,
"learning_rate": 3.3129770992366414e-05,
"loss": 0.8645,
"step": 1330
},
{
"epoch": 10.17,
"learning_rate": 3.310432569974555e-05,
"loss": 0.7664,
"step": 1332
},
{
"epoch": 10.18,
"learning_rate": 3.307888040712468e-05,
"loss": 0.8177,
"step": 1334
},
{
"epoch": 10.2,
"learning_rate": 3.305343511450382e-05,
"loss": 0.9408,
"step": 1336
},
{
"epoch": 10.21,
"learning_rate": 3.302798982188295e-05,
"loss": 0.8006,
"step": 1338
},
{
"epoch": 10.23,
"learning_rate": 3.300254452926209e-05,
"loss": 0.8512,
"step": 1340
},
{
"epoch": 10.24,
"learning_rate": 3.297709923664122e-05,
"loss": 0.6951,
"step": 1342
},
{
"epoch": 10.26,
"learning_rate": 3.295165394402036e-05,
"loss": 0.8256,
"step": 1344
},
{
"epoch": 10.27,
"learning_rate": 3.292620865139949e-05,
"loss": 0.879,
"step": 1346
},
{
"epoch": 10.29,
"learning_rate": 3.290076335877863e-05,
"loss": 0.8452,
"step": 1348
},
{
"epoch": 10.31,
"learning_rate": 3.287531806615776e-05,
"loss": 0.7518,
"step": 1350
},
{
"epoch": 10.32,
"learning_rate": 3.28498727735369e-05,
"loss": 0.7397,
"step": 1352
},
{
"epoch": 10.34,
"learning_rate": 3.282442748091603e-05,
"loss": 0.7552,
"step": 1354
},
{
"epoch": 10.35,
"learning_rate": 3.2798982188295166e-05,
"loss": 1.0041,
"step": 1356
},
{
"epoch": 10.37,
"learning_rate": 3.27735368956743e-05,
"loss": 1.0255,
"step": 1358
},
{
"epoch": 10.38,
"learning_rate": 3.2748091603053436e-05,
"loss": 0.8253,
"step": 1360
},
{
"epoch": 10.4,
"learning_rate": 3.272264631043257e-05,
"loss": 0.8581,
"step": 1362
},
{
"epoch": 10.41,
"learning_rate": 3.2697201017811706e-05,
"loss": 0.8025,
"step": 1364
},
{
"epoch": 10.43,
"learning_rate": 3.267175572519084e-05,
"loss": 0.5604,
"step": 1366
},
{
"epoch": 10.44,
"learning_rate": 3.2646310432569975e-05,
"loss": 0.7864,
"step": 1368
},
{
"epoch": 10.46,
"learning_rate": 3.262086513994911e-05,
"loss": 0.7906,
"step": 1370
},
{
"epoch": 10.47,
"learning_rate": 3.2595419847328245e-05,
"loss": 0.7996,
"step": 1372
},
{
"epoch": 10.49,
"learning_rate": 3.256997455470738e-05,
"loss": 0.66,
"step": 1374
},
{
"epoch": 10.5,
"learning_rate": 3.2544529262086515e-05,
"loss": 0.6681,
"step": 1376
},
{
"epoch": 10.52,
"learning_rate": 3.251908396946565e-05,
"loss": 0.6779,
"step": 1378
},
{
"epoch": 10.53,
"learning_rate": 3.2493638676844784e-05,
"loss": 0.7656,
"step": 1380
},
{
"epoch": 10.55,
"learning_rate": 3.246819338422392e-05,
"loss": 0.8681,
"step": 1382
},
{
"epoch": 10.56,
"learning_rate": 3.2442748091603054e-05,
"loss": 0.7303,
"step": 1384
},
{
"epoch": 10.58,
"learning_rate": 3.241730279898219e-05,
"loss": 0.8804,
"step": 1386
},
{
"epoch": 10.6,
"learning_rate": 3.2391857506361324e-05,
"loss": 0.8637,
"step": 1388
},
{
"epoch": 10.61,
"learning_rate": 3.236641221374046e-05,
"loss": 0.9304,
"step": 1390
},
{
"epoch": 10.63,
"learning_rate": 3.234096692111959e-05,
"loss": 0.7039,
"step": 1392
},
{
"epoch": 10.64,
"learning_rate": 3.231552162849873e-05,
"loss": 0.7348,
"step": 1394
},
{
"epoch": 10.66,
"learning_rate": 3.229007633587787e-05,
"loss": 0.7779,
"step": 1396
},
{
"epoch": 10.67,
"learning_rate": 3.2264631043257e-05,
"loss": 0.6528,
"step": 1398
},
{
"epoch": 10.69,
"learning_rate": 3.223918575063614e-05,
"loss": 0.6447,
"step": 1400
},
{
"epoch": 10.69,
"eval_cer": 0.8266666666666667,
"eval_loss": 2.4866857528686523,
"eval_runtime": 13.2056,
"eval_samples_per_second": 19.764,
"eval_steps_per_second": 2.499,
"step": 1400
},
{
"epoch": 10.7,
"learning_rate": 3.221374045801527e-05,
"loss": 0.771,
"step": 1402
},
{
"epoch": 10.72,
"learning_rate": 3.21882951653944e-05,
"loss": 0.7643,
"step": 1404
},
{
"epoch": 10.73,
"learning_rate": 3.216284987277354e-05,
"loss": 0.6259,
"step": 1406
},
{
"epoch": 10.75,
"learning_rate": 3.213740458015267e-05,
"loss": 0.9097,
"step": 1408
},
{
"epoch": 10.76,
"learning_rate": 3.211195928753181e-05,
"loss": 0.7458,
"step": 1410
},
{
"epoch": 10.78,
"learning_rate": 3.208651399491094e-05,
"loss": 0.6047,
"step": 1412
},
{
"epoch": 10.79,
"learning_rate": 3.2061068702290076e-05,
"loss": 0.7833,
"step": 1414
},
{
"epoch": 10.81,
"learning_rate": 3.203562340966921e-05,
"loss": 0.8714,
"step": 1416
},
{
"epoch": 10.82,
"learning_rate": 3.2010178117048346e-05,
"loss": 0.8053,
"step": 1418
},
{
"epoch": 10.84,
"learning_rate": 3.198473282442748e-05,
"loss": 0.7495,
"step": 1420
},
{
"epoch": 10.85,
"learning_rate": 3.195928753180662e-05,
"loss": 0.7018,
"step": 1422
},
{
"epoch": 10.87,
"learning_rate": 3.193384223918575e-05,
"loss": 0.5693,
"step": 1424
},
{
"epoch": 10.89,
"learning_rate": 3.190839694656489e-05,
"loss": 0.8921,
"step": 1426
},
{
"epoch": 10.9,
"learning_rate": 3.188295165394402e-05,
"loss": 0.6977,
"step": 1428
},
{
"epoch": 10.92,
"learning_rate": 3.185750636132316e-05,
"loss": 0.7847,
"step": 1430
},
{
"epoch": 10.93,
"learning_rate": 3.183206106870229e-05,
"loss": 0.7979,
"step": 1432
},
{
"epoch": 10.95,
"learning_rate": 3.1806615776081425e-05,
"loss": 0.7237,
"step": 1434
},
{
"epoch": 10.96,
"learning_rate": 3.178117048346056e-05,
"loss": 0.6758,
"step": 1436
},
{
"epoch": 10.98,
"learning_rate": 3.1755725190839694e-05,
"loss": 0.6873,
"step": 1438
},
{
"epoch": 10.99,
"learning_rate": 3.173027989821883e-05,
"loss": 0.9999,
"step": 1440
},
{
"epoch": 11.01,
"learning_rate": 3.1704834605597964e-05,
"loss": 1.0997,
"step": 1442
},
{
"epoch": 11.02,
"learning_rate": 3.16793893129771e-05,
"loss": 0.8083,
"step": 1444
},
{
"epoch": 11.04,
"learning_rate": 3.1653944020356234e-05,
"loss": 0.6659,
"step": 1446
},
{
"epoch": 11.05,
"learning_rate": 3.1628498727735375e-05,
"loss": 0.7703,
"step": 1448
},
{
"epoch": 11.07,
"learning_rate": 3.16030534351145e-05,
"loss": 0.7253,
"step": 1450
},
{
"epoch": 11.08,
"learning_rate": 3.1577608142493645e-05,
"loss": 0.7858,
"step": 1452
},
{
"epoch": 11.1,
"learning_rate": 3.155216284987277e-05,
"loss": 0.8834,
"step": 1454
},
{
"epoch": 11.11,
"learning_rate": 3.1526717557251914e-05,
"loss": 0.7663,
"step": 1456
},
{
"epoch": 11.13,
"learning_rate": 3.150127226463104e-05,
"loss": 0.8507,
"step": 1458
},
{
"epoch": 11.15,
"learning_rate": 3.147582697201018e-05,
"loss": 0.7357,
"step": 1460
},
{
"epoch": 11.16,
"learning_rate": 3.145038167938931e-05,
"loss": 0.68,
"step": 1462
},
{
"epoch": 11.18,
"learning_rate": 3.142493638676845e-05,
"loss": 0.6483,
"step": 1464
},
{
"epoch": 11.19,
"learning_rate": 3.139949109414758e-05,
"loss": 0.7656,
"step": 1466
},
{
"epoch": 11.21,
"learning_rate": 3.137404580152672e-05,
"loss": 0.5926,
"step": 1468
},
{
"epoch": 11.22,
"learning_rate": 3.134860050890586e-05,
"loss": 0.5725,
"step": 1470
},
{
"epoch": 11.24,
"learning_rate": 3.1323155216284986e-05,
"loss": 0.9918,
"step": 1472
},
{
"epoch": 11.25,
"learning_rate": 3.129770992366413e-05,
"loss": 0.875,
"step": 1474
},
{
"epoch": 11.27,
"learning_rate": 3.1272264631043256e-05,
"loss": 0.6928,
"step": 1476
},
{
"epoch": 11.28,
"learning_rate": 3.12468193384224e-05,
"loss": 0.7292,
"step": 1478
},
{
"epoch": 11.3,
"learning_rate": 3.1221374045801526e-05,
"loss": 0.7387,
"step": 1480
},
{
"epoch": 11.31,
"learning_rate": 3.119592875318067e-05,
"loss": 0.67,
"step": 1482
},
{
"epoch": 11.33,
"learning_rate": 3.1170483460559795e-05,
"loss": 0.6382,
"step": 1484
},
{
"epoch": 11.34,
"learning_rate": 3.114503816793894e-05,
"loss": 0.7604,
"step": 1486
},
{
"epoch": 11.36,
"learning_rate": 3.1119592875318065e-05,
"loss": 0.5445,
"step": 1488
},
{
"epoch": 11.37,
"learning_rate": 3.10941475826972e-05,
"loss": 0.7119,
"step": 1490
},
{
"epoch": 11.39,
"learning_rate": 3.1068702290076335e-05,
"loss": 0.7391,
"step": 1492
},
{
"epoch": 11.4,
"learning_rate": 3.104325699745547e-05,
"loss": 0.8999,
"step": 1494
},
{
"epoch": 11.42,
"learning_rate": 3.101781170483461e-05,
"loss": 0.8159,
"step": 1496
},
{
"epoch": 11.44,
"learning_rate": 3.099236641221374e-05,
"loss": 0.6531,
"step": 1498
},
{
"epoch": 11.45,
"learning_rate": 3.096692111959288e-05,
"loss": 0.7862,
"step": 1500
},
{
"epoch": 11.45,
"eval_cer": 0.61,
"eval_loss": 3.6975245475769043,
"eval_runtime": 13.3376,
"eval_samples_per_second": 19.569,
"eval_steps_per_second": 2.474,
"step": 1500
},
{
"epoch": 11.47,
"learning_rate": 3.094147582697201e-05,
"loss": 0.7377,
"step": 1502
},
{
"epoch": 11.48,
"learning_rate": 3.091603053435115e-05,
"loss": 1.1392,
"step": 1504
},
{
"epoch": 11.5,
"learning_rate": 3.089058524173028e-05,
"loss": 0.8604,
"step": 1506
},
{
"epoch": 11.51,
"learning_rate": 3.086513994910942e-05,
"loss": 0.7484,
"step": 1508
},
{
"epoch": 11.53,
"learning_rate": 3.083969465648855e-05,
"loss": 0.736,
"step": 1510
},
{
"epoch": 11.54,
"learning_rate": 3.081424936386769e-05,
"loss": 0.7803,
"step": 1512
},
{
"epoch": 11.56,
"learning_rate": 3.078880407124682e-05,
"loss": 0.7426,
"step": 1514
},
{
"epoch": 11.57,
"learning_rate": 3.076335877862595e-05,
"loss": 0.6179,
"step": 1516
},
{
"epoch": 11.59,
"learning_rate": 3.0737913486005094e-05,
"loss": 0.789,
"step": 1518
},
{
"epoch": 11.6,
"learning_rate": 3.071246819338422e-05,
"loss": 0.7082,
"step": 1520
},
{
"epoch": 11.62,
"learning_rate": 3.0687022900763364e-05,
"loss": 0.6718,
"step": 1522
},
{
"epoch": 11.63,
"learning_rate": 3.066157760814249e-05,
"loss": 0.7069,
"step": 1524
},
{
"epoch": 11.65,
"learning_rate": 3.0636132315521633e-05,
"loss": 0.7823,
"step": 1526
},
{
"epoch": 11.66,
"learning_rate": 3.061068702290076e-05,
"loss": 0.5196,
"step": 1528
},
{
"epoch": 11.68,
"learning_rate": 3.05852417302799e-05,
"loss": 0.6285,
"step": 1530
},
{
"epoch": 11.69,
"learning_rate": 3.055979643765903e-05,
"loss": 0.6355,
"step": 1532
},
{
"epoch": 11.71,
"learning_rate": 3.053435114503817e-05,
"loss": 0.7717,
"step": 1534
},
{
"epoch": 11.73,
"learning_rate": 3.05089058524173e-05,
"loss": 0.7217,
"step": 1536
},
{
"epoch": 11.74,
"learning_rate": 3.048346055979644e-05,
"loss": 0.5821,
"step": 1538
},
{
"epoch": 11.76,
"learning_rate": 3.045801526717557e-05,
"loss": 0.5953,
"step": 1540
},
{
"epoch": 11.77,
"learning_rate": 3.043256997455471e-05,
"loss": 0.7736,
"step": 1542
},
{
"epoch": 11.79,
"learning_rate": 3.0407124681933847e-05,
"loss": 0.5606,
"step": 1544
},
{
"epoch": 11.8,
"learning_rate": 3.038167938931298e-05,
"loss": 0.8974,
"step": 1546
},
{
"epoch": 11.82,
"learning_rate": 3.0356234096692117e-05,
"loss": 0.8495,
"step": 1548
},
{
"epoch": 11.83,
"learning_rate": 3.0330788804071248e-05,
"loss": 0.8835,
"step": 1550
},
{
"epoch": 11.85,
"learning_rate": 3.0305343511450386e-05,
"loss": 0.5954,
"step": 1552
},
{
"epoch": 11.86,
"learning_rate": 3.0279898218829518e-05,
"loss": 0.7219,
"step": 1554
},
{
"epoch": 11.88,
"learning_rate": 3.0254452926208652e-05,
"loss": 0.8032,
"step": 1556
},
{
"epoch": 11.89,
"learning_rate": 3.0229007633587787e-05,
"loss": 0.6683,
"step": 1558
},
{
"epoch": 11.91,
"learning_rate": 3.0203562340966922e-05,
"loss": 0.6912,
"step": 1560
},
{
"epoch": 11.92,
"learning_rate": 3.0178117048346054e-05,
"loss": 0.6155,
"step": 1562
},
{
"epoch": 11.94,
"learning_rate": 3.0152671755725192e-05,
"loss": 0.7019,
"step": 1564
},
{
"epoch": 11.95,
"learning_rate": 3.0127226463104323e-05,
"loss": 0.6955,
"step": 1566
},
{
"epoch": 11.97,
"learning_rate": 3.010178117048346e-05,
"loss": 0.598,
"step": 1568
},
{
"epoch": 11.98,
"learning_rate": 3.00763358778626e-05,
"loss": 0.6676,
"step": 1570
},
{
"epoch": 12.0,
"learning_rate": 3.005089058524173e-05,
"loss": 0.4945,
"step": 1572
},
{
"epoch": 12.02,
"learning_rate": 3.002544529262087e-05,
"loss": 0.6349,
"step": 1574
},
{
"epoch": 12.03,
"learning_rate": 3e-05,
"loss": 0.5586,
"step": 1576
},
{
"epoch": 12.05,
"learning_rate": 2.997455470737914e-05,
"loss": 0.6804,
"step": 1578
},
{
"epoch": 12.06,
"learning_rate": 2.994910941475827e-05,
"loss": 0.5436,
"step": 1580
},
{
"epoch": 12.08,
"learning_rate": 2.992366412213741e-05,
"loss": 1.0288,
"step": 1582
},
{
"epoch": 12.09,
"learning_rate": 2.989821882951654e-05,
"loss": 0.6567,
"step": 1584
},
{
"epoch": 12.11,
"learning_rate": 2.9872773536895675e-05,
"loss": 0.8401,
"step": 1586
},
{
"epoch": 12.12,
"learning_rate": 2.984732824427481e-05,
"loss": 0.8642,
"step": 1588
},
{
"epoch": 12.14,
"learning_rate": 2.9821882951653945e-05,
"loss": 0.652,
"step": 1590
},
{
"epoch": 12.15,
"learning_rate": 2.9796437659033083e-05,
"loss": 0.8162,
"step": 1592
},
{
"epoch": 12.17,
"learning_rate": 2.9770992366412214e-05,
"loss": 0.8508,
"step": 1594
},
{
"epoch": 12.18,
"learning_rate": 2.9745547073791352e-05,
"loss": 0.5377,
"step": 1596
},
{
"epoch": 12.2,
"learning_rate": 2.9720101781170484e-05,
"loss": 1.0559,
"step": 1598
},
{
"epoch": 12.21,
"learning_rate": 2.9694656488549622e-05,
"loss": 0.7416,
"step": 1600
},
{
"epoch": 12.21,
"eval_cer": 0.155,
"eval_loss": 4.830371379852295,
"eval_runtime": 11.8584,
"eval_samples_per_second": 22.01,
"eval_steps_per_second": 2.783,
"step": 1600
},
{
"epoch": 12.23,
"learning_rate": 2.9669211195928754e-05,
"loss": 0.7896,
"step": 1602
},
{
"epoch": 12.24,
"learning_rate": 2.9643765903307892e-05,
"loss": 0.8071,
"step": 1604
},
{
"epoch": 12.26,
"learning_rate": 2.9618320610687023e-05,
"loss": 0.6148,
"step": 1606
},
{
"epoch": 12.27,
"learning_rate": 2.959287531806616e-05,
"loss": 0.7511,
"step": 1608
},
{
"epoch": 12.29,
"learning_rate": 2.9567430025445293e-05,
"loss": 0.7985,
"step": 1610
},
{
"epoch": 12.31,
"learning_rate": 2.9541984732824428e-05,
"loss": 0.6049,
"step": 1612
},
{
"epoch": 12.32,
"learning_rate": 2.9516539440203562e-05,
"loss": 0.6502,
"step": 1614
},
{
"epoch": 12.34,
"learning_rate": 2.9491094147582697e-05,
"loss": 0.7028,
"step": 1616
},
{
"epoch": 12.35,
"learning_rate": 2.9465648854961836e-05,
"loss": 0.8401,
"step": 1618
},
{
"epoch": 12.37,
"learning_rate": 2.9440203562340967e-05,
"loss": 0.7012,
"step": 1620
},
{
"epoch": 12.38,
"learning_rate": 2.9414758269720105e-05,
"loss": 0.6168,
"step": 1622
},
{
"epoch": 12.4,
"learning_rate": 2.9389312977099237e-05,
"loss": 0.8844,
"step": 1624
},
{
"epoch": 12.41,
"learning_rate": 2.9363867684478375e-05,
"loss": 0.581,
"step": 1626
},
{
"epoch": 12.43,
"learning_rate": 2.9338422391857506e-05,
"loss": 0.628,
"step": 1628
},
{
"epoch": 12.44,
"learning_rate": 2.9312977099236644e-05,
"loss": 0.8216,
"step": 1630
},
{
"epoch": 12.46,
"learning_rate": 2.9287531806615776e-05,
"loss": 0.5529,
"step": 1632
},
{
"epoch": 12.47,
"learning_rate": 2.9262086513994914e-05,
"loss": 0.8191,
"step": 1634
},
{
"epoch": 12.49,
"learning_rate": 2.9236641221374046e-05,
"loss": 0.5529,
"step": 1636
},
{
"epoch": 12.5,
"learning_rate": 2.9211195928753184e-05,
"loss": 0.7198,
"step": 1638
},
{
"epoch": 12.52,
"learning_rate": 2.918575063613232e-05,
"loss": 0.7897,
"step": 1640
},
{
"epoch": 12.53,
"learning_rate": 2.916030534351145e-05,
"loss": 0.6908,
"step": 1642
},
{
"epoch": 12.55,
"learning_rate": 2.9134860050890588e-05,
"loss": 0.7455,
"step": 1644
},
{
"epoch": 12.56,
"learning_rate": 2.910941475826972e-05,
"loss": 0.8934,
"step": 1646
},
{
"epoch": 12.58,
"learning_rate": 2.9083969465648858e-05,
"loss": 0.9264,
"step": 1648
},
{
"epoch": 12.6,
"learning_rate": 2.905852417302799e-05,
"loss": 0.6212,
"step": 1650
},
{
"epoch": 12.61,
"learning_rate": 2.9033078880407128e-05,
"loss": 0.758,
"step": 1652
},
{
"epoch": 12.63,
"learning_rate": 2.900763358778626e-05,
"loss": 0.5884,
"step": 1654
},
{
"epoch": 12.64,
"learning_rate": 2.8982188295165397e-05,
"loss": 0.8146,
"step": 1656
},
{
"epoch": 12.66,
"learning_rate": 2.895674300254453e-05,
"loss": 0.7774,
"step": 1658
},
{
"epoch": 12.67,
"learning_rate": 2.8931297709923667e-05,
"loss": 0.6874,
"step": 1660
},
{
"epoch": 12.69,
"learning_rate": 2.89058524173028e-05,
"loss": 0.6767,
"step": 1662
},
{
"epoch": 12.7,
"learning_rate": 2.8880407124681937e-05,
"loss": 0.7755,
"step": 1664
},
{
"epoch": 12.72,
"learning_rate": 2.885496183206107e-05,
"loss": 0.6158,
"step": 1666
},
{
"epoch": 12.73,
"learning_rate": 2.8829516539440203e-05,
"loss": 0.6351,
"step": 1668
},
{
"epoch": 12.75,
"learning_rate": 2.880407124681934e-05,
"loss": 0.8882,
"step": 1670
},
{
"epoch": 12.76,
"learning_rate": 2.8778625954198472e-05,
"loss": 0.5635,
"step": 1672
},
{
"epoch": 12.78,
"learning_rate": 2.875318066157761e-05,
"loss": 0.8622,
"step": 1674
},
{
"epoch": 12.79,
"learning_rate": 2.8727735368956742e-05,
"loss": 0.7301,
"step": 1676
},
{
"epoch": 12.81,
"learning_rate": 2.870229007633588e-05,
"loss": 0.7363,
"step": 1678
},
{
"epoch": 12.82,
"learning_rate": 2.8676844783715012e-05,
"loss": 0.7507,
"step": 1680
},
{
"epoch": 12.84,
"learning_rate": 2.865139949109415e-05,
"loss": 0.6413,
"step": 1682
},
{
"epoch": 12.85,
"learning_rate": 2.862595419847328e-05,
"loss": 0.7979,
"step": 1684
},
{
"epoch": 12.87,
"learning_rate": 2.860050890585242e-05,
"loss": 0.6187,
"step": 1686
},
{
"epoch": 12.89,
"learning_rate": 2.857506361323155e-05,
"loss": 0.5141,
"step": 1688
},
{
"epoch": 12.9,
"learning_rate": 2.854961832061069e-05,
"loss": 0.6732,
"step": 1690
},
{
"epoch": 12.92,
"learning_rate": 2.8524173027989824e-05,
"loss": 0.6868,
"step": 1692
},
{
"epoch": 12.93,
"learning_rate": 2.849872773536896e-05,
"loss": 0.643,
"step": 1694
},
{
"epoch": 12.95,
"learning_rate": 2.8473282442748094e-05,
"loss": 0.6161,
"step": 1696
},
{
"epoch": 12.96,
"learning_rate": 2.8447837150127225e-05,
"loss": 0.6776,
"step": 1698
},
{
"epoch": 12.98,
"learning_rate": 2.8422391857506363e-05,
"loss": 0.5768,
"step": 1700
},
{
"epoch": 12.98,
"eval_cer": 0.985,
"eval_loss": 2.614406108856201,
"eval_runtime": 12.9478,
"eval_samples_per_second": 20.158,
"eval_steps_per_second": 2.549,
"step": 1700
},
{
"epoch": 12.99,
"learning_rate": 2.8396946564885495e-05,
"loss": 0.7055,
"step": 1702
},
{
"epoch": 13.01,
"learning_rate": 2.8371501272264633e-05,
"loss": 0.6081,
"step": 1704
},
{
"epoch": 13.02,
"learning_rate": 2.8346055979643765e-05,
"loss": 0.6232,
"step": 1706
},
{
"epoch": 13.04,
"learning_rate": 2.8320610687022903e-05,
"loss": 1.0069,
"step": 1708
},
{
"epoch": 13.05,
"learning_rate": 2.8295165394402034e-05,
"loss": 0.8088,
"step": 1710
},
{
"epoch": 13.07,
"learning_rate": 2.8269720101781172e-05,
"loss": 0.5509,
"step": 1712
},
{
"epoch": 13.08,
"learning_rate": 2.824427480916031e-05,
"loss": 0.6812,
"step": 1714
},
{
"epoch": 13.1,
"learning_rate": 2.8218829516539442e-05,
"loss": 0.6886,
"step": 1716
},
{
"epoch": 13.11,
"learning_rate": 2.8193384223918577e-05,
"loss": 0.6288,
"step": 1718
},
{
"epoch": 13.13,
"learning_rate": 2.8167938931297712e-05,
"loss": 0.5272,
"step": 1720
},
{
"epoch": 13.15,
"learning_rate": 2.8142493638676847e-05,
"loss": 1.008,
"step": 1722
},
{
"epoch": 13.16,
"learning_rate": 2.8117048346055978e-05,
"loss": 0.8586,
"step": 1724
},
{
"epoch": 13.18,
"learning_rate": 2.8091603053435116e-05,
"loss": 0.5503,
"step": 1726
},
{
"epoch": 13.19,
"learning_rate": 2.8066157760814248e-05,
"loss": 0.7417,
"step": 1728
},
{
"epoch": 13.21,
"learning_rate": 2.8040712468193386e-05,
"loss": 0.6712,
"step": 1730
},
{
"epoch": 13.22,
"learning_rate": 2.8015267175572517e-05,
"loss": 0.6285,
"step": 1732
},
{
"epoch": 13.24,
"learning_rate": 2.7989821882951656e-05,
"loss": 0.6874,
"step": 1734
},
{
"epoch": 13.25,
"learning_rate": 2.7964376590330787e-05,
"loss": 0.6408,
"step": 1736
},
{
"epoch": 13.27,
"learning_rate": 2.7938931297709925e-05,
"loss": 0.6697,
"step": 1738
},
{
"epoch": 13.28,
"learning_rate": 2.7913486005089063e-05,
"loss": 0.5801,
"step": 1740
},
{
"epoch": 13.3,
"learning_rate": 2.7888040712468195e-05,
"loss": 0.6358,
"step": 1742
},
{
"epoch": 13.31,
"learning_rate": 2.7862595419847333e-05,
"loss": 0.745,
"step": 1744
},
{
"epoch": 13.33,
"learning_rate": 2.7837150127226465e-05,
"loss": 0.7081,
"step": 1746
},
{
"epoch": 13.34,
"learning_rate": 2.78117048346056e-05,
"loss": 0.659,
"step": 1748
},
{
"epoch": 13.36,
"learning_rate": 2.7786259541984734e-05,
"loss": 0.5541,
"step": 1750
},
{
"epoch": 13.37,
"learning_rate": 2.776081424936387e-05,
"loss": 0.7937,
"step": 1752
},
{
"epoch": 13.39,
"learning_rate": 2.7735368956743e-05,
"loss": 0.9197,
"step": 1754
},
{
"epoch": 13.4,
"learning_rate": 2.770992366412214e-05,
"loss": 0.6734,
"step": 1756
},
{
"epoch": 13.42,
"learning_rate": 2.768447837150127e-05,
"loss": 0.6809,
"step": 1758
},
{
"epoch": 13.44,
"learning_rate": 2.7659033078880408e-05,
"loss": 0.712,
"step": 1760
},
{
"epoch": 13.45,
"learning_rate": 2.7633587786259547e-05,
"loss": 0.6177,
"step": 1762
},
{
"epoch": 13.47,
"learning_rate": 2.7608142493638678e-05,
"loss": 0.6693,
"step": 1764
},
{
"epoch": 13.48,
"learning_rate": 2.7582697201017816e-05,
"loss": 0.6131,
"step": 1766
},
{
"epoch": 13.5,
"learning_rate": 2.7557251908396948e-05,
"loss": 0.61,
"step": 1768
},
{
"epoch": 13.51,
"learning_rate": 2.7531806615776086e-05,
"loss": 0.5576,
"step": 1770
},
{
"epoch": 13.53,
"learning_rate": 2.7506361323155217e-05,
"loss": 0.7008,
"step": 1772
},
{
"epoch": 13.54,
"learning_rate": 2.7480916030534355e-05,
"loss": 0.6033,
"step": 1774
},
{
"epoch": 13.56,
"learning_rate": 2.7455470737913487e-05,
"loss": 0.4839,
"step": 1776
},
{
"epoch": 13.57,
"learning_rate": 2.7430025445292622e-05,
"loss": 0.6713,
"step": 1778
},
{
"epoch": 13.59,
"learning_rate": 2.7404580152671757e-05,
"loss": 0.6208,
"step": 1780
},
{
"epoch": 13.6,
"learning_rate": 2.737913486005089e-05,
"loss": 0.6956,
"step": 1782
},
{
"epoch": 13.62,
"learning_rate": 2.7353689567430023e-05,
"loss": 0.6163,
"step": 1784
},
{
"epoch": 13.63,
"learning_rate": 2.732824427480916e-05,
"loss": 0.682,
"step": 1786
},
{
"epoch": 13.65,
"learning_rate": 2.73027989821883e-05,
"loss": 0.6238,
"step": 1788
},
{
"epoch": 13.66,
"learning_rate": 2.727735368956743e-05,
"loss": 0.8029,
"step": 1790
},
{
"epoch": 13.68,
"learning_rate": 2.725190839694657e-05,
"loss": 0.6024,
"step": 1792
},
{
"epoch": 13.69,
"learning_rate": 2.72264631043257e-05,
"loss": 0.6567,
"step": 1794
},
{
"epoch": 13.71,
"learning_rate": 2.720101781170484e-05,
"loss": 0.6399,
"step": 1796
},
{
"epoch": 13.73,
"learning_rate": 2.717557251908397e-05,
"loss": 0.7036,
"step": 1798
},
{
"epoch": 13.74,
"learning_rate": 2.7150127226463108e-05,
"loss": 0.6198,
"step": 1800
},
{
"epoch": 13.74,
"eval_cer": 0.9283333333333333,
"eval_loss": 1.9728145599365234,
"eval_runtime": 13.0399,
"eval_samples_per_second": 20.016,
"eval_steps_per_second": 2.531,
"step": 1800
},
{
"epoch": 13.76,
"learning_rate": 2.712468193384224e-05,
"loss": 0.7954,
"step": 1802
},
{
"epoch": 13.77,
"learning_rate": 2.7099236641221375e-05,
"loss": 0.6562,
"step": 1804
},
{
"epoch": 13.79,
"learning_rate": 2.707379134860051e-05,
"loss": 0.6525,
"step": 1806
},
{
"epoch": 13.8,
"learning_rate": 2.7048346055979644e-05,
"loss": 0.6105,
"step": 1808
},
{
"epoch": 13.82,
"learning_rate": 2.7022900763358776e-05,
"loss": 0.6545,
"step": 1810
},
{
"epoch": 13.83,
"learning_rate": 2.6997455470737914e-05,
"loss": 0.593,
"step": 1812
},
{
"epoch": 13.85,
"learning_rate": 2.6972010178117052e-05,
"loss": 0.6329,
"step": 1814
},
{
"epoch": 13.86,
"learning_rate": 2.6946564885496183e-05,
"loss": 0.7738,
"step": 1816
},
{
"epoch": 13.88,
"learning_rate": 2.6921119592875322e-05,
"loss": 0.6553,
"step": 1818
},
{
"epoch": 13.89,
"learning_rate": 2.6895674300254453e-05,
"loss": 0.7885,
"step": 1820
},
{
"epoch": 13.91,
"learning_rate": 2.687022900763359e-05,
"loss": 0.7487,
"step": 1822
},
{
"epoch": 13.92,
"learning_rate": 2.6844783715012723e-05,
"loss": 0.538,
"step": 1824
},
{
"epoch": 13.94,
"learning_rate": 2.681933842239186e-05,
"loss": 0.641,
"step": 1826
},
{
"epoch": 13.95,
"learning_rate": 2.6793893129770992e-05,
"loss": 1.0136,
"step": 1828
},
{
"epoch": 13.97,
"learning_rate": 2.676844783715013e-05,
"loss": 0.6821,
"step": 1830
},
{
"epoch": 13.98,
"learning_rate": 2.6743002544529262e-05,
"loss": 0.5798,
"step": 1832
},
{
"epoch": 14.0,
"learning_rate": 2.6717557251908397e-05,
"loss": 0.6055,
"step": 1834
},
{
"epoch": 14.02,
"learning_rate": 2.6692111959287535e-05,
"loss": 0.6657,
"step": 1836
},
{
"epoch": 14.03,
"learning_rate": 2.6666666666666667e-05,
"loss": 0.5488,
"step": 1838
},
{
"epoch": 14.05,
"learning_rate": 2.6641221374045805e-05,
"loss": 0.6495,
"step": 1840
},
{
"epoch": 14.06,
"learning_rate": 2.6615776081424936e-05,
"loss": 0.7168,
"step": 1842
},
{
"epoch": 14.08,
"learning_rate": 2.6590330788804074e-05,
"loss": 0.6762,
"step": 1844
},
{
"epoch": 14.09,
"learning_rate": 2.6564885496183206e-05,
"loss": 0.6912,
"step": 1846
},
{
"epoch": 14.11,
"learning_rate": 2.6539440203562344e-05,
"loss": 0.6808,
"step": 1848
},
{
"epoch": 14.12,
"learning_rate": 2.6513994910941476e-05,
"loss": 0.714,
"step": 1850
},
{
"epoch": 14.14,
"learning_rate": 2.6488549618320614e-05,
"loss": 0.6953,
"step": 1852
},
{
"epoch": 14.15,
"learning_rate": 2.6463104325699745e-05,
"loss": 0.7978,
"step": 1854
},
{
"epoch": 14.17,
"learning_rate": 2.6437659033078883e-05,
"loss": 0.7208,
"step": 1856
},
{
"epoch": 14.18,
"learning_rate": 2.6412213740458015e-05,
"loss": 0.7389,
"step": 1858
},
{
"epoch": 14.2,
"learning_rate": 2.638676844783715e-05,
"loss": 0.6913,
"step": 1860
},
{
"epoch": 14.21,
"learning_rate": 2.6361323155216288e-05,
"loss": 0.589,
"step": 1862
},
{
"epoch": 14.23,
"learning_rate": 2.633587786259542e-05,
"loss": 0.596,
"step": 1864
},
{
"epoch": 14.24,
"learning_rate": 2.6310432569974558e-05,
"loss": 0.6922,
"step": 1866
},
{
"epoch": 14.26,
"learning_rate": 2.628498727735369e-05,
"loss": 0.5771,
"step": 1868
},
{
"epoch": 14.27,
"learning_rate": 2.6259541984732827e-05,
"loss": 0.4962,
"step": 1870
},
{
"epoch": 14.29,
"learning_rate": 2.623409669211196e-05,
"loss": 0.7108,
"step": 1872
},
{
"epoch": 14.31,
"learning_rate": 2.6208651399491097e-05,
"loss": 0.6828,
"step": 1874
},
{
"epoch": 14.32,
"learning_rate": 2.618320610687023e-05,
"loss": 0.4449,
"step": 1876
},
{
"epoch": 14.34,
"learning_rate": 2.6157760814249367e-05,
"loss": 0.7032,
"step": 1878
},
{
"epoch": 14.35,
"learning_rate": 2.6132315521628498e-05,
"loss": 0.6961,
"step": 1880
},
{
"epoch": 14.37,
"learning_rate": 2.6106870229007636e-05,
"loss": 0.6392,
"step": 1882
},
{
"epoch": 14.38,
"learning_rate": 2.608142493638677e-05,
"loss": 0.662,
"step": 1884
},
{
"epoch": 14.4,
"learning_rate": 2.6055979643765906e-05,
"loss": 0.8816,
"step": 1886
},
{
"epoch": 14.41,
"learning_rate": 2.603053435114504e-05,
"loss": 0.6262,
"step": 1888
},
{
"epoch": 14.43,
"learning_rate": 2.6005089058524172e-05,
"loss": 0.5687,
"step": 1890
},
{
"epoch": 14.44,
"learning_rate": 2.597964376590331e-05,
"loss": 0.7207,
"step": 1892
},
{
"epoch": 14.46,
"learning_rate": 2.5954198473282442e-05,
"loss": 0.5061,
"step": 1894
},
{
"epoch": 14.47,
"learning_rate": 2.592875318066158e-05,
"loss": 0.5091,
"step": 1896
},
{
"epoch": 14.49,
"learning_rate": 2.590330788804071e-05,
"loss": 0.5773,
"step": 1898
},
{
"epoch": 14.5,
"learning_rate": 2.587786259541985e-05,
"loss": 0.5782,
"step": 1900
},
{
"epoch": 14.5,
"eval_cer": 0.235,
"eval_loss": 4.354420185089111,
"eval_runtime": 12.4381,
"eval_samples_per_second": 20.984,
"eval_steps_per_second": 2.653,
"step": 1900
},
{
"epoch": 14.52,
"learning_rate": 2.585241730279898e-05,
"loss": 0.4823,
"step": 1902
},
{
"epoch": 14.53,
"learning_rate": 2.582697201017812e-05,
"loss": 0.5894,
"step": 1904
},
{
"epoch": 14.55,
"learning_rate": 2.580152671755725e-05,
"loss": 0.4522,
"step": 1906
},
{
"epoch": 14.56,
"learning_rate": 2.577608142493639e-05,
"loss": 0.5878,
"step": 1908
},
{
"epoch": 14.58,
"learning_rate": 2.5750636132315524e-05,
"loss": 0.7595,
"step": 1910
},
{
"epoch": 14.6,
"learning_rate": 2.572519083969466e-05,
"loss": 0.599,
"step": 1912
},
{
"epoch": 14.61,
"learning_rate": 2.5699745547073793e-05,
"loss": 0.6153,
"step": 1914
},
{
"epoch": 14.63,
"learning_rate": 2.5674300254452925e-05,
"loss": 0.59,
"step": 1916
},
{
"epoch": 14.64,
"learning_rate": 2.5648854961832063e-05,
"loss": 0.6025,
"step": 1918
},
{
"epoch": 14.66,
"learning_rate": 2.5623409669211195e-05,
"loss": 0.6712,
"step": 1920
},
{
"epoch": 14.67,
"learning_rate": 2.5597964376590333e-05,
"loss": 0.6139,
"step": 1922
},
{
"epoch": 14.69,
"learning_rate": 2.5572519083969464e-05,
"loss": 0.6953,
"step": 1924
},
{
"epoch": 14.7,
"learning_rate": 2.5547073791348602e-05,
"loss": 0.6114,
"step": 1926
},
{
"epoch": 14.72,
"learning_rate": 2.5521628498727734e-05,
"loss": 0.6656,
"step": 1928
},
{
"epoch": 14.73,
"learning_rate": 2.5496183206106872e-05,
"loss": 0.6653,
"step": 1930
},
{
"epoch": 14.75,
"learning_rate": 2.5470737913486003e-05,
"loss": 0.7021,
"step": 1932
},
{
"epoch": 14.76,
"learning_rate": 2.5445292620865142e-05,
"loss": 0.4988,
"step": 1934
},
{
"epoch": 14.78,
"learning_rate": 2.541984732824428e-05,
"loss": 0.6053,
"step": 1936
},
{
"epoch": 14.79,
"learning_rate": 2.539440203562341e-05,
"loss": 0.537,
"step": 1938
},
{
"epoch": 14.81,
"learning_rate": 2.5368956743002546e-05,
"loss": 0.8234,
"step": 1940
},
{
"epoch": 14.82,
"learning_rate": 2.534351145038168e-05,
"loss": 0.6854,
"step": 1942
},
{
"epoch": 14.84,
"learning_rate": 2.5318066157760816e-05,
"loss": 0.8084,
"step": 1944
},
{
"epoch": 14.85,
"learning_rate": 2.5292620865139947e-05,
"loss": 0.7823,
"step": 1946
},
{
"epoch": 14.87,
"learning_rate": 2.5267175572519086e-05,
"loss": 0.8129,
"step": 1948
},
{
"epoch": 14.89,
"learning_rate": 2.5241730279898217e-05,
"loss": 0.6747,
"step": 1950
},
{
"epoch": 14.9,
"learning_rate": 2.5216284987277355e-05,
"loss": 0.5221,
"step": 1952
},
{
"epoch": 14.92,
"learning_rate": 2.5190839694656487e-05,
"loss": 0.6249,
"step": 1954
},
{
"epoch": 14.93,
"learning_rate": 2.5165394402035625e-05,
"loss": 0.5185,
"step": 1956
},
{
"epoch": 14.95,
"learning_rate": 2.5139949109414763e-05,
"loss": 0.5373,
"step": 1958
},
{
"epoch": 14.96,
"learning_rate": 2.5114503816793894e-05,
"loss": 0.7259,
"step": 1960
},
{
"epoch": 14.98,
"learning_rate": 2.5089058524173033e-05,
"loss": 0.4947,
"step": 1962
},
{
"epoch": 14.99,
"learning_rate": 2.5063613231552164e-05,
"loss": 0.5521,
"step": 1964
},
{
"epoch": 15.01,
"learning_rate": 2.50381679389313e-05,
"loss": 0.4607,
"step": 1966
},
{
"epoch": 15.02,
"learning_rate": 2.5012722646310434e-05,
"loss": 0.5285,
"step": 1968
},
{
"epoch": 15.04,
"learning_rate": 2.498727735368957e-05,
"loss": 0.8805,
"step": 1970
},
{
"epoch": 15.05,
"learning_rate": 2.4961832061068703e-05,
"loss": 0.5849,
"step": 1972
},
{
"epoch": 15.07,
"learning_rate": 2.4936386768447838e-05,
"loss": 0.6189,
"step": 1974
},
{
"epoch": 15.08,
"learning_rate": 2.4910941475826973e-05,
"loss": 0.6684,
"step": 1976
},
{
"epoch": 15.1,
"learning_rate": 2.4885496183206108e-05,
"loss": 0.7003,
"step": 1978
},
{
"epoch": 15.11,
"learning_rate": 2.4860050890585243e-05,
"loss": 0.6328,
"step": 1980
},
{
"epoch": 15.13,
"learning_rate": 2.4834605597964378e-05,
"loss": 0.6727,
"step": 1982
},
{
"epoch": 15.15,
"learning_rate": 2.4809160305343512e-05,
"loss": 0.7608,
"step": 1984
},
{
"epoch": 15.16,
"learning_rate": 2.4783715012722647e-05,
"loss": 0.5894,
"step": 1986
},
{
"epoch": 15.18,
"learning_rate": 2.4758269720101782e-05,
"loss": 0.771,
"step": 1988
},
{
"epoch": 15.19,
"learning_rate": 2.4732824427480917e-05,
"loss": 0.6449,
"step": 1990
},
{
"epoch": 15.21,
"learning_rate": 2.4707379134860052e-05,
"loss": 0.6029,
"step": 1992
},
{
"epoch": 15.22,
"learning_rate": 2.468193384223919e-05,
"loss": 0.6521,
"step": 1994
},
{
"epoch": 15.24,
"learning_rate": 2.465648854961832e-05,
"loss": 0.5042,
"step": 1996
},
{
"epoch": 15.25,
"learning_rate": 2.4631043256997456e-05,
"loss": 0.6181,
"step": 1998
},
{
"epoch": 15.27,
"learning_rate": 2.460559796437659e-05,
"loss": 0.6579,
"step": 2000
},
{
"epoch": 15.27,
"eval_cer": 0.38666666666666666,
"eval_loss": 3.3335180282592773,
|
"eval_runtime": 12.8303, |
|
"eval_samples_per_second": 20.342, |
|
"eval_steps_per_second": 2.572, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 15.28, |
|
"learning_rate": 2.4580152671755726e-05, |
|
"loss": 0.66, |
|
"step": 2002 |
|
}, |
|
{ |
|
"epoch": 15.3, |
|
"learning_rate": 2.455470737913486e-05, |
|
"loss": 0.6787, |
|
"step": 2004 |
|
}, |
|
{ |
|
"epoch": 15.31, |
|
"learning_rate": 2.4529262086513996e-05, |
|
"loss": 0.7532, |
|
"step": 2006 |
|
}, |
|
{ |
|
"epoch": 15.33, |
|
"learning_rate": 2.450381679389313e-05, |
|
"loss": 0.6325, |
|
"step": 2008 |
|
}, |
|
{ |
|
"epoch": 15.34, |
|
"learning_rate": 2.4478371501272265e-05, |
|
"loss": 0.5222, |
|
"step": 2010 |
|
}, |
|
{ |
|
"epoch": 15.36, |
|
"learning_rate": 2.44529262086514e-05, |
|
"loss": 0.5865, |
|
"step": 2012 |
|
}, |
|
{ |
|
"epoch": 15.37, |
|
"learning_rate": 2.4427480916030535e-05, |
|
"loss": 0.6388, |
|
"step": 2014 |
|
}, |
|
{ |
|
"epoch": 15.39, |
|
"learning_rate": 2.440203562340967e-05, |
|
"loss": 0.455, |
|
"step": 2016 |
|
}, |
|
{ |
|
"epoch": 15.4, |
|
"learning_rate": 2.4376590330788808e-05, |
|
"loss": 0.7409, |
|
"step": 2018 |
|
}, |
|
{ |
|
"epoch": 15.42, |
|
"learning_rate": 2.4351145038167943e-05, |
|
"loss": 0.5731, |
|
"step": 2020 |
|
}, |
|
{ |
|
"epoch": 15.44, |
|
"learning_rate": 2.4325699745547078e-05, |
|
"loss": 0.5482, |
|
"step": 2022 |
|
}, |
|
{ |
|
"epoch": 15.45, |
|
"learning_rate": 2.430025445292621e-05, |
|
"loss": 0.5084, |
|
"step": 2024 |
|
}, |
|
{ |
|
"epoch": 15.47, |
|
"learning_rate": 2.4274809160305344e-05, |
|
"loss": 0.4516, |
|
"step": 2026 |
|
}, |
|
{ |
|
"epoch": 15.48, |
|
"learning_rate": 2.424936386768448e-05, |
|
"loss": 0.5458, |
|
"step": 2028 |
|
}, |
|
{ |
|
"epoch": 15.5, |
|
"learning_rate": 2.4223918575063613e-05, |
|
"loss": 0.5029, |
|
"step": 2030 |
|
}, |
|
{ |
|
"epoch": 15.51, |
|
"learning_rate": 2.4198473282442748e-05, |
|
"loss": 0.4957, |
|
"step": 2032 |
|
}, |
|
{ |
|
"epoch": 15.53, |
|
"learning_rate": 2.4173027989821883e-05, |
|
"loss": 0.5328, |
|
"step": 2034 |
|
}, |
|
{ |
|
"epoch": 15.54, |
|
"learning_rate": 2.4147582697201018e-05, |
|
"loss": 0.4934, |
|
"step": 2036 |
|
}, |
|
{ |
|
"epoch": 15.56, |
|
"learning_rate": 2.4122137404580153e-05, |
|
"loss": 0.6305, |
|
"step": 2038 |
|
}, |
|
{ |
|
"epoch": 15.57, |
|
"learning_rate": 2.4096692111959288e-05, |
|
"loss": 0.6244, |
|
"step": 2040 |
|
}, |
|
{ |
|
"epoch": 15.59, |
|
"learning_rate": 2.4071246819338422e-05, |
|
"loss": 0.7726, |
|
"step": 2042 |
|
}, |
|
{ |
|
"epoch": 15.6, |
|
"learning_rate": 2.404580152671756e-05, |
|
"loss": 0.6559, |
|
"step": 2044 |
|
}, |
|
{ |
|
"epoch": 15.62, |
|
"learning_rate": 2.4020356234096695e-05, |
|
"loss": 0.5439, |
|
"step": 2046 |
|
}, |
|
{ |
|
"epoch": 15.63, |
|
"learning_rate": 2.399491094147583e-05, |
|
"loss": 0.6267, |
|
"step": 2048 |
|
}, |
|
{ |
|
"epoch": 15.65, |
|
"learning_rate": 2.3969465648854965e-05, |
|
"loss": 0.7257, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 15.66, |
|
"learning_rate": 2.3944020356234097e-05, |
|
"loss": 0.6406, |
|
"step": 2052 |
|
}, |
|
{ |
|
"epoch": 15.68, |
|
"learning_rate": 2.391857506361323e-05, |
|
"loss": 0.6335, |
|
"step": 2054 |
|
}, |
|
{ |
|
"epoch": 15.69, |
|
"learning_rate": 2.3893129770992366e-05, |
|
"loss": 0.6477, |
|
"step": 2056 |
|
}, |
|
{ |
|
"epoch": 15.71, |
|
"learning_rate": 2.38676844783715e-05, |
|
"loss": 0.4761, |
|
"step": 2058 |
|
}, |
|
{ |
|
"epoch": 15.73, |
|
"learning_rate": 2.3842239185750636e-05, |
|
"loss": 0.6198, |
|
"step": 2060 |
|
}, |
|
{ |
|
"epoch": 15.74, |
|
"learning_rate": 2.381679389312977e-05, |
|
"loss": 0.6266, |
|
"step": 2062 |
|
}, |
|
{ |
|
"epoch": 15.76, |
|
"learning_rate": 2.3791348600508906e-05, |
|
"loss": 0.7595, |
|
"step": 2064 |
|
}, |
|
{ |
|
"epoch": 15.77, |
|
"learning_rate": 2.376590330788804e-05, |
|
"loss": 0.703, |
|
"step": 2066 |
|
}, |
|
{ |
|
"epoch": 15.79, |
|
"learning_rate": 2.374045801526718e-05, |
|
"loss": 0.5967, |
|
"step": 2068 |
|
}, |
|
{ |
|
"epoch": 15.8, |
|
"learning_rate": 2.3715012722646313e-05, |
|
"loss": 0.6107, |
|
"step": 2070 |
|
}, |
|
{ |
|
"epoch": 15.82, |
|
"learning_rate": 2.3689567430025448e-05, |
|
"loss": 0.4899, |
|
"step": 2072 |
|
}, |
|
{ |
|
"epoch": 15.83, |
|
"learning_rate": 2.3664122137404583e-05, |
|
"loss": 0.6638, |
|
"step": 2074 |
|
}, |
|
{ |
|
"epoch": 15.85, |
|
"learning_rate": 2.3638676844783718e-05, |
|
"loss": 0.5411, |
|
"step": 2076 |
|
}, |
|
{ |
|
"epoch": 15.86, |
|
"learning_rate": 2.3613231552162853e-05, |
|
"loss": 0.5502, |
|
"step": 2078 |
|
}, |
|
{ |
|
"epoch": 15.88, |
|
"learning_rate": 2.3587786259541984e-05, |
|
"loss": 0.6979, |
|
"step": 2080 |
|
}, |
|
{ |
|
"epoch": 15.89, |
|
"learning_rate": 2.356234096692112e-05, |
|
"loss": 0.5588, |
|
"step": 2082 |
|
}, |
|
{ |
|
"epoch": 15.91, |
|
"learning_rate": 2.3536895674300254e-05, |
|
"loss": 0.6608, |
|
"step": 2084 |
|
}, |
|
{ |
|
"epoch": 15.92, |
|
"learning_rate": 2.351145038167939e-05, |
|
"loss": 0.6453, |
|
"step": 2086 |
|
}, |
|
{ |
|
"epoch": 15.94, |
|
"learning_rate": 2.3486005089058523e-05, |
|
"loss": 0.5868, |
|
"step": 2088 |
|
}, |
|
{ |
|
"epoch": 15.95, |
|
"learning_rate": 2.3460559796437658e-05, |
|
"loss": 0.5969, |
|
"step": 2090 |
|
}, |
|
{ |
|
"epoch": 15.97, |
|
"learning_rate": 2.3435114503816796e-05, |
|
"loss": 0.5101, |
|
"step": 2092 |
|
}, |
|
{ |
|
"epoch": 15.98, |
|
"learning_rate": 2.340966921119593e-05, |
|
"loss": 0.5134, |
|
"step": 2094 |
|
}, |
|
{ |
|
"epoch": 16.0, |
|
"learning_rate": 2.3384223918575066e-05, |
|
"loss": 0.6774, |
|
"step": 2096 |
|
}, |
|
{ |
|
"epoch": 16.02, |
|
"learning_rate": 2.33587786259542e-05, |
|
"loss": 0.5242, |
|
"step": 2098 |
|
}, |
|
{ |
|
"epoch": 16.03, |
|
"learning_rate": 2.3333333333333336e-05, |
|
"loss": 0.5213, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 16.03, |
|
"eval_cer": 0.23833333333333334, |
|
"eval_loss": 4.873970031738281, |
|
"eval_runtime": 11.9775, |
|
"eval_samples_per_second": 21.791, |
|
"eval_steps_per_second": 2.755, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 16.05, |
|
"learning_rate": 2.330788804071247e-05, |
|
"loss": 0.5324, |
|
"step": 2102 |
|
}, |
|
{ |
|
"epoch": 16.06, |
|
"learning_rate": 2.3282442748091605e-05, |
|
"loss": 0.5842, |
|
"step": 2104 |
|
}, |
|
{ |
|
"epoch": 16.08, |
|
"learning_rate": 2.325699745547074e-05, |
|
"loss": 0.6585, |
|
"step": 2106 |
|
}, |
|
{ |
|
"epoch": 16.09, |
|
"learning_rate": 2.3231552162849872e-05, |
|
"loss": 0.6619, |
|
"step": 2108 |
|
}, |
|
{ |
|
"epoch": 16.11, |
|
"learning_rate": 2.3206106870229007e-05, |
|
"loss": 0.6788, |
|
"step": 2110 |
|
}, |
|
{ |
|
"epoch": 16.12, |
|
"learning_rate": 2.318066157760814e-05, |
|
"loss": 0.4784, |
|
"step": 2112 |
|
}, |
|
{ |
|
"epoch": 16.14, |
|
"learning_rate": 2.3155216284987276e-05, |
|
"loss": 0.6589, |
|
"step": 2114 |
|
}, |
|
{ |
|
"epoch": 16.15, |
|
"learning_rate": 2.3129770992366414e-05, |
|
"loss": 0.8936, |
|
"step": 2116 |
|
}, |
|
{ |
|
"epoch": 16.17, |
|
"learning_rate": 2.310432569974555e-05, |
|
"loss": 0.5156, |
|
"step": 2118 |
|
}, |
|
{ |
|
"epoch": 16.18, |
|
"learning_rate": 2.3078880407124684e-05, |
|
"loss": 0.7076, |
|
"step": 2120 |
|
}, |
|
{ |
|
"epoch": 16.2, |
|
"learning_rate": 2.305343511450382e-05, |
|
"loss": 0.7453, |
|
"step": 2122 |
|
}, |
|
{ |
|
"epoch": 16.21, |
|
"learning_rate": 2.3027989821882954e-05, |
|
"loss": 0.4477, |
|
"step": 2124 |
|
}, |
|
{ |
|
"epoch": 16.23, |
|
"learning_rate": 2.300254452926209e-05, |
|
"loss": 0.4964, |
|
"step": 2126 |
|
}, |
|
{ |
|
"epoch": 16.24, |
|
"learning_rate": 2.2977099236641223e-05, |
|
"loss": 0.7308, |
|
"step": 2128 |
|
}, |
|
{ |
|
"epoch": 16.26, |
|
"learning_rate": 2.2951653944020358e-05, |
|
"loss": 0.567, |
|
"step": 2130 |
|
}, |
|
{ |
|
"epoch": 16.27, |
|
"learning_rate": 2.2926208651399493e-05, |
|
"loss": 0.46, |
|
"step": 2132 |
|
}, |
|
{ |
|
"epoch": 16.29, |
|
"learning_rate": 2.2900763358778628e-05, |
|
"loss": 0.6041, |
|
"step": 2134 |
|
}, |
|
{ |
|
"epoch": 16.31, |
|
"learning_rate": 2.287531806615776e-05, |
|
"loss": 0.5844, |
|
"step": 2136 |
|
}, |
|
{ |
|
"epoch": 16.32, |
|
"learning_rate": 2.2849872773536894e-05, |
|
"loss": 0.6429, |
|
"step": 2138 |
|
}, |
|
{ |
|
"epoch": 16.34, |
|
"learning_rate": 2.2824427480916032e-05, |
|
"loss": 0.6305, |
|
"step": 2140 |
|
}, |
|
{ |
|
"epoch": 16.35, |
|
"learning_rate": 2.2798982188295167e-05, |
|
"loss": 0.6734, |
|
"step": 2142 |
|
}, |
|
{ |
|
"epoch": 16.37, |
|
"learning_rate": 2.2773536895674302e-05, |
|
"loss": 0.5662, |
|
"step": 2144 |
|
}, |
|
{ |
|
"epoch": 16.38, |
|
"learning_rate": 2.2748091603053437e-05, |
|
"loss": 0.525, |
|
"step": 2146 |
|
}, |
|
{ |
|
"epoch": 16.4, |
|
"learning_rate": 2.272264631043257e-05, |
|
"loss": 0.4687, |
|
"step": 2148 |
|
}, |
|
{ |
|
"epoch": 16.41, |
|
"learning_rate": 2.2697201017811707e-05, |
|
"loss": 0.6332, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 16.43, |
|
"learning_rate": 2.267175572519084e-05, |
|
"loss": 0.6017, |
|
"step": 2152 |
|
}, |
|
{ |
|
"epoch": 16.44, |
|
"learning_rate": 2.2646310432569976e-05, |
|
"loss": 0.5675, |
|
"step": 2154 |
|
}, |
|
{ |
|
"epoch": 16.46, |
|
"learning_rate": 2.262086513994911e-05, |
|
"loss": 0.5413, |
|
"step": 2156 |
|
}, |
|
{ |
|
"epoch": 16.47, |
|
"learning_rate": 2.2595419847328246e-05, |
|
"loss": 0.5284, |
|
"step": 2158 |
|
}, |
|
{ |
|
"epoch": 16.49, |
|
"learning_rate": 2.256997455470738e-05, |
|
"loss": 0.6012, |
|
"step": 2160 |
|
}, |
|
{ |
|
"epoch": 16.5, |
|
"learning_rate": 2.2544529262086515e-05, |
|
"loss": 0.6813, |
|
"step": 2162 |
|
}, |
|
{ |
|
"epoch": 16.52, |
|
"learning_rate": 2.2519083969465647e-05, |
|
"loss": 0.7177, |
|
"step": 2164 |
|
}, |
|
{ |
|
"epoch": 16.53, |
|
"learning_rate": 2.2493638676844785e-05, |
|
"loss": 0.5161, |
|
"step": 2166 |
|
}, |
|
{ |
|
"epoch": 16.55, |
|
"learning_rate": 2.246819338422392e-05, |
|
"loss": 0.5074, |
|
"step": 2168 |
|
}, |
|
{ |
|
"epoch": 16.56, |
|
"learning_rate": 2.2442748091603055e-05, |
|
"loss": 0.5322, |
|
"step": 2170 |
|
}, |
|
{ |
|
"epoch": 16.58, |
|
"learning_rate": 2.241730279898219e-05, |
|
"loss": 0.6602, |
|
"step": 2172 |
|
}, |
|
{ |
|
"epoch": 16.6, |
|
"learning_rate": 2.2391857506361324e-05, |
|
"loss": 0.6103, |
|
"step": 2174 |
|
}, |
|
{ |
|
"epoch": 16.61, |
|
"learning_rate": 2.236641221374046e-05, |
|
"loss": 0.6248, |
|
"step": 2176 |
|
}, |
|
{ |
|
"epoch": 16.63, |
|
"learning_rate": 2.2340966921119594e-05, |
|
"loss": 0.6812, |
|
"step": 2178 |
|
}, |
|
{ |
|
"epoch": 16.64, |
|
"learning_rate": 2.231552162849873e-05, |
|
"loss": 0.6544, |
|
"step": 2180 |
|
}, |
|
{ |
|
"epoch": 16.66, |
|
"learning_rate": 2.2290076335877864e-05, |
|
"loss": 0.7274, |
|
"step": 2182 |
|
}, |
|
{ |
|
"epoch": 16.67, |
|
"learning_rate": 2.2264631043257e-05, |
|
"loss": 0.6022, |
|
"step": 2184 |
|
}, |
|
{ |
|
"epoch": 16.69, |
|
"learning_rate": 2.2239185750636133e-05, |
|
"loss": 0.5973, |
|
"step": 2186 |
|
}, |
|
{ |
|
"epoch": 16.7, |
|
"learning_rate": 2.2213740458015268e-05, |
|
"loss": 0.5278, |
|
"step": 2188 |
|
}, |
|
{ |
|
"epoch": 16.72, |
|
"learning_rate": 2.2188295165394403e-05, |
|
"loss": 0.6594, |
|
"step": 2190 |
|
}, |
|
{ |
|
"epoch": 16.73, |
|
"learning_rate": 2.2162849872773538e-05, |
|
"loss": 0.4612, |
|
"step": 2192 |
|
}, |
|
{ |
|
"epoch": 16.75, |
|
"learning_rate": 2.2137404580152673e-05, |
|
"loss": 0.6669, |
|
"step": 2194 |
|
}, |
|
{ |
|
"epoch": 16.76, |
|
"learning_rate": 2.2111959287531808e-05, |
|
"loss": 0.5867, |
|
"step": 2196 |
|
}, |
|
{ |
|
"epoch": 16.78, |
|
"learning_rate": 2.2086513994910942e-05, |
|
"loss": 0.5023, |
|
"step": 2198 |
|
}, |
|
{ |
|
"epoch": 16.79, |
|
"learning_rate": 2.2061068702290077e-05, |
|
"loss": 0.6719, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 16.79, |
|
"eval_cer": 0.35833333333333334, |
|
"eval_loss": 3.5339906215667725, |
|
"eval_runtime": 12.2511, |
|
"eval_samples_per_second": 21.304, |
|
"eval_steps_per_second": 2.694, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 16.81, |
|
"learning_rate": 2.2035623409669212e-05, |
|
"loss": 0.5517, |
|
"step": 2202 |
|
}, |
|
{ |
|
"epoch": 16.82, |
|
"learning_rate": 2.2010178117048347e-05, |
|
"loss": 0.6138, |
|
"step": 2204 |
|
}, |
|
{ |
|
"epoch": 16.84, |
|
"learning_rate": 2.198473282442748e-05, |
|
"loss": 0.6301, |
|
"step": 2206 |
|
}, |
|
{ |
|
"epoch": 16.85, |
|
"learning_rate": 2.1959287531806617e-05, |
|
"loss": 0.4804, |
|
"step": 2208 |
|
}, |
|
{ |
|
"epoch": 16.87, |
|
"learning_rate": 2.193384223918575e-05, |
|
"loss": 0.479, |
|
"step": 2210 |
|
}, |
|
{ |
|
"epoch": 16.89, |
|
"learning_rate": 2.1908396946564886e-05, |
|
"loss": 0.6461, |
|
"step": 2212 |
|
}, |
|
{ |
|
"epoch": 16.9, |
|
"learning_rate": 2.1882951653944024e-05, |
|
"loss": 0.6714, |
|
"step": 2214 |
|
}, |
|
{ |
|
"epoch": 16.92, |
|
"learning_rate": 2.1857506361323156e-05, |
|
"loss": 0.6949, |
|
"step": 2216 |
|
}, |
|
{ |
|
"epoch": 16.93, |
|
"learning_rate": 2.183206106870229e-05, |
|
"loss": 0.7101, |
|
"step": 2218 |
|
}, |
|
{ |
|
"epoch": 16.95, |
|
"learning_rate": 2.1806615776081425e-05, |
|
"loss": 0.5797, |
|
"step": 2220 |
|
}, |
|
{ |
|
"epoch": 16.96, |
|
"learning_rate": 2.178117048346056e-05, |
|
"loss": 0.5848, |
|
"step": 2222 |
|
}, |
|
{ |
|
"epoch": 16.98, |
|
"learning_rate": 2.1755725190839695e-05, |
|
"loss": 0.5456, |
|
"step": 2224 |
|
}, |
|
{ |
|
"epoch": 16.99, |
|
"learning_rate": 2.173027989821883e-05, |
|
"loss": 0.6577, |
|
"step": 2226 |
|
}, |
|
{ |
|
"epoch": 17.01, |
|
"learning_rate": 2.1704834605597965e-05, |
|
"loss": 0.9026, |
|
"step": 2228 |
|
}, |
|
{ |
|
"epoch": 17.02, |
|
"learning_rate": 2.16793893129771e-05, |
|
"loss": 0.5493, |
|
"step": 2230 |
|
}, |
|
{ |
|
"epoch": 17.04, |
|
"learning_rate": 2.1653944020356234e-05, |
|
"loss": 0.658, |
|
"step": 2232 |
|
}, |
|
{ |
|
"epoch": 17.05, |
|
"learning_rate": 2.162849872773537e-05, |
|
"loss": 0.5155, |
|
"step": 2234 |
|
}, |
|
{ |
|
"epoch": 17.07, |
|
"learning_rate": 2.1603053435114504e-05, |
|
"loss": 0.4761, |
|
"step": 2236 |
|
}, |
|
{ |
|
"epoch": 17.08, |
|
"learning_rate": 2.1577608142493642e-05, |
|
"loss": 0.4809, |
|
"step": 2238 |
|
}, |
|
{ |
|
"epoch": 17.1, |
|
"learning_rate": 2.1552162849872777e-05, |
|
"loss": 0.5304, |
|
"step": 2240 |
|
}, |
|
{ |
|
"epoch": 17.11, |
|
"learning_rate": 2.1526717557251912e-05, |
|
"loss": 0.5546, |
|
"step": 2242 |
|
}, |
|
{ |
|
"epoch": 17.13, |
|
"learning_rate": 2.1501272264631043e-05, |
|
"loss": 0.6821, |
|
"step": 2244 |
|
}, |
|
{ |
|
"epoch": 17.15, |
|
"learning_rate": 2.1475826972010178e-05, |
|
"loss": 0.6316, |
|
"step": 2246 |
|
}, |
|
{ |
|
"epoch": 17.16, |
|
"learning_rate": 2.1450381679389313e-05, |
|
"loss": 0.6221, |
|
"step": 2248 |
|
}, |
|
{ |
|
"epoch": 17.18, |
|
"learning_rate": 2.1424936386768448e-05, |
|
"loss": 0.4736, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 17.19, |
|
"learning_rate": 2.1399491094147583e-05, |
|
"loss": 0.608, |
|
"step": 2252 |
|
}, |
|
{ |
|
"epoch": 17.21, |
|
"learning_rate": 2.1374045801526718e-05, |
|
"loss": 0.6325, |
|
"step": 2254 |
|
}, |
|
{ |
|
"epoch": 17.22, |
|
"learning_rate": 2.1348600508905852e-05, |
|
"loss": 0.5805, |
|
"step": 2256 |
|
}, |
|
{ |
|
"epoch": 17.24, |
|
"learning_rate": 2.1323155216284987e-05, |
|
"loss": 0.6571, |
|
"step": 2258 |
|
}, |
|
{ |
|
"epoch": 17.25, |
|
"learning_rate": 2.1297709923664122e-05, |
|
"loss": 0.6797, |
|
"step": 2260 |
|
}, |
|
{ |
|
"epoch": 17.27, |
|
"learning_rate": 2.127226463104326e-05, |
|
"loss": 0.4343, |
|
"step": 2262 |
|
}, |
|
{ |
|
"epoch": 17.28, |
|
"learning_rate": 2.1246819338422395e-05, |
|
"loss": 0.4951, |
|
"step": 2264 |
|
}, |
|
{ |
|
"epoch": 17.3, |
|
"learning_rate": 2.122137404580153e-05, |
|
"loss": 0.5035, |
|
"step": 2266 |
|
}, |
|
{ |
|
"epoch": 17.31, |
|
"learning_rate": 2.1195928753180665e-05, |
|
"loss": 0.629, |
|
"step": 2268 |
|
}, |
|
{ |
|
"epoch": 17.33, |
|
"learning_rate": 2.11704834605598e-05, |
|
"loss": 0.4054, |
|
"step": 2270 |
|
}, |
|
{ |
|
"epoch": 17.34, |
|
"learning_rate": 2.114503816793893e-05, |
|
"loss": 0.6026, |
|
"step": 2272 |
|
}, |
|
{ |
|
"epoch": 17.36, |
|
"learning_rate": 2.1119592875318066e-05, |
|
"loss": 0.5428, |
|
"step": 2274 |
|
}, |
|
{ |
|
"epoch": 17.37, |
|
"learning_rate": 2.10941475826972e-05, |
|
"loss": 0.4469, |
|
"step": 2276 |
|
}, |
|
{ |
|
"epoch": 17.39, |
|
"learning_rate": 2.1068702290076335e-05, |
|
"loss": 0.4458, |
|
"step": 2278 |
|
}, |
|
{ |
|
"epoch": 17.4, |
|
"learning_rate": 2.104325699745547e-05, |
|
"loss": 0.5269, |
|
"step": 2280 |
|
}, |
|
{ |
|
"epoch": 17.42, |
|
"learning_rate": 2.1017811704834605e-05, |
|
"loss": 0.5308, |
|
"step": 2282 |
|
}, |
|
{ |
|
"epoch": 17.44, |
|
"learning_rate": 2.099236641221374e-05, |
|
"loss": 0.6008, |
|
"step": 2284 |
|
}, |
|
{ |
|
"epoch": 17.45, |
|
"learning_rate": 2.0966921119592875e-05, |
|
"loss": 0.7206, |
|
"step": 2286 |
|
}, |
|
{ |
|
"epoch": 17.47, |
|
"learning_rate": 2.0941475826972013e-05, |
|
"loss": 0.6058, |
|
"step": 2288 |
|
}, |
|
{ |
|
"epoch": 17.48, |
|
"learning_rate": 2.0916030534351148e-05, |
|
"loss": 0.6627, |
|
"step": 2290 |
|
}, |
|
{ |
|
"epoch": 17.5, |
|
"learning_rate": 2.0890585241730283e-05, |
|
"loss": 0.5918, |
|
"step": 2292 |
|
}, |
|
{ |
|
"epoch": 17.51, |
|
"learning_rate": 2.0865139949109417e-05, |
|
"loss": 0.5845, |
|
"step": 2294 |
|
}, |
|
{ |
|
"epoch": 17.53, |
|
"learning_rate": 2.0839694656488552e-05, |
|
"loss": 0.5084, |
|
"step": 2296 |
|
}, |
|
{ |
|
"epoch": 17.54, |
|
"learning_rate": 2.0814249363867687e-05, |
|
"loss": 0.5598, |
|
"step": 2298 |
|
}, |
|
{ |
|
"epoch": 17.56, |
|
"learning_rate": 2.078880407124682e-05, |
|
"loss": 0.5179, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 17.56, |
|
"eval_cer": 0.2816666666666667, |
|
"eval_loss": 6.160032749176025, |
|
"eval_runtime": 13.1562, |
|
"eval_samples_per_second": 19.839, |
|
"eval_steps_per_second": 2.508, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 17.57, |
|
"learning_rate": 2.0763358778625953e-05, |
|
"loss": 0.4853, |
|
"step": 2302 |
|
}, |
|
{ |
|
"epoch": 17.59, |
|
"learning_rate": 2.0737913486005088e-05, |
|
"loss": 0.5039, |
|
"step": 2304 |
|
}, |
|
{ |
|
"epoch": 17.6, |
|
"learning_rate": 2.0712468193384223e-05, |
|
"loss": 0.6638, |
|
"step": 2306 |
|
}, |
|
{ |
|
"epoch": 17.62, |
|
"learning_rate": 2.0687022900763358e-05, |
|
"loss": 0.8646, |
|
"step": 2308 |
|
}, |
|
{ |
|
"epoch": 17.63, |
|
"learning_rate": 2.0661577608142493e-05, |
|
"loss": 0.7057, |
|
"step": 2310 |
|
}, |
|
{ |
|
"epoch": 17.65, |
|
"learning_rate": 2.063613231552163e-05, |
|
"loss": 0.6036, |
|
"step": 2312 |
|
}, |
|
{ |
|
"epoch": 17.66, |
|
"learning_rate": 2.0610687022900766e-05, |
|
"loss": 0.467, |
|
"step": 2314 |
|
}, |
|
{ |
|
"epoch": 17.68, |
|
"learning_rate": 2.05852417302799e-05, |
|
"loss": 0.6336, |
|
"step": 2316 |
|
}, |
|
{ |
|
"epoch": 17.69, |
|
"learning_rate": 2.0559796437659035e-05, |
|
"loss": 0.4946, |
|
"step": 2318 |
|
}, |
|
{ |
|
"epoch": 17.71, |
|
"learning_rate": 2.053435114503817e-05, |
|
"loss": 0.706, |
|
"step": 2320 |
|
}, |
|
{ |
|
"epoch": 17.73, |
|
"learning_rate": 2.0508905852417305e-05, |
|
"loss": 1.0023, |
|
"step": 2322 |
|
}, |
|
{ |
|
"epoch": 17.74, |
|
"learning_rate": 2.048346055979644e-05, |
|
"loss": 0.5102, |
|
"step": 2324 |
|
}, |
|
{ |
|
"epoch": 17.76, |
|
"learning_rate": 2.0458015267175575e-05, |
|
"loss": 0.5677, |
|
"step": 2326 |
|
}, |
|
{ |
|
"epoch": 17.77, |
|
"learning_rate": 2.0432569974554706e-05, |
|
"loss": 0.745, |
|
"step": 2328 |
|
}, |
|
{ |
|
"epoch": 17.79, |
|
"learning_rate": 2.040712468193384e-05, |
|
"loss": 0.6287, |
|
"step": 2330 |
|
}, |
|
{ |
|
"epoch": 17.8, |
|
"learning_rate": 2.0381679389312976e-05, |
|
"loss": 0.5484, |
|
"step": 2332 |
|
}, |
|
{ |
|
"epoch": 17.82, |
|
"learning_rate": 2.035623409669211e-05, |
|
"loss": 0.5887, |
|
"step": 2334 |
|
}, |
|
{ |
|
"epoch": 17.83, |
|
"learning_rate": 2.033078880407125e-05, |
|
"loss": 0.6744, |
|
"step": 2336 |
|
}, |
|
{ |
|
"epoch": 17.85, |
|
"learning_rate": 2.0305343511450384e-05, |
|
"loss": 0.5906, |
|
"step": 2338 |
|
}, |
|
{ |
|
"epoch": 17.86, |
|
"learning_rate": 2.027989821882952e-05, |
|
"loss": 0.4837, |
|
"step": 2340 |
|
}, |
|
{ |
|
"epoch": 17.88, |
|
"learning_rate": 2.0254452926208653e-05, |
|
"loss": 0.6183, |
|
"step": 2342 |
|
}, |
|
{ |
|
"epoch": 17.89, |
|
"learning_rate": 2.0229007633587788e-05, |
|
"loss": 0.575, |
|
"step": 2344 |
|
}, |
|
{ |
|
"epoch": 17.91, |
|
"learning_rate": 2.0203562340966923e-05, |
|
"loss": 0.7105, |
|
"step": 2346 |
|
}, |
|
{ |
|
"epoch": 17.92, |
|
"learning_rate": 2.0178117048346058e-05, |
|
"loss": 0.6605, |
|
"step": 2348 |
|
}, |
|
{ |
|
"epoch": 17.94, |
|
"learning_rate": 2.0152671755725193e-05, |
|
"loss": 0.6811, |
|
"step": 2350 |
|
}, |
|
{ |
|
"epoch": 17.95, |
|
"learning_rate": 2.0127226463104327e-05, |
|
"loss": 0.5716, |
|
"step": 2352 |
|
}, |
|
{ |
|
"epoch": 17.97, |
|
"learning_rate": 2.0101781170483462e-05, |
|
"loss": 0.6219, |
|
"step": 2354 |
|
}, |
|
{ |
|
"epoch": 17.98, |
|
"learning_rate": 2.0076335877862594e-05, |
|
"loss": 0.4857, |
|
"step": 2356 |
|
}, |
|
{ |
|
"epoch": 18.0, |
|
"learning_rate": 2.005089058524173e-05, |
|
"loss": 0.7918, |
|
"step": 2358 |
|
}, |
|
{ |
|
"epoch": 18.02, |
|
"learning_rate": 2.0025445292620867e-05, |
|
"loss": 0.9456, |
|
"step": 2360 |
|
}, |
|
{ |
|
"epoch": 18.03, |
|
"learning_rate": 2e-05, |
|
"loss": 0.5068, |
|
"step": 2362 |
|
}, |
|
{ |
|
"epoch": 18.05, |
|
"learning_rate": 1.9974554707379136e-05, |
|
"loss": 0.4661, |
|
"step": 2364 |
|
}, |
|
{ |
|
"epoch": 18.06, |
|
"learning_rate": 1.994910941475827e-05, |
|
"loss": 0.4722, |
|
"step": 2366 |
|
}, |
|
{ |
|
"epoch": 18.08, |
|
"learning_rate": 1.9923664122137406e-05, |
|
"loss": 0.6025, |
|
"step": 2368 |
|
}, |
|
{ |
|
"epoch": 18.09, |
|
"learning_rate": 1.989821882951654e-05, |
|
"loss": 0.6247, |
|
"step": 2370 |
|
}, |
|
{ |
|
"epoch": 18.11, |
|
"learning_rate": 1.9872773536895676e-05, |
|
"loss": 0.4999, |
|
"step": 2372 |
|
}, |
|
{ |
|
"epoch": 18.12, |
|
"learning_rate": 1.984732824427481e-05, |
|
"loss": 0.5055, |
|
"step": 2374 |
|
}, |
|
{ |
|
"epoch": 18.14, |
|
"learning_rate": 1.9821882951653945e-05, |
|
"loss": 0.5135, |
|
"step": 2376 |
|
}, |
|
{ |
|
"epoch": 18.15, |
|
"learning_rate": 1.979643765903308e-05, |
|
"loss": 0.5206, |
|
"step": 2378 |
|
}, |
|
{ |
|
"epoch": 18.17, |
|
"learning_rate": 1.9770992366412215e-05, |
|
"loss": 0.5837, |
|
"step": 2380 |
|
}, |
|
{ |
|
"epoch": 18.18, |
|
"learning_rate": 1.974554707379135e-05, |
|
"loss": 0.5924, |
|
"step": 2382 |
|
}, |
|
{ |
|
"epoch": 18.2, |
|
"learning_rate": 1.9720101781170485e-05, |
|
"loss": 0.5799, |
|
"step": 2384 |
|
}, |
|
{ |
|
"epoch": 18.21, |
|
"learning_rate": 1.969465648854962e-05, |
|
"loss": 0.5315, |
|
"step": 2386 |
|
}, |
|
{ |
|
"epoch": 18.23, |
|
"learning_rate": 1.9669211195928754e-05, |
|
"loss": 0.5375, |
|
"step": 2388 |
|
}, |
|
{ |
|
"epoch": 18.24, |
|
"learning_rate": 1.964376590330789e-05, |
|
"loss": 0.4988, |
|
"step": 2390 |
|
}, |
|
{ |
|
"epoch": 18.26, |
|
"learning_rate": 1.9618320610687024e-05, |
|
"loss": 0.6637, |
|
"step": 2392 |
|
}, |
|
{ |
|
"epoch": 18.27, |
|
"learning_rate": 1.959287531806616e-05, |
|
"loss": 0.5626, |
|
"step": 2394 |
|
}, |
|
{ |
|
"epoch": 18.29, |
|
"learning_rate": 1.9567430025445294e-05, |
|
"loss": 0.5602, |
|
"step": 2396 |
|
}, |
|
{ |
|
"epoch": 18.31, |
|
"learning_rate": 1.954198473282443e-05, |
|
"loss": 0.4954, |
|
"step": 2398 |
|
}, |
|
{ |
|
"epoch": 18.32, |
|
"learning_rate": 1.9516539440203563e-05, |
|
"loss": 0.6328, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 18.32, |
|
"eval_cer": 0.7733333333333333, |
|
"eval_loss": 4.303896903991699, |
|
"eval_runtime": 13.5179, |
|
"eval_samples_per_second": 19.308, |
|
"eval_steps_per_second": 2.441, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 18.34, |
|
"learning_rate": 1.9491094147582698e-05, |
|
"loss": 0.4413, |
|
"step": 2402 |
|
}, |
|
{ |
|
"epoch": 18.35, |
|
"learning_rate": 1.9465648854961833e-05, |
|
"loss": 0.7464, |
|
"step": 2404 |
|
}, |
|
{ |
|
"epoch": 18.37, |
|
"learning_rate": 1.9440203562340968e-05, |
|
"loss": 0.7487, |
|
"step": 2406 |
|
}, |
|
{ |
|
"epoch": 18.38, |
|
"learning_rate": 1.9414758269720103e-05, |
|
"loss": 0.9175, |
|
"step": 2408 |
|
}, |
|
{ |
|
"epoch": 18.4, |
|
"learning_rate": 1.9389312977099238e-05, |
|
"loss": 0.7402, |
|
"step": 2410 |
|
}, |
|
{ |
|
"epoch": 18.41, |
|
"learning_rate": 1.9363867684478372e-05, |
|
"loss": 0.5455, |
|
"step": 2412 |
|
}, |
|
{ |
|
"epoch": 18.43, |
|
"learning_rate": 1.9338422391857507e-05, |
|
"loss": 0.5307, |
|
"step": 2414 |
|
}, |
|
{ |
|
"epoch": 18.44, |
|
"learning_rate": 1.9312977099236642e-05, |
|
"loss": 0.6569, |
|
"step": 2416 |
|
}, |
|
{ |
|
"epoch": 18.46, |
|
"learning_rate": 1.9287531806615777e-05, |
|
"loss": 0.6888, |
|
"step": 2418 |
|
}, |
|
{ |
|
"epoch": 18.47, |
|
"learning_rate": 1.926208651399491e-05, |
|
"loss": 0.6493, |
|
"step": 2420 |
|
}, |
|
{ |
|
"epoch": 18.49, |
|
"learning_rate": 1.9236641221374046e-05, |
|
"loss": 0.6108, |
|
"step": 2422 |
|
}, |
|
{ |
|
"epoch": 18.5, |
|
"learning_rate": 1.921119592875318e-05, |
|
"loss": 0.558, |
|
"step": 2424 |
|
}, |
|
{ |
|
"epoch": 18.52, |
|
"learning_rate": 1.9185750636132316e-05, |
|
"loss": 0.5925, |
|
"step": 2426 |
|
}, |
|
{ |
|
"epoch": 18.53, |
|
"learning_rate": 1.916030534351145e-05, |
|
"loss": 0.4832, |
|
"step": 2428 |
|
}, |
|
{ |
|
"epoch": 18.55, |
|
"learning_rate": 1.9134860050890586e-05, |
|
"loss": 0.4764, |
|
"step": 2430 |
|
}, |
|
{ |
|
"epoch": 18.56, |
|
"learning_rate": 1.910941475826972e-05, |
|
"loss": 0.4937, |
|
"step": 2432 |
|
}, |
|
{ |
|
"epoch": 18.58, |
|
"learning_rate": 1.9083969465648855e-05, |
|
"loss": 0.5787, |
|
"step": 2434 |
|
}, |
|
{ |
|
"epoch": 18.6, |
|
"learning_rate": 1.905852417302799e-05, |
|
"loss": 0.5666, |
|
"step": 2436 |
|
}, |
|
{ |
|
"epoch": 18.61, |
|
"learning_rate": 1.9033078880407125e-05, |
|
"loss": 0.5288, |
|
"step": 2438 |
|
}, |
|
{ |
|
"epoch": 18.63, |
|
"learning_rate": 1.900763358778626e-05, |
|
"loss": 0.4817, |
|
"step": 2440 |
|
}, |
|
{ |
|
"epoch": 18.64, |
|
"learning_rate": 1.8982188295165395e-05, |
|
"loss": 0.6006, |
|
"step": 2442 |
|
}, |
|
{ |
|
"epoch": 18.66, |
|
"learning_rate": 1.895674300254453e-05, |
|
"loss": 0.5381, |
|
"step": 2444 |
|
}, |
|
{ |
|
"epoch": 18.67, |
|
"learning_rate": 1.8931297709923664e-05, |
|
"loss": 0.4153, |
|
"step": 2446 |
|
}, |
|
{ |
|
"epoch": 18.69, |
|
"learning_rate": 1.89058524173028e-05, |
|
"loss": 0.5422, |
|
"step": 2448 |
|
}, |
|
{ |
|
"epoch": 18.7, |
|
"learning_rate": 1.8880407124681934e-05, |
|
"loss": 0.7129, |
|
"step": 2450 |
|
}, |
|
{ |
|
"epoch": 18.72, |
|
"learning_rate": 1.885496183206107e-05, |
|
"loss": 0.7264, |
|
"step": 2452 |
|
}, |
|
{ |
|
"epoch": 18.73, |
|
"learning_rate": 1.8829516539440204e-05, |
|
"loss": 0.507, |
|
"step": 2454 |
|
}, |
|
{ |
|
"epoch": 18.75, |
|
"learning_rate": 1.880407124681934e-05, |
|
"loss": 0.483, |
|
"step": 2456 |
|
}, |
|
{ |
|
"epoch": 18.76, |
|
"learning_rate": 1.8778625954198477e-05, |
|
"loss": 0.4916, |
|
"step": 2458 |
|
}, |
|
{ |
|
"epoch": 18.78, |
|
"learning_rate": 1.875318066157761e-05, |
|
"loss": 0.4537, |
|
"step": 2460 |
|
}, |
|
{ |
|
"epoch": 18.79, |
|
"learning_rate": 1.8727735368956746e-05, |
|
"loss": 0.5219, |
|
"step": 2462 |
|
}, |
|
{ |
|
"epoch": 18.81, |
|
"learning_rate": 1.8702290076335878e-05, |
|
"loss": 0.4941, |
|
"step": 2464 |
|
}, |
|
{ |
|
"epoch": 18.82, |
|
"learning_rate": 1.8676844783715013e-05, |
|
"loss": 0.5794, |
|
"step": 2466 |
|
}, |
|
{ |
|
"epoch": 18.84, |
|
"learning_rate": 1.8651399491094148e-05, |
|
"loss": 0.8022, |
|
"step": 2468 |
|
}, |
|
{ |
|
"epoch": 18.85, |
|
"learning_rate": 1.8625954198473282e-05, |
|
"loss": 0.5549, |
|
"step": 2470 |
|
}, |
|
{ |
|
"epoch": 18.87, |
|
"learning_rate": 1.8600508905852417e-05, |
|
"loss": 0.6745, |
|
"step": 2472 |
|
}, |
|
{ |
|
"epoch": 18.89, |
|
"learning_rate": 1.8575063613231552e-05, |
|
"loss": 0.5353, |
|
"step": 2474 |
|
}, |
|
{ |
|
"epoch": 18.9, |
|
"learning_rate": 1.8549618320610687e-05, |
|
"loss": 0.5068, |
|
"step": 2476 |
|
}, |
|
{ |
|
"epoch": 18.92, |
|
"learning_rate": 1.852417302798982e-05, |
|
"loss": 0.4106, |
|
"step": 2478 |
|
}, |
|
{ |
|
"epoch": 18.93, |
|
"learning_rate": 1.8498727735368956e-05, |
|
"loss": 0.4819, |
|
"step": 2480 |
|
}, |
|
{ |
|
"epoch": 18.95, |
|
"learning_rate": 1.8473282442748095e-05, |
|
"loss": 0.6774, |
|
"step": 2482 |
|
}, |
|
{ |
|
"epoch": 18.96, |
|
"learning_rate": 1.844783715012723e-05, |
|
"loss": 0.5727, |
|
"step": 2484 |
|
}, |
|
{ |
|
"epoch": 18.98, |
|
"learning_rate": 1.8422391857506364e-05, |
|
"loss": 1.0129, |
|
"step": 2486 |
|
}, |
|
{ |
|
"epoch": 18.99, |
|
"learning_rate": 1.83969465648855e-05, |
|
"loss": 0.7458, |
|
"step": 2488 |
|
}, |
|
{ |
|
"epoch": 19.01, |
|
"learning_rate": 1.8371501272264634e-05, |
|
"loss": 0.7094, |
|
"step": 2490 |
|
}, |
|
{ |
|
"epoch": 19.02, |
|
"learning_rate": 1.8346055979643765e-05, |
|
"loss": 0.9128, |
|
"step": 2492 |
|
}, |
|
{ |
|
"epoch": 19.04, |
|
"learning_rate": 1.83206106870229e-05, |
|
"loss": 0.6349, |
|
"step": 2494 |
|
}, |
|
{ |
|
"epoch": 19.05, |
|
"learning_rate": 1.8295165394402035e-05, |
|
"loss": 0.4806, |
|
"step": 2496 |
|
}, |
|
{ |
|
"epoch": 19.07, |
|
"learning_rate": 1.826972010178117e-05, |
|
"loss": 0.5949, |
|
"step": 2498 |
|
}, |
|
{ |
|
"epoch": 19.08, |
|
"learning_rate": 1.8244274809160305e-05, |
|
"loss": 0.5064, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 19.08, |
|
"eval_cer": 1.6666666666666667, |
|
"eval_loss": 3.5248308181762695, |
|
"eval_runtime": 15.663, |
|
"eval_samples_per_second": 16.663, |
|
"eval_steps_per_second": 2.107, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 19.1, |
|
"learning_rate": 1.821882951653944e-05, |
|
"loss": 0.5567, |
|
"step": 2502 |
|
}, |
|
{ |
|
"epoch": 19.11, |
|
"learning_rate": 1.8193384223918574e-05, |
|
"loss": 0.4422, |
|
"step": 2504 |
|
}, |
|
{ |
|
"epoch": 19.13, |
|
"learning_rate": 1.816793893129771e-05, |
|
"loss": 0.6828, |
|
"step": 2506 |
|
}, |
|
{ |
|
"epoch": 19.15, |
|
"learning_rate": 1.8142493638676847e-05, |
|
"loss": 0.4808, |
|
"step": 2508 |
|
}, |
|
{ |
|
"epoch": 19.16, |
|
"learning_rate": 1.8117048346055982e-05, |
|
"loss": 0.6971, |
|
"step": 2510 |
|
}, |
|
{ |
|
"epoch": 19.18, |
|
"learning_rate": 1.8091603053435117e-05, |
|
"loss": 0.7127, |
|
"step": 2512 |
|
}, |
|
{ |
|
"epoch": 19.19, |
|
"learning_rate": 1.8066157760814252e-05, |
|
"loss": 0.7504, |
|
"step": 2514 |
|
}, |
|
{ |
|
"epoch": 19.21, |
|
"learning_rate": 1.8040712468193387e-05, |
|
"loss": 0.6705, |
|
"step": 2516 |
|
}, |
|
{ |
|
"epoch": 19.22, |
|
"learning_rate": 1.801526717557252e-05, |
|
"loss": 0.6327, |
|
"step": 2518 |
|
}, |
|
{ |
|
"epoch": 19.24, |
|
"learning_rate": 1.7989821882951653e-05, |
|
"loss": 0.4723, |
|
"step": 2520 |
|
}, |
|
{ |
|
"epoch": 19.25, |
|
"learning_rate": 1.7964376590330788e-05, |
|
"loss": 0.6639, |
|
"step": 2522 |
|
}, |
|
{ |
|
"epoch": 19.27, |
|
"learning_rate": 1.7938931297709923e-05, |
|
"loss": 0.5713, |
|
"step": 2524 |
|
}, |
|
{ |
|
"epoch": 19.28, |
|
"learning_rate": 1.7913486005089058e-05, |
|
"loss": 0.6523, |
|
"step": 2526 |
|
}, |
|
{ |
|
"epoch": 19.3, |
|
"learning_rate": 1.7888040712468192e-05, |
|
"loss": 0.436, |
|
"step": 2528 |
|
}, |
|
{ |
|
"epoch": 19.31, |
|
"learning_rate": 1.7862595419847327e-05, |
|
"loss": 0.5954, |
|
"step": 2530 |
|
}, |
|
{ |
|
"epoch": 19.33, |
|
"learning_rate": 1.7837150127226465e-05, |
|
"loss": 0.5139, |
|
"step": 2532 |
|
}, |
|
{ |
|
"epoch": 19.34, |
|
"learning_rate": 1.78117048346056e-05, |
|
"loss": 0.7467, |
|
"step": 2534 |
|
}, |
|
{ |
|
"epoch": 19.36, |
|
"learning_rate": 1.7786259541984735e-05, |
|
"loss": 0.515, |
|
"step": 2536 |
|
}, |
|
{ |
|
"epoch": 19.37, |
|
"learning_rate": 1.776081424936387e-05, |
|
"loss": 0.4295, |
|
"step": 2538 |
|
}, |
|
{ |
|
"epoch": 19.39, |
|
"learning_rate": 1.7735368956743005e-05, |
|
"loss": 0.5099, |
|
"step": 2540 |
|
}, |
|
{ |
|
"epoch": 19.4, |
|
"learning_rate": 1.770992366412214e-05, |
|
"loss": 0.5244, |
|
"step": 2542 |
|
}, |
|
{ |
|
"epoch": 19.42, |
|
"learning_rate": 1.7684478371501274e-05, |
|
"loss": 0.4796, |
|
"step": 2544 |
|
}, |
|
{ |
|
"epoch": 19.44, |
|
"learning_rate": 1.765903307888041e-05, |
|
"loss": 0.5541, |
|
"step": 2546 |
|
}, |
|
{ |
|
"epoch": 19.45, |
|
"learning_rate": 1.763358778625954e-05, |
|
"loss": 0.6066, |
|
"step": 2548 |
|
}, |
|
{ |
|
"epoch": 19.47, |
|
"learning_rate": 1.7608142493638675e-05, |
|
"loss": 0.5268, |
|
"step": 2550 |
|
}, |
|
{ |
|
"epoch": 19.48, |
|
"learning_rate": 1.758269720101781e-05, |
|
"loss": 0.475, |
|
"step": 2552 |
|
}, |
|
{ |
|
"epoch": 19.5, |
|
"learning_rate": 1.7557251908396945e-05, |
|
"loss": 0.5441, |
|
"step": 2554 |
|
}, |
|
{ |
|
"epoch": 19.51, |
|
"learning_rate": 1.7531806615776083e-05, |
|
"loss": 0.5939, |
|
"step": 2556 |
|
}, |
|
{ |
|
"epoch": 19.53, |
|
"learning_rate": 1.7506361323155218e-05, |
|
"loss": 0.4331, |
|
"step": 2558 |
|
}, |
|
{ |
|
"epoch": 19.54, |
|
"learning_rate": 1.7480916030534353e-05, |
|
"loss": 0.64, |
|
"step": 2560 |
|
}, |
|
{ |
|
"epoch": 19.56, |
|
"learning_rate": 1.7455470737913488e-05, |
|
"loss": 0.6079, |
|
"step": 2562 |
|
}, |
|
{ |
|
"epoch": 19.57, |
|
"learning_rate": 1.7430025445292623e-05, |
|
"loss": 0.549, |
|
"step": 2564 |
|
}, |
|
{ |
|
"epoch": 19.59, |
|
"learning_rate": 1.7404580152671757e-05, |
|
"loss": 0.5349, |
|
"step": 2566 |
|
}, |
|
{ |
|
"epoch": 19.6, |
|
"learning_rate": 1.7379134860050892e-05, |
|
"loss": 0.8238, |
|
"step": 2568 |
|
}, |
|
{ |
|
"epoch": 19.62, |
|
"learning_rate": 1.7353689567430027e-05, |
|
"loss": 0.5387, |
|
"step": 2570 |
|
}, |
|
{ |
|
"epoch": 19.63, |
|
"learning_rate": 1.7328244274809162e-05, |
|
"loss": 0.3779, |
|
"step": 2572 |
|
}, |
|
{ |
|
"epoch": 19.65, |
|
"learning_rate": 1.7302798982188297e-05, |
|
"loss": 0.5164, |
|
"step": 2574 |
|
}, |
|
{ |
|
"epoch": 19.66, |
|
"learning_rate": 1.7277353689567428e-05, |
|
"loss": 0.5982, |
|
"step": 2576 |
|
}, |
|
{ |
|
"epoch": 19.68, |
|
"learning_rate": 1.7251908396946563e-05, |
|
"loss": 0.5311, |
|
"step": 2578 |
|
}, |
|
{ |
|
"epoch": 19.69, |
|
"learning_rate": 1.72264631043257e-05, |
|
"loss": 0.5054, |
|
"step": 2580 |
|
}, |
|
{ |
|
"epoch": 19.71, |
|
"learning_rate": 1.7201017811704836e-05, |
|
"loss": 0.5302, |
|
"step": 2582 |
|
}, |
|
{ |
|
"epoch": 19.73, |
|
"learning_rate": 1.717557251908397e-05, |
|
"loss": 0.6818, |
|
"step": 2584 |
|
}, |
|
{ |
|
"epoch": 19.74, |
|
"learning_rate": 1.7150127226463106e-05, |
|
"loss": 0.471, |
|
"step": 2586 |
|
}, |
|
{ |
|
"epoch": 19.76, |
|
"learning_rate": 1.712468193384224e-05, |
|
"loss": 0.5866, |
|
"step": 2588 |
|
}, |
|
{ |
|
"epoch": 19.77, |
|
"learning_rate": 1.7099236641221375e-05, |
|
"loss": 0.5497, |
|
"step": 2590 |
|
}, |
|
{ |
|
"epoch": 19.79, |
|
"learning_rate": 1.707379134860051e-05, |
|
"loss": 0.6197, |
|
"step": 2592 |
|
}, |
|
{ |
|
"epoch": 19.8, |
|
"learning_rate": 1.7048346055979645e-05, |
|
"loss": 0.5797, |
|
"step": 2594 |
|
}, |
|
{ |
|
"epoch": 19.82, |
|
"learning_rate": 1.702290076335878e-05, |
|
"loss": 0.4112, |
|
"step": 2596 |
|
}, |
|
{ |
|
"epoch": 19.83, |
|
"learning_rate": 1.6997455470737915e-05, |
|
"loss": 0.6759, |
|
"step": 2598 |
|
}, |
|
{ |
|
"epoch": 19.85, |
|
"learning_rate": 1.697201017811705e-05, |
|
"loss": 0.4158, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 19.85, |
|
"eval_cer": 0.43833333333333335, |
|
"eval_loss": 3.8013916015625, |
|
"eval_runtime": 12.6815, |
|
"eval_samples_per_second": 20.581, |
|
"eval_steps_per_second": 2.602, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 19.86, |
|
"learning_rate": 1.6946564885496184e-05, |
|
"loss": 0.5147, |
|
"step": 2602 |
|
}, |
|
{ |
|
"epoch": 19.88, |
|
"learning_rate": 1.692111959287532e-05, |
|
"loss": 0.443, |
|
"step": 2604 |
|
}, |
|
{ |
|
"epoch": 19.89, |
|
"learning_rate": 1.6895674300254454e-05, |
|
"loss": 0.6891, |
|
"step": 2606 |
|
}, |
|
{ |
|
"epoch": 19.91, |
|
"learning_rate": 1.687022900763359e-05, |
|
"loss": 0.6668, |
|
"step": 2608 |
|
}, |
|
{ |
|
"epoch": 19.92, |
|
"learning_rate": 1.6844783715012724e-05, |
|
"loss": 0.3972, |
|
"step": 2610 |
|
}, |
|
{ |
|
"epoch": 19.94, |
|
"learning_rate": 1.681933842239186e-05, |
|
"loss": 0.5349, |
|
"step": 2612 |
|
}, |
|
{ |
|
"epoch": 19.95, |
|
"learning_rate": 1.6793893129770993e-05, |
|
"loss": 0.5152, |
|
"step": 2614 |
|
}, |
|
{ |
|
"epoch": 19.97, |
|
"learning_rate": 1.6768447837150128e-05, |
|
"loss": 0.4808, |
|
"step": 2616 |
|
}, |
|
{ |
|
"epoch": 19.98, |
|
"learning_rate": 1.6743002544529263e-05, |
|
"loss": 0.4996, |
|
"step": 2618 |
|
}, |
|
{ |
|
"epoch": 20.0, |
|
"learning_rate": 1.6717557251908398e-05, |
|
"loss": 0.5772, |
|
"step": 2620 |
|
}, |
|
{ |
|
"epoch": 20.02, |
|
"learning_rate": 1.6692111959287533e-05, |
|
"loss": 0.6665, |
|
"step": 2622 |
|
}, |
|
{ |
|
"epoch": 20.03, |
|
"learning_rate": 1.6666666666666667e-05, |
|
"loss": 0.5312, |
|
"step": 2624 |
|
}, |
|
{ |
|
"epoch": 20.05, |
|
"learning_rate": 1.6641221374045802e-05, |
|
"loss": 0.497, |
|
"step": 2626 |
|
}, |
|
{ |
|
"epoch": 20.06, |
|
"learning_rate": 1.6615776081424937e-05, |
|
"loss": 0.6712, |
|
"step": 2628 |
|
}, |
|
{ |
|
"epoch": 20.08, |
|
"learning_rate": 1.6590330788804072e-05, |
|
"loss": 0.4766, |
|
"step": 2630 |
|
}, |
|
{ |
|
"epoch": 20.09, |
|
"learning_rate": 1.6564885496183207e-05, |
|
"loss": 0.5655, |
|
"step": 2632 |
|
}, |
|
{ |
|
"epoch": 20.11, |
|
"learning_rate": 1.653944020356234e-05, |
|
"loss": 0.5094, |
|
"step": 2634 |
|
}, |
|
{ |
|
"epoch": 20.12, |
|
"learning_rate": 1.6513994910941476e-05, |
|
"loss": 0.5919, |
|
"step": 2636 |
|
}, |
|
{ |
|
"epoch": 20.14, |
|
"learning_rate": 1.648854961832061e-05, |
|
"loss": 0.5843, |
|
"step": 2638 |
|
}, |
|
{ |
|
"epoch": 20.15, |
|
"learning_rate": 1.6463104325699746e-05, |
|
"loss": 0.4332, |
|
"step": 2640 |
|
}, |
|
{ |
|
"epoch": 20.17, |
|
"learning_rate": 1.643765903307888e-05, |
|
"loss": 0.5958, |
|
"step": 2642 |
|
}, |
|
{ |
|
"epoch": 20.18, |
|
"learning_rate": 1.6412213740458016e-05, |
|
"loss": 0.5763, |
|
"step": 2644 |
|
}, |
|
{ |
|
"epoch": 20.2, |
|
"learning_rate": 1.638676844783715e-05, |
|
"loss": 0.5673, |
|
"step": 2646 |
|
}, |
|
{ |
|
"epoch": 20.21, |
|
"learning_rate": 1.6361323155216285e-05, |
|
"loss": 0.4903, |
|
"step": 2648 |
|
}, |
|
{ |
|
"epoch": 20.23, |
|
"learning_rate": 1.633587786259542e-05, |
|
"loss": 0.414, |
|
"step": 2650 |
|
}, |
|
{ |
|
"epoch": 20.24, |
|
"learning_rate": 1.6310432569974555e-05, |
|
"loss": 0.5613, |
|
"step": 2652 |
|
}, |
|
{ |
|
"epoch": 20.26, |
|
"learning_rate": 1.628498727735369e-05, |
|
"loss": 0.5852, |
|
"step": 2654 |
|
}, |
|
{ |
|
"epoch": 20.27, |
|
"learning_rate": 1.6259541984732825e-05, |
|
"loss": 0.5332, |
|
"step": 2656 |
|
}, |
|
{ |
|
"epoch": 20.29, |
|
"learning_rate": 1.623409669211196e-05, |
|
"loss": 0.6841, |
|
"step": 2658 |
|
}, |
|
{ |
|
"epoch": 20.31, |
|
"learning_rate": 1.6208651399491094e-05, |
|
"loss": 0.4666, |
|
"step": 2660 |
|
}, |
|
{ |
|
"epoch": 20.32, |
|
"learning_rate": 1.618320610687023e-05, |
|
"loss": 0.5628, |
|
"step": 2662 |
|
}, |
|
{ |
|
"epoch": 20.34, |
|
"learning_rate": 1.6157760814249364e-05, |
|
"loss": 0.5494, |
|
"step": 2664 |
|
}, |
|
{ |
|
"epoch": 20.35, |
|
"learning_rate": 1.61323155216285e-05, |
|
"loss": 0.6103, |
|
"step": 2666 |
|
}, |
|
{ |
|
"epoch": 20.37, |
|
"learning_rate": 1.6106870229007634e-05, |
|
"loss": 0.4859, |
|
"step": 2668 |
|
}, |
|
{ |
|
"epoch": 20.38, |
|
"learning_rate": 1.608142493638677e-05, |
|
"loss": 0.5271, |
|
"step": 2670 |
|
}, |
|
{ |
|
"epoch": 20.4, |
|
"learning_rate": 1.6055979643765903e-05, |
|
"loss": 0.4128, |
|
"step": 2672 |
|
}, |
|
{ |
|
"epoch": 20.41, |
|
"learning_rate": 1.6030534351145038e-05, |
|
"loss": 0.5364, |
|
"step": 2674 |
|
}, |
|
{ |
|
"epoch": 20.43, |
|
"learning_rate": 1.6005089058524173e-05, |
|
"loss": 0.4226, |
|
"step": 2676 |
|
}, |
|
{ |
|
"epoch": 20.44, |
|
"learning_rate": 1.597964376590331e-05, |
|
"loss": 0.629, |
|
"step": 2678 |
|
}, |
|
{ |
|
"epoch": 20.46, |
|
"learning_rate": 1.5954198473282446e-05, |
|
"loss": 0.6607, |
|
"step": 2680 |
|
}, |
|
{ |
|
"epoch": 20.47, |
|
"learning_rate": 1.592875318066158e-05, |
|
"loss": 0.554, |
|
"step": 2682 |
|
}, |
|
{ |
|
"epoch": 20.49, |
|
"learning_rate": 1.5903307888040712e-05, |
|
"loss": 0.6167, |
|
"step": 2684 |
|
}, |
|
{ |
|
"epoch": 20.5, |
|
"learning_rate": 1.5877862595419847e-05, |
|
"loss": 0.4845, |
|
"step": 2686 |
|
}, |
|
{ |
|
"epoch": 20.52, |
|
"learning_rate": 1.5852417302798982e-05, |
|
"loss": 0.5157, |
|
"step": 2688 |
|
}, |
|
{ |
|
"epoch": 20.53, |
|
"learning_rate": 1.5826972010178117e-05, |
|
"loss": 0.4487, |
|
"step": 2690 |
|
}, |
|
{ |
|
"epoch": 20.55, |
|
"learning_rate": 1.580152671755725e-05, |
|
"loss": 0.6119, |
|
"step": 2692 |
|
}, |
|
{ |
|
"epoch": 20.56, |
|
"learning_rate": 1.5776081424936386e-05, |
|
"loss": 0.4845, |
|
"step": 2694 |
|
}, |
|
{ |
|
"epoch": 20.58, |
|
"learning_rate": 1.575063613231552e-05, |
|
"loss": 0.6076, |
|
"step": 2696 |
|
}, |
|
{ |
|
"epoch": 20.6, |
|
"learning_rate": 1.5725190839694656e-05, |
|
"loss": 0.4932, |
|
"step": 2698 |
|
}, |
|
{ |
|
"epoch": 20.61, |
|
"learning_rate": 1.569974554707379e-05, |
|
"loss": 0.5817, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 20.61, |
|
"eval_cer": 1.2683333333333333, |
|
"eval_loss": 3.7289490699768066, |
|
"eval_runtime": 14.8824, |
|
"eval_samples_per_second": 17.538, |
|
"eval_steps_per_second": 2.217, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 20.63, |
|
"learning_rate": 1.567430025445293e-05, |
|
"loss": 0.5956, |
|
"step": 2702 |
|
}, |
|
{ |
|
"epoch": 20.64, |
|
"learning_rate": 1.5648854961832064e-05, |
|
"loss": 0.7098, |
|
"step": 2704 |
|
}, |
|
{ |
|
"epoch": 20.66, |
|
"learning_rate": 1.56234096692112e-05, |
|
"loss": 0.5259, |
|
"step": 2706 |
|
}, |
|
{ |
|
"epoch": 20.67, |
|
"learning_rate": 1.5597964376590334e-05, |
|
"loss": 0.5975, |
|
"step": 2708 |
|
}, |
|
{ |
|
"epoch": 20.69, |
|
"learning_rate": 1.557251908396947e-05, |
|
"loss": 0.4992, |
|
"step": 2710 |
|
}, |
|
{ |
|
"epoch": 20.7, |
|
"learning_rate": 1.55470737913486e-05, |
|
"loss": 0.5587, |
|
"step": 2712 |
|
}, |
|
{ |
|
"epoch": 20.72, |
|
"learning_rate": 1.5521628498727735e-05, |
|
"loss": 0.5982, |
|
"step": 2714 |
|
}, |
|
{ |
|
"epoch": 20.73, |
|
"learning_rate": 1.549618320610687e-05, |
|
"loss": 0.4909, |
|
"step": 2716 |
|
}, |
|
{ |
|
"epoch": 20.75, |
|
"learning_rate": 1.5470737913486004e-05, |
|
"loss": 0.4484, |
|
"step": 2718 |
|
}, |
|
{ |
|
"epoch": 20.76, |
|
"learning_rate": 1.544529262086514e-05, |
|
"loss": 0.4779, |
|
"step": 2720 |
|
}, |
|
{ |
|
"epoch": 20.78, |
|
"learning_rate": 1.5419847328244274e-05, |
|
"loss": 0.6331, |
|
"step": 2722 |
|
}, |
|
{ |
|
"epoch": 20.79, |
|
"learning_rate": 1.539440203562341e-05, |
|
"loss": 0.4751, |
|
"step": 2724 |
|
}, |
|
{ |
|
"epoch": 20.81, |
|
"learning_rate": 1.5368956743002547e-05, |
|
"loss": 0.4198, |
|
"step": 2726 |
|
}, |
|
{ |
|
"epoch": 20.82, |
|
"learning_rate": 1.5343511450381682e-05, |
|
"loss": 0.6411, |
|
"step": 2728 |
|
}, |
|
{ |
|
"epoch": 20.84, |
|
"learning_rate": 1.5318066157760817e-05, |
|
"loss": 0.4794, |
|
"step": 2730 |
|
}, |
|
{ |
|
"epoch": 20.85, |
|
"learning_rate": 1.529262086513995e-05, |
|
"loss": 0.8884, |
|
"step": 2732 |
|
}, |
|
{ |
|
"epoch": 20.87, |
|
"learning_rate": 1.5267175572519086e-05, |
|
"loss": 0.6413, |
|
"step": 2734 |
|
}, |
|
{ |
|
"epoch": 20.89, |
|
"learning_rate": 1.524173027989822e-05, |
|
"loss": 0.5209, |
|
"step": 2736 |
|
}, |
|
{ |
|
"epoch": 20.9, |
|
"learning_rate": 1.5216284987277354e-05, |
|
"loss": 0.5557, |
|
"step": 2738 |
|
}, |
|
{ |
|
"epoch": 20.92, |
|
"learning_rate": 1.519083969465649e-05, |
|
"loss": 0.5334, |
|
"step": 2740 |
|
}, |
|
{ |
|
"epoch": 20.93, |
|
"learning_rate": 1.5165394402035624e-05, |
|
"loss": 0.5286, |
|
"step": 2742 |
|
}, |
|
{ |
|
"epoch": 20.95, |
|
"learning_rate": 1.5139949109414759e-05, |
|
"loss": 0.4974, |
|
"step": 2744 |
|
}, |
|
{ |
|
"epoch": 20.96, |
|
"learning_rate": 1.5114503816793894e-05, |
|
"loss": 0.4457, |
|
"step": 2746 |
|
}, |
|
{ |
|
"epoch": 20.98, |
|
"learning_rate": 1.5089058524173027e-05, |
|
"loss": 0.6571, |
|
"step": 2748 |
|
}, |
|
{ |
|
"epoch": 20.99, |
|
"learning_rate": 1.5063613231552162e-05, |
|
"loss": 0.4064, |
|
"step": 2750 |
|
}, |
|
{ |
|
"epoch": 21.01, |
|
"learning_rate": 1.50381679389313e-05, |
|
"loss": 0.6135, |
|
"step": 2752 |
|
}, |
|
{ |
|
"epoch": 21.02, |
|
"learning_rate": 1.5012722646310435e-05, |
|
"loss": 0.4958, |
|
"step": 2754 |
|
}, |
|
{ |
|
"epoch": 21.04, |
|
"learning_rate": 1.498727735368957e-05, |
|
"loss": 0.449, |
|
"step": 2756 |
|
}, |
|
{ |
|
"epoch": 21.05, |
|
"learning_rate": 1.4961832061068704e-05, |
|
"loss": 0.4873, |
|
"step": 2758 |
|
}, |
|
{ |
|
"epoch": 21.07, |
|
"learning_rate": 1.4936386768447837e-05, |
|
"loss": 0.5427, |
|
"step": 2760 |
|
}, |
|
{ |
|
"epoch": 21.08, |
|
"learning_rate": 1.4910941475826972e-05, |
|
"loss": 0.5974, |
|
"step": 2762 |
|
}, |
|
{ |
|
"epoch": 21.1, |
|
"learning_rate": 1.4885496183206107e-05, |
|
"loss": 0.5277, |
|
"step": 2764 |
|
}, |
|
{ |
|
"epoch": 21.11, |
|
"learning_rate": 1.4860050890585242e-05, |
|
"loss": 0.4605, |
|
"step": 2766 |
|
}, |
|
{ |
|
"epoch": 21.13, |
|
"learning_rate": 1.4834605597964377e-05, |
|
"loss": 0.489, |
|
"step": 2768 |
|
}, |
|
{ |
|
"epoch": 21.15, |
|
"learning_rate": 1.4809160305343512e-05, |
|
"loss": 0.3826, |
|
"step": 2770 |
|
}, |
|
{ |
|
"epoch": 21.16, |
|
"learning_rate": 1.4783715012722646e-05, |
|
"loss": 0.7243, |
|
"step": 2772 |
|
}, |
|
{ |
|
"epoch": 21.18, |
|
"learning_rate": 1.4758269720101781e-05, |
|
"loss": 0.4984, |
|
"step": 2774 |
|
}, |
|
{ |
|
"epoch": 21.19, |
|
"learning_rate": 1.4732824427480918e-05, |
|
"loss": 0.4792, |
|
"step": 2776 |
|
}, |
|
{ |
|
"epoch": 21.21, |
|
"learning_rate": 1.4707379134860053e-05, |
|
"loss": 0.5277, |
|
"step": 2778 |
|
}, |
|
{ |
|
"epoch": 21.22, |
|
"learning_rate": 1.4681933842239187e-05, |
|
"loss": 0.5729, |
|
"step": 2780 |
|
}, |
|
{ |
|
"epoch": 21.24, |
|
"learning_rate": 1.4656488549618322e-05, |
|
"loss": 0.5802, |
|
"step": 2782 |
|
}, |
|
{ |
|
"epoch": 21.25, |
|
"learning_rate": 1.4631043256997457e-05, |
|
"loss": 0.5329, |
|
"step": 2784 |
|
}, |
|
{ |
|
"epoch": 21.27, |
|
"learning_rate": 1.4605597964376592e-05, |
|
"loss": 0.4111, |
|
"step": 2786 |
|
}, |
|
{ |
|
"epoch": 21.28, |
|
"learning_rate": 1.4580152671755725e-05, |
|
"loss": 0.5507, |
|
"step": 2788 |
|
}, |
|
{ |
|
"epoch": 21.3, |
|
"learning_rate": 1.455470737913486e-05, |
|
"loss": 0.5248, |
|
"step": 2790 |
|
}, |
|
{ |
|
"epoch": 21.31, |
|
"learning_rate": 1.4529262086513995e-05, |
|
"loss": 0.4178, |
|
"step": 2792 |
|
}, |
|
{ |
|
"epoch": 21.33, |
|
"learning_rate": 1.450381679389313e-05, |
|
"loss": 0.512, |
|
"step": 2794 |
|
}, |
|
{ |
|
"epoch": 21.34, |
|
"learning_rate": 1.4478371501272264e-05, |
|
"loss": 0.4594, |
|
"step": 2796 |
|
}, |
|
{ |
|
"epoch": 21.36, |
|
"learning_rate": 1.44529262086514e-05, |
|
"loss": 0.4647, |
|
"step": 2798 |
|
}, |
|
{ |
|
"epoch": 21.37, |
|
"learning_rate": 1.4427480916030536e-05, |
|
"loss": 0.5574, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 21.37, |
|
"eval_cer": 0.45666666666666667, |
|
"eval_loss": 6.2158331871032715, |
|
"eval_runtime": 13.5284, |
|
"eval_samples_per_second": 19.293, |
|
"eval_steps_per_second": 2.439, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 21.39, |
|
"learning_rate": 1.440203562340967e-05, |
|
"loss": 0.5497, |
|
"step": 2802 |
|
}, |
|
{ |
|
"epoch": 21.4, |
|
"learning_rate": 1.4376590330788805e-05, |
|
"loss": 0.4333, |
|
"step": 2804 |
|
}, |
|
{ |
|
"epoch": 21.42, |
|
"learning_rate": 1.435114503816794e-05, |
|
"loss": 0.6657, |
|
"step": 2806 |
|
}, |
|
{ |
|
"epoch": 21.44, |
|
"learning_rate": 1.4325699745547075e-05, |
|
"loss": 0.5721, |
|
"step": 2808 |
|
}, |
|
{ |
|
"epoch": 21.45, |
|
"learning_rate": 1.430025445292621e-05, |
|
"loss": 0.6449, |
|
"step": 2810 |
|
}, |
|
{ |
|
"epoch": 21.47, |
|
"learning_rate": 1.4274809160305345e-05, |
|
"loss": 0.4825, |
|
"step": 2812 |
|
}, |
|
{ |
|
"epoch": 21.48, |
|
"learning_rate": 1.424936386768448e-05, |
|
"loss": 0.5418, |
|
"step": 2814 |
|
}, |
|
{ |
|
"epoch": 21.5, |
|
"learning_rate": 1.4223918575063613e-05, |
|
"loss": 0.511, |
|
"step": 2816 |
|
}, |
|
{ |
|
"epoch": 21.51, |
|
"learning_rate": 1.4198473282442747e-05, |
|
"loss": 0.3933, |
|
"step": 2818 |
|
}, |
|
{ |
|
"epoch": 21.53, |
|
"learning_rate": 1.4173027989821882e-05, |
|
"loss": 0.4775, |
|
"step": 2820 |
|
}, |
|
{ |
|
"epoch": 21.54, |
|
"learning_rate": 1.4147582697201017e-05, |
|
"loss": 0.5571, |
|
"step": 2822 |
|
}, |
|
{ |
|
"epoch": 21.56, |
|
"learning_rate": 1.4122137404580155e-05, |
|
"loss": 0.6556, |
|
"step": 2824 |
|
}, |
|
{ |
|
"epoch": 21.57, |
|
"learning_rate": 1.4096692111959288e-05, |
|
"loss": 0.5842, |
|
"step": 2826 |
|
}, |
|
{ |
|
"epoch": 21.59, |
|
"learning_rate": 1.4071246819338423e-05, |
|
"loss": 0.5444, |
|
"step": 2828 |
|
}, |
|
{ |
|
"epoch": 21.6, |
|
"learning_rate": 1.4045801526717558e-05, |
|
"loss": 0.5351, |
|
"step": 2830 |
|
}, |
|
{ |
|
"epoch": 21.62, |
|
"learning_rate": 1.4020356234096693e-05, |
|
"loss": 0.5468, |
|
"step": 2832 |
|
}, |
|
{ |
|
"epoch": 21.63, |
|
"learning_rate": 1.3994910941475828e-05, |
|
"loss": 0.6085, |
|
"step": 2834 |
|
}, |
|
{ |
|
"epoch": 21.65, |
|
"learning_rate": 1.3969465648854963e-05, |
|
"loss": 0.4829, |
|
"step": 2836 |
|
}, |
|
{ |
|
"epoch": 21.66, |
|
"learning_rate": 1.3944020356234097e-05, |
|
"loss": 0.4787, |
|
"step": 2838 |
|
}, |
|
{ |
|
"epoch": 21.68, |
|
"learning_rate": 1.3918575063613232e-05, |
|
"loss": 0.4858, |
|
"step": 2840 |
|
}, |
|
{ |
|
"epoch": 21.69, |
|
"learning_rate": 1.3893129770992367e-05, |
|
"loss": 0.4637, |
|
"step": 2842 |
|
}, |
|
{ |
|
"epoch": 21.71, |
|
"learning_rate": 1.38676844783715e-05, |
|
"loss": 0.6044, |
|
"step": 2844 |
|
}, |
|
{ |
|
"epoch": 21.73, |
|
"learning_rate": 1.3842239185750635e-05, |
|
"loss": 0.478, |
|
"step": 2846 |
|
}, |
|
{ |
|
"epoch": 21.74, |
|
"learning_rate": 1.3816793893129773e-05, |
|
"loss": 0.6719, |
|
"step": 2848 |
|
}, |
|
{ |
|
"epoch": 21.76, |
|
"learning_rate": 1.3791348600508908e-05, |
|
"loss": 0.4391, |
|
"step": 2850 |
|
}, |
|
{ |
|
"epoch": 21.77, |
|
"learning_rate": 1.3765903307888043e-05, |
|
"loss": 0.5353, |
|
"step": 2852 |
|
}, |
|
{ |
|
"epoch": 21.79, |
|
"learning_rate": 1.3740458015267178e-05, |
|
"loss": 0.371, |
|
"step": 2854 |
|
}, |
|
{ |
|
"epoch": 21.8, |
|
"learning_rate": 1.3715012722646311e-05, |
|
"loss": 0.4898, |
|
"step": 2856 |
|
}, |
|
{ |
|
"epoch": 21.82, |
|
"learning_rate": 1.3689567430025446e-05, |
|
"loss": 0.5191, |
|
"step": 2858 |
|
}, |
|
{ |
|
"epoch": 21.83, |
|
"learning_rate": 1.366412213740458e-05, |
|
"loss": 0.4718, |
|
"step": 2860 |
|
}, |
|
{ |
|
"epoch": 21.85, |
|
"learning_rate": 1.3638676844783715e-05, |
|
"loss": 0.4079, |
|
"step": 2862 |
|
}, |
|
{ |
|
"epoch": 21.86, |
|
"learning_rate": 1.361323155216285e-05, |
|
"loss": 0.8327, |
|
"step": 2864 |
|
}, |
|
{ |
|
"epoch": 21.88, |
|
"learning_rate": 1.3587786259541985e-05, |
|
"loss": 0.5435, |
|
"step": 2866 |
|
}, |
|
{ |
|
"epoch": 21.89, |
|
"learning_rate": 1.356234096692112e-05, |
|
"loss": 0.4663, |
|
"step": 2868 |
|
}, |
|
{ |
|
"epoch": 21.91, |
|
"learning_rate": 1.3536895674300255e-05, |
|
"loss": 0.468, |
|
"step": 2870 |
|
}, |
|
{ |
|
"epoch": 21.92, |
|
"learning_rate": 1.3511450381679388e-05, |
|
"loss": 0.4782, |
|
"step": 2872 |
|
}, |
|
{ |
|
"epoch": 21.94, |
|
"learning_rate": 1.3486005089058526e-05, |
|
"loss": 0.5288, |
|
"step": 2874 |
|
}, |
|
{ |
|
"epoch": 21.95, |
|
"learning_rate": 1.3460559796437661e-05, |
|
"loss": 0.5454, |
|
"step": 2876 |
|
}, |
|
{ |
|
"epoch": 21.97, |
|
"learning_rate": 1.3435114503816796e-05, |
|
"loss": 0.4063, |
|
"step": 2878 |
|
}, |
|
{ |
|
"epoch": 21.98, |
|
"learning_rate": 1.340966921119593e-05, |
|
"loss": 0.5518, |
|
"step": 2880 |
|
}, |
|
{ |
|
"epoch": 22.0, |
|
"learning_rate": 1.3384223918575065e-05, |
|
"loss": 0.615, |
|
"step": 2882 |
|
}, |
|
{ |
|
"epoch": 22.02, |
|
"learning_rate": 1.3358778625954198e-05, |
|
"loss": 0.4139, |
|
"step": 2884 |
|
}, |
|
{ |
|
"epoch": 22.03, |
|
"learning_rate": 1.3333333333333333e-05, |
|
"loss": 0.4635, |
|
"step": 2886 |
|
}, |
|
{ |
|
"epoch": 22.05, |
|
"learning_rate": 1.3307888040712468e-05, |
|
"loss": 0.5414, |
|
"step": 2888 |
|
}, |
|
{ |
|
"epoch": 22.06, |
|
"learning_rate": 1.3282442748091603e-05, |
|
"loss": 0.4512, |
|
"step": 2890 |
|
}, |
|
{ |
|
"epoch": 22.08, |
|
"learning_rate": 1.3256997455470738e-05, |
|
"loss": 0.4532, |
|
"step": 2892 |
|
}, |
|
{ |
|
"epoch": 22.09, |
|
"learning_rate": 1.3231552162849873e-05, |
|
"loss": 0.612, |
|
"step": 2894 |
|
}, |
|
{ |
|
"epoch": 22.11, |
|
"learning_rate": 1.3206106870229007e-05, |
|
"loss": 0.4963, |
|
"step": 2896 |
|
}, |
|
{ |
|
"epoch": 22.12, |
|
"learning_rate": 1.3180661577608144e-05, |
|
"loss": 0.7695, |
|
"step": 2898 |
|
}, |
|
{ |
|
"epoch": 22.14, |
|
"learning_rate": 1.3155216284987279e-05, |
|
"loss": 0.3769, |
|
"step": 2900 |
|
}, |
|
{ |
|
"epoch": 22.14, |
|
"eval_cer": 1.0666666666666667, |
|
"eval_loss": 3.7434914112091064, |
|
"eval_runtime": 14.3075, |
|
"eval_samples_per_second": 18.242, |
|
"eval_steps_per_second": 2.306, |
|
"step": 2900 |
|
}, |
|
{ |
|
"epoch": 22.15, |
|
"learning_rate": 1.3129770992366414e-05, |
|
"loss": 0.5136, |
|
"step": 2902 |
|
}, |
|
{ |
|
"epoch": 22.17, |
|
"learning_rate": 1.3104325699745548e-05, |
|
"loss": 0.4998, |
|
"step": 2904 |
|
}, |
|
{ |
|
"epoch": 22.18, |
|
"learning_rate": 1.3078880407124683e-05, |
|
"loss": 0.463, |
|
"step": 2906 |
|
}, |
|
{ |
|
"epoch": 22.2, |
|
"learning_rate": 1.3053435114503818e-05, |
|
"loss": 0.7542, |
|
"step": 2908 |
|
}, |
|
{ |
|
"epoch": 22.21, |
|
"learning_rate": 1.3027989821882953e-05, |
|
"loss": 0.5518, |
|
"step": 2910 |
|
}, |
|
{ |
|
"epoch": 22.23, |
|
"learning_rate": 1.3002544529262086e-05, |
|
"loss": 0.4916, |
|
"step": 2912 |
|
}, |
|
{ |
|
"epoch": 22.24, |
|
"learning_rate": 1.2977099236641221e-05, |
|
"loss": 0.5718, |
|
"step": 2914 |
|
}, |
|
{ |
|
"epoch": 22.26, |
|
"learning_rate": 1.2951653944020356e-05, |
|
"loss": 0.4251, |
|
"step": 2916 |
|
}, |
|
{ |
|
"epoch": 22.27, |
|
"learning_rate": 1.292620865139949e-05, |
|
"loss": 0.5434, |
|
"step": 2918 |
|
}, |
|
{ |
|
"epoch": 22.29, |
|
"learning_rate": 1.2900763358778625e-05, |
|
"loss": 0.553, |
|
"step": 2920 |
|
}, |
|
{ |
|
"epoch": 22.31, |
|
"learning_rate": 1.2875318066157762e-05, |
|
"loss": 0.5578, |
|
"step": 2922 |
|
}, |
|
{ |
|
"epoch": 22.32, |
|
"learning_rate": 1.2849872773536897e-05, |
|
"loss": 0.481, |
|
"step": 2924 |
|
}, |
|
{ |
|
"epoch": 22.34, |
|
"learning_rate": 1.2824427480916032e-05, |
|
"loss": 0.4623, |
|
"step": 2926 |
|
}, |
|
{ |
|
"epoch": 22.35, |
|
"learning_rate": 1.2798982188295166e-05, |
|
"loss": 0.4564, |
|
"step": 2928 |
|
}, |
|
{ |
|
"epoch": 22.37, |
|
"learning_rate": 1.2773536895674301e-05, |
|
"loss": 0.4327, |
|
"step": 2930 |
|
}, |
|
{ |
|
"epoch": 22.38, |
|
"learning_rate": 1.2748091603053436e-05, |
|
"loss": 0.4521, |
|
"step": 2932 |
|
}, |
|
{ |
|
"epoch": 22.4, |
|
"learning_rate": 1.2722646310432571e-05, |
|
"loss": 0.4404, |
|
"step": 2934 |
|
}, |
|
{ |
|
"epoch": 22.41, |
|
"learning_rate": 1.2697201017811706e-05, |
|
"loss": 0.5332, |
|
"step": 2936 |
|
}, |
|
{ |
|
"epoch": 22.43, |
|
"learning_rate": 1.267175572519084e-05, |
|
"loss": 0.6079, |
|
"step": 2938 |
|
}, |
|
{ |
|
"epoch": 22.44, |
|
"learning_rate": 1.2646310432569974e-05, |
|
"loss": 0.4472, |
|
"step": 2940 |
|
}, |
|
{ |
|
"epoch": 22.46, |
|
"learning_rate": 1.2620865139949108e-05, |
|
"loss": 0.5221, |
|
"step": 2942 |
|
}, |
|
{ |
|
"epoch": 22.47, |
|
"learning_rate": 1.2595419847328243e-05, |
|
"loss": 0.43, |
|
"step": 2944 |
|
}, |
|
{ |
|
"epoch": 22.49, |
|
"learning_rate": 1.2569974554707382e-05, |
|
"loss": 0.4141, |
|
"step": 2946 |
|
}, |
|
{ |
|
"epoch": 22.5, |
|
"learning_rate": 1.2544529262086516e-05, |
|
"loss": 0.4824, |
|
"step": 2948 |
|
}, |
|
{ |
|
"epoch": 22.52, |
|
"learning_rate": 1.251908396946565e-05, |
|
"loss": 0.5227, |
|
"step": 2950 |
|
}, |
|
{ |
|
"epoch": 22.53, |
|
"learning_rate": 1.2493638676844784e-05, |
|
"loss": 0.4538, |
|
"step": 2952 |
|
}, |
|
{ |
|
"epoch": 22.55, |
|
"learning_rate": 1.2468193384223919e-05, |
|
"loss": 0.4206, |
|
"step": 2954 |
|
}, |
|
{ |
|
"epoch": 22.56, |
|
"learning_rate": 1.2442748091603054e-05, |
|
"loss": 0.4709, |
|
"step": 2956 |
|
}, |
|
{ |
|
"epoch": 22.58, |
|
"learning_rate": 1.2417302798982189e-05, |
|
"loss": 0.5862, |
|
"step": 2958 |
|
}, |
|
{ |
|
"epoch": 22.6, |
|
"learning_rate": 1.2391857506361324e-05, |
|
"loss": 0.6123, |
|
"step": 2960 |
|
}, |
|
{ |
|
"epoch": 22.61, |
|
"learning_rate": 1.2366412213740458e-05, |
|
"loss": 0.6074, |
|
"step": 2962 |
|
}, |
|
{ |
|
"epoch": 22.63, |
|
"learning_rate": 1.2340966921119595e-05, |
|
"loss": 0.5028, |
|
"step": 2964 |
|
}, |
|
{ |
|
"epoch": 22.64, |
|
"learning_rate": 1.2315521628498728e-05, |
|
"loss": 0.5615, |
|
"step": 2966 |
|
}, |
|
{ |
|
"epoch": 22.66, |
|
"learning_rate": 1.2290076335877863e-05, |
|
"loss": 0.3996, |
|
"step": 2968 |
|
}, |
|
{ |
|
"epoch": 22.67, |
|
"learning_rate": 1.2264631043256998e-05, |
|
"loss": 0.6097, |
|
"step": 2970 |
|
}, |
|
{ |
|
"epoch": 22.69, |
|
"learning_rate": 1.2239185750636133e-05, |
|
"loss": 0.4233, |
|
"step": 2972 |
|
}, |
|
{ |
|
"epoch": 22.7, |
|
"learning_rate": 1.2213740458015267e-05, |
|
"loss": 0.5531, |
|
"step": 2974 |
|
}, |
|
{ |
|
"epoch": 22.72, |
|
"learning_rate": 1.2188295165394404e-05, |
|
"loss": 0.5276, |
|
"step": 2976 |
|
}, |
|
{ |
|
"epoch": 22.73, |
|
"learning_rate": 1.2162849872773539e-05, |
|
"loss": 0.5667, |
|
"step": 2978 |
|
}, |
|
{ |
|
"epoch": 22.75, |
|
"learning_rate": 1.2137404580152672e-05, |
|
"loss": 0.421, |
|
"step": 2980 |
|
}, |
|
{ |
|
"epoch": 22.76, |
|
"learning_rate": 1.2111959287531807e-05, |
|
"loss": 0.3711, |
|
"step": 2982 |
|
}, |
|
{ |
|
"epoch": 22.78, |
|
"learning_rate": 1.2086513994910942e-05, |
|
"loss": 0.6067, |
|
"step": 2984 |
|
}, |
|
{ |
|
"epoch": 22.79, |
|
"learning_rate": 1.2061068702290076e-05, |
|
"loss": 0.3717, |
|
"step": 2986 |
|
}, |
|
{ |
|
"epoch": 22.81, |
|
"learning_rate": 1.2035623409669211e-05, |
|
"loss": 0.6554, |
|
"step": 2988 |
|
}, |
|
{ |
|
"epoch": 22.82, |
|
"learning_rate": 1.2010178117048348e-05, |
|
"loss": 0.6248, |
|
"step": 2990 |
|
}, |
|
{ |
|
"epoch": 22.84, |
|
"learning_rate": 1.1984732824427483e-05, |
|
"loss": 0.4462, |
|
"step": 2992 |
|
}, |
|
{ |
|
"epoch": 22.85, |
|
"learning_rate": 1.1959287531806616e-05, |
|
"loss": 0.5547, |
|
"step": 2994 |
|
}, |
|
{ |
|
"epoch": 22.87, |
|
"learning_rate": 1.193384223918575e-05, |
|
"loss": 0.3156, |
|
"step": 2996 |
|
}, |
|
{ |
|
"epoch": 22.89, |
|
"learning_rate": 1.1908396946564885e-05, |
|
"loss": 0.413, |
|
"step": 2998 |
|
}, |
|
{ |
|
"epoch": 22.9, |
|
"learning_rate": 1.188295165394402e-05, |
|
"loss": 0.5049, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 22.9, |
|
"eval_cer": 0.655, |
|
"eval_loss": 4.91603422164917, |
|
"eval_runtime": 13.887, |
|
"eval_samples_per_second": 18.794, |
|
"eval_steps_per_second": 2.376, |
|
"step": 3000 |
|
} |
|
], |
|
"logging_steps": 2, |
|
"max_steps": 3930, |
|
"num_train_epochs": 30, |
|
"save_steps": 1000, |
|
"total_flos": 1.7876533212893676e+19, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|