{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.9568302779420463,
  "global_step": 10000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.03,
      "learning_rate": 4.9546619357382224e-05,
      "loss": 7.0936,
      "step": 100
    },
    {
      "epoch": 0.06,
      "learning_rate": 4.905381431105855e-05,
      "loss": 4.7571,
      "step": 200
    },
    {
      "epoch": 0.09,
      "learning_rate": 4.856100926473487e-05,
      "loss": 4.3059,
      "step": 300
    },
    {
      "epoch": 0.12,
      "learning_rate": 4.80682042184112e-05,
      "loss": 3.9402,
      "step": 400
    },
    {
      "epoch": 0.15,
      "learning_rate": 4.7575399172087526e-05,
      "loss": 4.5039,
      "step": 500
    },
    {
      "epoch": 0.18,
      "learning_rate": 4.708259412576385e-05,
      "loss": 5.0274,
      "step": 600
    },
    {
      "epoch": 0.21,
      "learning_rate": 4.658978907944018e-05,
      "loss": 4.4826,
      "step": 700
    },
    {
      "epoch": 0.24,
      "learning_rate": 4.60969840331165e-05,
      "loss": 4.2794,
      "step": 800
    },
    {
      "epoch": 0.27,
      "learning_rate": 4.560417898679283e-05,
      "loss": 4.1005,
      "step": 900
    },
    {
      "epoch": 0.3,
      "learning_rate": 4.5111373940469154e-05,
      "loss": 3.6633,
      "step": 1000
    },
    {
      "epoch": 0.3,
      "eval_cer": 1.0385071558472365,
      "eval_loss": 3.3380627632141113,
      "eval_runtime": 108.6612,
      "eval_samples_per_second": 13.832,
      "eval_steps_per_second": 3.46,
      "step": 1000
    },
    {
      "epoch": 0.33,
      "learning_rate": 4.461856889414548e-05,
      "loss": 3.2821,
      "step": 1100
    },
    {
      "epoch": 0.35,
      "learning_rate": 4.41257638478218e-05,
      "loss": 3.1914,
      "step": 1200
    },
    {
      "epoch": 0.38,
      "learning_rate": 4.363295880149813e-05,
      "loss": 3.0884,
      "step": 1300
    },
    {
      "epoch": 0.41,
      "learning_rate": 4.3140153755174456e-05,
      "loss": 2.9599,
      "step": 1400
    },
    {
      "epoch": 0.44,
      "learning_rate": 4.264734870885078e-05,
      "loss": 2.8756,
      "step": 1500
    },
    {
      "epoch": 0.47,
      "learning_rate": 4.215454366252711e-05,
      "loss": 2.7076,
      "step": 1600
    },
    {
      "epoch": 0.5,
      "learning_rate": 4.166173861620343e-05,
      "loss": 2.6674,
      "step": 1700
    },
    {
      "epoch": 0.53,
      "learning_rate": 4.116893356987976e-05,
      "loss": 2.5175,
      "step": 1800
    },
    {
      "epoch": 0.56,
      "learning_rate": 4.0676128523556084e-05,
      "loss": 2.5046,
      "step": 1900
    },
    {
      "epoch": 0.59,
      "learning_rate": 4.018332347723241e-05,
      "loss": 2.3915,
      "step": 2000
    },
    {
      "epoch": 0.59,
      "eval_cer": 0.6388866924962442,
      "eval_loss": 2.358058214187622,
      "eval_runtime": 91.2055,
      "eval_samples_per_second": 16.479,
      "eval_steps_per_second": 4.123,
      "step": 2000
    },
    {
      "epoch": 0.62,
      "learning_rate": 3.969051843090874e-05,
      "loss": 2.2984,
      "step": 2100
    },
    {
      "epoch": 0.65,
      "learning_rate": 3.919771338458506e-05,
      "loss": 2.2398,
      "step": 2200
    },
    {
      "epoch": 0.68,
      "learning_rate": 3.8704908338261386e-05,
      "loss": 2.1845,
      "step": 2300
    },
    {
      "epoch": 0.71,
      "learning_rate": 3.821210329193771e-05,
      "loss": 2.0596,
      "step": 2400
    },
    {
      "epoch": 0.74,
      "learning_rate": 3.771929824561404e-05,
      "loss": 1.9458,
      "step": 2500
    },
    {
      "epoch": 0.77,
      "learning_rate": 3.722649319929036e-05,
      "loss": 1.8901,
      "step": 2600
    },
    {
      "epoch": 0.8,
      "learning_rate": 3.673368815296669e-05,
      "loss": 1.8351,
      "step": 2700
    },
    {
      "epoch": 0.83,
      "learning_rate": 3.6240883106643014e-05,
      "loss": 1.6854,
      "step": 2800
    },
    {
      "epoch": 0.86,
      "learning_rate": 3.574807806031934e-05,
      "loss": 1.6954,
      "step": 2900
    },
    {
      "epoch": 0.89,
      "learning_rate": 3.525527301399567e-05,
      "loss": 1.5061,
      "step": 3000
    },
    {
      "epoch": 0.89,
      "eval_cer": 0.47789989720882425,
      "eval_loss": 1.4634768962860107,
      "eval_runtime": 93.897,
      "eval_samples_per_second": 16.007,
      "eval_steps_per_second": 4.004,
      "step": 3000
    },
    {
      "epoch": 0.92,
      "learning_rate": 3.476246796767199e-05,
      "loss": 1.5024,
      "step": 3100
    },
    {
      "epoch": 0.95,
      "learning_rate": 3.4269662921348316e-05,
      "loss": 1.5311,
      "step": 3200
    },
    {
      "epoch": 0.98,
      "learning_rate": 3.377685787502464e-05,
      "loss": 1.334,
      "step": 3300
    },
    {
      "epoch": 1.01,
      "learning_rate": 3.328405282870097e-05,
      "loss": 1.2053,
      "step": 3400
    },
    {
      "epoch": 1.03,
      "learning_rate": 3.27912477823773e-05,
      "loss": 1.1355,
      "step": 3500
    },
    {
      "epoch": 1.06,
      "learning_rate": 3.229844273605362e-05,
      "loss": 1.0514,
      "step": 3600
    },
    {
      "epoch": 1.09,
      "learning_rate": 3.1805637689729944e-05,
      "loss": 0.9352,
      "step": 3700
    },
    {
      "epoch": 1.12,
      "learning_rate": 3.131283264340627e-05,
      "loss": 0.8822,
      "step": 3800
    },
    {
      "epoch": 1.15,
      "learning_rate": 3.08200275970826e-05,
      "loss": 0.9451,
      "step": 3900
    },
    {
      "epoch": 1.18,
      "learning_rate": 3.0327222550758922e-05,
      "loss": 0.8043,
      "step": 4000
    },
    {
      "epoch": 1.18,
      "eval_cer": 0.34268996599984186,
      "eval_loss": 0.826034426689148,
      "eval_runtime": 95.3049,
      "eval_samples_per_second": 15.77,
      "eval_steps_per_second": 3.945,
      "step": 4000
    },
    {
      "epoch": 1.21,
      "learning_rate": 2.9834417504435245e-05,
      "loss": 0.8479,
      "step": 4100
    },
    {
      "epoch": 1.24,
      "learning_rate": 2.9341612458111573e-05,
      "loss": 0.7237,
      "step": 4200
    },
    {
      "epoch": 1.27,
      "learning_rate": 2.88488074117879e-05,
      "loss": 0.6919,
      "step": 4300
    },
    {
      "epoch": 1.3,
      "learning_rate": 2.8356002365464223e-05,
      "loss": 0.6698,
      "step": 4400
    },
    {
      "epoch": 1.33,
      "learning_rate": 2.786319731914055e-05,
      "loss": 0.5657,
      "step": 4500
    },
    {
      "epoch": 1.36,
      "learning_rate": 2.7370392272816874e-05,
      "loss": 0.6165,
      "step": 4600
    },
    {
      "epoch": 1.39,
      "learning_rate": 2.68775872264932e-05,
      "loss": 0.6387,
      "step": 4700
    },
    {
      "epoch": 1.42,
      "learning_rate": 2.6384782180169525e-05,
      "loss": 0.5372,
      "step": 4800
    },
    {
      "epoch": 1.45,
      "learning_rate": 2.5891977133845852e-05,
      "loss": 0.5254,
      "step": 4900
    },
    {
      "epoch": 1.48,
      "learning_rate": 2.539917208752218e-05,
      "loss": 0.46,
      "step": 5000
    },
    {
      "epoch": 1.48,
      "eval_cer": 0.22416383332015496,
      "eval_loss": 0.5074146389961243,
      "eval_runtime": 96.0964,
      "eval_samples_per_second": 15.641,
      "eval_steps_per_second": 3.913,
      "step": 5000
    },
    {
      "epoch": 1.51,
      "learning_rate": 2.4906367041198502e-05,
      "loss": 0.4557,
      "step": 5100
    },
    {
      "epoch": 1.54,
      "learning_rate": 2.441356199487483e-05,
      "loss": 0.445,
      "step": 5200
    },
    {
      "epoch": 1.57,
      "learning_rate": 2.3920756948551153e-05,
      "loss": 0.4523,
      "step": 5300
    },
    {
      "epoch": 1.6,
      "learning_rate": 2.342795190222748e-05,
      "loss": 0.3584,
      "step": 5400
    },
    {
      "epoch": 1.63,
      "learning_rate": 2.2935146855903804e-05,
      "loss": 0.4356,
      "step": 5500
    },
    {
      "epoch": 1.66,
      "learning_rate": 2.244234180958013e-05,
      "loss": 0.3989,
      "step": 5600
    },
    {
      "epoch": 1.69,
      "learning_rate": 2.1949536763256458e-05,
      "loss": 0.333,
      "step": 5700
    },
    {
      "epoch": 1.71,
      "learning_rate": 2.145673171693278e-05,
      "loss": 0.3701,
      "step": 5800
    },
    {
      "epoch": 1.74,
      "learning_rate": 2.096392667060911e-05,
      "loss": 0.4125,
      "step": 5900
    },
    {
      "epoch": 1.77,
      "learning_rate": 2.0471121624285432e-05,
      "loss": 0.3393,
      "step": 6000
    },
    {
      "epoch": 1.77,
      "eval_cer": 0.12192614849371393,
      "eval_loss": 0.2698688209056854,
      "eval_runtime": 95.6778,
      "eval_samples_per_second": 15.709,
      "eval_steps_per_second": 3.93,
      "step": 6000
    },
    {
      "epoch": 1.8,
      "learning_rate": 1.997831657796176e-05,
      "loss": 0.2519,
      "step": 6100
    },
    {
      "epoch": 1.83,
      "learning_rate": 1.9485511531638083e-05,
      "loss": 0.2175,
      "step": 6200
    },
    {
      "epoch": 1.86,
      "learning_rate": 1.899270648531441e-05,
      "loss": 0.2418,
      "step": 6300
    },
    {
      "epoch": 1.89,
      "learning_rate": 1.8499901438990734e-05,
      "loss": 0.2157,
      "step": 6400
    },
    {
      "epoch": 1.92,
      "learning_rate": 1.800709639266706e-05,
      "loss": 0.2653,
      "step": 6500
    },
    {
      "epoch": 1.95,
      "learning_rate": 1.7514291346343388e-05,
      "loss": 0.2321,
      "step": 6600
    },
    {
      "epoch": 1.98,
      "learning_rate": 1.702148630001971e-05,
      "loss": 0.1881,
      "step": 6700
    },
    {
      "epoch": 2.01,
      "learning_rate": 1.652868125369604e-05,
      "loss": 0.1818,
      "step": 6800
    },
    {
      "epoch": 2.04,
      "learning_rate": 1.6035876207372362e-05,
      "loss": 0.1204,
      "step": 6900
    },
    {
      "epoch": 2.07,
      "learning_rate": 1.554307116104869e-05,
      "loss": 0.1077,
      "step": 7000
    },
    {
      "epoch": 2.07,
      "eval_cer": 0.09330275954771883,
      "eval_loss": 0.17944632470607758,
      "eval_runtime": 96.1807,
      "eval_samples_per_second": 15.627,
      "eval_steps_per_second": 3.909,
      "step": 7000
    },
    {
      "epoch": 2.1,
      "learning_rate": 1.5050266114725015e-05,
      "loss": 0.174,
      "step": 7100
    },
    {
      "epoch": 2.13,
      "learning_rate": 1.455746106840134e-05,
      "loss": 0.1222,
      "step": 7200
    },
    {
      "epoch": 2.16,
      "learning_rate": 1.4064656022077665e-05,
      "loss": 0.0985,
      "step": 7300
    },
    {
      "epoch": 2.19,
      "learning_rate": 1.357185097575399e-05,
      "loss": 0.0946,
      "step": 7400
    },
    {
      "epoch": 2.22,
      "learning_rate": 1.3079045929430316e-05,
      "loss": 0.0832,
      "step": 7500
    },
    {
      "epoch": 2.25,
      "learning_rate": 1.2586240883106643e-05,
      "loss": 0.074,
      "step": 7600
    },
    {
      "epoch": 2.28,
      "learning_rate": 1.209343583678297e-05,
      "loss": 0.0768,
      "step": 7700
    },
    {
      "epoch": 2.31,
      "learning_rate": 1.1600630790459296e-05,
      "loss": 0.1018,
      "step": 7800
    },
    {
      "epoch": 2.34,
      "learning_rate": 1.1107825744135621e-05,
      "loss": 0.0668,
      "step": 7900
    },
    {
      "epoch": 2.37,
      "learning_rate": 1.0615020697811946e-05,
      "loss": 0.063,
      "step": 8000
    },
    {
      "epoch": 2.37,
      "eval_cer": 0.06167470546374634,
      "eval_loss": 0.13426831364631653,
      "eval_runtime": 95.5453,
      "eval_samples_per_second": 15.731,
      "eval_steps_per_second": 3.935,
      "step": 8000
    },
    {
      "epoch": 2.4,
      "learning_rate": 1.0122215651488272e-05,
      "loss": 0.0742,
      "step": 8100
    },
    {
      "epoch": 2.42,
      "learning_rate": 9.629410605164597e-06,
      "loss": 0.0679,
      "step": 8200
    },
    {
      "epoch": 2.45,
      "learning_rate": 9.136605558840922e-06,
      "loss": 0.0818,
      "step": 8300
    },
    {
      "epoch": 2.48,
      "learning_rate": 8.643800512517248e-06,
      "loss": 0.0704,
      "step": 8400
    },
    {
      "epoch": 2.51,
      "learning_rate": 8.150995466193575e-06,
      "loss": 0.0537,
      "step": 8500
    },
    {
      "epoch": 2.54,
      "learning_rate": 7.6581904198699e-06,
      "loss": 0.0831,
      "step": 8600
    },
    {
      "epoch": 2.57,
      "learning_rate": 7.1653853735462255e-06,
      "loss": 0.0547,
      "step": 8700
    },
    {
      "epoch": 2.6,
      "learning_rate": 6.672580327222551e-06,
      "loss": 0.0765,
      "step": 8800
    },
    {
      "epoch": 2.63,
      "learning_rate": 6.179775280898876e-06,
      "loss": 0.0408,
      "step": 8900
    },
    {
      "epoch": 2.66,
      "learning_rate": 5.6869702345752024e-06,
      "loss": 0.0356,
      "step": 9000
    },
    {
      "epoch": 2.66,
      "eval_cer": 0.06918636830868981,
      "eval_loss": 0.0790172666311264,
      "eval_runtime": 97.3434,
      "eval_samples_per_second": 15.44,
      "eval_steps_per_second": 3.863,
      "step": 9000
    },
    {
      "epoch": 2.69,
      "learning_rate": 5.194165188251528e-06,
      "loss": 0.059,
      "step": 9100
    },
    {
      "epoch": 2.72,
      "learning_rate": 4.70628819239109e-06,
      "loss": 0.0775,
      "step": 9200
    },
    {
      "epoch": 2.75,
      "learning_rate": 4.213483146067416e-06,
      "loss": 0.058,
      "step": 9300
    },
    {
      "epoch": 2.78,
      "learning_rate": 3.7206780997437416e-06,
      "loss": 0.0327,
      "step": 9400
    },
    {
      "epoch": 2.81,
      "learning_rate": 3.227873053420067e-06,
      "loss": 0.0378,
      "step": 9500
    },
    {
      "epoch": 2.84,
      "learning_rate": 2.7350680070963927e-06,
      "loss": 0.0752,
      "step": 9600
    },
    {
      "epoch": 2.87,
      "learning_rate": 2.2422629607727185e-06,
      "loss": 0.0269,
      "step": 9700
    },
    {
      "epoch": 2.9,
      "learning_rate": 1.749457914449044e-06,
      "loss": 0.0359,
      "step": 9800
    },
    {
      "epoch": 2.93,
      "learning_rate": 1.2566528681253697e-06,
      "loss": 0.0345,
      "step": 9900
    },
    {
      "epoch": 2.96,
      "learning_rate": 7.638478218016952e-07,
      "loss": 0.0292,
      "step": 10000
    },
    {
      "epoch": 2.96,
      "eval_cer": 0.044595556258401205,
      "eval_loss": 0.06197139248251915,
      "eval_runtime": 96.6477,
      "eval_samples_per_second": 15.551,
      "eval_steps_per_second": 3.89,
      "step": 10000
    }
  ],
  "max_steps": 10146,
  "num_train_epochs": 3,
  "total_flos": 7.242528907589714e+18,
  "trial_name": null,
  "trial_params": null
}