{
  "best_metric": 0.753694919914759,
  "best_model_checkpoint": "./whisper-small-dialect_iraqi/checkpoint-3000",
  "epoch": 3.977724741447892,
  "eval_steps": 1000,
  "global_step": 5000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.02,
      "grad_norm": 90.6798324584961,
      "learning_rate": 5.000000000000001e-07,
      "loss": 4.6998,
      "step": 25
    },
    {
      "epoch": 0.04,
      "grad_norm": 40.06618118286133,
      "learning_rate": 1.0000000000000002e-06,
      "loss": 4.1109,
      "step": 50
    },
    {
      "epoch": 0.06,
      "grad_norm": 38.31050109863281,
      "learning_rate": 1.5e-06,
      "loss": 3.2388,
      "step": 75
    },
    {
      "epoch": 0.08,
      "grad_norm": 27.650033950805664,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 2.7981,
      "step": 100
    },
    {
      "epoch": 0.1,
      "grad_norm": 34.07307052612305,
      "learning_rate": 2.5e-06,
      "loss": 2.3226,
      "step": 125
    },
    {
      "epoch": 0.12,
      "grad_norm": 30.355411529541016,
      "learning_rate": 3e-06,
      "loss": 2.3817,
      "step": 150
    },
    {
      "epoch": 0.14,
      "grad_norm": 22.727617263793945,
      "learning_rate": 3.5e-06,
      "loss": 2.1619,
      "step": 175
    },
    {
      "epoch": 0.16,
      "grad_norm": 24.718725204467773,
      "learning_rate": 4.000000000000001e-06,
      "loss": 2.0719,
      "step": 200
    },
    {
      "epoch": 0.18,
      "grad_norm": 28.388458251953125,
      "learning_rate": 4.5e-06,
      "loss": 2.0441,
      "step": 225
    },
    {
      "epoch": 0.2,
      "grad_norm": 21.303863525390625,
      "learning_rate": 5e-06,
      "loss": 1.941,
      "step": 250
    },
    {
      "epoch": 0.22,
      "grad_norm": 27.430503845214844,
      "learning_rate": 5.500000000000001e-06,
      "loss": 1.9,
      "step": 275
    },
    {
      "epoch": 0.24,
      "grad_norm": 27.235267639160156,
      "learning_rate": 6e-06,
      "loss": 1.7483,
      "step": 300
    },
    {
      "epoch": 0.26,
      "grad_norm": 29.63104820251465,
      "learning_rate": 6.5000000000000004e-06,
      "loss": 1.6439,
      "step": 325
    },
    {
      "epoch": 0.28,
      "grad_norm": 21.515504837036133,
      "learning_rate": 7e-06,
      "loss": 1.5067,
      "step": 350
    },
    {
      "epoch": 0.3,
      "grad_norm": 26.370962142944336,
      "learning_rate": 7.500000000000001e-06,
      "loss": 1.3685,
      "step": 375
    },
    {
      "epoch": 0.32,
      "grad_norm": 25.11650276184082,
      "learning_rate": 8.000000000000001e-06,
      "loss": 1.4172,
      "step": 400
    },
    {
      "epoch": 0.34,
      "grad_norm": 25.06173324584961,
      "learning_rate": 8.5e-06,
      "loss": 1.5026,
      "step": 425
    },
    {
      "epoch": 0.36,
      "grad_norm": 24.87285614013672,
      "learning_rate": 9e-06,
      "loss": 1.449,
      "step": 450
    },
    {
      "epoch": 0.38,
      "grad_norm": 23.05367660522461,
      "learning_rate": 9.5e-06,
      "loss": 1.3377,
      "step": 475
    },
    {
      "epoch": 0.4,
      "grad_norm": 22.767385482788086,
      "learning_rate": 1e-05,
      "loss": 1.3833,
      "step": 500
    },
    {
      "epoch": 0.42,
      "grad_norm": 25.26249122619629,
      "learning_rate": 9.944444444444445e-06,
      "loss": 1.3704,
      "step": 525
    },
    {
      "epoch": 0.44,
      "grad_norm": 16.503665924072266,
      "learning_rate": 9.88888888888889e-06,
      "loss": 1.3313,
      "step": 550
    },
    {
      "epoch": 0.46,
      "grad_norm": 25.474660873413086,
      "learning_rate": 9.833333333333333e-06,
      "loss": 1.3546,
      "step": 575
    },
    {
      "epoch": 0.48,
      "grad_norm": 23.469463348388672,
      "learning_rate": 9.777777777777779e-06,
      "loss": 1.3364,
      "step": 600
    },
    {
      "epoch": 0.5,
      "grad_norm": 27.196256637573242,
      "learning_rate": 9.722222222222223e-06,
      "loss": 1.3881,
      "step": 625
    },
    {
      "epoch": 0.52,
      "grad_norm": 28.02334213256836,
      "learning_rate": 9.666666666666667e-06,
      "loss": 1.476,
      "step": 650
    },
    {
      "epoch": 0.54,
      "grad_norm": 23.868146896362305,
      "learning_rate": 9.611111111111112e-06,
      "loss": 1.3418,
      "step": 675
    },
    {
      "epoch": 0.56,
      "grad_norm": 25.75493049621582,
      "learning_rate": 9.555555555555556e-06,
      "loss": 1.3658,
      "step": 700
    },
    {
      "epoch": 0.58,
      "grad_norm": 20.75641632080078,
      "learning_rate": 9.5e-06,
      "loss": 1.3521,
      "step": 725
    },
    {
      "epoch": 0.6,
      "grad_norm": 19.32164192199707,
      "learning_rate": 9.444444444444445e-06,
      "loss": 1.1973,
      "step": 750
    },
    {
      "epoch": 0.62,
      "grad_norm": 22.786579132080078,
      "learning_rate": 9.38888888888889e-06,
      "loss": 1.2699,
      "step": 775
    },
    {
      "epoch": 0.64,
      "grad_norm": 28.15203094482422,
      "learning_rate": 9.333333333333334e-06,
      "loss": 1.2875,
      "step": 800
    },
    {
      "epoch": 0.66,
      "grad_norm": 23.417707443237305,
      "learning_rate": 9.277777777777778e-06,
      "loss": 1.2642,
      "step": 825
    },
    {
      "epoch": 0.68,
      "grad_norm": 26.45885467529297,
      "learning_rate": 9.222222222222224e-06,
      "loss": 1.2766,
      "step": 850
    },
    {
      "epoch": 0.7,
      "grad_norm": 21.266761779785156,
      "learning_rate": 9.166666666666666e-06,
      "loss": 1.2071,
      "step": 875
    },
    {
      "epoch": 0.72,
      "grad_norm": 29.940473556518555,
      "learning_rate": 9.111111111111112e-06,
      "loss": 1.3464,
      "step": 900
    },
    {
      "epoch": 0.74,
      "grad_norm": 23.371288299560547,
      "learning_rate": 9.055555555555556e-06,
      "loss": 1.2047,
      "step": 925
    },
    {
      "epoch": 0.76,
      "grad_norm": 29.867013931274414,
      "learning_rate": 9e-06,
      "loss": 1.2475,
      "step": 950
    },
    {
      "epoch": 0.78,
      "grad_norm": 25.816246032714844,
      "learning_rate": 8.944444444444446e-06,
      "loss": 1.3114,
      "step": 975
    },
    {
      "epoch": 0.8,
      "grad_norm": 25.020904541015625,
      "learning_rate": 8.888888888888888e-06,
      "loss": 1.2314,
      "step": 1000
    },
    {
      "epoch": 0.8,
      "eval_loss": 1.2611169815063477,
      "eval_runtime": 759.5723,
      "eval_samples_per_second": 3.308,
      "eval_steps_per_second": 0.415,
      "eval_wer": 0.8315116518869871,
      "step": 1000
    },
    {
      "epoch": 0.82,
      "grad_norm": 26.749116897583008,
      "learning_rate": 8.833333333333334e-06,
      "loss": 1.3321,
      "step": 1025
    },
    {
      "epoch": 0.84,
      "grad_norm": 25.487159729003906,
      "learning_rate": 8.777777777777778e-06,
      "loss": 1.2774,
      "step": 1050
    },
    {
      "epoch": 0.86,
      "grad_norm": 24.627052307128906,
      "learning_rate": 8.722222222222224e-06,
      "loss": 1.2631,
      "step": 1075
    },
    {
      "epoch": 0.88,
      "grad_norm": 30.22520637512207,
      "learning_rate": 8.666666666666668e-06,
      "loss": 1.3112,
      "step": 1100
    },
    {
      "epoch": 0.89,
      "grad_norm": 24.753944396972656,
      "learning_rate": 8.611111111111112e-06,
      "loss": 1.2371,
      "step": 1125
    },
    {
      "epoch": 0.91,
      "grad_norm": 28.411720275878906,
      "learning_rate": 8.555555555555556e-06,
      "loss": 1.2851,
      "step": 1150
    },
    {
      "epoch": 0.93,
      "grad_norm": 15.954439163208008,
      "learning_rate": 8.5e-06,
      "loss": 1.1501,
      "step": 1175
    },
    {
      "epoch": 0.95,
      "grad_norm": 20.5186710357666,
      "learning_rate": 8.444444444444446e-06,
      "loss": 1.3305,
      "step": 1200
    },
    {
      "epoch": 0.97,
      "grad_norm": 21.135791778564453,
      "learning_rate": 8.38888888888889e-06,
      "loss": 1.2152,
      "step": 1225
    },
    {
      "epoch": 0.99,
      "grad_norm": 23.05451202392578,
      "learning_rate": 8.333333333333334e-06,
      "loss": 1.2342,
      "step": 1250
    },
    {
      "epoch": 1.01,
      "grad_norm": 14.508014678955078,
      "learning_rate": 8.277777777777778e-06,
      "loss": 1.088,
      "step": 1275
    },
    {
      "epoch": 1.03,
      "grad_norm": 22.216018676757812,
      "learning_rate": 8.222222222222222e-06,
      "loss": 0.9067,
      "step": 1300
    },
    {
      "epoch": 1.05,
      "grad_norm": 18.821016311645508,
      "learning_rate": 8.166666666666668e-06,
      "loss": 0.9273,
      "step": 1325
    },
    {
      "epoch": 1.07,
      "grad_norm": 14.82918930053711,
      "learning_rate": 8.111111111111112e-06,
      "loss": 0.8638,
      "step": 1350
    },
    {
      "epoch": 1.09,
      "grad_norm": 14.661232948303223,
      "learning_rate": 8.055555555555557e-06,
      "loss": 0.9233,
      "step": 1375
    },
    {
      "epoch": 1.11,
      "grad_norm": 12.39109992980957,
      "learning_rate": 8.000000000000001e-06,
      "loss": 0.9203,
      "step": 1400
    },
    {
      "epoch": 1.13,
      "grad_norm": 16.905513763427734,
      "learning_rate": 7.944444444444445e-06,
      "loss": 0.9121,
      "step": 1425
    },
    {
      "epoch": 1.15,
      "grad_norm": 19.73763656616211,
      "learning_rate": 7.88888888888889e-06,
      "loss": 0.9762,
      "step": 1450
    },
    {
      "epoch": 1.17,
      "grad_norm": 20.832565307617188,
      "learning_rate": 7.833333333333333e-06,
      "loss": 1.0234,
      "step": 1475
    },
    {
      "epoch": 1.19,
      "grad_norm": 20.59532356262207,
      "learning_rate": 7.77777777777778e-06,
      "loss": 0.9885,
      "step": 1500
    },
    {
      "epoch": 1.21,
      "grad_norm": 19.251754760742188,
      "learning_rate": 7.722222222222223e-06,
      "loss": 0.8777,
      "step": 1525
    },
    {
      "epoch": 1.23,
      "grad_norm": 22.859533309936523,
      "learning_rate": 7.666666666666667e-06,
      "loss": 1.0234,
      "step": 1550
    },
    {
      "epoch": 1.25,
      "grad_norm": 18.167465209960938,
      "learning_rate": 7.611111111111111e-06,
      "loss": 0.9787,
      "step": 1575
    },
    {
      "epoch": 1.27,
      "grad_norm": 19.155288696289062,
      "learning_rate": 7.555555555555556e-06,
      "loss": 0.9028,
      "step": 1600
    },
    {
      "epoch": 1.29,
      "grad_norm": 14.16364574432373,
      "learning_rate": 7.500000000000001e-06,
      "loss": 0.8999,
      "step": 1625
    },
    {
      "epoch": 1.31,
      "grad_norm": 19.69369125366211,
      "learning_rate": 7.444444444444445e-06,
      "loss": 0.9565,
      "step": 1650
    },
    {
      "epoch": 1.33,
      "grad_norm": 18.87957763671875,
      "learning_rate": 7.38888888888889e-06,
      "loss": 0.929,
      "step": 1675
    },
    {
      "epoch": 1.35,
      "grad_norm": 13.816971778869629,
      "learning_rate": 7.333333333333333e-06,
      "loss": 0.9804,
      "step": 1700
    },
    {
      "epoch": 1.37,
      "grad_norm": 21.371904373168945,
      "learning_rate": 7.277777777777778e-06,
      "loss": 0.9721,
      "step": 1725
    },
    {
      "epoch": 1.39,
      "grad_norm": 15.800612449645996,
      "learning_rate": 7.222222222222223e-06,
      "loss": 0.9186,
      "step": 1750
    },
    {
      "epoch": 1.41,
      "grad_norm": 24.303115844726562,
      "learning_rate": 7.166666666666667e-06,
      "loss": 0.9046,
      "step": 1775
    },
    {
      "epoch": 1.43,
      "grad_norm": 23.116640090942383,
      "learning_rate": 7.111111111111112e-06,
      "loss": 0.8983,
      "step": 1800
    },
    {
      "epoch": 1.45,
      "grad_norm": 20.9332332611084,
      "learning_rate": 7.055555555555557e-06,
      "loss": 0.9215,
      "step": 1825
    },
    {
      "epoch": 1.47,
      "grad_norm": 16.32611846923828,
      "learning_rate": 7e-06,
      "loss": 0.9387,
      "step": 1850
    },
    {
      "epoch": 1.49,
      "grad_norm": 12.740330696105957,
      "learning_rate": 6.944444444444445e-06,
      "loss": 0.8465,
      "step": 1875
    },
    {
      "epoch": 1.51,
      "grad_norm": 22.3834228515625,
      "learning_rate": 6.88888888888889e-06,
      "loss": 1.0026,
      "step": 1900
    },
    {
      "epoch": 1.53,
      "grad_norm": 15.032549858093262,
      "learning_rate": 6.833333333333334e-06,
      "loss": 0.9839,
      "step": 1925
    },
    {
      "epoch": 1.55,
      "grad_norm": 16.8016357421875,
      "learning_rate": 6.777777777777779e-06,
      "loss": 0.9477,
      "step": 1950
    },
    {
      "epoch": 1.57,
      "grad_norm": 17.123817443847656,
      "learning_rate": 6.7222222222222235e-06,
      "loss": 0.9238,
      "step": 1975
    },
    {
      "epoch": 1.59,
      "grad_norm": 23.146198272705078,
      "learning_rate": 6.666666666666667e-06,
      "loss": 0.8997,
      "step": 2000
    },
    {
      "epoch": 1.59,
      "eval_loss": 1.1737741231918335,
      "eval_runtime": 720.7256,
      "eval_samples_per_second": 3.487,
      "eval_steps_per_second": 0.437,
      "eval_wer": 0.8439540798790128,
      "step": 2000
    },
    {
      "epoch": 1.61,
      "grad_norm": 14.144861221313477,
      "learning_rate": 6.6111111111111115e-06,
      "loss": 0.9259,
      "step": 2025
    },
    {
      "epoch": 1.63,
      "grad_norm": 23.315628051757812,
      "learning_rate": 6.555555555555556e-06,
      "loss": 0.918,
      "step": 2050
    },
    {
      "epoch": 1.65,
      "grad_norm": 19.603803634643555,
      "learning_rate": 6.5000000000000004e-06,
      "loss": 0.8248,
      "step": 2075
    },
    {
      "epoch": 1.67,
      "grad_norm": 23.32997703552246,
      "learning_rate": 6.444444444444445e-06,
      "loss": 0.994,
      "step": 2100
    },
    {
      "epoch": 1.69,
      "grad_norm": 14.22852897644043,
      "learning_rate": 6.3888888888888885e-06,
      "loss": 0.8911,
      "step": 2125
    },
    {
      "epoch": 1.71,
      "grad_norm": 25.88995933532715,
      "learning_rate": 6.333333333333333e-06,
      "loss": 0.9571,
      "step": 2150
    },
    {
      "epoch": 1.73,
      "grad_norm": 14.300514221191406,
      "learning_rate": 6.277777777777778e-06,
      "loss": 0.9271,
      "step": 2175
    },
    {
      "epoch": 1.75,
      "grad_norm": 20.620363235473633,
      "learning_rate": 6.222222222222223e-06,
      "loss": 0.9448,
      "step": 2200
    },
    {
      "epoch": 1.77,
      "grad_norm": 22.74941062927246,
      "learning_rate": 6.166666666666667e-06,
      "loss": 0.8855,
      "step": 2225
    },
    {
      "epoch": 1.79,
      "grad_norm": 23.452943801879883,
      "learning_rate": 6.111111111111112e-06,
      "loss": 0.9769,
      "step": 2250
    },
    {
      "epoch": 1.81,
      "grad_norm": 18.53643035888672,
      "learning_rate": 6.055555555555555e-06,
      "loss": 0.9351,
      "step": 2275
    },
    {
      "epoch": 1.83,
      "grad_norm": 12.938403129577637,
      "learning_rate": 6e-06,
      "loss": 0.886,
      "step": 2300
    },
    {
      "epoch": 1.85,
      "grad_norm": 21.200199127197266,
      "learning_rate": 5.944444444444445e-06,
      "loss": 0.893,
      "step": 2325
    },
    {
      "epoch": 1.87,
      "grad_norm": 21.017866134643555,
      "learning_rate": 5.88888888888889e-06,
      "loss": 0.9721,
      "step": 2350
    },
    {
      "epoch": 1.89,
      "grad_norm": 14.922087669372559,
      "learning_rate": 5.833333333333334e-06,
      "loss": 0.9119,
      "step": 2375
    },
    {
      "epoch": 1.91,
      "grad_norm": 18.216527938842773,
      "learning_rate": 5.777777777777778e-06,
      "loss": 0.8708,
      "step": 2400
    },
    {
      "epoch": 1.93,
      "grad_norm": 19.921646118164062,
      "learning_rate": 5.722222222222222e-06,
      "loss": 0.913,
      "step": 2425
    },
    {
      "epoch": 1.95,
      "grad_norm": 19.92595863342285,
      "learning_rate": 5.666666666666667e-06,
      "loss": 0.9116,
      "step": 2450
    },
    {
      "epoch": 1.97,
      "grad_norm": 21.927392959594727,
      "learning_rate": 5.611111111111112e-06,
      "loss": 0.9042,
      "step": 2475
    },
    {
      "epoch": 1.99,
      "grad_norm": 16.251340866088867,
      "learning_rate": 5.555555555555557e-06,
      "loss": 0.8799,
      "step": 2500
    },
    {
      "epoch": 2.01,
      "grad_norm": 12.66164493560791,
      "learning_rate": 5.500000000000001e-06,
      "loss": 0.6893,
      "step": 2525
    },
    {
      "epoch": 2.03,
      "grad_norm": 16.812175750732422,
      "learning_rate": 5.444444444444445e-06,
      "loss": 0.5815,
      "step": 2550
    },
    {
      "epoch": 2.05,
      "grad_norm": 24.777708053588867,
      "learning_rate": 5.388888888888889e-06,
      "loss": 0.6612,
      "step": 2575
    },
    {
      "epoch": 2.07,
      "grad_norm": 18.211021423339844,
      "learning_rate": 5.333333333333334e-06,
      "loss": 0.6015,
      "step": 2600
    },
    {
      "epoch": 2.09,
      "grad_norm": 16.52762222290039,
      "learning_rate": 5.2777777777777785e-06,
      "loss": 0.6056,
      "step": 2625
    },
    {
      "epoch": 2.11,
      "grad_norm": 14.783591270446777,
      "learning_rate": 5.2222222222222226e-06,
      "loss": 0.6516,
      "step": 2650
    },
    {
      "epoch": 2.13,
      "grad_norm": 14.879830360412598,
      "learning_rate": 5.1666666666666675e-06,
      "loss": 0.6334,
      "step": 2675
    },
    {
      "epoch": 2.15,
      "grad_norm": 12.205127716064453,
      "learning_rate": 5.1111111111111115e-06,
      "loss": 0.5561,
      "step": 2700
    },
    {
      "epoch": 2.17,
      "grad_norm": 21.069787979125977,
      "learning_rate": 5.0555555555555555e-06,
      "loss": 0.6318,
      "step": 2725
    },
    {
      "epoch": 2.19,
      "grad_norm": 19.374557495117188,
      "learning_rate": 5e-06,
      "loss": 0.6581,
      "step": 2750
    },
    {
      "epoch": 2.21,
      "grad_norm": 12.187582015991211,
      "learning_rate": 4.944444444444445e-06,
      "loss": 0.5867,
      "step": 2775
    },
    {
      "epoch": 2.23,
      "grad_norm": 10.92504596710205,
      "learning_rate": 4.888888888888889e-06,
      "loss": 0.6629,
      "step": 2800
    },
    {
      "epoch": 2.25,
      "grad_norm": 16.010311126708984,
      "learning_rate": 4.833333333333333e-06,
      "loss": 0.6236,
      "step": 2825
    },
    {
      "epoch": 2.27,
      "grad_norm": 17.286380767822266,
      "learning_rate": 4.777777777777778e-06,
      "loss": 0.6444,
      "step": 2850
    },
    {
      "epoch": 2.29,
      "grad_norm": 20.500370025634766,
      "learning_rate": 4.722222222222222e-06,
      "loss": 0.6486,
      "step": 2875
    },
    {
      "epoch": 2.31,
      "grad_norm": 8.12575912475586,
      "learning_rate": 4.666666666666667e-06,
      "loss": 0.647,
      "step": 2900
    },
    {
      "epoch": 2.33,
      "grad_norm": 14.205204010009766,
      "learning_rate": 4.611111111111112e-06,
      "loss": 0.6264,
      "step": 2925
    },
    {
      "epoch": 2.35,
      "grad_norm": 19.79119300842285,
      "learning_rate": 4.555555555555556e-06,
      "loss": 0.6556,
      "step": 2950
    },
    {
      "epoch": 2.37,
      "grad_norm": 16.10354995727539,
      "learning_rate": 4.5e-06,
      "loss": 0.724,
      "step": 2975
    },
    {
      "epoch": 2.39,
      "grad_norm": 15.188028335571289,
      "learning_rate": 4.444444444444444e-06,
      "loss": 0.6654,
      "step": 3000
    },
    {
      "epoch": 2.39,
      "eval_loss": 1.176345705986023,
      "eval_runtime": 739.8672,
      "eval_samples_per_second": 3.397,
      "eval_steps_per_second": 0.426,
      "eval_wer": 0.753694919914759,
      "step": 3000
    },
    {
      "epoch": 2.41,
      "grad_norm": 12.310816764831543,
      "learning_rate": 4.388888888888889e-06,
      "loss": 0.6626,
      "step": 3025
    },
    {
      "epoch": 2.43,
      "grad_norm": 16.34236717224121,
      "learning_rate": 4.333333333333334e-06,
      "loss": 0.5884,
      "step": 3050
    },
    {
      "epoch": 2.45,
      "grad_norm": 21.42963981628418,
      "learning_rate": 4.277777777777778e-06,
      "loss": 0.6561,
      "step": 3075
    },
    {
      "epoch": 2.47,
      "grad_norm": 13.815032958984375,
      "learning_rate": 4.222222222222223e-06,
      "loss": 0.5719,
      "step": 3100
    },
    {
      "epoch": 2.49,
      "grad_norm": 17.99472999572754,
      "learning_rate": 4.166666666666667e-06,
      "loss": 0.6684,
      "step": 3125
    },
    {
      "epoch": 2.51,
      "grad_norm": 16.833436965942383,
      "learning_rate": 4.111111111111111e-06,
      "loss": 0.6566,
      "step": 3150
    },
    {
      "epoch": 2.53,
      "grad_norm": 18.723281860351562,
      "learning_rate": 4.055555555555556e-06,
      "loss": 0.5795,
      "step": 3175
    },
    {
      "epoch": 2.55,
      "grad_norm": 14.792190551757812,
      "learning_rate": 4.000000000000001e-06,
      "loss": 0.6239,
      "step": 3200
    },
    {
      "epoch": 2.57,
      "grad_norm": 12.845802307128906,
      "learning_rate": 3.944444444444445e-06,
      "loss": 0.6182,
      "step": 3225
    },
    {
      "epoch": 2.59,
      "grad_norm": 15.569611549377441,
      "learning_rate": 3.88888888888889e-06,
      "loss": 0.63,
      "step": 3250
    },
    {
      "epoch": 2.61,
      "grad_norm": 11.007694244384766,
      "learning_rate": 3.833333333333334e-06,
      "loss": 0.6106,
      "step": 3275
    },
    {
      "epoch": 2.63,
      "grad_norm": 18.574846267700195,
      "learning_rate": 3.777777777777778e-06,
      "loss": 0.5999,
      "step": 3300
    },
    {
      "epoch": 2.65,
      "grad_norm": 15.045463562011719,
      "learning_rate": 3.7222222222222225e-06,
      "loss": 0.6264,
      "step": 3325
    },
    {
      "epoch": 2.67,
      "grad_norm": 14.46103572845459,
      "learning_rate": 3.6666666666666666e-06,
      "loss": 0.6492,
      "step": 3350
    },
    {
      "epoch": 2.68,
      "grad_norm": 21.062015533447266,
      "learning_rate": 3.6111111111111115e-06,
      "loss": 0.6262,
      "step": 3375
    },
    {
      "epoch": 2.7,
      "grad_norm": 18.670812606811523,
      "learning_rate": 3.555555555555556e-06,
      "loss": 0.6268,
      "step": 3400
    },
    {
      "epoch": 2.72,
      "grad_norm": 15.329480171203613,
      "learning_rate": 3.5e-06,
      "loss": 0.5885,
      "step": 3425
    },
    {
      "epoch": 2.74,
      "grad_norm": 13.511859893798828,
      "learning_rate": 3.444444444444445e-06,
      "loss": 0.625,
      "step": 3450
    },
    {
      "epoch": 2.76,
      "grad_norm": 18.378982543945312,
      "learning_rate": 3.3888888888888893e-06,
      "loss": 0.588,
      "step": 3475
    },
    {
      "epoch": 2.78,
      "grad_norm": 10.714431762695312,
      "learning_rate": 3.3333333333333333e-06,
      "loss": 0.5832,
      "step": 3500
    },
    {
      "epoch": 2.8,
      "grad_norm": 13.219440460205078,
      "learning_rate": 3.277777777777778e-06,
      "loss": 0.6165,
      "step": 3525
    },
    {
      "epoch": 2.82,
      "grad_norm": 20.18632698059082,
      "learning_rate": 3.2222222222222227e-06,
      "loss": 0.5918,
      "step": 3550
    },
    {
      "epoch": 2.84,
      "grad_norm": 17.847423553466797,
      "learning_rate": 3.1666666666666667e-06,
      "loss": 0.5514,
      "step": 3575
    },
    {
      "epoch": 2.86,
      "grad_norm": 10.973837852478027,
      "learning_rate": 3.1111111111111116e-06,
      "loss": 0.6074,
      "step": 3600
    },
    {
      "epoch": 2.88,
      "grad_norm": 10.063940048217773,
      "learning_rate": 3.055555555555556e-06,
      "loss": 0.5841,
      "step": 3625
    },
    {
      "epoch": 2.9,
      "grad_norm": 19.3330020904541,
      "learning_rate": 3e-06,
      "loss": 0.6031,
      "step": 3650
    },
    {
      "epoch": 2.92,
      "grad_norm": 12.685822486877441,
      "learning_rate": 2.944444444444445e-06,
      "loss": 0.5543,
      "step": 3675
    },
    {
      "epoch": 2.94,
      "grad_norm": 13.237887382507324,
      "learning_rate": 2.888888888888889e-06,
      "loss": 0.5575,
      "step": 3700
    },
    {
      "epoch": 2.96,
      "grad_norm": 14.186014175415039,
      "learning_rate": 2.8333333333333335e-06,
      "loss": 0.5701,
      "step": 3725
    },
    {
      "epoch": 2.98,
      "grad_norm": 11.545550346374512,
      "learning_rate": 2.7777777777777783e-06,
      "loss": 0.5855,
      "step": 3750
    },
    {
      "epoch": 3.0,
      "grad_norm": 12.496350288391113,
      "learning_rate": 2.7222222222222224e-06,
      "loss": 0.6069,
      "step": 3775
    },
    {
      "epoch": 3.02,
      "grad_norm": 10.237105369567871,
      "learning_rate": 2.666666666666667e-06,
      "loss": 0.3926,
      "step": 3800
    },
    {
      "epoch": 3.04,
      "grad_norm": 6.904445648193359,
      "learning_rate": 2.6111111111111113e-06,
      "loss": 0.3931,
      "step": 3825
    },
    {
      "epoch": 3.06,
      "grad_norm": 9.90048599243164,
      "learning_rate": 2.5555555555555557e-06,
      "loss": 0.3966,
      "step": 3850
    },
    {
      "epoch": 3.08,
      "grad_norm": 9.313446044921875,
      "learning_rate": 2.5e-06,
      "loss": 0.4119,
      "step": 3875
    },
    {
      "epoch": 3.1,
      "grad_norm": 9.53442668914795,
      "learning_rate": 2.4444444444444447e-06,
      "loss": 0.4221,
      "step": 3900
    },
    {
      "epoch": 3.12,
      "grad_norm": 15.683548927307129,
      "learning_rate": 2.388888888888889e-06,
      "loss": 0.423,
      "step": 3925
    },
    {
      "epoch": 3.14,
      "grad_norm": 13.14923095703125,
      "learning_rate": 2.3333333333333336e-06,
      "loss": 0.3881,
      "step": 3950
    },
    {
      "epoch": 3.16,
      "grad_norm": 7.727118015289307,
      "learning_rate": 2.277777777777778e-06,
      "loss": 0.3926,
      "step": 3975
    },
    {
      "epoch": 3.18,
      "grad_norm": 6.784303665161133,
      "learning_rate": 2.222222222222222e-06,
      "loss": 0.3151,
      "step": 4000
    },
    {
      "epoch": 3.18,
      "eval_loss": 1.20012366771698,
      "eval_runtime": 741.1631,
      "eval_samples_per_second": 3.391,
      "eval_steps_per_second": 0.425,
      "eval_wer": 0.7703306523681859,
      "step": 4000
    },
    {
      "epoch": 3.2,
      "grad_norm": 11.838973045349121,
      "learning_rate": 2.166666666666667e-06,
      "loss": 0.4212,
      "step": 4025
    },
    {
      "epoch": 3.22,
      "grad_norm": 9.963013648986816,
      "learning_rate": 2.1111111111111114e-06,
      "loss": 0.5183,
      "step": 4050
    },
    {
      "epoch": 3.24,
      "grad_norm": 11.655789375305176,
      "learning_rate": 2.0555555555555555e-06,
      "loss": 0.3753,
      "step": 4075
    },
    {
      "epoch": 3.26,
      "grad_norm": 12.085322380065918,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 0.3888,
      "step": 4100
    },
    {
      "epoch": 3.28,
      "grad_norm": 16.148874282836914,
      "learning_rate": 1.944444444444445e-06,
      "loss": 0.4098,
      "step": 4125
    },
    {
      "epoch": 3.3,
      "grad_norm": 14.727580070495605,
      "learning_rate": 1.888888888888889e-06,
      "loss": 0.4313,
      "step": 4150
    },
    {
      "epoch": 3.32,
      "grad_norm": 7.685079574584961,
      "learning_rate": 1.8333333333333333e-06,
      "loss": 0.4227,
      "step": 4175
    },
    {
      "epoch": 3.34,
      "grad_norm": 12.381622314453125,
      "learning_rate": 1.777777777777778e-06,
      "loss": 0.4043,
      "step": 4200
    },
    {
      "epoch": 3.36,
      "grad_norm": 12.150275230407715,
      "learning_rate": 1.7222222222222224e-06,
      "loss": 0.4428,
      "step": 4225
    },
    {
      "epoch": 3.38,
      "grad_norm": 14.8292875289917,
      "learning_rate": 1.6666666666666667e-06,
      "loss": 0.4276,
      "step": 4250
    },
    {
      "epoch": 3.4,
      "grad_norm": 18.160255432128906,
      "learning_rate": 1.6111111111111113e-06,
      "loss": 0.4544,
      "step": 4275
    },
    {
      "epoch": 3.42,
      "grad_norm": 8.906795501708984,
      "learning_rate": 1.5555555555555558e-06,
      "loss": 0.3873,
      "step": 4300
    },
    {
      "epoch": 3.44,
      "grad_norm": 9.33066463470459,
      "learning_rate": 1.5e-06,
      "loss": 0.417,
      "step": 4325
    },
    {
      "epoch": 3.46,
      "grad_norm": 13.509571075439453,
      "learning_rate": 1.4444444444444445e-06,
      "loss": 0.4384,
      "step": 4350
    },
    {
      "epoch": 3.48,
      "grad_norm": 14.600696563720703,
      "learning_rate": 1.3888888888888892e-06,
      "loss": 0.471,
      "step": 4375
    },
    {
      "epoch": 3.5,
      "grad_norm": 15.88110065460205,
      "learning_rate": 1.3333333333333334e-06,
      "loss": 0.3886,
      "step": 4400
    },
    {
      "epoch": 3.52,
      "grad_norm": 21.162866592407227,
      "learning_rate": 1.2777777777777779e-06,
      "loss": 0.4064,
      "step": 4425
    },
    {
      "epoch": 3.54,
      "grad_norm": 16.15275764465332,
      "learning_rate": 1.2222222222222223e-06,
      "loss": 0.4125,
      "step": 4450
    },
    {
      "epoch": 3.56,
      "grad_norm": 10.692975997924805,
      "learning_rate": 1.1666666666666668e-06,
      "loss": 0.3805,
      "step": 4475
    },
    {
      "epoch": 3.58,
      "grad_norm": 13.422977447509766,
      "learning_rate": 1.111111111111111e-06,
      "loss": 0.4478,
      "step": 4500
    },
    {
      "epoch": 3.6,
      "grad_norm": 13.359613418579102,
      "learning_rate": 1.0555555555555557e-06,
      "loss": 0.3822,
      "step": 4525
    },
    {
      "epoch": 3.62,
      "grad_norm": 15.324664115905762,
      "learning_rate": 1.0000000000000002e-06,
      "loss": 0.4295,
      "step": 4550
    },
    {
      "epoch": 3.64,
      "grad_norm": 12.166446685791016,
      "learning_rate": 9.444444444444445e-07,
      "loss": 0.4018,
      "step": 4575
    },
    {
      "epoch": 3.66,
      "grad_norm": 11.992637634277344,
      "learning_rate": 8.88888888888889e-07,
      "loss": 0.3979,
      "step": 4600
    },
    {
      "epoch": 3.68,
      "grad_norm": 19.6129150390625,
      "learning_rate": 8.333333333333333e-07,
      "loss": 0.4456,
      "step": 4625
    },
    {
      "epoch": 3.7,
      "grad_norm": 9.729543685913086,
      "learning_rate": 7.777777777777779e-07,
      "loss": 0.3642,
      "step": 4650
    },
    {
      "epoch": 3.72,
      "grad_norm": 10.414878845214844,
      "learning_rate": 7.222222222222222e-07,
      "loss": 0.3569,
      "step": 4675
    },
    {
      "epoch": 3.74,
      "grad_norm": 12.819910049438477,
      "learning_rate": 6.666666666666667e-07,
      "loss": 0.4157,
      "step": 4700
    },
    {
      "epoch": 3.76,
      "grad_norm": 17.679868698120117,
      "learning_rate": 6.111111111111112e-07,
      "loss": 0.4457,
      "step": 4725
    },
    {
      "epoch": 3.78,
      "grad_norm": 14.453750610351562,
      "learning_rate": 5.555555555555555e-07,
      "loss": 0.3788,
      "step": 4750
    },
    {
      "epoch": 3.8,
      "grad_norm": 11.410884857177734,
      "learning_rate": 5.000000000000001e-07,
      "loss": 0.4198,
      "step": 4775
    },
    {
      "epoch": 3.82,
      "grad_norm": 16.757707595825195,
      "learning_rate": 4.444444444444445e-07,
      "loss": 0.3561,
      "step": 4800
    },
    {
      "epoch": 3.84,
      "grad_norm": 10.525169372558594,
      "learning_rate": 3.8888888888888895e-07,
      "loss": 0.3851,
      "step": 4825
    },
    {
      "epoch": 3.86,
      "grad_norm": 16.068998336791992,
      "learning_rate": 3.3333333333333335e-07,
      "loss": 0.4166,
      "step": 4850
    },
    {
      "epoch": 3.88,
      "grad_norm": 15.054728507995605,
      "learning_rate": 2.7777777777777776e-07,
      "loss": 0.386,
      "step": 4875
    },
    {
      "epoch": 3.9,
      "grad_norm": 15.278282165527344,
      "learning_rate": 2.2222222222222224e-07,
      "loss": 0.408,
      "step": 4900
    },
    {
      "epoch": 3.92,
      "grad_norm": 18.32455062866211,
      "learning_rate": 1.6666666666666668e-07,
      "loss": 0.4419,
      "step": 4925
    },
    {
      "epoch": 3.94,
      "grad_norm": 13.800189971923828,
      "learning_rate": 1.1111111111111112e-07,
      "loss": 0.4077,
      "step": 4950
    },
    {
      "epoch": 3.96,
      "grad_norm": 17.295827865600586,
      "learning_rate": 5.555555555555556e-08,
      "loss": 0.3982,
      "step": 4975
    },
    {
      "epoch": 3.98,
      "grad_norm": 20.146503448486328,
      "learning_rate": 0.0,
      "loss": 0.4063,
      "step": 5000
    },
    {
      "epoch": 3.98,
      "eval_loss": 1.197590947151184,
      "eval_runtime": 665.6332,
      "eval_samples_per_second": 3.775,
      "eval_steps_per_second": 0.473,
      "eval_wer": 0.7633876400632433,
      "step": 5000
    }
  ],
  "logging_steps": 25,
  "max_steps": 5000,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 4,
  "save_steps": 1000,
  "total_flos": 1.15390872317952e+19,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}
|