{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 0.12862563508907326,
"eval_steps": 1000,
"global_step": 2000,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0,
"grad_norm": 58.440006256103516,
"learning_rate": 1e-08,
"loss": 1.3813,
"step": 25
},
{
"epoch": 0.0,
"grad_norm": 59.934757232666016,
"learning_rate": 2.2e-08,
"loss": 1.469,
"step": 50
},
{
"epoch": 0.0,
"grad_norm": 31.011018753051758,
"learning_rate": 3.4500000000000005e-08,
"loss": 1.2226,
"step": 75
},
{
"epoch": 0.01,
"grad_norm": 59.818233489990234,
"learning_rate": 4.7e-08,
"loss": 1.2458,
"step": 100
},
{
"epoch": 0.01,
"grad_norm": 60.51572036743164,
"learning_rate": 5.95e-08,
"loss": 1.2781,
"step": 125
},
{
"epoch": 0.01,
"grad_norm": 51.360103607177734,
"learning_rate": 7.2e-08,
"loss": 1.4055,
"step": 150
},
{
"epoch": 0.01,
"grad_norm": 73.35002136230469,
"learning_rate": 8.45e-08,
"loss": 1.3354,
"step": 175
},
{
"epoch": 0.01,
"grad_norm": 69.32823944091797,
"learning_rate": 9.7e-08,
"loss": 1.2005,
"step": 200
},
{
"epoch": 0.01,
"grad_norm": 51.02174377441406,
"learning_rate": 1.095e-07,
"loss": 1.3853,
"step": 225
},
{
"epoch": 0.02,
"grad_norm": 72.20179748535156,
"learning_rate": 1.2199999999999998e-07,
"loss": 1.4476,
"step": 250
},
{
"epoch": 0.02,
"grad_norm": 108.30382537841797,
"learning_rate": 1.345e-07,
"loss": 1.2339,
"step": 275
},
{
"epoch": 0.02,
"grad_norm": 66.15994262695312,
"learning_rate": 1.4699999999999998e-07,
"loss": 1.379,
"step": 300
},
{
"epoch": 0.02,
"grad_norm": 47.82923126220703,
"learning_rate": 1.595e-07,
"loss": 1.1467,
"step": 325
},
{
"epoch": 0.02,
"grad_norm": 85.7218246459961,
"learning_rate": 1.7199999999999998e-07,
"loss": 1.1622,
"step": 350
},
{
"epoch": 0.02,
"grad_norm": 68.25504302978516,
"learning_rate": 1.845e-07,
"loss": 1.1413,
"step": 375
},
{
"epoch": 0.03,
"grad_norm": 106.06077575683594,
"learning_rate": 1.97e-07,
"loss": 1.0855,
"step": 400
},
{
"epoch": 0.03,
"grad_norm": 79.60690307617188,
"learning_rate": 2.095e-07,
"loss": 0.929,
"step": 425
},
{
"epoch": 0.03,
"grad_norm": 42.14814376831055,
"learning_rate": 2.22e-07,
"loss": 0.8728,
"step": 450
},
{
"epoch": 0.03,
"grad_norm": 37.4913444519043,
"learning_rate": 2.3449999999999996e-07,
"loss": 0.6651,
"step": 475
},
{
"epoch": 0.03,
"grad_norm": 41.89991760253906,
"learning_rate": 2.47e-07,
"loss": 0.5875,
"step": 500
},
{
"epoch": 0.03,
"grad_norm": 75.21453094482422,
"learning_rate": 2.595e-07,
"loss": 0.6868,
"step": 525
},
{
"epoch": 0.04,
"grad_norm": 21.09180450439453,
"learning_rate": 2.72e-07,
"loss": 0.741,
"step": 550
},
{
"epoch": 0.04,
"grad_norm": 44.54707336425781,
"learning_rate": 2.845e-07,
"loss": 0.3898,
"step": 575
},
{
"epoch": 0.04,
"grad_norm": 31.656843185424805,
"learning_rate": 2.9699999999999997e-07,
"loss": 0.422,
"step": 600
},
{
"epoch": 0.04,
"grad_norm": 56.28642654418945,
"learning_rate": 3.0949999999999996e-07,
"loss": 0.3803,
"step": 625
},
{
"epoch": 0.04,
"grad_norm": 38.66410827636719,
"learning_rate": 3.22e-07,
"loss": 0.5062,
"step": 650
},
{
"epoch": 0.04,
"grad_norm": 31.183727264404297,
"learning_rate": 3.345e-07,
"loss": 0.4075,
"step": 675
},
{
"epoch": 0.05,
"grad_norm": 23.618703842163086,
"learning_rate": 3.4699999999999997e-07,
"loss": 0.3627,
"step": 700
},
{
"epoch": 0.05,
"grad_norm": 70.09487915039062,
"learning_rate": 3.5949999999999996e-07,
"loss": 0.3087,
"step": 725
},
{
"epoch": 0.05,
"grad_norm": 74.42188262939453,
"learning_rate": 3.72e-07,
"loss": 0.4021,
"step": 750
},
{
"epoch": 0.05,
"grad_norm": 44.99939727783203,
"learning_rate": 3.845e-07,
"loss": 0.3203,
"step": 775
},
{
"epoch": 0.05,
"grad_norm": 42.77998352050781,
"learning_rate": 3.97e-07,
"loss": 0.3797,
"step": 800
},
{
"epoch": 0.05,
"grad_norm": 64.61412811279297,
"learning_rate": 4.0949999999999995e-07,
"loss": 0.3403,
"step": 825
},
{
"epoch": 0.05,
"grad_norm": 29.286806106567383,
"learning_rate": 4.2199999999999994e-07,
"loss": 0.2879,
"step": 850
},
{
"epoch": 0.06,
"grad_norm": 58.146263122558594,
"learning_rate": 4.345e-07,
"loss": 0.4017,
"step": 875
},
{
"epoch": 0.06,
"grad_norm": 44.624202728271484,
"learning_rate": 4.4699999999999997e-07,
"loss": 0.3698,
"step": 900
},
{
"epoch": 0.06,
"grad_norm": 47.91656494140625,
"learning_rate": 4.595e-07,
"loss": 0.4008,
"step": 925
},
{
"epoch": 0.06,
"grad_norm": 36.263668060302734,
"learning_rate": 4.7199999999999994e-07,
"loss": 0.2041,
"step": 950
},
{
"epoch": 0.06,
"grad_norm": 12.398943901062012,
"learning_rate": 4.845e-07,
"loss": 0.2978,
"step": 975
},
{
"epoch": 0.06,
"grad_norm": 4.42283821105957,
"learning_rate": 4.97e-07,
"loss": 0.2614,
"step": 1000
},
{
"epoch": 0.06,
"eval_loss": 0.29864633083343506,
"eval_runtime": 7667.7674,
"eval_samples_per_second": 1.228,
"eval_steps_per_second": 0.614,
"eval_wer": 0.14664944291942517,
"step": 1000
},
{
"epoch": 0.07,
"grad_norm": 59.681270599365234,
"learning_rate": 5.095e-07,
"loss": 0.2588,
"step": 1025
},
{
"epoch": 0.07,
"grad_norm": 27.44911766052246,
"learning_rate": 5.22e-07,
"loss": 0.2839,
"step": 1050
},
{
"epoch": 0.07,
"grad_norm": 56.26525115966797,
"learning_rate": 5.344999999999999e-07,
"loss": 0.2329,
"step": 1075
},
{
"epoch": 0.07,
"grad_norm": 112.37168884277344,
"learning_rate": 5.47e-07,
"loss": 0.2746,
"step": 1100
},
{
"epoch": 0.07,
"grad_norm": 92.82706451416016,
"learning_rate": 5.595e-07,
"loss": 0.2621,
"step": 1125
},
{
"epoch": 0.07,
"grad_norm": 17.20562744140625,
"learning_rate": 5.719999999999999e-07,
"loss": 0.1826,
"step": 1150
},
{
"epoch": 0.08,
"grad_norm": 31.119354248046875,
"learning_rate": 5.845e-07,
"loss": 0.2487,
"step": 1175
},
{
"epoch": 0.08,
"grad_norm": 7.4088850021362305,
"learning_rate": 5.97e-07,
"loss": 0.297,
"step": 1200
},
{
"epoch": 0.08,
"grad_norm": 72.84540557861328,
"learning_rate": 6.095e-07,
"loss": 0.3108,
"step": 1225
},
{
"epoch": 0.08,
"grad_norm": 38.68337631225586,
"learning_rate": 6.219999999999999e-07,
"loss": 0.2819,
"step": 1250
},
{
"epoch": 0.08,
"grad_norm": 5.215000152587891,
"learning_rate": 6.344999999999999e-07,
"loss": 0.1953,
"step": 1275
},
{
"epoch": 0.08,
"grad_norm": 41.42685317993164,
"learning_rate": 6.47e-07,
"loss": 0.2333,
"step": 1300
},
{
"epoch": 0.09,
"grad_norm": 6.224233150482178,
"learning_rate": 6.595e-07,
"loss": 0.2213,
"step": 1325
},
{
"epoch": 0.09,
"grad_norm": 48.12126541137695,
"learning_rate": 6.72e-07,
"loss": 0.3121,
"step": 1350
},
{
"epoch": 0.09,
"grad_norm": 23.151887893676758,
"learning_rate": 6.845e-07,
"loss": 0.2076,
"step": 1375
},
{
"epoch": 0.09,
"grad_norm": 53.516395568847656,
"learning_rate": 6.97e-07,
"loss": 0.2835,
"step": 1400
},
{
"epoch": 0.09,
"grad_norm": 41.62558364868164,
"learning_rate": 7.094999999999999e-07,
"loss": 0.2574,
"step": 1425
},
{
"epoch": 0.09,
"grad_norm": 98.05493927001953,
"learning_rate": 7.219999999999999e-07,
"loss": 0.3034,
"step": 1450
},
{
"epoch": 0.09,
"grad_norm": 86.28963470458984,
"learning_rate": 7.345e-07,
"loss": 0.2657,
"step": 1475
},
{
"epoch": 0.1,
"grad_norm": 2.8914854526519775,
"learning_rate": 7.47e-07,
"loss": 0.2636,
"step": 1500
},
{
"epoch": 0.1,
"grad_norm": 56.13273239135742,
"learning_rate": 7.594999999999999e-07,
"loss": 0.1522,
"step": 1525
},
{
"epoch": 0.1,
"grad_norm": 12.941767692565918,
"learning_rate": 7.72e-07,
"loss": 0.2097,
"step": 1550
},
{
"epoch": 0.1,
"grad_norm": 13.613518714904785,
"learning_rate": 7.845e-07,
"loss": 0.2303,
"step": 1575
},
{
"epoch": 0.1,
"grad_norm": 23.761892318725586,
"learning_rate": 7.970000000000001e-07,
"loss": 0.2763,
"step": 1600
},
{
"epoch": 0.1,
"grad_norm": 31.896230697631836,
"learning_rate": 8.094999999999999e-07,
"loss": 0.2722,
"step": 1625
},
{
"epoch": 0.11,
"grad_norm": 51.43158721923828,
"learning_rate": 8.219999999999999e-07,
"loss": 0.2228,
"step": 1650
},
{
"epoch": 0.11,
"grad_norm": 2.879077196121216,
"learning_rate": 8.345e-07,
"loss": 0.2694,
"step": 1675
},
{
"epoch": 0.11,
"grad_norm": 98.36167907714844,
"learning_rate": 8.469999999999999e-07,
"loss": 0.3088,
"step": 1700
},
{
"epoch": 0.11,
"grad_norm": 16.084274291992188,
"learning_rate": 8.595e-07,
"loss": 0.1828,
"step": 1725
},
{
"epoch": 0.11,
"grad_norm": 5.136277675628662,
"learning_rate": 8.72e-07,
"loss": 0.1753,
"step": 1750
},
{
"epoch": 0.11,
"grad_norm": 64.78803253173828,
"learning_rate": 8.845e-07,
"loss": 0.198,
"step": 1775
},
{
"epoch": 0.12,
"grad_norm": 41.84619903564453,
"learning_rate": 8.969999999999999e-07,
"loss": 0.2894,
"step": 1800
},
{
"epoch": 0.12,
"grad_norm": 45.18673324584961,
"learning_rate": 9.094999999999999e-07,
"loss": 0.1844,
"step": 1825
},
{
"epoch": 0.12,
"grad_norm": 20.42123794555664,
"learning_rate": 9.22e-07,
"loss": 0.2576,
"step": 1850
},
{
"epoch": 0.12,
"grad_norm": 8.751657485961914,
"learning_rate": 9.344999999999999e-07,
"loss": 0.2492,
"step": 1875
},
{
"epoch": 0.12,
"grad_norm": 29.69828224182129,
"learning_rate": 9.469999999999999e-07,
"loss": 0.1479,
"step": 1900
},
{
"epoch": 0.12,
"grad_norm": 22.91360855102539,
"learning_rate": 9.594999999999999e-07,
"loss": 0.2164,
"step": 1925
},
{
"epoch": 0.13,
"grad_norm": 17.22205352783203,
"learning_rate": 9.72e-07,
"loss": 0.2276,
"step": 1950
},
{
"epoch": 0.13,
"grad_norm": 83.4366683959961,
"learning_rate": 9.845e-07,
"loss": 0.2717,
"step": 1975
},
{
"epoch": 0.13,
"grad_norm": 11.829337120056152,
"learning_rate": 9.97e-07,
"loss": 0.2632,
"step": 2000
},
{
"epoch": 0.13,
"eval_loss": 0.22439254820346832,
"eval_runtime": 7771.0673,
"eval_samples_per_second": 1.211,
"eval_steps_per_second": 0.606,
"eval_wer": 0.13156789924107865,
"step": 2000
}
],
"logging_steps": 25,
"max_steps": 5000,
"num_input_tokens_seen": 0,
"num_train_epochs": 1,
"save_steps": 1000,
"total_flos": 4.0824196724424704e+18,
"train_batch_size": 1,
"trial_name": null,
"trial_params": null
}
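
A minimal sketch (not part of the original checkpoint) showing one way this trainer_state.json might be inspected: load the log_history array and plot training loss and eval WER against global step. It assumes a local copy of the file saved as "trainer_state.json" and that matplotlib is available; both the filename and the plotting choices are illustrative, not part of the training run.

import json

import matplotlib.pyplot as plt

with open("trainer_state.json") as f:
    state = json.load(f)

# Training-log entries carry a "loss" key; evaluation entries carry "eval_wer".
train = [(e["step"], e["loss"]) for e in state["log_history"] if "loss" in e]
evals = [(e["step"], e["eval_wer"]) for e in state["log_history"] if "eval_wer" in e]

fig, (ax1, ax2) = plt.subplots(1, 2, figsize=(10, 4))
ax1.plot(*zip(*train))
ax1.set_xlabel("global step")
ax1.set_ylabel("training loss")
ax2.plot(*zip(*evals), marker="o")
ax2.set_xlabel("global step")
ax2.set_ylabel("eval WER")
fig.tight_layout()
plt.show()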