{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 5.34351145038168,
"eval_steps": 500,
"global_step": 35000,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.08,
"grad_norm": 7.7123026847839355,
"learning_rate": 1.23e-05,
"loss": 3.9019,
"step": 500
},
{
"epoch": 0.15,
"grad_norm": 5.605632781982422,
"learning_rate": 2.4775000000000003e-05,
"loss": 1.5616,
"step": 1000
},
{
"epoch": 0.23,
"grad_norm": 3.154059886932373,
"learning_rate": 3.7275000000000005e-05,
"loss": 1.4864,
"step": 1500
},
{
"epoch": 0.31,
"grad_norm": 2.2530698776245117,
"learning_rate": 4.9775000000000004e-05,
"loss": 1.4422,
"step": 2000
},
{
"epoch": 0.38,
"grad_norm": 2.2986319065093994,
"learning_rate": 4.9341823056300266e-05,
"loss": 1.4061,
"step": 2500
},
{
"epoch": 0.46,
"grad_norm": 1.661163091659546,
"learning_rate": 4.8671581769436996e-05,
"loss": 1.4029,
"step": 3000
},
{
"epoch": 0.53,
"grad_norm": 4.1285400390625,
"learning_rate": 4.800134048257373e-05,
"loss": 1.3777,
"step": 3500
},
{
"epoch": 0.61,
"grad_norm": 1.6614463329315186,
"learning_rate": 4.733109919571046e-05,
"loss": 1.359,
"step": 4000
},
{
"epoch": 0.69,
"grad_norm": 1.6893428564071655,
"learning_rate": 4.666085790884719e-05,
"loss": 1.3417,
"step": 4500
},
{
"epoch": 0.76,
"grad_norm": 1.9169539213180542,
"learning_rate": 4.599061662198392e-05,
"loss": 1.3231,
"step": 5000
},
{
"epoch": 0.84,
"grad_norm": 1.7877219915390015,
"learning_rate": 4.532037533512065e-05,
"loss": 1.3365,
"step": 5500
},
{
"epoch": 0.92,
"grad_norm": 1.6105085611343384,
"learning_rate": 4.465013404825737e-05,
"loss": 1.3108,
"step": 6000
},
{
"epoch": 0.99,
"grad_norm": 3.9035630226135254,
"learning_rate": 4.39798927613941e-05,
"loss": 1.3242,
"step": 6500
},
{
"epoch": 1.07,
"grad_norm": 1.9192873239517212,
"learning_rate": 4.330965147453083e-05,
"loss": 1.2635,
"step": 7000
},
{
"epoch": 1.15,
"grad_norm": 2.6928493976593018,
"learning_rate": 4.263941018766756e-05,
"loss": 1.2415,
"step": 7500
},
{
"epoch": 1.22,
"grad_norm": 2.442293643951416,
"learning_rate": 4.196916890080429e-05,
"loss": 1.2646,
"step": 8000
},
{
"epoch": 1.3,
"grad_norm": 1.960336446762085,
"learning_rate": 4.129892761394102e-05,
"loss": 1.2569,
"step": 8500
},
{
"epoch": 1.37,
"grad_norm": 1.510266661643982,
"learning_rate": 4.062868632707775e-05,
"loss": 1.249,
"step": 9000
},
{
"epoch": 1.45,
"grad_norm": 2.045361042022705,
"learning_rate": 3.9958445040214485e-05,
"loss": 1.2357,
"step": 9500
},
{
"epoch": 1.53,
"grad_norm": 1.5785582065582275,
"learning_rate": 3.928820375335121e-05,
"loss": 1.2464,
"step": 10000
},
{
"epoch": 1.6,
"grad_norm": 1.7004880905151367,
"learning_rate": 3.861796246648794e-05,
"loss": 1.2521,
"step": 10500
},
{
"epoch": 1.68,
"grad_norm": 6.9023847579956055,
"learning_rate": 3.7947721179624666e-05,
"loss": 1.2383,
"step": 11000
},
{
"epoch": 1.76,
"grad_norm": 1.391236424446106,
"learning_rate": 3.7277479892761396e-05,
"loss": 1.2449,
"step": 11500
},
{
"epoch": 1.83,
"grad_norm": 2.4769670963287354,
"learning_rate": 3.6607238605898125e-05,
"loss": 1.2397,
"step": 12000
},
{
"epoch": 1.91,
"grad_norm": 2.1677072048187256,
"learning_rate": 3.5936997319034855e-05,
"loss": 1.2375,
"step": 12500
},
{
"epoch": 1.98,
"grad_norm": 1.4357081651687622,
"learning_rate": 3.5266756032171584e-05,
"loss": 1.2176,
"step": 13000
},
{
"epoch": 2.06,
"grad_norm": 1.5423144102096558,
"learning_rate": 3.4596514745308314e-05,
"loss": 1.1792,
"step": 13500
},
{
"epoch": 2.14,
"grad_norm": 2.3282928466796875,
"learning_rate": 3.392627345844504e-05,
"loss": 1.1803,
"step": 14000
},
{
"epoch": 2.21,
"grad_norm": 1.8388934135437012,
"learning_rate": 3.325603217158177e-05,
"loss": 1.1808,
"step": 14500
},
{
"epoch": 2.29,
"grad_norm": 1.5079582929611206,
"learning_rate": 3.2585790884718495e-05,
"loss": 1.1779,
"step": 15000
},
{
"epoch": 2.37,
"grad_norm": 1.625044345855713,
"learning_rate": 3.1916890080428955e-05,
"loss": 1.1788,
"step": 15500
},
{
"epoch": 2.44,
"grad_norm": 1.4615004062652588,
"learning_rate": 3.1246648793565684e-05,
"loss": 1.185,
"step": 16000
},
{
"epoch": 2.52,
"grad_norm": 1.3896290063858032,
"learning_rate": 3.0576407506702414e-05,
"loss": 1.1716,
"step": 16500
},
{
"epoch": 2.6,
"grad_norm": 1.5188448429107666,
"learning_rate": 2.9906166219839143e-05,
"loss": 1.1811,
"step": 17000
},
{
"epoch": 2.67,
"grad_norm": 1.431693434715271,
"learning_rate": 2.9235924932975873e-05,
"loss": 1.1667,
"step": 17500
},
{
"epoch": 2.75,
"grad_norm": 1.6156121492385864,
"learning_rate": 2.8565683646112602e-05,
"loss": 1.1801,
"step": 18000
},
{
"epoch": 2.82,
"grad_norm": 1.875617265701294,
"learning_rate": 2.789678284182306e-05,
"loss": 1.1717,
"step": 18500
},
{
"epoch": 2.9,
"grad_norm": 1.362160325050354,
"learning_rate": 2.7226541554959788e-05,
"loss": 1.1721,
"step": 19000
},
{
"epoch": 2.98,
"grad_norm": 1.9847090244293213,
"learning_rate": 2.6556300268096518e-05,
"loss": 1.1659,
"step": 19500
},
{
"epoch": 3.05,
"grad_norm": 1.6233386993408203,
"learning_rate": 2.5886058981233247e-05,
"loss": 1.126,
"step": 20000
},
{
"epoch": 3.13,
"grad_norm": 1.5847933292388916,
"learning_rate": 2.5215817694369976e-05,
"loss": 1.1146,
"step": 20500
},
{
"epoch": 3.21,
"grad_norm": 1.467008352279663,
"learning_rate": 2.454691689008043e-05,
"loss": 1.1267,
"step": 21000
},
{
"epoch": 3.28,
"grad_norm": 1.5421602725982666,
"learning_rate": 2.387667560321716e-05,
"loss": 1.1332,
"step": 21500
},
{
"epoch": 3.36,
"grad_norm": 1.4515670537948608,
"learning_rate": 2.320643431635389e-05,
"loss": 1.1319,
"step": 22000
},
{
"epoch": 3.44,
"grad_norm": 1.4106440544128418,
"learning_rate": 2.2536193029490618e-05,
"loss": 1.1215,
"step": 22500
},
{
"epoch": 3.51,
"grad_norm": 1.476091980934143,
"learning_rate": 2.1867292225201075e-05,
"loss": 1.1221,
"step": 23000
},
{
"epoch": 3.59,
"grad_norm": 1.3340860605239868,
"learning_rate": 2.1197050938337804e-05,
"loss": 1.122,
"step": 23500
},
{
"epoch": 3.66,
"grad_norm": 2.5545642375946045,
"learning_rate": 2.0526809651474533e-05,
"loss": 1.1311,
"step": 24000
},
{
"epoch": 3.74,
"grad_norm": 1.5924307107925415,
"learning_rate": 1.985656836461126e-05,
"loss": 1.1339,
"step": 24500
},
{
"epoch": 3.82,
"grad_norm": 1.6188321113586426,
"learning_rate": 1.918632707774799e-05,
"loss": 1.1179,
"step": 25000
},
{
"epoch": 3.89,
"grad_norm": 1.5157597064971924,
"learning_rate": 1.8516085790884718e-05,
"loss": 1.1147,
"step": 25500
},
{
"epoch": 3.97,
"grad_norm": 1.6910189390182495,
"learning_rate": 1.7845844504021448e-05,
"loss": 1.1282,
"step": 26000
},
{
"epoch": 4.05,
"grad_norm": 1.870836853981018,
"learning_rate": 1.7176943699731904e-05,
"loss": 1.105,
"step": 26500
},
{
"epoch": 4.12,
"grad_norm": 1.5650787353515625,
"learning_rate": 1.6506702412868634e-05,
"loss": 1.0913,
"step": 27000
},
{
"epoch": 4.2,
"grad_norm": 1.369066596031189,
"learning_rate": 1.583646112600536e-05,
"loss": 1.0874,
"step": 27500
},
{
"epoch": 4.27,
"grad_norm": 1.8052254915237427,
"learning_rate": 1.5166219839142093e-05,
"loss": 1.101,
"step": 28000
},
{
"epoch": 4.35,
"grad_norm": 1.5547306537628174,
"learning_rate": 1.4495978552278822e-05,
"loss": 1.0728,
"step": 28500
},
{
"epoch": 4.43,
"grad_norm": 1.4759701490402222,
"learning_rate": 1.382573726541555e-05,
"loss": 1.0975,
"step": 29000
},
{
"epoch": 4.5,
"grad_norm": 1.3856291770935059,
"learning_rate": 1.3155495978552279e-05,
"loss": 1.0851,
"step": 29500
},
{
"epoch": 4.58,
"grad_norm": 1.5180363655090332,
"learning_rate": 1.2486595174262736e-05,
"loss": 1.0878,
"step": 30000
},
{
"epoch": 4.66,
"grad_norm": 1.4963964223861694,
"learning_rate": 1.1816353887399465e-05,
"loss": 1.0904,
"step": 30500
},
{
"epoch": 4.73,
"grad_norm": 1.6953232288360596,
"learning_rate": 1.1146112600536193e-05,
"loss": 1.0908,
"step": 31000
},
{
"epoch": 4.81,
"grad_norm": 1.5045945644378662,
"learning_rate": 1.0475871313672924e-05,
"loss": 1.0894,
"step": 31500
},
{
"epoch": 4.89,
"grad_norm": 1.935835599899292,
"learning_rate": 9.805630026809652e-06,
"loss": 1.0839,
"step": 32000
},
{
"epoch": 4.96,
"grad_norm": 1.7409507036209106,
"learning_rate": 9.136729222520107e-06,
"loss": 1.0923,
"step": 32500
},
{
"epoch": 5.04,
"grad_norm": 1.4888629913330078,
"learning_rate": 8.466487935656838e-06,
"loss": 1.0795,
"step": 33000
},
{
"epoch": 5.11,
"grad_norm": 1.7588920593261719,
"learning_rate": 7.796246648793565e-06,
"loss": 1.0697,
"step": 33500
},
{
"epoch": 5.19,
"grad_norm": 1.4674702882766724,
"learning_rate": 7.126005361930295e-06,
"loss": 1.0567,
"step": 34000
},
{
"epoch": 5.27,
"grad_norm": 1.5165821313858032,
"learning_rate": 6.455764075067025e-06,
"loss": 1.0583,
"step": 34500
},
{
"epoch": 5.34,
"grad_norm": 1.7669235467910767,
"learning_rate": 5.785522788203754e-06,
"loss": 1.0621,
"step": 35000
}
],
"logging_steps": 500,
"max_steps": 39300,
"num_input_tokens_seen": 0,
"num_train_epochs": 6,
"save_steps": 5000,
"total_flos": 1.705082093568e+17,
"train_batch_size": 8,
"trial_name": null,
"trial_params": null
}