|
{ |
|
"best_metric": 0.34203216433525085, |
|
"best_model_checkpoint": "autotrain-7ejr4-3wbhb/checkpoint-446", |
|
"epoch": 2.0, |
|
"eval_steps": 500, |
|
"global_step": 446, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.04932735426008968, |
|
"grad_norm": 5.397119045257568, |
|
"learning_rate": 8.208955223880597e-06, |
|
"loss": 1.6001, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.09865470852017937, |
|
"grad_norm": 10.270828247070312, |
|
"learning_rate": 1.6417910447761194e-05, |
|
"loss": 1.6421, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.14798206278026907, |
|
"grad_norm": 6.842929363250732, |
|
"learning_rate": 2.4626865671641793e-05, |
|
"loss": 1.5659, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.19730941704035873, |
|
"grad_norm": 14.523164749145508, |
|
"learning_rate": 3.208955223880597e-05, |
|
"loss": 1.2199, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.24663677130044842, |
|
"grad_norm": null, |
|
"learning_rate": 3.9552238805970146e-05, |
|
"loss": 0.9104, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.29596412556053814, |
|
"grad_norm": null, |
|
"learning_rate": 4.7014925373134335e-05, |
|
"loss": 0.6866, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.3452914798206278, |
|
"grad_norm": 39.58903884887695, |
|
"learning_rate": 4.941860465116279e-05, |
|
"loss": 0.9257, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.39461883408071746, |
|
"grad_norm": 38.08815383911133, |
|
"learning_rate": 4.8504983388704325e-05, |
|
"loss": 0.5447, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.4439461883408072, |
|
"grad_norm": 32.093482971191406, |
|
"learning_rate": 4.7674418604651164e-05, |
|
"loss": 0.5951, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.49327354260089684, |
|
"grad_norm": 9.36325454711914, |
|
"learning_rate": 4.684385382059801e-05, |
|
"loss": 0.4972, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.5426008968609866, |
|
"grad_norm": 16.48912811279297, |
|
"learning_rate": 4.593023255813954e-05, |
|
"loss": 0.5117, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.5919282511210763, |
|
"grad_norm": 15.17750072479248, |
|
"learning_rate": 4.5016611295681064e-05, |
|
"loss": 0.5249, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.6412556053811659, |
|
"grad_norm": 73.44496154785156, |
|
"learning_rate": 4.410299003322259e-05, |
|
"loss": 0.2932, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.6905829596412556, |
|
"grad_norm": 7.012796878814697, |
|
"learning_rate": 4.3189368770764125e-05, |
|
"loss": 0.3703, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.7399103139013453, |
|
"grad_norm": 36.769142150878906, |
|
"learning_rate": 4.227574750830565e-05, |
|
"loss": 0.384, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.7892376681614349, |
|
"grad_norm": 0.5750433206558228, |
|
"learning_rate": 4.136212624584718e-05, |
|
"loss": 0.2024, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.8385650224215246, |
|
"grad_norm": 0.8097955584526062, |
|
"learning_rate": 4.0448504983388706e-05, |
|
"loss": 0.2261, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.8878923766816144, |
|
"grad_norm": 0.3052847683429718, |
|
"learning_rate": 3.953488372093023e-05, |
|
"loss": 0.3784, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.9372197309417041, |
|
"grad_norm": 9.378923416137695, |
|
"learning_rate": 3.862126245847176e-05, |
|
"loss": 0.334, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 0.9865470852017937, |
|
"grad_norm": 57.845924377441406, |
|
"learning_rate": 3.7707641196013294e-05, |
|
"loss": 0.5124, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"eval_accuracy": 0.8337078651685393, |
|
"eval_f1_macro": 0.8277707222765114, |
|
"eval_f1_micro": 0.8337078651685393, |
|
"eval_f1_weighted": 0.8270484241051885, |
|
"eval_loss": 0.6007972359657288, |
|
"eval_precision_macro": 0.8743021944472813, |
|
"eval_precision_micro": 0.8337078651685393, |
|
"eval_precision_weighted": 0.8814664248669926, |
|
"eval_recall_macro": 0.8408823529411764, |
|
"eval_recall_micro": 0.8337078651685393, |
|
"eval_recall_weighted": 0.8337078651685393, |
|
"eval_runtime": 1.7336, |
|
"eval_samples_per_second": 256.693, |
|
"eval_steps_per_second": 16.151, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 1.0358744394618835, |
|
"grad_norm": 4.146377086639404, |
|
"learning_rate": 3.679401993355482e-05, |
|
"loss": 0.4657, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 1.0852017937219731, |
|
"grad_norm": 2.958444833755493, |
|
"learning_rate": 3.588039867109635e-05, |
|
"loss": 0.2262, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 1.1345291479820627, |
|
"grad_norm": 18.36431312561035, |
|
"learning_rate": 3.4966777408637875e-05, |
|
"loss": 0.2503, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 1.1838565022421526, |
|
"grad_norm": 0.3763713538646698, |
|
"learning_rate": 3.40531561461794e-05, |
|
"loss": 0.1309, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 1.2331838565022422, |
|
"grad_norm": 0.1498165726661682, |
|
"learning_rate": 3.313953488372093e-05, |
|
"loss": 0.1149, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 1.2825112107623318, |
|
"grad_norm": 0.13080233335494995, |
|
"learning_rate": 3.222591362126246e-05, |
|
"loss": 0.2224, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 1.3318385650224216, |
|
"grad_norm": 6.366682052612305, |
|
"learning_rate": 3.131229235880399e-05, |
|
"loss": 0.3254, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 1.3811659192825112, |
|
"grad_norm": 0.08186858147382736, |
|
"learning_rate": 3.0398671096345517e-05, |
|
"loss": 0.0981, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 1.4304932735426008, |
|
"grad_norm": 0.14895343780517578, |
|
"learning_rate": 2.9485049833887047e-05, |
|
"loss": 0.1536, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 1.4798206278026906, |
|
"grad_norm": 0.07964171469211578, |
|
"learning_rate": 2.857142857142857e-05, |
|
"loss": 0.0458, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 1.5291479820627802, |
|
"grad_norm": 0.16557298600673676, |
|
"learning_rate": 2.76578073089701e-05, |
|
"loss": 0.2189, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 1.57847533632287, |
|
"grad_norm": 13.367328643798828, |
|
"learning_rate": 2.674418604651163e-05, |
|
"loss": 0.2818, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 1.6278026905829597, |
|
"grad_norm": 0.042071498930454254, |
|
"learning_rate": 2.591362126245847e-05, |
|
"loss": 0.1282, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 1.6771300448430493, |
|
"grad_norm": 0.0784522220492363, |
|
"learning_rate": 2.5e-05, |
|
"loss": 0.2491, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 1.726457399103139, |
|
"grad_norm": 0.17997685074806213, |
|
"learning_rate": 2.4086378737541528e-05, |
|
"loss": 0.022, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 1.7757847533632287, |
|
"grad_norm": 36.073631286621094, |
|
"learning_rate": 2.3172757475083055e-05, |
|
"loss": 0.1468, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 1.8251121076233185, |
|
"grad_norm": 46.98909378051758, |
|
"learning_rate": 2.2259136212624586e-05, |
|
"loss": 0.2726, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 1.8744394618834082, |
|
"grad_norm": 0.6519285440444946, |
|
"learning_rate": 2.1345514950166116e-05, |
|
"loss": 0.0038, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 1.9237668161434978, |
|
"grad_norm": 29.345619201660156, |
|
"learning_rate": 2.0431893687707643e-05, |
|
"loss": 0.251, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 1.9730941704035874, |
|
"grad_norm": 0.05096792057156563, |
|
"learning_rate": 1.951827242524917e-05, |
|
"loss": 0.3514, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"eval_accuracy": 0.946067415730337, |
|
"eval_f1_macro": 0.9457020850649197, |
|
"eval_f1_micro": 0.946067415730337, |
|
"eval_f1_weighted": 0.9461015789750475, |
|
"eval_loss": 0.34203216433525085, |
|
"eval_precision_macro": 0.9447370569809594, |
|
"eval_precision_micro": 0.946067415730337, |
|
"eval_precision_weighted": 0.9466487598452521, |
|
"eval_recall_macro": 0.9472065189712249, |
|
"eval_recall_micro": 0.946067415730337, |
|
"eval_recall_weighted": 0.946067415730337, |
|
"eval_runtime": 1.7697, |
|
"eval_samples_per_second": 251.457, |
|
"eval_steps_per_second": 15.822, |
|
"step": 446 |
|
} |
|
], |
|
"logging_steps": 11, |
|
"max_steps": 669, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 3, |
|
"save_steps": 500, |
|
"total_flos": 234175146792960.0, |
|
"train_batch_size": 8, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|