|
{ |
|
"best_metric": 0.45924699306488037, |
|
"best_model_checkpoint": "tipe-tweet/checkpoint-381", |
|
"epoch": 3.0, |
|
"eval_steps": 500, |
|
"global_step": 381, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.047244094488188976, |
|
"grad_norm": 17.715389251708984, |
|
"learning_rate": 6.41025641025641e-06, |
|
"loss": 1.0923, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.09448818897637795, |
|
"grad_norm": 15.7538480758667, |
|
"learning_rate": 1.282051282051282e-05, |
|
"loss": 1.1273, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.14173228346456693, |
|
"grad_norm": 31.92573356628418, |
|
"learning_rate": 2.0512820512820512e-05, |
|
"loss": 1.0638, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.1889763779527559, |
|
"grad_norm": 17.218690872192383, |
|
"learning_rate": 2.8205128205128207e-05, |
|
"loss": 1.1217, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.23622047244094488, |
|
"grad_norm": 24.77151107788086, |
|
"learning_rate": 3.58974358974359e-05, |
|
"loss": 0.9608, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.28346456692913385, |
|
"grad_norm": 18.787757873535156, |
|
"learning_rate": 4.358974358974359e-05, |
|
"loss": 0.9236, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.33070866141732286, |
|
"grad_norm": 16.54275894165039, |
|
"learning_rate": 4.985380116959065e-05, |
|
"loss": 0.8091, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.3779527559055118, |
|
"grad_norm": 8.500039100646973, |
|
"learning_rate": 4.8976608187134504e-05, |
|
"loss": 0.7872, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.4251968503937008, |
|
    "grad_norm": null,
|
"learning_rate": 4.824561403508772e-05, |
|
"loss": 0.6929, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.47244094488188976, |
|
"grad_norm": 9.164787292480469, |
|
"learning_rate": 4.736842105263158e-05, |
|
"loss": 0.6466, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.5196850393700787, |
|
"grad_norm": 15.12178897857666, |
|
"learning_rate": 4.649122807017544e-05, |
|
"loss": 0.9249, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.5669291338582677, |
|
    "grad_norm": null,
|
"learning_rate": 4.576023391812866e-05, |
|
"loss": 0.6676, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.6141732283464567, |
|
"grad_norm": 17.93980598449707, |
|
"learning_rate": 4.488304093567251e-05, |
|
"loss": 0.5475, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.6614173228346457, |
|
"grad_norm": 5.964590072631836, |
|
"learning_rate": 4.400584795321638e-05, |
|
"loss": 0.6425, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.7086614173228346, |
|
"grad_norm": 4.273876667022705, |
|
"learning_rate": 4.3128654970760236e-05, |
|
"loss": 0.6865, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.7559055118110236, |
|
"grad_norm": 15.413244247436523, |
|
"learning_rate": 4.22514619883041e-05, |
|
"loss": 0.984, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.8031496062992126, |
|
"grad_norm": 20.45796012878418, |
|
"learning_rate": 4.137426900584795e-05, |
|
"loss": 1.0419, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.8503937007874016, |
|
"grad_norm": 19.420028686523438, |
|
"learning_rate": 4.0497076023391814e-05, |
|
"loss": 0.6984, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.8976377952755905, |
|
"grad_norm": 9.930095672607422, |
|
"learning_rate": 3.9619883040935676e-05, |
|
"loss": 0.7064, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.9448818897637795, |
|
"grad_norm": 5.611089706420898, |
|
"learning_rate": 3.874269005847954e-05, |
|
"loss": 0.3895, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.9921259842519685, |
|
"grad_norm": 11.97076416015625, |
|
"learning_rate": 3.786549707602339e-05, |
|
"loss": 0.3185, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"eval_accuracy": 0.8228346456692913, |
|
"eval_f1_macro": 0.8213343063917776, |
|
"eval_f1_micro": 0.8228346456692913, |
|
"eval_f1_weighted": 0.8214016351404352, |
|
"eval_loss": 0.5371202826499939, |
|
"eval_precision_macro": 0.8425807026930622, |
|
"eval_precision_micro": 0.8228346456692913, |
|
"eval_precision_weighted": 0.8430483057595328, |
|
"eval_recall_macro": 0.823155929038282, |
|
"eval_recall_micro": 0.8228346456692913, |
|
"eval_recall_weighted": 0.8228346456692913, |
|
"eval_runtime": 0.5181, |
|
"eval_samples_per_second": 490.234, |
|
"eval_steps_per_second": 30.881, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 1.0393700787401574, |
|
"grad_norm": 8.59676742553711, |
|
"learning_rate": 3.6988304093567254e-05, |
|
"loss": 0.2363, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 1.0866141732283465, |
|
"grad_norm": 9.258766174316406, |
|
"learning_rate": 3.611111111111111e-05, |
|
"loss": 0.3952, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 1.1338582677165354, |
|
"grad_norm": 18.296749114990234, |
|
"learning_rate": 3.523391812865498e-05, |
|
"loss": 0.244, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 1.1811023622047245, |
|
"grad_norm": 7.507699489593506, |
|
"learning_rate": 3.435672514619883e-05, |
|
"loss": 0.2133, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 1.2283464566929134, |
|
"grad_norm": 7.04826545715332, |
|
"learning_rate": 3.3479532163742695e-05, |
|
"loss": 0.1629, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 1.2755905511811023, |
|
"grad_norm": 9.134398460388184, |
|
"learning_rate": 3.260233918128655e-05, |
|
"loss": 0.2612, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 1.3228346456692912, |
|
"grad_norm": 22.54612922668457, |
|
"learning_rate": 3.172514619883041e-05, |
|
"loss": 0.4343, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 1.3700787401574803, |
|
"grad_norm": 8.786137580871582, |
|
"learning_rate": 3.084795321637427e-05, |
|
"loss": 0.8668, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 1.4173228346456692, |
|
"grad_norm": 20.6187801361084, |
|
"learning_rate": 2.997076023391813e-05, |
|
"loss": 0.5004, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 1.4645669291338583, |
|
"grad_norm": 7.807861804962158, |
|
"learning_rate": 2.909356725146199e-05, |
|
"loss": 0.2473, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 1.5118110236220472, |
|
"grad_norm": 10.55922794342041, |
|
"learning_rate": 2.821637426900585e-05, |
|
"loss": 0.3118, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 1.5590551181102361, |
|
"grad_norm": 18.061159133911133, |
|
"learning_rate": 2.733918128654971e-05, |
|
"loss": 0.3446, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 1.6062992125984252, |
|
"grad_norm": 36.26211929321289, |
|
"learning_rate": 2.6461988304093572e-05, |
|
"loss": 0.5152, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 1.6535433070866141, |
|
"grad_norm": 35.57120895385742, |
|
"learning_rate": 2.5584795321637427e-05, |
|
"loss": 0.4002, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 1.7007874015748032, |
|
"grad_norm": 16.414527893066406, |
|
"learning_rate": 2.470760233918129e-05, |
|
"loss": 0.2597, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 1.7480314960629921, |
|
"grad_norm": 6.738377571105957, |
|
"learning_rate": 2.3830409356725147e-05, |
|
"loss": 0.2621, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 1.795275590551181, |
|
"grad_norm": 23.30550193786621, |
|
"learning_rate": 2.295321637426901e-05, |
|
"loss": 0.4844, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 1.84251968503937, |
|
"grad_norm": 1.9320651292800903, |
|
"learning_rate": 2.2076023391812867e-05, |
|
"loss": 0.2584, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 1.889763779527559, |
|
"grad_norm": 0.505867063999176, |
|
"learning_rate": 2.1198830409356725e-05, |
|
"loss": 0.0506, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 1.9370078740157481, |
|
"grad_norm": 12.698741912841797, |
|
"learning_rate": 2.0321637426900587e-05, |
|
"loss": 0.2443, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 1.984251968503937, |
|
"grad_norm": 23.977083206176758, |
|
"learning_rate": 1.9444444444444445e-05, |
|
"loss": 0.5257, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"eval_accuracy": 0.8307086614173228, |
|
"eval_f1_macro": 0.828053679874294, |
|
"eval_f1_micro": 0.8307086614173228, |
|
"eval_f1_weighted": 0.8280853872753738, |
|
"eval_loss": 0.6542023420333862, |
|
"eval_precision_macro": 0.8619388041171052, |
|
"eval_precision_micro": 0.8307086614173228, |
|
"eval_precision_weighted": 0.8625492066855815, |
|
"eval_recall_macro": 0.8312791783380019, |
|
"eval_recall_micro": 0.8307086614173228, |
|
"eval_recall_weighted": 0.8307086614173228, |
|
"eval_runtime": 0.5199, |
|
"eval_samples_per_second": 488.573, |
|
"eval_steps_per_second": 30.776, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 2.031496062992126, |
|
"grad_norm": 3.3990590572357178, |
|
"learning_rate": 1.8567251461988304e-05, |
|
"loss": 0.4507, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 2.078740157480315, |
|
"grad_norm": 77.66793823242188, |
|
"learning_rate": 1.7690058479532165e-05, |
|
"loss": 0.1908, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 2.1259842519685037, |
|
"grad_norm": 12.624061584472656, |
|
"learning_rate": 1.6812865497076024e-05, |
|
"loss": 0.1952, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 2.173228346456693, |
|
"grad_norm": 36.28937911987305, |
|
"learning_rate": 1.5935672514619886e-05, |
|
"loss": 0.064, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 2.220472440944882, |
|
"grad_norm": 0.4415770471096039, |
|
"learning_rate": 1.5058479532163744e-05, |
|
"loss": 0.1891, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 2.267716535433071, |
|
"grad_norm": 0.10503505915403366, |
|
"learning_rate": 1.4181286549707604e-05, |
|
"loss": 0.0693, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 2.3149606299212597, |
|
"grad_norm": 0.07962560653686523, |
|
"learning_rate": 1.3304093567251464e-05, |
|
"loss": 0.2182, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 2.362204724409449, |
|
"grad_norm": 0.4876466989517212, |
|
"learning_rate": 1.242690058479532e-05, |
|
"loss": 0.1929, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 2.409448818897638, |
|
"grad_norm": 3.6953608989715576, |
|
"learning_rate": 1.154970760233918e-05, |
|
"loss": 0.0129, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 2.456692913385827, |
|
"grad_norm": 1.1707993745803833, |
|
"learning_rate": 1.067251461988304e-05, |
|
"loss": 0.2443, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 2.5039370078740157, |
|
"grad_norm": 44.5469856262207, |
|
"learning_rate": 9.795321637426901e-06, |
|
"loss": 0.3835, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 2.5511811023622046, |
|
"grad_norm": 29.1602840423584, |
|
"learning_rate": 8.918128654970761e-06, |
|
"loss": 0.2782, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 2.5984251968503935, |
|
"grad_norm": 33.679283142089844, |
|
"learning_rate": 8.04093567251462e-06, |
|
"loss": 0.4945, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 2.6456692913385824, |
|
"grad_norm": 74.44416809082031, |
|
"learning_rate": 7.163742690058479e-06, |
|
"loss": 0.3764, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 2.6929133858267718, |
|
"grad_norm": 0.05699335038661957, |
|
"learning_rate": 6.286549707602339e-06, |
|
"loss": 0.3058, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 2.7401574803149606, |
|
"grad_norm": 5.521697044372559, |
|
"learning_rate": 5.409356725146199e-06, |
|
"loss": 0.1731, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 2.7874015748031495, |
|
"grad_norm": 8.134651184082031, |
|
"learning_rate": 4.532163742690059e-06, |
|
"loss": 0.1502, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 2.8346456692913384, |
|
"grad_norm": 0.10008104890584946, |
|
"learning_rate": 3.6549707602339183e-06, |
|
"loss": 0.1965, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 2.8818897637795278, |
|
"grad_norm": 26.00634765625, |
|
"learning_rate": 2.777777777777778e-06, |
|
"loss": 0.1173, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 2.9291338582677167, |
|
"grad_norm": 0.10696706175804138, |
|
"learning_rate": 1.9005847953216373e-06, |
|
"loss": 0.0334, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 2.9763779527559056, |
|
"grad_norm": 2.274127960205078, |
|
"learning_rate": 1.0233918128654972e-06, |
|
"loss": 0.0283, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"eval_accuracy": 0.889763779527559, |
|
"eval_f1_macro": 0.8898961097467847, |
|
"eval_f1_micro": 0.889763779527559, |
|
"eval_f1_weighted": 0.8899204214771557, |
|
"eval_loss": 0.45924699306488037, |
|
"eval_precision_macro": 0.8906384102462533, |
|
"eval_precision_micro": 0.889763779527559, |
|
"eval_precision_weighted": 0.8907447175163711, |
|
"eval_recall_macro": 0.8898225957049486, |
|
"eval_recall_micro": 0.889763779527559, |
|
"eval_recall_weighted": 0.889763779527559, |
|
"eval_runtime": 0.5017, |
|
"eval_samples_per_second": 506.311, |
|
"eval_steps_per_second": 31.894, |
|
"step": 381 |
|
} |
|
], |
|
"logging_steps": 6, |
|
"max_steps": 381, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 3, |
|
"save_steps": 500, |
|
"stateful_callbacks": { |
|
"EarlyStoppingCallback": { |
|
"args": { |
|
"early_stopping_patience": 5, |
|
"early_stopping_threshold": 0.01 |
|
}, |
|
"attributes": { |
|
"early_stopping_patience_counter": 0 |
|
} |
|
}, |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 199703084055552.0, |
|
"train_batch_size": 8, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|