{
    "epoch": 5.0,
    "eval_accuracy_identity_hate": 0.9913520216941833,
    "eval_accuracy_insult": 0.9763121008872986,
    "eval_accuracy_obscene": 0.9816073775291443,
    "eval_accuracy_severe_toxic": 0.9898793697357178,
    "eval_accuracy_threat": 0.9973680377006531,
    "eval_accuracy_toxic": 0.9678207635879517,
    "eval_aucroc_macro": 0.9897471070289612,
    "eval_aucroc_micro": 0.9920395612716675,
    "eval_f1_identity_hate": 0.4479999840259552,
    "eval_f1_insult": 0.7662338018417358,
    "eval_f1_macro": 0.6105479001998901,
    "eval_f1_micro": 0.7807540893554688,
    "eval_f1_obscene": 0.8337581753730774,
    "eval_f1_severe_toxic": 0.3312629163265228,
    "eval_f1_threat": 0.45454540848731995,
    "eval_f1_toxic": 0.8294870257377625,
    "eval_f2_macro": 0.6015251874923706,
    "eval_f2_micro": 0.783759593963623,
    "eval_loss": 0.039291638880968094,
    "eval_matthews_corrcoef": 0.7724997401237488,
    "eval_overall_accuracy": 0.9840565919876099,
    "eval_overall_f1": 0.9840565919876099,
    "eval_overall_f2": 0.9840565919876099,
    "eval_overall_precision": 0.9840565919876099,
    "eval_overall_recall": 0.9840565919876099,
    "eval_precision_macro": 0.6348888278007507,
    "eval_precision_micro": 0.7757956385612488,
    "eval_recall_macro": 0.5971536636352539,
    "eval_recall_micro": 0.7857762575149536,
    "eval_runtime": 334.4401,
    "eval_samples_per_second": 95.428,
    "eval_steps_per_second": 7.954
}