{
  "epoch": 3.0,
  "eval_accuracy": 0.7286374133949192,
  "eval_accuracy_label_arts, culture, entertainment and media": 0.8333333333333334,
  "eval_accuracy_label_conflict, war and peace": 0.723404255319149,
  "eval_accuracy_label_crime, law and justice": 0.791907514450867,
  "eval_accuracy_label_disaster, accident, and emergency incident": 0.8931297709923665,
  "eval_accuracy_label_economy, business, and finance": 0.7974683544303798,
  "eval_accuracy_label_environment": 0.4375,
  "eval_accuracy_label_health": 0.7,
  "eval_accuracy_label_human interest": 0.3333333333333333,
  "eval_accuracy_label_labour": 0.5,
  "eval_accuracy_label_lifestyle and leisure": 0.5,
  "eval_accuracy_label_politics": 0.6330935251798561,
  "eval_accuracy_label_religion": 0.0,
  "eval_accuracy_label_science and technology": 0.4166666666666667,
  "eval_accuracy_label_society": 0.45614035087719296,
  "eval_accuracy_label_sport": 0.9615384615384616,
  "eval_accuracy_label_weather": 1.0,
  "eval_f1": 0.730012835488787,
  "eval_loss": 0.8615460395812988,
  "eval_precision": 0.7350865087902028,
  "eval_recall": 0.7286374133949192,
  "eval_runtime": 6.8008,
  "eval_samples_per_second": 127.338,
  "eval_steps_per_second": 8.087,
  "step": 1779
}