lora-roberta-large-0927 / all_results.json
Commit 43a8173: End of training
{
"epoch": 25.0,
"eval_accuracy": 0.4471636546184739,
"eval_b_acc": 0.14285714285714285,
"eval_f1": 0.2763410114310333,
"eval_f1_anger": 0.0,
"eval_f1_disgust": 0.0,
"eval_f1_fear": 0.0,
"eval_f1_joy": 0.0,
"eval_f1_neutral": 0.6179862978059145,
"eval_f1_sadness": 0.0,
"eval_f1_surprise": 0.0,
"eval_loss": 1.5365694761276245,
"eval_micro_f1": 0.4471636546184739,
"eval_prec": 0.1999553340117498,
"eval_prec_anger": 0.0,
"eval_prec_disgust": 0.0,
"eval_prec_fear": 0.0,
"eval_prec_joy": 0.0,
"eval_prec_neutral": 0.4471636546184739,
"eval_prec_sadness": 0.0,
"eval_prec_surprise": 0.0,
"eval_recall": 0.4471636546184739,
"eval_recall_anger": 0.0,
"eval_recall_disgust": 0.0,
"eval_recall_fear": 0.0,
"eval_recall_joy": 0.0,
"eval_recall_neutral": 1.0,
"eval_recall_sadness": 0.0,
"eval_recall_surprise": 0.0,
"eval_runtime": 52.8586,
"eval_samples": 23904,
"eval_samples_per_second": 452.225,
"eval_steps_per_second": 14.132,
"train_loss": 1.4594040881953265,
"train_runtime": 28719.4719,
"train_samples": 214113,
"train_samples_per_second": 186.383,
"train_steps_per_second": 1.456
}
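
Read together, these numbers are consistent with a degenerate classifier: neutral recall is 1.0, every other per-class precision, recall, and F1 is 0.0, and accuracy, micro-F1, and neutral precision are all identical (0.4472), which is what happens when the model predicts neutral for every one of the 23,904 eval examples. The balanced accuracy of 0.1429 (1/7) fits the same pattern, since only one of the seven emotion classes has non-zero recall. Below is a minimal sketch for checking that arithmetic; it assumes the file is read from the working directory and that the aggregate eval_f1 is a support-weighted average over the seven classes (an inference from the numbers, not something documented in this repo).

import json

# Assumption: all_results.json is in the current working directory.
with open("all_results.json") as f:
    r = json.load(f)

# Every correct prediction is a neutral example (all other recalls are 0.0),
# so accuracy, micro-F1, and neutral precision all equal the neutral share
# of the eval set.
assert r["eval_recall_neutral"] == 1.0
assert r["eval_accuracy"] == r["eval_micro_f1"] == r["eval_prec_neutral"]

# Assumption: eval_f1 is a support-weighted average; only the neutral class
# contributes, weighted by its share of the eval set.
neutral_share = r["eval_accuracy"]
print(neutral_share * r["eval_f1_neutral"], r["eval_f1"])  # both ~0.2763

Under that reading, the eval loss of 1.54 and train loss of 1.46 reflect a model that has not learned to separate the minority emotion classes from neutral, so the headline accuracy mostly measures class imbalance rather than classification skill.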