{
  "best_metric": 0.828283390419402,
  "best_model_checkpoint": "/content/result/unsup-simcse-roberta-large-semeval2015-laptops",
  "epoch": 30.0,
  "global_step": 840,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 4.46,
      "eval_avg_sts": 0.7866241458854294,
      "eval_sickr_spearman": 0.7525337179492227,
      "eval_stsb_spearman": 0.820714573821636,
      "step": 125
    },
    {
      "epoch": 8.93,
      "eval_avg_sts": 0.7755652925596941,
      "eval_sickr_spearman": 0.7325310696931179,
      "eval_stsb_spearman": 0.8185995154262702,
      "step": 250
    },
    {
      "epoch": 13.39,
      "eval_avg_sts": 0.7783432375652614,
      "eval_sickr_spearman": 0.7284030847111207,
      "eval_stsb_spearman": 0.828283390419402,
      "step": 375
    },
    {
      "epoch": 17.86,
      "learning_rate": 2.023809523809524e-05,
      "loss": 0.0015,
      "step": 500
    },
    {
      "epoch": 17.86,
      "eval_avg_sts": 0.7641535833191572,
      "eval_sickr_spearman": 0.7357065979335973,
      "eval_stsb_spearman": 0.7926005687047171,
      "step": 500
    },
    {
      "epoch": 22.32,
      "eval_avg_sts": 0.7692839856507644,
      "eval_sickr_spearman": 0.7284775809494513,
      "eval_stsb_spearman": 0.8100903903520775,
      "step": 625
    },
    {
      "epoch": 26.79,
      "eval_avg_sts": 0.7722064928209667,
      "eval_sickr_spearman": 0.7298525192592187,
      "eval_stsb_spearman": 0.8145604663827146,
      "step": 750
    },
    {
      "epoch": 30.0,
      "step": 840,
      "train_runtime": 1610.2853,
      "train_samples_per_second": 0.522
    }
  ],
  "max_steps": 840,
  "num_train_epochs": 30,
  "total_flos": 7117989287583744,
  "trial_name": null,
  "trial_params": null
}