{
  "best_metric": 0.05154639175257732,
  "best_model_checkpoint": "5_distilbert-base-uncased\\checkpoint-137",
  "epoch": 5.0,
  "eval_steps": 500,
  "global_step": 685,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 1.0,
      "eval_accuracy": 0.05154639175257732,
      "eval_loss": 0.44556552171707153,
      "eval_runtime": 0.6355,
      "eval_samples_per_second": 763.192,
      "eval_steps_per_second": 25.177,
      "step": 137
    },
    {
      "epoch": 2.0,
      "eval_accuracy": 0.03917525773195876,
      "eval_loss": 0.4118332266807556,
      "eval_runtime": 0.6311,
      "eval_samples_per_second": 768.455,
      "eval_steps_per_second": 25.351,
      "step": 274
    },
    {
      "epoch": 3.0,
      "eval_accuracy": 0.03505154639175258,
      "eval_loss": 0.4265700578689575,
      "eval_runtime": 0.6437,
      "eval_samples_per_second": 753.463,
      "eval_steps_per_second": 24.857,
      "step": 411
    },
    {
      "epoch": 3.65,
      "grad_norm": 8.072059631347656,
      "learning_rate": 5.401459854014599e-06,
      "loss": 0.3595,
      "step": 500
    },
    {
      "epoch": 4.0,
      "eval_accuracy": 0.026804123711340205,
      "eval_loss": 0.4528670310974121,
      "eval_runtime": 0.6395,
      "eval_samples_per_second": 758.363,
      "eval_steps_per_second": 25.018,
      "step": 548
    },
    {
      "epoch": 5.0,
      "eval_accuracy": 0.024742268041237112,
      "eval_loss": 0.4807242155075073,
      "eval_runtime": 0.7005,
      "eval_samples_per_second": 692.381,
      "eval_steps_per_second": 22.841,
      "step": 685
    }
  ],
  "logging_steps": 500,
  "max_steps": 685,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 5,
  "save_steps": 500,
  "total_flos": 365833873627920.0,
  "train_batch_size": 32,
  "trial_name": null,
  "trial_params": null
}