{
  "best_metric": 0.03925073519349098,
  "best_model_checkpoint": "man_woman_face_image_detection/checkpoint-1326",
  "epoch": 2.0,
  "eval_steps": 500,
  "global_step": 1326,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.75,
      "grad_norm": 0.15377628803253174,
      "learning_rate": 1.2946708463949842e-06,
      "loss": 0.0556,
      "step": 500
    },
    {
      "epoch": 1.0,
      "eval_accuracy": 0.9905960545853072,
      "eval_loss": 0.039327312260866165,
      "eval_runtime": 134.6358,
      "eval_samples_per_second": 105.046,
      "eval_steps_per_second": 13.132,
      "step": 663
    },
    {
      "epoch": 1.51,
      "grad_norm": 0.16108155250549316,
      "learning_rate": 5.109717868338558e-07,
      "loss": 0.0532,
      "step": 1000
    },
    {
      "epoch": 2.0,
      "eval_accuracy": 0.9903132291593014,
      "eval_loss": 0.03925073519349098,
      "eval_runtime": 130.4103,
      "eval_samples_per_second": 108.45,
      "eval_steps_per_second": 13.557,
      "step": 1326
    }
  ],
  "logging_steps": 500,
  "max_steps": 1326,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 2,
  "save_steps": 500,
  "total_flos": 3.28767515138868e+18,
  "train_batch_size": 32,
  "trial_name": null,
  "trial_params": null
}
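
The JSON above is a Hugging Face `Trainer` state file (`trainer_state.json`), the kind written into each checkpoint directory during training. A minimal sketch of reading the evaluation metrics back out of it; the file path below is an assumption based on the `best_model_checkpoint` value, not something stated in the file itself:

```python
# Sketch: parse a trainer_state.json and print the per-epoch eval metrics.
# The path is assumed from "best_model_checkpoint" above; adjust as needed.
import json

with open("man_woman_face_image_detection/checkpoint-1326/trainer_state.json") as f:
    state = json.load(f)

print(f"best checkpoint: {state['best_model_checkpoint']}")
print(f"best eval loss:  {state['best_metric']:.4f}")

# "log_history" mixes training-loss entries (keyed by "loss") with
# evaluation entries (keyed by "eval_loss"); keep only the latter.
for entry in state["log_history"]:
    if "eval_loss" in entry:
        print(
            f"epoch {entry['epoch']:.1f}: "
            f"accuracy={entry['eval_accuracy']:.4f}, "
            f"loss={entry['eval_loss']:.4f}"
        )
```

Run against the state shown above, this would report the two eval passes: accuracy 0.9906 / loss 0.0393 at epoch 1.0, and accuracy 0.9903 / loss 0.0393 at epoch 2.0, matching `best_metric` at step 1326.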