cedpsam committed
Commit 9a376f4
1 Parent(s): 36ca2e8

Training in progress, step 2000

last-checkpoint/optimizer.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:d7e5418e608b5dc6cf6036f67b88a21c5a94f9cee4f0ef1f77fda59bfbb020ed
+oid sha256:38b092dfda9074f5157e4bd86a0328d554b8e72a5626a9bc321321cc36f072ce
 size 1001681601
last-checkpoint/scaler.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:13a70eb1c7ed1b7b50ca88a0d58b195bbbd43ac81f49980750c437ba42b82ae9
+oid sha256:cd51c2fa4070296203cb870e417cf342a24d6f4daf52267069468a9392c81b1e
 size 559
last-checkpoint/scheduler.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:e9837d8c5d08d60f65b3e9bc1340820f89ab7a8ce6c532d4ccb9b38bffd601d0
+oid sha256:3b1f479ec258d6cf215c17bde71062cb3b074d6531e1ee5dc85695a575ebeead
 size 623
last-checkpoint/trainer_state.json CHANGED
@@ -1,8 +1,8 @@
 {
   "best_metric": null,
   "best_model_checkpoint": null,
-  "epoch": 0.020347811931956918,
-  "global_step": 1500,
+  "epoch": 0.02713041590927589,
+  "global_step": 2000,
   "is_hyper_param_search": false,
   "is_local_process_zero": true,
   "is_world_process_zero": true,
@@ -24,11 +24,17 @@
       "learning_rate": 0.0,
       "loss": 0.0381,
       "step": 1500
+    },
+    {
+      "epoch": 0.03,
+      "learning_rate": 0.0,
+      "loss": 0.0369,
+      "step": 2000
     }
   ],
   "max_steps": 73718,
   "num_train_epochs": 1,
-  "total_flos": 1569332240842752.0,
+  "total_flos": 2091746435530752.0,
   "trial_name": null,
   "trial_params": null
 }
runs/Oct16_21-59-35_1b3a04926831/events.out.tfevents.1665958666.1b3a04926831.23.0 CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:6ac275ce1f5cc306eda57deeca0657115b496acbb5f405f829fe3451014f32c4
-size 46084
+oid sha256:04bd9ada616b26937701694f4651f64b06a163cdec854668486234de616027e9
+size 46241
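
For context, the updated trainer_state.json above records global_step 2000 of max_steps 73718 (epoch ≈ 0.0271, num_train_epochs 1) and a latest logged loss of 0.0369. The following is a minimal sketch, not part of the commit, that reads the checkpoint file from a local clone and reports that progress; the "log_history" key name comes from the Hugging Face Trainer state format and is not visible in the truncated hunk above.

    # Minimal sketch: report progress from the pushed checkpoint.
    # Assumes the repository has been cloned locally; only fields shown
    # in the diff above (plus the Trainer's "log_history" list) are used.
    import json

    with open("last-checkpoint/trainer_state.json") as f:
        state = json.load(f)

    # global_step / max_steps gives the fraction of the single planned epoch,
    # matching the recorded "epoch" value of ~0.0271 at step 2000.
    done = state["global_step"] / state["max_steps"]
    print(f"step {state['global_step']} / {state['max_steps']} ({done:.2%} of training)")
    print(f"latest logged loss: {state['log_history'][-1]['loss']}")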