Training in progress, epoch 1, checkpoint

- last-checkpoint/optimizer.pt +1 -1
- last-checkpoint/rng_state.pth +1 -1
- last-checkpoint/scheduler.pt +1 -1
- last-checkpoint/trainer_state.json +2629 -4
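
These four files are the per-checkpoint state that the Hugging Face transformers Trainer writes next to the model weights: optimizer.pt (optimizer state), rng_state.pth (random-number-generator state), scheduler.pt (learning-rate scheduler state), and trainer_state.json (the training log and control flags). A minimal resume sketch in Python, assuming the model and dataset objects from the original training script (not part of this commit):

# model and train_dataset are assumed to come from the original training script.
from transformers import Trainer, TrainingArguments

args = TrainingArguments(output_dir="out", save_strategy="epoch")
trainer = Trainer(model=model, args=args, train_dataset=train_dataset)

# resume_from_checkpoint restores optimizer.pt, scheduler.pt, rng_state.pth
# and trainer_state.json, then continues from the saved global_step (750 here).
trainer.train(resume_from_checkpoint="last-checkpoint")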
last-checkpoint/optimizer.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:8f82829ffee530562250a891ebc2de26fad2f54c5434110c9be18e0716ac6d22
 size 1101572914
last-checkpoint/rng_state.pth CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:8ed244bccd74c18cfe87600cb347ede07968b51d25052de564ed5d70d1359f01
 size 14244
last-checkpoint/scheduler.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:1380500cb5ce47f6085ce18023f66434aacc2bfc3f43d387430fdbaa4c016dad
 size 1064
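
Each of the three binary files above is stored as a Git LFS pointer: a three-line text stub giving the spec version, the sha256 oid of the real blob, and its size in bytes (the old oids are not shown on this page). A small stdlib-only Python sketch for checking a downloaded blob against the oid and size fields of its pointer; the call at the bottom uses the new optimizer-state values from this commit:

import hashlib
import pathlib

def verify_lfs_pointer(blob_path: str, oid_hex: str, expected_size: int) -> bool:
    """Return True if the file matches the pointer's size and sha256 oid."""
    p = pathlib.Path(blob_path)
    if p.stat().st_size != expected_size:
        return False
    digest = hashlib.sha256()
    with p.open("rb") as f:
        # Hash in 1 MiB chunks so large checkpoints don't load into memory at once.
        for chunk in iter(lambda: f.read(1 << 20), b""):
            digest.update(chunk)
    return digest.hexdigest() == oid_hex

ok = verify_lfs_pointer(
    "last-checkpoint/optimizer.pt",
    "8f82829ffee530562250a891ebc2de26fad2f54c5434110c9be18e0716ac6d22",
    1101572914,
)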
last-checkpoint/trainer_state.json CHANGED
@@ -1,9 +1,9 @@
 {
   "best_metric": null,
   "best_model_checkpoint": null,
-  "epoch":
   "eval_steps": 500,
-  "global_step":
   "is_hyper_param_search": false,
   "is_local_process_zero": true,
   "is_world_process_zero": true,
@@ -2640,6 +2640,2631 @@
     "eval_samples_per_second": 7.59,
     "eval_steps_per_second": 1.907,
     "step": 375
     }
   ],
   "logging_steps": 1,
@@ -2654,12 +5279,12 @@
       "should_evaluate": false,
       "should_log": false,
       "should_save": true,
-      "should_training_stop":
     },
     "attributes": {}
   }
 },
-  "total_flos":
   "train_batch_size": 4,
   "trial_name": null,
   "trial_params": null
 {
   "best_metric": null,
   "best_model_checkpoint": null,
+  "epoch": 1.997006983704689,
   "eval_steps": 500,
+  "global_step": 750,
   "is_hyper_param_search": false,
   "is_local_process_zero": true,
   "is_world_process_zero": true,

     "eval_samples_per_second": 7.59,
     "eval_steps_per_second": 1.907,
     "step": 375
+    },
+    { "epoch": 1.0019953441968739, "grad_norm": NaN, "learning_rate": 5.084903828741312e-05, "loss": 0.0, "step": 376 },
+    { "epoch": 1.0046558031260393, "grad_norm": NaN, "learning_rate": 5.063679210562602e-05, "loss": 0.0, "step": 377 },
+    { "epoch": 1.0073162620552045, "grad_norm": NaN, "learning_rate": 5.042453444670828e-05, "loss": 0.0, "step": 378 },
+    { "epoch": 1.0099767209843697, "grad_norm": NaN, "learning_rate": 5.021226913625522e-05, "loss": 0.0, "step": 379 },
+    { "epoch": 1.0126371799135352, "grad_norm": NaN, "learning_rate": 5e-05, "loss": 0.0, "step": 380 },
+    { "epoch": 1.0152976388427004, "grad_norm": NaN, "learning_rate": 4.978773086374479e-05, "loss": 0.0, "step": 381 },
+    { "epoch": 1.0179580977718656, "grad_norm": NaN, "learning_rate": 4.957546555329173e-05, "loss": 0.0, "step": 382 },
+    { "epoch": 1.0206185567010309, "grad_norm": NaN, "learning_rate": 4.9363207894374e-05, "loss": 0.0, "step": 383 },
+    { "epoch": 1.0232790156301963, "grad_norm": NaN, "learning_rate": 4.9150961712586895e-05, "loss": 0.0, "step": 384 },
+    { "epoch": 1.0259394745593615, "grad_norm": NaN, "learning_rate": 4.893873083331882e-05, "loss": 0.0, "step": 385 },
+    { "epoch": 1.0285999334885267, "grad_norm": NaN, "learning_rate": 4.8726519081682444e-05, "loss": 0.0, "step": 386 },
+    { "epoch": 1.031260392417692, "grad_norm": NaN, "learning_rate": 4.851433028244562e-05, "loss": 0.0, "step": 387 },
+    { "epoch": 1.0339208513468574, "grad_norm": NaN, "learning_rate": 4.830216825996257e-05, "loss": 0.0, "step": 388 },
+    { "epoch": 1.0365813102760226, "grad_norm": NaN, "learning_rate": 4.809003683810486e-05, "loss": 0.0, "step": 389 },
+    { "epoch": 1.0392417692051878, "grad_norm": NaN, "learning_rate": 4.78779398401926e-05, "loss": 0.0, "step": 390 },
+    { "epoch": 1.041902228134353, "grad_norm": NaN, "learning_rate": 4.7665881088925376e-05, "loss": 0.0, "step": 391 },
+    { "epoch": 1.0445626870635185, "grad_norm": NaN, "learning_rate": 4.7453864406313544e-05, "loss": 0.0, "step": 392 },
+    { "epoch": 1.0472231459926837, "grad_norm": NaN, "learning_rate": 4.7241893613609126e-05, "loss": 0.0, "step": 393 },
+    { "epoch": 1.049883604921849, "grad_norm": NaN, "learning_rate": 4.702997253123716e-05, "loss": 0.0, "step": 394 },
+    { "epoch": 1.0525440638510144, "grad_norm": NaN, "learning_rate": 4.6818104978726685e-05, "loss": 0.0, "step": 395 },
+    { "epoch": 1.0552045227801796, "grad_norm": NaN, "learning_rate": 4.6606294774641966e-05, "loss": 0.0, "step": 396 },
+    { "epoch": 1.0578649817093448, "grad_norm": NaN, "learning_rate": 4.6394545736513634e-05, "loss": 0.0, "step": 397 },
+    { "epoch": 1.06052544063851, "grad_norm": NaN, "learning_rate": 4.618286168076993e-05, "loss": 0.0, "step": 398 },
+    { "epoch": 1.0631858995676755, "grad_norm": NaN, "learning_rate": 4.597124642266788e-05, "loss": 0.0, "step": 399 },
+    { "epoch": 1.0658463584968407, "grad_norm": NaN, "learning_rate": 4.575970377622456e-05, "loss": 0.0, "step": 400 },
+    { "epoch": 1.068506817426006, "grad_norm": NaN, "learning_rate": 4.554823755414829e-05, "loss": 0.0, "step": 401 },
+    { "epoch": 1.0711672763551712, "grad_norm": NaN, "learning_rate": 4.5336851567770076e-05, "loss": 0.0, "step": 402 },
+    { "epoch": 1.0738277352843366, "grad_norm": NaN, "learning_rate": 4.5125549626974696e-05, "loss": 0.0, "step": 403 },
+    { "epoch": 1.0764881942135018, "grad_norm": NaN, "learning_rate": 4.491433554013221e-05, "loss": 0.0, "step": 404 },
+    { "epoch": 1.079148653142667, "grad_norm": NaN, "learning_rate": 4.47032131140292e-05, "loss": 0.0, "step": 405 },
+    { "epoch": 1.0818091120718325, "grad_norm": NaN, "learning_rate": 4.449218615380029e-05, "loss": 0.0, "step": 406 },
+    { "epoch": 1.0844695710009977, "grad_norm": NaN, "learning_rate": 4.4281258462859396e-05, "loss": 0.0, "step": 407 },
+    { "epoch": 1.087130029930163, "grad_norm": NaN, "learning_rate": 4.407043384283136e-05, "loss": 0.0, "step": 408 },
+    { "epoch": 1.0897904888593282, "grad_norm": NaN, "learning_rate": 4.3859716093483245e-05, "loss": 0.0, "step": 409 },
+    { "epoch": 1.0924509477884936, "grad_norm": NaN, "learning_rate": 4.364910901265606e-05, "loss": 0.0, "step": 410 },
+    { "epoch": 1.0951114067176588, "grad_norm": NaN, "learning_rate": 4.343861639619611e-05, "loss": 0.0, "step": 411 },
+    { "epoch": 1.097771865646824, "grad_norm": NaN, "learning_rate": 4.322824203788669e-05, "loss": 0.0, "step": 412 },
+    { "epoch": 1.1004323245759893, "grad_norm": NaN, "learning_rate": 4.3017989729379675e-05, "loss": 0.0, "step": 413 },
+    { "epoch": 1.1030927835051547, "grad_norm": NaN, "learning_rate": 4.280786326012723e-05, "loss": 0.0, "step": 414 },
+    { "epoch": 1.10575324243432, "grad_norm": NaN, "learning_rate": 4.2597866417313436e-05, "loss": 0.0, "step": 415 },
+    { "epoch": 1.1084137013634852, "grad_norm": NaN, "learning_rate": 4.23880029857861e-05, "loss": 0.0, "step": 416 },
+    { "epoch": 1.1110741602926504, "grad_norm": NaN, "learning_rate": 4.2178276747988446e-05, "loss": 0.0, "step": 417 },
+    { "epoch": 1.1137346192218158, "grad_norm": NaN, "learning_rate": 4.196869148389114e-05, "loss": 0.0, "step": 418 },
+    { "epoch": 1.116395078150981, "grad_norm": NaN, "learning_rate": 4.175925097092388e-05, "loss": 0.0, "step": 419 },
+    { "epoch": 1.1190555370801463, "grad_norm": NaN, "learning_rate": 4.1549958983907555e-05, "loss": 0.0, "step": 420 },
+    { "epoch": 1.1217159960093115, "grad_norm": NaN, "learning_rate": 4.1340819294986076e-05, "loss": 0.0, "step": 421 },
+    { "epoch": 1.124376454938477, "grad_norm": NaN, "learning_rate": 4.113183567355846e-05, "loss": 0.0, "step": 422 },
+    { "epoch": 1.1270369138676422, "grad_norm": NaN, "learning_rate": 4.092301188621084e-05, "loss": 0.0, "step": 423 },
+    { "epoch": 1.1296973727968074, "grad_norm": NaN, "learning_rate": 4.0714351696648614e-05, "loss": 0.0, "step": 424 },
+    { "epoch": 1.1323578317259728, "grad_norm": NaN, "learning_rate": 4.050585886562858e-05, "loss": 0.0, "step": 425 },
+    { "epoch": 1.135018290655138, "grad_norm": NaN, "learning_rate": 4.0297537150891235e-05, "loss": 0.0, "step": 426 },
+    { "epoch": 1.1376787495843033, "grad_norm": NaN, "learning_rate": 4.008939030709291e-05, "loss": 0.0, "step": 427 },
+    { "epoch": 1.1403392085134685, "grad_norm": NaN, "learning_rate": 3.988142208573822e-05, "loss": 0.0, "step": 428 },
+    { "epoch": 1.142999667442634, "grad_norm": NaN, "learning_rate": 3.9673636235112376e-05, "loss": 0.0, "step": 429 },
+    { "epoch": 1.1456601263717991, "grad_norm": NaN, "learning_rate": 3.94660365002137e-05, "loss": 0.0, "step": 430 },
+    { "epoch": 1.1483205853009644, "grad_norm": NaN, "learning_rate": 3.925862662268602e-05, "loss": 0.0, "step": 431 },
+    { "epoch": 1.1509810442301296, "grad_norm": NaN, "learning_rate": 3.905141034075135e-05, "loss": 0.0, "step": 432 },
+    { "epoch": 1.153641503159295, "grad_norm": NaN, "learning_rate": 3.884439138914243e-05, "loss": 0.0, "step": 433 },
+    { "epoch": 1.1563019620884603, "grad_norm": NaN, "learning_rate": 3.863757349903551e-05, "loss": 0.0, "step": 434 },
+    { "epoch": 1.1589624210176255, "grad_norm": NaN, "learning_rate": 3.843096039798293e-05, "loss": 0.0, "step": 435 },
+    { "epoch": 1.161622879946791, "grad_norm": NaN, "learning_rate": 3.822455580984613e-05, "loss": 0.0, "step": 436 },
+    { "epoch": 1.1642833388759561, "grad_norm": NaN, "learning_rate": 3.801836345472841e-05, "loss": 0.0, "step": 437 },
+    { "epoch": 1.1669437978051214, "grad_norm": NaN, "learning_rate": 3.781238704890793e-05, "loss": 0.0, "step": 438 },
+    { "epoch": 1.1696042567342866, "grad_norm": NaN, "learning_rate": 3.760663030477072e-05, "loss": 0.0, "step": 439 },
+    { "epoch": 1.172264715663452, "grad_norm": NaN, "learning_rate": 3.740109693074375e-05, "loss": 0.0, "step": 440 },
+    { "epoch": 1.1749251745926172, "grad_norm": NaN, "learning_rate": 3.7195790631228136e-05, "loss": 0.0, "step": 441 },
+    { "epoch": 1.1775856335217825, "grad_norm": NaN, "learning_rate": 3.6990715106532356e-05, "loss": 0.0, "step": 442 },
+    { "epoch": 1.1802460924509477, "grad_norm": NaN, "learning_rate": 3.6785874052805516e-05, "loss": 0.0, "step": 443 },
+    { "epoch": 1.1829065513801131, "grad_norm": NaN, "learning_rate": 3.658127116197079e-05, "loss": 0.0, "step": 444 },
+    { "epoch": 1.1855670103092784, "grad_norm": NaN, "learning_rate": 3.637691012165886e-05, "loss": 0.0, "step": 445 },
+    { "epoch": 1.1882274692384436, "grad_norm": NaN, "learning_rate": 3.6172794615141446e-05, "loss": 0.0, "step": 446 },
+    { "epoch": 1.190887928167609, "grad_norm": NaN, "learning_rate": 3.596892832126494e-05, "loss": 0.0, "step": 447 },
+    { "epoch": 1.1935483870967742, "grad_norm": NaN, "learning_rate": 3.5765314914384026e-05, "loss": 0.0, "step": 448 },
+    { "epoch": 1.1962088460259395, "grad_norm": NaN, "learning_rate": 3.556195806429559e-05, "loss": 0.0, "step": 449 },
+    { "epoch": 1.1988693049551047, "grad_norm": NaN, "learning_rate": 3.5358861436172485e-05, "loss": 0.0, "step": 450 },
+    { "epoch": 1.2015297638842701, "grad_norm": NaN, "learning_rate": 3.5156028690497454e-05, "loss": 0.0, "step": 451 },
+    { "epoch": 1.2041902228134354, "grad_norm": NaN, "learning_rate": 3.495346348299724e-05, "loss": 0.0, "step": 452 },
+    { "epoch": 1.2068506817426006, "grad_norm": NaN, "learning_rate": 3.4751169464576615e-05, "loss": 0.0, "step": 453 },
+    { "epoch": 1.2095111406717658, "grad_norm": NaN, "learning_rate": 3.4549150281252636e-05, "loss": 0.0, "step": 454 },
+    { "epoch": 1.2121715996009312, "grad_norm": NaN, "learning_rate": 3.434740957408889e-05, "loss": 0.0, "step": 455 },
+    { "epoch": 1.2148320585300965, "grad_norm": NaN, "learning_rate": 3.4145950979129915e-05, "loss": 0.0, "step": 456 },
+    { "epoch": 1.2174925174592617, "grad_norm": NaN, "learning_rate": 3.3944778127335595e-05, "loss": 0.0, "step": 457 },
+    { "epoch": 1.2201529763884271, "grad_norm": NaN, "learning_rate": 3.374389464451583e-05, "loss": 0.0, "step": 458 },
+    { "epoch": 1.2228134353175923, "grad_norm": NaN, "learning_rate": 3.3543304151265074e-05, "loss": 0.0, "step": 459 },
+    { "epoch": 1.2254738942467576, "grad_norm": NaN, "learning_rate": 3.334301026289712e-05, "loss": 0.0, "step": 460 },
+    { "epoch": 1.2281343531759228, "grad_norm": NaN, "learning_rate": 3.314301658937998e-05, "loss": 0.0, "step": 461 },
+    { "epoch": 1.230794812105088, "grad_norm": NaN, "learning_rate": 3.294332673527076e-05, "loss": 0.0, "step": 462 },
+    { "epoch": 1.2334552710342535, "grad_norm": NaN, "learning_rate": 3.274394429965073e-05, "loss": 0.0, "step": 463 },
+    { "epoch": 1.2361157299634187, "grad_norm": NaN, "learning_rate": 3.254487287606044e-05, "loss": 0.0, "step": 464 },
+    { "epoch": 1.238776188892584, "grad_norm": NaN, "learning_rate": 3.234611605243496e-05, "loss": 0.0, "step": 465 },
+    { "epoch": 1.2414366478217493, "grad_norm": NaN, "learning_rate": 3.214767741103923e-05, "loss": 0.0, "step": 466 },
+    { "epoch": 1.2440971067509146, "grad_norm": NaN, "learning_rate": 3.1949560528403486e-05, "loss": 0.0, "step": 467 },
+    { "epoch": 1.2467575656800798, "grad_norm": NaN, "learning_rate": 3.175176897525874e-05, "loss": 0.0, "step": 468 },
+    { "epoch": 1.249418024609245, "grad_norm": NaN, "learning_rate": 3.1554306316472536e-05, "loss": 0.0, "step": 469 },
+    { "epoch": 1.2520784835384104, "grad_norm": NaN, "learning_rate": 3.135717611098458e-05, "loss": 0.0, "step": 470 },
+    { "epoch": 1.2547389424675757, "grad_norm": NaN, "learning_rate": 3.1160381911742695e-05, "loss": 0.0, "step": 471 },
+    { "epoch": 1.257399401396741, "grad_norm": NaN, "learning_rate": 3.096392726563874e-05, "loss": 0.0, "step": 472 },
+    { "epoch": 1.2600598603259061, "grad_norm": NaN, "learning_rate": 3.076781571344465e-05, "loss": 0.0, "step": 473 },
+    { "epoch": 1.2627203192550716, "grad_norm": NaN, "learning_rate": 3.057205078974873e-05, "loss": 0.0, "step": 474 },
+    { "epoch": 1.2653807781842368, "grad_norm": NaN, "learning_rate": 3.0376636022891812e-05, "loss": 0.0, "step": 475 },
+    { "epoch": 1.268041237113402, "grad_norm": NaN, "learning_rate": 3.0181574934903743e-05, "loss": 0.0, "step": 476 },
+    { "epoch": 1.2707016960425674, "grad_norm": NaN, "learning_rate": 2.9986871041439908e-05, "loss": 0.0, "step": 477 },
+    { "epoch": 1.2733621549717327, "grad_norm": NaN, "learning_rate": 2.9792527851717805e-05, "loss": 0.0, "step": 478 },
+    { "epoch": 1.2760226139008979, "grad_norm": NaN, "learning_rate": 2.95985488684539e-05, "loss": 0.0, "step": 479 },
+    { "epoch": 1.278683072830063, "grad_norm": NaN, "learning_rate": 2.9404937587800375e-05, "loss": 0.0, "step": 480 },
+    { "epoch": 1.2813435317592285, "grad_norm": NaN, "learning_rate": 2.921169749928222e-05, "loss": 0.0, "step": 481 },
+    { "epoch": 1.2840039906883938, "grad_norm": NaN, "learning_rate": 2.9018832085734298e-05, "loss": 0.0, "step": 482 },
+    { "epoch": 1.286664449617559, "grad_norm": NaN, "learning_rate": 2.882634482323856e-05, "loss": 0.0, "step": 483 },
+    { "epoch": 1.2893249085467242, "grad_norm": NaN, "learning_rate": 2.8634239181061384e-05, "loss": 0.0, "step": 484 },
+    { "epoch": 1.2919853674758897, "grad_norm": NaN, "learning_rate": 2.8442518621591086e-05, "loss": 0.0, "step": 485 },
+    { "epoch": 1.2946458264050549, "grad_norm": NaN, "learning_rate": 2.8251186600275533e-05, "loss": 0.0, "step": 486 },
+    { "epoch": 1.29730628533422, "grad_norm": NaN, "learning_rate": 2.8060246565559778e-05, "loss": 0.0, "step": 487 },
+    { "epoch": 1.2999667442633855, "grad_norm": NaN, "learning_rate": 2.786970195882398e-05, "loss": 0.0, "step": 488 },
+    { "epoch": 1.3026272031925508, "grad_norm": NaN, "learning_rate": 2.7679556214321357e-05, "loss": 0.0, "step": 489 },
+    { "epoch": 1.305287662121716, "grad_norm": NaN, "learning_rate": 2.748981275911633e-05, "loss": 0.0, "step": 490 },
+    { "epoch": 1.3079481210508812, "grad_norm": NaN, "learning_rate": 2.7300475013022663e-05, "loss": 0.0, "step": 491 },
+    { "epoch": 1.3106085799800464, "grad_norm": NaN, "learning_rate": 2.7111546388541896e-05, "loss": 0.0, "step": 492 },
+    { "epoch": 1.3132690389092119, "grad_norm": NaN, "learning_rate": 2.6923030290801815e-05, "loss": 0.0, "step": 493 },
+    { "epoch": 1.315929497838377, "grad_norm": NaN, "learning_rate": 2.6734930117495134e-05, "loss": 0.0, "step": 494 },
+    { "epoch": 1.3185899567675423, "grad_norm": NaN, "learning_rate": 2.6547249258818164e-05, "loss": 0.0, "step": 495 },
+    { "epoch": 1.3212504156967078, "grad_norm": NaN, "learning_rate": 2.6359991097409765e-05, "loss": 0.0, "step": 496 },
+    { "epoch": 1.323910874625873, "grad_norm": NaN, "learning_rate": 2.6173159008290406e-05, "loss": 0.0, "step": 497 },
+    { "epoch": 1.3265713335550382, "grad_norm": NaN, "learning_rate": 2.598675635880129e-05, "loss": 0.0, "step": 498 },
+    { "epoch": 1.3292317924842036, "grad_norm": NaN, "learning_rate": 2.580078650854366e-05, "loss": 0.0, "step": 499 },
+    { "epoch": 1.3318922514133689, "grad_norm": NaN, "learning_rate": 2.5615252809318284e-05, "loss": 0.0, "step": 500 },
+    { "epoch": 1.334552710342534, "grad_norm": NaN, "learning_rate": 2.543015860506499e-05, "loss": 0.0, "step": 501 },
+    { "epoch": 1.3372131692716993, "grad_norm": NaN, "learning_rate": 2.524550723180249e-05, "loss": 0.0, "step": 502 },
+    { "epoch": 1.3398736282008645, "grad_norm": NaN, "learning_rate": 2.506130201756815e-05, "loss": 0.0, "step": 503 },
+    { "epoch": 1.34253408713003, "grad_norm": NaN, "learning_rate": 2.487754628235805e-05, "loss": 0.0, "step": 504 },
+    { "epoch": 1.3451945460591952, "grad_norm": NaN, "learning_rate": 2.469424333806718e-05, "loss": 0.0, "step": 505 },
+    { "epoch": 1.3478550049883604, "grad_norm": NaN, "learning_rate": 2.4511396488429727e-05, "loss": 0.0, "step": 506 },
+    { "epoch": 1.3505154639175259, "grad_norm": NaN, "learning_rate": 2.4329009028959482e-05, "loss": 0.0, "step": 507 },
+    { "epoch": 1.353175922846691, "grad_norm": NaN, "learning_rate": 2.4147084246890478e-05, "loss": 0.0, "step": 508 },
+    { "epoch": 1.3558363817758563, "grad_norm": NaN, "learning_rate": 2.396562542111781e-05, "loss": 0.0, "step": 509 },
+    { "epoch": 1.3584968407050217, "grad_norm": NaN, "learning_rate": 2.3784635822138424e-05, "loss": 0.0, "step": 510 },
+    { "epoch": 1.361157299634187, "grad_norm": NaN, "learning_rate": 2.3604118711992235e-05, "loss": 0.0, "step": 511 },
+    { "epoch": 1.3638177585633522, "grad_norm": NaN, "learning_rate": 2.342407734420331e-05, "loss": 0.0, "step": 512 },
+    { "epoch": 1.3664782174925174, "grad_norm": NaN, "learning_rate": 2.3244514963721276e-05, "loss": 0.0, "step": 513 },
+    { "epoch": 1.3691386764216826, "grad_norm": NaN, "learning_rate": 2.3065434806862807e-05, "loss": 0.0, "step": 514 },
+    { "epoch": 1.371799135350848, "grad_norm": NaN, "learning_rate": 2.288684010125325e-05, "loss": 0.0, "step": 515 },
+    { "epoch": 1.3744595942800133, "grad_norm": NaN, "learning_rate": 2.2708734065768488e-05, "loss": 0.0, "step": 516 },
+    { "epoch": 1.3771200532091785, "grad_norm": NaN, "learning_rate": 2.2531119910476993e-05, "loss": 0.0, "step": 517 },
+    { "epoch": 1.379780512138344, "grad_norm": NaN, "learning_rate": 2.2354000836581834e-05, "loss": 0.0, "step": 518 },
+    { "epoch": 1.3824409710675092, "grad_norm": NaN, "learning_rate": 2.2177380036363088e-05, "loss": 0.0, "step": 519 },
+    { "epoch": 1.3851014299966744, "grad_norm": NaN, "learning_rate": 2.2001260693120233e-05, "loss": 0.0, "step": 520 },
+    { "epoch": 1.3877618889258396, "grad_norm": NaN, "learning_rate": 2.182564598111486e-05, "loss": 0.0, "step": 521 },
+    { "epoch": 1.390422347855005, "grad_norm": NaN, "learning_rate": 2.1650539065513413e-05, "loss": 0.0, "step": 522 },
+    { "epoch": 1.3930828067841703, "grad_norm": NaN, "learning_rate": 2.1475943102330087e-05, "loss": 0.0, "step": 523 },
+    { "epoch": 1.3957432657133355, "grad_norm": NaN, "learning_rate": 2.130186123837002e-05, "loss": 0.0, "step": 524 },
+    { "epoch": 1.3984037246425007, "grad_norm": NaN, "learning_rate": 2.1128296611172593e-05, "loss": 0.0, "step": 525 },
+    { "epoch": 1.4010641835716662, "grad_norm": NaN, "learning_rate": 2.0955252348954807e-05, "loss": 0.0, "step": 526 },
+    { "epoch": 1.4037246425008314, "grad_norm": NaN, "learning_rate": 2.0782731570554947e-05, "loss": 0.0, "step": 527 },
+    { "epoch": 1.4063851014299966, "grad_norm": NaN, "learning_rate": 2.061073738537635e-05, "loss": 0.0, "step": 528 },
+    { "epoch": 1.409045560359162, "grad_norm": NaN, "learning_rate": 2.043927289333141e-05, "loss": 0.0, "step": 529 },
+    { "epoch": 1.4117060192883273, "grad_norm": NaN, "learning_rate": 2.026834118478567e-05, "loss": 0.0, "step": 530 },
+    { "epoch": 1.4143664782174925, "grad_norm": NaN, "learning_rate": 2.00979453405021e-05, "loss": 0.0, "step": 531 },
+    { "epoch": 1.4170269371466577, "grad_norm": NaN, "learning_rate": 1.992808843158559e-05, "loss": 0.0, "step": 532 },
+    { "epoch": 1.419687396075823, "grad_norm": NaN, "learning_rate": 1.9758773519427665e-05, "loss": 0.0, "step": 533 },
+    { "epoch": 1.4223478550049884, "grad_norm": NaN, "learning_rate": 1.959000365565119e-05, "loss": 0.0, "step": 534 },
+    { "epoch": 1.4250083139341536, "grad_norm": NaN, "learning_rate": 1.9421781882055444e-05, "loss": 0.0, "step": 535 },
+    { "epoch": 1.4276687728633188, "grad_norm": NaN, "learning_rate": 1.9254111230561283e-05, "loss": 0.0, "step": 536 },
+    { "epoch": 1.4303292317924843, "grad_norm": NaN, "learning_rate": 1.908699472315651e-05, "loss": 0.0, "step": 537 },
+    { "epoch": 1.4329896907216495, "grad_norm": NaN, "learning_rate": 1.8920435371841394e-05, "loss": 0.0, "step": 538 },
+    { "epoch": 1.4356501496508147, "grad_norm": NaN, "learning_rate": 1.875443617857436e-05, "loss": 0.0, "step": 539 },
+    { "epoch": 1.4383106085799802, "grad_norm": NaN, "learning_rate": 1.858900013521788e-05, "loss": 0.0, "step": 540 },
+    { "epoch": 1.4409710675091454, "grad_norm": NaN, "learning_rate": 1.8424130223484653e-05, "loss": 0.0, "step": 541 },
+    { "epoch": 1.4436315264383106, "grad_norm": NaN, "learning_rate": 1.8259829414883728e-05, "loss": 0.0, "step": 542 },
+    { "epoch": 1.4462919853674758, "grad_norm": NaN, "learning_rate": 1.809610067066701e-05, "loss": 0.0, "step": 543 },
+    { "epoch": 1.448952444296641, "grad_norm": NaN, "learning_rate": 1.793294694177588e-05, "loss": 0.0, "step": 544 },
+    { "epoch": 1.4516129032258065, "grad_norm": NaN, "learning_rate": 1.777037116878804e-05, "loss": 0.0, "step": 545 },
+    { "epoch": 1.4542733621549717, "grad_norm": NaN, "learning_rate": 1.76083762818645e-05, "loss": 0.0, "step": 546 },
+    { "epoch": 1.456933821084137, "grad_norm": NaN, "learning_rate": 1.7446965200696703e-05, "loss": 0.0, "step": 547 },
+    { "epoch": 1.4595942800133024, "grad_norm": NaN, "learning_rate": 1.7286140834453956e-05, "loss": 0.0, "step": 548 },
+    { "epoch": 1.4622547389424676, "grad_norm": NaN, "learning_rate": 1.7125906081731064e-05, "loss": 0.0, "step": 549 },
+    { "epoch": 1.4649151978716328, "grad_norm": NaN, "learning_rate": 1.6966263830495936e-05, "loss": 0.0, "step": 550 },
+    { "epoch": 1.4675756568007983, "grad_norm": NaN, "learning_rate": 1.6807216958037663e-05, "loss": 0.0, "step": 551 },
+    { "epoch": 1.4702361157299635, "grad_norm": NaN, "learning_rate": 1.6648768330914578e-05, "loss": 0.0, "step": 552 },
+    { "epoch": 1.4728965746591287, "grad_norm": NaN, "learning_rate": 1.649092080490266e-05, "loss": 0.0, "step": 553 },
+    { "epoch": 1.475557033588294, "grad_norm": NaN, "learning_rate": 1.633367722494404e-05, "loss": 0.0, "step": 554 },
+    { "epoch": 1.4782174925174592, "grad_norm": NaN, "learning_rate": 1.6177040425095662e-05, "loss": 0.0, "step": 555 },
+    { "epoch": 1.4808779514466246, "grad_norm": NaN, "learning_rate": 1.6021013228478278e-05, "loss": 0.0, "step": 556 },
+    { "epoch": 1.4835384103757898, "grad_norm": NaN, "learning_rate": 1.5865598447225577e-05, "loss": 0.0, "step": 557 },
+    { "epoch": 1.486198869304955, "grad_norm": NaN, "learning_rate": 1.5710798882433432e-05, "loss": 0.0, "step": 558 },
+    { "epoch": 1.4888593282341205, "grad_norm": NaN, "learning_rate": 1.5556617324109442e-05, "loss": 0.0, "step": 559 },
+    { "epoch": 1.4915197871632857, "grad_norm": NaN, "learning_rate": 1.5403056551122697e-05, "loss": 0.0, "step": 560 },
+    { "epoch": 1.494180246092451, "grad_norm": NaN, "learning_rate": 1.5250119331153606e-05, "loss": 0.0, "step": 561 },
+    { "epoch": 1.4968407050216161, "grad_norm": NaN, "learning_rate": 1.5097808420644115e-05, "loss": 0.0, "step": 562 },
+    { "epoch": 1.4995011639507816, "grad_norm": NaN, "learning_rate": 1.494612656474792e-05, "loss": 0.0, "step": 563 },
+    { "epoch": 1.5021616228799468, "grad_norm": NaN, "learning_rate": 1.4795076497281052e-05, "loss": 0.0, "step": 564 },
+    { "epoch": 1.504822081809112, "grad_norm": NaN, "learning_rate": 1.4644660940672627e-05, "loss": 0.0, "step": 565 },
+    { "epoch": 1.5074825407382773, "grad_norm": NaN, "learning_rate": 1.4494882605915717e-05, "loss": 0.0, "step": 566 },
+    { "epoch": 1.5101429996674427, "grad_norm": NaN, "learning_rate": 1.4345744192518506e-05, "loss": 0.0, "step": 567 },
+    { "epoch": 1.512803458596608, "grad_norm": NaN, "learning_rate": 1.4197248388455691e-05, "loss": 0.0, "step": 568 },
+    { "epoch": 1.5154639175257731, "grad_norm": NaN, "learning_rate": 1.4049397870119962e-05, "loss": 0.0, "step": 569 },
+    { "epoch": 1.5181243764549386, "grad_norm": NaN, "learning_rate": 1.3902195302273779e-05, "loss": 0.0, "step": 570 },
+    { "epoch": 1.5207848353841038, "grad_norm": NaN, "learning_rate": 1.3755643338001412e-05, "loss": 0.0, "step": 571 },
+    { "epoch": 1.523445294313269, "grad_norm": NaN, "learning_rate": 1.3609744618661013e-05, "loss": 0.0, "step": 572 },
+    { "epoch": 1.5261057532424345, "grad_norm": NaN, "learning_rate": 1.3464501773837124e-05, "loss": 0.0, "step": 573 },
+    { "epoch": 1.5287662121715995, "grad_norm": NaN, "learning_rate": 1.3319917421293182e-05, "loss": 0.0, "step": 574 },
+    { "epoch": 1.531426671100765, "grad_norm": NaN, "learning_rate": 1.3175994166924394e-05, "loss": 0.0, "step": 575 },
+    { "epoch": 1.5340871300299301, "grad_norm": NaN, "learning_rate": 1.3032734604710783e-05, "loss": 0.0, "step": 576 },
+    { "epoch": 1.5367475889590954, "grad_norm": NaN, "learning_rate": 1.289014131667039e-05, "loss": 0.0, "step": 577 },
+    { "epoch": 1.5394080478882608, "grad_norm": NaN, "learning_rate": 1.2748216872812745e-05, "loss": 0.0, "step": 578 },
+    { "epoch": 1.542068506817426, "grad_norm": NaN, "learning_rate": 1.26069638310926e-05, "loss": 0.0, "step": 579 },
+    { "epoch": 1.5447289657465912, "grad_norm": NaN, "learning_rate": 1.246638473736378e-05, "loss": 0.0, "step": 580 },
+    { "epoch": 1.5473894246757567, "grad_norm": NaN, "learning_rate": 1.2326482125333284e-05, "loss": 0.0, "step": 581 },
+    { "epoch": 1.5500498836049217, "grad_norm": NaN, "learning_rate": 1.2187258516515642e-05, "loss": 0.0, "step": 582 },
+    { "epoch": 1.5527103425340871, "grad_norm": NaN, "learning_rate": 1.2048716420187472e-05, "loss": 0.0, "step": 583 },
+    { "epoch": 1.5553708014632526, "grad_norm": NaN, "learning_rate": 1.191085833334228e-05, "loss": 0.0, "step": 584 },
+    { "epoch": 1.5580312603924176, "grad_norm": NaN, "learning_rate": 1.1773686740645384e-05, "loss": 0.0, "step": 585 },
+    { "epoch": 1.560691719321583, "grad_norm": NaN, "learning_rate": 1.1637204114389177e-05, "loss": 0.0, "step": 586 },
+    { "epoch": 1.5633521782507482, "grad_norm": NaN, "learning_rate": 1.1501412914448595e-05, "loss": 0.0, "step": 587 },
+    { "epoch": 1.5660126371799135, "grad_norm": NaN, "learning_rate": 1.1366315588236742e-05, "loss": 0.0, "step": 588 },
+    { "epoch": 1.568673096109079, "grad_norm": NaN, "learning_rate": 1.1231914570660774e-05, "loss": 0.0, "step": 589 },
+    { "epoch": 1.5713335550382441, "grad_norm": NaN, "learning_rate": 1.1098212284078036e-05, "loss": 0.0, "step": 590 },
+    { "epoch": 1.5739940139674093, "grad_norm": NaN, "learning_rate": 1.0965211138252373e-05, "loss": 0.0, "step": 591 },
+    { "epoch": 1.5766544728965748, "grad_norm": NaN, "learning_rate": 1.0832913530310784e-05, "loss": 0.0, "step": 592 },
+    { "epoch": 1.5793149318257398, "grad_norm": NaN, "learning_rate": 1.0701321844700096e-05, "loss": 0.0, "step": 593 },
+    { "epoch": 1.5819753907549052, "grad_norm": NaN, "learning_rate": 1.0570438453144043e-05, "loss": 0.0, "step": 594 },
+    { "epoch": 1.5846358496840705, "grad_norm": NaN, "learning_rate": 1.0440265714600572e-05, "loss": 0.0, "step": 595 },
+    { "epoch": 1.5872963086132357, "grad_norm": NaN, "learning_rate": 1.0310805975219256e-05, "loss": 0.0, "step": 596 },
+    { "epoch": 1.5899567675424011, "grad_norm": NaN, "learning_rate": 1.0182061568299018e-05, "loss": 0.0, "step": 597 },
+    { "epoch": 1.5926172264715663, "grad_norm": NaN, "learning_rate": 1.0054034814246094e-05, "loss": 0.0, "step": 598 },
+    { "epoch": 1.5952776854007316, "grad_norm": NaN, "learning_rate": 9.926728020532195e-06, "loss": 0.0, "step": 599 },
+    { "epoch": 1.597938144329897, "grad_norm": NaN, "learning_rate": 9.800143481652979e-06, "loss": 0.0, "step": 600 },
+    { "epoch": 1.6005986032590622, "grad_norm": NaN, "learning_rate": 9.674283479086599e-06, "loss": 0.0, "step": 601 },
+    { "epoch": 1.6032590621882274, "grad_norm": NaN, "learning_rate": 9.549150281252633e-06, "loss": 0.0, "step": 602 },
+    { "epoch": 1.605919521117393, "grad_norm": NaN, "learning_rate": 9.42474614347123e-06, "loss": 0.0, "step": 603 },
+    { "epoch": 1.608579980046558, "grad_norm": NaN, "learning_rate": 9.30107330792243e-06, "loss": 0.0, "step": 604 },
+    { "epoch": 1.6112404389757233, "grad_norm": NaN, "learning_rate": 9.17813400360572e-06, "loss": 0.0, "step": 605 },
+    { "epoch": 1.6139008979048886, "grad_norm": NaN, "learning_rate": 9.055930446299915e-06, "loss": 0.0, "step": 606 },
+    { "epoch": 1.6165613568340538, "grad_norm": NaN, "learning_rate": 8.934464838523182e-06, "loss": 0.0, "step": 607 },
+    { "epoch": 1.6192218157632192, "grad_norm": NaN, "learning_rate": 8.813739369493395e-06, "loss": 0.0, "step": 608 },
+    { "epoch": 1.6218822746923844, "grad_norm": NaN, "learning_rate": 8.693756215088617e-06, "loss": 0.0, "step": 609 },
+    { "epoch": 1.6245427336215497, "grad_norm": NaN, "learning_rate": 8.574517537807897e-06, "loss": 0.0, "step": 610 },
+    { "epoch": 1.627203192550715, "grad_norm": NaN, "learning_rate": 8.45602548673235e-06, "loss": 0.0, "step": 611 },
+    { "epoch": 1.6298636514798803, "grad_norm": NaN, "learning_rate": 8.338282197486364e-06, "loss": 0.0, "step": 612 },
+    { "epoch": 1.6325241104090455, "grad_norm": NaN, "learning_rate": 8.221289792199116e-06, "loss": 0.0, "step": 613 },
+    { "epoch": 1.635184569338211, "grad_norm": NaN, "learning_rate": 8.105050379466333e-06, "loss": 0.0, "step": 614 },
+    { "epoch": 1.637845028267376, "grad_norm": NaN, "learning_rate": 7.989566054312287e-06, "loss": 0.0, "step": 615 },
+    { "epoch": 1.6405054871965414, "grad_norm": NaN, "learning_rate": 7.87483889815207e-06, "loss": 0.0, "step": 616 },
+    { "epoch": 1.6431659461257067, "grad_norm": NaN, "learning_rate": 7.760870978754014e-06, "loss": 0.0, "step": 617 },
+    { "epoch": 1.6458264050548719, "grad_norm": NaN, "learning_rate": 7.64766435020246e-06, "loss": 0.0, "step": 618 },
+    { "epoch": 1.6484868639840373, "grad_norm": NaN, "learning_rate": 7.535221052860747e-06, "loss": 0.0, "step": 619 },
+    { "epoch": 1.6511473229132025, "grad_norm": NaN, "learning_rate": 7.423543113334436e-06, "loss": 0.0, "step": 620 },
+    { "epoch": 1.6538077818423678,
|
4361 |
+
"grad_norm": NaN,
|
4362 |
+
"learning_rate": 7.312632544434738e-06,
|
4363 |
+
"loss": 0.0,
|
4364 |
+
"step": 621
|
4365 |
+
},
|
4366 |
+
{
|
4367 |
+
"epoch": 1.6564682407715332,
|
4368 |
+
"grad_norm": NaN,
|
4369 |
+
"learning_rate": 7.2024913451422875e-06,
|
4370 |
+
"loss": 0.0,
|
4371 |
+
"step": 622
|
4372 |
+
},
|
4373 |
+
{
|
4374 |
+
"epoch": 1.6591286997006982,
|
4375 |
+
"grad_norm": NaN,
|
4376 |
+
"learning_rate": 7.093121500571082e-06,
|
4377 |
+
"loss": 0.0,
|
4378 |
+
"step": 623
|
4379 |
+
},
|
4380 |
+
{
|
4381 |
+
"epoch": 1.6617891586298636,
|
4382 |
+
"grad_norm": NaN,
|
4383 |
+
"learning_rate": 6.984524981932755e-06,
|
4384 |
+
"loss": 0.0,
|
4385 |
+
"step": 624
|
4386 |
+
},
|
4387 |
+
{
|
4388 |
+
"epoch": 1.664449617559029,
|
4389 |
+
"grad_norm": NaN,
|
4390 |
+
"learning_rate": 6.876703746500984e-06,
|
4391 |
+
"loss": 0.0,
|
4392 |
+
"step": 625
|
4393 |
+
},
|
4394 |
+
{
|
4395 |
+
"epoch": 1.667110076488194,
|
4396 |
+
"grad_norm": NaN,
|
4397 |
+
"learning_rate": 6.769659737576229e-06,
|
4398 |
+
"loss": 0.0,
|
4399 |
+
"step": 626
|
4400 |
+
},
|
4401 |
+
{
|
4402 |
+
"epoch": 1.6697705354173595,
|
4403 |
+
"grad_norm": NaN,
|
4404 |
+
"learning_rate": 6.663394884450752e-06,
|
4405 |
+
"loss": 0.0,
|
4406 |
+
"step": 627
|
4407 |
+
},
|
4408 |
+
{
|
4409 |
+
"epoch": 1.6724309943465248,
|
4410 |
+
"grad_norm": NaN,
|
4411 |
+
"learning_rate": 6.557911102373809e-06,
|
4412 |
+
"loss": 0.0,
|
4413 |
+
"step": 628
|
4414 |
+
},
|
4415 |
+
{
|
4416 |
+
"epoch": 1.67509145327569,
|
4417 |
+
"grad_norm": NaN,
|
4418 |
+
"learning_rate": 6.453210292517114e-06,
|
4419 |
+
"loss": 0.0,
|
4420 |
+
"step": 629
|
4421 |
+
},
|
4422 |
+
{
|
4423 |
+
"epoch": 1.6777519122048554,
|
4424 |
+
"grad_norm": NaN,
|
4425 |
+
"learning_rate": 6.349294341940593e-06,
|
4426 |
+
"loss": 0.0,
|
4427 |
+
"step": 630
|
4428 |
+
},
|
4429 |
+
{
|
4430 |
+
"epoch": 1.6804123711340206,
|
4431 |
+
"grad_norm": NaN,
|
4432 |
+
"learning_rate": 6.246165123558401e-06,
|
4433 |
+
"loss": 0.0,
|
4434 |
+
"step": 631
|
4435 |
+
},
|
4436 |
+
{
|
4437 |
+
"epoch": 1.6830728300631859,
|
4438 |
+
"grad_norm": NaN,
|
4439 |
+
"learning_rate": 6.143824496105121e-06,
|
4440 |
+
"loss": 0.0,
|
4441 |
+
"step": 632
|
4442 |
+
},
|
4443 |
+
{
|
4444 |
+
"epoch": 1.6857332889923513,
|
4445 |
+
"grad_norm": NaN,
|
4446 |
+
"learning_rate": 6.04227430410228e-06,
|
4447 |
+
"loss": 0.0,
|
4448 |
+
"step": 633
|
4449 |
+
},
|
4450 |
+
{
|
4451 |
+
"epoch": 1.6883937479215163,
|
4452 |
+
"grad_norm": NaN,
|
4453 |
+
"learning_rate": 5.941516377825102e-06,
|
4454 |
+
"loss": 0.0,
|
4455 |
+
"step": 634
|
4456 |
+
},
|
4457 |
+
{
|
4458 |
+
"epoch": 1.6910542068506818,
|
4459 |
+
"grad_norm": NaN,
|
4460 |
+
"learning_rate": 5.841552533269534e-06,
|
4461 |
+
"loss": 0.0,
|
4462 |
+
"step": 635
|
4463 |
+
},
|
4464 |
+
{
|
4465 |
+
"epoch": 1.693714665779847,
|
4466 |
+
"grad_norm": NaN,
|
4467 |
+
"learning_rate": 5.742384572119519e-06,
|
4468 |
+
"loss": 0.0,
|
4469 |
+
"step": 636
|
4470 |
+
},
|
4471 |
+
{
|
4472 |
+
"epoch": 1.6963751247090122,
|
4473 |
+
"grad_norm": NaN,
|
4474 |
+
"learning_rate": 5.6440142817144826e-06,
|
4475 |
+
"loss": 0.0,
|
4476 |
+
"step": 637
|
4477 |
+
},
|
4478 |
+
{
|
4479 |
+
"epoch": 1.6990355836381776,
|
4480 |
+
"grad_norm": NaN,
|
4481 |
+
"learning_rate": 5.546443435017146e-06,
|
4482 |
+
"loss": 0.0,
|
4483 |
+
"step": 638
|
4484 |
+
},
|
4485 |
+
{
|
4486 |
+
"epoch": 1.7016960425673429,
|
4487 |
+
"grad_norm": NaN,
|
4488 |
+
"learning_rate": 5.449673790581611e-06,
|
4489 |
+
"loss": 0.0,
|
4490 |
+
"step": 639
|
4491 |
+
},
|
4492 |
+
{
|
4493 |
+
"epoch": 1.704356501496508,
|
4494 |
+
"grad_norm": NaN,
|
4495 |
+
"learning_rate": 5.353707092521582e-06,
|
4496 |
+
"loss": 0.0,
|
4497 |
+
"step": 640
|
4498 |
+
},
|
4499 |
+
{
|
4500 |
+
"epoch": 1.7070169604256735,
|
4501 |
+
"grad_norm": NaN,
|
4502 |
+
"learning_rate": 5.258545070478999e-06,
|
4503 |
+
"loss": 0.0,
|
4504 |
+
"step": 641
|
4505 |
+
},
|
4506 |
+
{
|
4507 |
+
"epoch": 1.7096774193548387,
|
4508 |
+
"grad_norm": NaN,
|
4509 |
+
"learning_rate": 5.1641894395928205e-06,
|
4510 |
+
"loss": 0.0,
|
4511 |
+
"step": 642
|
4512 |
+
},
|
4513 |
+
{
|
4514 |
+
"epoch": 1.712337878284004,
|
4515 |
+
"grad_norm": NaN,
|
4516 |
+
"learning_rate": 5.0706419004681485e-06,
|
4517 |
+
"loss": 0.0,
|
4518 |
+
"step": 643
|
4519 |
+
},
|
4520 |
+
{
|
4521 |
+
"epoch": 1.7149983372131694,
|
4522 |
+
"grad_norm": NaN,
|
4523 |
+
"learning_rate": 4.977904139145578e-06,
|
4524 |
+
"loss": 0.0,
|
4525 |
+
"step": 644
|
4526 |
+
},
|
4527 |
+
{
|
4528 |
+
"epoch": 1.7176587961423344,
|
4529 |
+
"grad_norm": NaN,
|
4530 |
+
"learning_rate": 4.885977827070748e-06,
|
4531 |
+
"loss": 0.0,
|
4532 |
+
"step": 645
|
4533 |
+
},
|
4534 |
+
{
|
4535 |
+
"epoch": 1.7203192550714999,
|
4536 |
+
"grad_norm": NaN,
|
4537 |
+
"learning_rate": 4.794864621064266e-06,
|
4538 |
+
"loss": 0.0,
|
4539 |
+
"step": 646
|
4540 |
+
},
|
4541 |
+
{
|
4542 |
+
"epoch": 1.722979714000665,
|
4543 |
+
"grad_norm": NaN,
|
4544 |
+
"learning_rate": 4.704566163291879e-06,
|
4545 |
+
"loss": 0.0,
|
4546 |
+
"step": 647
|
4547 |
+
},
|
4548 |
+
{
|
4549 |
+
"epoch": 1.7256401729298303,
|
4550 |
+
"grad_norm": NaN,
|
4551 |
+
"learning_rate": 4.6150840812347995e-06,
|
4552 |
+
"loss": 0.0,
|
4553 |
+
"step": 648
|
4554 |
+
},
|
4555 |
+
{
|
4556 |
+
"epoch": 1.7283006318589957,
|
4557 |
+
"grad_norm": NaN,
|
4558 |
+
"learning_rate": 4.526419987660418e-06,
|
4559 |
+
"loss": 0.0,
|
4560 |
+
"step": 649
|
4561 |
+
},
|
4562 |
+
{
|
4563 |
+
"epoch": 1.730961090788161,
|
4564 |
+
"grad_norm": NaN,
|
4565 |
+
"learning_rate": 4.43857548059321e-06,
|
4566 |
+
"loss": 0.0,
|
4567 |
+
"step": 650
|
4568 |
+
},
|
4569 |
+
{
|
4570 |
+
"epoch": 1.7336215497173262,
|
4571 |
+
"grad_norm": NaN,
|
4572 |
+
"learning_rate": 4.351552143286014e-06,
|
4573 |
+
"loss": 0.0,
|
4574 |
+
"step": 651
|
4575 |
+
},
|
4576 |
+
{
|
4577 |
+
"epoch": 1.7362820086464916,
|
4578 |
+
"grad_norm": NaN,
|
4579 |
+
"learning_rate": 4.265351544191365e-06,
|
4580 |
+
"loss": 0.0,
|
4581 |
+
"step": 652
|
4582 |
+
},
|
4583 |
+
{
|
4584 |
+
"epoch": 1.7389424675756568,
|
4585 |
+
"grad_norm": NaN,
|
4586 |
+
"learning_rate": 4.179975236933331e-06,
|
4587 |
+
"loss": 0.0,
|
4588 |
+
"step": 653
|
4589 |
+
},
|
4590 |
+
{
|
4591 |
+
"epoch": 1.741602926504822,
|
4592 |
+
"grad_norm": NaN,
|
4593 |
+
"learning_rate": 4.095424760279454e-06,
|
4594 |
+
"loss": 0.0,
|
4595 |
+
"step": 654
|
4596 |
+
},
|
4597 |
+
{
|
4598 |
+
"epoch": 1.7442633854339875,
|
4599 |
+
"grad_norm": NaN,
|
4600 |
+
"learning_rate": 4.011701638113063e-06,
|
4601 |
+
"loss": 0.0,
|
4602 |
+
"step": 655
|
4603 |
+
},
|
4604 |
+
{
|
4605 |
+
"epoch": 1.7469238443631525,
|
4606 |
+
"grad_norm": NaN,
|
4607 |
+
"learning_rate": 3.928807379405764e-06,
|
4608 |
+
"loss": 0.0,
|
4609 |
+
"step": 656
|
4610 |
+
},
|
4611 |
+
{
|
4612 |
+
"epoch": 1.749584303292318,
|
4613 |
+
"grad_norm": NaN,
|
4614 |
+
"learning_rate": 3.8467434781902626e-06,
|
4615 |
+
"loss": 0.0,
|
4616 |
+
"step": 657
|
4617 |
+
},
|
4618 |
+
{
|
4619 |
+
"epoch": 1.7522447622214832,
|
4620 |
+
"grad_norm": NaN,
|
4621 |
+
"learning_rate": 3.765511413533429e-06,
|
4622 |
+
"loss": 0.0,
|
4623 |
+
"step": 658
|
4624 |
+
},
|
4625 |
+
{
|
4626 |
+
"epoch": 1.7549052211506484,
|
4627 |
+
"grad_norm": NaN,
|
4628 |
+
"learning_rate": 3.685112649509681e-06,
|
4629 |
+
"loss": 0.0,
|
4630 |
+
"step": 659
|
4631 |
+
},
|
4632 |
+
{
|
4633 |
+
"epoch": 1.7575656800798138,
|
4634 |
+
"grad_norm": NaN,
|
4635 |
+
"learning_rate": 3.605548635174533e-06,
|
4636 |
+
"loss": 0.0,
|
4637 |
+
"step": 660
|
4638 |
+
},
|
4639 |
+
{
|
4640 |
+
"epoch": 1.760226139008979,
|
4641 |
+
"grad_norm": NaN,
|
4642 |
+
"learning_rate": 3.5268208045385164e-06,
|
4643 |
+
"loss": 0.0,
|
4644 |
+
"step": 661
|
4645 |
+
},
|
4646 |
+
{
|
4647 |
+
"epoch": 1.7628865979381443,
|
4648 |
+
"grad_norm": NaN,
|
4649 |
+
"learning_rate": 3.448930576541309e-06,
|
4650 |
+
"loss": 0.0,
|
4651 |
+
"step": 662
|
4652 |
+
},
|
4653 |
+
{
|
4654 |
+
"epoch": 1.7655470568673097,
|
4655 |
+
"grad_norm": NaN,
|
4656 |
+
"learning_rate": 3.3718793550262195e-06,
|
4657 |
+
"loss": 0.0,
|
4658 |
+
"step": 663
|
4659 |
+
},
|
4660 |
+
{
|
4661 |
+
"epoch": 1.7682075157964747,
|
4662 |
+
"grad_norm": NaN,
|
4663 |
+
"learning_rate": 3.2956685287148016e-06,
|
4664 |
+
"loss": 0.0,
|
4665 |
+
"step": 664
|
4666 |
+
},
|
4667 |
+
{
|
4668 |
+
"epoch": 1.7708679747256402,
|
4669 |
+
"grad_norm": NaN,
|
4670 |
+
"learning_rate": 3.220299471181898e-06,
|
4671 |
+
"loss": 0.0,
|
4672 |
+
"step": 665
|
4673 |
+
},
|
4674 |
+
{
|
4675 |
+
"epoch": 1.7735284336548056,
|
4676 |
+
"grad_norm": NaN,
|
4677 |
+
"learning_rate": 3.1457735408308154e-06,
|
4678 |
+
"loss": 0.0,
|
4679 |
+
"step": 666
|
4680 |
+
},
|
4681 |
+
{
|
4682 |
+
"epoch": 1.7761888925839706,
|
4683 |
+
"grad_norm": NaN,
|
4684 |
+
"learning_rate": 3.0720920808689434e-06,
|
4685 |
+
"loss": 0.0,
|
4686 |
+
"step": 667
|
4687 |
+
},
|
4688 |
+
{
|
4689 |
+
"epoch": 1.778849351513136,
|
4690 |
+
"grad_norm": NaN,
|
4691 |
+
"learning_rate": 2.999256419283425e-06,
|
4692 |
+
"loss": 0.0,
|
4693 |
+
"step": 668
|
4694 |
+
},
|
4695 |
+
{
|
4696 |
+
"epoch": 1.7815098104423013,
|
4697 |
+
"grad_norm": NaN,
|
4698 |
+
"learning_rate": 2.9272678688172905e-06,
|
4699 |
+
"loss": 0.0,
|
4700 |
+
"step": 669
|
4701 |
+
},
|
4702 |
+
{
|
4703 |
+
"epoch": 1.7841702693714665,
|
4704 |
+
"grad_norm": NaN,
|
4705 |
+
"learning_rate": 2.85612772694579e-06,
|
4706 |
+
"loss": 0.0,
|
4707 |
+
"step": 670
|
4708 |
+
},
|
4709 |
+
{
|
4710 |
+
"epoch": 1.786830728300632,
|
4711 |
+
"grad_norm": NaN,
|
4712 |
+
"learning_rate": 2.7858372758530205e-06,
|
4713 |
+
"loss": 0.0,
|
4714 |
+
"step": 671
|
4715 |
+
},
|
4716 |
+
{
|
4717 |
+
"epoch": 1.7894911872297972,
|
4718 |
+
"grad_norm": NaN,
|
4719 |
+
"learning_rate": 2.7163977824087694e-06,
|
4720 |
+
"loss": 0.0,
|
4721 |
+
"step": 672
|
4722 |
+
},
|
4723 |
+
{
|
4724 |
+
"epoch": 1.7921516461589624,
|
4725 |
+
"grad_norm": NaN,
|
4726 |
+
"learning_rate": 2.6478104981457253e-06,
|
4727 |
+
"loss": 0.0,
|
4728 |
+
"step": 673
|
4729 |
+
},
|
4730 |
+
{
|
4731 |
+
"epoch": 1.7948121050881278,
|
4732 |
+
"grad_norm": NaN,
|
4733 |
+
"learning_rate": 2.5800766592369073e-06,
|
4734 |
+
"loss": 0.0,
|
4735 |
+
"step": 674
|
4736 |
+
},
|
4737 |
+
{
|
4738 |
+
"epoch": 1.7974725640172928,
|
4739 |
+
"grad_norm": NaN,
|
4740 |
+
"learning_rate": 2.5131974864734066e-06,
|
4741 |
+
"loss": 0.0,
|
4742 |
+
"step": 675
|
4743 |
+
},
|
4744 |
+
{
|
4745 |
+
"epoch": 1.8001330229464583,
|
4746 |
+
"grad_norm": NaN,
|
4747 |
+
"learning_rate": 2.4471741852423237e-06,
|
4748 |
+
"loss": 0.0,
|
4749 |
+
"step": 676
|
4750 |
+
},
|
4751 |
+
{
|
4752 |
+
"epoch": 1.8027934818756235,
|
4753 |
+
"grad_norm": NaN,
|
4754 |
+
"learning_rate": 2.382007945505105e-06,
|
4755 |
+
"loss": 0.0,
|
4756 |
+
"step": 677
|
4757 |
+
},
|
4758 |
+
{
|
4759 |
+
"epoch": 1.8054539408047887,
|
4760 |
+
"grad_norm": NaN,
|
4761 |
+
"learning_rate": 2.317699941776064e-06,
|
4762 |
+
"loss": 0.0,
|
4763 |
+
"step": 678
|
4764 |
+
},
|
4765 |
+
{
|
4766 |
+
"epoch": 1.8081143997339542,
|
4767 |
+
"grad_norm": NaN,
|
4768 |
+
"learning_rate": 2.254251333101237e-06,
|
4769 |
+
"loss": 0.0,
|
4770 |
+
"step": 679
|
4771 |
+
},
|
4772 |
+
{
|
4773 |
+
"epoch": 1.8107748586631194,
|
4774 |
+
"grad_norm": NaN,
|
4775 |
+
"learning_rate": 2.191663263037458e-06,
|
4776 |
+
"loss": 0.0,
|
4777 |
+
"step": 680
|
4778 |
+
},
|
4779 |
+
{
|
4780 |
+
"epoch": 1.8134353175922846,
|
4781 |
+
"grad_norm": NaN,
|
4782 |
+
"learning_rate": 2.1299368596317702e-06,
|
4783 |
+
"loss": 0.0,
|
4784 |
+
"step": 681
|
4785 |
+
},
|
4786 |
+
{
|
4787 |
+
"epoch": 1.81609577652145,
|
4788 |
+
"grad_norm": NaN,
|
4789 |
+
"learning_rate": 2.069073235401109e-06,
|
4790 |
+
"loss": 0.0,
|
4791 |
+
"step": 682
|
4792 |
+
},
|
4793 |
+
{
|
4794 |
+
"epoch": 1.8187562354506153,
|
4795 |
+
"grad_norm": NaN,
|
4796 |
+
"learning_rate": 2.009073487312224e-06,
|
4797 |
+
"loss": 0.0,
|
4798 |
+
"step": 683
|
4799 |
+
},
|
4800 |
+
{
|
4801 |
+
"epoch": 1.8214166943797805,
|
4802 |
+
"grad_norm": NaN,
|
4803 |
+
"learning_rate": 1.9499386967619103e-06,
|
4804 |
+
"loss": 0.0,
|
4805 |
+
"step": 684
|
4806 |
+
},
|
4807 |
+
{
|
4808 |
+
"epoch": 1.824077153308946,
|
4809 |
+
"grad_norm": NaN,
|
4810 |
+
"learning_rate": 1.8916699295575324e-06,
|
4811 |
+
"loss": 0.0,
|
4812 |
+
"step": 685
|
4813 |
+
},
|
4814 |
+
{
|
4815 |
+
"epoch": 1.826737612238111,
|
4816 |
+
"grad_norm": NaN,
|
4817 |
+
"learning_rate": 1.8342682358978069e-06,
|
4818 |
+
"loss": 0.0,
|
4819 |
+
"step": 686
|
4820 |
+
},
|
4821 |
+
{
|
4822 |
+
"epoch": 1.8293980711672764,
|
4823 |
+
"grad_norm": NaN,
|
4824 |
+
"learning_rate": 1.7777346503538794e-06,
|
4825 |
+
"loss": 0.0,
|
4826 |
+
"step": 687
|
4827 |
+
},
|
4828 |
+
{
|
4829 |
+
"epoch": 1.8320585300964416,
|
4830 |
+
"grad_norm": NaN,
|
4831 |
+
"learning_rate": 1.7220701918506665e-06,
|
4832 |
+
"loss": 0.0,
|
4833 |
+
"step": 688
|
4834 |
+
},
|
4835 |
+
{
|
4836 |
+
"epoch": 1.8347189890256068,
|
4837 |
+
"grad_norm": NaN,
|
4838 |
+
"learning_rate": 1.6672758636485042e-06,
|
4839 |
+
"loss": 0.0,
|
4840 |
+
"step": 689
|
4841 |
+
},
|
4842 |
+
{
|
4843 |
+
"epoch": 1.8373794479547723,
|
4844 |
+
"grad_norm": NaN,
|
4845 |
+
"learning_rate": 1.6133526533250565e-06,
|
4846 |
+
"loss": 0.0,
|
4847 |
+
"step": 690
|
4848 |
+
},
|
4849 |
+
{
|
4850 |
+
"epoch": 1.8400399068839375,
|
4851 |
+
"grad_norm": NaN,
|
4852 |
+
"learning_rate": 1.5603015327575354e-06,
|
4853 |
+
"loss": 0.0,
|
4854 |
+
"step": 691
|
4855 |
+
},
|
4856 |
+
{
|
4857 |
+
"epoch": 1.8427003658131027,
|
4858 |
+
"grad_norm": NaN,
|
4859 |
+
"learning_rate": 1.5081234581051485e-06,
|
4860 |
+
"loss": 0.0,
|
4861 |
+
"step": 692
|
4862 |
+
},
|
4863 |
+
{
|
4864 |
+
"epoch": 1.8453608247422681,
|
4865 |
+
"grad_norm": NaN,
|
4866 |
+
"learning_rate": 1.4568193697918841e-06,
|
4867 |
+
"loss": 0.0,
|
4868 |
+
"step": 693
|
4869 |
+
},
|
4870 |
+
{
|
4871 |
+
"epoch": 1.8480212836714334,
|
4872 |
+
"grad_norm": NaN,
|
4873 |
+
"learning_rate": 1.4063901924895984e-06,
|
4874 |
+
"loss": 0.0,
|
4875 |
+
"step": 694
|
4876 |
+
},
|
4877 |
+
{
|
4878 |
+
"epoch": 1.8506817426005986,
|
4879 |
+
"grad_norm": NaN,
|
4880 |
+
"learning_rate": 1.3568368351012717e-06,
|
4881 |
+
"loss": 0.0,
|
4882 |
+
"step": 695
|
4883 |
+
},
|
4884 |
+
{
|
4885 |
+
"epoch": 1.853342201529764,
|
4886 |
+
"grad_norm": NaN,
|
4887 |
+
"learning_rate": 1.3081601907447006e-06,
|
4888 |
+
"loss": 0.0,
|
4889 |
+
"step": 696
|
4890 |
+
},
|
4891 |
+
{
|
4892 |
+
"epoch": 1.856002660458929,
|
4893 |
+
"grad_norm": NaN,
|
4894 |
+
"learning_rate": 1.260361136736349e-06,
|
4895 |
+
"loss": 0.0,
|
4896 |
+
"step": 697
|
4897 |
+
},
|
4898 |
+
{
|
4899 |
+
"epoch": 1.8586631193880945,
|
4900 |
+
"grad_norm": NaN,
|
4901 |
+
"learning_rate": 1.2134405345755773e-06,
|
4902 |
+
"loss": 0.0,
|
4903 |
+
"step": 698
|
4904 |
+
},
|
4905 |
+
{
|
4906 |
+
"epoch": 1.8613235783172597,
|
4907 |
+
"grad_norm": NaN,
|
4908 |
+
"learning_rate": 1.1673992299290892e-06,
|
4909 |
+
"loss": 0.0,
|
4910 |
+
"step": 699
|
4911 |
+
},
|
4912 |
+
{
|
4913 |
+
"epoch": 1.863984037246425,
|
4914 |
+
"grad_norm": NaN,
|
4915 |
+
"learning_rate": 1.1222380526156928e-06,
|
4916 |
+
"loss": 0.0,
|
4917 |
+
"step": 700
|
4918 |
+
},
|
4919 |
+
{
|
4920 |
+
"epoch": 1.8666444961755904,
|
4921 |
+
"grad_norm": NaN,
|
4922 |
+
"learning_rate": 1.0779578165913463e-06,
|
4923 |
+
"loss": 0.0,
|
4924 |
+
"step": 701
|
4925 |
+
},
|
4926 |
+
{
|
4927 |
+
"epoch": 1.8693049551047556,
|
4928 |
+
"grad_norm": NaN,
|
4929 |
+
"learning_rate": 1.034559319934497e-06,
|
4930 |
+
"loss": 0.0,
|
4931 |
+
"step": 702
|
4932 |
+
},
|
4933 |
+
{
|
4934 |
+
"epoch": 1.8719654140339208,
|
4935 |
+
"grad_norm": NaN,
|
4936 |
+
"learning_rate": 9.92043344831689e-07,
|
4937 |
+
"loss": 0.0,
|
4938 |
+
"step": 703
|
4939 |
+
},
|
4940 |
+
{
|
4941 |
+
"epoch": 1.8746258729630862,
|
4942 |
+
"grad_norm": NaN,
|
4943 |
+
"learning_rate": 9.504106575634664e-07,
|
4944 |
+
"loss": 0.0,
|
4945 |
+
"step": 704
|
4946 |
+
},
|
4947 |
+
{
|
4948 |
+
"epoch": 1.8772863318922512,
|
4949 |
+
"grad_norm": NaN,
|
4950 |
+
"learning_rate": 9.096620084905472e-07,
|
4951 |
+
"loss": 0.0,
|
4952 |
+
"step": 705
|
4953 |
+
},
|
4954 |
+
{
|
4955 |
+
"epoch": 1.8799467908214167,
|
4956 |
+
"grad_norm": NaN,
|
4957 |
+
"learning_rate": 8.697981320403337e-07,
|
4958 |
+
"loss": 0.0,
|
4959 |
+
"step": 706
|
4960 |
+
},
|
4961 |
+
{
|
4962 |
+
"epoch": 1.8826072497505821,
|
4963 |
+
"grad_norm": NaN,
|
4964 |
+
"learning_rate": 8.308197466936618e-07,
|
4965 |
+
"loss": 0.0,
|
4966 |
+
"step": 707
|
4967 |
+
},
|
4968 |
+
{
|
4969 |
+
"epoch": 1.8852677086797471,
|
4970 |
+
"grad_norm": NaN,
|
4971 |
+
"learning_rate": 7.927275549718228e-07,
|
4972 |
+
"loss": 0.0,
|
4973 |
+
"step": 708
|
4974 |
+
},
|
4975 |
+
{
|
4976 |
+
"epoch": 1.8879281676089126,
|
4977 |
+
"grad_norm": NaN,
|
4978 |
+
"learning_rate": 7.555222434239395e-07,
|
4979 |
+
"loss": 0.0,
|
4980 |
+
"step": 709
|
4981 |
+
},
|
4982 |
+
{
|
4983 |
+
"epoch": 1.8905886265380778,
|
4984 |
+
"grad_norm": NaN,
|
4985 |
+
"learning_rate": 7.192044826145771e-07,
|
4986 |
+
"loss": 0.0,
|
4987 |
+
"step": 710
|
4988 |
+
},
|
4989 |
+
{
|
4990 |
+
"epoch": 1.893249085467243,
|
4991 |
+
"grad_norm": NaN,
|
4992 |
+
"learning_rate": 6.837749271116578e-07,
|
4993 |
+
"loss": 0.0,
|
4994 |
+
"step": 711
|
4995 |
+
},
|
4996 |
+
{
|
4997 |
+
"epoch": 1.8959095443964085,
|
4998 |
+
"grad_norm": NaN,
|
4999 |
+
"learning_rate": 6.492342154746589e-07,
|
5000 |
+
"loss": 0.0,
|
5001 |
+
"step": 712
|
5002 |
+
},
|
5003 |
+
{
|
5004 |
+
"epoch": 1.8985700033255737,
|
5005 |
+
"grad_norm": NaN,
|
5006 |
+
"learning_rate": 6.15582970243117e-07,
|
5007 |
+
"loss": 0.0,
|
5008 |
+
"step": 713
|
5009 |
+
},
|
5010 |
+
{
|
5011 |
+
"epoch": 1.901230462254739,
|
5012 |
+
"grad_norm": NaN,
|
5013 |
+
"learning_rate": 5.828217979253869e-07,
|
5014 |
+
"loss": 0.0,
|
5015 |
+
"step": 714
|
5016 |
+
},
|
5017 |
+
{
|
5018 |
+
"epoch": 1.9038909211839044,
|
5019 |
+
"grad_norm": NaN,
|
5020 |
+
"learning_rate": 5.509512889877333e-07,
|
5021 |
+
"loss": 0.0,
|
5022 |
+
"step": 715
|
5023 |
+
},
|
5024 |
+
{
|
5025 |
+
"epoch": 1.9065513801130693,
|
5026 |
+
"grad_norm": NaN,
|
5027 |
+
"learning_rate": 5.19972017843684e-07,
|
5028 |
+
"loss": 0.0,
|
5029 |
+
"step": 716
|
5030 |
+
},
|
5031 |
+
{
|
5032 |
+
"epoch": 1.9092118390422348,
|
5033 |
+
"grad_norm": NaN,
|
5034 |
+
"learning_rate": 4.898845428436494e-07,
|
5035 |
+
"loss": 0.0,
|
5036 |
+
"step": 717
|
5037 |
+
},
|
5038 |
+
{
|
5039 |
+
"epoch": 1.9118722979714,
|
5040 |
+
"grad_norm": NaN,
|
5041 |
+
"learning_rate": 4.60689406264897e-07,
|
5042 |
+
"loss": 0.0,
|
5043 |
+
"step": 718
|
5044 |
+
},
|
5045 |
+
{
|
5046 |
+
"epoch": 1.9145327569005652,
|
5047 |
+
"grad_norm": NaN,
|
5048 |
+
"learning_rate": 4.323871343017649e-07,
|
5049 |
+
"loss": 0.0,
|
5050 |
+
"step": 719
|
5051 |
+
},
|
5052 |
+
{
|
5053 |
+
"epoch": 1.9171932158297307,
|
5054 |
+
"grad_norm": NaN,
|
5055 |
+
"learning_rate": 4.049782370561583e-07,
|
5056 |
+
"loss": 0.0,
|
5057 |
+
"step": 720
|
5058 |
+
},
|
5059 |
+
{
|
5060 |
+
"epoch": 1.919853674758896,
|
5061 |
+
"grad_norm": NaN,
|
5062 |
+
"learning_rate": 3.7846320852838456e-07,
|
5063 |
+
"loss": 0.0,
|
5064 |
+
"step": 721
|
5065 |
+
},
|
5066 |
+
{
|
5067 |
+
"epoch": 1.9225141336880611,
|
5068 |
+
"grad_norm": NaN,
|
5069 |
+
"learning_rate": 3.528425266082325e-07,
|
5070 |
+
"loss": 0.0,
|
5071 |
+
"step": 722
|
5072 |
+
},
|
5073 |
+
{
|
5074 |
+
"epoch": 1.9251745926172266,
|
5075 |
+
"grad_norm": NaN,
|
5076 |
+
"learning_rate": 3.281166530663682e-07,
|
5077 |
+
"loss": 0.0,
|
5078 |
+
"step": 723
|
5079 |
+
},
|
5080 |
+
{
|
5081 |
+
"epoch": 1.9278350515463918,
|
5082 |
+
"grad_norm": NaN,
|
5083 |
+
"learning_rate": 3.042860335460085e-07,
|
5084 |
+
"loss": 0.0,
|
5085 |
+
"step": 724
|
5086 |
+
},
|
5087 |
+
{
|
5088 |
+
"epoch": 1.930495510475557,
|
5089 |
+
"grad_norm": NaN,
|
5090 |
+
"learning_rate": 2.813510975548772e-07,
|
5091 |
+
"loss": 0.0,
|
5092 |
+
"step": 725
|
5093 |
+
},
|
5094 |
+
{
|
5095 |
+
"epoch": 1.9331559694047225,
|
5096 |
+
"grad_norm": NaN,
|
5097 |
+
"learning_rate": 2.593122584574892e-07,
|
5098 |
+
"loss": 0.0,
|
5099 |
+
"step": 726
|
5100 |
+
},
|
5101 |
+
{
|
5102 |
+
"epoch": 1.9358164283338875,
|
5103 |
+
"grad_norm": NaN,
|
5104 |
+
"learning_rate": 2.3816991346767847e-07,
|
5105 |
+
"loss": 0.0,
|
5106 |
+
"step": 727
|
5107 |
+
},
|
5108 |
+
{
|
5109 |
+
"epoch": 1.938476887263053,
|
5110 |
+
"grad_norm": NaN,
|
5111 |
+
"learning_rate": 2.1792444364144847e-07,
|
5112 |
+
"loss": 0.0,
|
5113 |
+
"step": 728
|
5114 |
+
},
|
5115 |
+
{
|
5116 |
+
"epoch": 1.9411373461922181,
|
5117 |
+
"grad_norm": NaN,
|
5118 |
+
"learning_rate": 1.9857621387010527e-07,
|
5119 |
+
"loss": 0.0,
|
5120 |
+
"step": 729
|
5121 |
+
},
|
5122 |
+
{
|
5123 |
+
"epoch": 1.9437978051213833,
|
5124 |
+
"grad_norm": NaN,
|
5125 |
+
"learning_rate": 1.8012557287367392e-07,
|
5126 |
+
"loss": 0.0,
|
5127 |
+
"step": 730
|
5128 |
+
},
|
5129 |
+
{
|
5130 |
+
"epoch": 1.9464582640505488,
|
5131 |
+
"grad_norm": NaN,
|
5132 |
+
"learning_rate": 1.6257285319462578e-07,
|
5133 |
+
"loss": 0.0,
|
5134 |
+
"step": 731
|
5135 |
+
},
|
5136 |
+
{
|
5137 |
+
"epoch": 1.949118722979714,
|
5138 |
+
"grad_norm": NaN,
|
5139 |
+
"learning_rate": 1.4591837119186103e-07,
|
5140 |
+
"loss": 0.0,
|
5141 |
+
"step": 732
|
5142 |
+
},
|
5143 |
+
{
|
5144 |
+
"epoch": 1.9517791819088792,
|
5145 |
+
"grad_norm": NaN,
|
5146 |
+
"learning_rate": 1.3016242703503544e-07,
|
5147 |
+
"loss": 0.0,
|
5148 |
+
"step": 733
|
5149 |
+
},
|
5150 |
+
{
|
5151 |
+
"epoch": 1.9544396408380447,
|
5152 |
+
"grad_norm": NaN,
|
5153 |
+
"learning_rate": 1.1530530469914259e-07,
|
5154 |
+
"loss": 0.0,
|
5155 |
+
"step": 734
|
5156 |
+
},
|
5157 |
+
{
|
5158 |
+
"epoch": 1.9571000997672099,
|
5159 |
+
"grad_norm": NaN,
|
5160 |
+
"learning_rate": 1.0134727195937333e-07,
|
5161 |
+
"loss": 0.0,
|
5162 |
+
"step": 735
|
5163 |
+
},
|
5164 |
+
{
|
5165 |
+
"epoch": 1.959760558696375,
|
5166 |
+
"grad_norm": NaN,
|
5167 |
+
"learning_rate": 8.828858038632536e-08,
|
5168 |
+
"loss": 0.0,
|
5169 |
+
"step": 736
|
5170 |
+
},
|
5171 |
+
{
|
5172 |
+
"epoch": 1.9624210176255406,
|
5173 |
+
"grad_norm": NaN,
|
5174 |
+
"learning_rate": 7.612946534143461e-08,
|
5175 |
+
"loss": 0.0,
|
5176 |
+
"step": 737
|
5177 |
+
},
|
5178 |
+
{
|
5179 |
+
"epoch": 1.9650814765547056,
|
5180 |
+
"grad_norm": NaN,
|
5181 |
+
"learning_rate": 6.487014597275631e-08,
|
5182 |
+
"loss": 0.0,
|
5183 |
+
"step": 738
|
5184 |
+
},
|
5185 |
+
{
|
5186 |
+
"epoch": 1.967741935483871,
|
5187 |
+
"grad_norm": NaN,
|
5188 |
+
"learning_rate": 5.4510825211012694e-08,
|
5189 |
+
"loss": 0.0,
|
5190 |
+
"step": 739
|
5191 |
+
},
|
5192 |
+
{
|
5193 |
+
"epoch": 1.9704023944130362,
|
5194 |
+
"grad_norm": NaN,
|
5195 |
+
"learning_rate": 4.5051689765929214e-08,
|
5196 |
+
"loss": 0.0,
|
5197 |
+
"step": 740
|
5198 |
+
},
|
5199 |
+
{
|
5200 |
+
"epoch": 1.9730628533422014,
|
5201 |
+
"grad_norm": NaN,
|
5202 |
+
"learning_rate": 3.6492910122859494e-08,
|
5203 |
+
"loss": 0.0,
|
5204 |
+
"step": 741
|
5205 |
+
},
|
5206 |
+
{
|
5207 |
+
"epoch": 1.9757233122713669,
|
5208 |
+
"grad_norm": NaN,
|
5209 |
+
"learning_rate": 2.8834640539737723e-08,
|
5210 |
+
"loss": 0.0,
|
5211 |
+
"step": 742
|
5212 |
+
},
|
5213 |
+
{
|
5214 |
+
"epoch": 1.978383771200532,
|
5215 |
+
"grad_norm": NaN,
|
5216 |
+
"learning_rate": 2.2077019044280943e-08,
|
5217 |
+
"loss": 0.0,
|
5218 |
+
"step": 743
|
5219 |
+
},
|
5220 |
+
{
|
5221 |
+
"epoch": 1.9810442301296973,
|
5222 |
+
"grad_norm": NaN,
|
5223 |
+
"learning_rate": 1.622016743150212e-08,
|
5224 |
+
"loss": 0.0,
|
5225 |
+
"step": 744
|
5226 |
+
},
|
5227 |
+
{
|
5228 |
+
"epoch": 1.9837046890588628,
|
5229 |
+
"grad_norm": NaN,
|
5230 |
+
"learning_rate": 1.1264191261528557e-08,
|
5231 |
+
"loss": 0.0,
|
5232 |
+
"step": 745
|
5233 |
+
},
|
5234 |
+
{
|
5235 |
+
"epoch": 1.9863651479880278,
|
5236 |
+
"grad_norm": NaN,
|
5237 |
+
"learning_rate": 7.209179857675663e-09,
|
5238 |
+
"loss": 0.0,
|
5239 |
+
"step": 746
|
5240 |
+
},
|
5241 |
+
{
|
5242 |
+
"epoch": 1.9890256069171932,
|
5243 |
+
"grad_norm": NaN,
|
5244 |
+
"learning_rate": 4.055206304859338e-09,
|
5245 |
+
"loss": 0.0,
|
5246 |
+
"step": 747
|
5247 |
+
},
|
5248 |
+
{
|
5249 |
+
"epoch": 1.9916860658463587,
|
5250 |
+
"grad_norm": NaN,
|
5251 |
+
"learning_rate": 1.8023274482636965e-09,
|
5252 |
+
"loss": 0.0,
|
5253 |
+
"step": 748
|
5254 |
+
},
|
5255 |
+
{
|
5256 |
+
"epoch": 1.9943465247755237,
|
5257 |
+
"grad_norm": NaN,
|
5258 |
+
"learning_rate": 4.5058389232521816e-10,
|
5259 |
+
"loss": 0.0,
|
5260 |
+
"step": 749
|
5261 |
+
},
|
5262 |
+
{
|
5263 |
+
"epoch": 1.997006983704689,
|
5264 |
+
"grad_norm": NaN,
|
5265 |
+
"learning_rate": 0.0,
|
5266 |
+
"loss": 0.0,
|
5267 |
+
"step": 750
|
5268 |
}
|
5269 |
],
|
5270 |
"logging_steps": 1,
|
|
|
5279 |
"should_evaluate": false,
|
5280 |
"should_log": false,
|
5281 |
"should_save": true,
|
5282 |
+
"should_training_stop": true
|
5283 |
},
|
5284 |
"attributes": {}
|
5285 |
}
|
5286 |
},
|
5287 |
+
"total_flos": 2.083419328610304e+18,
|
5288 |
"train_batch_size": 4,
|
5289 |
"trial_name": null,
|
5290 |
"trial_params": null
|