Training in progress, step 1000, checkpoint
last-checkpoint/adapter_model.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:51398e150efacaf808159d514ee4653d467072c8032647bc4f39d7c4b2f9bc9e
 size 97728
last-checkpoint/optimizer.pt
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:970e5a6028f313759bf68568d2c01d51df557b6672cdd7eea92f1b34c79e30e7
 size 205350
last-checkpoint/rng_state.pth
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:bd47fce7f4351720fa479308f16f507f03bc563233e853ca8c2ff26a1d731735
 size 14244
last-checkpoint/scheduler.pt
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:4077036d99500a708f700f75da24d51b5300e184ad35fda49dc5a4df5596cca2
 size 1064
last-checkpoint/trainer_state.json
CHANGED
@@ -1,9 +1,9 @@
 {
   "best_metric": null,
   "best_model_checkpoint": null,
-  "epoch": 0.
+  "epoch": 0.10981167298083787,
   "eval_steps": 250,
-  "global_step":
+  "global_step": 1000,
   "is_hyper_param_search": false,
   "is_local_process_zero": true,
   "is_world_process_zero": true,
@@ -3531,6 +3531,3522 @@
       "eval_samples_per_second": 11.735,
       "eval_steps_per_second": 5.871,
       "step": 500
     }
   ],
   "logging_steps": 1,
@@ -3545,12 +7061,12 @@
         "should_evaluate": false,
         "should_log": false,
         "should_save": true,
-        "should_training_stop":
       },
       "attributes": {}
     }
   },
-  "total_flos":
   "train_batch_size": 2,
   "trial_name": null,
   "trial_params": null
3534 |
+
},
|
3535 |
+
{
|
3536 |
+
"epoch": 0.055015648163399766,
|
3537 |
+
"grad_norm": 0.03410876914858818,
|
3538 |
+
"learning_rate": 5.063464813980948e-05,
|
3539 |
+
"loss": 10.3254,
|
3540 |
+
"step": 501
|
3541 |
+
},
|
3542 |
+
{
|
3543 |
+
"epoch": 0.055125459836380605,
|
3544 |
+
"grad_norm": 0.04738672450184822,
|
3545 |
+
"learning_rate": 5.047599169697884e-05,
|
3546 |
+
"loss": 10.3248,
|
3547 |
+
"step": 502
|
3548 |
+
},
|
3549 |
+
{
|
3550 |
+
"epoch": 0.055235271509361444,
|
3551 |
+
"grad_norm": 0.03311553969979286,
|
3552 |
+
"learning_rate": 5.03173304609171e-05,
|
3553 |
+
"loss": 10.321,
|
3554 |
+
"step": 503
|
3555 |
+
},
|
3556 |
+
{
|
3557 |
+
"epoch": 0.05534508318234228,
|
3558 |
+
"grad_norm": 0.029791921377182007,
|
3559 |
+
"learning_rate": 5.015866602934112e-05,
|
3560 |
+
"loss": 10.3223,
|
3561 |
+
"step": 504
|
3562 |
+
},
|
3563 |
+
{
|
3564 |
+
"epoch": 0.05545489485532312,
|
3565 |
+
"grad_norm": 0.030381258577108383,
|
3566 |
+
"learning_rate": 5e-05,
|
3567 |
+
"loss": 10.3222,
|
3568 |
+
"step": 505
|
3569 |
+
},
|
3570 |
+
{
|
3571 |
+
"epoch": 0.05556470652830396,
|
3572 |
+
"grad_norm": 0.0378074012696743,
|
3573 |
+
"learning_rate": 4.984133397065889e-05,
|
3574 |
+
"loss": 10.3162,
|
3575 |
+
"step": 506
|
3576 |
+
},
|
3577 |
+
{
|
3578 |
+
"epoch": 0.055674518201284794,
|
3579 |
+
"grad_norm": 0.03906317427754402,
|
3580 |
+
"learning_rate": 4.968266953908292e-05,
|
3581 |
+
"loss": 10.314,
|
3582 |
+
"step": 507
|
3583 |
+
},
|
3584 |
+
{
|
3585 |
+
"epoch": 0.05578432987426563,
|
3586 |
+
"grad_norm": 0.033684585243463516,
|
3587 |
+
"learning_rate": 4.952400830302117e-05,
|
3588 |
+
"loss": 10.332,
|
3589 |
+
"step": 508
|
3590 |
+
},
|
3591 |
+
{
|
3592 |
+
"epoch": 0.05589414154724647,
|
3593 |
+
"grad_norm": 0.03505406528711319,
|
3594 |
+
"learning_rate": 4.9365351860190526e-05,
|
3595 |
+
"loss": 10.306,
|
3596 |
+
"step": 509
|
3597 |
+
},
|
3598 |
+
{
|
3599 |
+
"epoch": 0.05600395322022731,
|
3600 |
+
"grad_norm": 0.034469932317733765,
|
3601 |
+
"learning_rate": 4.92067018082596e-05,
|
3602 |
+
"loss": 10.3212,
|
3603 |
+
"step": 510
|
3604 |
+
},
|
3605 |
+
{
|
3606 |
+
"epoch": 0.05611376489320815,
|
3607 |
+
"grad_norm": 0.03993469104170799,
|
3608 |
+
"learning_rate": 4.9048059744832666e-05,
|
3609 |
+
"loss": 10.3174,
|
3610 |
+
"step": 511
|
3611 |
+
},
|
3612 |
+
{
|
3613 |
+
"epoch": 0.05622357656618899,
|
3614 |
+
"grad_norm": 0.03790479898452759,
|
3615 |
+
"learning_rate": 4.888942726743353e-05,
|
3616 |
+
"loss": 10.3236,
|
3617 |
+
"step": 512
|
3618 |
+
},
|
3619 |
+
{
|
3620 |
+
"epoch": 0.05633338823916982,
|
3621 |
+
"grad_norm": 0.025999998673796654,
|
3622 |
+
"learning_rate": 4.8730805973489476e-05,
|
3623 |
+
"loss": 10.3145,
|
3624 |
+
"step": 513
|
3625 |
+
},
|
3626 |
+
{
|
3627 |
+
"epoch": 0.05644319991215066,
|
3628 |
+
"grad_norm": 0.040338192135095596,
|
3629 |
+
"learning_rate": 4.85721974603152e-05,
|
3630 |
+
"loss": 10.3174,
|
3631 |
+
"step": 514
|
3632 |
+
},
|
3633 |
+
{
|
3634 |
+
"epoch": 0.0565530115851315,
|
3635 |
+
"grad_norm": 0.033280979841947556,
|
3636 |
+
"learning_rate": 4.841360332509663e-05,
|
3637 |
+
"loss": 10.3191,
|
3638 |
+
"step": 515
|
3639 |
+
},
|
3640 |
+
{
|
3641 |
+
"epoch": 0.05666282325811234,
|
3642 |
+
"grad_norm": 0.04504970461130142,
|
3643 |
+
"learning_rate": 4.825502516487497e-05,
|
3644 |
+
"loss": 10.3297,
|
3645 |
+
"step": 516
|
3646 |
+
},
|
3647 |
+
{
|
3648 |
+
"epoch": 0.05677263493109318,
|
3649 |
+
"grad_norm": 0.037833958864212036,
|
3650 |
+
"learning_rate": 4.8096464576530507e-05,
|
3651 |
+
"loss": 10.3151,
|
3652 |
+
"step": 517
|
3653 |
+
},
|
3654 |
+
{
|
3655 |
+
"epoch": 0.056882446604074016,
|
3656 |
+
"grad_norm": 0.028639158234000206,
|
3657 |
+
"learning_rate": 4.7937923156766646e-05,
|
3658 |
+
"loss": 10.3191,
|
3659 |
+
"step": 518
|
3660 |
+
},
|
3661 |
+
{
|
3662 |
+
"epoch": 0.05699225827705485,
|
3663 |
+
"grad_norm": 0.04431344196200371,
|
3664 |
+
"learning_rate": 4.77794025020937e-05,
|
3665 |
+
"loss": 10.3195,
|
3666 |
+
"step": 519
|
3667 |
+
},
|
3668 |
+
{
|
3669 |
+
"epoch": 0.05710206995003569,
|
3670 |
+
"grad_norm": 0.031215572729706764,
|
3671 |
+
"learning_rate": 4.762090420881289e-05,
|
3672 |
+
"loss": 10.3234,
|
3673 |
+
"step": 520
|
3674 |
+
},
|
3675 |
+
{
|
3676 |
+
"epoch": 0.057211881623016526,
|
3677 |
+
"grad_norm": 0.03430160507559776,
|
3678 |
+
"learning_rate": 4.7462429873000295e-05,
|
3679 |
+
"loss": 10.3208,
|
3680 |
+
"step": 521
|
3681 |
+
},
|
3682 |
+
{
|
3683 |
+
"epoch": 0.057321693295997365,
|
3684 |
+
"grad_norm": 0.04473254829645157,
|
3685 |
+
"learning_rate": 4.730398109049071e-05,
|
3686 |
+
"loss": 10.311,
|
3687 |
+
"step": 522
|
3688 |
+
},
|
3689 |
+
{
|
3690 |
+
"epoch": 0.057431504968978205,
|
3691 |
+
"grad_norm": 0.04297739267349243,
|
3692 |
+
"learning_rate": 4.71455594568616e-05,
|
3693 |
+
"loss": 10.3183,
|
3694 |
+
"step": 523
|
3695 |
+
},
|
3696 |
+
{
|
3697 |
+
"epoch": 0.057541316641959044,
|
3698 |
+
"grad_norm": 0.04153745248913765,
|
3699 |
+
"learning_rate": 4.698716656741708e-05,
|
3700 |
+
"loss": 10.3191,
|
3701 |
+
"step": 524
|
3702 |
+
},
|
3703 |
+
{
|
3704 |
+
"epoch": 0.057651128314939876,
|
3705 |
+
"grad_norm": 0.0311945341527462,
|
3706 |
+
"learning_rate": 4.6828804017171776e-05,
|
3707 |
+
"loss": 10.3177,
|
3708 |
+
"step": 525
|
3709 |
+
},
|
3710 |
+
{
|
3711 |
+
"epoch": 0.057760939987920715,
|
3712 |
+
"grad_norm": 0.03847968578338623,
|
3713 |
+
"learning_rate": 4.667047340083481e-05,
|
3714 |
+
"loss": 10.3191,
|
3715 |
+
"step": 526
|
3716 |
+
},
|
3717 |
+
{
|
3718 |
+
"epoch": 0.057870751660901554,
|
3719 |
+
"grad_norm": 0.039379045367240906,
|
3720 |
+
"learning_rate": 4.6512176312793736e-05,
|
3721 |
+
"loss": 10.3272,
|
3722 |
+
"step": 527
|
3723 |
+
},
|
3724 |
+
{
|
3725 |
+
"epoch": 0.05798056333388239,
|
3726 |
+
"grad_norm": 0.02749600075185299,
|
3727 |
+
"learning_rate": 4.635391434709847e-05,
|
3728 |
+
"loss": 10.3133,
|
3729 |
+
"step": 528
|
3730 |
+
},
|
3731 |
+
{
|
3732 |
+
"epoch": 0.05809037500686323,
|
3733 |
+
"grad_norm": 0.029182951897382736,
|
3734 |
+
"learning_rate": 4.619568909744524e-05,
|
3735 |
+
"loss": 10.3252,
|
3736 |
+
"step": 529
|
3737 |
+
},
|
3738 |
+
{
|
3739 |
+
"epoch": 0.058200186679844064,
|
3740 |
+
"grad_norm": 0.038201138377189636,
|
3741 |
+
"learning_rate": 4.603750215716057e-05,
|
3742 |
+
"loss": 10.3163,
|
3743 |
+
"step": 530
|
3744 |
+
},
|
3745 |
+
{
|
3746 |
+
"epoch": 0.0583099983528249,
|
3747 |
+
"grad_norm": 0.035310421139001846,
|
3748 |
+
"learning_rate": 4.587935511918521e-05,
|
3749 |
+
"loss": 10.3124,
|
3750 |
+
"step": 531
|
3751 |
+
},
|
3752 |
+
{
|
3753 |
+
"epoch": 0.05841981002580574,
|
3754 |
+
"grad_norm": 0.03495221585035324,
|
3755 |
+
"learning_rate": 4.5721249576058027e-05,
|
3756 |
+
"loss": 10.3175,
|
3757 |
+
"step": 532
|
3758 |
+
},
|
3759 |
+
{
|
3760 |
+
"epoch": 0.05852962169878658,
|
3761 |
+
"grad_norm": 0.032546330243349075,
|
3762 |
+
"learning_rate": 4.5563187119900104e-05,
|
3763 |
+
"loss": 10.3204,
|
3764 |
+
"step": 533
|
3765 |
+
},
|
3766 |
+
{
|
3767 |
+
"epoch": 0.05863943337176742,
|
3768 |
+
"grad_norm": 0.04046177119016647,
|
3769 |
+
"learning_rate": 4.5405169342398634e-05,
|
3770 |
+
"loss": 10.3216,
|
3771 |
+
"step": 534
|
3772 |
+
},
|
3773 |
+
{
|
3774 |
+
"epoch": 0.05874924504474826,
|
3775 |
+
"grad_norm": 0.035773757845163345,
|
3776 |
+
"learning_rate": 4.5247197834790876e-05,
|
3777 |
+
"loss": 10.3169,
|
3778 |
+
"step": 535
|
3779 |
+
},
|
3780 |
+
{
|
3781 |
+
"epoch": 0.05885905671772909,
|
3782 |
+
"grad_norm": 0.03923477232456207,
|
3783 |
+
"learning_rate": 4.508927418784815e-05,
|
3784 |
+
"loss": 10.3121,
|
3785 |
+
"step": 536
|
3786 |
+
},
|
3787 |
+
{
|
3788 |
+
"epoch": 0.05896886839070993,
|
3789 |
+
"grad_norm": 0.038092680275440216,
|
3790 |
+
"learning_rate": 4.493139999185983e-05,
|
3791 |
+
"loss": 10.3073,
|
3792 |
+
"step": 537
|
3793 |
+
},
|
3794 |
+
{
|
3795 |
+
"epoch": 0.05907868006369077,
|
3796 |
+
"grad_norm": 0.03591860458254814,
|
3797 |
+
"learning_rate": 4.477357683661734e-05,
|
3798 |
+
"loss": 10.3141,
|
3799 |
+
"step": 538
|
3800 |
+
},
|
3801 |
+
{
|
3802 |
+
"epoch": 0.05918849173667161,
|
3803 |
+
"grad_norm": 0.032019320875406265,
|
3804 |
+
"learning_rate": 4.461580631139805e-05,
|
3805 |
+
"loss": 10.3131,
|
3806 |
+
"step": 539
|
3807 |
+
},
|
3808 |
+
{
|
3809 |
+
"epoch": 0.05929830340965245,
|
3810 |
+
"grad_norm": 0.03614096716046333,
|
3811 |
+
"learning_rate": 4.445809000494946e-05,
|
3812 |
+
"loss": 10.3066,
|
3813 |
+
"step": 540
|
3814 |
+
},
|
3815 |
+
{
|
3816 |
+
"epoch": 0.05940811508263329,
|
3817 |
+
"grad_norm": 0.03614196926355362,
|
3818 |
+
"learning_rate": 4.4300429505472976e-05,
|
3819 |
+
"loss": 10.3307,
|
3820 |
+
"step": 541
|
3821 |
+
},
|
3822 |
+
{
|
3823 |
+
"epoch": 0.05951792675561412,
|
3824 |
+
"grad_norm": 0.03354780375957489,
|
3825 |
+
"learning_rate": 4.4142826400608086e-05,
|
3826 |
+
"loss": 10.3203,
|
3827 |
+
"step": 542
|
3828 |
+
},
|
3829 |
+
{
|
3830 |
+
"epoch": 0.05962773842859496,
|
3831 |
+
"grad_norm": 0.03033539280295372,
|
3832 |
+
"learning_rate": 4.398528227741633e-05,
|
3833 |
+
"loss": 10.3201,
|
3834 |
+
"step": 543
|
3835 |
+
},
|
3836 |
+
{
|
3837 |
+
"epoch": 0.0597375501015758,
|
3838 |
+
"grad_norm": 0.03290290758013725,
|
3839 |
+
"learning_rate": 4.3827798722365264e-05,
|
3840 |
+
"loss": 10.3181,
|
3841 |
+
"step": 544
|
3842 |
+
},
|
3843 |
+
{
|
3844 |
+
"epoch": 0.059847361774556636,
|
3845 |
+
"grad_norm": 0.05568011477589607,
|
3846 |
+
"learning_rate": 4.3670377321312535e-05,
|
3847 |
+
"loss": 10.3114,
|
3848 |
+
"step": 545
|
3849 |
+
},
|
3850 |
+
{
|
3851 |
+
"epoch": 0.059957173447537475,
|
3852 |
+
"grad_norm": 0.039659466594457626,
|
3853 |
+
"learning_rate": 4.351301965948991e-05,
|
3854 |
+
"loss": 10.3167,
|
3855 |
+
"step": 546
|
3856 |
+
},
|
3857 |
+
{
|
3858 |
+
"epoch": 0.060066985120518314,
|
3859 |
+
"grad_norm": 0.03856213763356209,
|
3860 |
+
"learning_rate": 4.33557273214873e-05,
|
3861 |
+
"loss": 10.3138,
|
3862 |
+
"step": 547
|
3863 |
+
},
|
3864 |
+
{
|
3865 |
+
"epoch": 0.060176796793499146,
|
3866 |
+
"grad_norm": 0.037546731531620026,
|
3867 |
+
"learning_rate": 4.3198501891236804e-05,
|
3868 |
+
"loss": 10.3267,
|
3869 |
+
"step": 548
|
3870 |
+
},
|
3871 |
+
{
|
3872 |
+
"epoch": 0.060286608466479985,
|
3873 |
+
"grad_norm": 0.037389788776636124,
|
3874 |
+
"learning_rate": 4.3041344951996746e-05,
|
3875 |
+
"loss": 10.3193,
|
3876 |
+
"step": 549
|
3877 |
+
},
|
3878 |
+
{
|
3879 |
+
"epoch": 0.060396420139460824,
|
3880 |
+
"grad_norm": 0.04678316041827202,
|
3881 |
+
"learning_rate": 4.288425808633575e-05,
|
3882 |
+
"loss": 10.3168,
|
3883 |
+
"step": 550
|
3884 |
+
},
|
3885 |
+
{
|
3886 |
+
"epoch": 0.06050623181244166,
|
3887 |
+
"grad_norm": 0.03719830885529518,
|
3888 |
+
"learning_rate": 4.272724287611684e-05,
|
3889 |
+
"loss": 10.3069,
|
3890 |
+
"step": 551
|
3891 |
+
},
|
3892 |
+
{
|
3893 |
+
"epoch": 0.0606160434854225,
|
3894 |
+
"grad_norm": 0.030142908915877342,
|
3895 |
+
"learning_rate": 4.2570300902481426e-05,
|
3896 |
+
"loss": 10.326,
|
3897 |
+
"step": 552
|
3898 |
+
},
|
3899 |
+
{
|
3900 |
+
"epoch": 0.06072585515840334,
|
3901 |
+
"grad_norm": 0.03186402842402458,
|
3902 |
+
"learning_rate": 4.241343374583343e-05,
|
3903 |
+
"loss": 10.3132,
|
3904 |
+
"step": 553
|
3905 |
+
},
|
3906 |
+
{
|
3907 |
+
"epoch": 0.06083566683138417,
|
3908 |
+
"grad_norm": 0.030028637498617172,
|
3909 |
+
"learning_rate": 4.2256642985823395e-05,
|
3910 |
+
"loss": 10.3255,
|
3911 |
+
"step": 554
|
3912 |
+
},
|
3913 |
+
{
|
3914 |
+
"epoch": 0.06094547850436501,
|
3915 |
+
"grad_norm": 0.03888958692550659,
|
3916 |
+
"learning_rate": 4.20999302013325e-05,
|
3917 |
+
"loss": 10.3231,
|
3918 |
+
"step": 555
|
3919 |
+
},
|
3920 |
+
{
|
3921 |
+
"epoch": 0.06105529017734585,
|
3922 |
+
"grad_norm": 0.03933922201395035,
|
3923 |
+
"learning_rate": 4.19432969704568e-05,
|
3924 |
+
"loss": 10.3206,
|
3925 |
+
"step": 556
|
3926 |
+
},
|
3927 |
+
{
|
3928 |
+
"epoch": 0.06116510185032669,
|
3929 |
+
"grad_norm": 0.030291898176074028,
|
3930 |
+
"learning_rate": 4.178674487049116e-05,
|
3931 |
+
"loss": 10.3205,
|
3932 |
+
"step": 557
|
3933 |
+
},
|
3934 |
+
{
|
3935 |
+
"epoch": 0.06127491352330753,
|
3936 |
+
"grad_norm": 0.04657311365008354,
|
3937 |
+
"learning_rate": 4.163027547791347e-05,
|
3938 |
+
"loss": 10.3188,
|
3939 |
+
"step": 558
|
3940 |
+
},
|
3941 |
+
{
|
3942 |
+
"epoch": 0.06138472519628837,
|
3943 |
+
"grad_norm": 0.03705879673361778,
|
3944 |
+
"learning_rate": 4.147389036836881e-05,
|
3945 |
+
"loss": 10.3169,
|
3946 |
+
"step": 559
|
3947 |
+
},
|
3948 |
+
{
|
3949 |
+
"epoch": 0.0614945368692692,
|
3950 |
+
"grad_norm": 0.0392281673848629,
|
3951 |
+
"learning_rate": 4.131759111665349e-05,
|
3952 |
+
"loss": 10.3157,
|
3953 |
+
"step": 560
|
3954 |
+
},
|
3955 |
+
{
|
3956 |
+
"epoch": 0.06160434854225004,
|
3957 |
+
"grad_norm": 0.03745996579527855,
|
3958 |
+
"learning_rate": 4.116137929669921e-05,
|
3959 |
+
"loss": 10.3125,
|
3960 |
+
"step": 561
|
3961 |
+
},
|
3962 |
+
{
|
3963 |
+
"epoch": 0.06171416021523088,
|
3964 |
+
"grad_norm": 0.02913571335375309,
|
3965 |
+
"learning_rate": 4.100525648155731e-05,
|
3966 |
+
"loss": 10.3165,
|
3967 |
+
"step": 562
|
3968 |
+
},
|
3969 |
+
{
|
3970 |
+
"epoch": 0.06182397188821172,
|
3971 |
+
"grad_norm": 0.03033732809126377,
|
3972 |
+
"learning_rate": 4.084922424338277e-05,
|
3973 |
+
"loss": 10.3298,
|
3974 |
+
"step": 563
|
3975 |
+
},
|
3976 |
+
{
|
3977 |
+
"epoch": 0.06193378356119256,
|
3978 |
+
"grad_norm": 0.03265180066227913,
|
3979 |
+
"learning_rate": 4.06932841534185e-05,
|
3980 |
+
"loss": 10.3178,
|
3981 |
+
"step": 564
|
3982 |
+
},
|
3983 |
+
{
|
3984 |
+
"epoch": 0.062043595234173396,
|
3985 |
+
"grad_norm": 0.036046724766492844,
|
3986 |
+
"learning_rate": 4.0537437781979506e-05,
|
3987 |
+
"loss": 10.3208,
|
3988 |
+
"step": 565
|
3989 |
+
},
|
3990 |
+
{
|
3991 |
+
"epoch": 0.06215340690715423,
|
3992 |
+
"grad_norm": 0.026036258786916733,
|
3993 |
+
"learning_rate": 4.038168669843697e-05,
|
3994 |
+
"loss": 10.3175,
|
3995 |
+
"step": 566
|
3996 |
+
},
|
3997 |
+
{
|
3998 |
+
"epoch": 0.06226321858013507,
|
3999 |
+
"grad_norm": 0.026027636602520943,
|
4000 |
+
"learning_rate": 4.0226032471202604e-05,
|
4001 |
+
"loss": 10.3206,
|
4002 |
+
"step": 567
|
4003 |
+
},
|
4004 |
+
{
|
4005 |
+
"epoch": 0.062373030253115906,
|
4006 |
+
"grad_norm": 0.03299787640571594,
|
4007 |
+
"learning_rate": 4.007047666771274e-05,
|
4008 |
+
"loss": 10.3177,
|
4009 |
+
"step": 568
|
4010 |
+
},
|
4011 |
+
{
|
4012 |
+
"epoch": 0.062482841926096745,
|
4013 |
+
"grad_norm": 0.03509662672877312,
|
4014 |
+
"learning_rate": 3.991502085441259e-05,
|
4015 |
+
"loss": 10.3257,
|
4016 |
+
"step": 569
|
4017 |
+
},
|
4018 |
+
{
|
4019 |
+
"epoch": 0.06259265359907758,
|
4020 |
+
"grad_norm": 0.03268042206764221,
|
4021 |
+
"learning_rate": 3.9759666596740476e-05,
|
4022 |
+
"loss": 10.3187,
|
4023 |
+
"step": 570
|
4024 |
+
},
|
4025 |
+
{
|
4026 |
+
"epoch": 0.06270246527205842,
|
4027 |
+
"grad_norm": 0.036411747336387634,
|
4028 |
+
"learning_rate": 3.960441545911204e-05,
|
4029 |
+
"loss": 10.3192,
|
4030 |
+
"step": 571
|
4031 |
+
},
|
4032 |
+
{
|
4033 |
+
"epoch": 0.06281227694503926,
|
4034 |
+
"grad_norm": 0.05730379745364189,
|
4035 |
+
"learning_rate": 3.944926900490452e-05,
|
4036 |
+
"loss": 10.3169,
|
4037 |
+
"step": 572
|
4038 |
+
},
|
4039 |
+
{
|
4040 |
+
"epoch": 0.0629220886180201,
|
4041 |
+
"grad_norm": 0.03837157413363457,
|
4042 |
+
"learning_rate": 3.929422879644099e-05,
|
4043 |
+
"loss": 10.3151,
|
4044 |
+
"step": 573
|
4045 |
+
},
|
4046 |
+
{
|
4047 |
+
"epoch": 0.06303190029100093,
|
4048 |
+
"grad_norm": 0.047361429780721664,
|
4049 |
+
"learning_rate": 3.913929639497462e-05,
|
4050 |
+
"loss": 10.3227,
|
4051 |
+
"step": 574
|
4052 |
+
},
|
4053 |
+
{
|
4054 |
+
"epoch": 0.06314171196398177,
|
4055 |
+
"grad_norm": 0.04197605699300766,
|
4056 |
+
"learning_rate": 3.898447336067297e-05,
|
4057 |
+
"loss": 10.3278,
|
4058 |
+
"step": 575
|
4059 |
+
},
|
4060 |
+
{
|
4061 |
+
"epoch": 0.06325152363696261,
|
4062 |
+
"grad_norm": 0.04603104665875435,
|
4063 |
+
"learning_rate": 3.882976125260229e-05,
|
4064 |
+
"loss": 10.3204,
|
4065 |
+
"step": 576
|
4066 |
+
},
|
4067 |
+
{
|
4068 |
+
"epoch": 0.06336133530994345,
|
4069 |
+
"grad_norm": 0.028033215552568436,
|
4070 |
+
"learning_rate": 3.8675161628711776e-05,
|
4071 |
+
"loss": 10.3153,
|
4072 |
+
"step": 577
|
4073 |
+
},
|
4074 |
+
{
|
4075 |
+
"epoch": 0.06347114698292429,
|
4076 |
+
"grad_norm": 0.03791102021932602,
|
4077 |
+
"learning_rate": 3.852067604581794e-05,
|
4078 |
+
"loss": 10.3199,
|
4079 |
+
"step": 578
|
4080 |
+
},
|
4081 |
+
{
|
4082 |
+
"epoch": 0.06358095865590513,
|
4083 |
+
"grad_norm": 0.04788900539278984,
|
4084 |
+
"learning_rate": 3.836630605958888e-05,
|
4085 |
+
"loss": 10.3185,
|
4086 |
+
"step": 579
|
4087 |
+
},
|
4088 |
+
{
|
4089 |
+
"epoch": 0.06369077032888595,
|
4090 |
+
"grad_norm": 0.030448194593191147,
|
4091 |
+
"learning_rate": 3.821205322452863e-05,
|
4092 |
+
"loss": 10.3183,
|
4093 |
+
"step": 580
|
4094 |
+
},
|
4095 |
+
{
|
4096 |
+
"epoch": 0.06380058200186679,
|
4097 |
+
"grad_norm": 0.03152129799127579,
|
4098 |
+
"learning_rate": 3.8057919093961553e-05,
|
4099 |
+
"loss": 10.325,
|
4100 |
+
"step": 581
|
4101 |
+
},
|
4102 |
+
{
|
4103 |
+
"epoch": 0.06391039367484763,
|
4104 |
+
"grad_norm": 0.035265106707811356,
|
4105 |
+
"learning_rate": 3.790390522001662e-05,
|
4106 |
+
"loss": 10.3314,
|
4107 |
+
"step": 582
|
4108 |
+
},
|
4109 |
+
{
|
4110 |
+
"epoch": 0.06402020534782847,
|
4111 |
+
"grad_norm": 0.040993936359882355,
|
4112 |
+
"learning_rate": 3.775001315361183e-05,
|
4113 |
+
"loss": 10.3221,
|
4114 |
+
"step": 583
|
4115 |
+
},
|
4116 |
+
{
|
4117 |
+
"epoch": 0.06413001702080931,
|
4118 |
+
"grad_norm": 0.05848237872123718,
|
4119 |
+
"learning_rate": 3.759624444443858e-05,
|
4120 |
+
"loss": 10.3188,
|
4121 |
+
"step": 584
|
4122 |
+
},
|
4123 |
+
{
|
4124 |
+
"epoch": 0.06423982869379015,
|
4125 |
+
"grad_norm": 0.042362719774246216,
|
4126 |
+
"learning_rate": 3.744260064094604e-05,
|
4127 |
+
"loss": 10.3205,
|
4128 |
+
"step": 585
|
4129 |
+
},
|
4130 |
+
{
|
4131 |
+
"epoch": 0.06434964036677099,
|
4132 |
+
"grad_norm": 0.031695008277893066,
|
4133 |
+
"learning_rate": 3.728908329032567e-05,
|
4134 |
+
"loss": 10.3269,
|
4135 |
+
"step": 586
|
4136 |
+
},
|
4137 |
+
{
|
4138 |
+
"epoch": 0.06445945203975183,
|
4139 |
+
"grad_norm": 0.0336785726249218,
|
4140 |
+
"learning_rate": 3.713569393849543e-05,
|
4141 |
+
"loss": 10.3177,
|
4142 |
+
"step": 587
|
4143 |
+
},
|
4144 |
+
{
|
4145 |
+
"epoch": 0.06456926371273267,
|
4146 |
+
"grad_norm": 0.035682737827301025,
|
4147 |
+
"learning_rate": 3.69824341300844e-05,
|
4148 |
+
"loss": 10.3147,
|
4149 |
+
"step": 588
|
4150 |
+
},
|
4151 |
+
{
|
4152 |
+
"epoch": 0.0646790753857135,
|
4153 |
+
"grad_norm": 0.03407788276672363,
|
4154 |
+
"learning_rate": 3.6829305408417166e-05,
|
4155 |
+
"loss": 10.328,
|
4156 |
+
"step": 589
|
4157 |
+
},
|
4158 |
+
{
|
4159 |
+
"epoch": 0.06478888705869434,
|
4160 |
+
"grad_norm": 0.039640314877033234,
|
4161 |
+
"learning_rate": 3.6676309315498256e-05,
|
4162 |
+
"loss": 10.324,
|
4163 |
+
"step": 590
|
4164 |
+
},
|
4165 |
+
{
|
4166 |
+
"epoch": 0.06489869873167518,
|
4167 |
+
"grad_norm": 0.047250282019376755,
|
4168 |
+
"learning_rate": 3.6523447391996614e-05,
|
4169 |
+
"loss": 10.3116,
|
4170 |
+
"step": 591
|
4171 |
+
},
|
4172 |
+
{
|
4173 |
+
"epoch": 0.06500851040465601,
|
4174 |
+
"grad_norm": 0.038214169442653656,
|
4175 |
+
"learning_rate": 3.6370721177230116e-05,
|
4176 |
+
"loss": 10.3118,
|
4177 |
+
"step": 592
|
4178 |
+
},
|
4179 |
+
{
|
4180 |
+
"epoch": 0.06511832207763685,
|
4181 |
+
"grad_norm": 0.03506159037351608,
|
4182 |
+
"learning_rate": 3.6218132209150045e-05,
|
4183 |
+
"loss": 10.3242,
|
4184 |
+
"step": 593
|
4185 |
+
},
|
4186 |
+
{
|
4187 |
+
"epoch": 0.06522813375061769,
|
4188 |
+
"grad_norm": 0.03700363263487816,
|
4189 |
+
"learning_rate": 3.606568202432562e-05,
|
4190 |
+
"loss": 10.3138,
|
4191 |
+
"step": 594
|
4192 |
+
},
|
4193 |
+
{
|
4194 |
+
"epoch": 0.06533794542359853,
|
4195 |
+
"grad_norm": 0.03475815802812576,
|
4196 |
+
"learning_rate": 3.591337215792852e-05,
|
4197 |
+
"loss": 10.3163,
|
4198 |
+
"step": 595
|
4199 |
+
},
|
4200 |
+
{
|
4201 |
+
"epoch": 0.06544775709657936,
|
4202 |
+
"grad_norm": 0.0341210775077343,
|
4203 |
+
"learning_rate": 3.5761204143717385e-05,
|
4204 |
+
"loss": 10.3262,
|
4205 |
+
"step": 596
|
4206 |
+
},
|
4207 |
+
{
|
4208 |
+
"epoch": 0.0655575687695602,
|
4209 |
+
"grad_norm": 0.04840511083602905,
|
4210 |
+
"learning_rate": 3.560917951402245e-05,
|
4211 |
+
"loss": 10.3188,
|
4212 |
+
"step": 597
|
4213 |
+
},
|
4214 |
+
{
|
4215 |
+
"epoch": 0.06566738044254104,
|
4216 |
+
"grad_norm": 0.03449239954352379,
|
4217 |
+
"learning_rate": 3.545729979973005e-05,
|
4218 |
+
"loss": 10.3226,
|
4219 |
+
"step": 598
|
4220 |
+
},
|
4221 |
+
{
|
4222 |
+
"epoch": 0.06577719211552188,
|
4223 |
+
"grad_norm": 0.03965664282441139,
|
4224 |
+
"learning_rate": 3.530556653026721e-05,
|
4225 |
+
"loss": 10.3202,
|
4226 |
+
"step": 599
|
4227 |
+
},
|
4228 |
+
{
|
4229 |
+
"epoch": 0.06588700378850272,
|
4230 |
+
"grad_norm": 0.029519766569137573,
|
4231 |
+
"learning_rate": 3.515398123358627e-05,
|
4232 |
+
"loss": 10.3089,
|
4233 |
+
"step": 600
|
4234 |
+
},
|
4235 |
+
{
|
4236 |
+
"epoch": 0.06599681546148356,
|
4237 |
+
"grad_norm": 0.0381334163248539,
|
4238 |
+
"learning_rate": 3.5002545436149474e-05,
|
4239 |
+
"loss": 10.3148,
|
4240 |
+
"step": 601
|
4241 |
+
},
|
4242 |
+
{
|
4243 |
+
"epoch": 0.0661066271344644,
|
4244 |
+
"grad_norm": 0.041288670152425766,
|
4245 |
+
"learning_rate": 3.485126066291364e-05,
|
4246 |
+
"loss": 10.3158,
|
4247 |
+
"step": 602
|
4248 |
+
},
|
4249 |
+
{
|
4250 |
+
"epoch": 0.06621643880744524,
|
4251 |
+
"grad_norm": 0.026856929063796997,
|
4252 |
+
"learning_rate": 3.470012843731476e-05,
|
4253 |
+
"loss": 10.3092,
|
4254 |
+
"step": 603
|
4255 |
+
},
|
4256 |
+
{
|
4257 |
+
"epoch": 0.06632625048042606,
|
4258 |
+
"grad_norm": 0.035839807242155075,
|
4259 |
+
"learning_rate": 3.4549150281252636e-05,
|
4260 |
+
"loss": 10.3276,
|
4261 |
+
"step": 604
|
4262 |
+
},
|
4263 |
+
{
|
4264 |
+
"epoch": 0.0664360621534069,
|
4265 |
+
"grad_norm": 0.04341225326061249,
|
4266 |
+
"learning_rate": 3.439832771507565e-05,
|
4267 |
+
"loss": 10.3243,
|
4268 |
+
"step": 605
|
4269 |
+
},
|
4270 |
+
{
|
4271 |
+
"epoch": 0.06654587382638774,
|
4272 |
+
"grad_norm": 0.03989730402827263,
|
4273 |
+
"learning_rate": 3.424766225756537e-05,
|
4274 |
+
"loss": 10.3178,
|
4275 |
+
"step": 606
|
4276 |
+
},
|
4277 |
+
{
|
4278 |
+
"epoch": 0.06665568549936858,
|
4279 |
+
"grad_norm": 0.031058041378855705,
|
4280 |
+
"learning_rate": 3.4097155425921254e-05,
|
4281 |
+
"loss": 10.3187,
|
4282 |
+
"step": 607
|
4283 |
+
},
|
4284 |
+
{
|
4285 |
+
"epoch": 0.06676549717234942,
|
4286 |
+
"grad_norm": 0.04681367799639702,
|
4287 |
+
"learning_rate": 3.394680873574546e-05,
|
4288 |
+
"loss": 10.3176,
|
4289 |
+
"step": 608
|
4290 |
+
},
|
4291 |
+
{
|
4292 |
+
"epoch": 0.06687530884533026,
|
4293 |
+
"grad_norm": 0.039223261177539825,
|
4294 |
+
"learning_rate": 3.3796623701027476e-05,
|
4295 |
+
"loss": 10.3141,
|
4296 |
+
"step": 609
|
4297 |
+
},
|
4298 |
+
{
|
4299 |
+
"epoch": 0.0669851205183111,
|
4300 |
+
"grad_norm": 0.037996046245098114,
|
4301 |
+
"learning_rate": 3.364660183412892e-05,
|
4302 |
+
"loss": 10.3196,
|
4303 |
+
"step": 610
|
4304 |
+
},
|
4305 |
+
{
|
4306 |
+
"epoch": 0.06709493219129194,
|
4307 |
+
"grad_norm": 0.04476340860128403,
|
4308 |
+
"learning_rate": 3.349674464576834e-05,
|
4309 |
+
"loss": 10.3166,
|
4310 |
+
"step": 611
|
4311 |
+
},
|
4312 |
+
{
|
4313 |
+
"epoch": 0.06720474386427278,
|
4314 |
+
"grad_norm": 0.038603756576776505,
|
4315 |
+
"learning_rate": 3.334705364500596e-05,
|
4316 |
+
"loss": 10.3207,
|
4317 |
+
"step": 612
|
4318 |
+
},
|
4319 |
+
{
|
4320 |
+
"epoch": 0.06731455553725361,
|
4321 |
+
"grad_norm": 0.05021185800433159,
|
4322 |
+
"learning_rate": 3.3197530339228487e-05,
|
4323 |
+
"loss": 10.3092,
|
4324 |
+
"step": 613
|
4325 |
+
},
|
4326 |
+
{
|
4327 |
+
"epoch": 0.06742436721023445,
|
4328 |
+
"grad_norm": 0.03497612476348877,
|
4329 |
+
"learning_rate": 3.304817623413397e-05,
|
4330 |
+
"loss": 10.315,
|
4331 |
+
"step": 614
|
4332 |
+
},
|
4333 |
+
{
|
4334 |
+
"epoch": 0.06753417888321528,
|
4335 |
+
"grad_norm": 0.025148971006274223,
|
4336 |
+
"learning_rate": 3.289899283371657e-05,
|
4337 |
+
"loss": 10.3156,
|
4338 |
+
"step": 615
|
4339 |
+
},
|
4340 |
+
{
|
4341 |
+
"epoch": 0.06764399055619612,
|
4342 |
+
"grad_norm": 0.043258484452962875,
|
4343 |
+
"learning_rate": 3.274998164025148e-05,
|
4344 |
+
"loss": 10.3091,
|
4345 |
+
"step": 616
|
4346 |
+
},
|
4347 |
+
{
|
4348 |
+
"epoch": 0.06775380222917696,
|
4349 |
+
"grad_norm": 0.039028916507959366,
|
4350 |
+
"learning_rate": 3.260114415427975e-05,
|
4351 |
+
"loss": 10.308,
|
4352 |
+
"step": 617
|
4353 |
+
},
|
4354 |
+
{
|
4355 |
+
"epoch": 0.0678636139021578,
|
4356 |
+
"grad_norm": 0.023269733414053917,
|
4357 |
+
"learning_rate": 3.2452481874593234e-05,
|
4358 |
+
"loss": 10.3169,
|
4359 |
+
"step": 618
|
4360 |
+
},
|
4361 |
+
{
|
4362 |
+
"epoch": 0.06797342557513864,
|
4363 |
+
"grad_norm": 0.032692115753889084,
|
4364 |
+
"learning_rate": 3.230399629821942e-05,
|
4365 |
+
"loss": 10.3187,
|
4366 |
+
"step": 619
|
4367 |
+
},
|
4368 |
+
{
|
4369 |
+
"epoch": 0.06808323724811947,
|
4370 |
+
"grad_norm": 0.03593998774886131,
|
4371 |
+
"learning_rate": 3.215568892040641e-05,
|
4372 |
+
"loss": 10.3258,
|
4373 |
+
"step": 620
|
4374 |
+
},
|
4375 |
+
{
|
4376 |
+
"epoch": 0.06819304892110031,
|
4377 |
+
"grad_norm": 0.048697151243686676,
|
4378 |
+
"learning_rate": 3.200756123460788e-05,
|
4379 |
+
"loss": 10.3091,
|
4380 |
+
"step": 621
|
4381 |
+
},
|
4382 |
+
{
|
4383 |
+
"epoch": 0.06830286059408115,
|
4384 |
+
"grad_norm": 0.049541596323251724,
|
4385 |
+
"learning_rate": 3.1859614732467954e-05,
|
4386 |
+
"loss": 10.3065,
|
4387 |
+
"step": 622
|
4388 |
+
},
|
4389 |
+
{
|
4390 |
+
"epoch": 0.06841267226706199,
|
4391 |
+
"grad_norm": 0.039022549986839294,
|
4392 |
+
"learning_rate": 3.171185090380628e-05,
|
4393 |
+
"loss": 10.3241,
|
4394 |
+
"step": 623
|
4395 |
+
},
|
4396 |
+
{
|
4397 |
+
"epoch": 0.06852248394004283,
|
4398 |
+
"grad_norm": 0.04879970848560333,
|
4399 |
+
"learning_rate": 3.156427123660297e-05,
|
4400 |
+
"loss": 10.3167,
|
4401 |
+
"step": 624
|
4402 |
+
},
|
4403 |
+
{
|
4404 |
+
"epoch": 0.06863229561302367,
|
4405 |
+
"grad_norm": 0.030641546472907066,
|
4406 |
+
"learning_rate": 3.141687721698363e-05,
|
4407 |
+
"loss": 10.3144,
|
4408 |
+
"step": 625
|
4409 |
+
},
|
4410 |
+
{
|
4411 |
+
"epoch": 0.06874210728600451,
|
4412 |
+
"grad_norm": 0.03837910294532776,
|
4413 |
+
"learning_rate": 3.12696703292044e-05,
|
4414 |
+
"loss": 10.3199,
|
4415 |
+
"step": 626
|
4416 |
+
},
|
4417 |
+
{
|
4418 |
+
"epoch": 0.06885191895898533,
|
4419 |
+
"grad_norm": 0.0463176965713501,
|
4420 |
+
"learning_rate": 3.1122652055637015e-05,
|
4421 |
+
"loss": 10.3281,
|
4422 |
+
"step": 627
|
4423 |
+
},
|
4424 |
+
{
|
4425 |
+
"epoch": 0.06896173063196617,
|
4426 |
+
"grad_norm": 0.02846435457468033,
|
4427 |
+
"learning_rate": 3.097582387675385e-05,
|
4428 |
+
"loss": 10.3113,
|
4429 |
+
"step": 628
|
4430 |
+
},
|
4431 |
+
{
|
4432 |
+
"epoch": 0.06907154230494701,
|
4433 |
+
"grad_norm": 0.038684189319610596,
|
4434 |
+
"learning_rate": 3.082918727111304e-05,
|
4435 |
+
"loss": 10.3169,
|
4436 |
+
"step": 629
|
4437 |
+
},
|
4438 |
+
{
|
4439 |
+
"epoch": 0.06918135397792785,
|
4440 |
+
"grad_norm": 0.034591104835271835,
|
4441 |
+
"learning_rate": 3.0682743715343564e-05,
|
4442 |
+
"loss": 10.3062,
|
4443 |
+
"step": 630
|
4444 |
+
},
|
4445 |
+
{
|
4446 |
+
"epoch": 0.06929116565090869,
|
4447 |
+
"grad_norm": 0.04411185905337334,
|
4448 |
+
"learning_rate": 3.053649468413043e-05,
|
4449 |
+
"loss": 10.3221,
|
4450 |
+
"step": 631
|
4451 |
+
},
|
4452 |
+
{
|
4453 |
+
"epoch": 0.06940097732388953,
|
4454 |
+
"grad_norm": 0.03224635869264603,
|
4455 |
+
"learning_rate": 3.0390441650199724e-05,
|
4456 |
+
"loss": 10.3193,
|
4457 |
+
"step": 632
|
4458 |
+
},
|
4459 |
+
{
|
4460 |
+
"epoch": 0.06951078899687037,
|
4461 |
+
"grad_norm": 0.03653174266219139,
|
4462 |
+
"learning_rate": 3.0244586084303905e-05,
|
4463 |
+
"loss": 10.3135,
|
4464 |
+
"step": 633
|
4465 |
+
},
|
4466 |
+
{
|
4467 |
+
"epoch": 0.06962060066985121,
|
4468 |
+
"grad_norm": 0.04843145236372948,
|
4469 |
+
"learning_rate": 3.0098929455206904e-05,
|
4470 |
+
"loss": 10.3105,
|
4471 |
+
"step": 634
|
4472 |
+
},
|
4473 |
+
{
|
4474 |
+
"epoch": 0.06973041234283205,
|
4475 |
+
"grad_norm": 0.03295741230249405,
|
4476 |
+
"learning_rate": 2.9953473229669328e-05,
|
4477 |
+
"loss": 10.323,
|
4478 |
+
"step": 635
|
4479 |
+
},
|
4480 |
+
{
|
4481 |
+
"epoch": 0.06984022401581289,
|
4482 |
+
"grad_norm": 0.037569474428892136,
|
4483 |
+
"learning_rate": 2.9808218872433767e-05,
|
4484 |
+
"loss": 10.3158,
|
4485 |
+
"step": 636
|
4486 |
+
},
|
4487 |
+
{
|
4488 |
+
"epoch": 0.06995003568879372,
|
4489 |
+
"grad_norm": 0.034265752881765366,
|
4490 |
+
"learning_rate": 2.9663167846209998e-05,
|
4491 |
+
"loss": 10.3198,
|
4492 |
+
"step": 637
|
4493 |
+
},
|
4494 |
+
{
|
4495 |
+
"epoch": 0.07005984736177456,
|
4496 |
+
"grad_norm": 0.03296723589301109,
|
4497 |
+
"learning_rate": 2.9518321611660237e-05,
|
4498 |
+
"loss": 10.3094,
|
4499 |
+
"step": 638
|
4500 |
+
},
|
4501 |
+
{
|
4502 |
+
"epoch": 0.07016965903475539,
|
4503 |
+
"grad_norm": 0.04063355177640915,
|
4504 |
+
"learning_rate": 2.9373681627384447e-05,
|
4505 |
+
"loss": 10.3247,
|
4506 |
+
"step": 639
|
4507 |
+
},
|
4508 |
+
{
|
4509 |
+
"epoch": 0.07027947070773623,
|
4510 |
+
"grad_norm": 0.029703807085752487,
|
4511 |
+
"learning_rate": 2.9229249349905684e-05,
|
4512 |
+
"loss": 10.3243,
|
4513 |
+
"step": 640
|
4514 |
+
},
|
4515 |
+
{
|
4516 |
+
"epoch": 0.07038928238071707,
|
4517 |
+
"grad_norm": 0.043013796210289,
|
4518 |
+
"learning_rate": 2.9085026233655365e-05,
|
4519 |
+
"loss": 10.32,
|
4520 |
+
"step": 641
|
4521 |
+
},
|
4522 |
+
{
|
4523 |
+
"epoch": 0.0704990940536979,
|
4524 |
+
"grad_norm": 0.046771373599767685,
|
4525 |
+
"learning_rate": 2.894101373095867e-05,
|
4526 |
+
"loss": 10.3242,
|
4527 |
+
"step": 642
|
4528 |
+
},
|
4529 |
+
{
|
4530 |
+
"epoch": 0.07060890572667874,
|
4531 |
+
"grad_norm": 0.05028558894991875,
|
4532 |
+
"learning_rate": 2.8797213292019926e-05,
|
4533 |
+
"loss": 10.3225,
|
4534 |
+
"step": 643
|
4535 |
+
},
|
4536 |
+
{
|
4537 |
+
"epoch": 0.07071871739965958,
|
4538 |
+
"grad_norm": 0.03822631761431694,
|
4539 |
+
"learning_rate": 2.8653626364907917e-05,
|
4540 |
+
"loss": 10.3087,
|
4541 |
+
"step": 644
|
4542 |
+
},
|
4543 |
+
{
|
4544 |
+
"epoch": 0.07082852907264042,
|
4545 |
+
"grad_norm": 0.03862608224153519,
|
4546 |
+
"learning_rate": 2.851025439554142e-05,
|
4547 |
+
"loss": 10.3213,
|
4548 |
+
"step": 645
|
4549 |
+
},
|
4550 |
+
{
|
4551 |
+
"epoch": 0.07093834074562126,
|
4552 |
+
"grad_norm": 0.041775893419981,
|
4553 |
+
"learning_rate": 2.8367098827674578e-05,
|
4554 |
+
"loss": 10.322,
|
4555 |
+
"step": 646
|
4556 |
+
},
|
4557 |
+
{
|
4558 |
+
"epoch": 0.0710481524186021,
|
4559 |
+
"grad_norm": 0.03678379952907562,
|
4560 |
+
"learning_rate": 2.8224161102882397e-05,
|
4561 |
+
"loss": 10.3153,
|
4562 |
+
"step": 647
|
4563 |
+
},
|
4564 |
+
{
|
4565 |
+
"epoch": 0.07115796409158294,
|
4566 |
+
"grad_norm": 0.055228762328624725,
|
4567 |
+
"learning_rate": 2.8081442660546125e-05,
|
4568 |
+
"loss": 10.3128,
|
4569 |
+
"step": 648
|
4570 |
+
},
|
4571 |
+
{
|
4572 |
+
"epoch": 0.07126777576456378,
|
4573 |
+
"grad_norm": 0.0325370691716671,
|
4574 |
+
"learning_rate": 2.7938944937838923e-05,
|
4575 |
+
"loss": 10.3156,
|
4576 |
+
"step": 649
|
4577 |
+
},
|
4578 |
+
{
|
4579 |
+
"epoch": 0.07137758743754462,
|
4580 |
+
"grad_norm": 0.038115836679935455,
|
4581 |
+
"learning_rate": 2.7796669369711294e-05,
|
4582 |
+
"loss": 10.3214,
|
4583 |
+
"step": 650
|
4584 |
+
},
|
4585 |
+
{
|
4586 |
+
"epoch": 0.07148739911052544,
|
4587 |
+
"grad_norm": 0.036208376288414,
|
4588 |
+
"learning_rate": 2.7654617388876615e-05,
|
4589 |
+
"loss": 10.3208,
|
4590 |
+
"step": 651
|
4591 |
+
},
|
4592 |
+
{
|
4593 |
+
"epoch": 0.07159721078350628,
|
4594 |
+
"grad_norm": 0.03381425887346268,
|
4595 |
+
"learning_rate": 2.7512790425796718e-05,
|
4596 |
+
"loss": 10.3184,
|
4597 |
+
"step": 652
|
4598 |
+
},
|
4599 |
+
{
|
4600 |
+
"epoch": 0.07170702245648712,
|
4601 |
+
"grad_norm": 0.04482642188668251,
|
4602 |
+
"learning_rate": 2.7371189908667604e-05,
|
4603 |
+
"loss": 10.3278,
|
4604 |
+
"step": 653
|
4605 |
+
},
|
4606 |
+
{
|
4607 |
+
"epoch": 0.07181683412946796,
|
4608 |
+
"grad_norm": 0.042877502739429474,
|
4609 |
+
"learning_rate": 2.7229817263404866e-05,
|
4610 |
+
"loss": 10.3253,
|
4611 |
+
"step": 654
|
4612 |
+
},
|
4613 |
+
{
|
4614 |
+
"epoch": 0.0719266458024488,
|
4615 |
+
"grad_norm": 0.04468453302979469,
|
4616 |
+
"learning_rate": 2.708867391362948e-05,
|
4617 |
+
"loss": 10.3093,
|
4618 |
+
"step": 655
|
4619 |
+
},
|
4620 |
+
{
|
4621 |
+
"epoch": 0.07203645747542964,
|
4622 |
+
"grad_norm": 0.03441416472196579,
|
4623 |
+
"learning_rate": 2.694776128065345e-05,
|
4624 |
+
"loss": 10.3097,
|
4625 |
+
"step": 656
|
4626 |
+
},
|
4627 |
+
{
|
4628 |
+
"epoch": 0.07214626914841048,
|
4629 |
+
"grad_norm": 0.025696834549307823,
|
4630 |
+
"learning_rate": 2.6807080783465376e-05,
|
4631 |
+
"loss": 10.3216,
|
4632 |
+
"step": 657
|
4633 |
+
},
|
4634 |
+
{
|
4635 |
+
"epoch": 0.07225608082139132,
|
4636 |
+
"grad_norm": 0.037037041038274765,
|
4637 |
+
"learning_rate": 2.6666633838716314e-05,
|
4638 |
+
"loss": 10.3137,
|
4639 |
+
"step": 658
|
4640 |
+
},
|
4641 |
+
{
|
4642 |
+
"epoch": 0.07236589249437216,
|
4643 |
+
"grad_norm": 0.04586685448884964,
|
4644 |
+
"learning_rate": 2.6526421860705473e-05,
|
4645 |
+
"loss": 10.315,
|
4646 |
+
"step": 659
|
4647 |
+
},
|
4648 |
+
{
|
4649 |
+
"epoch": 0.072475704167353,
|
4650 |
+
"grad_norm": 0.049783892929553986,
|
4651 |
+
"learning_rate": 2.638644626136587e-05,
|
4652 |
+
"loss": 10.3142,
|
4653 |
+
"step": 660
|
4654 |
+
},
|
4655 |
+
{
|
4656 |
+
"epoch": 0.07258551584033383,
|
4657 |
+
"grad_norm": 0.04380533844232559,
|
4658 |
+
"learning_rate": 2.6246708450250256e-05,
|
4659 |
+
"loss": 10.3082,
|
4660 |
+
"step": 661
|
4661 |
+
},
|
4662 |
+
{
|
4663 |
+
"epoch": 0.07269532751331466,
|
4664 |
+
"grad_norm": 0.03231223300099373,
|
4665 |
+
"learning_rate": 2.6107209834516854e-05,
|
4666 |
+
"loss": 10.3139,
|
4667 |
+
"step": 662
|
4668 |
+
},
|
4669 |
+
{
|
4670 |
+
"epoch": 0.0728051391862955,
|
4671 |
+
"grad_norm": 0.03345828503370285,
|
4672 |
+
"learning_rate": 2.596795181891514e-05,
|
4673 |
+
"loss": 10.3191,
|
4674 |
+
"step": 663
|
4675 |
+
},
|
4676 |
+
{
|
4677 |
+
"epoch": 0.07291495085927634,
|
4678 |
+
"grad_norm": 0.04067157581448555,
|
4679 |
+
"learning_rate": 2.5828935805771802e-05,
|
4680 |
+
"loss": 10.3255,
|
4681 |
+
"step": 664
|
4682 |
+
},
|
4683 |
+
{
|
4684 |
+
"epoch": 0.07302476253225718,
|
4685 |
+
"grad_norm": 0.0350385382771492,
|
4686 |
+
"learning_rate": 2.5690163194976575e-05,
|
4687 |
+
"loss": 10.3132,
|
4688 |
+
"step": 665
|
4689 |
+
},
|
4690 |
+
{
|
4691 |
+
"epoch": 0.07313457420523801,
|
4692 |
+
"grad_norm": 0.02556205540895462,
|
4693 |
+
"learning_rate": 2.5551635383968065e-05,
|
4694 |
+
"loss": 10.3139,
|
4695 |
+
"step": 666
|
4696 |
+
},
|
4697 |
+
{
|
4698 |
+
"epoch": 0.07324438587821885,
|
4699 |
+
"grad_norm": 0.036634381860494614,
|
4700 |
+
"learning_rate": 2.5413353767719805e-05,
|
4701 |
+
"loss": 10.3069,
|
4702 |
+
"step": 667
|
4703 |
+
},
|
4704 |
+
{
|
4705 |
+
"epoch": 0.07335419755119969,
|
4706 |
+
"grad_norm": 0.04193887859582901,
|
4707 |
+
"learning_rate": 2.5275319738726165e-05,
|
4708 |
+
"loss": 10.3103,
|
4709 |
+
"step": 668
|
4710 |
+
},
|
4711 |
+
{
|
4712 |
+
"epoch": 0.07346400922418053,
|
4713 |
+
"grad_norm": 0.04867958277463913,
|
4714 |
+
"learning_rate": 2.513753468698826e-05,
|
4715 |
+
"loss": 10.3253,
|
4716 |
+
"step": 669
|
4717 |
+
},
|
4718 |
+
{
|
4719 |
+
"epoch": 0.07357382089716137,
|
4720 |
+
"grad_norm": 0.03344857320189476,
|
4721 |
+
"learning_rate": 2.500000000000001e-05,
|
4722 |
+
"loss": 10.3175,
|
4723 |
+
"step": 670
|
4724 |
+
},
|
4725 |
+
{
|
4726 |
+
"epoch": 0.07368363257014221,
|
4727 |
+
"grad_norm": 0.04245550557971001,
|
4728 |
+
"learning_rate": 2.486271706273421e-05,
|
4729 |
+
"loss": 10.3247,
|
4730 |
+
"step": 671
|
4731 |
+
},
|
4732 |
+
{
|
4733 |
+
"epoch": 0.07379344424312305,
|
4734 |
+
"grad_norm": 0.03168286383152008,
|
4735 |
+
"learning_rate": 2.4725687257628534e-05,
|
4736 |
+
"loss": 10.3145,
|
4737 |
+
"step": 672
|
4738 |
+
},
|
4739 |
+
{
|
4740 |
+
"epoch": 0.07390325591610389,
|
4741 |
+
"grad_norm": 0.03433592617511749,
|
4742 |
+
"learning_rate": 2.4588911964571553e-05,
|
4743 |
+
"loss": 10.3111,
|
4744 |
+
"step": 673
|
4745 |
+
},
|
4746 |
+
{
|
4747 |
+
"epoch": 0.07401306758908471,
|
4748 |
+
"grad_norm": 0.027093639597296715,
|
4749 |
+
"learning_rate": 2.4452392560888976e-05,
|
4750 |
+
"loss": 10.3204,
|
4751 |
+
"step": 674
|
4752 |
+
},
|
4753 |
+
{
|
4754 |
+
"epoch": 0.07412287926206555,
|
4755 |
+
"grad_norm": 0.03639523312449455,
|
4756 |
+
"learning_rate": 2.4316130421329697e-05,
|
4757 |
+
"loss": 10.3152,
|
4758 |
+
"step": 675
|
4759 |
+
},
|
4760 |
+
{
|
4761 |
+
"epoch": 0.07423269093504639,
|
4762 |
+
"grad_norm": 0.03811733424663544,
|
4763 |
+
"learning_rate": 2.418012691805191e-05,
|
4764 |
+
"loss": 10.3224,
|
4765 |
+
"step": 676
|
4766 |
+
},
|
4767 |
+
{
|
4768 |
+
"epoch": 0.07434250260802723,
|
4769 |
+
"grad_norm": 0.033562976866960526,
|
4770 |
+
"learning_rate": 2.4044383420609406e-05,
|
4771 |
+
"loss": 10.3183,
|
4772 |
+
"step": 677
|
4773 |
+
},
|
4774 |
+
{
|
4775 |
+
"epoch": 0.07445231428100807,
|
4776 |
+
"grad_norm": 0.06074458360671997,
|
4777 |
+
"learning_rate": 2.3908901295937713e-05,
|
4778 |
+
"loss": 10.3242,
|
4779 |
+
"step": 678
|
4780 |
+
},
|
4781 |
+
{
|
4782 |
+
"epoch": 0.07456212595398891,
|
4783 |
+
"grad_norm": 0.03439025580883026,
|
4784 |
+
"learning_rate": 2.3773681908340284e-05,
|
4785 |
+
"loss": 10.3216,
|
4786 |
+
"step": 679
|
4787 |
+
},
|
4788 |
+
{
|
4789 |
+
"epoch": 0.07467193762696975,
|
4790 |
+
"grad_norm": 0.042043287307024,
|
4791 |
+
"learning_rate": 2.363872661947488e-05,
|
4792 |
+
"loss": 10.3206,
|
4793 |
+
"step": 680
|
4794 |
+
},
|
4795 |
+
{
|
4796 |
+
"epoch": 0.07478174929995059,
|
4797 |
+
"grad_norm": 0.02884497120976448,
|
4798 |
+
"learning_rate": 2.350403678833976e-05,
|
4799 |
+
"loss": 10.3124,
|
4800 |
+
"step": 681
|
4801 |
+
},
|
4802 |
+
{
|
4803 |
+
"epoch": 0.07489156097293143,
|
4804 |
+
"grad_norm": 0.03978228569030762,
|
4805 |
+
"learning_rate": 2.336961377126001e-05,
|
4806 |
+
"loss": 10.3218,
|
4807 |
+
"step": 682
|
4808 |
+
},
|
4809 |
+
{
|
4810 |
+
"epoch": 0.07500137264591226,
|
4811 |
+
"grad_norm": 0.030587781220674515,
|
4812 |
+
"learning_rate": 2.3235458921873925e-05,
|
4813 |
+
"loss": 10.3172,
|
4814 |
+
"step": 683
|
4815 |
+
},
|
4816 |
+
{
|
4817 |
+
"epoch": 0.0751111843188931,
|
4818 |
+
"grad_norm": 0.04183843359351158,
|
4819 |
+
"learning_rate": 2.310157359111938e-05,
|
4820 |
+
"loss": 10.3199,
|
4821 |
+
"step": 684
|
4822 |
+
},
|
4823 |
+
{
|
4824 |
+
"epoch": 0.07522099599187394,
|
4825 |
+
"grad_norm": 0.03554993122816086,
|
4826 |
+
"learning_rate": 2.296795912722014e-05,
|
4827 |
+
"loss": 10.3174,
|
4828 |
+
"step": 685
|
4829 |
+
},
|
4830 |
+
{
|
4831 |
+
"epoch": 0.07533080766485477,
|
4832 |
+
"grad_norm": 0.03490421175956726,
|
4833 |
+
"learning_rate": 2.283461687567236e-05,
|
4834 |
+
"loss": 10.3124,
|
4835 |
+
"step": 686
|
4836 |
+
},
|
4837 |
+
{
|
4838 |
+
"epoch": 0.07544061933783561,
|
4839 |
+
"grad_norm": 0.03740306943655014,
|
4840 |
+
"learning_rate": 2.2701548179231048e-05,
|
4841 |
+
"loss": 10.3259,
|
4842 |
+
"step": 687
|
4843 |
+
},
|
4844 |
+
{
|
4845 |
+
"epoch": 0.07555043101081645,
|
4846 |
+
"grad_norm": 0.02755308896303177,
|
4847 |
+
"learning_rate": 2.2568754377896516e-05,
|
4848 |
+
"loss": 10.3189,
|
4849 |
+
"step": 688
|
4850 |
+
},
|
4851 |
+
{
|
4852 |
+
"epoch": 0.07566024268379729,
|
4853 |
+
"grad_norm": 0.03202425315976143,
|
4854 |
+
"learning_rate": 2.2436236808900844e-05,
|
4855 |
+
"loss": 10.3191,
|
4856 |
+
"step": 689
|
4857 |
+
},
|
4858 |
+
{
|
4859 |
+
"epoch": 0.07577005435677812,
|
4860 |
+
"grad_norm": 0.034231580793857574,
|
4861 |
+
"learning_rate": 2.2303996806694488e-05,
|
4862 |
+
"loss": 10.319,
|
4863 |
+
"step": 690
|
4864 |
+
},
|
4865 |
+
{
|
4866 |
+
"epoch": 0.07587986602975896,
|
4867 |
+
"grad_norm": 0.03219657391309738,
|
4868 |
+
"learning_rate": 2.2172035702932825e-05,
|
4869 |
+
"loss": 10.3142,
|
4870 |
+
"step": 691
|
4871 |
+
},
|
4872 |
+
{
|
4873 |
+
"epoch": 0.0759896777027398,
|
4874 |
+
"grad_norm": 0.04530481621623039,
|
4875 |
+
"learning_rate": 2.2040354826462668e-05,
|
4876 |
+
"loss": 10.3175,
|
4877 |
+
"step": 692
|
4878 |
+
},
|
4879 |
+
{
|
4880 |
+
"epoch": 0.07609948937572064,
|
4881 |
+
"grad_norm": 0.04149880260229111,
|
4882 |
+
"learning_rate": 2.1908955503308993e-05,
|
4883 |
+
"loss": 10.3091,
|
4884 |
+
"step": 693
|
4885 |
+
},
|
4886 |
+
{
|
4887 |
+
"epoch": 0.07620930104870148,
|
4888 |
+
"grad_norm": 0.03388355299830437,
|
4889 |
+
"learning_rate": 2.1777839056661554e-05,
|
4890 |
+
"loss": 10.3195,
|
4891 |
+
"step": 694
|
4892 |
+
},
|
4893 |
+
{
|
4894 |
+
"epoch": 0.07631911272168232,
|
4895 |
+
"grad_norm": 0.051045697182416916,
|
4896 |
+
"learning_rate": 2.164700680686147e-05,
|
4897 |
+
"loss": 10.3205,
|
4898 |
+
"step": 695
|
4899 |
+
},
|
4900 |
+
{
|
4901 |
+
"epoch": 0.07642892439466316,
|
4902 |
+
"grad_norm": 0.055720798671245575,
|
4903 |
+
"learning_rate": 2.1516460071388062e-05,
|
4904 |
+
"loss": 10.3086,
|
4905 |
+
"step": 696
|
4906 |
+
},
|
4907 |
+
{
|
4908 |
+
"epoch": 0.07653873606764398,
|
4909 |
+
"grad_norm": 0.04313961789011955,
|
4910 |
+
"learning_rate": 2.1386200164845526e-05,
|
4911 |
+
"loss": 10.3159,
|
4912 |
+
"step": 697
|
4913 |
+
},
|
4914 |
+
{
|
4915 |
+
"epoch": 0.07664854774062482,
|
4916 |
+
"grad_norm": 0.05562193691730499,
|
4917 |
+
"learning_rate": 2.125622839894964e-05,
|
4918 |
+
"loss": 10.3138,
|
4919 |
+
"step": 698
|
4920 |
+
},
|
4921 |
+
{
|
4922 |
+
"epoch": 0.07675835941360566,
|
4923 |
+
"grad_norm": 0.03812890127301216,
|
4924 |
+
"learning_rate": 2.1126546082514664e-05,
|
4925 |
+
"loss": 10.3137,
|
4926 |
+
"step": 699
|
4927 |
+
},
|
4928 |
+
{
|
4929 |
+
"epoch": 0.0768681710865865,
|
4930 |
+
"grad_norm": 0.04385654255747795,
|
4931 |
+
"learning_rate": 2.09971545214401e-05,
|
4932 |
+
"loss": 10.3094,
|
4933 |
+
"step": 700
|
4934 |
+
},
|
4935 |
+
{
|
4936 |
+
"epoch": 0.07697798275956734,
|
4937 |
+
"grad_norm": 0.02911500446498394,
|
4938 |
+
"learning_rate": 2.086805501869749e-05,
|
4939 |
+
"loss": 10.321,
|
4940 |
+
"step": 701
|
4941 |
+
},
|
4942 |
+
{
|
4943 |
+
"epoch": 0.07708779443254818,
|
4944 |
+
"grad_norm": 0.048436541110277176,
|
4945 |
+
"learning_rate": 2.073924887431744e-05,
|
4946 |
+
"loss": 10.3085,
|
4947 |
+
"step": 702
|
4948 |
+
},
|
4949 |
+
{
|
4950 |
+
"epoch": 0.07719760610552902,
|
4951 |
+
"grad_norm": 0.028855659067630768,
|
4952 |
+
"learning_rate": 2.061073738537635e-05,
|
4953 |
+
"loss": 10.3155,
|
4954 |
+
"step": 703
|
4955 |
+
},
|
4956 |
+
{
|
4957 |
+
"epoch": 0.07730741777850986,
|
4958 |
+
"grad_norm": 0.03252778202295303,
|
4959 |
+
"learning_rate": 2.048252184598352e-05,
|
4960 |
+
"loss": 10.311,
|
4961 |
+
"step": 704
|
4962 |
+
},
|
4963 |
+
{
|
4964 |
+
"epoch": 0.0774172294514907,
|
4965 |
+
"grad_norm": 0.04148361086845398,
|
4966 |
+
"learning_rate": 2.0354603547267985e-05,
|
4967 |
+
"loss": 10.3228,
|
4968 |
+
"step": 705
|
4969 |
+
},
|
4970 |
+
{
|
4971 |
+
"epoch": 0.07752704112447154,
|
4972 |
+
"grad_norm": 0.038128580898046494,
|
4973 |
+
"learning_rate": 2.0226983777365604e-05,
|
4974 |
+
"loss": 10.3106,
|
4975 |
+
"step": 706
|
4976 |
+
},
|
4977 |
+
{
|
4978 |
+
"epoch": 0.07763685279745237,
|
4979 |
+
"grad_norm": 0.028530459851026535,
|
4980 |
+
"learning_rate": 2.0099663821406056e-05,
|
4981 |
+
"loss": 10.3058,
|
4982 |
+
"step": 707
|
4983 |
+
},
|
4984 |
+
{
|
4985 |
+
"epoch": 0.07774666447043321,
|
4986 |
+
"grad_norm": 0.04369957000017166,
|
4987 |
+
"learning_rate": 1.9972644961499854e-05,
|
4988 |
+
"loss": 10.3175,
|
4989 |
+
"step": 708
|
4990 |
+
},
|
4991 |
+
{
|
4992 |
+
"epoch": 0.07785647614341404,
|
4993 |
+
"grad_norm": 0.030100587755441666,
|
4994 |
+
"learning_rate": 1.9845928476725524e-05,
|
4995 |
+
"loss": 10.3142,
|
4996 |
+
"step": 709
|
4997 |
+
},
|
4998 |
+
{
|
4999 |
+
"epoch": 0.07796628781639488,
|
5000 |
+
"grad_norm": 0.04747960716485977,
|
5001 |
+
"learning_rate": 1.9719515643116674e-05,
|
5002 |
+
"loss": 10.3127,
|
5003 |
+
"step": 710
|
5004 |
+
},
|
5005 |
+
{
|
5006 |
+
"epoch": 0.07807609948937572,
|
5007 |
+
"grad_norm": 0.03726712614297867,
|
5008 |
+
"learning_rate": 1.959340773364911e-05,
|
5009 |
+
"loss": 10.3073,
|
5010 |
+
"step": 711
|
5011 |
+
},
|
5012 |
+
{
|
5013 |
+
"epoch": 0.07818591116235656,
|
5014 |
+
"grad_norm": 0.027646781876683235,
|
5015 |
+
"learning_rate": 1.946760601822809e-05,
|
5016 |
+
"loss": 10.3214,
|
5017 |
+
"step": 712
|
5018 |
+
},
|
5019 |
+
{
|
5020 |
+
"epoch": 0.0782957228353374,
|
5021 |
+
"grad_norm": 0.047327920794487,
|
5022 |
+
"learning_rate": 1.9342111763675512e-05,
|
5023 |
+
"loss": 10.3203,
|
5024 |
+
"step": 713
|
5025 |
+
},
|
5026 |
+
{
|
5027 |
+
"epoch": 0.07840553450831823,
|
5028 |
+
"grad_norm": 0.04683876410126686,
|
5029 |
+
"learning_rate": 1.9216926233717085e-05,
|
5030 |
+
"loss": 10.3167,
|
5031 |
+
"step": 714
|
5032 |
+
},
|
5033 |
+
{
|
5034 |
+
"epoch": 0.07851534618129907,
|
5035 |
+
"grad_norm": 0.03807590901851654,
|
5036 |
+
"learning_rate": 1.9092050688969738e-05,
|
5037 |
+
"loss": 10.326,
|
5038 |
+
"step": 715
|
5039 |
+
},
|
5040 |
+
{
|
5041 |
+
"epoch": 0.07862515785427991,
|
5042 |
+
"grad_norm": 0.03731735050678253,
|
5043 |
+
"learning_rate": 1.8967486386928817e-05,
|
5044 |
+
"loss": 10.3121,
|
5045 |
+
"step": 716
|
5046 |
+
},
|
5047 |
+
{
|
5048 |
+
"epoch": 0.07873496952726075,
|
5049 |
+
"grad_norm": 0.058313701301813126,
|
5050 |
+
"learning_rate": 1.8843234581955442e-05,
|
5051 |
+
"loss": 10.3094,
|
5052 |
+
"step": 717
|
5053 |
+
},
|
5054 |
+
{
|
5055 |
+
"epoch": 0.07884478120024159,
|
5056 |
+
"grad_norm": 0.042186010628938675,
|
5057 |
+
"learning_rate": 1.8719296525263922e-05,
|
5058 |
+
"loss": 10.3155,
|
5059 |
+
"step": 718
|
5060 |
+
},
|
5061 |
+
{
|
5062 |
+
"epoch": 0.07895459287322243,
|
5063 |
+
"grad_norm": 0.043809790164232254,
|
5064 |
+
"learning_rate": 1.859567346490913e-05,
|
5065 |
+
"loss": 10.3259,
|
5066 |
+
"step": 719
|
5067 |
+
},
|
5068 |
+
{
|
5069 |
+
"epoch": 0.07906440454620327,
|
5070 |
+
"grad_norm": 0.03344713896512985,
|
5071 |
+
"learning_rate": 1.847236664577389e-05,
|
5072 |
+
"loss": 10.3147,
|
5073 |
+
"step": 720
|
5074 |
+
},
|
5075 |
+
{
|
5076 |
+
"epoch": 0.0791742162191841,
|
5077 |
+
"grad_norm": 0.035155851393938065,
|
5078 |
+
"learning_rate": 1.8349377309556486e-05,
|
5079 |
+
"loss": 10.3162,
|
5080 |
+
"step": 721
|
5081 |
+
},
|
5082 |
+
{
|
5083 |
+
"epoch": 0.07928402789216493,
|
5084 |
+
"grad_norm": 0.060719750821590424,
|
5085 |
+
"learning_rate": 1.8226706694758195e-05,
|
5086 |
+
"loss": 10.3148,
|
5087 |
+
"step": 722
|
5088 |
+
},
|
5089 |
+
{
|
5090 |
+
"epoch": 0.07939383956514577,
|
5091 |
+
"grad_norm": 0.03652290999889374,
|
5092 |
+
"learning_rate": 1.810435603667075e-05,
|
5093 |
+
"loss": 10.311,
|
5094 |
+
"step": 723
|
5095 |
+
},
|
5096 |
+
{
|
5097 |
+
"epoch": 0.07950365123812661,
|
5098 |
+
"grad_norm": 0.041981663554906845,
|
5099 |
+
"learning_rate": 1.7982326567363888e-05,
|
5100 |
+
"loss": 10.3177,
|
5101 |
+
"step": 724
|
5102 |
+
},
|
5103 |
+
{
|
5104 |
+
"epoch": 0.07961346291110745,
|
5105 |
+
"grad_norm": 0.036146800965070724,
|
5106 |
+
"learning_rate": 1.7860619515673033e-05,
|
5107 |
+
"loss": 10.3255,
|
5108 |
+
"step": 725
|
5109 |
+
},
|
5110 |
+
{
|
5111 |
+
"epoch": 0.07972327458408829,
|
5112 |
+
"grad_norm": 0.038230050355196,
|
5113 |
+
"learning_rate": 1.773923610718686e-05,
|
5114 |
+
"loss": 10.3172,
|
5115 |
+
"step": 726
|
5116 |
+
},
|
5117 |
+
{
|
5118 |
+
"epoch": 0.07983308625706913,
|
5119 |
+
"grad_norm": 0.05691583827137947,
|
5120 |
+
"learning_rate": 1.7618177564234905e-05,
|
5121 |
+
"loss": 10.3222,
|
5122 |
+
"step": 727
|
5123 |
+
},
|
5124 |
+
{
|
5125 |
+
"epoch": 0.07994289793004997,
|
5126 |
+
"grad_norm": 0.03703266382217407,
|
5127 |
+
"learning_rate": 1.7497445105875377e-05,
|
5128 |
+
"loss": 10.3047,
|
5129 |
+
"step": 728
|
5130 |
+
},
|
5131 |
+
{
|
5132 |
+
"epoch": 0.0800527096030308,
|
5133 |
+
"grad_norm": 0.03908339887857437,
|
5134 |
+
"learning_rate": 1.73770399478828e-05,
|
5135 |
+
"loss": 10.3105,
|
5136 |
+
"step": 729
|
5137 |
+
},
|
5138 |
+
{
|
5139 |
+
"epoch": 0.08016252127601164,
|
5140 |
+
"grad_norm": 0.030511919409036636,
|
5141 |
+
"learning_rate": 1.725696330273575e-05,
|
5142 |
+
"loss": 10.3129,
|
5143 |
+
"step": 730
|
5144 |
+
},
|
5145 |
+
{
|
5146 |
+
"epoch": 0.08027233294899248,
|
5147 |
+
"grad_norm": 0.04107876121997833,
|
5148 |
+
"learning_rate": 1.7137216379604727e-05,
|
5149 |
+
"loss": 10.3126,
|
5150 |
+
"step": 731
|
5151 |
+
},
|
5152 |
+
{
|
5153 |
+
"epoch": 0.08038214462197332,
|
5154 |
+
"grad_norm": 0.04407944902777672,
|
5155 |
+
"learning_rate": 1.7017800384339928e-05,
|
5156 |
+
"loss": 10.3152,
|
5157 |
+
"step": 732
|
5158 |
+
},
|
5159 |
+
{
|
5160 |
+
"epoch": 0.08049195629495415,
|
5161 |
+
"grad_norm": 0.03675924614071846,
|
5162 |
+
"learning_rate": 1.6898716519459074e-05,
|
5163 |
+
"loss": 10.3152,
|
5164 |
+
"step": 733
|
5165 |
+
},
|
5166 |
+
{
|
5167 |
+
"epoch": 0.08060176796793499,
|
5168 |
+
"grad_norm": 0.04171142354607582,
|
5169 |
+
"learning_rate": 1.6779965984135377e-05,
|
5170 |
+
"loss": 10.3044,
|
5171 |
+
"step": 734
|
5172 |
+
},
|
5173 |
+
{
|
5174 |
+
"epoch": 0.08071157964091583,
|
5175 |
+
"grad_norm": 0.029391134157776833,
|
5176 |
+
"learning_rate": 1.6661549974185424e-05,
|
5177 |
+
"loss": 10.322,
|
5178 |
+
"step": 735
|
5179 |
+
},
|
5180 |
+
{
|
5181 |
+
"epoch": 0.08082139131389666,
|
5182 |
+
"grad_norm": 0.02885211445391178,
|
5183 |
+
"learning_rate": 1.6543469682057106e-05,
|
5184 |
+
"loss": 10.3165,
|
5185 |
+
"step": 736
|
5186 |
+
},
|
5187 |
+
{
|
5188 |
+
"epoch": 0.0809312029868775,
|
5189 |
+
"grad_norm": 0.0386587493121624,
|
5190 |
+
"learning_rate": 1.6425726296817633e-05,
|
5191 |
+
"loss": 10.3261,
|
5192 |
+
"step": 737
|
5193 |
+
},
|
5194 |
+
{
|
5195 |
+
"epoch": 0.08104101465985834,
|
5196 |
+
"grad_norm": 0.04038149490952492,
|
5197 |
+
"learning_rate": 1.6308321004141607e-05,
|
5198 |
+
"loss": 10.3022,
|
5199 |
+
"step": 738
|
5200 |
+
},
|
5201 |
+
{
|
5202 |
+
"epoch": 0.08115082633283918,
|
5203 |
+
"grad_norm": 0.04831194877624512,
|
5204 |
+
"learning_rate": 1.619125498629904e-05,
|
5205 |
+
"loss": 10.3086,
|
5206 |
+
"step": 739
|
5207 |
+
},
|
5208 |
+
{
|
5209 |
+
"epoch": 0.08126063800582002,
|
5210 |
+
"grad_norm": 0.03492288663983345,
|
5211 |
+
"learning_rate": 1.60745294221434e-05,
|
5212 |
+
"loss": 10.3102,
|
5213 |
+
"step": 740
|
5214 |
+
},
|
5215 |
+
{
|
5216 |
+
"epoch": 0.08137044967880086,
|
5217 |
+
"grad_norm": 0.054558295756578445,
|
5218 |
+
"learning_rate": 1.595814548709983e-05,
|
5219 |
+
"loss": 10.3038,
|
5220 |
+
"step": 741
|
5221 |
+
},
|
5222 |
+
{
|
5223 |
+
"epoch": 0.0814802613517817,
|
5224 |
+
"grad_norm": 0.040097616612911224,
|
5225 |
+
"learning_rate": 1.5842104353153287e-05,
|
5226 |
+
"loss": 10.3186,
|
5227 |
+
"step": 742
|
5228 |
+
},
|
5229 |
+
{
|
5230 |
+
"epoch": 0.08159007302476254,
|
5231 |
+
"grad_norm": 0.03179539740085602,
|
5232 |
+
"learning_rate": 1.5726407188836673e-05,
|
5233 |
+
"loss": 10.3188,
|
5234 |
+
"step": 743
|
5235 |
+
},
|
5236 |
+
{
|
5237 |
+
"epoch": 0.08169988469774336,
|
5238 |
+
"grad_norm": 0.04013778269290924,
|
5239 |
+
"learning_rate": 1.5611055159219152e-05,
|
5240 |
+
"loss": 10.3191,
|
5241 |
+
"step": 744
|
5242 |
+
},
|
5243 |
+
{
|
5244 |
+
"epoch": 0.0818096963707242,
|
5245 |
+
"grad_norm": 0.045401014387607574,
|
5246 |
+
"learning_rate": 1.549604942589441e-05,
|
5247 |
+
"loss": 10.3101,
|
5248 |
+
"step": 745
|
5249 |
+
},
|
5250 |
+
{
|
5251 |
+
"epoch": 0.08191950804370504,
|
5252 |
+
"grad_norm": 0.039099063724279404,
|
5253 |
+
"learning_rate": 1.5381391146968866e-05,
|
5254 |
+
"loss": 10.3063,
|
5255 |
+
"step": 746
|
5256 |
+
},
|
5257 |
+
{
|
5258 |
+
"epoch": 0.08202931971668588,
|
5259 |
+
"grad_norm": 0.04024317488074303,
|
5260 |
+
"learning_rate": 1.526708147705013e-05,
|
5261 |
+
"loss": 10.321,
|
5262 |
+
"step": 747
|
5263 |
+
},
|
5264 |
+
{
|
5265 |
+
"epoch": 0.08213913138966672,
|
5266 |
+
"grad_norm": 0.03901572898030281,
|
5267 |
+
"learning_rate": 1.5153121567235335e-05,
|
5268 |
+
"loss": 10.3149,
|
5269 |
+
"step": 748
|
5270 |
+
},
|
5271 |
+
{
|
5272 |
+
"epoch": 0.08224894306264756,
|
5273 |
+
"grad_norm": 0.03517254814505577,
|
5274 |
+
"learning_rate": 1.5039512565099467e-05,
|
5275 |
+
"loss": 10.3173,
|
5276 |
+
"step": 749
|
5277 |
+
},
|
5278 |
+
{
|
5279 |
+
"epoch": 0.0823587547356284,
|
5280 |
+
"grad_norm": 0.04018811881542206,
|
5281 |
+
"learning_rate": 1.4926255614683932e-05,
|
5282 |
+
"loss": 10.3144,
|
5283 |
+
"step": 750
|
5284 |
+
},
|
5285 |
+
{
|
5286 |
+
"epoch": 0.0823587547356284,
|
5287 |
+
"eval_loss": 10.313794136047363,
|
5288 |
+
"eval_runtime": 126.7622,
|
5289 |
+
"eval_samples_per_second": 11.731,
|
5290 |
+
"eval_steps_per_second": 5.869,
|
5291 |
+
"step": 750
|
5292 |
+
},
|
5293 |
+
{
|
5294 |
+
"epoch": 0.08246856640860924,
|
5295 |
+
"grad_norm": 0.030414637178182602,
|
5296 |
+
"learning_rate": 1.481335185648498e-05,
|
5297 |
+
"loss": 10.317,
|
5298 |
+
"step": 751
|
5299 |
+
},
|
5300 |
+
{
|
5301 |
+
"epoch": 0.08257837808159008,
|
5302 |
+
"grad_norm": 0.028846023604273796,
|
5303 |
+
"learning_rate": 1.4700802427442179e-05,
|
5304 |
+
"loss": 10.3138,
|
5305 |
+
"step": 752
|
5306 |
+
},
|
5307 |
+
{
|
5308 |
+
"epoch": 0.08268818975457091,
|
5309 |
+
"grad_norm": 0.03306645527482033,
|
5310 |
+
"learning_rate": 1.458860846092705e-05,
|
5311 |
+
"loss": 10.3088,
|
5312 |
+
"step": 753
|
5313 |
+
},
|
5314 |
+
{
|
5315 |
+
"epoch": 0.08279800142755175,
|
5316 |
+
"grad_norm": 0.038386497646570206,
|
5317 |
+
"learning_rate": 1.4476771086731567e-05,
|
5318 |
+
"loss": 10.3158,
|
5319 |
+
"step": 754
|
5320 |
+
},
|
5321 |
+
{
|
5322 |
+
"epoch": 0.08290781310053259,
|
5323 |
+
"grad_norm": 0.03763662651181221,
|
5324 |
+
"learning_rate": 1.4365291431056871e-05,
|
5325 |
+
"loss": 10.3225,
|
5326 |
+
"step": 755
|
5327 |
+
},
|
5328 |
+
{
|
5329 |
+
"epoch": 0.08301762477351342,
|
5330 |
+
"grad_norm": 0.033651165664196014,
|
5331 |
+
"learning_rate": 1.4254170616501827e-05,
|
5332 |
+
"loss": 10.3079,
|
5333 |
+
"step": 756
|
5334 |
+
},
|
5335 |
+
{
|
5336 |
+
"epoch": 0.08312743644649426,
|
5337 |
+
"grad_norm": 0.03197680786252022,
|
5338 |
+
"learning_rate": 1.414340976205183e-05,
|
5339 |
+
"loss": 10.3195,
|
5340 |
+
"step": 757
|
5341 |
+
},
|
5342 |
+
{
|
5343 |
+
"epoch": 0.0832372481194751,
|
5344 |
+
"grad_norm": 0.040615715086460114,
|
5345 |
+
"learning_rate": 1.4033009983067452e-05,
|
5346 |
+
"loss": 10.3074,
|
5347 |
+
"step": 758
|
5348 |
+
},
|
5349 |
+
{
|
5350 |
+
"epoch": 0.08334705979245594,
|
5351 |
+
"grad_norm": 0.04317568242549896,
|
5352 |
+
"learning_rate": 1.3922972391273226e-05,
|
5353 |
+
"loss": 10.3095,
|
5354 |
+
"step": 759
|
5355 |
+
},
|
5356 |
+
{
|
5357 |
+
"epoch": 0.08345687146543677,
|
5358 |
+
"grad_norm": 0.03475901857018471,
|
5359 |
+
"learning_rate": 1.3813298094746491e-05,
|
5360 |
+
"loss": 10.3156,
|
5361 |
+
"step": 760
|
5362 |
+
},
|
5363 |
+
{
|
5364 |
+
"epoch": 0.08356668313841761,
|
5365 |
+
"grad_norm": 0.044157762080430984,
|
5366 |
+
"learning_rate": 1.3703988197906209e-05,
|
5367 |
+
"loss": 10.3152,
|
5368 |
+
"step": 761
|
5369 |
+
},
|
5370 |
+
{
|
5371 |
+
"epoch": 0.08367649481139845,
|
5372 |
+
"grad_norm": 0.03343284875154495,
|
5373 |
+
"learning_rate": 1.3595043801501794e-05,
|
5374 |
+
"loss": 10.3174,
|
5375 |
+
"step": 762
|
5376 |
+
},
|
5377 |
+
{
|
5378 |
+
"epoch": 0.08378630648437929,
|
5379 |
+
"grad_norm": 0.029324904084205627,
|
5380 |
+
"learning_rate": 1.3486466002602133e-05,
|
5381 |
+
"loss": 10.3177,
|
5382 |
+
"step": 763
|
5383 |
+
},
|
5384 |
+
{
|
5385 |
+
"epoch": 0.08389611815736013,
|
5386 |
+
"grad_norm": 0.038032740354537964,
|
5387 |
+
"learning_rate": 1.3378255894584463e-05,
|
5388 |
+
"loss": 10.3177,
|
5389 |
+
"step": 764
|
5390 |
+
},
|
5391 |
+
{
|
5392 |
+
"epoch": 0.08400592983034097,
|
5393 |
+
"grad_norm": 0.049427881836891174,
|
5394 |
+
"learning_rate": 1.327041456712334e-05,
|
5395 |
+
"loss": 10.301,
|
5396 |
+
"step": 765
|
5397 |
+
},
|
5398 |
+
{
|
5399 |
+
"epoch": 0.08411574150332181,
|
5400 |
+
"grad_norm": 0.0315837599337101,
|
5401 |
+
"learning_rate": 1.3162943106179749e-05,
|
5402 |
+
"loss": 10.3037,
|
5403 |
+
"step": 766
|
5404 |
+
},
|
5405 |
+
{
|
5406 |
+
"epoch": 0.08422555317630265,
|
5407 |
+
"grad_norm": 0.040594566613435745,
|
5408 |
+
"learning_rate": 1.3055842593990131e-05,
|
5409 |
+
"loss": 10.306,
|
5410 |
+
"step": 767
|
5411 |
+
},
|
5412 |
+
{
|
5413 |
+
"epoch": 0.08433536484928347,
|
5414 |
+
"grad_norm": 0.04508155584335327,
|
5415 |
+
"learning_rate": 1.2949114109055415e-05,
|
5416 |
+
"loss": 10.3162,
|
5417 |
+
"step": 768
|
5418 |
+
},
|
5419 |
+
{
|
5420 |
+
"epoch": 0.08444517652226431,
|
5421 |
+
"grad_norm": 0.03993818908929825,
|
5422 |
+
"learning_rate": 1.2842758726130283e-05,
|
5423 |
+
"loss": 10.3142,
|
5424 |
+
"step": 769
|
5425 |
+
},
|
5426 |
+
{
|
5427 |
+
"epoch": 0.08455498819524515,
|
5428 |
+
"grad_norm": 0.04547726735472679,
|
5429 |
+
"learning_rate": 1.2736777516212266e-05,
|
5430 |
+
"loss": 10.3145,
|
5431 |
+
"step": 770
|
5432 |
+
},
|
5433 |
+
{
|
5434 |
+
"epoch": 0.08466479986822599,
|
5435 |
+
"grad_norm": 0.03807322680950165,
|
5436 |
+
"learning_rate": 1.2631171546530968e-05,
|
5437 |
+
"loss": 10.3149,
|
5438 |
+
"step": 771
|
5439 |
+
},
|
5440 |
+
{
|
5441 |
+
"epoch": 0.08477461154120683,
|
5442 |
+
"grad_norm": 0.048404186964035034,
|
5443 |
+
"learning_rate": 1.2525941880537307e-05,
|
5444 |
+
"loss": 10.3118,
|
5445 |
+
"step": 772
|
5446 |
+
},
|
5447 |
+
{
|
5448 |
+
"epoch": 0.08488442321418767,
|
5449 |
+
"grad_norm": 0.03061523661017418,
|
5450 |
+
"learning_rate": 1.2421089577892869e-05,
|
5451 |
+
"loss": 10.3091,
|
5452 |
+
"step": 773
|
5453 |
+
},
|
5454 |
+
{
|
5455 |
+
"epoch": 0.08499423488716851,
|
5456 |
+
"grad_norm": 0.04034106433391571,
|
5457 |
+
"learning_rate": 1.2316615694459189e-05,
|
5458 |
+
"loss": 10.3071,
|
5459 |
+
"step": 774
|
5460 |
+
},
|
5461 |
+
{
|
5462 |
+
"epoch": 0.08510404656014935,
|
5463 |
+
"grad_norm": 0.034991052001714706,
|
5464 |
+
"learning_rate": 1.2212521282287092e-05,
|
5465 |
+
"loss": 10.3175,
|
5466 |
+
"step": 775
|
5467 |
+
},
|
5468 |
+
{
|
5469 |
+
"epoch": 0.08521385823313019,
|
5470 |
+
"grad_norm": 0.033823732286691666,
|
5471 |
+
"learning_rate": 1.2108807389606158e-05,
|
5472 |
+
"loss": 10.3216,
|
5473 |
+
"step": 776
|
5474 |
+
},
|
5475 |
+
{
|
5476 |
+
"epoch": 0.08532366990611102,
|
5477 |
+
"grad_norm": 0.03538206219673157,
|
5478 |
+
"learning_rate": 1.2005475060814159e-05,
|
5479 |
+
"loss": 10.3111,
|
5480 |
+
"step": 777
|
5481 |
+
},
|
5482 |
+
{
|
5483 |
+
"epoch": 0.08543348157909186,
|
5484 |
+
"grad_norm": 0.03175722807645798,
|
5485 |
+
"learning_rate": 1.1902525336466464e-05,
|
5486 |
+
"loss": 10.3105,
|
5487 |
+
"step": 778
|
5488 |
+
},
|
5489 |
+
{
|
5490 |
+
"epoch": 0.08554329325207269,
|
5491 |
+
"grad_norm": 0.039146069437265396,
|
5492 |
+
"learning_rate": 1.1799959253265668e-05,
|
5493 |
+
"loss": 10.3121,
|
5494 |
+
"step": 779
|
5495 |
+
},
|
5496 |
+
{
|
5497 |
+
"epoch": 0.08565310492505353,
|
5498 |
+
"grad_norm": 0.04693768173456192,
|
5499 |
+
"learning_rate": 1.1697777844051105e-05,
|
5500 |
+
"loss": 10.3168,
|
5501 |
+
"step": 780
|
5502 |
+
},
|
5503 |
+
{
|
5504 |
+
"epoch": 0.08576291659803437,
|
5505 |
+
"grad_norm": 0.040665190666913986,
|
5506 |
+
"learning_rate": 1.1595982137788403e-05,
|
5507 |
+
"loss": 10.3135,
|
5508 |
+
"step": 781
|
5509 |
+
},
|
5510 |
+
{
|
5511 |
+
"epoch": 0.0858727282710152,
|
5512 |
+
"grad_norm": 0.04170459136366844,
|
5513 |
+
"learning_rate": 1.1494573159559213e-05,
|
5514 |
+
"loss": 10.3146,
|
5515 |
+
"step": 782
|
5516 |
+
},
|
5517 |
+
{
|
5518 |
+
"epoch": 0.08598253994399604,
|
5519 |
+
"grad_norm": 0.044261373579502106,
|
5520 |
+
"learning_rate": 1.1393551930550828e-05,
|
5521 |
+
"loss": 10.3217,
|
5522 |
+
"step": 783
|
5523 |
+
},
|
5524 |
+
{
|
5525 |
+
"epoch": 0.08609235161697688,
|
5526 |
+
"grad_norm": 0.037661824375391006,
|
5527 |
+
"learning_rate": 1.1292919468045877e-05,
|
5528 |
+
"loss": 10.3139,
|
5529 |
+
"step": 784
|
5530 |
+
},
|
5531 |
+
{
|
5532 |
+
"epoch": 0.08620216328995772,
|
5533 |
+
"grad_norm": 0.022352036088705063,
|
5534 |
+
"learning_rate": 1.1192676785412154e-05,
|
5535 |
+
"loss": 10.3142,
|
5536 |
+
"step": 785
|
5537 |
+
},
|
5538 |
+
{
|
5539 |
+
"epoch": 0.08631197496293856,
|
5540 |
+
"grad_norm": 0.032345980405807495,
|
5541 |
+
"learning_rate": 1.1092824892092373e-05,
|
5542 |
+
"loss": 10.319,
|
5543 |
+
"step": 786
|
5544 |
+
},
|
5545 |
+
{
|
5546 |
+
"epoch": 0.0864217866359194,
|
5547 |
+
"grad_norm": 0.05072391778230667,
|
5548 |
+
"learning_rate": 1.099336479359398e-05,
|
5549 |
+
"loss": 10.3104,
|
5550 |
+
"step": 787
|
5551 |
+
},
|
5552 |
+
{
|
5553 |
+
"epoch": 0.08653159830890024,
|
5554 |
+
"grad_norm": 0.03811797499656677,
|
5555 |
+
"learning_rate": 1.0894297491479045e-05,
|
5556 |
+
"loss": 10.3211,
|
5557 |
+
"step": 788
|
5558 |
+
},
|
5559 |
+
{
|
5560 |
+
"epoch": 0.08664140998188108,
|
5561 |
+
"grad_norm": 0.039311766624450684,
|
5562 |
+
"learning_rate": 1.0795623983354215e-05,
|
5563 |
+
"loss": 10.3168,
|
5564 |
+
"step": 789
|
5565 |
+
},
|
5566 |
+
{
|
5567 |
+
"epoch": 0.08675122165486192,
|
5568 |
+
"grad_norm": 0.044613540172576904,
|
5569 |
+
"learning_rate": 1.0697345262860636e-05,
|
5570 |
+
"loss": 10.3177,
|
5571 |
+
"step": 790
|
5572 |
+
},
|
5573 |
+
{
|
5574 |
+
"epoch": 0.08686103332784274,
|
5575 |
+
"grad_norm": 0.038676317781209946,
|
5576 |
+
"learning_rate": 1.0599462319663905e-05,
|
5577 |
+
"loss": 10.3117,
|
5578 |
+
"step": 791
|
5579 |
+
},
|
5580 |
+
{
|
5581 |
+
"epoch": 0.08697084500082358,
|
5582 |
+
"grad_norm": 0.035879697650671005,
|
5583 |
+
"learning_rate": 1.0501976139444191e-05,
|
5584 |
+
"loss": 10.3083,
|
5585 |
+
"step": 792
|
5586 |
+
},
|
5587 |
+
{
|
5588 |
+
"epoch": 0.08708065667380442,
|
5589 |
+
"grad_norm": 0.03627593815326691,
|
5590 |
+
"learning_rate": 1.0404887703886251e-05,
|
5591 |
+
"loss": 10.3153,
|
5592 |
+
"step": 793
|
5593 |
+
},
|
5594 |
+
{
|
5595 |
+
"epoch": 0.08719046834678526,
|
5596 |
+
"grad_norm": 0.032888129353523254,
|
5597 |
+
"learning_rate": 1.0308197990669538e-05,
|
5598 |
+
"loss": 10.3219,
|
5599 |
+
"step": 794
|
5600 |
+
},
|
5601 |
+
{
|
5602 |
+
"epoch": 0.0873002800197661,
|
5603 |
+
"grad_norm": 0.027540508657693863,
|
5604 |
+
"learning_rate": 1.021190797345839e-05,
|
5605 |
+
"loss": 10.314,
|
5606 |
+
"step": 795
|
5607 |
+
},
|
5608 |
+
{
|
5609 |
+
"epoch": 0.08741009169274694,
|
5610 |
+
"grad_norm": 0.032333459705114365,
|
5611 |
+
"learning_rate": 1.0116018621892237e-05,
|
5612 |
+
"loss": 10.3131,
|
5613 |
+
"step": 796
|
5614 |
+
},
|
5615 |
+
{
|
5616 |
+
"epoch": 0.08751990336572778,
|
5617 |
+
"grad_norm": 0.028225935995578766,
|
5618 |
+
"learning_rate": 1.0020530901575754e-05,
|
5619 |
+
"loss": 10.3154,
|
5620 |
+
"step": 797
|
5621 |
+
},
|
5622 |
+
{
|
5623 |
+
"epoch": 0.08762971503870862,
|
5624 |
+
"grad_norm": 0.03728807717561722,
|
5625 |
+
"learning_rate": 9.92544577406923e-06,
|
5626 |
+
"loss": 10.3162,
|
5627 |
+
"step": 798
|
5628 |
+
},
|
5629 |
+
{
|
5630 |
+
"epoch": 0.08773952671168946,
|
5631 |
+
"grad_norm": 0.03525736555457115,
|
5632 |
+
"learning_rate": 9.830764196878872e-06,
|
5633 |
+
"loss": 10.3099,
|
5634 |
+
"step": 799
|
5635 |
+
},
|
5636 |
+
{
|
5637 |
+
"epoch": 0.0878493383846703,
|
5638 |
+
"grad_norm": 0.04706577956676483,
|
5639 |
+
"learning_rate": 9.73648712344707e-06,
|
5640 |
+
"loss": 10.3132,
|
5641 |
+
"step": 800
|
5642 |
+
},
|
5643 |
+
{
|
5644 |
+
"epoch": 0.08795915005765113,
|
5645 |
+
"grad_norm": 0.028163529932498932,
|
5646 |
+
"learning_rate": 9.642615503142926e-06,
|
5647 |
+
"loss": 10.3124,
|
5648 |
+
"step": 801
|
5649 |
+
},
|
5650 |
+
{
|
5651 |
+
"epoch": 0.08806896173063197,
|
5652 |
+
"grad_norm": 0.04801159352064133,
|
5653 |
+
"learning_rate": 9.549150281252633e-06,
|
5654 |
+
"loss": 10.3221,
|
5655 |
+
"step": 802
|
5656 |
+
},
|
5657 |
+
{
|
5658 |
+
"epoch": 0.0881787734036128,
|
5659 |
+
"grad_norm": 0.04143408685922623,
|
5660 |
+
"learning_rate": 9.456092398969902e-06,
|
5661 |
+
"loss": 10.3191,
|
5662 |
+
"step": 803
|
5663 |
+
},
|
5664 |
+
{
|
5665 |
+
"epoch": 0.08828858507659364,
|
5666 |
+
"grad_norm": 0.04347795993089676,
|
5667 |
+
"learning_rate": 9.363442793386606e-06,
|
5668 |
+
"loss": 10.3035,
|
5669 |
+
"step": 804
|
5670 |
+
},
|
5671 |
+
{
|
5672 |
+
"epoch": 0.08839839674957448,
|
5673 |
+
"grad_norm": 0.0419791154563427,
|
5674 |
+
"learning_rate": 9.271202397483215e-06,
|
5675 |
+
"loss": 10.3162,
|
5676 |
+
"step": 805
|
5677 |
+
},
|
5678 |
+
{
|
5679 |
+
"epoch": 0.08850820842255532,
|
5680 |
+
"grad_norm": 0.04846560209989548,
|
5681 |
+
"learning_rate": 9.179372140119525e-06,
|
5682 |
+
"loss": 10.3174,
|
5683 |
+
"step": 806
|
5684 |
+
},
|
5685 |
+
{
|
5686 |
+
"epoch": 0.08861802009553615,
|
5687 |
+
"grad_norm": 0.031321022659540176,
|
5688 |
+
"learning_rate": 9.087952946025175e-06,
|
5689 |
+
"loss": 10.3094,
|
5690 |
+
"step": 807
|
5691 |
+
},
|
5692 |
+
{
|
5693 |
+
"epoch": 0.088727831768517,
|
5694 |
+
"grad_norm": 0.044255632907152176,
|
5695 |
+
"learning_rate": 8.996945735790447e-06,
|
5696 |
+
"loss": 10.3189,
|
5697 |
+
"step": 808
|
5698 |
+
},
|
5699 |
+
{
|
5700 |
+
"epoch": 0.08883764344149783,
|
5701 |
+
"grad_norm": 0.049325257539749146,
|
5702 |
+
"learning_rate": 8.906351425856952e-06,
|
5703 |
+
"loss": 10.3179,
|
5704 |
+
"step": 809
|
5705 |
+
},
|
5706 |
+
{
|
5707 |
+
"epoch": 0.08894745511447867,
|
5708 |
+
"grad_norm": 0.03295501694083214,
|
5709 |
+
"learning_rate": 8.816170928508365e-06,
|
5710 |
+
"loss": 10.3152,
|
5711 |
+
"step": 810
|
5712 |
+
},
|
5713 |
+
{
|
5714 |
+
"epoch": 0.08905726678745951,
|
5715 |
+
"grad_norm": 0.03943759202957153,
|
5716 |
+
"learning_rate": 8.7264051518613e-06,
|
5717 |
+
"loss": 10.3086,
|
5718 |
+
"step": 811
|
5719 |
+
},
|
5720 |
+
{
|
5721 |
+
"epoch": 0.08916707846044035,
|
5722 |
+
"grad_norm": 0.030172457918524742,
|
5723 |
+
"learning_rate": 8.637054999856148e-06,
|
5724 |
+
"loss": 10.3216,
|
5725 |
+
"step": 812
|
5726 |
+
},
|
5727 |
+
{
|
5728 |
+
"epoch": 0.08927689013342119,
|
5729 |
+
"grad_norm": 0.034907639026641846,
|
5730 |
+
"learning_rate": 8.548121372247918e-06,
|
5731 |
+
"loss": 10.3139,
|
5732 |
+
"step": 813
|
5733 |
+
},
|
5734 |
+
{
|
5735 |
+
"epoch": 0.08938670180640203,
|
5736 |
+
"grad_norm": 0.04455006122589111,
|
5737 |
+
"learning_rate": 8.459605164597267e-06,
|
5738 |
+
"loss": 10.3064,
|
5739 |
+
"step": 814
|
5740 |
+
},
|
5741 |
+
{
|
5742 |
+
"epoch": 0.08949651347938285,
|
5743 |
+
"grad_norm": 0.02922525629401207,
|
5744 |
+
"learning_rate": 8.371507268261437e-06,
|
5745 |
+
"loss": 10.3163,
|
5746 |
+
"step": 815
|
5747 |
+
},
|
5748 |
+
{
|
5749 |
+
"epoch": 0.08960632515236369,
|
5750 |
+
"grad_norm": 0.03390868008136749,
|
5751 |
+
"learning_rate": 8.283828570385238e-06,
|
5752 |
+
"loss": 10.3106,
|
5753 |
+
"step": 816
|
5754 |
+
},
|
5755 |
+
{
|
5756 |
+
"epoch": 0.08971613682534453,
|
5757 |
+
"grad_norm": 0.02741953358054161,
|
5758 |
+
"learning_rate": 8.196569953892202e-06,
|
5759 |
+
"loss": 10.3219,
|
5760 |
+
"step": 817
|
5761 |
+
},
|
5762 |
+
{
|
5763 |
+
"epoch": 0.08982594849832537,
|
5764 |
+
"grad_norm": 0.02589074708521366,
|
5765 |
+
"learning_rate": 8.109732297475635e-06,
|
5766 |
+
"loss": 10.323,
|
5767 |
+
"step": 818
|
5768 |
+
},
|
5769 |
+
{
|
5770 |
+
"epoch": 0.08993576017130621,
|
5771 |
+
"grad_norm": 0.04258984699845314,
|
5772 |
+
"learning_rate": 8.023316475589754e-06,
|
5773 |
+
"loss": 10.3244,
|
5774 |
+
"step": 819
|
5775 |
+
},
|
5776 |
+
{
|
5777 |
+
"epoch": 0.09004557184428705,
|
5778 |
+
"grad_norm": 0.04778209701180458,
|
5779 |
+
"learning_rate": 7.937323358440935e-06,
|
5780 |
+
"loss": 10.3081,
|
5781 |
+
"step": 820
|
5782 |
+
},
|
5783 |
+
{
|
5784 |
+
"epoch": 0.09015538351726789,
|
5785 |
+
"grad_norm": 0.043443091213703156,
|
5786 |
+
"learning_rate": 7.851753811978924e-06,
|
5787 |
+
"loss": 10.3217,
|
5788 |
+
"step": 821
|
5789 |
+
},
|
5790 |
+
{
|
5791 |
+
"epoch": 0.09026519519024873,
|
5792 |
+
"grad_norm": 0.041465628892183304,
|
5793 |
+
"learning_rate": 7.766608697888095e-06,
|
5794 |
+
"loss": 10.3095,
|
5795 |
+
"step": 822
|
5796 |
+
},
|
5797 |
+
{
|
5798 |
+
"epoch": 0.09037500686322957,
|
5799 |
+
"grad_norm": 0.03256349265575409,
|
5800 |
+
"learning_rate": 7.681888873578786e-06,
|
5801 |
+
"loss": 10.3116,
|
5802 |
+
"step": 823
|
5803 |
+
},
|
5804 |
+
{
|
5805 |
+
"epoch": 0.0904848185362104,
|
5806 |
+
"grad_norm": 0.04575566202402115,
|
5807 |
+
"learning_rate": 7.597595192178702e-06,
|
5808 |
+
"loss": 10.3091,
|
5809 |
+
"step": 824
|
5810 |
+
},
|
5811 |
+
{
|
5812 |
+
"epoch": 0.09059463020919124,
|
5813 |
+
"grad_norm": 0.028112446889281273,
|
5814 |
+
"learning_rate": 7.513728502524286e-06,
|
5815 |
+
"loss": 10.318,
|
5816 |
+
"step": 825
|
5817 |
+
},
|
5818 |
+
{
|
5819 |
+
"epoch": 0.09070444188217207,
|
5820 |
+
"grad_norm": 0.04239173233509064,
|
5821 |
+
"learning_rate": 7.430289649152156e-06,
|
5822 |
+
"loss": 10.3064,
|
5823 |
+
"step": 826
|
5824 |
+
},
|
5825 |
+
{
|
5826 |
+
"epoch": 0.09081425355515291,
|
5827 |
+
"grad_norm": 0.03836563974618912,
|
5828 |
+
"learning_rate": 7.347279472290647e-06,
|
5829 |
+
"loss": 10.3252,
|
5830 |
+
"step": 827
|
5831 |
+
},
|
5832 |
+
{
|
5833 |
+
"epoch": 0.09092406522813375,
|
5834 |
+
"grad_norm": 0.03923396021127701,
|
5835 |
+
"learning_rate": 7.264698807851328e-06,
|
5836 |
+
"loss": 10.3129,
|
5837 |
+
"step": 828
|
5838 |
+
},
|
5839 |
+
{
|
5840 |
+
"epoch": 0.09103387690111459,
|
5841 |
+
"grad_norm": 0.04261473938822746,
|
5842 |
+
"learning_rate": 7.182548487420554e-06,
|
5843 |
+
"loss": 10.3076,
|
5844 |
+
"step": 829
|
5845 |
+
},
|
5846 |
+
{
|
5847 |
+
"epoch": 0.09114368857409542,
|
5848 |
+
"grad_norm": 0.026719851419329643,
|
5849 |
+
"learning_rate": 7.100829338251147e-06,
|
5850 |
+
"loss": 10.3145,
|
5851 |
+
"step": 830
|
5852 |
+
},
|
5853 |
+
{
|
5854 |
+
"epoch": 0.09125350024707626,
|
5855 |
+
"grad_norm": 0.039296165108680725,
|
5856 |
+
"learning_rate": 7.019542183254046e-06,
|
5857 |
+
"loss": 10.3084,
|
5858 |
+
"step": 831
|
5859 |
+
},
|
5860 |
+
{
|
5861 |
+
"epoch": 0.0913633119200571,
|
5862 |
+
"grad_norm": 0.04548013210296631,
|
5863 |
+
"learning_rate": 6.9386878409899715e-06,
|
5864 |
+
"loss": 10.3146,
|
5865 |
+
"step": 832
|
5866 |
+
},
|
5867 |
+
{
|
5868 |
+
"epoch": 0.09147312359303794,
|
5869 |
+
"grad_norm": 0.036591462790966034,
|
5870 |
+
"learning_rate": 6.858267125661272e-06,
|
5871 |
+
"loss": 10.3044,
|
5872 |
+
"step": 833
|
5873 |
+
},
|
5874 |
+
{
|
5875 |
+
"epoch": 0.09158293526601878,
|
5876 |
+
"grad_norm": 0.04193099960684776,
|
5877 |
+
"learning_rate": 6.778280847103669e-06,
|
5878 |
+
"loss": 10.3068,
|
5879 |
+
"step": 834
|
5880 |
+
},
|
5881 |
+
{
|
5882 |
+
"epoch": 0.09169274693899962,
|
5883 |
+
"grad_norm": 0.036512311547994614,
|
5884 |
+
"learning_rate": 6.698729810778065e-06,
|
5885 |
+
"loss": 10.3261,
|
5886 |
+
"step": 835
|
5887 |
+
},
|
5888 |
+
{
|
5889 |
+
"epoch": 0.09180255861198046,
|
5890 |
+
"grad_norm": 0.04096691682934761,
|
5891 |
+
"learning_rate": 6.619614817762537e-06,
|
5892 |
+
"loss": 10.3102,
|
5893 |
+
"step": 836
|
5894 |
+
},
|
5895 |
+
{
|
5896 |
+
"epoch": 0.0919123702849613,
|
5897 |
+
"grad_norm": 0.026638969779014587,
|
5898 |
+
"learning_rate": 6.540936664744196e-06,
|
5899 |
+
"loss": 10.3144,
|
5900 |
+
"step": 837
|
5901 |
+
},
|
5902 |
+
{
|
5903 |
+
"epoch": 0.09202218195794212,
|
5904 |
+
"grad_norm": 0.030443793162703514,
|
5905 |
+
"learning_rate": 6.462696144011149e-06,
|
5906 |
+
"loss": 10.3162,
|
5907 |
+
"step": 838
|
5908 |
+
},
|
5909 |
+
{
|
5910 |
+
"epoch": 0.09213199363092296,
|
5911 |
+
"grad_norm": 0.05177822709083557,
|
5912 |
+
"learning_rate": 6.384894043444567e-06,
|
5913 |
+
"loss": 10.3126,
|
5914 |
+
"step": 839
|
5915 |
+
},
|
5916 |
+
{
|
5917 |
+
"epoch": 0.0922418053039038,
|
5918 |
+
"grad_norm": 0.030835647135972977,
|
5919 |
+
"learning_rate": 6.3075311465107535e-06,
|
5920 |
+
"loss": 10.3153,
|
5921 |
+
"step": 840
|
5922 |
+
},
|
5923 |
+
{
|
5924 |
+
"epoch": 0.09235161697688464,
|
5925 |
+
"grad_norm": 0.041804298758506775,
|
5926 |
+
"learning_rate": 6.230608232253227e-06,
|
5927 |
+
"loss": 10.311,
|
5928 |
+
"step": 841
|
5929 |
+
},
|
5930 |
+
{
|
5931 |
+
"epoch": 0.09246142864986548,
|
5932 |
+
"grad_norm": 0.035478003323078156,
|
5933 |
+
"learning_rate": 6.154126075284855e-06,
|
5934 |
+
"loss": 10.3128,
|
5935 |
+
"step": 842
|
5936 |
+
},
|
5937 |
+
{
|
5938 |
+
"epoch": 0.09257124032284632,
|
5939 |
+
"grad_norm": 0.05020277947187424,
|
5940 |
+
"learning_rate": 6.078085445780129e-06,
|
5941 |
+
"loss": 10.3162,
|
5942 |
+
"step": 843
|
5943 |
+
},
|
5944 |
+
{
|
5945 |
+
"epoch": 0.09268105199582716,
|
5946 |
+
"grad_norm": 0.051810123026371,
|
5947 |
+
"learning_rate": 6.002487109467347e-06,
|
5948 |
+
"loss": 10.3107,
|
5949 |
+
"step": 844
|
5950 |
+
},
|
5951 |
+
{
|
5952 |
+
"epoch": 0.092790863668808,
|
5953 |
+
"grad_norm": 0.039541371166706085,
|
5954 |
+
"learning_rate": 5.927331827620903e-06,
|
5955 |
+
"loss": 10.3042,
|
5956 |
+
"step": 845
|
5957 |
+
},
|
5958 |
+
{
|
5959 |
+
"epoch": 0.09290067534178884,
|
5960 |
+
"grad_norm": 0.04485049098730087,
|
5961 |
+
"learning_rate": 5.852620357053651e-06,
|
5962 |
+
"loss": 10.3146,
|
5963 |
+
"step": 846
|
5964 |
+
},
|
5965 |
+
{
|
5966 |
+
"epoch": 0.09301048701476967,
|
5967 |
+
"grad_norm": 0.03302866965532303,
|
5968 |
+
"learning_rate": 5.778353450109286e-06,
|
5969 |
+
"loss": 10.3181,
|
5970 |
+
"step": 847
|
5971 |
+
},
|
5972 |
+
{
|
5973 |
+
"epoch": 0.09312029868775051,
|
5974 |
+
"grad_norm": 0.028692902997136116,
|
5975 |
+
"learning_rate": 5.704531854654721e-06,
|
5976 |
+
"loss": 10.3134,
|
5977 |
+
"step": 848
|
5978 |
+
},
|
5979 |
+
{
|
5980 |
+
"epoch": 0.09323011036073135,
|
5981 |
+
"grad_norm": 0.054431699216365814,
|
5982 |
+
"learning_rate": 5.631156314072605e-06,
|
5983 |
+
"loss": 10.3177,
|
5984 |
+
"step": 849
|
5985 |
+
},
|
5986 |
+
{
|
5987 |
+
"epoch": 0.09333992203371218,
|
5988 |
+
"grad_norm": 0.036631833761930466,
|
5989 |
+
"learning_rate": 5.558227567253832e-06,
|
5990 |
+
"loss": 10.3251,
|
5991 |
+
"step": 850
|
5992 |
+
},
|
5993 |
+
{
|
5994 |
+
"epoch": 0.09344973370669302,
|
5995 |
+
"grad_norm": 0.03347776457667351,
|
5996 |
+
"learning_rate": 5.485746348590048e-06,
|
5997 |
+
"loss": 10.3152,
|
5998 |
+
"step": 851
|
5999 |
+
},
|
6000 |
+
{
|
6001 |
+
"epoch": 0.09355954537967386,
|
6002 |
+
"grad_norm": 0.04079202190041542,
|
6003 |
+
"learning_rate": 5.413713387966329e-06,
|
6004 |
+
"loss": 10.333,
|
6005 |
+
"step": 852
|
6006 |
+
},
|
6007 |
+
{
|
6008 |
+
"epoch": 0.0936693570526547,
|
6009 |
+
"grad_norm": 0.035952821373939514,
|
6010 |
+
"learning_rate": 5.34212941075381e-06,
|
6011 |
+
"loss": 10.3145,
|
6012 |
+
"step": 853
|
6013 |
+
},
|
6014 |
+
{
|
6015 |
+
"epoch": 0.09377916872563553,
|
6016 |
+
"grad_norm": 0.041086867451667786,
|
6017 |
+
"learning_rate": 5.270995137802315e-06,
|
6018 |
+
"loss": 10.3106,
|
6019 |
+
"step": 854
|
6020 |
+
},
|
6021 |
+
{
|
6022 |
+
"epoch": 0.09388898039861637,
|
6023 |
+
"grad_norm": 0.04070660099387169,
|
6024 |
+
"learning_rate": 5.200311285433213e-06,
|
6025 |
+
"loss": 10.3089,
|
6026 |
+
"step": 855
|
6027 |
+
},
|
6028 |
+
{
|
6029 |
+
"epoch": 0.09399879207159721,
|
6030 |
+
"grad_norm": 0.032837964594364166,
|
6031 |
+
"learning_rate": 5.13007856543209e-06,
|
6032 |
+
"loss": 10.3146,
|
6033 |
+
"step": 856
|
6034 |
+
},
|
6035 |
+
{
|
6036 |
+
"epoch": 0.09410860374457805,
|
6037 |
+
"grad_norm": 0.041533321142196655,
|
6038 |
+
"learning_rate": 5.060297685041659e-06,
|
6039 |
+
"loss": 10.3161,
|
6040 |
+
"step": 857
|
6041 |
+
},
|
6042 |
+
{
|
6043 |
+
"epoch": 0.09421841541755889,
|
6044 |
+
"grad_norm": 0.03643094375729561,
|
6045 |
+
"learning_rate": 4.99096934695461e-06,
|
6046 |
+
"loss": 10.3103,
|
6047 |
+
"step": 858
|
6048 |
+
},
|
6049 |
+
{
|
6050 |
+
"epoch": 0.09432822709053973,
|
6051 |
+
"grad_norm": 0.03696398064494133,
|
6052 |
+
"learning_rate": 4.922094249306558e-06,
|
6053 |
+
"loss": 10.3269,
|
6054 |
+
"step": 859
|
6055 |
+
},
|
6056 |
+
{
|
6057 |
+
"epoch": 0.09443803876352057,
|
6058 |
+
"grad_norm": 0.046098772436380386,
|
6059 |
+
"learning_rate": 4.853673085668947e-06,
|
6060 |
+
"loss": 10.3118,
|
6061 |
+
"step": 860
|
6062 |
+
},
|
6063 |
+
{
|
6064 |
+
"epoch": 0.0945478504365014,
|
6065 |
+
"grad_norm": 0.041556525975465775,
|
6066 |
+
"learning_rate": 4.78570654504214e-06,
|
6067 |
+
"loss": 10.323,
|
6068 |
+
"step": 861
|
6069 |
+
},
|
6070 |
+
{
|
6071 |
+
"epoch": 0.09465766210948223,
|
6072 |
+
"grad_norm": 0.04157395288348198,
|
6073 |
+
"learning_rate": 4.7181953118484556e-06,
|
6074 |
+
"loss": 10.3125,
|
6075 |
+
"step": 862
|
6076 |
+
},
|
6077 |
+
{
|
6078 |
+
"epoch": 0.09476747378246307,
|
6079 |
+
"grad_norm": 0.036072101444005966,
|
6080 |
+
"learning_rate": 4.651140065925269e-06,
|
6081 |
+
"loss": 10.3063,
|
6082 |
+
"step": 863
|
6083 |
+
},
|
6084 |
+
{
|
6085 |
+
"epoch": 0.09487728545544391,
|
6086 |
+
"grad_norm": 0.038894958794116974,
|
6087 |
+
"learning_rate": 4.58454148251814e-06,
|
6088 |
+
"loss": 10.3141,
|
6089 |
+
"step": 864
|
6090 |
+
},
|
6091 |
+
{
|
6092 |
+
"epoch": 0.09498709712842475,
|
6093 |
+
"grad_norm": 0.035335294902324677,
|
6094 |
+
"learning_rate": 4.5184002322740785e-06,
|
6095 |
+
"loss": 10.3173,
|
6096 |
+
"step": 865
|
6097 |
+
},
|
6098 |
+
{
|
6099 |
+
"epoch": 0.09509690880140559,
|
6100 |
+
"grad_norm": 0.040773481130599976,
|
6101 |
+
"learning_rate": 4.452716981234744e-06,
|
6102 |
+
"loss": 10.3155,
|
6103 |
+
"step": 866
|
6104 |
+
},
|
6105 |
+
{
|
6106 |
+
"epoch": 0.09520672047438643,
|
6107 |
+
"grad_norm": 0.03266516327857971,
|
6108 |
+
"learning_rate": 4.387492390829734e-06,
|
6109 |
+
"loss": 10.3169,
|
6110 |
+
"step": 867
|
6111 |
+
},
|
6112 |
+
{
|
6113 |
+
"epoch": 0.09531653214736727,
|
6114 |
+
"grad_norm": 0.056261539459228516,
|
6115 |
+
"learning_rate": 4.322727117869951e-06,
|
6116 |
+
"loss": 10.3119,
|
6117 |
+
"step": 868
|
6118 |
+
},
|
6119 |
+
{
|
6120 |
+
"epoch": 0.0954263438203481,
|
6121 |
+
"grad_norm": 0.04317648708820343,
|
6122 |
+
"learning_rate": 4.258421814540992e-06,
|
6123 |
+
"loss": 10.3124,
|
6124 |
+
"step": 869
|
6125 |
+
},
|
6126 |
+
{
|
6127 |
+
"epoch": 0.09553615549332894,
|
6128 |
+
"grad_norm": 0.033856358379125595,
|
6129 |
+
"learning_rate": 4.19457712839652e-06,
|
6130 |
+
"loss": 10.3216,
|
6131 |
+
"step": 870
|
6132 |
+
},
|
6133 |
+
{
|
6134 |
+
"epoch": 0.09564596716630978,
|
6135 |
+
"grad_norm": 0.036258719861507416,
|
6136 |
+
"learning_rate": 4.131193702351827e-06,
|
6137 |
+
"loss": 10.3018,
|
6138 |
+
"step": 871
|
6139 |
+
},
|
6140 |
+
{
|
6141 |
+
"epoch": 0.09575577883929062,
|
6142 |
+
"grad_norm": 0.03472882881760597,
|
6143 |
+
"learning_rate": 4.068272174677335e-06,
|
6144 |
+
"loss": 10.314,
|
6145 |
+
"step": 872
|
6146 |
+
},
|
6147 |
+
{
|
6148 |
+
"epoch": 0.09586559051227145,
|
6149 |
+
"grad_norm": 0.03793155401945114,
|
6150 |
+
"learning_rate": 4.005813178992091e-06,
|
6151 |
+
"loss": 10.3154,
|
6152 |
+
"step": 873
|
6153 |
+
},
|
6154 |
+
{
|
6155 |
+
"epoch": 0.09597540218525229,
|
6156 |
+
"grad_norm": 0.04314670339226723,
|
6157 |
+
"learning_rate": 3.9438173442575e-06,
|
6158 |
+
"loss": 10.3158,
|
6159 |
+
"step": 874
|
6160 |
+
},
|
6161 |
+
{
|
6162 |
+
"epoch": 0.09608521385823313,
|
6163 |
+
"grad_norm": 0.02972523681819439,
|
6164 |
+
"learning_rate": 3.8822852947709375e-06,
|
6165 |
+
"loss": 10.3107,
|
6166 |
+
"step": 875
|
6167 |
+
},
|
6168 |
+
{
|
6169 |
+
"epoch": 0.09619502553121397,
|
6170 |
+
"grad_norm": 0.035492341965436935,
|
6171 |
+
"learning_rate": 3.821217650159453e-06,
|
6172 |
+
"loss": 10.3126,
|
6173 |
+
"step": 876
|
6174 |
+
},
|
6175 |
+
{
|
6176 |
+
"epoch": 0.0963048372041948,
|
6177 |
+
"grad_norm": 0.03144872933626175,
|
6178 |
+
"learning_rate": 3.760615025373543e-06,
|
6179 |
+
"loss": 10.3118,
|
6180 |
+
"step": 877
|
6181 |
+
},
|
6182 |
+
{
|
6183 |
+
"epoch": 0.09641464887717564,
|
6184 |
+
"grad_norm": 0.04060778021812439,
|
6185 |
+
"learning_rate": 3.700478030680987e-06,
|
6186 |
+
"loss": 10.3134,
|
6187 |
+
"step": 878
|
6188 |
+
},
|
6189 |
+
{
|
6190 |
+
"epoch": 0.09652446055015648,
|
6191 |
+
"grad_norm": 0.04507026448845863,
|
6192 |
+
"learning_rate": 3.6408072716606346e-06,
|
6193 |
+
"loss": 10.3128,
|
6194 |
+
"step": 879
|
6195 |
+
},
|
6196 |
+
{
|
6197 |
+
"epoch": 0.09663427222313732,
|
6198 |
+
"grad_norm": 0.028524206951260567,
|
6199 |
+
"learning_rate": 3.581603349196372e-06,
|
6200 |
+
"loss": 10.3145,
|
6201 |
+
"step": 880
|
6202 |
+
},
|
6203 |
+
{
|
6204 |
+
"epoch": 0.09674408389611816,
|
6205 |
+
"grad_norm": 0.036721982061862946,
|
6206 |
+
"learning_rate": 3.522866859471047e-06,
|
6207 |
+
"loss": 10.3111,
|
6208 |
+
"step": 881
|
6209 |
+
},
|
6210 |
+
{
|
6211 |
+
"epoch": 0.096853895569099,
|
6212 |
+
"grad_norm": 0.04546421021223068,
|
6213 |
+
"learning_rate": 3.4645983939604496e-06,
|
6214 |
+
"loss": 10.3152,
|
6215 |
+
"step": 882
|
6216 |
+
},
|
6217 |
+
{
|
6218 |
+
"epoch": 0.09696370724207984,
|
6219 |
+
"grad_norm": 0.03185427933931351,
|
6220 |
+
"learning_rate": 3.406798539427386e-06,
|
6221 |
+
"loss": 10.3095,
|
6222 |
+
"step": 883
|
6223 |
+
},
|
6224 |
+
{
|
6225 |
+
"epoch": 0.09707351891506068,
|
6226 |
+
"grad_norm": 0.03573969006538391,
|
6227 |
+
"learning_rate": 3.349467877915746e-06,
|
6228 |
+
"loss": 10.3168,
|
6229 |
+
"step": 884
|
6230 |
+
},
|
6231 |
+
{
|
6232 |
+
"epoch": 0.0971833305880415,
|
6233 |
+
"grad_norm": 0.04182133823633194,
|
6234 |
+
"learning_rate": 3.2926069867446675e-06,
|
6235 |
+
"loss": 10.3345,
|
6236 |
+
"step": 885
|
6237 |
+
},
|
6238 |
+
{
|
6239 |
+
"epoch": 0.09729314226102234,
|
6240 |
+
"grad_norm": 0.04052354022860527,
|
6241 |
+
"learning_rate": 3.2362164385026706e-06,
|
6242 |
+
"loss": 10.3127,
|
6243 |
+
"step": 886
|
6244 |
+
},
|
6245 |
+
{
|
6246 |
+
"epoch": 0.09740295393400318,
|
6247 |
+
"grad_norm": 0.03702933341264725,
|
6248 |
+
"learning_rate": 3.180296801041971e-06,
|
6249 |
+
"loss": 10.3196,
|
6250 |
+
"step": 887
|
6251 |
+
},
|
6252 |
+
{
|
6253 |
+
"epoch": 0.09751276560698402,
|
6254 |
+
"grad_norm": 0.04575482755899429,
|
6255 |
+
"learning_rate": 3.1248486374726883e-06,
|
6256 |
+
"loss": 10.3089,
|
6257 |
+
"step": 888
|
6258 |
+
},
|
6259 |
+
{
|
6260 |
+
"epoch": 0.09762257727996486,
|
6261 |
+
"grad_norm": 0.03637000918388367,
|
6262 |
+
"learning_rate": 3.069872506157212e-06,
|
6263 |
+
"loss": 10.3069,
|
6264 |
+
"step": 889
|
6265 |
+
},
|
6266 |
+
{
|
6267 |
+
"epoch": 0.0977323889529457,
|
6268 |
+
"grad_norm": 0.03638225421309471,
|
6269 |
+
"learning_rate": 3.0153689607045845e-06,
|
6270 |
+
"loss": 10.3222,
|
6271 |
+
"step": 890
|
6272 |
+
},
|
6273 |
+
{
|
6274 |
+
"epoch": 0.09784220062592654,
|
6275 |
+
"grad_norm": 0.061212845146656036,
|
6276 |
+
"learning_rate": 2.961338549964893e-06,
|
6277 |
+
"loss": 10.3209,
|
6278 |
+
"step": 891
|
6279 |
+
},
|
6280 |
+
{
|
6281 |
+
"epoch": 0.09795201229890738,
|
6282 |
+
"grad_norm": 0.03995781019330025,
|
6283 |
+
"learning_rate": 2.9077818180237693e-06,
|
6284 |
+
"loss": 10.3103,
|
6285 |
+
"step": 892
|
6286 |
+
},
|
6287 |
+
{
|
6288 |
+
"epoch": 0.09806182397188822,
|
6289 |
+
"grad_norm": 0.0443757101893425,
|
6290 |
+
"learning_rate": 2.8546993041969173e-06,
|
6291 |
+
"loss": 10.3082,
|
6292 |
+
"step": 893
|
6293 |
+
},
|
6294 |
+
{
|
6295 |
+
"epoch": 0.09817163564486905,
|
6296 |
+
"grad_norm": 0.038907960057258606,
|
6297 |
+
"learning_rate": 2.802091543024671e-06,
|
6298 |
+
"loss": 10.323,
|
6299 |
+
"step": 894
|
6300 |
+
},
|
6301 |
+
{
|
6302 |
+
"epoch": 0.0982814473178499,
|
6303 |
+
"grad_norm": 0.03369826823472977,
|
6304 |
+
"learning_rate": 2.7499590642665774e-06,
|
6305 |
+
"loss": 10.3112,
|
6306 |
+
"step": 895
|
6307 |
+
},
|
6308 |
+
{
|
6309 |
+
"epoch": 0.09839125899083073,
|
6310 |
+
"grad_norm": 0.03591571003198624,
|
6311 |
+
"learning_rate": 2.6983023928961404e-06,
|
6312 |
+
"loss": 10.3122,
|
6313 |
+
"step": 896
|
6314 |
+
},
|
6315 |
+
{
|
6316 |
+
"epoch": 0.09850107066381156,
|
6317 |
+
"grad_norm": 0.04416611045598984,
|
6318 |
+
"learning_rate": 2.647122049095463e-06,
|
6319 |
+
"loss": 10.3143,
|
6320 |
+
"step": 897
|
6321 |
+
},
|
6322 |
+
{
|
6323 |
+
"epoch": 0.0986108823367924,
|
6324 |
+
"grad_norm": 0.041298989206552505,
|
6325 |
+
"learning_rate": 2.596418548250029e-06,
|
6326 |
+
"loss": 10.3093,
|
6327 |
+
"step": 898
|
6328 |
+
},
|
6329 |
+
{
|
6330 |
+
"epoch": 0.09872069400977324,
|
6331 |
+
"grad_norm": 0.03004969097673893,
|
6332 |
+
"learning_rate": 2.546192400943537e-06,
|
6333 |
+
"loss": 10.3169,
|
6334 |
+
"step": 899
|
6335 |
+
},
|
6336 |
+
{
|
6337 |
+
"epoch": 0.09883050568275407,
|
6338 |
+
"grad_norm": 0.04976905882358551,
|
6339 |
+
"learning_rate": 2.496444112952734e-06,
|
6340 |
+
"loss": 10.3227,
|
6341 |
+
"step": 900
|
6342 |
+
},
|
6343 |
+
{
|
6344 |
+
"epoch": 0.09894031735573491,
|
6345 |
+
"grad_norm": 0.03230028226971626,
|
6346 |
+
"learning_rate": 2.4471741852423237e-06,
|
6347 |
+
"loss": 10.3213,
|
6348 |
+
"step": 901
|
6349 |
+
},
|
6350 |
+
{
|
6351 |
+
"epoch": 0.09905012902871575,
|
6352 |
+
"grad_norm": 0.051530446857213974,
|
6353 |
+
"learning_rate": 2.3983831139599287e-06,
|
6354 |
+
"loss": 10.3224,
|
6355 |
+
"step": 902
|
6356 |
+
},
|
6357 |
+
{
|
6358 |
+
"epoch": 0.09915994070169659,
|
6359 |
+
"grad_norm": 0.0378115214407444,
|
6360 |
+
"learning_rate": 2.3500713904311024e-06,
|
6361 |
+
"loss": 10.3191,
|
6362 |
+
"step": 903
|
6363 |
+
},
|
6364 |
+
{
|
6365 |
+
"epoch": 0.09926975237467743,
|
6366 |
+
"grad_norm": 0.03399795666337013,
|
6367 |
+
"learning_rate": 2.3022395011543686e-06,
|
6368 |
+
"loss": 10.3116,
|
6369 |
+
"step": 904
|
6370 |
+
},
|
6371 |
+
{
|
6372 |
+
"epoch": 0.09937956404765827,
|
6373 |
+
"grad_norm": 0.05241768807172775,
|
6374 |
+
"learning_rate": 2.2548879277963064e-06,
|
6375 |
+
"loss": 10.3094,
|
6376 |
+
"step": 905
|
6377 |
+
},
|
6378 |
+
{
|
6379 |
+
"epoch": 0.09948937572063911,
|
6380 |
+
"grad_norm": 0.03988038748502731,
|
6381 |
+
"learning_rate": 2.208017147186736e-06,
|
6382 |
+
"loss": 10.3073,
|
6383 |
+
"step": 906
|
6384 |
+
},
|
6385 |
+
{
|
6386 |
+
"epoch": 0.09959918739361995,
|
6387 |
+
"grad_norm": 0.03977709636092186,
|
6388 |
+
"learning_rate": 2.161627631313923e-06,
|
6389 |
+
"loss": 10.3122,
|
6390 |
+
"step": 907
|
6391 |
+
},
|
6392 |
+
{
|
6393 |
+
"epoch": 0.09970899906660077,
|
6394 |
+
"grad_norm": 0.036957282572984695,
|
6395 |
+
"learning_rate": 2.1157198473197414e-06,
|
6396 |
+
"loss": 10.3245,
|
6397 |
+
"step": 908
|
6398 |
+
},
|
6399 |
+
{
|
6400 |
+
"epoch": 0.09981881073958161,
|
6401 |
+
"grad_norm": 0.02555002085864544,
|
6402 |
+
"learning_rate": 2.070294257495081e-06,
|
6403 |
+
"loss": 10.3177,
|
6404 |
+
"step": 909
|
6405 |
+
},
|
6406 |
+
{
|
6407 |
+
"epoch": 0.09992862241256245,
|
6408 |
+
"grad_norm": 0.03872646763920784,
|
6409 |
+
"learning_rate": 2.0253513192751373e-06,
|
6410 |
+
"loss": 10.3131,
|
6411 |
+
"step": 910
|
6412 |
+
},
|
6413 |
+
{
|
6414 |
+
"epoch": 0.10003843408554329,
|
6415 |
+
"grad_norm": 0.04097941890358925,
|
6416 |
+
"learning_rate": 1.9808914852347813e-06,
|
6417 |
+
"loss": 10.3127,
|
6418 |
+
"step": 911
|
6419 |
+
},
|
6420 |
+
{
|
6421 |
+
"epoch": 0.10014824575852413,
|
6422 |
+
"grad_norm": 0.04144563153386116,
|
6423 |
+
"learning_rate": 1.9369152030840556e-06,
|
6424 |
+
"loss": 10.3052,
|
6425 |
+
"step": 912
|
6426 |
+
},
|
6427 |
+
{
|
6428 |
+
"epoch": 0.10025805743150497,
|
6429 |
+
"grad_norm": 0.03275219723582268,
|
6430 |
+
"learning_rate": 1.8934229156636452e-06,
|
6431 |
+
"loss": 10.3125,
|
6432 |
+
"step": 913
|
6433 |
+
},
|
6434 |
+
{
|
6435 |
+
"epoch": 0.10036786910448581,
|
6436 |
+
"grad_norm": 0.04040839523077011,
|
6437 |
+
"learning_rate": 1.8504150609403858e-06,
|
6438 |
+
"loss": 10.313,
|
6439 |
+
"step": 914
|
6440 |
+
},
|
6441 |
+
{
|
6442 |
+
"epoch": 0.10047768077746665,
|
6443 |
+
"grad_norm": 0.03348499909043312,
|
6444 |
+
"learning_rate": 1.807892072002898e-06,
|
6445 |
+
"loss": 10.3179,
|
6446 |
+
"step": 915
|
6447 |
+
},
|
6448 |
+
{
|
6449 |
+
"epoch": 0.10058749245044749,
|
6450 |
+
"grad_norm": 0.04070667922496796,
|
6451 |
+
"learning_rate": 1.7658543770572189e-06,
|
6452 |
+
"loss": 10.3187,
|
6453 |
+
"step": 916
|
6454 |
+
},
|
6455 |
+
{
|
6456 |
+
"epoch": 0.10069730412342832,
|
6457 |
+
"grad_norm": 0.03918742388486862,
|
6458 |
+
"learning_rate": 1.724302399422456e-06,
|
6459 |
+
"loss": 10.3228,
|
6460 |
+
"step": 917
|
6461 |
+
},
|
6462 |
+
{
|
6463 |
+
"epoch": 0.10080711579640916,
|
6464 |
+
"grad_norm": 0.03554573655128479,
|
6465 |
+
"learning_rate": 1.6832365575265741e-06,
|
6466 |
+
"loss": 10.3079,
|
6467 |
+
"step": 918
|
6468 |
+
},
|
6469 |
+
{
|
6470 |
+
"epoch": 0.10091692746939,
|
6471 |
+
"grad_norm": 0.05401109531521797,
|
6472 |
+
"learning_rate": 1.6426572649021476e-06,
|
6473 |
+
"loss": 10.3102,
|
6474 |
+
"step": 919
|
6475 |
+
},
|
6476 |
+
{
|
6477 |
+
"epoch": 0.10102673914237083,
|
6478 |
+
"grad_norm": 0.04078887775540352,
|
6479 |
+
"learning_rate": 1.6025649301821876e-06,
|
6480 |
+
"loss": 10.3046,
|
6481 |
+
"step": 920
|
6482 |
+
},
|
6483 |
+
{
|
6484 |
+
"epoch": 0.10113655081535167,
|
6485 |
+
"grad_norm": 0.029533548280596733,
|
6486 |
+
"learning_rate": 1.5629599570960718e-06,
|
6487 |
+
"loss": 10.3108,
|
6488 |
+
"step": 921
|
6489 |
+
},
|
6490 |
+
{
|
6491 |
+
"epoch": 0.1012463624883325,
|
6492 |
+
"grad_norm": 0.04226626828312874,
|
6493 |
+
"learning_rate": 1.523842744465437e-06,
|
6494 |
+
"loss": 10.3127,
|
6495 |
+
"step": 922
|
6496 |
+
},
|
6497 |
+
{
|
6498 |
+
"epoch": 0.10135617416131334,
|
6499 |
+
"grad_norm": 0.025987090542912483,
|
6500 |
+
"learning_rate": 1.4852136862001764e-06,
|
6501 |
+
"loss": 10.3211,
|
6502 |
+
"step": 923
|
6503 |
+
},
|
6504 |
+
{
|
6505 |
+
"epoch": 0.10146598583429418,
|
6506 |
+
"grad_norm": 0.03992049768567085,
|
6507 |
+
"learning_rate": 1.4470731712944884e-06,
|
6508 |
+
"loss": 10.3015,
|
6509 |
+
"step": 924
|
6510 |
+
},
|
6511 |
+
{
|
6512 |
+
"epoch": 0.10157579750727502,
|
6513 |
+
"grad_norm": 0.04411383345723152,
|
6514 |
+
"learning_rate": 1.4094215838229176e-06,
|
6515 |
+
"loss": 10.3135,
|
6516 |
+
"step": 925
|
6517 |
+
},
|
6518 |
+
{
|
6519 |
+
"epoch": 0.10168560918025586,
|
6520 |
+
"grad_norm": 0.041523367166519165,
|
6521 |
+
"learning_rate": 1.372259302936546e-06,
|
6522 |
+
"loss": 10.3204,
|
6523 |
+
"step": 926
|
6524 |
+
},
|
6525 |
+
{
|
6526 |
+
"epoch": 0.1017954208532367,
|
6527 |
+
"grad_norm": 0.033936526626348495,
|
6528 |
+
"learning_rate": 1.3355867028591208e-06,
|
6529 |
+
"loss": 10.3115,
|
6530 |
+
"step": 927
|
6531 |
+
},
|
6532 |
+
{
|
6533 |
+
"epoch": 0.10190523252621754,
|
6534 |
+
"grad_norm": 0.03105180524289608,
|
6535 |
+
"learning_rate": 1.2994041528833266e-06,
|
6536 |
+
"loss": 10.3131,
|
6537 |
+
"step": 928
|
6538 |
+
},
|
6539 |
+
{
|
6540 |
+
"epoch": 0.10201504419919838,
|
6541 |
+
"grad_norm": 0.04320959001779556,
|
6542 |
+
"learning_rate": 1.2637120173670358e-06,
|
6543 |
+
"loss": 10.317,
|
6544 |
+
"step": 929
|
6545 |
+
},
|
6546 |
+
{
|
6547 |
+
"epoch": 0.10212485587217922,
|
6548 |
+
"grad_norm": 0.037952907383441925,
|
6549 |
+
"learning_rate": 1.2285106557296477e-06,
|
6550 |
+
"loss": 10.3224,
|
6551 |
+
"step": 930
|
6552 |
+
},
|
6553 |
+
{
|
6554 |
+
"epoch": 0.10223466754516006,
|
6555 |
+
"grad_norm": 0.046220093965530396,
|
6556 |
+
"learning_rate": 1.1938004224484988e-06,
|
6557 |
+
"loss": 10.3118,
|
6558 |
+
"step": 931
|
6559 |
+
},
|
6560 |
+
{
|
6561 |
+
"epoch": 0.10234447921814088,
|
6562 |
+
"grad_norm": 0.05611686035990715,
|
6563 |
+
"learning_rate": 1.1595816670552428e-06,
|
6564 |
+
"loss": 10.3142,
|
6565 |
+
"step": 932
|
6566 |
+
},
|
6567 |
+
{
|
6568 |
+
"epoch": 0.10245429089112172,
|
6569 |
+
"grad_norm": 0.03338133171200752,
|
6570 |
+
"learning_rate": 1.1258547341323699e-06,
|
6571 |
+
"loss": 10.3146,
|
6572 |
+
"step": 933
|
6573 |
+
},
|
6574 |
+
{
|
6575 |
+
"epoch": 0.10256410256410256,
|
6576 |
+
"grad_norm": 0.037339672446250916,
6577 | +       "learning_rate": 1.0926199633097157e-06,
6578 | +       "loss": 10.3126,
6579 | +       "step": 934
6580 | +     },
6581 | +     {
6582 | +       "epoch": 0.1026739142370834,
6583 | +       "grad_norm": 0.03368791565299034,
6584 | +       "learning_rate": 1.0598776892610685e-06,
6585 | +       "loss": 10.3227,
6586 | +       "step": 935
6587 | +     },
6588 | +     {
6589 | +       "epoch": 0.10278372591006424,
6590 | +       "grad_norm": 0.032310787588357925,
6591 | +       "learning_rate": 1.02762824170074e-06,
6592 | +       "loss": 10.3099,
6593 | +       "step": 936
6594 | +     },
6595 | +     {
6596 | +       "epoch": 0.10289353758304508,
6597 | +       "grad_norm": 0.03587045520544052,
6598 | +       "learning_rate": 9.958719453803278e-07,
6599 | +       "loss": 10.315,
6600 | +       "step": 937
6601 | +     },
6602 | +     {
6603 | +       "epoch": 0.10300334925602592,
6604 | +       "grad_norm": 0.03891875594854355,
6605 | +       "learning_rate": 9.646091200853802e-07,
6606 | +       "loss": 10.3223,
6607 | +       "step": 938
6608 | +     },
6609 | +     {
6610 | +       "epoch": 0.10311316092900676,
6611 | +       "grad_norm": 0.031654711812734604,
6612 | +       "learning_rate": 9.338400806321978e-07,
6613 | +       "loss": 10.3094,
6614 | +       "step": 939
6615 | +     },
6616 | +     {
6617 | +       "epoch": 0.1032229726019876,
6618 | +       "grad_norm": 0.03519720584154129,
6619 | +       "learning_rate": 9.035651368646648e-07,
6620 | +       "loss": 10.3187,
6621 | +       "step": 940
6622 | +     },
6623 | +     {
6624 | +       "epoch": 0.10333278427496843,
6625 | +       "grad_norm": 0.042424995452165604,
6626 | +       "learning_rate": 8.737845936511335e-07,
6627 | +       "loss": 10.3054,
6628 | +       "step": 941
6629 | +     },
6630 | +     {
6631 | +       "epoch": 0.10344259594794927,
6632 | +       "grad_norm": 0.03940219804644585,
6633 | +       "learning_rate": 8.444987508813451e-07,
6634 | +       "loss": 10.3128,
6635 | +       "step": 942
6636 | +     },
6637 | +     {
6638 | +       "epoch": 0.1035524076209301,
6639 | +       "grad_norm": 0.0543336495757103,
6640 | +       "learning_rate": 8.157079034633974e-07,
6641 | +       "loss": 10.3178,
6642 | +       "step": 943
6643 | +     },
6644 | +     {
6645 | +       "epoch": 0.10366221929391094,
6646 | +       "grad_norm": 0.03599490970373154,
6647 | +       "learning_rate": 7.874123413208145e-07,
6648 | +       "loss": 10.3213,
6649 | +       "step": 944
6650 | +     },
6651 | +     {
6652 | +       "epoch": 0.10377203096689178,
6653 | +       "grad_norm": 0.04079907387495041,
6654 | +       "learning_rate": 7.596123493895991e-07,
6655 | +       "loss": 10.3136,
6656 | +       "step": 945
6657 | +     },
6658 | +     {
6659 | +       "epoch": 0.10388184263987262,
6660 | +       "grad_norm": 0.03582724928855896,
6661 | +       "learning_rate": 7.323082076153509e-07,
6662 | +       "loss": 10.3198,
6663 | +       "step": 946
6664 | +     },
6665 | +     {
6666 | +       "epoch": 0.10399165431285345,
6667 | +       "grad_norm": 0.03927293419837952,
6668 | +       "learning_rate": 7.055001909504755e-07,
6669 | +       "loss": 10.3103,
6670 | +       "step": 947
6671 | +     },
6672 | +     {
6673 | +       "epoch": 0.1041014659858343,
6674 | +       "grad_norm": 0.05429311841726303,
6675 | +       "learning_rate": 6.791885693514133e-07,
6676 | +       "loss": 10.3139,
6677 | +       "step": 948
6678 | +     },
6679 | +     {
6680 | +       "epoch": 0.10421127765881513,
6681 | +       "grad_norm": 0.034018926322460175,
6682 | +       "learning_rate": 6.533736077758868e-07,
6683 | +       "loss": 10.3105,
6684 | +       "step": 949
6685 | +     },
6686 | +     {
6687 | +       "epoch": 0.10432108933179597,
6688 | +       "grad_norm": 0.03970513865351677,
6689 | +       "learning_rate": 6.280555661802856e-07,
6690 | +       "loss": 10.3111,
6691 | +       "step": 950
6692 | +     },
6693 | +     {
6694 | +       "epoch": 0.10443090100477681,
6695 | +       "grad_norm": 0.05490114912390709,
6696 | +       "learning_rate": 6.032346995169968e-07,
6697 | +       "loss": 10.3033,
6698 | +       "step": 951
6699 | +     },
6700 | +     {
6701 | +       "epoch": 0.10454071267775765,
6702 | +       "grad_norm": 0.04130084067583084,
6703 | +       "learning_rate": 5.78911257731879e-07,
6704 | +       "loss": 10.3252,
6705 | +       "step": 952
6706 | +     },
6707 | +     {
6708 | +       "epoch": 0.10465052435073849,
6709 | +       "grad_norm": 0.041738107800483704,
6710 | +       "learning_rate": 5.550854857617193e-07,
6711 | +       "loss": 10.3104,
6712 | +       "step": 953
6713 | +     },
6714 | +     {
6715 | +       "epoch": 0.10476033602371933,
6716 | +       "grad_norm": 0.03705933317542076,
6717 | +       "learning_rate": 5.317576235317756e-07,
6718 | +       "loss": 10.3114,
6719 | +       "step": 954
6720 | +     },
6721 | +     {
6722 | +       "epoch": 0.10487014769670015,
6723 | +       "grad_norm": 0.04287085682153702,
6724 | +       "learning_rate": 5.089279059533658e-07,
6725 | +       "loss": 10.3118,
6726 | +       "step": 955
6727 | +     },
6728 | +     {
6729 | +       "epoch": 0.10497995936968099,
6730 | +       "grad_norm": 0.036541227251291275,
6731 | +       "learning_rate": 4.865965629214819e-07,
6732 | +       "loss": 10.3191,
6733 | +       "step": 956
6734 | +     },
6735 | +     {
6736 | +       "epoch": 0.10508977104266183,
6737 | +       "grad_norm": 0.03359196335077286,
6738 | +       "learning_rate": 4.647638193125137e-07,
6739 | +       "loss": 10.3126,
6740 | +       "step": 957
6741 | +     },
6742 | +     {
6743 | +       "epoch": 0.10519958271564267,
6744 | +       "grad_norm": 0.029098298400640488,
6745 | +       "learning_rate": 4.434298949819449e-07,
6746 | +       "loss": 10.3174,
6747 | +       "step": 958
6748 | +     },
6749 | +     {
6750 | +       "epoch": 0.10530939438862351,
6751 | +       "grad_norm": 0.0325823612511158,
6752 | +       "learning_rate": 4.2259500476214407e-07,
6753 | +       "loss": 10.3138,
6754 | +       "step": 959
6755 | +     },
6756 | +     {
6757 | +       "epoch": 0.10541920606160435,
6758 | +       "grad_norm": 0.033848442137241364,
6759 | +       "learning_rate": 4.02259358460233e-07,
6760 | +       "loss": 10.3043,
6761 | +       "step": 960
6762 | +     },
6763 | +     {
6764 | +       "epoch": 0.10552901773458519,
6765 | +       "grad_norm": 0.04012158140540123,
6766 | +       "learning_rate": 3.824231608559492e-07,
6767 | +       "loss": 10.3117,
6768 | +       "step": 961
6769 | +     },
6770 | +     {
6771 | +       "epoch": 0.10563882940756603,
6772 | +       "grad_norm": 0.042495328933000565,
6773 | +       "learning_rate": 3.630866116995757e-07,
6774 | +       "loss": 10.3234,
6775 | +       "step": 962
6776 | +     },
6777 | +     {
6778 | +       "epoch": 0.10574864108054687,
6779 | +       "grad_norm": 0.04280965402722359,
6780 | +       "learning_rate": 3.4424990570994797e-07,
6781 | +       "loss": 10.329,
6782 | +       "step": 963
6783 | +     },
6784 | +     {
6785 | +       "epoch": 0.1058584527535277,
6786 | +       "grad_norm": 0.03574259579181671,
6787 | +       "learning_rate": 3.2591323257248893e-07,
6788 | +       "loss": 10.3192,
6789 | +       "step": 964
6790 | +     },
6791 | +     {
6792 | +       "epoch": 0.10596826442650854,
6793 | +       "grad_norm": 0.03468066081404686,
6794 | +       "learning_rate": 3.080767769372939e-07,
6795 | +       "loss": 10.308,
6796 | +       "step": 965
6797 | +     },
6798 | +     {
6799 | +       "epoch": 0.10607807609948938,
6800 | +       "grad_norm": 0.03749536722898483,
6801 | +       "learning_rate": 2.907407184172706e-07,
6802 | +       "loss": 10.3065,
6803 | +       "step": 966
6804 | +     },
6805 | +     {
6806 | +       "epoch": 0.10618788777247021,
6807 | +       "grad_norm": 0.040267836302518845,
6808 | +       "learning_rate": 2.7390523158633554e-07,
6809 | +       "loss": 10.3101,
6810 | +       "step": 967
6811 | +     },
6812 | +     {
6813 | +       "epoch": 0.10629769944545105,
6814 | +       "grad_norm": 0.04641765356063843,
6815 | +       "learning_rate": 2.5757048597765396e-07,
6816 | +       "loss": 10.3158,
6817 | +       "step": 968
6818 | +     },
6819 | +     {
6820 | +       "epoch": 0.10640751111843189,
6821 | +       "grad_norm": 0.045349445194005966,
6822 | +       "learning_rate": 2.4173664608193593e-07,
6823 | +       "loss": 10.3114,
6824 | +       "step": 969
6825 | +     },
6826 | +     {
6827 | +       "epoch": 0.10651732279141272,
6828 | +       "grad_norm": 0.033795084804296494,
6829 | +       "learning_rate": 2.2640387134577058e-07,
6830 | +       "loss": 10.3055,
6831 | +       "step": 970
6832 | +     },
6833 | +     {
6834 | +       "epoch": 0.10662713446439356,
6835 | +       "grad_norm": 0.03448265418410301,
6836 | +       "learning_rate": 2.1157231617002783e-07,
6837 | +       "loss": 10.3157,
6838 | +       "step": 971
6839 | +     },
6840 | +     {
6841 | +       "epoch": 0.1067369461373744,
6842 | +       "grad_norm": 0.032643064856529236,
6843 | +       "learning_rate": 1.9724212990830938e-07,
6844 | +       "loss": 10.3156,
6845 | +       "step": 972
6846 | +     },
6847 | +     {
6848 | +       "epoch": 0.10684675781035524,
6849 | +       "grad_norm": 0.037873052060604095,
6850 | +       "learning_rate": 1.8341345686543332e-07,
6851 | +       "loss": 10.3146,
6852 | +       "step": 973
6853 | +     },
6854 | +     {
6855 | +       "epoch": 0.10695656948333608,
6856 | +       "grad_norm": 0.0303360465914011,
6857 | +       "learning_rate": 1.7008643629596866e-07,
6858 | +       "loss": 10.3094,
6859 | +       "step": 974
6860 | +     },
6861 | +     {
6862 | +       "epoch": 0.10706638115631692,
6863 | +       "grad_norm": 0.04015408083796501,
6864 | +       "learning_rate": 1.5726120240288634e-07,
6865 | +       "loss": 10.3039,
6866 | +       "step": 975
6867 | +     },
6868 | +     {
6869 | +       "epoch": 0.10717619282929776,
6870 | +       "grad_norm": 0.03686315566301346,
6871 | +       "learning_rate": 1.449378843361271e-07,
6872 | +       "loss": 10.3094,
6873 | +       "step": 976
6874 | +     },
6875 | +     {
6876 | +       "epoch": 0.1072860045022786,
6877 | +       "grad_norm": 0.04508250951766968,
6878 | +       "learning_rate": 1.3311660619138578e-07,
6879 | +       "loss": 10.309,
6880 | +       "step": 977
6881 | +     },
6882 | +     {
6883 | +       "epoch": 0.10739581617525942,
6884 | +       "grad_norm": 0.040923111140728,
6885 | +       "learning_rate": 1.2179748700879012e-07,
6886 | +       "loss": 10.314,
6887 | +       "step": 978
6888 | +     },
6889 | +     {
6890 | +       "epoch": 0.10750562784824026,
6891 | +       "grad_norm": 0.03168369084596634,
6892 | +       "learning_rate": 1.109806407717462e-07,
6893 | +       "loss": 10.3179,
6894 | +       "step": 979
6895 | +     },
6896 | +     {
6897 | +       "epoch": 0.1076154395212211,
6898 | +       "grad_norm": 0.03265474736690521,
6899 | +       "learning_rate": 1.0066617640578368e-07,
6900 | +       "loss": 10.3111,
6901 | +       "step": 980
6902 | +     },
6903 | +     {
6904 | +       "epoch": 0.10772525119420194,
6905 | +       "grad_norm": 0.03841938450932503,
6906 | +       "learning_rate": 9.085419777743465e-08,
6907 | +       "loss": 10.3139,
6908 | +       "step": 981
6909 | +     },
6910 | +     {
6911 | +       "epoch": 0.10783506286718278,
6912 | +       "grad_norm": 0.04329368844628334,
6913 | +       "learning_rate": 8.15448036932176e-08,
6914 | +       "loss": 10.3161,
6915 | +       "step": 982
6916 | +     },
6917 | +     {
6918 | +       "epoch": 0.10794487454016362,
6919 | +       "grad_norm": 0.040105242282152176,
6920 | +       "learning_rate": 7.273808789862724e-08,
6921 | +       "loss": 10.3131,
6922 | +       "step": 983
6923 | +     },
6924 | +     {
6925 | +       "epoch": 0.10805468621314446,
6926 | +       "grad_norm": 0.03887563571333885,
6927 | +       "learning_rate": 6.443413907720186e-08,
6928 | +       "loss": 10.3237,
6929 | +       "step": 984
6930 | +     },
6931 | +     {
6932 | +       "epoch": 0.1081644978861253,
6933 | +       "grad_norm": 0.04586820304393768,
6934 | +       "learning_rate": 5.663304084960186e-08,
6935 | +       "loss": 10.3174,
6936 | +       "step": 985
6937 | +     },
6938 | +     {
6939 | +       "epoch": 0.10827430955910614,
6940 | +       "grad_norm": 0.046376392245292664,
6941 | +       "learning_rate": 4.933487177280482e-08,
6942 | +       "loss": 10.3119,
6943 | +       "step": 986
6944 | +     },
6945 | +     {
6946 | +       "epoch": 0.10838412123208697,
6947 | +       "grad_norm": 0.04189673811197281,
6948 | +       "learning_rate": 4.253970533929508e-08,
6949 | +       "loss": 10.3188,
6950 | +       "step": 987
6951 | +     },
6952 | +     {
6953 | +       "epoch": 0.10849393290506781,
6954 | +       "grad_norm": 0.040410179644823074,
6955 | +       "learning_rate": 3.624760997631982e-08,
6956 | +       "loss": 10.3182,
6957 | +       "step": 988
6958 | +     },
6959 | +     {
6960 | +       "epoch": 0.10860374457804865,
6961 | +       "grad_norm": 0.04387160390615463,
6962 | +       "learning_rate": 3.04586490452119e-08,
6963 | +       "loss": 10.3111,
6964 | +       "step": 989
6965 | +     },
6966 | +     {
6967 | +       "epoch": 0.10871355625102948,
6968 | +       "grad_norm": 0.03705020993947983,
6969 | +       "learning_rate": 2.5172880840745873e-08,
6970 | +       "loss": 10.3173,
6971 | +       "step": 990
6972 | +     },
6973 | +     {
6974 | +       "epoch": 0.10882336792401032,
6975 | +       "grad_norm": 0.029674449935555458,
6976 | +       "learning_rate": 2.0390358590538504e-08,
6977 | +       "loss": 10.3057,
6978 | +       "step": 991
6979 | +     },
6980 | +     {
6981 | +       "epoch": 0.10893317959699116,
6982 | +       "grad_norm": 0.03965083882212639,
6983 | +       "learning_rate": 1.6111130454543598e-08,
6984 | +       "loss": 10.3186,
6985 | +       "step": 992
6986 | +     },
6987 | +     {
6988 | +       "epoch": 0.109042991269972,
6989 | +       "grad_norm": 0.02693052589893341,
6990 | +       "learning_rate": 1.2335239524541299e-08,
6991 | +       "loss": 10.3194,
6992 | +       "step": 993
6993 | +     },
6994 | +     {
6995 | +       "epoch": 0.10915280294295283,
6996 | +       "grad_norm": 0.04057001695036888,
6997 | +       "learning_rate": 9.06272382371065e-09,
6998 | +       "loss": 10.3171,
6999 | +       "step": 994
7000 | +     },
7001 | +     {
7002 | +       "epoch": 0.10926261461593367,
7003 | +       "grad_norm": 0.04335347190499306,
7004 | +       "learning_rate": 6.293616306246586e-09,
7005 | +       "loss": 10.3177,
7006 | +       "step": 995
7007 | +     },
7008 | +     {
7009 | +       "epoch": 0.10937242628891451,
7010 | +       "grad_norm": 0.04777819290757179,
7011 | +       "learning_rate": 4.0279448570323954e-09,
7012 | +       "loss": 10.3211,
7013 | +       "step": 996
7014 | +     },
7015 | +     {
7016 | +       "epoch": 0.10948223796189535,
7017 | +       "grad_norm": 0.034486740827560425,
7018 | +       "learning_rate": 2.265732291356626e-09,
7019 | +       "loss": 10.3233,
7020 | +       "step": 997
7021 | +     },
7022 | +     {
7023 | +       "epoch": 0.10959204963487619,
7024 | +       "grad_norm": 0.03398129716515541,
7025 | +       "learning_rate": 1.0069963546743832e-09,
7026 | +       "loss": 10.3182,
7027 | +       "step": 998
7028 | +     },
7029 | +     {
7030 | +       "epoch": 0.10970186130785703,
7031 | +       "grad_norm": 0.03073979541659355,
7032 | +       "learning_rate": 2.5174972244634833e-10,
7033 | +       "loss": 10.3134,
7034 | +       "step": 999
7035 | +     },
7036 | +     {
7037 | +       "epoch": 0.10981167298083787,
7038 | +       "grad_norm": 0.030138185247778893,
7039 | +       "learning_rate": 0.0,
7040 | +       "loss": 10.314,
7041 | +       "step": 1000
7042 | +     },
7043 | +     {
7044 | +       "epoch": 0.10981167298083787,
7045 | +       "eval_loss": 10.31318187713623,
7046 | +       "eval_runtime": 126.9236,
7047 | +       "eval_samples_per_second": 11.716,
7048 | +       "eval_steps_per_second": 5.862,
7049 | +       "step": 1000
7050 |       }
7051 |     ],
7052 |     "logging_steps": 1,

7061 |           "should_evaluate": false,
7062 |           "should_log": false,
7063 |           "should_save": true,
7064 | +         "should_training_stop": true
7065 |         },
7066 |         "attributes": {}
7067 |       }
7068 |     },
7069 | +   "total_flos": 213840297984000.0,
7070 |     "train_batch_size": 2,
7071 |     "trial_name": null,
7072 |     "trial_params": null