Training in progress, step 1800
- last-checkpoint/optimizer.pt +1 -1
- last-checkpoint/pytorch_model.bin +1 -1
- last-checkpoint/rng_state.pth +1 -1
- last-checkpoint/scheduler.pt +1 -1
- last-checkpoint/trainer_state.json +1213 -5
- pytorch_model.bin +1 -1
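
The four .pt/.bin entries above are Git LFS pointer files, so the diff below only swaps the recorded sha256 oid while the byte size stays the same; trainer_state.json is the only file whose contents actually grow. As a minimal, hypothetical sketch (only the file name and expected digest are taken from this diff), a downloaded weight file can be checked against the pointer's oid like this:

import hashlib

def sha256_of(path: str, chunk_size: int = 1 << 20) -> str:
    """Stream the file through sha256 so multi-GB weights never sit in memory."""
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        while chunk := f.read(chunk_size):
            digest.update(chunk)
    return digest.hexdigest()

# oid recorded in this commit's LFS pointer for pytorch_model.bin
EXPECTED = "e0581c601cbb195614ebff9150719cfc4b2d8be4e3802ef0fd4f340d4b33ab78"
print(sha256_of("pytorch_model.bin") == EXPECTED)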
last-checkpoint/optimizer.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:052f70623aa8868a83cba6c72bff784cd35f1627f7830c96a8037c50efd03161
 size 4736616809
last-checkpoint/pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:e0581c601cbb195614ebff9150719cfc4b2d8be4e3802ef0fd4f340d4b33ab78
 size 2368281769
last-checkpoint/rng_state.pth CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:f2bf5cd3985dc56adcf7cef3650c97c2e1e8cbdf593411958cc282a06a916078
 size 14575
last-checkpoint/scheduler.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:e94ec64346c3eafc3d12675fe356be8b89d123bd9cf0ee96e17680b54394d6f7
 size 627
last-checkpoint/trainer_state.json CHANGED
@@ -1,8 +1,8 @@
 {
-  "best_metric":
-  "best_model_checkpoint": "output/checkpoint-
-  "epoch": 1.
-  "global_step":
+  "best_metric": 1.9992964267730713,
+  "best_model_checkpoint": "output/checkpoint-1800",
+  "epoch": 1.2306311791946416,
+  "global_step": 1800,
   "is_hyper_param_search": false,
   "is_local_process_zero": true,
   "is_world_process_zero": true,
@@ -9670,11 +9670,1219 @@
       "eval_samples_per_second": 6.083,
       "eval_steps_per_second": 6.083,
       "step": 1600
+    },
+    {
+      "epoch": 1.09,
+      "learning_rate": 0.0003991877687529861,
+      "loss": 1.9763,
+      "step": 1601
+    },
+    {
+      "epoch": 1.1,
+      "learning_rate": 0.0003990444338270425,
+      "loss": 2.0162,
+      "step": 1602
+    },
+    {
+      "epoch": 1.1,
+      "learning_rate": 0.00039890109890109885,
+      "loss": 1.8628,
+      "step": 1603
+    },
+    {
+      "epoch": 1.1,
+      "learning_rate": 0.0003987577639751553,
+      "loss": 2.0199,
+      "step": 1604
+    },
+    {
+      "epoch": 1.1,
+      "learning_rate": 0.0003986144290492116,
+      "loss": 1.9488,
+      "step": 1605
+    },
+    {
+      "epoch": 1.1,
+      "learning_rate": 0.000398471094123268,
+      "loss": 2.0393,
+      "step": 1606
+    },
+    {
+      "epoch": 1.1,
+      "learning_rate": 0.0003983277591973244,
+      "loss": 1.9423,
+      "step": 1607
+    },
+    {
+      "epoch": 1.1,
+      "learning_rate": 0.0003981844242713807,
+      "loss": 2.1568,
+      "step": 1608
+    },
+    {
+      "epoch": 1.1,
+      "learning_rate": 0.00039804108934543716,
+      "loss": 2.0384,
+      "step": 1609
+    },
+    {
+      "epoch": 1.1,
+      "learning_rate": 0.0003978977544194935,
+      "loss": 1.9644,
+      "step": 1610
+    },
+    {
+      "epoch": 1.1,
+      "learning_rate": 0.0003977544194935499,
+      "loss": 2.0689,
+      "step": 1611
+    },
+    {
+      "epoch": 1.1,
+      "learning_rate": 0.00039761108456760625,
+      "loss": 2.0017,
+      "step": 1612
+    },
+    {
+      "epoch": 1.1,
+      "learning_rate": 0.0003974677496416627,
+      "loss": 1.9427,
+      "step": 1613
+    },
+    {
+      "epoch": 1.1,
+      "learning_rate": 0.000397324414715719,
+      "loss": 1.9709,
+      "step": 1614
+    },
+    {
+      "epoch": 1.1,
+      "learning_rate": 0.0003971810797897754,
+      "loss": 2.0033,
+      "step": 1615
+    },
+    {
+      "epoch": 1.1,
+      "learning_rate": 0.0003970377448638318,
+      "loss": 1.9937,
+      "step": 1616
+    },
+    {
+      "epoch": 1.11,
+      "learning_rate": 0.00039689440993788817,
+      "loss": 2.005,
+      "step": 1617
+    },
+    {
+      "epoch": 1.11,
+      "learning_rate": 0.00039675107501194456,
+      "loss": 2.0023,
+      "step": 1618
+    },
+    {
+      "epoch": 1.11,
+      "learning_rate": 0.00039660774008600094,
+      "loss": 2.0233,
+      "step": 1619
+    },
+    {
+      "epoch": 1.11,
+      "learning_rate": 0.0003964644051600573,
+      "loss": 1.962,
+      "step": 1620
+    },
+    {
+      "epoch": 1.11,
+      "learning_rate": 0.0003963210702341137,
+      "loss": 2.026,
+      "step": 1621
+    },
+    {
+      "epoch": 1.11,
+      "learning_rate": 0.00039617773530817004,
+      "loss": 2.0934,
+      "step": 1622
+    },
+    {
+      "epoch": 1.11,
+      "learning_rate": 0.0003960344003822264,
+      "loss": 2.0144,
+      "step": 1623
+    },
+    {
+      "epoch": 1.11,
+      "learning_rate": 0.0003958910654562828,
+      "loss": 1.9919,
+      "step": 1624
+    },
+    {
+      "epoch": 1.11,
+      "learning_rate": 0.0003957477305303392,
+      "loss": 1.9533,
+      "step": 1625
+    },
+    {
+      "epoch": 1.11,
+      "learning_rate": 0.00039560439560439557,
+      "loss": 1.9303,
+      "step": 1626
+    },
+    {
+      "epoch": 1.11,
+      "learning_rate": 0.00039546106067845196,
+      "loss": 1.9299,
+      "step": 1627
+    },
+    {
+      "epoch": 1.11,
+      "learning_rate": 0.00039531772575250834,
+      "loss": 2.0134,
+      "step": 1628
+    },
+    {
+      "epoch": 1.11,
+      "learning_rate": 0.00039517439082656467,
+      "loss": 1.9218,
+      "step": 1629
+    },
+    {
+      "epoch": 1.11,
+      "learning_rate": 0.0003950310559006211,
+      "loss": 2.0074,
+      "step": 1630
+    },
+    {
+      "epoch": 1.12,
+      "learning_rate": 0.00039488772097467744,
+      "loss": 2.1142,
+      "step": 1631
+    },
+    {
+      "epoch": 1.12,
+      "learning_rate": 0.0003947443860487339,
+      "loss": 2.0237,
+      "step": 1632
+    },
+    {
+      "epoch": 1.12,
+      "learning_rate": 0.0003946010511227902,
+      "loss": 2.0208,
+      "step": 1633
+    },
+    {
+      "epoch": 1.12,
+      "learning_rate": 0.00039445771619684664,
+      "loss": 1.9773,
+      "step": 1634
+    },
+    {
+      "epoch": 1.12,
+      "learning_rate": 0.00039431438127090297,
+      "loss": 1.9945,
+      "step": 1635
+    },
+    {
+      "epoch": 1.12,
+      "learning_rate": 0.00039417104634495936,
+      "loss": 1.9521,
+      "step": 1636
+    },
+    {
+      "epoch": 1.12,
+      "learning_rate": 0.00039402771141901574,
+      "loss": 2.0321,
+      "step": 1637
+    },
+    {
+      "epoch": 1.12,
+      "learning_rate": 0.0003938843764930721,
+      "loss": 2.0056,
+      "step": 1638
+    },
+    {
+      "epoch": 1.12,
+      "learning_rate": 0.0003937410415671285,
+      "loss": 2.008,
+      "step": 1639
+    },
+    {
+      "epoch": 1.12,
+      "learning_rate": 0.00039359770664118484,
+      "loss": 2.0351,
+      "step": 1640
+    },
+    {
+      "epoch": 1.12,
+      "learning_rate": 0.0003934543717152413,
+      "loss": 2.0366,
+      "step": 1641
+    },
+    {
+      "epoch": 1.12,
+      "learning_rate": 0.0003933110367892976,
+      "loss": 2.0498,
+      "step": 1642
+    },
+    {
+      "epoch": 1.12,
+      "learning_rate": 0.000393167701863354,
+      "loss": 2.0764,
+      "step": 1643
+    },
+    {
+      "epoch": 1.12,
+      "learning_rate": 0.00039302436693741037,
+      "loss": 1.9468,
+      "step": 1644
+    },
+    {
+      "epoch": 1.12,
+      "learning_rate": 0.00039288103201146675,
+      "loss": 2.0169,
+      "step": 1645
+    },
+    {
+      "epoch": 1.13,
+      "learning_rate": 0.00039273769708552314,
+      "loss": 1.9899,
+      "step": 1646
+    },
+    {
+      "epoch": 1.13,
+      "learning_rate": 0.0003925943621595795,
+      "loss": 2.0945,
+      "step": 1647
+    },
+    {
+      "epoch": 1.13,
+      "learning_rate": 0.00039245102723363585,
+      "loss": 2.055,
+      "step": 1648
+    },
+    {
+      "epoch": 1.13,
+      "learning_rate": 0.0003923076923076923,
+      "loss": 1.9816,
+      "step": 1649
+    },
+    {
+      "epoch": 1.13,
+      "learning_rate": 0.0003921643573817486,
+      "loss": 1.9407,
+      "step": 1650
+    },
+    {
+      "epoch": 1.13,
+      "learning_rate": 0.00039202102245580506,
+      "loss": 2.1146,
+      "step": 1651
+    },
+    {
+      "epoch": 1.13,
+      "learning_rate": 0.0003918776875298614,
+      "loss": 2.096,
+      "step": 1652
+    },
+    {
+      "epoch": 1.13,
+      "learning_rate": 0.0003917343526039178,
+      "loss": 2.0008,
+      "step": 1653
+    },
+    {
+      "epoch": 1.13,
+      "learning_rate": 0.00039159101767797415,
+      "loss": 2.0295,
+      "step": 1654
+    },
+    {
+      "epoch": 1.13,
+      "learning_rate": 0.00039144768275203054,
+      "loss": 1.9967,
+      "step": 1655
+    },
+    {
+      "epoch": 1.13,
+      "learning_rate": 0.0003913043478260869,
+      "loss": 2.0425,
+      "step": 1656
+    },
+    {
+      "epoch": 1.13,
+      "learning_rate": 0.00039116101290014325,
+      "loss": 2.062,
+      "step": 1657
+    },
+    {
+      "epoch": 1.13,
+      "learning_rate": 0.0003910176779741997,
+      "loss": 2.0158,
+      "step": 1658
+    },
+    {
+      "epoch": 1.13,
+      "learning_rate": 0.000390874343048256,
+      "loss": 1.9327,
+      "step": 1659
+    },
+    {
+      "epoch": 1.13,
+      "learning_rate": 0.00039073100812231246,
+      "loss": 2.0324,
+      "step": 1660
+    },
+    {
+      "epoch": 1.14,
+      "learning_rate": 0.0003905876731963688,
+      "loss": 1.9874,
+      "step": 1661
+    },
+    {
+      "epoch": 1.14,
+      "learning_rate": 0.00039044433827042517,
+      "loss": 1.9798,
+      "step": 1662
+    },
+    {
+      "epoch": 1.14,
+      "learning_rate": 0.00039030100334448155,
+      "loss": 1.8957,
+      "step": 1663
+    },
+    {
+      "epoch": 1.14,
+      "learning_rate": 0.00039015766841853794,
+      "loss": 2.0757,
+      "step": 1664
+    },
+    {
+      "epoch": 1.14,
+      "learning_rate": 0.0003900143334925943,
+      "loss": 2.0211,
+      "step": 1665
+    },
+    {
+      "epoch": 1.14,
+      "learning_rate": 0.0003898709985666507,
+      "loss": 2.0094,
+      "step": 1666
+    },
+    {
+      "epoch": 1.14,
+      "learning_rate": 0.0003897276636407071,
+      "loss": 2.0978,
+      "step": 1667
+    },
+    {
+      "epoch": 1.14,
+      "learning_rate": 0.0003895843287147635,
+      "loss": 2.0842,
+      "step": 1668
+    },
+    {
+      "epoch": 1.14,
+      "learning_rate": 0.0003894409937888198,
+      "loss": 2.1968,
+      "step": 1669
+    },
+    {
+      "epoch": 1.14,
+      "learning_rate": 0.00038929765886287624,
+      "loss": 2.0997,
+      "step": 1670
+    },
+    {
+      "epoch": 1.14,
+      "learning_rate": 0.00038915432393693257,
+      "loss": 1.9782,
+      "step": 1671
+    },
+    {
+      "epoch": 1.14,
+      "learning_rate": 0.000389010989010989,
+      "loss": 2.0218,
+      "step": 1672
+    },
+    {
+      "epoch": 1.14,
+      "learning_rate": 0.00038886765408504534,
+      "loss": 2.038,
+      "step": 1673
+    },
+    {
+      "epoch": 1.14,
+      "learning_rate": 0.0003887243191591018,
+      "loss": 2.049,
+      "step": 1674
+    },
+    {
+      "epoch": 1.15,
+      "learning_rate": 0.0003885809842331581,
+      "loss": 2.1634,
+      "step": 1675
+    },
+    {
+      "epoch": 1.15,
+      "learning_rate": 0.00038843764930721444,
+      "loss": 2.0358,
+      "step": 1676
+    },
+    {
+      "epoch": 1.15,
+      "learning_rate": 0.0003882943143812709,
+      "loss": 2.0982,
+      "step": 1677
+    },
+    {
+      "epoch": 1.15,
+      "learning_rate": 0.0003881509794553272,
+      "loss": 2.1026,
+      "step": 1678
+    },
+    {
+      "epoch": 1.15,
+      "learning_rate": 0.00038800764452938364,
+      "loss": 1.9847,
+      "step": 1679
+    },
+    {
+      "epoch": 1.15,
+      "learning_rate": 0.00038786430960343997,
+      "loss": 1.9671,
+      "step": 1680
+    },
+    {
+      "epoch": 1.15,
+      "learning_rate": 0.0003877209746774964,
+      "loss": 2.0934,
+      "step": 1681
+    },
+    {
+      "epoch": 1.15,
+      "learning_rate": 0.00038757763975155274,
+      "loss": 1.9865,
+      "step": 1682
+    },
+    {
+      "epoch": 1.15,
+      "learning_rate": 0.0003874343048256091,
+      "loss": 2.0684,
+      "step": 1683
+    },
+    {
+      "epoch": 1.15,
+      "learning_rate": 0.0003872909698996655,
+      "loss": 2.1101,
+      "step": 1684
+    },
+    {
+      "epoch": 1.15,
+      "learning_rate": 0.0003871476349737219,
+      "loss": 1.9562,
+      "step": 1685
+    },
+    {
+      "epoch": 1.15,
+      "learning_rate": 0.0003870043000477783,
+      "loss": 1.9814,
+      "step": 1686
+    },
+    {
+      "epoch": 1.15,
+      "learning_rate": 0.00038686096512183466,
+      "loss": 2.0065,
+      "step": 1687
+    },
+    {
+      "epoch": 1.15,
+      "learning_rate": 0.00038671763019589104,
+      "loss": 2.1368,
+      "step": 1688
+    },
+    {
+      "epoch": 1.15,
+      "learning_rate": 0.0003865742952699474,
+      "loss": 1.9981,
+      "step": 1689
+    },
+    {
+      "epoch": 1.16,
+      "learning_rate": 0.00038643096034400375,
+      "loss": 1.993,
+      "step": 1690
+    },
+    {
+      "epoch": 1.16,
+      "learning_rate": 0.0003862876254180602,
+      "loss": 2.0458,
+      "step": 1691
+    },
+    {
+      "epoch": 1.16,
+      "learning_rate": 0.0003861442904921165,
+      "loss": 1.9745,
+      "step": 1692
+    },
+    {
+      "epoch": 1.16,
+      "learning_rate": 0.00038600095556617296,
+      "loss": 1.932,
+      "step": 1693
+    },
+    {
+      "epoch": 1.16,
+      "learning_rate": 0.0003858576206402293,
+      "loss": 2.0137,
+      "step": 1694
+    },
+    {
+      "epoch": 1.16,
+      "learning_rate": 0.0003857142857142857,
+      "loss": 2.0512,
+      "step": 1695
+    },
+    {
+      "epoch": 1.16,
+      "learning_rate": 0.00038557095078834206,
+      "loss": 1.9725,
+      "step": 1696
+    },
+    {
+      "epoch": 1.16,
+      "learning_rate": 0.0003854276158623984,
+      "loss": 1.9678,
+      "step": 1697
+    },
+    {
+      "epoch": 1.16,
+      "learning_rate": 0.0003852842809364548,
+      "loss": 1.9604,
+      "step": 1698
+    },
+    {
+      "epoch": 1.16,
+      "learning_rate": 0.00038514094601051115,
+      "loss": 2.0289,
+      "step": 1699
+    },
+    {
+      "epoch": 1.16,
+      "learning_rate": 0.0003849976110845676,
+      "loss": 1.9483,
+      "step": 1700
+    },
+    {
+      "epoch": 1.16,
+      "learning_rate": 0.0003848542761586239,
+      "loss": 2.0639,
+      "step": 1701
+    },
+    {
+      "epoch": 1.16,
+      "learning_rate": 0.00038471094123268036,
+      "loss": 2.0865,
+      "step": 1702
+    },
+    {
+      "epoch": 1.16,
+      "learning_rate": 0.0003845676063067367,
+      "loss": 2.014,
+      "step": 1703
+    },
+    {
+      "epoch": 1.16,
+      "learning_rate": 0.00038442427138079307,
+      "loss": 2.0076,
+      "step": 1704
+    },
+    {
+      "epoch": 1.17,
+      "learning_rate": 0.00038428093645484946,
+      "loss": 2.042,
+      "step": 1705
+    },
+    {
+      "epoch": 1.17,
+      "learning_rate": 0.00038413760152890584,
+      "loss": 1.974,
+      "step": 1706
+    },
+    {
+      "epoch": 1.17,
+      "learning_rate": 0.0003839942666029622,
+      "loss": 2.0785,
+      "step": 1707
+    },
+    {
+      "epoch": 1.17,
+      "learning_rate": 0.0003838509316770186,
+      "loss": 1.971,
+      "step": 1708
+    },
+    {
+      "epoch": 1.17,
+      "learning_rate": 0.000383707596751075,
+      "loss": 2.0806,
+      "step": 1709
+    },
+    {
+      "epoch": 1.17,
+      "learning_rate": 0.0003835642618251314,
+      "loss": 2.0394,
+      "step": 1710
+    },
+    {
+      "epoch": 1.17,
+      "learning_rate": 0.0003834209268991877,
+      "loss": 2.0122,
+      "step": 1711
+    },
+    {
+      "epoch": 1.17,
+      "learning_rate": 0.00038327759197324414,
+      "loss": 1.9853,
+      "step": 1712
+    },
+    {
+      "epoch": 1.17,
+      "learning_rate": 0.00038313425704730047,
+      "loss": 1.9518,
+      "step": 1713
+    },
+    {
+      "epoch": 1.17,
+      "learning_rate": 0.0003829909221213569,
+      "loss": 2.0134,
+      "step": 1714
+    },
+    {
+      "epoch": 1.17,
+      "learning_rate": 0.00038284758719541324,
+      "loss": 2.0334,
+      "step": 1715
+    },
+    {
+      "epoch": 1.17,
+      "learning_rate": 0.0003827042522694697,
+      "loss": 2.0515,
+      "step": 1716
+    },
+    {
+      "epoch": 1.17,
+      "learning_rate": 0.000382560917343526,
+      "loss": 2.0012,
+      "step": 1717
+    },
+    {
+      "epoch": 1.17,
+      "learning_rate": 0.00038241758241758234,
+      "loss": 1.9745,
+      "step": 1718
+    },
+    {
+      "epoch": 1.18,
+      "learning_rate": 0.0003822742474916388,
+      "loss": 2.0406,
+      "step": 1719
+    },
+    {
+      "epoch": 1.18,
+      "learning_rate": 0.0003821309125656951,
+      "loss": 2.0851,
+      "step": 1720
+    },
+    {
+      "epoch": 1.18,
+      "learning_rate": 0.00038198757763975154,
+      "loss": 1.991,
+      "step": 1721
+    },
+    {
+      "epoch": 1.18,
+      "learning_rate": 0.00038184424271380787,
+      "loss": 1.9414,
+      "step": 1722
+    },
+    {
+      "epoch": 1.18,
+      "learning_rate": 0.0003817009077878643,
+      "loss": 1.9876,
+      "step": 1723
+    },
+    {
+      "epoch": 1.18,
+      "learning_rate": 0.00038155757286192064,
+      "loss": 1.9759,
+      "step": 1724
+    },
+    {
+      "epoch": 1.18,
+      "learning_rate": 0.000381414237935977,
+      "loss": 2.0176,
+      "step": 1725
+    },
+    {
+      "epoch": 1.18,
+      "learning_rate": 0.0003812709030100334,
+      "loss": 2.063,
+      "step": 1726
+    },
+    {
+      "epoch": 1.18,
+      "learning_rate": 0.0003811275680840898,
+      "loss": 2.0379,
+      "step": 1727
+    },
+    {
+      "epoch": 1.18,
+      "learning_rate": 0.0003809842331581462,
+      "loss": 2.0511,
+      "step": 1728
+    },
+    {
+      "epoch": 1.18,
+      "learning_rate": 0.00038084089823220256,
+      "loss": 2.0851,
+      "step": 1729
+    },
+    {
+      "epoch": 1.18,
+      "learning_rate": 0.00038069756330625894,
+      "loss": 2.0056,
+      "step": 1730
+    },
+    {
+      "epoch": 1.18,
+      "learning_rate": 0.0003805542283803153,
+      "loss": 2.0117,
+      "step": 1731
+    },
+    {
+      "epoch": 1.18,
+      "learning_rate": 0.00038041089345437166,
+      "loss": 1.924,
+      "step": 1732
+    },
+    {
+      "epoch": 1.18,
+      "learning_rate": 0.0003802675585284281,
+      "loss": 1.9935,
+      "step": 1733
+    },
+    {
+      "epoch": 1.19,
+      "learning_rate": 0.0003801242236024844,
+      "loss": 1.9831,
+      "step": 1734
+    },
+    {
+      "epoch": 1.19,
+      "learning_rate": 0.00037998088867654086,
+      "loss": 2.0066,
+      "step": 1735
+    },
+    {
+      "epoch": 1.19,
+      "learning_rate": 0.0003798375537505972,
+      "loss": 1.9368,
+      "step": 1736
+    },
+    {
+      "epoch": 1.19,
+      "learning_rate": 0.00037969421882465363,
+      "loss": 2.1333,
+      "step": 1737
+    },
+    {
+      "epoch": 1.19,
+      "learning_rate": 0.00037955088389870996,
+      "loss": 2.0094,
+      "step": 1738
+    },
+    {
+      "epoch": 1.19,
+      "learning_rate": 0.0003794075489727663,
+      "loss": 2.0104,
+      "step": 1739
+    },
+    {
+      "epoch": 1.19,
+      "learning_rate": 0.0003792642140468227,
+      "loss": 2.1044,
+      "step": 1740
+    },
+    {
+      "epoch": 1.19,
+      "learning_rate": 0.00037912087912087906,
+      "loss": 1.9823,
+      "step": 1741
+    },
+    {
+      "epoch": 1.19,
+      "learning_rate": 0.0003789775441949355,
+      "loss": 2.0191,
+      "step": 1742
+    },
+    {
+      "epoch": 1.19,
+      "learning_rate": 0.0003788342092689918,
+      "loss": 1.9903,
+      "step": 1743
+    },
+    {
+      "epoch": 1.19,
+      "learning_rate": 0.00037869087434304826,
+      "loss": 2.0378,
+      "step": 1744
+    },
+    {
+      "epoch": 1.19,
+      "learning_rate": 0.0003785475394171046,
+      "loss": 1.9787,
+      "step": 1745
+    },
+    {
+      "epoch": 1.19,
+      "learning_rate": 0.000378404204491161,
+      "loss": 2.016,
+      "step": 1746
+    },
+    {
+      "epoch": 1.19,
+      "learning_rate": 0.00037826086956521736,
+      "loss": 2.0677,
+      "step": 1747
+    },
+    {
+      "epoch": 1.2,
+      "learning_rate": 0.00037811753463927374,
+      "loss": 2.0095,
+      "step": 1748
+    },
+    {
+      "epoch": 1.2,
+      "learning_rate": 0.0003779741997133301,
+      "loss": 2.0386,
+      "step": 1749
+    },
+    {
+      "epoch": 1.2,
+      "learning_rate": 0.0003778308647873865,
+      "loss": 2.0146,
+      "step": 1750
+    },
+    {
+      "epoch": 1.2,
+      "learning_rate": 0.00037768752986144284,
+      "loss": 2.0429,
+      "step": 1751
+    },
+    {
+      "epoch": 1.2,
+      "learning_rate": 0.0003775441949354993,
+      "loss": 2.0113,
+      "step": 1752
+    },
+    {
+      "epoch": 1.2,
+      "learning_rate": 0.0003774008600095556,
+      "loss": 2.0442,
+      "step": 1753
+    },
+    {
+      "epoch": 1.2,
+      "learning_rate": 0.00037725752508361204,
+      "loss": 2.0165,
+      "step": 1754
+    },
+    {
+      "epoch": 1.2,
+      "learning_rate": 0.0003771141901576684,
+      "loss": 2.0509,
+      "step": 1755
+    },
+    {
+      "epoch": 1.2,
+      "learning_rate": 0.0003769708552317248,
+      "loss": 2.0619,
+      "step": 1756
+    },
+    {
+      "epoch": 1.2,
+      "learning_rate": 0.00037682752030578114,
+      "loss": 1.9896,
+      "step": 1757
+    },
+    {
+      "epoch": 1.2,
+      "learning_rate": 0.00037668418537983747,
+      "loss": 1.9639,
+      "step": 1758
+    },
+    {
+      "epoch": 1.2,
+      "learning_rate": 0.0003765408504538939,
+      "loss": 1.9989,
+      "step": 1759
+    },
+    {
+      "epoch": 1.2,
+      "learning_rate": 0.00037639751552795024,
+      "loss": 1.9954,
+      "step": 1760
+    },
+    {
+      "epoch": 1.2,
+      "learning_rate": 0.0003762541806020067,
+      "loss": 2.0313,
+      "step": 1761
+    },
+    {
+      "epoch": 1.2,
+      "learning_rate": 0.000376110845676063,
+      "loss": 1.9543,
+      "step": 1762
+    },
+    {
+      "epoch": 1.21,
+      "learning_rate": 0.00037596751075011944,
+      "loss": 1.982,
+      "step": 1763
+    },
+    {
+      "epoch": 1.21,
+      "learning_rate": 0.0003758241758241758,
+      "loss": 2.0651,
+      "step": 1764
+    },
+    {
+      "epoch": 1.21,
+      "learning_rate": 0.00037568084089823216,
+      "loss": 2.0458,
+      "step": 1765
+    },
+    {
+      "epoch": 1.21,
+      "learning_rate": 0.00037553750597228854,
+      "loss": 2.0584,
+      "step": 1766
+    },
+    {
+      "epoch": 1.21,
+      "learning_rate": 0.0003753941710463449,
+      "loss": 2.0646,
+      "step": 1767
+    },
+    {
+      "epoch": 1.21,
+      "learning_rate": 0.0003752508361204013,
+      "loss": 1.9832,
+      "step": 1768
+    },
+    {
+      "epoch": 1.21,
+      "learning_rate": 0.0003751075011944577,
+      "loss": 2.0284,
+      "step": 1769
+    },
+    {
+      "epoch": 1.21,
+      "learning_rate": 0.0003749641662685141,
+      "loss": 2.002,
+      "step": 1770
+    },
+    {
+      "epoch": 1.21,
+      "learning_rate": 0.00037482083134257046,
+      "loss": 2.0567,
+      "step": 1771
+    },
+    {
+      "epoch": 1.21,
+      "learning_rate": 0.0003746774964166268,
+      "loss": 1.9435,
+      "step": 1772
+    },
+    {
+      "epoch": 1.21,
+      "learning_rate": 0.00037453416149068323,
+      "loss": 2.0795,
+      "step": 1773
+    },
+    {
+      "epoch": 1.21,
+      "learning_rate": 0.00037439082656473956,
+      "loss": 1.9883,
+      "step": 1774
+    },
+    {
+      "epoch": 1.21,
+      "learning_rate": 0.000374247491638796,
+      "loss": 1.9813,
+      "step": 1775
+    },
+    {
+      "epoch": 1.21,
+      "learning_rate": 0.0003741041567128523,
+      "loss": 2.0155,
+      "step": 1776
+    },
+    {
+      "epoch": 1.21,
+      "learning_rate": 0.00037396082178690876,
+      "loss": 2.0065,
+      "step": 1777
+    },
+    {
+      "epoch": 1.22,
+      "learning_rate": 0.0003738174868609651,
+      "loss": 1.9658,
+      "step": 1778
+    },
+    {
+      "epoch": 1.22,
+      "learning_rate": 0.0003736741519350214,
+      "loss": 2.0513,
+      "step": 1779
+    },
+    {
+      "epoch": 1.22,
+      "learning_rate": 0.00037353081700907786,
+      "loss": 2.1119,
+      "step": 1780
+    },
+    {
+      "epoch": 1.22,
+      "learning_rate": 0.0003733874820831342,
+      "loss": 1.9894,
+      "step": 1781
+    },
+    {
+      "epoch": 1.22,
+      "learning_rate": 0.00037324414715719063,
+      "loss": 2.0833,
+      "step": 1782
+    },
+    {
+      "epoch": 1.22,
+      "learning_rate": 0.00037310081223124696,
+      "loss": 2.0896,
+      "step": 1783
+    },
+    {
+      "epoch": 1.22,
+      "learning_rate": 0.0003729574773053034,
+      "loss": 1.9927,
+      "step": 1784
+    },
+    {
+      "epoch": 1.22,
+      "learning_rate": 0.0003728141423793597,
+      "loss": 1.9954,
+      "step": 1785
+    },
+    {
+      "epoch": 1.22,
+      "learning_rate": 0.0003726708074534161,
+      "loss": 2.0618,
+      "step": 1786
+    },
+    {
+      "epoch": 1.22,
+      "learning_rate": 0.0003725274725274725,
+      "loss": 1.9595,
+      "step": 1787
+    },
+    {
+      "epoch": 1.22,
+      "learning_rate": 0.0003723841376015289,
+      "loss": 2.0487,
+      "step": 1788
+    },
+    {
+      "epoch": 1.22,
+      "learning_rate": 0.00037224080267558526,
+      "loss": 2.1212,
+      "step": 1789
+    },
+    {
+      "epoch": 1.22,
+      "learning_rate": 0.00037209746774964164,
+      "loss": 1.976,
+      "step": 1790
+    },
+    {
+      "epoch": 1.22,
+      "learning_rate": 0.00037195413282369803,
+      "loss": 1.8774,
+      "step": 1791
+    },
+    {
+      "epoch": 1.23,
+      "learning_rate": 0.0003718107978977544,
+      "loss": 2.0226,
+      "step": 1792
+    },
+    {
+      "epoch": 1.23,
+      "learning_rate": 0.00037166746297181074,
+      "loss": 1.9768,
+      "step": 1793
+    },
+    {
+      "epoch": 1.23,
+      "learning_rate": 0.0003715241280458672,
+      "loss": 2.0637,
+      "step": 1794
+    },
+    {
+      "epoch": 1.23,
+      "learning_rate": 0.0003713807931199235,
+      "loss": 2.0001,
+      "step": 1795
+    },
+    {
+      "epoch": 1.23,
+      "learning_rate": 0.00037123745819397995,
+      "loss": 2.0729,
+      "step": 1796
+    },
+    {
+      "epoch": 1.23,
+      "learning_rate": 0.0003710941232680363,
+      "loss": 1.9483,
+      "step": 1797
+    },
+    {
+      "epoch": 1.23,
+      "learning_rate": 0.00037095078834209266,
+      "loss": 2.0153,
+      "step": 1798
+    },
+    {
+      "epoch": 1.23,
+      "learning_rate": 0.00037080745341614904,
+      "loss": 2.0948,
+      "step": 1799
+    },
+    {
+      "epoch": 1.23,
+      "learning_rate": 0.0003706641184902054,
+      "loss": 2.0054,
+      "step": 1800
+    },
+    {
+      "epoch": 1.23,
+      "eval_loss": 1.9992964267730713,
+      "eval_runtime": 1649.7967,
+      "eval_samples_per_second": 6.061,
+      "eval_steps_per_second": 6.061,
+      "step": 1800
     }
   ],
   "max_steps": 4386,
   "num_train_epochs": 3,
-  "total_flos":
+  "total_flos": 2.2063711021239214e+18,
   "trial_name": null,
   "trial_params": null
 }
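
The hunk above appends 200 train records (steps 1601-1800) plus the step-1800 eval record, which also becomes the new best checkpoint (eval_loss 1.9992964267730713). Since trainer_state.json is plain JSON, with these records stored under the Trainer's log_history list, the new window can be inspected directly. A minimal sketch, assuming the checkpoint has been fetched locally:

import json

with open("last-checkpoint/trainer_state.json") as f:
    state = json.load(f)

# Train records carry "loss"; eval records carry "eval_loss" instead.
window = [r for r in state["log_history"] if "loss" in r and 1600 < r["step"] <= 1800]
evals = [r for r in state["log_history"] if "eval_loss" in r]

print("mean train loss, steps 1601-1800:", sum(r["loss"] for r in window) / len(window))
print("best metric:", state["best_metric"])  # 1.9992964267730713 per this commit
print("latest eval_loss:", evals[-1]["eval_loss"], "at step", evals[-1]["step"])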
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:e0581c601cbb195614ebff9150719cfc4b2d8be4e3802ef0fd4f340d4b33ab78
 size 2368281769