Training in progress, step 2000
- last-checkpoint/optimizer.pt +1 -1
- last-checkpoint/pytorch_model.bin +1 -1
- last-checkpoint/rng_state.pth +1 -1
- last-checkpoint/scheduler.pt +1 -1
- last-checkpoint/trainer_state.json +1213 -5
- pytorch_model.bin +1 -1
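
All four binary files below are Git LFS pointers, so each diff touches only the three-line pointer text (spec version, content oid, byte size); the multi-GB tensors themselves live in LFS storage. Commits titled "Training in progress, step N" are what transformers Trainer emits when checkpointing with hub pushing enabled. A minimal sketch of a configuration that would produce this commit cadence; the base model, toy dataset, and most hyperparameters are placeholders, while output_dir, num_train_epochs, and the 2000-step save/eval interval are taken from the diff itself:

# Minimal sketch of a Trainer setup that auto-pushes checkpoints like this commit.
# The base model ("gpt2") and toy dataset are placeholders, not from this repo.
from datasets import Dataset
from transformers import AutoModelForCausalLM, AutoTokenizer, Trainer, TrainingArguments

tok = AutoTokenizer.from_pretrained("gpt2")
tok.pad_token = tok.eos_token                       # gpt2 has no pad token by default
model = AutoModelForCausalLM.from_pretrained("gpt2")

def encode(batch):
    enc = tok(batch["text"], truncation=True, padding="max_length", max_length=32)
    enc["labels"] = enc["input_ids"].copy()         # causal LM: labels mirror inputs
    return enc

ds = Dataset.from_dict({"text": ["hello world"] * 64}).map(encode, batched=True)

args = TrainingArguments(
    output_dir="output",             # matches "output/checkpoint-2000" in trainer_state.json
    num_train_epochs=3,              # matches "num_train_epochs": 3
    evaluation_strategy="steps",
    eval_steps=2000,                 # eval record at step 2000 below
    save_strategy="steps",
    save_steps=2000,                 # checkpoint (and push) every 2000 steps
    save_total_limit=1,              # keeps a single rolling "last-checkpoint"
    load_best_model_at_end=True,     # why best_metric / best_model_checkpoint are tracked
    push_to_hub=True,                # each save lands as "Training in progress, step N"
)

Trainer(model=model, args=args, train_dataset=ds, eval_dataset=ds).train()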
last-checkpoint/optimizer.pt
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:2840dcbe13ebc59d5938192139ea78cb7c1c1d2a91ba80b7d1dc7ea55876d800
 size 4736616809
last-checkpoint/pytorch_model.bin
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:af24d837954601d8f9729f848ea03545b9388763d421c6d2dce0a7e8ab73e67c
 size 2368281769
last-checkpoint/rng_state.pth
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:d22bcbbfffa1691c652a8c7287d2c77ac5f4da8a53b1ef656dda3633adb8346e
 size 14575
last-checkpoint/scheduler.pt
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:e13dfeda80fac3770a6173d870ebee06f4c0adc8fe9cfc980b6faa21c8de1112
 size 627
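
Each pointer's oid is simply the SHA-256 digest of the blob it stands in for, so a locally downloaded checkpoint can be checked against its pointer by hashing it. A small sketch, assuming the files sit at the paths shown in this commit:

# Verify a downloaded LFS object against the oid in its pointer file.
# Paths mirror this commit; adjust to wherever the checkout lives.
import hashlib

def sha256_of(path: str, chunk: int = 1 << 20) -> str:
    h = hashlib.sha256()
    with open(path, "rb") as f:
        while block := f.read(chunk):   # stream in 1 MiB chunks; blobs can be GB-scale
            h.update(block)
    return h.hexdigest()

expected = "e13dfeda80fac3770a6173d870ebee06f4c0adc8fe9cfc980b6faa21c8de1112"
assert sha256_of("last-checkpoint/scheduler.pt") == expected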
last-checkpoint/trainer_state.json
CHANGED
@@ -1,8 +1,8 @@
 {
-  "best_metric": 1.
-  "best_model_checkpoint": "output/checkpoint-
-  "epoch": 1.
-  "global_step":
+  "best_metric": 1.9946362972259521,
+  "best_model_checkpoint": "output/checkpoint-2000",
+  "epoch": 1.3673679768829352,
+  "global_step": 2000,
   "is_hyper_param_search": false,
   "is_local_process_zero": true,
   "is_world_process_zero": true,
@@ -10878,11 +10878,1219 @@
   "eval_samples_per_second": 6.061,
   "eval_steps_per_second": 6.061,
   "step": 1800
+  },
+  { "epoch": 1.23, "learning_rate": 0.0003705207835642618, "loss": 1.9406, "step": 1801 },
+  { "epoch": 1.23, "learning_rate": 0.00037037744863831814, "loss": 2.164, "step": 1802 },
+  { "epoch": 1.23, "learning_rate": 0.0003702341137123746, "loss": 2.0001, "step": 1803 },
+  { "epoch": 1.23, "learning_rate": 0.0003700907787864309, "loss": 1.9451, "step": 1804 },
+  { "epoch": 1.23, "learning_rate": 0.00036994744386048735, "loss": 2.003, "step": 1805 },
+  { "epoch": 1.23, "learning_rate": 0.0003698041089345437, "loss": 2.0272, "step": 1806 },
+  { "epoch": 1.24, "learning_rate": 0.00036966077400860006, "loss": 1.9824, "step": 1807 },
+  { "epoch": 1.24, "learning_rate": 0.00036951743908265644, "loss": 2.0841, "step": 1808 },
+  { "epoch": 1.24, "learning_rate": 0.00036937410415671283, "loss": 2.0329, "step": 1809 },
+  { "epoch": 1.24, "learning_rate": 0.0003692307692307692, "loss": 2.0209, "step": 1810 },
+  { "epoch": 1.24, "learning_rate": 0.0003690874343048256, "loss": 1.8782, "step": 1811 },
+  { "epoch": 1.24, "learning_rate": 0.000368944099378882, "loss": 1.9467, "step": 1812 },
+  { "epoch": 1.24, "learning_rate": 0.00036880076445293836, "loss": 1.915, "step": 1813 },
+  { "epoch": 1.24, "learning_rate": 0.0003686574295269947, "loss": 2.019, "step": 1814 },
+  { "epoch": 1.24, "learning_rate": 0.0003685140946010511, "loss": 2.0315, "step": 1815 },
+  { "epoch": 1.24, "learning_rate": 0.00036837075967510746, "loss": 2.0391, "step": 1816 },
+  { "epoch": 1.24, "learning_rate": 0.00036822742474916384, "loss": 1.9748, "step": 1817 },
+  { "epoch": 1.24, "learning_rate": 0.0003680840898232202, "loss": 2.0068, "step": 1818 },
+  { "epoch": 1.24, "learning_rate": 0.0003679407548972766, "loss": 1.9701, "step": 1819 },
+  { "epoch": 1.24, "learning_rate": 0.000367797419971333, "loss": 1.9574, "step": 1820 },
+  { "epoch": 1.24, "learning_rate": 0.0003676540850453893, "loss": 2.0035, "step": 1821 },
+  { "epoch": 1.25, "learning_rate": 0.00036751075011944576, "loss": 1.9546, "step": 1822 },
+  { "epoch": 1.25, "learning_rate": 0.0003673674151935021, "loss": 1.9779, "step": 1823 },
+  { "epoch": 1.25, "learning_rate": 0.00036722408026755853, "loss": 2.1025, "step": 1824 },
+  { "epoch": 1.25, "learning_rate": 0.00036708074534161486, "loss": 2.028, "step": 1825 },
+  { "epoch": 1.25, "learning_rate": 0.0003669374104156713, "loss": 1.9185, "step": 1826 },
+  { "epoch": 1.25, "learning_rate": 0.0003667940754897276, "loss": 1.865, "step": 1827 },
+  { "epoch": 1.25, "learning_rate": 0.000366650740563784, "loss": 1.9609, "step": 1828 },
+  { "epoch": 1.25, "learning_rate": 0.0003665074056378404, "loss": 1.9569, "step": 1829 },
+  { "epoch": 1.25, "learning_rate": 0.0003663640707118968, "loss": 2.0257, "step": 1830 },
+  { "epoch": 1.25, "learning_rate": 0.00036622073578595316, "loss": 2.0884, "step": 1831 },
+  { "epoch": 1.25, "learning_rate": 0.0003660774008600095, "loss": 2.0377, "step": 1832 },
+  { "epoch": 1.25, "learning_rate": 0.00036593406593406593, "loss": 1.9751, "step": 1833 },
+  { "epoch": 1.25, "learning_rate": 0.00036579073100812226, "loss": 2.0617, "step": 1834 },
+  { "epoch": 1.25, "learning_rate": 0.00036564739608217864, "loss": 2.0659, "step": 1835 },
+  { "epoch": 1.26, "learning_rate": 0.000365504061156235, "loss": 1.9916, "step": 1836 },
+  { "epoch": 1.26, "learning_rate": 0.0003653607262302914, "loss": 2.0324, "step": 1837 },
+  { "epoch": 1.26, "learning_rate": 0.0003652173913043478, "loss": 2.0329, "step": 1838 },
+  { "epoch": 1.26, "learning_rate": 0.0003650740563784042, "loss": 1.9504, "step": 1839 },
+  { "epoch": 1.26, "learning_rate": 0.00036493072145246056, "loss": 1.8959, "step": 1840 },
+  { "epoch": 1.26, "learning_rate": 0.00036478738652651695, "loss": 2.0087, "step": 1841 },
+  { "epoch": 1.26, "learning_rate": 0.0003646440516005733, "loss": 2.1006, "step": 1842 },
+  { "epoch": 1.26, "learning_rate": 0.0003645007166746297, "loss": 2.0928, "step": 1843 },
+  { "epoch": 1.26, "learning_rate": 0.00036435738174868604, "loss": 2.0526, "step": 1844 },
+  { "epoch": 1.26, "learning_rate": 0.0003642140468227425, "loss": 1.9657, "step": 1845 },
+  { "epoch": 1.26, "learning_rate": 0.0003640707118967988, "loss": 1.9081, "step": 1846 },
+  { "epoch": 1.26, "learning_rate": 0.00036392737697085514, "loss": 2.007, "step": 1847 },
+  { "epoch": 1.26, "learning_rate": 0.0003637840420449116, "loss": 2.0273, "step": 1848 },
+  { "epoch": 1.26, "learning_rate": 0.0003636407071189679, "loss": 1.9917, "step": 1849 },
+  { "epoch": 1.26, "learning_rate": 0.00036349737219302435, "loss": 1.9588, "step": 1850 },
+  { "epoch": 1.27, "learning_rate": 0.0003633540372670807, "loss": 1.9464, "step": 1851 },
+  { "epoch": 1.27, "learning_rate": 0.0003632107023411371, "loss": 1.9609, "step": 1852 },
+  { "epoch": 1.27, "learning_rate": 0.00036306736741519344, "loss": 1.9257, "step": 1853 },
+  { "epoch": 1.27, "learning_rate": 0.0003629240324892498, "loss": 2.0676, "step": 1854 },
+  { "epoch": 1.27, "learning_rate": 0.0003627806975633062, "loss": 2.0322, "step": 1855 },
+  { "epoch": 1.27, "learning_rate": 0.0003626373626373626, "loss": 2.023, "step": 1856 },
+  { "epoch": 1.27, "learning_rate": 0.000362494027711419, "loss": 1.9282, "step": 1857 },
+  { "epoch": 1.27, "learning_rate": 0.00036235069278547536, "loss": 2.0074, "step": 1858 },
+  { "epoch": 1.27, "learning_rate": 0.00036220735785953175, "loss": 2.037, "step": 1859 },
+  { "epoch": 1.27, "learning_rate": 0.00036206402293358813, "loss": 1.9538, "step": 1860 },
+  { "epoch": 1.27, "learning_rate": 0.00036192068800764446, "loss": 1.852, "step": 1861 },
+  { "epoch": 1.27, "learning_rate": 0.0003617773530817009, "loss": 2.0184, "step": 1862 },
+  { "epoch": 1.27, "learning_rate": 0.0003616340181557572, "loss": 2.0041, "step": 1863 },
+  { "epoch": 1.27, "learning_rate": 0.00036149068322981366, "loss": 1.988, "step": 1864 },
+  { "epoch": 1.28, "learning_rate": 0.00036134734830387, "loss": 2.0504, "step": 1865 },
+  { "epoch": 1.28, "learning_rate": 0.00036120401337792643, "loss": 2.0778, "step": 1866 },
+  { "epoch": 1.28, "learning_rate": 0.00036106067845198276, "loss": 2.066, "step": 1867 },
+  { "epoch": 1.28, "learning_rate": 0.0003609173435260391, "loss": 2.0418, "step": 1868 },
+  { "epoch": 1.28, "learning_rate": 0.00036077400860009553, "loss": 2.0154, "step": 1869 },
+  { "epoch": 1.28, "learning_rate": 0.00036063067367415186, "loss": 1.9723, "step": 1870 },
+  { "epoch": 1.28, "learning_rate": 0.0003604873387482083, "loss": 1.9196, "step": 1871 },
+  { "epoch": 1.28, "learning_rate": 0.0003603440038222646, "loss": 2.0374, "step": 1872 },
+  { "epoch": 1.28, "learning_rate": 0.00036020066889632106, "loss": 2.1146, "step": 1873 },
+  { "epoch": 1.28, "learning_rate": 0.0003600573339703774, "loss": 2.0713, "step": 1874 },
+  { "epoch": 1.28, "learning_rate": 0.0003599139990444338, "loss": 2.0524, "step": 1875 },
+  { "epoch": 1.28, "learning_rate": 0.00035977066411849016, "loss": 1.9778, "step": 1876 },
+  { "epoch": 1.28, "learning_rate": 0.00035962732919254654, "loss": 2.0138, "step": 1877 },
+  { "epoch": 1.28, "learning_rate": 0.00035948399426660293, "loss": 1.9767, "step": 1878 },
+  { "epoch": 1.28, "learning_rate": 0.0003593406593406593, "loss": 1.9536, "step": 1879 },
+  { "epoch": 1.29, "learning_rate": 0.0003591973244147157, "loss": 2.1092, "step": 1880 },
+  { "epoch": 1.29, "learning_rate": 0.0003590539894887721, "loss": 1.9906, "step": 1881 },
+  { "epoch": 1.29, "learning_rate": 0.0003589106545628284, "loss": 2.0383, "step": 1882 },
+  { "epoch": 1.29, "learning_rate": 0.00035876731963688485, "loss": 1.9626, "step": 1883 },
+  { "epoch": 1.29, "learning_rate": 0.0003586239847109412, "loss": 2.0121, "step": 1884 },
+  { "epoch": 1.29, "learning_rate": 0.0003584806497849976, "loss": 2.0007, "step": 1885 },
+  { "epoch": 1.29, "learning_rate": 0.00035833731485905394, "loss": 2.0846, "step": 1886 },
+  { "epoch": 1.29, "learning_rate": 0.0003581939799331104, "loss": 2.1493, "step": 1887 },
+  { "epoch": 1.29, "learning_rate": 0.0003580506450071667, "loss": 1.9866, "step": 1888 },
+  { "epoch": 1.29, "learning_rate": 0.00035790731008122304, "loss": 1.9876, "step": 1889 },
+  { "epoch": 1.29, "learning_rate": 0.0003577639751552795, "loss": 2.0256, "step": 1890 },
+  { "epoch": 1.29, "learning_rate": 0.0003576206402293358, "loss": 2.0551, "step": 1891 },
+  { "epoch": 1.29, "learning_rate": 0.00035747730530339225, "loss": 1.9491, "step": 1892 },
+  { "epoch": 1.29, "learning_rate": 0.0003573339703774486, "loss": 1.976, "step": 1893 },
+  { "epoch": 1.29, "learning_rate": 0.000357190635451505, "loss": 2.0242, "step": 1894 },
+  { "epoch": 1.3, "learning_rate": 0.00035704730052556134, "loss": 1.9942, "step": 1895 },
+  { "epoch": 1.3, "learning_rate": 0.00035690396559961773, "loss": 1.9462, "step": 1896 },
+  { "epoch": 1.3, "learning_rate": 0.0003567606306736741, "loss": 1.9941, "step": 1897 },
+  { "epoch": 1.3, "learning_rate": 0.0003566172957477305, "loss": 1.987, "step": 1898 },
+  { "epoch": 1.3, "learning_rate": 0.0003564739608217869, "loss": 1.9921, "step": 1899 },
+  { "epoch": 1.3, "learning_rate": 0.00035633062589584326, "loss": 1.9913, "step": 1900 },
+  { "epoch": 1.3, "learning_rate": 0.00035618729096989965, "loss": 2.0761, "step": 1901 },
+  { "epoch": 1.3, "learning_rate": 0.00035604395604395603, "loss": 2.0225, "step": 1902 },
+  { "epoch": 1.3, "learning_rate": 0.00035590062111801236, "loss": 2.1646, "step": 1903 },
+  { "epoch": 1.3, "learning_rate": 0.0003557572861920688, "loss": 1.9713, "step": 1904 },
+  { "epoch": 1.3, "learning_rate": 0.00035561395126612513, "loss": 2.0381, "step": 1905 },
+  { "epoch": 1.3, "learning_rate": 0.00035547061634018157, "loss": 2.1288, "step": 1906 },
+  { "epoch": 1.3, "learning_rate": 0.0003553272814142379, "loss": 1.9958, "step": 1907 },
+  { "epoch": 1.3, "learning_rate": 0.00035518394648829433, "loss": 2.0927, "step": 1908 },
+  { "epoch": 1.31, "learning_rate": 0.00035504061156235066, "loss": 2.0493, "step": 1909 },
+  { "epoch": 1.31, "learning_rate": 0.000354897276636407, "loss": 1.9751, "step": 1910 },
+  { "epoch": 1.31, "learning_rate": 0.00035475394171046343, "loss": 1.9947, "step": 1911 },
+  { "epoch": 1.31, "learning_rate": 0.00035461060678451976, "loss": 1.9878, "step": 1912 },
+  { "epoch": 1.31, "learning_rate": 0.0003544672718585762, "loss": 2.0322, "step": 1913 },
+  { "epoch": 1.31, "learning_rate": 0.00035432393693263253, "loss": 1.9825, "step": 1914 },
+  { "epoch": 1.31, "learning_rate": 0.00035418060200668897, "loss": 2.0174, "step": 1915 },
+  { "epoch": 1.31, "learning_rate": 0.0003540372670807453, "loss": 1.9631, "step": 1916 },
+  { "epoch": 1.31, "learning_rate": 0.0003538939321548017, "loss": 1.9678, "step": 1917 },
+  { "epoch": 1.31, "learning_rate": 0.00035375059722885806, "loss": 2.0006, "step": 1918 },
+  { "epoch": 1.31, "learning_rate": 0.00035360726230291445, "loss": 1.9347, "step": 1919 },
+  { "epoch": 1.31, "learning_rate": 0.00035346392737697083, "loss": 2.0409, "step": 1920 },
+  { "epoch": 1.31, "learning_rate": 0.0003533205924510272, "loss": 2.0187, "step": 1921 },
+  { "epoch": 1.31, "learning_rate": 0.0003531772575250836, "loss": 1.9847, "step": 1922 },
+  { "epoch": 1.31, "learning_rate": 0.00035303392259914, "loss": 2.0057, "step": 1923 },
+  { "epoch": 1.32, "learning_rate": 0.0003528905876731963, "loss": 2.0196, "step": 1924 },
+  { "epoch": 1.32, "learning_rate": 0.00035274725274725275, "loss": 1.8575, "step": 1925 },
+  { "epoch": 1.32, "learning_rate": 0.0003526039178213091, "loss": 1.9818, "step": 1926 },
+  { "epoch": 1.32, "learning_rate": 0.0003524605828953655, "loss": 2.0204, "step": 1927 },
+  { "epoch": 1.32, "learning_rate": 0.00035231724796942185, "loss": 2.0446, "step": 1928 },
+  { "epoch": 1.32, "learning_rate": 0.0003521739130434783, "loss": 2.0144, "step": 1929 },
+  { "epoch": 1.32, "learning_rate": 0.0003520305781175346, "loss": 1.9512, "step": 1930 },
+  { "epoch": 1.32, "learning_rate": 0.00035188724319159094, "loss": 1.9937, "step": 1931 },
+  { "epoch": 1.32, "learning_rate": 0.0003517439082656474, "loss": 1.9763, "step": 1932 },
+  { "epoch": 1.32, "learning_rate": 0.0003516005733397037, "loss": 2.0291, "step": 1933 },
+  { "epoch": 1.32, "learning_rate": 0.00035145723841376015, "loss": 2.0598, "step": 1934 },
+  { "epoch": 1.32, "learning_rate": 0.0003513139034878165, "loss": 2.0737, "step": 1935 },
+  { "epoch": 1.32, "learning_rate": 0.0003511705685618729, "loss": 2.1272, "step": 1936 },
+  { "epoch": 1.32, "learning_rate": 0.00035102723363592925, "loss": 2.0258, "step": 1937 },
+  { "epoch": 1.32, "learning_rate": 0.00035088389870998563, "loss": 2.0811, "step": 1938 },
+  { "epoch": 1.33, "learning_rate": 0.000350740563784042, "loss": 1.9712, "step": 1939 },
+  { "epoch": 1.33, "learning_rate": 0.0003505972288580984, "loss": 1.9419, "step": 1940 },
+  { "epoch": 1.33, "learning_rate": 0.0003504538939321548, "loss": 2.0849, "step": 1941 },
+  { "epoch": 1.33, "learning_rate": 0.00035031055900621116, "loss": 2.0123, "step": 1942 },
+  { "epoch": 1.33, "learning_rate": 0.00035016722408026755, "loss": 1.9185, "step": 1943 },
+  { "epoch": 1.33, "learning_rate": 0.00035002388915432393, "loss": 2.0759, "step": 1944 },
+  { "epoch": 1.33, "learning_rate": 0.00034988055422838026, "loss": 2.0272, "step": 1945 },
+  { "epoch": 1.33, "learning_rate": 0.0003497372193024367, "loss": 2.0296, "step": 1946 },
+  { "epoch": 1.33, "learning_rate": 0.00034959388437649303, "loss": 2.095, "step": 1947 },
+  { "epoch": 1.33, "learning_rate": 0.00034945054945054947, "loss": 2.1172, "step": 1948 },
+  { "epoch": 1.33, "learning_rate": 0.0003493072145246058, "loss": 1.9911, "step": 1949 },
+  { "epoch": 1.33, "learning_rate": 0.0003491638795986621, "loss": 1.9346, "step": 1950 },
+  { "epoch": 1.33, "learning_rate": 0.00034902054467271856, "loss": 2.0437, "step": 1951 },
+  { "epoch": 1.33, "learning_rate": 0.0003488772097467749, "loss": 1.9926, "step": 1952 },
+  { "epoch": 1.34, "learning_rate": 0.00034873387482083133, "loss": 1.8601, "step": 1953 },
+  { "epoch": 1.34, "learning_rate": 0.00034859053989488766, "loss": 2.0288, "step": 1954 },
+  { "epoch": 1.34, "learning_rate": 0.0003484472049689441, "loss": 1.9644, "step": 1955 },
+  { "epoch": 1.34, "learning_rate": 0.00034830387004300043, "loss": 2.0219, "step": 1956 },
+  { "epoch": 1.34, "learning_rate": 0.0003481605351170568, "loss": 2.0288, "step": 1957 },
+  { "epoch": 1.34, "learning_rate": 0.0003480172001911132, "loss": 2.0549, "step": 1958 },
+  { "epoch": 1.34, "learning_rate": 0.0003478738652651696, "loss": 2.0246, "step": 1959 },
+  { "epoch": 1.34, "learning_rate": 0.00034773053033922596, "loss": 2.0496, "step": 1960 },
+  { "epoch": 1.34, "learning_rate": 0.00034758719541328235, "loss": 1.9183, "step": 1961 },
+  { "epoch": 1.34, "learning_rate": 0.00034744386048733873, "loss": 2.0135, "step": 1962 },
+  { "epoch": 1.34, "learning_rate": 0.0003473005255613951, "loss": 1.9164, "step": 1963 },
+  { "epoch": 1.34, "learning_rate": 0.00034715719063545145, "loss": 2.0452, "step": 1964 },
+  { "epoch": 1.34, "learning_rate": 0.0003470138557095079, "loss": 2.1023, "step": 1965 },
+  { "epoch": 1.34, "learning_rate": 0.0003468705207835642, "loss": 1.9568, "step": 1966 },
+  { "epoch": 1.34, "learning_rate": 0.00034672718585762065, "loss": 2.0042, "step": 1967 },
+  { "epoch": 1.35, "learning_rate": 0.000346583850931677, "loss": 1.9825, "step": 1968 },
+  { "epoch": 1.35, "learning_rate": 0.0003464405160057334, "loss": 2.0362, "step": 1969 },
+  { "epoch": 1.35, "learning_rate": 0.00034629718107978975, "loss": 1.9395, "step": 1970 },
+  { "epoch": 1.35, "learning_rate": 0.0003461538461538461, "loss": 1.9586, "step": 1971 },
+  { "epoch": 1.35, "learning_rate": 0.0003460105112279025, "loss": 2.0692, "step": 1972 },
+  { "epoch": 1.35, "learning_rate": 0.00034586717630195885, "loss": 2.0178, "step": 1973 },
+  { "epoch": 1.35, "learning_rate": 0.0003457238413760153, "loss": 1.9349, "step": 1974 },
+  { "epoch": 1.35, "learning_rate": 0.0003455805064500716, "loss": 2.0868, "step": 1975 },
+  { "epoch": 1.35, "learning_rate": 0.00034543717152412805, "loss": 1.9723, "step": 1976 },
+  { "epoch": 1.35, "learning_rate": 0.0003452938365981844, "loss": 2.0752, "step": 1977 },
+  { "epoch": 1.35, "learning_rate": 0.00034515050167224076, "loss": 1.9482, "step": 1978 },
+  { "epoch": 1.35, "learning_rate": 0.00034500716674629715, "loss": 1.9869, "step": 1979 },
+  { "epoch": 1.35, "learning_rate": 0.00034486383182035353, "loss": 2.0023, "step": 1980 },
+  { "epoch": 1.35, "learning_rate": 0.0003447204968944099, "loss": 2.0806, "step": 1981 },
+  { "epoch": 1.36, "learning_rate": 0.0003445771619684663, "loss": 2.0252, "step": 1982 },
+  { "epoch": 1.36, "learning_rate": 0.0003444338270425227, "loss": 1.9967, "step": 1983 },
+  { "epoch": 1.36, "learning_rate": 0.00034429049211657907, "loss": 1.9377, "step": 1984 },
+  { "epoch": 1.36, "learning_rate": 0.0003441471571906354, "loss": 1.9739, "step": 1985 },
+  { "epoch": 1.36, "learning_rate": 0.00034400382226469183, "loss": 2.0232, "step": 1986 },
+  { "epoch": 1.36, "learning_rate": 0.00034386048733874816, "loss": 1.9893, "step": 1987 },
+  { "epoch": 1.36, "learning_rate": 0.0003437171524128046, "loss": 2.1028, "step": 1988 },
+  { "epoch": 1.36, "learning_rate": 0.00034357381748686093, "loss": 2.0027, "step": 1989 },
+  { "epoch": 1.36, "learning_rate": 0.0003434304825609173, "loss": 1.9481, "step": 1990 },
+  { "epoch": 1.36, "learning_rate": 0.0003432871476349737, "loss": 2.0907, "step": 1991 },
+  { "epoch": 1.36, "learning_rate": 0.00034314381270903003, "loss": 2.0744, "step": 1992 },
+  { "epoch": 1.36, "learning_rate": 0.00034300047778308647, "loss": 2.0442, "step": 1993 },
+  { "epoch": 1.36, "learning_rate": 0.0003428571428571428, "loss": 2.0232, "step": 1994 },
+  { "epoch": 1.36, "learning_rate": 0.00034271380793119923, "loss": 2.0709, "step": 1995 },
+  { "epoch": 1.36, "learning_rate": 0.00034257047300525556, "loss": 1.9478, "step": 1996 },
+  { "epoch": 1.37, "learning_rate": 0.000342427138079312, "loss": 1.9676, "step": 1997 },
+  { "epoch": 1.37, "learning_rate": 0.00034228380315336833, "loss": 2.1258, "step": 1998 },
+  { "epoch": 1.37, "learning_rate": 0.0003421404682274247, "loss": 1.9892, "step": 1999 },
+  { "epoch": 1.37, "learning_rate": 0.0003419971333014811, "loss": 2.0512, "step": 2000 },
+  { "epoch": 1.37, "eval_loss": 1.9946362972259521, "eval_runtime": 1647.5347, "eval_samples_per_second": 6.07, "eval_steps_per_second": 6.07, "step": 2000
   }
   ],
   "max_steps": 4386,
   "num_train_epochs": 3,
-  "total_flos": 2.
+  "total_flos": 2.4507225531653345e+18,
   "trial_name": null,
   "trial_params": null
 }
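
The bulk of this commit is the log_history array growing by the per-step records for steps 1801 through 2000 plus one eval record. The learning rate falls by a constant ~1.43e-7 per step, consistent with a linear decay schedule over the 4386 max steps. Since trainer_state.json is plain JSON, the appended curve is easy to pull out; a short sketch, assuming a local checkout at the paths shown above:

# Read the loss curve appended in this commit out of trainer_state.json.
import json

with open("last-checkpoint/trainer_state.json") as f:
    state = json.load(f)

train_logs = [e for e in state["log_history"] if "loss" in e]       # per-step training records
eval_logs  = [e for e in state["log_history"] if "eval_loss" in e]  # periodic eval records

print(state["best_metric"], state["best_model_checkpoint"])
# -> 1.9946362972259521 output/checkpoint-2000
print(train_logs[-1])
# -> {'epoch': 1.37, 'learning_rate': 0.0003419971333014811, 'loss': 2.0512, 'step': 2000}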
pytorch_model.bin
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:af24d837954601d8f9729f848ea03545b9388763d421c6d2dce0a7e8ab73e67c
 size 2368281769
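
Note that the root-level pytorch_model.bin receives the same oid as last-checkpoint/pytorch_model.bin, i.e. byte-identical weights: the step-2000 checkpoint is also promoted to the repo's current model. A one-liner sketch to materialize it from the Hub (the repo id below is a placeholder, not this repo's):

# Resolve the LFS pointer to the real ~2.4 GB weights file (repo id is a placeholder).
from huggingface_hub import hf_hub_download

path = hf_hub_download(repo_id="user/model", filename="pytorch_model.bin")
print(path)  # local cache path of the downloaded blob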