diff --git a/README.md b/README.md
index 08371015f02382e6fcba318f4aaea54ae52cd3c4..5f4f5625039e17bc7326cc10fbbc7896bb06453b 100644
--- a/README.md
+++ b/README.md
@@ -4,18 +4,6 @@ library_name: peft
## Training procedure
-The following `bitsandbytes` quantization config was used during training:
-- quant_method: bitsandbytes
-- load_in_8bit: True
-- load_in_4bit: False
-- llm_int8_threshold: 6.0
-- llm_int8_skip_modules: None
-- llm_int8_enable_fp32_cpu_offload: False
-- llm_int8_has_fp16_weight: False
-- bnb_4bit_quant_type: fp4
-- bnb_4bit_use_double_quant: False
-- bnb_4bit_compute_dtype: float32
-
The following `bitsandbytes` quantization config was used during training:
- quant_method: bitsandbytes
- load_in_8bit: True
@@ -29,6 +17,5 @@ The following `bitsandbytes` quantization config was used during training:
- bnb_4bit_compute_dtype: float32
### Framework versions
-- PEFT 0.6.0.dev0
- PEFT 0.6.0.dev0
diff --git a/adapter_model.bin b/adapter_model.bin
index 7988fe960dc88d5b5a4a5c12881b117738f8e950..a3a8a8657472f05b2cf4539ab8410b09ab16229f 100644
--- a/adapter_model.bin
+++ b/adapter_model.bin
@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
-oid sha256:af970fa4c3a5656856f672174ec36ac064378101c5eb1d725a132c5f169062a0
+oid sha256:3b30c921122a83376cb2c88c534b193ab3d6346073d4445956d15f6d88cbb610
size 39409357
diff --git a/checkpoint-100/README.md b/checkpoint-100/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..08371015f02382e6fcba318f4aaea54ae52cd3c4
--- /dev/null
+++ b/checkpoint-100/README.md
@@ -0,0 +1,34 @@
+---
+library_name: peft
+---
+## Training procedure
+
+
+The following `bitsandbytes` quantization config was used during training:
+- quant_method: bitsandbytes
+- load_in_8bit: True
+- load_in_4bit: False
+- llm_int8_threshold: 6.0
+- llm_int8_skip_modules: None
+- llm_int8_enable_fp32_cpu_offload: False
+- llm_int8_has_fp16_weight: False
+- bnb_4bit_quant_type: fp4
+- bnb_4bit_use_double_quant: False
+- bnb_4bit_compute_dtype: float32
+
+The following `bitsandbytes` quantization config was used during training:
+- quant_method: bitsandbytes
+- load_in_8bit: True
+- load_in_4bit: False
+- llm_int8_threshold: 6.0
+- llm_int8_skip_modules: None
+- llm_int8_enable_fp32_cpu_offload: False
+- llm_int8_has_fp16_weight: False
+- bnb_4bit_quant_type: fp4
+- bnb_4bit_use_double_quant: False
+- bnb_4bit_compute_dtype: float32
+### Framework versions
+
+- PEFT 0.6.0.dev0
+
+- PEFT 0.6.0.dev0
diff --git a/checkpoint-100/adapter_config.json b/checkpoint-100/adapter_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..751d838ac0c1ae5ca71ca448b25d7a8a0173f01b
--- /dev/null
+++ b/checkpoint-100/adapter_config.json
@@ -0,0 +1,23 @@
+{
+ "auto_mapping": null,
+ "base_model_name_or_path": "bigscience/bloomz-3b",
+ "bias": "none",
+ "fan_in_fan_out": false,
+ "inference_mode": true,
+ "init_lora_weights": true,
+ "layers_pattern": null,
+ "layers_to_transform": null,
+ "lora_alpha": 16,
+ "lora_dropout": 0.0,
+ "modules_to_save": null,
+ "peft_type": "LORA",
+ "r": 8,
+ "revision": null,
+ "target_modules": [
+ "dense_4h_to_h",
+ "dense",
+ "dense_h_to_4h",
+ "query_key_value"
+ ],
+ "task_type": "CAUSAL_LM"
+}
\ No newline at end of file
diff --git a/checkpoint-100/adapter_model.bin b/checkpoint-100/adapter_model.bin
new file mode 100644
index 0000000000000000000000000000000000000000..ce6fc1daf536064aeee1d21a0516da9066eed22d
--- /dev/null
+++ b/checkpoint-100/adapter_model.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6ed4e680e8bd7c8d67e0d7f4b56e9acead397bfed8e4ada826388336fbb68352
+size 39409357
diff --git a/checkpoint-100/optimizer.pt b/checkpoint-100/optimizer.pt
new file mode 100644
index 0000000000000000000000000000000000000000..3f176b46c888a37136e4f393f90265de0c1174ea
--- /dev/null
+++ b/checkpoint-100/optimizer.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9aaf758f69c081ea407315416366c593e3d483404a11cdc46f7a6bcf8426f6c7
+size 78844421
diff --git a/checkpoint-100/rng_state.pth b/checkpoint-100/rng_state.pth
new file mode 100644
index 0000000000000000000000000000000000000000..e8a62beafdd6e61c41abbc54c8b7428f4e3484c0
--- /dev/null
+++ b/checkpoint-100/rng_state.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b2278a87cdf86c3f9219223c847f6b27f6b7f15b8226b617f38936e8ff2cbcde
+size 14575
diff --git a/checkpoint-100/scheduler.pt b/checkpoint-100/scheduler.pt
new file mode 100644
index 0000000000000000000000000000000000000000..965729cbbbc80d6c8f742c79e62c2779f432711b
--- /dev/null
+++ b/checkpoint-100/scheduler.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:88d541c44943f7d01b3a3c6f9781b89528a9b1d1baccd0d6004ff689a88b2849
+size 627
diff --git a/checkpoint-100/special_tokens_map.json b/checkpoint-100/special_tokens_map.json
new file mode 100644
index 0000000000000000000000000000000000000000..fdafe480f024ff444c7492147536765ce5d55a2d
--- /dev/null
+++ b/checkpoint-100/special_tokens_map.json
@@ -0,0 +1,6 @@
+{
+ "bos_token": "",
+ "eos_token": "",
+ "pad_token": "",
+ "unk_token": ""
+}
diff --git a/checkpoint-100/tokenizer.json b/checkpoint-100/tokenizer.json
new file mode 100644
index 0000000000000000000000000000000000000000..673c31abdeadf6576c3c754df86459e1ad64e207
--- /dev/null
+++ b/checkpoint-100/tokenizer.json
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:85b00d7db4df5df2e3f01cacc3feda246002a672f3356eec7f4b04a22eb0dfbe
+size 14500570
diff --git a/checkpoint-100/tokenizer_config.json b/checkpoint-100/tokenizer_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..4b56cc9c2965c07132c35df3e2972e93d98c82c3
--- /dev/null
+++ b/checkpoint-100/tokenizer_config.json
@@ -0,0 +1,10 @@
+{
+ "add_prefix_space": false,
+ "bos_token": "",
+ "clean_up_tokenization_spaces": false,
+ "eos_token": "",
+ "model_max_length": 1000000000000000019884624838656,
+ "pad_token": "",
+ "tokenizer_class": "BloomTokenizer",
+ "unk_token": ""
+}
diff --git a/checkpoint-100/trainer_state.json b/checkpoint-100/trainer_state.json
new file mode 100644
index 0000000000000000000000000000000000000000..53b3c1d6e440b9df7714983439853a774bfb09ea
--- /dev/null
+++ b/checkpoint-100/trainer_state.json
@@ -0,0 +1,619 @@
+{
+ "best_metric": null,
+ "best_model_checkpoint": null,
+ "epoch": 0.28368794326241137,
+ "eval_steps": 500,
+ "global_step": 100,
+ "is_hyper_param_search": false,
+ "is_local_process_zero": true,
+ "is_world_process_zero": true,
+ "log_history": [
+ {
+ "epoch": 0.0,
+ "learning_rate": 0.00019981060606060605,
+ "loss": 2.9206,
+ "step": 1
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 0.00019962121212121212,
+ "loss": 2.7609,
+ "step": 2
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 0.0001994318181818182,
+ "loss": 2.6878,
+ "step": 3
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 0.00019924242424242426,
+ "loss": 2.6697,
+ "step": 4
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 0.0001990530303030303,
+ "loss": 2.5818,
+ "step": 5
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 0.00019886363636363637,
+ "loss": 2.5396,
+ "step": 6
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 0.00019867424242424244,
+ "loss": 2.5265,
+ "step": 7
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 0.0001984848484848485,
+ "loss": 2.5475,
+ "step": 8
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 0.00019829545454545455,
+ "loss": 2.4835,
+ "step": 9
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 0.0001981060606060606,
+ "loss": 2.4559,
+ "step": 10
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 0.0001979166666666667,
+ "loss": 2.4511,
+ "step": 11
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 0.00019772727272727273,
+ "loss": 2.4592,
+ "step": 12
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 0.0001975378787878788,
+ "loss": 2.4495,
+ "step": 13
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 0.00019734848484848484,
+ "loss": 2.4714,
+ "step": 14
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 0.00019715909090909094,
+ "loss": 2.4302,
+ "step": 15
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 0.00019696969696969698,
+ "loss": 2.4097,
+ "step": 16
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 0.00019678030303030305,
+ "loss": 2.4523,
+ "step": 17
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 0.0001965909090909091,
+ "loss": 2.4325,
+ "step": 18
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 0.00019640151515151516,
+ "loss": 2.4125,
+ "step": 19
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 0.00019621212121212123,
+ "loss": 2.4329,
+ "step": 20
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 0.00019602272727272727,
+ "loss": 2.3471,
+ "step": 21
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 0.00019583333333333334,
+ "loss": 2.3012,
+ "step": 22
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 0.0001956439393939394,
+ "loss": 2.3869,
+ "step": 23
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 0.00019545454545454548,
+ "loss": 2.3822,
+ "step": 24
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 0.00019526515151515152,
+ "loss": 2.3427,
+ "step": 25
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 0.0001950757575757576,
+ "loss": 2.3659,
+ "step": 26
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 0.00019488636363636366,
+ "loss": 2.3826,
+ "step": 27
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 0.0001946969696969697,
+ "loss": 2.3532,
+ "step": 28
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 0.00019450757575757577,
+ "loss": 2.3828,
+ "step": 29
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 0.0001943181818181818,
+ "loss": 2.3133,
+ "step": 30
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 0.0001941287878787879,
+ "loss": 2.3613,
+ "step": 31
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 0.00019393939393939395,
+ "loss": 2.3867,
+ "step": 32
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 0.00019375000000000002,
+ "loss": 2.2966,
+ "step": 33
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 0.00019356060606060606,
+ "loss": 2.3436,
+ "step": 34
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 0.00019337121212121213,
+ "loss": 2.3425,
+ "step": 35
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 0.0001931818181818182,
+ "loss": 2.307,
+ "step": 36
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 0.00019299242424242424,
+ "loss": 2.3521,
+ "step": 37
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 0.0001928030303030303,
+ "loss": 2.3302,
+ "step": 38
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 0.00019261363636363635,
+ "loss": 2.312,
+ "step": 39
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 0.00019242424242424245,
+ "loss": 2.3655,
+ "step": 40
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 0.0001922348484848485,
+ "loss": 2.344,
+ "step": 41
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 0.00019204545454545456,
+ "loss": 2.3373,
+ "step": 42
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 0.0001918560606060606,
+ "loss": 2.3331,
+ "step": 43
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 0.00019166666666666667,
+ "loss": 2.3376,
+ "step": 44
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 0.00019147727272727274,
+ "loss": 2.3369,
+ "step": 45
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 0.00019128787878787878,
+ "loss": 2.3413,
+ "step": 46
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 0.00019109848484848485,
+ "loss": 2.3212,
+ "step": 47
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 0.00019090909090909092,
+ "loss": 2.307,
+ "step": 48
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 0.000190719696969697,
+ "loss": 2.2929,
+ "step": 49
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 0.00019053030303030303,
+ "loss": 2.2873,
+ "step": 50
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 0.0001903409090909091,
+ "loss": 2.3098,
+ "step": 51
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 0.00019015151515151517,
+ "loss": 2.3129,
+ "step": 52
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 0.0001899621212121212,
+ "loss": 2.3038,
+ "step": 53
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 0.00018977272727272728,
+ "loss": 2.286,
+ "step": 54
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 0.00018958333333333332,
+ "loss": 2.3388,
+ "step": 55
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 0.00018939393939393942,
+ "loss": 2.3193,
+ "step": 56
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 0.00018920454545454546,
+ "loss": 2.3136,
+ "step": 57
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 0.00018901515151515153,
+ "loss": 2.3141,
+ "step": 58
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 0.00018882575757575757,
+ "loss": 2.3646,
+ "step": 59
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 0.00018863636363636364,
+ "loss": 2.3318,
+ "step": 60
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 0.0001884469696969697,
+ "loss": 2.2977,
+ "step": 61
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 0.00018825757575757575,
+ "loss": 2.2764,
+ "step": 62
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 0.00018806818181818182,
+ "loss": 2.3095,
+ "step": 63
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 0.0001878787878787879,
+ "loss": 2.252,
+ "step": 64
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 0.00018768939393939396,
+ "loss": 2.2786,
+ "step": 65
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 0.0001875,
+ "loss": 2.2789,
+ "step": 66
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 0.00018731060606060607,
+ "loss": 2.2841,
+ "step": 67
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 0.00018712121212121212,
+ "loss": 2.3436,
+ "step": 68
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 0.00018693181818181818,
+ "loss": 2.2956,
+ "step": 69
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 0.00018674242424242425,
+ "loss": 2.2353,
+ "step": 70
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 0.0001865530303030303,
+ "loss": 2.2772,
+ "step": 71
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 0.00018636363636363636,
+ "loss": 2.2496,
+ "step": 72
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 0.00018617424242424243,
+ "loss": 2.2477,
+ "step": 73
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 0.0001859848484848485,
+ "loss": 2.2791,
+ "step": 74
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 0.00018579545454545454,
+ "loss": 2.2799,
+ "step": 75
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 0.00018560606060606061,
+ "loss": 2.3132,
+ "step": 76
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 0.00018541666666666668,
+ "loss": 2.2542,
+ "step": 77
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 0.00018522727272727273,
+ "loss": 2.2609,
+ "step": 78
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 0.0001850378787878788,
+ "loss": 2.2819,
+ "step": 79
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 0.00018484848484848484,
+ "loss": 2.2844,
+ "step": 80
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 0.00018465909090909093,
+ "loss": 2.2542,
+ "step": 81
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 0.00018446969696969697,
+ "loss": 2.2603,
+ "step": 82
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 0.00018428030303030304,
+ "loss": 2.2832,
+ "step": 83
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 0.00018409090909090909,
+ "loss": 2.2869,
+ "step": 84
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 0.00018390151515151518,
+ "loss": 2.2646,
+ "step": 85
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 0.00018371212121212122,
+ "loss": 2.2698,
+ "step": 86
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 0.00018352272727272727,
+ "loss": 2.2757,
+ "step": 87
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 0.00018333333333333334,
+ "loss": 2.2544,
+ "step": 88
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 0.0001831439393939394,
+ "loss": 2.2678,
+ "step": 89
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 0.00018295454545454547,
+ "loss": 2.2778,
+ "step": 90
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 0.00018276515151515152,
+ "loss": 2.2027,
+ "step": 91
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 0.00018257575757575758,
+ "loss": 2.2167,
+ "step": 92
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 0.00018238636363636365,
+ "loss": 2.2602,
+ "step": 93
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 0.00018219696969696972,
+ "loss": 2.2736,
+ "step": 94
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 0.00018200757575757577,
+ "loss": 2.2443,
+ "step": 95
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 0.00018181818181818183,
+ "loss": 2.2299,
+ "step": 96
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 0.0001816287878787879,
+ "loss": 2.2644,
+ "step": 97
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 0.00018143939393939395,
+ "loss": 2.259,
+ "step": 98
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 0.00018125000000000001,
+ "loss": 2.2567,
+ "step": 99
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 0.00018106060606060606,
+ "loss": 2.2599,
+ "step": 100
+ }
+ ],
+ "logging_steps": 1,
+ "max_steps": 1056,
+ "num_train_epochs": 3,
+ "save_steps": 100,
+ "total_flos": 2.5310590559514624e+17,
+ "trial_name": null,
+ "trial_params": null
+}
diff --git a/checkpoint-100/training_args.bin b/checkpoint-100/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..574132c086f9a526d71493b1ec4c09396eac5482
--- /dev/null
+++ b/checkpoint-100/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:50303c7f1586feb001f01b7e968e567295f501ef6c6407e28250c458696a43af
+size 4155
diff --git a/checkpoint-1000/README.md b/checkpoint-1000/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..08371015f02382e6fcba318f4aaea54ae52cd3c4
--- /dev/null
+++ b/checkpoint-1000/README.md
@@ -0,0 +1,34 @@
+---
+library_name: peft
+---
+## Training procedure
+
+
+The following `bitsandbytes` quantization config was used during training:
+- quant_method: bitsandbytes
+- load_in_8bit: True
+- load_in_4bit: False
+- llm_int8_threshold: 6.0
+- llm_int8_skip_modules: None
+- llm_int8_enable_fp32_cpu_offload: False
+- llm_int8_has_fp16_weight: False
+- bnb_4bit_quant_type: fp4
+- bnb_4bit_use_double_quant: False
+- bnb_4bit_compute_dtype: float32
+
+The following `bitsandbytes` quantization config was used during training:
+- quant_method: bitsandbytes
+- load_in_8bit: True
+- load_in_4bit: False
+- llm_int8_threshold: 6.0
+- llm_int8_skip_modules: None
+- llm_int8_enable_fp32_cpu_offload: False
+- llm_int8_has_fp16_weight: False
+- bnb_4bit_quant_type: fp4
+- bnb_4bit_use_double_quant: False
+- bnb_4bit_compute_dtype: float32
+### Framework versions
+
+- PEFT 0.6.0.dev0
+
+- PEFT 0.6.0.dev0
diff --git a/checkpoint-1000/adapter_config.json b/checkpoint-1000/adapter_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..751d838ac0c1ae5ca71ca448b25d7a8a0173f01b
--- /dev/null
+++ b/checkpoint-1000/adapter_config.json
@@ -0,0 +1,23 @@
+{
+ "auto_mapping": null,
+ "base_model_name_or_path": "bigscience/bloomz-3b",
+ "bias": "none",
+ "fan_in_fan_out": false,
+ "inference_mode": true,
+ "init_lora_weights": true,
+ "layers_pattern": null,
+ "layers_to_transform": null,
+ "lora_alpha": 16,
+ "lora_dropout": 0.0,
+ "modules_to_save": null,
+ "peft_type": "LORA",
+ "r": 8,
+ "revision": null,
+ "target_modules": [
+ "dense_4h_to_h",
+ "dense",
+ "dense_h_to_4h",
+ "query_key_value"
+ ],
+ "task_type": "CAUSAL_LM"
+}
\ No newline at end of file
diff --git a/checkpoint-1000/adapter_model.bin b/checkpoint-1000/adapter_model.bin
new file mode 100644
index 0000000000000000000000000000000000000000..d156bc7405bc8ea3e652f0a2eb2e83beaeef3294
--- /dev/null
+++ b/checkpoint-1000/adapter_model.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:881578bad3c9206920de4787c4a9d6c8e57d77588bf0d7db0d25ed4f90f349ff
+size 39409357
diff --git a/checkpoint-1000/optimizer.pt b/checkpoint-1000/optimizer.pt
new file mode 100644
index 0000000000000000000000000000000000000000..cb89200069fa639ca592ea496f78afcfee97cb80
--- /dev/null
+++ b/checkpoint-1000/optimizer.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:88cf590bf3d40f0fa16ea88522ca56ed0dba7a8e23b2c081d85df96fc1152aed
+size 78844421
diff --git a/checkpoint-1000/rng_state.pth b/checkpoint-1000/rng_state.pth
new file mode 100644
index 0000000000000000000000000000000000000000..e0df6592e27bef84a4beb9293e7b49666a4d652e
--- /dev/null
+++ b/checkpoint-1000/rng_state.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7508d4b8dd267de5cc58e972da25236687927651336a28f292c92f7f23951475
+size 14575
diff --git a/checkpoint-1000/scheduler.pt b/checkpoint-1000/scheduler.pt
new file mode 100644
index 0000000000000000000000000000000000000000..ff056ac4913eb4b9e81cce4bbdd6efead7b10485
--- /dev/null
+++ b/checkpoint-1000/scheduler.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f23cbb71419e97e268c2c3c9dacfadbf31269deafbdadf5d827840e02e7f3fc8
+size 627
diff --git a/checkpoint-1000/special_tokens_map.json b/checkpoint-1000/special_tokens_map.json
new file mode 100644
index 0000000000000000000000000000000000000000..fdafe480f024ff444c7492147536765ce5d55a2d
--- /dev/null
+++ b/checkpoint-1000/special_tokens_map.json
@@ -0,0 +1,6 @@
+{
+ "bos_token": "",
+ "eos_token": "",
+ "pad_token": "",
+ "unk_token": ""
+}
diff --git a/checkpoint-1000/tokenizer.json b/checkpoint-1000/tokenizer.json
new file mode 100644
index 0000000000000000000000000000000000000000..673c31abdeadf6576c3c754df86459e1ad64e207
--- /dev/null
+++ b/checkpoint-1000/tokenizer.json
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:85b00d7db4df5df2e3f01cacc3feda246002a672f3356eec7f4b04a22eb0dfbe
+size 14500570
diff --git a/checkpoint-1000/tokenizer_config.json b/checkpoint-1000/tokenizer_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..4b56cc9c2965c07132c35df3e2972e93d98c82c3
--- /dev/null
+++ b/checkpoint-1000/tokenizer_config.json
@@ -0,0 +1,10 @@
+{
+ "add_prefix_space": false,
+ "bos_token": "",
+ "clean_up_tokenization_spaces": false,
+ "eos_token": "",
+ "model_max_length": 1000000000000000019884624838656,
+ "pad_token": "",
+ "tokenizer_class": "BloomTokenizer",
+ "unk_token": ""
+}
diff --git a/checkpoint-1000/trainer_state.json b/checkpoint-1000/trainer_state.json
new file mode 100644
index 0000000000000000000000000000000000000000..47af7325d9860cd20d1deffca4907fda2e04de77
--- /dev/null
+++ b/checkpoint-1000/trainer_state.json
@@ -0,0 +1,6019 @@
+{
+ "best_metric": null,
+ "best_model_checkpoint": null,
+ "epoch": 2.8368794326241136,
+ "eval_steps": 500,
+ "global_step": 1000,
+ "is_hyper_param_search": false,
+ "is_local_process_zero": true,
+ "is_world_process_zero": true,
+ "log_history": [
+ {
+ "epoch": 0.0,
+ "learning_rate": 0.00019981060606060605,
+ "loss": 2.9206,
+ "step": 1
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 0.00019962121212121212,
+ "loss": 2.7609,
+ "step": 2
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 0.0001994318181818182,
+ "loss": 2.6878,
+ "step": 3
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 0.00019924242424242426,
+ "loss": 2.6697,
+ "step": 4
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 0.0001990530303030303,
+ "loss": 2.5818,
+ "step": 5
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 0.00019886363636363637,
+ "loss": 2.5396,
+ "step": 6
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 0.00019867424242424244,
+ "loss": 2.5265,
+ "step": 7
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 0.0001984848484848485,
+ "loss": 2.5475,
+ "step": 8
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 0.00019829545454545455,
+ "loss": 2.4835,
+ "step": 9
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 0.0001981060606060606,
+ "loss": 2.4559,
+ "step": 10
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 0.0001979166666666667,
+ "loss": 2.4511,
+ "step": 11
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 0.00019772727272727273,
+ "loss": 2.4592,
+ "step": 12
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 0.0001975378787878788,
+ "loss": 2.4495,
+ "step": 13
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 0.00019734848484848484,
+ "loss": 2.4714,
+ "step": 14
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 0.00019715909090909094,
+ "loss": 2.4302,
+ "step": 15
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 0.00019696969696969698,
+ "loss": 2.4097,
+ "step": 16
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 0.00019678030303030305,
+ "loss": 2.4523,
+ "step": 17
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 0.0001965909090909091,
+ "loss": 2.4325,
+ "step": 18
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 0.00019640151515151516,
+ "loss": 2.4125,
+ "step": 19
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 0.00019621212121212123,
+ "loss": 2.4329,
+ "step": 20
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 0.00019602272727272727,
+ "loss": 2.3471,
+ "step": 21
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 0.00019583333333333334,
+ "loss": 2.3012,
+ "step": 22
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 0.0001956439393939394,
+ "loss": 2.3869,
+ "step": 23
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 0.00019545454545454548,
+ "loss": 2.3822,
+ "step": 24
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 0.00019526515151515152,
+ "loss": 2.3427,
+ "step": 25
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 0.0001950757575757576,
+ "loss": 2.3659,
+ "step": 26
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 0.00019488636363636366,
+ "loss": 2.3826,
+ "step": 27
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 0.0001946969696969697,
+ "loss": 2.3532,
+ "step": 28
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 0.00019450757575757577,
+ "loss": 2.3828,
+ "step": 29
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 0.0001943181818181818,
+ "loss": 2.3133,
+ "step": 30
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 0.0001941287878787879,
+ "loss": 2.3613,
+ "step": 31
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 0.00019393939393939395,
+ "loss": 2.3867,
+ "step": 32
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 0.00019375000000000002,
+ "loss": 2.2966,
+ "step": 33
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 0.00019356060606060606,
+ "loss": 2.3436,
+ "step": 34
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 0.00019337121212121213,
+ "loss": 2.3425,
+ "step": 35
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 0.0001931818181818182,
+ "loss": 2.307,
+ "step": 36
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 0.00019299242424242424,
+ "loss": 2.3521,
+ "step": 37
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 0.0001928030303030303,
+ "loss": 2.3302,
+ "step": 38
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 0.00019261363636363635,
+ "loss": 2.312,
+ "step": 39
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 0.00019242424242424245,
+ "loss": 2.3655,
+ "step": 40
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 0.0001922348484848485,
+ "loss": 2.344,
+ "step": 41
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 0.00019204545454545456,
+ "loss": 2.3373,
+ "step": 42
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 0.0001918560606060606,
+ "loss": 2.3331,
+ "step": 43
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 0.00019166666666666667,
+ "loss": 2.3376,
+ "step": 44
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 0.00019147727272727274,
+ "loss": 2.3369,
+ "step": 45
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 0.00019128787878787878,
+ "loss": 2.3413,
+ "step": 46
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 0.00019109848484848485,
+ "loss": 2.3212,
+ "step": 47
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 0.00019090909090909092,
+ "loss": 2.307,
+ "step": 48
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 0.000190719696969697,
+ "loss": 2.2929,
+ "step": 49
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 0.00019053030303030303,
+ "loss": 2.2873,
+ "step": 50
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 0.0001903409090909091,
+ "loss": 2.3098,
+ "step": 51
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 0.00019015151515151517,
+ "loss": 2.3129,
+ "step": 52
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 0.0001899621212121212,
+ "loss": 2.3038,
+ "step": 53
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 0.00018977272727272728,
+ "loss": 2.286,
+ "step": 54
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 0.00018958333333333332,
+ "loss": 2.3388,
+ "step": 55
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 0.00018939393939393942,
+ "loss": 2.3193,
+ "step": 56
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 0.00018920454545454546,
+ "loss": 2.3136,
+ "step": 57
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 0.00018901515151515153,
+ "loss": 2.3141,
+ "step": 58
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 0.00018882575757575757,
+ "loss": 2.3646,
+ "step": 59
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 0.00018863636363636364,
+ "loss": 2.3318,
+ "step": 60
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 0.0001884469696969697,
+ "loss": 2.2977,
+ "step": 61
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 0.00018825757575757575,
+ "loss": 2.2764,
+ "step": 62
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 0.00018806818181818182,
+ "loss": 2.3095,
+ "step": 63
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 0.0001878787878787879,
+ "loss": 2.252,
+ "step": 64
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 0.00018768939393939396,
+ "loss": 2.2786,
+ "step": 65
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 0.0001875,
+ "loss": 2.2789,
+ "step": 66
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 0.00018731060606060607,
+ "loss": 2.2841,
+ "step": 67
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 0.00018712121212121212,
+ "loss": 2.3436,
+ "step": 68
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 0.00018693181818181818,
+ "loss": 2.2956,
+ "step": 69
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 0.00018674242424242425,
+ "loss": 2.2353,
+ "step": 70
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 0.0001865530303030303,
+ "loss": 2.2772,
+ "step": 71
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 0.00018636363636363636,
+ "loss": 2.2496,
+ "step": 72
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 0.00018617424242424243,
+ "loss": 2.2477,
+ "step": 73
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 0.0001859848484848485,
+ "loss": 2.2791,
+ "step": 74
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 0.00018579545454545454,
+ "loss": 2.2799,
+ "step": 75
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 0.00018560606060606061,
+ "loss": 2.3132,
+ "step": 76
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 0.00018541666666666668,
+ "loss": 2.2542,
+ "step": 77
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 0.00018522727272727273,
+ "loss": 2.2609,
+ "step": 78
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 0.0001850378787878788,
+ "loss": 2.2819,
+ "step": 79
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 0.00018484848484848484,
+ "loss": 2.2844,
+ "step": 80
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 0.00018465909090909093,
+ "loss": 2.2542,
+ "step": 81
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 0.00018446969696969697,
+ "loss": 2.2603,
+ "step": 82
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 0.00018428030303030304,
+ "loss": 2.2832,
+ "step": 83
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 0.00018409090909090909,
+ "loss": 2.2869,
+ "step": 84
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 0.00018390151515151518,
+ "loss": 2.2646,
+ "step": 85
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 0.00018371212121212122,
+ "loss": 2.2698,
+ "step": 86
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 0.00018352272727272727,
+ "loss": 2.2757,
+ "step": 87
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 0.00018333333333333334,
+ "loss": 2.2544,
+ "step": 88
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 0.0001831439393939394,
+ "loss": 2.2678,
+ "step": 89
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 0.00018295454545454547,
+ "loss": 2.2778,
+ "step": 90
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 0.00018276515151515152,
+ "loss": 2.2027,
+ "step": 91
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 0.00018257575757575758,
+ "loss": 2.2167,
+ "step": 92
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 0.00018238636363636365,
+ "loss": 2.2602,
+ "step": 93
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 0.00018219696969696972,
+ "loss": 2.2736,
+ "step": 94
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 0.00018200757575757577,
+ "loss": 2.2443,
+ "step": 95
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 0.00018181818181818183,
+ "loss": 2.2299,
+ "step": 96
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 0.0001816287878787879,
+ "loss": 2.2644,
+ "step": 97
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 0.00018143939393939395,
+ "loss": 2.259,
+ "step": 98
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 0.00018125000000000001,
+ "loss": 2.2567,
+ "step": 99
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 0.00018106060606060606,
+ "loss": 2.2599,
+ "step": 100
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 0.00018087121212121213,
+ "loss": 2.2091,
+ "step": 101
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 0.0001806818181818182,
+ "loss": 2.2312,
+ "step": 102
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 0.00018049242424242426,
+ "loss": 2.1869,
+ "step": 103
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 0.0001803030303030303,
+ "loss": 2.2023,
+ "step": 104
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 0.00018011363636363638,
+ "loss": 2.2132,
+ "step": 105
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 0.00017992424242424244,
+ "loss": 2.2612,
+ "step": 106
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 0.0001797348484848485,
+ "loss": 2.2109,
+ "step": 107
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 0.00017954545454545456,
+ "loss": 2.215,
+ "step": 108
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 0.0001793560606060606,
+ "loss": 2.2114,
+ "step": 109
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 0.0001791666666666667,
+ "loss": 2.2203,
+ "step": 110
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 0.00017897727272727274,
+ "loss": 2.2594,
+ "step": 111
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 0.0001787878787878788,
+ "loss": 2.2001,
+ "step": 112
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 0.00017859848484848485,
+ "loss": 2.2046,
+ "step": 113
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 0.00017840909090909092,
+ "loss": 2.1907,
+ "step": 114
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 0.00017821969696969699,
+ "loss": 2.2539,
+ "step": 115
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 0.00017803030303030303,
+ "loss": 2.2335,
+ "step": 116
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 0.0001778409090909091,
+ "loss": 2.2171,
+ "step": 117
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 0.00017765151515151517,
+ "loss": 2.2278,
+ "step": 118
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 0.00017746212121212123,
+ "loss": 2.231,
+ "step": 119
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 0.00017727272727272728,
+ "loss": 2.2141,
+ "step": 120
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 0.00017708333333333335,
+ "loss": 2.2432,
+ "step": 121
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 0.00017689393939393942,
+ "loss": 2.2266,
+ "step": 122
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 0.00017670454545454546,
+ "loss": 2.1929,
+ "step": 123
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 0.00017651515151515153,
+ "loss": 2.2077,
+ "step": 124
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 0.00017632575757575757,
+ "loss": 2.2133,
+ "step": 125
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 0.00017613636363636366,
+ "loss": 2.2251,
+ "step": 126
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 0.0001759469696969697,
+ "loss": 2.2265,
+ "step": 127
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 0.00017575757575757578,
+ "loss": 2.2186,
+ "step": 128
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 0.00017556818181818182,
+ "loss": 2.1925,
+ "step": 129
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 0.0001753787878787879,
+ "loss": 2.1956,
+ "step": 130
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 0.00017518939393939396,
+ "loss": 2.2459,
+ "step": 131
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 0.000175,
+ "loss": 2.22,
+ "step": 132
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 0.00017481060606060607,
+ "loss": 2.2143,
+ "step": 133
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 0.0001746212121212121,
+ "loss": 2.2359,
+ "step": 134
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 0.0001744318181818182,
+ "loss": 2.2058,
+ "step": 135
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 0.00017424242424242425,
+ "loss": 2.2307,
+ "step": 136
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 0.00017405303030303032,
+ "loss": 2.2062,
+ "step": 137
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 0.00017386363636363636,
+ "loss": 2.1796,
+ "step": 138
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 0.00017367424242424243,
+ "loss": 2.2054,
+ "step": 139
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 0.0001734848484848485,
+ "loss": 2.1651,
+ "step": 140
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 0.00017329545454545454,
+ "loss": 2.2159,
+ "step": 141
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 0.0001731060606060606,
+ "loss": 2.1988,
+ "step": 142
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 0.00017291666666666668,
+ "loss": 2.1676,
+ "step": 143
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 0.00017272727272727275,
+ "loss": 2.1725,
+ "step": 144
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 0.0001725378787878788,
+ "loss": 2.2205,
+ "step": 145
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 0.00017234848484848486,
+ "loss": 2.1486,
+ "step": 146
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 0.00017215909090909093,
+ "loss": 2.147,
+ "step": 147
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 0.00017196969696969697,
+ "loss": 2.1651,
+ "step": 148
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 0.00017178030303030304,
+ "loss": 2.1983,
+ "step": 149
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 0.00017159090909090908,
+ "loss": 2.1778,
+ "step": 150
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 0.00017140151515151518,
+ "loss": 2.1631,
+ "step": 151
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 0.00017121212121212122,
+ "loss": 2.1442,
+ "step": 152
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 0.0001710227272727273,
+ "loss": 2.1397,
+ "step": 153
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 0.00017083333333333333,
+ "loss": 2.1697,
+ "step": 154
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 0.0001706439393939394,
+ "loss": 2.1451,
+ "step": 155
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 0.00017045454545454547,
+ "loss": 2.1789,
+ "step": 156
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 0.0001702651515151515,
+ "loss": 2.1037,
+ "step": 157
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 0.00017007575757575758,
+ "loss": 2.1698,
+ "step": 158
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 0.00016988636363636365,
+ "loss": 2.1538,
+ "step": 159
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 0.00016969696969696972,
+ "loss": 2.2015,
+ "step": 160
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 0.00016950757575757576,
+ "loss": 2.179,
+ "step": 161
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 0.00016931818181818183,
+ "loss": 2.1766,
+ "step": 162
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 0.0001691287878787879,
+ "loss": 2.1646,
+ "step": 163
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 0.00016893939393939394,
+ "loss": 2.1694,
+ "step": 164
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 0.00016875,
+ "loss": 2.1562,
+ "step": 165
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 0.00016856060606060605,
+ "loss": 2.1551,
+ "step": 166
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 0.00016837121212121212,
+ "loss": 2.1652,
+ "step": 167
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 0.0001681818181818182,
+ "loss": 2.1594,
+ "step": 168
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 0.00016799242424242426,
+ "loss": 2.1674,
+ "step": 169
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 0.0001678030303030303,
+ "loss": 2.1378,
+ "step": 170
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 0.00016761363636363637,
+ "loss": 2.1447,
+ "step": 171
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 0.00016742424242424244,
+ "loss": 2.1451,
+ "step": 172
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 0.00016723484848484848,
+ "loss": 2.1336,
+ "step": 173
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 0.00016704545454545455,
+ "loss": 2.1231,
+ "step": 174
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 0.0001668560606060606,
+ "loss": 2.1143,
+ "step": 175
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 0.0001666666666666667,
+ "loss": 2.1316,
+ "step": 176
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 0.00016647727272727273,
+ "loss": 2.1281,
+ "step": 177
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 0.0001662878787878788,
+ "loss": 2.136,
+ "step": 178
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 0.00016609848484848484,
+ "loss": 2.1279,
+ "step": 179
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 0.00016590909090909094,
+ "loss": 2.1421,
+ "step": 180
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 0.00016571969696969698,
+ "loss": 2.1541,
+ "step": 181
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 0.00016553030303030305,
+ "loss": 2.1293,
+ "step": 182
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 0.0001653409090909091,
+ "loss": 2.1294,
+ "step": 183
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 0.00016515151515151516,
+ "loss": 2.1459,
+ "step": 184
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 0.00016496212121212123,
+ "loss": 2.1113,
+ "step": 185
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 0.00016477272727272727,
+ "loss": 2.1394,
+ "step": 186
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 0.00016458333333333334,
+ "loss": 2.1321,
+ "step": 187
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 0.0001643939393939394,
+ "loss": 2.148,
+ "step": 188
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 0.00016420454545454548,
+ "loss": 2.1631,
+ "step": 189
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 0.00016401515151515152,
+ "loss": 2.1276,
+ "step": 190
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 0.0001638257575757576,
+ "loss": 2.0706,
+ "step": 191
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 0.00016363636363636366,
+ "loss": 2.127,
+ "step": 192
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 0.0001634469696969697,
+ "loss": 2.1449,
+ "step": 193
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 0.00016325757575757577,
+ "loss": 2.1204,
+ "step": 194
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 0.0001630681818181818,
+ "loss": 2.0904,
+ "step": 195
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 0.0001628787878787879,
+ "loss": 2.1129,
+ "step": 196
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 0.00016268939393939395,
+ "loss": 2.1036,
+ "step": 197
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 0.00016250000000000002,
+ "loss": 2.1509,
+ "step": 198
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 0.00016231060606060606,
+ "loss": 2.1239,
+ "step": 199
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 0.00016212121212121213,
+ "loss": 2.145,
+ "step": 200
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 0.0001619318181818182,
+ "loss": 2.1221,
+ "step": 201
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 0.00016174242424242424,
+ "loss": 2.1181,
+ "step": 202
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 0.0001615530303030303,
+ "loss": 2.1306,
+ "step": 203
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 0.00016136363636363635,
+ "loss": 2.0199,
+ "step": 204
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 0.00016117424242424245,
+ "loss": 2.1178,
+ "step": 205
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 0.0001609848484848485,
+ "loss": 2.1584,
+ "step": 206
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 0.00016079545454545456,
+ "loss": 2.0872,
+ "step": 207
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 0.0001606060606060606,
+ "loss": 2.1033,
+ "step": 208
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 0.00016041666666666667,
+ "loss": 2.1381,
+ "step": 209
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 0.00016022727272727274,
+ "loss": 2.1127,
+ "step": 210
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 0.00016003787878787878,
+ "loss": 2.1077,
+ "step": 211
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 0.00015984848484848485,
+ "loss": 2.0984,
+ "step": 212
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 0.00015965909090909092,
+ "loss": 2.0994,
+ "step": 213
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 0.000159469696969697,
+ "loss": 2.096,
+ "step": 214
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 0.00015928030303030303,
+ "loss": 2.0909,
+ "step": 215
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 0.0001590909090909091,
+ "loss": 2.118,
+ "step": 216
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 0.00015890151515151517,
+ "loss": 2.0783,
+ "step": 217
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 0.0001587121212121212,
+ "loss": 2.0876,
+ "step": 218
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 0.00015852272727272728,
+ "loss": 2.0581,
+ "step": 219
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 0.00015833333333333332,
+ "loss": 2.0548,
+ "step": 220
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 0.00015814393939393942,
+ "loss": 2.0595,
+ "step": 221
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 0.00015795454545454546,
+ "loss": 2.0719,
+ "step": 222
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 0.00015776515151515153,
+ "loss": 2.0903,
+ "step": 223
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 0.00015757575757575757,
+ "loss": 2.0941,
+ "step": 224
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 0.00015738636363636364,
+ "loss": 2.0926,
+ "step": 225
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 0.0001571969696969697,
+ "loss": 2.0816,
+ "step": 226
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 0.00015700757575757575,
+ "loss": 2.0894,
+ "step": 227
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 0.00015681818181818182,
+ "loss": 2.0798,
+ "step": 228
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 0.0001566287878787879,
+ "loss": 2.0672,
+ "step": 229
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 0.00015643939393939396,
+ "loss": 2.0787,
+ "step": 230
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 0.00015625,
+ "loss": 2.0611,
+ "step": 231
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 0.00015606060606060607,
+ "loss": 2.0805,
+ "step": 232
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 0.00015587121212121211,
+ "loss": 2.053,
+ "step": 233
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 0.00015568181818181818,
+ "loss": 2.0575,
+ "step": 234
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 0.00015549242424242425,
+ "loss": 2.0459,
+ "step": 235
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 0.0001553030303030303,
+ "loss": 2.0635,
+ "step": 236
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 0.00015511363636363636,
+ "loss": 2.0335,
+ "step": 237
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 0.00015492424242424243,
+ "loss": 2.0681,
+ "step": 238
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 0.0001547348484848485,
+ "loss": 2.0748,
+ "step": 239
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 0.00015454545454545454,
+ "loss": 2.1091,
+ "step": 240
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 0.0001543560606060606,
+ "loss": 2.0732,
+ "step": 241
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 0.00015416666666666668,
+ "loss": 2.0746,
+ "step": 242
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 0.00015397727272727272,
+ "loss": 2.0306,
+ "step": 243
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 0.0001537878787878788,
+ "loss": 2.0864,
+ "step": 244
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 0.00015359848484848484,
+ "loss": 2.0664,
+ "step": 245
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 0.00015340909090909093,
+ "loss": 2.0801,
+ "step": 246
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 0.00015321969696969697,
+ "loss": 2.0799,
+ "step": 247
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 0.00015303030303030304,
+ "loss": 2.0621,
+ "step": 248
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 0.00015284090909090909,
+ "loss": 2.0687,
+ "step": 249
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 0.00015265151515151515,
+ "loss": 2.018,
+ "step": 250
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 0.00015246212121212122,
+ "loss": 2.0256,
+ "step": 251
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 0.00015227272727272727,
+ "loss": 2.0736,
+ "step": 252
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 0.00015208333333333333,
+ "loss": 2.0609,
+ "step": 253
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 0.0001518939393939394,
+ "loss": 2.0539,
+ "step": 254
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 0.00015170454545454547,
+ "loss": 2.0282,
+ "step": 255
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 0.00015151515151515152,
+ "loss": 2.0417,
+ "step": 256
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 0.00015132575757575758,
+ "loss": 2.0333,
+ "step": 257
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 0.00015113636363636365,
+ "loss": 2.0428,
+ "step": 258
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 0.00015094696969696972,
+ "loss": 2.045,
+ "step": 259
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 0.00015075757575757576,
+ "loss": 2.0463,
+ "step": 260
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 0.0001505681818181818,
+ "loss": 2.0539,
+ "step": 261
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 0.0001503787878787879,
+ "loss": 2.0184,
+ "step": 262
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 0.00015018939393939394,
+ "loss": 2.0858,
+ "step": 263
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 0.00015000000000000001,
+ "loss": 2.0239,
+ "step": 264
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 0.00014981060606060606,
+ "loss": 2.0425,
+ "step": 265
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 0.00014962121212121213,
+ "loss": 2.0263,
+ "step": 266
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 0.0001494318181818182,
+ "loss": 2.042,
+ "step": 267
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 0.00014924242424242426,
+ "loss": 2.026,
+ "step": 268
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 0.0001490530303030303,
+ "loss": 2.0411,
+ "step": 269
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 0.00014886363636363635,
+ "loss": 2.028,
+ "step": 270
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 0.00014867424242424244,
+ "loss": 2.0172,
+ "step": 271
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 0.00014848484848484849,
+ "loss": 2.0196,
+ "step": 272
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 0.00014829545454545455,
+ "loss": 2.0142,
+ "step": 273
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 0.0001481060606060606,
+ "loss": 2.0265,
+ "step": 274
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 0.0001479166666666667,
+ "loss": 2.0353,
+ "step": 275
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 0.00014772727272727274,
+ "loss": 2.0327,
+ "step": 276
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 0.0001475378787878788,
+ "loss": 2.0188,
+ "step": 277
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 0.00014734848484848485,
+ "loss": 1.9987,
+ "step": 278
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 0.00014715909090909092,
+ "loss": 2.0141,
+ "step": 279
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 0.00014696969696969698,
+ "loss": 2.0403,
+ "step": 280
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 0.00014678030303030303,
+ "loss": 1.9977,
+ "step": 281
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 0.0001465909090909091,
+ "loss": 1.9674,
+ "step": 282
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 0.00014640151515151517,
+ "loss": 1.9984,
+ "step": 283
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 0.00014621212121212123,
+ "loss": 1.9796,
+ "step": 284
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 0.00014602272727272728,
+ "loss": 2.0139,
+ "step": 285
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 0.00014583333333333335,
+ "loss": 1.9866,
+ "step": 286
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 0.00014564393939393941,
+ "loss": 2.0208,
+ "step": 287
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 0.00014545454545454546,
+ "loss": 1.9844,
+ "step": 288
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 0.00014526515151515153,
+ "loss": 2.0082,
+ "step": 289
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 0.00014507575757575757,
+ "loss": 1.984,
+ "step": 290
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 0.00014488636363636366,
+ "loss": 2.0015,
+ "step": 291
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 0.0001446969696969697,
+ "loss": 2.0209,
+ "step": 292
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 0.00014450757575757578,
+ "loss": 1.9728,
+ "step": 293
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 0.00014431818181818182,
+ "loss": 2.0032,
+ "step": 294
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 0.00014412878787878789,
+ "loss": 1.9641,
+ "step": 295
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 0.00014393939393939396,
+ "loss": 1.9945,
+ "step": 296
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 0.00014375,
+ "loss": 1.9658,
+ "step": 297
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 0.00014356060606060607,
+ "loss": 1.9907,
+ "step": 298
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 0.0001433712121212121,
+ "loss": 1.9935,
+ "step": 299
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 0.0001431818181818182,
+ "loss": 1.9897,
+ "step": 300
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 0.00014299242424242425,
+ "loss": 1.984,
+ "step": 301
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 0.00014280303030303032,
+ "loss": 1.9581,
+ "step": 302
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 0.00014261363636363636,
+ "loss": 1.9893,
+ "step": 303
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 0.00014242424242424243,
+ "loss": 1.9568,
+ "step": 304
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 0.0001422348484848485,
+ "loss": 1.98,
+ "step": 305
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 0.00014204545454545454,
+ "loss": 1.9519,
+ "step": 306
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 0.0001418560606060606,
+ "loss": 1.9693,
+ "step": 307
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 0.00014166666666666668,
+ "loss": 1.9866,
+ "step": 308
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 0.00014147727272727275,
+ "loss": 1.9508,
+ "step": 309
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 0.0001412878787878788,
+ "loss": 1.9653,
+ "step": 310
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 0.00014109848484848486,
+ "loss": 1.9991,
+ "step": 311
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 0.00014090909090909093,
+ "loss": 1.9442,
+ "step": 312
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 0.00014071969696969697,
+ "loss": 1.9807,
+ "step": 313
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 0.00014053030303030304,
+ "loss": 1.9958,
+ "step": 314
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 0.00014034090909090908,
+ "loss": 1.9459,
+ "step": 315
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 0.00014015151515151518,
+ "loss": 1.9508,
+ "step": 316
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 0.00013996212121212122,
+ "loss": 1.9933,
+ "step": 317
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 0.0001397727272727273,
+ "loss": 1.9703,
+ "step": 318
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 0.00013958333333333333,
+ "loss": 1.965,
+ "step": 319
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 0.0001393939393939394,
+ "loss": 1.9264,
+ "step": 320
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 0.00013920454545454547,
+ "loss": 1.9688,
+ "step": 321
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 0.0001390151515151515,
+ "loss": 1.9901,
+ "step": 322
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 0.00013882575757575758,
+ "loss": 1.9363,
+ "step": 323
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 0.00013863636363636365,
+ "loss": 1.9269,
+ "step": 324
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 0.00013844696969696972,
+ "loss": 1.9688,
+ "step": 325
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 0.00013825757575757576,
+ "loss": 1.9758,
+ "step": 326
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 0.00013806818181818183,
+ "loss": 1.9414,
+ "step": 327
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 0.0001378787878787879,
+ "loss": 1.9397,
+ "step": 328
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 0.00013768939393939394,
+ "loss": 1.9032,
+ "step": 329
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 0.0001375,
+ "loss": 1.9777,
+ "step": 330
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 0.00013731060606060605,
+ "loss": 1.9173,
+ "step": 331
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 0.00013712121212121212,
+ "loss": 1.9307,
+ "step": 332
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 0.0001369318181818182,
+ "loss": 1.9611,
+ "step": 333
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 0.00013674242424242426,
+ "loss": 1.9698,
+ "step": 334
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 0.0001365530303030303,
+ "loss": 1.9619,
+ "step": 335
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 0.00013636363636363637,
+ "loss": 1.9322,
+ "step": 336
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 0.00013617424242424244,
+ "loss": 1.9441,
+ "step": 337
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 0.00013598484848484848,
+ "loss": 1.9563,
+ "step": 338
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 0.00013579545454545455,
+ "loss": 1.9283,
+ "step": 339
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 0.0001356060606060606,
+ "loss": 1.9508,
+ "step": 340
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 0.0001354166666666667,
+ "loss": 1.9285,
+ "step": 341
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 0.00013522727272727273,
+ "loss": 1.9295,
+ "step": 342
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 0.0001350378787878788,
+ "loss": 1.9272,
+ "step": 343
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 0.00013484848484848484,
+ "loss": 1.905,
+ "step": 344
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 0.00013465909090909094,
+ "loss": 1.9409,
+ "step": 345
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 0.00013446969696969698,
+ "loss": 1.9674,
+ "step": 346
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 0.00013428030303030302,
+ "loss": 1.9278,
+ "step": 347
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 0.0001340909090909091,
+ "loss": 1.9136,
+ "step": 348
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 0.00013390151515151516,
+ "loss": 1.9143,
+ "step": 349
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 0.00013371212121212123,
+ "loss": 1.9381,
+ "step": 350
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 0.00013352272727272727,
+ "loss": 1.9136,
+ "step": 351
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 0.00013333333333333334,
+ "loss": 1.9103,
+ "step": 352
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 0.0001331439393939394,
+ "loss": 1.9027,
+ "step": 353
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 0.00013295454545454548,
+ "loss": 1.8674,
+ "step": 354
+ },
+ {
+ "epoch": 1.01,
+ "learning_rate": 0.00013276515151515152,
+ "loss": 1.886,
+ "step": 355
+ },
+ {
+ "epoch": 1.01,
+ "learning_rate": 0.00013257575757575756,
+ "loss": 1.887,
+ "step": 356
+ },
+ {
+ "epoch": 1.01,
+ "learning_rate": 0.00013238636363636366,
+ "loss": 1.87,
+ "step": 357
+ },
+ {
+ "epoch": 1.02,
+ "learning_rate": 0.0001321969696969697,
+ "loss": 1.8715,
+ "step": 358
+ },
+ {
+ "epoch": 1.02,
+ "learning_rate": 0.00013200757575757577,
+ "loss": 1.8993,
+ "step": 359
+ },
+ {
+ "epoch": 1.02,
+ "learning_rate": 0.0001318181818181818,
+ "loss": 1.8844,
+ "step": 360
+ },
+ {
+ "epoch": 1.02,
+ "learning_rate": 0.0001316287878787879,
+ "loss": 1.8965,
+ "step": 361
+ },
+ {
+ "epoch": 1.03,
+ "learning_rate": 0.00013143939393939395,
+ "loss": 1.8956,
+ "step": 362
+ },
+ {
+ "epoch": 1.03,
+ "learning_rate": 0.00013125000000000002,
+ "loss": 1.869,
+ "step": 363
+ },
+ {
+ "epoch": 1.03,
+ "learning_rate": 0.00013106060606060606,
+ "loss": 1.8702,
+ "step": 364
+ },
+ {
+ "epoch": 1.04,
+ "learning_rate": 0.00013087121212121213,
+ "loss": 1.8962,
+ "step": 365
+ },
+ {
+ "epoch": 1.04,
+ "learning_rate": 0.0001306818181818182,
+ "loss": 1.8613,
+ "step": 366
+ },
+ {
+ "epoch": 1.04,
+ "learning_rate": 0.00013049242424242424,
+ "loss": 1.8845,
+ "step": 367
+ },
+ {
+ "epoch": 1.04,
+ "learning_rate": 0.0001303030303030303,
+ "loss": 1.8689,
+ "step": 368
+ },
+ {
+ "epoch": 1.05,
+ "learning_rate": 0.00013011363636363635,
+ "loss": 1.9059,
+ "step": 369
+ },
+ {
+ "epoch": 1.05,
+ "learning_rate": 0.00012992424242424245,
+ "loss": 1.9082,
+ "step": 370
+ },
+ {
+ "epoch": 1.05,
+ "learning_rate": 0.0001297348484848485,
+ "loss": 1.8918,
+ "step": 371
+ },
+ {
+ "epoch": 1.06,
+ "learning_rate": 0.00012954545454545456,
+ "loss": 1.8657,
+ "step": 372
+ },
+ {
+ "epoch": 1.06,
+ "learning_rate": 0.0001293560606060606,
+ "loss": 1.8909,
+ "step": 373
+ },
+ {
+ "epoch": 1.06,
+ "learning_rate": 0.00012916666666666667,
+ "loss": 1.8649,
+ "step": 374
+ },
+ {
+ "epoch": 1.06,
+ "learning_rate": 0.00012897727272727274,
+ "loss": 1.833,
+ "step": 375
+ },
+ {
+ "epoch": 1.07,
+ "learning_rate": 0.00012878787878787878,
+ "loss": 1.8815,
+ "step": 376
+ },
+ {
+ "epoch": 1.07,
+ "learning_rate": 0.00012859848484848485,
+ "loss": 1.8646,
+ "step": 377
+ },
+ {
+ "epoch": 1.07,
+ "learning_rate": 0.00012840909090909092,
+ "loss": 1.846,
+ "step": 378
+ },
+ {
+ "epoch": 1.08,
+ "learning_rate": 0.000128219696969697,
+ "loss": 1.8631,
+ "step": 379
+ },
+ {
+ "epoch": 1.08,
+ "learning_rate": 0.00012803030303030303,
+ "loss": 1.917,
+ "step": 380
+ },
+ {
+ "epoch": 1.08,
+ "learning_rate": 0.0001278409090909091,
+ "loss": 1.9068,
+ "step": 381
+ },
+ {
+ "epoch": 1.08,
+ "learning_rate": 0.00012765151515151517,
+ "loss": 1.8772,
+ "step": 382
+ },
+ {
+ "epoch": 1.09,
+ "learning_rate": 0.0001274621212121212,
+ "loss": 1.8414,
+ "step": 383
+ },
+ {
+ "epoch": 1.09,
+ "learning_rate": 0.00012727272727272728,
+ "loss": 1.9003,
+ "step": 384
+ },
+ {
+ "epoch": 1.09,
+ "learning_rate": 0.00012708333333333332,
+ "loss": 1.8415,
+ "step": 385
+ },
+ {
+ "epoch": 1.1,
+ "learning_rate": 0.00012689393939393942,
+ "loss": 1.8491,
+ "step": 386
+ },
+ {
+ "epoch": 1.1,
+ "learning_rate": 0.00012670454545454546,
+ "loss": 1.8875,
+ "step": 387
+ },
+ {
+ "epoch": 1.1,
+ "learning_rate": 0.00012651515151515153,
+ "loss": 1.8629,
+ "step": 388
+ },
+ {
+ "epoch": 1.1,
+ "learning_rate": 0.00012632575757575757,
+ "loss": 1.8378,
+ "step": 389
+ },
+ {
+ "epoch": 1.11,
+ "learning_rate": 0.00012613636363636364,
+ "loss": 1.8442,
+ "step": 390
+ },
+ {
+ "epoch": 1.11,
+ "learning_rate": 0.0001259469696969697,
+ "loss": 1.8587,
+ "step": 391
+ },
+ {
+ "epoch": 1.11,
+ "learning_rate": 0.00012575757575757575,
+ "loss": 1.8659,
+ "step": 392
+ },
+ {
+ "epoch": 1.11,
+ "learning_rate": 0.00012556818181818182,
+ "loss": 1.8271,
+ "step": 393
+ },
+ {
+ "epoch": 1.12,
+ "learning_rate": 0.0001253787878787879,
+ "loss": 1.8692,
+ "step": 394
+ },
+ {
+ "epoch": 1.12,
+ "learning_rate": 0.00012518939393939396,
+ "loss": 1.8071,
+ "step": 395
+ },
+ {
+ "epoch": 1.12,
+ "learning_rate": 0.000125,
+ "loss": 1.8564,
+ "step": 396
+ },
+ {
+ "epoch": 1.13,
+ "learning_rate": 0.00012481060606060607,
+ "loss": 1.8891,
+ "step": 397
+ },
+ {
+ "epoch": 1.13,
+ "learning_rate": 0.00012462121212121211,
+ "loss": 1.8173,
+ "step": 398
+ },
+ {
+ "epoch": 1.13,
+ "learning_rate": 0.00012443181818181818,
+ "loss": 1.8653,
+ "step": 399
+ },
+ {
+ "epoch": 1.13,
+ "learning_rate": 0.00012424242424242425,
+ "loss": 1.8843,
+ "step": 400
+ },
+ {
+ "epoch": 1.14,
+ "learning_rate": 0.0001240530303030303,
+ "loss": 1.8527,
+ "step": 401
+ },
+ {
+ "epoch": 1.14,
+ "learning_rate": 0.00012386363636363636,
+ "loss": 1.8352,
+ "step": 402
+ },
+ {
+ "epoch": 1.14,
+ "learning_rate": 0.00012367424242424243,
+ "loss": 1.866,
+ "step": 403
+ },
+ {
+ "epoch": 1.15,
+ "learning_rate": 0.0001234848484848485,
+ "loss": 1.8557,
+ "step": 404
+ },
+ {
+ "epoch": 1.15,
+ "learning_rate": 0.00012329545454545454,
+ "loss": 1.8284,
+ "step": 405
+ },
+ {
+ "epoch": 1.15,
+ "learning_rate": 0.0001231060606060606,
+ "loss": 1.8359,
+ "step": 406
+ },
+ {
+ "epoch": 1.15,
+ "learning_rate": 0.00012291666666666668,
+ "loss": 1.8437,
+ "step": 407
+ },
+ {
+ "epoch": 1.16,
+ "learning_rate": 0.00012272727272727272,
+ "loss": 1.8256,
+ "step": 408
+ },
+ {
+ "epoch": 1.16,
+ "learning_rate": 0.0001225378787878788,
+ "loss": 1.8297,
+ "step": 409
+ },
+ {
+ "epoch": 1.16,
+ "learning_rate": 0.00012234848484848484,
+ "loss": 1.8515,
+ "step": 410
+ },
+ {
+ "epoch": 1.17,
+ "learning_rate": 0.00012215909090909093,
+ "loss": 1.8198,
+ "step": 411
+ },
+ {
+ "epoch": 1.17,
+ "learning_rate": 0.00012196969696969697,
+ "loss": 1.7809,
+ "step": 412
+ },
+ {
+ "epoch": 1.17,
+ "learning_rate": 0.00012178030303030303,
+ "loss": 1.8438,
+ "step": 413
+ },
+ {
+ "epoch": 1.17,
+ "learning_rate": 0.00012159090909090908,
+ "loss": 1.8497,
+ "step": 414
+ },
+ {
+ "epoch": 1.18,
+ "learning_rate": 0.00012140151515151517,
+ "loss": 1.8463,
+ "step": 415
+ },
+ {
+ "epoch": 1.18,
+ "learning_rate": 0.00012121212121212122,
+ "loss": 1.7768,
+ "step": 416
+ },
+ {
+ "epoch": 1.18,
+ "learning_rate": 0.00012102272727272728,
+ "loss": 1.8561,
+ "step": 417
+ },
+ {
+ "epoch": 1.19,
+ "learning_rate": 0.00012083333333333333,
+ "loss": 1.863,
+ "step": 418
+ },
+ {
+ "epoch": 1.19,
+ "learning_rate": 0.0001206439393939394,
+ "loss": 1.8193,
+ "step": 419
+ },
+ {
+ "epoch": 1.19,
+ "learning_rate": 0.00012045454545454546,
+ "loss": 1.7732,
+ "step": 420
+ },
+ {
+ "epoch": 1.19,
+ "learning_rate": 0.00012026515151515151,
+ "loss": 1.7728,
+ "step": 421
+ },
+ {
+ "epoch": 1.2,
+ "learning_rate": 0.00012007575757575757,
+ "loss": 1.8113,
+ "step": 422
+ },
+ {
+ "epoch": 1.2,
+ "learning_rate": 0.00011988636363636365,
+ "loss": 1.7976,
+ "step": 423
+ },
+ {
+ "epoch": 1.2,
+ "learning_rate": 0.00011969696969696971,
+ "loss": 1.786,
+ "step": 424
+ },
+ {
+ "epoch": 1.21,
+ "learning_rate": 0.00011950757575757576,
+ "loss": 1.8019,
+ "step": 425
+ },
+ {
+ "epoch": 1.21,
+ "learning_rate": 0.00011931818181818182,
+ "loss": 1.786,
+ "step": 426
+ },
+ {
+ "epoch": 1.21,
+ "learning_rate": 0.00011912878787878789,
+ "loss": 1.8102,
+ "step": 427
+ },
+ {
+ "epoch": 1.21,
+ "learning_rate": 0.00011893939393939394,
+ "loss": 1.7828,
+ "step": 428
+ },
+ {
+ "epoch": 1.22,
+ "learning_rate": 0.00011875,
+ "loss": 1.8498,
+ "step": 429
+ },
+ {
+ "epoch": 1.22,
+ "learning_rate": 0.00011856060606060606,
+ "loss": 1.7983,
+ "step": 430
+ },
+ {
+ "epoch": 1.22,
+ "learning_rate": 0.00011837121212121211,
+ "loss": 1.7863,
+ "step": 431
+ },
+ {
+ "epoch": 1.23,
+ "learning_rate": 0.0001181818181818182,
+ "loss": 1.8171,
+ "step": 432
+ },
+ {
+ "epoch": 1.23,
+ "learning_rate": 0.00011799242424242425,
+ "loss": 1.8143,
+ "step": 433
+ },
+ {
+ "epoch": 1.23,
+ "learning_rate": 0.0001178030303030303,
+ "loss": 1.7815,
+ "step": 434
+ },
+ {
+ "epoch": 1.23,
+ "learning_rate": 0.00011761363636363636,
+ "loss": 1.7652,
+ "step": 435
+ },
+ {
+ "epoch": 1.24,
+ "learning_rate": 0.00011742424242424244,
+ "loss": 1.8242,
+ "step": 436
+ },
+ {
+ "epoch": 1.24,
+ "learning_rate": 0.00011723484848484849,
+ "loss": 1.7789,
+ "step": 437
+ },
+ {
+ "epoch": 1.24,
+ "learning_rate": 0.00011704545454545454,
+ "loss": 1.7549,
+ "step": 438
+ },
+ {
+ "epoch": 1.25,
+ "learning_rate": 0.0001168560606060606,
+ "loss": 1.7528,
+ "step": 439
+ },
+ {
+ "epoch": 1.25,
+ "learning_rate": 0.00011666666666666668,
+ "loss": 1.7443,
+ "step": 440
+ },
+ {
+ "epoch": 1.25,
+ "learning_rate": 0.00011647727272727273,
+ "loss": 1.7911,
+ "step": 441
+ },
+ {
+ "epoch": 1.25,
+ "learning_rate": 0.00011628787878787879,
+ "loss": 1.7848,
+ "step": 442
+ },
+ {
+ "epoch": 1.26,
+ "learning_rate": 0.00011609848484848485,
+ "loss": 1.8137,
+ "step": 443
+ },
+ {
+ "epoch": 1.26,
+ "learning_rate": 0.00011590909090909093,
+ "loss": 1.791,
+ "step": 444
+ },
+ {
+ "epoch": 1.26,
+ "learning_rate": 0.00011571969696969698,
+ "loss": 1.7921,
+ "step": 445
+ },
+ {
+ "epoch": 1.27,
+ "learning_rate": 0.00011553030303030304,
+ "loss": 1.772,
+ "step": 446
+ },
+ {
+ "epoch": 1.27,
+ "learning_rate": 0.00011534090909090908,
+ "loss": 1.776,
+ "step": 447
+ },
+ {
+ "epoch": 1.27,
+ "learning_rate": 0.00011515151515151516,
+ "loss": 1.7948,
+ "step": 448
+ },
+ {
+ "epoch": 1.27,
+ "learning_rate": 0.00011496212121212122,
+ "loss": 1.8187,
+ "step": 449
+ },
+ {
+ "epoch": 1.28,
+ "learning_rate": 0.00011477272727272728,
+ "loss": 1.7436,
+ "step": 450
+ },
+ {
+ "epoch": 1.28,
+ "learning_rate": 0.00011458333333333333,
+ "loss": 1.7326,
+ "step": 451
+ },
+ {
+ "epoch": 1.28,
+ "learning_rate": 0.00011439393939393941,
+ "loss": 1.8005,
+ "step": 452
+ },
+ {
+ "epoch": 1.29,
+ "learning_rate": 0.00011420454545454547,
+ "loss": 1.8088,
+ "step": 453
+ },
+ {
+ "epoch": 1.29,
+ "learning_rate": 0.00011401515151515153,
+ "loss": 1.7632,
+ "step": 454
+ },
+ {
+ "epoch": 1.29,
+ "learning_rate": 0.00011382575757575758,
+ "loss": 1.7848,
+ "step": 455
+ },
+ {
+ "epoch": 1.29,
+ "learning_rate": 0.00011363636363636365,
+ "loss": 1.7756,
+ "step": 456
+ },
+ {
+ "epoch": 1.3,
+ "learning_rate": 0.0001134469696969697,
+ "loss": 1.7964,
+ "step": 457
+ },
+ {
+ "epoch": 1.3,
+ "learning_rate": 0.00011325757575757576,
+ "loss": 1.7604,
+ "step": 458
+ },
+ {
+ "epoch": 1.3,
+ "learning_rate": 0.00011306818181818182,
+ "loss": 1.7914,
+ "step": 459
+ },
+ {
+ "epoch": 1.3,
+ "learning_rate": 0.0001128787878787879,
+ "loss": 1.8059,
+ "step": 460
+ },
+ {
+ "epoch": 1.31,
+ "learning_rate": 0.00011268939393939395,
+ "loss": 1.7647,
+ "step": 461
+ },
+ {
+ "epoch": 1.31,
+ "learning_rate": 0.00011250000000000001,
+ "loss": 1.7526,
+ "step": 462
+ },
+ {
+ "epoch": 1.31,
+ "learning_rate": 0.00011231060606060607,
+ "loss": 1.7736,
+ "step": 463
+ },
+ {
+ "epoch": 1.32,
+ "learning_rate": 0.00011212121212121212,
+ "loss": 1.7449,
+ "step": 464
+ },
+ {
+ "epoch": 1.32,
+ "learning_rate": 0.00011193181818181819,
+ "loss": 1.7636,
+ "step": 465
+ },
+ {
+ "epoch": 1.32,
+ "learning_rate": 0.00011174242424242425,
+ "loss": 1.7846,
+ "step": 466
+ },
+ {
+ "epoch": 1.32,
+ "learning_rate": 0.0001115530303030303,
+ "loss": 1.78,
+ "step": 467
+ },
+ {
+ "epoch": 1.33,
+ "learning_rate": 0.00011136363636363636,
+ "loss": 1.7828,
+ "step": 468
+ },
+ {
+ "epoch": 1.33,
+ "learning_rate": 0.00011117424242424244,
+ "loss": 1.729,
+ "step": 469
+ },
+ {
+ "epoch": 1.33,
+ "learning_rate": 0.0001109848484848485,
+ "loss": 1.7145,
+ "step": 470
+ },
+ {
+ "epoch": 1.34,
+ "learning_rate": 0.00011079545454545455,
+ "loss": 1.7189,
+ "step": 471
+ },
+ {
+ "epoch": 1.34,
+ "learning_rate": 0.00011060606060606061,
+ "loss": 1.7628,
+ "step": 472
+ },
+ {
+ "epoch": 1.34,
+ "learning_rate": 0.00011041666666666668,
+ "loss": 1.7399,
+ "step": 473
+ },
+ {
+ "epoch": 1.34,
+ "learning_rate": 0.00011022727272727273,
+ "loss": 1.7561,
+ "step": 474
+ },
+ {
+ "epoch": 1.35,
+ "learning_rate": 0.00011003787878787879,
+ "loss": 1.7979,
+ "step": 475
+ },
+ {
+ "epoch": 1.35,
+ "learning_rate": 0.00010984848484848484,
+ "loss": 1.7673,
+ "step": 476
+ },
+ {
+ "epoch": 1.35,
+ "learning_rate": 0.00010965909090909093,
+ "loss": 1.777,
+ "step": 477
+ },
+ {
+ "epoch": 1.36,
+ "learning_rate": 0.00010946969696969698,
+ "loss": 1.7042,
+ "step": 478
+ },
+ {
+ "epoch": 1.36,
+ "learning_rate": 0.00010928030303030304,
+ "loss": 1.7764,
+ "step": 479
+ },
+ {
+ "epoch": 1.36,
+ "learning_rate": 0.00010909090909090909,
+ "loss": 1.6993,
+ "step": 480
+ },
+ {
+ "epoch": 1.36,
+ "learning_rate": 0.00010890151515151516,
+ "loss": 1.7688,
+ "step": 481
+ },
+ {
+ "epoch": 1.37,
+ "learning_rate": 0.00010871212121212122,
+ "loss": 1.7428,
+ "step": 482
+ },
+ {
+ "epoch": 1.37,
+ "learning_rate": 0.00010852272727272727,
+ "loss": 1.675,
+ "step": 483
+ },
+ {
+ "epoch": 1.37,
+ "learning_rate": 0.00010833333333333333,
+ "loss": 1.7183,
+ "step": 484
+ },
+ {
+ "epoch": 1.38,
+ "learning_rate": 0.00010814393939393941,
+ "loss": 1.7305,
+ "step": 485
+ },
+ {
+ "epoch": 1.38,
+ "learning_rate": 0.00010795454545454547,
+ "loss": 1.7541,
+ "step": 486
+ },
+ {
+ "epoch": 1.38,
+ "learning_rate": 0.00010776515151515152,
+ "loss": 1.7074,
+ "step": 487
+ },
+ {
+ "epoch": 1.38,
+ "learning_rate": 0.00010757575757575758,
+ "loss": 1.7093,
+ "step": 488
+ },
+ {
+ "epoch": 1.39,
+ "learning_rate": 0.00010738636363636365,
+ "loss": 1.7354,
+ "step": 489
+ },
+ {
+ "epoch": 1.39,
+ "learning_rate": 0.0001071969696969697,
+ "loss": 1.7415,
+ "step": 490
+ },
+ {
+ "epoch": 1.39,
+ "learning_rate": 0.00010700757575757576,
+ "loss": 1.72,
+ "step": 491
+ },
+ {
+ "epoch": 1.4,
+ "learning_rate": 0.00010681818181818181,
+ "loss": 1.7453,
+ "step": 492
+ },
+ {
+ "epoch": 1.4,
+ "learning_rate": 0.0001066287878787879,
+ "loss": 1.7077,
+ "step": 493
+ },
+ {
+ "epoch": 1.4,
+ "learning_rate": 0.00010643939393939395,
+ "loss": 1.6936,
+ "step": 494
+ },
+ {
+ "epoch": 1.4,
+ "learning_rate": 0.00010625000000000001,
+ "loss": 1.7616,
+ "step": 495
+ },
+ {
+ "epoch": 1.41,
+ "learning_rate": 0.00010606060606060606,
+ "loss": 1.7749,
+ "step": 496
+ },
+ {
+ "epoch": 1.41,
+ "learning_rate": 0.00010587121212121212,
+ "loss": 1.7375,
+ "step": 497
+ },
+ {
+ "epoch": 1.41,
+ "learning_rate": 0.00010568181818181819,
+ "loss": 1.7203,
+ "step": 498
+ },
+ {
+ "epoch": 1.42,
+ "learning_rate": 0.00010549242424242424,
+ "loss": 1.7148,
+ "step": 499
+ },
+ {
+ "epoch": 1.42,
+ "learning_rate": 0.0001053030303030303,
+ "loss": 1.7859,
+ "step": 500
+ },
+ {
+ "epoch": 1.42,
+ "learning_rate": 0.00010511363636363635,
+ "loss": 1.7478,
+ "step": 501
+ },
+ {
+ "epoch": 1.42,
+ "learning_rate": 0.00010492424242424244,
+ "loss": 1.7091,
+ "step": 502
+ },
+ {
+ "epoch": 1.43,
+ "learning_rate": 0.00010473484848484849,
+ "loss": 1.7112,
+ "step": 503
+ },
+ {
+ "epoch": 1.43,
+ "learning_rate": 0.00010454545454545455,
+ "loss": 1.6967,
+ "step": 504
+ },
+ {
+ "epoch": 1.43,
+ "learning_rate": 0.0001043560606060606,
+ "loss": 1.7431,
+ "step": 505
+ },
+ {
+ "epoch": 1.44,
+ "learning_rate": 0.00010416666666666667,
+ "loss": 1.7065,
+ "step": 506
+ },
+ {
+ "epoch": 1.44,
+ "learning_rate": 0.00010397727272727273,
+ "loss": 1.6955,
+ "step": 507
+ },
+ {
+ "epoch": 1.44,
+ "learning_rate": 0.00010378787878787878,
+ "loss": 1.7375,
+ "step": 508
+ },
+ {
+ "epoch": 1.44,
+ "learning_rate": 0.00010359848484848484,
+ "loss": 1.7056,
+ "step": 509
+ },
+ {
+ "epoch": 1.45,
+ "learning_rate": 0.00010340909090909092,
+ "loss": 1.7044,
+ "step": 510
+ },
+ {
+ "epoch": 1.45,
+ "learning_rate": 0.00010321969696969698,
+ "loss": 1.7204,
+ "step": 511
+ },
+ {
+ "epoch": 1.45,
+ "learning_rate": 0.00010303030303030303,
+ "loss": 1.6801,
+ "step": 512
+ },
+ {
+ "epoch": 1.46,
+ "learning_rate": 0.00010284090909090909,
+ "loss": 1.7381,
+ "step": 513
+ },
+ {
+ "epoch": 1.46,
+ "learning_rate": 0.00010265151515151516,
+ "loss": 1.7064,
+ "step": 514
+ },
+ {
+ "epoch": 1.46,
+ "learning_rate": 0.00010246212121212121,
+ "loss": 1.6973,
+ "step": 515
+ },
+ {
+ "epoch": 1.46,
+ "learning_rate": 0.00010227272727272727,
+ "loss": 1.7295,
+ "step": 516
+ },
+ {
+ "epoch": 1.47,
+ "learning_rate": 0.00010208333333333333,
+ "loss": 1.6991,
+ "step": 517
+ },
+ {
+ "epoch": 1.47,
+ "learning_rate": 0.00010189393939393941,
+ "loss": 1.6986,
+ "step": 518
+ },
+ {
+ "epoch": 1.47,
+ "learning_rate": 0.00010170454545454546,
+ "loss": 1.6989,
+ "step": 519
+ },
+ {
+ "epoch": 1.48,
+ "learning_rate": 0.00010151515151515152,
+ "loss": 1.7009,
+ "step": 520
+ },
+ {
+ "epoch": 1.48,
+ "learning_rate": 0.00010132575757575757,
+ "loss": 1.6919,
+ "step": 521
+ },
+ {
+ "epoch": 1.48,
+ "learning_rate": 0.00010113636363636366,
+ "loss": 1.6955,
+ "step": 522
+ },
+ {
+ "epoch": 1.48,
+ "learning_rate": 0.00010094696969696971,
+ "loss": 1.7177,
+ "step": 523
+ },
+ {
+ "epoch": 1.49,
+ "learning_rate": 0.00010075757575757576,
+ "loss": 1.715,
+ "step": 524
+ },
+ {
+ "epoch": 1.49,
+ "learning_rate": 0.00010056818181818181,
+ "loss": 1.6686,
+ "step": 525
+ },
+ {
+ "epoch": 1.49,
+ "learning_rate": 0.0001003787878787879,
+ "loss": 1.771,
+ "step": 526
+ },
+ {
+ "epoch": 1.5,
+ "learning_rate": 0.00010018939393939395,
+ "loss": 1.7024,
+ "step": 527
+ },
+ {
+ "epoch": 1.5,
+ "learning_rate": 0.0001,
+ "loss": 1.7016,
+ "step": 528
+ },
+ {
+ "epoch": 1.5,
+ "learning_rate": 9.981060606060606e-05,
+ "loss": 1.6501,
+ "step": 529
+ },
+ {
+ "epoch": 1.5,
+ "learning_rate": 9.962121212121213e-05,
+ "loss": 1.6903,
+ "step": 530
+ },
+ {
+ "epoch": 1.51,
+ "learning_rate": 9.943181818181819e-05,
+ "loss": 1.6806,
+ "step": 531
+ },
+ {
+ "epoch": 1.51,
+ "learning_rate": 9.924242424242425e-05,
+ "loss": 1.7096,
+ "step": 532
+ },
+ {
+ "epoch": 1.51,
+ "learning_rate": 9.90530303030303e-05,
+ "loss": 1.7307,
+ "step": 533
+ },
+ {
+ "epoch": 1.51,
+ "learning_rate": 9.886363636363637e-05,
+ "loss": 1.6871,
+ "step": 534
+ },
+ {
+ "epoch": 1.52,
+ "learning_rate": 9.867424242424242e-05,
+ "loss": 1.7457,
+ "step": 535
+ },
+ {
+ "epoch": 1.52,
+ "learning_rate": 9.848484848484849e-05,
+ "loss": 1.6867,
+ "step": 536
+ },
+ {
+ "epoch": 1.52,
+ "learning_rate": 9.829545454545455e-05,
+ "loss": 1.6789,
+ "step": 537
+ },
+ {
+ "epoch": 1.53,
+ "learning_rate": 9.810606060606061e-05,
+ "loss": 1.6403,
+ "step": 538
+ },
+ {
+ "epoch": 1.53,
+ "learning_rate": 9.791666666666667e-05,
+ "loss": 1.6697,
+ "step": 539
+ },
+ {
+ "epoch": 1.53,
+ "learning_rate": 9.772727272727274e-05,
+ "loss": 1.7293,
+ "step": 540
+ },
+ {
+ "epoch": 1.53,
+ "learning_rate": 9.75378787878788e-05,
+ "loss": 1.6998,
+ "step": 541
+ },
+ {
+ "epoch": 1.54,
+ "learning_rate": 9.734848484848485e-05,
+ "loss": 1.693,
+ "step": 542
+ },
+ {
+ "epoch": 1.54,
+ "learning_rate": 9.71590909090909e-05,
+ "loss": 1.664,
+ "step": 543
+ },
+ {
+ "epoch": 1.54,
+ "learning_rate": 9.696969696969698e-05,
+ "loss": 1.7061,
+ "step": 544
+ },
+ {
+ "epoch": 1.55,
+ "learning_rate": 9.678030303030303e-05,
+ "loss": 1.6631,
+ "step": 545
+ },
+ {
+ "epoch": 1.55,
+ "learning_rate": 9.65909090909091e-05,
+ "loss": 1.6343,
+ "step": 546
+ },
+ {
+ "epoch": 1.55,
+ "learning_rate": 9.640151515151516e-05,
+ "loss": 1.6939,
+ "step": 547
+ },
+ {
+ "epoch": 1.55,
+ "learning_rate": 9.621212121212123e-05,
+ "loss": 1.669,
+ "step": 548
+ },
+ {
+ "epoch": 1.56,
+ "learning_rate": 9.602272727272728e-05,
+ "loss": 1.6561,
+ "step": 549
+ },
+ {
+ "epoch": 1.56,
+ "learning_rate": 9.583333333333334e-05,
+ "loss": 1.6675,
+ "step": 550
+ },
+ {
+ "epoch": 1.56,
+ "learning_rate": 9.564393939393939e-05,
+ "loss": 1.7109,
+ "step": 551
+ },
+ {
+ "epoch": 1.57,
+ "learning_rate": 9.545454545454546e-05,
+ "loss": 1.693,
+ "step": 552
+ },
+ {
+ "epoch": 1.57,
+ "learning_rate": 9.526515151515152e-05,
+ "loss": 1.6557,
+ "step": 553
+ },
+ {
+ "epoch": 1.57,
+ "learning_rate": 9.507575757575759e-05,
+ "loss": 1.6642,
+ "step": 554
+ },
+ {
+ "epoch": 1.57,
+ "learning_rate": 9.488636363636364e-05,
+ "loss": 1.6674,
+ "step": 555
+ },
+ {
+ "epoch": 1.58,
+ "learning_rate": 9.469696969696971e-05,
+ "loss": 1.6492,
+ "step": 556
+ },
+ {
+ "epoch": 1.58,
+ "learning_rate": 9.450757575757577e-05,
+ "loss": 1.6915,
+ "step": 557
+ },
+ {
+ "epoch": 1.58,
+ "learning_rate": 9.431818181818182e-05,
+ "loss": 1.7028,
+ "step": 558
+ },
+ {
+ "epoch": 1.59,
+ "learning_rate": 9.412878787878788e-05,
+ "loss": 1.6749,
+ "step": 559
+ },
+ {
+ "epoch": 1.59,
+ "learning_rate": 9.393939393939395e-05,
+ "loss": 1.6526,
+ "step": 560
+ },
+ {
+ "epoch": 1.59,
+ "learning_rate": 9.375e-05,
+ "loss": 1.687,
+ "step": 561
+ },
+ {
+ "epoch": 1.59,
+ "learning_rate": 9.356060606060606e-05,
+ "loss": 1.6632,
+ "step": 562
+ },
+ {
+ "epoch": 1.6,
+ "learning_rate": 9.337121212121213e-05,
+ "loss": 1.7074,
+ "step": 563
+ },
+ {
+ "epoch": 1.6,
+ "learning_rate": 9.318181818181818e-05,
+ "loss": 1.6164,
+ "step": 564
+ },
+ {
+ "epoch": 1.6,
+ "learning_rate": 9.299242424242425e-05,
+ "loss": 1.6594,
+ "step": 565
+ },
+ {
+ "epoch": 1.61,
+ "learning_rate": 9.280303030303031e-05,
+ "loss": 1.6603,
+ "step": 566
+ },
+ {
+ "epoch": 1.61,
+ "learning_rate": 9.261363636363636e-05,
+ "loss": 1.6213,
+ "step": 567
+ },
+ {
+ "epoch": 1.61,
+ "learning_rate": 9.242424242424242e-05,
+ "loss": 1.6899,
+ "step": 568
+ },
+ {
+ "epoch": 1.61,
+ "learning_rate": 9.223484848484849e-05,
+ "loss": 1.6619,
+ "step": 569
+ },
+ {
+ "epoch": 1.62,
+ "learning_rate": 9.204545454545454e-05,
+ "loss": 1.7035,
+ "step": 570
+ },
+ {
+ "epoch": 1.62,
+ "learning_rate": 9.185606060606061e-05,
+ "loss": 1.6408,
+ "step": 571
+ },
+ {
+ "epoch": 1.62,
+ "learning_rate": 9.166666666666667e-05,
+ "loss": 1.6506,
+ "step": 572
+ },
+ {
+ "epoch": 1.63,
+ "learning_rate": 9.147727272727274e-05,
+ "loss": 1.658,
+ "step": 573
+ },
+ {
+ "epoch": 1.63,
+ "learning_rate": 9.128787878787879e-05,
+ "loss": 1.6005,
+ "step": 574
+ },
+ {
+ "epoch": 1.63,
+ "learning_rate": 9.109848484848486e-05,
+ "loss": 1.6821,
+ "step": 575
+ },
+ {
+ "epoch": 1.63,
+ "learning_rate": 9.090909090909092e-05,
+ "loss": 1.6858,
+ "step": 576
+ },
+ {
+ "epoch": 1.64,
+ "learning_rate": 9.071969696969697e-05,
+ "loss": 1.6933,
+ "step": 577
+ },
+ {
+ "epoch": 1.64,
+ "learning_rate": 9.053030303030303e-05,
+ "loss": 1.6757,
+ "step": 578
+ },
+ {
+ "epoch": 1.64,
+ "learning_rate": 9.03409090909091e-05,
+ "loss": 1.6107,
+ "step": 579
+ },
+ {
+ "epoch": 1.65,
+ "learning_rate": 9.015151515151515e-05,
+ "loss": 1.5751,
+ "step": 580
+ },
+ {
+ "epoch": 1.65,
+ "learning_rate": 8.996212121212122e-05,
+ "loss": 1.6168,
+ "step": 581
+ },
+ {
+ "epoch": 1.65,
+ "learning_rate": 8.977272727272728e-05,
+ "loss": 1.6213,
+ "step": 582
+ },
+ {
+ "epoch": 1.65,
+ "learning_rate": 8.958333333333335e-05,
+ "loss": 1.6243,
+ "step": 583
+ },
+ {
+ "epoch": 1.66,
+ "learning_rate": 8.93939393939394e-05,
+ "loss": 1.6249,
+ "step": 584
+ },
+ {
+ "epoch": 1.66,
+ "learning_rate": 8.920454545454546e-05,
+ "loss": 1.6529,
+ "step": 585
+ },
+ {
+ "epoch": 1.66,
+ "learning_rate": 8.901515151515151e-05,
+ "loss": 1.626,
+ "step": 586
+ },
+ {
+ "epoch": 1.67,
+ "learning_rate": 8.882575757575758e-05,
+ "loss": 1.6616,
+ "step": 587
+ },
+ {
+ "epoch": 1.67,
+ "learning_rate": 8.863636363636364e-05,
+ "loss": 1.6622,
+ "step": 588
+ },
+ {
+ "epoch": 1.67,
+ "learning_rate": 8.844696969696971e-05,
+ "loss": 1.5927,
+ "step": 589
+ },
+ {
+ "epoch": 1.67,
+ "learning_rate": 8.825757575757576e-05,
+ "loss": 1.6351,
+ "step": 590
+ },
+ {
+ "epoch": 1.68,
+ "learning_rate": 8.806818181818183e-05,
+ "loss": 1.6213,
+ "step": 591
+ },
+ {
+ "epoch": 1.68,
+ "learning_rate": 8.787878787878789e-05,
+ "loss": 1.635,
+ "step": 592
+ },
+ {
+ "epoch": 1.68,
+ "learning_rate": 8.768939393939394e-05,
+ "loss": 1.6406,
+ "step": 593
+ },
+ {
+ "epoch": 1.69,
+ "learning_rate": 8.75e-05,
+ "loss": 1.6387,
+ "step": 594
+ },
+ {
+ "epoch": 1.69,
+ "learning_rate": 8.731060606060605e-05,
+ "loss": 1.602,
+ "step": 595
+ },
+ {
+ "epoch": 1.69,
+ "learning_rate": 8.712121212121212e-05,
+ "loss": 1.601,
+ "step": 596
+ },
+ {
+ "epoch": 1.69,
+ "learning_rate": 8.693181818181818e-05,
+ "loss": 1.5855,
+ "step": 597
+ },
+ {
+ "epoch": 1.7,
+ "learning_rate": 8.674242424242425e-05,
+ "loss": 1.6236,
+ "step": 598
+ },
+ {
+ "epoch": 1.7,
+ "learning_rate": 8.65530303030303e-05,
+ "loss": 1.5999,
+ "step": 599
+ },
+ {
+ "epoch": 1.7,
+ "learning_rate": 8.636363636363637e-05,
+ "loss": 1.6093,
+ "step": 600
+ },
+ {
+ "epoch": 1.7,
+ "learning_rate": 8.617424242424243e-05,
+ "loss": 1.6602,
+ "step": 601
+ },
+ {
+ "epoch": 1.71,
+ "learning_rate": 8.598484848484848e-05,
+ "loss": 1.599,
+ "step": 602
+ },
+ {
+ "epoch": 1.71,
+ "learning_rate": 8.579545454545454e-05,
+ "loss": 1.6056,
+ "step": 603
+ },
+ {
+ "epoch": 1.71,
+ "learning_rate": 8.560606060606061e-05,
+ "loss": 1.6377,
+ "step": 604
+ },
+ {
+ "epoch": 1.72,
+ "learning_rate": 8.541666666666666e-05,
+ "loss": 1.5769,
+ "step": 605
+ },
+ {
+ "epoch": 1.72,
+ "learning_rate": 8.522727272727273e-05,
+ "loss": 1.6219,
+ "step": 606
+ },
+ {
+ "epoch": 1.72,
+ "learning_rate": 8.503787878787879e-05,
+ "loss": 1.5917,
+ "step": 607
+ },
+ {
+ "epoch": 1.72,
+ "learning_rate": 8.484848484848486e-05,
+ "loss": 1.6019,
+ "step": 608
+ },
+ {
+ "epoch": 1.73,
+ "learning_rate": 8.465909090909091e-05,
+ "loss": 1.6316,
+ "step": 609
+ },
+ {
+ "epoch": 1.73,
+ "learning_rate": 8.446969696969697e-05,
+ "loss": 1.6327,
+ "step": 610
+ },
+ {
+ "epoch": 1.73,
+ "learning_rate": 8.428030303030303e-05,
+ "loss": 1.6023,
+ "step": 611
+ },
+ {
+ "epoch": 1.74,
+ "learning_rate": 8.40909090909091e-05,
+ "loss": 1.6087,
+ "step": 612
+ },
+ {
+ "epoch": 1.74,
+ "learning_rate": 8.390151515151515e-05,
+ "loss": 1.6245,
+ "step": 613
+ },
+ {
+ "epoch": 1.74,
+ "learning_rate": 8.371212121212122e-05,
+ "loss": 1.5957,
+ "step": 614
+ },
+ {
+ "epoch": 1.74,
+ "learning_rate": 8.352272727272727e-05,
+ "loss": 1.6196,
+ "step": 615
+ },
+ {
+ "epoch": 1.75,
+ "learning_rate": 8.333333333333334e-05,
+ "loss": 1.6364,
+ "step": 616
+ },
+ {
+ "epoch": 1.75,
+ "learning_rate": 8.31439393939394e-05,
+ "loss": 1.5977,
+ "step": 617
+ },
+ {
+ "epoch": 1.75,
+ "learning_rate": 8.295454545454547e-05,
+ "loss": 1.6018,
+ "step": 618
+ },
+ {
+ "epoch": 1.76,
+ "learning_rate": 8.276515151515152e-05,
+ "loss": 1.5973,
+ "step": 619
+ },
+ {
+ "epoch": 1.76,
+ "learning_rate": 8.257575757575758e-05,
+ "loss": 1.6216,
+ "step": 620
+ },
+ {
+ "epoch": 1.76,
+ "learning_rate": 8.238636363636364e-05,
+ "loss": 1.6422,
+ "step": 621
+ },
+ {
+ "epoch": 1.76,
+ "learning_rate": 8.21969696969697e-05,
+ "loss": 1.6401,
+ "step": 622
+ },
+ {
+ "epoch": 1.77,
+ "learning_rate": 8.200757575757576e-05,
+ "loss": 1.6446,
+ "step": 623
+ },
+ {
+ "epoch": 1.77,
+ "learning_rate": 8.181818181818183e-05,
+ "loss": 1.5791,
+ "step": 624
+ },
+ {
+ "epoch": 1.77,
+ "learning_rate": 8.162878787878789e-05,
+ "loss": 1.5953,
+ "step": 625
+ },
+ {
+ "epoch": 1.78,
+ "learning_rate": 8.143939393939395e-05,
+ "loss": 1.5941,
+ "step": 626
+ },
+ {
+ "epoch": 1.78,
+ "learning_rate": 8.125000000000001e-05,
+ "loss": 1.5784,
+ "step": 627
+ },
+ {
+ "epoch": 1.78,
+ "learning_rate": 8.106060606060607e-05,
+ "loss": 1.6024,
+ "step": 628
+ },
+ {
+ "epoch": 1.78,
+ "learning_rate": 8.087121212121212e-05,
+ "loss": 1.6295,
+ "step": 629
+ },
+ {
+ "epoch": 1.79,
+ "learning_rate": 8.068181818181818e-05,
+ "loss": 1.5905,
+ "step": 630
+ },
+ {
+ "epoch": 1.79,
+ "learning_rate": 8.049242424242425e-05,
+ "loss": 1.6073,
+ "step": 631
+ },
+ {
+ "epoch": 1.79,
+ "learning_rate": 8.03030303030303e-05,
+ "loss": 1.6104,
+ "step": 632
+ },
+ {
+ "epoch": 1.8,
+ "learning_rate": 8.011363636363637e-05,
+ "loss": 1.6134,
+ "step": 633
+ },
+ {
+ "epoch": 1.8,
+ "learning_rate": 7.992424242424243e-05,
+ "loss": 1.6569,
+ "step": 634
+ },
+ {
+ "epoch": 1.8,
+ "learning_rate": 7.97348484848485e-05,
+ "loss": 1.5493,
+ "step": 635
+ },
+ {
+ "epoch": 1.8,
+ "learning_rate": 7.954545454545455e-05,
+ "loss": 1.5767,
+ "step": 636
+ },
+ {
+ "epoch": 1.81,
+ "learning_rate": 7.93560606060606e-05,
+ "loss": 1.5692,
+ "step": 637
+ },
+ {
+ "epoch": 1.81,
+ "learning_rate": 7.916666666666666e-05,
+ "loss": 1.6116,
+ "step": 638
+ },
+ {
+ "epoch": 1.81,
+ "learning_rate": 7.897727272727273e-05,
+ "loss": 1.5684,
+ "step": 639
+ },
+ {
+ "epoch": 1.82,
+ "learning_rate": 7.878787878787879e-05,
+ "loss": 1.6177,
+ "step": 640
+ },
+ {
+ "epoch": 1.82,
+ "learning_rate": 7.859848484848486e-05,
+ "loss": 1.6151,
+ "step": 641
+ },
+ {
+ "epoch": 1.82,
+ "learning_rate": 7.840909090909091e-05,
+ "loss": 1.6293,
+ "step": 642
+ },
+ {
+ "epoch": 1.82,
+ "learning_rate": 7.821969696969698e-05,
+ "loss": 1.6298,
+ "step": 643
+ },
+ {
+ "epoch": 1.83,
+ "learning_rate": 7.803030303030304e-05,
+ "loss": 1.6073,
+ "step": 644
+ },
+ {
+ "epoch": 1.83,
+ "learning_rate": 7.784090909090909e-05,
+ "loss": 1.5328,
+ "step": 645
+ },
+ {
+ "epoch": 1.83,
+ "learning_rate": 7.765151515151515e-05,
+ "loss": 1.5895,
+ "step": 646
+ },
+ {
+ "epoch": 1.84,
+ "learning_rate": 7.746212121212122e-05,
+ "loss": 1.5728,
+ "step": 647
+ },
+ {
+ "epoch": 1.84,
+ "learning_rate": 7.727272727272727e-05,
+ "loss": 1.5449,
+ "step": 648
+ },
+ {
+ "epoch": 1.84,
+ "learning_rate": 7.708333333333334e-05,
+ "loss": 1.5731,
+ "step": 649
+ },
+ {
+ "epoch": 1.84,
+ "learning_rate": 7.68939393939394e-05,
+ "loss": 1.627,
+ "step": 650
+ },
+ {
+ "epoch": 1.85,
+ "learning_rate": 7.670454545454547e-05,
+ "loss": 1.6139,
+ "step": 651
+ },
+ {
+ "epoch": 1.85,
+ "learning_rate": 7.651515151515152e-05,
+ "loss": 1.5613,
+ "step": 652
+ },
+ {
+ "epoch": 1.85,
+ "learning_rate": 7.632575757575758e-05,
+ "loss": 1.5734,
+ "step": 653
+ },
+ {
+ "epoch": 1.86,
+ "learning_rate": 7.613636363636363e-05,
+ "loss": 1.5537,
+ "step": 654
+ },
+ {
+ "epoch": 1.86,
+ "learning_rate": 7.59469696969697e-05,
+ "loss": 1.5886,
+ "step": 655
+ },
+ {
+ "epoch": 1.86,
+ "learning_rate": 7.575757575757576e-05,
+ "loss": 1.5504,
+ "step": 656
+ },
+ {
+ "epoch": 1.86,
+ "learning_rate": 7.556818181818183e-05,
+ "loss": 1.5613,
+ "step": 657
+ },
+ {
+ "epoch": 1.87,
+ "learning_rate": 7.537878787878788e-05,
+ "loss": 1.5877,
+ "step": 658
+ },
+ {
+ "epoch": 1.87,
+ "learning_rate": 7.518939393939395e-05,
+ "loss": 1.605,
+ "step": 659
+ },
+ {
+ "epoch": 1.87,
+ "learning_rate": 7.500000000000001e-05,
+ "loss": 1.5403,
+ "step": 660
+ },
+ {
+ "epoch": 1.88,
+ "learning_rate": 7.481060606060606e-05,
+ "loss": 1.6039,
+ "step": 661
+ },
+ {
+ "epoch": 1.88,
+ "learning_rate": 7.462121212121213e-05,
+ "loss": 1.5708,
+ "step": 662
+ },
+ {
+ "epoch": 1.88,
+ "learning_rate": 7.443181818181817e-05,
+ "loss": 1.5692,
+ "step": 663
+ },
+ {
+ "epoch": 1.88,
+ "learning_rate": 7.424242424242424e-05,
+ "loss": 1.5084,
+ "step": 664
+ },
+ {
+ "epoch": 1.89,
+ "learning_rate": 7.40530303030303e-05,
+ "loss": 1.5982,
+ "step": 665
+ },
+ {
+ "epoch": 1.89,
+ "learning_rate": 7.386363636363637e-05,
+ "loss": 1.5881,
+ "step": 666
+ },
+ {
+ "epoch": 1.89,
+ "learning_rate": 7.367424242424242e-05,
+ "loss": 1.5593,
+ "step": 667
+ },
+ {
+ "epoch": 1.9,
+ "learning_rate": 7.348484848484849e-05,
+ "loss": 1.5871,
+ "step": 668
+ },
+ {
+ "epoch": 1.9,
+ "learning_rate": 7.329545454545455e-05,
+ "loss": 1.6134,
+ "step": 669
+ },
+ {
+ "epoch": 1.9,
+ "learning_rate": 7.310606060606062e-05,
+ "loss": 1.5516,
+ "step": 670
+ },
+ {
+ "epoch": 1.9,
+ "learning_rate": 7.291666666666667e-05,
+ "loss": 1.5691,
+ "step": 671
+ },
+ {
+ "epoch": 1.91,
+ "learning_rate": 7.272727272727273e-05,
+ "loss": 1.5801,
+ "step": 672
+ },
+ {
+ "epoch": 1.91,
+ "learning_rate": 7.253787878787878e-05,
+ "loss": 1.5684,
+ "step": 673
+ },
+ {
+ "epoch": 1.91,
+ "learning_rate": 7.234848484848485e-05,
+ "loss": 1.5591,
+ "step": 674
+ },
+ {
+ "epoch": 1.91,
+ "learning_rate": 7.215909090909091e-05,
+ "loss": 1.5727,
+ "step": 675
+ },
+ {
+ "epoch": 1.92,
+ "learning_rate": 7.196969696969698e-05,
+ "loss": 1.6081,
+ "step": 676
+ },
+ {
+ "epoch": 1.92,
+ "learning_rate": 7.178030303030303e-05,
+ "loss": 1.5884,
+ "step": 677
+ },
+ {
+ "epoch": 1.92,
+ "learning_rate": 7.15909090909091e-05,
+ "loss": 1.5638,
+ "step": 678
+ },
+ {
+ "epoch": 1.93,
+ "learning_rate": 7.140151515151516e-05,
+ "loss": 1.5614,
+ "step": 679
+ },
+ {
+ "epoch": 1.93,
+ "learning_rate": 7.121212121212121e-05,
+ "loss": 1.5543,
+ "step": 680
+ },
+ {
+ "epoch": 1.93,
+ "learning_rate": 7.102272727272727e-05,
+ "loss": 1.5801,
+ "step": 681
+ },
+ {
+ "epoch": 1.93,
+ "learning_rate": 7.083333333333334e-05,
+ "loss": 1.5458,
+ "step": 682
+ },
+ {
+ "epoch": 1.94,
+ "learning_rate": 7.06439393939394e-05,
+ "loss": 1.5567,
+ "step": 683
+ },
+ {
+ "epoch": 1.94,
+ "learning_rate": 7.045454545454546e-05,
+ "loss": 1.5567,
+ "step": 684
+ },
+ {
+ "epoch": 1.94,
+ "learning_rate": 7.026515151515152e-05,
+ "loss": 1.5638,
+ "step": 685
+ },
+ {
+ "epoch": 1.95,
+ "learning_rate": 7.007575757575759e-05,
+ "loss": 1.5431,
+ "step": 686
+ },
+ {
+ "epoch": 1.95,
+ "learning_rate": 6.988636363636364e-05,
+ "loss": 1.5729,
+ "step": 687
+ },
+ {
+ "epoch": 1.95,
+ "learning_rate": 6.96969696969697e-05,
+ "loss": 1.5235,
+ "step": 688
+ },
+ {
+ "epoch": 1.95,
+ "learning_rate": 6.950757575757575e-05,
+ "loss": 1.5753,
+ "step": 689
+ },
+ {
+ "epoch": 1.96,
+ "learning_rate": 6.931818181818182e-05,
+ "loss": 1.5319,
+ "step": 690
+ },
+ {
+ "epoch": 1.96,
+ "learning_rate": 6.912878787878788e-05,
+ "loss": 1.5847,
+ "step": 691
+ },
+ {
+ "epoch": 1.96,
+ "learning_rate": 6.893939393939395e-05,
+ "loss": 1.5533,
+ "step": 692
+ },
+ {
+ "epoch": 1.97,
+ "learning_rate": 6.875e-05,
+ "loss": 1.5665,
+ "step": 693
+ },
+ {
+ "epoch": 1.97,
+ "learning_rate": 6.856060606060606e-05,
+ "loss": 1.5913,
+ "step": 694
+ },
+ {
+ "epoch": 1.97,
+ "learning_rate": 6.837121212121213e-05,
+ "loss": 1.6011,
+ "step": 695
+ },
+ {
+ "epoch": 1.97,
+ "learning_rate": 6.818181818181818e-05,
+ "loss": 1.5201,
+ "step": 696
+ },
+ {
+ "epoch": 1.98,
+ "learning_rate": 6.799242424242424e-05,
+ "loss": 1.57,
+ "step": 697
+ },
+ {
+ "epoch": 1.98,
+ "learning_rate": 6.78030303030303e-05,
+ "loss": 1.5381,
+ "step": 698
+ },
+ {
+ "epoch": 1.98,
+ "learning_rate": 6.761363636363636e-05,
+ "loss": 1.5681,
+ "step": 699
+ },
+ {
+ "epoch": 1.99,
+ "learning_rate": 6.742424242424242e-05,
+ "loss": 1.5542,
+ "step": 700
+ },
+ {
+ "epoch": 1.99,
+ "learning_rate": 6.723484848484849e-05,
+ "loss": 1.5779,
+ "step": 701
+ },
+ {
+ "epoch": 1.99,
+ "learning_rate": 6.704545454545455e-05,
+ "loss": 1.578,
+ "step": 702
+ },
+ {
+ "epoch": 1.99,
+ "learning_rate": 6.685606060606061e-05,
+ "loss": 1.6131,
+ "step": 703
+ },
+ {
+ "epoch": 2.0,
+ "learning_rate": 6.666666666666667e-05,
+ "loss": 1.5085,
+ "step": 704
+ },
+ {
+ "epoch": 2.0,
+ "learning_rate": 6.647727272727274e-05,
+ "loss": 1.4876,
+ "step": 705
+ },
+ {
+ "epoch": 2.0,
+ "learning_rate": 6.628787878787878e-05,
+ "loss": 1.5071,
+ "step": 706
+ },
+ {
+ "epoch": 2.01,
+ "learning_rate": 6.609848484848485e-05,
+ "loss": 1.574,
+ "step": 707
+ },
+ {
+ "epoch": 2.01,
+ "learning_rate": 6.59090909090909e-05,
+ "loss": 1.5214,
+ "step": 708
+ },
+ {
+ "epoch": 2.01,
+ "learning_rate": 6.571969696969697e-05,
+ "loss": 1.5382,
+ "step": 709
+ },
+ {
+ "epoch": 2.01,
+ "learning_rate": 6.553030303030303e-05,
+ "loss": 1.5136,
+ "step": 710
+ },
+ {
+ "epoch": 2.02,
+ "learning_rate": 6.53409090909091e-05,
+ "loss": 1.4807,
+ "step": 711
+ },
+ {
+ "epoch": 2.02,
+ "learning_rate": 6.515151515151516e-05,
+ "loss": 1.491,
+ "step": 712
+ },
+ {
+ "epoch": 2.02,
+ "learning_rate": 6.496212121212122e-05,
+ "loss": 1.5595,
+ "step": 713
+ },
+ {
+ "epoch": 2.03,
+ "learning_rate": 6.477272727272728e-05,
+ "loss": 1.5342,
+ "step": 714
+ },
+ {
+ "epoch": 2.03,
+ "learning_rate": 6.458333333333334e-05,
+ "loss": 1.5173,
+ "step": 715
+ },
+ {
+ "epoch": 2.03,
+ "learning_rate": 6.439393939393939e-05,
+ "loss": 1.5353,
+ "step": 716
+ },
+ {
+ "epoch": 2.03,
+ "learning_rate": 6.420454545454546e-05,
+ "loss": 1.4826,
+ "step": 717
+ },
+ {
+ "epoch": 2.04,
+ "learning_rate": 6.401515151515152e-05,
+ "loss": 1.5404,
+ "step": 718
+ },
+ {
+ "epoch": 2.04,
+ "learning_rate": 6.382575757575759e-05,
+ "loss": 1.5612,
+ "step": 719
+ },
+ {
+ "epoch": 2.04,
+ "learning_rate": 6.363636363636364e-05,
+ "loss": 1.5203,
+ "step": 720
+ },
+ {
+ "epoch": 2.05,
+ "learning_rate": 6.344696969696971e-05,
+ "loss": 1.52,
+ "step": 721
+ },
+ {
+ "epoch": 2.05,
+ "learning_rate": 6.325757575757577e-05,
+ "loss": 1.5417,
+ "step": 722
+ },
+ {
+ "epoch": 2.05,
+ "learning_rate": 6.306818181818182e-05,
+ "loss": 1.5352,
+ "step": 723
+ },
+ {
+ "epoch": 2.05,
+ "learning_rate": 6.287878787878788e-05,
+ "loss": 1.4671,
+ "step": 724
+ },
+ {
+ "epoch": 2.06,
+ "learning_rate": 6.268939393939395e-05,
+ "loss": 1.5739,
+ "step": 725
+ },
+ {
+ "epoch": 2.06,
+ "learning_rate": 6.25e-05,
+ "loss": 1.4987,
+ "step": 726
+ },
+ {
+ "epoch": 2.06,
+ "learning_rate": 6.231060606060606e-05,
+ "loss": 1.5145,
+ "step": 727
+ },
+ {
+ "epoch": 2.07,
+ "learning_rate": 6.212121212121213e-05,
+ "loss": 1.5686,
+ "step": 728
+ },
+ {
+ "epoch": 2.07,
+ "learning_rate": 6.193181818181818e-05,
+ "loss": 1.4872,
+ "step": 729
+ },
+ {
+ "epoch": 2.07,
+ "learning_rate": 6.174242424242425e-05,
+ "loss": 1.4831,
+ "step": 730
+ },
+ {
+ "epoch": 2.07,
+ "learning_rate": 6.15530303030303e-05,
+ "loss": 1.5242,
+ "step": 731
+ },
+ {
+ "epoch": 2.08,
+ "learning_rate": 6.136363636363636e-05,
+ "loss": 1.5298,
+ "step": 732
+ },
+ {
+ "epoch": 2.08,
+ "learning_rate": 6.117424242424242e-05,
+ "loss": 1.4941,
+ "step": 733
+ },
+ {
+ "epoch": 2.08,
+ "learning_rate": 6.098484848484849e-05,
+ "loss": 1.5022,
+ "step": 734
+ },
+ {
+ "epoch": 2.09,
+ "learning_rate": 6.079545454545454e-05,
+ "loss": 1.4947,
+ "step": 735
+ },
+ {
+ "epoch": 2.09,
+ "learning_rate": 6.060606060606061e-05,
+ "loss": 1.4922,
+ "step": 736
+ },
+ {
+ "epoch": 2.09,
+ "learning_rate": 6.041666666666667e-05,
+ "loss": 1.4796,
+ "step": 737
+ },
+ {
+ "epoch": 2.09,
+ "learning_rate": 6.022727272727273e-05,
+ "loss": 1.4619,
+ "step": 738
+ },
+ {
+ "epoch": 2.1,
+ "learning_rate": 6.0037878787878785e-05,
+ "loss": 1.5346,
+ "step": 739
+ },
+ {
+ "epoch": 2.1,
+ "learning_rate": 5.9848484848484854e-05,
+ "loss": 1.4987,
+ "step": 740
+ },
+ {
+ "epoch": 2.1,
+ "learning_rate": 5.965909090909091e-05,
+ "loss": 1.516,
+ "step": 741
+ },
+ {
+ "epoch": 2.1,
+ "learning_rate": 5.946969696969697e-05,
+ "loss": 1.5075,
+ "step": 742
+ },
+ {
+ "epoch": 2.11,
+ "learning_rate": 5.928030303030303e-05,
+ "loss": 1.4584,
+ "step": 743
+ },
+ {
+ "epoch": 2.11,
+ "learning_rate": 5.90909090909091e-05,
+ "loss": 1.5285,
+ "step": 744
+ },
+ {
+ "epoch": 2.11,
+ "learning_rate": 5.890151515151515e-05,
+ "loss": 1.5339,
+ "step": 745
+ },
+ {
+ "epoch": 2.12,
+ "learning_rate": 5.871212121212122e-05,
+ "loss": 1.4811,
+ "step": 746
+ },
+ {
+ "epoch": 2.12,
+ "learning_rate": 5.852272727272727e-05,
+ "loss": 1.5158,
+ "step": 747
+ },
+ {
+ "epoch": 2.12,
+ "learning_rate": 5.833333333333334e-05,
+ "loss": 1.5523,
+ "step": 748
+ },
+ {
+ "epoch": 2.12,
+ "learning_rate": 5.8143939393939395e-05,
+ "loss": 1.4911,
+ "step": 749
+ },
+ {
+ "epoch": 2.13,
+ "learning_rate": 5.7954545454545464e-05,
+ "loss": 1.508,
+ "step": 750
+ },
+ {
+ "epoch": 2.13,
+ "learning_rate": 5.776515151515152e-05,
+ "loss": 1.5273,
+ "step": 751
+ },
+ {
+ "epoch": 2.13,
+ "learning_rate": 5.757575757575758e-05,
+ "loss": 1.5231,
+ "step": 752
+ },
+ {
+ "epoch": 2.14,
+ "learning_rate": 5.738636363636364e-05,
+ "loss": 1.5269,
+ "step": 753
+ },
+ {
+ "epoch": 2.14,
+ "learning_rate": 5.719696969696971e-05,
+ "loss": 1.5306,
+ "step": 754
+ },
+ {
+ "epoch": 2.14,
+ "learning_rate": 5.700757575757576e-05,
+ "loss": 1.4519,
+ "step": 755
+ },
+ {
+ "epoch": 2.14,
+ "learning_rate": 5.6818181818181825e-05,
+ "loss": 1.4976,
+ "step": 756
+ },
+ {
+ "epoch": 2.15,
+ "learning_rate": 5.662878787878788e-05,
+ "loss": 1.5136,
+ "step": 757
+ },
+ {
+ "epoch": 2.15,
+ "learning_rate": 5.643939393939395e-05,
+ "loss": 1.536,
+ "step": 758
+ },
+ {
+ "epoch": 2.15,
+ "learning_rate": 5.6250000000000005e-05,
+ "loss": 1.4927,
+ "step": 759
+ },
+ {
+ "epoch": 2.16,
+ "learning_rate": 5.606060606060606e-05,
+ "loss": 1.4931,
+ "step": 760
+ },
+ {
+ "epoch": 2.16,
+ "learning_rate": 5.587121212121212e-05,
+ "loss": 1.4939,
+ "step": 761
+ },
+ {
+ "epoch": 2.16,
+ "learning_rate": 5.568181818181818e-05,
+ "loss": 1.4446,
+ "step": 762
+ },
+ {
+ "epoch": 2.16,
+ "learning_rate": 5.549242424242425e-05,
+ "loss": 1.5113,
+ "step": 763
+ },
+ {
+ "epoch": 2.17,
+ "learning_rate": 5.5303030303030304e-05,
+ "loss": 1.4886,
+ "step": 764
+ },
+ {
+ "epoch": 2.17,
+ "learning_rate": 5.5113636363636366e-05,
+ "loss": 1.5291,
+ "step": 765
+ },
+ {
+ "epoch": 2.17,
+ "learning_rate": 5.492424242424242e-05,
+ "loss": 1.5204,
+ "step": 766
+ },
+ {
+ "epoch": 2.18,
+ "learning_rate": 5.473484848484849e-05,
+ "loss": 1.4677,
+ "step": 767
+ },
+ {
+ "epoch": 2.18,
+ "learning_rate": 5.4545454545454546e-05,
+ "loss": 1.4499,
+ "step": 768
+ },
+ {
+ "epoch": 2.18,
+ "learning_rate": 5.435606060606061e-05,
+ "loss": 1.4735,
+ "step": 769
+ },
+ {
+ "epoch": 2.18,
+ "learning_rate": 5.4166666666666664e-05,
+ "loss": 1.4688,
+ "step": 770
+ },
+ {
+ "epoch": 2.19,
+ "learning_rate": 5.397727272727273e-05,
+ "loss": 1.4728,
+ "step": 771
+ },
+ {
+ "epoch": 2.19,
+ "learning_rate": 5.378787878787879e-05,
+ "loss": 1.4791,
+ "step": 772
+ },
+ {
+ "epoch": 2.19,
+ "learning_rate": 5.359848484848485e-05,
+ "loss": 1.5265,
+ "step": 773
+ },
+ {
+ "epoch": 2.2,
+ "learning_rate": 5.340909090909091e-05,
+ "loss": 1.4775,
+ "step": 774
+ },
+ {
+ "epoch": 2.2,
+ "learning_rate": 5.3219696969696976e-05,
+ "loss": 1.4531,
+ "step": 775
+ },
+ {
+ "epoch": 2.2,
+ "learning_rate": 5.303030303030303e-05,
+ "loss": 1.4891,
+ "step": 776
+ },
+ {
+ "epoch": 2.2,
+ "learning_rate": 5.2840909090909094e-05,
+ "loss": 1.4764,
+ "step": 777
+ },
+ {
+ "epoch": 2.21,
+ "learning_rate": 5.265151515151515e-05,
+ "loss": 1.4864,
+ "step": 778
+ },
+ {
+ "epoch": 2.21,
+ "learning_rate": 5.246212121212122e-05,
+ "loss": 1.521,
+ "step": 779
+ },
+ {
+ "epoch": 2.21,
+ "learning_rate": 5.2272727272727274e-05,
+ "loss": 1.473,
+ "step": 780
+ },
+ {
+ "epoch": 2.22,
+ "learning_rate": 5.208333333333334e-05,
+ "loss": 1.477,
+ "step": 781
+ },
+ {
+ "epoch": 2.22,
+ "learning_rate": 5.189393939393939e-05,
+ "loss": 1.482,
+ "step": 782
+ },
+ {
+ "epoch": 2.22,
+ "learning_rate": 5.170454545454546e-05,
+ "loss": 1.5387,
+ "step": 783
+ },
+ {
+ "epoch": 2.22,
+ "learning_rate": 5.151515151515152e-05,
+ "loss": 1.4354,
+ "step": 784
+ },
+ {
+ "epoch": 2.23,
+ "learning_rate": 5.132575757575758e-05,
+ "loss": 1.4792,
+ "step": 785
+ },
+ {
+ "epoch": 2.23,
+ "learning_rate": 5.1136363636363635e-05,
+ "loss": 1.4589,
+ "step": 786
+ },
+ {
+ "epoch": 2.23,
+ "learning_rate": 5.0946969696969704e-05,
+ "loss": 1.4883,
+ "step": 787
+ },
+ {
+ "epoch": 2.24,
+ "learning_rate": 5.075757575757576e-05,
+ "loss": 1.4442,
+ "step": 788
+ },
+ {
+ "epoch": 2.24,
+ "learning_rate": 5.056818181818183e-05,
+ "loss": 1.4387,
+ "step": 789
+ },
+ {
+ "epoch": 2.24,
+ "learning_rate": 5.037878787878788e-05,
+ "loss": 1.5203,
+ "step": 790
+ },
+ {
+ "epoch": 2.24,
+ "learning_rate": 5.018939393939395e-05,
+ "loss": 1.4634,
+ "step": 791
+ },
+ {
+ "epoch": 2.25,
+ "learning_rate": 5e-05,
+ "loss": 1.4734,
+ "step": 792
+ },
+ {
+ "epoch": 2.25,
+ "learning_rate": 4.9810606060606065e-05,
+ "loss": 1.5079,
+ "step": 793
+ },
+ {
+ "epoch": 2.25,
+ "learning_rate": 4.962121212121213e-05,
+ "loss": 1.4986,
+ "step": 794
+ },
+ {
+ "epoch": 2.26,
+ "learning_rate": 4.943181818181818e-05,
+ "loss": 1.4132,
+ "step": 795
+ },
+ {
+ "epoch": 2.26,
+ "learning_rate": 4.9242424242424245e-05,
+ "loss": 1.4992,
+ "step": 796
+ },
+ {
+ "epoch": 2.26,
+ "learning_rate": 4.905303030303031e-05,
+ "loss": 1.4601,
+ "step": 797
+ },
+ {
+ "epoch": 2.26,
+ "learning_rate": 4.886363636363637e-05,
+ "loss": 1.47,
+ "step": 798
+ },
+ {
+ "epoch": 2.27,
+ "learning_rate": 4.8674242424242425e-05,
+ "loss": 1.4575,
+ "step": 799
+ },
+ {
+ "epoch": 2.27,
+ "learning_rate": 4.848484848484849e-05,
+ "loss": 1.4508,
+ "step": 800
+ },
+ {
+ "epoch": 2.27,
+ "learning_rate": 4.829545454545455e-05,
+ "loss": 1.4806,
+ "step": 801
+ },
+ {
+ "epoch": 2.28,
+ "learning_rate": 4.810606060606061e-05,
+ "loss": 1.475,
+ "step": 802
+ },
+ {
+ "epoch": 2.28,
+ "learning_rate": 4.791666666666667e-05,
+ "loss": 1.4219,
+ "step": 803
+ },
+ {
+ "epoch": 2.28,
+ "learning_rate": 4.772727272727273e-05,
+ "loss": 1.4651,
+ "step": 804
+ },
+ {
+ "epoch": 2.28,
+ "learning_rate": 4.753787878787879e-05,
+ "loss": 1.519,
+ "step": 805
+ },
+ {
+ "epoch": 2.29,
+ "learning_rate": 4.7348484848484855e-05,
+ "loss": 1.4887,
+ "step": 806
+ },
+ {
+ "epoch": 2.29,
+ "learning_rate": 4.715909090909091e-05,
+ "loss": 1.4713,
+ "step": 807
+ },
+ {
+ "epoch": 2.29,
+ "learning_rate": 4.696969696969697e-05,
+ "loss": 1.4588,
+ "step": 808
+ },
+ {
+ "epoch": 2.3,
+ "learning_rate": 4.678030303030303e-05,
+ "loss": 1.4752,
+ "step": 809
+ },
+ {
+ "epoch": 2.3,
+ "learning_rate": 4.659090909090909e-05,
+ "loss": 1.4722,
+ "step": 810
+ },
+ {
+ "epoch": 2.3,
+ "learning_rate": 4.6401515151515154e-05,
+ "loss": 1.4735,
+ "step": 811
+ },
+ {
+ "epoch": 2.3,
+ "learning_rate": 4.621212121212121e-05,
+ "loss": 1.4893,
+ "step": 812
+ },
+ {
+ "epoch": 2.31,
+ "learning_rate": 4.602272727272727e-05,
+ "loss": 1.4793,
+ "step": 813
+ },
+ {
+ "epoch": 2.31,
+ "learning_rate": 4.5833333333333334e-05,
+ "loss": 1.4737,
+ "step": 814
+ },
+ {
+ "epoch": 2.31,
+ "learning_rate": 4.5643939393939396e-05,
+ "loss": 1.5194,
+ "step": 815
+ },
+ {
+ "epoch": 2.31,
+ "learning_rate": 4.545454545454546e-05,
+ "loss": 1.4461,
+ "step": 816
+ },
+ {
+ "epoch": 2.32,
+ "learning_rate": 4.5265151515151514e-05,
+ "loss": 1.467,
+ "step": 817
+ },
+ {
+ "epoch": 2.32,
+ "learning_rate": 4.5075757575757577e-05,
+ "loss": 1.5031,
+ "step": 818
+ },
+ {
+ "epoch": 2.32,
+ "learning_rate": 4.488636363636364e-05,
+ "loss": 1.5085,
+ "step": 819
+ },
+ {
+ "epoch": 2.33,
+ "learning_rate": 4.46969696969697e-05,
+ "loss": 1.4356,
+ "step": 820
+ },
+ {
+ "epoch": 2.33,
+ "learning_rate": 4.450757575757576e-05,
+ "loss": 1.4645,
+ "step": 821
+ },
+ {
+ "epoch": 2.33,
+ "learning_rate": 4.431818181818182e-05,
+ "loss": 1.4224,
+ "step": 822
+ },
+ {
+ "epoch": 2.33,
+ "learning_rate": 4.412878787878788e-05,
+ "loss": 1.4675,
+ "step": 823
+ },
+ {
+ "epoch": 2.34,
+ "learning_rate": 4.3939393939393944e-05,
+ "loss": 1.4892,
+ "step": 824
+ },
+ {
+ "epoch": 2.34,
+ "learning_rate": 4.375e-05,
+ "loss": 1.5441,
+ "step": 825
+ },
+ {
+ "epoch": 2.34,
+ "learning_rate": 4.356060606060606e-05,
+ "loss": 1.4205,
+ "step": 826
+ },
+ {
+ "epoch": 2.35,
+ "learning_rate": 4.3371212121212124e-05,
+ "loss": 1.4849,
+ "step": 827
+ },
+ {
+ "epoch": 2.35,
+ "learning_rate": 4.318181818181819e-05,
+ "loss": 1.4789,
+ "step": 828
+ },
+ {
+ "epoch": 2.35,
+ "learning_rate": 4.299242424242424e-05,
+ "loss": 1.445,
+ "step": 829
+ },
+ {
+ "epoch": 2.35,
+ "learning_rate": 4.2803030303030305e-05,
+ "loss": 1.4528,
+ "step": 830
+ },
+ {
+ "epoch": 2.36,
+ "learning_rate": 4.261363636363637e-05,
+ "loss": 1.4588,
+ "step": 831
+ },
+ {
+ "epoch": 2.36,
+ "learning_rate": 4.242424242424243e-05,
+ "loss": 1.4158,
+ "step": 832
+ },
+ {
+ "epoch": 2.36,
+ "learning_rate": 4.2234848484848485e-05,
+ "loss": 1.4933,
+ "step": 833
+ },
+ {
+ "epoch": 2.37,
+ "learning_rate": 4.204545454545455e-05,
+ "loss": 1.4294,
+ "step": 834
+ },
+ {
+ "epoch": 2.37,
+ "learning_rate": 4.185606060606061e-05,
+ "loss": 1.4764,
+ "step": 835
+ },
+ {
+ "epoch": 2.37,
+ "learning_rate": 4.166666666666667e-05,
+ "loss": 1.4262,
+ "step": 836
+ },
+ {
+ "epoch": 2.37,
+ "learning_rate": 4.1477272727272734e-05,
+ "loss": 1.3994,
+ "step": 837
+ },
+ {
+ "epoch": 2.38,
+ "learning_rate": 4.128787878787879e-05,
+ "loss": 1.4912,
+ "step": 838
+ },
+ {
+ "epoch": 2.38,
+ "learning_rate": 4.109848484848485e-05,
+ "loss": 1.4228,
+ "step": 839
+ },
+ {
+ "epoch": 2.38,
+ "learning_rate": 4.0909090909090915e-05,
+ "loss": 1.4403,
+ "step": 840
+ },
+ {
+ "epoch": 2.39,
+ "learning_rate": 4.071969696969698e-05,
+ "loss": 1.4738,
+ "step": 841
+ },
+ {
+ "epoch": 2.39,
+ "learning_rate": 4.053030303030303e-05,
+ "loss": 1.4715,
+ "step": 842
+ },
+ {
+ "epoch": 2.39,
+ "learning_rate": 4.034090909090909e-05,
+ "loss": 1.4354,
+ "step": 843
+ },
+ {
+ "epoch": 2.39,
+ "learning_rate": 4.015151515151515e-05,
+ "loss": 1.4296,
+ "step": 844
+ },
+ {
+ "epoch": 2.4,
+ "learning_rate": 3.996212121212121e-05,
+ "loss": 1.4773,
+ "step": 845
+ },
+ {
+ "epoch": 2.4,
+ "learning_rate": 3.9772727272727275e-05,
+ "loss": 1.4813,
+ "step": 846
+ },
+ {
+ "epoch": 2.4,
+ "learning_rate": 3.958333333333333e-05,
+ "loss": 1.4763,
+ "step": 847
+ },
+ {
+ "epoch": 2.41,
+ "learning_rate": 3.939393939393939e-05,
+ "loss": 1.472,
+ "step": 848
+ },
+ {
+ "epoch": 2.41,
+ "learning_rate": 3.9204545454545456e-05,
+ "loss": 1.3975,
+ "step": 849
+ },
+ {
+ "epoch": 2.41,
+ "learning_rate": 3.901515151515152e-05,
+ "loss": 1.4507,
+ "step": 850
+ },
+ {
+ "epoch": 2.41,
+ "learning_rate": 3.8825757575757574e-05,
+ "loss": 1.4911,
+ "step": 851
+ },
+ {
+ "epoch": 2.42,
+ "learning_rate": 3.8636363636363636e-05,
+ "loss": 1.3785,
+ "step": 852
+ },
+ {
+ "epoch": 2.42,
+ "learning_rate": 3.84469696969697e-05,
+ "loss": 1.3964,
+ "step": 853
+ },
+ {
+ "epoch": 2.42,
+ "learning_rate": 3.825757575757576e-05,
+ "loss": 1.471,
+ "step": 854
+ },
+ {
+ "epoch": 2.43,
+ "learning_rate": 3.8068181818181816e-05,
+ "loss": 1.4034,
+ "step": 855
+ },
+ {
+ "epoch": 2.43,
+ "learning_rate": 3.787878787878788e-05,
+ "loss": 1.5032,
+ "step": 856
+ },
+ {
+ "epoch": 2.43,
+ "learning_rate": 3.768939393939394e-05,
+ "loss": 1.488,
+ "step": 857
+ },
+ {
+ "epoch": 2.43,
+ "learning_rate": 3.7500000000000003e-05,
+ "loss": 1.4341,
+ "step": 858
+ },
+ {
+ "epoch": 2.44,
+ "learning_rate": 3.7310606060606066e-05,
+ "loss": 1.4728,
+ "step": 859
+ },
+ {
+ "epoch": 2.44,
+ "learning_rate": 3.712121212121212e-05,
+ "loss": 1.4146,
+ "step": 860
+ },
+ {
+ "epoch": 2.44,
+ "learning_rate": 3.6931818181818184e-05,
+ "loss": 1.4241,
+ "step": 861
+ },
+ {
+ "epoch": 2.45,
+ "learning_rate": 3.6742424242424246e-05,
+ "loss": 1.4811,
+ "step": 862
+ },
+ {
+ "epoch": 2.45,
+ "learning_rate": 3.655303030303031e-05,
+ "loss": 1.5406,
+ "step": 863
+ },
+ {
+ "epoch": 2.45,
+ "learning_rate": 3.6363636363636364e-05,
+ "loss": 1.4578,
+ "step": 864
+ },
+ {
+ "epoch": 2.45,
+ "learning_rate": 3.6174242424242427e-05,
+ "loss": 1.4487,
+ "step": 865
+ },
+ {
+ "epoch": 2.46,
+ "learning_rate": 3.598484848484849e-05,
+ "loss": 1.4609,
+ "step": 866
+ },
+ {
+ "epoch": 2.46,
+ "learning_rate": 3.579545454545455e-05,
+ "loss": 1.3872,
+ "step": 867
+ },
+ {
+ "epoch": 2.46,
+ "learning_rate": 3.560606060606061e-05,
+ "loss": 1.4584,
+ "step": 868
+ },
+ {
+ "epoch": 2.47,
+ "learning_rate": 3.541666666666667e-05,
+ "loss": 1.4516,
+ "step": 869
+ },
+ {
+ "epoch": 2.47,
+ "learning_rate": 3.522727272727273e-05,
+ "loss": 1.4802,
+ "step": 870
+ },
+ {
+ "epoch": 2.47,
+ "learning_rate": 3.5037878787878794e-05,
+ "loss": 1.4247,
+ "step": 871
+ },
+ {
+ "epoch": 2.47,
+ "learning_rate": 3.484848484848485e-05,
+ "loss": 1.4203,
+ "step": 872
+ },
+ {
+ "epoch": 2.48,
+ "learning_rate": 3.465909090909091e-05,
+ "loss": 1.4793,
+ "step": 873
+ },
+ {
+ "epoch": 2.48,
+ "learning_rate": 3.4469696969696974e-05,
+ "loss": 1.4442,
+ "step": 874
+ },
+ {
+ "epoch": 2.48,
+ "learning_rate": 3.428030303030303e-05,
+ "loss": 1.4779,
+ "step": 875
+ },
+ {
+ "epoch": 2.49,
+ "learning_rate": 3.409090909090909e-05,
+ "loss": 1.4143,
+ "step": 876
+ },
+ {
+ "epoch": 2.49,
+ "learning_rate": 3.390151515151515e-05,
+ "loss": 1.4758,
+ "step": 877
+ },
+ {
+ "epoch": 2.49,
+ "learning_rate": 3.371212121212121e-05,
+ "loss": 1.4327,
+ "step": 878
+ },
+ {
+ "epoch": 2.49,
+ "learning_rate": 3.352272727272727e-05,
+ "loss": 1.466,
+ "step": 879
+ },
+ {
+ "epoch": 2.5,
+ "learning_rate": 3.3333333333333335e-05,
+ "loss": 1.498,
+ "step": 880
+ },
+ {
+ "epoch": 2.5,
+ "learning_rate": 3.314393939393939e-05,
+ "loss": 1.4679,
+ "step": 881
+ },
+ {
+ "epoch": 2.5,
+ "learning_rate": 3.295454545454545e-05,
+ "loss": 1.4606,
+ "step": 882
+ },
+ {
+ "epoch": 2.5,
+ "learning_rate": 3.2765151515151515e-05,
+ "loss": 1.4416,
+ "step": 883
+ },
+ {
+ "epoch": 2.51,
+ "learning_rate": 3.257575757575758e-05,
+ "loss": 1.4284,
+ "step": 884
+ },
+ {
+ "epoch": 2.51,
+ "learning_rate": 3.238636363636364e-05,
+ "loss": 1.4554,
+ "step": 885
+ },
+ {
+ "epoch": 2.51,
+ "learning_rate": 3.2196969696969696e-05,
+ "loss": 1.4306,
+ "step": 886
+ },
+ {
+ "epoch": 2.52,
+ "learning_rate": 3.200757575757576e-05,
+ "loss": 1.4751,
+ "step": 887
+ },
+ {
+ "epoch": 2.52,
+ "learning_rate": 3.181818181818182e-05,
+ "loss": 1.4787,
+ "step": 888
+ },
+ {
+ "epoch": 2.52,
+ "learning_rate": 3.162878787878788e-05,
+ "loss": 1.4345,
+ "step": 889
+ },
+ {
+ "epoch": 2.52,
+ "learning_rate": 3.143939393939394e-05,
+ "loss": 1.4951,
+ "step": 890
+ },
+ {
+ "epoch": 2.53,
+ "learning_rate": 3.125e-05,
+ "loss": 1.4212,
+ "step": 891
+ },
+ {
+ "epoch": 2.53,
+ "learning_rate": 3.106060606060606e-05,
+ "loss": 1.4258,
+ "step": 892
+ },
+ {
+ "epoch": 2.53,
+ "learning_rate": 3.0871212121212125e-05,
+ "loss": 1.4454,
+ "step": 893
+ },
+ {
+ "epoch": 2.54,
+ "learning_rate": 3.068181818181818e-05,
+ "loss": 1.4717,
+ "step": 894
+ },
+ {
+ "epoch": 2.54,
+ "learning_rate": 3.0492424242424243e-05,
+ "loss": 1.45,
+ "step": 895
+ },
+ {
+ "epoch": 2.54,
+ "learning_rate": 3.0303030303030306e-05,
+ "loss": 1.4785,
+ "step": 896
+ },
+ {
+ "epoch": 2.54,
+ "learning_rate": 3.0113636363636365e-05,
+ "loss": 1.3477,
+ "step": 897
+ },
+ {
+ "epoch": 2.55,
+ "learning_rate": 2.9924242424242427e-05,
+ "loss": 1.4807,
+ "step": 898
+ },
+ {
+ "epoch": 2.55,
+ "learning_rate": 2.9734848484848486e-05,
+ "loss": 1.4737,
+ "step": 899
+ },
+ {
+ "epoch": 2.55,
+ "learning_rate": 2.954545454545455e-05,
+ "loss": 1.4427,
+ "step": 900
+ },
+ {
+ "epoch": 2.56,
+ "learning_rate": 2.935606060606061e-05,
+ "loss": 1.4793,
+ "step": 901
+ },
+ {
+ "epoch": 2.56,
+ "learning_rate": 2.916666666666667e-05,
+ "loss": 1.4168,
+ "step": 902
+ },
+ {
+ "epoch": 2.56,
+ "learning_rate": 2.8977272727272732e-05,
+ "loss": 1.4452,
+ "step": 903
+ },
+ {
+ "epoch": 2.56,
+ "learning_rate": 2.878787878787879e-05,
+ "loss": 1.37,
+ "step": 904
+ },
+ {
+ "epoch": 2.57,
+ "learning_rate": 2.8598484848484853e-05,
+ "loss": 1.4097,
+ "step": 905
+ },
+ {
+ "epoch": 2.57,
+ "learning_rate": 2.8409090909090912e-05,
+ "loss": 1.3963,
+ "step": 906
+ },
+ {
+ "epoch": 2.57,
+ "learning_rate": 2.8219696969696975e-05,
+ "loss": 1.4226,
+ "step": 907
+ },
+ {
+ "epoch": 2.58,
+ "learning_rate": 2.803030303030303e-05,
+ "loss": 1.4734,
+ "step": 908
+ },
+ {
+ "epoch": 2.58,
+ "learning_rate": 2.784090909090909e-05,
+ "loss": 1.4221,
+ "step": 909
+ },
+ {
+ "epoch": 2.58,
+ "learning_rate": 2.7651515151515152e-05,
+ "loss": 1.4383,
+ "step": 910
+ },
+ {
+ "epoch": 2.58,
+ "learning_rate": 2.746212121212121e-05,
+ "loss": 1.4968,
+ "step": 911
+ },
+ {
+ "epoch": 2.59,
+ "learning_rate": 2.7272727272727273e-05,
+ "loss": 1.4475,
+ "step": 912
+ },
+ {
+ "epoch": 2.59,
+ "learning_rate": 2.7083333333333332e-05,
+ "loss": 1.4358,
+ "step": 913
+ },
+ {
+ "epoch": 2.59,
+ "learning_rate": 2.6893939393939394e-05,
+ "loss": 1.4536,
+ "step": 914
+ },
+ {
+ "epoch": 2.6,
+ "learning_rate": 2.6704545454545453e-05,
+ "loss": 1.419,
+ "step": 915
+ },
+ {
+ "epoch": 2.6,
+ "learning_rate": 2.6515151515151516e-05,
+ "loss": 1.416,
+ "step": 916
+ },
+ {
+ "epoch": 2.6,
+ "learning_rate": 2.6325757575757575e-05,
+ "loss": 1.4109,
+ "step": 917
+ },
+ {
+ "epoch": 2.6,
+ "learning_rate": 2.6136363636363637e-05,
+ "loss": 1.3907,
+ "step": 918
+ },
+ {
+ "epoch": 2.61,
+ "learning_rate": 2.5946969696969696e-05,
+ "loss": 1.4316,
+ "step": 919
+ },
+ {
+ "epoch": 2.61,
+ "learning_rate": 2.575757575757576e-05,
+ "loss": 1.4484,
+ "step": 920
+ },
+ {
+ "epoch": 2.61,
+ "learning_rate": 2.5568181818181817e-05,
+ "loss": 1.5423,
+ "step": 921
+ },
+ {
+ "epoch": 2.62,
+ "learning_rate": 2.537878787878788e-05,
+ "loss": 1.5007,
+ "step": 922
+ },
+ {
+ "epoch": 2.62,
+ "learning_rate": 2.518939393939394e-05,
+ "loss": 1.4497,
+ "step": 923
+ },
+ {
+ "epoch": 2.62,
+ "learning_rate": 2.5e-05,
+ "loss": 1.4404,
+ "step": 924
+ },
+ {
+ "epoch": 2.62,
+ "learning_rate": 2.4810606060606064e-05,
+ "loss": 1.476,
+ "step": 925
+ },
+ {
+ "epoch": 2.63,
+ "learning_rate": 2.4621212121212123e-05,
+ "loss": 1.4526,
+ "step": 926
+ },
+ {
+ "epoch": 2.63,
+ "learning_rate": 2.4431818181818185e-05,
+ "loss": 1.4112,
+ "step": 927
+ },
+ {
+ "epoch": 2.63,
+ "learning_rate": 2.4242424242424244e-05,
+ "loss": 1.465,
+ "step": 928
+ },
+ {
+ "epoch": 2.64,
+ "learning_rate": 2.4053030303030306e-05,
+ "loss": 1.4395,
+ "step": 929
+ },
+ {
+ "epoch": 2.64,
+ "learning_rate": 2.3863636363636365e-05,
+ "loss": 1.4097,
+ "step": 930
+ },
+ {
+ "epoch": 2.64,
+ "learning_rate": 2.3674242424242428e-05,
+ "loss": 1.4487,
+ "step": 931
+ },
+ {
+ "epoch": 2.64,
+ "learning_rate": 2.3484848484848487e-05,
+ "loss": 1.4614,
+ "step": 932
+ },
+ {
+ "epoch": 2.65,
+ "learning_rate": 2.3295454545454546e-05,
+ "loss": 1.4373,
+ "step": 933
+ },
+ {
+ "epoch": 2.65,
+ "learning_rate": 2.3106060606060605e-05,
+ "loss": 1.4899,
+ "step": 934
+ },
+ {
+ "epoch": 2.65,
+ "learning_rate": 2.2916666666666667e-05,
+ "loss": 1.4455,
+ "step": 935
+ },
+ {
+ "epoch": 2.66,
+ "learning_rate": 2.272727272727273e-05,
+ "loss": 1.3954,
+ "step": 936
+ },
+ {
+ "epoch": 2.66,
+ "learning_rate": 2.2537878787878788e-05,
+ "loss": 1.456,
+ "step": 937
+ },
+ {
+ "epoch": 2.66,
+ "learning_rate": 2.234848484848485e-05,
+ "loss": 1.4317,
+ "step": 938
+ },
+ {
+ "epoch": 2.66,
+ "learning_rate": 2.215909090909091e-05,
+ "loss": 1.3824,
+ "step": 939
+ },
+ {
+ "epoch": 2.67,
+ "learning_rate": 2.1969696969696972e-05,
+ "loss": 1.4068,
+ "step": 940
+ },
+ {
+ "epoch": 2.67,
+ "learning_rate": 2.178030303030303e-05,
+ "loss": 1.3588,
+ "step": 941
+ },
+ {
+ "epoch": 2.67,
+ "learning_rate": 2.1590909090909093e-05,
+ "loss": 1.405,
+ "step": 942
+ },
+ {
+ "epoch": 2.68,
+ "learning_rate": 2.1401515151515152e-05,
+ "loss": 1.4763,
+ "step": 943
+ },
+ {
+ "epoch": 2.68,
+ "learning_rate": 2.1212121212121215e-05,
+ "loss": 1.4424,
+ "step": 944
+ },
+ {
+ "epoch": 2.68,
+ "learning_rate": 2.1022727272727274e-05,
+ "loss": 1.4339,
+ "step": 945
+ },
+ {
+ "epoch": 2.68,
+ "learning_rate": 2.0833333333333336e-05,
+ "loss": 1.4734,
+ "step": 946
+ },
+ {
+ "epoch": 2.69,
+ "learning_rate": 2.0643939393939395e-05,
+ "loss": 1.437,
+ "step": 947
+ },
+ {
+ "epoch": 2.69,
+ "learning_rate": 2.0454545454545457e-05,
+ "loss": 1.432,
+ "step": 948
+ },
+ {
+ "epoch": 2.69,
+ "learning_rate": 2.0265151515151516e-05,
+ "loss": 1.5139,
+ "step": 949
+ },
+ {
+ "epoch": 2.7,
+ "learning_rate": 2.0075757575757575e-05,
+ "loss": 1.4176,
+ "step": 950
+ },
+ {
+ "epoch": 2.7,
+ "learning_rate": 1.9886363636363638e-05,
+ "loss": 1.4205,
+ "step": 951
+ },
+ {
+ "epoch": 2.7,
+ "learning_rate": 1.9696969696969697e-05,
+ "loss": 1.4537,
+ "step": 952
+ },
+ {
+ "epoch": 2.7,
+ "learning_rate": 1.950757575757576e-05,
+ "loss": 1.447,
+ "step": 953
+ },
+ {
+ "epoch": 2.71,
+ "learning_rate": 1.9318181818181818e-05,
+ "loss": 1.4213,
+ "step": 954
+ },
+ {
+ "epoch": 2.71,
+ "learning_rate": 1.912878787878788e-05,
+ "loss": 1.4057,
+ "step": 955
+ },
+ {
+ "epoch": 2.71,
+ "learning_rate": 1.893939393939394e-05,
+ "loss": 1.3947,
+ "step": 956
+ },
+ {
+ "epoch": 2.71,
+ "learning_rate": 1.8750000000000002e-05,
+ "loss": 1.4504,
+ "step": 957
+ },
+ {
+ "epoch": 2.72,
+ "learning_rate": 1.856060606060606e-05,
+ "loss": 1.4295,
+ "step": 958
+ },
+ {
+ "epoch": 2.72,
+ "learning_rate": 1.8371212121212123e-05,
+ "loss": 1.3945,
+ "step": 959
+ },
+ {
+ "epoch": 2.72,
+ "learning_rate": 1.8181818181818182e-05,
+ "loss": 1.4175,
+ "step": 960
+ },
+ {
+ "epoch": 2.73,
+ "learning_rate": 1.7992424242424244e-05,
+ "loss": 1.4521,
+ "step": 961
+ },
+ {
+ "epoch": 2.73,
+ "learning_rate": 1.7803030303030303e-05,
+ "loss": 1.4259,
+ "step": 962
+ },
+ {
+ "epoch": 2.73,
+ "learning_rate": 1.7613636363636366e-05,
+ "loss": 1.3455,
+ "step": 963
+ },
+ {
+ "epoch": 2.73,
+ "learning_rate": 1.7424242424242425e-05,
+ "loss": 1.4303,
+ "step": 964
+ },
+ {
+ "epoch": 2.74,
+ "learning_rate": 1.7234848484848487e-05,
+ "loss": 1.4553,
+ "step": 965
+ },
+ {
+ "epoch": 2.74,
+ "learning_rate": 1.7045454545454546e-05,
+ "loss": 1.4166,
+ "step": 966
+ },
+ {
+ "epoch": 2.74,
+ "learning_rate": 1.6856060606060605e-05,
+ "loss": 1.4248,
+ "step": 967
+ },
+ {
+ "epoch": 2.75,
+ "learning_rate": 1.6666666666666667e-05,
+ "loss": 1.4213,
+ "step": 968
+ },
+ {
+ "epoch": 2.75,
+ "learning_rate": 1.6477272727272726e-05,
+ "loss": 1.4287,
+ "step": 969
+ },
+ {
+ "epoch": 2.75,
+ "learning_rate": 1.628787878787879e-05,
+ "loss": 1.4804,
+ "step": 970
+ },
+ {
+ "epoch": 2.75,
+ "learning_rate": 1.6098484848484848e-05,
+ "loss": 1.3863,
+ "step": 971
+ },
+ {
+ "epoch": 2.76,
+ "learning_rate": 1.590909090909091e-05,
+ "loss": 1.4326,
+ "step": 972
+ },
+ {
+ "epoch": 2.76,
+ "learning_rate": 1.571969696969697e-05,
+ "loss": 1.3857,
+ "step": 973
+ },
+ {
+ "epoch": 2.76,
+ "learning_rate": 1.553030303030303e-05,
+ "loss": 1.3745,
+ "step": 974
+ },
+ {
+ "epoch": 2.77,
+ "learning_rate": 1.534090909090909e-05,
+ "loss": 1.4398,
+ "step": 975
+ },
+ {
+ "epoch": 2.77,
+ "learning_rate": 1.5151515151515153e-05,
+ "loss": 1.4498,
+ "step": 976
+ },
+ {
+ "epoch": 2.77,
+ "learning_rate": 1.4962121212121214e-05,
+ "loss": 1.4087,
+ "step": 977
+ },
+ {
+ "epoch": 2.77,
+ "learning_rate": 1.4772727272727274e-05,
+ "loss": 1.4844,
+ "step": 978
+ },
+ {
+ "epoch": 2.78,
+ "learning_rate": 1.4583333333333335e-05,
+ "loss": 1.4476,
+ "step": 979
+ },
+ {
+ "epoch": 2.78,
+ "learning_rate": 1.4393939393939396e-05,
+ "loss": 1.407,
+ "step": 980
+ },
+ {
+ "epoch": 2.78,
+ "learning_rate": 1.4204545454545456e-05,
+ "loss": 1.4173,
+ "step": 981
+ },
+ {
+ "epoch": 2.79,
+ "learning_rate": 1.4015151515151515e-05,
+ "loss": 1.4286,
+ "step": 982
+ },
+ {
+ "epoch": 2.79,
+ "learning_rate": 1.3825757575757576e-05,
+ "loss": 1.3656,
+ "step": 983
+ },
+ {
+ "epoch": 2.79,
+ "learning_rate": 1.3636363636363637e-05,
+ "loss": 1.3885,
+ "step": 984
+ },
+ {
+ "epoch": 2.79,
+ "learning_rate": 1.3446969696969697e-05,
+ "loss": 1.4205,
+ "step": 985
+ },
+ {
+ "epoch": 2.8,
+ "learning_rate": 1.3257575757575758e-05,
+ "loss": 1.395,
+ "step": 986
+ },
+ {
+ "epoch": 2.8,
+ "learning_rate": 1.3068181818181819e-05,
+ "loss": 1.4598,
+ "step": 987
+ },
+ {
+ "epoch": 2.8,
+ "learning_rate": 1.287878787878788e-05,
+ "loss": 1.4488,
+ "step": 988
+ },
+ {
+ "epoch": 2.81,
+ "learning_rate": 1.268939393939394e-05,
+ "loss": 1.4015,
+ "step": 989
+ },
+ {
+ "epoch": 2.81,
+ "learning_rate": 1.25e-05,
+ "loss": 1.4647,
+ "step": 990
+ },
+ {
+ "epoch": 2.81,
+ "learning_rate": 1.2310606060606061e-05,
+ "loss": 1.4058,
+ "step": 991
+ },
+ {
+ "epoch": 2.81,
+ "learning_rate": 1.2121212121212122e-05,
+ "loss": 1.3689,
+ "step": 992
+ },
+ {
+ "epoch": 2.82,
+ "learning_rate": 1.1931818181818183e-05,
+ "loss": 1.4467,
+ "step": 993
+ },
+ {
+ "epoch": 2.82,
+ "learning_rate": 1.1742424242424243e-05,
+ "loss": 1.4298,
+ "step": 994
+ },
+ {
+ "epoch": 2.82,
+ "learning_rate": 1.1553030303030302e-05,
+ "loss": 1.4265,
+ "step": 995
+ },
+ {
+ "epoch": 2.83,
+ "learning_rate": 1.1363636363636365e-05,
+ "loss": 1.4099,
+ "step": 996
+ },
+ {
+ "epoch": 2.83,
+ "learning_rate": 1.1174242424242425e-05,
+ "loss": 1.4161,
+ "step": 997
+ },
+ {
+ "epoch": 2.83,
+ "learning_rate": 1.0984848484848486e-05,
+ "loss": 1.4087,
+ "step": 998
+ },
+ {
+ "epoch": 2.83,
+ "learning_rate": 1.0795454545454547e-05,
+ "loss": 1.4275,
+ "step": 999
+ },
+ {
+ "epoch": 2.84,
+ "learning_rate": 1.0606060606060607e-05,
+ "loss": 1.3889,
+ "step": 1000
+ }
+ ],
+ "logging_steps": 1,
+ "max_steps": 1056,
+ "num_train_epochs": 3,
+ "save_steps": 100,
+ "total_flos": 2.5331819242832794e+18,
+ "trial_name": null,
+ "trial_params": null
+}
diff --git a/checkpoint-1000/training_args.bin b/checkpoint-1000/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..574132c086f9a526d71493b1ec4c09396eac5482
--- /dev/null
+++ b/checkpoint-1000/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:50303c7f1586feb001f01b7e968e567295f501ef6c6407e28250c458696a43af
+size 4155
diff --git a/checkpoint-200/README.md b/checkpoint-200/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..08371015f02382e6fcba318f4aaea54ae52cd3c4
--- /dev/null
+++ b/checkpoint-200/README.md
@@ -0,0 +1,34 @@
+---
+library_name: peft
+---
+## Training procedure
+
+
+The following `bitsandbytes` quantization config was used during training:
+- quant_method: bitsandbytes
+- load_in_8bit: True
+- load_in_4bit: False
+- llm_int8_threshold: 6.0
+- llm_int8_skip_modules: None
+- llm_int8_enable_fp32_cpu_offload: False
+- llm_int8_has_fp16_weight: False
+- bnb_4bit_quant_type: fp4
+- bnb_4bit_use_double_quant: False
+- bnb_4bit_compute_dtype: float32
+
+The following `bitsandbytes` quantization config was used during training:
+- quant_method: bitsandbytes
+- load_in_8bit: True
+- load_in_4bit: False
+- llm_int8_threshold: 6.0
+- llm_int8_skip_modules: None
+- llm_int8_enable_fp32_cpu_offload: False
+- llm_int8_has_fp16_weight: False
+- bnb_4bit_quant_type: fp4
+- bnb_4bit_use_double_quant: False
+- bnb_4bit_compute_dtype: float32
+### Framework versions
+
+- PEFT 0.6.0.dev0
+
+- PEFT 0.6.0.dev0
diff --git a/checkpoint-200/adapter_config.json b/checkpoint-200/adapter_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..751d838ac0c1ae5ca71ca448b25d7a8a0173f01b
--- /dev/null
+++ b/checkpoint-200/adapter_config.json
@@ -0,0 +1,23 @@
+{
+ "auto_mapping": null,
+ "base_model_name_or_path": "bigscience/bloomz-3b",
+ "bias": "none",
+ "fan_in_fan_out": false,
+ "inference_mode": true,
+ "init_lora_weights": true,
+ "layers_pattern": null,
+ "layers_to_transform": null,
+ "lora_alpha": 16,
+ "lora_dropout": 0.0,
+ "modules_to_save": null,
+ "peft_type": "LORA",
+ "r": 8,
+ "revision": null,
+ "target_modules": [
+ "dense_4h_to_h",
+ "dense",
+ "dense_h_to_4h",
+ "query_key_value"
+ ],
+ "task_type": "CAUSAL_LM"
+}
\ No newline at end of file
diff --git a/checkpoint-200/adapter_model.bin b/checkpoint-200/adapter_model.bin
new file mode 100644
index 0000000000000000000000000000000000000000..7988fe960dc88d5b5a4a5c12881b117738f8e950
--- /dev/null
+++ b/checkpoint-200/adapter_model.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:af970fa4c3a5656856f672174ec36ac064378101c5eb1d725a132c5f169062a0
+size 39409357
diff --git a/checkpoint-200/optimizer.pt b/checkpoint-200/optimizer.pt
new file mode 100644
index 0000000000000000000000000000000000000000..652f07b4db67439c064b5b5636f42acf0ff1559e
--- /dev/null
+++ b/checkpoint-200/optimizer.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:da756f223a08e8635e8fe5fae5ef9d2173d86392bb4169cb415818a13d98abd2
+size 78844421
diff --git a/checkpoint-200/rng_state.pth b/checkpoint-200/rng_state.pth
new file mode 100644
index 0000000000000000000000000000000000000000..e8a62beafdd6e61c41abbc54c8b7428f4e3484c0
--- /dev/null
+++ b/checkpoint-200/rng_state.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b2278a87cdf86c3f9219223c847f6b27f6b7f15b8226b617f38936e8ff2cbcde
+size 14575
diff --git a/checkpoint-200/scheduler.pt b/checkpoint-200/scheduler.pt
new file mode 100644
index 0000000000000000000000000000000000000000..b7d27b087a6f7fa54c65b34e6dfb7113b6964667
--- /dev/null
+++ b/checkpoint-200/scheduler.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:84eda105b04e1ff8992a03ec3c10699744eae4e45155e29019b4fa919b2d986a
+size 627
diff --git a/checkpoint-200/special_tokens_map.json b/checkpoint-200/special_tokens_map.json
new file mode 100644
index 0000000000000000000000000000000000000000..fdafe480f024ff444c7492147536765ce5d55a2d
--- /dev/null
+++ b/checkpoint-200/special_tokens_map.json
@@ -0,0 +1,6 @@
+{
+ "bos_token": "",
+ "eos_token": "",
+ "pad_token": "",
+ "unk_token": ""
+}
diff --git a/checkpoint-200/tokenizer.json b/checkpoint-200/tokenizer.json
new file mode 100644
index 0000000000000000000000000000000000000000..673c31abdeadf6576c3c754df86459e1ad64e207
--- /dev/null
+++ b/checkpoint-200/tokenizer.json
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:85b00d7db4df5df2e3f01cacc3feda246002a672f3356eec7f4b04a22eb0dfbe
+size 14500570
diff --git a/checkpoint-200/tokenizer_config.json b/checkpoint-200/tokenizer_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..4b56cc9c2965c07132c35df3e2972e93d98c82c3
--- /dev/null
+++ b/checkpoint-200/tokenizer_config.json
@@ -0,0 +1,10 @@
+{
+ "add_prefix_space": false,
+ "bos_token": "",
+ "clean_up_tokenization_spaces": false,
+ "eos_token": "",
+ "model_max_length": 1000000000000000019884624838656,
+ "pad_token": "",
+ "tokenizer_class": "BloomTokenizer",
+ "unk_token": ""
+}
diff --git a/checkpoint-200/trainer_state.json b/checkpoint-200/trainer_state.json
new file mode 100644
index 0000000000000000000000000000000000000000..e21d32a2dbbd519f912d236b14e10efd394a748a
--- /dev/null
+++ b/checkpoint-200/trainer_state.json
@@ -0,0 +1,1219 @@
+{
+ "best_metric": null,
+ "best_model_checkpoint": null,
+ "epoch": 0.5673758865248227,
+ "eval_steps": 500,
+ "global_step": 200,
+ "is_hyper_param_search": false,
+ "is_local_process_zero": true,
+ "is_world_process_zero": true,
+ "log_history": [
+ {
+ "epoch": 0.0,
+ "learning_rate": 0.00019981060606060605,
+ "loss": 2.9206,
+ "step": 1
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 0.00019962121212121212,
+ "loss": 2.7609,
+ "step": 2
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 0.0001994318181818182,
+ "loss": 2.6878,
+ "step": 3
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 0.00019924242424242426,
+ "loss": 2.6697,
+ "step": 4
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 0.0001990530303030303,
+ "loss": 2.5818,
+ "step": 5
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 0.00019886363636363637,
+ "loss": 2.5396,
+ "step": 6
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 0.00019867424242424244,
+ "loss": 2.5265,
+ "step": 7
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 0.0001984848484848485,
+ "loss": 2.5475,
+ "step": 8
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 0.00019829545454545455,
+ "loss": 2.4835,
+ "step": 9
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 0.0001981060606060606,
+ "loss": 2.4559,
+ "step": 10
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 0.0001979166666666667,
+ "loss": 2.4511,
+ "step": 11
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 0.00019772727272727273,
+ "loss": 2.4592,
+ "step": 12
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 0.0001975378787878788,
+ "loss": 2.4495,
+ "step": 13
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 0.00019734848484848484,
+ "loss": 2.4714,
+ "step": 14
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 0.00019715909090909094,
+ "loss": 2.4302,
+ "step": 15
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 0.00019696969696969698,
+ "loss": 2.4097,
+ "step": 16
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 0.00019678030303030305,
+ "loss": 2.4523,
+ "step": 17
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 0.0001965909090909091,
+ "loss": 2.4325,
+ "step": 18
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 0.00019640151515151516,
+ "loss": 2.4125,
+ "step": 19
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 0.00019621212121212123,
+ "loss": 2.4329,
+ "step": 20
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 0.00019602272727272727,
+ "loss": 2.3471,
+ "step": 21
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 0.00019583333333333334,
+ "loss": 2.3012,
+ "step": 22
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 0.0001956439393939394,
+ "loss": 2.3869,
+ "step": 23
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 0.00019545454545454548,
+ "loss": 2.3822,
+ "step": 24
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 0.00019526515151515152,
+ "loss": 2.3427,
+ "step": 25
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 0.0001950757575757576,
+ "loss": 2.3659,
+ "step": 26
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 0.00019488636363636366,
+ "loss": 2.3826,
+ "step": 27
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 0.0001946969696969697,
+ "loss": 2.3532,
+ "step": 28
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 0.00019450757575757577,
+ "loss": 2.3828,
+ "step": 29
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 0.0001943181818181818,
+ "loss": 2.3133,
+ "step": 30
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 0.0001941287878787879,
+ "loss": 2.3613,
+ "step": 31
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 0.00019393939393939395,
+ "loss": 2.3867,
+ "step": 32
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 0.00019375000000000002,
+ "loss": 2.2966,
+ "step": 33
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 0.00019356060606060606,
+ "loss": 2.3436,
+ "step": 34
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 0.00019337121212121213,
+ "loss": 2.3425,
+ "step": 35
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 0.0001931818181818182,
+ "loss": 2.307,
+ "step": 36
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 0.00019299242424242424,
+ "loss": 2.3521,
+ "step": 37
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 0.0001928030303030303,
+ "loss": 2.3302,
+ "step": 38
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 0.00019261363636363635,
+ "loss": 2.312,
+ "step": 39
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 0.00019242424242424245,
+ "loss": 2.3655,
+ "step": 40
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 0.0001922348484848485,
+ "loss": 2.344,
+ "step": 41
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 0.00019204545454545456,
+ "loss": 2.3373,
+ "step": 42
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 0.0001918560606060606,
+ "loss": 2.3331,
+ "step": 43
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 0.00019166666666666667,
+ "loss": 2.3376,
+ "step": 44
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 0.00019147727272727274,
+ "loss": 2.3369,
+ "step": 45
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 0.00019128787878787878,
+ "loss": 2.3413,
+ "step": 46
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 0.00019109848484848485,
+ "loss": 2.3212,
+ "step": 47
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 0.00019090909090909092,
+ "loss": 2.307,
+ "step": 48
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 0.000190719696969697,
+ "loss": 2.2929,
+ "step": 49
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 0.00019053030303030303,
+ "loss": 2.2873,
+ "step": 50
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 0.0001903409090909091,
+ "loss": 2.3098,
+ "step": 51
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 0.00019015151515151517,
+ "loss": 2.3129,
+ "step": 52
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 0.0001899621212121212,
+ "loss": 2.3038,
+ "step": 53
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 0.00018977272727272728,
+ "loss": 2.286,
+ "step": 54
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 0.00018958333333333332,
+ "loss": 2.3388,
+ "step": 55
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 0.00018939393939393942,
+ "loss": 2.3193,
+ "step": 56
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 0.00018920454545454546,
+ "loss": 2.3136,
+ "step": 57
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 0.00018901515151515153,
+ "loss": 2.3141,
+ "step": 58
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 0.00018882575757575757,
+ "loss": 2.3646,
+ "step": 59
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 0.00018863636363636364,
+ "loss": 2.3318,
+ "step": 60
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 0.0001884469696969697,
+ "loss": 2.2977,
+ "step": 61
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 0.00018825757575757575,
+ "loss": 2.2764,
+ "step": 62
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 0.00018806818181818182,
+ "loss": 2.3095,
+ "step": 63
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 0.0001878787878787879,
+ "loss": 2.252,
+ "step": 64
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 0.00018768939393939396,
+ "loss": 2.2786,
+ "step": 65
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 0.0001875,
+ "loss": 2.2789,
+ "step": 66
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 0.00018731060606060607,
+ "loss": 2.2841,
+ "step": 67
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 0.00018712121212121212,
+ "loss": 2.3436,
+ "step": 68
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 0.00018693181818181818,
+ "loss": 2.2956,
+ "step": 69
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 0.00018674242424242425,
+ "loss": 2.2353,
+ "step": 70
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 0.0001865530303030303,
+ "loss": 2.2772,
+ "step": 71
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 0.00018636363636363636,
+ "loss": 2.2496,
+ "step": 72
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 0.00018617424242424243,
+ "loss": 2.2477,
+ "step": 73
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 0.0001859848484848485,
+ "loss": 2.2791,
+ "step": 74
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 0.00018579545454545454,
+ "loss": 2.2799,
+ "step": 75
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 0.00018560606060606061,
+ "loss": 2.3132,
+ "step": 76
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 0.00018541666666666668,
+ "loss": 2.2542,
+ "step": 77
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 0.00018522727272727273,
+ "loss": 2.2609,
+ "step": 78
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 0.0001850378787878788,
+ "loss": 2.2819,
+ "step": 79
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 0.00018484848484848484,
+ "loss": 2.2844,
+ "step": 80
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 0.00018465909090909093,
+ "loss": 2.2542,
+ "step": 81
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 0.00018446969696969697,
+ "loss": 2.2603,
+ "step": 82
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 0.00018428030303030304,
+ "loss": 2.2832,
+ "step": 83
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 0.00018409090909090909,
+ "loss": 2.2869,
+ "step": 84
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 0.00018390151515151518,
+ "loss": 2.2646,
+ "step": 85
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 0.00018371212121212122,
+ "loss": 2.2698,
+ "step": 86
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 0.00018352272727272727,
+ "loss": 2.2757,
+ "step": 87
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 0.00018333333333333334,
+ "loss": 2.2544,
+ "step": 88
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 0.0001831439393939394,
+ "loss": 2.2678,
+ "step": 89
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 0.00018295454545454547,
+ "loss": 2.2778,
+ "step": 90
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 0.00018276515151515152,
+ "loss": 2.2027,
+ "step": 91
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 0.00018257575757575758,
+ "loss": 2.2167,
+ "step": 92
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 0.00018238636363636365,
+ "loss": 2.2602,
+ "step": 93
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 0.00018219696969696972,
+ "loss": 2.2736,
+ "step": 94
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 0.00018200757575757577,
+ "loss": 2.2443,
+ "step": 95
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 0.00018181818181818183,
+ "loss": 2.2299,
+ "step": 96
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 0.0001816287878787879,
+ "loss": 2.2644,
+ "step": 97
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 0.00018143939393939395,
+ "loss": 2.259,
+ "step": 98
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 0.00018125000000000001,
+ "loss": 2.2567,
+ "step": 99
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 0.00018106060606060606,
+ "loss": 2.2599,
+ "step": 100
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 0.00018087121212121213,
+ "loss": 2.2091,
+ "step": 101
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 0.0001806818181818182,
+ "loss": 2.2312,
+ "step": 102
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 0.00018049242424242426,
+ "loss": 2.1869,
+ "step": 103
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 0.0001803030303030303,
+ "loss": 2.2023,
+ "step": 104
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 0.00018011363636363638,
+ "loss": 2.2132,
+ "step": 105
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 0.00017992424242424244,
+ "loss": 2.2612,
+ "step": 106
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 0.0001797348484848485,
+ "loss": 2.2109,
+ "step": 107
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 0.00017954545454545456,
+ "loss": 2.215,
+ "step": 108
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 0.0001793560606060606,
+ "loss": 2.2114,
+ "step": 109
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 0.0001791666666666667,
+ "loss": 2.2203,
+ "step": 110
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 0.00017897727272727274,
+ "loss": 2.2594,
+ "step": 111
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 0.0001787878787878788,
+ "loss": 2.2001,
+ "step": 112
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 0.00017859848484848485,
+ "loss": 2.2046,
+ "step": 113
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 0.00017840909090909092,
+ "loss": 2.1907,
+ "step": 114
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 0.00017821969696969699,
+ "loss": 2.2539,
+ "step": 115
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 0.00017803030303030303,
+ "loss": 2.2335,
+ "step": 116
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 0.0001778409090909091,
+ "loss": 2.2171,
+ "step": 117
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 0.00017765151515151517,
+ "loss": 2.2278,
+ "step": 118
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 0.00017746212121212123,
+ "loss": 2.231,
+ "step": 119
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 0.00017727272727272728,
+ "loss": 2.2141,
+ "step": 120
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 0.00017708333333333335,
+ "loss": 2.2432,
+ "step": 121
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 0.00017689393939393942,
+ "loss": 2.2266,
+ "step": 122
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 0.00017670454545454546,
+ "loss": 2.1929,
+ "step": 123
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 0.00017651515151515153,
+ "loss": 2.2077,
+ "step": 124
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 0.00017632575757575757,
+ "loss": 2.2133,
+ "step": 125
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 0.00017613636363636366,
+ "loss": 2.2251,
+ "step": 126
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 0.0001759469696969697,
+ "loss": 2.2265,
+ "step": 127
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 0.00017575757575757578,
+ "loss": 2.2186,
+ "step": 128
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 0.00017556818181818182,
+ "loss": 2.1925,
+ "step": 129
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 0.0001753787878787879,
+ "loss": 2.1956,
+ "step": 130
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 0.00017518939393939396,
+ "loss": 2.2459,
+ "step": 131
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 0.000175,
+ "loss": 2.22,
+ "step": 132
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 0.00017481060606060607,
+ "loss": 2.2143,
+ "step": 133
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 0.0001746212121212121,
+ "loss": 2.2359,
+ "step": 134
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 0.0001744318181818182,
+ "loss": 2.2058,
+ "step": 135
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 0.00017424242424242425,
+ "loss": 2.2307,
+ "step": 136
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 0.00017405303030303032,
+ "loss": 2.2062,
+ "step": 137
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 0.00017386363636363636,
+ "loss": 2.1796,
+ "step": 138
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 0.00017367424242424243,
+ "loss": 2.2054,
+ "step": 139
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 0.0001734848484848485,
+ "loss": 2.1651,
+ "step": 140
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 0.00017329545454545454,
+ "loss": 2.2159,
+ "step": 141
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 0.0001731060606060606,
+ "loss": 2.1988,
+ "step": 142
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 0.00017291666666666668,
+ "loss": 2.1676,
+ "step": 143
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 0.00017272727272727275,
+ "loss": 2.1725,
+ "step": 144
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 0.0001725378787878788,
+ "loss": 2.2205,
+ "step": 145
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 0.00017234848484848486,
+ "loss": 2.1486,
+ "step": 146
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 0.00017215909090909093,
+ "loss": 2.147,
+ "step": 147
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 0.00017196969696969697,
+ "loss": 2.1651,
+ "step": 148
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 0.00017178030303030304,
+ "loss": 2.1983,
+ "step": 149
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 0.00017159090909090908,
+ "loss": 2.1778,
+ "step": 150
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 0.00017140151515151518,
+ "loss": 2.1631,
+ "step": 151
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 0.00017121212121212122,
+ "loss": 2.1442,
+ "step": 152
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 0.0001710227272727273,
+ "loss": 2.1397,
+ "step": 153
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 0.00017083333333333333,
+ "loss": 2.1697,
+ "step": 154
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 0.0001706439393939394,
+ "loss": 2.1451,
+ "step": 155
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 0.00017045454545454547,
+ "loss": 2.1789,
+ "step": 156
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 0.0001702651515151515,
+ "loss": 2.1037,
+ "step": 157
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 0.00017007575757575758,
+ "loss": 2.1698,
+ "step": 158
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 0.00016988636363636365,
+ "loss": 2.1538,
+ "step": 159
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 0.00016969696969696972,
+ "loss": 2.2015,
+ "step": 160
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 0.00016950757575757576,
+ "loss": 2.179,
+ "step": 161
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 0.00016931818181818183,
+ "loss": 2.1766,
+ "step": 162
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 0.0001691287878787879,
+ "loss": 2.1646,
+ "step": 163
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 0.00016893939393939394,
+ "loss": 2.1694,
+ "step": 164
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 0.00016875,
+ "loss": 2.1562,
+ "step": 165
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 0.00016856060606060605,
+ "loss": 2.1551,
+ "step": 166
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 0.00016837121212121212,
+ "loss": 2.1652,
+ "step": 167
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 0.0001681818181818182,
+ "loss": 2.1594,
+ "step": 168
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 0.00016799242424242426,
+ "loss": 2.1674,
+ "step": 169
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 0.0001678030303030303,
+ "loss": 2.1378,
+ "step": 170
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 0.00016761363636363637,
+ "loss": 2.1447,
+ "step": 171
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 0.00016742424242424244,
+ "loss": 2.1451,
+ "step": 172
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 0.00016723484848484848,
+ "loss": 2.1336,
+ "step": 173
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 0.00016704545454545455,
+ "loss": 2.1231,
+ "step": 174
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 0.0001668560606060606,
+ "loss": 2.1143,
+ "step": 175
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 0.0001666666666666667,
+ "loss": 2.1316,
+ "step": 176
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 0.00016647727272727273,
+ "loss": 2.1281,
+ "step": 177
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 0.0001662878787878788,
+ "loss": 2.136,
+ "step": 178
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 0.00016609848484848484,
+ "loss": 2.1279,
+ "step": 179
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 0.00016590909090909094,
+ "loss": 2.1421,
+ "step": 180
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 0.00016571969696969698,
+ "loss": 2.1541,
+ "step": 181
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 0.00016553030303030305,
+ "loss": 2.1293,
+ "step": 182
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 0.0001653409090909091,
+ "loss": 2.1294,
+ "step": 183
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 0.00016515151515151516,
+ "loss": 2.1459,
+ "step": 184
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 0.00016496212121212123,
+ "loss": 2.1113,
+ "step": 185
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 0.00016477272727272727,
+ "loss": 2.1394,
+ "step": 186
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 0.00016458333333333334,
+ "loss": 2.1321,
+ "step": 187
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 0.0001643939393939394,
+ "loss": 2.148,
+ "step": 188
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 0.00016420454545454548,
+ "loss": 2.1631,
+ "step": 189
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 0.00016401515151515152,
+ "loss": 2.1276,
+ "step": 190
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 0.0001638257575757576,
+ "loss": 2.0706,
+ "step": 191
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 0.00016363636363636366,
+ "loss": 2.127,
+ "step": 192
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 0.0001634469696969697,
+ "loss": 2.1449,
+ "step": 193
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 0.00016325757575757577,
+ "loss": 2.1204,
+ "step": 194
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 0.0001630681818181818,
+ "loss": 2.0904,
+ "step": 195
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 0.0001628787878787879,
+ "loss": 2.1129,
+ "step": 196
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 0.00016268939393939395,
+ "loss": 2.1036,
+ "step": 197
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 0.00016250000000000002,
+ "loss": 2.1509,
+ "step": 198
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 0.00016231060606060606,
+ "loss": 2.1239,
+ "step": 199
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 0.00016212121212121213,
+ "loss": 2.145,
+ "step": 200
+ }
+ ],
+ "logging_steps": 1,
+ "max_steps": 1056,
+ "num_train_epochs": 3,
+ "save_steps": 100,
+ "total_flos": 5.0683926177970176e+17,
+ "trial_name": null,
+ "trial_params": null
+}
diff --git a/checkpoint-200/training_args.bin b/checkpoint-200/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..574132c086f9a526d71493b1ec4c09396eac5482
--- /dev/null
+++ b/checkpoint-200/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:50303c7f1586feb001f01b7e968e567295f501ef6c6407e28250c458696a43af
+size 4155
diff --git a/checkpoint-300/README.md b/checkpoint-300/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..08371015f02382e6fcba318f4aaea54ae52cd3c4
--- /dev/null
+++ b/checkpoint-300/README.md
@@ -0,0 +1,34 @@
+---
+library_name: peft
+---
+## Training procedure
+
+
+The following `bitsandbytes` quantization config was used during training:
+- quant_method: bitsandbytes
+- load_in_8bit: True
+- load_in_4bit: False
+- llm_int8_threshold: 6.0
+- llm_int8_skip_modules: None
+- llm_int8_enable_fp32_cpu_offload: False
+- llm_int8_has_fp16_weight: False
+- bnb_4bit_quant_type: fp4
+- bnb_4bit_use_double_quant: False
+- bnb_4bit_compute_dtype: float32
+
+The following `bitsandbytes` quantization config was used during training:
+- quant_method: bitsandbytes
+- load_in_8bit: True
+- load_in_4bit: False
+- llm_int8_threshold: 6.0
+- llm_int8_skip_modules: None
+- llm_int8_enable_fp32_cpu_offload: False
+- llm_int8_has_fp16_weight: False
+- bnb_4bit_quant_type: fp4
+- bnb_4bit_use_double_quant: False
+- bnb_4bit_compute_dtype: float32
+### Framework versions
+
+- PEFT 0.6.0.dev0
+
+- PEFT 0.6.0.dev0
diff --git a/checkpoint-300/adapter_config.json b/checkpoint-300/adapter_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..751d838ac0c1ae5ca71ca448b25d7a8a0173f01b
--- /dev/null
+++ b/checkpoint-300/adapter_config.json
@@ -0,0 +1,23 @@
+{
+ "auto_mapping": null,
+ "base_model_name_or_path": "bigscience/bloomz-3b",
+ "bias": "none",
+ "fan_in_fan_out": false,
+ "inference_mode": true,
+ "init_lora_weights": true,
+ "layers_pattern": null,
+ "layers_to_transform": null,
+ "lora_alpha": 16,
+ "lora_dropout": 0.0,
+ "modules_to_save": null,
+ "peft_type": "LORA",
+ "r": 8,
+ "revision": null,
+ "target_modules": [
+ "dense_4h_to_h",
+ "dense",
+ "dense_h_to_4h",
+ "query_key_value"
+ ],
+ "task_type": "CAUSAL_LM"
+}
\ No newline at end of file
diff --git a/checkpoint-300/adapter_model.bin b/checkpoint-300/adapter_model.bin
new file mode 100644
index 0000000000000000000000000000000000000000..e0ab92414039bbef3ba189d8e0579d808f3d1718
--- /dev/null
+++ b/checkpoint-300/adapter_model.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d73a6061995dfd3470fb05f76f5e41d5f8e33b867534c494972efadc2689f043
+size 39409357
diff --git a/checkpoint-300/optimizer.pt b/checkpoint-300/optimizer.pt
new file mode 100644
index 0000000000000000000000000000000000000000..22e4a42740f86c60cc8c17a7f9cc977473d99d23
--- /dev/null
+++ b/checkpoint-300/optimizer.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e907c311aa285e9bae5575de3a9db0ba37e8290ea4e2ddeb374df9aed022a507
+size 78844421
diff --git a/checkpoint-300/rng_state.pth b/checkpoint-300/rng_state.pth
new file mode 100644
index 0000000000000000000000000000000000000000..e8a62beafdd6e61c41abbc54c8b7428f4e3484c0
--- /dev/null
+++ b/checkpoint-300/rng_state.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b2278a87cdf86c3f9219223c847f6b27f6b7f15b8226b617f38936e8ff2cbcde
+size 14575
diff --git a/checkpoint-300/scheduler.pt b/checkpoint-300/scheduler.pt
new file mode 100644
index 0000000000000000000000000000000000000000..f8672764f0c9487e97b66b7834c9d0928c60e651
--- /dev/null
+++ b/checkpoint-300/scheduler.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e94d9046da30e8144679cc9c489e77f8d73da1feed453b2a636a27ad2550580f
+size 627
diff --git a/checkpoint-300/special_tokens_map.json b/checkpoint-300/special_tokens_map.json
new file mode 100644
index 0000000000000000000000000000000000000000..fdafe480f024ff444c7492147536765ce5d55a2d
--- /dev/null
+++ b/checkpoint-300/special_tokens_map.json
@@ -0,0 +1,6 @@
+{
+ "bos_token": "",
+ "eos_token": "",
+ "pad_token": "",
+ "unk_token": ""
+}
diff --git a/checkpoint-300/tokenizer.json b/checkpoint-300/tokenizer.json
new file mode 100644
index 0000000000000000000000000000000000000000..673c31abdeadf6576c3c754df86459e1ad64e207
--- /dev/null
+++ b/checkpoint-300/tokenizer.json
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:85b00d7db4df5df2e3f01cacc3feda246002a672f3356eec7f4b04a22eb0dfbe
+size 14500570
diff --git a/checkpoint-300/tokenizer_config.json b/checkpoint-300/tokenizer_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..4b56cc9c2965c07132c35df3e2972e93d98c82c3
--- /dev/null
+++ b/checkpoint-300/tokenizer_config.json
@@ -0,0 +1,10 @@
+{
+ "add_prefix_space": false,
+ "bos_token": "",
+ "clean_up_tokenization_spaces": false,
+ "eos_token": "",
+ "model_max_length": 1000000000000000019884624838656,
+ "pad_token": "",
+ "tokenizer_class": "BloomTokenizer",
+ "unk_token": ""
+}
diff --git a/checkpoint-300/trainer_state.json b/checkpoint-300/trainer_state.json
new file mode 100644
index 0000000000000000000000000000000000000000..58c2803c12156895c8e983f2f1714eafb445da9e
--- /dev/null
+++ b/checkpoint-300/trainer_state.json
@@ -0,0 +1,1819 @@
+{
+ "best_metric": null,
+ "best_model_checkpoint": null,
+ "epoch": 0.851063829787234,
+ "eval_steps": 500,
+ "global_step": 300,
+ "is_hyper_param_search": false,
+ "is_local_process_zero": true,
+ "is_world_process_zero": true,
+ "log_history": [
+ {
+ "epoch": 0.0,
+ "learning_rate": 0.00019981060606060605,
+ "loss": 2.9206,
+ "step": 1
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 0.00019962121212121212,
+ "loss": 2.7609,
+ "step": 2
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 0.0001994318181818182,
+ "loss": 2.6878,
+ "step": 3
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 0.00019924242424242426,
+ "loss": 2.6697,
+ "step": 4
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 0.0001990530303030303,
+ "loss": 2.5818,
+ "step": 5
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 0.00019886363636363637,
+ "loss": 2.5396,
+ "step": 6
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 0.00019867424242424244,
+ "loss": 2.5265,
+ "step": 7
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 0.0001984848484848485,
+ "loss": 2.5475,
+ "step": 8
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 0.00019829545454545455,
+ "loss": 2.4835,
+ "step": 9
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 0.0001981060606060606,
+ "loss": 2.4559,
+ "step": 10
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 0.0001979166666666667,
+ "loss": 2.4511,
+ "step": 11
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 0.00019772727272727273,
+ "loss": 2.4592,
+ "step": 12
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 0.0001975378787878788,
+ "loss": 2.4495,
+ "step": 13
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 0.00019734848484848484,
+ "loss": 2.4714,
+ "step": 14
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 0.00019715909090909094,
+ "loss": 2.4302,
+ "step": 15
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 0.00019696969696969698,
+ "loss": 2.4097,
+ "step": 16
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 0.00019678030303030305,
+ "loss": 2.4523,
+ "step": 17
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 0.0001965909090909091,
+ "loss": 2.4325,
+ "step": 18
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 0.00019640151515151516,
+ "loss": 2.4125,
+ "step": 19
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 0.00019621212121212123,
+ "loss": 2.4329,
+ "step": 20
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 0.00019602272727272727,
+ "loss": 2.3471,
+ "step": 21
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 0.00019583333333333334,
+ "loss": 2.3012,
+ "step": 22
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 0.0001956439393939394,
+ "loss": 2.3869,
+ "step": 23
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 0.00019545454545454548,
+ "loss": 2.3822,
+ "step": 24
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 0.00019526515151515152,
+ "loss": 2.3427,
+ "step": 25
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 0.0001950757575757576,
+ "loss": 2.3659,
+ "step": 26
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 0.00019488636363636366,
+ "loss": 2.3826,
+ "step": 27
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 0.0001946969696969697,
+ "loss": 2.3532,
+ "step": 28
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 0.00019450757575757577,
+ "loss": 2.3828,
+ "step": 29
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 0.0001943181818181818,
+ "loss": 2.3133,
+ "step": 30
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 0.0001941287878787879,
+ "loss": 2.3613,
+ "step": 31
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 0.00019393939393939395,
+ "loss": 2.3867,
+ "step": 32
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 0.00019375000000000002,
+ "loss": 2.2966,
+ "step": 33
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 0.00019356060606060606,
+ "loss": 2.3436,
+ "step": 34
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 0.00019337121212121213,
+ "loss": 2.3425,
+ "step": 35
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 0.0001931818181818182,
+ "loss": 2.307,
+ "step": 36
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 0.00019299242424242424,
+ "loss": 2.3521,
+ "step": 37
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 0.0001928030303030303,
+ "loss": 2.3302,
+ "step": 38
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 0.00019261363636363635,
+ "loss": 2.312,
+ "step": 39
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 0.00019242424242424245,
+ "loss": 2.3655,
+ "step": 40
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 0.0001922348484848485,
+ "loss": 2.344,
+ "step": 41
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 0.00019204545454545456,
+ "loss": 2.3373,
+ "step": 42
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 0.0001918560606060606,
+ "loss": 2.3331,
+ "step": 43
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 0.00019166666666666667,
+ "loss": 2.3376,
+ "step": 44
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 0.00019147727272727274,
+ "loss": 2.3369,
+ "step": 45
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 0.00019128787878787878,
+ "loss": 2.3413,
+ "step": 46
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 0.00019109848484848485,
+ "loss": 2.3212,
+ "step": 47
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 0.00019090909090909092,
+ "loss": 2.307,
+ "step": 48
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 0.000190719696969697,
+ "loss": 2.2929,
+ "step": 49
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 0.00019053030303030303,
+ "loss": 2.2873,
+ "step": 50
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 0.0001903409090909091,
+ "loss": 2.3098,
+ "step": 51
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 0.00019015151515151517,
+ "loss": 2.3129,
+ "step": 52
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 0.0001899621212121212,
+ "loss": 2.3038,
+ "step": 53
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 0.00018977272727272728,
+ "loss": 2.286,
+ "step": 54
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 0.00018958333333333332,
+ "loss": 2.3388,
+ "step": 55
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 0.00018939393939393942,
+ "loss": 2.3193,
+ "step": 56
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 0.00018920454545454546,
+ "loss": 2.3136,
+ "step": 57
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 0.00018901515151515153,
+ "loss": 2.3141,
+ "step": 58
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 0.00018882575757575757,
+ "loss": 2.3646,
+ "step": 59
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 0.00018863636363636364,
+ "loss": 2.3318,
+ "step": 60
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 0.0001884469696969697,
+ "loss": 2.2977,
+ "step": 61
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 0.00018825757575757575,
+ "loss": 2.2764,
+ "step": 62
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 0.00018806818181818182,
+ "loss": 2.3095,
+ "step": 63
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 0.0001878787878787879,
+ "loss": 2.252,
+ "step": 64
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 0.00018768939393939396,
+ "loss": 2.2786,
+ "step": 65
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 0.0001875,
+ "loss": 2.2789,
+ "step": 66
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 0.00018731060606060607,
+ "loss": 2.2841,
+ "step": 67
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 0.00018712121212121212,
+ "loss": 2.3436,
+ "step": 68
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 0.00018693181818181818,
+ "loss": 2.2956,
+ "step": 69
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 0.00018674242424242425,
+ "loss": 2.2353,
+ "step": 70
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 0.0001865530303030303,
+ "loss": 2.2772,
+ "step": 71
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 0.00018636363636363636,
+ "loss": 2.2496,
+ "step": 72
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 0.00018617424242424243,
+ "loss": 2.2477,
+ "step": 73
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 0.0001859848484848485,
+ "loss": 2.2791,
+ "step": 74
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 0.00018579545454545454,
+ "loss": 2.2799,
+ "step": 75
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 0.00018560606060606061,
+ "loss": 2.3132,
+ "step": 76
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 0.00018541666666666668,
+ "loss": 2.2542,
+ "step": 77
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 0.00018522727272727273,
+ "loss": 2.2609,
+ "step": 78
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 0.0001850378787878788,
+ "loss": 2.2819,
+ "step": 79
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 0.00018484848484848484,
+ "loss": 2.2844,
+ "step": 80
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 0.00018465909090909093,
+ "loss": 2.2542,
+ "step": 81
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 0.00018446969696969697,
+ "loss": 2.2603,
+ "step": 82
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 0.00018428030303030304,
+ "loss": 2.2832,
+ "step": 83
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 0.00018409090909090909,
+ "loss": 2.2869,
+ "step": 84
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 0.00018390151515151518,
+ "loss": 2.2646,
+ "step": 85
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 0.00018371212121212122,
+ "loss": 2.2698,
+ "step": 86
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 0.00018352272727272727,
+ "loss": 2.2757,
+ "step": 87
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 0.00018333333333333334,
+ "loss": 2.2544,
+ "step": 88
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 0.0001831439393939394,
+ "loss": 2.2678,
+ "step": 89
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 0.00018295454545454547,
+ "loss": 2.2778,
+ "step": 90
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 0.00018276515151515152,
+ "loss": 2.2027,
+ "step": 91
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 0.00018257575757575758,
+ "loss": 2.2167,
+ "step": 92
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 0.00018238636363636365,
+ "loss": 2.2602,
+ "step": 93
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 0.00018219696969696972,
+ "loss": 2.2736,
+ "step": 94
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 0.00018200757575757577,
+ "loss": 2.2443,
+ "step": 95
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 0.00018181818181818183,
+ "loss": 2.2299,
+ "step": 96
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 0.0001816287878787879,
+ "loss": 2.2644,
+ "step": 97
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 0.00018143939393939395,
+ "loss": 2.259,
+ "step": 98
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 0.00018125000000000001,
+ "loss": 2.2567,
+ "step": 99
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 0.00018106060606060606,
+ "loss": 2.2599,
+ "step": 100
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 0.00018087121212121213,
+ "loss": 2.2091,
+ "step": 101
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 0.0001806818181818182,
+ "loss": 2.2312,
+ "step": 102
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 0.00018049242424242426,
+ "loss": 2.1869,
+ "step": 103
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 0.0001803030303030303,
+ "loss": 2.2023,
+ "step": 104
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 0.00018011363636363638,
+ "loss": 2.2132,
+ "step": 105
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 0.00017992424242424244,
+ "loss": 2.2612,
+ "step": 106
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 0.0001797348484848485,
+ "loss": 2.2109,
+ "step": 107
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 0.00017954545454545456,
+ "loss": 2.215,
+ "step": 108
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 0.0001793560606060606,
+ "loss": 2.2114,
+ "step": 109
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 0.0001791666666666667,
+ "loss": 2.2203,
+ "step": 110
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 0.00017897727272727274,
+ "loss": 2.2594,
+ "step": 111
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 0.0001787878787878788,
+ "loss": 2.2001,
+ "step": 112
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 0.00017859848484848485,
+ "loss": 2.2046,
+ "step": 113
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 0.00017840909090909092,
+ "loss": 2.1907,
+ "step": 114
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 0.00017821969696969699,
+ "loss": 2.2539,
+ "step": 115
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 0.00017803030303030303,
+ "loss": 2.2335,
+ "step": 116
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 0.0001778409090909091,
+ "loss": 2.2171,
+ "step": 117
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 0.00017765151515151517,
+ "loss": 2.2278,
+ "step": 118
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 0.00017746212121212123,
+ "loss": 2.231,
+ "step": 119
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 0.00017727272727272728,
+ "loss": 2.2141,
+ "step": 120
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 0.00017708333333333335,
+ "loss": 2.2432,
+ "step": 121
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 0.00017689393939393942,
+ "loss": 2.2266,
+ "step": 122
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 0.00017670454545454546,
+ "loss": 2.1929,
+ "step": 123
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 0.00017651515151515153,
+ "loss": 2.2077,
+ "step": 124
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 0.00017632575757575757,
+ "loss": 2.2133,
+ "step": 125
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 0.00017613636363636366,
+ "loss": 2.2251,
+ "step": 126
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 0.0001759469696969697,
+ "loss": 2.2265,
+ "step": 127
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 0.00017575757575757578,
+ "loss": 2.2186,
+ "step": 128
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 0.00017556818181818182,
+ "loss": 2.1925,
+ "step": 129
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 0.0001753787878787879,
+ "loss": 2.1956,
+ "step": 130
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 0.00017518939393939396,
+ "loss": 2.2459,
+ "step": 131
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 0.000175,
+ "loss": 2.22,
+ "step": 132
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 0.00017481060606060607,
+ "loss": 2.2143,
+ "step": 133
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 0.0001746212121212121,
+ "loss": 2.2359,
+ "step": 134
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 0.0001744318181818182,
+ "loss": 2.2058,
+ "step": 135
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 0.00017424242424242425,
+ "loss": 2.2307,
+ "step": 136
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 0.00017405303030303032,
+ "loss": 2.2062,
+ "step": 137
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 0.00017386363636363636,
+ "loss": 2.1796,
+ "step": 138
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 0.00017367424242424243,
+ "loss": 2.2054,
+ "step": 139
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 0.0001734848484848485,
+ "loss": 2.1651,
+ "step": 140
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 0.00017329545454545454,
+ "loss": 2.2159,
+ "step": 141
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 0.0001731060606060606,
+ "loss": 2.1988,
+ "step": 142
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 0.00017291666666666668,
+ "loss": 2.1676,
+ "step": 143
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 0.00017272727272727275,
+ "loss": 2.1725,
+ "step": 144
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 0.0001725378787878788,
+ "loss": 2.2205,
+ "step": 145
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 0.00017234848484848486,
+ "loss": 2.1486,
+ "step": 146
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 0.00017215909090909093,
+ "loss": 2.147,
+ "step": 147
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 0.00017196969696969697,
+ "loss": 2.1651,
+ "step": 148
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 0.00017178030303030304,
+ "loss": 2.1983,
+ "step": 149
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 0.00017159090909090908,
+ "loss": 2.1778,
+ "step": 150
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 0.00017140151515151518,
+ "loss": 2.1631,
+ "step": 151
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 0.00017121212121212122,
+ "loss": 2.1442,
+ "step": 152
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 0.0001710227272727273,
+ "loss": 2.1397,
+ "step": 153
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 0.00017083333333333333,
+ "loss": 2.1697,
+ "step": 154
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 0.0001706439393939394,
+ "loss": 2.1451,
+ "step": 155
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 0.00017045454545454547,
+ "loss": 2.1789,
+ "step": 156
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 0.0001702651515151515,
+ "loss": 2.1037,
+ "step": 157
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 0.00017007575757575758,
+ "loss": 2.1698,
+ "step": 158
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 0.00016988636363636365,
+ "loss": 2.1538,
+ "step": 159
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 0.00016969696969696972,
+ "loss": 2.2015,
+ "step": 160
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 0.00016950757575757576,
+ "loss": 2.179,
+ "step": 161
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 0.00016931818181818183,
+ "loss": 2.1766,
+ "step": 162
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 0.0001691287878787879,
+ "loss": 2.1646,
+ "step": 163
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 0.00016893939393939394,
+ "loss": 2.1694,
+ "step": 164
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 0.00016875,
+ "loss": 2.1562,
+ "step": 165
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 0.00016856060606060605,
+ "loss": 2.1551,
+ "step": 166
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 0.00016837121212121212,
+ "loss": 2.1652,
+ "step": 167
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 0.0001681818181818182,
+ "loss": 2.1594,
+ "step": 168
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 0.00016799242424242426,
+ "loss": 2.1674,
+ "step": 169
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 0.0001678030303030303,
+ "loss": 2.1378,
+ "step": 170
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 0.00016761363636363637,
+ "loss": 2.1447,
+ "step": 171
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 0.00016742424242424244,
+ "loss": 2.1451,
+ "step": 172
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 0.00016723484848484848,
+ "loss": 2.1336,
+ "step": 173
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 0.00016704545454545455,
+ "loss": 2.1231,
+ "step": 174
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 0.0001668560606060606,
+ "loss": 2.1143,
+ "step": 175
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 0.0001666666666666667,
+ "loss": 2.1316,
+ "step": 176
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 0.00016647727272727273,
+ "loss": 2.1281,
+ "step": 177
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 0.0001662878787878788,
+ "loss": 2.136,
+ "step": 178
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 0.00016609848484848484,
+ "loss": 2.1279,
+ "step": 179
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 0.00016590909090909094,
+ "loss": 2.1421,
+ "step": 180
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 0.00016571969696969698,
+ "loss": 2.1541,
+ "step": 181
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 0.00016553030303030305,
+ "loss": 2.1293,
+ "step": 182
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 0.0001653409090909091,
+ "loss": 2.1294,
+ "step": 183
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 0.00016515151515151516,
+ "loss": 2.1459,
+ "step": 184
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 0.00016496212121212123,
+ "loss": 2.1113,
+ "step": 185
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 0.00016477272727272727,
+ "loss": 2.1394,
+ "step": 186
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 0.00016458333333333334,
+ "loss": 2.1321,
+ "step": 187
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 0.0001643939393939394,
+ "loss": 2.148,
+ "step": 188
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 0.00016420454545454548,
+ "loss": 2.1631,
+ "step": 189
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 0.00016401515151515152,
+ "loss": 2.1276,
+ "step": 190
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 0.0001638257575757576,
+ "loss": 2.0706,
+ "step": 191
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 0.00016363636363636366,
+ "loss": 2.127,
+ "step": 192
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 0.0001634469696969697,
+ "loss": 2.1449,
+ "step": 193
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 0.00016325757575757577,
+ "loss": 2.1204,
+ "step": 194
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 0.0001630681818181818,
+ "loss": 2.0904,
+ "step": 195
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 0.0001628787878787879,
+ "loss": 2.1129,
+ "step": 196
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 0.00016268939393939395,
+ "loss": 2.1036,
+ "step": 197
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 0.00016250000000000002,
+ "loss": 2.1509,
+ "step": 198
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 0.00016231060606060606,
+ "loss": 2.1239,
+ "step": 199
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 0.00016212121212121213,
+ "loss": 2.145,
+ "step": 200
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 0.0001619318181818182,
+ "loss": 2.1221,
+ "step": 201
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 0.00016174242424242424,
+ "loss": 2.1181,
+ "step": 202
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 0.0001615530303030303,
+ "loss": 2.1306,
+ "step": 203
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 0.00016136363636363635,
+ "loss": 2.0199,
+ "step": 204
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 0.00016117424242424245,
+ "loss": 2.1178,
+ "step": 205
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 0.0001609848484848485,
+ "loss": 2.1584,
+ "step": 206
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 0.00016079545454545456,
+ "loss": 2.0872,
+ "step": 207
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 0.0001606060606060606,
+ "loss": 2.1033,
+ "step": 208
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 0.00016041666666666667,
+ "loss": 2.1381,
+ "step": 209
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 0.00016022727272727274,
+ "loss": 2.1127,
+ "step": 210
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 0.00016003787878787878,
+ "loss": 2.1077,
+ "step": 211
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 0.00015984848484848485,
+ "loss": 2.0984,
+ "step": 212
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 0.00015965909090909092,
+ "loss": 2.0994,
+ "step": 213
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 0.000159469696969697,
+ "loss": 2.096,
+ "step": 214
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 0.00015928030303030303,
+ "loss": 2.0909,
+ "step": 215
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 0.0001590909090909091,
+ "loss": 2.118,
+ "step": 216
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 0.00015890151515151517,
+ "loss": 2.0783,
+ "step": 217
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 0.0001587121212121212,
+ "loss": 2.0876,
+ "step": 218
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 0.00015852272727272728,
+ "loss": 2.0581,
+ "step": 219
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 0.00015833333333333332,
+ "loss": 2.0548,
+ "step": 220
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 0.00015814393939393942,
+ "loss": 2.0595,
+ "step": 221
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 0.00015795454545454546,
+ "loss": 2.0719,
+ "step": 222
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 0.00015776515151515153,
+ "loss": 2.0903,
+ "step": 223
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 0.00015757575757575757,
+ "loss": 2.0941,
+ "step": 224
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 0.00015738636363636364,
+ "loss": 2.0926,
+ "step": 225
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 0.0001571969696969697,
+ "loss": 2.0816,
+ "step": 226
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 0.00015700757575757575,
+ "loss": 2.0894,
+ "step": 227
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 0.00015681818181818182,
+ "loss": 2.0798,
+ "step": 228
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 0.0001566287878787879,
+ "loss": 2.0672,
+ "step": 229
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 0.00015643939393939396,
+ "loss": 2.0787,
+ "step": 230
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 0.00015625,
+ "loss": 2.0611,
+ "step": 231
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 0.00015606060606060607,
+ "loss": 2.0805,
+ "step": 232
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 0.00015587121212121211,
+ "loss": 2.053,
+ "step": 233
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 0.00015568181818181818,
+ "loss": 2.0575,
+ "step": 234
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 0.00015549242424242425,
+ "loss": 2.0459,
+ "step": 235
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 0.0001553030303030303,
+ "loss": 2.0635,
+ "step": 236
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 0.00015511363636363636,
+ "loss": 2.0335,
+ "step": 237
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 0.00015492424242424243,
+ "loss": 2.0681,
+ "step": 238
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 0.0001547348484848485,
+ "loss": 2.0748,
+ "step": 239
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 0.00015454545454545454,
+ "loss": 2.1091,
+ "step": 240
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 0.0001543560606060606,
+ "loss": 2.0732,
+ "step": 241
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 0.00015416666666666668,
+ "loss": 2.0746,
+ "step": 242
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 0.00015397727272727272,
+ "loss": 2.0306,
+ "step": 243
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 0.0001537878787878788,
+ "loss": 2.0864,
+ "step": 244
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 0.00015359848484848484,
+ "loss": 2.0664,
+ "step": 245
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 0.00015340909090909093,
+ "loss": 2.0801,
+ "step": 246
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 0.00015321969696969697,
+ "loss": 2.0799,
+ "step": 247
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 0.00015303030303030304,
+ "loss": 2.0621,
+ "step": 248
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 0.00015284090909090909,
+ "loss": 2.0687,
+ "step": 249
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 0.00015265151515151515,
+ "loss": 2.018,
+ "step": 250
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 0.00015246212121212122,
+ "loss": 2.0256,
+ "step": 251
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 0.00015227272727272727,
+ "loss": 2.0736,
+ "step": 252
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 0.00015208333333333333,
+ "loss": 2.0609,
+ "step": 253
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 0.0001518939393939394,
+ "loss": 2.0539,
+ "step": 254
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 0.00015170454545454547,
+ "loss": 2.0282,
+ "step": 255
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 0.00015151515151515152,
+ "loss": 2.0417,
+ "step": 256
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 0.00015132575757575758,
+ "loss": 2.0333,
+ "step": 257
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 0.00015113636363636365,
+ "loss": 2.0428,
+ "step": 258
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 0.00015094696969696972,
+ "loss": 2.045,
+ "step": 259
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 0.00015075757575757576,
+ "loss": 2.0463,
+ "step": 260
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 0.0001505681818181818,
+ "loss": 2.0539,
+ "step": 261
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 0.0001503787878787879,
+ "loss": 2.0184,
+ "step": 262
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 0.00015018939393939394,
+ "loss": 2.0858,
+ "step": 263
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 0.00015000000000000001,
+ "loss": 2.0239,
+ "step": 264
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 0.00014981060606060606,
+ "loss": 2.0425,
+ "step": 265
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 0.00014962121212121213,
+ "loss": 2.0263,
+ "step": 266
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 0.0001494318181818182,
+ "loss": 2.042,
+ "step": 267
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 0.00014924242424242426,
+ "loss": 2.026,
+ "step": 268
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 0.0001490530303030303,
+ "loss": 2.0411,
+ "step": 269
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 0.00014886363636363635,
+ "loss": 2.028,
+ "step": 270
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 0.00014867424242424244,
+ "loss": 2.0172,
+ "step": 271
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 0.00014848484848484849,
+ "loss": 2.0196,
+ "step": 272
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 0.00014829545454545455,
+ "loss": 2.0142,
+ "step": 273
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 0.0001481060606060606,
+ "loss": 2.0265,
+ "step": 274
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 0.0001479166666666667,
+ "loss": 2.0353,
+ "step": 275
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 0.00014772727272727274,
+ "loss": 2.0327,
+ "step": 276
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 0.0001475378787878788,
+ "loss": 2.0188,
+ "step": 277
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 0.00014734848484848485,
+ "loss": 1.9987,
+ "step": 278
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 0.00014715909090909092,
+ "loss": 2.0141,
+ "step": 279
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 0.00014696969696969698,
+ "loss": 2.0403,
+ "step": 280
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 0.00014678030303030303,
+ "loss": 1.9977,
+ "step": 281
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 0.0001465909090909091,
+ "loss": 1.9674,
+ "step": 282
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 0.00014640151515151517,
+ "loss": 1.9984,
+ "step": 283
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 0.00014621212121212123,
+ "loss": 1.9796,
+ "step": 284
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 0.00014602272727272728,
+ "loss": 2.0139,
+ "step": 285
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 0.00014583333333333335,
+ "loss": 1.9866,
+ "step": 286
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 0.00014564393939393941,
+ "loss": 2.0208,
+ "step": 287
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 0.00014545454545454546,
+ "loss": 1.9844,
+ "step": 288
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 0.00014526515151515153,
+ "loss": 2.0082,
+ "step": 289
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 0.00014507575757575757,
+ "loss": 1.984,
+ "step": 290
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 0.00014488636363636366,
+ "loss": 2.0015,
+ "step": 291
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 0.0001446969696969697,
+ "loss": 2.0209,
+ "step": 292
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 0.00014450757575757578,
+ "loss": 1.9728,
+ "step": 293
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 0.00014431818181818182,
+ "loss": 2.0032,
+ "step": 294
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 0.00014412878787878789,
+ "loss": 1.9641,
+ "step": 295
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 0.00014393939393939396,
+ "loss": 1.9945,
+ "step": 296
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 0.00014375,
+ "loss": 1.9658,
+ "step": 297
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 0.00014356060606060607,
+ "loss": 1.9907,
+ "step": 298
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 0.0001433712121212121,
+ "loss": 1.9935,
+ "step": 299
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 0.0001431818181818182,
+ "loss": 1.9897,
+ "step": 300
+ }
+ ],
+ "logging_steps": 1,
+ "max_steps": 1056,
+ "num_train_epochs": 3,
+ "save_steps": 100,
+ "total_flos": 7.603665299822592e+17,
+ "trial_name": null,
+ "trial_params": null
+}
diff --git a/checkpoint-300/training_args.bin b/checkpoint-300/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..574132c086f9a526d71493b1ec4c09396eac5482
--- /dev/null
+++ b/checkpoint-300/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:50303c7f1586feb001f01b7e968e567295f501ef6c6407e28250c458696a43af
+size 4155
diff --git a/checkpoint-400/README.md b/checkpoint-400/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..08371015f02382e6fcba318f4aaea54ae52cd3c4
--- /dev/null
+++ b/checkpoint-400/README.md
@@ -0,0 +1,34 @@
+---
+library_name: peft
+---
+## Training procedure
+
+
+The following `bitsandbytes` quantization config was used during training:
+- quant_method: bitsandbytes
+- load_in_8bit: True
+- load_in_4bit: False
+- llm_int8_threshold: 6.0
+- llm_int8_skip_modules: None
+- llm_int8_enable_fp32_cpu_offload: False
+- llm_int8_has_fp16_weight: False
+- bnb_4bit_quant_type: fp4
+- bnb_4bit_use_double_quant: False
+- bnb_4bit_compute_dtype: float32
+
+The following `bitsandbytes` quantization config was used during training:
+- quant_method: bitsandbytes
+- load_in_8bit: True
+- load_in_4bit: False
+- llm_int8_threshold: 6.0
+- llm_int8_skip_modules: None
+- llm_int8_enable_fp32_cpu_offload: False
+- llm_int8_has_fp16_weight: False
+- bnb_4bit_quant_type: fp4
+- bnb_4bit_use_double_quant: False
+- bnb_4bit_compute_dtype: float32
+### Framework versions
+
+- PEFT 0.6.0.dev0
+
+- PEFT 0.6.0.dev0
diff --git a/checkpoint-400/adapter_config.json b/checkpoint-400/adapter_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..751d838ac0c1ae5ca71ca448b25d7a8a0173f01b
--- /dev/null
+++ b/checkpoint-400/adapter_config.json
@@ -0,0 +1,23 @@
+{
+ "auto_mapping": null,
+ "base_model_name_or_path": "bigscience/bloomz-3b",
+ "bias": "none",
+ "fan_in_fan_out": false,
+ "inference_mode": true,
+ "init_lora_weights": true,
+ "layers_pattern": null,
+ "layers_to_transform": null,
+ "lora_alpha": 16,
+ "lora_dropout": 0.0,
+ "modules_to_save": null,
+ "peft_type": "LORA",
+ "r": 8,
+ "revision": null,
+ "target_modules": [
+ "dense_4h_to_h",
+ "dense",
+ "dense_h_to_4h",
+ "query_key_value"
+ ],
+ "task_type": "CAUSAL_LM"
+}
\ No newline at end of file
diff --git a/checkpoint-400/adapter_model.bin b/checkpoint-400/adapter_model.bin
new file mode 100644
index 0000000000000000000000000000000000000000..bdaea43c9cbf89920340538fec7d54f5f7657a30
--- /dev/null
+++ b/checkpoint-400/adapter_model.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:51647d6c0564017d9193f228b6d8bc208d4d6a55f7e4bd08dc9d1ce64d9903c8
+size 39409357
diff --git a/checkpoint-400/optimizer.pt b/checkpoint-400/optimizer.pt
new file mode 100644
index 0000000000000000000000000000000000000000..297d8fe097b700e1b2d39e73caae0c33a65ee3b0
--- /dev/null
+++ b/checkpoint-400/optimizer.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2d805d92e1fdea1995c4b0f014cc6ddb5ca33b3be6d4ed4e3c1caf4f79c61bad
+size 78844421
diff --git a/checkpoint-400/rng_state.pth b/checkpoint-400/rng_state.pth
new file mode 100644
index 0000000000000000000000000000000000000000..b9111227e3b79b9bc3e2a642832c3e49e36216d4
--- /dev/null
+++ b/checkpoint-400/rng_state.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:972139d83957a9cf2600cb6eeca17287d7a5377c33a53500ae7e13fe830ad36b
+size 14575
diff --git a/checkpoint-400/scheduler.pt b/checkpoint-400/scheduler.pt
new file mode 100644
index 0000000000000000000000000000000000000000..cf526f441047cbf30592b5b6355238d55d48bf67
--- /dev/null
+++ b/checkpoint-400/scheduler.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:57f49b2cd2e0bb26081b2f483de251b2a0460e7b08adcce7b0b2d750de7a0cb2
+size 627
diff --git a/checkpoint-400/special_tokens_map.json b/checkpoint-400/special_tokens_map.json
new file mode 100644
index 0000000000000000000000000000000000000000..fdafe480f024ff444c7492147536765ce5d55a2d
--- /dev/null
+++ b/checkpoint-400/special_tokens_map.json
@@ -0,0 +1,6 @@
+{
+ "bos_token": "",
+ "eos_token": "",
+ "pad_token": "",
+ "unk_token": ""
+}
diff --git a/checkpoint-400/tokenizer.json b/checkpoint-400/tokenizer.json
new file mode 100644
index 0000000000000000000000000000000000000000..673c31abdeadf6576c3c754df86459e1ad64e207
--- /dev/null
+++ b/checkpoint-400/tokenizer.json
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:85b00d7db4df5df2e3f01cacc3feda246002a672f3356eec7f4b04a22eb0dfbe
+size 14500570
diff --git a/checkpoint-400/tokenizer_config.json b/checkpoint-400/tokenizer_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..4b56cc9c2965c07132c35df3e2972e93d98c82c3
--- /dev/null
+++ b/checkpoint-400/tokenizer_config.json
@@ -0,0 +1,10 @@
+{
+ "add_prefix_space": false,
+ "bos_token": "",
+ "clean_up_tokenization_spaces": false,
+ "eos_token": "",
+ "model_max_length": 1000000000000000019884624838656,
+ "pad_token": "",
+ "tokenizer_class": "BloomTokenizer",
+ "unk_token": ""
+}
diff --git a/checkpoint-400/trainer_state.json b/checkpoint-400/trainer_state.json
new file mode 100644
index 0000000000000000000000000000000000000000..3cbd9d15d22c7a6cde039723430d50faed590a05
--- /dev/null
+++ b/checkpoint-400/trainer_state.json
@@ -0,0 +1,2419 @@
+{
+ "best_metric": null,
+ "best_model_checkpoint": null,
+ "epoch": 1.1347517730496455,
+ "eval_steps": 500,
+ "global_step": 400,
+ "is_hyper_param_search": false,
+ "is_local_process_zero": true,
+ "is_world_process_zero": true,
+ "log_history": [
+ {
+ "epoch": 0.0,
+ "learning_rate": 0.00019981060606060605,
+ "loss": 2.9206,
+ "step": 1
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 0.00019962121212121212,
+ "loss": 2.7609,
+ "step": 2
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 0.0001994318181818182,
+ "loss": 2.6878,
+ "step": 3
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 0.00019924242424242426,
+ "loss": 2.6697,
+ "step": 4
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 0.0001990530303030303,
+ "loss": 2.5818,
+ "step": 5
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 0.00019886363636363637,
+ "loss": 2.5396,
+ "step": 6
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 0.00019867424242424244,
+ "loss": 2.5265,
+ "step": 7
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 0.0001984848484848485,
+ "loss": 2.5475,
+ "step": 8
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 0.00019829545454545455,
+ "loss": 2.4835,
+ "step": 9
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 0.0001981060606060606,
+ "loss": 2.4559,
+ "step": 10
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 0.0001979166666666667,
+ "loss": 2.4511,
+ "step": 11
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 0.00019772727272727273,
+ "loss": 2.4592,
+ "step": 12
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 0.0001975378787878788,
+ "loss": 2.4495,
+ "step": 13
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 0.00019734848484848484,
+ "loss": 2.4714,
+ "step": 14
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 0.00019715909090909094,
+ "loss": 2.4302,
+ "step": 15
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 0.00019696969696969698,
+ "loss": 2.4097,
+ "step": 16
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 0.00019678030303030305,
+ "loss": 2.4523,
+ "step": 17
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 0.0001965909090909091,
+ "loss": 2.4325,
+ "step": 18
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 0.00019640151515151516,
+ "loss": 2.4125,
+ "step": 19
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 0.00019621212121212123,
+ "loss": 2.4329,
+ "step": 20
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 0.00019602272727272727,
+ "loss": 2.3471,
+ "step": 21
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 0.00019583333333333334,
+ "loss": 2.3012,
+ "step": 22
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 0.0001956439393939394,
+ "loss": 2.3869,
+ "step": 23
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 0.00019545454545454548,
+ "loss": 2.3822,
+ "step": 24
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 0.00019526515151515152,
+ "loss": 2.3427,
+ "step": 25
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 0.0001950757575757576,
+ "loss": 2.3659,
+ "step": 26
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 0.00019488636363636366,
+ "loss": 2.3826,
+ "step": 27
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 0.0001946969696969697,
+ "loss": 2.3532,
+ "step": 28
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 0.00019450757575757577,
+ "loss": 2.3828,
+ "step": 29
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 0.0001943181818181818,
+ "loss": 2.3133,
+ "step": 30
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 0.0001941287878787879,
+ "loss": 2.3613,
+ "step": 31
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 0.00019393939393939395,
+ "loss": 2.3867,
+ "step": 32
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 0.00019375000000000002,
+ "loss": 2.2966,
+ "step": 33
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 0.00019356060606060606,
+ "loss": 2.3436,
+ "step": 34
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 0.00019337121212121213,
+ "loss": 2.3425,
+ "step": 35
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 0.0001931818181818182,
+ "loss": 2.307,
+ "step": 36
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 0.00019299242424242424,
+ "loss": 2.3521,
+ "step": 37
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 0.0001928030303030303,
+ "loss": 2.3302,
+ "step": 38
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 0.00019261363636363635,
+ "loss": 2.312,
+ "step": 39
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 0.00019242424242424245,
+ "loss": 2.3655,
+ "step": 40
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 0.0001922348484848485,
+ "loss": 2.344,
+ "step": 41
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 0.00019204545454545456,
+ "loss": 2.3373,
+ "step": 42
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 0.0001918560606060606,
+ "loss": 2.3331,
+ "step": 43
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 0.00019166666666666667,
+ "loss": 2.3376,
+ "step": 44
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 0.00019147727272727274,
+ "loss": 2.3369,
+ "step": 45
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 0.00019128787878787878,
+ "loss": 2.3413,
+ "step": 46
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 0.00019109848484848485,
+ "loss": 2.3212,
+ "step": 47
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 0.00019090909090909092,
+ "loss": 2.307,
+ "step": 48
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 0.000190719696969697,
+ "loss": 2.2929,
+ "step": 49
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 0.00019053030303030303,
+ "loss": 2.2873,
+ "step": 50
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 0.0001903409090909091,
+ "loss": 2.3098,
+ "step": 51
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 0.00019015151515151517,
+ "loss": 2.3129,
+ "step": 52
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 0.0001899621212121212,
+ "loss": 2.3038,
+ "step": 53
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 0.00018977272727272728,
+ "loss": 2.286,
+ "step": 54
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 0.00018958333333333332,
+ "loss": 2.3388,
+ "step": 55
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 0.00018939393939393942,
+ "loss": 2.3193,
+ "step": 56
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 0.00018920454545454546,
+ "loss": 2.3136,
+ "step": 57
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 0.00018901515151515153,
+ "loss": 2.3141,
+ "step": 58
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 0.00018882575757575757,
+ "loss": 2.3646,
+ "step": 59
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 0.00018863636363636364,
+ "loss": 2.3318,
+ "step": 60
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 0.0001884469696969697,
+ "loss": 2.2977,
+ "step": 61
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 0.00018825757575757575,
+ "loss": 2.2764,
+ "step": 62
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 0.00018806818181818182,
+ "loss": 2.3095,
+ "step": 63
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 0.0001878787878787879,
+ "loss": 2.252,
+ "step": 64
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 0.00018768939393939396,
+ "loss": 2.2786,
+ "step": 65
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 0.0001875,
+ "loss": 2.2789,
+ "step": 66
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 0.00018731060606060607,
+ "loss": 2.2841,
+ "step": 67
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 0.00018712121212121212,
+ "loss": 2.3436,
+ "step": 68
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 0.00018693181818181818,
+ "loss": 2.2956,
+ "step": 69
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 0.00018674242424242425,
+ "loss": 2.2353,
+ "step": 70
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 0.0001865530303030303,
+ "loss": 2.2772,
+ "step": 71
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 0.00018636363636363636,
+ "loss": 2.2496,
+ "step": 72
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 0.00018617424242424243,
+ "loss": 2.2477,
+ "step": 73
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 0.0001859848484848485,
+ "loss": 2.2791,
+ "step": 74
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 0.00018579545454545454,
+ "loss": 2.2799,
+ "step": 75
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 0.00018560606060606061,
+ "loss": 2.3132,
+ "step": 76
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 0.00018541666666666668,
+ "loss": 2.2542,
+ "step": 77
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 0.00018522727272727273,
+ "loss": 2.2609,
+ "step": 78
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 0.0001850378787878788,
+ "loss": 2.2819,
+ "step": 79
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 0.00018484848484848484,
+ "loss": 2.2844,
+ "step": 80
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 0.00018465909090909093,
+ "loss": 2.2542,
+ "step": 81
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 0.00018446969696969697,
+ "loss": 2.2603,
+ "step": 82
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 0.00018428030303030304,
+ "loss": 2.2832,
+ "step": 83
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 0.00018409090909090909,
+ "loss": 2.2869,
+ "step": 84
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 0.00018390151515151518,
+ "loss": 2.2646,
+ "step": 85
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 0.00018371212121212122,
+ "loss": 2.2698,
+ "step": 86
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 0.00018352272727272727,
+ "loss": 2.2757,
+ "step": 87
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 0.00018333333333333334,
+ "loss": 2.2544,
+ "step": 88
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 0.0001831439393939394,
+ "loss": 2.2678,
+ "step": 89
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 0.00018295454545454547,
+ "loss": 2.2778,
+ "step": 90
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 0.00018276515151515152,
+ "loss": 2.2027,
+ "step": 91
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 0.00018257575757575758,
+ "loss": 2.2167,
+ "step": 92
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 0.00018238636363636365,
+ "loss": 2.2602,
+ "step": 93
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 0.00018219696969696972,
+ "loss": 2.2736,
+ "step": 94
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 0.00018200757575757577,
+ "loss": 2.2443,
+ "step": 95
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 0.00018181818181818183,
+ "loss": 2.2299,
+ "step": 96
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 0.0001816287878787879,
+ "loss": 2.2644,
+ "step": 97
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 0.00018143939393939395,
+ "loss": 2.259,
+ "step": 98
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 0.00018125000000000001,
+ "loss": 2.2567,
+ "step": 99
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 0.00018106060606060606,
+ "loss": 2.2599,
+ "step": 100
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 0.00018087121212121213,
+ "loss": 2.2091,
+ "step": 101
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 0.0001806818181818182,
+ "loss": 2.2312,
+ "step": 102
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 0.00018049242424242426,
+ "loss": 2.1869,
+ "step": 103
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 0.0001803030303030303,
+ "loss": 2.2023,
+ "step": 104
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 0.00018011363636363638,
+ "loss": 2.2132,
+ "step": 105
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 0.00017992424242424244,
+ "loss": 2.2612,
+ "step": 106
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 0.0001797348484848485,
+ "loss": 2.2109,
+ "step": 107
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 0.00017954545454545456,
+ "loss": 2.215,
+ "step": 108
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 0.0001793560606060606,
+ "loss": 2.2114,
+ "step": 109
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 0.0001791666666666667,
+ "loss": 2.2203,
+ "step": 110
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 0.00017897727272727274,
+ "loss": 2.2594,
+ "step": 111
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 0.0001787878787878788,
+ "loss": 2.2001,
+ "step": 112
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 0.00017859848484848485,
+ "loss": 2.2046,
+ "step": 113
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 0.00017840909090909092,
+ "loss": 2.1907,
+ "step": 114
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 0.00017821969696969699,
+ "loss": 2.2539,
+ "step": 115
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 0.00017803030303030303,
+ "loss": 2.2335,
+ "step": 116
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 0.0001778409090909091,
+ "loss": 2.2171,
+ "step": 117
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 0.00017765151515151517,
+ "loss": 2.2278,
+ "step": 118
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 0.00017746212121212123,
+ "loss": 2.231,
+ "step": 119
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 0.00017727272727272728,
+ "loss": 2.2141,
+ "step": 120
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 0.00017708333333333335,
+ "loss": 2.2432,
+ "step": 121
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 0.00017689393939393942,
+ "loss": 2.2266,
+ "step": 122
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 0.00017670454545454546,
+ "loss": 2.1929,
+ "step": 123
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 0.00017651515151515153,
+ "loss": 2.2077,
+ "step": 124
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 0.00017632575757575757,
+ "loss": 2.2133,
+ "step": 125
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 0.00017613636363636366,
+ "loss": 2.2251,
+ "step": 126
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 0.0001759469696969697,
+ "loss": 2.2265,
+ "step": 127
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 0.00017575757575757578,
+ "loss": 2.2186,
+ "step": 128
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 0.00017556818181818182,
+ "loss": 2.1925,
+ "step": 129
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 0.0001753787878787879,
+ "loss": 2.1956,
+ "step": 130
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 0.00017518939393939396,
+ "loss": 2.2459,
+ "step": 131
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 0.000175,
+ "loss": 2.22,
+ "step": 132
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 0.00017481060606060607,
+ "loss": 2.2143,
+ "step": 133
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 0.0001746212121212121,
+ "loss": 2.2359,
+ "step": 134
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 0.0001744318181818182,
+ "loss": 2.2058,
+ "step": 135
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 0.00017424242424242425,
+ "loss": 2.2307,
+ "step": 136
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 0.00017405303030303032,
+ "loss": 2.2062,
+ "step": 137
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 0.00017386363636363636,
+ "loss": 2.1796,
+ "step": 138
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 0.00017367424242424243,
+ "loss": 2.2054,
+ "step": 139
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 0.0001734848484848485,
+ "loss": 2.1651,
+ "step": 140
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 0.00017329545454545454,
+ "loss": 2.2159,
+ "step": 141
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 0.0001731060606060606,
+ "loss": 2.1988,
+ "step": 142
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 0.00017291666666666668,
+ "loss": 2.1676,
+ "step": 143
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 0.00017272727272727275,
+ "loss": 2.1725,
+ "step": 144
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 0.0001725378787878788,
+ "loss": 2.2205,
+ "step": 145
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 0.00017234848484848486,
+ "loss": 2.1486,
+ "step": 146
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 0.00017215909090909093,
+ "loss": 2.147,
+ "step": 147
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 0.00017196969696969697,
+ "loss": 2.1651,
+ "step": 148
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 0.00017178030303030304,
+ "loss": 2.1983,
+ "step": 149
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 0.00017159090909090908,
+ "loss": 2.1778,
+ "step": 150
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 0.00017140151515151518,
+ "loss": 2.1631,
+ "step": 151
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 0.00017121212121212122,
+ "loss": 2.1442,
+ "step": 152
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 0.0001710227272727273,
+ "loss": 2.1397,
+ "step": 153
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 0.00017083333333333333,
+ "loss": 2.1697,
+ "step": 154
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 0.0001706439393939394,
+ "loss": 2.1451,
+ "step": 155
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 0.00017045454545454547,
+ "loss": 2.1789,
+ "step": 156
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 0.0001702651515151515,
+ "loss": 2.1037,
+ "step": 157
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 0.00017007575757575758,
+ "loss": 2.1698,
+ "step": 158
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 0.00016988636363636365,
+ "loss": 2.1538,
+ "step": 159
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 0.00016969696969696972,
+ "loss": 2.2015,
+ "step": 160
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 0.00016950757575757576,
+ "loss": 2.179,
+ "step": 161
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 0.00016931818181818183,
+ "loss": 2.1766,
+ "step": 162
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 0.0001691287878787879,
+ "loss": 2.1646,
+ "step": 163
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 0.00016893939393939394,
+ "loss": 2.1694,
+ "step": 164
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 0.00016875,
+ "loss": 2.1562,
+ "step": 165
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 0.00016856060606060605,
+ "loss": 2.1551,
+ "step": 166
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 0.00016837121212121212,
+ "loss": 2.1652,
+ "step": 167
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 0.0001681818181818182,
+ "loss": 2.1594,
+ "step": 168
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 0.00016799242424242426,
+ "loss": 2.1674,
+ "step": 169
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 0.0001678030303030303,
+ "loss": 2.1378,
+ "step": 170
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 0.00016761363636363637,
+ "loss": 2.1447,
+ "step": 171
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 0.00016742424242424244,
+ "loss": 2.1451,
+ "step": 172
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 0.00016723484848484848,
+ "loss": 2.1336,
+ "step": 173
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 0.00016704545454545455,
+ "loss": 2.1231,
+ "step": 174
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 0.0001668560606060606,
+ "loss": 2.1143,
+ "step": 175
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 0.0001666666666666667,
+ "loss": 2.1316,
+ "step": 176
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 0.00016647727272727273,
+ "loss": 2.1281,
+ "step": 177
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 0.0001662878787878788,
+ "loss": 2.136,
+ "step": 178
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 0.00016609848484848484,
+ "loss": 2.1279,
+ "step": 179
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 0.00016590909090909094,
+ "loss": 2.1421,
+ "step": 180
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 0.00016571969696969698,
+ "loss": 2.1541,
+ "step": 181
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 0.00016553030303030305,
+ "loss": 2.1293,
+ "step": 182
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 0.0001653409090909091,
+ "loss": 2.1294,
+ "step": 183
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 0.00016515151515151516,
+ "loss": 2.1459,
+ "step": 184
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 0.00016496212121212123,
+ "loss": 2.1113,
+ "step": 185
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 0.00016477272727272727,
+ "loss": 2.1394,
+ "step": 186
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 0.00016458333333333334,
+ "loss": 2.1321,
+ "step": 187
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 0.0001643939393939394,
+ "loss": 2.148,
+ "step": 188
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 0.00016420454545454548,
+ "loss": 2.1631,
+ "step": 189
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 0.00016401515151515152,
+ "loss": 2.1276,
+ "step": 190
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 0.0001638257575757576,
+ "loss": 2.0706,
+ "step": 191
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 0.00016363636363636366,
+ "loss": 2.127,
+ "step": 192
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 0.0001634469696969697,
+ "loss": 2.1449,
+ "step": 193
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 0.00016325757575757577,
+ "loss": 2.1204,
+ "step": 194
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 0.0001630681818181818,
+ "loss": 2.0904,
+ "step": 195
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 0.0001628787878787879,
+ "loss": 2.1129,
+ "step": 196
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 0.00016268939393939395,
+ "loss": 2.1036,
+ "step": 197
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 0.00016250000000000002,
+ "loss": 2.1509,
+ "step": 198
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 0.00016231060606060606,
+ "loss": 2.1239,
+ "step": 199
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 0.00016212121212121213,
+ "loss": 2.145,
+ "step": 200
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 0.0001619318181818182,
+ "loss": 2.1221,
+ "step": 201
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 0.00016174242424242424,
+ "loss": 2.1181,
+ "step": 202
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 0.0001615530303030303,
+ "loss": 2.1306,
+ "step": 203
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 0.00016136363636363635,
+ "loss": 2.0199,
+ "step": 204
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 0.00016117424242424245,
+ "loss": 2.1178,
+ "step": 205
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 0.0001609848484848485,
+ "loss": 2.1584,
+ "step": 206
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 0.00016079545454545456,
+ "loss": 2.0872,
+ "step": 207
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 0.0001606060606060606,
+ "loss": 2.1033,
+ "step": 208
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 0.00016041666666666667,
+ "loss": 2.1381,
+ "step": 209
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 0.00016022727272727274,
+ "loss": 2.1127,
+ "step": 210
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 0.00016003787878787878,
+ "loss": 2.1077,
+ "step": 211
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 0.00015984848484848485,
+ "loss": 2.0984,
+ "step": 212
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 0.00015965909090909092,
+ "loss": 2.0994,
+ "step": 213
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 0.000159469696969697,
+ "loss": 2.096,
+ "step": 214
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 0.00015928030303030303,
+ "loss": 2.0909,
+ "step": 215
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 0.0001590909090909091,
+ "loss": 2.118,
+ "step": 216
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 0.00015890151515151517,
+ "loss": 2.0783,
+ "step": 217
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 0.0001587121212121212,
+ "loss": 2.0876,
+ "step": 218
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 0.00015852272727272728,
+ "loss": 2.0581,
+ "step": 219
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 0.00015833333333333332,
+ "loss": 2.0548,
+ "step": 220
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 0.00015814393939393942,
+ "loss": 2.0595,
+ "step": 221
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 0.00015795454545454546,
+ "loss": 2.0719,
+ "step": 222
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 0.00015776515151515153,
+ "loss": 2.0903,
+ "step": 223
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 0.00015757575757575757,
+ "loss": 2.0941,
+ "step": 224
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 0.00015738636363636364,
+ "loss": 2.0926,
+ "step": 225
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 0.0001571969696969697,
+ "loss": 2.0816,
+ "step": 226
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 0.00015700757575757575,
+ "loss": 2.0894,
+ "step": 227
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 0.00015681818181818182,
+ "loss": 2.0798,
+ "step": 228
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 0.0001566287878787879,
+ "loss": 2.0672,
+ "step": 229
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 0.00015643939393939396,
+ "loss": 2.0787,
+ "step": 230
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 0.00015625,
+ "loss": 2.0611,
+ "step": 231
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 0.00015606060606060607,
+ "loss": 2.0805,
+ "step": 232
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 0.00015587121212121211,
+ "loss": 2.053,
+ "step": 233
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 0.00015568181818181818,
+ "loss": 2.0575,
+ "step": 234
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 0.00015549242424242425,
+ "loss": 2.0459,
+ "step": 235
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 0.0001553030303030303,
+ "loss": 2.0635,
+ "step": 236
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 0.00015511363636363636,
+ "loss": 2.0335,
+ "step": 237
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 0.00015492424242424243,
+ "loss": 2.0681,
+ "step": 238
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 0.0001547348484848485,
+ "loss": 2.0748,
+ "step": 239
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 0.00015454545454545454,
+ "loss": 2.1091,
+ "step": 240
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 0.0001543560606060606,
+ "loss": 2.0732,
+ "step": 241
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 0.00015416666666666668,
+ "loss": 2.0746,
+ "step": 242
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 0.00015397727272727272,
+ "loss": 2.0306,
+ "step": 243
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 0.0001537878787878788,
+ "loss": 2.0864,
+ "step": 244
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 0.00015359848484848484,
+ "loss": 2.0664,
+ "step": 245
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 0.00015340909090909093,
+ "loss": 2.0801,
+ "step": 246
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 0.00015321969696969697,
+ "loss": 2.0799,
+ "step": 247
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 0.00015303030303030304,
+ "loss": 2.0621,
+ "step": 248
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 0.00015284090909090909,
+ "loss": 2.0687,
+ "step": 249
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 0.00015265151515151515,
+ "loss": 2.018,
+ "step": 250
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 0.00015246212121212122,
+ "loss": 2.0256,
+ "step": 251
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 0.00015227272727272727,
+ "loss": 2.0736,
+ "step": 252
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 0.00015208333333333333,
+ "loss": 2.0609,
+ "step": 253
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 0.0001518939393939394,
+ "loss": 2.0539,
+ "step": 254
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 0.00015170454545454547,
+ "loss": 2.0282,
+ "step": 255
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 0.00015151515151515152,
+ "loss": 2.0417,
+ "step": 256
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 0.00015132575757575758,
+ "loss": 2.0333,
+ "step": 257
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 0.00015113636363636365,
+ "loss": 2.0428,
+ "step": 258
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 0.00015094696969696972,
+ "loss": 2.045,
+ "step": 259
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 0.00015075757575757576,
+ "loss": 2.0463,
+ "step": 260
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 0.0001505681818181818,
+ "loss": 2.0539,
+ "step": 261
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 0.0001503787878787879,
+ "loss": 2.0184,
+ "step": 262
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 0.00015018939393939394,
+ "loss": 2.0858,
+ "step": 263
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 0.00015000000000000001,
+ "loss": 2.0239,
+ "step": 264
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 0.00014981060606060606,
+ "loss": 2.0425,
+ "step": 265
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 0.00014962121212121213,
+ "loss": 2.0263,
+ "step": 266
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 0.0001494318181818182,
+ "loss": 2.042,
+ "step": 267
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 0.00014924242424242426,
+ "loss": 2.026,
+ "step": 268
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 0.0001490530303030303,
+ "loss": 2.0411,
+ "step": 269
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 0.00014886363636363635,
+ "loss": 2.028,
+ "step": 270
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 0.00014867424242424244,
+ "loss": 2.0172,
+ "step": 271
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 0.00014848484848484849,
+ "loss": 2.0196,
+ "step": 272
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 0.00014829545454545455,
+ "loss": 2.0142,
+ "step": 273
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 0.0001481060606060606,
+ "loss": 2.0265,
+ "step": 274
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 0.0001479166666666667,
+ "loss": 2.0353,
+ "step": 275
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 0.00014772727272727274,
+ "loss": 2.0327,
+ "step": 276
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 0.0001475378787878788,
+ "loss": 2.0188,
+ "step": 277
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 0.00014734848484848485,
+ "loss": 1.9987,
+ "step": 278
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 0.00014715909090909092,
+ "loss": 2.0141,
+ "step": 279
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 0.00014696969696969698,
+ "loss": 2.0403,
+ "step": 280
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 0.00014678030303030303,
+ "loss": 1.9977,
+ "step": 281
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 0.0001465909090909091,
+ "loss": 1.9674,
+ "step": 282
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 0.00014640151515151517,
+ "loss": 1.9984,
+ "step": 283
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 0.00014621212121212123,
+ "loss": 1.9796,
+ "step": 284
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 0.00014602272727272728,
+ "loss": 2.0139,
+ "step": 285
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 0.00014583333333333335,
+ "loss": 1.9866,
+ "step": 286
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 0.00014564393939393941,
+ "loss": 2.0208,
+ "step": 287
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 0.00014545454545454546,
+ "loss": 1.9844,
+ "step": 288
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 0.00014526515151515153,
+ "loss": 2.0082,
+ "step": 289
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 0.00014507575757575757,
+ "loss": 1.984,
+ "step": 290
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 0.00014488636363636366,
+ "loss": 2.0015,
+ "step": 291
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 0.0001446969696969697,
+ "loss": 2.0209,
+ "step": 292
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 0.00014450757575757578,
+ "loss": 1.9728,
+ "step": 293
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 0.00014431818181818182,
+ "loss": 2.0032,
+ "step": 294
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 0.00014412878787878789,
+ "loss": 1.9641,
+ "step": 295
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 0.00014393939393939396,
+ "loss": 1.9945,
+ "step": 296
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 0.00014375,
+ "loss": 1.9658,
+ "step": 297
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 0.00014356060606060607,
+ "loss": 1.9907,
+ "step": 298
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 0.0001433712121212121,
+ "loss": 1.9935,
+ "step": 299
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 0.0001431818181818182,
+ "loss": 1.9897,
+ "step": 300
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 0.00014299242424242425,
+ "loss": 1.984,
+ "step": 301
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 0.00014280303030303032,
+ "loss": 1.9581,
+ "step": 302
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 0.00014261363636363636,
+ "loss": 1.9893,
+ "step": 303
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 0.00014242424242424243,
+ "loss": 1.9568,
+ "step": 304
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 0.0001422348484848485,
+ "loss": 1.98,
+ "step": 305
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 0.00014204545454545454,
+ "loss": 1.9519,
+ "step": 306
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 0.0001418560606060606,
+ "loss": 1.9693,
+ "step": 307
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 0.00014166666666666668,
+ "loss": 1.9866,
+ "step": 308
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 0.00014147727272727275,
+ "loss": 1.9508,
+ "step": 309
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 0.0001412878787878788,
+ "loss": 1.9653,
+ "step": 310
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 0.00014109848484848486,
+ "loss": 1.9991,
+ "step": 311
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 0.00014090909090909093,
+ "loss": 1.9442,
+ "step": 312
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 0.00014071969696969697,
+ "loss": 1.9807,
+ "step": 313
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 0.00014053030303030304,
+ "loss": 1.9958,
+ "step": 314
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 0.00014034090909090908,
+ "loss": 1.9459,
+ "step": 315
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 0.00014015151515151518,
+ "loss": 1.9508,
+ "step": 316
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 0.00013996212121212122,
+ "loss": 1.9933,
+ "step": 317
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 0.0001397727272727273,
+ "loss": 1.9703,
+ "step": 318
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 0.00013958333333333333,
+ "loss": 1.965,
+ "step": 319
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 0.0001393939393939394,
+ "loss": 1.9264,
+ "step": 320
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 0.00013920454545454547,
+ "loss": 1.9688,
+ "step": 321
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 0.0001390151515151515,
+ "loss": 1.9901,
+ "step": 322
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 0.00013882575757575758,
+ "loss": 1.9363,
+ "step": 323
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 0.00013863636363636365,
+ "loss": 1.9269,
+ "step": 324
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 0.00013844696969696972,
+ "loss": 1.9688,
+ "step": 325
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 0.00013825757575757576,
+ "loss": 1.9758,
+ "step": 326
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 0.00013806818181818183,
+ "loss": 1.9414,
+ "step": 327
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 0.0001378787878787879,
+ "loss": 1.9397,
+ "step": 328
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 0.00013768939393939394,
+ "loss": 1.9032,
+ "step": 329
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 0.0001375,
+ "loss": 1.9777,
+ "step": 330
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 0.00013731060606060605,
+ "loss": 1.9173,
+ "step": 331
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 0.00013712121212121212,
+ "loss": 1.9307,
+ "step": 332
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 0.0001369318181818182,
+ "loss": 1.9611,
+ "step": 333
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 0.00013674242424242426,
+ "loss": 1.9698,
+ "step": 334
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 0.0001365530303030303,
+ "loss": 1.9619,
+ "step": 335
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 0.00013636363636363637,
+ "loss": 1.9322,
+ "step": 336
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 0.00013617424242424244,
+ "loss": 1.9441,
+ "step": 337
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 0.00013598484848484848,
+ "loss": 1.9563,
+ "step": 338
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 0.00013579545454545455,
+ "loss": 1.9283,
+ "step": 339
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 0.0001356060606060606,
+ "loss": 1.9508,
+ "step": 340
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 0.0001354166666666667,
+ "loss": 1.9285,
+ "step": 341
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 0.00013522727272727273,
+ "loss": 1.9295,
+ "step": 342
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 0.0001350378787878788,
+ "loss": 1.9272,
+ "step": 343
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 0.00013484848484848484,
+ "loss": 1.905,
+ "step": 344
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 0.00013465909090909094,
+ "loss": 1.9409,
+ "step": 345
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 0.00013446969696969698,
+ "loss": 1.9674,
+ "step": 346
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 0.00013428030303030302,
+ "loss": 1.9278,
+ "step": 347
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 0.0001340909090909091,
+ "loss": 1.9136,
+ "step": 348
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 0.00013390151515151516,
+ "loss": 1.9143,
+ "step": 349
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 0.00013371212121212123,
+ "loss": 1.9381,
+ "step": 350
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 0.00013352272727272727,
+ "loss": 1.9136,
+ "step": 351
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 0.00013333333333333334,
+ "loss": 1.9103,
+ "step": 352
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 0.0001331439393939394,
+ "loss": 1.9027,
+ "step": 353
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 0.00013295454545454548,
+ "loss": 1.8674,
+ "step": 354
+ },
+ {
+ "epoch": 1.01,
+ "learning_rate": 0.00013276515151515152,
+ "loss": 1.886,
+ "step": 355
+ },
+ {
+ "epoch": 1.01,
+ "learning_rate": 0.00013257575757575756,
+ "loss": 1.887,
+ "step": 356
+ },
+ {
+ "epoch": 1.01,
+ "learning_rate": 0.00013238636363636366,
+ "loss": 1.87,
+ "step": 357
+ },
+ {
+ "epoch": 1.02,
+ "learning_rate": 0.0001321969696969697,
+ "loss": 1.8715,
+ "step": 358
+ },
+ {
+ "epoch": 1.02,
+ "learning_rate": 0.00013200757575757577,
+ "loss": 1.8993,
+ "step": 359
+ },
+ {
+ "epoch": 1.02,
+ "learning_rate": 0.0001318181818181818,
+ "loss": 1.8844,
+ "step": 360
+ },
+ {
+ "epoch": 1.02,
+ "learning_rate": 0.0001316287878787879,
+ "loss": 1.8965,
+ "step": 361
+ },
+ {
+ "epoch": 1.03,
+ "learning_rate": 0.00013143939393939395,
+ "loss": 1.8956,
+ "step": 362
+ },
+ {
+ "epoch": 1.03,
+ "learning_rate": 0.00013125000000000002,
+ "loss": 1.869,
+ "step": 363
+ },
+ {
+ "epoch": 1.03,
+ "learning_rate": 0.00013106060606060606,
+ "loss": 1.8702,
+ "step": 364
+ },
+ {
+ "epoch": 1.04,
+ "learning_rate": 0.00013087121212121213,
+ "loss": 1.8962,
+ "step": 365
+ },
+ {
+ "epoch": 1.04,
+ "learning_rate": 0.0001306818181818182,
+ "loss": 1.8613,
+ "step": 366
+ },
+ {
+ "epoch": 1.04,
+ "learning_rate": 0.00013049242424242424,
+ "loss": 1.8845,
+ "step": 367
+ },
+ {
+ "epoch": 1.04,
+ "learning_rate": 0.0001303030303030303,
+ "loss": 1.8689,
+ "step": 368
+ },
+ {
+ "epoch": 1.05,
+ "learning_rate": 0.00013011363636363635,
+ "loss": 1.9059,
+ "step": 369
+ },
+ {
+ "epoch": 1.05,
+ "learning_rate": 0.00012992424242424245,
+ "loss": 1.9082,
+ "step": 370
+ },
+ {
+ "epoch": 1.05,
+ "learning_rate": 0.0001297348484848485,
+ "loss": 1.8918,
+ "step": 371
+ },
+ {
+ "epoch": 1.06,
+ "learning_rate": 0.00012954545454545456,
+ "loss": 1.8657,
+ "step": 372
+ },
+ {
+ "epoch": 1.06,
+ "learning_rate": 0.0001293560606060606,
+ "loss": 1.8909,
+ "step": 373
+ },
+ {
+ "epoch": 1.06,
+ "learning_rate": 0.00012916666666666667,
+ "loss": 1.8649,
+ "step": 374
+ },
+ {
+ "epoch": 1.06,
+ "learning_rate": 0.00012897727272727274,
+ "loss": 1.833,
+ "step": 375
+ },
+ {
+ "epoch": 1.07,
+ "learning_rate": 0.00012878787878787878,
+ "loss": 1.8815,
+ "step": 376
+ },
+ {
+ "epoch": 1.07,
+ "learning_rate": 0.00012859848484848485,
+ "loss": 1.8646,
+ "step": 377
+ },
+ {
+ "epoch": 1.07,
+ "learning_rate": 0.00012840909090909092,
+ "loss": 1.846,
+ "step": 378
+ },
+ {
+ "epoch": 1.08,
+ "learning_rate": 0.000128219696969697,
+ "loss": 1.8631,
+ "step": 379
+ },
+ {
+ "epoch": 1.08,
+ "learning_rate": 0.00012803030303030303,
+ "loss": 1.917,
+ "step": 380
+ },
+ {
+ "epoch": 1.08,
+ "learning_rate": 0.0001278409090909091,
+ "loss": 1.9068,
+ "step": 381
+ },
+ {
+ "epoch": 1.08,
+ "learning_rate": 0.00012765151515151517,
+ "loss": 1.8772,
+ "step": 382
+ },
+ {
+ "epoch": 1.09,
+ "learning_rate": 0.0001274621212121212,
+ "loss": 1.8414,
+ "step": 383
+ },
+ {
+ "epoch": 1.09,
+ "learning_rate": 0.00012727272727272728,
+ "loss": 1.9003,
+ "step": 384
+ },
+ {
+ "epoch": 1.09,
+ "learning_rate": 0.00012708333333333332,
+ "loss": 1.8415,
+ "step": 385
+ },
+ {
+ "epoch": 1.1,
+ "learning_rate": 0.00012689393939393942,
+ "loss": 1.8491,
+ "step": 386
+ },
+ {
+ "epoch": 1.1,
+ "learning_rate": 0.00012670454545454546,
+ "loss": 1.8875,
+ "step": 387
+ },
+ {
+ "epoch": 1.1,
+ "learning_rate": 0.00012651515151515153,
+ "loss": 1.8629,
+ "step": 388
+ },
+ {
+ "epoch": 1.1,
+ "learning_rate": 0.00012632575757575757,
+ "loss": 1.8378,
+ "step": 389
+ },
+ {
+ "epoch": 1.11,
+ "learning_rate": 0.00012613636363636364,
+ "loss": 1.8442,
+ "step": 390
+ },
+ {
+ "epoch": 1.11,
+ "learning_rate": 0.0001259469696969697,
+ "loss": 1.8587,
+ "step": 391
+ },
+ {
+ "epoch": 1.11,
+ "learning_rate": 0.00012575757575757575,
+ "loss": 1.8659,
+ "step": 392
+ },
+ {
+ "epoch": 1.11,
+ "learning_rate": 0.00012556818181818182,
+ "loss": 1.8271,
+ "step": 393
+ },
+ {
+ "epoch": 1.12,
+ "learning_rate": 0.0001253787878787879,
+ "loss": 1.8692,
+ "step": 394
+ },
+ {
+ "epoch": 1.12,
+ "learning_rate": 0.00012518939393939396,
+ "loss": 1.8071,
+ "step": 395
+ },
+ {
+ "epoch": 1.12,
+ "learning_rate": 0.000125,
+ "loss": 1.8564,
+ "step": 396
+ },
+ {
+ "epoch": 1.13,
+ "learning_rate": 0.00012481060606060607,
+ "loss": 1.8891,
+ "step": 397
+ },
+ {
+ "epoch": 1.13,
+ "learning_rate": 0.00012462121212121211,
+ "loss": 1.8173,
+ "step": 398
+ },
+ {
+ "epoch": 1.13,
+ "learning_rate": 0.00012443181818181818,
+ "loss": 1.8653,
+ "step": 399
+ },
+ {
+ "epoch": 1.13,
+ "learning_rate": 0.00012424242424242425,
+ "loss": 1.8843,
+ "step": 400
+ }
+ ],
+ "logging_steps": 1,
+ "max_steps": 1056,
+ "num_train_epochs": 3,
+ "save_steps": 100,
+ "total_flos": 1.0133699319771955e+18,
+ "trial_name": null,
+ "trial_params": null
+}
diff --git a/checkpoint-400/training_args.bin b/checkpoint-400/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..574132c086f9a526d71493b1ec4c09396eac5482
--- /dev/null
+++ b/checkpoint-400/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:50303c7f1586feb001f01b7e968e567295f501ef6c6407e28250c458696a43af
+size 4155
diff --git a/checkpoint-500/README.md b/checkpoint-500/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..08371015f02382e6fcba318f4aaea54ae52cd3c4
--- /dev/null
+++ b/checkpoint-500/README.md
@@ -0,0 +1,34 @@
+---
+library_name: peft
+---
+## Training procedure
+
+
+The following `bitsandbytes` quantization config was used during training:
+- quant_method: bitsandbytes
+- load_in_8bit: True
+- load_in_4bit: False
+- llm_int8_threshold: 6.0
+- llm_int8_skip_modules: None
+- llm_int8_enable_fp32_cpu_offload: False
+- llm_int8_has_fp16_weight: False
+- bnb_4bit_quant_type: fp4
+- bnb_4bit_use_double_quant: False
+- bnb_4bit_compute_dtype: float32
+
+The following `bitsandbytes` quantization config was used during training:
+- quant_method: bitsandbytes
+- load_in_8bit: True
+- load_in_4bit: False
+- llm_int8_threshold: 6.0
+- llm_int8_skip_modules: None
+- llm_int8_enable_fp32_cpu_offload: False
+- llm_int8_has_fp16_weight: False
+- bnb_4bit_quant_type: fp4
+- bnb_4bit_use_double_quant: False
+- bnb_4bit_compute_dtype: float32
+### Framework versions
+
+- PEFT 0.6.0.dev0
+
+- PEFT 0.6.0.dev0
diff --git a/checkpoint-500/adapter_config.json b/checkpoint-500/adapter_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..751d838ac0c1ae5ca71ca448b25d7a8a0173f01b
--- /dev/null
+++ b/checkpoint-500/adapter_config.json
@@ -0,0 +1,23 @@
+{
+ "auto_mapping": null,
+ "base_model_name_or_path": "bigscience/bloomz-3b",
+ "bias": "none",
+ "fan_in_fan_out": false,
+ "inference_mode": true,
+ "init_lora_weights": true,
+ "layers_pattern": null,
+ "layers_to_transform": null,
+ "lora_alpha": 16,
+ "lora_dropout": 0.0,
+ "modules_to_save": null,
+ "peft_type": "LORA",
+ "r": 8,
+ "revision": null,
+ "target_modules": [
+ "dense_4h_to_h",
+ "dense",
+ "dense_h_to_4h",
+ "query_key_value"
+ ],
+ "task_type": "CAUSAL_LM"
+}
\ No newline at end of file
diff --git a/checkpoint-500/adapter_model.bin b/checkpoint-500/adapter_model.bin
new file mode 100644
index 0000000000000000000000000000000000000000..25cc776cbc1b17a7e69000a9f077c21039fd0bf4
--- /dev/null
+++ b/checkpoint-500/adapter_model.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1a5c509e1c0613caab59f0b692295f9682b8e7f61d19f3f9217dec2d2e3d6dde
+size 39409357
diff --git a/checkpoint-500/optimizer.pt b/checkpoint-500/optimizer.pt
new file mode 100644
index 0000000000000000000000000000000000000000..470f34cc5da5d72a0d4d95ec62219ced965db98e
--- /dev/null
+++ b/checkpoint-500/optimizer.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a85a134fd2fed8929e943a491ab03616957df5f174be2b48747e2249c0e9e177
+size 78844421
diff --git a/checkpoint-500/rng_state.pth b/checkpoint-500/rng_state.pth
new file mode 100644
index 0000000000000000000000000000000000000000..b9111227e3b79b9bc3e2a642832c3e49e36216d4
--- /dev/null
+++ b/checkpoint-500/rng_state.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:972139d83957a9cf2600cb6eeca17287d7a5377c33a53500ae7e13fe830ad36b
+size 14575
diff --git a/checkpoint-500/scheduler.pt b/checkpoint-500/scheduler.pt
new file mode 100644
index 0000000000000000000000000000000000000000..16f7c243bf07f9fce408ee4c4d9f3ea4bca60367
--- /dev/null
+++ b/checkpoint-500/scheduler.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9fecad8588b20d8e104cfb29510026ed4bc5806d2feb07b85fe947b4f541003e
+size 627
diff --git a/checkpoint-500/special_tokens_map.json b/checkpoint-500/special_tokens_map.json
new file mode 100644
index 0000000000000000000000000000000000000000..fdafe480f024ff444c7492147536765ce5d55a2d
--- /dev/null
+++ b/checkpoint-500/special_tokens_map.json
@@ -0,0 +1,6 @@
+{
+  "bos_token": "<s>",
+  "eos_token": "</s>",
+  "pad_token": "<pad>",
+  "unk_token": "<unk>"
+}
diff --git a/checkpoint-500/tokenizer.json b/checkpoint-500/tokenizer.json
new file mode 100644
index 0000000000000000000000000000000000000000..673c31abdeadf6576c3c754df86459e1ad64e207
--- /dev/null
+++ b/checkpoint-500/tokenizer.json
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:85b00d7db4df5df2e3f01cacc3feda246002a672f3356eec7f4b04a22eb0dfbe
+size 14500570
diff --git a/checkpoint-500/tokenizer_config.json b/checkpoint-500/tokenizer_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..4b56cc9c2965c07132c35df3e2972e93d98c82c3
--- /dev/null
+++ b/checkpoint-500/tokenizer_config.json
@@ -0,0 +1,10 @@
+{
+ "add_prefix_space": false,
+  "bos_token": "<s>",
+  "clean_up_tokenization_spaces": false,
+  "eos_token": "</s>",
+  "model_max_length": 1000000000000000019884624838656,
+  "pad_token": "<pad>",
+  "tokenizer_class": "BloomTokenizer",
+  "unk_token": "<unk>"
+}
diff --git a/checkpoint-500/trainer_state.json b/checkpoint-500/trainer_state.json
new file mode 100644
index 0000000000000000000000000000000000000000..de48183a4f72d739562f1ed9f509d1512cebf76a
--- /dev/null
+++ b/checkpoint-500/trainer_state.json
@@ -0,0 +1,3019 @@
+{
+ "best_metric": null,
+ "best_model_checkpoint": null,
+ "epoch": 1.4184397163120568,
+ "eval_steps": 500,
+ "global_step": 500,
+ "is_hyper_param_search": false,
+ "is_local_process_zero": true,
+ "is_world_process_zero": true,
+ "log_history": [
+ {
+ "epoch": 0.0,
+ "learning_rate": 0.00019981060606060605,
+ "loss": 2.9206,
+ "step": 1
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 0.00019962121212121212,
+ "loss": 2.7609,
+ "step": 2
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 0.0001994318181818182,
+ "loss": 2.6878,
+ "step": 3
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 0.00019924242424242426,
+ "loss": 2.6697,
+ "step": 4
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 0.0001990530303030303,
+ "loss": 2.5818,
+ "step": 5
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 0.00019886363636363637,
+ "loss": 2.5396,
+ "step": 6
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 0.00019867424242424244,
+ "loss": 2.5265,
+ "step": 7
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 0.0001984848484848485,
+ "loss": 2.5475,
+ "step": 8
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 0.00019829545454545455,
+ "loss": 2.4835,
+ "step": 9
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 0.0001981060606060606,
+ "loss": 2.4559,
+ "step": 10
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 0.0001979166666666667,
+ "loss": 2.4511,
+ "step": 11
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 0.00019772727272727273,
+ "loss": 2.4592,
+ "step": 12
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 0.0001975378787878788,
+ "loss": 2.4495,
+ "step": 13
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 0.00019734848484848484,
+ "loss": 2.4714,
+ "step": 14
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 0.00019715909090909094,
+ "loss": 2.4302,
+ "step": 15
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 0.00019696969696969698,
+ "loss": 2.4097,
+ "step": 16
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 0.00019678030303030305,
+ "loss": 2.4523,
+ "step": 17
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 0.0001965909090909091,
+ "loss": 2.4325,
+ "step": 18
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 0.00019640151515151516,
+ "loss": 2.4125,
+ "step": 19
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 0.00019621212121212123,
+ "loss": 2.4329,
+ "step": 20
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 0.00019602272727272727,
+ "loss": 2.3471,
+ "step": 21
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 0.00019583333333333334,
+ "loss": 2.3012,
+ "step": 22
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 0.0001956439393939394,
+ "loss": 2.3869,
+ "step": 23
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 0.00019545454545454548,
+ "loss": 2.3822,
+ "step": 24
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 0.00019526515151515152,
+ "loss": 2.3427,
+ "step": 25
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 0.0001950757575757576,
+ "loss": 2.3659,
+ "step": 26
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 0.00019488636363636366,
+ "loss": 2.3826,
+ "step": 27
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 0.0001946969696969697,
+ "loss": 2.3532,
+ "step": 28
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 0.00019450757575757577,
+ "loss": 2.3828,
+ "step": 29
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 0.0001943181818181818,
+ "loss": 2.3133,
+ "step": 30
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 0.0001941287878787879,
+ "loss": 2.3613,
+ "step": 31
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 0.00019393939393939395,
+ "loss": 2.3867,
+ "step": 32
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 0.00019375000000000002,
+ "loss": 2.2966,
+ "step": 33
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 0.00019356060606060606,
+ "loss": 2.3436,
+ "step": 34
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 0.00019337121212121213,
+ "loss": 2.3425,
+ "step": 35
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 0.0001931818181818182,
+ "loss": 2.307,
+ "step": 36
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 0.00019299242424242424,
+ "loss": 2.3521,
+ "step": 37
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 0.0001928030303030303,
+ "loss": 2.3302,
+ "step": 38
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 0.00019261363636363635,
+ "loss": 2.312,
+ "step": 39
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 0.00019242424242424245,
+ "loss": 2.3655,
+ "step": 40
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 0.0001922348484848485,
+ "loss": 2.344,
+ "step": 41
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 0.00019204545454545456,
+ "loss": 2.3373,
+ "step": 42
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 0.0001918560606060606,
+ "loss": 2.3331,
+ "step": 43
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 0.00019166666666666667,
+ "loss": 2.3376,
+ "step": 44
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 0.00019147727272727274,
+ "loss": 2.3369,
+ "step": 45
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 0.00019128787878787878,
+ "loss": 2.3413,
+ "step": 46
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 0.00019109848484848485,
+ "loss": 2.3212,
+ "step": 47
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 0.00019090909090909092,
+ "loss": 2.307,
+ "step": 48
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 0.000190719696969697,
+ "loss": 2.2929,
+ "step": 49
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 0.00019053030303030303,
+ "loss": 2.2873,
+ "step": 50
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 0.0001903409090909091,
+ "loss": 2.3098,
+ "step": 51
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 0.00019015151515151517,
+ "loss": 2.3129,
+ "step": 52
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 0.0001899621212121212,
+ "loss": 2.3038,
+ "step": 53
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 0.00018977272727272728,
+ "loss": 2.286,
+ "step": 54
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 0.00018958333333333332,
+ "loss": 2.3388,
+ "step": 55
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 0.00018939393939393942,
+ "loss": 2.3193,
+ "step": 56
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 0.00018920454545454546,
+ "loss": 2.3136,
+ "step": 57
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 0.00018901515151515153,
+ "loss": 2.3141,
+ "step": 58
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 0.00018882575757575757,
+ "loss": 2.3646,
+ "step": 59
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 0.00018863636363636364,
+ "loss": 2.3318,
+ "step": 60
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 0.0001884469696969697,
+ "loss": 2.2977,
+ "step": 61
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 0.00018825757575757575,
+ "loss": 2.2764,
+ "step": 62
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 0.00018806818181818182,
+ "loss": 2.3095,
+ "step": 63
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 0.0001878787878787879,
+ "loss": 2.252,
+ "step": 64
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 0.00018768939393939396,
+ "loss": 2.2786,
+ "step": 65
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 0.0001875,
+ "loss": 2.2789,
+ "step": 66
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 0.00018731060606060607,
+ "loss": 2.2841,
+ "step": 67
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 0.00018712121212121212,
+ "loss": 2.3436,
+ "step": 68
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 0.00018693181818181818,
+ "loss": 2.2956,
+ "step": 69
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 0.00018674242424242425,
+ "loss": 2.2353,
+ "step": 70
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 0.0001865530303030303,
+ "loss": 2.2772,
+ "step": 71
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 0.00018636363636363636,
+ "loss": 2.2496,
+ "step": 72
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 0.00018617424242424243,
+ "loss": 2.2477,
+ "step": 73
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 0.0001859848484848485,
+ "loss": 2.2791,
+ "step": 74
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 0.00018579545454545454,
+ "loss": 2.2799,
+ "step": 75
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 0.00018560606060606061,
+ "loss": 2.3132,
+ "step": 76
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 0.00018541666666666668,
+ "loss": 2.2542,
+ "step": 77
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 0.00018522727272727273,
+ "loss": 2.2609,
+ "step": 78
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 0.0001850378787878788,
+ "loss": 2.2819,
+ "step": 79
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 0.00018484848484848484,
+ "loss": 2.2844,
+ "step": 80
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 0.00018465909090909093,
+ "loss": 2.2542,
+ "step": 81
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 0.00018446969696969697,
+ "loss": 2.2603,
+ "step": 82
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 0.00018428030303030304,
+ "loss": 2.2832,
+ "step": 83
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 0.00018409090909090909,
+ "loss": 2.2869,
+ "step": 84
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 0.00018390151515151518,
+ "loss": 2.2646,
+ "step": 85
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 0.00018371212121212122,
+ "loss": 2.2698,
+ "step": 86
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 0.00018352272727272727,
+ "loss": 2.2757,
+ "step": 87
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 0.00018333333333333334,
+ "loss": 2.2544,
+ "step": 88
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 0.0001831439393939394,
+ "loss": 2.2678,
+ "step": 89
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 0.00018295454545454547,
+ "loss": 2.2778,
+ "step": 90
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 0.00018276515151515152,
+ "loss": 2.2027,
+ "step": 91
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 0.00018257575757575758,
+ "loss": 2.2167,
+ "step": 92
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 0.00018238636363636365,
+ "loss": 2.2602,
+ "step": 93
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 0.00018219696969696972,
+ "loss": 2.2736,
+ "step": 94
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 0.00018200757575757577,
+ "loss": 2.2443,
+ "step": 95
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 0.00018181818181818183,
+ "loss": 2.2299,
+ "step": 96
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 0.0001816287878787879,
+ "loss": 2.2644,
+ "step": 97
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 0.00018143939393939395,
+ "loss": 2.259,
+ "step": 98
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 0.00018125000000000001,
+ "loss": 2.2567,
+ "step": 99
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 0.00018106060606060606,
+ "loss": 2.2599,
+ "step": 100
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 0.00018087121212121213,
+ "loss": 2.2091,
+ "step": 101
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 0.0001806818181818182,
+ "loss": 2.2312,
+ "step": 102
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 0.00018049242424242426,
+ "loss": 2.1869,
+ "step": 103
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 0.0001803030303030303,
+ "loss": 2.2023,
+ "step": 104
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 0.00018011363636363638,
+ "loss": 2.2132,
+ "step": 105
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 0.00017992424242424244,
+ "loss": 2.2612,
+ "step": 106
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 0.0001797348484848485,
+ "loss": 2.2109,
+ "step": 107
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 0.00017954545454545456,
+ "loss": 2.215,
+ "step": 108
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 0.0001793560606060606,
+ "loss": 2.2114,
+ "step": 109
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 0.0001791666666666667,
+ "loss": 2.2203,
+ "step": 110
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 0.00017897727272727274,
+ "loss": 2.2594,
+ "step": 111
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 0.0001787878787878788,
+ "loss": 2.2001,
+ "step": 112
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 0.00017859848484848485,
+ "loss": 2.2046,
+ "step": 113
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 0.00017840909090909092,
+ "loss": 2.1907,
+ "step": 114
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 0.00017821969696969699,
+ "loss": 2.2539,
+ "step": 115
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 0.00017803030303030303,
+ "loss": 2.2335,
+ "step": 116
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 0.0001778409090909091,
+ "loss": 2.2171,
+ "step": 117
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 0.00017765151515151517,
+ "loss": 2.2278,
+ "step": 118
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 0.00017746212121212123,
+ "loss": 2.231,
+ "step": 119
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 0.00017727272727272728,
+ "loss": 2.2141,
+ "step": 120
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 0.00017708333333333335,
+ "loss": 2.2432,
+ "step": 121
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 0.00017689393939393942,
+ "loss": 2.2266,
+ "step": 122
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 0.00017670454545454546,
+ "loss": 2.1929,
+ "step": 123
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 0.00017651515151515153,
+ "loss": 2.2077,
+ "step": 124
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 0.00017632575757575757,
+ "loss": 2.2133,
+ "step": 125
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 0.00017613636363636366,
+ "loss": 2.2251,
+ "step": 126
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 0.0001759469696969697,
+ "loss": 2.2265,
+ "step": 127
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 0.00017575757575757578,
+ "loss": 2.2186,
+ "step": 128
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 0.00017556818181818182,
+ "loss": 2.1925,
+ "step": 129
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 0.0001753787878787879,
+ "loss": 2.1956,
+ "step": 130
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 0.00017518939393939396,
+ "loss": 2.2459,
+ "step": 131
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 0.000175,
+ "loss": 2.22,
+ "step": 132
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 0.00017481060606060607,
+ "loss": 2.2143,
+ "step": 133
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 0.0001746212121212121,
+ "loss": 2.2359,
+ "step": 134
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 0.0001744318181818182,
+ "loss": 2.2058,
+ "step": 135
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 0.00017424242424242425,
+ "loss": 2.2307,
+ "step": 136
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 0.00017405303030303032,
+ "loss": 2.2062,
+ "step": 137
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 0.00017386363636363636,
+ "loss": 2.1796,
+ "step": 138
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 0.00017367424242424243,
+ "loss": 2.2054,
+ "step": 139
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 0.0001734848484848485,
+ "loss": 2.1651,
+ "step": 140
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 0.00017329545454545454,
+ "loss": 2.2159,
+ "step": 141
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 0.0001731060606060606,
+ "loss": 2.1988,
+ "step": 142
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 0.00017291666666666668,
+ "loss": 2.1676,
+ "step": 143
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 0.00017272727272727275,
+ "loss": 2.1725,
+ "step": 144
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 0.0001725378787878788,
+ "loss": 2.2205,
+ "step": 145
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 0.00017234848484848486,
+ "loss": 2.1486,
+ "step": 146
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 0.00017215909090909093,
+ "loss": 2.147,
+ "step": 147
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 0.00017196969696969697,
+ "loss": 2.1651,
+ "step": 148
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 0.00017178030303030304,
+ "loss": 2.1983,
+ "step": 149
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 0.00017159090909090908,
+ "loss": 2.1778,
+ "step": 150
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 0.00017140151515151518,
+ "loss": 2.1631,
+ "step": 151
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 0.00017121212121212122,
+ "loss": 2.1442,
+ "step": 152
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 0.0001710227272727273,
+ "loss": 2.1397,
+ "step": 153
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 0.00017083333333333333,
+ "loss": 2.1697,
+ "step": 154
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 0.0001706439393939394,
+ "loss": 2.1451,
+ "step": 155
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 0.00017045454545454547,
+ "loss": 2.1789,
+ "step": 156
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 0.0001702651515151515,
+ "loss": 2.1037,
+ "step": 157
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 0.00017007575757575758,
+ "loss": 2.1698,
+ "step": 158
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 0.00016988636363636365,
+ "loss": 2.1538,
+ "step": 159
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 0.00016969696969696972,
+ "loss": 2.2015,
+ "step": 160
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 0.00016950757575757576,
+ "loss": 2.179,
+ "step": 161
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 0.00016931818181818183,
+ "loss": 2.1766,
+ "step": 162
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 0.0001691287878787879,
+ "loss": 2.1646,
+ "step": 163
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 0.00016893939393939394,
+ "loss": 2.1694,
+ "step": 164
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 0.00016875,
+ "loss": 2.1562,
+ "step": 165
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 0.00016856060606060605,
+ "loss": 2.1551,
+ "step": 166
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 0.00016837121212121212,
+ "loss": 2.1652,
+ "step": 167
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 0.0001681818181818182,
+ "loss": 2.1594,
+ "step": 168
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 0.00016799242424242426,
+ "loss": 2.1674,
+ "step": 169
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 0.0001678030303030303,
+ "loss": 2.1378,
+ "step": 170
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 0.00016761363636363637,
+ "loss": 2.1447,
+ "step": 171
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 0.00016742424242424244,
+ "loss": 2.1451,
+ "step": 172
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 0.00016723484848484848,
+ "loss": 2.1336,
+ "step": 173
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 0.00016704545454545455,
+ "loss": 2.1231,
+ "step": 174
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 0.0001668560606060606,
+ "loss": 2.1143,
+ "step": 175
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 0.0001666666666666667,
+ "loss": 2.1316,
+ "step": 176
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 0.00016647727272727273,
+ "loss": 2.1281,
+ "step": 177
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 0.0001662878787878788,
+ "loss": 2.136,
+ "step": 178
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 0.00016609848484848484,
+ "loss": 2.1279,
+ "step": 179
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 0.00016590909090909094,
+ "loss": 2.1421,
+ "step": 180
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 0.00016571969696969698,
+ "loss": 2.1541,
+ "step": 181
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 0.00016553030303030305,
+ "loss": 2.1293,
+ "step": 182
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 0.0001653409090909091,
+ "loss": 2.1294,
+ "step": 183
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 0.00016515151515151516,
+ "loss": 2.1459,
+ "step": 184
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 0.00016496212121212123,
+ "loss": 2.1113,
+ "step": 185
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 0.00016477272727272727,
+ "loss": 2.1394,
+ "step": 186
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 0.00016458333333333334,
+ "loss": 2.1321,
+ "step": 187
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 0.0001643939393939394,
+ "loss": 2.148,
+ "step": 188
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 0.00016420454545454548,
+ "loss": 2.1631,
+ "step": 189
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 0.00016401515151515152,
+ "loss": 2.1276,
+ "step": 190
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 0.0001638257575757576,
+ "loss": 2.0706,
+ "step": 191
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 0.00016363636363636366,
+ "loss": 2.127,
+ "step": 192
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 0.0001634469696969697,
+ "loss": 2.1449,
+ "step": 193
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 0.00016325757575757577,
+ "loss": 2.1204,
+ "step": 194
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 0.0001630681818181818,
+ "loss": 2.0904,
+ "step": 195
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 0.0001628787878787879,
+ "loss": 2.1129,
+ "step": 196
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 0.00016268939393939395,
+ "loss": 2.1036,
+ "step": 197
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 0.00016250000000000002,
+ "loss": 2.1509,
+ "step": 198
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 0.00016231060606060606,
+ "loss": 2.1239,
+ "step": 199
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 0.00016212121212121213,
+ "loss": 2.145,
+ "step": 200
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 0.0001619318181818182,
+ "loss": 2.1221,
+ "step": 201
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 0.00016174242424242424,
+ "loss": 2.1181,
+ "step": 202
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 0.0001615530303030303,
+ "loss": 2.1306,
+ "step": 203
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 0.00016136363636363635,
+ "loss": 2.0199,
+ "step": 204
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 0.00016117424242424245,
+ "loss": 2.1178,
+ "step": 205
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 0.0001609848484848485,
+ "loss": 2.1584,
+ "step": 206
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 0.00016079545454545456,
+ "loss": 2.0872,
+ "step": 207
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 0.0001606060606060606,
+ "loss": 2.1033,
+ "step": 208
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 0.00016041666666666667,
+ "loss": 2.1381,
+ "step": 209
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 0.00016022727272727274,
+ "loss": 2.1127,
+ "step": 210
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 0.00016003787878787878,
+ "loss": 2.1077,
+ "step": 211
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 0.00015984848484848485,
+ "loss": 2.0984,
+ "step": 212
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 0.00015965909090909092,
+ "loss": 2.0994,
+ "step": 213
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 0.000159469696969697,
+ "loss": 2.096,
+ "step": 214
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 0.00015928030303030303,
+ "loss": 2.0909,
+ "step": 215
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 0.0001590909090909091,
+ "loss": 2.118,
+ "step": 216
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 0.00015890151515151517,
+ "loss": 2.0783,
+ "step": 217
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 0.0001587121212121212,
+ "loss": 2.0876,
+ "step": 218
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 0.00015852272727272728,
+ "loss": 2.0581,
+ "step": 219
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 0.00015833333333333332,
+ "loss": 2.0548,
+ "step": 220
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 0.00015814393939393942,
+ "loss": 2.0595,
+ "step": 221
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 0.00015795454545454546,
+ "loss": 2.0719,
+ "step": 222
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 0.00015776515151515153,
+ "loss": 2.0903,
+ "step": 223
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 0.00015757575757575757,
+ "loss": 2.0941,
+ "step": 224
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 0.00015738636363636364,
+ "loss": 2.0926,
+ "step": 225
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 0.0001571969696969697,
+ "loss": 2.0816,
+ "step": 226
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 0.00015700757575757575,
+ "loss": 2.0894,
+ "step": 227
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 0.00015681818181818182,
+ "loss": 2.0798,
+ "step": 228
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 0.0001566287878787879,
+ "loss": 2.0672,
+ "step": 229
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 0.00015643939393939396,
+ "loss": 2.0787,
+ "step": 230
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 0.00015625,
+ "loss": 2.0611,
+ "step": 231
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 0.00015606060606060607,
+ "loss": 2.0805,
+ "step": 232
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 0.00015587121212121211,
+ "loss": 2.053,
+ "step": 233
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 0.00015568181818181818,
+ "loss": 2.0575,
+ "step": 234
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 0.00015549242424242425,
+ "loss": 2.0459,
+ "step": 235
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 0.0001553030303030303,
+ "loss": 2.0635,
+ "step": 236
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 0.00015511363636363636,
+ "loss": 2.0335,
+ "step": 237
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 0.00015492424242424243,
+ "loss": 2.0681,
+ "step": 238
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 0.0001547348484848485,
+ "loss": 2.0748,
+ "step": 239
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 0.00015454545454545454,
+ "loss": 2.1091,
+ "step": 240
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 0.0001543560606060606,
+ "loss": 2.0732,
+ "step": 241
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 0.00015416666666666668,
+ "loss": 2.0746,
+ "step": 242
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 0.00015397727272727272,
+ "loss": 2.0306,
+ "step": 243
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 0.0001537878787878788,
+ "loss": 2.0864,
+ "step": 244
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 0.00015359848484848484,
+ "loss": 2.0664,
+ "step": 245
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 0.00015340909090909093,
+ "loss": 2.0801,
+ "step": 246
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 0.00015321969696969697,
+ "loss": 2.0799,
+ "step": 247
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 0.00015303030303030304,
+ "loss": 2.0621,
+ "step": 248
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 0.00015284090909090909,
+ "loss": 2.0687,
+ "step": 249
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 0.00015265151515151515,
+ "loss": 2.018,
+ "step": 250
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 0.00015246212121212122,
+ "loss": 2.0256,
+ "step": 251
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 0.00015227272727272727,
+ "loss": 2.0736,
+ "step": 252
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 0.00015208333333333333,
+ "loss": 2.0609,
+ "step": 253
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 0.0001518939393939394,
+ "loss": 2.0539,
+ "step": 254
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 0.00015170454545454547,
+ "loss": 2.0282,
+ "step": 255
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 0.00015151515151515152,
+ "loss": 2.0417,
+ "step": 256
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 0.00015132575757575758,
+ "loss": 2.0333,
+ "step": 257
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 0.00015113636363636365,
+ "loss": 2.0428,
+ "step": 258
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 0.00015094696969696972,
+ "loss": 2.045,
+ "step": 259
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 0.00015075757575757576,
+ "loss": 2.0463,
+ "step": 260
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 0.0001505681818181818,
+ "loss": 2.0539,
+ "step": 261
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 0.0001503787878787879,
+ "loss": 2.0184,
+ "step": 262
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 0.00015018939393939394,
+ "loss": 2.0858,
+ "step": 263
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 0.00015000000000000001,
+ "loss": 2.0239,
+ "step": 264
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 0.00014981060606060606,
+ "loss": 2.0425,
+ "step": 265
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 0.00014962121212121213,
+ "loss": 2.0263,
+ "step": 266
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 0.0001494318181818182,
+ "loss": 2.042,
+ "step": 267
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 0.00014924242424242426,
+ "loss": 2.026,
+ "step": 268
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 0.0001490530303030303,
+ "loss": 2.0411,
+ "step": 269
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 0.00014886363636363635,
+ "loss": 2.028,
+ "step": 270
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 0.00014867424242424244,
+ "loss": 2.0172,
+ "step": 271
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 0.00014848484848484849,
+ "loss": 2.0196,
+ "step": 272
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 0.00014829545454545455,
+ "loss": 2.0142,
+ "step": 273
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 0.0001481060606060606,
+ "loss": 2.0265,
+ "step": 274
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 0.0001479166666666667,
+ "loss": 2.0353,
+ "step": 275
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 0.00014772727272727274,
+ "loss": 2.0327,
+ "step": 276
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 0.0001475378787878788,
+ "loss": 2.0188,
+ "step": 277
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 0.00014734848484848485,
+ "loss": 1.9987,
+ "step": 278
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 0.00014715909090909092,
+ "loss": 2.0141,
+ "step": 279
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 0.00014696969696969698,
+ "loss": 2.0403,
+ "step": 280
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 0.00014678030303030303,
+ "loss": 1.9977,
+ "step": 281
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 0.0001465909090909091,
+ "loss": 1.9674,
+ "step": 282
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 0.00014640151515151517,
+ "loss": 1.9984,
+ "step": 283
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 0.00014621212121212123,
+ "loss": 1.9796,
+ "step": 284
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 0.00014602272727272728,
+ "loss": 2.0139,
+ "step": 285
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 0.00014583333333333335,
+ "loss": 1.9866,
+ "step": 286
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 0.00014564393939393941,
+ "loss": 2.0208,
+ "step": 287
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 0.00014545454545454546,
+ "loss": 1.9844,
+ "step": 288
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 0.00014526515151515153,
+ "loss": 2.0082,
+ "step": 289
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 0.00014507575757575757,
+ "loss": 1.984,
+ "step": 290
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 0.00014488636363636366,
+ "loss": 2.0015,
+ "step": 291
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 0.0001446969696969697,
+ "loss": 2.0209,
+ "step": 292
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 0.00014450757575757578,
+ "loss": 1.9728,
+ "step": 293
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 0.00014431818181818182,
+ "loss": 2.0032,
+ "step": 294
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 0.00014412878787878789,
+ "loss": 1.9641,
+ "step": 295
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 0.00014393939393939396,
+ "loss": 1.9945,
+ "step": 296
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 0.00014375,
+ "loss": 1.9658,
+ "step": 297
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 0.00014356060606060607,
+ "loss": 1.9907,
+ "step": 298
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 0.0001433712121212121,
+ "loss": 1.9935,
+ "step": 299
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 0.0001431818181818182,
+ "loss": 1.9897,
+ "step": 300
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 0.00014299242424242425,
+ "loss": 1.984,
+ "step": 301
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 0.00014280303030303032,
+ "loss": 1.9581,
+ "step": 302
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 0.00014261363636363636,
+ "loss": 1.9893,
+ "step": 303
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 0.00014242424242424243,
+ "loss": 1.9568,
+ "step": 304
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 0.0001422348484848485,
+ "loss": 1.98,
+ "step": 305
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 0.00014204545454545454,
+ "loss": 1.9519,
+ "step": 306
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 0.0001418560606060606,
+ "loss": 1.9693,
+ "step": 307
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 0.00014166666666666668,
+ "loss": 1.9866,
+ "step": 308
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 0.00014147727272727275,
+ "loss": 1.9508,
+ "step": 309
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 0.0001412878787878788,
+ "loss": 1.9653,
+ "step": 310
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 0.00014109848484848486,
+ "loss": 1.9991,
+ "step": 311
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 0.00014090909090909093,
+ "loss": 1.9442,
+ "step": 312
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 0.00014071969696969697,
+ "loss": 1.9807,
+ "step": 313
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 0.00014053030303030304,
+ "loss": 1.9958,
+ "step": 314
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 0.00014034090909090908,
+ "loss": 1.9459,
+ "step": 315
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 0.00014015151515151518,
+ "loss": 1.9508,
+ "step": 316
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 0.00013996212121212122,
+ "loss": 1.9933,
+ "step": 317
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 0.0001397727272727273,
+ "loss": 1.9703,
+ "step": 318
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 0.00013958333333333333,
+ "loss": 1.965,
+ "step": 319
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 0.0001393939393939394,
+ "loss": 1.9264,
+ "step": 320
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 0.00013920454545454547,
+ "loss": 1.9688,
+ "step": 321
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 0.0001390151515151515,
+ "loss": 1.9901,
+ "step": 322
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 0.00013882575757575758,
+ "loss": 1.9363,
+ "step": 323
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 0.00013863636363636365,
+ "loss": 1.9269,
+ "step": 324
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 0.00013844696969696972,
+ "loss": 1.9688,
+ "step": 325
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 0.00013825757575757576,
+ "loss": 1.9758,
+ "step": 326
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 0.00013806818181818183,
+ "loss": 1.9414,
+ "step": 327
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 0.0001378787878787879,
+ "loss": 1.9397,
+ "step": 328
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 0.00013768939393939394,
+ "loss": 1.9032,
+ "step": 329
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 0.0001375,
+ "loss": 1.9777,
+ "step": 330
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 0.00013731060606060605,
+ "loss": 1.9173,
+ "step": 331
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 0.00013712121212121212,
+ "loss": 1.9307,
+ "step": 332
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 0.0001369318181818182,
+ "loss": 1.9611,
+ "step": 333
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 0.00013674242424242426,
+ "loss": 1.9698,
+ "step": 334
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 0.0001365530303030303,
+ "loss": 1.9619,
+ "step": 335
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 0.00013636363636363637,
+ "loss": 1.9322,
+ "step": 336
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 0.00013617424242424244,
+ "loss": 1.9441,
+ "step": 337
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 0.00013598484848484848,
+ "loss": 1.9563,
+ "step": 338
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 0.00013579545454545455,
+ "loss": 1.9283,
+ "step": 339
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 0.0001356060606060606,
+ "loss": 1.9508,
+ "step": 340
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 0.0001354166666666667,
+ "loss": 1.9285,
+ "step": 341
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 0.00013522727272727273,
+ "loss": 1.9295,
+ "step": 342
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 0.0001350378787878788,
+ "loss": 1.9272,
+ "step": 343
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 0.00013484848484848484,
+ "loss": 1.905,
+ "step": 344
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 0.00013465909090909094,
+ "loss": 1.9409,
+ "step": 345
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 0.00013446969696969698,
+ "loss": 1.9674,
+ "step": 346
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 0.00013428030303030302,
+ "loss": 1.9278,
+ "step": 347
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 0.0001340909090909091,
+ "loss": 1.9136,
+ "step": 348
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 0.00013390151515151516,
+ "loss": 1.9143,
+ "step": 349
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 0.00013371212121212123,
+ "loss": 1.9381,
+ "step": 350
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 0.00013352272727272727,
+ "loss": 1.9136,
+ "step": 351
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 0.00013333333333333334,
+ "loss": 1.9103,
+ "step": 352
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 0.0001331439393939394,
+ "loss": 1.9027,
+ "step": 353
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 0.00013295454545454548,
+ "loss": 1.8674,
+ "step": 354
+ },
+ {
+ "epoch": 1.01,
+ "learning_rate": 0.00013276515151515152,
+ "loss": 1.886,
+ "step": 355
+ },
+ {
+ "epoch": 1.01,
+ "learning_rate": 0.00013257575757575756,
+ "loss": 1.887,
+ "step": 356
+ },
+ {
+ "epoch": 1.01,
+ "learning_rate": 0.00013238636363636366,
+ "loss": 1.87,
+ "step": 357
+ },
+ {
+ "epoch": 1.02,
+ "learning_rate": 0.0001321969696969697,
+ "loss": 1.8715,
+ "step": 358
+ },
+ {
+ "epoch": 1.02,
+ "learning_rate": 0.00013200757575757577,
+ "loss": 1.8993,
+ "step": 359
+ },
+ {
+ "epoch": 1.02,
+ "learning_rate": 0.0001318181818181818,
+ "loss": 1.8844,
+ "step": 360
+ },
+ {
+ "epoch": 1.02,
+ "learning_rate": 0.0001316287878787879,
+ "loss": 1.8965,
+ "step": 361
+ },
+ {
+ "epoch": 1.03,
+ "learning_rate": 0.00013143939393939395,
+ "loss": 1.8956,
+ "step": 362
+ },
+ {
+ "epoch": 1.03,
+ "learning_rate": 0.00013125000000000002,
+ "loss": 1.869,
+ "step": 363
+ },
+ {
+ "epoch": 1.03,
+ "learning_rate": 0.00013106060606060606,
+ "loss": 1.8702,
+ "step": 364
+ },
+ {
+ "epoch": 1.04,
+ "learning_rate": 0.00013087121212121213,
+ "loss": 1.8962,
+ "step": 365
+ },
+ {
+ "epoch": 1.04,
+ "learning_rate": 0.0001306818181818182,
+ "loss": 1.8613,
+ "step": 366
+ },
+ {
+ "epoch": 1.04,
+ "learning_rate": 0.00013049242424242424,
+ "loss": 1.8845,
+ "step": 367
+ },
+ {
+ "epoch": 1.04,
+ "learning_rate": 0.0001303030303030303,
+ "loss": 1.8689,
+ "step": 368
+ },
+ {
+ "epoch": 1.05,
+ "learning_rate": 0.00013011363636363635,
+ "loss": 1.9059,
+ "step": 369
+ },
+ {
+ "epoch": 1.05,
+ "learning_rate": 0.00012992424242424245,
+ "loss": 1.9082,
+ "step": 370
+ },
+ {
+ "epoch": 1.05,
+ "learning_rate": 0.0001297348484848485,
+ "loss": 1.8918,
+ "step": 371
+ },
+ {
+ "epoch": 1.06,
+ "learning_rate": 0.00012954545454545456,
+ "loss": 1.8657,
+ "step": 372
+ },
+ {
+ "epoch": 1.06,
+ "learning_rate": 0.0001293560606060606,
+ "loss": 1.8909,
+ "step": 373
+ },
+ {
+ "epoch": 1.06,
+ "learning_rate": 0.00012916666666666667,
+ "loss": 1.8649,
+ "step": 374
+ },
+ {
+ "epoch": 1.06,
+ "learning_rate": 0.00012897727272727274,
+ "loss": 1.833,
+ "step": 375
+ },
+ {
+ "epoch": 1.07,
+ "learning_rate": 0.00012878787878787878,
+ "loss": 1.8815,
+ "step": 376
+ },
+ {
+ "epoch": 1.07,
+ "learning_rate": 0.00012859848484848485,
+ "loss": 1.8646,
+ "step": 377
+ },
+ {
+ "epoch": 1.07,
+ "learning_rate": 0.00012840909090909092,
+ "loss": 1.846,
+ "step": 378
+ },
+ {
+ "epoch": 1.08,
+ "learning_rate": 0.000128219696969697,
+ "loss": 1.8631,
+ "step": 379
+ },
+ {
+ "epoch": 1.08,
+ "learning_rate": 0.00012803030303030303,
+ "loss": 1.917,
+ "step": 380
+ },
+ {
+ "epoch": 1.08,
+ "learning_rate": 0.0001278409090909091,
+ "loss": 1.9068,
+ "step": 381
+ },
+ {
+ "epoch": 1.08,
+ "learning_rate": 0.00012765151515151517,
+ "loss": 1.8772,
+ "step": 382
+ },
+ {
+ "epoch": 1.09,
+ "learning_rate": 0.0001274621212121212,
+ "loss": 1.8414,
+ "step": 383
+ },
+ {
+ "epoch": 1.09,
+ "learning_rate": 0.00012727272727272728,
+ "loss": 1.9003,
+ "step": 384
+ },
+ {
+ "epoch": 1.09,
+ "learning_rate": 0.00012708333333333332,
+ "loss": 1.8415,
+ "step": 385
+ },
+ {
+ "epoch": 1.1,
+ "learning_rate": 0.00012689393939393942,
+ "loss": 1.8491,
+ "step": 386
+ },
+ {
+ "epoch": 1.1,
+ "learning_rate": 0.00012670454545454546,
+ "loss": 1.8875,
+ "step": 387
+ },
+ {
+ "epoch": 1.1,
+ "learning_rate": 0.00012651515151515153,
+ "loss": 1.8629,
+ "step": 388
+ },
+ {
+ "epoch": 1.1,
+ "learning_rate": 0.00012632575757575757,
+ "loss": 1.8378,
+ "step": 389
+ },
+ {
+ "epoch": 1.11,
+ "learning_rate": 0.00012613636363636364,
+ "loss": 1.8442,
+ "step": 390
+ },
+ {
+ "epoch": 1.11,
+ "learning_rate": 0.0001259469696969697,
+ "loss": 1.8587,
+ "step": 391
+ },
+ {
+ "epoch": 1.11,
+ "learning_rate": 0.00012575757575757575,
+ "loss": 1.8659,
+ "step": 392
+ },
+ {
+ "epoch": 1.11,
+ "learning_rate": 0.00012556818181818182,
+ "loss": 1.8271,
+ "step": 393
+ },
+ {
+ "epoch": 1.12,
+ "learning_rate": 0.0001253787878787879,
+ "loss": 1.8692,
+ "step": 394
+ },
+ {
+ "epoch": 1.12,
+ "learning_rate": 0.00012518939393939396,
+ "loss": 1.8071,
+ "step": 395
+ },
+ {
+ "epoch": 1.12,
+ "learning_rate": 0.000125,
+ "loss": 1.8564,
+ "step": 396
+ },
+ {
+ "epoch": 1.13,
+ "learning_rate": 0.00012481060606060607,
+ "loss": 1.8891,
+ "step": 397
+ },
+ {
+ "epoch": 1.13,
+ "learning_rate": 0.00012462121212121211,
+ "loss": 1.8173,
+ "step": 398
+ },
+ {
+ "epoch": 1.13,
+ "learning_rate": 0.00012443181818181818,
+ "loss": 1.8653,
+ "step": 399
+ },
+ {
+ "epoch": 1.13,
+ "learning_rate": 0.00012424242424242425,
+ "loss": 1.8843,
+ "step": 400
+ },
+ {
+ "epoch": 1.14,
+ "learning_rate": 0.0001240530303030303,
+ "loss": 1.8527,
+ "step": 401
+ },
+ {
+ "epoch": 1.14,
+ "learning_rate": 0.00012386363636363636,
+ "loss": 1.8352,
+ "step": 402
+ },
+ {
+ "epoch": 1.14,
+ "learning_rate": 0.00012367424242424243,
+ "loss": 1.866,
+ "step": 403
+ },
+ {
+ "epoch": 1.15,
+ "learning_rate": 0.0001234848484848485,
+ "loss": 1.8557,
+ "step": 404
+ },
+ {
+ "epoch": 1.15,
+ "learning_rate": 0.00012329545454545454,
+ "loss": 1.8284,
+ "step": 405
+ },
+ {
+ "epoch": 1.15,
+ "learning_rate": 0.0001231060606060606,
+ "loss": 1.8359,
+ "step": 406
+ },
+ {
+ "epoch": 1.15,
+ "learning_rate": 0.00012291666666666668,
+ "loss": 1.8437,
+ "step": 407
+ },
+ {
+ "epoch": 1.16,
+ "learning_rate": 0.00012272727272727272,
+ "loss": 1.8256,
+ "step": 408
+ },
+ {
+ "epoch": 1.16,
+ "learning_rate": 0.0001225378787878788,
+ "loss": 1.8297,
+ "step": 409
+ },
+ {
+ "epoch": 1.16,
+ "learning_rate": 0.00012234848484848484,
+ "loss": 1.8515,
+ "step": 410
+ },
+ {
+ "epoch": 1.17,
+ "learning_rate": 0.00012215909090909093,
+ "loss": 1.8198,
+ "step": 411
+ },
+ {
+ "epoch": 1.17,
+ "learning_rate": 0.00012196969696969697,
+ "loss": 1.7809,
+ "step": 412
+ },
+ {
+ "epoch": 1.17,
+ "learning_rate": 0.00012178030303030303,
+ "loss": 1.8438,
+ "step": 413
+ },
+ {
+ "epoch": 1.17,
+ "learning_rate": 0.00012159090909090908,
+ "loss": 1.8497,
+ "step": 414
+ },
+ {
+ "epoch": 1.18,
+ "learning_rate": 0.00012140151515151517,
+ "loss": 1.8463,
+ "step": 415
+ },
+ {
+ "epoch": 1.18,
+ "learning_rate": 0.00012121212121212122,
+ "loss": 1.7768,
+ "step": 416
+ },
+ {
+ "epoch": 1.18,
+ "learning_rate": 0.00012102272727272728,
+ "loss": 1.8561,
+ "step": 417
+ },
+ {
+ "epoch": 1.19,
+ "learning_rate": 0.00012083333333333333,
+ "loss": 1.863,
+ "step": 418
+ },
+ {
+ "epoch": 1.19,
+ "learning_rate": 0.0001206439393939394,
+ "loss": 1.8193,
+ "step": 419
+ },
+ {
+ "epoch": 1.19,
+ "learning_rate": 0.00012045454545454546,
+ "loss": 1.7732,
+ "step": 420
+ },
+ {
+ "epoch": 1.19,
+ "learning_rate": 0.00012026515151515151,
+ "loss": 1.7728,
+ "step": 421
+ },
+ {
+ "epoch": 1.2,
+ "learning_rate": 0.00012007575757575757,
+ "loss": 1.8113,
+ "step": 422
+ },
+ {
+ "epoch": 1.2,
+ "learning_rate": 0.00011988636363636365,
+ "loss": 1.7976,
+ "step": 423
+ },
+ {
+ "epoch": 1.2,
+ "learning_rate": 0.00011969696969696971,
+ "loss": 1.786,
+ "step": 424
+ },
+ {
+ "epoch": 1.21,
+ "learning_rate": 0.00011950757575757576,
+ "loss": 1.8019,
+ "step": 425
+ },
+ {
+ "epoch": 1.21,
+ "learning_rate": 0.00011931818181818182,
+ "loss": 1.786,
+ "step": 426
+ },
+ {
+ "epoch": 1.21,
+ "learning_rate": 0.00011912878787878789,
+ "loss": 1.8102,
+ "step": 427
+ },
+ {
+ "epoch": 1.21,
+ "learning_rate": 0.00011893939393939394,
+ "loss": 1.7828,
+ "step": 428
+ },
+ {
+ "epoch": 1.22,
+ "learning_rate": 0.00011875,
+ "loss": 1.8498,
+ "step": 429
+ },
+ {
+ "epoch": 1.22,
+ "learning_rate": 0.00011856060606060606,
+ "loss": 1.7983,
+ "step": 430
+ },
+ {
+ "epoch": 1.22,
+ "learning_rate": 0.00011837121212121211,
+ "loss": 1.7863,
+ "step": 431
+ },
+ {
+ "epoch": 1.23,
+ "learning_rate": 0.0001181818181818182,
+ "loss": 1.8171,
+ "step": 432
+ },
+ {
+ "epoch": 1.23,
+ "learning_rate": 0.00011799242424242425,
+ "loss": 1.8143,
+ "step": 433
+ },
+ {
+ "epoch": 1.23,
+ "learning_rate": 0.0001178030303030303,
+ "loss": 1.7815,
+ "step": 434
+ },
+ {
+ "epoch": 1.23,
+ "learning_rate": 0.00011761363636363636,
+ "loss": 1.7652,
+ "step": 435
+ },
+ {
+ "epoch": 1.24,
+ "learning_rate": 0.00011742424242424244,
+ "loss": 1.8242,
+ "step": 436
+ },
+ {
+ "epoch": 1.24,
+ "learning_rate": 0.00011723484848484849,
+ "loss": 1.7789,
+ "step": 437
+ },
+ {
+ "epoch": 1.24,
+ "learning_rate": 0.00011704545454545454,
+ "loss": 1.7549,
+ "step": 438
+ },
+ {
+ "epoch": 1.25,
+ "learning_rate": 0.0001168560606060606,
+ "loss": 1.7528,
+ "step": 439
+ },
+ {
+ "epoch": 1.25,
+ "learning_rate": 0.00011666666666666668,
+ "loss": 1.7443,
+ "step": 440
+ },
+ {
+ "epoch": 1.25,
+ "learning_rate": 0.00011647727272727273,
+ "loss": 1.7911,
+ "step": 441
+ },
+ {
+ "epoch": 1.25,
+ "learning_rate": 0.00011628787878787879,
+ "loss": 1.7848,
+ "step": 442
+ },
+ {
+ "epoch": 1.26,
+ "learning_rate": 0.00011609848484848485,
+ "loss": 1.8137,
+ "step": 443
+ },
+ {
+ "epoch": 1.26,
+ "learning_rate": 0.00011590909090909093,
+ "loss": 1.791,
+ "step": 444
+ },
+ {
+ "epoch": 1.26,
+ "learning_rate": 0.00011571969696969698,
+ "loss": 1.7921,
+ "step": 445
+ },
+ {
+ "epoch": 1.27,
+ "learning_rate": 0.00011553030303030304,
+ "loss": 1.772,
+ "step": 446
+ },
+ {
+ "epoch": 1.27,
+ "learning_rate": 0.00011534090909090908,
+ "loss": 1.776,
+ "step": 447
+ },
+ {
+ "epoch": 1.27,
+ "learning_rate": 0.00011515151515151516,
+ "loss": 1.7948,
+ "step": 448
+ },
+ {
+ "epoch": 1.27,
+ "learning_rate": 0.00011496212121212122,
+ "loss": 1.8187,
+ "step": 449
+ },
+ {
+ "epoch": 1.28,
+ "learning_rate": 0.00011477272727272728,
+ "loss": 1.7436,
+ "step": 450
+ },
+ {
+ "epoch": 1.28,
+ "learning_rate": 0.00011458333333333333,
+ "loss": 1.7326,
+ "step": 451
+ },
+ {
+ "epoch": 1.28,
+ "learning_rate": 0.00011439393939393941,
+ "loss": 1.8005,
+ "step": 452
+ },
+ {
+ "epoch": 1.29,
+ "learning_rate": 0.00011420454545454547,
+ "loss": 1.8088,
+ "step": 453
+ },
+ {
+ "epoch": 1.29,
+ "learning_rate": 0.00011401515151515153,
+ "loss": 1.7632,
+ "step": 454
+ },
+ {
+ "epoch": 1.29,
+ "learning_rate": 0.00011382575757575758,
+ "loss": 1.7848,
+ "step": 455
+ },
+ {
+ "epoch": 1.29,
+ "learning_rate": 0.00011363636363636365,
+ "loss": 1.7756,
+ "step": 456
+ },
+ {
+ "epoch": 1.3,
+ "learning_rate": 0.0001134469696969697,
+ "loss": 1.7964,
+ "step": 457
+ },
+ {
+ "epoch": 1.3,
+ "learning_rate": 0.00011325757575757576,
+ "loss": 1.7604,
+ "step": 458
+ },
+ {
+ "epoch": 1.3,
+ "learning_rate": 0.00011306818181818182,
+ "loss": 1.7914,
+ "step": 459
+ },
+ {
+ "epoch": 1.3,
+ "learning_rate": 0.0001128787878787879,
+ "loss": 1.8059,
+ "step": 460
+ },
+ {
+ "epoch": 1.31,
+ "learning_rate": 0.00011268939393939395,
+ "loss": 1.7647,
+ "step": 461
+ },
+ {
+ "epoch": 1.31,
+ "learning_rate": 0.00011250000000000001,
+ "loss": 1.7526,
+ "step": 462
+ },
+ {
+ "epoch": 1.31,
+ "learning_rate": 0.00011231060606060607,
+ "loss": 1.7736,
+ "step": 463
+ },
+ {
+ "epoch": 1.32,
+ "learning_rate": 0.00011212121212121212,
+ "loss": 1.7449,
+ "step": 464
+ },
+ {
+ "epoch": 1.32,
+ "learning_rate": 0.00011193181818181819,
+ "loss": 1.7636,
+ "step": 465
+ },
+ {
+ "epoch": 1.32,
+ "learning_rate": 0.00011174242424242425,
+ "loss": 1.7846,
+ "step": 466
+ },
+ {
+ "epoch": 1.32,
+ "learning_rate": 0.0001115530303030303,
+ "loss": 1.78,
+ "step": 467
+ },
+ {
+ "epoch": 1.33,
+ "learning_rate": 0.00011136363636363636,
+ "loss": 1.7828,
+ "step": 468
+ },
+ {
+ "epoch": 1.33,
+ "learning_rate": 0.00011117424242424244,
+ "loss": 1.729,
+ "step": 469
+ },
+ {
+ "epoch": 1.33,
+ "learning_rate": 0.0001109848484848485,
+ "loss": 1.7145,
+ "step": 470
+ },
+ {
+ "epoch": 1.34,
+ "learning_rate": 0.00011079545454545455,
+ "loss": 1.7189,
+ "step": 471
+ },
+ {
+ "epoch": 1.34,
+ "learning_rate": 0.00011060606060606061,
+ "loss": 1.7628,
+ "step": 472
+ },
+ {
+ "epoch": 1.34,
+ "learning_rate": 0.00011041666666666668,
+ "loss": 1.7399,
+ "step": 473
+ },
+ {
+ "epoch": 1.34,
+ "learning_rate": 0.00011022727272727273,
+ "loss": 1.7561,
+ "step": 474
+ },
+ {
+ "epoch": 1.35,
+ "learning_rate": 0.00011003787878787879,
+ "loss": 1.7979,
+ "step": 475
+ },
+ {
+ "epoch": 1.35,
+ "learning_rate": 0.00010984848484848484,
+ "loss": 1.7673,
+ "step": 476
+ },
+ {
+ "epoch": 1.35,
+ "learning_rate": 0.00010965909090909093,
+ "loss": 1.777,
+ "step": 477
+ },
+ {
+ "epoch": 1.36,
+ "learning_rate": 0.00010946969696969698,
+ "loss": 1.7042,
+ "step": 478
+ },
+ {
+ "epoch": 1.36,
+ "learning_rate": 0.00010928030303030304,
+ "loss": 1.7764,
+ "step": 479
+ },
+ {
+ "epoch": 1.36,
+ "learning_rate": 0.00010909090909090909,
+ "loss": 1.6993,
+ "step": 480
+ },
+ {
+ "epoch": 1.36,
+ "learning_rate": 0.00010890151515151516,
+ "loss": 1.7688,
+ "step": 481
+ },
+ {
+ "epoch": 1.37,
+ "learning_rate": 0.00010871212121212122,
+ "loss": 1.7428,
+ "step": 482
+ },
+ {
+ "epoch": 1.37,
+ "learning_rate": 0.00010852272727272727,
+ "loss": 1.675,
+ "step": 483
+ },
+ {
+ "epoch": 1.37,
+ "learning_rate": 0.00010833333333333333,
+ "loss": 1.7183,
+ "step": 484
+ },
+ {
+ "epoch": 1.38,
+ "learning_rate": 0.00010814393939393941,
+ "loss": 1.7305,
+ "step": 485
+ },
+ {
+ "epoch": 1.38,
+ "learning_rate": 0.00010795454545454547,
+ "loss": 1.7541,
+ "step": 486
+ },
+ {
+ "epoch": 1.38,
+ "learning_rate": 0.00010776515151515152,
+ "loss": 1.7074,
+ "step": 487
+ },
+ {
+ "epoch": 1.38,
+ "learning_rate": 0.00010757575757575758,
+ "loss": 1.7093,
+ "step": 488
+ },
+ {
+ "epoch": 1.39,
+ "learning_rate": 0.00010738636363636365,
+ "loss": 1.7354,
+ "step": 489
+ },
+ {
+ "epoch": 1.39,
+ "learning_rate": 0.0001071969696969697,
+ "loss": 1.7415,
+ "step": 490
+ },
+ {
+ "epoch": 1.39,
+ "learning_rate": 0.00010700757575757576,
+ "loss": 1.72,
+ "step": 491
+ },
+ {
+ "epoch": 1.4,
+ "learning_rate": 0.00010681818181818181,
+ "loss": 1.7453,
+ "step": 492
+ },
+ {
+ "epoch": 1.4,
+ "learning_rate": 0.0001066287878787879,
+ "loss": 1.7077,
+ "step": 493
+ },
+ {
+ "epoch": 1.4,
+ "learning_rate": 0.00010643939393939395,
+ "loss": 1.6936,
+ "step": 494
+ },
+ {
+ "epoch": 1.4,
+ "learning_rate": 0.00010625000000000001,
+ "loss": 1.7616,
+ "step": 495
+ },
+ {
+ "epoch": 1.41,
+ "learning_rate": 0.00010606060606060606,
+ "loss": 1.7749,
+ "step": 496
+ },
+ {
+ "epoch": 1.41,
+ "learning_rate": 0.00010587121212121212,
+ "loss": 1.7375,
+ "step": 497
+ },
+ {
+ "epoch": 1.41,
+ "learning_rate": 0.00010568181818181819,
+ "loss": 1.7203,
+ "step": 498
+ },
+ {
+ "epoch": 1.42,
+ "learning_rate": 0.00010549242424242424,
+ "loss": 1.7148,
+ "step": 499
+ },
+ {
+ "epoch": 1.42,
+ "learning_rate": 0.0001053030303030303,
+ "loss": 1.7859,
+ "step": 500
+ }
+ ],
+ "logging_steps": 1,
+ "max_steps": 1056,
+ "num_train_epochs": 3,
+ "save_steps": 100,
+ "total_flos": 1.2661854770688614e+18,
+ "trial_name": null,
+ "trial_params": null
+}
diff --git a/checkpoint-500/training_args.bin b/checkpoint-500/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..574132c086f9a526d71493b1ec4c09396eac5482
--- /dev/null
+++ b/checkpoint-500/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:50303c7f1586feb001f01b7e968e567295f501ef6c6407e28250c458696a43af
+size 4155
diff --git a/checkpoint-600/README.md b/checkpoint-600/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..08371015f02382e6fcba318f4aaea54ae52cd3c4
--- /dev/null
+++ b/checkpoint-600/README.md
@@ -0,0 +1,34 @@
+---
+library_name: peft
+---
+## Training procedure
+
+
+The following `bitsandbytes` quantization config was used during training:
+- quant_method: bitsandbytes
+- load_in_8bit: True
+- load_in_4bit: False
+- llm_int8_threshold: 6.0
+- llm_int8_skip_modules: None
+- llm_int8_enable_fp32_cpu_offload: False
+- llm_int8_has_fp16_weight: False
+- bnb_4bit_quant_type: fp4
+- bnb_4bit_use_double_quant: False
+- bnb_4bit_compute_dtype: float32
+
+The following `bitsandbytes` quantization config was used during training:
+- quant_method: bitsandbytes
+- load_in_8bit: True
+- load_in_4bit: False
+- llm_int8_threshold: 6.0
+- llm_int8_skip_modules: None
+- llm_int8_enable_fp32_cpu_offload: False
+- llm_int8_has_fp16_weight: False
+- bnb_4bit_quant_type: fp4
+- bnb_4bit_use_double_quant: False
+- bnb_4bit_compute_dtype: float32
+### Framework versions
+
+- PEFT 0.6.0.dev0
+
+- PEFT 0.6.0.dev0
diff --git a/checkpoint-600/adapter_config.json b/checkpoint-600/adapter_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..751d838ac0c1ae5ca71ca448b25d7a8a0173f01b
--- /dev/null
+++ b/checkpoint-600/adapter_config.json
@@ -0,0 +1,23 @@
+{
+ "auto_mapping": null,
+ "base_model_name_or_path": "bigscience/bloomz-3b",
+ "bias": "none",
+ "fan_in_fan_out": false,
+ "inference_mode": true,
+ "init_lora_weights": true,
+ "layers_pattern": null,
+ "layers_to_transform": null,
+ "lora_alpha": 16,
+ "lora_dropout": 0.0,
+ "modules_to_save": null,
+ "peft_type": "LORA",
+ "r": 8,
+ "revision": null,
+ "target_modules": [
+ "dense_4h_to_h",
+ "dense",
+ "dense_h_to_4h",
+ "query_key_value"
+ ],
+ "task_type": "CAUSAL_LM"
+}
\ No newline at end of file
diff --git a/checkpoint-600/adapter_model.bin b/checkpoint-600/adapter_model.bin
new file mode 100644
index 0000000000000000000000000000000000000000..6928ac1b34082af03d15032ebb3235157e7e9417
--- /dev/null
+++ b/checkpoint-600/adapter_model.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6b7759b3b722013cb431b017d86d75233fcd33aaec6cfe739b146ed900c96f8e
+size 39409357
diff --git a/checkpoint-600/optimizer.pt b/checkpoint-600/optimizer.pt
new file mode 100644
index 0000000000000000000000000000000000000000..645a5a6e895145ed27283db47665427d30b062f5
--- /dev/null
+++ b/checkpoint-600/optimizer.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ad22c89414d0d96cf7691f419539ad2a227ca4579c543db0e1d87ba673dd1049
+size 78844421
diff --git a/checkpoint-600/rng_state.pth b/checkpoint-600/rng_state.pth
new file mode 100644
index 0000000000000000000000000000000000000000..b9111227e3b79b9bc3e2a642832c3e49e36216d4
--- /dev/null
+++ b/checkpoint-600/rng_state.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:972139d83957a9cf2600cb6eeca17287d7a5377c33a53500ae7e13fe830ad36b
+size 14575
diff --git a/checkpoint-600/scheduler.pt b/checkpoint-600/scheduler.pt
new file mode 100644
index 0000000000000000000000000000000000000000..5eae217b69587207d6473ddfe9144c2f3597ba6d
--- /dev/null
+++ b/checkpoint-600/scheduler.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6542076aaff4a0174b47c33f5c8b6e88b077e7f5ee30557ce058012350b5eccc
+size 627
diff --git a/checkpoint-600/special_tokens_map.json b/checkpoint-600/special_tokens_map.json
new file mode 100644
index 0000000000000000000000000000000000000000..fdafe480f024ff444c7492147536765ce5d55a2d
--- /dev/null
+++ b/checkpoint-600/special_tokens_map.json
@@ -0,0 +1,6 @@
+{
+ "bos_token": "",
+ "eos_token": "",
+ "pad_token": "",
+ "unk_token": ""
+}
diff --git a/checkpoint-600/tokenizer.json b/checkpoint-600/tokenizer.json
new file mode 100644
index 0000000000000000000000000000000000000000..673c31abdeadf6576c3c754df86459e1ad64e207
--- /dev/null
+++ b/checkpoint-600/tokenizer.json
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:85b00d7db4df5df2e3f01cacc3feda246002a672f3356eec7f4b04a22eb0dfbe
+size 14500570
diff --git a/checkpoint-600/tokenizer_config.json b/checkpoint-600/tokenizer_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..4b56cc9c2965c07132c35df3e2972e93d98c82c3
--- /dev/null
+++ b/checkpoint-600/tokenizer_config.json
@@ -0,0 +1,10 @@
+{
+ "add_prefix_space": false,
+ "bos_token": "",
+ "clean_up_tokenization_spaces": false,
+ "eos_token": "",
+ "model_max_length": 1000000000000000019884624838656,
+ "pad_token": "",
+ "tokenizer_class": "BloomTokenizer",
+ "unk_token": ""
+}
diff --git a/checkpoint-600/trainer_state.json b/checkpoint-600/trainer_state.json
new file mode 100644
index 0000000000000000000000000000000000000000..fec99c8e850f2d198144146e5282a677a2673b30
--- /dev/null
+++ b/checkpoint-600/trainer_state.json
@@ -0,0 +1,3619 @@
+{
+ "best_metric": null,
+ "best_model_checkpoint": null,
+ "epoch": 1.702127659574468,
+ "eval_steps": 500,
+ "global_step": 600,
+ "is_hyper_param_search": false,
+ "is_local_process_zero": true,
+ "is_world_process_zero": true,
+ "log_history": [
+ {
+ "epoch": 0.0,
+ "learning_rate": 0.00019981060606060605,
+ "loss": 2.9206,
+ "step": 1
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 0.00019962121212121212,
+ "loss": 2.7609,
+ "step": 2
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 0.0001994318181818182,
+ "loss": 2.6878,
+ "step": 3
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 0.00019924242424242426,
+ "loss": 2.6697,
+ "step": 4
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 0.0001990530303030303,
+ "loss": 2.5818,
+ "step": 5
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 0.00019886363636363637,
+ "loss": 2.5396,
+ "step": 6
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 0.00019867424242424244,
+ "loss": 2.5265,
+ "step": 7
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 0.0001984848484848485,
+ "loss": 2.5475,
+ "step": 8
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 0.00019829545454545455,
+ "loss": 2.4835,
+ "step": 9
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 0.0001981060606060606,
+ "loss": 2.4559,
+ "step": 10
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 0.0001979166666666667,
+ "loss": 2.4511,
+ "step": 11
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 0.00019772727272727273,
+ "loss": 2.4592,
+ "step": 12
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 0.0001975378787878788,
+ "loss": 2.4495,
+ "step": 13
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 0.00019734848484848484,
+ "loss": 2.4714,
+ "step": 14
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 0.00019715909090909094,
+ "loss": 2.4302,
+ "step": 15
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 0.00019696969696969698,
+ "loss": 2.4097,
+ "step": 16
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 0.00019678030303030305,
+ "loss": 2.4523,
+ "step": 17
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 0.0001965909090909091,
+ "loss": 2.4325,
+ "step": 18
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 0.00019640151515151516,
+ "loss": 2.4125,
+ "step": 19
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 0.00019621212121212123,
+ "loss": 2.4329,
+ "step": 20
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 0.00019602272727272727,
+ "loss": 2.3471,
+ "step": 21
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 0.00019583333333333334,
+ "loss": 2.3012,
+ "step": 22
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 0.0001956439393939394,
+ "loss": 2.3869,
+ "step": 23
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 0.00019545454545454548,
+ "loss": 2.3822,
+ "step": 24
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 0.00019526515151515152,
+ "loss": 2.3427,
+ "step": 25
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 0.0001950757575757576,
+ "loss": 2.3659,
+ "step": 26
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 0.00019488636363636366,
+ "loss": 2.3826,
+ "step": 27
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 0.0001946969696969697,
+ "loss": 2.3532,
+ "step": 28
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 0.00019450757575757577,
+ "loss": 2.3828,
+ "step": 29
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 0.0001943181818181818,
+ "loss": 2.3133,
+ "step": 30
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 0.0001941287878787879,
+ "loss": 2.3613,
+ "step": 31
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 0.00019393939393939395,
+ "loss": 2.3867,
+ "step": 32
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 0.00019375000000000002,
+ "loss": 2.2966,
+ "step": 33
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 0.00019356060606060606,
+ "loss": 2.3436,
+ "step": 34
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 0.00019337121212121213,
+ "loss": 2.3425,
+ "step": 35
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 0.0001931818181818182,
+ "loss": 2.307,
+ "step": 36
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 0.00019299242424242424,
+ "loss": 2.3521,
+ "step": 37
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 0.0001928030303030303,
+ "loss": 2.3302,
+ "step": 38
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 0.00019261363636363635,
+ "loss": 2.312,
+ "step": 39
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 0.00019242424242424245,
+ "loss": 2.3655,
+ "step": 40
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 0.0001922348484848485,
+ "loss": 2.344,
+ "step": 41
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 0.00019204545454545456,
+ "loss": 2.3373,
+ "step": 42
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 0.0001918560606060606,
+ "loss": 2.3331,
+ "step": 43
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 0.00019166666666666667,
+ "loss": 2.3376,
+ "step": 44
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 0.00019147727272727274,
+ "loss": 2.3369,
+ "step": 45
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 0.00019128787878787878,
+ "loss": 2.3413,
+ "step": 46
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 0.00019109848484848485,
+ "loss": 2.3212,
+ "step": 47
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 0.00019090909090909092,
+ "loss": 2.307,
+ "step": 48
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 0.000190719696969697,
+ "loss": 2.2929,
+ "step": 49
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 0.00019053030303030303,
+ "loss": 2.2873,
+ "step": 50
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 0.0001903409090909091,
+ "loss": 2.3098,
+ "step": 51
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 0.00019015151515151517,
+ "loss": 2.3129,
+ "step": 52
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 0.0001899621212121212,
+ "loss": 2.3038,
+ "step": 53
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 0.00018977272727272728,
+ "loss": 2.286,
+ "step": 54
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 0.00018958333333333332,
+ "loss": 2.3388,
+ "step": 55
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 0.00018939393939393942,
+ "loss": 2.3193,
+ "step": 56
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 0.00018920454545454546,
+ "loss": 2.3136,
+ "step": 57
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 0.00018901515151515153,
+ "loss": 2.3141,
+ "step": 58
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 0.00018882575757575757,
+ "loss": 2.3646,
+ "step": 59
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 0.00018863636363636364,
+ "loss": 2.3318,
+ "step": 60
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 0.0001884469696969697,
+ "loss": 2.2977,
+ "step": 61
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 0.00018825757575757575,
+ "loss": 2.2764,
+ "step": 62
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 0.00018806818181818182,
+ "loss": 2.3095,
+ "step": 63
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 0.0001878787878787879,
+ "loss": 2.252,
+ "step": 64
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 0.00018768939393939396,
+ "loss": 2.2786,
+ "step": 65
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 0.0001875,
+ "loss": 2.2789,
+ "step": 66
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 0.00018731060606060607,
+ "loss": 2.2841,
+ "step": 67
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 0.00018712121212121212,
+ "loss": 2.3436,
+ "step": 68
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 0.00018693181818181818,
+ "loss": 2.2956,
+ "step": 69
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 0.00018674242424242425,
+ "loss": 2.2353,
+ "step": 70
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 0.0001865530303030303,
+ "loss": 2.2772,
+ "step": 71
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 0.00018636363636363636,
+ "loss": 2.2496,
+ "step": 72
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 0.00018617424242424243,
+ "loss": 2.2477,
+ "step": 73
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 0.0001859848484848485,
+ "loss": 2.2791,
+ "step": 74
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 0.00018579545454545454,
+ "loss": 2.2799,
+ "step": 75
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 0.00018560606060606061,
+ "loss": 2.3132,
+ "step": 76
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 0.00018541666666666668,
+ "loss": 2.2542,
+ "step": 77
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 0.00018522727272727273,
+ "loss": 2.2609,
+ "step": 78
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 0.0001850378787878788,
+ "loss": 2.2819,
+ "step": 79
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 0.00018484848484848484,
+ "loss": 2.2844,
+ "step": 80
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 0.00018465909090909093,
+ "loss": 2.2542,
+ "step": 81
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 0.00018446969696969697,
+ "loss": 2.2603,
+ "step": 82
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 0.00018428030303030304,
+ "loss": 2.2832,
+ "step": 83
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 0.00018409090909090909,
+ "loss": 2.2869,
+ "step": 84
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 0.00018390151515151518,
+ "loss": 2.2646,
+ "step": 85
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 0.00018371212121212122,
+ "loss": 2.2698,
+ "step": 86
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 0.00018352272727272727,
+ "loss": 2.2757,
+ "step": 87
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 0.00018333333333333334,
+ "loss": 2.2544,
+ "step": 88
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 0.0001831439393939394,
+ "loss": 2.2678,
+ "step": 89
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 0.00018295454545454547,
+ "loss": 2.2778,
+ "step": 90
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 0.00018276515151515152,
+ "loss": 2.2027,
+ "step": 91
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 0.00018257575757575758,
+ "loss": 2.2167,
+ "step": 92
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 0.00018238636363636365,
+ "loss": 2.2602,
+ "step": 93
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 0.00018219696969696972,
+ "loss": 2.2736,
+ "step": 94
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 0.00018200757575757577,
+ "loss": 2.2443,
+ "step": 95
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 0.00018181818181818183,
+ "loss": 2.2299,
+ "step": 96
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 0.0001816287878787879,
+ "loss": 2.2644,
+ "step": 97
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 0.00018143939393939395,
+ "loss": 2.259,
+ "step": 98
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 0.00018125000000000001,
+ "loss": 2.2567,
+ "step": 99
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 0.00018106060606060606,
+ "loss": 2.2599,
+ "step": 100
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 0.00018087121212121213,
+ "loss": 2.2091,
+ "step": 101
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 0.0001806818181818182,
+ "loss": 2.2312,
+ "step": 102
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 0.00018049242424242426,
+ "loss": 2.1869,
+ "step": 103
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 0.0001803030303030303,
+ "loss": 2.2023,
+ "step": 104
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 0.00018011363636363638,
+ "loss": 2.2132,
+ "step": 105
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 0.00017992424242424244,
+ "loss": 2.2612,
+ "step": 106
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 0.0001797348484848485,
+ "loss": 2.2109,
+ "step": 107
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 0.00017954545454545456,
+ "loss": 2.215,
+ "step": 108
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 0.0001793560606060606,
+ "loss": 2.2114,
+ "step": 109
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 0.0001791666666666667,
+ "loss": 2.2203,
+ "step": 110
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 0.00017897727272727274,
+ "loss": 2.2594,
+ "step": 111
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 0.0001787878787878788,
+ "loss": 2.2001,
+ "step": 112
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 0.00017859848484848485,
+ "loss": 2.2046,
+ "step": 113
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 0.00017840909090909092,
+ "loss": 2.1907,
+ "step": 114
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 0.00017821969696969699,
+ "loss": 2.2539,
+ "step": 115
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 0.00017803030303030303,
+ "loss": 2.2335,
+ "step": 116
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 0.0001778409090909091,
+ "loss": 2.2171,
+ "step": 117
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 0.00017765151515151517,
+ "loss": 2.2278,
+ "step": 118
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 0.00017746212121212123,
+ "loss": 2.231,
+ "step": 119
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 0.00017727272727272728,
+ "loss": 2.2141,
+ "step": 120
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 0.00017708333333333335,
+ "loss": 2.2432,
+ "step": 121
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 0.00017689393939393942,
+ "loss": 2.2266,
+ "step": 122
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 0.00017670454545454546,
+ "loss": 2.1929,
+ "step": 123
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 0.00017651515151515153,
+ "loss": 2.2077,
+ "step": 124
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 0.00017632575757575757,
+ "loss": 2.2133,
+ "step": 125
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 0.00017613636363636366,
+ "loss": 2.2251,
+ "step": 126
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 0.0001759469696969697,
+ "loss": 2.2265,
+ "step": 127
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 0.00017575757575757578,
+ "loss": 2.2186,
+ "step": 128
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 0.00017556818181818182,
+ "loss": 2.1925,
+ "step": 129
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 0.0001753787878787879,
+ "loss": 2.1956,
+ "step": 130
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 0.00017518939393939396,
+ "loss": 2.2459,
+ "step": 131
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 0.000175,
+ "loss": 2.22,
+ "step": 132
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 0.00017481060606060607,
+ "loss": 2.2143,
+ "step": 133
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 0.0001746212121212121,
+ "loss": 2.2359,
+ "step": 134
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 0.0001744318181818182,
+ "loss": 2.2058,
+ "step": 135
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 0.00017424242424242425,
+ "loss": 2.2307,
+ "step": 136
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 0.00017405303030303032,
+ "loss": 2.2062,
+ "step": 137
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 0.00017386363636363636,
+ "loss": 2.1796,
+ "step": 138
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 0.00017367424242424243,
+ "loss": 2.2054,
+ "step": 139
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 0.0001734848484848485,
+ "loss": 2.1651,
+ "step": 140
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 0.00017329545454545454,
+ "loss": 2.2159,
+ "step": 141
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 0.0001731060606060606,
+ "loss": 2.1988,
+ "step": 142
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 0.00017291666666666668,
+ "loss": 2.1676,
+ "step": 143
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 0.00017272727272727275,
+ "loss": 2.1725,
+ "step": 144
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 0.0001725378787878788,
+ "loss": 2.2205,
+ "step": 145
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 0.00017234848484848486,
+ "loss": 2.1486,
+ "step": 146
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 0.00017215909090909093,
+ "loss": 2.147,
+ "step": 147
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 0.00017196969696969697,
+ "loss": 2.1651,
+ "step": 148
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 0.00017178030303030304,
+ "loss": 2.1983,
+ "step": 149
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 0.00017159090909090908,
+ "loss": 2.1778,
+ "step": 150
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 0.00017140151515151518,
+ "loss": 2.1631,
+ "step": 151
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 0.00017121212121212122,
+ "loss": 2.1442,
+ "step": 152
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 0.0001710227272727273,
+ "loss": 2.1397,
+ "step": 153
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 0.00017083333333333333,
+ "loss": 2.1697,
+ "step": 154
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 0.0001706439393939394,
+ "loss": 2.1451,
+ "step": 155
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 0.00017045454545454547,
+ "loss": 2.1789,
+ "step": 156
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 0.0001702651515151515,
+ "loss": 2.1037,
+ "step": 157
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 0.00017007575757575758,
+ "loss": 2.1698,
+ "step": 158
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 0.00016988636363636365,
+ "loss": 2.1538,
+ "step": 159
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 0.00016969696969696972,
+ "loss": 2.2015,
+ "step": 160
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 0.00016950757575757576,
+ "loss": 2.179,
+ "step": 161
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 0.00016931818181818183,
+ "loss": 2.1766,
+ "step": 162
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 0.0001691287878787879,
+ "loss": 2.1646,
+ "step": 163
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 0.00016893939393939394,
+ "loss": 2.1694,
+ "step": 164
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 0.00016875,
+ "loss": 2.1562,
+ "step": 165
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 0.00016856060606060605,
+ "loss": 2.1551,
+ "step": 166
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 0.00016837121212121212,
+ "loss": 2.1652,
+ "step": 167
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 0.0001681818181818182,
+ "loss": 2.1594,
+ "step": 168
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 0.00016799242424242426,
+ "loss": 2.1674,
+ "step": 169
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 0.0001678030303030303,
+ "loss": 2.1378,
+ "step": 170
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 0.00016761363636363637,
+ "loss": 2.1447,
+ "step": 171
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 0.00016742424242424244,
+ "loss": 2.1451,
+ "step": 172
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 0.00016723484848484848,
+ "loss": 2.1336,
+ "step": 173
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 0.00016704545454545455,
+ "loss": 2.1231,
+ "step": 174
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 0.0001668560606060606,
+ "loss": 2.1143,
+ "step": 175
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 0.0001666666666666667,
+ "loss": 2.1316,
+ "step": 176
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 0.00016647727272727273,
+ "loss": 2.1281,
+ "step": 177
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 0.0001662878787878788,
+ "loss": 2.136,
+ "step": 178
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 0.00016609848484848484,
+ "loss": 2.1279,
+ "step": 179
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 0.00016590909090909094,
+ "loss": 2.1421,
+ "step": 180
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 0.00016571969696969698,
+ "loss": 2.1541,
+ "step": 181
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 0.00016553030303030305,
+ "loss": 2.1293,
+ "step": 182
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 0.0001653409090909091,
+ "loss": 2.1294,
+ "step": 183
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 0.00016515151515151516,
+ "loss": 2.1459,
+ "step": 184
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 0.00016496212121212123,
+ "loss": 2.1113,
+ "step": 185
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 0.00016477272727272727,
+ "loss": 2.1394,
+ "step": 186
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 0.00016458333333333334,
+ "loss": 2.1321,
+ "step": 187
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 0.0001643939393939394,
+ "loss": 2.148,
+ "step": 188
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 0.00016420454545454548,
+ "loss": 2.1631,
+ "step": 189
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 0.00016401515151515152,
+ "loss": 2.1276,
+ "step": 190
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 0.0001638257575757576,
+ "loss": 2.0706,
+ "step": 191
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 0.00016363636363636366,
+ "loss": 2.127,
+ "step": 192
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 0.0001634469696969697,
+ "loss": 2.1449,
+ "step": 193
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 0.00016325757575757577,
+ "loss": 2.1204,
+ "step": 194
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 0.0001630681818181818,
+ "loss": 2.0904,
+ "step": 195
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 0.0001628787878787879,
+ "loss": 2.1129,
+ "step": 196
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 0.00016268939393939395,
+ "loss": 2.1036,
+ "step": 197
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 0.00016250000000000002,
+ "loss": 2.1509,
+ "step": 198
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 0.00016231060606060606,
+ "loss": 2.1239,
+ "step": 199
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 0.00016212121212121213,
+ "loss": 2.145,
+ "step": 200
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 0.0001619318181818182,
+ "loss": 2.1221,
+ "step": 201
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 0.00016174242424242424,
+ "loss": 2.1181,
+ "step": 202
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 0.0001615530303030303,
+ "loss": 2.1306,
+ "step": 203
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 0.00016136363636363635,
+ "loss": 2.0199,
+ "step": 204
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 0.00016117424242424245,
+ "loss": 2.1178,
+ "step": 205
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 0.0001609848484848485,
+ "loss": 2.1584,
+ "step": 206
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 0.00016079545454545456,
+ "loss": 2.0872,
+ "step": 207
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 0.0001606060606060606,
+ "loss": 2.1033,
+ "step": 208
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 0.00016041666666666667,
+ "loss": 2.1381,
+ "step": 209
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 0.00016022727272727274,
+ "loss": 2.1127,
+ "step": 210
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 0.00016003787878787878,
+ "loss": 2.1077,
+ "step": 211
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 0.00015984848484848485,
+ "loss": 2.0984,
+ "step": 212
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 0.00015965909090909092,
+ "loss": 2.0994,
+ "step": 213
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 0.000159469696969697,
+ "loss": 2.096,
+ "step": 214
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 0.00015928030303030303,
+ "loss": 2.0909,
+ "step": 215
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 0.0001590909090909091,
+ "loss": 2.118,
+ "step": 216
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 0.00015890151515151517,
+ "loss": 2.0783,
+ "step": 217
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 0.0001587121212121212,
+ "loss": 2.0876,
+ "step": 218
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 0.00015852272727272728,
+ "loss": 2.0581,
+ "step": 219
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 0.00015833333333333332,
+ "loss": 2.0548,
+ "step": 220
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 0.00015814393939393942,
+ "loss": 2.0595,
+ "step": 221
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 0.00015795454545454546,
+ "loss": 2.0719,
+ "step": 222
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 0.00015776515151515153,
+ "loss": 2.0903,
+ "step": 223
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 0.00015757575757575757,
+ "loss": 2.0941,
+ "step": 224
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 0.00015738636363636364,
+ "loss": 2.0926,
+ "step": 225
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 0.0001571969696969697,
+ "loss": 2.0816,
+ "step": 226
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 0.00015700757575757575,
+ "loss": 2.0894,
+ "step": 227
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 0.00015681818181818182,
+ "loss": 2.0798,
+ "step": 228
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 0.0001566287878787879,
+ "loss": 2.0672,
+ "step": 229
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 0.00015643939393939396,
+ "loss": 2.0787,
+ "step": 230
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 0.00015625,
+ "loss": 2.0611,
+ "step": 231
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 0.00015606060606060607,
+ "loss": 2.0805,
+ "step": 232
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 0.00015587121212121211,
+ "loss": 2.053,
+ "step": 233
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 0.00015568181818181818,
+ "loss": 2.0575,
+ "step": 234
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 0.00015549242424242425,
+ "loss": 2.0459,
+ "step": 235
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 0.0001553030303030303,
+ "loss": 2.0635,
+ "step": 236
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 0.00015511363636363636,
+ "loss": 2.0335,
+ "step": 237
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 0.00015492424242424243,
+ "loss": 2.0681,
+ "step": 238
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 0.0001547348484848485,
+ "loss": 2.0748,
+ "step": 239
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 0.00015454545454545454,
+ "loss": 2.1091,
+ "step": 240
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 0.0001543560606060606,
+ "loss": 2.0732,
+ "step": 241
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 0.00015416666666666668,
+ "loss": 2.0746,
+ "step": 242
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 0.00015397727272727272,
+ "loss": 2.0306,
+ "step": 243
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 0.0001537878787878788,
+ "loss": 2.0864,
+ "step": 244
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 0.00015359848484848484,
+ "loss": 2.0664,
+ "step": 245
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 0.00015340909090909093,
+ "loss": 2.0801,
+ "step": 246
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 0.00015321969696969697,
+ "loss": 2.0799,
+ "step": 247
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 0.00015303030303030304,
+ "loss": 2.0621,
+ "step": 248
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 0.00015284090909090909,
+ "loss": 2.0687,
+ "step": 249
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 0.00015265151515151515,
+ "loss": 2.018,
+ "step": 250
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 0.00015246212121212122,
+ "loss": 2.0256,
+ "step": 251
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 0.00015227272727272727,
+ "loss": 2.0736,
+ "step": 252
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 0.00015208333333333333,
+ "loss": 2.0609,
+ "step": 253
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 0.0001518939393939394,
+ "loss": 2.0539,
+ "step": 254
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 0.00015170454545454547,
+ "loss": 2.0282,
+ "step": 255
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 0.00015151515151515152,
+ "loss": 2.0417,
+ "step": 256
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 0.00015132575757575758,
+ "loss": 2.0333,
+ "step": 257
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 0.00015113636363636365,
+ "loss": 2.0428,
+ "step": 258
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 0.00015094696969696972,
+ "loss": 2.045,
+ "step": 259
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 0.00015075757575757576,
+ "loss": 2.0463,
+ "step": 260
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 0.0001505681818181818,
+ "loss": 2.0539,
+ "step": 261
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 0.0001503787878787879,
+ "loss": 2.0184,
+ "step": 262
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 0.00015018939393939394,
+ "loss": 2.0858,
+ "step": 263
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 0.00015000000000000001,
+ "loss": 2.0239,
+ "step": 264
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 0.00014981060606060606,
+ "loss": 2.0425,
+ "step": 265
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 0.00014962121212121213,
+ "loss": 2.0263,
+ "step": 266
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 0.0001494318181818182,
+ "loss": 2.042,
+ "step": 267
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 0.00014924242424242426,
+ "loss": 2.026,
+ "step": 268
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 0.0001490530303030303,
+ "loss": 2.0411,
+ "step": 269
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 0.00014886363636363635,
+ "loss": 2.028,
+ "step": 270
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 0.00014867424242424244,
+ "loss": 2.0172,
+ "step": 271
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 0.00014848484848484849,
+ "loss": 2.0196,
+ "step": 272
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 0.00014829545454545455,
+ "loss": 2.0142,
+ "step": 273
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 0.0001481060606060606,
+ "loss": 2.0265,
+ "step": 274
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 0.0001479166666666667,
+ "loss": 2.0353,
+ "step": 275
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 0.00014772727272727274,
+ "loss": 2.0327,
+ "step": 276
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 0.0001475378787878788,
+ "loss": 2.0188,
+ "step": 277
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 0.00014734848484848485,
+ "loss": 1.9987,
+ "step": 278
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 0.00014715909090909092,
+ "loss": 2.0141,
+ "step": 279
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 0.00014696969696969698,
+ "loss": 2.0403,
+ "step": 280
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 0.00014678030303030303,
+ "loss": 1.9977,
+ "step": 281
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 0.0001465909090909091,
+ "loss": 1.9674,
+ "step": 282
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 0.00014640151515151517,
+ "loss": 1.9984,
+ "step": 283
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 0.00014621212121212123,
+ "loss": 1.9796,
+ "step": 284
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 0.00014602272727272728,
+ "loss": 2.0139,
+ "step": 285
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 0.00014583333333333335,
+ "loss": 1.9866,
+ "step": 286
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 0.00014564393939393941,
+ "loss": 2.0208,
+ "step": 287
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 0.00014545454545454546,
+ "loss": 1.9844,
+ "step": 288
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 0.00014526515151515153,
+ "loss": 2.0082,
+ "step": 289
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 0.00014507575757575757,
+ "loss": 1.984,
+ "step": 290
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 0.00014488636363636366,
+ "loss": 2.0015,
+ "step": 291
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 0.0001446969696969697,
+ "loss": 2.0209,
+ "step": 292
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 0.00014450757575757578,
+ "loss": 1.9728,
+ "step": 293
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 0.00014431818181818182,
+ "loss": 2.0032,
+ "step": 294
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 0.00014412878787878789,
+ "loss": 1.9641,
+ "step": 295
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 0.00014393939393939396,
+ "loss": 1.9945,
+ "step": 296
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 0.00014375,
+ "loss": 1.9658,
+ "step": 297
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 0.00014356060606060607,
+ "loss": 1.9907,
+ "step": 298
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 0.0001433712121212121,
+ "loss": 1.9935,
+ "step": 299
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 0.0001431818181818182,
+ "loss": 1.9897,
+ "step": 300
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 0.00014299242424242425,
+ "loss": 1.984,
+ "step": 301
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 0.00014280303030303032,
+ "loss": 1.9581,
+ "step": 302
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 0.00014261363636363636,
+ "loss": 1.9893,
+ "step": 303
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 0.00014242424242424243,
+ "loss": 1.9568,
+ "step": 304
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 0.0001422348484848485,
+ "loss": 1.98,
+ "step": 305
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 0.00014204545454545454,
+ "loss": 1.9519,
+ "step": 306
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 0.0001418560606060606,
+ "loss": 1.9693,
+ "step": 307
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 0.00014166666666666668,
+ "loss": 1.9866,
+ "step": 308
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 0.00014147727272727275,
+ "loss": 1.9508,
+ "step": 309
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 0.0001412878787878788,
+ "loss": 1.9653,
+ "step": 310
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 0.00014109848484848486,
+ "loss": 1.9991,
+ "step": 311
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 0.00014090909090909093,
+ "loss": 1.9442,
+ "step": 312
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 0.00014071969696969697,
+ "loss": 1.9807,
+ "step": 313
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 0.00014053030303030304,
+ "loss": 1.9958,
+ "step": 314
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 0.00014034090909090908,
+ "loss": 1.9459,
+ "step": 315
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 0.00014015151515151518,
+ "loss": 1.9508,
+ "step": 316
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 0.00013996212121212122,
+ "loss": 1.9933,
+ "step": 317
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 0.0001397727272727273,
+ "loss": 1.9703,
+ "step": 318
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 0.00013958333333333333,
+ "loss": 1.965,
+ "step": 319
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 0.0001393939393939394,
+ "loss": 1.9264,
+ "step": 320
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 0.00013920454545454547,
+ "loss": 1.9688,
+ "step": 321
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 0.0001390151515151515,
+ "loss": 1.9901,
+ "step": 322
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 0.00013882575757575758,
+ "loss": 1.9363,
+ "step": 323
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 0.00013863636363636365,
+ "loss": 1.9269,
+ "step": 324
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 0.00013844696969696972,
+ "loss": 1.9688,
+ "step": 325
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 0.00013825757575757576,
+ "loss": 1.9758,
+ "step": 326
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 0.00013806818181818183,
+ "loss": 1.9414,
+ "step": 327
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 0.0001378787878787879,
+ "loss": 1.9397,
+ "step": 328
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 0.00013768939393939394,
+ "loss": 1.9032,
+ "step": 329
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 0.0001375,
+ "loss": 1.9777,
+ "step": 330
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 0.00013731060606060605,
+ "loss": 1.9173,
+ "step": 331
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 0.00013712121212121212,
+ "loss": 1.9307,
+ "step": 332
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 0.0001369318181818182,
+ "loss": 1.9611,
+ "step": 333
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 0.00013674242424242426,
+ "loss": 1.9698,
+ "step": 334
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 0.0001365530303030303,
+ "loss": 1.9619,
+ "step": 335
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 0.00013636363636363637,
+ "loss": 1.9322,
+ "step": 336
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 0.00013617424242424244,
+ "loss": 1.9441,
+ "step": 337
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 0.00013598484848484848,
+ "loss": 1.9563,
+ "step": 338
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 0.00013579545454545455,
+ "loss": 1.9283,
+ "step": 339
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 0.0001356060606060606,
+ "loss": 1.9508,
+ "step": 340
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 0.0001354166666666667,
+ "loss": 1.9285,
+ "step": 341
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 0.00013522727272727273,
+ "loss": 1.9295,
+ "step": 342
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 0.0001350378787878788,
+ "loss": 1.9272,
+ "step": 343
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 0.00013484848484848484,
+ "loss": 1.905,
+ "step": 344
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 0.00013465909090909094,
+ "loss": 1.9409,
+ "step": 345
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 0.00013446969696969698,
+ "loss": 1.9674,
+ "step": 346
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 0.00013428030303030302,
+ "loss": 1.9278,
+ "step": 347
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 0.0001340909090909091,
+ "loss": 1.9136,
+ "step": 348
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 0.00013390151515151516,
+ "loss": 1.9143,
+ "step": 349
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 0.00013371212121212123,
+ "loss": 1.9381,
+ "step": 350
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 0.00013352272727272727,
+ "loss": 1.9136,
+ "step": 351
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 0.00013333333333333334,
+ "loss": 1.9103,
+ "step": 352
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 0.0001331439393939394,
+ "loss": 1.9027,
+ "step": 353
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 0.00013295454545454548,
+ "loss": 1.8674,
+ "step": 354
+ },
+ {
+ "epoch": 1.01,
+ "learning_rate": 0.00013276515151515152,
+ "loss": 1.886,
+ "step": 355
+ },
+ {
+ "epoch": 1.01,
+ "learning_rate": 0.00013257575757575756,
+ "loss": 1.887,
+ "step": 356
+ },
+ {
+ "epoch": 1.01,
+ "learning_rate": 0.00013238636363636366,
+ "loss": 1.87,
+ "step": 357
+ },
+ {
+ "epoch": 1.02,
+ "learning_rate": 0.0001321969696969697,
+ "loss": 1.8715,
+ "step": 358
+ },
+ {
+ "epoch": 1.02,
+ "learning_rate": 0.00013200757575757577,
+ "loss": 1.8993,
+ "step": 359
+ },
+ {
+ "epoch": 1.02,
+ "learning_rate": 0.0001318181818181818,
+ "loss": 1.8844,
+ "step": 360
+ },
+ {
+ "epoch": 1.02,
+ "learning_rate": 0.0001316287878787879,
+ "loss": 1.8965,
+ "step": 361
+ },
+ {
+ "epoch": 1.03,
+ "learning_rate": 0.00013143939393939395,
+ "loss": 1.8956,
+ "step": 362
+ },
+ {
+ "epoch": 1.03,
+ "learning_rate": 0.00013125000000000002,
+ "loss": 1.869,
+ "step": 363
+ },
+ {
+ "epoch": 1.03,
+ "learning_rate": 0.00013106060606060606,
+ "loss": 1.8702,
+ "step": 364
+ },
+ {
+ "epoch": 1.04,
+ "learning_rate": 0.00013087121212121213,
+ "loss": 1.8962,
+ "step": 365
+ },
+ {
+ "epoch": 1.04,
+ "learning_rate": 0.0001306818181818182,
+ "loss": 1.8613,
+ "step": 366
+ },
+ {
+ "epoch": 1.04,
+ "learning_rate": 0.00013049242424242424,
+ "loss": 1.8845,
+ "step": 367
+ },
+ {
+ "epoch": 1.04,
+ "learning_rate": 0.0001303030303030303,
+ "loss": 1.8689,
+ "step": 368
+ },
+ {
+ "epoch": 1.05,
+ "learning_rate": 0.00013011363636363635,
+ "loss": 1.9059,
+ "step": 369
+ },
+ {
+ "epoch": 1.05,
+ "learning_rate": 0.00012992424242424245,
+ "loss": 1.9082,
+ "step": 370
+ },
+ {
+ "epoch": 1.05,
+ "learning_rate": 0.0001297348484848485,
+ "loss": 1.8918,
+ "step": 371
+ },
+ {
+ "epoch": 1.06,
+ "learning_rate": 0.00012954545454545456,
+ "loss": 1.8657,
+ "step": 372
+ },
+ {
+ "epoch": 1.06,
+ "learning_rate": 0.0001293560606060606,
+ "loss": 1.8909,
+ "step": 373
+ },
+ {
+ "epoch": 1.06,
+ "learning_rate": 0.00012916666666666667,
+ "loss": 1.8649,
+ "step": 374
+ },
+ {
+ "epoch": 1.06,
+ "learning_rate": 0.00012897727272727274,
+ "loss": 1.833,
+ "step": 375
+ },
+ {
+ "epoch": 1.07,
+ "learning_rate": 0.00012878787878787878,
+ "loss": 1.8815,
+ "step": 376
+ },
+ {
+ "epoch": 1.07,
+ "learning_rate": 0.00012859848484848485,
+ "loss": 1.8646,
+ "step": 377
+ },
+ {
+ "epoch": 1.07,
+ "learning_rate": 0.00012840909090909092,
+ "loss": 1.846,
+ "step": 378
+ },
+ {
+ "epoch": 1.08,
+ "learning_rate": 0.000128219696969697,
+ "loss": 1.8631,
+ "step": 379
+ },
+ {
+ "epoch": 1.08,
+ "learning_rate": 0.00012803030303030303,
+ "loss": 1.917,
+ "step": 380
+ },
+ {
+ "epoch": 1.08,
+ "learning_rate": 0.0001278409090909091,
+ "loss": 1.9068,
+ "step": 381
+ },
+ {
+ "epoch": 1.08,
+ "learning_rate": 0.00012765151515151517,
+ "loss": 1.8772,
+ "step": 382
+ },
+ {
+ "epoch": 1.09,
+ "learning_rate": 0.0001274621212121212,
+ "loss": 1.8414,
+ "step": 383
+ },
+ {
+ "epoch": 1.09,
+ "learning_rate": 0.00012727272727272728,
+ "loss": 1.9003,
+ "step": 384
+ },
+ {
+ "epoch": 1.09,
+ "learning_rate": 0.00012708333333333332,
+ "loss": 1.8415,
+ "step": 385
+ },
+ {
+ "epoch": 1.1,
+ "learning_rate": 0.00012689393939393942,
+ "loss": 1.8491,
+ "step": 386
+ },
+ {
+ "epoch": 1.1,
+ "learning_rate": 0.00012670454545454546,
+ "loss": 1.8875,
+ "step": 387
+ },
+ {
+ "epoch": 1.1,
+ "learning_rate": 0.00012651515151515153,
+ "loss": 1.8629,
+ "step": 388
+ },
+ {
+ "epoch": 1.1,
+ "learning_rate": 0.00012632575757575757,
+ "loss": 1.8378,
+ "step": 389
+ },
+ {
+ "epoch": 1.11,
+ "learning_rate": 0.00012613636363636364,
+ "loss": 1.8442,
+ "step": 390
+ },
+ {
+ "epoch": 1.11,
+ "learning_rate": 0.0001259469696969697,
+ "loss": 1.8587,
+ "step": 391
+ },
+ {
+ "epoch": 1.11,
+ "learning_rate": 0.00012575757575757575,
+ "loss": 1.8659,
+ "step": 392
+ },
+ {
+ "epoch": 1.11,
+ "learning_rate": 0.00012556818181818182,
+ "loss": 1.8271,
+ "step": 393
+ },
+ {
+ "epoch": 1.12,
+ "learning_rate": 0.0001253787878787879,
+ "loss": 1.8692,
+ "step": 394
+ },
+ {
+ "epoch": 1.12,
+ "learning_rate": 0.00012518939393939396,
+ "loss": 1.8071,
+ "step": 395
+ },
+ {
+ "epoch": 1.12,
+ "learning_rate": 0.000125,
+ "loss": 1.8564,
+ "step": 396
+ },
+ {
+ "epoch": 1.13,
+ "learning_rate": 0.00012481060606060607,
+ "loss": 1.8891,
+ "step": 397
+ },
+ {
+ "epoch": 1.13,
+ "learning_rate": 0.00012462121212121211,
+ "loss": 1.8173,
+ "step": 398
+ },
+ {
+ "epoch": 1.13,
+ "learning_rate": 0.00012443181818181818,
+ "loss": 1.8653,
+ "step": 399
+ },
+ {
+ "epoch": 1.13,
+ "learning_rate": 0.00012424242424242425,
+ "loss": 1.8843,
+ "step": 400
+ },
+ {
+ "epoch": 1.14,
+ "learning_rate": 0.0001240530303030303,
+ "loss": 1.8527,
+ "step": 401
+ },
+ {
+ "epoch": 1.14,
+ "learning_rate": 0.00012386363636363636,
+ "loss": 1.8352,
+ "step": 402
+ },
+ {
+ "epoch": 1.14,
+ "learning_rate": 0.00012367424242424243,
+ "loss": 1.866,
+ "step": 403
+ },
+ {
+ "epoch": 1.15,
+ "learning_rate": 0.0001234848484848485,
+ "loss": 1.8557,
+ "step": 404
+ },
+ {
+ "epoch": 1.15,
+ "learning_rate": 0.00012329545454545454,
+ "loss": 1.8284,
+ "step": 405
+ },
+ {
+ "epoch": 1.15,
+ "learning_rate": 0.0001231060606060606,
+ "loss": 1.8359,
+ "step": 406
+ },
+ {
+ "epoch": 1.15,
+ "learning_rate": 0.00012291666666666668,
+ "loss": 1.8437,
+ "step": 407
+ },
+ {
+ "epoch": 1.16,
+ "learning_rate": 0.00012272727272727272,
+ "loss": 1.8256,
+ "step": 408
+ },
+ {
+ "epoch": 1.16,
+ "learning_rate": 0.0001225378787878788,
+ "loss": 1.8297,
+ "step": 409
+ },
+ {
+ "epoch": 1.16,
+ "learning_rate": 0.00012234848484848484,
+ "loss": 1.8515,
+ "step": 410
+ },
+ {
+ "epoch": 1.17,
+ "learning_rate": 0.00012215909090909093,
+ "loss": 1.8198,
+ "step": 411
+ },
+ {
+ "epoch": 1.17,
+ "learning_rate": 0.00012196969696969697,
+ "loss": 1.7809,
+ "step": 412
+ },
+ {
+ "epoch": 1.17,
+ "learning_rate": 0.00012178030303030303,
+ "loss": 1.8438,
+ "step": 413
+ },
+ {
+ "epoch": 1.17,
+ "learning_rate": 0.00012159090909090908,
+ "loss": 1.8497,
+ "step": 414
+ },
+ {
+ "epoch": 1.18,
+ "learning_rate": 0.00012140151515151517,
+ "loss": 1.8463,
+ "step": 415
+ },
+ {
+ "epoch": 1.18,
+ "learning_rate": 0.00012121212121212122,
+ "loss": 1.7768,
+ "step": 416
+ },
+ {
+ "epoch": 1.18,
+ "learning_rate": 0.00012102272727272728,
+ "loss": 1.8561,
+ "step": 417
+ },
+ {
+ "epoch": 1.19,
+ "learning_rate": 0.00012083333333333333,
+ "loss": 1.863,
+ "step": 418
+ },
+ {
+ "epoch": 1.19,
+ "learning_rate": 0.0001206439393939394,
+ "loss": 1.8193,
+ "step": 419
+ },
+ {
+ "epoch": 1.19,
+ "learning_rate": 0.00012045454545454546,
+ "loss": 1.7732,
+ "step": 420
+ },
+ {
+ "epoch": 1.19,
+ "learning_rate": 0.00012026515151515151,
+ "loss": 1.7728,
+ "step": 421
+ },
+ {
+ "epoch": 1.2,
+ "learning_rate": 0.00012007575757575757,
+ "loss": 1.8113,
+ "step": 422
+ },
+ {
+ "epoch": 1.2,
+ "learning_rate": 0.00011988636363636365,
+ "loss": 1.7976,
+ "step": 423
+ },
+ {
+ "epoch": 1.2,
+ "learning_rate": 0.00011969696969696971,
+ "loss": 1.786,
+ "step": 424
+ },
+ {
+ "epoch": 1.21,
+ "learning_rate": 0.00011950757575757576,
+ "loss": 1.8019,
+ "step": 425
+ },
+ {
+ "epoch": 1.21,
+ "learning_rate": 0.00011931818181818182,
+ "loss": 1.786,
+ "step": 426
+ },
+ {
+ "epoch": 1.21,
+ "learning_rate": 0.00011912878787878789,
+ "loss": 1.8102,
+ "step": 427
+ },
+ {
+ "epoch": 1.21,
+ "learning_rate": 0.00011893939393939394,
+ "loss": 1.7828,
+ "step": 428
+ },
+ {
+ "epoch": 1.22,
+ "learning_rate": 0.00011875,
+ "loss": 1.8498,
+ "step": 429
+ },
+ {
+ "epoch": 1.22,
+ "learning_rate": 0.00011856060606060606,
+ "loss": 1.7983,
+ "step": 430
+ },
+ {
+ "epoch": 1.22,
+ "learning_rate": 0.00011837121212121211,
+ "loss": 1.7863,
+ "step": 431
+ },
+ {
+ "epoch": 1.23,
+ "learning_rate": 0.0001181818181818182,
+ "loss": 1.8171,
+ "step": 432
+ },
+ {
+ "epoch": 1.23,
+ "learning_rate": 0.00011799242424242425,
+ "loss": 1.8143,
+ "step": 433
+ },
+ {
+ "epoch": 1.23,
+ "learning_rate": 0.0001178030303030303,
+ "loss": 1.7815,
+ "step": 434
+ },
+ {
+ "epoch": 1.23,
+ "learning_rate": 0.00011761363636363636,
+ "loss": 1.7652,
+ "step": 435
+ },
+ {
+ "epoch": 1.24,
+ "learning_rate": 0.00011742424242424244,
+ "loss": 1.8242,
+ "step": 436
+ },
+ {
+ "epoch": 1.24,
+ "learning_rate": 0.00011723484848484849,
+ "loss": 1.7789,
+ "step": 437
+ },
+ {
+ "epoch": 1.24,
+ "learning_rate": 0.00011704545454545454,
+ "loss": 1.7549,
+ "step": 438
+ },
+ {
+ "epoch": 1.25,
+ "learning_rate": 0.0001168560606060606,
+ "loss": 1.7528,
+ "step": 439
+ },
+ {
+ "epoch": 1.25,
+ "learning_rate": 0.00011666666666666668,
+ "loss": 1.7443,
+ "step": 440
+ },
+ {
+ "epoch": 1.25,
+ "learning_rate": 0.00011647727272727273,
+ "loss": 1.7911,
+ "step": 441
+ },
+ {
+ "epoch": 1.25,
+ "learning_rate": 0.00011628787878787879,
+ "loss": 1.7848,
+ "step": 442
+ },
+ {
+ "epoch": 1.26,
+ "learning_rate": 0.00011609848484848485,
+ "loss": 1.8137,
+ "step": 443
+ },
+ {
+ "epoch": 1.26,
+ "learning_rate": 0.00011590909090909093,
+ "loss": 1.791,
+ "step": 444
+ },
+ {
+ "epoch": 1.26,
+ "learning_rate": 0.00011571969696969698,
+ "loss": 1.7921,
+ "step": 445
+ },
+ {
+ "epoch": 1.27,
+ "learning_rate": 0.00011553030303030304,
+ "loss": 1.772,
+ "step": 446
+ },
+ {
+ "epoch": 1.27,
+ "learning_rate": 0.00011534090909090908,
+ "loss": 1.776,
+ "step": 447
+ },
+ {
+ "epoch": 1.27,
+ "learning_rate": 0.00011515151515151516,
+ "loss": 1.7948,
+ "step": 448
+ },
+ {
+ "epoch": 1.27,
+ "learning_rate": 0.00011496212121212122,
+ "loss": 1.8187,
+ "step": 449
+ },
+ {
+ "epoch": 1.28,
+ "learning_rate": 0.00011477272727272728,
+ "loss": 1.7436,
+ "step": 450
+ },
+ {
+ "epoch": 1.28,
+ "learning_rate": 0.00011458333333333333,
+ "loss": 1.7326,
+ "step": 451
+ },
+ {
+ "epoch": 1.28,
+ "learning_rate": 0.00011439393939393941,
+ "loss": 1.8005,
+ "step": 452
+ },
+ {
+ "epoch": 1.29,
+ "learning_rate": 0.00011420454545454547,
+ "loss": 1.8088,
+ "step": 453
+ },
+ {
+ "epoch": 1.29,
+ "learning_rate": 0.00011401515151515153,
+ "loss": 1.7632,
+ "step": 454
+ },
+ {
+ "epoch": 1.29,
+ "learning_rate": 0.00011382575757575758,
+ "loss": 1.7848,
+ "step": 455
+ },
+ {
+ "epoch": 1.29,
+ "learning_rate": 0.00011363636363636365,
+ "loss": 1.7756,
+ "step": 456
+ },
+ {
+ "epoch": 1.3,
+ "learning_rate": 0.0001134469696969697,
+ "loss": 1.7964,
+ "step": 457
+ },
+ {
+ "epoch": 1.3,
+ "learning_rate": 0.00011325757575757576,
+ "loss": 1.7604,
+ "step": 458
+ },
+ {
+ "epoch": 1.3,
+ "learning_rate": 0.00011306818181818182,
+ "loss": 1.7914,
+ "step": 459
+ },
+ {
+ "epoch": 1.3,
+ "learning_rate": 0.0001128787878787879,
+ "loss": 1.8059,
+ "step": 460
+ },
+ {
+ "epoch": 1.31,
+ "learning_rate": 0.00011268939393939395,
+ "loss": 1.7647,
+ "step": 461
+ },
+ {
+ "epoch": 1.31,
+ "learning_rate": 0.00011250000000000001,
+ "loss": 1.7526,
+ "step": 462
+ },
+ {
+ "epoch": 1.31,
+ "learning_rate": 0.00011231060606060607,
+ "loss": 1.7736,
+ "step": 463
+ },
+ {
+ "epoch": 1.32,
+ "learning_rate": 0.00011212121212121212,
+ "loss": 1.7449,
+ "step": 464
+ },
+ {
+ "epoch": 1.32,
+ "learning_rate": 0.00011193181818181819,
+ "loss": 1.7636,
+ "step": 465
+ },
+ {
+ "epoch": 1.32,
+ "learning_rate": 0.00011174242424242425,
+ "loss": 1.7846,
+ "step": 466
+ },
+ {
+ "epoch": 1.32,
+ "learning_rate": 0.0001115530303030303,
+ "loss": 1.78,
+ "step": 467
+ },
+ {
+ "epoch": 1.33,
+ "learning_rate": 0.00011136363636363636,
+ "loss": 1.7828,
+ "step": 468
+ },
+ {
+ "epoch": 1.33,
+ "learning_rate": 0.00011117424242424244,
+ "loss": 1.729,
+ "step": 469
+ },
+ {
+ "epoch": 1.33,
+ "learning_rate": 0.0001109848484848485,
+ "loss": 1.7145,
+ "step": 470
+ },
+ {
+ "epoch": 1.34,
+ "learning_rate": 0.00011079545454545455,
+ "loss": 1.7189,
+ "step": 471
+ },
+ {
+ "epoch": 1.34,
+ "learning_rate": 0.00011060606060606061,
+ "loss": 1.7628,
+ "step": 472
+ },
+ {
+ "epoch": 1.34,
+ "learning_rate": 0.00011041666666666668,
+ "loss": 1.7399,
+ "step": 473
+ },
+ {
+ "epoch": 1.34,
+ "learning_rate": 0.00011022727272727273,
+ "loss": 1.7561,
+ "step": 474
+ },
+ {
+ "epoch": 1.35,
+ "learning_rate": 0.00011003787878787879,
+ "loss": 1.7979,
+ "step": 475
+ },
+ {
+ "epoch": 1.35,
+ "learning_rate": 0.00010984848484848484,
+ "loss": 1.7673,
+ "step": 476
+ },
+ {
+ "epoch": 1.35,
+ "learning_rate": 0.00010965909090909093,
+ "loss": 1.777,
+ "step": 477
+ },
+ {
+ "epoch": 1.36,
+ "learning_rate": 0.00010946969696969698,
+ "loss": 1.7042,
+ "step": 478
+ },
+ {
+ "epoch": 1.36,
+ "learning_rate": 0.00010928030303030304,
+ "loss": 1.7764,
+ "step": 479
+ },
+ {
+ "epoch": 1.36,
+ "learning_rate": 0.00010909090909090909,
+ "loss": 1.6993,
+ "step": 480
+ },
+ {
+ "epoch": 1.36,
+ "learning_rate": 0.00010890151515151516,
+ "loss": 1.7688,
+ "step": 481
+ },
+ {
+ "epoch": 1.37,
+ "learning_rate": 0.00010871212121212122,
+ "loss": 1.7428,
+ "step": 482
+ },
+ {
+ "epoch": 1.37,
+ "learning_rate": 0.00010852272727272727,
+ "loss": 1.675,
+ "step": 483
+ },
+ {
+ "epoch": 1.37,
+ "learning_rate": 0.00010833333333333333,
+ "loss": 1.7183,
+ "step": 484
+ },
+ {
+ "epoch": 1.38,
+ "learning_rate": 0.00010814393939393941,
+ "loss": 1.7305,
+ "step": 485
+ },
+ {
+ "epoch": 1.38,
+ "learning_rate": 0.00010795454545454547,
+ "loss": 1.7541,
+ "step": 486
+ },
+ {
+ "epoch": 1.38,
+ "learning_rate": 0.00010776515151515152,
+ "loss": 1.7074,
+ "step": 487
+ },
+ {
+ "epoch": 1.38,
+ "learning_rate": 0.00010757575757575758,
+ "loss": 1.7093,
+ "step": 488
+ },
+ {
+ "epoch": 1.39,
+ "learning_rate": 0.00010738636363636365,
+ "loss": 1.7354,
+ "step": 489
+ },
+ {
+ "epoch": 1.39,
+ "learning_rate": 0.0001071969696969697,
+ "loss": 1.7415,
+ "step": 490
+ },
+ {
+ "epoch": 1.39,
+ "learning_rate": 0.00010700757575757576,
+ "loss": 1.72,
+ "step": 491
+ },
+ {
+ "epoch": 1.4,
+ "learning_rate": 0.00010681818181818181,
+ "loss": 1.7453,
+ "step": 492
+ },
+ {
+ "epoch": 1.4,
+ "learning_rate": 0.0001066287878787879,
+ "loss": 1.7077,
+ "step": 493
+ },
+ {
+ "epoch": 1.4,
+ "learning_rate": 0.00010643939393939395,
+ "loss": 1.6936,
+ "step": 494
+ },
+ {
+ "epoch": 1.4,
+ "learning_rate": 0.00010625000000000001,
+ "loss": 1.7616,
+ "step": 495
+ },
+ {
+ "epoch": 1.41,
+ "learning_rate": 0.00010606060606060606,
+ "loss": 1.7749,
+ "step": 496
+ },
+ {
+ "epoch": 1.41,
+ "learning_rate": 0.00010587121212121212,
+ "loss": 1.7375,
+ "step": 497
+ },
+ {
+ "epoch": 1.41,
+ "learning_rate": 0.00010568181818181819,
+ "loss": 1.7203,
+ "step": 498
+ },
+ {
+ "epoch": 1.42,
+ "learning_rate": 0.00010549242424242424,
+ "loss": 1.7148,
+ "step": 499
+ },
+ {
+ "epoch": 1.42,
+ "learning_rate": 0.0001053030303030303,
+ "loss": 1.7859,
+ "step": 500
+ },
+ {
+ "epoch": 1.42,
+ "learning_rate": 0.00010511363636363635,
+ "loss": 1.7478,
+ "step": 501
+ },
+ {
+ "epoch": 1.42,
+ "learning_rate": 0.00010492424242424244,
+ "loss": 1.7091,
+ "step": 502
+ },
+ {
+ "epoch": 1.43,
+ "learning_rate": 0.00010473484848484849,
+ "loss": 1.7112,
+ "step": 503
+ },
+ {
+ "epoch": 1.43,
+ "learning_rate": 0.00010454545454545455,
+ "loss": 1.6967,
+ "step": 504
+ },
+ {
+ "epoch": 1.43,
+ "learning_rate": 0.0001043560606060606,
+ "loss": 1.7431,
+ "step": 505
+ },
+ {
+ "epoch": 1.44,
+ "learning_rate": 0.00010416666666666667,
+ "loss": 1.7065,
+ "step": 506
+ },
+ {
+ "epoch": 1.44,
+ "learning_rate": 0.00010397727272727273,
+ "loss": 1.6955,
+ "step": 507
+ },
+ {
+ "epoch": 1.44,
+ "learning_rate": 0.00010378787878787878,
+ "loss": 1.7375,
+ "step": 508
+ },
+ {
+ "epoch": 1.44,
+ "learning_rate": 0.00010359848484848484,
+ "loss": 1.7056,
+ "step": 509
+ },
+ {
+ "epoch": 1.45,
+ "learning_rate": 0.00010340909090909092,
+ "loss": 1.7044,
+ "step": 510
+ },
+ {
+ "epoch": 1.45,
+ "learning_rate": 0.00010321969696969698,
+ "loss": 1.7204,
+ "step": 511
+ },
+ {
+ "epoch": 1.45,
+ "learning_rate": 0.00010303030303030303,
+ "loss": 1.6801,
+ "step": 512
+ },
+ {
+ "epoch": 1.46,
+ "learning_rate": 0.00010284090909090909,
+ "loss": 1.7381,
+ "step": 513
+ },
+ {
+ "epoch": 1.46,
+ "learning_rate": 0.00010265151515151516,
+ "loss": 1.7064,
+ "step": 514
+ },
+ {
+ "epoch": 1.46,
+ "learning_rate": 0.00010246212121212121,
+ "loss": 1.6973,
+ "step": 515
+ },
+ {
+ "epoch": 1.46,
+ "learning_rate": 0.00010227272727272727,
+ "loss": 1.7295,
+ "step": 516
+ },
+ {
+ "epoch": 1.47,
+ "learning_rate": 0.00010208333333333333,
+ "loss": 1.6991,
+ "step": 517
+ },
+ {
+ "epoch": 1.47,
+ "learning_rate": 0.00010189393939393941,
+ "loss": 1.6986,
+ "step": 518
+ },
+ {
+ "epoch": 1.47,
+ "learning_rate": 0.00010170454545454546,
+ "loss": 1.6989,
+ "step": 519
+ },
+ {
+ "epoch": 1.48,
+ "learning_rate": 0.00010151515151515152,
+ "loss": 1.7009,
+ "step": 520
+ },
+ {
+ "epoch": 1.48,
+ "learning_rate": 0.00010132575757575757,
+ "loss": 1.6919,
+ "step": 521
+ },
+ {
+ "epoch": 1.48,
+ "learning_rate": 0.00010113636363636366,
+ "loss": 1.6955,
+ "step": 522
+ },
+ {
+ "epoch": 1.48,
+ "learning_rate": 0.00010094696969696971,
+ "loss": 1.7177,
+ "step": 523
+ },
+ {
+ "epoch": 1.49,
+ "learning_rate": 0.00010075757575757576,
+ "loss": 1.715,
+ "step": 524
+ },
+ {
+ "epoch": 1.49,
+ "learning_rate": 0.00010056818181818181,
+ "loss": 1.6686,
+ "step": 525
+ },
+ {
+ "epoch": 1.49,
+ "learning_rate": 0.0001003787878787879,
+ "loss": 1.771,
+ "step": 526
+ },
+ {
+ "epoch": 1.5,
+ "learning_rate": 0.00010018939393939395,
+ "loss": 1.7024,
+ "step": 527
+ },
+ {
+ "epoch": 1.5,
+ "learning_rate": 0.0001,
+ "loss": 1.7016,
+ "step": 528
+ },
+ {
+ "epoch": 1.5,
+ "learning_rate": 9.981060606060606e-05,
+ "loss": 1.6501,
+ "step": 529
+ },
+ {
+ "epoch": 1.5,
+ "learning_rate": 9.962121212121213e-05,
+ "loss": 1.6903,
+ "step": 530
+ },
+ {
+ "epoch": 1.51,
+ "learning_rate": 9.943181818181819e-05,
+ "loss": 1.6806,
+ "step": 531
+ },
+ {
+ "epoch": 1.51,
+ "learning_rate": 9.924242424242425e-05,
+ "loss": 1.7096,
+ "step": 532
+ },
+ {
+ "epoch": 1.51,
+ "learning_rate": 9.90530303030303e-05,
+ "loss": 1.7307,
+ "step": 533
+ },
+ {
+ "epoch": 1.51,
+ "learning_rate": 9.886363636363637e-05,
+ "loss": 1.6871,
+ "step": 534
+ },
+ {
+ "epoch": 1.52,
+ "learning_rate": 9.867424242424242e-05,
+ "loss": 1.7457,
+ "step": 535
+ },
+ {
+ "epoch": 1.52,
+ "learning_rate": 9.848484848484849e-05,
+ "loss": 1.6867,
+ "step": 536
+ },
+ {
+ "epoch": 1.52,
+ "learning_rate": 9.829545454545455e-05,
+ "loss": 1.6789,
+ "step": 537
+ },
+ {
+ "epoch": 1.53,
+ "learning_rate": 9.810606060606061e-05,
+ "loss": 1.6403,
+ "step": 538
+ },
+ {
+ "epoch": 1.53,
+ "learning_rate": 9.791666666666667e-05,
+ "loss": 1.6697,
+ "step": 539
+ },
+ {
+ "epoch": 1.53,
+ "learning_rate": 9.772727272727274e-05,
+ "loss": 1.7293,
+ "step": 540
+ },
+ {
+ "epoch": 1.53,
+ "learning_rate": 9.75378787878788e-05,
+ "loss": 1.6998,
+ "step": 541
+ },
+ {
+ "epoch": 1.54,
+ "learning_rate": 9.734848484848485e-05,
+ "loss": 1.693,
+ "step": 542
+ },
+ {
+ "epoch": 1.54,
+ "learning_rate": 9.71590909090909e-05,
+ "loss": 1.664,
+ "step": 543
+ },
+ {
+ "epoch": 1.54,
+ "learning_rate": 9.696969696969698e-05,
+ "loss": 1.7061,
+ "step": 544
+ },
+ {
+ "epoch": 1.55,
+ "learning_rate": 9.678030303030303e-05,
+ "loss": 1.6631,
+ "step": 545
+ },
+ {
+ "epoch": 1.55,
+ "learning_rate": 9.65909090909091e-05,
+ "loss": 1.6343,
+ "step": 546
+ },
+ {
+ "epoch": 1.55,
+ "learning_rate": 9.640151515151516e-05,
+ "loss": 1.6939,
+ "step": 547
+ },
+ {
+ "epoch": 1.55,
+ "learning_rate": 9.621212121212123e-05,
+ "loss": 1.669,
+ "step": 548
+ },
+ {
+ "epoch": 1.56,
+ "learning_rate": 9.602272727272728e-05,
+ "loss": 1.6561,
+ "step": 549
+ },
+ {
+ "epoch": 1.56,
+ "learning_rate": 9.583333333333334e-05,
+ "loss": 1.6675,
+ "step": 550
+ },
+ {
+ "epoch": 1.56,
+ "learning_rate": 9.564393939393939e-05,
+ "loss": 1.7109,
+ "step": 551
+ },
+ {
+ "epoch": 1.57,
+ "learning_rate": 9.545454545454546e-05,
+ "loss": 1.693,
+ "step": 552
+ },
+ {
+ "epoch": 1.57,
+ "learning_rate": 9.526515151515152e-05,
+ "loss": 1.6557,
+ "step": 553
+ },
+ {
+ "epoch": 1.57,
+ "learning_rate": 9.507575757575759e-05,
+ "loss": 1.6642,
+ "step": 554
+ },
+ {
+ "epoch": 1.57,
+ "learning_rate": 9.488636363636364e-05,
+ "loss": 1.6674,
+ "step": 555
+ },
+ {
+ "epoch": 1.58,
+ "learning_rate": 9.469696969696971e-05,
+ "loss": 1.6492,
+ "step": 556
+ },
+ {
+ "epoch": 1.58,
+ "learning_rate": 9.450757575757577e-05,
+ "loss": 1.6915,
+ "step": 557
+ },
+ {
+ "epoch": 1.58,
+ "learning_rate": 9.431818181818182e-05,
+ "loss": 1.7028,
+ "step": 558
+ },
+ {
+ "epoch": 1.59,
+ "learning_rate": 9.412878787878788e-05,
+ "loss": 1.6749,
+ "step": 559
+ },
+ {
+ "epoch": 1.59,
+ "learning_rate": 9.393939393939395e-05,
+ "loss": 1.6526,
+ "step": 560
+ },
+ {
+ "epoch": 1.59,
+ "learning_rate": 9.375e-05,
+ "loss": 1.687,
+ "step": 561
+ },
+ {
+ "epoch": 1.59,
+ "learning_rate": 9.356060606060606e-05,
+ "loss": 1.6632,
+ "step": 562
+ },
+ {
+ "epoch": 1.6,
+ "learning_rate": 9.337121212121213e-05,
+ "loss": 1.7074,
+ "step": 563
+ },
+ {
+ "epoch": 1.6,
+ "learning_rate": 9.318181818181818e-05,
+ "loss": 1.6164,
+ "step": 564
+ },
+ {
+ "epoch": 1.6,
+ "learning_rate": 9.299242424242425e-05,
+ "loss": 1.6594,
+ "step": 565
+ },
+ {
+ "epoch": 1.61,
+ "learning_rate": 9.280303030303031e-05,
+ "loss": 1.6603,
+ "step": 566
+ },
+ {
+ "epoch": 1.61,
+ "learning_rate": 9.261363636363636e-05,
+ "loss": 1.6213,
+ "step": 567
+ },
+ {
+ "epoch": 1.61,
+ "learning_rate": 9.242424242424242e-05,
+ "loss": 1.6899,
+ "step": 568
+ },
+ {
+ "epoch": 1.61,
+ "learning_rate": 9.223484848484849e-05,
+ "loss": 1.6619,
+ "step": 569
+ },
+ {
+ "epoch": 1.62,
+ "learning_rate": 9.204545454545454e-05,
+ "loss": 1.7035,
+ "step": 570
+ },
+ {
+ "epoch": 1.62,
+ "learning_rate": 9.185606060606061e-05,
+ "loss": 1.6408,
+ "step": 571
+ },
+ {
+ "epoch": 1.62,
+ "learning_rate": 9.166666666666667e-05,
+ "loss": 1.6506,
+ "step": 572
+ },
+ {
+ "epoch": 1.63,
+ "learning_rate": 9.147727272727274e-05,
+ "loss": 1.658,
+ "step": 573
+ },
+ {
+ "epoch": 1.63,
+ "learning_rate": 9.128787878787879e-05,
+ "loss": 1.6005,
+ "step": 574
+ },
+ {
+ "epoch": 1.63,
+ "learning_rate": 9.109848484848486e-05,
+ "loss": 1.6821,
+ "step": 575
+ },
+ {
+ "epoch": 1.63,
+ "learning_rate": 9.090909090909092e-05,
+ "loss": 1.6858,
+ "step": 576
+ },
+ {
+ "epoch": 1.64,
+ "learning_rate": 9.071969696969697e-05,
+ "loss": 1.6933,
+ "step": 577
+ },
+ {
+ "epoch": 1.64,
+ "learning_rate": 9.053030303030303e-05,
+ "loss": 1.6757,
+ "step": 578
+ },
+ {
+ "epoch": 1.64,
+ "learning_rate": 9.03409090909091e-05,
+ "loss": 1.6107,
+ "step": 579
+ },
+ {
+ "epoch": 1.65,
+ "learning_rate": 9.015151515151515e-05,
+ "loss": 1.5751,
+ "step": 580
+ },
+ {
+ "epoch": 1.65,
+ "learning_rate": 8.996212121212122e-05,
+ "loss": 1.6168,
+ "step": 581
+ },
+ {
+ "epoch": 1.65,
+ "learning_rate": 8.977272727272728e-05,
+ "loss": 1.6213,
+ "step": 582
+ },
+ {
+ "epoch": 1.65,
+ "learning_rate": 8.958333333333335e-05,
+ "loss": 1.6243,
+ "step": 583
+ },
+ {
+ "epoch": 1.66,
+ "learning_rate": 8.93939393939394e-05,
+ "loss": 1.6249,
+ "step": 584
+ },
+ {
+ "epoch": 1.66,
+ "learning_rate": 8.920454545454546e-05,
+ "loss": 1.6529,
+ "step": 585
+ },
+ {
+ "epoch": 1.66,
+ "learning_rate": 8.901515151515151e-05,
+ "loss": 1.626,
+ "step": 586
+ },
+ {
+ "epoch": 1.67,
+ "learning_rate": 8.882575757575758e-05,
+ "loss": 1.6616,
+ "step": 587
+ },
+ {
+ "epoch": 1.67,
+ "learning_rate": 8.863636363636364e-05,
+ "loss": 1.6622,
+ "step": 588
+ },
+ {
+ "epoch": 1.67,
+ "learning_rate": 8.844696969696971e-05,
+ "loss": 1.5927,
+ "step": 589
+ },
+ {
+ "epoch": 1.67,
+ "learning_rate": 8.825757575757576e-05,
+ "loss": 1.6351,
+ "step": 590
+ },
+ {
+ "epoch": 1.68,
+ "learning_rate": 8.806818181818183e-05,
+ "loss": 1.6213,
+ "step": 591
+ },
+ {
+ "epoch": 1.68,
+ "learning_rate": 8.787878787878789e-05,
+ "loss": 1.635,
+ "step": 592
+ },
+ {
+ "epoch": 1.68,
+ "learning_rate": 8.768939393939394e-05,
+ "loss": 1.6406,
+ "step": 593
+ },
+ {
+ "epoch": 1.69,
+ "learning_rate": 8.75e-05,
+ "loss": 1.6387,
+ "step": 594
+ },
+ {
+ "epoch": 1.69,
+ "learning_rate": 8.731060606060605e-05,
+ "loss": 1.602,
+ "step": 595
+ },
+ {
+ "epoch": 1.69,
+ "learning_rate": 8.712121212121212e-05,
+ "loss": 1.601,
+ "step": 596
+ },
+ {
+ "epoch": 1.69,
+ "learning_rate": 8.693181818181818e-05,
+ "loss": 1.5855,
+ "step": 597
+ },
+ {
+ "epoch": 1.7,
+ "learning_rate": 8.674242424242425e-05,
+ "loss": 1.6236,
+ "step": 598
+ },
+ {
+ "epoch": 1.7,
+ "learning_rate": 8.65530303030303e-05,
+ "loss": 1.5999,
+ "step": 599
+ },
+ {
+ "epoch": 1.7,
+ "learning_rate": 8.636363636363637e-05,
+ "loss": 1.6093,
+ "step": 600
+ }
+ ],
+ "logging_steps": 1,
+ "max_steps": 1056,
+ "num_train_epochs": 3,
+ "save_steps": 100,
+ "total_flos": 1.5199339357538918e+18,
+ "trial_name": null,
+ "trial_params": null
+}
diff --git a/checkpoint-600/training_args.bin b/checkpoint-600/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..574132c086f9a526d71493b1ec4c09396eac5482
--- /dev/null
+++ b/checkpoint-600/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:50303c7f1586feb001f01b7e968e567295f501ef6c6407e28250c458696a43af
+size 4155
diff --git a/checkpoint-700/README.md b/checkpoint-700/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..08371015f02382e6fcba318f4aaea54ae52cd3c4
--- /dev/null
+++ b/checkpoint-700/README.md
@@ -0,0 +1,34 @@
+---
+library_name: peft
+---
+## Training procedure
+
+
+The following `bitsandbytes` quantization config was used during training:
+- quant_method: bitsandbytes
+- load_in_8bit: True
+- load_in_4bit: False
+- llm_int8_threshold: 6.0
+- llm_int8_skip_modules: None
+- llm_int8_enable_fp32_cpu_offload: False
+- llm_int8_has_fp16_weight: False
+- bnb_4bit_quant_type: fp4
+- bnb_4bit_use_double_quant: False
+- bnb_4bit_compute_dtype: float32
+
+The following `bitsandbytes` quantization config was used during training:
+- quant_method: bitsandbytes
+- load_in_8bit: True
+- load_in_4bit: False
+- llm_int8_threshold: 6.0
+- llm_int8_skip_modules: None
+- llm_int8_enable_fp32_cpu_offload: False
+- llm_int8_has_fp16_weight: False
+- bnb_4bit_quant_type: fp4
+- bnb_4bit_use_double_quant: False
+- bnb_4bit_compute_dtype: float32
+### Framework versions
+
+- PEFT 0.6.0.dev0
+
+- PEFT 0.6.0.dev0
diff --git a/checkpoint-700/adapter_config.json b/checkpoint-700/adapter_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..751d838ac0c1ae5ca71ca448b25d7a8a0173f01b
--- /dev/null
+++ b/checkpoint-700/adapter_config.json
@@ -0,0 +1,23 @@
+{
+ "auto_mapping": null,
+ "base_model_name_or_path": "bigscience/bloomz-3b",
+ "bias": "none",
+ "fan_in_fan_out": false,
+ "inference_mode": true,
+ "init_lora_weights": true,
+ "layers_pattern": null,
+ "layers_to_transform": null,
+ "lora_alpha": 16,
+ "lora_dropout": 0.0,
+ "modules_to_save": null,
+ "peft_type": "LORA",
+ "r": 8,
+ "revision": null,
+ "target_modules": [
+ "dense_4h_to_h",
+ "dense",
+ "dense_h_to_4h",
+ "query_key_value"
+ ],
+ "task_type": "CAUSAL_LM"
+}
\ No newline at end of file
diff --git a/checkpoint-700/adapter_model.bin b/checkpoint-700/adapter_model.bin
new file mode 100644
index 0000000000000000000000000000000000000000..5d1712acbf11df3f010b42267f5ccae9be04e903
--- /dev/null
+++ b/checkpoint-700/adapter_model.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:392a500fcd75678b8abc5abb9b6d62783cbaf5216f6c1dd2b89e535322bbf988
+size 39409357
diff --git a/checkpoint-700/optimizer.pt b/checkpoint-700/optimizer.pt
new file mode 100644
index 0000000000000000000000000000000000000000..bfda7b9e60e8ece3e28d2f12ec3b8d5dc68f42e8
--- /dev/null
+++ b/checkpoint-700/optimizer.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:79e3515a1aea16ee27126a77593dc2b51ca68a187f0b29610198a52d91482186
+size 78844421
diff --git a/checkpoint-700/rng_state.pth b/checkpoint-700/rng_state.pth
new file mode 100644
index 0000000000000000000000000000000000000000..b9111227e3b79b9bc3e2a642832c3e49e36216d4
--- /dev/null
+++ b/checkpoint-700/rng_state.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:972139d83957a9cf2600cb6eeca17287d7a5377c33a53500ae7e13fe830ad36b
+size 14575
diff --git a/checkpoint-700/scheduler.pt b/checkpoint-700/scheduler.pt
new file mode 100644
index 0000000000000000000000000000000000000000..53f782bcb4ed75252bf5dec3f86cef82e2aafb9f
--- /dev/null
+++ b/checkpoint-700/scheduler.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:07cc1bd88850ee8fa1c5f1d58bf2fcbdb72ff40454badeb31350bb652da37dd5
+size 627
diff --git a/checkpoint-700/special_tokens_map.json b/checkpoint-700/special_tokens_map.json
new file mode 100644
index 0000000000000000000000000000000000000000..fdafe480f024ff444c7492147536765ce5d55a2d
--- /dev/null
+++ b/checkpoint-700/special_tokens_map.json
@@ -0,0 +1,6 @@
+{
+ "bos_token": "",
+ "eos_token": "",
+ "pad_token": "",
+ "unk_token": ""
+}
diff --git a/checkpoint-700/tokenizer.json b/checkpoint-700/tokenizer.json
new file mode 100644
index 0000000000000000000000000000000000000000..673c31abdeadf6576c3c754df86459e1ad64e207
--- /dev/null
+++ b/checkpoint-700/tokenizer.json
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:85b00d7db4df5df2e3f01cacc3feda246002a672f3356eec7f4b04a22eb0dfbe
+size 14500570
diff --git a/checkpoint-700/tokenizer_config.json b/checkpoint-700/tokenizer_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..4b56cc9c2965c07132c35df3e2972e93d98c82c3
--- /dev/null
+++ b/checkpoint-700/tokenizer_config.json
@@ -0,0 +1,10 @@
+{
+ "add_prefix_space": false,
+ "bos_token": "",
+ "clean_up_tokenization_spaces": false,
+ "eos_token": "",
+ "model_max_length": 1000000000000000019884624838656,
+ "pad_token": "",
+ "tokenizer_class": "BloomTokenizer",
+ "unk_token": ""
+}
diff --git a/checkpoint-700/trainer_state.json b/checkpoint-700/trainer_state.json
new file mode 100644
index 0000000000000000000000000000000000000000..0775091a098718c5e13abf7734e026c862189cb6
--- /dev/null
+++ b/checkpoint-700/trainer_state.json
@@ -0,0 +1,4219 @@
+{
+ "best_metric": null,
+ "best_model_checkpoint": null,
+ "epoch": 1.9858156028368794,
+ "eval_steps": 500,
+ "global_step": 700,
+ "is_hyper_param_search": false,
+ "is_local_process_zero": true,
+ "is_world_process_zero": true,
+ "log_history": [
+ {
+ "epoch": 0.0,
+ "learning_rate": 0.00019981060606060605,
+ "loss": 2.9206,
+ "step": 1
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 0.00019962121212121212,
+ "loss": 2.7609,
+ "step": 2
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 0.0001994318181818182,
+ "loss": 2.6878,
+ "step": 3
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 0.00019924242424242426,
+ "loss": 2.6697,
+ "step": 4
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 0.0001990530303030303,
+ "loss": 2.5818,
+ "step": 5
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 0.00019886363636363637,
+ "loss": 2.5396,
+ "step": 6
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 0.00019867424242424244,
+ "loss": 2.5265,
+ "step": 7
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 0.0001984848484848485,
+ "loss": 2.5475,
+ "step": 8
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 0.00019829545454545455,
+ "loss": 2.4835,
+ "step": 9
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 0.0001981060606060606,
+ "loss": 2.4559,
+ "step": 10
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 0.0001979166666666667,
+ "loss": 2.4511,
+ "step": 11
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 0.00019772727272727273,
+ "loss": 2.4592,
+ "step": 12
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 0.0001975378787878788,
+ "loss": 2.4495,
+ "step": 13
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 0.00019734848484848484,
+ "loss": 2.4714,
+ "step": 14
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 0.00019715909090909094,
+ "loss": 2.4302,
+ "step": 15
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 0.00019696969696969698,
+ "loss": 2.4097,
+ "step": 16
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 0.00019678030303030305,
+ "loss": 2.4523,
+ "step": 17
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 0.0001965909090909091,
+ "loss": 2.4325,
+ "step": 18
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 0.00019640151515151516,
+ "loss": 2.4125,
+ "step": 19
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 0.00019621212121212123,
+ "loss": 2.4329,
+ "step": 20
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 0.00019602272727272727,
+ "loss": 2.3471,
+ "step": 21
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 0.00019583333333333334,
+ "loss": 2.3012,
+ "step": 22
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 0.0001956439393939394,
+ "loss": 2.3869,
+ "step": 23
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 0.00019545454545454548,
+ "loss": 2.3822,
+ "step": 24
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 0.00019526515151515152,
+ "loss": 2.3427,
+ "step": 25
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 0.0001950757575757576,
+ "loss": 2.3659,
+ "step": 26
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 0.00019488636363636366,
+ "loss": 2.3826,
+ "step": 27
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 0.0001946969696969697,
+ "loss": 2.3532,
+ "step": 28
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 0.00019450757575757577,
+ "loss": 2.3828,
+ "step": 29
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 0.0001943181818181818,
+ "loss": 2.3133,
+ "step": 30
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 0.0001941287878787879,
+ "loss": 2.3613,
+ "step": 31
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 0.00019393939393939395,
+ "loss": 2.3867,
+ "step": 32
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 0.00019375000000000002,
+ "loss": 2.2966,
+ "step": 33
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 0.00019356060606060606,
+ "loss": 2.3436,
+ "step": 34
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 0.00019337121212121213,
+ "loss": 2.3425,
+ "step": 35
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 0.0001931818181818182,
+ "loss": 2.307,
+ "step": 36
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 0.00019299242424242424,
+ "loss": 2.3521,
+ "step": 37
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 0.0001928030303030303,
+ "loss": 2.3302,
+ "step": 38
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 0.00019261363636363635,
+ "loss": 2.312,
+ "step": 39
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 0.00019242424242424245,
+ "loss": 2.3655,
+ "step": 40
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 0.0001922348484848485,
+ "loss": 2.344,
+ "step": 41
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 0.00019204545454545456,
+ "loss": 2.3373,
+ "step": 42
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 0.0001918560606060606,
+ "loss": 2.3331,
+ "step": 43
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 0.00019166666666666667,
+ "loss": 2.3376,
+ "step": 44
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 0.00019147727272727274,
+ "loss": 2.3369,
+ "step": 45
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 0.00019128787878787878,
+ "loss": 2.3413,
+ "step": 46
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 0.00019109848484848485,
+ "loss": 2.3212,
+ "step": 47
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 0.00019090909090909092,
+ "loss": 2.307,
+ "step": 48
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 0.000190719696969697,
+ "loss": 2.2929,
+ "step": 49
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 0.00019053030303030303,
+ "loss": 2.2873,
+ "step": 50
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 0.0001903409090909091,
+ "loss": 2.3098,
+ "step": 51
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 0.00019015151515151517,
+ "loss": 2.3129,
+ "step": 52
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 0.0001899621212121212,
+ "loss": 2.3038,
+ "step": 53
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 0.00018977272727272728,
+ "loss": 2.286,
+ "step": 54
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 0.00018958333333333332,
+ "loss": 2.3388,
+ "step": 55
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 0.00018939393939393942,
+ "loss": 2.3193,
+ "step": 56
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 0.00018920454545454546,
+ "loss": 2.3136,
+ "step": 57
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 0.00018901515151515153,
+ "loss": 2.3141,
+ "step": 58
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 0.00018882575757575757,
+ "loss": 2.3646,
+ "step": 59
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 0.00018863636363636364,
+ "loss": 2.3318,
+ "step": 60
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 0.0001884469696969697,
+ "loss": 2.2977,
+ "step": 61
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 0.00018825757575757575,
+ "loss": 2.2764,
+ "step": 62
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 0.00018806818181818182,
+ "loss": 2.3095,
+ "step": 63
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 0.0001878787878787879,
+ "loss": 2.252,
+ "step": 64
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 0.00018768939393939396,
+ "loss": 2.2786,
+ "step": 65
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 0.0001875,
+ "loss": 2.2789,
+ "step": 66
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 0.00018731060606060607,
+ "loss": 2.2841,
+ "step": 67
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 0.00018712121212121212,
+ "loss": 2.3436,
+ "step": 68
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 0.00018693181818181818,
+ "loss": 2.2956,
+ "step": 69
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 0.00018674242424242425,
+ "loss": 2.2353,
+ "step": 70
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 0.0001865530303030303,
+ "loss": 2.2772,
+ "step": 71
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 0.00018636363636363636,
+ "loss": 2.2496,
+ "step": 72
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 0.00018617424242424243,
+ "loss": 2.2477,
+ "step": 73
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 0.0001859848484848485,
+ "loss": 2.2791,
+ "step": 74
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 0.00018579545454545454,
+ "loss": 2.2799,
+ "step": 75
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 0.00018560606060606061,
+ "loss": 2.3132,
+ "step": 76
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 0.00018541666666666668,
+ "loss": 2.2542,
+ "step": 77
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 0.00018522727272727273,
+ "loss": 2.2609,
+ "step": 78
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 0.0001850378787878788,
+ "loss": 2.2819,
+ "step": 79
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 0.00018484848484848484,
+ "loss": 2.2844,
+ "step": 80
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 0.00018465909090909093,
+ "loss": 2.2542,
+ "step": 81
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 0.00018446969696969697,
+ "loss": 2.2603,
+ "step": 82
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 0.00018428030303030304,
+ "loss": 2.2832,
+ "step": 83
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 0.00018409090909090909,
+ "loss": 2.2869,
+ "step": 84
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 0.00018390151515151518,
+ "loss": 2.2646,
+ "step": 85
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 0.00018371212121212122,
+ "loss": 2.2698,
+ "step": 86
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 0.00018352272727272727,
+ "loss": 2.2757,
+ "step": 87
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 0.00018333333333333334,
+ "loss": 2.2544,
+ "step": 88
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 0.0001831439393939394,
+ "loss": 2.2678,
+ "step": 89
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 0.00018295454545454547,
+ "loss": 2.2778,
+ "step": 90
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 0.00018276515151515152,
+ "loss": 2.2027,
+ "step": 91
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 0.00018257575757575758,
+ "loss": 2.2167,
+ "step": 92
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 0.00018238636363636365,
+ "loss": 2.2602,
+ "step": 93
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 0.00018219696969696972,
+ "loss": 2.2736,
+ "step": 94
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 0.00018200757575757577,
+ "loss": 2.2443,
+ "step": 95
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 0.00018181818181818183,
+ "loss": 2.2299,
+ "step": 96
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 0.0001816287878787879,
+ "loss": 2.2644,
+ "step": 97
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 0.00018143939393939395,
+ "loss": 2.259,
+ "step": 98
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 0.00018125000000000001,
+ "loss": 2.2567,
+ "step": 99
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 0.00018106060606060606,
+ "loss": 2.2599,
+ "step": 100
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 0.00018087121212121213,
+ "loss": 2.2091,
+ "step": 101
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 0.0001806818181818182,
+ "loss": 2.2312,
+ "step": 102
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 0.00018049242424242426,
+ "loss": 2.1869,
+ "step": 103
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 0.0001803030303030303,
+ "loss": 2.2023,
+ "step": 104
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 0.00018011363636363638,
+ "loss": 2.2132,
+ "step": 105
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 0.00017992424242424244,
+ "loss": 2.2612,
+ "step": 106
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 0.0001797348484848485,
+ "loss": 2.2109,
+ "step": 107
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 0.00017954545454545456,
+ "loss": 2.215,
+ "step": 108
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 0.0001793560606060606,
+ "loss": 2.2114,
+ "step": 109
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 0.0001791666666666667,
+ "loss": 2.2203,
+ "step": 110
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 0.00017897727272727274,
+ "loss": 2.2594,
+ "step": 111
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 0.0001787878787878788,
+ "loss": 2.2001,
+ "step": 112
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 0.00017859848484848485,
+ "loss": 2.2046,
+ "step": 113
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 0.00017840909090909092,
+ "loss": 2.1907,
+ "step": 114
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 0.00017821969696969699,
+ "loss": 2.2539,
+ "step": 115
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 0.00017803030303030303,
+ "loss": 2.2335,
+ "step": 116
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 0.0001778409090909091,
+ "loss": 2.2171,
+ "step": 117
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 0.00017765151515151517,
+ "loss": 2.2278,
+ "step": 118
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 0.00017746212121212123,
+ "loss": 2.231,
+ "step": 119
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 0.00017727272727272728,
+ "loss": 2.2141,
+ "step": 120
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 0.00017708333333333335,
+ "loss": 2.2432,
+ "step": 121
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 0.00017689393939393942,
+ "loss": 2.2266,
+ "step": 122
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 0.00017670454545454546,
+ "loss": 2.1929,
+ "step": 123
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 0.00017651515151515153,
+ "loss": 2.2077,
+ "step": 124
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 0.00017632575757575757,
+ "loss": 2.2133,
+ "step": 125
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 0.00017613636363636366,
+ "loss": 2.2251,
+ "step": 126
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 0.0001759469696969697,
+ "loss": 2.2265,
+ "step": 127
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 0.00017575757575757578,
+ "loss": 2.2186,
+ "step": 128
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 0.00017556818181818182,
+ "loss": 2.1925,
+ "step": 129
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 0.0001753787878787879,
+ "loss": 2.1956,
+ "step": 130
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 0.00017518939393939396,
+ "loss": 2.2459,
+ "step": 131
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 0.000175,
+ "loss": 2.22,
+ "step": 132
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 0.00017481060606060607,
+ "loss": 2.2143,
+ "step": 133
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 0.0001746212121212121,
+ "loss": 2.2359,
+ "step": 134
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 0.0001744318181818182,
+ "loss": 2.2058,
+ "step": 135
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 0.00017424242424242425,
+ "loss": 2.2307,
+ "step": 136
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 0.00017405303030303032,
+ "loss": 2.2062,
+ "step": 137
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 0.00017386363636363636,
+ "loss": 2.1796,
+ "step": 138
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 0.00017367424242424243,
+ "loss": 2.2054,
+ "step": 139
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 0.0001734848484848485,
+ "loss": 2.1651,
+ "step": 140
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 0.00017329545454545454,
+ "loss": 2.2159,
+ "step": 141
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 0.0001731060606060606,
+ "loss": 2.1988,
+ "step": 142
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 0.00017291666666666668,
+ "loss": 2.1676,
+ "step": 143
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 0.00017272727272727275,
+ "loss": 2.1725,
+ "step": 144
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 0.0001725378787878788,
+ "loss": 2.2205,
+ "step": 145
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 0.00017234848484848486,
+ "loss": 2.1486,
+ "step": 146
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 0.00017215909090909093,
+ "loss": 2.147,
+ "step": 147
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 0.00017196969696969697,
+ "loss": 2.1651,
+ "step": 148
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 0.00017178030303030304,
+ "loss": 2.1983,
+ "step": 149
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 0.00017159090909090908,
+ "loss": 2.1778,
+ "step": 150
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 0.00017140151515151518,
+ "loss": 2.1631,
+ "step": 151
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 0.00017121212121212122,
+ "loss": 2.1442,
+ "step": 152
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 0.0001710227272727273,
+ "loss": 2.1397,
+ "step": 153
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 0.00017083333333333333,
+ "loss": 2.1697,
+ "step": 154
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 0.0001706439393939394,
+ "loss": 2.1451,
+ "step": 155
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 0.00017045454545454547,
+ "loss": 2.1789,
+ "step": 156
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 0.0001702651515151515,
+ "loss": 2.1037,
+ "step": 157
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 0.00017007575757575758,
+ "loss": 2.1698,
+ "step": 158
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 0.00016988636363636365,
+ "loss": 2.1538,
+ "step": 159
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 0.00016969696969696972,
+ "loss": 2.2015,
+ "step": 160
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 0.00016950757575757576,
+ "loss": 2.179,
+ "step": 161
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 0.00016931818181818183,
+ "loss": 2.1766,
+ "step": 162
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 0.0001691287878787879,
+ "loss": 2.1646,
+ "step": 163
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 0.00016893939393939394,
+ "loss": 2.1694,
+ "step": 164
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 0.00016875,
+ "loss": 2.1562,
+ "step": 165
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 0.00016856060606060605,
+ "loss": 2.1551,
+ "step": 166
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 0.00016837121212121212,
+ "loss": 2.1652,
+ "step": 167
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 0.0001681818181818182,
+ "loss": 2.1594,
+ "step": 168
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 0.00016799242424242426,
+ "loss": 2.1674,
+ "step": 169
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 0.0001678030303030303,
+ "loss": 2.1378,
+ "step": 170
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 0.00016761363636363637,
+ "loss": 2.1447,
+ "step": 171
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 0.00016742424242424244,
+ "loss": 2.1451,
+ "step": 172
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 0.00016723484848484848,
+ "loss": 2.1336,
+ "step": 173
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 0.00016704545454545455,
+ "loss": 2.1231,
+ "step": 174
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 0.0001668560606060606,
+ "loss": 2.1143,
+ "step": 175
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 0.0001666666666666667,
+ "loss": 2.1316,
+ "step": 176
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 0.00016647727272727273,
+ "loss": 2.1281,
+ "step": 177
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 0.0001662878787878788,
+ "loss": 2.136,
+ "step": 178
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 0.00016609848484848484,
+ "loss": 2.1279,
+ "step": 179
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 0.00016590909090909094,
+ "loss": 2.1421,
+ "step": 180
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 0.00016571969696969698,
+ "loss": 2.1541,
+ "step": 181
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 0.00016553030303030305,
+ "loss": 2.1293,
+ "step": 182
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 0.0001653409090909091,
+ "loss": 2.1294,
+ "step": 183
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 0.00016515151515151516,
+ "loss": 2.1459,
+ "step": 184
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 0.00016496212121212123,
+ "loss": 2.1113,
+ "step": 185
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 0.00016477272727272727,
+ "loss": 2.1394,
+ "step": 186
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 0.00016458333333333334,
+ "loss": 2.1321,
+ "step": 187
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 0.0001643939393939394,
+ "loss": 2.148,
+ "step": 188
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 0.00016420454545454548,
+ "loss": 2.1631,
+ "step": 189
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 0.00016401515151515152,
+ "loss": 2.1276,
+ "step": 190
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 0.0001638257575757576,
+ "loss": 2.0706,
+ "step": 191
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 0.00016363636363636366,
+ "loss": 2.127,
+ "step": 192
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 0.0001634469696969697,
+ "loss": 2.1449,
+ "step": 193
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 0.00016325757575757577,
+ "loss": 2.1204,
+ "step": 194
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 0.0001630681818181818,
+ "loss": 2.0904,
+ "step": 195
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 0.0001628787878787879,
+ "loss": 2.1129,
+ "step": 196
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 0.00016268939393939395,
+ "loss": 2.1036,
+ "step": 197
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 0.00016250000000000002,
+ "loss": 2.1509,
+ "step": 198
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 0.00016231060606060606,
+ "loss": 2.1239,
+ "step": 199
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 0.00016212121212121213,
+ "loss": 2.145,
+ "step": 200
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 0.0001619318181818182,
+ "loss": 2.1221,
+ "step": 201
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 0.00016174242424242424,
+ "loss": 2.1181,
+ "step": 202
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 0.0001615530303030303,
+ "loss": 2.1306,
+ "step": 203
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 0.00016136363636363635,
+ "loss": 2.0199,
+ "step": 204
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 0.00016117424242424245,
+ "loss": 2.1178,
+ "step": 205
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 0.0001609848484848485,
+ "loss": 2.1584,
+ "step": 206
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 0.00016079545454545456,
+ "loss": 2.0872,
+ "step": 207
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 0.0001606060606060606,
+ "loss": 2.1033,
+ "step": 208
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 0.00016041666666666667,
+ "loss": 2.1381,
+ "step": 209
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 0.00016022727272727274,
+ "loss": 2.1127,
+ "step": 210
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 0.00016003787878787878,
+ "loss": 2.1077,
+ "step": 211
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 0.00015984848484848485,
+ "loss": 2.0984,
+ "step": 212
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 0.00015965909090909092,
+ "loss": 2.0994,
+ "step": 213
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 0.000159469696969697,
+ "loss": 2.096,
+ "step": 214
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 0.00015928030303030303,
+ "loss": 2.0909,
+ "step": 215
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 0.0001590909090909091,
+ "loss": 2.118,
+ "step": 216
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 0.00015890151515151517,
+ "loss": 2.0783,
+ "step": 217
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 0.0001587121212121212,
+ "loss": 2.0876,
+ "step": 218
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 0.00015852272727272728,
+ "loss": 2.0581,
+ "step": 219
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 0.00015833333333333332,
+ "loss": 2.0548,
+ "step": 220
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 0.00015814393939393942,
+ "loss": 2.0595,
+ "step": 221
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 0.00015795454545454546,
+ "loss": 2.0719,
+ "step": 222
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 0.00015776515151515153,
+ "loss": 2.0903,
+ "step": 223
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 0.00015757575757575757,
+ "loss": 2.0941,
+ "step": 224
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 0.00015738636363636364,
+ "loss": 2.0926,
+ "step": 225
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 0.0001571969696969697,
+ "loss": 2.0816,
+ "step": 226
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 0.00015700757575757575,
+ "loss": 2.0894,
+ "step": 227
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 0.00015681818181818182,
+ "loss": 2.0798,
+ "step": 228
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 0.0001566287878787879,
+ "loss": 2.0672,
+ "step": 229
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 0.00015643939393939396,
+ "loss": 2.0787,
+ "step": 230
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 0.00015625,
+ "loss": 2.0611,
+ "step": 231
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 0.00015606060606060607,
+ "loss": 2.0805,
+ "step": 232
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 0.00015587121212121211,
+ "loss": 2.053,
+ "step": 233
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 0.00015568181818181818,
+ "loss": 2.0575,
+ "step": 234
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 0.00015549242424242425,
+ "loss": 2.0459,
+ "step": 235
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 0.0001553030303030303,
+ "loss": 2.0635,
+ "step": 236
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 0.00015511363636363636,
+ "loss": 2.0335,
+ "step": 237
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 0.00015492424242424243,
+ "loss": 2.0681,
+ "step": 238
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 0.0001547348484848485,
+ "loss": 2.0748,
+ "step": 239
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 0.00015454545454545454,
+ "loss": 2.1091,
+ "step": 240
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 0.0001543560606060606,
+ "loss": 2.0732,
+ "step": 241
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 0.00015416666666666668,
+ "loss": 2.0746,
+ "step": 242
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 0.00015397727272727272,
+ "loss": 2.0306,
+ "step": 243
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 0.0001537878787878788,
+ "loss": 2.0864,
+ "step": 244
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 0.00015359848484848484,
+ "loss": 2.0664,
+ "step": 245
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 0.00015340909090909093,
+ "loss": 2.0801,
+ "step": 246
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 0.00015321969696969697,
+ "loss": 2.0799,
+ "step": 247
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 0.00015303030303030304,
+ "loss": 2.0621,
+ "step": 248
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 0.00015284090909090909,
+ "loss": 2.0687,
+ "step": 249
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 0.00015265151515151515,
+ "loss": 2.018,
+ "step": 250
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 0.00015246212121212122,
+ "loss": 2.0256,
+ "step": 251
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 0.00015227272727272727,
+ "loss": 2.0736,
+ "step": 252
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 0.00015208333333333333,
+ "loss": 2.0609,
+ "step": 253
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 0.0001518939393939394,
+ "loss": 2.0539,
+ "step": 254
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 0.00015170454545454547,
+ "loss": 2.0282,
+ "step": 255
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 0.00015151515151515152,
+ "loss": 2.0417,
+ "step": 256
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 0.00015132575757575758,
+ "loss": 2.0333,
+ "step": 257
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 0.00015113636363636365,
+ "loss": 2.0428,
+ "step": 258
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 0.00015094696969696972,
+ "loss": 2.045,
+ "step": 259
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 0.00015075757575757576,
+ "loss": 2.0463,
+ "step": 260
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 0.0001505681818181818,
+ "loss": 2.0539,
+ "step": 261
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 0.0001503787878787879,
+ "loss": 2.0184,
+ "step": 262
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 0.00015018939393939394,
+ "loss": 2.0858,
+ "step": 263
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 0.00015000000000000001,
+ "loss": 2.0239,
+ "step": 264
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 0.00014981060606060606,
+ "loss": 2.0425,
+ "step": 265
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 0.00014962121212121213,
+ "loss": 2.0263,
+ "step": 266
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 0.0001494318181818182,
+ "loss": 2.042,
+ "step": 267
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 0.00014924242424242426,
+ "loss": 2.026,
+ "step": 268
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 0.0001490530303030303,
+ "loss": 2.0411,
+ "step": 269
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 0.00014886363636363635,
+ "loss": 2.028,
+ "step": 270
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 0.00014867424242424244,
+ "loss": 2.0172,
+ "step": 271
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 0.00014848484848484849,
+ "loss": 2.0196,
+ "step": 272
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 0.00014829545454545455,
+ "loss": 2.0142,
+ "step": 273
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 0.0001481060606060606,
+ "loss": 2.0265,
+ "step": 274
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 0.0001479166666666667,
+ "loss": 2.0353,
+ "step": 275
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 0.00014772727272727274,
+ "loss": 2.0327,
+ "step": 276
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 0.0001475378787878788,
+ "loss": 2.0188,
+ "step": 277
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 0.00014734848484848485,
+ "loss": 1.9987,
+ "step": 278
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 0.00014715909090909092,
+ "loss": 2.0141,
+ "step": 279
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 0.00014696969696969698,
+ "loss": 2.0403,
+ "step": 280
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 0.00014678030303030303,
+ "loss": 1.9977,
+ "step": 281
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 0.0001465909090909091,
+ "loss": 1.9674,
+ "step": 282
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 0.00014640151515151517,
+ "loss": 1.9984,
+ "step": 283
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 0.00014621212121212123,
+ "loss": 1.9796,
+ "step": 284
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 0.00014602272727272728,
+ "loss": 2.0139,
+ "step": 285
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 0.00014583333333333335,
+ "loss": 1.9866,
+ "step": 286
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 0.00014564393939393941,
+ "loss": 2.0208,
+ "step": 287
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 0.00014545454545454546,
+ "loss": 1.9844,
+ "step": 288
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 0.00014526515151515153,
+ "loss": 2.0082,
+ "step": 289
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 0.00014507575757575757,
+ "loss": 1.984,
+ "step": 290
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 0.00014488636363636366,
+ "loss": 2.0015,
+ "step": 291
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 0.0001446969696969697,
+ "loss": 2.0209,
+ "step": 292
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 0.00014450757575757578,
+ "loss": 1.9728,
+ "step": 293
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 0.00014431818181818182,
+ "loss": 2.0032,
+ "step": 294
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 0.00014412878787878789,
+ "loss": 1.9641,
+ "step": 295
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 0.00014393939393939396,
+ "loss": 1.9945,
+ "step": 296
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 0.00014375,
+ "loss": 1.9658,
+ "step": 297
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 0.00014356060606060607,
+ "loss": 1.9907,
+ "step": 298
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 0.0001433712121212121,
+ "loss": 1.9935,
+ "step": 299
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 0.0001431818181818182,
+ "loss": 1.9897,
+ "step": 300
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 0.00014299242424242425,
+ "loss": 1.984,
+ "step": 301
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 0.00014280303030303032,
+ "loss": 1.9581,
+ "step": 302
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 0.00014261363636363636,
+ "loss": 1.9893,
+ "step": 303
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 0.00014242424242424243,
+ "loss": 1.9568,
+ "step": 304
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 0.0001422348484848485,
+ "loss": 1.98,
+ "step": 305
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 0.00014204545454545454,
+ "loss": 1.9519,
+ "step": 306
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 0.0001418560606060606,
+ "loss": 1.9693,
+ "step": 307
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 0.00014166666666666668,
+ "loss": 1.9866,
+ "step": 308
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 0.00014147727272727275,
+ "loss": 1.9508,
+ "step": 309
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 0.0001412878787878788,
+ "loss": 1.9653,
+ "step": 310
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 0.00014109848484848486,
+ "loss": 1.9991,
+ "step": 311
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 0.00014090909090909093,
+ "loss": 1.9442,
+ "step": 312
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 0.00014071969696969697,
+ "loss": 1.9807,
+ "step": 313
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 0.00014053030303030304,
+ "loss": 1.9958,
+ "step": 314
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 0.00014034090909090908,
+ "loss": 1.9459,
+ "step": 315
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 0.00014015151515151518,
+ "loss": 1.9508,
+ "step": 316
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 0.00013996212121212122,
+ "loss": 1.9933,
+ "step": 317
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 0.0001397727272727273,
+ "loss": 1.9703,
+ "step": 318
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 0.00013958333333333333,
+ "loss": 1.965,
+ "step": 319
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 0.0001393939393939394,
+ "loss": 1.9264,
+ "step": 320
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 0.00013920454545454547,
+ "loss": 1.9688,
+ "step": 321
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 0.0001390151515151515,
+ "loss": 1.9901,
+ "step": 322
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 0.00013882575757575758,
+ "loss": 1.9363,
+ "step": 323
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 0.00013863636363636365,
+ "loss": 1.9269,
+ "step": 324
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 0.00013844696969696972,
+ "loss": 1.9688,
+ "step": 325
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 0.00013825757575757576,
+ "loss": 1.9758,
+ "step": 326
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 0.00013806818181818183,
+ "loss": 1.9414,
+ "step": 327
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 0.0001378787878787879,
+ "loss": 1.9397,
+ "step": 328
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 0.00013768939393939394,
+ "loss": 1.9032,
+ "step": 329
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 0.0001375,
+ "loss": 1.9777,
+ "step": 330
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 0.00013731060606060605,
+ "loss": 1.9173,
+ "step": 331
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 0.00013712121212121212,
+ "loss": 1.9307,
+ "step": 332
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 0.0001369318181818182,
+ "loss": 1.9611,
+ "step": 333
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 0.00013674242424242426,
+ "loss": 1.9698,
+ "step": 334
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 0.0001365530303030303,
+ "loss": 1.9619,
+ "step": 335
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 0.00013636363636363637,
+ "loss": 1.9322,
+ "step": 336
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 0.00013617424242424244,
+ "loss": 1.9441,
+ "step": 337
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 0.00013598484848484848,
+ "loss": 1.9563,
+ "step": 338
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 0.00013579545454545455,
+ "loss": 1.9283,
+ "step": 339
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 0.0001356060606060606,
+ "loss": 1.9508,
+ "step": 340
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 0.0001354166666666667,
+ "loss": 1.9285,
+ "step": 341
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 0.00013522727272727273,
+ "loss": 1.9295,
+ "step": 342
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 0.0001350378787878788,
+ "loss": 1.9272,
+ "step": 343
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 0.00013484848484848484,
+ "loss": 1.905,
+ "step": 344
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 0.00013465909090909094,
+ "loss": 1.9409,
+ "step": 345
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 0.00013446969696969698,
+ "loss": 1.9674,
+ "step": 346
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 0.00013428030303030302,
+ "loss": 1.9278,
+ "step": 347
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 0.0001340909090909091,
+ "loss": 1.9136,
+ "step": 348
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 0.00013390151515151516,
+ "loss": 1.9143,
+ "step": 349
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 0.00013371212121212123,
+ "loss": 1.9381,
+ "step": 350
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 0.00013352272727272727,
+ "loss": 1.9136,
+ "step": 351
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 0.00013333333333333334,
+ "loss": 1.9103,
+ "step": 352
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 0.0001331439393939394,
+ "loss": 1.9027,
+ "step": 353
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 0.00013295454545454548,
+ "loss": 1.8674,
+ "step": 354
+ },
+ {
+ "epoch": 1.01,
+ "learning_rate": 0.00013276515151515152,
+ "loss": 1.886,
+ "step": 355
+ },
+ {
+ "epoch": 1.01,
+ "learning_rate": 0.00013257575757575756,
+ "loss": 1.887,
+ "step": 356
+ },
+ {
+ "epoch": 1.01,
+ "learning_rate": 0.00013238636363636366,
+ "loss": 1.87,
+ "step": 357
+ },
+ {
+ "epoch": 1.02,
+ "learning_rate": 0.0001321969696969697,
+ "loss": 1.8715,
+ "step": 358
+ },
+ {
+ "epoch": 1.02,
+ "learning_rate": 0.00013200757575757577,
+ "loss": 1.8993,
+ "step": 359
+ },
+ {
+ "epoch": 1.02,
+ "learning_rate": 0.0001318181818181818,
+ "loss": 1.8844,
+ "step": 360
+ },
+ {
+ "epoch": 1.02,
+ "learning_rate": 0.0001316287878787879,
+ "loss": 1.8965,
+ "step": 361
+ },
+ {
+ "epoch": 1.03,
+ "learning_rate": 0.00013143939393939395,
+ "loss": 1.8956,
+ "step": 362
+ },
+ {
+ "epoch": 1.03,
+ "learning_rate": 0.00013125000000000002,
+ "loss": 1.869,
+ "step": 363
+ },
+ {
+ "epoch": 1.03,
+ "learning_rate": 0.00013106060606060606,
+ "loss": 1.8702,
+ "step": 364
+ },
+ {
+ "epoch": 1.04,
+ "learning_rate": 0.00013087121212121213,
+ "loss": 1.8962,
+ "step": 365
+ },
+ {
+ "epoch": 1.04,
+ "learning_rate": 0.0001306818181818182,
+ "loss": 1.8613,
+ "step": 366
+ },
+ {
+ "epoch": 1.04,
+ "learning_rate": 0.00013049242424242424,
+ "loss": 1.8845,
+ "step": 367
+ },
+ {
+ "epoch": 1.04,
+ "learning_rate": 0.0001303030303030303,
+ "loss": 1.8689,
+ "step": 368
+ },
+ {
+ "epoch": 1.05,
+ "learning_rate": 0.00013011363636363635,
+ "loss": 1.9059,
+ "step": 369
+ },
+ {
+ "epoch": 1.05,
+ "learning_rate": 0.00012992424242424245,
+ "loss": 1.9082,
+ "step": 370
+ },
+ {
+ "epoch": 1.05,
+ "learning_rate": 0.0001297348484848485,
+ "loss": 1.8918,
+ "step": 371
+ },
+ {
+ "epoch": 1.06,
+ "learning_rate": 0.00012954545454545456,
+ "loss": 1.8657,
+ "step": 372
+ },
+ {
+ "epoch": 1.06,
+ "learning_rate": 0.0001293560606060606,
+ "loss": 1.8909,
+ "step": 373
+ },
+ {
+ "epoch": 1.06,
+ "learning_rate": 0.00012916666666666667,
+ "loss": 1.8649,
+ "step": 374
+ },
+ {
+ "epoch": 1.06,
+ "learning_rate": 0.00012897727272727274,
+ "loss": 1.833,
+ "step": 375
+ },
+ {
+ "epoch": 1.07,
+ "learning_rate": 0.00012878787878787878,
+ "loss": 1.8815,
+ "step": 376
+ },
+ {
+ "epoch": 1.07,
+ "learning_rate": 0.00012859848484848485,
+ "loss": 1.8646,
+ "step": 377
+ },
+ {
+ "epoch": 1.07,
+ "learning_rate": 0.00012840909090909092,
+ "loss": 1.846,
+ "step": 378
+ },
+ {
+ "epoch": 1.08,
+ "learning_rate": 0.000128219696969697,
+ "loss": 1.8631,
+ "step": 379
+ },
+ {
+ "epoch": 1.08,
+ "learning_rate": 0.00012803030303030303,
+ "loss": 1.917,
+ "step": 380
+ },
+ {
+ "epoch": 1.08,
+ "learning_rate": 0.0001278409090909091,
+ "loss": 1.9068,
+ "step": 381
+ },
+ {
+ "epoch": 1.08,
+ "learning_rate": 0.00012765151515151517,
+ "loss": 1.8772,
+ "step": 382
+ },
+ {
+ "epoch": 1.09,
+ "learning_rate": 0.0001274621212121212,
+ "loss": 1.8414,
+ "step": 383
+ },
+ {
+ "epoch": 1.09,
+ "learning_rate": 0.00012727272727272728,
+ "loss": 1.9003,
+ "step": 384
+ },
+ {
+ "epoch": 1.09,
+ "learning_rate": 0.00012708333333333332,
+ "loss": 1.8415,
+ "step": 385
+ },
+ {
+ "epoch": 1.1,
+ "learning_rate": 0.00012689393939393942,
+ "loss": 1.8491,
+ "step": 386
+ },
+ {
+ "epoch": 1.1,
+ "learning_rate": 0.00012670454545454546,
+ "loss": 1.8875,
+ "step": 387
+ },
+ {
+ "epoch": 1.1,
+ "learning_rate": 0.00012651515151515153,
+ "loss": 1.8629,
+ "step": 388
+ },
+ {
+ "epoch": 1.1,
+ "learning_rate": 0.00012632575757575757,
+ "loss": 1.8378,
+ "step": 389
+ },
+ {
+ "epoch": 1.11,
+ "learning_rate": 0.00012613636363636364,
+ "loss": 1.8442,
+ "step": 390
+ },
+ {
+ "epoch": 1.11,
+ "learning_rate": 0.0001259469696969697,
+ "loss": 1.8587,
+ "step": 391
+ },
+ {
+ "epoch": 1.11,
+ "learning_rate": 0.00012575757575757575,
+ "loss": 1.8659,
+ "step": 392
+ },
+ {
+ "epoch": 1.11,
+ "learning_rate": 0.00012556818181818182,
+ "loss": 1.8271,
+ "step": 393
+ },
+ {
+ "epoch": 1.12,
+ "learning_rate": 0.0001253787878787879,
+ "loss": 1.8692,
+ "step": 394
+ },
+ {
+ "epoch": 1.12,
+ "learning_rate": 0.00012518939393939396,
+ "loss": 1.8071,
+ "step": 395
+ },
+ {
+ "epoch": 1.12,
+ "learning_rate": 0.000125,
+ "loss": 1.8564,
+ "step": 396
+ },
+ {
+ "epoch": 1.13,
+ "learning_rate": 0.00012481060606060607,
+ "loss": 1.8891,
+ "step": 397
+ },
+ {
+ "epoch": 1.13,
+ "learning_rate": 0.00012462121212121211,
+ "loss": 1.8173,
+ "step": 398
+ },
+ {
+ "epoch": 1.13,
+ "learning_rate": 0.00012443181818181818,
+ "loss": 1.8653,
+ "step": 399
+ },
+ {
+ "epoch": 1.13,
+ "learning_rate": 0.00012424242424242425,
+ "loss": 1.8843,
+ "step": 400
+ },
+ {
+ "epoch": 1.14,
+ "learning_rate": 0.0001240530303030303,
+ "loss": 1.8527,
+ "step": 401
+ },
+ {
+ "epoch": 1.14,
+ "learning_rate": 0.00012386363636363636,
+ "loss": 1.8352,
+ "step": 402
+ },
+ {
+ "epoch": 1.14,
+ "learning_rate": 0.00012367424242424243,
+ "loss": 1.866,
+ "step": 403
+ },
+ {
+ "epoch": 1.15,
+ "learning_rate": 0.0001234848484848485,
+ "loss": 1.8557,
+ "step": 404
+ },
+ {
+ "epoch": 1.15,
+ "learning_rate": 0.00012329545454545454,
+ "loss": 1.8284,
+ "step": 405
+ },
+ {
+ "epoch": 1.15,
+ "learning_rate": 0.0001231060606060606,
+ "loss": 1.8359,
+ "step": 406
+ },
+ {
+ "epoch": 1.15,
+ "learning_rate": 0.00012291666666666668,
+ "loss": 1.8437,
+ "step": 407
+ },
+ {
+ "epoch": 1.16,
+ "learning_rate": 0.00012272727272727272,
+ "loss": 1.8256,
+ "step": 408
+ },
+ {
+ "epoch": 1.16,
+ "learning_rate": 0.0001225378787878788,
+ "loss": 1.8297,
+ "step": 409
+ },
+ {
+ "epoch": 1.16,
+ "learning_rate": 0.00012234848484848484,
+ "loss": 1.8515,
+ "step": 410
+ },
+ {
+ "epoch": 1.17,
+ "learning_rate": 0.00012215909090909093,
+ "loss": 1.8198,
+ "step": 411
+ },
+ {
+ "epoch": 1.17,
+ "learning_rate": 0.00012196969696969697,
+ "loss": 1.7809,
+ "step": 412
+ },
+ {
+ "epoch": 1.17,
+ "learning_rate": 0.00012178030303030303,
+ "loss": 1.8438,
+ "step": 413
+ },
+ {
+ "epoch": 1.17,
+ "learning_rate": 0.00012159090909090908,
+ "loss": 1.8497,
+ "step": 414
+ },
+ {
+ "epoch": 1.18,
+ "learning_rate": 0.00012140151515151517,
+ "loss": 1.8463,
+ "step": 415
+ },
+ {
+ "epoch": 1.18,
+ "learning_rate": 0.00012121212121212122,
+ "loss": 1.7768,
+ "step": 416
+ },
+ {
+ "epoch": 1.18,
+ "learning_rate": 0.00012102272727272728,
+ "loss": 1.8561,
+ "step": 417
+ },
+ {
+ "epoch": 1.19,
+ "learning_rate": 0.00012083333333333333,
+ "loss": 1.863,
+ "step": 418
+ },
+ {
+ "epoch": 1.19,
+ "learning_rate": 0.0001206439393939394,
+ "loss": 1.8193,
+ "step": 419
+ },
+ {
+ "epoch": 1.19,
+ "learning_rate": 0.00012045454545454546,
+ "loss": 1.7732,
+ "step": 420
+ },
+ {
+ "epoch": 1.19,
+ "learning_rate": 0.00012026515151515151,
+ "loss": 1.7728,
+ "step": 421
+ },
+ {
+ "epoch": 1.2,
+ "learning_rate": 0.00012007575757575757,
+ "loss": 1.8113,
+ "step": 422
+ },
+ {
+ "epoch": 1.2,
+ "learning_rate": 0.00011988636363636365,
+ "loss": 1.7976,
+ "step": 423
+ },
+ {
+ "epoch": 1.2,
+ "learning_rate": 0.00011969696969696971,
+ "loss": 1.786,
+ "step": 424
+ },
+ {
+ "epoch": 1.21,
+ "learning_rate": 0.00011950757575757576,
+ "loss": 1.8019,
+ "step": 425
+ },
+ {
+ "epoch": 1.21,
+ "learning_rate": 0.00011931818181818182,
+ "loss": 1.786,
+ "step": 426
+ },
+ {
+ "epoch": 1.21,
+ "learning_rate": 0.00011912878787878789,
+ "loss": 1.8102,
+ "step": 427
+ },
+ {
+ "epoch": 1.21,
+ "learning_rate": 0.00011893939393939394,
+ "loss": 1.7828,
+ "step": 428
+ },
+ {
+ "epoch": 1.22,
+ "learning_rate": 0.00011875,
+ "loss": 1.8498,
+ "step": 429
+ },
+ {
+ "epoch": 1.22,
+ "learning_rate": 0.00011856060606060606,
+ "loss": 1.7983,
+ "step": 430
+ },
+ {
+ "epoch": 1.22,
+ "learning_rate": 0.00011837121212121211,
+ "loss": 1.7863,
+ "step": 431
+ },
+ {
+ "epoch": 1.23,
+ "learning_rate": 0.0001181818181818182,
+ "loss": 1.8171,
+ "step": 432
+ },
+ {
+ "epoch": 1.23,
+ "learning_rate": 0.00011799242424242425,
+ "loss": 1.8143,
+ "step": 433
+ },
+ {
+ "epoch": 1.23,
+ "learning_rate": 0.0001178030303030303,
+ "loss": 1.7815,
+ "step": 434
+ },
+ {
+ "epoch": 1.23,
+ "learning_rate": 0.00011761363636363636,
+ "loss": 1.7652,
+ "step": 435
+ },
+ {
+ "epoch": 1.24,
+ "learning_rate": 0.00011742424242424244,
+ "loss": 1.8242,
+ "step": 436
+ },
+ {
+ "epoch": 1.24,
+ "learning_rate": 0.00011723484848484849,
+ "loss": 1.7789,
+ "step": 437
+ },
+ {
+ "epoch": 1.24,
+ "learning_rate": 0.00011704545454545454,
+ "loss": 1.7549,
+ "step": 438
+ },
+ {
+ "epoch": 1.25,
+ "learning_rate": 0.0001168560606060606,
+ "loss": 1.7528,
+ "step": 439
+ },
+ {
+ "epoch": 1.25,
+ "learning_rate": 0.00011666666666666668,
+ "loss": 1.7443,
+ "step": 440
+ },
+ {
+ "epoch": 1.25,
+ "learning_rate": 0.00011647727272727273,
+ "loss": 1.7911,
+ "step": 441
+ },
+ {
+ "epoch": 1.25,
+ "learning_rate": 0.00011628787878787879,
+ "loss": 1.7848,
+ "step": 442
+ },
+ {
+ "epoch": 1.26,
+ "learning_rate": 0.00011609848484848485,
+ "loss": 1.8137,
+ "step": 443
+ },
+ {
+ "epoch": 1.26,
+ "learning_rate": 0.00011590909090909093,
+ "loss": 1.791,
+ "step": 444
+ },
+ {
+ "epoch": 1.26,
+ "learning_rate": 0.00011571969696969698,
+ "loss": 1.7921,
+ "step": 445
+ },
+ {
+ "epoch": 1.27,
+ "learning_rate": 0.00011553030303030304,
+ "loss": 1.772,
+ "step": 446
+ },
+ {
+ "epoch": 1.27,
+ "learning_rate": 0.00011534090909090908,
+ "loss": 1.776,
+ "step": 447
+ },
+ {
+ "epoch": 1.27,
+ "learning_rate": 0.00011515151515151516,
+ "loss": 1.7948,
+ "step": 448
+ },
+ {
+ "epoch": 1.27,
+ "learning_rate": 0.00011496212121212122,
+ "loss": 1.8187,
+ "step": 449
+ },
+ {
+ "epoch": 1.28,
+ "learning_rate": 0.00011477272727272728,
+ "loss": 1.7436,
+ "step": 450
+ },
+ {
+ "epoch": 1.28,
+ "learning_rate": 0.00011458333333333333,
+ "loss": 1.7326,
+ "step": 451
+ },
+ {
+ "epoch": 1.28,
+ "learning_rate": 0.00011439393939393941,
+ "loss": 1.8005,
+ "step": 452
+ },
+ {
+ "epoch": 1.29,
+ "learning_rate": 0.00011420454545454547,
+ "loss": 1.8088,
+ "step": 453
+ },
+ {
+ "epoch": 1.29,
+ "learning_rate": 0.00011401515151515153,
+ "loss": 1.7632,
+ "step": 454
+ },
+ {
+ "epoch": 1.29,
+ "learning_rate": 0.00011382575757575758,
+ "loss": 1.7848,
+ "step": 455
+ },
+ {
+ "epoch": 1.29,
+ "learning_rate": 0.00011363636363636365,
+ "loss": 1.7756,
+ "step": 456
+ },
+ {
+ "epoch": 1.3,
+ "learning_rate": 0.0001134469696969697,
+ "loss": 1.7964,
+ "step": 457
+ },
+ {
+ "epoch": 1.3,
+ "learning_rate": 0.00011325757575757576,
+ "loss": 1.7604,
+ "step": 458
+ },
+ {
+ "epoch": 1.3,
+ "learning_rate": 0.00011306818181818182,
+ "loss": 1.7914,
+ "step": 459
+ },
+ {
+ "epoch": 1.3,
+ "learning_rate": 0.0001128787878787879,
+ "loss": 1.8059,
+ "step": 460
+ },
+ {
+ "epoch": 1.31,
+ "learning_rate": 0.00011268939393939395,
+ "loss": 1.7647,
+ "step": 461
+ },
+ {
+ "epoch": 1.31,
+ "learning_rate": 0.00011250000000000001,
+ "loss": 1.7526,
+ "step": 462
+ },
+ {
+ "epoch": 1.31,
+ "learning_rate": 0.00011231060606060607,
+ "loss": 1.7736,
+ "step": 463
+ },
+ {
+ "epoch": 1.32,
+ "learning_rate": 0.00011212121212121212,
+ "loss": 1.7449,
+ "step": 464
+ },
+ {
+ "epoch": 1.32,
+ "learning_rate": 0.00011193181818181819,
+ "loss": 1.7636,
+ "step": 465
+ },
+ {
+ "epoch": 1.32,
+ "learning_rate": 0.00011174242424242425,
+ "loss": 1.7846,
+ "step": 466
+ },
+ {
+ "epoch": 1.32,
+ "learning_rate": 0.0001115530303030303,
+ "loss": 1.78,
+ "step": 467
+ },
+ {
+ "epoch": 1.33,
+ "learning_rate": 0.00011136363636363636,
+ "loss": 1.7828,
+ "step": 468
+ },
+ {
+ "epoch": 1.33,
+ "learning_rate": 0.00011117424242424244,
+ "loss": 1.729,
+ "step": 469
+ },
+ {
+ "epoch": 1.33,
+ "learning_rate": 0.0001109848484848485,
+ "loss": 1.7145,
+ "step": 470
+ },
+ {
+ "epoch": 1.34,
+ "learning_rate": 0.00011079545454545455,
+ "loss": 1.7189,
+ "step": 471
+ },
+ {
+ "epoch": 1.34,
+ "learning_rate": 0.00011060606060606061,
+ "loss": 1.7628,
+ "step": 472
+ },
+ {
+ "epoch": 1.34,
+ "learning_rate": 0.00011041666666666668,
+ "loss": 1.7399,
+ "step": 473
+ },
+ {
+ "epoch": 1.34,
+ "learning_rate": 0.00011022727272727273,
+ "loss": 1.7561,
+ "step": 474
+ },
+ {
+ "epoch": 1.35,
+ "learning_rate": 0.00011003787878787879,
+ "loss": 1.7979,
+ "step": 475
+ },
+ {
+ "epoch": 1.35,
+ "learning_rate": 0.00010984848484848484,
+ "loss": 1.7673,
+ "step": 476
+ },
+ {
+ "epoch": 1.35,
+ "learning_rate": 0.00010965909090909093,
+ "loss": 1.777,
+ "step": 477
+ },
+ {
+ "epoch": 1.36,
+ "learning_rate": 0.00010946969696969698,
+ "loss": 1.7042,
+ "step": 478
+ },
+ {
+ "epoch": 1.36,
+ "learning_rate": 0.00010928030303030304,
+ "loss": 1.7764,
+ "step": 479
+ },
+ {
+ "epoch": 1.36,
+ "learning_rate": 0.00010909090909090909,
+ "loss": 1.6993,
+ "step": 480
+ },
+ {
+ "epoch": 1.36,
+ "learning_rate": 0.00010890151515151516,
+ "loss": 1.7688,
+ "step": 481
+ },
+ {
+ "epoch": 1.37,
+ "learning_rate": 0.00010871212121212122,
+ "loss": 1.7428,
+ "step": 482
+ },
+ {
+ "epoch": 1.37,
+ "learning_rate": 0.00010852272727272727,
+ "loss": 1.675,
+ "step": 483
+ },
+ {
+ "epoch": 1.37,
+ "learning_rate": 0.00010833333333333333,
+ "loss": 1.7183,
+ "step": 484
+ },
+ {
+ "epoch": 1.38,
+ "learning_rate": 0.00010814393939393941,
+ "loss": 1.7305,
+ "step": 485
+ },
+ {
+ "epoch": 1.38,
+ "learning_rate": 0.00010795454545454547,
+ "loss": 1.7541,
+ "step": 486
+ },
+ {
+ "epoch": 1.38,
+ "learning_rate": 0.00010776515151515152,
+ "loss": 1.7074,
+ "step": 487
+ },
+ {
+ "epoch": 1.38,
+ "learning_rate": 0.00010757575757575758,
+ "loss": 1.7093,
+ "step": 488
+ },
+ {
+ "epoch": 1.39,
+ "learning_rate": 0.00010738636363636365,
+ "loss": 1.7354,
+ "step": 489
+ },
+ {
+ "epoch": 1.39,
+ "learning_rate": 0.0001071969696969697,
+ "loss": 1.7415,
+ "step": 490
+ },
+ {
+ "epoch": 1.39,
+ "learning_rate": 0.00010700757575757576,
+ "loss": 1.72,
+ "step": 491
+ },
+ {
+ "epoch": 1.4,
+ "learning_rate": 0.00010681818181818181,
+ "loss": 1.7453,
+ "step": 492
+ },
+ {
+ "epoch": 1.4,
+ "learning_rate": 0.0001066287878787879,
+ "loss": 1.7077,
+ "step": 493
+ },
+ {
+ "epoch": 1.4,
+ "learning_rate": 0.00010643939393939395,
+ "loss": 1.6936,
+ "step": 494
+ },
+ {
+ "epoch": 1.4,
+ "learning_rate": 0.00010625000000000001,
+ "loss": 1.7616,
+ "step": 495
+ },
+ {
+ "epoch": 1.41,
+ "learning_rate": 0.00010606060606060606,
+ "loss": 1.7749,
+ "step": 496
+ },
+ {
+ "epoch": 1.41,
+ "learning_rate": 0.00010587121212121212,
+ "loss": 1.7375,
+ "step": 497
+ },
+ {
+ "epoch": 1.41,
+ "learning_rate": 0.00010568181818181819,
+ "loss": 1.7203,
+ "step": 498
+ },
+ {
+ "epoch": 1.42,
+ "learning_rate": 0.00010549242424242424,
+ "loss": 1.7148,
+ "step": 499
+ },
+ {
+ "epoch": 1.42,
+ "learning_rate": 0.0001053030303030303,
+ "loss": 1.7859,
+ "step": 500
+ },
+ {
+ "epoch": 1.42,
+ "learning_rate": 0.00010511363636363635,
+ "loss": 1.7478,
+ "step": 501
+ },
+ {
+ "epoch": 1.42,
+ "learning_rate": 0.00010492424242424244,
+ "loss": 1.7091,
+ "step": 502
+ },
+ {
+ "epoch": 1.43,
+ "learning_rate": 0.00010473484848484849,
+ "loss": 1.7112,
+ "step": 503
+ },
+ {
+ "epoch": 1.43,
+ "learning_rate": 0.00010454545454545455,
+ "loss": 1.6967,
+ "step": 504
+ },
+ {
+ "epoch": 1.43,
+ "learning_rate": 0.0001043560606060606,
+ "loss": 1.7431,
+ "step": 505
+ },
+ {
+ "epoch": 1.44,
+ "learning_rate": 0.00010416666666666667,
+ "loss": 1.7065,
+ "step": 506
+ },
+ {
+ "epoch": 1.44,
+ "learning_rate": 0.00010397727272727273,
+ "loss": 1.6955,
+ "step": 507
+ },
+ {
+ "epoch": 1.44,
+ "learning_rate": 0.00010378787878787878,
+ "loss": 1.7375,
+ "step": 508
+ },
+ {
+ "epoch": 1.44,
+ "learning_rate": 0.00010359848484848484,
+ "loss": 1.7056,
+ "step": 509
+ },
+ {
+ "epoch": 1.45,
+ "learning_rate": 0.00010340909090909092,
+ "loss": 1.7044,
+ "step": 510
+ },
+ {
+ "epoch": 1.45,
+ "learning_rate": 0.00010321969696969698,
+ "loss": 1.7204,
+ "step": 511
+ },
+ {
+ "epoch": 1.45,
+ "learning_rate": 0.00010303030303030303,
+ "loss": 1.6801,
+ "step": 512
+ },
+ {
+ "epoch": 1.46,
+ "learning_rate": 0.00010284090909090909,
+ "loss": 1.7381,
+ "step": 513
+ },
+ {
+ "epoch": 1.46,
+ "learning_rate": 0.00010265151515151516,
+ "loss": 1.7064,
+ "step": 514
+ },
+ {
+ "epoch": 1.46,
+ "learning_rate": 0.00010246212121212121,
+ "loss": 1.6973,
+ "step": 515
+ },
+ {
+ "epoch": 1.46,
+ "learning_rate": 0.00010227272727272727,
+ "loss": 1.7295,
+ "step": 516
+ },
+ {
+ "epoch": 1.47,
+ "learning_rate": 0.00010208333333333333,
+ "loss": 1.6991,
+ "step": 517
+ },
+ {
+ "epoch": 1.47,
+ "learning_rate": 0.00010189393939393941,
+ "loss": 1.6986,
+ "step": 518
+ },
+ {
+ "epoch": 1.47,
+ "learning_rate": 0.00010170454545454546,
+ "loss": 1.6989,
+ "step": 519
+ },
+ {
+ "epoch": 1.48,
+ "learning_rate": 0.00010151515151515152,
+ "loss": 1.7009,
+ "step": 520
+ },
+ {
+ "epoch": 1.48,
+ "learning_rate": 0.00010132575757575757,
+ "loss": 1.6919,
+ "step": 521
+ },
+ {
+ "epoch": 1.48,
+ "learning_rate": 0.00010113636363636366,
+ "loss": 1.6955,
+ "step": 522
+ },
+ {
+ "epoch": 1.48,
+ "learning_rate": 0.00010094696969696971,
+ "loss": 1.7177,
+ "step": 523
+ },
+ {
+ "epoch": 1.49,
+ "learning_rate": 0.00010075757575757576,
+ "loss": 1.715,
+ "step": 524
+ },
+ {
+ "epoch": 1.49,
+ "learning_rate": 0.00010056818181818181,
+ "loss": 1.6686,
+ "step": 525
+ },
+ {
+ "epoch": 1.49,
+ "learning_rate": 0.0001003787878787879,
+ "loss": 1.771,
+ "step": 526
+ },
+ {
+ "epoch": 1.5,
+ "learning_rate": 0.00010018939393939395,
+ "loss": 1.7024,
+ "step": 527
+ },
+ {
+ "epoch": 1.5,
+ "learning_rate": 0.0001,
+ "loss": 1.7016,
+ "step": 528
+ },
+ {
+ "epoch": 1.5,
+ "learning_rate": 9.981060606060606e-05,
+ "loss": 1.6501,
+ "step": 529
+ },
+ {
+ "epoch": 1.5,
+ "learning_rate": 9.962121212121213e-05,
+ "loss": 1.6903,
+ "step": 530
+ },
+ {
+ "epoch": 1.51,
+ "learning_rate": 9.943181818181819e-05,
+ "loss": 1.6806,
+ "step": 531
+ },
+ {
+ "epoch": 1.51,
+ "learning_rate": 9.924242424242425e-05,
+ "loss": 1.7096,
+ "step": 532
+ },
+ {
+ "epoch": 1.51,
+ "learning_rate": 9.90530303030303e-05,
+ "loss": 1.7307,
+ "step": 533
+ },
+ {
+ "epoch": 1.51,
+ "learning_rate": 9.886363636363637e-05,
+ "loss": 1.6871,
+ "step": 534
+ },
+ {
+ "epoch": 1.52,
+ "learning_rate": 9.867424242424242e-05,
+ "loss": 1.7457,
+ "step": 535
+ },
+ {
+ "epoch": 1.52,
+ "learning_rate": 9.848484848484849e-05,
+ "loss": 1.6867,
+ "step": 536
+ },
+ {
+ "epoch": 1.52,
+ "learning_rate": 9.829545454545455e-05,
+ "loss": 1.6789,
+ "step": 537
+ },
+ {
+ "epoch": 1.53,
+ "learning_rate": 9.810606060606061e-05,
+ "loss": 1.6403,
+ "step": 538
+ },
+ {
+ "epoch": 1.53,
+ "learning_rate": 9.791666666666667e-05,
+ "loss": 1.6697,
+ "step": 539
+ },
+ {
+ "epoch": 1.53,
+ "learning_rate": 9.772727272727274e-05,
+ "loss": 1.7293,
+ "step": 540
+ },
+ {
+ "epoch": 1.53,
+ "learning_rate": 9.75378787878788e-05,
+ "loss": 1.6998,
+ "step": 541
+ },
+ {
+ "epoch": 1.54,
+ "learning_rate": 9.734848484848485e-05,
+ "loss": 1.693,
+ "step": 542
+ },
+ {
+ "epoch": 1.54,
+ "learning_rate": 9.71590909090909e-05,
+ "loss": 1.664,
+ "step": 543
+ },
+ {
+ "epoch": 1.54,
+ "learning_rate": 9.696969696969698e-05,
+ "loss": 1.7061,
+ "step": 544
+ },
+ {
+ "epoch": 1.55,
+ "learning_rate": 9.678030303030303e-05,
+ "loss": 1.6631,
+ "step": 545
+ },
+ {
+ "epoch": 1.55,
+ "learning_rate": 9.65909090909091e-05,
+ "loss": 1.6343,
+ "step": 546
+ },
+ {
+ "epoch": 1.55,
+ "learning_rate": 9.640151515151516e-05,
+ "loss": 1.6939,
+ "step": 547
+ },
+ {
+ "epoch": 1.55,
+ "learning_rate": 9.621212121212123e-05,
+ "loss": 1.669,
+ "step": 548
+ },
+ {
+ "epoch": 1.56,
+ "learning_rate": 9.602272727272728e-05,
+ "loss": 1.6561,
+ "step": 549
+ },
+ {
+ "epoch": 1.56,
+ "learning_rate": 9.583333333333334e-05,
+ "loss": 1.6675,
+ "step": 550
+ },
+ {
+ "epoch": 1.56,
+ "learning_rate": 9.564393939393939e-05,
+ "loss": 1.7109,
+ "step": 551
+ },
+ {
+ "epoch": 1.57,
+ "learning_rate": 9.545454545454546e-05,
+ "loss": 1.693,
+ "step": 552
+ },
+ {
+ "epoch": 1.57,
+ "learning_rate": 9.526515151515152e-05,
+ "loss": 1.6557,
+ "step": 553
+ },
+ {
+ "epoch": 1.57,
+ "learning_rate": 9.507575757575759e-05,
+ "loss": 1.6642,
+ "step": 554
+ },
+ {
+ "epoch": 1.57,
+ "learning_rate": 9.488636363636364e-05,
+ "loss": 1.6674,
+ "step": 555
+ },
+ {
+ "epoch": 1.58,
+ "learning_rate": 9.469696969696971e-05,
+ "loss": 1.6492,
+ "step": 556
+ },
+ {
+ "epoch": 1.58,
+ "learning_rate": 9.450757575757577e-05,
+ "loss": 1.6915,
+ "step": 557
+ },
+ {
+ "epoch": 1.58,
+ "learning_rate": 9.431818181818182e-05,
+ "loss": 1.7028,
+ "step": 558
+ },
+ {
+ "epoch": 1.59,
+ "learning_rate": 9.412878787878788e-05,
+ "loss": 1.6749,
+ "step": 559
+ },
+ {
+ "epoch": 1.59,
+ "learning_rate": 9.393939393939395e-05,
+ "loss": 1.6526,
+ "step": 560
+ },
+ {
+ "epoch": 1.59,
+ "learning_rate": 9.375e-05,
+ "loss": 1.687,
+ "step": 561
+ },
+ {
+ "epoch": 1.59,
+ "learning_rate": 9.356060606060606e-05,
+ "loss": 1.6632,
+ "step": 562
+ },
+ {
+ "epoch": 1.6,
+ "learning_rate": 9.337121212121213e-05,
+ "loss": 1.7074,
+ "step": 563
+ },
+ {
+ "epoch": 1.6,
+ "learning_rate": 9.318181818181818e-05,
+ "loss": 1.6164,
+ "step": 564
+ },
+ {
+ "epoch": 1.6,
+ "learning_rate": 9.299242424242425e-05,
+ "loss": 1.6594,
+ "step": 565
+ },
+ {
+ "epoch": 1.61,
+ "learning_rate": 9.280303030303031e-05,
+ "loss": 1.6603,
+ "step": 566
+ },
+ {
+ "epoch": 1.61,
+ "learning_rate": 9.261363636363636e-05,
+ "loss": 1.6213,
+ "step": 567
+ },
+ {
+ "epoch": 1.61,
+ "learning_rate": 9.242424242424242e-05,
+ "loss": 1.6899,
+ "step": 568
+ },
+ {
+ "epoch": 1.61,
+ "learning_rate": 9.223484848484849e-05,
+ "loss": 1.6619,
+ "step": 569
+ },
+ {
+ "epoch": 1.62,
+ "learning_rate": 9.204545454545454e-05,
+ "loss": 1.7035,
+ "step": 570
+ },
+ {
+ "epoch": 1.62,
+ "learning_rate": 9.185606060606061e-05,
+ "loss": 1.6408,
+ "step": 571
+ },
+ {
+ "epoch": 1.62,
+ "learning_rate": 9.166666666666667e-05,
+ "loss": 1.6506,
+ "step": 572
+ },
+ {
+ "epoch": 1.63,
+ "learning_rate": 9.147727272727274e-05,
+ "loss": 1.658,
+ "step": 573
+ },
+ {
+ "epoch": 1.63,
+ "learning_rate": 9.128787878787879e-05,
+ "loss": 1.6005,
+ "step": 574
+ },
+ {
+ "epoch": 1.63,
+ "learning_rate": 9.109848484848486e-05,
+ "loss": 1.6821,
+ "step": 575
+ },
+ {
+ "epoch": 1.63,
+ "learning_rate": 9.090909090909092e-05,
+ "loss": 1.6858,
+ "step": 576
+ },
+ {
+ "epoch": 1.64,
+ "learning_rate": 9.071969696969697e-05,
+ "loss": 1.6933,
+ "step": 577
+ },
+ {
+ "epoch": 1.64,
+ "learning_rate": 9.053030303030303e-05,
+ "loss": 1.6757,
+ "step": 578
+ },
+ {
+ "epoch": 1.64,
+ "learning_rate": 9.03409090909091e-05,
+ "loss": 1.6107,
+ "step": 579
+ },
+ {
+ "epoch": 1.65,
+ "learning_rate": 9.015151515151515e-05,
+ "loss": 1.5751,
+ "step": 580
+ },
+ {
+ "epoch": 1.65,
+ "learning_rate": 8.996212121212122e-05,
+ "loss": 1.6168,
+ "step": 581
+ },
+ {
+ "epoch": 1.65,
+ "learning_rate": 8.977272727272728e-05,
+ "loss": 1.6213,
+ "step": 582
+ },
+ {
+ "epoch": 1.65,
+ "learning_rate": 8.958333333333335e-05,
+ "loss": 1.6243,
+ "step": 583
+ },
+ {
+ "epoch": 1.66,
+ "learning_rate": 8.93939393939394e-05,
+ "loss": 1.6249,
+ "step": 584
+ },
+ {
+ "epoch": 1.66,
+ "learning_rate": 8.920454545454546e-05,
+ "loss": 1.6529,
+ "step": 585
+ },
+ {
+ "epoch": 1.66,
+ "learning_rate": 8.901515151515151e-05,
+ "loss": 1.626,
+ "step": 586
+ },
+ {
+ "epoch": 1.67,
+ "learning_rate": 8.882575757575758e-05,
+ "loss": 1.6616,
+ "step": 587
+ },
+ {
+ "epoch": 1.67,
+ "learning_rate": 8.863636363636364e-05,
+ "loss": 1.6622,
+ "step": 588
+ },
+ {
+ "epoch": 1.67,
+ "learning_rate": 8.844696969696971e-05,
+ "loss": 1.5927,
+ "step": 589
+ },
+ {
+ "epoch": 1.67,
+ "learning_rate": 8.825757575757576e-05,
+ "loss": 1.6351,
+ "step": 590
+ },
+ {
+ "epoch": 1.68,
+ "learning_rate": 8.806818181818183e-05,
+ "loss": 1.6213,
+ "step": 591
+ },
+ {
+ "epoch": 1.68,
+ "learning_rate": 8.787878787878789e-05,
+ "loss": 1.635,
+ "step": 592
+ },
+ {
+ "epoch": 1.68,
+ "learning_rate": 8.768939393939394e-05,
+ "loss": 1.6406,
+ "step": 593
+ },
+ {
+ "epoch": 1.69,
+ "learning_rate": 8.75e-05,
+ "loss": 1.6387,
+ "step": 594
+ },
+ {
+ "epoch": 1.69,
+ "learning_rate": 8.731060606060605e-05,
+ "loss": 1.602,
+ "step": 595
+ },
+ {
+ "epoch": 1.69,
+ "learning_rate": 8.712121212121212e-05,
+ "loss": 1.601,
+ "step": 596
+ },
+ {
+ "epoch": 1.69,
+ "learning_rate": 8.693181818181818e-05,
+ "loss": 1.5855,
+ "step": 597
+ },
+ {
+ "epoch": 1.7,
+ "learning_rate": 8.674242424242425e-05,
+ "loss": 1.6236,
+ "step": 598
+ },
+ {
+ "epoch": 1.7,
+ "learning_rate": 8.65530303030303e-05,
+ "loss": 1.5999,
+ "step": 599
+ },
+ {
+ "epoch": 1.7,
+ "learning_rate": 8.636363636363637e-05,
+ "loss": 1.6093,
+ "step": 600
+ },
+ {
+ "epoch": 1.7,
+ "learning_rate": 8.617424242424243e-05,
+ "loss": 1.6602,
+ "step": 601
+ },
+ {
+ "epoch": 1.71,
+ "learning_rate": 8.598484848484848e-05,
+ "loss": 1.599,
+ "step": 602
+ },
+ {
+ "epoch": 1.71,
+ "learning_rate": 8.579545454545454e-05,
+ "loss": 1.6056,
+ "step": 603
+ },
+ {
+ "epoch": 1.71,
+ "learning_rate": 8.560606060606061e-05,
+ "loss": 1.6377,
+ "step": 604
+ },
+ {
+ "epoch": 1.72,
+ "learning_rate": 8.541666666666666e-05,
+ "loss": 1.5769,
+ "step": 605
+ },
+ {
+ "epoch": 1.72,
+ "learning_rate": 8.522727272727273e-05,
+ "loss": 1.6219,
+ "step": 606
+ },
+ {
+ "epoch": 1.72,
+ "learning_rate": 8.503787878787879e-05,
+ "loss": 1.5917,
+ "step": 607
+ },
+ {
+ "epoch": 1.72,
+ "learning_rate": 8.484848484848486e-05,
+ "loss": 1.6019,
+ "step": 608
+ },
+ {
+ "epoch": 1.73,
+ "learning_rate": 8.465909090909091e-05,
+ "loss": 1.6316,
+ "step": 609
+ },
+ {
+ "epoch": 1.73,
+ "learning_rate": 8.446969696969697e-05,
+ "loss": 1.6327,
+ "step": 610
+ },
+ {
+ "epoch": 1.73,
+ "learning_rate": 8.428030303030303e-05,
+ "loss": 1.6023,
+ "step": 611
+ },
+ {
+ "epoch": 1.74,
+ "learning_rate": 8.40909090909091e-05,
+ "loss": 1.6087,
+ "step": 612
+ },
+ {
+ "epoch": 1.74,
+ "learning_rate": 8.390151515151515e-05,
+ "loss": 1.6245,
+ "step": 613
+ },
+ {
+ "epoch": 1.74,
+ "learning_rate": 8.371212121212122e-05,
+ "loss": 1.5957,
+ "step": 614
+ },
+ {
+ "epoch": 1.74,
+ "learning_rate": 8.352272727272727e-05,
+ "loss": 1.6196,
+ "step": 615
+ },
+ {
+ "epoch": 1.75,
+ "learning_rate": 8.333333333333334e-05,
+ "loss": 1.6364,
+ "step": 616
+ },
+ {
+ "epoch": 1.75,
+ "learning_rate": 8.31439393939394e-05,
+ "loss": 1.5977,
+ "step": 617
+ },
+ {
+ "epoch": 1.75,
+ "learning_rate": 8.295454545454547e-05,
+ "loss": 1.6018,
+ "step": 618
+ },
+ {
+ "epoch": 1.76,
+ "learning_rate": 8.276515151515152e-05,
+ "loss": 1.5973,
+ "step": 619
+ },
+ {
+ "epoch": 1.76,
+ "learning_rate": 8.257575757575758e-05,
+ "loss": 1.6216,
+ "step": 620
+ },
+ {
+ "epoch": 1.76,
+ "learning_rate": 8.238636363636364e-05,
+ "loss": 1.6422,
+ "step": 621
+ },
+ {
+ "epoch": 1.76,
+ "learning_rate": 8.21969696969697e-05,
+ "loss": 1.6401,
+ "step": 622
+ },
+ {
+ "epoch": 1.77,
+ "learning_rate": 8.200757575757576e-05,
+ "loss": 1.6446,
+ "step": 623
+ },
+ {
+ "epoch": 1.77,
+ "learning_rate": 8.181818181818183e-05,
+ "loss": 1.5791,
+ "step": 624
+ },
+ {
+ "epoch": 1.77,
+ "learning_rate": 8.162878787878789e-05,
+ "loss": 1.5953,
+ "step": 625
+ },
+ {
+ "epoch": 1.78,
+ "learning_rate": 8.143939393939395e-05,
+ "loss": 1.5941,
+ "step": 626
+ },
+ {
+ "epoch": 1.78,
+ "learning_rate": 8.125000000000001e-05,
+ "loss": 1.5784,
+ "step": 627
+ },
+ {
+ "epoch": 1.78,
+ "learning_rate": 8.106060606060607e-05,
+ "loss": 1.6024,
+ "step": 628
+ },
+ {
+ "epoch": 1.78,
+ "learning_rate": 8.087121212121212e-05,
+ "loss": 1.6295,
+ "step": 629
+ },
+ {
+ "epoch": 1.79,
+ "learning_rate": 8.068181818181818e-05,
+ "loss": 1.5905,
+ "step": 630
+ },
+ {
+ "epoch": 1.79,
+ "learning_rate": 8.049242424242425e-05,
+ "loss": 1.6073,
+ "step": 631
+ },
+ {
+ "epoch": 1.79,
+ "learning_rate": 8.03030303030303e-05,
+ "loss": 1.6104,
+ "step": 632
+ },
+ {
+ "epoch": 1.8,
+ "learning_rate": 8.011363636363637e-05,
+ "loss": 1.6134,
+ "step": 633
+ },
+ {
+ "epoch": 1.8,
+ "learning_rate": 7.992424242424243e-05,
+ "loss": 1.6569,
+ "step": 634
+ },
+ {
+ "epoch": 1.8,
+ "learning_rate": 7.97348484848485e-05,
+ "loss": 1.5493,
+ "step": 635
+ },
+ {
+ "epoch": 1.8,
+ "learning_rate": 7.954545454545455e-05,
+ "loss": 1.5767,
+ "step": 636
+ },
+ {
+ "epoch": 1.81,
+ "learning_rate": 7.93560606060606e-05,
+ "loss": 1.5692,
+ "step": 637
+ },
+ {
+ "epoch": 1.81,
+ "learning_rate": 7.916666666666666e-05,
+ "loss": 1.6116,
+ "step": 638
+ },
+ {
+ "epoch": 1.81,
+ "learning_rate": 7.897727272727273e-05,
+ "loss": 1.5684,
+ "step": 639
+ },
+ {
+ "epoch": 1.82,
+ "learning_rate": 7.878787878787879e-05,
+ "loss": 1.6177,
+ "step": 640
+ },
+ {
+ "epoch": 1.82,
+ "learning_rate": 7.859848484848486e-05,
+ "loss": 1.6151,
+ "step": 641
+ },
+ {
+ "epoch": 1.82,
+ "learning_rate": 7.840909090909091e-05,
+ "loss": 1.6293,
+ "step": 642
+ },
+ {
+ "epoch": 1.82,
+ "learning_rate": 7.821969696969698e-05,
+ "loss": 1.6298,
+ "step": 643
+ },
+ {
+ "epoch": 1.83,
+ "learning_rate": 7.803030303030304e-05,
+ "loss": 1.6073,
+ "step": 644
+ },
+ {
+ "epoch": 1.83,
+ "learning_rate": 7.784090909090909e-05,
+ "loss": 1.5328,
+ "step": 645
+ },
+ {
+ "epoch": 1.83,
+ "learning_rate": 7.765151515151515e-05,
+ "loss": 1.5895,
+ "step": 646
+ },
+ {
+ "epoch": 1.84,
+ "learning_rate": 7.746212121212122e-05,
+ "loss": 1.5728,
+ "step": 647
+ },
+ {
+ "epoch": 1.84,
+ "learning_rate": 7.727272727272727e-05,
+ "loss": 1.5449,
+ "step": 648
+ },
+ {
+ "epoch": 1.84,
+ "learning_rate": 7.708333333333334e-05,
+ "loss": 1.5731,
+ "step": 649
+ },
+ {
+ "epoch": 1.84,
+ "learning_rate": 7.68939393939394e-05,
+ "loss": 1.627,
+ "step": 650
+ },
+ {
+ "epoch": 1.85,
+ "learning_rate": 7.670454545454547e-05,
+ "loss": 1.6139,
+ "step": 651
+ },
+ {
+ "epoch": 1.85,
+ "learning_rate": 7.651515151515152e-05,
+ "loss": 1.5613,
+ "step": 652
+ },
+ {
+ "epoch": 1.85,
+ "learning_rate": 7.632575757575758e-05,
+ "loss": 1.5734,
+ "step": 653
+ },
+ {
+ "epoch": 1.86,
+ "learning_rate": 7.613636363636363e-05,
+ "loss": 1.5537,
+ "step": 654
+ },
+ {
+ "epoch": 1.86,
+ "learning_rate": 7.59469696969697e-05,
+ "loss": 1.5886,
+ "step": 655
+ },
+ {
+ "epoch": 1.86,
+ "learning_rate": 7.575757575757576e-05,
+ "loss": 1.5504,
+ "step": 656
+ },
+ {
+ "epoch": 1.86,
+ "learning_rate": 7.556818181818183e-05,
+ "loss": 1.5613,
+ "step": 657
+ },
+ {
+ "epoch": 1.87,
+ "learning_rate": 7.537878787878788e-05,
+ "loss": 1.5877,
+ "step": 658
+ },
+ {
+ "epoch": 1.87,
+ "learning_rate": 7.518939393939395e-05,
+ "loss": 1.605,
+ "step": 659
+ },
+ {
+ "epoch": 1.87,
+ "learning_rate": 7.500000000000001e-05,
+ "loss": 1.5403,
+ "step": 660
+ },
+ {
+ "epoch": 1.88,
+ "learning_rate": 7.481060606060606e-05,
+ "loss": 1.6039,
+ "step": 661
+ },
+ {
+ "epoch": 1.88,
+ "learning_rate": 7.462121212121213e-05,
+ "loss": 1.5708,
+ "step": 662
+ },
+ {
+ "epoch": 1.88,
+ "learning_rate": 7.443181818181817e-05,
+ "loss": 1.5692,
+ "step": 663
+ },
+ {
+ "epoch": 1.88,
+ "learning_rate": 7.424242424242424e-05,
+ "loss": 1.5084,
+ "step": 664
+ },
+ {
+ "epoch": 1.89,
+ "learning_rate": 7.40530303030303e-05,
+ "loss": 1.5982,
+ "step": 665
+ },
+ {
+ "epoch": 1.89,
+ "learning_rate": 7.386363636363637e-05,
+ "loss": 1.5881,
+ "step": 666
+ },
+ {
+ "epoch": 1.89,
+ "learning_rate": 7.367424242424242e-05,
+ "loss": 1.5593,
+ "step": 667
+ },
+ {
+ "epoch": 1.9,
+ "learning_rate": 7.348484848484849e-05,
+ "loss": 1.5871,
+ "step": 668
+ },
+ {
+ "epoch": 1.9,
+ "learning_rate": 7.329545454545455e-05,
+ "loss": 1.6134,
+ "step": 669
+ },
+ {
+ "epoch": 1.9,
+ "learning_rate": 7.310606060606062e-05,
+ "loss": 1.5516,
+ "step": 670
+ },
+ {
+ "epoch": 1.9,
+ "learning_rate": 7.291666666666667e-05,
+ "loss": 1.5691,
+ "step": 671
+ },
+ {
+ "epoch": 1.91,
+ "learning_rate": 7.272727272727273e-05,
+ "loss": 1.5801,
+ "step": 672
+ },
+ {
+ "epoch": 1.91,
+ "learning_rate": 7.253787878787878e-05,
+ "loss": 1.5684,
+ "step": 673
+ },
+ {
+ "epoch": 1.91,
+ "learning_rate": 7.234848484848485e-05,
+ "loss": 1.5591,
+ "step": 674
+ },
+ {
+ "epoch": 1.91,
+ "learning_rate": 7.215909090909091e-05,
+ "loss": 1.5727,
+ "step": 675
+ },
+ {
+ "epoch": 1.92,
+ "learning_rate": 7.196969696969698e-05,
+ "loss": 1.6081,
+ "step": 676
+ },
+ {
+ "epoch": 1.92,
+ "learning_rate": 7.178030303030303e-05,
+ "loss": 1.5884,
+ "step": 677
+ },
+ {
+ "epoch": 1.92,
+ "learning_rate": 7.15909090909091e-05,
+ "loss": 1.5638,
+ "step": 678
+ },
+ {
+ "epoch": 1.93,
+ "learning_rate": 7.140151515151516e-05,
+ "loss": 1.5614,
+ "step": 679
+ },
+ {
+ "epoch": 1.93,
+ "learning_rate": 7.121212121212121e-05,
+ "loss": 1.5543,
+ "step": 680
+ },
+ {
+ "epoch": 1.93,
+ "learning_rate": 7.102272727272727e-05,
+ "loss": 1.5801,
+ "step": 681
+ },
+ {
+ "epoch": 1.93,
+ "learning_rate": 7.083333333333334e-05,
+ "loss": 1.5458,
+ "step": 682
+ },
+ {
+ "epoch": 1.94,
+ "learning_rate": 7.06439393939394e-05,
+ "loss": 1.5567,
+ "step": 683
+ },
+ {
+ "epoch": 1.94,
+ "learning_rate": 7.045454545454546e-05,
+ "loss": 1.5567,
+ "step": 684
+ },
+ {
+ "epoch": 1.94,
+ "learning_rate": 7.026515151515152e-05,
+ "loss": 1.5638,
+ "step": 685
+ },
+ {
+ "epoch": 1.95,
+ "learning_rate": 7.007575757575759e-05,
+ "loss": 1.5431,
+ "step": 686
+ },
+ {
+ "epoch": 1.95,
+ "learning_rate": 6.988636363636364e-05,
+ "loss": 1.5729,
+ "step": 687
+ },
+ {
+ "epoch": 1.95,
+ "learning_rate": 6.96969696969697e-05,
+ "loss": 1.5235,
+ "step": 688
+ },
+ {
+ "epoch": 1.95,
+ "learning_rate": 6.950757575757575e-05,
+ "loss": 1.5753,
+ "step": 689
+ },
+ {
+ "epoch": 1.96,
+ "learning_rate": 6.931818181818182e-05,
+ "loss": 1.5319,
+ "step": 690
+ },
+ {
+ "epoch": 1.96,
+ "learning_rate": 6.912878787878788e-05,
+ "loss": 1.5847,
+ "step": 691
+ },
+ {
+ "epoch": 1.96,
+ "learning_rate": 6.893939393939395e-05,
+ "loss": 1.5533,
+ "step": 692
+ },
+ {
+ "epoch": 1.97,
+ "learning_rate": 6.875e-05,
+ "loss": 1.5665,
+ "step": 693
+ },
+ {
+ "epoch": 1.97,
+ "learning_rate": 6.856060606060606e-05,
+ "loss": 1.5913,
+ "step": 694
+ },
+ {
+ "epoch": 1.97,
+ "learning_rate": 6.837121212121213e-05,
+ "loss": 1.6011,
+ "step": 695
+ },
+ {
+ "epoch": 1.97,
+ "learning_rate": 6.818181818181818e-05,
+ "loss": 1.5201,
+ "step": 696
+ },
+ {
+ "epoch": 1.98,
+ "learning_rate": 6.799242424242424e-05,
+ "loss": 1.57,
+ "step": 697
+ },
+ {
+ "epoch": 1.98,
+ "learning_rate": 6.78030303030303e-05,
+ "loss": 1.5381,
+ "step": 698
+ },
+ {
+ "epoch": 1.98,
+ "learning_rate": 6.761363636363636e-05,
+ "loss": 1.5681,
+ "step": 699
+ },
+ {
+ "epoch": 1.99,
+ "learning_rate": 6.742424242424242e-05,
+ "loss": 1.5542,
+ "step": 700
+ }
+ ],
+ "logging_steps": 1,
+ "max_steps": 1056,
+ "num_train_epochs": 3,
+ "save_steps": 100,
+ "total_flos": 1.7736631679034778e+18,
+ "trial_name": null,
+ "trial_params": null
+}
diff --git a/checkpoint-700/training_args.bin b/checkpoint-700/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..574132c086f9a526d71493b1ec4c09396eac5482
--- /dev/null
+++ b/checkpoint-700/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:50303c7f1586feb001f01b7e968e567295f501ef6c6407e28250c458696a43af
+size 4155
diff --git a/checkpoint-800/README.md b/checkpoint-800/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..08371015f02382e6fcba318f4aaea54ae52cd3c4
--- /dev/null
+++ b/checkpoint-800/README.md
@@ -0,0 +1,34 @@
+---
+library_name: peft
+---
+## Training procedure
+
+
+The following `bitsandbytes` quantization config was used during training:
+- quant_method: bitsandbytes
+- load_in_8bit: True
+- load_in_4bit: False
+- llm_int8_threshold: 6.0
+- llm_int8_skip_modules: None
+- llm_int8_enable_fp32_cpu_offload: False
+- llm_int8_has_fp16_weight: False
+- bnb_4bit_quant_type: fp4
+- bnb_4bit_use_double_quant: False
+- bnb_4bit_compute_dtype: float32
+
+The following `bitsandbytes` quantization config was used during training:
+- quant_method: bitsandbytes
+- load_in_8bit: True
+- load_in_4bit: False
+- llm_int8_threshold: 6.0
+- llm_int8_skip_modules: None
+- llm_int8_enable_fp32_cpu_offload: False
+- llm_int8_has_fp16_weight: False
+- bnb_4bit_quant_type: fp4
+- bnb_4bit_use_double_quant: False
+- bnb_4bit_compute_dtype: float32
+### Framework versions
+
+- PEFT 0.6.0.dev0
+
+- PEFT 0.6.0.dev0
diff --git a/checkpoint-800/adapter_config.json b/checkpoint-800/adapter_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..751d838ac0c1ae5ca71ca448b25d7a8a0173f01b
--- /dev/null
+++ b/checkpoint-800/adapter_config.json
@@ -0,0 +1,23 @@
+{
+ "auto_mapping": null,
+ "base_model_name_or_path": "bigscience/bloomz-3b",
+ "bias": "none",
+ "fan_in_fan_out": false,
+ "inference_mode": true,
+ "init_lora_weights": true,
+ "layers_pattern": null,
+ "layers_to_transform": null,
+ "lora_alpha": 16,
+ "lora_dropout": 0.0,
+ "modules_to_save": null,
+ "peft_type": "LORA",
+ "r": 8,
+ "revision": null,
+ "target_modules": [
+ "dense_4h_to_h",
+ "dense",
+ "dense_h_to_4h",
+ "query_key_value"
+ ],
+ "task_type": "CAUSAL_LM"
+}
\ No newline at end of file
diff --git a/checkpoint-800/adapter_model.bin b/checkpoint-800/adapter_model.bin
new file mode 100644
index 0000000000000000000000000000000000000000..15dac9b56de644045fda952dbc2d5304d712dd74
--- /dev/null
+++ b/checkpoint-800/adapter_model.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:22d373cd672ec19463b2d6040177a00cda26cb93669e558697149117a1782009
+size 39409357
diff --git a/checkpoint-800/optimizer.pt b/checkpoint-800/optimizer.pt
new file mode 100644
index 0000000000000000000000000000000000000000..dc130765bd34152bcb3035bb03a9a7a82bf4b2f4
--- /dev/null
+++ b/checkpoint-800/optimizer.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8180eb202f99741e9193d50ddc0fe66537c977ff1d53810afae55a1ff639e378
+size 78844421
diff --git a/checkpoint-800/rng_state.pth b/checkpoint-800/rng_state.pth
new file mode 100644
index 0000000000000000000000000000000000000000..e0df6592e27bef84a4beb9293e7b49666a4d652e
--- /dev/null
+++ b/checkpoint-800/rng_state.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7508d4b8dd267de5cc58e972da25236687927651336a28f292c92f7f23951475
+size 14575
diff --git a/checkpoint-800/scheduler.pt b/checkpoint-800/scheduler.pt
new file mode 100644
index 0000000000000000000000000000000000000000..97f8b91ac37199eddeac2c4efffde741d2d66c66
--- /dev/null
+++ b/checkpoint-800/scheduler.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2859a4339a77c64fb0291beaf107c7b4a1c6af6bae21c0d0374693793f0e1238
+size 627
diff --git a/checkpoint-800/special_tokens_map.json b/checkpoint-800/special_tokens_map.json
new file mode 100644
index 0000000000000000000000000000000000000000..fdafe480f024ff444c7492147536765ce5d55a2d
--- /dev/null
+++ b/checkpoint-800/special_tokens_map.json
@@ -0,0 +1,6 @@
+{
+ "bos_token": "",
+ "eos_token": "",
+ "pad_token": "",
+ "unk_token": ""
+}
diff --git a/checkpoint-800/tokenizer.json b/checkpoint-800/tokenizer.json
new file mode 100644
index 0000000000000000000000000000000000000000..673c31abdeadf6576c3c754df86459e1ad64e207
--- /dev/null
+++ b/checkpoint-800/tokenizer.json
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:85b00d7db4df5df2e3f01cacc3feda246002a672f3356eec7f4b04a22eb0dfbe
+size 14500570
diff --git a/checkpoint-800/tokenizer_config.json b/checkpoint-800/tokenizer_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..4b56cc9c2965c07132c35df3e2972e93d98c82c3
--- /dev/null
+++ b/checkpoint-800/tokenizer_config.json
@@ -0,0 +1,10 @@
+{
+ "add_prefix_space": false,
+ "bos_token": "",
+ "clean_up_tokenization_spaces": false,
+ "eos_token": "",
+ "model_max_length": 1000000000000000019884624838656,
+ "pad_token": "",
+ "tokenizer_class": "BloomTokenizer",
+ "unk_token": ""
+}
diff --git a/checkpoint-800/trainer_state.json b/checkpoint-800/trainer_state.json
new file mode 100644
index 0000000000000000000000000000000000000000..12ed85c67a6d6db07a9f9188475f06e7ec1814e6
--- /dev/null
+++ b/checkpoint-800/trainer_state.json
@@ -0,0 +1,4819 @@
+{
+ "best_metric": null,
+ "best_model_checkpoint": null,
+ "epoch": 2.269503546099291,
+ "eval_steps": 500,
+ "global_step": 800,
+ "is_hyper_param_search": false,
+ "is_local_process_zero": true,
+ "is_world_process_zero": true,
+ "log_history": [
+ {
+ "epoch": 0.0,
+ "learning_rate": 0.00019981060606060605,
+ "loss": 2.9206,
+ "step": 1
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 0.00019962121212121212,
+ "loss": 2.7609,
+ "step": 2
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 0.0001994318181818182,
+ "loss": 2.6878,
+ "step": 3
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 0.00019924242424242426,
+ "loss": 2.6697,
+ "step": 4
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 0.0001990530303030303,
+ "loss": 2.5818,
+ "step": 5
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 0.00019886363636363637,
+ "loss": 2.5396,
+ "step": 6
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 0.00019867424242424244,
+ "loss": 2.5265,
+ "step": 7
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 0.0001984848484848485,
+ "loss": 2.5475,
+ "step": 8
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 0.00019829545454545455,
+ "loss": 2.4835,
+ "step": 9
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 0.0001981060606060606,
+ "loss": 2.4559,
+ "step": 10
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 0.0001979166666666667,
+ "loss": 2.4511,
+ "step": 11
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 0.00019772727272727273,
+ "loss": 2.4592,
+ "step": 12
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 0.0001975378787878788,
+ "loss": 2.4495,
+ "step": 13
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 0.00019734848484848484,
+ "loss": 2.4714,
+ "step": 14
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 0.00019715909090909094,
+ "loss": 2.4302,
+ "step": 15
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 0.00019696969696969698,
+ "loss": 2.4097,
+ "step": 16
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 0.00019678030303030305,
+ "loss": 2.4523,
+ "step": 17
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 0.0001965909090909091,
+ "loss": 2.4325,
+ "step": 18
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 0.00019640151515151516,
+ "loss": 2.4125,
+ "step": 19
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 0.00019621212121212123,
+ "loss": 2.4329,
+ "step": 20
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 0.00019602272727272727,
+ "loss": 2.3471,
+ "step": 21
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 0.00019583333333333334,
+ "loss": 2.3012,
+ "step": 22
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 0.0001956439393939394,
+ "loss": 2.3869,
+ "step": 23
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 0.00019545454545454548,
+ "loss": 2.3822,
+ "step": 24
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 0.00019526515151515152,
+ "loss": 2.3427,
+ "step": 25
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 0.0001950757575757576,
+ "loss": 2.3659,
+ "step": 26
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 0.00019488636363636366,
+ "loss": 2.3826,
+ "step": 27
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 0.0001946969696969697,
+ "loss": 2.3532,
+ "step": 28
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 0.00019450757575757577,
+ "loss": 2.3828,
+ "step": 29
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 0.0001943181818181818,
+ "loss": 2.3133,
+ "step": 30
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 0.0001941287878787879,
+ "loss": 2.3613,
+ "step": 31
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 0.00019393939393939395,
+ "loss": 2.3867,
+ "step": 32
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 0.00019375000000000002,
+ "loss": 2.2966,
+ "step": 33
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 0.00019356060606060606,
+ "loss": 2.3436,
+ "step": 34
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 0.00019337121212121213,
+ "loss": 2.3425,
+ "step": 35
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 0.0001931818181818182,
+ "loss": 2.307,
+ "step": 36
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 0.00019299242424242424,
+ "loss": 2.3521,
+ "step": 37
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 0.0001928030303030303,
+ "loss": 2.3302,
+ "step": 38
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 0.00019261363636363635,
+ "loss": 2.312,
+ "step": 39
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 0.00019242424242424245,
+ "loss": 2.3655,
+ "step": 40
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 0.0001922348484848485,
+ "loss": 2.344,
+ "step": 41
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 0.00019204545454545456,
+ "loss": 2.3373,
+ "step": 42
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 0.0001918560606060606,
+ "loss": 2.3331,
+ "step": 43
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 0.00019166666666666667,
+ "loss": 2.3376,
+ "step": 44
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 0.00019147727272727274,
+ "loss": 2.3369,
+ "step": 45
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 0.00019128787878787878,
+ "loss": 2.3413,
+ "step": 46
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 0.00019109848484848485,
+ "loss": 2.3212,
+ "step": 47
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 0.00019090909090909092,
+ "loss": 2.307,
+ "step": 48
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 0.000190719696969697,
+ "loss": 2.2929,
+ "step": 49
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 0.00019053030303030303,
+ "loss": 2.2873,
+ "step": 50
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 0.0001903409090909091,
+ "loss": 2.3098,
+ "step": 51
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 0.00019015151515151517,
+ "loss": 2.3129,
+ "step": 52
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 0.0001899621212121212,
+ "loss": 2.3038,
+ "step": 53
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 0.00018977272727272728,
+ "loss": 2.286,
+ "step": 54
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 0.00018958333333333332,
+ "loss": 2.3388,
+ "step": 55
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 0.00018939393939393942,
+ "loss": 2.3193,
+ "step": 56
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 0.00018920454545454546,
+ "loss": 2.3136,
+ "step": 57
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 0.00018901515151515153,
+ "loss": 2.3141,
+ "step": 58
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 0.00018882575757575757,
+ "loss": 2.3646,
+ "step": 59
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 0.00018863636363636364,
+ "loss": 2.3318,
+ "step": 60
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 0.0001884469696969697,
+ "loss": 2.2977,
+ "step": 61
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 0.00018825757575757575,
+ "loss": 2.2764,
+ "step": 62
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 0.00018806818181818182,
+ "loss": 2.3095,
+ "step": 63
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 0.0001878787878787879,
+ "loss": 2.252,
+ "step": 64
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 0.00018768939393939396,
+ "loss": 2.2786,
+ "step": 65
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 0.0001875,
+ "loss": 2.2789,
+ "step": 66
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 0.00018731060606060607,
+ "loss": 2.2841,
+ "step": 67
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 0.00018712121212121212,
+ "loss": 2.3436,
+ "step": 68
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 0.00018693181818181818,
+ "loss": 2.2956,
+ "step": 69
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 0.00018674242424242425,
+ "loss": 2.2353,
+ "step": 70
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 0.0001865530303030303,
+ "loss": 2.2772,
+ "step": 71
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 0.00018636363636363636,
+ "loss": 2.2496,
+ "step": 72
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 0.00018617424242424243,
+ "loss": 2.2477,
+ "step": 73
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 0.0001859848484848485,
+ "loss": 2.2791,
+ "step": 74
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 0.00018579545454545454,
+ "loss": 2.2799,
+ "step": 75
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 0.00018560606060606061,
+ "loss": 2.3132,
+ "step": 76
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 0.00018541666666666668,
+ "loss": 2.2542,
+ "step": 77
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 0.00018522727272727273,
+ "loss": 2.2609,
+ "step": 78
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 0.0001850378787878788,
+ "loss": 2.2819,
+ "step": 79
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 0.00018484848484848484,
+ "loss": 2.2844,
+ "step": 80
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 0.00018465909090909093,
+ "loss": 2.2542,
+ "step": 81
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 0.00018446969696969697,
+ "loss": 2.2603,
+ "step": 82
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 0.00018428030303030304,
+ "loss": 2.2832,
+ "step": 83
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 0.00018409090909090909,
+ "loss": 2.2869,
+ "step": 84
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 0.00018390151515151518,
+ "loss": 2.2646,
+ "step": 85
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 0.00018371212121212122,
+ "loss": 2.2698,
+ "step": 86
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 0.00018352272727272727,
+ "loss": 2.2757,
+ "step": 87
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 0.00018333333333333334,
+ "loss": 2.2544,
+ "step": 88
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 0.0001831439393939394,
+ "loss": 2.2678,
+ "step": 89
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 0.00018295454545454547,
+ "loss": 2.2778,
+ "step": 90
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 0.00018276515151515152,
+ "loss": 2.2027,
+ "step": 91
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 0.00018257575757575758,
+ "loss": 2.2167,
+ "step": 92
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 0.00018238636363636365,
+ "loss": 2.2602,
+ "step": 93
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 0.00018219696969696972,
+ "loss": 2.2736,
+ "step": 94
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 0.00018200757575757577,
+ "loss": 2.2443,
+ "step": 95
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 0.00018181818181818183,
+ "loss": 2.2299,
+ "step": 96
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 0.0001816287878787879,
+ "loss": 2.2644,
+ "step": 97
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 0.00018143939393939395,
+ "loss": 2.259,
+ "step": 98
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 0.00018125000000000001,
+ "loss": 2.2567,
+ "step": 99
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 0.00018106060606060606,
+ "loss": 2.2599,
+ "step": 100
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 0.00018087121212121213,
+ "loss": 2.2091,
+ "step": 101
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 0.0001806818181818182,
+ "loss": 2.2312,
+ "step": 102
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 0.00018049242424242426,
+ "loss": 2.1869,
+ "step": 103
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 0.0001803030303030303,
+ "loss": 2.2023,
+ "step": 104
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 0.00018011363636363638,
+ "loss": 2.2132,
+ "step": 105
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 0.00017992424242424244,
+ "loss": 2.2612,
+ "step": 106
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 0.0001797348484848485,
+ "loss": 2.2109,
+ "step": 107
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 0.00017954545454545456,
+ "loss": 2.215,
+ "step": 108
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 0.0001793560606060606,
+ "loss": 2.2114,
+ "step": 109
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 0.0001791666666666667,
+ "loss": 2.2203,
+ "step": 110
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 0.00017897727272727274,
+ "loss": 2.2594,
+ "step": 111
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 0.0001787878787878788,
+ "loss": 2.2001,
+ "step": 112
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 0.00017859848484848485,
+ "loss": 2.2046,
+ "step": 113
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 0.00017840909090909092,
+ "loss": 2.1907,
+ "step": 114
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 0.00017821969696969699,
+ "loss": 2.2539,
+ "step": 115
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 0.00017803030303030303,
+ "loss": 2.2335,
+ "step": 116
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 0.0001778409090909091,
+ "loss": 2.2171,
+ "step": 117
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 0.00017765151515151517,
+ "loss": 2.2278,
+ "step": 118
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 0.00017746212121212123,
+ "loss": 2.231,
+ "step": 119
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 0.00017727272727272728,
+ "loss": 2.2141,
+ "step": 120
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 0.00017708333333333335,
+ "loss": 2.2432,
+ "step": 121
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 0.00017689393939393942,
+ "loss": 2.2266,
+ "step": 122
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 0.00017670454545454546,
+ "loss": 2.1929,
+ "step": 123
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 0.00017651515151515153,
+ "loss": 2.2077,
+ "step": 124
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 0.00017632575757575757,
+ "loss": 2.2133,
+ "step": 125
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 0.00017613636363636366,
+ "loss": 2.2251,
+ "step": 126
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 0.0001759469696969697,
+ "loss": 2.2265,
+ "step": 127
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 0.00017575757575757578,
+ "loss": 2.2186,
+ "step": 128
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 0.00017556818181818182,
+ "loss": 2.1925,
+ "step": 129
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 0.0001753787878787879,
+ "loss": 2.1956,
+ "step": 130
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 0.00017518939393939396,
+ "loss": 2.2459,
+ "step": 131
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 0.000175,
+ "loss": 2.22,
+ "step": 132
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 0.00017481060606060607,
+ "loss": 2.2143,
+ "step": 133
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 0.0001746212121212121,
+ "loss": 2.2359,
+ "step": 134
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 0.0001744318181818182,
+ "loss": 2.2058,
+ "step": 135
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 0.00017424242424242425,
+ "loss": 2.2307,
+ "step": 136
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 0.00017405303030303032,
+ "loss": 2.2062,
+ "step": 137
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 0.00017386363636363636,
+ "loss": 2.1796,
+ "step": 138
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 0.00017367424242424243,
+ "loss": 2.2054,
+ "step": 139
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 0.0001734848484848485,
+ "loss": 2.1651,
+ "step": 140
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 0.00017329545454545454,
+ "loss": 2.2159,
+ "step": 141
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 0.0001731060606060606,
+ "loss": 2.1988,
+ "step": 142
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 0.00017291666666666668,
+ "loss": 2.1676,
+ "step": 143
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 0.00017272727272727275,
+ "loss": 2.1725,
+ "step": 144
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 0.0001725378787878788,
+ "loss": 2.2205,
+ "step": 145
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 0.00017234848484848486,
+ "loss": 2.1486,
+ "step": 146
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 0.00017215909090909093,
+ "loss": 2.147,
+ "step": 147
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 0.00017196969696969697,
+ "loss": 2.1651,
+ "step": 148
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 0.00017178030303030304,
+ "loss": 2.1983,
+ "step": 149
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 0.00017159090909090908,
+ "loss": 2.1778,
+ "step": 150
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 0.00017140151515151518,
+ "loss": 2.1631,
+ "step": 151
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 0.00017121212121212122,
+ "loss": 2.1442,
+ "step": 152
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 0.0001710227272727273,
+ "loss": 2.1397,
+ "step": 153
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 0.00017083333333333333,
+ "loss": 2.1697,
+ "step": 154
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 0.0001706439393939394,
+ "loss": 2.1451,
+ "step": 155
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 0.00017045454545454547,
+ "loss": 2.1789,
+ "step": 156
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 0.0001702651515151515,
+ "loss": 2.1037,
+ "step": 157
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 0.00017007575757575758,
+ "loss": 2.1698,
+ "step": 158
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 0.00016988636363636365,
+ "loss": 2.1538,
+ "step": 159
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 0.00016969696969696972,
+ "loss": 2.2015,
+ "step": 160
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 0.00016950757575757576,
+ "loss": 2.179,
+ "step": 161
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 0.00016931818181818183,
+ "loss": 2.1766,
+ "step": 162
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 0.0001691287878787879,
+ "loss": 2.1646,
+ "step": 163
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 0.00016893939393939394,
+ "loss": 2.1694,
+ "step": 164
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 0.00016875,
+ "loss": 2.1562,
+ "step": 165
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 0.00016856060606060605,
+ "loss": 2.1551,
+ "step": 166
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 0.00016837121212121212,
+ "loss": 2.1652,
+ "step": 167
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 0.0001681818181818182,
+ "loss": 2.1594,
+ "step": 168
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 0.00016799242424242426,
+ "loss": 2.1674,
+ "step": 169
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 0.0001678030303030303,
+ "loss": 2.1378,
+ "step": 170
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 0.00016761363636363637,
+ "loss": 2.1447,
+ "step": 171
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 0.00016742424242424244,
+ "loss": 2.1451,
+ "step": 172
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 0.00016723484848484848,
+ "loss": 2.1336,
+ "step": 173
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 0.00016704545454545455,
+ "loss": 2.1231,
+ "step": 174
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 0.0001668560606060606,
+ "loss": 2.1143,
+ "step": 175
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 0.0001666666666666667,
+ "loss": 2.1316,
+ "step": 176
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 0.00016647727272727273,
+ "loss": 2.1281,
+ "step": 177
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 0.0001662878787878788,
+ "loss": 2.136,
+ "step": 178
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 0.00016609848484848484,
+ "loss": 2.1279,
+ "step": 179
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 0.00016590909090909094,
+ "loss": 2.1421,
+ "step": 180
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 0.00016571969696969698,
+ "loss": 2.1541,
+ "step": 181
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 0.00016553030303030305,
+ "loss": 2.1293,
+ "step": 182
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 0.0001653409090909091,
+ "loss": 2.1294,
+ "step": 183
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 0.00016515151515151516,
+ "loss": 2.1459,
+ "step": 184
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 0.00016496212121212123,
+ "loss": 2.1113,
+ "step": 185
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 0.00016477272727272727,
+ "loss": 2.1394,
+ "step": 186
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 0.00016458333333333334,
+ "loss": 2.1321,
+ "step": 187
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 0.0001643939393939394,
+ "loss": 2.148,
+ "step": 188
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 0.00016420454545454548,
+ "loss": 2.1631,
+ "step": 189
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 0.00016401515151515152,
+ "loss": 2.1276,
+ "step": 190
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 0.0001638257575757576,
+ "loss": 2.0706,
+ "step": 191
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 0.00016363636363636366,
+ "loss": 2.127,
+ "step": 192
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 0.0001634469696969697,
+ "loss": 2.1449,
+ "step": 193
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 0.00016325757575757577,
+ "loss": 2.1204,
+ "step": 194
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 0.0001630681818181818,
+ "loss": 2.0904,
+ "step": 195
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 0.0001628787878787879,
+ "loss": 2.1129,
+ "step": 196
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 0.00016268939393939395,
+ "loss": 2.1036,
+ "step": 197
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 0.00016250000000000002,
+ "loss": 2.1509,
+ "step": 198
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 0.00016231060606060606,
+ "loss": 2.1239,
+ "step": 199
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 0.00016212121212121213,
+ "loss": 2.145,
+ "step": 200
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 0.0001619318181818182,
+ "loss": 2.1221,
+ "step": 201
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 0.00016174242424242424,
+ "loss": 2.1181,
+ "step": 202
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 0.0001615530303030303,
+ "loss": 2.1306,
+ "step": 203
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 0.00016136363636363635,
+ "loss": 2.0199,
+ "step": 204
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 0.00016117424242424245,
+ "loss": 2.1178,
+ "step": 205
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 0.0001609848484848485,
+ "loss": 2.1584,
+ "step": 206
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 0.00016079545454545456,
+ "loss": 2.0872,
+ "step": 207
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 0.0001606060606060606,
+ "loss": 2.1033,
+ "step": 208
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 0.00016041666666666667,
+ "loss": 2.1381,
+ "step": 209
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 0.00016022727272727274,
+ "loss": 2.1127,
+ "step": 210
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 0.00016003787878787878,
+ "loss": 2.1077,
+ "step": 211
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 0.00015984848484848485,
+ "loss": 2.0984,
+ "step": 212
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 0.00015965909090909092,
+ "loss": 2.0994,
+ "step": 213
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 0.000159469696969697,
+ "loss": 2.096,
+ "step": 214
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 0.00015928030303030303,
+ "loss": 2.0909,
+ "step": 215
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 0.0001590909090909091,
+ "loss": 2.118,
+ "step": 216
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 0.00015890151515151517,
+ "loss": 2.0783,
+ "step": 217
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 0.0001587121212121212,
+ "loss": 2.0876,
+ "step": 218
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 0.00015852272727272728,
+ "loss": 2.0581,
+ "step": 219
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 0.00015833333333333332,
+ "loss": 2.0548,
+ "step": 220
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 0.00015814393939393942,
+ "loss": 2.0595,
+ "step": 221
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 0.00015795454545454546,
+ "loss": 2.0719,
+ "step": 222
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 0.00015776515151515153,
+ "loss": 2.0903,
+ "step": 223
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 0.00015757575757575757,
+ "loss": 2.0941,
+ "step": 224
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 0.00015738636363636364,
+ "loss": 2.0926,
+ "step": 225
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 0.0001571969696969697,
+ "loss": 2.0816,
+ "step": 226
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 0.00015700757575757575,
+ "loss": 2.0894,
+ "step": 227
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 0.00015681818181818182,
+ "loss": 2.0798,
+ "step": 228
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 0.0001566287878787879,
+ "loss": 2.0672,
+ "step": 229
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 0.00015643939393939396,
+ "loss": 2.0787,
+ "step": 230
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 0.00015625,
+ "loss": 2.0611,
+ "step": 231
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 0.00015606060606060607,
+ "loss": 2.0805,
+ "step": 232
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 0.00015587121212121211,
+ "loss": 2.053,
+ "step": 233
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 0.00015568181818181818,
+ "loss": 2.0575,
+ "step": 234
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 0.00015549242424242425,
+ "loss": 2.0459,
+ "step": 235
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 0.0001553030303030303,
+ "loss": 2.0635,
+ "step": 236
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 0.00015511363636363636,
+ "loss": 2.0335,
+ "step": 237
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 0.00015492424242424243,
+ "loss": 2.0681,
+ "step": 238
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 0.0001547348484848485,
+ "loss": 2.0748,
+ "step": 239
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 0.00015454545454545454,
+ "loss": 2.1091,
+ "step": 240
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 0.0001543560606060606,
+ "loss": 2.0732,
+ "step": 241
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 0.00015416666666666668,
+ "loss": 2.0746,
+ "step": 242
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 0.00015397727272727272,
+ "loss": 2.0306,
+ "step": 243
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 0.0001537878787878788,
+ "loss": 2.0864,
+ "step": 244
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 0.00015359848484848484,
+ "loss": 2.0664,
+ "step": 245
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 0.00015340909090909093,
+ "loss": 2.0801,
+ "step": 246
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 0.00015321969696969697,
+ "loss": 2.0799,
+ "step": 247
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 0.00015303030303030304,
+ "loss": 2.0621,
+ "step": 248
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 0.00015284090909090909,
+ "loss": 2.0687,
+ "step": 249
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 0.00015265151515151515,
+ "loss": 2.018,
+ "step": 250
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 0.00015246212121212122,
+ "loss": 2.0256,
+ "step": 251
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 0.00015227272727272727,
+ "loss": 2.0736,
+ "step": 252
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 0.00015208333333333333,
+ "loss": 2.0609,
+ "step": 253
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 0.0001518939393939394,
+ "loss": 2.0539,
+ "step": 254
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 0.00015170454545454547,
+ "loss": 2.0282,
+ "step": 255
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 0.00015151515151515152,
+ "loss": 2.0417,
+ "step": 256
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 0.00015132575757575758,
+ "loss": 2.0333,
+ "step": 257
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 0.00015113636363636365,
+ "loss": 2.0428,
+ "step": 258
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 0.00015094696969696972,
+ "loss": 2.045,
+ "step": 259
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 0.00015075757575757576,
+ "loss": 2.0463,
+ "step": 260
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 0.0001505681818181818,
+ "loss": 2.0539,
+ "step": 261
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 0.0001503787878787879,
+ "loss": 2.0184,
+ "step": 262
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 0.00015018939393939394,
+ "loss": 2.0858,
+ "step": 263
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 0.00015000000000000001,
+ "loss": 2.0239,
+ "step": 264
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 0.00014981060606060606,
+ "loss": 2.0425,
+ "step": 265
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 0.00014962121212121213,
+ "loss": 2.0263,
+ "step": 266
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 0.0001494318181818182,
+ "loss": 2.042,
+ "step": 267
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 0.00014924242424242426,
+ "loss": 2.026,
+ "step": 268
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 0.0001490530303030303,
+ "loss": 2.0411,
+ "step": 269
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 0.00014886363636363635,
+ "loss": 2.028,
+ "step": 270
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 0.00014867424242424244,
+ "loss": 2.0172,
+ "step": 271
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 0.00014848484848484849,
+ "loss": 2.0196,
+ "step": 272
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 0.00014829545454545455,
+ "loss": 2.0142,
+ "step": 273
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 0.0001481060606060606,
+ "loss": 2.0265,
+ "step": 274
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 0.0001479166666666667,
+ "loss": 2.0353,
+ "step": 275
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 0.00014772727272727274,
+ "loss": 2.0327,
+ "step": 276
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 0.0001475378787878788,
+ "loss": 2.0188,
+ "step": 277
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 0.00014734848484848485,
+ "loss": 1.9987,
+ "step": 278
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 0.00014715909090909092,
+ "loss": 2.0141,
+ "step": 279
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 0.00014696969696969698,
+ "loss": 2.0403,
+ "step": 280
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 0.00014678030303030303,
+ "loss": 1.9977,
+ "step": 281
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 0.0001465909090909091,
+ "loss": 1.9674,
+ "step": 282
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 0.00014640151515151517,
+ "loss": 1.9984,
+ "step": 283
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 0.00014621212121212123,
+ "loss": 1.9796,
+ "step": 284
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 0.00014602272727272728,
+ "loss": 2.0139,
+ "step": 285
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 0.00014583333333333335,
+ "loss": 1.9866,
+ "step": 286
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 0.00014564393939393941,
+ "loss": 2.0208,
+ "step": 287
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 0.00014545454545454546,
+ "loss": 1.9844,
+ "step": 288
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 0.00014526515151515153,
+ "loss": 2.0082,
+ "step": 289
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 0.00014507575757575757,
+ "loss": 1.984,
+ "step": 290
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 0.00014488636363636366,
+ "loss": 2.0015,
+ "step": 291
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 0.0001446969696969697,
+ "loss": 2.0209,
+ "step": 292
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 0.00014450757575757578,
+ "loss": 1.9728,
+ "step": 293
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 0.00014431818181818182,
+ "loss": 2.0032,
+ "step": 294
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 0.00014412878787878789,
+ "loss": 1.9641,
+ "step": 295
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 0.00014393939393939396,
+ "loss": 1.9945,
+ "step": 296
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 0.00014375,
+ "loss": 1.9658,
+ "step": 297
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 0.00014356060606060607,
+ "loss": 1.9907,
+ "step": 298
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 0.0001433712121212121,
+ "loss": 1.9935,
+ "step": 299
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 0.0001431818181818182,
+ "loss": 1.9897,
+ "step": 300
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 0.00014299242424242425,
+ "loss": 1.984,
+ "step": 301
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 0.00014280303030303032,
+ "loss": 1.9581,
+ "step": 302
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 0.00014261363636363636,
+ "loss": 1.9893,
+ "step": 303
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 0.00014242424242424243,
+ "loss": 1.9568,
+ "step": 304
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 0.0001422348484848485,
+ "loss": 1.98,
+ "step": 305
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 0.00014204545454545454,
+ "loss": 1.9519,
+ "step": 306
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 0.0001418560606060606,
+ "loss": 1.9693,
+ "step": 307
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 0.00014166666666666668,
+ "loss": 1.9866,
+ "step": 308
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 0.00014147727272727275,
+ "loss": 1.9508,
+ "step": 309
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 0.0001412878787878788,
+ "loss": 1.9653,
+ "step": 310
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 0.00014109848484848486,
+ "loss": 1.9991,
+ "step": 311
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 0.00014090909090909093,
+ "loss": 1.9442,
+ "step": 312
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 0.00014071969696969697,
+ "loss": 1.9807,
+ "step": 313
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 0.00014053030303030304,
+ "loss": 1.9958,
+ "step": 314
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 0.00014034090909090908,
+ "loss": 1.9459,
+ "step": 315
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 0.00014015151515151518,
+ "loss": 1.9508,
+ "step": 316
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 0.00013996212121212122,
+ "loss": 1.9933,
+ "step": 317
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 0.0001397727272727273,
+ "loss": 1.9703,
+ "step": 318
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 0.00013958333333333333,
+ "loss": 1.965,
+ "step": 319
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 0.0001393939393939394,
+ "loss": 1.9264,
+ "step": 320
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 0.00013920454545454547,
+ "loss": 1.9688,
+ "step": 321
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 0.0001390151515151515,
+ "loss": 1.9901,
+ "step": 322
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 0.00013882575757575758,
+ "loss": 1.9363,
+ "step": 323
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 0.00013863636363636365,
+ "loss": 1.9269,
+ "step": 324
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 0.00013844696969696972,
+ "loss": 1.9688,
+ "step": 325
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 0.00013825757575757576,
+ "loss": 1.9758,
+ "step": 326
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 0.00013806818181818183,
+ "loss": 1.9414,
+ "step": 327
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 0.0001378787878787879,
+ "loss": 1.9397,
+ "step": 328
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 0.00013768939393939394,
+ "loss": 1.9032,
+ "step": 329
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 0.0001375,
+ "loss": 1.9777,
+ "step": 330
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 0.00013731060606060605,
+ "loss": 1.9173,
+ "step": 331
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 0.00013712121212121212,
+ "loss": 1.9307,
+ "step": 332
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 0.0001369318181818182,
+ "loss": 1.9611,
+ "step": 333
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 0.00013674242424242426,
+ "loss": 1.9698,
+ "step": 334
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 0.0001365530303030303,
+ "loss": 1.9619,
+ "step": 335
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 0.00013636363636363637,
+ "loss": 1.9322,
+ "step": 336
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 0.00013617424242424244,
+ "loss": 1.9441,
+ "step": 337
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 0.00013598484848484848,
+ "loss": 1.9563,
+ "step": 338
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 0.00013579545454545455,
+ "loss": 1.9283,
+ "step": 339
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 0.0001356060606060606,
+ "loss": 1.9508,
+ "step": 340
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 0.0001354166666666667,
+ "loss": 1.9285,
+ "step": 341
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 0.00013522727272727273,
+ "loss": 1.9295,
+ "step": 342
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 0.0001350378787878788,
+ "loss": 1.9272,
+ "step": 343
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 0.00013484848484848484,
+ "loss": 1.905,
+ "step": 344
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 0.00013465909090909094,
+ "loss": 1.9409,
+ "step": 345
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 0.00013446969696969698,
+ "loss": 1.9674,
+ "step": 346
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 0.00013428030303030302,
+ "loss": 1.9278,
+ "step": 347
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 0.0001340909090909091,
+ "loss": 1.9136,
+ "step": 348
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 0.00013390151515151516,
+ "loss": 1.9143,
+ "step": 349
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 0.00013371212121212123,
+ "loss": 1.9381,
+ "step": 350
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 0.00013352272727272727,
+ "loss": 1.9136,
+ "step": 351
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 0.00013333333333333334,
+ "loss": 1.9103,
+ "step": 352
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 0.0001331439393939394,
+ "loss": 1.9027,
+ "step": 353
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 0.00013295454545454548,
+ "loss": 1.8674,
+ "step": 354
+ },
+ {
+ "epoch": 1.01,
+ "learning_rate": 0.00013276515151515152,
+ "loss": 1.886,
+ "step": 355
+ },
+ {
+ "epoch": 1.01,
+ "learning_rate": 0.00013257575757575756,
+ "loss": 1.887,
+ "step": 356
+ },
+ {
+ "epoch": 1.01,
+ "learning_rate": 0.00013238636363636366,
+ "loss": 1.87,
+ "step": 357
+ },
+ {
+ "epoch": 1.02,
+ "learning_rate": 0.0001321969696969697,
+ "loss": 1.8715,
+ "step": 358
+ },
+ {
+ "epoch": 1.02,
+ "learning_rate": 0.00013200757575757577,
+ "loss": 1.8993,
+ "step": 359
+ },
+ {
+ "epoch": 1.02,
+ "learning_rate": 0.0001318181818181818,
+ "loss": 1.8844,
+ "step": 360
+ },
+ {
+ "epoch": 1.02,
+ "learning_rate": 0.0001316287878787879,
+ "loss": 1.8965,
+ "step": 361
+ },
+ {
+ "epoch": 1.03,
+ "learning_rate": 0.00013143939393939395,
+ "loss": 1.8956,
+ "step": 362
+ },
+ {
+ "epoch": 1.03,
+ "learning_rate": 0.00013125000000000002,
+ "loss": 1.869,
+ "step": 363
+ },
+ {
+ "epoch": 1.03,
+ "learning_rate": 0.00013106060606060606,
+ "loss": 1.8702,
+ "step": 364
+ },
+ {
+ "epoch": 1.04,
+ "learning_rate": 0.00013087121212121213,
+ "loss": 1.8962,
+ "step": 365
+ },
+ {
+ "epoch": 1.04,
+ "learning_rate": 0.0001306818181818182,
+ "loss": 1.8613,
+ "step": 366
+ },
+ {
+ "epoch": 1.04,
+ "learning_rate": 0.00013049242424242424,
+ "loss": 1.8845,
+ "step": 367
+ },
+ {
+ "epoch": 1.04,
+ "learning_rate": 0.0001303030303030303,
+ "loss": 1.8689,
+ "step": 368
+ },
+ {
+ "epoch": 1.05,
+ "learning_rate": 0.00013011363636363635,
+ "loss": 1.9059,
+ "step": 369
+ },
+ {
+ "epoch": 1.05,
+ "learning_rate": 0.00012992424242424245,
+ "loss": 1.9082,
+ "step": 370
+ },
+ {
+ "epoch": 1.05,
+ "learning_rate": 0.0001297348484848485,
+ "loss": 1.8918,
+ "step": 371
+ },
+ {
+ "epoch": 1.06,
+ "learning_rate": 0.00012954545454545456,
+ "loss": 1.8657,
+ "step": 372
+ },
+ {
+ "epoch": 1.06,
+ "learning_rate": 0.0001293560606060606,
+ "loss": 1.8909,
+ "step": 373
+ },
+ {
+ "epoch": 1.06,
+ "learning_rate": 0.00012916666666666667,
+ "loss": 1.8649,
+ "step": 374
+ },
+ {
+ "epoch": 1.06,
+ "learning_rate": 0.00012897727272727274,
+ "loss": 1.833,
+ "step": 375
+ },
+ {
+ "epoch": 1.07,
+ "learning_rate": 0.00012878787878787878,
+ "loss": 1.8815,
+ "step": 376
+ },
+ {
+ "epoch": 1.07,
+ "learning_rate": 0.00012859848484848485,
+ "loss": 1.8646,
+ "step": 377
+ },
+ {
+ "epoch": 1.07,
+ "learning_rate": 0.00012840909090909092,
+ "loss": 1.846,
+ "step": 378
+ },
+ {
+ "epoch": 1.08,
+ "learning_rate": 0.000128219696969697,
+ "loss": 1.8631,
+ "step": 379
+ },
+ {
+ "epoch": 1.08,
+ "learning_rate": 0.00012803030303030303,
+ "loss": 1.917,
+ "step": 380
+ },
+ {
+ "epoch": 1.08,
+ "learning_rate": 0.0001278409090909091,
+ "loss": 1.9068,
+ "step": 381
+ },
+ {
+ "epoch": 1.08,
+ "learning_rate": 0.00012765151515151517,
+ "loss": 1.8772,
+ "step": 382
+ },
+ {
+ "epoch": 1.09,
+ "learning_rate": 0.0001274621212121212,
+ "loss": 1.8414,
+ "step": 383
+ },
+ {
+ "epoch": 1.09,
+ "learning_rate": 0.00012727272727272728,
+ "loss": 1.9003,
+ "step": 384
+ },
+ {
+ "epoch": 1.09,
+ "learning_rate": 0.00012708333333333332,
+ "loss": 1.8415,
+ "step": 385
+ },
+ {
+ "epoch": 1.1,
+ "learning_rate": 0.00012689393939393942,
+ "loss": 1.8491,
+ "step": 386
+ },
+ {
+ "epoch": 1.1,
+ "learning_rate": 0.00012670454545454546,
+ "loss": 1.8875,
+ "step": 387
+ },
+ {
+ "epoch": 1.1,
+ "learning_rate": 0.00012651515151515153,
+ "loss": 1.8629,
+ "step": 388
+ },
+ {
+ "epoch": 1.1,
+ "learning_rate": 0.00012632575757575757,
+ "loss": 1.8378,
+ "step": 389
+ },
+ {
+ "epoch": 1.11,
+ "learning_rate": 0.00012613636363636364,
+ "loss": 1.8442,
+ "step": 390
+ },
+ {
+ "epoch": 1.11,
+ "learning_rate": 0.0001259469696969697,
+ "loss": 1.8587,
+ "step": 391
+ },
+ {
+ "epoch": 1.11,
+ "learning_rate": 0.00012575757575757575,
+ "loss": 1.8659,
+ "step": 392
+ },
+ {
+ "epoch": 1.11,
+ "learning_rate": 0.00012556818181818182,
+ "loss": 1.8271,
+ "step": 393
+ },
+ {
+ "epoch": 1.12,
+ "learning_rate": 0.0001253787878787879,
+ "loss": 1.8692,
+ "step": 394
+ },
+ {
+ "epoch": 1.12,
+ "learning_rate": 0.00012518939393939396,
+ "loss": 1.8071,
+ "step": 395
+ },
+ {
+ "epoch": 1.12,
+ "learning_rate": 0.000125,
+ "loss": 1.8564,
+ "step": 396
+ },
+ {
+ "epoch": 1.13,
+ "learning_rate": 0.00012481060606060607,
+ "loss": 1.8891,
+ "step": 397
+ },
+ {
+ "epoch": 1.13,
+ "learning_rate": 0.00012462121212121211,
+ "loss": 1.8173,
+ "step": 398
+ },
+ {
+ "epoch": 1.13,
+ "learning_rate": 0.00012443181818181818,
+ "loss": 1.8653,
+ "step": 399
+ },
+ {
+ "epoch": 1.13,
+ "learning_rate": 0.00012424242424242425,
+ "loss": 1.8843,
+ "step": 400
+ },
+ {
+ "epoch": 1.14,
+ "learning_rate": 0.0001240530303030303,
+ "loss": 1.8527,
+ "step": 401
+ },
+ {
+ "epoch": 1.14,
+ "learning_rate": 0.00012386363636363636,
+ "loss": 1.8352,
+ "step": 402
+ },
+ {
+ "epoch": 1.14,
+ "learning_rate": 0.00012367424242424243,
+ "loss": 1.866,
+ "step": 403
+ },
+ {
+ "epoch": 1.15,
+ "learning_rate": 0.0001234848484848485,
+ "loss": 1.8557,
+ "step": 404
+ },
+ {
+ "epoch": 1.15,
+ "learning_rate": 0.00012329545454545454,
+ "loss": 1.8284,
+ "step": 405
+ },
+ {
+ "epoch": 1.15,
+ "learning_rate": 0.0001231060606060606,
+ "loss": 1.8359,
+ "step": 406
+ },
+ {
+ "epoch": 1.15,
+ "learning_rate": 0.00012291666666666668,
+ "loss": 1.8437,
+ "step": 407
+ },
+ {
+ "epoch": 1.16,
+ "learning_rate": 0.00012272727272727272,
+ "loss": 1.8256,
+ "step": 408
+ },
+ {
+ "epoch": 1.16,
+ "learning_rate": 0.0001225378787878788,
+ "loss": 1.8297,
+ "step": 409
+ },
+ {
+ "epoch": 1.16,
+ "learning_rate": 0.00012234848484848484,
+ "loss": 1.8515,
+ "step": 410
+ },
+ {
+ "epoch": 1.17,
+ "learning_rate": 0.00012215909090909093,
+ "loss": 1.8198,
+ "step": 411
+ },
+ {
+ "epoch": 1.17,
+ "learning_rate": 0.00012196969696969697,
+ "loss": 1.7809,
+ "step": 412
+ },
+ {
+ "epoch": 1.17,
+ "learning_rate": 0.00012178030303030303,
+ "loss": 1.8438,
+ "step": 413
+ },
+ {
+ "epoch": 1.17,
+ "learning_rate": 0.00012159090909090908,
+ "loss": 1.8497,
+ "step": 414
+ },
+ {
+ "epoch": 1.18,
+ "learning_rate": 0.00012140151515151517,
+ "loss": 1.8463,
+ "step": 415
+ },
+ {
+ "epoch": 1.18,
+ "learning_rate": 0.00012121212121212122,
+ "loss": 1.7768,
+ "step": 416
+ },
+ {
+ "epoch": 1.18,
+ "learning_rate": 0.00012102272727272728,
+ "loss": 1.8561,
+ "step": 417
+ },
+ {
+ "epoch": 1.19,
+ "learning_rate": 0.00012083333333333333,
+ "loss": 1.863,
+ "step": 418
+ },
+ {
+ "epoch": 1.19,
+ "learning_rate": 0.0001206439393939394,
+ "loss": 1.8193,
+ "step": 419
+ },
+ {
+ "epoch": 1.19,
+ "learning_rate": 0.00012045454545454546,
+ "loss": 1.7732,
+ "step": 420
+ },
+ {
+ "epoch": 1.19,
+ "learning_rate": 0.00012026515151515151,
+ "loss": 1.7728,
+ "step": 421
+ },
+ {
+ "epoch": 1.2,
+ "learning_rate": 0.00012007575757575757,
+ "loss": 1.8113,
+ "step": 422
+ },
+ {
+ "epoch": 1.2,
+ "learning_rate": 0.00011988636363636365,
+ "loss": 1.7976,
+ "step": 423
+ },
+ {
+ "epoch": 1.2,
+ "learning_rate": 0.00011969696969696971,
+ "loss": 1.786,
+ "step": 424
+ },
+ {
+ "epoch": 1.21,
+ "learning_rate": 0.00011950757575757576,
+ "loss": 1.8019,
+ "step": 425
+ },
+ {
+ "epoch": 1.21,
+ "learning_rate": 0.00011931818181818182,
+ "loss": 1.786,
+ "step": 426
+ },
+ {
+ "epoch": 1.21,
+ "learning_rate": 0.00011912878787878789,
+ "loss": 1.8102,
+ "step": 427
+ },
+ {
+ "epoch": 1.21,
+ "learning_rate": 0.00011893939393939394,
+ "loss": 1.7828,
+ "step": 428
+ },
+ {
+ "epoch": 1.22,
+ "learning_rate": 0.00011875,
+ "loss": 1.8498,
+ "step": 429
+ },
+ {
+ "epoch": 1.22,
+ "learning_rate": 0.00011856060606060606,
+ "loss": 1.7983,
+ "step": 430
+ },
+ {
+ "epoch": 1.22,
+ "learning_rate": 0.00011837121212121211,
+ "loss": 1.7863,
+ "step": 431
+ },
+ {
+ "epoch": 1.23,
+ "learning_rate": 0.0001181818181818182,
+ "loss": 1.8171,
+ "step": 432
+ },
+ {
+ "epoch": 1.23,
+ "learning_rate": 0.00011799242424242425,
+ "loss": 1.8143,
+ "step": 433
+ },
+ {
+ "epoch": 1.23,
+ "learning_rate": 0.0001178030303030303,
+ "loss": 1.7815,
+ "step": 434
+ },
+ {
+ "epoch": 1.23,
+ "learning_rate": 0.00011761363636363636,
+ "loss": 1.7652,
+ "step": 435
+ },
+ {
+ "epoch": 1.24,
+ "learning_rate": 0.00011742424242424244,
+ "loss": 1.8242,
+ "step": 436
+ },
+ {
+ "epoch": 1.24,
+ "learning_rate": 0.00011723484848484849,
+ "loss": 1.7789,
+ "step": 437
+ },
+ {
+ "epoch": 1.24,
+ "learning_rate": 0.00011704545454545454,
+ "loss": 1.7549,
+ "step": 438
+ },
+ {
+ "epoch": 1.25,
+ "learning_rate": 0.0001168560606060606,
+ "loss": 1.7528,
+ "step": 439
+ },
+ {
+ "epoch": 1.25,
+ "learning_rate": 0.00011666666666666668,
+ "loss": 1.7443,
+ "step": 440
+ },
+ {
+ "epoch": 1.25,
+ "learning_rate": 0.00011647727272727273,
+ "loss": 1.7911,
+ "step": 441
+ },
+ {
+ "epoch": 1.25,
+ "learning_rate": 0.00011628787878787879,
+ "loss": 1.7848,
+ "step": 442
+ },
+ {
+ "epoch": 1.26,
+ "learning_rate": 0.00011609848484848485,
+ "loss": 1.8137,
+ "step": 443
+ },
+ {
+ "epoch": 1.26,
+ "learning_rate": 0.00011590909090909093,
+ "loss": 1.791,
+ "step": 444
+ },
+ {
+ "epoch": 1.26,
+ "learning_rate": 0.00011571969696969698,
+ "loss": 1.7921,
+ "step": 445
+ },
+ {
+ "epoch": 1.27,
+ "learning_rate": 0.00011553030303030304,
+ "loss": 1.772,
+ "step": 446
+ },
+ {
+ "epoch": 1.27,
+ "learning_rate": 0.00011534090909090908,
+ "loss": 1.776,
+ "step": 447
+ },
+ {
+ "epoch": 1.27,
+ "learning_rate": 0.00011515151515151516,
+ "loss": 1.7948,
+ "step": 448
+ },
+ {
+ "epoch": 1.27,
+ "learning_rate": 0.00011496212121212122,
+ "loss": 1.8187,
+ "step": 449
+ },
+ {
+ "epoch": 1.28,
+ "learning_rate": 0.00011477272727272728,
+ "loss": 1.7436,
+ "step": 450
+ },
+ {
+ "epoch": 1.28,
+ "learning_rate": 0.00011458333333333333,
+ "loss": 1.7326,
+ "step": 451
+ },
+ {
+ "epoch": 1.28,
+ "learning_rate": 0.00011439393939393941,
+ "loss": 1.8005,
+ "step": 452
+ },
+ {
+ "epoch": 1.29,
+ "learning_rate": 0.00011420454545454547,
+ "loss": 1.8088,
+ "step": 453
+ },
+ {
+ "epoch": 1.29,
+ "learning_rate": 0.00011401515151515153,
+ "loss": 1.7632,
+ "step": 454
+ },
+ {
+ "epoch": 1.29,
+ "learning_rate": 0.00011382575757575758,
+ "loss": 1.7848,
+ "step": 455
+ },
+ {
+ "epoch": 1.29,
+ "learning_rate": 0.00011363636363636365,
+ "loss": 1.7756,
+ "step": 456
+ },
+ {
+ "epoch": 1.3,
+ "learning_rate": 0.0001134469696969697,
+ "loss": 1.7964,
+ "step": 457
+ },
+ {
+ "epoch": 1.3,
+ "learning_rate": 0.00011325757575757576,
+ "loss": 1.7604,
+ "step": 458
+ },
+ {
+ "epoch": 1.3,
+ "learning_rate": 0.00011306818181818182,
+ "loss": 1.7914,
+ "step": 459
+ },
+ {
+ "epoch": 1.3,
+ "learning_rate": 0.0001128787878787879,
+ "loss": 1.8059,
+ "step": 460
+ },
+ {
+ "epoch": 1.31,
+ "learning_rate": 0.00011268939393939395,
+ "loss": 1.7647,
+ "step": 461
+ },
+ {
+ "epoch": 1.31,
+ "learning_rate": 0.00011250000000000001,
+ "loss": 1.7526,
+ "step": 462
+ },
+ {
+ "epoch": 1.31,
+ "learning_rate": 0.00011231060606060607,
+ "loss": 1.7736,
+ "step": 463
+ },
+ {
+ "epoch": 1.32,
+ "learning_rate": 0.00011212121212121212,
+ "loss": 1.7449,
+ "step": 464
+ },
+ {
+ "epoch": 1.32,
+ "learning_rate": 0.00011193181818181819,
+ "loss": 1.7636,
+ "step": 465
+ },
+ {
+ "epoch": 1.32,
+ "learning_rate": 0.00011174242424242425,
+ "loss": 1.7846,
+ "step": 466
+ },
+ {
+ "epoch": 1.32,
+ "learning_rate": 0.0001115530303030303,
+ "loss": 1.78,
+ "step": 467
+ },
+ {
+ "epoch": 1.33,
+ "learning_rate": 0.00011136363636363636,
+ "loss": 1.7828,
+ "step": 468
+ },
+ {
+ "epoch": 1.33,
+ "learning_rate": 0.00011117424242424244,
+ "loss": 1.729,
+ "step": 469
+ },
+ {
+ "epoch": 1.33,
+ "learning_rate": 0.0001109848484848485,
+ "loss": 1.7145,
+ "step": 470
+ },
+ {
+ "epoch": 1.34,
+ "learning_rate": 0.00011079545454545455,
+ "loss": 1.7189,
+ "step": 471
+ },
+ {
+ "epoch": 1.34,
+ "learning_rate": 0.00011060606060606061,
+ "loss": 1.7628,
+ "step": 472
+ },
+ {
+ "epoch": 1.34,
+ "learning_rate": 0.00011041666666666668,
+ "loss": 1.7399,
+ "step": 473
+ },
+ {
+ "epoch": 1.34,
+ "learning_rate": 0.00011022727272727273,
+ "loss": 1.7561,
+ "step": 474
+ },
+ {
+ "epoch": 1.35,
+ "learning_rate": 0.00011003787878787879,
+ "loss": 1.7979,
+ "step": 475
+ },
+ {
+ "epoch": 1.35,
+ "learning_rate": 0.00010984848484848484,
+ "loss": 1.7673,
+ "step": 476
+ },
+ {
+ "epoch": 1.35,
+ "learning_rate": 0.00010965909090909093,
+ "loss": 1.777,
+ "step": 477
+ },
+ {
+ "epoch": 1.36,
+ "learning_rate": 0.00010946969696969698,
+ "loss": 1.7042,
+ "step": 478
+ },
+ {
+ "epoch": 1.36,
+ "learning_rate": 0.00010928030303030304,
+ "loss": 1.7764,
+ "step": 479
+ },
+ {
+ "epoch": 1.36,
+ "learning_rate": 0.00010909090909090909,
+ "loss": 1.6993,
+ "step": 480
+ },
+ {
+ "epoch": 1.36,
+ "learning_rate": 0.00010890151515151516,
+ "loss": 1.7688,
+ "step": 481
+ },
+ {
+ "epoch": 1.37,
+ "learning_rate": 0.00010871212121212122,
+ "loss": 1.7428,
+ "step": 482
+ },
+ {
+ "epoch": 1.37,
+ "learning_rate": 0.00010852272727272727,
+ "loss": 1.675,
+ "step": 483
+ },
+ {
+ "epoch": 1.37,
+ "learning_rate": 0.00010833333333333333,
+ "loss": 1.7183,
+ "step": 484
+ },
+ {
+ "epoch": 1.38,
+ "learning_rate": 0.00010814393939393941,
+ "loss": 1.7305,
+ "step": 485
+ },
+ {
+ "epoch": 1.38,
+ "learning_rate": 0.00010795454545454547,
+ "loss": 1.7541,
+ "step": 486
+ },
+ {
+ "epoch": 1.38,
+ "learning_rate": 0.00010776515151515152,
+ "loss": 1.7074,
+ "step": 487
+ },
+ {
+ "epoch": 1.38,
+ "learning_rate": 0.00010757575757575758,
+ "loss": 1.7093,
+ "step": 488
+ },
+ {
+ "epoch": 1.39,
+ "learning_rate": 0.00010738636363636365,
+ "loss": 1.7354,
+ "step": 489
+ },
+ {
+ "epoch": 1.39,
+ "learning_rate": 0.0001071969696969697,
+ "loss": 1.7415,
+ "step": 490
+ },
+ {
+ "epoch": 1.39,
+ "learning_rate": 0.00010700757575757576,
+ "loss": 1.72,
+ "step": 491
+ },
+ {
+ "epoch": 1.4,
+ "learning_rate": 0.00010681818181818181,
+ "loss": 1.7453,
+ "step": 492
+ },
+ {
+ "epoch": 1.4,
+ "learning_rate": 0.0001066287878787879,
+ "loss": 1.7077,
+ "step": 493
+ },
+ {
+ "epoch": 1.4,
+ "learning_rate": 0.00010643939393939395,
+ "loss": 1.6936,
+ "step": 494
+ },
+ {
+ "epoch": 1.4,
+ "learning_rate": 0.00010625000000000001,
+ "loss": 1.7616,
+ "step": 495
+ },
+ {
+ "epoch": 1.41,
+ "learning_rate": 0.00010606060606060606,
+ "loss": 1.7749,
+ "step": 496
+ },
+ {
+ "epoch": 1.41,
+ "learning_rate": 0.00010587121212121212,
+ "loss": 1.7375,
+ "step": 497
+ },
+ {
+ "epoch": 1.41,
+ "learning_rate": 0.00010568181818181819,
+ "loss": 1.7203,
+ "step": 498
+ },
+ {
+ "epoch": 1.42,
+ "learning_rate": 0.00010549242424242424,
+ "loss": 1.7148,
+ "step": 499
+ },
+ {
+ "epoch": 1.42,
+ "learning_rate": 0.0001053030303030303,
+ "loss": 1.7859,
+ "step": 500
+ },
+ {
+ "epoch": 1.42,
+ "learning_rate": 0.00010511363636363635,
+ "loss": 1.7478,
+ "step": 501
+ },
+ {
+ "epoch": 1.42,
+ "learning_rate": 0.00010492424242424244,
+ "loss": 1.7091,
+ "step": 502
+ },
+ {
+ "epoch": 1.43,
+ "learning_rate": 0.00010473484848484849,
+ "loss": 1.7112,
+ "step": 503
+ },
+ {
+ "epoch": 1.43,
+ "learning_rate": 0.00010454545454545455,
+ "loss": 1.6967,
+ "step": 504
+ },
+ {
+ "epoch": 1.43,
+ "learning_rate": 0.0001043560606060606,
+ "loss": 1.7431,
+ "step": 505
+ },
+ {
+ "epoch": 1.44,
+ "learning_rate": 0.00010416666666666667,
+ "loss": 1.7065,
+ "step": 506
+ },
+ {
+ "epoch": 1.44,
+ "learning_rate": 0.00010397727272727273,
+ "loss": 1.6955,
+ "step": 507
+ },
+ {
+ "epoch": 1.44,
+ "learning_rate": 0.00010378787878787878,
+ "loss": 1.7375,
+ "step": 508
+ },
+ {
+ "epoch": 1.44,
+ "learning_rate": 0.00010359848484848484,
+ "loss": 1.7056,
+ "step": 509
+ },
+ {
+ "epoch": 1.45,
+ "learning_rate": 0.00010340909090909092,
+ "loss": 1.7044,
+ "step": 510
+ },
+ {
+ "epoch": 1.45,
+ "learning_rate": 0.00010321969696969698,
+ "loss": 1.7204,
+ "step": 511
+ },
+ {
+ "epoch": 1.45,
+ "learning_rate": 0.00010303030303030303,
+ "loss": 1.6801,
+ "step": 512
+ },
+ {
+ "epoch": 1.46,
+ "learning_rate": 0.00010284090909090909,
+ "loss": 1.7381,
+ "step": 513
+ },
+ {
+ "epoch": 1.46,
+ "learning_rate": 0.00010265151515151516,
+ "loss": 1.7064,
+ "step": 514
+ },
+ {
+ "epoch": 1.46,
+ "learning_rate": 0.00010246212121212121,
+ "loss": 1.6973,
+ "step": 515
+ },
+ {
+ "epoch": 1.46,
+ "learning_rate": 0.00010227272727272727,
+ "loss": 1.7295,
+ "step": 516
+ },
+ {
+ "epoch": 1.47,
+ "learning_rate": 0.00010208333333333333,
+ "loss": 1.6991,
+ "step": 517
+ },
+ {
+ "epoch": 1.47,
+ "learning_rate": 0.00010189393939393941,
+ "loss": 1.6986,
+ "step": 518
+ },
+ {
+ "epoch": 1.47,
+ "learning_rate": 0.00010170454545454546,
+ "loss": 1.6989,
+ "step": 519
+ },
+ {
+ "epoch": 1.48,
+ "learning_rate": 0.00010151515151515152,
+ "loss": 1.7009,
+ "step": 520
+ },
+ {
+ "epoch": 1.48,
+ "learning_rate": 0.00010132575757575757,
+ "loss": 1.6919,
+ "step": 521
+ },
+ {
+ "epoch": 1.48,
+ "learning_rate": 0.00010113636363636366,
+ "loss": 1.6955,
+ "step": 522
+ },
+ {
+ "epoch": 1.48,
+ "learning_rate": 0.00010094696969696971,
+ "loss": 1.7177,
+ "step": 523
+ },
+ {
+ "epoch": 1.49,
+ "learning_rate": 0.00010075757575757576,
+ "loss": 1.715,
+ "step": 524
+ },
+ {
+ "epoch": 1.49,
+ "learning_rate": 0.00010056818181818181,
+ "loss": 1.6686,
+ "step": 525
+ },
+ {
+ "epoch": 1.49,
+ "learning_rate": 0.0001003787878787879,
+ "loss": 1.771,
+ "step": 526
+ },
+ {
+ "epoch": 1.5,
+ "learning_rate": 0.00010018939393939395,
+ "loss": 1.7024,
+ "step": 527
+ },
+ {
+ "epoch": 1.5,
+ "learning_rate": 0.0001,
+ "loss": 1.7016,
+ "step": 528
+ },
+ {
+ "epoch": 1.5,
+ "learning_rate": 9.981060606060606e-05,
+ "loss": 1.6501,
+ "step": 529
+ },
+ {
+ "epoch": 1.5,
+ "learning_rate": 9.962121212121213e-05,
+ "loss": 1.6903,
+ "step": 530
+ },
+ {
+ "epoch": 1.51,
+ "learning_rate": 9.943181818181819e-05,
+ "loss": 1.6806,
+ "step": 531
+ },
+ {
+ "epoch": 1.51,
+ "learning_rate": 9.924242424242425e-05,
+ "loss": 1.7096,
+ "step": 532
+ },
+ {
+ "epoch": 1.51,
+ "learning_rate": 9.90530303030303e-05,
+ "loss": 1.7307,
+ "step": 533
+ },
+ {
+ "epoch": 1.51,
+ "learning_rate": 9.886363636363637e-05,
+ "loss": 1.6871,
+ "step": 534
+ },
+ {
+ "epoch": 1.52,
+ "learning_rate": 9.867424242424242e-05,
+ "loss": 1.7457,
+ "step": 535
+ },
+ {
+ "epoch": 1.52,
+ "learning_rate": 9.848484848484849e-05,
+ "loss": 1.6867,
+ "step": 536
+ },
+ {
+ "epoch": 1.52,
+ "learning_rate": 9.829545454545455e-05,
+ "loss": 1.6789,
+ "step": 537
+ },
+ {
+ "epoch": 1.53,
+ "learning_rate": 9.810606060606061e-05,
+ "loss": 1.6403,
+ "step": 538
+ },
+ {
+ "epoch": 1.53,
+ "learning_rate": 9.791666666666667e-05,
+ "loss": 1.6697,
+ "step": 539
+ },
+ {
+ "epoch": 1.53,
+ "learning_rate": 9.772727272727274e-05,
+ "loss": 1.7293,
+ "step": 540
+ },
+ {
+ "epoch": 1.53,
+ "learning_rate": 9.75378787878788e-05,
+ "loss": 1.6998,
+ "step": 541
+ },
+ {
+ "epoch": 1.54,
+ "learning_rate": 9.734848484848485e-05,
+ "loss": 1.693,
+ "step": 542
+ },
+ {
+ "epoch": 1.54,
+ "learning_rate": 9.71590909090909e-05,
+ "loss": 1.664,
+ "step": 543
+ },
+ {
+ "epoch": 1.54,
+ "learning_rate": 9.696969696969698e-05,
+ "loss": 1.7061,
+ "step": 544
+ },
+ {
+ "epoch": 1.55,
+ "learning_rate": 9.678030303030303e-05,
+ "loss": 1.6631,
+ "step": 545
+ },
+ {
+ "epoch": 1.55,
+ "learning_rate": 9.65909090909091e-05,
+ "loss": 1.6343,
+ "step": 546
+ },
+ {
+ "epoch": 1.55,
+ "learning_rate": 9.640151515151516e-05,
+ "loss": 1.6939,
+ "step": 547
+ },
+ {
+ "epoch": 1.55,
+ "learning_rate": 9.621212121212123e-05,
+ "loss": 1.669,
+ "step": 548
+ },
+ {
+ "epoch": 1.56,
+ "learning_rate": 9.602272727272728e-05,
+ "loss": 1.6561,
+ "step": 549
+ },
+ {
+ "epoch": 1.56,
+ "learning_rate": 9.583333333333334e-05,
+ "loss": 1.6675,
+ "step": 550
+ },
+ {
+ "epoch": 1.56,
+ "learning_rate": 9.564393939393939e-05,
+ "loss": 1.7109,
+ "step": 551
+ },
+ {
+ "epoch": 1.57,
+ "learning_rate": 9.545454545454546e-05,
+ "loss": 1.693,
+ "step": 552
+ },
+ {
+ "epoch": 1.57,
+ "learning_rate": 9.526515151515152e-05,
+ "loss": 1.6557,
+ "step": 553
+ },
+ {
+ "epoch": 1.57,
+ "learning_rate": 9.507575757575759e-05,
+ "loss": 1.6642,
+ "step": 554
+ },
+ {
+ "epoch": 1.57,
+ "learning_rate": 9.488636363636364e-05,
+ "loss": 1.6674,
+ "step": 555
+ },
+ {
+ "epoch": 1.58,
+ "learning_rate": 9.469696969696971e-05,
+ "loss": 1.6492,
+ "step": 556
+ },
+ {
+ "epoch": 1.58,
+ "learning_rate": 9.450757575757577e-05,
+ "loss": 1.6915,
+ "step": 557
+ },
+ {
+ "epoch": 1.58,
+ "learning_rate": 9.431818181818182e-05,
+ "loss": 1.7028,
+ "step": 558
+ },
+ {
+ "epoch": 1.59,
+ "learning_rate": 9.412878787878788e-05,
+ "loss": 1.6749,
+ "step": 559
+ },
+ {
+ "epoch": 1.59,
+ "learning_rate": 9.393939393939395e-05,
+ "loss": 1.6526,
+ "step": 560
+ },
+ {
+ "epoch": 1.59,
+ "learning_rate": 9.375e-05,
+ "loss": 1.687,
+ "step": 561
+ },
+ {
+ "epoch": 1.59,
+ "learning_rate": 9.356060606060606e-05,
+ "loss": 1.6632,
+ "step": 562
+ },
+ {
+ "epoch": 1.6,
+ "learning_rate": 9.337121212121213e-05,
+ "loss": 1.7074,
+ "step": 563
+ },
+ {
+ "epoch": 1.6,
+ "learning_rate": 9.318181818181818e-05,
+ "loss": 1.6164,
+ "step": 564
+ },
+ {
+ "epoch": 1.6,
+ "learning_rate": 9.299242424242425e-05,
+ "loss": 1.6594,
+ "step": 565
+ },
+ {
+ "epoch": 1.61,
+ "learning_rate": 9.280303030303031e-05,
+ "loss": 1.6603,
+ "step": 566
+ },
+ {
+ "epoch": 1.61,
+ "learning_rate": 9.261363636363636e-05,
+ "loss": 1.6213,
+ "step": 567
+ },
+ {
+ "epoch": 1.61,
+ "learning_rate": 9.242424242424242e-05,
+ "loss": 1.6899,
+ "step": 568
+ },
+ {
+ "epoch": 1.61,
+ "learning_rate": 9.223484848484849e-05,
+ "loss": 1.6619,
+ "step": 569
+ },
+ {
+ "epoch": 1.62,
+ "learning_rate": 9.204545454545454e-05,
+ "loss": 1.7035,
+ "step": 570
+ },
+ {
+ "epoch": 1.62,
+ "learning_rate": 9.185606060606061e-05,
+ "loss": 1.6408,
+ "step": 571
+ },
+ {
+ "epoch": 1.62,
+ "learning_rate": 9.166666666666667e-05,
+ "loss": 1.6506,
+ "step": 572
+ },
+ {
+ "epoch": 1.63,
+ "learning_rate": 9.147727272727274e-05,
+ "loss": 1.658,
+ "step": 573
+ },
+ {
+ "epoch": 1.63,
+ "learning_rate": 9.128787878787879e-05,
+ "loss": 1.6005,
+ "step": 574
+ },
+ {
+ "epoch": 1.63,
+ "learning_rate": 9.109848484848486e-05,
+ "loss": 1.6821,
+ "step": 575
+ },
+ {
+ "epoch": 1.63,
+ "learning_rate": 9.090909090909092e-05,
+ "loss": 1.6858,
+ "step": 576
+ },
+ {
+ "epoch": 1.64,
+ "learning_rate": 9.071969696969697e-05,
+ "loss": 1.6933,
+ "step": 577
+ },
+ {
+ "epoch": 1.64,
+ "learning_rate": 9.053030303030303e-05,
+ "loss": 1.6757,
+ "step": 578
+ },
+ {
+ "epoch": 1.64,
+ "learning_rate": 9.03409090909091e-05,
+ "loss": 1.6107,
+ "step": 579
+ },
+ {
+ "epoch": 1.65,
+ "learning_rate": 9.015151515151515e-05,
+ "loss": 1.5751,
+ "step": 580
+ },
+ {
+ "epoch": 1.65,
+ "learning_rate": 8.996212121212122e-05,
+ "loss": 1.6168,
+ "step": 581
+ },
+ {
+ "epoch": 1.65,
+ "learning_rate": 8.977272727272728e-05,
+ "loss": 1.6213,
+ "step": 582
+ },
+ {
+ "epoch": 1.65,
+ "learning_rate": 8.958333333333335e-05,
+ "loss": 1.6243,
+ "step": 583
+ },
+ {
+ "epoch": 1.66,
+ "learning_rate": 8.93939393939394e-05,
+ "loss": 1.6249,
+ "step": 584
+ },
+ {
+ "epoch": 1.66,
+ "learning_rate": 8.920454545454546e-05,
+ "loss": 1.6529,
+ "step": 585
+ },
+ {
+ "epoch": 1.66,
+ "learning_rate": 8.901515151515151e-05,
+ "loss": 1.626,
+ "step": 586
+ },
+ {
+ "epoch": 1.67,
+ "learning_rate": 8.882575757575758e-05,
+ "loss": 1.6616,
+ "step": 587
+ },
+ {
+ "epoch": 1.67,
+ "learning_rate": 8.863636363636364e-05,
+ "loss": 1.6622,
+ "step": 588
+ },
+ {
+ "epoch": 1.67,
+ "learning_rate": 8.844696969696971e-05,
+ "loss": 1.5927,
+ "step": 589
+ },
+ {
+ "epoch": 1.67,
+ "learning_rate": 8.825757575757576e-05,
+ "loss": 1.6351,
+ "step": 590
+ },
+ {
+ "epoch": 1.68,
+ "learning_rate": 8.806818181818183e-05,
+ "loss": 1.6213,
+ "step": 591
+ },
+ {
+ "epoch": 1.68,
+ "learning_rate": 8.787878787878789e-05,
+ "loss": 1.635,
+ "step": 592
+ },
+ {
+ "epoch": 1.68,
+ "learning_rate": 8.768939393939394e-05,
+ "loss": 1.6406,
+ "step": 593
+ },
+ {
+ "epoch": 1.69,
+ "learning_rate": 8.75e-05,
+ "loss": 1.6387,
+ "step": 594
+ },
+ {
+ "epoch": 1.69,
+ "learning_rate": 8.731060606060605e-05,
+ "loss": 1.602,
+ "step": 595
+ },
+ {
+ "epoch": 1.69,
+ "learning_rate": 8.712121212121212e-05,
+ "loss": 1.601,
+ "step": 596
+ },
+ {
+ "epoch": 1.69,
+ "learning_rate": 8.693181818181818e-05,
+ "loss": 1.5855,
+ "step": 597
+ },
+ {
+ "epoch": 1.7,
+ "learning_rate": 8.674242424242425e-05,
+ "loss": 1.6236,
+ "step": 598
+ },
+ {
+ "epoch": 1.7,
+ "learning_rate": 8.65530303030303e-05,
+ "loss": 1.5999,
+ "step": 599
+ },
+ {
+ "epoch": 1.7,
+ "learning_rate": 8.636363636363637e-05,
+ "loss": 1.6093,
+ "step": 600
+ },
+ {
+ "epoch": 1.7,
+ "learning_rate": 8.617424242424243e-05,
+ "loss": 1.6602,
+ "step": 601
+ },
+ {
+ "epoch": 1.71,
+ "learning_rate": 8.598484848484848e-05,
+ "loss": 1.599,
+ "step": 602
+ },
+ {
+ "epoch": 1.71,
+ "learning_rate": 8.579545454545454e-05,
+ "loss": 1.6056,
+ "step": 603
+ },
+ {
+ "epoch": 1.71,
+ "learning_rate": 8.560606060606061e-05,
+ "loss": 1.6377,
+ "step": 604
+ },
+ {
+ "epoch": 1.72,
+ "learning_rate": 8.541666666666666e-05,
+ "loss": 1.5769,
+ "step": 605
+ },
+ {
+ "epoch": 1.72,
+ "learning_rate": 8.522727272727273e-05,
+ "loss": 1.6219,
+ "step": 606
+ },
+ {
+ "epoch": 1.72,
+ "learning_rate": 8.503787878787879e-05,
+ "loss": 1.5917,
+ "step": 607
+ },
+ {
+ "epoch": 1.72,
+ "learning_rate": 8.484848484848486e-05,
+ "loss": 1.6019,
+ "step": 608
+ },
+ {
+ "epoch": 1.73,
+ "learning_rate": 8.465909090909091e-05,
+ "loss": 1.6316,
+ "step": 609
+ },
+ {
+ "epoch": 1.73,
+ "learning_rate": 8.446969696969697e-05,
+ "loss": 1.6327,
+ "step": 610
+ },
+ {
+ "epoch": 1.73,
+ "learning_rate": 8.428030303030303e-05,
+ "loss": 1.6023,
+ "step": 611
+ },
+ {
+ "epoch": 1.74,
+ "learning_rate": 8.40909090909091e-05,
+ "loss": 1.6087,
+ "step": 612
+ },
+ {
+ "epoch": 1.74,
+ "learning_rate": 8.390151515151515e-05,
+ "loss": 1.6245,
+ "step": 613
+ },
+ {
+ "epoch": 1.74,
+ "learning_rate": 8.371212121212122e-05,
+ "loss": 1.5957,
+ "step": 614
+ },
+ {
+ "epoch": 1.74,
+ "learning_rate": 8.352272727272727e-05,
+ "loss": 1.6196,
+ "step": 615
+ },
+ {
+ "epoch": 1.75,
+ "learning_rate": 8.333333333333334e-05,
+ "loss": 1.6364,
+ "step": 616
+ },
+ {
+ "epoch": 1.75,
+ "learning_rate": 8.31439393939394e-05,
+ "loss": 1.5977,
+ "step": 617
+ },
+ {
+ "epoch": 1.75,
+ "learning_rate": 8.295454545454547e-05,
+ "loss": 1.6018,
+ "step": 618
+ },
+ {
+ "epoch": 1.76,
+ "learning_rate": 8.276515151515152e-05,
+ "loss": 1.5973,
+ "step": 619
+ },
+ {
+ "epoch": 1.76,
+ "learning_rate": 8.257575757575758e-05,
+ "loss": 1.6216,
+ "step": 620
+ },
+ {
+ "epoch": 1.76,
+ "learning_rate": 8.238636363636364e-05,
+ "loss": 1.6422,
+ "step": 621
+ },
+ {
+ "epoch": 1.76,
+ "learning_rate": 8.21969696969697e-05,
+ "loss": 1.6401,
+ "step": 622
+ },
+ {
+ "epoch": 1.77,
+ "learning_rate": 8.200757575757576e-05,
+ "loss": 1.6446,
+ "step": 623
+ },
+ {
+ "epoch": 1.77,
+ "learning_rate": 8.181818181818183e-05,
+ "loss": 1.5791,
+ "step": 624
+ },
+ {
+ "epoch": 1.77,
+ "learning_rate": 8.162878787878789e-05,
+ "loss": 1.5953,
+ "step": 625
+ },
+ {
+ "epoch": 1.78,
+ "learning_rate": 8.143939393939395e-05,
+ "loss": 1.5941,
+ "step": 626
+ },
+ {
+ "epoch": 1.78,
+ "learning_rate": 8.125000000000001e-05,
+ "loss": 1.5784,
+ "step": 627
+ },
+ {
+ "epoch": 1.78,
+ "learning_rate": 8.106060606060607e-05,
+ "loss": 1.6024,
+ "step": 628
+ },
+ {
+ "epoch": 1.78,
+ "learning_rate": 8.087121212121212e-05,
+ "loss": 1.6295,
+ "step": 629
+ },
+ {
+ "epoch": 1.79,
+ "learning_rate": 8.068181818181818e-05,
+ "loss": 1.5905,
+ "step": 630
+ },
+ {
+ "epoch": 1.79,
+ "learning_rate": 8.049242424242425e-05,
+ "loss": 1.6073,
+ "step": 631
+ },
+ {
+ "epoch": 1.79,
+ "learning_rate": 8.03030303030303e-05,
+ "loss": 1.6104,
+ "step": 632
+ },
+ {
+ "epoch": 1.8,
+ "learning_rate": 8.011363636363637e-05,
+ "loss": 1.6134,
+ "step": 633
+ },
+ {
+ "epoch": 1.8,
+ "learning_rate": 7.992424242424243e-05,
+ "loss": 1.6569,
+ "step": 634
+ },
+ {
+ "epoch": 1.8,
+ "learning_rate": 7.97348484848485e-05,
+ "loss": 1.5493,
+ "step": 635
+ },
+ {
+ "epoch": 1.8,
+ "learning_rate": 7.954545454545455e-05,
+ "loss": 1.5767,
+ "step": 636
+ },
+ {
+ "epoch": 1.81,
+ "learning_rate": 7.93560606060606e-05,
+ "loss": 1.5692,
+ "step": 637
+ },
+ {
+ "epoch": 1.81,
+ "learning_rate": 7.916666666666666e-05,
+ "loss": 1.6116,
+ "step": 638
+ },
+ {
+ "epoch": 1.81,
+ "learning_rate": 7.897727272727273e-05,
+ "loss": 1.5684,
+ "step": 639
+ },
+ {
+ "epoch": 1.82,
+ "learning_rate": 7.878787878787879e-05,
+ "loss": 1.6177,
+ "step": 640
+ },
+ {
+ "epoch": 1.82,
+ "learning_rate": 7.859848484848486e-05,
+ "loss": 1.6151,
+ "step": 641
+ },
+ {
+ "epoch": 1.82,
+ "learning_rate": 7.840909090909091e-05,
+ "loss": 1.6293,
+ "step": 642
+ },
+ {
+ "epoch": 1.82,
+ "learning_rate": 7.821969696969698e-05,
+ "loss": 1.6298,
+ "step": 643
+ },
+ {
+ "epoch": 1.83,
+ "learning_rate": 7.803030303030304e-05,
+ "loss": 1.6073,
+ "step": 644
+ },
+ {
+ "epoch": 1.83,
+ "learning_rate": 7.784090909090909e-05,
+ "loss": 1.5328,
+ "step": 645
+ },
+ {
+ "epoch": 1.83,
+ "learning_rate": 7.765151515151515e-05,
+ "loss": 1.5895,
+ "step": 646
+ },
+ {
+ "epoch": 1.84,
+ "learning_rate": 7.746212121212122e-05,
+ "loss": 1.5728,
+ "step": 647
+ },
+ {
+ "epoch": 1.84,
+ "learning_rate": 7.727272727272727e-05,
+ "loss": 1.5449,
+ "step": 648
+ },
+ {
+ "epoch": 1.84,
+ "learning_rate": 7.708333333333334e-05,
+ "loss": 1.5731,
+ "step": 649
+ },
+ {
+ "epoch": 1.84,
+ "learning_rate": 7.68939393939394e-05,
+ "loss": 1.627,
+ "step": 650
+ },
+ {
+ "epoch": 1.85,
+ "learning_rate": 7.670454545454547e-05,
+ "loss": 1.6139,
+ "step": 651
+ },
+ {
+ "epoch": 1.85,
+ "learning_rate": 7.651515151515152e-05,
+ "loss": 1.5613,
+ "step": 652
+ },
+ {
+ "epoch": 1.85,
+ "learning_rate": 7.632575757575758e-05,
+ "loss": 1.5734,
+ "step": 653
+ },
+ {
+ "epoch": 1.86,
+ "learning_rate": 7.613636363636363e-05,
+ "loss": 1.5537,
+ "step": 654
+ },
+ {
+ "epoch": 1.86,
+ "learning_rate": 7.59469696969697e-05,
+ "loss": 1.5886,
+ "step": 655
+ },
+ {
+ "epoch": 1.86,
+ "learning_rate": 7.575757575757576e-05,
+ "loss": 1.5504,
+ "step": 656
+ },
+ {
+ "epoch": 1.86,
+ "learning_rate": 7.556818181818183e-05,
+ "loss": 1.5613,
+ "step": 657
+ },
+ {
+ "epoch": 1.87,
+ "learning_rate": 7.537878787878788e-05,
+ "loss": 1.5877,
+ "step": 658
+ },
+ {
+ "epoch": 1.87,
+ "learning_rate": 7.518939393939395e-05,
+ "loss": 1.605,
+ "step": 659
+ },
+ {
+ "epoch": 1.87,
+ "learning_rate": 7.500000000000001e-05,
+ "loss": 1.5403,
+ "step": 660
+ },
+ {
+ "epoch": 1.88,
+ "learning_rate": 7.481060606060606e-05,
+ "loss": 1.6039,
+ "step": 661
+ },
+ {
+ "epoch": 1.88,
+ "learning_rate": 7.462121212121213e-05,
+ "loss": 1.5708,
+ "step": 662
+ },
+ {
+ "epoch": 1.88,
+ "learning_rate": 7.443181818181817e-05,
+ "loss": 1.5692,
+ "step": 663
+ },
+ {
+ "epoch": 1.88,
+ "learning_rate": 7.424242424242424e-05,
+ "loss": 1.5084,
+ "step": 664
+ },
+ {
+ "epoch": 1.89,
+ "learning_rate": 7.40530303030303e-05,
+ "loss": 1.5982,
+ "step": 665
+ },
+ {
+ "epoch": 1.89,
+ "learning_rate": 7.386363636363637e-05,
+ "loss": 1.5881,
+ "step": 666
+ },
+ {
+ "epoch": 1.89,
+ "learning_rate": 7.367424242424242e-05,
+ "loss": 1.5593,
+ "step": 667
+ },
+ {
+ "epoch": 1.9,
+ "learning_rate": 7.348484848484849e-05,
+ "loss": 1.5871,
+ "step": 668
+ },
+ {
+ "epoch": 1.9,
+ "learning_rate": 7.329545454545455e-05,
+ "loss": 1.6134,
+ "step": 669
+ },
+ {
+ "epoch": 1.9,
+ "learning_rate": 7.310606060606062e-05,
+ "loss": 1.5516,
+ "step": 670
+ },
+ {
+ "epoch": 1.9,
+ "learning_rate": 7.291666666666667e-05,
+ "loss": 1.5691,
+ "step": 671
+ },
+ {
+ "epoch": 1.91,
+ "learning_rate": 7.272727272727273e-05,
+ "loss": 1.5801,
+ "step": 672
+ },
+ {
+ "epoch": 1.91,
+ "learning_rate": 7.253787878787878e-05,
+ "loss": 1.5684,
+ "step": 673
+ },
+ {
+ "epoch": 1.91,
+ "learning_rate": 7.234848484848485e-05,
+ "loss": 1.5591,
+ "step": 674
+ },
+ {
+ "epoch": 1.91,
+ "learning_rate": 7.215909090909091e-05,
+ "loss": 1.5727,
+ "step": 675
+ },
+ {
+ "epoch": 1.92,
+ "learning_rate": 7.196969696969698e-05,
+ "loss": 1.6081,
+ "step": 676
+ },
+ {
+ "epoch": 1.92,
+ "learning_rate": 7.178030303030303e-05,
+ "loss": 1.5884,
+ "step": 677
+ },
+ {
+ "epoch": 1.92,
+ "learning_rate": 7.15909090909091e-05,
+ "loss": 1.5638,
+ "step": 678
+ },
+ {
+ "epoch": 1.93,
+ "learning_rate": 7.140151515151516e-05,
+ "loss": 1.5614,
+ "step": 679
+ },
+ {
+ "epoch": 1.93,
+ "learning_rate": 7.121212121212121e-05,
+ "loss": 1.5543,
+ "step": 680
+ },
+ {
+ "epoch": 1.93,
+ "learning_rate": 7.102272727272727e-05,
+ "loss": 1.5801,
+ "step": 681
+ },
+ {
+ "epoch": 1.93,
+ "learning_rate": 7.083333333333334e-05,
+ "loss": 1.5458,
+ "step": 682
+ },
+ {
+ "epoch": 1.94,
+ "learning_rate": 7.06439393939394e-05,
+ "loss": 1.5567,
+ "step": 683
+ },
+ {
+ "epoch": 1.94,
+ "learning_rate": 7.045454545454546e-05,
+ "loss": 1.5567,
+ "step": 684
+ },
+ {
+ "epoch": 1.94,
+ "learning_rate": 7.026515151515152e-05,
+ "loss": 1.5638,
+ "step": 685
+ },
+ {
+ "epoch": 1.95,
+ "learning_rate": 7.007575757575759e-05,
+ "loss": 1.5431,
+ "step": 686
+ },
+ {
+ "epoch": 1.95,
+ "learning_rate": 6.988636363636364e-05,
+ "loss": 1.5729,
+ "step": 687
+ },
+ {
+ "epoch": 1.95,
+ "learning_rate": 6.96969696969697e-05,
+ "loss": 1.5235,
+ "step": 688
+ },
+ {
+ "epoch": 1.95,
+ "learning_rate": 6.950757575757575e-05,
+ "loss": 1.5753,
+ "step": 689
+ },
+ {
+ "epoch": 1.96,
+ "learning_rate": 6.931818181818182e-05,
+ "loss": 1.5319,
+ "step": 690
+ },
+ {
+ "epoch": 1.96,
+ "learning_rate": 6.912878787878788e-05,
+ "loss": 1.5847,
+ "step": 691
+ },
+ {
+ "epoch": 1.96,
+ "learning_rate": 6.893939393939395e-05,
+ "loss": 1.5533,
+ "step": 692
+ },
+ {
+ "epoch": 1.97,
+ "learning_rate": 6.875e-05,
+ "loss": 1.5665,
+ "step": 693
+ },
+ {
+ "epoch": 1.97,
+ "learning_rate": 6.856060606060606e-05,
+ "loss": 1.5913,
+ "step": 694
+ },
+ {
+ "epoch": 1.97,
+ "learning_rate": 6.837121212121213e-05,
+ "loss": 1.6011,
+ "step": 695
+ },
+ {
+ "epoch": 1.97,
+ "learning_rate": 6.818181818181818e-05,
+ "loss": 1.5201,
+ "step": 696
+ },
+ {
+ "epoch": 1.98,
+ "learning_rate": 6.799242424242424e-05,
+ "loss": 1.57,
+ "step": 697
+ },
+ {
+ "epoch": 1.98,
+ "learning_rate": 6.78030303030303e-05,
+ "loss": 1.5381,
+ "step": 698
+ },
+ {
+ "epoch": 1.98,
+ "learning_rate": 6.761363636363636e-05,
+ "loss": 1.5681,
+ "step": 699
+ },
+ {
+ "epoch": 1.99,
+ "learning_rate": 6.742424242424242e-05,
+ "loss": 1.5542,
+ "step": 700
+ },
+ {
+ "epoch": 1.99,
+ "learning_rate": 6.723484848484849e-05,
+ "loss": 1.5779,
+ "step": 701
+ },
+ {
+ "epoch": 1.99,
+ "learning_rate": 6.704545454545455e-05,
+ "loss": 1.578,
+ "step": 702
+ },
+ {
+ "epoch": 1.99,
+ "learning_rate": 6.685606060606061e-05,
+ "loss": 1.6131,
+ "step": 703
+ },
+ {
+ "epoch": 2.0,
+ "learning_rate": 6.666666666666667e-05,
+ "loss": 1.5085,
+ "step": 704
+ },
+ {
+ "epoch": 2.0,
+ "learning_rate": 6.647727272727274e-05,
+ "loss": 1.4876,
+ "step": 705
+ },
+ {
+ "epoch": 2.0,
+ "learning_rate": 6.628787878787878e-05,
+ "loss": 1.5071,
+ "step": 706
+ },
+ {
+ "epoch": 2.01,
+ "learning_rate": 6.609848484848485e-05,
+ "loss": 1.574,
+ "step": 707
+ },
+ {
+ "epoch": 2.01,
+ "learning_rate": 6.59090909090909e-05,
+ "loss": 1.5214,
+ "step": 708
+ },
+ {
+ "epoch": 2.01,
+ "learning_rate": 6.571969696969697e-05,
+ "loss": 1.5382,
+ "step": 709
+ },
+ {
+ "epoch": 2.01,
+ "learning_rate": 6.553030303030303e-05,
+ "loss": 1.5136,
+ "step": 710
+ },
+ {
+ "epoch": 2.02,
+ "learning_rate": 6.53409090909091e-05,
+ "loss": 1.4807,
+ "step": 711
+ },
+ {
+ "epoch": 2.02,
+ "learning_rate": 6.515151515151516e-05,
+ "loss": 1.491,
+ "step": 712
+ },
+ {
+ "epoch": 2.02,
+ "learning_rate": 6.496212121212122e-05,
+ "loss": 1.5595,
+ "step": 713
+ },
+ {
+ "epoch": 2.03,
+ "learning_rate": 6.477272727272728e-05,
+ "loss": 1.5342,
+ "step": 714
+ },
+ {
+ "epoch": 2.03,
+ "learning_rate": 6.458333333333334e-05,
+ "loss": 1.5173,
+ "step": 715
+ },
+ {
+ "epoch": 2.03,
+ "learning_rate": 6.439393939393939e-05,
+ "loss": 1.5353,
+ "step": 716
+ },
+ {
+ "epoch": 2.03,
+ "learning_rate": 6.420454545454546e-05,
+ "loss": 1.4826,
+ "step": 717
+ },
+ {
+ "epoch": 2.04,
+ "learning_rate": 6.401515151515152e-05,
+ "loss": 1.5404,
+ "step": 718
+ },
+ {
+ "epoch": 2.04,
+ "learning_rate": 6.382575757575759e-05,
+ "loss": 1.5612,
+ "step": 719
+ },
+ {
+ "epoch": 2.04,
+ "learning_rate": 6.363636363636364e-05,
+ "loss": 1.5203,
+ "step": 720
+ },
+ {
+ "epoch": 2.05,
+ "learning_rate": 6.344696969696971e-05,
+ "loss": 1.52,
+ "step": 721
+ },
+ {
+ "epoch": 2.05,
+ "learning_rate": 6.325757575757577e-05,
+ "loss": 1.5417,
+ "step": 722
+ },
+ {
+ "epoch": 2.05,
+ "learning_rate": 6.306818181818182e-05,
+ "loss": 1.5352,
+ "step": 723
+ },
+ {
+ "epoch": 2.05,
+ "learning_rate": 6.287878787878788e-05,
+ "loss": 1.4671,
+ "step": 724
+ },
+ {
+ "epoch": 2.06,
+ "learning_rate": 6.268939393939395e-05,
+ "loss": 1.5739,
+ "step": 725
+ },
+ {
+ "epoch": 2.06,
+ "learning_rate": 6.25e-05,
+ "loss": 1.4987,
+ "step": 726
+ },
+ {
+ "epoch": 2.06,
+ "learning_rate": 6.231060606060606e-05,
+ "loss": 1.5145,
+ "step": 727
+ },
+ {
+ "epoch": 2.07,
+ "learning_rate": 6.212121212121213e-05,
+ "loss": 1.5686,
+ "step": 728
+ },
+ {
+ "epoch": 2.07,
+ "learning_rate": 6.193181818181818e-05,
+ "loss": 1.4872,
+ "step": 729
+ },
+ {
+ "epoch": 2.07,
+ "learning_rate": 6.174242424242425e-05,
+ "loss": 1.4831,
+ "step": 730
+ },
+ {
+ "epoch": 2.07,
+ "learning_rate": 6.15530303030303e-05,
+ "loss": 1.5242,
+ "step": 731
+ },
+ {
+ "epoch": 2.08,
+ "learning_rate": 6.136363636363636e-05,
+ "loss": 1.5298,
+ "step": 732
+ },
+ {
+ "epoch": 2.08,
+ "learning_rate": 6.117424242424242e-05,
+ "loss": 1.4941,
+ "step": 733
+ },
+ {
+ "epoch": 2.08,
+ "learning_rate": 6.098484848484849e-05,
+ "loss": 1.5022,
+ "step": 734
+ },
+ {
+ "epoch": 2.09,
+ "learning_rate": 6.079545454545454e-05,
+ "loss": 1.4947,
+ "step": 735
+ },
+ {
+ "epoch": 2.09,
+ "learning_rate": 6.060606060606061e-05,
+ "loss": 1.4922,
+ "step": 736
+ },
+ {
+ "epoch": 2.09,
+ "learning_rate": 6.041666666666667e-05,
+ "loss": 1.4796,
+ "step": 737
+ },
+ {
+ "epoch": 2.09,
+ "learning_rate": 6.022727272727273e-05,
+ "loss": 1.4619,
+ "step": 738
+ },
+ {
+ "epoch": 2.1,
+ "learning_rate": 6.0037878787878785e-05,
+ "loss": 1.5346,
+ "step": 739
+ },
+ {
+ "epoch": 2.1,
+ "learning_rate": 5.9848484848484854e-05,
+ "loss": 1.4987,
+ "step": 740
+ },
+ {
+ "epoch": 2.1,
+ "learning_rate": 5.965909090909091e-05,
+ "loss": 1.516,
+ "step": 741
+ },
+ {
+ "epoch": 2.1,
+ "learning_rate": 5.946969696969697e-05,
+ "loss": 1.5075,
+ "step": 742
+ },
+ {
+ "epoch": 2.11,
+ "learning_rate": 5.928030303030303e-05,
+ "loss": 1.4584,
+ "step": 743
+ },
+ {
+ "epoch": 2.11,
+ "learning_rate": 5.90909090909091e-05,
+ "loss": 1.5285,
+ "step": 744
+ },
+ {
+ "epoch": 2.11,
+ "learning_rate": 5.890151515151515e-05,
+ "loss": 1.5339,
+ "step": 745
+ },
+ {
+ "epoch": 2.12,
+ "learning_rate": 5.871212121212122e-05,
+ "loss": 1.4811,
+ "step": 746
+ },
+ {
+ "epoch": 2.12,
+ "learning_rate": 5.852272727272727e-05,
+ "loss": 1.5158,
+ "step": 747
+ },
+ {
+ "epoch": 2.12,
+ "learning_rate": 5.833333333333334e-05,
+ "loss": 1.5523,
+ "step": 748
+ },
+ {
+ "epoch": 2.12,
+ "learning_rate": 5.8143939393939395e-05,
+ "loss": 1.4911,
+ "step": 749
+ },
+ {
+ "epoch": 2.13,
+ "learning_rate": 5.7954545454545464e-05,
+ "loss": 1.508,
+ "step": 750
+ },
+ {
+ "epoch": 2.13,
+ "learning_rate": 5.776515151515152e-05,
+ "loss": 1.5273,
+ "step": 751
+ },
+ {
+ "epoch": 2.13,
+ "learning_rate": 5.757575757575758e-05,
+ "loss": 1.5231,
+ "step": 752
+ },
+ {
+ "epoch": 2.14,
+ "learning_rate": 5.738636363636364e-05,
+ "loss": 1.5269,
+ "step": 753
+ },
+ {
+ "epoch": 2.14,
+ "learning_rate": 5.719696969696971e-05,
+ "loss": 1.5306,
+ "step": 754
+ },
+ {
+ "epoch": 2.14,
+ "learning_rate": 5.700757575757576e-05,
+ "loss": 1.4519,
+ "step": 755
+ },
+ {
+ "epoch": 2.14,
+ "learning_rate": 5.6818181818181825e-05,
+ "loss": 1.4976,
+ "step": 756
+ },
+ {
+ "epoch": 2.15,
+ "learning_rate": 5.662878787878788e-05,
+ "loss": 1.5136,
+ "step": 757
+ },
+ {
+ "epoch": 2.15,
+ "learning_rate": 5.643939393939395e-05,
+ "loss": 1.536,
+ "step": 758
+ },
+ {
+ "epoch": 2.15,
+ "learning_rate": 5.6250000000000005e-05,
+ "loss": 1.4927,
+ "step": 759
+ },
+ {
+ "epoch": 2.16,
+ "learning_rate": 5.606060606060606e-05,
+ "loss": 1.4931,
+ "step": 760
+ },
+ {
+ "epoch": 2.16,
+ "learning_rate": 5.587121212121212e-05,
+ "loss": 1.4939,
+ "step": 761
+ },
+ {
+ "epoch": 2.16,
+ "learning_rate": 5.568181818181818e-05,
+ "loss": 1.4446,
+ "step": 762
+ },
+ {
+ "epoch": 2.16,
+ "learning_rate": 5.549242424242425e-05,
+ "loss": 1.5113,
+ "step": 763
+ },
+ {
+ "epoch": 2.17,
+ "learning_rate": 5.5303030303030304e-05,
+ "loss": 1.4886,
+ "step": 764
+ },
+ {
+ "epoch": 2.17,
+ "learning_rate": 5.5113636363636366e-05,
+ "loss": 1.5291,
+ "step": 765
+ },
+ {
+ "epoch": 2.17,
+ "learning_rate": 5.492424242424242e-05,
+ "loss": 1.5204,
+ "step": 766
+ },
+ {
+ "epoch": 2.18,
+ "learning_rate": 5.473484848484849e-05,
+ "loss": 1.4677,
+ "step": 767
+ },
+ {
+ "epoch": 2.18,
+ "learning_rate": 5.4545454545454546e-05,
+ "loss": 1.4499,
+ "step": 768
+ },
+ {
+ "epoch": 2.18,
+ "learning_rate": 5.435606060606061e-05,
+ "loss": 1.4735,
+ "step": 769
+ },
+ {
+ "epoch": 2.18,
+ "learning_rate": 5.4166666666666664e-05,
+ "loss": 1.4688,
+ "step": 770
+ },
+ {
+ "epoch": 2.19,
+ "learning_rate": 5.397727272727273e-05,
+ "loss": 1.4728,
+ "step": 771
+ },
+ {
+ "epoch": 2.19,
+ "learning_rate": 5.378787878787879e-05,
+ "loss": 1.4791,
+ "step": 772
+ },
+ {
+ "epoch": 2.19,
+ "learning_rate": 5.359848484848485e-05,
+ "loss": 1.5265,
+ "step": 773
+ },
+ {
+ "epoch": 2.2,
+ "learning_rate": 5.340909090909091e-05,
+ "loss": 1.4775,
+ "step": 774
+ },
+ {
+ "epoch": 2.2,
+ "learning_rate": 5.3219696969696976e-05,
+ "loss": 1.4531,
+ "step": 775
+ },
+ {
+ "epoch": 2.2,
+ "learning_rate": 5.303030303030303e-05,
+ "loss": 1.4891,
+ "step": 776
+ },
+ {
+ "epoch": 2.2,
+ "learning_rate": 5.2840909090909094e-05,
+ "loss": 1.4764,
+ "step": 777
+ },
+ {
+ "epoch": 2.21,
+ "learning_rate": 5.265151515151515e-05,
+ "loss": 1.4864,
+ "step": 778
+ },
+ {
+ "epoch": 2.21,
+ "learning_rate": 5.246212121212122e-05,
+ "loss": 1.521,
+ "step": 779
+ },
+ {
+ "epoch": 2.21,
+ "learning_rate": 5.2272727272727274e-05,
+ "loss": 1.473,
+ "step": 780
+ },
+ {
+ "epoch": 2.22,
+ "learning_rate": 5.208333333333334e-05,
+ "loss": 1.477,
+ "step": 781
+ },
+ {
+ "epoch": 2.22,
+ "learning_rate": 5.189393939393939e-05,
+ "loss": 1.482,
+ "step": 782
+ },
+ {
+ "epoch": 2.22,
+ "learning_rate": 5.170454545454546e-05,
+ "loss": 1.5387,
+ "step": 783
+ },
+ {
+ "epoch": 2.22,
+ "learning_rate": 5.151515151515152e-05,
+ "loss": 1.4354,
+ "step": 784
+ },
+ {
+ "epoch": 2.23,
+ "learning_rate": 5.132575757575758e-05,
+ "loss": 1.4792,
+ "step": 785
+ },
+ {
+ "epoch": 2.23,
+ "learning_rate": 5.1136363636363635e-05,
+ "loss": 1.4589,
+ "step": 786
+ },
+ {
+ "epoch": 2.23,
+ "learning_rate": 5.0946969696969704e-05,
+ "loss": 1.4883,
+ "step": 787
+ },
+ {
+ "epoch": 2.24,
+ "learning_rate": 5.075757575757576e-05,
+ "loss": 1.4442,
+ "step": 788
+ },
+ {
+ "epoch": 2.24,
+ "learning_rate": 5.056818181818183e-05,
+ "loss": 1.4387,
+ "step": 789
+ },
+ {
+ "epoch": 2.24,
+ "learning_rate": 5.037878787878788e-05,
+ "loss": 1.5203,
+ "step": 790
+ },
+ {
+ "epoch": 2.24,
+ "learning_rate": 5.018939393939395e-05,
+ "loss": 1.4634,
+ "step": 791
+ },
+ {
+ "epoch": 2.25,
+ "learning_rate": 5e-05,
+ "loss": 1.4734,
+ "step": 792
+ },
+ {
+ "epoch": 2.25,
+ "learning_rate": 4.9810606060606065e-05,
+ "loss": 1.5079,
+ "step": 793
+ },
+ {
+ "epoch": 2.25,
+ "learning_rate": 4.962121212121213e-05,
+ "loss": 1.4986,
+ "step": 794
+ },
+ {
+ "epoch": 2.26,
+ "learning_rate": 4.943181818181818e-05,
+ "loss": 1.4132,
+ "step": 795
+ },
+ {
+ "epoch": 2.26,
+ "learning_rate": 4.9242424242424245e-05,
+ "loss": 1.4992,
+ "step": 796
+ },
+ {
+ "epoch": 2.26,
+ "learning_rate": 4.905303030303031e-05,
+ "loss": 1.4601,
+ "step": 797
+ },
+ {
+ "epoch": 2.26,
+ "learning_rate": 4.886363636363637e-05,
+ "loss": 1.47,
+ "step": 798
+ },
+ {
+ "epoch": 2.27,
+ "learning_rate": 4.8674242424242425e-05,
+ "loss": 1.4575,
+ "step": 799
+ },
+ {
+ "epoch": 2.27,
+ "learning_rate": 4.848484848484849e-05,
+ "loss": 1.4508,
+ "step": 800
+ }
+ ],
+ "logging_steps": 1,
+ "max_steps": 1056,
+ "num_train_epochs": 3,
+ "save_steps": 100,
+ "total_flos": 2.026889424215685e+18,
+ "trial_name": null,
+ "trial_params": null
+}
diff --git a/checkpoint-800/training_args.bin b/checkpoint-800/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..574132c086f9a526d71493b1ec4c09396eac5482
--- /dev/null
+++ b/checkpoint-800/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:50303c7f1586feb001f01b7e968e567295f501ef6c6407e28250c458696a43af
+size 4155
diff --git a/checkpoint-900/README.md b/checkpoint-900/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..08371015f02382e6fcba318f4aaea54ae52cd3c4
--- /dev/null
+++ b/checkpoint-900/README.md
@@ -0,0 +1,34 @@
+---
+library_name: peft
+---
+## Training procedure
+
+
+The following `bitsandbytes` quantization config was used during training:
+- quant_method: bitsandbytes
+- load_in_8bit: True
+- load_in_4bit: False
+- llm_int8_threshold: 6.0
+- llm_int8_skip_modules: None
+- llm_int8_enable_fp32_cpu_offload: False
+- llm_int8_has_fp16_weight: False
+- bnb_4bit_quant_type: fp4
+- bnb_4bit_use_double_quant: False
+- bnb_4bit_compute_dtype: float32
+
+The following `bitsandbytes` quantization config was used during training:
+- quant_method: bitsandbytes
+- load_in_8bit: True
+- load_in_4bit: False
+- llm_int8_threshold: 6.0
+- llm_int8_skip_modules: None
+- llm_int8_enable_fp32_cpu_offload: False
+- llm_int8_has_fp16_weight: False
+- bnb_4bit_quant_type: fp4
+- bnb_4bit_use_double_quant: False
+- bnb_4bit_compute_dtype: float32
+### Framework versions
+
+- PEFT 0.6.0.dev0
+
+- PEFT 0.6.0.dev0
diff --git a/checkpoint-900/adapter_config.json b/checkpoint-900/adapter_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..751d838ac0c1ae5ca71ca448b25d7a8a0173f01b
--- /dev/null
+++ b/checkpoint-900/adapter_config.json
@@ -0,0 +1,23 @@
+{
+ "auto_mapping": null,
+ "base_model_name_or_path": "bigscience/bloomz-3b",
+ "bias": "none",
+ "fan_in_fan_out": false,
+ "inference_mode": true,
+ "init_lora_weights": true,
+ "layers_pattern": null,
+ "layers_to_transform": null,
+ "lora_alpha": 16,
+ "lora_dropout": 0.0,
+ "modules_to_save": null,
+ "peft_type": "LORA",
+ "r": 8,
+ "revision": null,
+ "target_modules": [
+ "dense_4h_to_h",
+ "dense",
+ "dense_h_to_4h",
+ "query_key_value"
+ ],
+ "task_type": "CAUSAL_LM"
+}
\ No newline at end of file
diff --git a/checkpoint-900/adapter_model.bin b/checkpoint-900/adapter_model.bin
new file mode 100644
index 0000000000000000000000000000000000000000..ff470fe17f41cd1e6abe818e008d681e7bd9a9dd
--- /dev/null
+++ b/checkpoint-900/adapter_model.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:07b6f588ceda20cbb55e89298fe91783b4445ea77bc0d86e6d8c167bcc7f6e93
+size 39409357
diff --git a/checkpoint-900/optimizer.pt b/checkpoint-900/optimizer.pt
new file mode 100644
index 0000000000000000000000000000000000000000..82d613f8331a582e573ceb3c032846b7e1782c47
--- /dev/null
+++ b/checkpoint-900/optimizer.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1791e01d449b05999ad74785469bf43c95c1a0f6dc69328e47ef90504899ab16
+size 78844421
diff --git a/checkpoint-900/rng_state.pth b/checkpoint-900/rng_state.pth
new file mode 100644
index 0000000000000000000000000000000000000000..e0df6592e27bef84a4beb9293e7b49666a4d652e
--- /dev/null
+++ b/checkpoint-900/rng_state.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7508d4b8dd267de5cc58e972da25236687927651336a28f292c92f7f23951475
+size 14575
diff --git a/checkpoint-900/scheduler.pt b/checkpoint-900/scheduler.pt
new file mode 100644
index 0000000000000000000000000000000000000000..eac85d814ac63b9122c126dd792a5d95ce8f87e2
--- /dev/null
+++ b/checkpoint-900/scheduler.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6798436afcf49d46a32149ee732d09e9e67f9b964e1086b7e4a1fb61f16a0277
+size 627
diff --git a/checkpoint-900/special_tokens_map.json b/checkpoint-900/special_tokens_map.json
new file mode 100644
index 0000000000000000000000000000000000000000..fdafe480f024ff444c7492147536765ce5d55a2d
--- /dev/null
+++ b/checkpoint-900/special_tokens_map.json
@@ -0,0 +1,6 @@
+{
+ "bos_token": "",
+ "eos_token": "",
+ "pad_token": "",
+ "unk_token": ""
+}
diff --git a/checkpoint-900/tokenizer.json b/checkpoint-900/tokenizer.json
new file mode 100644
index 0000000000000000000000000000000000000000..673c31abdeadf6576c3c754df86459e1ad64e207
--- /dev/null
+++ b/checkpoint-900/tokenizer.json
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:85b00d7db4df5df2e3f01cacc3feda246002a672f3356eec7f4b04a22eb0dfbe
+size 14500570
diff --git a/checkpoint-900/tokenizer_config.json b/checkpoint-900/tokenizer_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..4b56cc9c2965c07132c35df3e2972e93d98c82c3
--- /dev/null
+++ b/checkpoint-900/tokenizer_config.json
@@ -0,0 +1,10 @@
+{
+ "add_prefix_space": false,
+ "bos_token": "",
+ "clean_up_tokenization_spaces": false,
+ "eos_token": "",
+ "model_max_length": 1000000000000000019884624838656,
+ "pad_token": "",
+ "tokenizer_class": "BloomTokenizer",
+ "unk_token": ""
+}
diff --git a/checkpoint-900/trainer_state.json b/checkpoint-900/trainer_state.json
new file mode 100644
index 0000000000000000000000000000000000000000..b4bde66d545e58b3407f7d4472d9c995b5e0e424
--- /dev/null
+++ b/checkpoint-900/trainer_state.json
@@ -0,0 +1,5419 @@
+{
+ "best_metric": null,
+ "best_model_checkpoint": null,
+ "epoch": 2.5531914893617023,
+ "eval_steps": 500,
+ "global_step": 900,
+ "is_hyper_param_search": false,
+ "is_local_process_zero": true,
+ "is_world_process_zero": true,
+ "log_history": [
+ {
+ "epoch": 0.0,
+ "learning_rate": 0.00019981060606060605,
+ "loss": 2.9206,
+ "step": 1
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 0.00019962121212121212,
+ "loss": 2.7609,
+ "step": 2
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 0.0001994318181818182,
+ "loss": 2.6878,
+ "step": 3
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 0.00019924242424242426,
+ "loss": 2.6697,
+ "step": 4
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 0.0001990530303030303,
+ "loss": 2.5818,
+ "step": 5
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 0.00019886363636363637,
+ "loss": 2.5396,
+ "step": 6
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 0.00019867424242424244,
+ "loss": 2.5265,
+ "step": 7
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 0.0001984848484848485,
+ "loss": 2.5475,
+ "step": 8
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 0.00019829545454545455,
+ "loss": 2.4835,
+ "step": 9
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 0.0001981060606060606,
+ "loss": 2.4559,
+ "step": 10
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 0.0001979166666666667,
+ "loss": 2.4511,
+ "step": 11
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 0.00019772727272727273,
+ "loss": 2.4592,
+ "step": 12
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 0.0001975378787878788,
+ "loss": 2.4495,
+ "step": 13
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 0.00019734848484848484,
+ "loss": 2.4714,
+ "step": 14
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 0.00019715909090909094,
+ "loss": 2.4302,
+ "step": 15
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 0.00019696969696969698,
+ "loss": 2.4097,
+ "step": 16
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 0.00019678030303030305,
+ "loss": 2.4523,
+ "step": 17
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 0.0001965909090909091,
+ "loss": 2.4325,
+ "step": 18
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 0.00019640151515151516,
+ "loss": 2.4125,
+ "step": 19
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 0.00019621212121212123,
+ "loss": 2.4329,
+ "step": 20
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 0.00019602272727272727,
+ "loss": 2.3471,
+ "step": 21
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 0.00019583333333333334,
+ "loss": 2.3012,
+ "step": 22
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 0.0001956439393939394,
+ "loss": 2.3869,
+ "step": 23
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 0.00019545454545454548,
+ "loss": 2.3822,
+ "step": 24
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 0.00019526515151515152,
+ "loss": 2.3427,
+ "step": 25
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 0.0001950757575757576,
+ "loss": 2.3659,
+ "step": 26
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 0.00019488636363636366,
+ "loss": 2.3826,
+ "step": 27
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 0.0001946969696969697,
+ "loss": 2.3532,
+ "step": 28
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 0.00019450757575757577,
+ "loss": 2.3828,
+ "step": 29
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 0.0001943181818181818,
+ "loss": 2.3133,
+ "step": 30
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 0.0001941287878787879,
+ "loss": 2.3613,
+ "step": 31
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 0.00019393939393939395,
+ "loss": 2.3867,
+ "step": 32
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 0.00019375000000000002,
+ "loss": 2.2966,
+ "step": 33
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 0.00019356060606060606,
+ "loss": 2.3436,
+ "step": 34
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 0.00019337121212121213,
+ "loss": 2.3425,
+ "step": 35
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 0.0001931818181818182,
+ "loss": 2.307,
+ "step": 36
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 0.00019299242424242424,
+ "loss": 2.3521,
+ "step": 37
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 0.0001928030303030303,
+ "loss": 2.3302,
+ "step": 38
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 0.00019261363636363635,
+ "loss": 2.312,
+ "step": 39
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 0.00019242424242424245,
+ "loss": 2.3655,
+ "step": 40
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 0.0001922348484848485,
+ "loss": 2.344,
+ "step": 41
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 0.00019204545454545456,
+ "loss": 2.3373,
+ "step": 42
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 0.0001918560606060606,
+ "loss": 2.3331,
+ "step": 43
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 0.00019166666666666667,
+ "loss": 2.3376,
+ "step": 44
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 0.00019147727272727274,
+ "loss": 2.3369,
+ "step": 45
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 0.00019128787878787878,
+ "loss": 2.3413,
+ "step": 46
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 0.00019109848484848485,
+ "loss": 2.3212,
+ "step": 47
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 0.00019090909090909092,
+ "loss": 2.307,
+ "step": 48
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 0.000190719696969697,
+ "loss": 2.2929,
+ "step": 49
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 0.00019053030303030303,
+ "loss": 2.2873,
+ "step": 50
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 0.0001903409090909091,
+ "loss": 2.3098,
+ "step": 51
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 0.00019015151515151517,
+ "loss": 2.3129,
+ "step": 52
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 0.0001899621212121212,
+ "loss": 2.3038,
+ "step": 53
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 0.00018977272727272728,
+ "loss": 2.286,
+ "step": 54
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 0.00018958333333333332,
+ "loss": 2.3388,
+ "step": 55
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 0.00018939393939393942,
+ "loss": 2.3193,
+ "step": 56
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 0.00018920454545454546,
+ "loss": 2.3136,
+ "step": 57
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 0.00018901515151515153,
+ "loss": 2.3141,
+ "step": 58
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 0.00018882575757575757,
+ "loss": 2.3646,
+ "step": 59
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 0.00018863636363636364,
+ "loss": 2.3318,
+ "step": 60
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 0.0001884469696969697,
+ "loss": 2.2977,
+ "step": 61
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 0.00018825757575757575,
+ "loss": 2.2764,
+ "step": 62
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 0.00018806818181818182,
+ "loss": 2.3095,
+ "step": 63
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 0.0001878787878787879,
+ "loss": 2.252,
+ "step": 64
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 0.00018768939393939396,
+ "loss": 2.2786,
+ "step": 65
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 0.0001875,
+ "loss": 2.2789,
+ "step": 66
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 0.00018731060606060607,
+ "loss": 2.2841,
+ "step": 67
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 0.00018712121212121212,
+ "loss": 2.3436,
+ "step": 68
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 0.00018693181818181818,
+ "loss": 2.2956,
+ "step": 69
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 0.00018674242424242425,
+ "loss": 2.2353,
+ "step": 70
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 0.0001865530303030303,
+ "loss": 2.2772,
+ "step": 71
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 0.00018636363636363636,
+ "loss": 2.2496,
+ "step": 72
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 0.00018617424242424243,
+ "loss": 2.2477,
+ "step": 73
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 0.0001859848484848485,
+ "loss": 2.2791,
+ "step": 74
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 0.00018579545454545454,
+ "loss": 2.2799,
+ "step": 75
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 0.00018560606060606061,
+ "loss": 2.3132,
+ "step": 76
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 0.00018541666666666668,
+ "loss": 2.2542,
+ "step": 77
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 0.00018522727272727273,
+ "loss": 2.2609,
+ "step": 78
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 0.0001850378787878788,
+ "loss": 2.2819,
+ "step": 79
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 0.00018484848484848484,
+ "loss": 2.2844,
+ "step": 80
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 0.00018465909090909093,
+ "loss": 2.2542,
+ "step": 81
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 0.00018446969696969697,
+ "loss": 2.2603,
+ "step": 82
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 0.00018428030303030304,
+ "loss": 2.2832,
+ "step": 83
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 0.00018409090909090909,
+ "loss": 2.2869,
+ "step": 84
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 0.00018390151515151518,
+ "loss": 2.2646,
+ "step": 85
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 0.00018371212121212122,
+ "loss": 2.2698,
+ "step": 86
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 0.00018352272727272727,
+ "loss": 2.2757,
+ "step": 87
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 0.00018333333333333334,
+ "loss": 2.2544,
+ "step": 88
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 0.0001831439393939394,
+ "loss": 2.2678,
+ "step": 89
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 0.00018295454545454547,
+ "loss": 2.2778,
+ "step": 90
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 0.00018276515151515152,
+ "loss": 2.2027,
+ "step": 91
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 0.00018257575757575758,
+ "loss": 2.2167,
+ "step": 92
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 0.00018238636363636365,
+ "loss": 2.2602,
+ "step": 93
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 0.00018219696969696972,
+ "loss": 2.2736,
+ "step": 94
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 0.00018200757575757577,
+ "loss": 2.2443,
+ "step": 95
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 0.00018181818181818183,
+ "loss": 2.2299,
+ "step": 96
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 0.0001816287878787879,
+ "loss": 2.2644,
+ "step": 97
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 0.00018143939393939395,
+ "loss": 2.259,
+ "step": 98
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 0.00018125000000000001,
+ "loss": 2.2567,
+ "step": 99
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 0.00018106060606060606,
+ "loss": 2.2599,
+ "step": 100
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 0.00018087121212121213,
+ "loss": 2.2091,
+ "step": 101
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 0.0001806818181818182,
+ "loss": 2.2312,
+ "step": 102
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 0.00018049242424242426,
+ "loss": 2.1869,
+ "step": 103
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 0.0001803030303030303,
+ "loss": 2.2023,
+ "step": 104
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 0.00018011363636363638,
+ "loss": 2.2132,
+ "step": 105
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 0.00017992424242424244,
+ "loss": 2.2612,
+ "step": 106
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 0.0001797348484848485,
+ "loss": 2.2109,
+ "step": 107
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 0.00017954545454545456,
+ "loss": 2.215,
+ "step": 108
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 0.0001793560606060606,
+ "loss": 2.2114,
+ "step": 109
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 0.0001791666666666667,
+ "loss": 2.2203,
+ "step": 110
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 0.00017897727272727274,
+ "loss": 2.2594,
+ "step": 111
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 0.0001787878787878788,
+ "loss": 2.2001,
+ "step": 112
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 0.00017859848484848485,
+ "loss": 2.2046,
+ "step": 113
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 0.00017840909090909092,
+ "loss": 2.1907,
+ "step": 114
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 0.00017821969696969699,
+ "loss": 2.2539,
+ "step": 115
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 0.00017803030303030303,
+ "loss": 2.2335,
+ "step": 116
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 0.0001778409090909091,
+ "loss": 2.2171,
+ "step": 117
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 0.00017765151515151517,
+ "loss": 2.2278,
+ "step": 118
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 0.00017746212121212123,
+ "loss": 2.231,
+ "step": 119
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 0.00017727272727272728,
+ "loss": 2.2141,
+ "step": 120
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 0.00017708333333333335,
+ "loss": 2.2432,
+ "step": 121
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 0.00017689393939393942,
+ "loss": 2.2266,
+ "step": 122
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 0.00017670454545454546,
+ "loss": 2.1929,
+ "step": 123
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 0.00017651515151515153,
+ "loss": 2.2077,
+ "step": 124
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 0.00017632575757575757,
+ "loss": 2.2133,
+ "step": 125
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 0.00017613636363636366,
+ "loss": 2.2251,
+ "step": 126
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 0.0001759469696969697,
+ "loss": 2.2265,
+ "step": 127
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 0.00017575757575757578,
+ "loss": 2.2186,
+ "step": 128
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 0.00017556818181818182,
+ "loss": 2.1925,
+ "step": 129
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 0.0001753787878787879,
+ "loss": 2.1956,
+ "step": 130
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 0.00017518939393939396,
+ "loss": 2.2459,
+ "step": 131
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 0.000175,
+ "loss": 2.22,
+ "step": 132
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 0.00017481060606060607,
+ "loss": 2.2143,
+ "step": 133
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 0.0001746212121212121,
+ "loss": 2.2359,
+ "step": 134
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 0.0001744318181818182,
+ "loss": 2.2058,
+ "step": 135
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 0.00017424242424242425,
+ "loss": 2.2307,
+ "step": 136
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 0.00017405303030303032,
+ "loss": 2.2062,
+ "step": 137
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 0.00017386363636363636,
+ "loss": 2.1796,
+ "step": 138
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 0.00017367424242424243,
+ "loss": 2.2054,
+ "step": 139
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 0.0001734848484848485,
+ "loss": 2.1651,
+ "step": 140
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 0.00017329545454545454,
+ "loss": 2.2159,
+ "step": 141
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 0.0001731060606060606,
+ "loss": 2.1988,
+ "step": 142
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 0.00017291666666666668,
+ "loss": 2.1676,
+ "step": 143
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 0.00017272727272727275,
+ "loss": 2.1725,
+ "step": 144
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 0.0001725378787878788,
+ "loss": 2.2205,
+ "step": 145
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 0.00017234848484848486,
+ "loss": 2.1486,
+ "step": 146
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 0.00017215909090909093,
+ "loss": 2.147,
+ "step": 147
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 0.00017196969696969697,
+ "loss": 2.1651,
+ "step": 148
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 0.00017178030303030304,
+ "loss": 2.1983,
+ "step": 149
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 0.00017159090909090908,
+ "loss": 2.1778,
+ "step": 150
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 0.00017140151515151518,
+ "loss": 2.1631,
+ "step": 151
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 0.00017121212121212122,
+ "loss": 2.1442,
+ "step": 152
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 0.0001710227272727273,
+ "loss": 2.1397,
+ "step": 153
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 0.00017083333333333333,
+ "loss": 2.1697,
+ "step": 154
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 0.0001706439393939394,
+ "loss": 2.1451,
+ "step": 155
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 0.00017045454545454547,
+ "loss": 2.1789,
+ "step": 156
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 0.0001702651515151515,
+ "loss": 2.1037,
+ "step": 157
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 0.00017007575757575758,
+ "loss": 2.1698,
+ "step": 158
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 0.00016988636363636365,
+ "loss": 2.1538,
+ "step": 159
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 0.00016969696969696972,
+ "loss": 2.2015,
+ "step": 160
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 0.00016950757575757576,
+ "loss": 2.179,
+ "step": 161
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 0.00016931818181818183,
+ "loss": 2.1766,
+ "step": 162
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 0.0001691287878787879,
+ "loss": 2.1646,
+ "step": 163
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 0.00016893939393939394,
+ "loss": 2.1694,
+ "step": 164
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 0.00016875,
+ "loss": 2.1562,
+ "step": 165
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 0.00016856060606060605,
+ "loss": 2.1551,
+ "step": 166
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 0.00016837121212121212,
+ "loss": 2.1652,
+ "step": 167
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 0.0001681818181818182,
+ "loss": 2.1594,
+ "step": 168
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 0.00016799242424242426,
+ "loss": 2.1674,
+ "step": 169
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 0.0001678030303030303,
+ "loss": 2.1378,
+ "step": 170
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 0.00016761363636363637,
+ "loss": 2.1447,
+ "step": 171
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 0.00016742424242424244,
+ "loss": 2.1451,
+ "step": 172
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 0.00016723484848484848,
+ "loss": 2.1336,
+ "step": 173
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 0.00016704545454545455,
+ "loss": 2.1231,
+ "step": 174
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 0.0001668560606060606,
+ "loss": 2.1143,
+ "step": 175
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 0.0001666666666666667,
+ "loss": 2.1316,
+ "step": 176
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 0.00016647727272727273,
+ "loss": 2.1281,
+ "step": 177
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 0.0001662878787878788,
+ "loss": 2.136,
+ "step": 178
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 0.00016609848484848484,
+ "loss": 2.1279,
+ "step": 179
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 0.00016590909090909094,
+ "loss": 2.1421,
+ "step": 180
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 0.00016571969696969698,
+ "loss": 2.1541,
+ "step": 181
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 0.00016553030303030305,
+ "loss": 2.1293,
+ "step": 182
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 0.0001653409090909091,
+ "loss": 2.1294,
+ "step": 183
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 0.00016515151515151516,
+ "loss": 2.1459,
+ "step": 184
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 0.00016496212121212123,
+ "loss": 2.1113,
+ "step": 185
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 0.00016477272727272727,
+ "loss": 2.1394,
+ "step": 186
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 0.00016458333333333334,
+ "loss": 2.1321,
+ "step": 187
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 0.0001643939393939394,
+ "loss": 2.148,
+ "step": 188
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 0.00016420454545454548,
+ "loss": 2.1631,
+ "step": 189
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 0.00016401515151515152,
+ "loss": 2.1276,
+ "step": 190
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 0.0001638257575757576,
+ "loss": 2.0706,
+ "step": 191
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 0.00016363636363636366,
+ "loss": 2.127,
+ "step": 192
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 0.0001634469696969697,
+ "loss": 2.1449,
+ "step": 193
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 0.00016325757575757577,
+ "loss": 2.1204,
+ "step": 194
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 0.0001630681818181818,
+ "loss": 2.0904,
+ "step": 195
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 0.0001628787878787879,
+ "loss": 2.1129,
+ "step": 196
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 0.00016268939393939395,
+ "loss": 2.1036,
+ "step": 197
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 0.00016250000000000002,
+ "loss": 2.1509,
+ "step": 198
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 0.00016231060606060606,
+ "loss": 2.1239,
+ "step": 199
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 0.00016212121212121213,
+ "loss": 2.145,
+ "step": 200
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 0.0001619318181818182,
+ "loss": 2.1221,
+ "step": 201
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 0.00016174242424242424,
+ "loss": 2.1181,
+ "step": 202
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 0.0001615530303030303,
+ "loss": 2.1306,
+ "step": 203
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 0.00016136363636363635,
+ "loss": 2.0199,
+ "step": 204
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 0.00016117424242424245,
+ "loss": 2.1178,
+ "step": 205
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 0.0001609848484848485,
+ "loss": 2.1584,
+ "step": 206
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 0.00016079545454545456,
+ "loss": 2.0872,
+ "step": 207
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 0.0001606060606060606,
+ "loss": 2.1033,
+ "step": 208
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 0.00016041666666666667,
+ "loss": 2.1381,
+ "step": 209
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 0.00016022727272727274,
+ "loss": 2.1127,
+ "step": 210
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 0.00016003787878787878,
+ "loss": 2.1077,
+ "step": 211
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 0.00015984848484848485,
+ "loss": 2.0984,
+ "step": 212
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 0.00015965909090909092,
+ "loss": 2.0994,
+ "step": 213
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 0.000159469696969697,
+ "loss": 2.096,
+ "step": 214
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 0.00015928030303030303,
+ "loss": 2.0909,
+ "step": 215
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 0.0001590909090909091,
+ "loss": 2.118,
+ "step": 216
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 0.00015890151515151517,
+ "loss": 2.0783,
+ "step": 217
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 0.0001587121212121212,
+ "loss": 2.0876,
+ "step": 218
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 0.00015852272727272728,
+ "loss": 2.0581,
+ "step": 219
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 0.00015833333333333332,
+ "loss": 2.0548,
+ "step": 220
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 0.00015814393939393942,
+ "loss": 2.0595,
+ "step": 221
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 0.00015795454545454546,
+ "loss": 2.0719,
+ "step": 222
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 0.00015776515151515153,
+ "loss": 2.0903,
+ "step": 223
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 0.00015757575757575757,
+ "loss": 2.0941,
+ "step": 224
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 0.00015738636363636364,
+ "loss": 2.0926,
+ "step": 225
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 0.0001571969696969697,
+ "loss": 2.0816,
+ "step": 226
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 0.00015700757575757575,
+ "loss": 2.0894,
+ "step": 227
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 0.00015681818181818182,
+ "loss": 2.0798,
+ "step": 228
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 0.0001566287878787879,
+ "loss": 2.0672,
+ "step": 229
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 0.00015643939393939396,
+ "loss": 2.0787,
+ "step": 230
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 0.00015625,
+ "loss": 2.0611,
+ "step": 231
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 0.00015606060606060607,
+ "loss": 2.0805,
+ "step": 232
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 0.00015587121212121211,
+ "loss": 2.053,
+ "step": 233
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 0.00015568181818181818,
+ "loss": 2.0575,
+ "step": 234
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 0.00015549242424242425,
+ "loss": 2.0459,
+ "step": 235
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 0.0001553030303030303,
+ "loss": 2.0635,
+ "step": 236
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 0.00015511363636363636,
+ "loss": 2.0335,
+ "step": 237
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 0.00015492424242424243,
+ "loss": 2.0681,
+ "step": 238
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 0.0001547348484848485,
+ "loss": 2.0748,
+ "step": 239
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 0.00015454545454545454,
+ "loss": 2.1091,
+ "step": 240
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 0.0001543560606060606,
+ "loss": 2.0732,
+ "step": 241
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 0.00015416666666666668,
+ "loss": 2.0746,
+ "step": 242
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 0.00015397727272727272,
+ "loss": 2.0306,
+ "step": 243
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 0.0001537878787878788,
+ "loss": 2.0864,
+ "step": 244
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 0.00015359848484848484,
+ "loss": 2.0664,
+ "step": 245
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 0.00015340909090909093,
+ "loss": 2.0801,
+ "step": 246
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 0.00015321969696969697,
+ "loss": 2.0799,
+ "step": 247
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 0.00015303030303030304,
+ "loss": 2.0621,
+ "step": 248
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 0.00015284090909090909,
+ "loss": 2.0687,
+ "step": 249
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 0.00015265151515151515,
+ "loss": 2.018,
+ "step": 250
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 0.00015246212121212122,
+ "loss": 2.0256,
+ "step": 251
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 0.00015227272727272727,
+ "loss": 2.0736,
+ "step": 252
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 0.00015208333333333333,
+ "loss": 2.0609,
+ "step": 253
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 0.0001518939393939394,
+ "loss": 2.0539,
+ "step": 254
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 0.00015170454545454547,
+ "loss": 2.0282,
+ "step": 255
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 0.00015151515151515152,
+ "loss": 2.0417,
+ "step": 256
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 0.00015132575757575758,
+ "loss": 2.0333,
+ "step": 257
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 0.00015113636363636365,
+ "loss": 2.0428,
+ "step": 258
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 0.00015094696969696972,
+ "loss": 2.045,
+ "step": 259
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 0.00015075757575757576,
+ "loss": 2.0463,
+ "step": 260
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 0.0001505681818181818,
+ "loss": 2.0539,
+ "step": 261
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 0.0001503787878787879,
+ "loss": 2.0184,
+ "step": 262
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 0.00015018939393939394,
+ "loss": 2.0858,
+ "step": 263
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 0.00015000000000000001,
+ "loss": 2.0239,
+ "step": 264
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 0.00014981060606060606,
+ "loss": 2.0425,
+ "step": 265
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 0.00014962121212121213,
+ "loss": 2.0263,
+ "step": 266
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 0.0001494318181818182,
+ "loss": 2.042,
+ "step": 267
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 0.00014924242424242426,
+ "loss": 2.026,
+ "step": 268
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 0.0001490530303030303,
+ "loss": 2.0411,
+ "step": 269
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 0.00014886363636363635,
+ "loss": 2.028,
+ "step": 270
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 0.00014867424242424244,
+ "loss": 2.0172,
+ "step": 271
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 0.00014848484848484849,
+ "loss": 2.0196,
+ "step": 272
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 0.00014829545454545455,
+ "loss": 2.0142,
+ "step": 273
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 0.0001481060606060606,
+ "loss": 2.0265,
+ "step": 274
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 0.0001479166666666667,
+ "loss": 2.0353,
+ "step": 275
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 0.00014772727272727274,
+ "loss": 2.0327,
+ "step": 276
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 0.0001475378787878788,
+ "loss": 2.0188,
+ "step": 277
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 0.00014734848484848485,
+ "loss": 1.9987,
+ "step": 278
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 0.00014715909090909092,
+ "loss": 2.0141,
+ "step": 279
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 0.00014696969696969698,
+ "loss": 2.0403,
+ "step": 280
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 0.00014678030303030303,
+ "loss": 1.9977,
+ "step": 281
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 0.0001465909090909091,
+ "loss": 1.9674,
+ "step": 282
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 0.00014640151515151517,
+ "loss": 1.9984,
+ "step": 283
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 0.00014621212121212123,
+ "loss": 1.9796,
+ "step": 284
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 0.00014602272727272728,
+ "loss": 2.0139,
+ "step": 285
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 0.00014583333333333335,
+ "loss": 1.9866,
+ "step": 286
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 0.00014564393939393941,
+ "loss": 2.0208,
+ "step": 287
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 0.00014545454545454546,
+ "loss": 1.9844,
+ "step": 288
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 0.00014526515151515153,
+ "loss": 2.0082,
+ "step": 289
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 0.00014507575757575757,
+ "loss": 1.984,
+ "step": 290
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 0.00014488636363636366,
+ "loss": 2.0015,
+ "step": 291
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 0.0001446969696969697,
+ "loss": 2.0209,
+ "step": 292
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 0.00014450757575757578,
+ "loss": 1.9728,
+ "step": 293
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 0.00014431818181818182,
+ "loss": 2.0032,
+ "step": 294
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 0.00014412878787878789,
+ "loss": 1.9641,
+ "step": 295
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 0.00014393939393939396,
+ "loss": 1.9945,
+ "step": 296
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 0.00014375,
+ "loss": 1.9658,
+ "step": 297
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 0.00014356060606060607,
+ "loss": 1.9907,
+ "step": 298
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 0.0001433712121212121,
+ "loss": 1.9935,
+ "step": 299
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 0.0001431818181818182,
+ "loss": 1.9897,
+ "step": 300
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 0.00014299242424242425,
+ "loss": 1.984,
+ "step": 301
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 0.00014280303030303032,
+ "loss": 1.9581,
+ "step": 302
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 0.00014261363636363636,
+ "loss": 1.9893,
+ "step": 303
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 0.00014242424242424243,
+ "loss": 1.9568,
+ "step": 304
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 0.0001422348484848485,
+ "loss": 1.98,
+ "step": 305
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 0.00014204545454545454,
+ "loss": 1.9519,
+ "step": 306
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 0.0001418560606060606,
+ "loss": 1.9693,
+ "step": 307
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 0.00014166666666666668,
+ "loss": 1.9866,
+ "step": 308
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 0.00014147727272727275,
+ "loss": 1.9508,
+ "step": 309
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 0.0001412878787878788,
+ "loss": 1.9653,
+ "step": 310
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 0.00014109848484848486,
+ "loss": 1.9991,
+ "step": 311
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 0.00014090909090909093,
+ "loss": 1.9442,
+ "step": 312
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 0.00014071969696969697,
+ "loss": 1.9807,
+ "step": 313
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 0.00014053030303030304,
+ "loss": 1.9958,
+ "step": 314
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 0.00014034090909090908,
+ "loss": 1.9459,
+ "step": 315
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 0.00014015151515151518,
+ "loss": 1.9508,
+ "step": 316
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 0.00013996212121212122,
+ "loss": 1.9933,
+ "step": 317
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 0.0001397727272727273,
+ "loss": 1.9703,
+ "step": 318
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 0.00013958333333333333,
+ "loss": 1.965,
+ "step": 319
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 0.0001393939393939394,
+ "loss": 1.9264,
+ "step": 320
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 0.00013920454545454547,
+ "loss": 1.9688,
+ "step": 321
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 0.0001390151515151515,
+ "loss": 1.9901,
+ "step": 322
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 0.00013882575757575758,
+ "loss": 1.9363,
+ "step": 323
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 0.00013863636363636365,
+ "loss": 1.9269,
+ "step": 324
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 0.00013844696969696972,
+ "loss": 1.9688,
+ "step": 325
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 0.00013825757575757576,
+ "loss": 1.9758,
+ "step": 326
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 0.00013806818181818183,
+ "loss": 1.9414,
+ "step": 327
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 0.0001378787878787879,
+ "loss": 1.9397,
+ "step": 328
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 0.00013768939393939394,
+ "loss": 1.9032,
+ "step": 329
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 0.0001375,
+ "loss": 1.9777,
+ "step": 330
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 0.00013731060606060605,
+ "loss": 1.9173,
+ "step": 331
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 0.00013712121212121212,
+ "loss": 1.9307,
+ "step": 332
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 0.0001369318181818182,
+ "loss": 1.9611,
+ "step": 333
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 0.00013674242424242426,
+ "loss": 1.9698,
+ "step": 334
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 0.0001365530303030303,
+ "loss": 1.9619,
+ "step": 335
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 0.00013636363636363637,
+ "loss": 1.9322,
+ "step": 336
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 0.00013617424242424244,
+ "loss": 1.9441,
+ "step": 337
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 0.00013598484848484848,
+ "loss": 1.9563,
+ "step": 338
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 0.00013579545454545455,
+ "loss": 1.9283,
+ "step": 339
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 0.0001356060606060606,
+ "loss": 1.9508,
+ "step": 340
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 0.0001354166666666667,
+ "loss": 1.9285,
+ "step": 341
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 0.00013522727272727273,
+ "loss": 1.9295,
+ "step": 342
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 0.0001350378787878788,
+ "loss": 1.9272,
+ "step": 343
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 0.00013484848484848484,
+ "loss": 1.905,
+ "step": 344
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 0.00013465909090909094,
+ "loss": 1.9409,
+ "step": 345
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 0.00013446969696969698,
+ "loss": 1.9674,
+ "step": 346
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 0.00013428030303030302,
+ "loss": 1.9278,
+ "step": 347
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 0.0001340909090909091,
+ "loss": 1.9136,
+ "step": 348
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 0.00013390151515151516,
+ "loss": 1.9143,
+ "step": 349
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 0.00013371212121212123,
+ "loss": 1.9381,
+ "step": 350
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 0.00013352272727272727,
+ "loss": 1.9136,
+ "step": 351
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 0.00013333333333333334,
+ "loss": 1.9103,
+ "step": 352
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 0.0001331439393939394,
+ "loss": 1.9027,
+ "step": 353
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 0.00013295454545454548,
+ "loss": 1.8674,
+ "step": 354
+ },
+ {
+ "epoch": 1.01,
+ "learning_rate": 0.00013276515151515152,
+ "loss": 1.886,
+ "step": 355
+ },
+ {
+ "epoch": 1.01,
+ "learning_rate": 0.00013257575757575756,
+ "loss": 1.887,
+ "step": 356
+ },
+ {
+ "epoch": 1.01,
+ "learning_rate": 0.00013238636363636366,
+ "loss": 1.87,
+ "step": 357
+ },
+ {
+ "epoch": 1.02,
+ "learning_rate": 0.0001321969696969697,
+ "loss": 1.8715,
+ "step": 358
+ },
+ {
+ "epoch": 1.02,
+ "learning_rate": 0.00013200757575757577,
+ "loss": 1.8993,
+ "step": 359
+ },
+ {
+ "epoch": 1.02,
+ "learning_rate": 0.0001318181818181818,
+ "loss": 1.8844,
+ "step": 360
+ },
+ {
+ "epoch": 1.02,
+ "learning_rate": 0.0001316287878787879,
+ "loss": 1.8965,
+ "step": 361
+ },
+ {
+ "epoch": 1.03,
+ "learning_rate": 0.00013143939393939395,
+ "loss": 1.8956,
+ "step": 362
+ },
+ {
+ "epoch": 1.03,
+ "learning_rate": 0.00013125000000000002,
+ "loss": 1.869,
+ "step": 363
+ },
+ {
+ "epoch": 1.03,
+ "learning_rate": 0.00013106060606060606,
+ "loss": 1.8702,
+ "step": 364
+ },
+ {
+ "epoch": 1.04,
+ "learning_rate": 0.00013087121212121213,
+ "loss": 1.8962,
+ "step": 365
+ },
+ {
+ "epoch": 1.04,
+ "learning_rate": 0.0001306818181818182,
+ "loss": 1.8613,
+ "step": 366
+ },
+ {
+ "epoch": 1.04,
+ "learning_rate": 0.00013049242424242424,
+ "loss": 1.8845,
+ "step": 367
+ },
+ {
+ "epoch": 1.04,
+ "learning_rate": 0.0001303030303030303,
+ "loss": 1.8689,
+ "step": 368
+ },
+ {
+ "epoch": 1.05,
+ "learning_rate": 0.00013011363636363635,
+ "loss": 1.9059,
+ "step": 369
+ },
+ {
+ "epoch": 1.05,
+ "learning_rate": 0.00012992424242424245,
+ "loss": 1.9082,
+ "step": 370
+ },
+ {
+ "epoch": 1.05,
+ "learning_rate": 0.0001297348484848485,
+ "loss": 1.8918,
+ "step": 371
+ },
+ {
+ "epoch": 1.06,
+ "learning_rate": 0.00012954545454545456,
+ "loss": 1.8657,
+ "step": 372
+ },
+ {
+ "epoch": 1.06,
+ "learning_rate": 0.0001293560606060606,
+ "loss": 1.8909,
+ "step": 373
+ },
+ {
+ "epoch": 1.06,
+ "learning_rate": 0.00012916666666666667,
+ "loss": 1.8649,
+ "step": 374
+ },
+ {
+ "epoch": 1.06,
+ "learning_rate": 0.00012897727272727274,
+ "loss": 1.833,
+ "step": 375
+ },
+ {
+ "epoch": 1.07,
+ "learning_rate": 0.00012878787878787878,
+ "loss": 1.8815,
+ "step": 376
+ },
+ {
+ "epoch": 1.07,
+ "learning_rate": 0.00012859848484848485,
+ "loss": 1.8646,
+ "step": 377
+ },
+ {
+ "epoch": 1.07,
+ "learning_rate": 0.00012840909090909092,
+ "loss": 1.846,
+ "step": 378
+ },
+ {
+ "epoch": 1.08,
+ "learning_rate": 0.000128219696969697,
+ "loss": 1.8631,
+ "step": 379
+ },
+ {
+ "epoch": 1.08,
+ "learning_rate": 0.00012803030303030303,
+ "loss": 1.917,
+ "step": 380
+ },
+ {
+ "epoch": 1.08,
+ "learning_rate": 0.0001278409090909091,
+ "loss": 1.9068,
+ "step": 381
+ },
+ {
+ "epoch": 1.08,
+ "learning_rate": 0.00012765151515151517,
+ "loss": 1.8772,
+ "step": 382
+ },
+ {
+ "epoch": 1.09,
+ "learning_rate": 0.0001274621212121212,
+ "loss": 1.8414,
+ "step": 383
+ },
+ {
+ "epoch": 1.09,
+ "learning_rate": 0.00012727272727272728,
+ "loss": 1.9003,
+ "step": 384
+ },
+ {
+ "epoch": 1.09,
+ "learning_rate": 0.00012708333333333332,
+ "loss": 1.8415,
+ "step": 385
+ },
+ {
+ "epoch": 1.1,
+ "learning_rate": 0.00012689393939393942,
+ "loss": 1.8491,
+ "step": 386
+ },
+ {
+ "epoch": 1.1,
+ "learning_rate": 0.00012670454545454546,
+ "loss": 1.8875,
+ "step": 387
+ },
+ {
+ "epoch": 1.1,
+ "learning_rate": 0.00012651515151515153,
+ "loss": 1.8629,
+ "step": 388
+ },
+ {
+ "epoch": 1.1,
+ "learning_rate": 0.00012632575757575757,
+ "loss": 1.8378,
+ "step": 389
+ },
+ {
+ "epoch": 1.11,
+ "learning_rate": 0.00012613636363636364,
+ "loss": 1.8442,
+ "step": 390
+ },
+ {
+ "epoch": 1.11,
+ "learning_rate": 0.0001259469696969697,
+ "loss": 1.8587,
+ "step": 391
+ },
+ {
+ "epoch": 1.11,
+ "learning_rate": 0.00012575757575757575,
+ "loss": 1.8659,
+ "step": 392
+ },
+ {
+ "epoch": 1.11,
+ "learning_rate": 0.00012556818181818182,
+ "loss": 1.8271,
+ "step": 393
+ },
+ {
+ "epoch": 1.12,
+ "learning_rate": 0.0001253787878787879,
+ "loss": 1.8692,
+ "step": 394
+ },
+ {
+ "epoch": 1.12,
+ "learning_rate": 0.00012518939393939396,
+ "loss": 1.8071,
+ "step": 395
+ },
+ {
+ "epoch": 1.12,
+ "learning_rate": 0.000125,
+ "loss": 1.8564,
+ "step": 396
+ },
+ {
+ "epoch": 1.13,
+ "learning_rate": 0.00012481060606060607,
+ "loss": 1.8891,
+ "step": 397
+ },
+ {
+ "epoch": 1.13,
+ "learning_rate": 0.00012462121212121211,
+ "loss": 1.8173,
+ "step": 398
+ },
+ {
+ "epoch": 1.13,
+ "learning_rate": 0.00012443181818181818,
+ "loss": 1.8653,
+ "step": 399
+ },
+ {
+ "epoch": 1.13,
+ "learning_rate": 0.00012424242424242425,
+ "loss": 1.8843,
+ "step": 400
+ },
+ {
+ "epoch": 1.14,
+ "learning_rate": 0.0001240530303030303,
+ "loss": 1.8527,
+ "step": 401
+ },
+ {
+ "epoch": 1.14,
+ "learning_rate": 0.00012386363636363636,
+ "loss": 1.8352,
+ "step": 402
+ },
+ {
+ "epoch": 1.14,
+ "learning_rate": 0.00012367424242424243,
+ "loss": 1.866,
+ "step": 403
+ },
+ {
+ "epoch": 1.15,
+ "learning_rate": 0.0001234848484848485,
+ "loss": 1.8557,
+ "step": 404
+ },
+ {
+ "epoch": 1.15,
+ "learning_rate": 0.00012329545454545454,
+ "loss": 1.8284,
+ "step": 405
+ },
+ {
+ "epoch": 1.15,
+ "learning_rate": 0.0001231060606060606,
+ "loss": 1.8359,
+ "step": 406
+ },
+ {
+ "epoch": 1.15,
+ "learning_rate": 0.00012291666666666668,
+ "loss": 1.8437,
+ "step": 407
+ },
+ {
+ "epoch": 1.16,
+ "learning_rate": 0.00012272727272727272,
+ "loss": 1.8256,
+ "step": 408
+ },
+ {
+ "epoch": 1.16,
+ "learning_rate": 0.0001225378787878788,
+ "loss": 1.8297,
+ "step": 409
+ },
+ {
+ "epoch": 1.16,
+ "learning_rate": 0.00012234848484848484,
+ "loss": 1.8515,
+ "step": 410
+ },
+ {
+ "epoch": 1.17,
+ "learning_rate": 0.00012215909090909093,
+ "loss": 1.8198,
+ "step": 411
+ },
+ {
+ "epoch": 1.17,
+ "learning_rate": 0.00012196969696969697,
+ "loss": 1.7809,
+ "step": 412
+ },
+ {
+ "epoch": 1.17,
+ "learning_rate": 0.00012178030303030303,
+ "loss": 1.8438,
+ "step": 413
+ },
+ {
+ "epoch": 1.17,
+ "learning_rate": 0.00012159090909090908,
+ "loss": 1.8497,
+ "step": 414
+ },
+ {
+ "epoch": 1.18,
+ "learning_rate": 0.00012140151515151517,
+ "loss": 1.8463,
+ "step": 415
+ },
+ {
+ "epoch": 1.18,
+ "learning_rate": 0.00012121212121212122,
+ "loss": 1.7768,
+ "step": 416
+ },
+ {
+ "epoch": 1.18,
+ "learning_rate": 0.00012102272727272728,
+ "loss": 1.8561,
+ "step": 417
+ },
+ {
+ "epoch": 1.19,
+ "learning_rate": 0.00012083333333333333,
+ "loss": 1.863,
+ "step": 418
+ },
+ {
+ "epoch": 1.19,
+ "learning_rate": 0.0001206439393939394,
+ "loss": 1.8193,
+ "step": 419
+ },
+ {
+ "epoch": 1.19,
+ "learning_rate": 0.00012045454545454546,
+ "loss": 1.7732,
+ "step": 420
+ },
+ {
+ "epoch": 1.19,
+ "learning_rate": 0.00012026515151515151,
+ "loss": 1.7728,
+ "step": 421
+ },
+ {
+ "epoch": 1.2,
+ "learning_rate": 0.00012007575757575757,
+ "loss": 1.8113,
+ "step": 422
+ },
+ {
+ "epoch": 1.2,
+ "learning_rate": 0.00011988636363636365,
+ "loss": 1.7976,
+ "step": 423
+ },
+ {
+ "epoch": 1.2,
+ "learning_rate": 0.00011969696969696971,
+ "loss": 1.786,
+ "step": 424
+ },
+ {
+ "epoch": 1.21,
+ "learning_rate": 0.00011950757575757576,
+ "loss": 1.8019,
+ "step": 425
+ },
+ {
+ "epoch": 1.21,
+ "learning_rate": 0.00011931818181818182,
+ "loss": 1.786,
+ "step": 426
+ },
+ {
+ "epoch": 1.21,
+ "learning_rate": 0.00011912878787878789,
+ "loss": 1.8102,
+ "step": 427
+ },
+ {
+ "epoch": 1.21,
+ "learning_rate": 0.00011893939393939394,
+ "loss": 1.7828,
+ "step": 428
+ },
+ {
+ "epoch": 1.22,
+ "learning_rate": 0.00011875,
+ "loss": 1.8498,
+ "step": 429
+ },
+ {
+ "epoch": 1.22,
+ "learning_rate": 0.00011856060606060606,
+ "loss": 1.7983,
+ "step": 430
+ },
+ {
+ "epoch": 1.22,
+ "learning_rate": 0.00011837121212121211,
+ "loss": 1.7863,
+ "step": 431
+ },
+ {
+ "epoch": 1.23,
+ "learning_rate": 0.0001181818181818182,
+ "loss": 1.8171,
+ "step": 432
+ },
+ {
+ "epoch": 1.23,
+ "learning_rate": 0.00011799242424242425,
+ "loss": 1.8143,
+ "step": 433
+ },
+ {
+ "epoch": 1.23,
+ "learning_rate": 0.0001178030303030303,
+ "loss": 1.7815,
+ "step": 434
+ },
+ {
+ "epoch": 1.23,
+ "learning_rate": 0.00011761363636363636,
+ "loss": 1.7652,
+ "step": 435
+ },
+ {
+ "epoch": 1.24,
+ "learning_rate": 0.00011742424242424244,
+ "loss": 1.8242,
+ "step": 436
+ },
+ {
+ "epoch": 1.24,
+ "learning_rate": 0.00011723484848484849,
+ "loss": 1.7789,
+ "step": 437
+ },
+ {
+ "epoch": 1.24,
+ "learning_rate": 0.00011704545454545454,
+ "loss": 1.7549,
+ "step": 438
+ },
+ {
+ "epoch": 1.25,
+ "learning_rate": 0.0001168560606060606,
+ "loss": 1.7528,
+ "step": 439
+ },
+ {
+ "epoch": 1.25,
+ "learning_rate": 0.00011666666666666668,
+ "loss": 1.7443,
+ "step": 440
+ },
+ {
+ "epoch": 1.25,
+ "learning_rate": 0.00011647727272727273,
+ "loss": 1.7911,
+ "step": 441
+ },
+ {
+ "epoch": 1.25,
+ "learning_rate": 0.00011628787878787879,
+ "loss": 1.7848,
+ "step": 442
+ },
+ {
+ "epoch": 1.26,
+ "learning_rate": 0.00011609848484848485,
+ "loss": 1.8137,
+ "step": 443
+ },
+ {
+ "epoch": 1.26,
+ "learning_rate": 0.00011590909090909093,
+ "loss": 1.791,
+ "step": 444
+ },
+ {
+ "epoch": 1.26,
+ "learning_rate": 0.00011571969696969698,
+ "loss": 1.7921,
+ "step": 445
+ },
+ {
+ "epoch": 1.27,
+ "learning_rate": 0.00011553030303030304,
+ "loss": 1.772,
+ "step": 446
+ },
+ {
+ "epoch": 1.27,
+ "learning_rate": 0.00011534090909090908,
+ "loss": 1.776,
+ "step": 447
+ },
+ {
+ "epoch": 1.27,
+ "learning_rate": 0.00011515151515151516,
+ "loss": 1.7948,
+ "step": 448
+ },
+ {
+ "epoch": 1.27,
+ "learning_rate": 0.00011496212121212122,
+ "loss": 1.8187,
+ "step": 449
+ },
+ {
+ "epoch": 1.28,
+ "learning_rate": 0.00011477272727272728,
+ "loss": 1.7436,
+ "step": 450
+ },
+ {
+ "epoch": 1.28,
+ "learning_rate": 0.00011458333333333333,
+ "loss": 1.7326,
+ "step": 451
+ },
+ {
+ "epoch": 1.28,
+ "learning_rate": 0.00011439393939393941,
+ "loss": 1.8005,
+ "step": 452
+ },
+ {
+ "epoch": 1.29,
+ "learning_rate": 0.00011420454545454547,
+ "loss": 1.8088,
+ "step": 453
+ },
+ {
+ "epoch": 1.29,
+ "learning_rate": 0.00011401515151515153,
+ "loss": 1.7632,
+ "step": 454
+ },
+ {
+ "epoch": 1.29,
+ "learning_rate": 0.00011382575757575758,
+ "loss": 1.7848,
+ "step": 455
+ },
+ {
+ "epoch": 1.29,
+ "learning_rate": 0.00011363636363636365,
+ "loss": 1.7756,
+ "step": 456
+ },
+ {
+ "epoch": 1.3,
+ "learning_rate": 0.0001134469696969697,
+ "loss": 1.7964,
+ "step": 457
+ },
+ {
+ "epoch": 1.3,
+ "learning_rate": 0.00011325757575757576,
+ "loss": 1.7604,
+ "step": 458
+ },
+ {
+ "epoch": 1.3,
+ "learning_rate": 0.00011306818181818182,
+ "loss": 1.7914,
+ "step": 459
+ },
+ {
+ "epoch": 1.3,
+ "learning_rate": 0.0001128787878787879,
+ "loss": 1.8059,
+ "step": 460
+ },
+ {
+ "epoch": 1.31,
+ "learning_rate": 0.00011268939393939395,
+ "loss": 1.7647,
+ "step": 461
+ },
+ {
+ "epoch": 1.31,
+ "learning_rate": 0.00011250000000000001,
+ "loss": 1.7526,
+ "step": 462
+ },
+ {
+ "epoch": 1.31,
+ "learning_rate": 0.00011231060606060607,
+ "loss": 1.7736,
+ "step": 463
+ },
+ {
+ "epoch": 1.32,
+ "learning_rate": 0.00011212121212121212,
+ "loss": 1.7449,
+ "step": 464
+ },
+ {
+ "epoch": 1.32,
+ "learning_rate": 0.00011193181818181819,
+ "loss": 1.7636,
+ "step": 465
+ },
+ {
+ "epoch": 1.32,
+ "learning_rate": 0.00011174242424242425,
+ "loss": 1.7846,
+ "step": 466
+ },
+ {
+ "epoch": 1.32,
+ "learning_rate": 0.0001115530303030303,
+ "loss": 1.78,
+ "step": 467
+ },
+ {
+ "epoch": 1.33,
+ "learning_rate": 0.00011136363636363636,
+ "loss": 1.7828,
+ "step": 468
+ },
+ {
+ "epoch": 1.33,
+ "learning_rate": 0.00011117424242424244,
+ "loss": 1.729,
+ "step": 469
+ },
+ {
+ "epoch": 1.33,
+ "learning_rate": 0.0001109848484848485,
+ "loss": 1.7145,
+ "step": 470
+ },
+ {
+ "epoch": 1.34,
+ "learning_rate": 0.00011079545454545455,
+ "loss": 1.7189,
+ "step": 471
+ },
+ {
+ "epoch": 1.34,
+ "learning_rate": 0.00011060606060606061,
+ "loss": 1.7628,
+ "step": 472
+ },
+ {
+ "epoch": 1.34,
+ "learning_rate": 0.00011041666666666668,
+ "loss": 1.7399,
+ "step": 473
+ },
+ {
+ "epoch": 1.34,
+ "learning_rate": 0.00011022727272727273,
+ "loss": 1.7561,
+ "step": 474
+ },
+ {
+ "epoch": 1.35,
+ "learning_rate": 0.00011003787878787879,
+ "loss": 1.7979,
+ "step": 475
+ },
+ {
+ "epoch": 1.35,
+ "learning_rate": 0.00010984848484848484,
+ "loss": 1.7673,
+ "step": 476
+ },
+ {
+ "epoch": 1.35,
+ "learning_rate": 0.00010965909090909093,
+ "loss": 1.777,
+ "step": 477
+ },
+ {
+ "epoch": 1.36,
+ "learning_rate": 0.00010946969696969698,
+ "loss": 1.7042,
+ "step": 478
+ },
+ {
+ "epoch": 1.36,
+ "learning_rate": 0.00010928030303030304,
+ "loss": 1.7764,
+ "step": 479
+ },
+ {
+ "epoch": 1.36,
+ "learning_rate": 0.00010909090909090909,
+ "loss": 1.6993,
+ "step": 480
+ },
+ {
+ "epoch": 1.36,
+ "learning_rate": 0.00010890151515151516,
+ "loss": 1.7688,
+ "step": 481
+ },
+ {
+ "epoch": 1.37,
+ "learning_rate": 0.00010871212121212122,
+ "loss": 1.7428,
+ "step": 482
+ },
+ {
+ "epoch": 1.37,
+ "learning_rate": 0.00010852272727272727,
+ "loss": 1.675,
+ "step": 483
+ },
+ {
+ "epoch": 1.37,
+ "learning_rate": 0.00010833333333333333,
+ "loss": 1.7183,
+ "step": 484
+ },
+ {
+ "epoch": 1.38,
+ "learning_rate": 0.00010814393939393941,
+ "loss": 1.7305,
+ "step": 485
+ },
+ {
+ "epoch": 1.38,
+ "learning_rate": 0.00010795454545454547,
+ "loss": 1.7541,
+ "step": 486
+ },
+ {
+ "epoch": 1.38,
+ "learning_rate": 0.00010776515151515152,
+ "loss": 1.7074,
+ "step": 487
+ },
+ {
+ "epoch": 1.38,
+ "learning_rate": 0.00010757575757575758,
+ "loss": 1.7093,
+ "step": 488
+ },
+ {
+ "epoch": 1.39,
+ "learning_rate": 0.00010738636363636365,
+ "loss": 1.7354,
+ "step": 489
+ },
+ {
+ "epoch": 1.39,
+ "learning_rate": 0.0001071969696969697,
+ "loss": 1.7415,
+ "step": 490
+ },
+ {
+ "epoch": 1.39,
+ "learning_rate": 0.00010700757575757576,
+ "loss": 1.72,
+ "step": 491
+ },
+ {
+ "epoch": 1.4,
+ "learning_rate": 0.00010681818181818181,
+ "loss": 1.7453,
+ "step": 492
+ },
+ {
+ "epoch": 1.4,
+ "learning_rate": 0.0001066287878787879,
+ "loss": 1.7077,
+ "step": 493
+ },
+ {
+ "epoch": 1.4,
+ "learning_rate": 0.00010643939393939395,
+ "loss": 1.6936,
+ "step": 494
+ },
+ {
+ "epoch": 1.4,
+ "learning_rate": 0.00010625000000000001,
+ "loss": 1.7616,
+ "step": 495
+ },
+ {
+ "epoch": 1.41,
+ "learning_rate": 0.00010606060606060606,
+ "loss": 1.7749,
+ "step": 496
+ },
+ {
+ "epoch": 1.41,
+ "learning_rate": 0.00010587121212121212,
+ "loss": 1.7375,
+ "step": 497
+ },
+ {
+ "epoch": 1.41,
+ "learning_rate": 0.00010568181818181819,
+ "loss": 1.7203,
+ "step": 498
+ },
+ {
+ "epoch": 1.42,
+ "learning_rate": 0.00010549242424242424,
+ "loss": 1.7148,
+ "step": 499
+ },
+ {
+ "epoch": 1.42,
+ "learning_rate": 0.0001053030303030303,
+ "loss": 1.7859,
+ "step": 500
+ },
+ {
+ "epoch": 1.42,
+ "learning_rate": 0.00010511363636363635,
+ "loss": 1.7478,
+ "step": 501
+ },
+ {
+ "epoch": 1.42,
+ "learning_rate": 0.00010492424242424244,
+ "loss": 1.7091,
+ "step": 502
+ },
+ {
+ "epoch": 1.43,
+ "learning_rate": 0.00010473484848484849,
+ "loss": 1.7112,
+ "step": 503
+ },
+ {
+ "epoch": 1.43,
+ "learning_rate": 0.00010454545454545455,
+ "loss": 1.6967,
+ "step": 504
+ },
+ {
+ "epoch": 1.43,
+ "learning_rate": 0.0001043560606060606,
+ "loss": 1.7431,
+ "step": 505
+ },
+ {
+ "epoch": 1.44,
+ "learning_rate": 0.00010416666666666667,
+ "loss": 1.7065,
+ "step": 506
+ },
+ {
+ "epoch": 1.44,
+ "learning_rate": 0.00010397727272727273,
+ "loss": 1.6955,
+ "step": 507
+ },
+ {
+ "epoch": 1.44,
+ "learning_rate": 0.00010378787878787878,
+ "loss": 1.7375,
+ "step": 508
+ },
+ {
+ "epoch": 1.44,
+ "learning_rate": 0.00010359848484848484,
+ "loss": 1.7056,
+ "step": 509
+ },
+ {
+ "epoch": 1.45,
+ "learning_rate": 0.00010340909090909092,
+ "loss": 1.7044,
+ "step": 510
+ },
+ {
+ "epoch": 1.45,
+ "learning_rate": 0.00010321969696969698,
+ "loss": 1.7204,
+ "step": 511
+ },
+ {
+ "epoch": 1.45,
+ "learning_rate": 0.00010303030303030303,
+ "loss": 1.6801,
+ "step": 512
+ },
+ {
+ "epoch": 1.46,
+ "learning_rate": 0.00010284090909090909,
+ "loss": 1.7381,
+ "step": 513
+ },
+ {
+ "epoch": 1.46,
+ "learning_rate": 0.00010265151515151516,
+ "loss": 1.7064,
+ "step": 514
+ },
+ {
+ "epoch": 1.46,
+ "learning_rate": 0.00010246212121212121,
+ "loss": 1.6973,
+ "step": 515
+ },
+ {
+ "epoch": 1.46,
+ "learning_rate": 0.00010227272727272727,
+ "loss": 1.7295,
+ "step": 516
+ },
+ {
+ "epoch": 1.47,
+ "learning_rate": 0.00010208333333333333,
+ "loss": 1.6991,
+ "step": 517
+ },
+ {
+ "epoch": 1.47,
+ "learning_rate": 0.00010189393939393941,
+ "loss": 1.6986,
+ "step": 518
+ },
+ {
+ "epoch": 1.47,
+ "learning_rate": 0.00010170454545454546,
+ "loss": 1.6989,
+ "step": 519
+ },
+ {
+ "epoch": 1.48,
+ "learning_rate": 0.00010151515151515152,
+ "loss": 1.7009,
+ "step": 520
+ },
+ {
+ "epoch": 1.48,
+ "learning_rate": 0.00010132575757575757,
+ "loss": 1.6919,
+ "step": 521
+ },
+ {
+ "epoch": 1.48,
+ "learning_rate": 0.00010113636363636366,
+ "loss": 1.6955,
+ "step": 522
+ },
+ {
+ "epoch": 1.48,
+ "learning_rate": 0.00010094696969696971,
+ "loss": 1.7177,
+ "step": 523
+ },
+ {
+ "epoch": 1.49,
+ "learning_rate": 0.00010075757575757576,
+ "loss": 1.715,
+ "step": 524
+ },
+ {
+ "epoch": 1.49,
+ "learning_rate": 0.00010056818181818181,
+ "loss": 1.6686,
+ "step": 525
+ },
+ {
+ "epoch": 1.49,
+ "learning_rate": 0.0001003787878787879,
+ "loss": 1.771,
+ "step": 526
+ },
+ {
+ "epoch": 1.5,
+ "learning_rate": 0.00010018939393939395,
+ "loss": 1.7024,
+ "step": 527
+ },
+ {
+ "epoch": 1.5,
+ "learning_rate": 0.0001,
+ "loss": 1.7016,
+ "step": 528
+ },
+ {
+ "epoch": 1.5,
+ "learning_rate": 9.981060606060606e-05,
+ "loss": 1.6501,
+ "step": 529
+ },
+ {
+ "epoch": 1.5,
+ "learning_rate": 9.962121212121213e-05,
+ "loss": 1.6903,
+ "step": 530
+ },
+ {
+ "epoch": 1.51,
+ "learning_rate": 9.943181818181819e-05,
+ "loss": 1.6806,
+ "step": 531
+ },
+ {
+ "epoch": 1.51,
+ "learning_rate": 9.924242424242425e-05,
+ "loss": 1.7096,
+ "step": 532
+ },
+ {
+ "epoch": 1.51,
+ "learning_rate": 9.90530303030303e-05,
+ "loss": 1.7307,
+ "step": 533
+ },
+ {
+ "epoch": 1.51,
+ "learning_rate": 9.886363636363637e-05,
+ "loss": 1.6871,
+ "step": 534
+ },
+ {
+ "epoch": 1.52,
+ "learning_rate": 9.867424242424242e-05,
+ "loss": 1.7457,
+ "step": 535
+ },
+ {
+ "epoch": 1.52,
+ "learning_rate": 9.848484848484849e-05,
+ "loss": 1.6867,
+ "step": 536
+ },
+ {
+ "epoch": 1.52,
+ "learning_rate": 9.829545454545455e-05,
+ "loss": 1.6789,
+ "step": 537
+ },
+ {
+ "epoch": 1.53,
+ "learning_rate": 9.810606060606061e-05,
+ "loss": 1.6403,
+ "step": 538
+ },
+ {
+ "epoch": 1.53,
+ "learning_rate": 9.791666666666667e-05,
+ "loss": 1.6697,
+ "step": 539
+ },
+ {
+ "epoch": 1.53,
+ "learning_rate": 9.772727272727274e-05,
+ "loss": 1.7293,
+ "step": 540
+ },
+ {
+ "epoch": 1.53,
+ "learning_rate": 9.75378787878788e-05,
+ "loss": 1.6998,
+ "step": 541
+ },
+ {
+ "epoch": 1.54,
+ "learning_rate": 9.734848484848485e-05,
+ "loss": 1.693,
+ "step": 542
+ },
+ {
+ "epoch": 1.54,
+ "learning_rate": 9.71590909090909e-05,
+ "loss": 1.664,
+ "step": 543
+ },
+ {
+ "epoch": 1.54,
+ "learning_rate": 9.696969696969698e-05,
+ "loss": 1.7061,
+ "step": 544
+ },
+ {
+ "epoch": 1.55,
+ "learning_rate": 9.678030303030303e-05,
+ "loss": 1.6631,
+ "step": 545
+ },
+ {
+ "epoch": 1.55,
+ "learning_rate": 9.65909090909091e-05,
+ "loss": 1.6343,
+ "step": 546
+ },
+ {
+ "epoch": 1.55,
+ "learning_rate": 9.640151515151516e-05,
+ "loss": 1.6939,
+ "step": 547
+ },
+ {
+ "epoch": 1.55,
+ "learning_rate": 9.621212121212123e-05,
+ "loss": 1.669,
+ "step": 548
+ },
+ {
+ "epoch": 1.56,
+ "learning_rate": 9.602272727272728e-05,
+ "loss": 1.6561,
+ "step": 549
+ },
+ {
+ "epoch": 1.56,
+ "learning_rate": 9.583333333333334e-05,
+ "loss": 1.6675,
+ "step": 550
+ },
+ {
+ "epoch": 1.56,
+ "learning_rate": 9.564393939393939e-05,
+ "loss": 1.7109,
+ "step": 551
+ },
+ {
+ "epoch": 1.57,
+ "learning_rate": 9.545454545454546e-05,
+ "loss": 1.693,
+ "step": 552
+ },
+ {
+ "epoch": 1.57,
+ "learning_rate": 9.526515151515152e-05,
+ "loss": 1.6557,
+ "step": 553
+ },
+ {
+ "epoch": 1.57,
+ "learning_rate": 9.507575757575759e-05,
+ "loss": 1.6642,
+ "step": 554
+ },
+ {
+ "epoch": 1.57,
+ "learning_rate": 9.488636363636364e-05,
+ "loss": 1.6674,
+ "step": 555
+ },
+ {
+ "epoch": 1.58,
+ "learning_rate": 9.469696969696971e-05,
+ "loss": 1.6492,
+ "step": 556
+ },
+ {
+ "epoch": 1.58,
+ "learning_rate": 9.450757575757577e-05,
+ "loss": 1.6915,
+ "step": 557
+ },
+ {
+ "epoch": 1.58,
+ "learning_rate": 9.431818181818182e-05,
+ "loss": 1.7028,
+ "step": 558
+ },
+ {
+ "epoch": 1.59,
+ "learning_rate": 9.412878787878788e-05,
+ "loss": 1.6749,
+ "step": 559
+ },
+ {
+ "epoch": 1.59,
+ "learning_rate": 9.393939393939395e-05,
+ "loss": 1.6526,
+ "step": 560
+ },
+ {
+ "epoch": 1.59,
+ "learning_rate": 9.375e-05,
+ "loss": 1.687,
+ "step": 561
+ },
+ {
+ "epoch": 1.59,
+ "learning_rate": 9.356060606060606e-05,
+ "loss": 1.6632,
+ "step": 562
+ },
+ {
+ "epoch": 1.6,
+ "learning_rate": 9.337121212121213e-05,
+ "loss": 1.7074,
+ "step": 563
+ },
+ {
+ "epoch": 1.6,
+ "learning_rate": 9.318181818181818e-05,
+ "loss": 1.6164,
+ "step": 564
+ },
+ {
+ "epoch": 1.6,
+ "learning_rate": 9.299242424242425e-05,
+ "loss": 1.6594,
+ "step": 565
+ },
+ {
+ "epoch": 1.61,
+ "learning_rate": 9.280303030303031e-05,
+ "loss": 1.6603,
+ "step": 566
+ },
+ {
+ "epoch": 1.61,
+ "learning_rate": 9.261363636363636e-05,
+ "loss": 1.6213,
+ "step": 567
+ },
+ {
+ "epoch": 1.61,
+ "learning_rate": 9.242424242424242e-05,
+ "loss": 1.6899,
+ "step": 568
+ },
+ {
+ "epoch": 1.61,
+ "learning_rate": 9.223484848484849e-05,
+ "loss": 1.6619,
+ "step": 569
+ },
+ {
+ "epoch": 1.62,
+ "learning_rate": 9.204545454545454e-05,
+ "loss": 1.7035,
+ "step": 570
+ },
+ {
+ "epoch": 1.62,
+ "learning_rate": 9.185606060606061e-05,
+ "loss": 1.6408,
+ "step": 571
+ },
+ {
+ "epoch": 1.62,
+ "learning_rate": 9.166666666666667e-05,
+ "loss": 1.6506,
+ "step": 572
+ },
+ {
+ "epoch": 1.63,
+ "learning_rate": 9.147727272727274e-05,
+ "loss": 1.658,
+ "step": 573
+ },
+ {
+ "epoch": 1.63,
+ "learning_rate": 9.128787878787879e-05,
+ "loss": 1.6005,
+ "step": 574
+ },
+ {
+ "epoch": 1.63,
+ "learning_rate": 9.109848484848486e-05,
+ "loss": 1.6821,
+ "step": 575
+ },
+ {
+ "epoch": 1.63,
+ "learning_rate": 9.090909090909092e-05,
+ "loss": 1.6858,
+ "step": 576
+ },
+ {
+ "epoch": 1.64,
+ "learning_rate": 9.071969696969697e-05,
+ "loss": 1.6933,
+ "step": 577
+ },
+ {
+ "epoch": 1.64,
+ "learning_rate": 9.053030303030303e-05,
+ "loss": 1.6757,
+ "step": 578
+ },
+ {
+ "epoch": 1.64,
+ "learning_rate": 9.03409090909091e-05,
+ "loss": 1.6107,
+ "step": 579
+ },
+ {
+ "epoch": 1.65,
+ "learning_rate": 9.015151515151515e-05,
+ "loss": 1.5751,
+ "step": 580
+ },
+ {
+ "epoch": 1.65,
+ "learning_rate": 8.996212121212122e-05,
+ "loss": 1.6168,
+ "step": 581
+ },
+ {
+ "epoch": 1.65,
+ "learning_rate": 8.977272727272728e-05,
+ "loss": 1.6213,
+ "step": 582
+ },
+ {
+ "epoch": 1.65,
+ "learning_rate": 8.958333333333335e-05,
+ "loss": 1.6243,
+ "step": 583
+ },
+ {
+ "epoch": 1.66,
+ "learning_rate": 8.93939393939394e-05,
+ "loss": 1.6249,
+ "step": 584
+ },
+ {
+ "epoch": 1.66,
+ "learning_rate": 8.920454545454546e-05,
+ "loss": 1.6529,
+ "step": 585
+ },
+ {
+ "epoch": 1.66,
+ "learning_rate": 8.901515151515151e-05,
+ "loss": 1.626,
+ "step": 586
+ },
+ {
+ "epoch": 1.67,
+ "learning_rate": 8.882575757575758e-05,
+ "loss": 1.6616,
+ "step": 587
+ },
+ {
+ "epoch": 1.67,
+ "learning_rate": 8.863636363636364e-05,
+ "loss": 1.6622,
+ "step": 588
+ },
+ {
+ "epoch": 1.67,
+ "learning_rate": 8.844696969696971e-05,
+ "loss": 1.5927,
+ "step": 589
+ },
+ {
+ "epoch": 1.67,
+ "learning_rate": 8.825757575757576e-05,
+ "loss": 1.6351,
+ "step": 590
+ },
+ {
+ "epoch": 1.68,
+ "learning_rate": 8.806818181818183e-05,
+ "loss": 1.6213,
+ "step": 591
+ },
+ {
+ "epoch": 1.68,
+ "learning_rate": 8.787878787878789e-05,
+ "loss": 1.635,
+ "step": 592
+ },
+ {
+ "epoch": 1.68,
+ "learning_rate": 8.768939393939394e-05,
+ "loss": 1.6406,
+ "step": 593
+ },
+ {
+ "epoch": 1.69,
+ "learning_rate": 8.75e-05,
+ "loss": 1.6387,
+ "step": 594
+ },
+ {
+ "epoch": 1.69,
+ "learning_rate": 8.731060606060605e-05,
+ "loss": 1.602,
+ "step": 595
+ },
+ {
+ "epoch": 1.69,
+ "learning_rate": 8.712121212121212e-05,
+ "loss": 1.601,
+ "step": 596
+ },
+ {
+ "epoch": 1.69,
+ "learning_rate": 8.693181818181818e-05,
+ "loss": 1.5855,
+ "step": 597
+ },
+ {
+ "epoch": 1.7,
+ "learning_rate": 8.674242424242425e-05,
+ "loss": 1.6236,
+ "step": 598
+ },
+ {
+ "epoch": 1.7,
+ "learning_rate": 8.65530303030303e-05,
+ "loss": 1.5999,
+ "step": 599
+ },
+ {
+ "epoch": 1.7,
+ "learning_rate": 8.636363636363637e-05,
+ "loss": 1.6093,
+ "step": 600
+ },
+ {
+ "epoch": 1.7,
+ "learning_rate": 8.617424242424243e-05,
+ "loss": 1.6602,
+ "step": 601
+ },
+ {
+ "epoch": 1.71,
+ "learning_rate": 8.598484848484848e-05,
+ "loss": 1.599,
+ "step": 602
+ },
+ {
+ "epoch": 1.71,
+ "learning_rate": 8.579545454545454e-05,
+ "loss": 1.6056,
+ "step": 603
+ },
+ {
+ "epoch": 1.71,
+ "learning_rate": 8.560606060606061e-05,
+ "loss": 1.6377,
+ "step": 604
+ },
+ {
+ "epoch": 1.72,
+ "learning_rate": 8.541666666666666e-05,
+ "loss": 1.5769,
+ "step": 605
+ },
+ {
+ "epoch": 1.72,
+ "learning_rate": 8.522727272727273e-05,
+ "loss": 1.6219,
+ "step": 606
+ },
+ {
+ "epoch": 1.72,
+ "learning_rate": 8.503787878787879e-05,
+ "loss": 1.5917,
+ "step": 607
+ },
+ {
+ "epoch": 1.72,
+ "learning_rate": 8.484848484848486e-05,
+ "loss": 1.6019,
+ "step": 608
+ },
+ {
+ "epoch": 1.73,
+ "learning_rate": 8.465909090909091e-05,
+ "loss": 1.6316,
+ "step": 609
+ },
+ {
+ "epoch": 1.73,
+ "learning_rate": 8.446969696969697e-05,
+ "loss": 1.6327,
+ "step": 610
+ },
+ {
+ "epoch": 1.73,
+ "learning_rate": 8.428030303030303e-05,
+ "loss": 1.6023,
+ "step": 611
+ },
+ {
+ "epoch": 1.74,
+ "learning_rate": 8.40909090909091e-05,
+ "loss": 1.6087,
+ "step": 612
+ },
+ {
+ "epoch": 1.74,
+ "learning_rate": 8.390151515151515e-05,
+ "loss": 1.6245,
+ "step": 613
+ },
+ {
+ "epoch": 1.74,
+ "learning_rate": 8.371212121212122e-05,
+ "loss": 1.5957,
+ "step": 614
+ },
+ {
+ "epoch": 1.74,
+ "learning_rate": 8.352272727272727e-05,
+ "loss": 1.6196,
+ "step": 615
+ },
+ {
+ "epoch": 1.75,
+ "learning_rate": 8.333333333333334e-05,
+ "loss": 1.6364,
+ "step": 616
+ },
+ {
+ "epoch": 1.75,
+ "learning_rate": 8.31439393939394e-05,
+ "loss": 1.5977,
+ "step": 617
+ },
+ {
+ "epoch": 1.75,
+ "learning_rate": 8.295454545454547e-05,
+ "loss": 1.6018,
+ "step": 618
+ },
+ {
+ "epoch": 1.76,
+ "learning_rate": 8.276515151515152e-05,
+ "loss": 1.5973,
+ "step": 619
+ },
+ {
+ "epoch": 1.76,
+ "learning_rate": 8.257575757575758e-05,
+ "loss": 1.6216,
+ "step": 620
+ },
+ {
+ "epoch": 1.76,
+ "learning_rate": 8.238636363636364e-05,
+ "loss": 1.6422,
+ "step": 621
+ },
+ {
+ "epoch": 1.76,
+ "learning_rate": 8.21969696969697e-05,
+ "loss": 1.6401,
+ "step": 622
+ },
+ {
+ "epoch": 1.77,
+ "learning_rate": 8.200757575757576e-05,
+ "loss": 1.6446,
+ "step": 623
+ },
+ {
+ "epoch": 1.77,
+ "learning_rate": 8.181818181818183e-05,
+ "loss": 1.5791,
+ "step": 624
+ },
+ {
+ "epoch": 1.77,
+ "learning_rate": 8.162878787878789e-05,
+ "loss": 1.5953,
+ "step": 625
+ },
+ {
+ "epoch": 1.78,
+ "learning_rate": 8.143939393939395e-05,
+ "loss": 1.5941,
+ "step": 626
+ },
+ {
+ "epoch": 1.78,
+ "learning_rate": 8.125000000000001e-05,
+ "loss": 1.5784,
+ "step": 627
+ },
+ {
+ "epoch": 1.78,
+ "learning_rate": 8.106060606060607e-05,
+ "loss": 1.6024,
+ "step": 628
+ },
+ {
+ "epoch": 1.78,
+ "learning_rate": 8.087121212121212e-05,
+ "loss": 1.6295,
+ "step": 629
+ },
+ {
+ "epoch": 1.79,
+ "learning_rate": 8.068181818181818e-05,
+ "loss": 1.5905,
+ "step": 630
+ },
+ {
+ "epoch": 1.79,
+ "learning_rate": 8.049242424242425e-05,
+ "loss": 1.6073,
+ "step": 631
+ },
+ {
+ "epoch": 1.79,
+ "learning_rate": 8.03030303030303e-05,
+ "loss": 1.6104,
+ "step": 632
+ },
+ {
+ "epoch": 1.8,
+ "learning_rate": 8.011363636363637e-05,
+ "loss": 1.6134,
+ "step": 633
+ },
+ {
+ "epoch": 1.8,
+ "learning_rate": 7.992424242424243e-05,
+ "loss": 1.6569,
+ "step": 634
+ },
+ {
+ "epoch": 1.8,
+ "learning_rate": 7.97348484848485e-05,
+ "loss": 1.5493,
+ "step": 635
+ },
+ {
+ "epoch": 1.8,
+ "learning_rate": 7.954545454545455e-05,
+ "loss": 1.5767,
+ "step": 636
+ },
+ {
+ "epoch": 1.81,
+ "learning_rate": 7.93560606060606e-05,
+ "loss": 1.5692,
+ "step": 637
+ },
+ {
+ "epoch": 1.81,
+ "learning_rate": 7.916666666666666e-05,
+ "loss": 1.6116,
+ "step": 638
+ },
+ {
+ "epoch": 1.81,
+ "learning_rate": 7.897727272727273e-05,
+ "loss": 1.5684,
+ "step": 639
+ },
+ {
+ "epoch": 1.82,
+ "learning_rate": 7.878787878787879e-05,
+ "loss": 1.6177,
+ "step": 640
+ },
+ {
+ "epoch": 1.82,
+ "learning_rate": 7.859848484848486e-05,
+ "loss": 1.6151,
+ "step": 641
+ },
+ {
+ "epoch": 1.82,
+ "learning_rate": 7.840909090909091e-05,
+ "loss": 1.6293,
+ "step": 642
+ },
+ {
+ "epoch": 1.82,
+ "learning_rate": 7.821969696969698e-05,
+ "loss": 1.6298,
+ "step": 643
+ },
+ {
+ "epoch": 1.83,
+ "learning_rate": 7.803030303030304e-05,
+ "loss": 1.6073,
+ "step": 644
+ },
+ {
+ "epoch": 1.83,
+ "learning_rate": 7.784090909090909e-05,
+ "loss": 1.5328,
+ "step": 645
+ },
+ {
+ "epoch": 1.83,
+ "learning_rate": 7.765151515151515e-05,
+ "loss": 1.5895,
+ "step": 646
+ },
+ {
+ "epoch": 1.84,
+ "learning_rate": 7.746212121212122e-05,
+ "loss": 1.5728,
+ "step": 647
+ },
+ {
+ "epoch": 1.84,
+ "learning_rate": 7.727272727272727e-05,
+ "loss": 1.5449,
+ "step": 648
+ },
+ {
+ "epoch": 1.84,
+ "learning_rate": 7.708333333333334e-05,
+ "loss": 1.5731,
+ "step": 649
+ },
+ {
+ "epoch": 1.84,
+ "learning_rate": 7.68939393939394e-05,
+ "loss": 1.627,
+ "step": 650
+ },
+ {
+ "epoch": 1.85,
+ "learning_rate": 7.670454545454547e-05,
+ "loss": 1.6139,
+ "step": 651
+ },
+ {
+ "epoch": 1.85,
+ "learning_rate": 7.651515151515152e-05,
+ "loss": 1.5613,
+ "step": 652
+ },
+ {
+ "epoch": 1.85,
+ "learning_rate": 7.632575757575758e-05,
+ "loss": 1.5734,
+ "step": 653
+ },
+ {
+ "epoch": 1.86,
+ "learning_rate": 7.613636363636363e-05,
+ "loss": 1.5537,
+ "step": 654
+ },
+ {
+ "epoch": 1.86,
+ "learning_rate": 7.59469696969697e-05,
+ "loss": 1.5886,
+ "step": 655
+ },
+ {
+ "epoch": 1.86,
+ "learning_rate": 7.575757575757576e-05,
+ "loss": 1.5504,
+ "step": 656
+ },
+ {
+ "epoch": 1.86,
+ "learning_rate": 7.556818181818183e-05,
+ "loss": 1.5613,
+ "step": 657
+ },
+ {
+ "epoch": 1.87,
+ "learning_rate": 7.537878787878788e-05,
+ "loss": 1.5877,
+ "step": 658
+ },
+ {
+ "epoch": 1.87,
+ "learning_rate": 7.518939393939395e-05,
+ "loss": 1.605,
+ "step": 659
+ },
+ {
+ "epoch": 1.87,
+ "learning_rate": 7.500000000000001e-05,
+ "loss": 1.5403,
+ "step": 660
+ },
+ {
+ "epoch": 1.88,
+ "learning_rate": 7.481060606060606e-05,
+ "loss": 1.6039,
+ "step": 661
+ },
+ {
+ "epoch": 1.88,
+ "learning_rate": 7.462121212121213e-05,
+ "loss": 1.5708,
+ "step": 662
+ },
+ {
+ "epoch": 1.88,
+ "learning_rate": 7.443181818181817e-05,
+ "loss": 1.5692,
+ "step": 663
+ },
+ {
+ "epoch": 1.88,
+ "learning_rate": 7.424242424242424e-05,
+ "loss": 1.5084,
+ "step": 664
+ },
+ {
+ "epoch": 1.89,
+ "learning_rate": 7.40530303030303e-05,
+ "loss": 1.5982,
+ "step": 665
+ },
+ {
+ "epoch": 1.89,
+ "learning_rate": 7.386363636363637e-05,
+ "loss": 1.5881,
+ "step": 666
+ },
+ {
+ "epoch": 1.89,
+ "learning_rate": 7.367424242424242e-05,
+ "loss": 1.5593,
+ "step": 667
+ },
+ {
+ "epoch": 1.9,
+ "learning_rate": 7.348484848484849e-05,
+ "loss": 1.5871,
+ "step": 668
+ },
+ {
+ "epoch": 1.9,
+ "learning_rate": 7.329545454545455e-05,
+ "loss": 1.6134,
+ "step": 669
+ },
+ {
+ "epoch": 1.9,
+ "learning_rate": 7.310606060606062e-05,
+ "loss": 1.5516,
+ "step": 670
+ },
+ {
+ "epoch": 1.9,
+ "learning_rate": 7.291666666666667e-05,
+ "loss": 1.5691,
+ "step": 671
+ },
+ {
+ "epoch": 1.91,
+ "learning_rate": 7.272727272727273e-05,
+ "loss": 1.5801,
+ "step": 672
+ },
+ {
+ "epoch": 1.91,
+ "learning_rate": 7.253787878787878e-05,
+ "loss": 1.5684,
+ "step": 673
+ },
+ {
+ "epoch": 1.91,
+ "learning_rate": 7.234848484848485e-05,
+ "loss": 1.5591,
+ "step": 674
+ },
+ {
+ "epoch": 1.91,
+ "learning_rate": 7.215909090909091e-05,
+ "loss": 1.5727,
+ "step": 675
+ },
+ {
+ "epoch": 1.92,
+ "learning_rate": 7.196969696969698e-05,
+ "loss": 1.6081,
+ "step": 676
+ },
+ {
+ "epoch": 1.92,
+ "learning_rate": 7.178030303030303e-05,
+ "loss": 1.5884,
+ "step": 677
+ },
+ {
+ "epoch": 1.92,
+ "learning_rate": 7.15909090909091e-05,
+ "loss": 1.5638,
+ "step": 678
+ },
+ {
+ "epoch": 1.93,
+ "learning_rate": 7.140151515151516e-05,
+ "loss": 1.5614,
+ "step": 679
+ },
+ {
+ "epoch": 1.93,
+ "learning_rate": 7.121212121212121e-05,
+ "loss": 1.5543,
+ "step": 680
+ },
+ {
+ "epoch": 1.93,
+ "learning_rate": 7.102272727272727e-05,
+ "loss": 1.5801,
+ "step": 681
+ },
+ {
+ "epoch": 1.93,
+ "learning_rate": 7.083333333333334e-05,
+ "loss": 1.5458,
+ "step": 682
+ },
+ {
+ "epoch": 1.94,
+ "learning_rate": 7.06439393939394e-05,
+ "loss": 1.5567,
+ "step": 683
+ },
+ {
+ "epoch": 1.94,
+ "learning_rate": 7.045454545454546e-05,
+ "loss": 1.5567,
+ "step": 684
+ },
+ {
+ "epoch": 1.94,
+ "learning_rate": 7.026515151515152e-05,
+ "loss": 1.5638,
+ "step": 685
+ },
+ {
+ "epoch": 1.95,
+ "learning_rate": 7.007575757575759e-05,
+ "loss": 1.5431,
+ "step": 686
+ },
+ {
+ "epoch": 1.95,
+ "learning_rate": 6.988636363636364e-05,
+ "loss": 1.5729,
+ "step": 687
+ },
+ {
+ "epoch": 1.95,
+ "learning_rate": 6.96969696969697e-05,
+ "loss": 1.5235,
+ "step": 688
+ },
+ {
+ "epoch": 1.95,
+ "learning_rate": 6.950757575757575e-05,
+ "loss": 1.5753,
+ "step": 689
+ },
+ {
+ "epoch": 1.96,
+ "learning_rate": 6.931818181818182e-05,
+ "loss": 1.5319,
+ "step": 690
+ },
+ {
+ "epoch": 1.96,
+ "learning_rate": 6.912878787878788e-05,
+ "loss": 1.5847,
+ "step": 691
+ },
+ {
+ "epoch": 1.96,
+ "learning_rate": 6.893939393939395e-05,
+ "loss": 1.5533,
+ "step": 692
+ },
+ {
+ "epoch": 1.97,
+ "learning_rate": 6.875e-05,
+ "loss": 1.5665,
+ "step": 693
+ },
+ {
+ "epoch": 1.97,
+ "learning_rate": 6.856060606060606e-05,
+ "loss": 1.5913,
+ "step": 694
+ },
+ {
+ "epoch": 1.97,
+ "learning_rate": 6.837121212121213e-05,
+ "loss": 1.6011,
+ "step": 695
+ },
+ {
+ "epoch": 1.97,
+ "learning_rate": 6.818181818181818e-05,
+ "loss": 1.5201,
+ "step": 696
+ },
+ {
+ "epoch": 1.98,
+ "learning_rate": 6.799242424242424e-05,
+ "loss": 1.57,
+ "step": 697
+ },
+ {
+ "epoch": 1.98,
+ "learning_rate": 6.78030303030303e-05,
+ "loss": 1.5381,
+ "step": 698
+ },
+ {
+ "epoch": 1.98,
+ "learning_rate": 6.761363636363636e-05,
+ "loss": 1.5681,
+ "step": 699
+ },
+ {
+ "epoch": 1.99,
+ "learning_rate": 6.742424242424242e-05,
+ "loss": 1.5542,
+ "step": 700
+ },
+ {
+ "epoch": 1.99,
+ "learning_rate": 6.723484848484849e-05,
+ "loss": 1.5779,
+ "step": 701
+ },
+ {
+ "epoch": 1.99,
+ "learning_rate": 6.704545454545455e-05,
+ "loss": 1.578,
+ "step": 702
+ },
+ {
+ "epoch": 1.99,
+ "learning_rate": 6.685606060606061e-05,
+ "loss": 1.6131,
+ "step": 703
+ },
+ {
+ "epoch": 2.0,
+ "learning_rate": 6.666666666666667e-05,
+ "loss": 1.5085,
+ "step": 704
+ },
+ {
+ "epoch": 2.0,
+ "learning_rate": 6.647727272727274e-05,
+ "loss": 1.4876,
+ "step": 705
+ },
+ {
+ "epoch": 2.0,
+ "learning_rate": 6.628787878787878e-05,
+ "loss": 1.5071,
+ "step": 706
+ },
+ {
+ "epoch": 2.01,
+ "learning_rate": 6.609848484848485e-05,
+ "loss": 1.574,
+ "step": 707
+ },
+ {
+ "epoch": 2.01,
+ "learning_rate": 6.59090909090909e-05,
+ "loss": 1.5214,
+ "step": 708
+ },
+ {
+ "epoch": 2.01,
+ "learning_rate": 6.571969696969697e-05,
+ "loss": 1.5382,
+ "step": 709
+ },
+ {
+ "epoch": 2.01,
+ "learning_rate": 6.553030303030303e-05,
+ "loss": 1.5136,
+ "step": 710
+ },
+ {
+ "epoch": 2.02,
+ "learning_rate": 6.53409090909091e-05,
+ "loss": 1.4807,
+ "step": 711
+ },
+ {
+ "epoch": 2.02,
+ "learning_rate": 6.515151515151516e-05,
+ "loss": 1.491,
+ "step": 712
+ },
+ {
+ "epoch": 2.02,
+ "learning_rate": 6.496212121212122e-05,
+ "loss": 1.5595,
+ "step": 713
+ },
+ {
+ "epoch": 2.03,
+ "learning_rate": 6.477272727272728e-05,
+ "loss": 1.5342,
+ "step": 714
+ },
+ {
+ "epoch": 2.03,
+ "learning_rate": 6.458333333333334e-05,
+ "loss": 1.5173,
+ "step": 715
+ },
+ {
+ "epoch": 2.03,
+ "learning_rate": 6.439393939393939e-05,
+ "loss": 1.5353,
+ "step": 716
+ },
+ {
+ "epoch": 2.03,
+ "learning_rate": 6.420454545454546e-05,
+ "loss": 1.4826,
+ "step": 717
+ },
+ {
+ "epoch": 2.04,
+ "learning_rate": 6.401515151515152e-05,
+ "loss": 1.5404,
+ "step": 718
+ },
+ {
+ "epoch": 2.04,
+ "learning_rate": 6.382575757575759e-05,
+ "loss": 1.5612,
+ "step": 719
+ },
+ {
+ "epoch": 2.04,
+ "learning_rate": 6.363636363636364e-05,
+ "loss": 1.5203,
+ "step": 720
+ },
+ {
+ "epoch": 2.05,
+ "learning_rate": 6.344696969696971e-05,
+ "loss": 1.52,
+ "step": 721
+ },
+ {
+ "epoch": 2.05,
+ "learning_rate": 6.325757575757577e-05,
+ "loss": 1.5417,
+ "step": 722
+ },
+ {
+ "epoch": 2.05,
+ "learning_rate": 6.306818181818182e-05,
+ "loss": 1.5352,
+ "step": 723
+ },
+ {
+ "epoch": 2.05,
+ "learning_rate": 6.287878787878788e-05,
+ "loss": 1.4671,
+ "step": 724
+ },
+ {
+ "epoch": 2.06,
+ "learning_rate": 6.268939393939395e-05,
+ "loss": 1.5739,
+ "step": 725
+ },
+ {
+ "epoch": 2.06,
+ "learning_rate": 6.25e-05,
+ "loss": 1.4987,
+ "step": 726
+ },
+ {
+ "epoch": 2.06,
+ "learning_rate": 6.231060606060606e-05,
+ "loss": 1.5145,
+ "step": 727
+ },
+ {
+ "epoch": 2.07,
+ "learning_rate": 6.212121212121213e-05,
+ "loss": 1.5686,
+ "step": 728
+ },
+ {
+ "epoch": 2.07,
+ "learning_rate": 6.193181818181818e-05,
+ "loss": 1.4872,
+ "step": 729
+ },
+ {
+ "epoch": 2.07,
+ "learning_rate": 6.174242424242425e-05,
+ "loss": 1.4831,
+ "step": 730
+ },
+ {
+ "epoch": 2.07,
+ "learning_rate": 6.15530303030303e-05,
+ "loss": 1.5242,
+ "step": 731
+ },
+ {
+ "epoch": 2.08,
+ "learning_rate": 6.136363636363636e-05,
+ "loss": 1.5298,
+ "step": 732
+ },
+ {
+ "epoch": 2.08,
+ "learning_rate": 6.117424242424242e-05,
+ "loss": 1.4941,
+ "step": 733
+ },
+ {
+ "epoch": 2.08,
+ "learning_rate": 6.098484848484849e-05,
+ "loss": 1.5022,
+ "step": 734
+ },
+ {
+ "epoch": 2.09,
+ "learning_rate": 6.079545454545454e-05,
+ "loss": 1.4947,
+ "step": 735
+ },
+ {
+ "epoch": 2.09,
+ "learning_rate": 6.060606060606061e-05,
+ "loss": 1.4922,
+ "step": 736
+ },
+ {
+ "epoch": 2.09,
+ "learning_rate": 6.041666666666667e-05,
+ "loss": 1.4796,
+ "step": 737
+ },
+ {
+ "epoch": 2.09,
+ "learning_rate": 6.022727272727273e-05,
+ "loss": 1.4619,
+ "step": 738
+ },
+ {
+ "epoch": 2.1,
+ "learning_rate": 6.0037878787878785e-05,
+ "loss": 1.5346,
+ "step": 739
+ },
+ {
+ "epoch": 2.1,
+ "learning_rate": 5.9848484848484854e-05,
+ "loss": 1.4987,
+ "step": 740
+ },
+ {
+ "epoch": 2.1,
+ "learning_rate": 5.965909090909091e-05,
+ "loss": 1.516,
+ "step": 741
+ },
+ {
+ "epoch": 2.1,
+ "learning_rate": 5.946969696969697e-05,
+ "loss": 1.5075,
+ "step": 742
+ },
+ {
+ "epoch": 2.11,
+ "learning_rate": 5.928030303030303e-05,
+ "loss": 1.4584,
+ "step": 743
+ },
+ {
+ "epoch": 2.11,
+ "learning_rate": 5.90909090909091e-05,
+ "loss": 1.5285,
+ "step": 744
+ },
+ {
+ "epoch": 2.11,
+ "learning_rate": 5.890151515151515e-05,
+ "loss": 1.5339,
+ "step": 745
+ },
+ {
+ "epoch": 2.12,
+ "learning_rate": 5.871212121212122e-05,
+ "loss": 1.4811,
+ "step": 746
+ },
+ {
+ "epoch": 2.12,
+ "learning_rate": 5.852272727272727e-05,
+ "loss": 1.5158,
+ "step": 747
+ },
+ {
+ "epoch": 2.12,
+ "learning_rate": 5.833333333333334e-05,
+ "loss": 1.5523,
+ "step": 748
+ },
+ {
+ "epoch": 2.12,
+ "learning_rate": 5.8143939393939395e-05,
+ "loss": 1.4911,
+ "step": 749
+ },
+ {
+ "epoch": 2.13,
+ "learning_rate": 5.7954545454545464e-05,
+ "loss": 1.508,
+ "step": 750
+ },
+ {
+ "epoch": 2.13,
+ "learning_rate": 5.776515151515152e-05,
+ "loss": 1.5273,
+ "step": 751
+ },
+ {
+ "epoch": 2.13,
+ "learning_rate": 5.757575757575758e-05,
+ "loss": 1.5231,
+ "step": 752
+ },
+ {
+ "epoch": 2.14,
+ "learning_rate": 5.738636363636364e-05,
+ "loss": 1.5269,
+ "step": 753
+ },
+ {
+ "epoch": 2.14,
+ "learning_rate": 5.719696969696971e-05,
+ "loss": 1.5306,
+ "step": 754
+ },
+ {
+ "epoch": 2.14,
+ "learning_rate": 5.700757575757576e-05,
+ "loss": 1.4519,
+ "step": 755
+ },
+ {
+ "epoch": 2.14,
+ "learning_rate": 5.6818181818181825e-05,
+ "loss": 1.4976,
+ "step": 756
+ },
+ {
+ "epoch": 2.15,
+ "learning_rate": 5.662878787878788e-05,
+ "loss": 1.5136,
+ "step": 757
+ },
+ {
+ "epoch": 2.15,
+ "learning_rate": 5.643939393939395e-05,
+ "loss": 1.536,
+ "step": 758
+ },
+ {
+ "epoch": 2.15,
+ "learning_rate": 5.6250000000000005e-05,
+ "loss": 1.4927,
+ "step": 759
+ },
+ {
+ "epoch": 2.16,
+ "learning_rate": 5.606060606060606e-05,
+ "loss": 1.4931,
+ "step": 760
+ },
+ {
+ "epoch": 2.16,
+ "learning_rate": 5.587121212121212e-05,
+ "loss": 1.4939,
+ "step": 761
+ },
+ {
+ "epoch": 2.16,
+ "learning_rate": 5.568181818181818e-05,
+ "loss": 1.4446,
+ "step": 762
+ },
+ {
+ "epoch": 2.16,
+ "learning_rate": 5.549242424242425e-05,
+ "loss": 1.5113,
+ "step": 763
+ },
+ {
+ "epoch": 2.17,
+ "learning_rate": 5.5303030303030304e-05,
+ "loss": 1.4886,
+ "step": 764
+ },
+ {
+ "epoch": 2.17,
+ "learning_rate": 5.5113636363636366e-05,
+ "loss": 1.5291,
+ "step": 765
+ },
+ {
+ "epoch": 2.17,
+ "learning_rate": 5.492424242424242e-05,
+ "loss": 1.5204,
+ "step": 766
+ },
+ {
+ "epoch": 2.18,
+ "learning_rate": 5.473484848484849e-05,
+ "loss": 1.4677,
+ "step": 767
+ },
+ {
+ "epoch": 2.18,
+ "learning_rate": 5.4545454545454546e-05,
+ "loss": 1.4499,
+ "step": 768
+ },
+ {
+ "epoch": 2.18,
+ "learning_rate": 5.435606060606061e-05,
+ "loss": 1.4735,
+ "step": 769
+ },
+ {
+ "epoch": 2.18,
+ "learning_rate": 5.4166666666666664e-05,
+ "loss": 1.4688,
+ "step": 770
+ },
+ {
+ "epoch": 2.19,
+ "learning_rate": 5.397727272727273e-05,
+ "loss": 1.4728,
+ "step": 771
+ },
+ {
+ "epoch": 2.19,
+ "learning_rate": 5.378787878787879e-05,
+ "loss": 1.4791,
+ "step": 772
+ },
+ {
+ "epoch": 2.19,
+ "learning_rate": 5.359848484848485e-05,
+ "loss": 1.5265,
+ "step": 773
+ },
+ {
+ "epoch": 2.2,
+ "learning_rate": 5.340909090909091e-05,
+ "loss": 1.4775,
+ "step": 774
+ },
+ {
+ "epoch": 2.2,
+ "learning_rate": 5.3219696969696976e-05,
+ "loss": 1.4531,
+ "step": 775
+ },
+ {
+ "epoch": 2.2,
+ "learning_rate": 5.303030303030303e-05,
+ "loss": 1.4891,
+ "step": 776
+ },
+ {
+ "epoch": 2.2,
+ "learning_rate": 5.2840909090909094e-05,
+ "loss": 1.4764,
+ "step": 777
+ },
+ {
+ "epoch": 2.21,
+ "learning_rate": 5.265151515151515e-05,
+ "loss": 1.4864,
+ "step": 778
+ },
+ {
+ "epoch": 2.21,
+ "learning_rate": 5.246212121212122e-05,
+ "loss": 1.521,
+ "step": 779
+ },
+ {
+ "epoch": 2.21,
+ "learning_rate": 5.2272727272727274e-05,
+ "loss": 1.473,
+ "step": 780
+ },
+ {
+ "epoch": 2.22,
+ "learning_rate": 5.208333333333334e-05,
+ "loss": 1.477,
+ "step": 781
+ },
+ {
+ "epoch": 2.22,
+ "learning_rate": 5.189393939393939e-05,
+ "loss": 1.482,
+ "step": 782
+ },
+ {
+ "epoch": 2.22,
+ "learning_rate": 5.170454545454546e-05,
+ "loss": 1.5387,
+ "step": 783
+ },
+ {
+ "epoch": 2.22,
+ "learning_rate": 5.151515151515152e-05,
+ "loss": 1.4354,
+ "step": 784
+ },
+ {
+ "epoch": 2.23,
+ "learning_rate": 5.132575757575758e-05,
+ "loss": 1.4792,
+ "step": 785
+ },
+ {
+ "epoch": 2.23,
+ "learning_rate": 5.1136363636363635e-05,
+ "loss": 1.4589,
+ "step": 786
+ },
+ {
+ "epoch": 2.23,
+ "learning_rate": 5.0946969696969704e-05,
+ "loss": 1.4883,
+ "step": 787
+ },
+ {
+ "epoch": 2.24,
+ "learning_rate": 5.075757575757576e-05,
+ "loss": 1.4442,
+ "step": 788
+ },
+ {
+ "epoch": 2.24,
+ "learning_rate": 5.056818181818183e-05,
+ "loss": 1.4387,
+ "step": 789
+ },
+ {
+ "epoch": 2.24,
+ "learning_rate": 5.037878787878788e-05,
+ "loss": 1.5203,
+ "step": 790
+ },
+ {
+ "epoch": 2.24,
+ "learning_rate": 5.018939393939395e-05,
+ "loss": 1.4634,
+ "step": 791
+ },
+ {
+ "epoch": 2.25,
+ "learning_rate": 5e-05,
+ "loss": 1.4734,
+ "step": 792
+ },
+ {
+ "epoch": 2.25,
+ "learning_rate": 4.9810606060606065e-05,
+ "loss": 1.5079,
+ "step": 793
+ },
+ {
+ "epoch": 2.25,
+ "learning_rate": 4.962121212121213e-05,
+ "loss": 1.4986,
+ "step": 794
+ },
+ {
+ "epoch": 2.26,
+ "learning_rate": 4.943181818181818e-05,
+ "loss": 1.4132,
+ "step": 795
+ },
+ {
+ "epoch": 2.26,
+ "learning_rate": 4.9242424242424245e-05,
+ "loss": 1.4992,
+ "step": 796
+ },
+ {
+ "epoch": 2.26,
+ "learning_rate": 4.905303030303031e-05,
+ "loss": 1.4601,
+ "step": 797
+ },
+ {
+ "epoch": 2.26,
+ "learning_rate": 4.886363636363637e-05,
+ "loss": 1.47,
+ "step": 798
+ },
+ {
+ "epoch": 2.27,
+ "learning_rate": 4.8674242424242425e-05,
+ "loss": 1.4575,
+ "step": 799
+ },
+ {
+ "epoch": 2.27,
+ "learning_rate": 4.848484848484849e-05,
+ "loss": 1.4508,
+ "step": 800
+ },
+ {
+ "epoch": 2.27,
+ "learning_rate": 4.829545454545455e-05,
+ "loss": 1.4806,
+ "step": 801
+ },
+ {
+ "epoch": 2.28,
+ "learning_rate": 4.810606060606061e-05,
+ "loss": 1.475,
+ "step": 802
+ },
+ {
+ "epoch": 2.28,
+ "learning_rate": 4.791666666666667e-05,
+ "loss": 1.4219,
+ "step": 803
+ },
+ {
+ "epoch": 2.28,
+ "learning_rate": 4.772727272727273e-05,
+ "loss": 1.4651,
+ "step": 804
+ },
+ {
+ "epoch": 2.28,
+ "learning_rate": 4.753787878787879e-05,
+ "loss": 1.519,
+ "step": 805
+ },
+ {
+ "epoch": 2.29,
+ "learning_rate": 4.7348484848484855e-05,
+ "loss": 1.4887,
+ "step": 806
+ },
+ {
+ "epoch": 2.29,
+ "learning_rate": 4.715909090909091e-05,
+ "loss": 1.4713,
+ "step": 807
+ },
+ {
+ "epoch": 2.29,
+ "learning_rate": 4.696969696969697e-05,
+ "loss": 1.4588,
+ "step": 808
+ },
+ {
+ "epoch": 2.3,
+ "learning_rate": 4.678030303030303e-05,
+ "loss": 1.4752,
+ "step": 809
+ },
+ {
+ "epoch": 2.3,
+ "learning_rate": 4.659090909090909e-05,
+ "loss": 1.4722,
+ "step": 810
+ },
+ {
+ "epoch": 2.3,
+ "learning_rate": 4.6401515151515154e-05,
+ "loss": 1.4735,
+ "step": 811
+ },
+ {
+ "epoch": 2.3,
+ "learning_rate": 4.621212121212121e-05,
+ "loss": 1.4893,
+ "step": 812
+ },
+ {
+ "epoch": 2.31,
+ "learning_rate": 4.602272727272727e-05,
+ "loss": 1.4793,
+ "step": 813
+ },
+ {
+ "epoch": 2.31,
+ "learning_rate": 4.5833333333333334e-05,
+ "loss": 1.4737,
+ "step": 814
+ },
+ {
+ "epoch": 2.31,
+ "learning_rate": 4.5643939393939396e-05,
+ "loss": 1.5194,
+ "step": 815
+ },
+ {
+ "epoch": 2.31,
+ "learning_rate": 4.545454545454546e-05,
+ "loss": 1.4461,
+ "step": 816
+ },
+ {
+ "epoch": 2.32,
+ "learning_rate": 4.5265151515151514e-05,
+ "loss": 1.467,
+ "step": 817
+ },
+ {
+ "epoch": 2.32,
+ "learning_rate": 4.5075757575757577e-05,
+ "loss": 1.5031,
+ "step": 818
+ },
+ {
+ "epoch": 2.32,
+ "learning_rate": 4.488636363636364e-05,
+ "loss": 1.5085,
+ "step": 819
+ },
+ {
+ "epoch": 2.33,
+ "learning_rate": 4.46969696969697e-05,
+ "loss": 1.4356,
+ "step": 820
+ },
+ {
+ "epoch": 2.33,
+ "learning_rate": 4.450757575757576e-05,
+ "loss": 1.4645,
+ "step": 821
+ },
+ {
+ "epoch": 2.33,
+ "learning_rate": 4.431818181818182e-05,
+ "loss": 1.4224,
+ "step": 822
+ },
+ {
+ "epoch": 2.33,
+ "learning_rate": 4.412878787878788e-05,
+ "loss": 1.4675,
+ "step": 823
+ },
+ {
+ "epoch": 2.34,
+ "learning_rate": 4.3939393939393944e-05,
+ "loss": 1.4892,
+ "step": 824
+ },
+ {
+ "epoch": 2.34,
+ "learning_rate": 4.375e-05,
+ "loss": 1.5441,
+ "step": 825
+ },
+ {
+ "epoch": 2.34,
+ "learning_rate": 4.356060606060606e-05,
+ "loss": 1.4205,
+ "step": 826
+ },
+ {
+ "epoch": 2.35,
+ "learning_rate": 4.3371212121212124e-05,
+ "loss": 1.4849,
+ "step": 827
+ },
+ {
+ "epoch": 2.35,
+ "learning_rate": 4.318181818181819e-05,
+ "loss": 1.4789,
+ "step": 828
+ },
+ {
+ "epoch": 2.35,
+ "learning_rate": 4.299242424242424e-05,
+ "loss": 1.445,
+ "step": 829
+ },
+ {
+ "epoch": 2.35,
+ "learning_rate": 4.2803030303030305e-05,
+ "loss": 1.4528,
+ "step": 830
+ },
+ {
+ "epoch": 2.36,
+ "learning_rate": 4.261363636363637e-05,
+ "loss": 1.4588,
+ "step": 831
+ },
+ {
+ "epoch": 2.36,
+ "learning_rate": 4.242424242424243e-05,
+ "loss": 1.4158,
+ "step": 832
+ },
+ {
+ "epoch": 2.36,
+ "learning_rate": 4.2234848484848485e-05,
+ "loss": 1.4933,
+ "step": 833
+ },
+ {
+ "epoch": 2.37,
+ "learning_rate": 4.204545454545455e-05,
+ "loss": 1.4294,
+ "step": 834
+ },
+ {
+ "epoch": 2.37,
+ "learning_rate": 4.185606060606061e-05,
+ "loss": 1.4764,
+ "step": 835
+ },
+ {
+ "epoch": 2.37,
+ "learning_rate": 4.166666666666667e-05,
+ "loss": 1.4262,
+ "step": 836
+ },
+ {
+ "epoch": 2.37,
+ "learning_rate": 4.1477272727272734e-05,
+ "loss": 1.3994,
+ "step": 837
+ },
+ {
+ "epoch": 2.38,
+ "learning_rate": 4.128787878787879e-05,
+ "loss": 1.4912,
+ "step": 838
+ },
+ {
+ "epoch": 2.38,
+ "learning_rate": 4.109848484848485e-05,
+ "loss": 1.4228,
+ "step": 839
+ },
+ {
+ "epoch": 2.38,
+ "learning_rate": 4.0909090909090915e-05,
+ "loss": 1.4403,
+ "step": 840
+ },
+ {
+ "epoch": 2.39,
+ "learning_rate": 4.071969696969698e-05,
+ "loss": 1.4738,
+ "step": 841
+ },
+ {
+ "epoch": 2.39,
+ "learning_rate": 4.053030303030303e-05,
+ "loss": 1.4715,
+ "step": 842
+ },
+ {
+ "epoch": 2.39,
+ "learning_rate": 4.034090909090909e-05,
+ "loss": 1.4354,
+ "step": 843
+ },
+ {
+ "epoch": 2.39,
+ "learning_rate": 4.015151515151515e-05,
+ "loss": 1.4296,
+ "step": 844
+ },
+ {
+ "epoch": 2.4,
+ "learning_rate": 3.996212121212121e-05,
+ "loss": 1.4773,
+ "step": 845
+ },
+ {
+ "epoch": 2.4,
+ "learning_rate": 3.9772727272727275e-05,
+ "loss": 1.4813,
+ "step": 846
+ },
+ {
+ "epoch": 2.4,
+ "learning_rate": 3.958333333333333e-05,
+ "loss": 1.4763,
+ "step": 847
+ },
+ {
+ "epoch": 2.41,
+ "learning_rate": 3.939393939393939e-05,
+ "loss": 1.472,
+ "step": 848
+ },
+ {
+ "epoch": 2.41,
+ "learning_rate": 3.9204545454545456e-05,
+ "loss": 1.3975,
+ "step": 849
+ },
+ {
+ "epoch": 2.41,
+ "learning_rate": 3.901515151515152e-05,
+ "loss": 1.4507,
+ "step": 850
+ },
+ {
+ "epoch": 2.41,
+ "learning_rate": 3.8825757575757574e-05,
+ "loss": 1.4911,
+ "step": 851
+ },
+ {
+ "epoch": 2.42,
+ "learning_rate": 3.8636363636363636e-05,
+ "loss": 1.3785,
+ "step": 852
+ },
+ {
+ "epoch": 2.42,
+ "learning_rate": 3.84469696969697e-05,
+ "loss": 1.3964,
+ "step": 853
+ },
+ {
+ "epoch": 2.42,
+ "learning_rate": 3.825757575757576e-05,
+ "loss": 1.471,
+ "step": 854
+ },
+ {
+ "epoch": 2.43,
+ "learning_rate": 3.8068181818181816e-05,
+ "loss": 1.4034,
+ "step": 855
+ },
+ {
+ "epoch": 2.43,
+ "learning_rate": 3.787878787878788e-05,
+ "loss": 1.5032,
+ "step": 856
+ },
+ {
+ "epoch": 2.43,
+ "learning_rate": 3.768939393939394e-05,
+ "loss": 1.488,
+ "step": 857
+ },
+ {
+ "epoch": 2.43,
+ "learning_rate": 3.7500000000000003e-05,
+ "loss": 1.4341,
+ "step": 858
+ },
+ {
+ "epoch": 2.44,
+ "learning_rate": 3.7310606060606066e-05,
+ "loss": 1.4728,
+ "step": 859
+ },
+ {
+ "epoch": 2.44,
+ "learning_rate": 3.712121212121212e-05,
+ "loss": 1.4146,
+ "step": 860
+ },
+ {
+ "epoch": 2.44,
+ "learning_rate": 3.6931818181818184e-05,
+ "loss": 1.4241,
+ "step": 861
+ },
+ {
+ "epoch": 2.45,
+ "learning_rate": 3.6742424242424246e-05,
+ "loss": 1.4811,
+ "step": 862
+ },
+ {
+ "epoch": 2.45,
+ "learning_rate": 3.655303030303031e-05,
+ "loss": 1.5406,
+ "step": 863
+ },
+ {
+ "epoch": 2.45,
+ "learning_rate": 3.6363636363636364e-05,
+ "loss": 1.4578,
+ "step": 864
+ },
+ {
+ "epoch": 2.45,
+ "learning_rate": 3.6174242424242427e-05,
+ "loss": 1.4487,
+ "step": 865
+ },
+ {
+ "epoch": 2.46,
+ "learning_rate": 3.598484848484849e-05,
+ "loss": 1.4609,
+ "step": 866
+ },
+ {
+ "epoch": 2.46,
+ "learning_rate": 3.579545454545455e-05,
+ "loss": 1.3872,
+ "step": 867
+ },
+ {
+ "epoch": 2.46,
+ "learning_rate": 3.560606060606061e-05,
+ "loss": 1.4584,
+ "step": 868
+ },
+ {
+ "epoch": 2.47,
+ "learning_rate": 3.541666666666667e-05,
+ "loss": 1.4516,
+ "step": 869
+ },
+ {
+ "epoch": 2.47,
+ "learning_rate": 3.522727272727273e-05,
+ "loss": 1.4802,
+ "step": 870
+ },
+ {
+ "epoch": 2.47,
+ "learning_rate": 3.5037878787878794e-05,
+ "loss": 1.4247,
+ "step": 871
+ },
+ {
+ "epoch": 2.47,
+ "learning_rate": 3.484848484848485e-05,
+ "loss": 1.4203,
+ "step": 872
+ },
+ {
+ "epoch": 2.48,
+ "learning_rate": 3.465909090909091e-05,
+ "loss": 1.4793,
+ "step": 873
+ },
+ {
+ "epoch": 2.48,
+ "learning_rate": 3.4469696969696974e-05,
+ "loss": 1.4442,
+ "step": 874
+ },
+ {
+ "epoch": 2.48,
+ "learning_rate": 3.428030303030303e-05,
+ "loss": 1.4779,
+ "step": 875
+ },
+ {
+ "epoch": 2.49,
+ "learning_rate": 3.409090909090909e-05,
+ "loss": 1.4143,
+ "step": 876
+ },
+ {
+ "epoch": 2.49,
+ "learning_rate": 3.390151515151515e-05,
+ "loss": 1.4758,
+ "step": 877
+ },
+ {
+ "epoch": 2.49,
+ "learning_rate": 3.371212121212121e-05,
+ "loss": 1.4327,
+ "step": 878
+ },
+ {
+ "epoch": 2.49,
+ "learning_rate": 3.352272727272727e-05,
+ "loss": 1.466,
+ "step": 879
+ },
+ {
+ "epoch": 2.5,
+ "learning_rate": 3.3333333333333335e-05,
+ "loss": 1.498,
+ "step": 880
+ },
+ {
+ "epoch": 2.5,
+ "learning_rate": 3.314393939393939e-05,
+ "loss": 1.4679,
+ "step": 881
+ },
+ {
+ "epoch": 2.5,
+ "learning_rate": 3.295454545454545e-05,
+ "loss": 1.4606,
+ "step": 882
+ },
+ {
+ "epoch": 2.5,
+ "learning_rate": 3.2765151515151515e-05,
+ "loss": 1.4416,
+ "step": 883
+ },
+ {
+ "epoch": 2.51,
+ "learning_rate": 3.257575757575758e-05,
+ "loss": 1.4284,
+ "step": 884
+ },
+ {
+ "epoch": 2.51,
+ "learning_rate": 3.238636363636364e-05,
+ "loss": 1.4554,
+ "step": 885
+ },
+ {
+ "epoch": 2.51,
+ "learning_rate": 3.2196969696969696e-05,
+ "loss": 1.4306,
+ "step": 886
+ },
+ {
+ "epoch": 2.52,
+ "learning_rate": 3.200757575757576e-05,
+ "loss": 1.4751,
+ "step": 887
+ },
+ {
+ "epoch": 2.52,
+ "learning_rate": 3.181818181818182e-05,
+ "loss": 1.4787,
+ "step": 888
+ },
+ {
+ "epoch": 2.52,
+ "learning_rate": 3.162878787878788e-05,
+ "loss": 1.4345,
+ "step": 889
+ },
+ {
+ "epoch": 2.52,
+ "learning_rate": 3.143939393939394e-05,
+ "loss": 1.4951,
+ "step": 890
+ },
+ {
+ "epoch": 2.53,
+ "learning_rate": 3.125e-05,
+ "loss": 1.4212,
+ "step": 891
+ },
+ {
+ "epoch": 2.53,
+ "learning_rate": 3.106060606060606e-05,
+ "loss": 1.4258,
+ "step": 892
+ },
+ {
+ "epoch": 2.53,
+ "learning_rate": 3.0871212121212125e-05,
+ "loss": 1.4454,
+ "step": 893
+ },
+ {
+ "epoch": 2.54,
+ "learning_rate": 3.068181818181818e-05,
+ "loss": 1.4717,
+ "step": 894
+ },
+ {
+ "epoch": 2.54,
+ "learning_rate": 3.0492424242424243e-05,
+ "loss": 1.45,
+ "step": 895
+ },
+ {
+ "epoch": 2.54,
+ "learning_rate": 3.0303030303030306e-05,
+ "loss": 1.4785,
+ "step": 896
+ },
+ {
+ "epoch": 2.54,
+ "learning_rate": 3.0113636363636365e-05,
+ "loss": 1.3477,
+ "step": 897
+ },
+ {
+ "epoch": 2.55,
+ "learning_rate": 2.9924242424242427e-05,
+ "loss": 1.4807,
+ "step": 898
+ },
+ {
+ "epoch": 2.55,
+ "learning_rate": 2.9734848484848486e-05,
+ "loss": 1.4737,
+ "step": 899
+ },
+ {
+ "epoch": 2.55,
+ "learning_rate": 2.954545454545455e-05,
+ "loss": 1.4427,
+ "step": 900
+ }
+ ],
+ "logging_steps": 1,
+ "max_steps": 1056,
+ "num_train_epochs": 3,
+ "save_steps": 100,
+ "total_flos": 2.280591096435026e+18,
+ "trial_name": null,
+ "trial_params": null
+}
diff --git a/checkpoint-900/training_args.bin b/checkpoint-900/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..574132c086f9a526d71493b1ec4c09396eac5482
--- /dev/null
+++ b/checkpoint-900/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:50303c7f1586feb001f01b7e968e567295f501ef6c6407e28250c458696a43af
+size 4155
diff --git a/runs/Sep11_22-51-40_ThanhHa/events.out.tfevents.1694447526.ThanhHa.402750.0 b/runs/Sep11_22-51-40_ThanhHa/events.out.tfevents.1694447526.ThanhHa.402750.0
new file mode 100644
index 0000000000000000000000000000000000000000..a094e097bd3efb233f68d18ead5be010c4a51dd8
--- /dev/null
+++ b/runs/Sep11_22-51-40_ThanhHa/events.out.tfevents.1694447526.ThanhHa.402750.0
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6e176e80671deb9433b7962e886fa5e81bb0b8aad7dd0513db05615e7dfb5b53
+size 170540