diff --git a/README.md b/README.md index 80ee0ae2b01a96ab60dd97039764ed018ddcd8e2..248024822db84371dc4febe0f34f51fbf9c36e22 100644 --- a/README.md +++ b/README.md @@ -4,90 +4,6 @@ library_name: peft ## Training procedure -The following `bitsandbytes` quantization config was used during training: -- quant_method: bitsandbytes -- load_in_8bit: True -- load_in_4bit: False -- llm_int8_threshold: 6.0 -- llm_int8_skip_modules: None -- llm_int8_enable_fp32_cpu_offload: False -- llm_int8_has_fp16_weight: False -- bnb_4bit_quant_type: fp4 -- bnb_4bit_use_double_quant: False -- bnb_4bit_compute_dtype: float32 - -The following `bitsandbytes` quantization config was used during training: -- quant_method: bitsandbytes -- load_in_8bit: True -- load_in_4bit: False -- llm_int8_threshold: 6.0 -- llm_int8_skip_modules: None -- llm_int8_enable_fp32_cpu_offload: False -- llm_int8_has_fp16_weight: False -- bnb_4bit_quant_type: fp4 -- bnb_4bit_use_double_quant: False -- bnb_4bit_compute_dtype: float32 - -The following `bitsandbytes` quantization config was used during training: -- quant_method: bitsandbytes -- load_in_8bit: True -- load_in_4bit: False -- llm_int8_threshold: 6.0 -- llm_int8_skip_modules: None -- llm_int8_enable_fp32_cpu_offload: False -- llm_int8_has_fp16_weight: False -- bnb_4bit_quant_type: fp4 -- bnb_4bit_use_double_quant: False -- bnb_4bit_compute_dtype: float32 - -The following `bitsandbytes` quantization config was used during training: -- quant_method: bitsandbytes -- load_in_8bit: True -- load_in_4bit: False -- llm_int8_threshold: 6.0 -- llm_int8_skip_modules: None -- llm_int8_enable_fp32_cpu_offload: False -- llm_int8_has_fp16_weight: False -- bnb_4bit_quant_type: fp4 -- bnb_4bit_use_double_quant: False -- bnb_4bit_compute_dtype: float32 - -The following `bitsandbytes` quantization config was used during training: -- quant_method: bitsandbytes -- load_in_8bit: True -- load_in_4bit: False -- llm_int8_threshold: 6.0 -- llm_int8_skip_modules: None -- llm_int8_enable_fp32_cpu_offload: False -- llm_int8_has_fp16_weight: False -- bnb_4bit_quant_type: fp4 -- bnb_4bit_use_double_quant: False -- bnb_4bit_compute_dtype: float32 - -The following `bitsandbytes` quantization config was used during training: -- quant_method: bitsandbytes -- load_in_8bit: True -- load_in_4bit: False -- llm_int8_threshold: 6.0 -- llm_int8_skip_modules: None -- llm_int8_enable_fp32_cpu_offload: False -- llm_int8_has_fp16_weight: False -- bnb_4bit_quant_type: fp4 -- bnb_4bit_use_double_quant: False -- bnb_4bit_compute_dtype: float32 - -The following `bitsandbytes` quantization config was used during training: -- quant_method: bitsandbytes -- load_in_8bit: True -- load_in_4bit: False -- llm_int8_threshold: 6.0 -- llm_int8_skip_modules: None -- llm_int8_enable_fp32_cpu_offload: False -- llm_int8_has_fp16_weight: False -- bnb_4bit_quant_type: fp4 -- bnb_4bit_use_double_quant: False -- bnb_4bit_compute_dtype: float32 - The following `bitsandbytes` quantization config was used during training: - quant_method: bitsandbytes - load_in_8bit: True @@ -149,13 +65,6 @@ The following `bitsandbytes` quantization config was used during training: - bnb_4bit_compute_dtype: float32 ### Framework versions -- PEFT 0.6.0.dev0 -- PEFT 0.6.0.dev0 -- PEFT 0.6.0.dev0 -- PEFT 0.6.0.dev0 -- PEFT 0.6.0.dev0 -- PEFT 0.6.0.dev0 -- PEFT 0.6.0.dev0 - PEFT 0.6.0.dev0 - PEFT 0.6.0.dev0 - PEFT 0.6.0.dev0 diff --git a/adapter_model.bin b/adapter_model.bin index 769701ae943c9ac01dcd07f8a646624dea495446..91e98fe05a6d865275fa50fd4f2ae6e8c6a67cee 100644 --- 
a/adapter_model.bin +++ b/adapter_model.bin @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:ab7d7332df354d85019ef8dbda22bf275f3e4612ebbe07ca7d3538dd755384e9 +oid sha256:e4ecd5f337c6a7563a7858b385415fc38ee7d3ebdd64f3cbdff82c605ba198cd size 39409357 diff --git a/checkpoint-100/README.md b/checkpoint-100/README.md index 08371015f02382e6fcba318f4aaea54ae52cd3c4..80ee0ae2b01a96ab60dd97039764ed018ddcd8e2 100644 --- a/checkpoint-100/README.md +++ b/checkpoint-100/README.md @@ -4,6 +4,126 @@ library_name: peft ## Training procedure +The following `bitsandbytes` quantization config was used during training: +- quant_method: bitsandbytes +- load_in_8bit: True +- load_in_4bit: False +- llm_int8_threshold: 6.0 +- llm_int8_skip_modules: None +- llm_int8_enable_fp32_cpu_offload: False +- llm_int8_has_fp16_weight: False +- bnb_4bit_quant_type: fp4 +- bnb_4bit_use_double_quant: False +- bnb_4bit_compute_dtype: float32 + +The following `bitsandbytes` quantization config was used during training: +- quant_method: bitsandbytes +- load_in_8bit: True +- load_in_4bit: False +- llm_int8_threshold: 6.0 +- llm_int8_skip_modules: None +- llm_int8_enable_fp32_cpu_offload: False +- llm_int8_has_fp16_weight: False +- bnb_4bit_quant_type: fp4 +- bnb_4bit_use_double_quant: False +- bnb_4bit_compute_dtype: float32 + +The following `bitsandbytes` quantization config was used during training: +- quant_method: bitsandbytes +- load_in_8bit: True +- load_in_4bit: False +- llm_int8_threshold: 6.0 +- llm_int8_skip_modules: None +- llm_int8_enable_fp32_cpu_offload: False +- llm_int8_has_fp16_weight: False +- bnb_4bit_quant_type: fp4 +- bnb_4bit_use_double_quant: False +- bnb_4bit_compute_dtype: float32 + +The following `bitsandbytes` quantization config was used during training: +- quant_method: bitsandbytes +- load_in_8bit: True +- load_in_4bit: False +- llm_int8_threshold: 6.0 +- llm_int8_skip_modules: None +- llm_int8_enable_fp32_cpu_offload: False +- llm_int8_has_fp16_weight: False +- bnb_4bit_quant_type: fp4 +- bnb_4bit_use_double_quant: False +- bnb_4bit_compute_dtype: float32 + +The following `bitsandbytes` quantization config was used during training: +- quant_method: bitsandbytes +- load_in_8bit: True +- load_in_4bit: False +- llm_int8_threshold: 6.0 +- llm_int8_skip_modules: None +- llm_int8_enable_fp32_cpu_offload: False +- llm_int8_has_fp16_weight: False +- bnb_4bit_quant_type: fp4 +- bnb_4bit_use_double_quant: False +- bnb_4bit_compute_dtype: float32 + +The following `bitsandbytes` quantization config was used during training: +- quant_method: bitsandbytes +- load_in_8bit: True +- load_in_4bit: False +- llm_int8_threshold: 6.0 +- llm_int8_skip_modules: None +- llm_int8_enable_fp32_cpu_offload: False +- llm_int8_has_fp16_weight: False +- bnb_4bit_quant_type: fp4 +- bnb_4bit_use_double_quant: False +- bnb_4bit_compute_dtype: float32 + +The following `bitsandbytes` quantization config was used during training: +- quant_method: bitsandbytes +- load_in_8bit: True +- load_in_4bit: False +- llm_int8_threshold: 6.0 +- llm_int8_skip_modules: None +- llm_int8_enable_fp32_cpu_offload: False +- llm_int8_has_fp16_weight: False +- bnb_4bit_quant_type: fp4 +- bnb_4bit_use_double_quant: False +- bnb_4bit_compute_dtype: float32 + +The following `bitsandbytes` quantization config was used during training: +- quant_method: bitsandbytes +- load_in_8bit: True +- load_in_4bit: False +- llm_int8_threshold: 6.0 +- llm_int8_skip_modules: None +- llm_int8_enable_fp32_cpu_offload: False +- llm_int8_has_fp16_weight: 
False +- bnb_4bit_quant_type: fp4 +- bnb_4bit_use_double_quant: False +- bnb_4bit_compute_dtype: float32 + +The following `bitsandbytes` quantization config was used during training: +- quant_method: bitsandbytes +- load_in_8bit: True +- load_in_4bit: False +- llm_int8_threshold: 6.0 +- llm_int8_skip_modules: None +- llm_int8_enable_fp32_cpu_offload: False +- llm_int8_has_fp16_weight: False +- bnb_4bit_quant_type: fp4 +- bnb_4bit_use_double_quant: False +- bnb_4bit_compute_dtype: float32 + +The following `bitsandbytes` quantization config was used during training: +- quant_method: bitsandbytes +- load_in_8bit: True +- load_in_4bit: False +- llm_int8_threshold: 6.0 +- llm_int8_skip_modules: None +- llm_int8_enable_fp32_cpu_offload: False +- llm_int8_has_fp16_weight: False +- bnb_4bit_quant_type: fp4 +- bnb_4bit_use_double_quant: False +- bnb_4bit_compute_dtype: float32 + The following `bitsandbytes` quantization config was used during training: - quant_method: bitsandbytes - load_in_8bit: True @@ -29,6 +149,16 @@ The following `bitsandbytes` quantization config was used during training: - bnb_4bit_compute_dtype: float32 ### Framework versions +- PEFT 0.6.0.dev0 +- PEFT 0.6.0.dev0 +- PEFT 0.6.0.dev0 +- PEFT 0.6.0.dev0 +- PEFT 0.6.0.dev0 +- PEFT 0.6.0.dev0 +- PEFT 0.6.0.dev0 +- PEFT 0.6.0.dev0 +- PEFT 0.6.0.dev0 +- PEFT 0.6.0.dev0 - PEFT 0.6.0.dev0 - PEFT 0.6.0.dev0 diff --git a/checkpoint-100/adapter_model.bin b/checkpoint-100/adapter_model.bin index 9cefddebe32057b0552baedbf3a139d5b5054704..769701ae943c9ac01dcd07f8a646624dea495446 100644 --- a/checkpoint-100/adapter_model.bin +++ b/checkpoint-100/adapter_model.bin @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:a1494d9295feb24e846e9762b1b2118d8c5592c6b44f650b463565b33581b2bd +oid sha256:ab7d7332df354d85019ef8dbda22bf275f3e4612ebbe07ca7d3538dd755384e9 size 39409357 diff --git a/checkpoint-100/optimizer.pt b/checkpoint-100/optimizer.pt index fac0f5f0b3493cc8e6ed7517e64a85da03a95adc..34032801bbe67857d30692ed9898878d063cb7e7 100644 --- a/checkpoint-100/optimizer.pt +++ b/checkpoint-100/optimizer.pt @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:c0a5a340ededaf6a479a86b5f1d38b1ad96df10d24d2e3b600016733e406c965 +oid sha256:8e7ecf4519b7c7fe30096c1ce0de750678f8cf403d65a166a096e8155b6d1665 size 78844421 diff --git a/checkpoint-100/rng_state.pth b/checkpoint-100/rng_state.pth index 452f99ac534b9117d836494d73222e3d44e1523b..e8a62beafdd6e61c41abbc54c8b7428f4e3484c0 100644 --- a/checkpoint-100/rng_state.pth +++ b/checkpoint-100/rng_state.pth @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:c6869750f95a25c4e970298a33adf90e2d7ab52680bf3317239bff1b10103235 +oid sha256:b2278a87cdf86c3f9219223c847f6b27f6b7f15b8226b617f38936e8ff2cbcde size 14575 diff --git a/checkpoint-100/scheduler.pt b/checkpoint-100/scheduler.pt index 1e03fe8e3f8f657765fc796487ba79b428fc773d..bb6d23cefeb42e3de94cbbe62d4d34c67e305d59 100644 --- a/checkpoint-100/scheduler.pt +++ b/checkpoint-100/scheduler.pt @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:a58b95c1126bab491ac28f5f742cbad6d9169c46a57b0c332b4be7e7507c82f5 +oid sha256:02822a64d6ab3629baf2f69c5adf000658973be0bc2532154cc2534085175f34 size 627 diff --git a/checkpoint-100/tokenizer.json b/checkpoint-100/tokenizer.json index dbf002cafbd4818dcff2abc9156c088d681b4533..673c31abdeadf6576c3c754df86459e1ad64e207 100644 --- a/checkpoint-100/tokenizer.json +++ b/checkpoint-100/tokenizer.json @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 
-oid sha256:17a208233d2ee8d8c83b23bc214df737c44806a1919f444e89b31e586cd956ba -size 14500471 +oid sha256:85b00d7db4df5df2e3f01cacc3feda246002a672f3356eec7f4b04a22eb0dfbe +size 14500570 diff --git a/checkpoint-100/trainer_state.json b/checkpoint-100/trainer_state.json index 522ea2d0c726e7f11042bb7fa90f062916630bba..5e07f9780764385efc69f7e354fe98c6a40d34a0 100644 --- a/checkpoint-100/trainer_state.json +++ b/checkpoint-100/trainer_state.json @@ -1,7 +1,7 @@ { "best_metric": null, "best_model_checkpoint": null, - "epoch": 4.0, + "epoch": 0.3429796355841372, "eval_steps": 500, "global_step": 100, "is_hyper_param_search": false, @@ -9,611 +9,611 @@ "is_world_process_zero": true, "log_history": [ { - "epoch": 0.04, - "learning_rate": 0.0001996, - "loss": 2.4683, + "epoch": 0.0, + "learning_rate": 0.00019931271477663232, + "loss": 2.5587, "step": 1 }, { - "epoch": 0.08, - "learning_rate": 0.00019920000000000002, - "loss": 2.2023, + "epoch": 0.01, + "learning_rate": 0.0001986254295532646, + "loss": 2.3914, "step": 2 }, { - "epoch": 0.12, - "learning_rate": 0.0001988, - "loss": 2.3039, + "epoch": 0.01, + "learning_rate": 0.00019793814432989693, + "loss": 2.4218, "step": 3 }, { - "epoch": 0.16, - "learning_rate": 0.0001984, - "loss": 2.1855, + "epoch": 0.01, + "learning_rate": 0.00019725085910652924, + "loss": 2.3414, "step": 4 }, { - "epoch": 0.2, - "learning_rate": 0.00019800000000000002, - "loss": 2.328, + "epoch": 0.02, + "learning_rate": 0.0001965635738831615, + "loss": 2.2469, "step": 5 }, { - "epoch": 0.24, - "learning_rate": 0.0001976, - "loss": 2.3407, + "epoch": 0.02, + "learning_rate": 0.00019587628865979381, + "loss": 2.3241, "step": 6 }, { - "epoch": 0.28, - "learning_rate": 0.0001972, - "loss": 2.3432, + "epoch": 0.02, + "learning_rate": 0.00019518900343642613, + "loss": 2.3266, "step": 7 }, { - "epoch": 0.32, - "learning_rate": 0.0001968, - "loss": 2.2333, + "epoch": 0.03, + "learning_rate": 0.00019450171821305842, + "loss": 2.1856, "step": 8 }, { - "epoch": 0.36, - "learning_rate": 0.0001964, - "loss": 2.1761, + "epoch": 0.03, + "learning_rate": 0.00019381443298969073, + "loss": 2.3247, "step": 9 }, { - "epoch": 0.4, - "learning_rate": 0.000196, - "loss": 2.1473, + "epoch": 0.03, + "learning_rate": 0.00019312714776632305, + "loss": 2.3245, "step": 10 }, { - "epoch": 0.44, - "learning_rate": 0.0001956, - "loss": 2.2076, + "epoch": 0.04, + "learning_rate": 0.00019243986254295533, + "loss": 2.2591, "step": 11 }, { - "epoch": 0.48, - "learning_rate": 0.0001952, - "loss": 2.1925, + "epoch": 0.04, + "learning_rate": 0.00019175257731958765, + "loss": 2.1767, "step": 12 }, { - "epoch": 0.52, - "learning_rate": 0.0001948, - "loss": 2.1694, + "epoch": 0.04, + "learning_rate": 0.00019106529209621996, + "loss": 2.3478, "step": 13 }, { - "epoch": 0.56, - "learning_rate": 0.0001944, - "loss": 2.1056, + "epoch": 0.05, + "learning_rate": 0.00019037800687285222, + "loss": 2.3339, "step": 14 }, { - "epoch": 0.6, - "learning_rate": 0.000194, - "loss": 2.2426, + "epoch": 0.05, + "learning_rate": 0.00018969072164948454, + "loss": 2.234, "step": 15 }, { - "epoch": 0.64, - "learning_rate": 0.00019360000000000002, - "loss": 2.2635, + "epoch": 0.05, + "learning_rate": 0.00018900343642611685, + "loss": 2.2651, "step": 16 }, { - "epoch": 0.68, - "learning_rate": 0.0001932, - "loss": 2.251, + "epoch": 0.06, + "learning_rate": 0.00018831615120274914, + "loss": 2.1831, "step": 17 }, { - "epoch": 0.72, - "learning_rate": 0.0001928, - "loss": 2.224, + "epoch": 0.06, + "learning_rate": 0.00018762886597938145, + 
"loss": 2.216, "step": 18 }, { - "epoch": 0.76, - "learning_rate": 0.00019240000000000001, - "loss": 2.1848, + "epoch": 0.07, + "learning_rate": 0.00018694158075601377, + "loss": 2.1359, "step": 19 }, { - "epoch": 0.8, - "learning_rate": 0.000192, - "loss": 2.1341, + "epoch": 0.07, + "learning_rate": 0.00018625429553264605, + "loss": 2.1215, "step": 20 }, { - "epoch": 0.84, - "learning_rate": 0.0001916, - "loss": 2.126, + "epoch": 0.07, + "learning_rate": 0.00018556701030927837, + "loss": 2.2179, "step": 21 }, { - "epoch": 0.88, - "learning_rate": 0.0001912, - "loss": 2.0959, + "epoch": 0.08, + "learning_rate": 0.00018487972508591068, + "loss": 2.2598, "step": 22 }, { - "epoch": 0.92, - "learning_rate": 0.0001908, - "loss": 2.2446, + "epoch": 0.08, + "learning_rate": 0.00018419243986254294, + "loss": 2.1813, "step": 23 }, { - "epoch": 0.96, - "learning_rate": 0.0001904, - "loss": 1.9448, + "epoch": 0.08, + "learning_rate": 0.00018350515463917526, + "loss": 2.2006, "step": 24 }, { - "epoch": 1.0, - "learning_rate": 0.00019, - "loss": 2.0939, + "epoch": 0.09, + "learning_rate": 0.00018281786941580757, + "loss": 2.1564, "step": 25 }, { - "epoch": 1.04, - "learning_rate": 0.0001896, - "loss": 2.1247, + "epoch": 0.09, + "learning_rate": 0.00018213058419243986, + "loss": 2.2537, "step": 26 }, { - "epoch": 1.08, - "learning_rate": 0.0001892, - "loss": 1.9417, + "epoch": 0.09, + "learning_rate": 0.00018144329896907217, + "loss": 2.1975, "step": 27 }, { - "epoch": 1.12, - "learning_rate": 0.0001888, - "loss": 2.105, + "epoch": 0.1, + "learning_rate": 0.0001807560137457045, + "loss": 2.2566, "step": 28 }, { - "epoch": 1.16, - "learning_rate": 0.0001884, - "loss": 2.1595, + "epoch": 0.1, + "learning_rate": 0.00018006872852233677, + "loss": 2.1464, "step": 29 }, { - "epoch": 1.2, - "learning_rate": 0.000188, - "loss": 2.2009, + "epoch": 0.1, + "learning_rate": 0.0001793814432989691, + "loss": 2.1421, "step": 30 }, { - "epoch": 1.24, - "learning_rate": 0.0001876, - "loss": 2.0784, + "epoch": 0.11, + "learning_rate": 0.0001786941580756014, + "loss": 2.1276, "step": 31 }, { - "epoch": 1.28, - "learning_rate": 0.00018720000000000002, - "loss": 2.2173, + "epoch": 0.11, + "learning_rate": 0.00017800687285223366, + "loss": 2.0649, "step": 32 }, { - "epoch": 1.32, - "learning_rate": 0.00018680000000000001, - "loss": 2.1185, + "epoch": 0.11, + "learning_rate": 0.00017731958762886598, + "loss": 2.1835, "step": 33 }, { - "epoch": 1.36, - "learning_rate": 0.00018640000000000003, - "loss": 1.988, + "epoch": 0.12, + "learning_rate": 0.0001766323024054983, + "loss": 2.1711, "step": 34 }, { - "epoch": 1.4, - "learning_rate": 0.00018600000000000002, - "loss": 1.9493, + "epoch": 0.12, + "learning_rate": 0.00017594501718213058, + "loss": 2.2591, "step": 35 }, { - "epoch": 1.44, - "learning_rate": 0.0001856, - "loss": 1.9947, + "epoch": 0.12, + "learning_rate": 0.0001752577319587629, + "loss": 2.1471, "step": 36 }, { - "epoch": 1.48, - "learning_rate": 0.00018520000000000003, - "loss": 2.0506, + "epoch": 0.13, + "learning_rate": 0.0001745704467353952, + "loss": 2.0861, "step": 37 }, { - "epoch": 1.52, - "learning_rate": 0.00018480000000000002, - "loss": 2.0829, + "epoch": 0.13, + "learning_rate": 0.0001738831615120275, + "loss": 2.0702, "step": 38 }, { - "epoch": 1.56, - "learning_rate": 0.0001844, - "loss": 2.1455, + "epoch": 0.13, + "learning_rate": 0.0001731958762886598, + "loss": 2.1096, "step": 39 }, { - "epoch": 1.6, - "learning_rate": 0.00018400000000000003, - "loss": 2.0304, + "epoch": 0.14, + "learning_rate": 
0.00017250859106529212, + "loss": 2.1062, "step": 40 }, { - "epoch": 1.64, - "learning_rate": 0.00018360000000000002, - "loss": 2.006, + "epoch": 0.14, + "learning_rate": 0.00017182130584192438, + "loss": 2.2545, "step": 41 }, { - "epoch": 1.68, - "learning_rate": 0.0001832, - "loss": 2.1759, + "epoch": 0.14, + "learning_rate": 0.0001711340206185567, + "loss": 2.1572, "step": 42 }, { - "epoch": 1.72, - "learning_rate": 0.00018280000000000003, - "loss": 2.0093, + "epoch": 0.15, + "learning_rate": 0.000170446735395189, + "loss": 2.0749, "step": 43 }, { - "epoch": 1.76, - "learning_rate": 0.00018240000000000002, - "loss": 2.0683, + "epoch": 0.15, + "learning_rate": 0.0001697594501718213, + "loss": 2.1922, "step": 44 }, { - "epoch": 1.8, - "learning_rate": 0.000182, - "loss": 2.1352, + "epoch": 0.15, + "learning_rate": 0.00016907216494845361, + "loss": 2.1915, "step": 45 }, { - "epoch": 1.84, - "learning_rate": 0.00018160000000000002, - "loss": 1.9736, + "epoch": 0.16, + "learning_rate": 0.00016838487972508593, + "loss": 2.1594, "step": 46 }, { - "epoch": 1.88, - "learning_rate": 0.0001812, - "loss": 1.9631, + "epoch": 0.16, + "learning_rate": 0.00016769759450171822, + "loss": 2.176, "step": 47 }, { - "epoch": 1.92, - "learning_rate": 0.0001808, - "loss": 2.1024, + "epoch": 0.16, + "learning_rate": 0.00016701030927835053, + "loss": 2.1223, "step": 48 }, { - "epoch": 1.96, - "learning_rate": 0.00018040000000000002, - "loss": 1.9895, + "epoch": 0.17, + "learning_rate": 0.00016632302405498285, + "loss": 2.1263, "step": 49 }, { - "epoch": 2.0, - "learning_rate": 0.00018, - "loss": 2.0067, + "epoch": 0.17, + "learning_rate": 0.00016563573883161513, + "loss": 2.0481, "step": 50 }, { - "epoch": 2.04, - "learning_rate": 0.0001796, - "loss": 2.0509, + "epoch": 0.17, + "learning_rate": 0.00016494845360824742, + "loss": 2.1043, "step": 51 }, { - "epoch": 2.08, - "learning_rate": 0.00017920000000000002, - "loss": 1.8795, + "epoch": 0.18, + "learning_rate": 0.00016426116838487973, + "loss": 2.1678, "step": 52 }, { - "epoch": 2.12, - "learning_rate": 0.0001788, - "loss": 2.0633, + "epoch": 0.18, + "learning_rate": 0.00016357388316151202, + "loss": 2.1602, "step": 53 }, { - "epoch": 2.16, - "learning_rate": 0.0001784, - "loss": 1.9998, + "epoch": 0.19, + "learning_rate": 0.00016288659793814434, + "loss": 2.1448, "step": 54 }, { - "epoch": 2.2, - "learning_rate": 0.00017800000000000002, - "loss": 2.0675, + "epoch": 0.19, + "learning_rate": 0.00016219931271477665, + "loss": 2.1536, "step": 55 }, { - "epoch": 2.24, - "learning_rate": 0.0001776, - "loss": 2.0129, + "epoch": 0.19, + "learning_rate": 0.00016151202749140894, + "loss": 2.0339, "step": 56 }, { - "epoch": 2.28, - "learning_rate": 0.0001772, - "loss": 1.9302, + "epoch": 0.2, + "learning_rate": 0.00016082474226804125, + "loss": 2.023, "step": 57 }, { - "epoch": 2.32, - "learning_rate": 0.00017680000000000001, - "loss": 2.0472, + "epoch": 0.2, + "learning_rate": 0.00016013745704467357, + "loss": 2.1407, "step": 58 }, { - "epoch": 2.36, - "learning_rate": 0.0001764, - "loss": 2.0031, + "epoch": 0.2, + "learning_rate": 0.00015945017182130585, + "loss": 2.1134, "step": 59 }, { - "epoch": 2.4, - "learning_rate": 0.00017600000000000002, - "loss": 2.0231, + "epoch": 0.21, + "learning_rate": 0.00015876288659793814, + "loss": 2.1652, "step": 60 }, { - "epoch": 2.44, - "learning_rate": 0.0001756, - "loss": 2.0192, + "epoch": 0.21, + "learning_rate": 0.00015807560137457046, + "loss": 2.0051, "step": 61 }, { - "epoch": 2.48, - "learning_rate": 0.0001752, - "loss": 
2.1336, + "epoch": 0.21, + "learning_rate": 0.00015738831615120274, + "loss": 2.0604, "step": 62 }, { - "epoch": 2.52, - "learning_rate": 0.00017480000000000002, - "loss": 1.7753, + "epoch": 0.22, + "learning_rate": 0.00015670103092783506, + "loss": 2.1708, "step": 63 }, { - "epoch": 2.56, - "learning_rate": 0.0001744, - "loss": 2.0274, + "epoch": 0.22, + "learning_rate": 0.00015601374570446737, + "loss": 2.1106, "step": 64 }, { - "epoch": 2.6, - "learning_rate": 0.000174, - "loss": 1.8893, + "epoch": 0.22, + "learning_rate": 0.00015532646048109966, + "loss": 2.1445, "step": 65 }, { - "epoch": 2.64, - "learning_rate": 0.00017360000000000002, - "loss": 2.0341, + "epoch": 0.23, + "learning_rate": 0.00015463917525773197, + "loss": 2.0879, "step": 66 }, { - "epoch": 2.68, - "learning_rate": 0.0001732, - "loss": 2.1081, + "epoch": 0.23, + "learning_rate": 0.0001539518900343643, + "loss": 2.1498, "step": 67 }, { - "epoch": 2.72, - "learning_rate": 0.0001728, - "loss": 1.9463, + "epoch": 0.23, + "learning_rate": 0.00015326460481099657, + "loss": 2.0719, "step": 68 }, { - "epoch": 2.76, - "learning_rate": 0.00017240000000000002, - "loss": 2.0607, + "epoch": 0.24, + "learning_rate": 0.00015257731958762886, + "loss": 2.2167, "step": 69 }, { - "epoch": 2.8, - "learning_rate": 0.000172, - "loss": 1.9803, + "epoch": 0.24, + "learning_rate": 0.00015189003436426118, + "loss": 2.0811, "step": 70 }, { - "epoch": 2.84, - "learning_rate": 0.0001716, - "loss": 1.7663, + "epoch": 0.24, + "learning_rate": 0.00015120274914089346, + "loss": 2.1058, "step": 71 }, { - "epoch": 2.88, - "learning_rate": 0.00017120000000000001, - "loss": 2.1116, + "epoch": 0.25, + "learning_rate": 0.00015051546391752578, + "loss": 2.0392, "step": 72 }, { - "epoch": 2.92, - "learning_rate": 0.0001708, - "loss": 1.9039, + "epoch": 0.25, + "learning_rate": 0.0001498281786941581, + "loss": 2.0957, "step": 73 }, { - "epoch": 2.96, - "learning_rate": 0.0001704, - "loss": 2.049, + "epoch": 0.25, + "learning_rate": 0.00014914089347079038, + "loss": 1.9994, "step": 74 }, { - "epoch": 3.0, - "learning_rate": 0.00017, - "loss": 1.991, + "epoch": 0.26, + "learning_rate": 0.0001484536082474227, + "loss": 2.0464, "step": 75 }, { - "epoch": 3.04, - "learning_rate": 0.0001696, - "loss": 1.8755, + "epoch": 0.26, + "learning_rate": 0.000147766323024055, + "loss": 2.0417, "step": 76 }, { - "epoch": 3.08, - "learning_rate": 0.0001692, - "loss": 2.0367, + "epoch": 0.26, + "learning_rate": 0.0001470790378006873, + "loss": 2.105, "step": 77 }, { - "epoch": 3.12, - "learning_rate": 0.0001688, - "loss": 1.9959, + "epoch": 0.27, + "learning_rate": 0.00014639175257731958, + "loss": 2.1147, "step": 78 }, { - "epoch": 3.16, - "learning_rate": 0.0001684, - "loss": 1.8051, + "epoch": 0.27, + "learning_rate": 0.0001457044673539519, + "loss": 1.9964, "step": 79 }, { - "epoch": 3.2, - "learning_rate": 0.000168, - "loss": 2.0446, + "epoch": 0.27, + "learning_rate": 0.00014501718213058418, + "loss": 1.9723, "step": 80 }, { - "epoch": 3.24, - "learning_rate": 0.0001676, - "loss": 1.7873, + "epoch": 0.28, + "learning_rate": 0.0001443298969072165, + "loss": 2.0621, "step": 81 }, { - "epoch": 3.28, - "learning_rate": 0.0001672, - "loss": 1.9264, + "epoch": 0.28, + "learning_rate": 0.00014364261168384881, + "loss": 2.2703, "step": 82 }, { - "epoch": 3.32, - "learning_rate": 0.0001668, - "loss": 1.8595, + "epoch": 0.28, + "learning_rate": 0.0001429553264604811, + "loss": 2.0815, "step": 83 }, { - "epoch": 3.36, - "learning_rate": 0.0001664, - "loss": 1.9992, + "epoch": 0.29, 
+ "learning_rate": 0.00014226804123711342, + "loss": 2.0774, "step": 84 }, { - "epoch": 3.4, - "learning_rate": 0.000166, - "loss": 1.8783, + "epoch": 0.29, + "learning_rate": 0.00014158075601374573, + "loss": 2.066, "step": 85 }, { - "epoch": 3.44, - "learning_rate": 0.0001656, - "loss": 2.0216, + "epoch": 0.29, + "learning_rate": 0.00014089347079037802, + "loss": 2.03, "step": 86 }, { - "epoch": 3.48, - "learning_rate": 0.0001652, - "loss": 1.947, + "epoch": 0.3, + "learning_rate": 0.0001402061855670103, + "loss": 2.1433, "step": 87 }, { - "epoch": 3.52, - "learning_rate": 0.0001648, - "loss": 1.9554, + "epoch": 0.3, + "learning_rate": 0.00013951890034364262, + "loss": 2.0811, "step": 88 }, { - "epoch": 3.56, - "learning_rate": 0.0001644, - "loss": 1.8563, + "epoch": 0.31, + "learning_rate": 0.0001388316151202749, + "loss": 1.9791, "step": 89 }, { - "epoch": 3.6, - "learning_rate": 0.000164, - "loss": 1.7795, + "epoch": 0.31, + "learning_rate": 0.00013814432989690722, + "loss": 2.0876, "step": 90 }, { - "epoch": 3.64, - "learning_rate": 0.0001636, - "loss": 1.9347, + "epoch": 0.31, + "learning_rate": 0.00013745704467353953, + "loss": 2.0314, "step": 91 }, { - "epoch": 3.68, - "learning_rate": 0.0001632, - "loss": 2.0078, + "epoch": 0.32, + "learning_rate": 0.00013676975945017182, + "loss": 1.9485, "step": 92 }, { - "epoch": 3.72, - "learning_rate": 0.0001628, - "loss": 1.9964, + "epoch": 0.32, + "learning_rate": 0.00013608247422680414, + "loss": 2.078, "step": 93 }, { - "epoch": 3.76, - "learning_rate": 0.00016240000000000002, - "loss": 1.9004, + "epoch": 0.32, + "learning_rate": 0.00013539518900343645, + "loss": 2.1251, "step": 94 }, { - "epoch": 3.8, - "learning_rate": 0.000162, - "loss": 1.9446, + "epoch": 0.33, + "learning_rate": 0.00013470790378006874, + "loss": 1.9736, "step": 95 }, { - "epoch": 3.84, - "learning_rate": 0.00016160000000000002, - "loss": 1.8857, + "epoch": 0.33, + "learning_rate": 0.00013402061855670103, + "loss": 2.0189, "step": 96 }, { - "epoch": 3.88, - "learning_rate": 0.00016120000000000002, - "loss": 1.8797, + "epoch": 0.33, + "learning_rate": 0.00013333333333333334, + "loss": 2.0061, "step": 97 }, { - "epoch": 3.92, - "learning_rate": 0.0001608, - "loss": 1.7643, + "epoch": 0.34, + "learning_rate": 0.00013264604810996563, + "loss": 1.9595, "step": 98 }, { - "epoch": 3.96, - "learning_rate": 0.00016040000000000002, - "loss": 1.9724, + "epoch": 0.34, + "learning_rate": 0.00013195876288659794, + "loss": 1.9702, "step": 99 }, { - "epoch": 4.0, - "learning_rate": 0.00016, - "loss": 1.9058, + "epoch": 0.34, + "learning_rate": 0.00013127147766323026, + "loss": 2.0322, "step": 100 } ], "logging_steps": 1, - "max_steps": 500, - "num_train_epochs": 20, + "max_steps": 291, + "num_train_epochs": 1, "save_steps": 100, - "total_flos": 3.406529677264896e+16, + "total_flos": 6.083398113601536e+16, "trial_name": null, "trial_params": null } diff --git a/checkpoint-100/training_args.bin b/checkpoint-100/training_args.bin index 4de6572a838c337c9990635a9406ebf46c0ec336..514bd8136ebdc96a275890c7b9b4df6ac072906c 100644 --- a/checkpoint-100/training_args.bin +++ b/checkpoint-100/training_args.bin @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:6ef74b31950ae6c8955316bed48c343fb06cda0cb6a8a54ca46ca3cb681c8736 +oid sha256:7e3410eea0ea0eb09ca576511099334880accaab360c0279f3099c9e4d2e877a size 4027 diff --git a/checkpoint-200/README.md b/checkpoint-200/README.md index 08371015f02382e6fcba318f4aaea54ae52cd3c4..876f20b74d7edd2433c53bf3037eb5a69dd2d328 100644 --- 
a/checkpoint-200/README.md +++ b/checkpoint-200/README.md @@ -4,6 +4,54 @@ library_name: peft ## Training procedure +The following `bitsandbytes` quantization config was used during training: +- quant_method: bitsandbytes +- load_in_8bit: True +- load_in_4bit: False +- llm_int8_threshold: 6.0 +- llm_int8_skip_modules: None +- llm_int8_enable_fp32_cpu_offload: False +- llm_int8_has_fp16_weight: False +- bnb_4bit_quant_type: fp4 +- bnb_4bit_use_double_quant: False +- bnb_4bit_compute_dtype: float32 + +The following `bitsandbytes` quantization config was used during training: +- quant_method: bitsandbytes +- load_in_8bit: True +- load_in_4bit: False +- llm_int8_threshold: 6.0 +- llm_int8_skip_modules: None +- llm_int8_enable_fp32_cpu_offload: False +- llm_int8_has_fp16_weight: False +- bnb_4bit_quant_type: fp4 +- bnb_4bit_use_double_quant: False +- bnb_4bit_compute_dtype: float32 + +The following `bitsandbytes` quantization config was used during training: +- quant_method: bitsandbytes +- load_in_8bit: True +- load_in_4bit: False +- llm_int8_threshold: 6.0 +- llm_int8_skip_modules: None +- llm_int8_enable_fp32_cpu_offload: False +- llm_int8_has_fp16_weight: False +- bnb_4bit_quant_type: fp4 +- bnb_4bit_use_double_quant: False +- bnb_4bit_compute_dtype: float32 + +The following `bitsandbytes` quantization config was used during training: +- quant_method: bitsandbytes +- load_in_8bit: True +- load_in_4bit: False +- llm_int8_threshold: 6.0 +- llm_int8_skip_modules: None +- llm_int8_enable_fp32_cpu_offload: False +- llm_int8_has_fp16_weight: False +- bnb_4bit_quant_type: fp4 +- bnb_4bit_use_double_quant: False +- bnb_4bit_compute_dtype: float32 + The following `bitsandbytes` quantization config was used during training: - quant_method: bitsandbytes - load_in_8bit: True @@ -29,6 +77,10 @@ The following `bitsandbytes` quantization config was used during training: - bnb_4bit_compute_dtype: float32 ### Framework versions +- PEFT 0.6.0.dev0 +- PEFT 0.6.0.dev0 +- PEFT 0.6.0.dev0 +- PEFT 0.6.0.dev0 - PEFT 0.6.0.dev0 - PEFT 0.6.0.dev0 diff --git a/checkpoint-200/adapter_model.bin b/checkpoint-200/adapter_model.bin index ef1e30077687745ef49bd7f0dd6d052f39af2e4f..f37b103d9f11e64e1f3f3ccdfe0d80e5cd684300 100644 --- a/checkpoint-200/adapter_model.bin +++ b/checkpoint-200/adapter_model.bin @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:6eb0dcfb489eef093d9f20354ad4dbd76c3cacdee8012aaf3a92dc2a4267e697 +oid sha256:751fbae1398bb745f5e6d01f275966cedebe1e984b206292fb8476886cfd7027 size 39409357 diff --git a/checkpoint-200/optimizer.pt b/checkpoint-200/optimizer.pt index 3a352549f61b48d5ef8779874d4311cb8543ecf6..d738b6d76f95b9fc3edbf31110c82a541e289d1e 100644 --- a/checkpoint-200/optimizer.pt +++ b/checkpoint-200/optimizer.pt @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:b5f31f7a2996c207ab778c7745622649ab7de438c906222dc8ef5f81d8c4ed10 +oid sha256:6d7f250b734db4b40976d1847121f325e5f8e4aa321a8357abe3dc127b5fc83a size 78844421 diff --git a/checkpoint-200/rng_state.pth b/checkpoint-200/rng_state.pth index a0e229d77fa9bb9693355411d7e7ba7367c65223..e8a62beafdd6e61c41abbc54c8b7428f4e3484c0 100644 --- a/checkpoint-200/rng_state.pth +++ b/checkpoint-200/rng_state.pth @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:36fc71bd44bd7f04f2599c5dface64c517de1a7ab7bac3600f3f6470c6c72673 +oid sha256:b2278a87cdf86c3f9219223c847f6b27f6b7f15b8226b617f38936e8ff2cbcde size 14575 diff --git a/checkpoint-200/scheduler.pt b/checkpoint-200/scheduler.pt index 
40f3132372df114bd32a97767334e3e4ddd9e131..4d75c6ac674606680612e451dbc35eaa55daab52 100644 --- a/checkpoint-200/scheduler.pt +++ b/checkpoint-200/scheduler.pt @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:7cf1ea83db88b65824ac70ed44968b05e8a83b81aa97e9d6a33c16e33168c1fe +oid sha256:e683229dde9f79f059d3d1efb56203ba5ea68642c6ec20811bb7b486e3a6e3a2 size 627 diff --git a/checkpoint-200/tokenizer.json b/checkpoint-200/tokenizer.json index dbf002cafbd4818dcff2abc9156c088d681b4533..673c31abdeadf6576c3c754df86459e1ad64e207 100644 --- a/checkpoint-200/tokenizer.json +++ b/checkpoint-200/tokenizer.json @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:17a208233d2ee8d8c83b23bc214df737c44806a1919f444e89b31e586cd956ba -size 14500471 +oid sha256:85b00d7db4df5df2e3f01cacc3feda246002a672f3356eec7f4b04a22eb0dfbe +size 14500570 diff --git a/checkpoint-200/trainer_state.json b/checkpoint-200/trainer_state.json index 93b4e50d2d23a116bf60296f900d7238745a0bf0..361e976614da9960ef805333a02262d0c4eb2739 100644 --- a/checkpoint-200/trainer_state.json +++ b/checkpoint-200/trainer_state.json @@ -1,7 +1,7 @@ { "best_metric": null, "best_model_checkpoint": null, - "epoch": 8.0, + "epoch": 0.6859592711682744, "eval_steps": 500, "global_step": 200, "is_hyper_param_search": false, @@ -9,1211 +9,1211 @@ "is_world_process_zero": true, "log_history": [ { - "epoch": 0.04, - "learning_rate": 0.0001996, - "loss": 2.4683, + "epoch": 0.0, + "learning_rate": 0.00019931271477663232, + "loss": 2.5587, "step": 1 }, { - "epoch": 0.08, - "learning_rate": 0.00019920000000000002, - "loss": 2.2023, + "epoch": 0.01, + "learning_rate": 0.0001986254295532646, + "loss": 2.3914, "step": 2 }, { - "epoch": 0.12, - "learning_rate": 0.0001988, - "loss": 2.3039, + "epoch": 0.01, + "learning_rate": 0.00019793814432989693, + "loss": 2.4218, "step": 3 }, { - "epoch": 0.16, - "learning_rate": 0.0001984, - "loss": 2.1855, + "epoch": 0.01, + "learning_rate": 0.00019725085910652924, + "loss": 2.3414, "step": 4 }, { - "epoch": 0.2, - "learning_rate": 0.00019800000000000002, - "loss": 2.328, + "epoch": 0.02, + "learning_rate": 0.0001965635738831615, + "loss": 2.2469, "step": 5 }, { - "epoch": 0.24, - "learning_rate": 0.0001976, - "loss": 2.3407, + "epoch": 0.02, + "learning_rate": 0.00019587628865979381, + "loss": 2.3241, "step": 6 }, { - "epoch": 0.28, - "learning_rate": 0.0001972, - "loss": 2.3432, + "epoch": 0.02, + "learning_rate": 0.00019518900343642613, + "loss": 2.3266, "step": 7 }, { - "epoch": 0.32, - "learning_rate": 0.0001968, - "loss": 2.2333, + "epoch": 0.03, + "learning_rate": 0.00019450171821305842, + "loss": 2.1856, "step": 8 }, { - "epoch": 0.36, - "learning_rate": 0.0001964, - "loss": 2.1761, + "epoch": 0.03, + "learning_rate": 0.00019381443298969073, + "loss": 2.3247, "step": 9 }, { - "epoch": 0.4, - "learning_rate": 0.000196, - "loss": 2.1473, + "epoch": 0.03, + "learning_rate": 0.00019312714776632305, + "loss": 2.3245, "step": 10 }, { - "epoch": 0.44, - "learning_rate": 0.0001956, - "loss": 2.2076, + "epoch": 0.04, + "learning_rate": 0.00019243986254295533, + "loss": 2.2591, "step": 11 }, { - "epoch": 0.48, - "learning_rate": 0.0001952, - "loss": 2.1925, + "epoch": 0.04, + "learning_rate": 0.00019175257731958765, + "loss": 2.1767, "step": 12 }, { - "epoch": 0.52, - "learning_rate": 0.0001948, - "loss": 2.1694, + "epoch": 0.04, + "learning_rate": 0.00019106529209621996, + "loss": 2.3478, "step": 13 }, { - "epoch": 0.56, - "learning_rate": 0.0001944, - "loss": 2.1056, + "epoch": 0.05, + 
"learning_rate": 0.00019037800687285222, + "loss": 2.3339, "step": 14 }, { - "epoch": 0.6, - "learning_rate": 0.000194, - "loss": 2.2426, + "epoch": 0.05, + "learning_rate": 0.00018969072164948454, + "loss": 2.234, "step": 15 }, { - "epoch": 0.64, - "learning_rate": 0.00019360000000000002, - "loss": 2.2635, + "epoch": 0.05, + "learning_rate": 0.00018900343642611685, + "loss": 2.2651, "step": 16 }, { - "epoch": 0.68, - "learning_rate": 0.0001932, - "loss": 2.251, + "epoch": 0.06, + "learning_rate": 0.00018831615120274914, + "loss": 2.1831, "step": 17 }, { - "epoch": 0.72, - "learning_rate": 0.0001928, - "loss": 2.224, + "epoch": 0.06, + "learning_rate": 0.00018762886597938145, + "loss": 2.216, "step": 18 }, { - "epoch": 0.76, - "learning_rate": 0.00019240000000000001, - "loss": 2.1848, + "epoch": 0.07, + "learning_rate": 0.00018694158075601377, + "loss": 2.1359, "step": 19 }, { - "epoch": 0.8, - "learning_rate": 0.000192, - "loss": 2.1341, + "epoch": 0.07, + "learning_rate": 0.00018625429553264605, + "loss": 2.1215, "step": 20 }, { - "epoch": 0.84, - "learning_rate": 0.0001916, - "loss": 2.126, + "epoch": 0.07, + "learning_rate": 0.00018556701030927837, + "loss": 2.2179, "step": 21 }, { - "epoch": 0.88, - "learning_rate": 0.0001912, - "loss": 2.0959, + "epoch": 0.08, + "learning_rate": 0.00018487972508591068, + "loss": 2.2598, "step": 22 }, { - "epoch": 0.92, - "learning_rate": 0.0001908, - "loss": 2.2446, + "epoch": 0.08, + "learning_rate": 0.00018419243986254294, + "loss": 2.1813, "step": 23 }, { - "epoch": 0.96, - "learning_rate": 0.0001904, - "loss": 1.9448, + "epoch": 0.08, + "learning_rate": 0.00018350515463917526, + "loss": 2.2006, "step": 24 }, { - "epoch": 1.0, - "learning_rate": 0.00019, - "loss": 2.0939, + "epoch": 0.09, + "learning_rate": 0.00018281786941580757, + "loss": 2.1564, "step": 25 }, { - "epoch": 1.04, - "learning_rate": 0.0001896, - "loss": 2.1247, + "epoch": 0.09, + "learning_rate": 0.00018213058419243986, + "loss": 2.2537, "step": 26 }, { - "epoch": 1.08, - "learning_rate": 0.0001892, - "loss": 1.9417, + "epoch": 0.09, + "learning_rate": 0.00018144329896907217, + "loss": 2.1975, "step": 27 }, { - "epoch": 1.12, - "learning_rate": 0.0001888, - "loss": 2.105, + "epoch": 0.1, + "learning_rate": 0.0001807560137457045, + "loss": 2.2566, "step": 28 }, { - "epoch": 1.16, - "learning_rate": 0.0001884, - "loss": 2.1595, + "epoch": 0.1, + "learning_rate": 0.00018006872852233677, + "loss": 2.1464, "step": 29 }, { - "epoch": 1.2, - "learning_rate": 0.000188, - "loss": 2.2009, + "epoch": 0.1, + "learning_rate": 0.0001793814432989691, + "loss": 2.1421, "step": 30 }, { - "epoch": 1.24, - "learning_rate": 0.0001876, - "loss": 2.0784, + "epoch": 0.11, + "learning_rate": 0.0001786941580756014, + "loss": 2.1276, "step": 31 }, { - "epoch": 1.28, - "learning_rate": 0.00018720000000000002, - "loss": 2.2173, + "epoch": 0.11, + "learning_rate": 0.00017800687285223366, + "loss": 2.0649, "step": 32 }, { - "epoch": 1.32, - "learning_rate": 0.00018680000000000001, - "loss": 2.1185, + "epoch": 0.11, + "learning_rate": 0.00017731958762886598, + "loss": 2.1835, "step": 33 }, { - "epoch": 1.36, - "learning_rate": 0.00018640000000000003, - "loss": 1.988, + "epoch": 0.12, + "learning_rate": 0.0001766323024054983, + "loss": 2.1711, "step": 34 }, { - "epoch": 1.4, - "learning_rate": 0.00018600000000000002, - "loss": 1.9493, + "epoch": 0.12, + "learning_rate": 0.00017594501718213058, + "loss": 2.2591, "step": 35 }, { - "epoch": 1.44, - "learning_rate": 0.0001856, - "loss": 1.9947, + "epoch": 0.12, + 
"learning_rate": 0.0001752577319587629, + "loss": 2.1471, "step": 36 }, { - "epoch": 1.48, - "learning_rate": 0.00018520000000000003, - "loss": 2.0506, + "epoch": 0.13, + "learning_rate": 0.0001745704467353952, + "loss": 2.0861, "step": 37 }, { - "epoch": 1.52, - "learning_rate": 0.00018480000000000002, - "loss": 2.0829, + "epoch": 0.13, + "learning_rate": 0.0001738831615120275, + "loss": 2.0702, "step": 38 }, { - "epoch": 1.56, - "learning_rate": 0.0001844, - "loss": 2.1455, + "epoch": 0.13, + "learning_rate": 0.0001731958762886598, + "loss": 2.1096, "step": 39 }, { - "epoch": 1.6, - "learning_rate": 0.00018400000000000003, - "loss": 2.0304, + "epoch": 0.14, + "learning_rate": 0.00017250859106529212, + "loss": 2.1062, "step": 40 }, { - "epoch": 1.64, - "learning_rate": 0.00018360000000000002, - "loss": 2.006, + "epoch": 0.14, + "learning_rate": 0.00017182130584192438, + "loss": 2.2545, "step": 41 }, { - "epoch": 1.68, - "learning_rate": 0.0001832, - "loss": 2.1759, + "epoch": 0.14, + "learning_rate": 0.0001711340206185567, + "loss": 2.1572, "step": 42 }, { - "epoch": 1.72, - "learning_rate": 0.00018280000000000003, - "loss": 2.0093, + "epoch": 0.15, + "learning_rate": 0.000170446735395189, + "loss": 2.0749, "step": 43 }, { - "epoch": 1.76, - "learning_rate": 0.00018240000000000002, - "loss": 2.0683, + "epoch": 0.15, + "learning_rate": 0.0001697594501718213, + "loss": 2.1922, "step": 44 }, { - "epoch": 1.8, - "learning_rate": 0.000182, - "loss": 2.1352, + "epoch": 0.15, + "learning_rate": 0.00016907216494845361, + "loss": 2.1915, "step": 45 }, { - "epoch": 1.84, - "learning_rate": 0.00018160000000000002, - "loss": 1.9736, + "epoch": 0.16, + "learning_rate": 0.00016838487972508593, + "loss": 2.1594, "step": 46 }, { - "epoch": 1.88, - "learning_rate": 0.0001812, - "loss": 1.9631, + "epoch": 0.16, + "learning_rate": 0.00016769759450171822, + "loss": 2.176, "step": 47 }, { - "epoch": 1.92, - "learning_rate": 0.0001808, - "loss": 2.1024, + "epoch": 0.16, + "learning_rate": 0.00016701030927835053, + "loss": 2.1223, "step": 48 }, { - "epoch": 1.96, - "learning_rate": 0.00018040000000000002, - "loss": 1.9895, + "epoch": 0.17, + "learning_rate": 0.00016632302405498285, + "loss": 2.1263, "step": 49 }, { - "epoch": 2.0, - "learning_rate": 0.00018, - "loss": 2.0067, + "epoch": 0.17, + "learning_rate": 0.00016563573883161513, + "loss": 2.0481, "step": 50 }, { - "epoch": 2.04, - "learning_rate": 0.0001796, - "loss": 2.0509, + "epoch": 0.17, + "learning_rate": 0.00016494845360824742, + "loss": 2.1043, "step": 51 }, { - "epoch": 2.08, - "learning_rate": 0.00017920000000000002, - "loss": 1.8795, + "epoch": 0.18, + "learning_rate": 0.00016426116838487973, + "loss": 2.1678, "step": 52 }, { - "epoch": 2.12, - "learning_rate": 0.0001788, - "loss": 2.0633, + "epoch": 0.18, + "learning_rate": 0.00016357388316151202, + "loss": 2.1602, "step": 53 }, { - "epoch": 2.16, - "learning_rate": 0.0001784, - "loss": 1.9998, + "epoch": 0.19, + "learning_rate": 0.00016288659793814434, + "loss": 2.1448, "step": 54 }, { - "epoch": 2.2, - "learning_rate": 0.00017800000000000002, - "loss": 2.0675, + "epoch": 0.19, + "learning_rate": 0.00016219931271477665, + "loss": 2.1536, "step": 55 }, { - "epoch": 2.24, - "learning_rate": 0.0001776, - "loss": 2.0129, + "epoch": 0.19, + "learning_rate": 0.00016151202749140894, + "loss": 2.0339, "step": 56 }, { - "epoch": 2.28, - "learning_rate": 0.0001772, - "loss": 1.9302, + "epoch": 0.2, + "learning_rate": 0.00016082474226804125, + "loss": 2.023, "step": 57 }, { - "epoch": 2.32, - 
"learning_rate": 0.00017680000000000001, - "loss": 2.0472, + "epoch": 0.2, + "learning_rate": 0.00016013745704467357, + "loss": 2.1407, "step": 58 }, { - "epoch": 2.36, - "learning_rate": 0.0001764, - "loss": 2.0031, + "epoch": 0.2, + "learning_rate": 0.00015945017182130585, + "loss": 2.1134, "step": 59 }, { - "epoch": 2.4, - "learning_rate": 0.00017600000000000002, - "loss": 2.0231, + "epoch": 0.21, + "learning_rate": 0.00015876288659793814, + "loss": 2.1652, "step": 60 }, { - "epoch": 2.44, - "learning_rate": 0.0001756, - "loss": 2.0192, + "epoch": 0.21, + "learning_rate": 0.00015807560137457046, + "loss": 2.0051, "step": 61 }, { - "epoch": 2.48, - "learning_rate": 0.0001752, - "loss": 2.1336, + "epoch": 0.21, + "learning_rate": 0.00015738831615120274, + "loss": 2.0604, "step": 62 }, { - "epoch": 2.52, - "learning_rate": 0.00017480000000000002, - "loss": 1.7753, + "epoch": 0.22, + "learning_rate": 0.00015670103092783506, + "loss": 2.1708, "step": 63 }, { - "epoch": 2.56, - "learning_rate": 0.0001744, - "loss": 2.0274, + "epoch": 0.22, + "learning_rate": 0.00015601374570446737, + "loss": 2.1106, "step": 64 }, { - "epoch": 2.6, - "learning_rate": 0.000174, - "loss": 1.8893, + "epoch": 0.22, + "learning_rate": 0.00015532646048109966, + "loss": 2.1445, "step": 65 }, { - "epoch": 2.64, - "learning_rate": 0.00017360000000000002, - "loss": 2.0341, + "epoch": 0.23, + "learning_rate": 0.00015463917525773197, + "loss": 2.0879, "step": 66 }, { - "epoch": 2.68, - "learning_rate": 0.0001732, - "loss": 2.1081, + "epoch": 0.23, + "learning_rate": 0.0001539518900343643, + "loss": 2.1498, "step": 67 }, { - "epoch": 2.72, - "learning_rate": 0.0001728, - "loss": 1.9463, + "epoch": 0.23, + "learning_rate": 0.00015326460481099657, + "loss": 2.0719, "step": 68 }, { - "epoch": 2.76, - "learning_rate": 0.00017240000000000002, - "loss": 2.0607, + "epoch": 0.24, + "learning_rate": 0.00015257731958762886, + "loss": 2.2167, "step": 69 }, { - "epoch": 2.8, - "learning_rate": 0.000172, - "loss": 1.9803, + "epoch": 0.24, + "learning_rate": 0.00015189003436426118, + "loss": 2.0811, "step": 70 }, { - "epoch": 2.84, - "learning_rate": 0.0001716, - "loss": 1.7663, + "epoch": 0.24, + "learning_rate": 0.00015120274914089346, + "loss": 2.1058, "step": 71 }, { - "epoch": 2.88, - "learning_rate": 0.00017120000000000001, - "loss": 2.1116, + "epoch": 0.25, + "learning_rate": 0.00015051546391752578, + "loss": 2.0392, "step": 72 }, { - "epoch": 2.92, - "learning_rate": 0.0001708, - "loss": 1.9039, + "epoch": 0.25, + "learning_rate": 0.0001498281786941581, + "loss": 2.0957, "step": 73 }, { - "epoch": 2.96, - "learning_rate": 0.0001704, - "loss": 2.049, + "epoch": 0.25, + "learning_rate": 0.00014914089347079038, + "loss": 1.9994, "step": 74 }, { - "epoch": 3.0, - "learning_rate": 0.00017, - "loss": 1.991, + "epoch": 0.26, + "learning_rate": 0.0001484536082474227, + "loss": 2.0464, "step": 75 }, { - "epoch": 3.04, - "learning_rate": 0.0001696, - "loss": 1.8755, + "epoch": 0.26, + "learning_rate": 0.000147766323024055, + "loss": 2.0417, "step": 76 }, { - "epoch": 3.08, - "learning_rate": 0.0001692, - "loss": 2.0367, + "epoch": 0.26, + "learning_rate": 0.0001470790378006873, + "loss": 2.105, "step": 77 }, { - "epoch": 3.12, - "learning_rate": 0.0001688, - "loss": 1.9959, + "epoch": 0.27, + "learning_rate": 0.00014639175257731958, + "loss": 2.1147, "step": 78 }, { - "epoch": 3.16, - "learning_rate": 0.0001684, - "loss": 1.8051, + "epoch": 0.27, + "learning_rate": 0.0001457044673539519, + "loss": 1.9964, "step": 79 }, { - "epoch": 3.2, - 
"learning_rate": 0.000168, - "loss": 2.0446, + "epoch": 0.27, + "learning_rate": 0.00014501718213058418, + "loss": 1.9723, "step": 80 }, { - "epoch": 3.24, - "learning_rate": 0.0001676, - "loss": 1.7873, + "epoch": 0.28, + "learning_rate": 0.0001443298969072165, + "loss": 2.0621, "step": 81 }, { - "epoch": 3.28, - "learning_rate": 0.0001672, - "loss": 1.9264, + "epoch": 0.28, + "learning_rate": 0.00014364261168384881, + "loss": 2.2703, "step": 82 }, { - "epoch": 3.32, - "learning_rate": 0.0001668, - "loss": 1.8595, + "epoch": 0.28, + "learning_rate": 0.0001429553264604811, + "loss": 2.0815, "step": 83 }, { - "epoch": 3.36, - "learning_rate": 0.0001664, - "loss": 1.9992, + "epoch": 0.29, + "learning_rate": 0.00014226804123711342, + "loss": 2.0774, "step": 84 }, { - "epoch": 3.4, - "learning_rate": 0.000166, - "loss": 1.8783, + "epoch": 0.29, + "learning_rate": 0.00014158075601374573, + "loss": 2.066, "step": 85 }, { - "epoch": 3.44, - "learning_rate": 0.0001656, - "loss": 2.0216, + "epoch": 0.29, + "learning_rate": 0.00014089347079037802, + "loss": 2.03, "step": 86 }, { - "epoch": 3.48, - "learning_rate": 0.0001652, - "loss": 1.947, + "epoch": 0.3, + "learning_rate": 0.0001402061855670103, + "loss": 2.1433, "step": 87 }, { - "epoch": 3.52, - "learning_rate": 0.0001648, - "loss": 1.9554, + "epoch": 0.3, + "learning_rate": 0.00013951890034364262, + "loss": 2.0811, "step": 88 }, { - "epoch": 3.56, - "learning_rate": 0.0001644, - "loss": 1.8563, + "epoch": 0.31, + "learning_rate": 0.0001388316151202749, + "loss": 1.9791, "step": 89 }, { - "epoch": 3.6, - "learning_rate": 0.000164, - "loss": 1.7795, + "epoch": 0.31, + "learning_rate": 0.00013814432989690722, + "loss": 2.0876, "step": 90 }, { - "epoch": 3.64, - "learning_rate": 0.0001636, - "loss": 1.9347, + "epoch": 0.31, + "learning_rate": 0.00013745704467353953, + "loss": 2.0314, "step": 91 }, { - "epoch": 3.68, - "learning_rate": 0.0001632, - "loss": 2.0078, + "epoch": 0.32, + "learning_rate": 0.00013676975945017182, + "loss": 1.9485, "step": 92 }, { - "epoch": 3.72, - "learning_rate": 0.0001628, - "loss": 1.9964, + "epoch": 0.32, + "learning_rate": 0.00013608247422680414, + "loss": 2.078, "step": 93 }, { - "epoch": 3.76, - "learning_rate": 0.00016240000000000002, - "loss": 1.9004, + "epoch": 0.32, + "learning_rate": 0.00013539518900343645, + "loss": 2.1251, "step": 94 }, { - "epoch": 3.8, - "learning_rate": 0.000162, - "loss": 1.9446, + "epoch": 0.33, + "learning_rate": 0.00013470790378006874, + "loss": 1.9736, "step": 95 }, { - "epoch": 3.84, - "learning_rate": 0.00016160000000000002, - "loss": 1.8857, + "epoch": 0.33, + "learning_rate": 0.00013402061855670103, + "loss": 2.0189, "step": 96 }, { - "epoch": 3.88, - "learning_rate": 0.00016120000000000002, - "loss": 1.8797, + "epoch": 0.33, + "learning_rate": 0.00013333333333333334, + "loss": 2.0061, "step": 97 }, { - "epoch": 3.92, - "learning_rate": 0.0001608, - "loss": 1.7643, + "epoch": 0.34, + "learning_rate": 0.00013264604810996563, + "loss": 1.9595, "step": 98 }, { - "epoch": 3.96, - "learning_rate": 0.00016040000000000002, - "loss": 1.9724, + "epoch": 0.34, + "learning_rate": 0.00013195876288659794, + "loss": 1.9702, "step": 99 }, { - "epoch": 4.0, - "learning_rate": 0.00016, - "loss": 1.9058, + "epoch": 0.34, + "learning_rate": 0.00013127147766323026, + "loss": 2.0322, "step": 100 }, { - "epoch": 4.04, - "learning_rate": 0.0001596, - "loss": 1.8663, + "epoch": 0.35, + "learning_rate": 0.00013058419243986254, + "loss": 2.0128, "step": 101 }, { - "epoch": 4.08, - "learning_rate": 
0.00015920000000000002, - "loss": 1.8457, + "epoch": 0.35, + "learning_rate": 0.00012989690721649486, + "loss": 2.1276, "step": 102 }, { - "epoch": 4.12, - "learning_rate": 0.0001588, - "loss": 1.8394, + "epoch": 0.35, + "learning_rate": 0.00012920962199312717, + "loss": 2.0177, "step": 103 }, { - "epoch": 4.16, - "learning_rate": 0.00015840000000000003, - "loss": 1.8941, + "epoch": 0.36, + "learning_rate": 0.00012852233676975946, + "loss": 1.9366, "step": 104 }, { - "epoch": 4.2, - "learning_rate": 0.00015800000000000002, - "loss": 1.8601, + "epoch": 0.36, + "learning_rate": 0.00012783505154639175, + "loss": 2.0341, "step": 105 }, { - "epoch": 4.24, - "learning_rate": 0.0001576, - "loss": 1.7365, + "epoch": 0.36, + "learning_rate": 0.00012714776632302406, + "loss": 2.0251, "step": 106 }, { - "epoch": 4.28, - "learning_rate": 0.00015720000000000003, - "loss": 1.8809, + "epoch": 0.37, + "learning_rate": 0.00012646048109965635, + "loss": 1.9588, "step": 107 }, { - "epoch": 4.32, - "learning_rate": 0.00015680000000000002, - "loss": 2.0206, + "epoch": 0.37, + "learning_rate": 0.00012577319587628866, + "loss": 2.0445, "step": 108 }, { - "epoch": 4.36, - "learning_rate": 0.0001564, - "loss": 1.7776, + "epoch": 0.37, + "learning_rate": 0.00012508591065292098, + "loss": 1.8917, "step": 109 }, { - "epoch": 4.4, - "learning_rate": 0.00015600000000000002, - "loss": 1.9089, + "epoch": 0.38, + "learning_rate": 0.00012439862542955326, + "loss": 2.0385, "step": 110 }, { - "epoch": 4.44, - "learning_rate": 0.00015560000000000001, - "loss": 1.8464, + "epoch": 0.38, + "learning_rate": 0.00012371134020618558, + "loss": 2.0435, "step": 111 }, { - "epoch": 4.48, - "learning_rate": 0.0001552, - "loss": 1.7943, + "epoch": 0.38, + "learning_rate": 0.0001230240549828179, + "loss": 2.0666, "step": 112 }, { - "epoch": 4.52, - "learning_rate": 0.00015480000000000002, - "loss": 1.8081, + "epoch": 0.39, + "learning_rate": 0.00012233676975945018, + "loss": 1.9854, "step": 113 }, { - "epoch": 4.56, - "learning_rate": 0.0001544, - "loss": 1.734, + "epoch": 0.39, + "learning_rate": 0.00012164948453608247, + "loss": 1.9233, "step": 114 }, { - "epoch": 4.6, - "learning_rate": 0.000154, - "loss": 1.8379, + "epoch": 0.39, + "learning_rate": 0.00012096219931271477, + "loss": 1.985, "step": 115 }, { - "epoch": 4.64, - "learning_rate": 0.00015360000000000002, - "loss": 1.6841, + "epoch": 0.4, + "learning_rate": 0.00012027491408934708, + "loss": 2.0679, "step": 116 }, { - "epoch": 4.68, - "learning_rate": 0.0001532, - "loss": 1.8253, + "epoch": 0.4, + "learning_rate": 0.00011958762886597938, + "loss": 1.9717, "step": 117 }, { - "epoch": 4.72, - "learning_rate": 0.0001528, - "loss": 1.7607, + "epoch": 0.4, + "learning_rate": 0.00011890034364261168, + "loss": 1.9388, "step": 118 }, { - "epoch": 4.76, - "learning_rate": 0.00015240000000000002, - "loss": 1.8205, + "epoch": 0.41, + "learning_rate": 0.000118213058419244, + "loss": 1.9607, "step": 119 }, { - "epoch": 4.8, - "learning_rate": 0.000152, - "loss": 1.8443, + "epoch": 0.41, + "learning_rate": 0.0001175257731958763, + "loss": 1.9543, "step": 120 }, { - "epoch": 4.84, - "learning_rate": 0.0001516, - "loss": 1.8824, + "epoch": 0.42, + "learning_rate": 0.0001168384879725086, + "loss": 1.9925, "step": 121 }, { - "epoch": 4.88, - "learning_rate": 0.00015120000000000002, - "loss": 1.8369, + "epoch": 0.42, + "learning_rate": 0.00011615120274914091, + "loss": 1.9913, "step": 122 }, { - "epoch": 4.92, - "learning_rate": 0.0001508, - "loss": 1.8017, + "epoch": 0.42, + "learning_rate": 
0.00011546391752577319, + "loss": 1.9857, "step": 123 }, { - "epoch": 4.96, - "learning_rate": 0.0001504, - "loss": 1.7858, + "epoch": 0.43, + "learning_rate": 0.00011477663230240549, + "loss": 2.0056, "step": 124 }, { - "epoch": 5.0, - "learning_rate": 0.00015000000000000001, - "loss": 1.8366, + "epoch": 0.43, + "learning_rate": 0.0001140893470790378, + "loss": 2.0133, "step": 125 }, { - "epoch": 5.04, - "learning_rate": 0.0001496, - "loss": 1.7353, + "epoch": 0.43, + "learning_rate": 0.0001134020618556701, + "loss": 1.8104, "step": 126 }, { - "epoch": 5.08, - "learning_rate": 0.0001492, - "loss": 1.6017, + "epoch": 0.44, + "learning_rate": 0.0001127147766323024, + "loss": 1.7946, "step": 127 }, { - "epoch": 5.12, - "learning_rate": 0.0001488, - "loss": 1.6645, + "epoch": 0.44, + "learning_rate": 0.00011202749140893472, + "loss": 1.8347, "step": 128 }, { - "epoch": 5.16, - "learning_rate": 0.0001484, - "loss": 1.8663, + "epoch": 0.44, + "learning_rate": 0.00011134020618556702, + "loss": 2.0342, "step": 129 }, { - "epoch": 5.2, - "learning_rate": 0.000148, - "loss": 1.7009, + "epoch": 0.45, + "learning_rate": 0.00011065292096219932, + "loss": 1.9425, "step": 130 }, { - "epoch": 5.24, - "learning_rate": 0.0001476, - "loss": 1.7371, + "epoch": 0.45, + "learning_rate": 0.00010996563573883164, + "loss": 1.9546, "step": 131 }, { - "epoch": 5.28, - "learning_rate": 0.0001472, - "loss": 1.8555, + "epoch": 0.45, + "learning_rate": 0.00010927835051546391, + "loss": 1.9694, "step": 132 }, { - "epoch": 5.32, - "learning_rate": 0.00014680000000000002, - "loss": 1.6373, + "epoch": 0.46, + "learning_rate": 0.00010859106529209621, + "loss": 1.936, "step": 133 }, { - "epoch": 5.36, - "learning_rate": 0.0001464, - "loss": 1.7211, + "epoch": 0.46, + "learning_rate": 0.00010790378006872852, + "loss": 2.0004, "step": 134 }, { - "epoch": 5.4, - "learning_rate": 0.000146, - "loss": 1.614, + "epoch": 0.46, + "learning_rate": 0.00010721649484536083, + "loss": 2.0462, "step": 135 }, { - "epoch": 5.44, - "learning_rate": 0.00014560000000000002, - "loss": 1.8736, + "epoch": 0.47, + "learning_rate": 0.00010652920962199313, + "loss": 1.9783, "step": 136 }, { - "epoch": 5.48, - "learning_rate": 0.0001452, - "loss": 1.7229, + "epoch": 0.47, + "learning_rate": 0.00010584192439862544, + "loss": 1.8628, "step": 137 }, { - "epoch": 5.52, - "learning_rate": 0.0001448, - "loss": 1.7315, + "epoch": 0.47, + "learning_rate": 0.00010515463917525774, + "loss": 1.9134, "step": 138 }, { - "epoch": 5.56, - "learning_rate": 0.0001444, - "loss": 1.7259, + "epoch": 0.48, + "learning_rate": 0.00010446735395189004, + "loss": 1.8846, "step": 139 }, { - "epoch": 5.6, - "learning_rate": 0.000144, - "loss": 1.7032, + "epoch": 0.48, + "learning_rate": 0.00010378006872852236, + "loss": 1.8119, "step": 140 }, { - "epoch": 5.64, - "learning_rate": 0.0001436, - "loss": 1.9237, + "epoch": 0.48, + "learning_rate": 0.00010309278350515463, + "loss": 1.9242, "step": 141 }, { - "epoch": 5.68, - "learning_rate": 0.0001432, - "loss": 1.7821, + "epoch": 0.49, + "learning_rate": 0.00010240549828178693, + "loss": 1.9292, "step": 142 }, { - "epoch": 5.72, - "learning_rate": 0.0001428, - "loss": 1.6081, + "epoch": 0.49, + "learning_rate": 0.00010171821305841925, + "loss": 1.9763, "step": 143 }, { - "epoch": 5.76, - "learning_rate": 0.0001424, - "loss": 1.7979, + "epoch": 0.49, + "learning_rate": 0.00010103092783505155, + "loss": 1.9023, "step": 144 }, { - "epoch": 5.8, - "learning_rate": 0.000142, - "loss": 1.6675, + "epoch": 0.5, + "learning_rate": 
0.00010034364261168385, + "loss": 1.8064, "step": 145 }, { - "epoch": 5.84, - "learning_rate": 0.0001416, - "loss": 1.6758, + "epoch": 0.5, + "learning_rate": 9.965635738831616e-05, + "loss": 1.9646, "step": 146 }, { - "epoch": 5.88, - "learning_rate": 0.0001412, - "loss": 1.7783, + "epoch": 0.5, + "learning_rate": 9.896907216494846e-05, + "loss": 1.8234, "step": 147 }, { - "epoch": 5.92, - "learning_rate": 0.0001408, - "loss": 1.5935, + "epoch": 0.51, + "learning_rate": 9.828178694158075e-05, + "loss": 2.0289, "step": 148 }, { - "epoch": 5.96, - "learning_rate": 0.0001404, - "loss": 1.7372, + "epoch": 0.51, + "learning_rate": 9.759450171821306e-05, + "loss": 1.7698, "step": 149 }, { - "epoch": 6.0, - "learning_rate": 0.00014, - "loss": 1.6308, + "epoch": 0.51, + "learning_rate": 9.690721649484537e-05, + "loss": 1.9497, "step": 150 }, { - "epoch": 6.04, - "learning_rate": 0.0001396, - "loss": 1.5829, + "epoch": 0.52, + "learning_rate": 9.621993127147767e-05, + "loss": 1.9135, "step": 151 }, { - "epoch": 6.08, - "learning_rate": 0.0001392, - "loss": 1.6538, + "epoch": 0.52, + "learning_rate": 9.553264604810998e-05, + "loss": 1.7955, "step": 152 }, { - "epoch": 6.12, - "learning_rate": 0.00013879999999999999, - "loss": 1.7624, + "epoch": 0.52, + "learning_rate": 9.484536082474227e-05, + "loss": 1.9452, "step": 153 }, { - "epoch": 6.16, - "learning_rate": 0.0001384, - "loss": 1.4968, + "epoch": 0.53, + "learning_rate": 9.415807560137457e-05, + "loss": 1.9715, "step": 154 }, { - "epoch": 6.2, - "learning_rate": 0.000138, - "loss": 1.7305, + "epoch": 0.53, + "learning_rate": 9.347079037800688e-05, + "loss": 1.8546, "step": 155 }, { - "epoch": 6.24, - "learning_rate": 0.00013759999999999998, - "loss": 1.6536, + "epoch": 0.54, + "learning_rate": 9.278350515463918e-05, + "loss": 1.9756, "step": 156 }, { - "epoch": 6.28, - "learning_rate": 0.00013720000000000003, - "loss": 1.654, + "epoch": 0.54, + "learning_rate": 9.209621993127147e-05, + "loss": 2.0691, "step": 157 }, { - "epoch": 6.32, - "learning_rate": 0.00013680000000000002, - "loss": 1.7163, + "epoch": 0.54, + "learning_rate": 9.140893470790379e-05, + "loss": 1.8373, "step": 158 }, { - "epoch": 6.36, - "learning_rate": 0.0001364, - "loss": 1.6599, + "epoch": 0.55, + "learning_rate": 9.072164948453609e-05, + "loss": 1.7061, "step": 159 }, { - "epoch": 6.4, - "learning_rate": 0.00013600000000000003, - "loss": 1.5729, + "epoch": 0.55, + "learning_rate": 9.003436426116839e-05, + "loss": 1.9069, "step": 160 }, { - "epoch": 6.44, - "learning_rate": 0.00013560000000000002, - "loss": 1.5291, + "epoch": 0.55, + "learning_rate": 8.93470790378007e-05, + "loss": 1.8055, "step": 161 }, { - "epoch": 6.48, - "learning_rate": 0.0001352, - "loss": 1.5666, + "epoch": 0.56, + "learning_rate": 8.865979381443299e-05, + "loss": 1.8873, "step": 162 }, { - "epoch": 6.52, - "learning_rate": 0.00013480000000000002, - "loss": 1.7576, + "epoch": 0.56, + "learning_rate": 8.797250859106529e-05, + "loss": 1.8406, "step": 163 }, { - "epoch": 6.56, - "learning_rate": 0.00013440000000000001, - "loss": 1.4462, + "epoch": 0.56, + "learning_rate": 8.72852233676976e-05, + "loss": 1.9428, "step": 164 }, { - "epoch": 6.6, - "learning_rate": 0.000134, - "loss": 1.5659, + "epoch": 0.57, + "learning_rate": 8.65979381443299e-05, + "loss": 1.831, "step": 165 }, { - "epoch": 6.64, - "learning_rate": 0.00013360000000000002, - "loss": 1.6234, + "epoch": 0.57, + "learning_rate": 8.591065292096219e-05, + "loss": 1.8802, "step": 166 }, { - "epoch": 6.68, - "learning_rate": 0.0001332, - 
"loss": 1.5176, + "epoch": 0.57, + "learning_rate": 8.52233676975945e-05, + "loss": 1.8436, "step": 167 }, { - "epoch": 6.72, - "learning_rate": 0.0001328, - "loss": 1.667, + "epoch": 0.58, + "learning_rate": 8.453608247422681e-05, + "loss": 1.848, "step": 168 }, { - "epoch": 6.76, - "learning_rate": 0.00013240000000000002, - "loss": 1.6514, + "epoch": 0.58, + "learning_rate": 8.384879725085911e-05, + "loss": 1.9022, "step": 169 }, { - "epoch": 6.8, - "learning_rate": 0.000132, - "loss": 1.7179, + "epoch": 0.58, + "learning_rate": 8.316151202749142e-05, + "loss": 1.8015, "step": 170 }, { - "epoch": 6.84, - "learning_rate": 0.0001316, - "loss": 1.4373, + "epoch": 0.59, + "learning_rate": 8.247422680412371e-05, + "loss": 1.8204, "step": 171 }, { - "epoch": 6.88, - "learning_rate": 0.00013120000000000002, - "loss": 1.7044, + "epoch": 0.59, + "learning_rate": 8.178694158075601e-05, + "loss": 1.798, "step": 172 }, { - "epoch": 6.92, - "learning_rate": 0.0001308, - "loss": 1.5673, + "epoch": 0.59, + "learning_rate": 8.109965635738833e-05, + "loss": 1.8832, "step": 173 }, { - "epoch": 6.96, - "learning_rate": 0.0001304, - "loss": 1.6303, + "epoch": 0.6, + "learning_rate": 8.041237113402063e-05, + "loss": 1.8176, "step": 174 }, { - "epoch": 7.0, - "learning_rate": 0.00013000000000000002, - "loss": 1.4828, + "epoch": 0.6, + "learning_rate": 7.972508591065293e-05, + "loss": 1.9251, "step": 175 }, { - "epoch": 7.04, - "learning_rate": 0.0001296, - "loss": 1.6204, + "epoch": 0.6, + "learning_rate": 7.903780068728523e-05, + "loss": 1.7559, "step": 176 }, { - "epoch": 7.08, - "learning_rate": 0.00012920000000000002, - "loss": 1.4955, + "epoch": 0.61, + "learning_rate": 7.835051546391753e-05, + "loss": 1.805, "step": 177 }, { - "epoch": 7.12, - "learning_rate": 0.00012880000000000001, - "loss": 1.6109, + "epoch": 0.61, + "learning_rate": 7.766323024054983e-05, + "loss": 1.7995, "step": 178 }, { - "epoch": 7.16, - "learning_rate": 0.0001284, - "loss": 1.5318, + "epoch": 0.61, + "learning_rate": 7.697594501718214e-05, + "loss": 1.9055, "step": 179 }, { - "epoch": 7.2, - "learning_rate": 0.00012800000000000002, - "loss": 1.5702, + "epoch": 0.62, + "learning_rate": 7.628865979381443e-05, + "loss": 1.8654, "step": 180 }, { - "epoch": 7.24, - "learning_rate": 0.0001276, - "loss": 1.4598, + "epoch": 0.62, + "learning_rate": 7.560137457044673e-05, + "loss": 1.852, "step": 181 }, { - "epoch": 7.28, - "learning_rate": 0.0001272, - "loss": 1.6145, + "epoch": 0.62, + "learning_rate": 7.491408934707905e-05, + "loss": 1.9377, "step": 182 }, { - "epoch": 7.32, - "learning_rate": 0.00012680000000000002, - "loss": 1.3628, + "epoch": 0.63, + "learning_rate": 7.422680412371135e-05, + "loss": 1.8024, "step": 183 }, { - "epoch": 7.36, - "learning_rate": 0.0001264, - "loss": 1.4578, + "epoch": 0.63, + "learning_rate": 7.353951890034365e-05, + "loss": 1.7329, "step": 184 }, { - "epoch": 7.4, - "learning_rate": 0.000126, - "loss": 1.7094, + "epoch": 0.63, + "learning_rate": 7.285223367697595e-05, + "loss": 1.8203, "step": 185 }, { - "epoch": 7.44, - "learning_rate": 0.00012560000000000002, - "loss": 1.4376, + "epoch": 0.64, + "learning_rate": 7.216494845360825e-05, + "loss": 1.8223, "step": 186 }, { - "epoch": 7.48, - "learning_rate": 0.0001252, - "loss": 1.3393, + "epoch": 0.64, + "learning_rate": 7.147766323024055e-05, + "loss": 1.8506, "step": 187 }, { - "epoch": 7.52, - "learning_rate": 0.0001248, - "loss": 1.4273, + "epoch": 0.64, + "learning_rate": 7.079037800687286e-05, + "loss": 1.8169, "step": 188 }, { - "epoch": 7.56, 
- "learning_rate": 0.00012440000000000002, - "loss": 1.5506, + "epoch": 0.65, + "learning_rate": 7.010309278350515e-05, + "loss": 1.796, "step": 189 }, { - "epoch": 7.6, - "learning_rate": 0.000124, - "loss": 1.4774, + "epoch": 0.65, + "learning_rate": 6.941580756013745e-05, + "loss": 1.7975, "step": 190 }, { - "epoch": 7.64, - "learning_rate": 0.0001236, - "loss": 1.4632, + "epoch": 0.66, + "learning_rate": 6.872852233676977e-05, + "loss": 1.8774, "step": 191 }, { - "epoch": 7.68, - "learning_rate": 0.0001232, - "loss": 1.4568, + "epoch": 0.66, + "learning_rate": 6.804123711340207e-05, + "loss": 1.7822, "step": 192 }, { - "epoch": 7.72, - "learning_rate": 0.0001228, - "loss": 1.6106, + "epoch": 0.66, + "learning_rate": 6.735395189003437e-05, + "loss": 1.8542, "step": 193 }, { - "epoch": 7.76, - "learning_rate": 0.0001224, - "loss": 1.577, + "epoch": 0.67, + "learning_rate": 6.666666666666667e-05, + "loss": 1.8148, "step": 194 }, { - "epoch": 7.8, - "learning_rate": 0.000122, - "loss": 1.4805, + "epoch": 0.67, + "learning_rate": 6.597938144329897e-05, + "loss": 1.7446, "step": 195 }, { - "epoch": 7.84, - "learning_rate": 0.0001216, - "loss": 1.4546, + "epoch": 0.67, + "learning_rate": 6.529209621993127e-05, + "loss": 1.7477, "step": 196 }, { - "epoch": 7.88, - "learning_rate": 0.0001212, - "loss": 1.5164, + "epoch": 0.68, + "learning_rate": 6.460481099656359e-05, + "loss": 1.8121, "step": 197 }, { - "epoch": 7.92, - "learning_rate": 0.0001208, - "loss": 1.5061, + "epoch": 0.68, + "learning_rate": 6.391752577319587e-05, + "loss": 1.7289, "step": 198 }, { - "epoch": 7.96, - "learning_rate": 0.0001204, - "loss": 1.5312, + "epoch": 0.68, + "learning_rate": 6.323024054982817e-05, + "loss": 1.8006, "step": 199 }, { - "epoch": 8.0, - "learning_rate": 0.00012, - "loss": 1.4463, + "epoch": 0.69, + "learning_rate": 6.254295532646049e-05, + "loss": 1.7348, "step": 200 } ], "logging_steps": 1, - "max_steps": 500, - "num_train_epochs": 20, + "max_steps": 291, + "num_train_epochs": 1, "save_steps": 100, - "total_flos": 6.816544875119616e+16, + "total_flos": 1.2148491200262144e+17, "trial_name": null, "trial_params": null } diff --git a/checkpoint-200/training_args.bin b/checkpoint-200/training_args.bin index 4de6572a838c337c9990635a9406ebf46c0ec336..514bd8136ebdc96a275890c7b9b4df6ac072906c 100644 --- a/checkpoint-200/training_args.bin +++ b/checkpoint-200/training_args.bin @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:6ef74b31950ae6c8955316bed48c343fb06cda0cb6a8a54ca46ca3cb681c8736 +oid sha256:7e3410eea0ea0eb09ca576511099334880accaab360c0279f3099c9e4d2e877a size 4027 diff --git a/checkpoint-300/README.md b/checkpoint-300/README.md index 08371015f02382e6fcba318f4aaea54ae52cd3c4..2f257a448caef1c59022426ad3ec9ba80acb3821 100644 --- a/checkpoint-300/README.md +++ b/checkpoint-300/README.md @@ -4,6 +4,30 @@ library_name: peft ## Training procedure +The following `bitsandbytes` quantization config was used during training: +- quant_method: bitsandbytes +- load_in_8bit: True +- load_in_4bit: False +- llm_int8_threshold: 6.0 +- llm_int8_skip_modules: None +- llm_int8_enable_fp32_cpu_offload: False +- llm_int8_has_fp16_weight: False +- bnb_4bit_quant_type: fp4 +- bnb_4bit_use_double_quant: False +- bnb_4bit_compute_dtype: float32 + +The following `bitsandbytes` quantization config was used during training: +- quant_method: bitsandbytes +- load_in_8bit: True +- load_in_4bit: False +- llm_int8_threshold: 6.0 +- llm_int8_skip_modules: None +- llm_int8_enable_fp32_cpu_offload: False +- 
llm_int8_has_fp16_weight: False +- bnb_4bit_quant_type: fp4 +- bnb_4bit_use_double_quant: False +- bnb_4bit_compute_dtype: float32 + The following `bitsandbytes` quantization config was used during training: - quant_method: bitsandbytes - load_in_8bit: True @@ -29,6 +53,8 @@ The following `bitsandbytes` quantization config was used during training: - bnb_4bit_compute_dtype: float32 ### Framework versions +- PEFT 0.6.0.dev0 +- PEFT 0.6.0.dev0 - PEFT 0.6.0.dev0 - PEFT 0.6.0.dev0 diff --git a/checkpoint-300/adapter_model.bin b/checkpoint-300/adapter_model.bin index ec364bb05a02feac36e02fbed5b909ca903878d0..f32eee84dcfc362bb3dd9f9d9127291a2f42668f 100644 --- a/checkpoint-300/adapter_model.bin +++ b/checkpoint-300/adapter_model.bin @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:6fced119b995138b5b4c9d37dd51211a238534b883b617216cadd221bc8ae36d +oid sha256:b39b2ac3c3f75772f547048fda7ecb323076b9689db7d79915bba156dc508f2f size 39409357 diff --git a/checkpoint-300/optimizer.pt b/checkpoint-300/optimizer.pt index 2e6a7d1c4e53d7609d642160c8fe43e397089790..3d79ee6144baeefb728bb603d5ca209014a1847b 100644 --- a/checkpoint-300/optimizer.pt +++ b/checkpoint-300/optimizer.pt @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:67fdec376938273ad2ba5e201ffaba7ad32265728524da3562b132b79fdf2ede +oid sha256:c734ecaa394370ee4bcd94cc0b2ae016a26765122f3f76327b28c23f96a22732 size 78844421 diff --git a/checkpoint-300/rng_state.pth b/checkpoint-300/rng_state.pth index c4f6e431605a7da3e2efbdf0dc1fae0f500de983..b9111227e3b79b9bc3e2a642832c3e49e36216d4 100644 --- a/checkpoint-300/rng_state.pth +++ b/checkpoint-300/rng_state.pth @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:370c3a07f37a8aae6ea141b54ca992b21699546baf7407eb587b6056f787333b +oid sha256:972139d83957a9cf2600cb6eeca17287d7a5377c33a53500ae7e13fe830ad36b size 14575 diff --git a/checkpoint-300/scheduler.pt b/checkpoint-300/scheduler.pt index 44bdad4dee332a8a72d1b7ed6d078cb796b00e47..ba7790505ccbdccc3664cac72a2743abf3f54ee6 100644 --- a/checkpoint-300/scheduler.pt +++ b/checkpoint-300/scheduler.pt @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:f2bc1def4a9cb9244bae2e6cfff139bec4634b7353bf4cbc7d9c4c7018fdd400 +oid sha256:7d8dcaf05375bb59f736a94e8f8b03d33cdc87bc02411e6527a29996e0a68b3b size 627 diff --git a/checkpoint-300/tokenizer.json b/checkpoint-300/tokenizer.json index dbf002cafbd4818dcff2abc9156c088d681b4533..673c31abdeadf6576c3c754df86459e1ad64e207 100644 --- a/checkpoint-300/tokenizer.json +++ b/checkpoint-300/tokenizer.json @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:17a208233d2ee8d8c83b23bc214df737c44806a1919f444e89b31e586cd956ba -size 14500471 +oid sha256:85b00d7db4df5df2e3f01cacc3feda246002a672f3356eec7f4b04a22eb0dfbe +size 14500570 diff --git a/checkpoint-300/trainer_state.json b/checkpoint-300/trainer_state.json index bf13daeec8eb6cb2d11435b6be5bb180b795d1c8..bf5790647f2fda83e639ffc04ccde4b8937523ee 100644 --- a/checkpoint-300/trainer_state.json +++ b/checkpoint-300/trainer_state.json @@ -1,7 +1,7 @@ { "best_metric": null, "best_model_checkpoint": null, - "epoch": 12.0, + "epoch": 1.675977653631285, "eval_steps": 500, "global_step": 300, "is_hyper_param_search": false, @@ -9,1811 +9,1811 @@ "is_world_process_zero": true, "log_history": [ { - "epoch": 0.04, - "learning_rate": 0.0001996, - "loss": 2.4683, + "epoch": 0.01, + "learning_rate": 0.00019972067039106145, + "loss": 2.6443, "step": 1 }, { - "epoch": 0.08, - "learning_rate": 
0.00019920000000000002, - "loss": 2.2023, + "epoch": 0.01, + "learning_rate": 0.00019944134078212292, + "loss": 2.4104, "step": 2 }, { - "epoch": 0.12, - "learning_rate": 0.0001988, - "loss": 2.3039, + "epoch": 0.02, + "learning_rate": 0.00019916201117318435, + "loss": 2.4975, "step": 3 }, { - "epoch": 0.16, - "learning_rate": 0.0001984, - "loss": 2.1855, + "epoch": 0.02, + "learning_rate": 0.00019888268156424582, + "loss": 2.3513, "step": 4 }, { - "epoch": 0.2, - "learning_rate": 0.00019800000000000002, - "loss": 2.328, + "epoch": 0.03, + "learning_rate": 0.0001986033519553073, + "loss": 2.4274, "step": 5 }, { - "epoch": 0.24, - "learning_rate": 0.0001976, - "loss": 2.3407, + "epoch": 0.03, + "learning_rate": 0.00019832402234636873, + "loss": 2.3628, "step": 6 }, { - "epoch": 0.28, - "learning_rate": 0.0001972, - "loss": 2.3432, + "epoch": 0.04, + "learning_rate": 0.0001980446927374302, + "loss": 2.3567, "step": 7 }, { - "epoch": 0.32, - "learning_rate": 0.0001968, - "loss": 2.2333, + "epoch": 0.04, + "learning_rate": 0.00019776536312849163, + "loss": 2.4121, "step": 8 }, { - "epoch": 0.36, - "learning_rate": 0.0001964, - "loss": 2.1761, + "epoch": 0.05, + "learning_rate": 0.00019748603351955307, + "loss": 2.4033, "step": 9 }, { - "epoch": 0.4, - "learning_rate": 0.000196, - "loss": 2.1473, + "epoch": 0.06, + "learning_rate": 0.00019720670391061454, + "loss": 2.2805, "step": 10 }, { - "epoch": 0.44, - "learning_rate": 0.0001956, - "loss": 2.2076, + "epoch": 0.06, + "learning_rate": 0.00019692737430167598, + "loss": 2.2639, "step": 11 }, { - "epoch": 0.48, - "learning_rate": 0.0001952, - "loss": 2.1925, + "epoch": 0.07, + "learning_rate": 0.00019664804469273744, + "loss": 2.2724, "step": 12 }, { - "epoch": 0.52, - "learning_rate": 0.0001948, - "loss": 2.1694, + "epoch": 0.07, + "learning_rate": 0.00019636871508379888, + "loss": 2.332, "step": 13 }, { - "epoch": 0.56, - "learning_rate": 0.0001944, - "loss": 2.1056, + "epoch": 0.08, + "learning_rate": 0.00019608938547486035, + "loss": 2.2261, "step": 14 }, { - "epoch": 0.6, - "learning_rate": 0.000194, - "loss": 2.2426, + "epoch": 0.08, + "learning_rate": 0.00019581005586592182, + "loss": 2.2208, "step": 15 }, { - "epoch": 0.64, - "learning_rate": 0.00019360000000000002, - "loss": 2.2635, + "epoch": 0.09, + "learning_rate": 0.00019553072625698326, + "loss": 2.3351, "step": 16 }, { - "epoch": 0.68, - "learning_rate": 0.0001932, - "loss": 2.251, + "epoch": 0.09, + "learning_rate": 0.0001952513966480447, + "loss": 2.2475, "step": 17 }, { - "epoch": 0.72, - "learning_rate": 0.0001928, - "loss": 2.224, + "epoch": 0.1, + "learning_rate": 0.00019497206703910616, + "loss": 2.3283, "step": 18 }, { - "epoch": 0.76, - "learning_rate": 0.00019240000000000001, - "loss": 2.1848, + "epoch": 0.11, + "learning_rate": 0.0001946927374301676, + "loss": 2.1346, "step": 19 }, { - "epoch": 0.8, - "learning_rate": 0.000192, - "loss": 2.1341, + "epoch": 0.11, + "learning_rate": 0.00019441340782122907, + "loss": 2.131, "step": 20 }, { - "epoch": 0.84, - "learning_rate": 0.0001916, - "loss": 2.126, + "epoch": 0.12, + "learning_rate": 0.0001941340782122905, + "loss": 2.1718, "step": 21 }, { - "epoch": 0.88, - "learning_rate": 0.0001912, - "loss": 2.0959, + "epoch": 0.12, + "learning_rate": 0.00019385474860335195, + "loss": 2.2446, "step": 22 }, { - "epoch": 0.92, - "learning_rate": 0.0001908, - "loss": 2.2446, + "epoch": 0.13, + "learning_rate": 0.0001935754189944134, + "loss": 2.306, "step": 23 }, { - "epoch": 0.96, - "learning_rate": 0.0001904, - "loss": 1.9448, + 
"epoch": 0.13, + "learning_rate": 0.00019329608938547488, + "loss": 2.1908, "step": 24 }, { - "epoch": 1.0, - "learning_rate": 0.00019, - "loss": 2.0939, + "epoch": 0.14, + "learning_rate": 0.00019301675977653632, + "loss": 2.2844, "step": 25 }, { - "epoch": 1.04, - "learning_rate": 0.0001896, - "loss": 2.1247, + "epoch": 0.15, + "learning_rate": 0.00019273743016759779, + "loss": 2.2235, "step": 26 }, { - "epoch": 1.08, - "learning_rate": 0.0001892, - "loss": 1.9417, + "epoch": 0.15, + "learning_rate": 0.00019245810055865922, + "loss": 2.1842, "step": 27 }, { - "epoch": 1.12, - "learning_rate": 0.0001888, - "loss": 2.105, + "epoch": 0.16, + "learning_rate": 0.00019217877094972066, + "loss": 2.2675, "step": 28 }, { - "epoch": 1.16, - "learning_rate": 0.0001884, - "loss": 2.1595, + "epoch": 0.16, + "learning_rate": 0.00019189944134078213, + "loss": 2.2532, "step": 29 }, { - "epoch": 1.2, - "learning_rate": 0.000188, - "loss": 2.2009, + "epoch": 0.17, + "learning_rate": 0.00019162011173184357, + "loss": 2.1788, "step": 30 }, { - "epoch": 1.24, - "learning_rate": 0.0001876, - "loss": 2.0784, + "epoch": 0.17, + "learning_rate": 0.00019134078212290504, + "loss": 2.2494, "step": 31 }, { - "epoch": 1.28, - "learning_rate": 0.00018720000000000002, - "loss": 2.2173, + "epoch": 0.18, + "learning_rate": 0.0001910614525139665, + "loss": 2.1995, "step": 32 }, { - "epoch": 1.32, - "learning_rate": 0.00018680000000000001, - "loss": 2.1185, + "epoch": 0.18, + "learning_rate": 0.00019078212290502794, + "loss": 2.1451, "step": 33 }, { - "epoch": 1.36, - "learning_rate": 0.00018640000000000003, - "loss": 1.988, + "epoch": 0.19, + "learning_rate": 0.0001905027932960894, + "loss": 2.223, "step": 34 }, { - "epoch": 1.4, - "learning_rate": 0.00018600000000000002, - "loss": 1.9493, + "epoch": 0.2, + "learning_rate": 0.00019022346368715085, + "loss": 2.2854, "step": 35 }, { - "epoch": 1.44, - "learning_rate": 0.0001856, - "loss": 1.9947, + "epoch": 0.2, + "learning_rate": 0.0001899441340782123, + "loss": 2.2265, "step": 36 }, { - "epoch": 1.48, - "learning_rate": 0.00018520000000000003, - "loss": 2.0506, + "epoch": 0.21, + "learning_rate": 0.00018966480446927375, + "loss": 2.1214, "step": 37 }, { - "epoch": 1.52, - "learning_rate": 0.00018480000000000002, - "loss": 2.0829, + "epoch": 0.21, + "learning_rate": 0.0001893854748603352, + "loss": 2.1898, "step": 38 }, { - "epoch": 1.56, - "learning_rate": 0.0001844, - "loss": 2.1455, + "epoch": 0.22, + "learning_rate": 0.00018910614525139666, + "loss": 2.1974, "step": 39 }, { - "epoch": 1.6, - "learning_rate": 0.00018400000000000003, - "loss": 2.0304, + "epoch": 0.22, + "learning_rate": 0.0001888268156424581, + "loss": 2.2259, "step": 40 }, { - "epoch": 1.64, - "learning_rate": 0.00018360000000000002, - "loss": 2.006, + "epoch": 0.23, + "learning_rate": 0.00018854748603351957, + "loss": 2.2094, "step": 41 }, { - "epoch": 1.68, - "learning_rate": 0.0001832, - "loss": 2.1759, + "epoch": 0.23, + "learning_rate": 0.00018826815642458103, + "loss": 2.1731, "step": 42 }, { - "epoch": 1.72, - "learning_rate": 0.00018280000000000003, - "loss": 2.0093, + "epoch": 0.24, + "learning_rate": 0.00018798882681564247, + "loss": 2.2373, "step": 43 }, { - "epoch": 1.76, - "learning_rate": 0.00018240000000000002, - "loss": 2.0683, + "epoch": 0.25, + "learning_rate": 0.0001877094972067039, + "loss": 2.2295, "step": 44 }, { - "epoch": 1.8, - "learning_rate": 0.000182, - "loss": 2.1352, + "epoch": 0.25, + "learning_rate": 0.00018743016759776538, + "loss": 2.1947, "step": 45 }, { - "epoch": 
1.84, - "learning_rate": 0.00018160000000000002, - "loss": 1.9736, + "epoch": 0.26, + "learning_rate": 0.00018715083798882682, + "loss": 2.2115, "step": 46 }, { - "epoch": 1.88, - "learning_rate": 0.0001812, - "loss": 1.9631, + "epoch": 0.26, + "learning_rate": 0.00018687150837988828, + "loss": 2.1224, "step": 47 }, { - "epoch": 1.92, - "learning_rate": 0.0001808, - "loss": 2.1024, + "epoch": 0.27, + "learning_rate": 0.00018659217877094972, + "loss": 2.2137, "step": 48 }, { - "epoch": 1.96, - "learning_rate": 0.00018040000000000002, - "loss": 1.9895, + "epoch": 0.27, + "learning_rate": 0.00018631284916201116, + "loss": 2.2338, "step": 49 }, { - "epoch": 2.0, - "learning_rate": 0.00018, - "loss": 2.0067, + "epoch": 0.28, + "learning_rate": 0.00018603351955307266, + "loss": 2.1298, "step": 50 }, { - "epoch": 2.04, - "learning_rate": 0.0001796, - "loss": 2.0509, + "epoch": 0.28, + "learning_rate": 0.0001857541899441341, + "loss": 2.0883, "step": 51 }, { - "epoch": 2.08, - "learning_rate": 0.00017920000000000002, - "loss": 1.8795, + "epoch": 0.29, + "learning_rate": 0.00018547486033519553, + "loss": 2.1216, "step": 52 }, { - "epoch": 2.12, - "learning_rate": 0.0001788, - "loss": 2.0633, + "epoch": 0.3, + "learning_rate": 0.000185195530726257, + "loss": 2.2112, "step": 53 }, { - "epoch": 2.16, - "learning_rate": 0.0001784, - "loss": 1.9998, + "epoch": 0.3, + "learning_rate": 0.00018491620111731844, + "loss": 2.1224, "step": 54 }, { - "epoch": 2.2, - "learning_rate": 0.00017800000000000002, - "loss": 2.0675, + "epoch": 0.31, + "learning_rate": 0.0001846368715083799, + "loss": 2.2375, "step": 55 }, { - "epoch": 2.24, - "learning_rate": 0.0001776, - "loss": 2.0129, + "epoch": 0.31, + "learning_rate": 0.00018435754189944135, + "loss": 2.2235, "step": 56 }, { - "epoch": 2.28, - "learning_rate": 0.0001772, - "loss": 1.9302, + "epoch": 0.32, + "learning_rate": 0.00018407821229050279, + "loss": 2.1682, "step": 57 }, { - "epoch": 2.32, - "learning_rate": 0.00017680000000000001, - "loss": 2.0472, + "epoch": 0.32, + "learning_rate": 0.00018379888268156425, + "loss": 2.2077, "step": 58 }, { - "epoch": 2.36, - "learning_rate": 0.0001764, - "loss": 2.0031, + "epoch": 0.33, + "learning_rate": 0.00018351955307262572, + "loss": 2.1596, "step": 59 }, { - "epoch": 2.4, - "learning_rate": 0.00017600000000000002, - "loss": 2.0231, + "epoch": 0.34, + "learning_rate": 0.00018324022346368716, + "loss": 2.1311, "step": 60 }, { - "epoch": 2.44, - "learning_rate": 0.0001756, - "loss": 2.0192, + "epoch": 0.34, + "learning_rate": 0.00018296089385474862, + "loss": 2.1333, "step": 61 }, { - "epoch": 2.48, - "learning_rate": 0.0001752, - "loss": 2.1336, + "epoch": 0.35, + "learning_rate": 0.00018268156424581006, + "loss": 2.0901, "step": 62 }, { - "epoch": 2.52, - "learning_rate": 0.00017480000000000002, - "loss": 1.7753, + "epoch": 0.35, + "learning_rate": 0.00018240223463687153, + "loss": 2.1971, "step": 63 }, { - "epoch": 2.56, - "learning_rate": 0.0001744, - "loss": 2.0274, + "epoch": 0.36, + "learning_rate": 0.00018212290502793297, + "loss": 2.2602, "step": 64 }, { - "epoch": 2.6, - "learning_rate": 0.000174, - "loss": 1.8893, + "epoch": 0.36, + "learning_rate": 0.0001818435754189944, + "loss": 2.2194, "step": 65 }, { - "epoch": 2.64, - "learning_rate": 0.00017360000000000002, - "loss": 2.0341, + "epoch": 0.37, + "learning_rate": 0.00018156424581005588, + "loss": 2.1218, "step": 66 }, { - "epoch": 2.68, - "learning_rate": 0.0001732, - "loss": 2.1081, + "epoch": 0.37, + "learning_rate": 0.00018128491620111731, + "loss": 
2.2049, "step": 67 }, { - "epoch": 2.72, - "learning_rate": 0.0001728, - "loss": 1.9463, + "epoch": 0.38, + "learning_rate": 0.00018100558659217878, + "loss": 2.1521, "step": 68 }, { - "epoch": 2.76, - "learning_rate": 0.00017240000000000002, - "loss": 2.0607, + "epoch": 0.39, + "learning_rate": 0.00018072625698324025, + "loss": 2.112, "step": 69 }, { - "epoch": 2.8, - "learning_rate": 0.000172, - "loss": 1.9803, + "epoch": 0.39, + "learning_rate": 0.0001804469273743017, + "loss": 2.1906, "step": 70 }, { - "epoch": 2.84, - "learning_rate": 0.0001716, - "loss": 1.7663, + "epoch": 0.4, + "learning_rate": 0.00018016759776536313, + "loss": 2.1717, "step": 71 }, { - "epoch": 2.88, - "learning_rate": 0.00017120000000000001, - "loss": 2.1116, + "epoch": 0.4, + "learning_rate": 0.0001798882681564246, + "loss": 2.0712, "step": 72 }, { - "epoch": 2.92, - "learning_rate": 0.0001708, - "loss": 1.9039, + "epoch": 0.41, + "learning_rate": 0.00017960893854748603, + "loss": 2.141, "step": 73 }, { - "epoch": 2.96, - "learning_rate": 0.0001704, - "loss": 2.049, + "epoch": 0.41, + "learning_rate": 0.0001793296089385475, + "loss": 2.0656, "step": 74 }, { - "epoch": 3.0, - "learning_rate": 0.00017, - "loss": 1.991, + "epoch": 0.42, + "learning_rate": 0.00017905027932960894, + "loss": 2.1125, "step": 75 }, { - "epoch": 3.04, - "learning_rate": 0.0001696, - "loss": 1.8755, + "epoch": 0.42, + "learning_rate": 0.00017877094972067038, + "loss": 2.0869, "step": 76 }, { - "epoch": 3.08, - "learning_rate": 0.0001692, - "loss": 2.0367, + "epoch": 0.43, + "learning_rate": 0.00017849162011173187, + "loss": 2.2478, "step": 77 }, { - "epoch": 3.12, - "learning_rate": 0.0001688, - "loss": 1.9959, + "epoch": 0.44, + "learning_rate": 0.0001782122905027933, + "loss": 2.1535, "step": 78 }, { - "epoch": 3.16, - "learning_rate": 0.0001684, - "loss": 1.8051, + "epoch": 0.44, + "learning_rate": 0.00017793296089385475, + "loss": 2.1927, "step": 79 }, { - "epoch": 3.2, - "learning_rate": 0.000168, - "loss": 2.0446, + "epoch": 0.45, + "learning_rate": 0.00017765363128491622, + "loss": 2.1213, "step": 80 }, { - "epoch": 3.24, - "learning_rate": 0.0001676, - "loss": 1.7873, + "epoch": 0.45, + "learning_rate": 0.00017737430167597766, + "loss": 2.0981, "step": 81 }, { - "epoch": 3.28, - "learning_rate": 0.0001672, - "loss": 1.9264, + "epoch": 0.46, + "learning_rate": 0.00017709497206703912, + "loss": 2.1828, "step": 82 }, { - "epoch": 3.32, - "learning_rate": 0.0001668, - "loss": 1.8595, + "epoch": 0.46, + "learning_rate": 0.00017681564245810056, + "loss": 2.0562, "step": 83 }, { - "epoch": 3.36, - "learning_rate": 0.0001664, - "loss": 1.9992, + "epoch": 0.47, + "learning_rate": 0.000176536312849162, + "loss": 2.1334, "step": 84 }, { - "epoch": 3.4, - "learning_rate": 0.000166, - "loss": 1.8783, + "epoch": 0.47, + "learning_rate": 0.00017625698324022347, + "loss": 2.1225, "step": 85 }, { - "epoch": 3.44, - "learning_rate": 0.0001656, - "loss": 2.0216, + "epoch": 0.48, + "learning_rate": 0.00017597765363128493, + "loss": 2.2098, "step": 86 }, { - "epoch": 3.48, - "learning_rate": 0.0001652, - "loss": 1.947, + "epoch": 0.49, + "learning_rate": 0.00017569832402234637, + "loss": 2.1519, "step": 87 }, { - "epoch": 3.52, - "learning_rate": 0.0001648, - "loss": 1.9554, + "epoch": 0.49, + "learning_rate": 0.00017541899441340784, + "loss": 2.1132, "step": 88 }, { - "epoch": 3.56, - "learning_rate": 0.0001644, - "loss": 1.8563, + "epoch": 0.5, + "learning_rate": 0.00017513966480446928, + "loss": 2.0333, "step": 89 }, { - "epoch": 3.6, - 
"learning_rate": 0.000164, - "loss": 1.7795, + "epoch": 0.5, + "learning_rate": 0.00017486033519553075, + "loss": 2.2764, "step": 90 }, { - "epoch": 3.64, - "learning_rate": 0.0001636, - "loss": 1.9347, + "epoch": 0.51, + "learning_rate": 0.00017458100558659218, + "loss": 2.1838, "step": 91 }, { - "epoch": 3.68, - "learning_rate": 0.0001632, - "loss": 2.0078, + "epoch": 0.51, + "learning_rate": 0.00017430167597765362, + "loss": 2.1386, "step": 92 }, { - "epoch": 3.72, - "learning_rate": 0.0001628, - "loss": 1.9964, + "epoch": 0.52, + "learning_rate": 0.0001740223463687151, + "loss": 2.1034, "step": 93 }, { - "epoch": 3.76, - "learning_rate": 0.00016240000000000002, - "loss": 1.9004, + "epoch": 0.53, + "learning_rate": 0.00017374301675977656, + "loss": 2.0346, "step": 94 }, { - "epoch": 3.8, - "learning_rate": 0.000162, - "loss": 1.9446, + "epoch": 0.53, + "learning_rate": 0.000173463687150838, + "loss": 2.0274, "step": 95 }, { - "epoch": 3.84, - "learning_rate": 0.00016160000000000002, - "loss": 1.8857, + "epoch": 0.54, + "learning_rate": 0.00017318435754189946, + "loss": 2.1036, "step": 96 }, { - "epoch": 3.88, - "learning_rate": 0.00016120000000000002, - "loss": 1.8797, + "epoch": 0.54, + "learning_rate": 0.0001729050279329609, + "loss": 2.1208, "step": 97 }, { - "epoch": 3.92, - "learning_rate": 0.0001608, - "loss": 1.7643, + "epoch": 0.55, + "learning_rate": 0.00017262569832402237, + "loss": 2.0572, "step": 98 }, { - "epoch": 3.96, - "learning_rate": 0.00016040000000000002, - "loss": 1.9724, + "epoch": 0.55, + "learning_rate": 0.0001723463687150838, + "loss": 2.1702, "step": 99 }, { - "epoch": 4.0, - "learning_rate": 0.00016, - "loss": 1.9058, + "epoch": 0.56, + "learning_rate": 0.00017206703910614525, + "loss": 2.1302, "step": 100 }, { - "epoch": 4.04, - "learning_rate": 0.0001596, - "loss": 1.8663, + "epoch": 0.56, + "learning_rate": 0.0001717877094972067, + "loss": 2.0175, "step": 101 }, { - "epoch": 4.08, - "learning_rate": 0.00015920000000000002, - "loss": 1.8457, + "epoch": 0.57, + "learning_rate": 0.00017150837988826815, + "loss": 2.1006, "step": 102 }, { - "epoch": 4.12, - "learning_rate": 0.0001588, - "loss": 1.8394, + "epoch": 0.58, + "learning_rate": 0.00017122905027932962, + "loss": 2.0662, "step": 103 }, { - "epoch": 4.16, - "learning_rate": 0.00015840000000000003, - "loss": 1.8941, + "epoch": 0.58, + "learning_rate": 0.00017094972067039109, + "loss": 1.988, "step": 104 }, { - "epoch": 4.2, - "learning_rate": 0.00015800000000000002, - "loss": 1.8601, + "epoch": 0.59, + "learning_rate": 0.00017067039106145253, + "loss": 2.1008, "step": 105 }, { - "epoch": 4.24, - "learning_rate": 0.0001576, - "loss": 1.7365, + "epoch": 0.59, + "learning_rate": 0.00017039106145251396, + "loss": 2.1482, "step": 106 }, { - "epoch": 4.28, - "learning_rate": 0.00015720000000000003, - "loss": 1.8809, + "epoch": 0.6, + "learning_rate": 0.00017011173184357543, + "loss": 2.1052, "step": 107 }, { - "epoch": 4.32, - "learning_rate": 0.00015680000000000002, - "loss": 2.0206, + "epoch": 0.6, + "learning_rate": 0.00016983240223463687, + "loss": 2.0978, "step": 108 }, { - "epoch": 4.36, - "learning_rate": 0.0001564, - "loss": 1.7776, + "epoch": 0.61, + "learning_rate": 0.00016955307262569834, + "loss": 2.1303, "step": 109 }, { - "epoch": 4.4, - "learning_rate": 0.00015600000000000002, - "loss": 1.9089, + "epoch": 0.61, + "learning_rate": 0.00016927374301675978, + "loss": 2.0794, "step": 110 }, { - "epoch": 4.44, - "learning_rate": 0.00015560000000000001, - "loss": 1.8464, + "epoch": 0.62, + 
"learning_rate": 0.00016899441340782122, + "loss": 2.1059, "step": 111 }, { - "epoch": 4.48, - "learning_rate": 0.0001552, - "loss": 1.7943, + "epoch": 0.63, + "learning_rate": 0.0001687150837988827, + "loss": 1.9642, "step": 112 }, { - "epoch": 4.52, - "learning_rate": 0.00015480000000000002, - "loss": 1.8081, + "epoch": 0.63, + "learning_rate": 0.00016843575418994415, + "loss": 2.0415, "step": 113 }, { - "epoch": 4.56, - "learning_rate": 0.0001544, - "loss": 1.734, + "epoch": 0.64, + "learning_rate": 0.0001681564245810056, + "loss": 2.0795, "step": 114 }, { - "epoch": 4.6, - "learning_rate": 0.000154, - "loss": 1.8379, + "epoch": 0.64, + "learning_rate": 0.00016787709497206705, + "loss": 2.0238, "step": 115 }, { - "epoch": 4.64, - "learning_rate": 0.00015360000000000002, - "loss": 1.6841, + "epoch": 0.65, + "learning_rate": 0.0001675977653631285, + "loss": 2.078, "step": 116 }, { - "epoch": 4.68, - "learning_rate": 0.0001532, - "loss": 1.8253, + "epoch": 0.65, + "learning_rate": 0.00016731843575418996, + "loss": 2.1362, "step": 117 }, { - "epoch": 4.72, - "learning_rate": 0.0001528, - "loss": 1.7607, + "epoch": 0.66, + "learning_rate": 0.0001670391061452514, + "loss": 2.0552, "step": 118 }, { - "epoch": 4.76, - "learning_rate": 0.00015240000000000002, - "loss": 1.8205, + "epoch": 0.66, + "learning_rate": 0.00016675977653631284, + "loss": 2.213, "step": 119 }, { - "epoch": 4.8, - "learning_rate": 0.000152, - "loss": 1.8443, + "epoch": 0.67, + "learning_rate": 0.0001664804469273743, + "loss": 1.9999, "step": 120 }, { - "epoch": 4.84, - "learning_rate": 0.0001516, - "loss": 1.8824, + "epoch": 0.68, + "learning_rate": 0.00016620111731843577, + "loss": 2.06, "step": 121 }, { - "epoch": 4.88, - "learning_rate": 0.00015120000000000002, - "loss": 1.8369, + "epoch": 0.68, + "learning_rate": 0.0001659217877094972, + "loss": 2.0177, "step": 122 }, { - "epoch": 4.92, - "learning_rate": 0.0001508, - "loss": 1.8017, + "epoch": 0.69, + "learning_rate": 0.00016564245810055868, + "loss": 2.0504, "step": 123 }, { - "epoch": 4.96, - "learning_rate": 0.0001504, - "loss": 1.7858, + "epoch": 0.69, + "learning_rate": 0.00016536312849162012, + "loss": 2.0585, "step": 124 }, { - "epoch": 5.0, - "learning_rate": 0.00015000000000000001, - "loss": 1.8366, + "epoch": 0.7, + "learning_rate": 0.00016508379888268158, + "loss": 2.0273, "step": 125 }, { - "epoch": 5.04, - "learning_rate": 0.0001496, - "loss": 1.7353, + "epoch": 0.7, + "learning_rate": 0.00016480446927374302, + "loss": 2.0549, "step": 126 }, { - "epoch": 5.08, - "learning_rate": 0.0001492, - "loss": 1.6017, + "epoch": 0.71, + "learning_rate": 0.00016452513966480446, + "loss": 2.044, "step": 127 }, { - "epoch": 5.12, - "learning_rate": 0.0001488, - "loss": 1.6645, + "epoch": 0.72, + "learning_rate": 0.00016424581005586593, + "loss": 2.0731, "step": 128 }, { - "epoch": 5.16, - "learning_rate": 0.0001484, - "loss": 1.8663, + "epoch": 0.72, + "learning_rate": 0.00016396648044692737, + "loss": 2.0568, "step": 129 }, { - "epoch": 5.2, - "learning_rate": 0.000148, - "loss": 1.7009, + "epoch": 0.73, + "learning_rate": 0.00016368715083798883, + "loss": 2.007, "step": 130 }, { - "epoch": 5.24, - "learning_rate": 0.0001476, - "loss": 1.7371, + "epoch": 0.73, + "learning_rate": 0.0001634078212290503, + "loss": 2.0707, "step": 131 }, { - "epoch": 5.28, - "learning_rate": 0.0001472, - "loss": 1.8555, + "epoch": 0.74, + "learning_rate": 0.00016312849162011174, + "loss": 1.9793, "step": 132 }, { - "epoch": 5.32, - "learning_rate": 0.00014680000000000002, - "loss": 
1.6373, + "epoch": 0.74, + "learning_rate": 0.0001628491620111732, + "loss": 2.1311, "step": 133 }, { - "epoch": 5.36, - "learning_rate": 0.0001464, - "loss": 1.7211, + "epoch": 0.75, + "learning_rate": 0.00016256983240223465, + "loss": 2.0016, "step": 134 }, { - "epoch": 5.4, - "learning_rate": 0.000146, - "loss": 1.614, + "epoch": 0.75, + "learning_rate": 0.00016229050279329609, + "loss": 1.9945, "step": 135 }, { - "epoch": 5.44, - "learning_rate": 0.00014560000000000002, - "loss": 1.8736, + "epoch": 0.76, + "learning_rate": 0.00016201117318435755, + "loss": 2.0186, "step": 136 }, { - "epoch": 5.48, - "learning_rate": 0.0001452, - "loss": 1.7229, + "epoch": 0.77, + "learning_rate": 0.000161731843575419, + "loss": 2.0971, "step": 137 }, { - "epoch": 5.52, - "learning_rate": 0.0001448, - "loss": 1.7315, + "epoch": 0.77, + "learning_rate": 0.00016145251396648046, + "loss": 2.0883, "step": 138 }, { - "epoch": 5.56, - "learning_rate": 0.0001444, - "loss": 1.7259, + "epoch": 0.78, + "learning_rate": 0.00016117318435754192, + "loss": 2.0803, "step": 139 }, { - "epoch": 5.6, - "learning_rate": 0.000144, - "loss": 1.7032, + "epoch": 0.78, + "learning_rate": 0.00016089385474860336, + "loss": 2.0617, "step": 140 }, { - "epoch": 5.64, - "learning_rate": 0.0001436, - "loss": 1.9237, + "epoch": 0.79, + "learning_rate": 0.00016061452513966483, + "loss": 2.1265, "step": 141 }, { - "epoch": 5.68, - "learning_rate": 0.0001432, - "loss": 1.7821, + "epoch": 0.79, + "learning_rate": 0.00016033519553072627, + "loss": 2.0151, "step": 142 }, { - "epoch": 5.72, - "learning_rate": 0.0001428, - "loss": 1.6081, + "epoch": 0.8, + "learning_rate": 0.0001600558659217877, + "loss": 1.996, "step": 143 }, { - "epoch": 5.76, - "learning_rate": 0.0001424, - "loss": 1.7979, + "epoch": 0.8, + "learning_rate": 0.00015977653631284918, + "loss": 2.0164, "step": 144 }, { - "epoch": 5.8, - "learning_rate": 0.000142, - "loss": 1.6675, + "epoch": 0.81, + "learning_rate": 0.00015949720670391061, + "loss": 2.0314, "step": 145 }, { - "epoch": 5.84, - "learning_rate": 0.0001416, - "loss": 1.6758, + "epoch": 0.82, + "learning_rate": 0.00015921787709497208, + "loss": 1.9501, "step": 146 }, { - "epoch": 5.88, - "learning_rate": 0.0001412, - "loss": 1.7783, + "epoch": 0.82, + "learning_rate": 0.00015893854748603352, + "loss": 2.087, "step": 147 }, { - "epoch": 5.92, - "learning_rate": 0.0001408, - "loss": 1.5935, + "epoch": 0.83, + "learning_rate": 0.000158659217877095, + "loss": 2.0262, "step": 148 }, { - "epoch": 5.96, - "learning_rate": 0.0001404, - "loss": 1.7372, + "epoch": 0.83, + "learning_rate": 0.00015837988826815643, + "loss": 2.0765, "step": 149 }, { - "epoch": 6.0, - "learning_rate": 0.00014, - "loss": 1.6308, + "epoch": 0.84, + "learning_rate": 0.0001581005586592179, + "loss": 2.105, "step": 150 }, { - "epoch": 6.04, - "learning_rate": 0.0001396, - "loss": 1.5829, + "epoch": 0.84, + "learning_rate": 0.00015782122905027933, + "loss": 1.9863, "step": 151 }, { - "epoch": 6.08, - "learning_rate": 0.0001392, - "loss": 1.6538, + "epoch": 0.85, + "learning_rate": 0.0001575418994413408, + "loss": 1.9873, "step": 152 }, { - "epoch": 6.12, - "learning_rate": 0.00013879999999999999, - "loss": 1.7624, + "epoch": 0.85, + "learning_rate": 0.00015726256983240224, + "loss": 2.0094, "step": 153 }, { - "epoch": 6.16, - "learning_rate": 0.0001384, - "loss": 1.4968, + "epoch": 0.86, + "learning_rate": 0.00015698324022346368, + "loss": 1.9141, "step": 154 }, { - "epoch": 6.2, - "learning_rate": 0.000138, - "loss": 1.7305, + "epoch": 0.87, + 
"learning_rate": 0.00015670391061452514, + "loss": 1.917, "step": 155 }, { - "epoch": 6.24, - "learning_rate": 0.00013759999999999998, - "loss": 1.6536, + "epoch": 0.87, + "learning_rate": 0.00015642458100558658, + "loss": 2.109, "step": 156 }, { - "epoch": 6.28, - "learning_rate": 0.00013720000000000003, - "loss": 1.654, + "epoch": 0.88, + "learning_rate": 0.00015614525139664805, + "loss": 1.9799, "step": 157 }, { - "epoch": 6.32, - "learning_rate": 0.00013680000000000002, - "loss": 1.7163, + "epoch": 0.88, + "learning_rate": 0.00015586592178770952, + "loss": 1.9571, "step": 158 }, { - "epoch": 6.36, - "learning_rate": 0.0001364, - "loss": 1.6599, + "epoch": 0.89, + "learning_rate": 0.00015558659217877096, + "loss": 1.9931, "step": 159 }, { - "epoch": 6.4, - "learning_rate": 0.00013600000000000003, - "loss": 1.5729, + "epoch": 0.89, + "learning_rate": 0.00015530726256983242, + "loss": 2.1004, "step": 160 }, { - "epoch": 6.44, - "learning_rate": 0.00013560000000000002, - "loss": 1.5291, + "epoch": 0.9, + "learning_rate": 0.00015502793296089386, + "loss": 2.0385, "step": 161 }, { - "epoch": 6.48, - "learning_rate": 0.0001352, - "loss": 1.5666, + "epoch": 0.91, + "learning_rate": 0.0001547486033519553, + "loss": 1.9751, "step": 162 }, { - "epoch": 6.52, - "learning_rate": 0.00013480000000000002, - "loss": 1.7576, + "epoch": 0.91, + "learning_rate": 0.00015446927374301677, + "loss": 2.0544, "step": 163 }, { - "epoch": 6.56, - "learning_rate": 0.00013440000000000001, - "loss": 1.4462, + "epoch": 0.92, + "learning_rate": 0.0001541899441340782, + "loss": 2.0069, "step": 164 }, { - "epoch": 6.6, - "learning_rate": 0.000134, - "loss": 1.5659, + "epoch": 0.92, + "learning_rate": 0.00015391061452513967, + "loss": 1.9576, "step": 165 }, { - "epoch": 6.64, - "learning_rate": 0.00013360000000000002, - "loss": 1.6234, + "epoch": 0.93, + "learning_rate": 0.00015363128491620114, + "loss": 1.8991, "step": 166 }, { - "epoch": 6.68, - "learning_rate": 0.0001332, - "loss": 1.5176, + "epoch": 0.93, + "learning_rate": 0.00015335195530726258, + "loss": 1.9336, "step": 167 }, { - "epoch": 6.72, - "learning_rate": 0.0001328, - "loss": 1.667, + "epoch": 0.94, + "learning_rate": 0.00015307262569832405, + "loss": 1.9736, "step": 168 }, { - "epoch": 6.76, - "learning_rate": 0.00013240000000000002, - "loss": 1.6514, + "epoch": 0.94, + "learning_rate": 0.00015279329608938548, + "loss": 1.9702, "step": 169 }, { - "epoch": 6.8, - "learning_rate": 0.000132, - "loss": 1.7179, + "epoch": 0.95, + "learning_rate": 0.00015251396648044692, + "loss": 1.9055, "step": 170 }, { - "epoch": 6.84, - "learning_rate": 0.0001316, - "loss": 1.4373, + "epoch": 0.96, + "learning_rate": 0.0001522346368715084, + "loss": 2.0503, "step": 171 }, { - "epoch": 6.88, - "learning_rate": 0.00013120000000000002, - "loss": 1.7044, + "epoch": 0.96, + "learning_rate": 0.00015195530726256983, + "loss": 2.0039, "step": 172 }, { - "epoch": 6.92, - "learning_rate": 0.0001308, - "loss": 1.5673, + "epoch": 0.97, + "learning_rate": 0.0001516759776536313, + "loss": 1.9406, "step": 173 }, { - "epoch": 6.96, - "learning_rate": 0.0001304, - "loss": 1.6303, + "epoch": 0.97, + "learning_rate": 0.00015139664804469274, + "loss": 2.0525, "step": 174 }, { - "epoch": 7.0, - "learning_rate": 0.00013000000000000002, - "loss": 1.4828, + "epoch": 0.98, + "learning_rate": 0.0001511173184357542, + "loss": 1.9234, "step": 175 }, { - "epoch": 7.04, - "learning_rate": 0.0001296, - "loss": 1.6204, + "epoch": 0.98, + "learning_rate": 0.00015083798882681567, + "loss": 1.8614, "step": 
176 }, { - "epoch": 7.08, - "learning_rate": 0.00012920000000000002, - "loss": 1.4955, + "epoch": 0.99, + "learning_rate": 0.0001505586592178771, + "loss": 1.9616, "step": 177 }, { - "epoch": 7.12, - "learning_rate": 0.00012880000000000001, - "loss": 1.6109, + "epoch": 0.99, + "learning_rate": 0.00015027932960893855, + "loss": 1.9509, "step": 178 }, { - "epoch": 7.16, - "learning_rate": 0.0001284, - "loss": 1.5318, + "epoch": 1.0, + "learning_rate": 0.00015000000000000001, + "loss": 1.9592, "step": 179 }, { - "epoch": 7.2, - "learning_rate": 0.00012800000000000002, - "loss": 1.5702, + "epoch": 1.01, + "learning_rate": 0.00014972067039106145, + "loss": 1.8991, "step": 180 }, { - "epoch": 7.24, - "learning_rate": 0.0001276, - "loss": 1.4598, + "epoch": 1.01, + "learning_rate": 0.00014944134078212292, + "loss": 1.9127, "step": 181 }, { - "epoch": 7.28, - "learning_rate": 0.0001272, - "loss": 1.6145, + "epoch": 1.02, + "learning_rate": 0.00014916201117318436, + "loss": 1.8982, "step": 182 }, { - "epoch": 7.32, - "learning_rate": 0.00012680000000000002, - "loss": 1.3628, + "epoch": 1.02, + "learning_rate": 0.0001488826815642458, + "loss": 1.9534, "step": 183 }, { - "epoch": 7.36, - "learning_rate": 0.0001264, - "loss": 1.4578, + "epoch": 1.03, + "learning_rate": 0.0001486033519553073, + "loss": 1.7794, "step": 184 }, { - "epoch": 7.4, - "learning_rate": 0.000126, - "loss": 1.7094, + "epoch": 1.03, + "learning_rate": 0.00014832402234636873, + "loss": 1.7958, "step": 185 }, { - "epoch": 7.44, - "learning_rate": 0.00012560000000000002, - "loss": 1.4376, + "epoch": 1.04, + "learning_rate": 0.00014804469273743017, + "loss": 1.8282, "step": 186 }, { - "epoch": 7.48, - "learning_rate": 0.0001252, - "loss": 1.3393, + "epoch": 1.04, + "learning_rate": 0.00014776536312849164, + "loss": 2.0423, "step": 187 }, { - "epoch": 7.52, - "learning_rate": 0.0001248, - "loss": 1.4273, + "epoch": 1.05, + "learning_rate": 0.00014748603351955308, + "loss": 1.9282, "step": 188 }, { - "epoch": 7.56, - "learning_rate": 0.00012440000000000002, - "loss": 1.5506, + "epoch": 1.06, + "learning_rate": 0.00014720670391061454, + "loss": 1.9072, "step": 189 }, { - "epoch": 7.6, - "learning_rate": 0.000124, - "loss": 1.4774, + "epoch": 1.06, + "learning_rate": 0.00014692737430167598, + "loss": 1.8665, "step": 190 }, { - "epoch": 7.64, - "learning_rate": 0.0001236, - "loss": 1.4632, + "epoch": 1.07, + "learning_rate": 0.00014664804469273742, + "loss": 1.9021, "step": 191 }, { - "epoch": 7.68, - "learning_rate": 0.0001232, - "loss": 1.4568, + "epoch": 1.07, + "learning_rate": 0.0001463687150837989, + "loss": 1.7308, "step": 192 }, { - "epoch": 7.72, - "learning_rate": 0.0001228, - "loss": 1.6106, + "epoch": 1.08, + "learning_rate": 0.00014608938547486035, + "loss": 1.9165, "step": 193 }, { - "epoch": 7.76, - "learning_rate": 0.0001224, - "loss": 1.577, + "epoch": 1.08, + "learning_rate": 0.0001458100558659218, + "loss": 1.842, "step": 194 }, { - "epoch": 7.8, - "learning_rate": 0.000122, - "loss": 1.4805, + "epoch": 1.09, + "learning_rate": 0.00014553072625698326, + "loss": 1.9128, "step": 195 }, { - "epoch": 7.84, - "learning_rate": 0.0001216, - "loss": 1.4546, + "epoch": 1.09, + "learning_rate": 0.0001452513966480447, + "loss": 1.8005, "step": 196 }, { - "epoch": 7.88, - "learning_rate": 0.0001212, - "loss": 1.5164, + "epoch": 1.1, + "learning_rate": 0.00014497206703910614, + "loss": 1.8547, "step": 197 }, { - "epoch": 7.92, - "learning_rate": 0.0001208, - "loss": 1.5061, + "epoch": 1.11, + "learning_rate": 0.0001446927374301676, 
+ "loss": 1.9042, "step": 198 }, { - "epoch": 7.96, - "learning_rate": 0.0001204, - "loss": 1.5312, + "epoch": 1.11, + "learning_rate": 0.00014441340782122905, + "loss": 1.8609, "step": 199 }, { - "epoch": 8.0, - "learning_rate": 0.00012, - "loss": 1.4463, + "epoch": 1.12, + "learning_rate": 0.0001441340782122905, + "loss": 1.9591, "step": 200 }, { - "epoch": 8.04, - "learning_rate": 0.00011960000000000001, - "loss": 1.4207, + "epoch": 1.12, + "learning_rate": 0.00014385474860335195, + "loss": 1.8722, "step": 201 }, { - "epoch": 8.08, - "learning_rate": 0.0001192, - "loss": 1.4688, + "epoch": 1.13, + "learning_rate": 0.00014357541899441342, + "loss": 1.8535, "step": 202 }, { - "epoch": 8.12, - "learning_rate": 0.0001188, - "loss": 1.4445, + "epoch": 1.13, + "learning_rate": 0.00014329608938547488, + "loss": 1.8676, "step": 203 }, { - "epoch": 8.16, - "learning_rate": 0.0001184, - "loss": 1.4158, + "epoch": 1.14, + "learning_rate": 0.00014301675977653632, + "loss": 1.8976, "step": 204 }, { - "epoch": 8.2, - "learning_rate": 0.000118, - "loss": 1.3483, + "epoch": 1.15, + "learning_rate": 0.00014273743016759776, + "loss": 1.7723, "step": 205 }, { - "epoch": 8.24, - "learning_rate": 0.0001176, - "loss": 1.4117, + "epoch": 1.15, + "learning_rate": 0.00014245810055865923, + "loss": 1.8165, "step": 206 }, { - "epoch": 8.28, - "learning_rate": 0.0001172, - "loss": 1.4267, + "epoch": 1.16, + "learning_rate": 0.00014217877094972067, + "loss": 1.7811, "step": 207 }, { - "epoch": 8.32, - "learning_rate": 0.00011679999999999999, - "loss": 1.5547, + "epoch": 1.16, + "learning_rate": 0.00014189944134078214, + "loss": 1.908, "step": 208 }, { - "epoch": 8.36, - "learning_rate": 0.0001164, - "loss": 1.4883, + "epoch": 1.17, + "learning_rate": 0.00014162011173184357, + "loss": 1.7663, "step": 209 }, { - "epoch": 8.4, - "learning_rate": 0.000116, - "loss": 1.414, + "epoch": 1.17, + "learning_rate": 0.00014134078212290501, + "loss": 1.6779, "step": 210 }, { - "epoch": 8.44, - "learning_rate": 0.00011559999999999999, - "loss": 1.3254, + "epoch": 1.18, + "learning_rate": 0.0001410614525139665, + "loss": 1.9039, "step": 211 }, { - "epoch": 8.48, - "learning_rate": 0.0001152, - "loss": 1.22, + "epoch": 1.18, + "learning_rate": 0.00014078212290502795, + "loss": 1.8033, "step": 212 }, { - "epoch": 8.52, - "learning_rate": 0.0001148, - "loss": 1.3224, + "epoch": 1.19, + "learning_rate": 0.00014050279329608939, + "loss": 1.8251, "step": 213 }, { - "epoch": 8.56, - "learning_rate": 0.0001144, - "loss": 1.6158, + "epoch": 1.2, + "learning_rate": 0.00014022346368715085, + "loss": 1.8505, "step": 214 }, { - "epoch": 8.6, - "learning_rate": 0.00011399999999999999, - "loss": 1.2817, + "epoch": 1.2, + "learning_rate": 0.0001399441340782123, + "loss": 1.8147, "step": 215 }, { - "epoch": 8.64, - "learning_rate": 0.0001136, - "loss": 1.4077, + "epoch": 1.21, + "learning_rate": 0.00013966480446927376, + "loss": 1.8419, "step": 216 }, { - "epoch": 8.68, - "learning_rate": 0.0001132, - "loss": 1.5311, + "epoch": 1.21, + "learning_rate": 0.0001393854748603352, + "loss": 1.8401, "step": 217 }, { - "epoch": 8.72, - "learning_rate": 0.00011279999999999999, - "loss": 1.5289, + "epoch": 1.22, + "learning_rate": 0.00013910614525139664, + "loss": 1.8912, "step": 218 }, { - "epoch": 8.76, - "learning_rate": 0.00011240000000000002, - "loss": 1.4576, + "epoch": 1.22, + "learning_rate": 0.0001388268156424581, + "loss": 1.7548, "step": 219 }, { - "epoch": 8.8, - "learning_rate": 0.00011200000000000001, - "loss": 1.2063, + "epoch": 1.23, + 
"learning_rate": 0.00013854748603351957, + "loss": 1.8741, "step": 220 }, { - "epoch": 8.84, - "learning_rate": 0.00011160000000000002, - "loss": 1.3543, + "epoch": 1.23, + "learning_rate": 0.000138268156424581, + "loss": 1.9549, "step": 221 }, { - "epoch": 8.88, - "learning_rate": 0.00011120000000000002, - "loss": 1.4352, + "epoch": 1.24, + "learning_rate": 0.00013798882681564248, + "loss": 1.9093, "step": 222 }, { - "epoch": 8.92, - "learning_rate": 0.00011080000000000001, - "loss": 1.3058, + "epoch": 1.25, + "learning_rate": 0.00013770949720670392, + "loss": 1.7896, "step": 223 }, { - "epoch": 8.96, - "learning_rate": 0.00011040000000000001, - "loss": 1.3475, + "epoch": 1.25, + "learning_rate": 0.00013743016759776538, + "loss": 1.8491, "step": 224 }, { - "epoch": 9.0, - "learning_rate": 0.00011000000000000002, - "loss": 1.1293, + "epoch": 1.26, + "learning_rate": 0.00013715083798882682, + "loss": 1.7851, "step": 225 }, { - "epoch": 9.04, - "learning_rate": 0.00010960000000000001, - "loss": 1.1671, + "epoch": 1.26, + "learning_rate": 0.00013687150837988826, + "loss": 1.6992, "step": 226 }, { - "epoch": 9.08, - "learning_rate": 0.00010920000000000001, - "loss": 1.4726, + "epoch": 1.27, + "learning_rate": 0.00013659217877094973, + "loss": 1.9765, "step": 227 }, { - "epoch": 9.12, - "learning_rate": 0.00010880000000000002, - "loss": 1.3986, + "epoch": 1.27, + "learning_rate": 0.00013631284916201117, + "loss": 1.8179, "step": 228 }, { - "epoch": 9.16, - "learning_rate": 0.00010840000000000002, - "loss": 1.2731, + "epoch": 1.28, + "learning_rate": 0.00013603351955307263, + "loss": 1.8548, "step": 229 }, { - "epoch": 9.2, - "learning_rate": 0.00010800000000000001, - "loss": 1.2212, + "epoch": 1.28, + "learning_rate": 0.0001357541899441341, + "loss": 1.8843, "step": 230 }, { - "epoch": 9.24, - "learning_rate": 0.00010760000000000001, - "loss": 1.3067, + "epoch": 1.29, + "learning_rate": 0.00013547486033519554, + "loss": 1.9105, "step": 231 }, { - "epoch": 9.28, - "learning_rate": 0.00010720000000000002, - "loss": 1.278, + "epoch": 1.3, + "learning_rate": 0.00013519553072625698, + "loss": 1.8748, "step": 232 }, { - "epoch": 9.32, - "learning_rate": 0.00010680000000000001, - "loss": 1.4141, + "epoch": 1.3, + "learning_rate": 0.00013491620111731844, + "loss": 1.7976, "step": 233 }, { - "epoch": 9.36, - "learning_rate": 0.00010640000000000001, - "loss": 1.2885, + "epoch": 1.31, + "learning_rate": 0.00013463687150837988, + "loss": 1.7369, "step": 234 }, { - "epoch": 9.4, - "learning_rate": 0.00010600000000000002, - "loss": 1.466, + "epoch": 1.31, + "learning_rate": 0.00013435754189944135, + "loss": 1.7808, "step": 235 }, { - "epoch": 9.44, - "learning_rate": 0.0001056, - "loss": 1.2979, + "epoch": 1.32, + "learning_rate": 0.0001340782122905028, + "loss": 1.8385, "step": 236 }, { - "epoch": 9.48, - "learning_rate": 0.00010520000000000001, - "loss": 1.1622, + "epoch": 1.32, + "learning_rate": 0.00013379888268156423, + "loss": 1.8295, "step": 237 }, { - "epoch": 9.52, - "learning_rate": 0.00010480000000000001, - "loss": 1.2504, + "epoch": 1.33, + "learning_rate": 0.00013351955307262572, + "loss": 1.757, "step": 238 }, { - "epoch": 9.56, - "learning_rate": 0.0001044, - "loss": 1.228, + "epoch": 1.34, + "learning_rate": 0.00013324022346368716, + "loss": 1.7904, "step": 239 }, { - "epoch": 9.6, - "learning_rate": 0.00010400000000000001, - "loss": 1.3607, + "epoch": 1.34, + "learning_rate": 0.0001329608938547486, + "loss": 1.7632, "step": 240 }, { - "epoch": 9.64, - "learning_rate": 0.00010360000000000001, 
- "loss": 1.1921, + "epoch": 1.35, + "learning_rate": 0.00013268156424581007, + "loss": 1.7867, "step": 241 }, { - "epoch": 9.68, - "learning_rate": 0.0001032, - "loss": 1.2114, + "epoch": 1.35, + "learning_rate": 0.0001324022346368715, + "loss": 1.8259, "step": 242 }, { - "epoch": 9.72, - "learning_rate": 0.0001028, - "loss": 1.2385, + "epoch": 1.36, + "learning_rate": 0.00013212290502793297, + "loss": 1.6655, "step": 243 }, { - "epoch": 9.76, - "learning_rate": 0.00010240000000000001, - "loss": 1.3227, + "epoch": 1.36, + "learning_rate": 0.0001318435754189944, + "loss": 1.6848, "step": 244 }, { - "epoch": 9.8, - "learning_rate": 0.00010200000000000001, - "loss": 1.3935, + "epoch": 1.37, + "learning_rate": 0.00013156424581005585, + "loss": 1.7931, "step": 245 }, { - "epoch": 9.84, - "learning_rate": 0.0001016, - "loss": 1.2209, + "epoch": 1.37, + "learning_rate": 0.00013128491620111732, + "loss": 1.7868, "step": 246 }, { - "epoch": 9.88, - "learning_rate": 0.00010120000000000001, - "loss": 1.1541, + "epoch": 1.38, + "learning_rate": 0.00013100558659217879, + "loss": 1.7732, "step": 247 }, { - "epoch": 9.92, - "learning_rate": 0.00010080000000000001, - "loss": 1.4049, + "epoch": 1.39, + "learning_rate": 0.00013072625698324022, + "loss": 1.7851, "step": 248 }, { - "epoch": 9.96, - "learning_rate": 0.0001004, - "loss": 1.13, + "epoch": 1.39, + "learning_rate": 0.0001304469273743017, + "loss": 1.7406, "step": 249 }, { - "epoch": 10.0, - "learning_rate": 0.0001, - "loss": 1.3783, + "epoch": 1.4, + "learning_rate": 0.00013016759776536313, + "loss": 1.5853, "step": 250 }, { - "epoch": 10.04, - "learning_rate": 9.960000000000001e-05, - "loss": 1.3207, + "epoch": 1.4, + "learning_rate": 0.0001298882681564246, + "loss": 1.8271, "step": 251 }, { - "epoch": 10.08, - "learning_rate": 9.92e-05, - "loss": 1.2697, + "epoch": 1.41, + "learning_rate": 0.00012960893854748604, + "loss": 1.6054, "step": 252 }, { - "epoch": 10.12, - "learning_rate": 9.88e-05, - "loss": 1.091, + "epoch": 1.41, + "learning_rate": 0.00012932960893854748, + "loss": 1.6884, "step": 253 }, { - "epoch": 10.16, - "learning_rate": 9.84e-05, - "loss": 1.0463, + "epoch": 1.42, + "learning_rate": 0.00012905027932960894, + "loss": 1.7333, "step": 254 }, { - "epoch": 10.2, - "learning_rate": 9.8e-05, - "loss": 1.3686, + "epoch": 1.42, + "learning_rate": 0.00012877094972067038, + "loss": 1.803, "step": 255 }, { - "epoch": 10.24, - "learning_rate": 9.76e-05, - "loss": 1.3109, + "epoch": 1.43, + "learning_rate": 0.00012849162011173185, + "loss": 1.63, "step": 256 }, { - "epoch": 10.28, - "learning_rate": 9.72e-05, - "loss": 1.2338, + "epoch": 1.44, + "learning_rate": 0.00012821229050279331, + "loss": 1.8023, "step": 257 }, { - "epoch": 10.32, - "learning_rate": 9.680000000000001e-05, - "loss": 1.12, + "epoch": 1.44, + "learning_rate": 0.00012793296089385475, + "loss": 1.762, "step": 258 }, { - "epoch": 10.36, - "learning_rate": 9.64e-05, - "loss": 1.1864, + "epoch": 1.45, + "learning_rate": 0.00012765363128491622, + "loss": 1.7565, "step": 259 }, { - "epoch": 10.4, - "learning_rate": 9.6e-05, - "loss": 1.1062, + "epoch": 1.45, + "learning_rate": 0.00012737430167597766, + "loss": 1.5937, "step": 260 }, { - "epoch": 10.44, - "learning_rate": 9.56e-05, - "loss": 1.2924, + "epoch": 1.46, + "learning_rate": 0.0001270949720670391, + "loss": 1.7208, "step": 261 }, { - "epoch": 10.48, - "learning_rate": 9.52e-05, - "loss": 1.1931, + "epoch": 1.46, + "learning_rate": 0.00012681564245810057, + "loss": 1.8097, "step": 262 }, { - "epoch": 10.52, - 
"learning_rate": 9.48e-05, - "loss": 1.2366, + "epoch": 1.47, + "learning_rate": 0.000126536312849162, + "loss": 1.7042, "step": 263 }, { - "epoch": 10.56, - "learning_rate": 9.44e-05, - "loss": 1.1589, + "epoch": 1.47, + "learning_rate": 0.00012625698324022347, + "loss": 1.7892, "step": 264 }, { - "epoch": 10.6, - "learning_rate": 9.4e-05, - "loss": 1.1197, + "epoch": 1.48, + "learning_rate": 0.00012597765363128494, + "loss": 1.752, "step": 265 }, { - "epoch": 10.64, - "learning_rate": 9.360000000000001e-05, - "loss": 1.3072, + "epoch": 1.49, + "learning_rate": 0.00012569832402234638, + "loss": 1.7333, "step": 266 }, { - "epoch": 10.68, - "learning_rate": 9.320000000000002e-05, - "loss": 1.1173, + "epoch": 1.49, + "learning_rate": 0.00012541899441340784, + "loss": 1.7492, "step": 267 }, { - "epoch": 10.72, - "learning_rate": 9.28e-05, - "loss": 1.0378, + "epoch": 1.5, + "learning_rate": 0.00012513966480446928, + "loss": 1.818, "step": 268 }, { - "epoch": 10.76, - "learning_rate": 9.240000000000001e-05, - "loss": 1.1746, + "epoch": 1.5, + "learning_rate": 0.00012486033519553072, + "loss": 1.7146, "step": 269 }, { - "epoch": 10.8, - "learning_rate": 9.200000000000001e-05, - "loss": 1.1144, + "epoch": 1.51, + "learning_rate": 0.0001245810055865922, + "loss": 1.7958, "step": 270 }, { - "epoch": 10.84, - "learning_rate": 9.16e-05, - "loss": 1.1862, + "epoch": 1.51, + "learning_rate": 0.00012430167597765363, + "loss": 1.7212, "step": 271 }, { - "epoch": 10.88, - "learning_rate": 9.120000000000001e-05, - "loss": 1.0816, + "epoch": 1.52, + "learning_rate": 0.0001240223463687151, + "loss": 1.8835, "step": 272 }, { - "epoch": 10.92, - "learning_rate": 9.080000000000001e-05, - "loss": 1.2288, + "epoch": 1.53, + "learning_rate": 0.00012374301675977656, + "loss": 1.7486, "step": 273 }, { - "epoch": 10.96, - "learning_rate": 9.04e-05, - "loss": 1.3031, + "epoch": 1.53, + "learning_rate": 0.000123463687150838, + "loss": 1.8103, "step": 274 }, { - "epoch": 11.0, - "learning_rate": 9e-05, - "loss": 1.1646, + "epoch": 1.54, + "learning_rate": 0.00012318435754189944, + "loss": 1.7557, "step": 275 }, { - "epoch": 11.04, - "learning_rate": 8.960000000000001e-05, - "loss": 1.2088, + "epoch": 1.54, + "learning_rate": 0.0001229050279329609, + "loss": 1.6198, "step": 276 }, { - "epoch": 11.08, - "learning_rate": 8.92e-05, - "loss": 1.1465, + "epoch": 1.55, + "learning_rate": 0.00012262569832402235, + "loss": 1.6971, "step": 277 }, { - "epoch": 11.12, - "learning_rate": 8.88e-05, - "loss": 1.0313, + "epoch": 1.55, + "learning_rate": 0.0001223463687150838, + "loss": 1.668, "step": 278 }, { - "epoch": 11.16, - "learning_rate": 8.840000000000001e-05, - "loss": 0.9662, + "epoch": 1.56, + "learning_rate": 0.00012206703910614525, + "loss": 1.8795, "step": 279 }, { - "epoch": 11.2, - "learning_rate": 8.800000000000001e-05, - "loss": 1.051, + "epoch": 1.56, + "learning_rate": 0.0001217877094972067, + "loss": 1.6412, "step": 280 }, { - "epoch": 11.24, - "learning_rate": 8.76e-05, - "loss": 1.1708, + "epoch": 1.57, + "learning_rate": 0.00012150837988826816, + "loss": 1.7497, "step": 281 }, { - "epoch": 11.28, - "learning_rate": 8.72e-05, - "loss": 1.0236, + "epoch": 1.58, + "learning_rate": 0.00012122905027932962, + "loss": 1.5577, "step": 282 }, { - "epoch": 11.32, - "learning_rate": 8.680000000000001e-05, - "loss": 1.1359, + "epoch": 1.58, + "learning_rate": 0.00012094972067039108, + "loss": 1.8049, "step": 283 }, { - "epoch": 11.36, - "learning_rate": 8.64e-05, - "loss": 1.1398, + "epoch": 1.59, + "learning_rate": 
0.00012067039106145253, + "loss": 1.6834, "step": 284 }, { - "epoch": 11.4, - "learning_rate": 8.6e-05, - "loss": 1.0213, + "epoch": 1.59, + "learning_rate": 0.00012039106145251397, + "loss": 1.7978, "step": 285 }, { - "epoch": 11.44, - "learning_rate": 8.560000000000001e-05, - "loss": 1.1788, + "epoch": 1.6, + "learning_rate": 0.00012011173184357542, + "loss": 1.6558, "step": 286 }, { - "epoch": 11.48, - "learning_rate": 8.52e-05, - "loss": 1.0387, + "epoch": 1.6, + "learning_rate": 0.00011983240223463687, + "loss": 1.7561, "step": 287 }, { - "epoch": 11.52, - "learning_rate": 8.48e-05, - "loss": 0.9476, + "epoch": 1.61, + "learning_rate": 0.00011955307262569833, + "loss": 1.6807, "step": 288 }, { - "epoch": 11.56, - "learning_rate": 8.44e-05, - "loss": 1.1954, + "epoch": 1.61, + "learning_rate": 0.00011927374301675978, + "loss": 1.6937, "step": 289 }, { - "epoch": 11.6, - "learning_rate": 8.4e-05, - "loss": 1.0309, + "epoch": 1.62, + "learning_rate": 0.00011899441340782122, + "loss": 1.6591, "step": 290 }, { - "epoch": 11.64, - "learning_rate": 8.36e-05, - "loss": 1.1321, + "epoch": 1.63, + "learning_rate": 0.0001187150837988827, + "loss": 1.6771, "step": 291 }, { - "epoch": 11.68, - "learning_rate": 8.32e-05, - "loss": 1.0914, + "epoch": 1.63, + "learning_rate": 0.00011843575418994415, + "loss": 1.7743, "step": 292 }, { - "epoch": 11.72, - "learning_rate": 8.28e-05, - "loss": 1.222, + "epoch": 1.64, + "learning_rate": 0.00011815642458100559, + "loss": 1.5857, "step": 293 }, { - "epoch": 11.76, - "learning_rate": 8.24e-05, - "loss": 1.0127, + "epoch": 1.64, + "learning_rate": 0.00011787709497206705, + "loss": 1.6999, "step": 294 }, { - "epoch": 11.8, - "learning_rate": 8.2e-05, - "loss": 1.1977, + "epoch": 1.65, + "learning_rate": 0.0001175977653631285, + "loss": 1.5661, "step": 295 }, { - "epoch": 11.84, - "learning_rate": 8.16e-05, - "loss": 1.122, + "epoch": 1.65, + "learning_rate": 0.00011731843575418995, + "loss": 1.7235, "step": 296 }, { - "epoch": 11.88, - "learning_rate": 8.120000000000001e-05, - "loss": 1.1543, + "epoch": 1.66, + "learning_rate": 0.0001170391061452514, + "loss": 1.607, "step": 297 }, { - "epoch": 11.92, - "learning_rate": 8.080000000000001e-05, - "loss": 1.1767, + "epoch": 1.66, + "learning_rate": 0.00011675977653631284, + "loss": 1.68, "step": 298 }, { - "epoch": 11.96, - "learning_rate": 8.04e-05, - "loss": 1.1341, + "epoch": 1.67, + "learning_rate": 0.0001164804469273743, + "loss": 1.6938, "step": 299 }, { - "epoch": 12.0, - "learning_rate": 8e-05, - "loss": 1.0107, + "epoch": 1.68, + "learning_rate": 0.00011620111731843578, + "loss": 1.6315, "step": 300 } ], "logging_steps": 1, - "max_steps": 500, - "num_train_epochs": 20, + "max_steps": 716, + "num_train_epochs": 4, "save_steps": 100, - "total_flos": 1.0209241970264064e+17, + "total_flos": 1.5369996759656448e+17, "trial_name": null, "trial_params": null } diff --git a/checkpoint-300/training_args.bin b/checkpoint-300/training_args.bin index 4de6572a838c337c9990635a9406ebf46c0ec336..c8672c716e925d0028b4938db147703f58656ff7 100644 --- a/checkpoint-300/training_args.bin +++ b/checkpoint-300/training_args.bin @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:6ef74b31950ae6c8955316bed48c343fb06cda0cb6a8a54ca46ca3cb681c8736 +oid sha256:188ae1c421cc0c6435d1f71d8d3423ac4abc7dba0e6fc2efcbc4dbe77c741317 size 4027 diff --git a/checkpoint-400/README.md b/checkpoint-400/README.md index 08371015f02382e6fcba318f4aaea54ae52cd3c4..2f257a448caef1c59022426ad3ec9ba80acb3821 100644 --- 
a/checkpoint-400/README.md +++ b/checkpoint-400/README.md @@ -4,6 +4,30 @@ library_name: peft ## Training procedure +The following `bitsandbytes` quantization config was used during training: +- quant_method: bitsandbytes +- load_in_8bit: True +- load_in_4bit: False +- llm_int8_threshold: 6.0 +- llm_int8_skip_modules: None +- llm_int8_enable_fp32_cpu_offload: False +- llm_int8_has_fp16_weight: False +- bnb_4bit_quant_type: fp4 +- bnb_4bit_use_double_quant: False +- bnb_4bit_compute_dtype: float32 + +The following `bitsandbytes` quantization config was used during training: +- quant_method: bitsandbytes +- load_in_8bit: True +- load_in_4bit: False +- llm_int8_threshold: 6.0 +- llm_int8_skip_modules: None +- llm_int8_enable_fp32_cpu_offload: False +- llm_int8_has_fp16_weight: False +- bnb_4bit_quant_type: fp4 +- bnb_4bit_use_double_quant: False +- bnb_4bit_compute_dtype: float32 + The following `bitsandbytes` quantization config was used during training: - quant_method: bitsandbytes - load_in_8bit: True @@ -29,6 +53,8 @@ The following `bitsandbytes` quantization config was used during training: - bnb_4bit_compute_dtype: float32 ### Framework versions +- PEFT 0.6.0.dev0 +- PEFT 0.6.0.dev0 - PEFT 0.6.0.dev0 - PEFT 0.6.0.dev0 diff --git a/checkpoint-400/adapter_model.bin b/checkpoint-400/adapter_model.bin index c3e6363275595b219717dfc19f12692e62350c8f..1b6fe766ab6e3bde1e95c1168e8b27f515e22af0 100644 --- a/checkpoint-400/adapter_model.bin +++ b/checkpoint-400/adapter_model.bin @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:d2314e06a7452fc12576ddd903f95a2753dfd474755a766d263b3d938b19d66f +oid sha256:e7b701342ea311c1e25f0f6953505f048df3f56027dc5c764b95df4491bfde1c size 39409357 diff --git a/checkpoint-400/optimizer.pt b/checkpoint-400/optimizer.pt index 17d28ff148a94bd700bad3627e4595256db5ea75..0119015704369e84c41214b2eda49f7eed10bbe8 100644 --- a/checkpoint-400/optimizer.pt +++ b/checkpoint-400/optimizer.pt @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:744a5e62f698effd66997373bf91c530543a99d8828c460626ac0cd2032b7420 +oid sha256:da79792a601e6737b980ff24e453353405dce9a807a0964433ac96d5a97b84aa size 78844421 diff --git a/checkpoint-400/rng_state.pth b/checkpoint-400/rng_state.pth index ce32862861da12da23bbb01ee9f59c5232cb111d..e0df6592e27bef84a4beb9293e7b49666a4d652e 100644 --- a/checkpoint-400/rng_state.pth +++ b/checkpoint-400/rng_state.pth @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:51f3c17c1e7ce128318be2166e96b6dda4d1ff79c8b44e6fb06488bf0bbfcf8d +oid sha256:7508d4b8dd267de5cc58e972da25236687927651336a28f292c92f7f23951475 size 14575 diff --git a/checkpoint-400/scheduler.pt b/checkpoint-400/scheduler.pt index 74eed63b650e5eae737878728f47e3c4aaf6a0c3..b3ccbb3bd3df5add5130e76711d025db7007acb0 100644 --- a/checkpoint-400/scheduler.pt +++ b/checkpoint-400/scheduler.pt @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:a9aafe6849dc1b38ea86a67ee0dd398e835f1054275ee4627209b6ffeca7902f +oid sha256:04fdaf305b17644f9d215d28be45b77e6d0f8e4b5adc1c7045c50a61bd8f3c5b size 627 diff --git a/checkpoint-400/tokenizer.json b/checkpoint-400/tokenizer.json index dbf002cafbd4818dcff2abc9156c088d681b4533..673c31abdeadf6576c3c754df86459e1ad64e207 100644 --- a/checkpoint-400/tokenizer.json +++ b/checkpoint-400/tokenizer.json @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:17a208233d2ee8d8c83b23bc214df737c44806a1919f444e89b31e586cd956ba -size 14500471 +oid 
sha256:85b00d7db4df5df2e3f01cacc3feda246002a672f3356eec7f4b04a22eb0dfbe +size 14500570 diff --git a/checkpoint-400/trainer_state.json b/checkpoint-400/trainer_state.json index b6483d39779ceb63a8e9a7a661a5286d66760130..f60e92395bfa70b2808af040d92ac135cc2b0e93 100644 --- a/checkpoint-400/trainer_state.json +++ b/checkpoint-400/trainer_state.json @@ -1,7 +1,7 @@ { "best_metric": null, "best_model_checkpoint": null, - "epoch": 16.0, + "epoch": 2.2346368715083798, "eval_steps": 500, "global_step": 400, "is_hyper_param_search": false, @@ -9,2411 +9,2411 @@ "is_world_process_zero": true, "log_history": [ { - "epoch": 0.04, - "learning_rate": 0.0001996, - "loss": 2.4683, + "epoch": 0.01, + "learning_rate": 0.00019972067039106145, + "loss": 2.6443, "step": 1 }, { - "epoch": 0.08, - "learning_rate": 0.00019920000000000002, - "loss": 2.2023, + "epoch": 0.01, + "learning_rate": 0.00019944134078212292, + "loss": 2.4104, "step": 2 }, { - "epoch": 0.12, - "learning_rate": 0.0001988, - "loss": 2.3039, + "epoch": 0.02, + "learning_rate": 0.00019916201117318435, + "loss": 2.4975, "step": 3 }, { - "epoch": 0.16, - "learning_rate": 0.0001984, - "loss": 2.1855, + "epoch": 0.02, + "learning_rate": 0.00019888268156424582, + "loss": 2.3513, "step": 4 }, { - "epoch": 0.2, - "learning_rate": 0.00019800000000000002, - "loss": 2.328, + "epoch": 0.03, + "learning_rate": 0.0001986033519553073, + "loss": 2.4274, "step": 5 }, { - "epoch": 0.24, - "learning_rate": 0.0001976, - "loss": 2.3407, + "epoch": 0.03, + "learning_rate": 0.00019832402234636873, + "loss": 2.3628, "step": 6 }, { - "epoch": 0.28, - "learning_rate": 0.0001972, - "loss": 2.3432, + "epoch": 0.04, + "learning_rate": 0.0001980446927374302, + "loss": 2.3567, "step": 7 }, { - "epoch": 0.32, - "learning_rate": 0.0001968, - "loss": 2.2333, + "epoch": 0.04, + "learning_rate": 0.00019776536312849163, + "loss": 2.4121, "step": 8 }, { - "epoch": 0.36, - "learning_rate": 0.0001964, - "loss": 2.1761, + "epoch": 0.05, + "learning_rate": 0.00019748603351955307, + "loss": 2.4033, "step": 9 }, { - "epoch": 0.4, - "learning_rate": 0.000196, - "loss": 2.1473, + "epoch": 0.06, + "learning_rate": 0.00019720670391061454, + "loss": 2.2805, "step": 10 }, { - "epoch": 0.44, - "learning_rate": 0.0001956, - "loss": 2.2076, + "epoch": 0.06, + "learning_rate": 0.00019692737430167598, + "loss": 2.2639, "step": 11 }, { - "epoch": 0.48, - "learning_rate": 0.0001952, - "loss": 2.1925, + "epoch": 0.07, + "learning_rate": 0.00019664804469273744, + "loss": 2.2724, "step": 12 }, { - "epoch": 0.52, - "learning_rate": 0.0001948, - "loss": 2.1694, + "epoch": 0.07, + "learning_rate": 0.00019636871508379888, + "loss": 2.332, "step": 13 }, { - "epoch": 0.56, - "learning_rate": 0.0001944, - "loss": 2.1056, + "epoch": 0.08, + "learning_rate": 0.00019608938547486035, + "loss": 2.2261, "step": 14 }, { - "epoch": 0.6, - "learning_rate": 0.000194, - "loss": 2.2426, + "epoch": 0.08, + "learning_rate": 0.00019581005586592182, + "loss": 2.2208, "step": 15 }, { - "epoch": 0.64, - "learning_rate": 0.00019360000000000002, - "loss": 2.2635, + "epoch": 0.09, + "learning_rate": 0.00019553072625698326, + "loss": 2.3351, "step": 16 }, { - "epoch": 0.68, - "learning_rate": 0.0001932, - "loss": 2.251, + "epoch": 0.09, + "learning_rate": 0.0001952513966480447, + "loss": 2.2475, "step": 17 }, { - "epoch": 0.72, - "learning_rate": 0.0001928, - "loss": 2.224, + "epoch": 0.1, + "learning_rate": 0.00019497206703910616, + "loss": 2.3283, "step": 18 }, { - "epoch": 0.76, - "learning_rate": 0.00019240000000000001, - 
"loss": 2.1848, + "epoch": 0.11, + "learning_rate": 0.0001946927374301676, + "loss": 2.1346, "step": 19 }, { - "epoch": 0.8, - "learning_rate": 0.000192, - "loss": 2.1341, + "epoch": 0.11, + "learning_rate": 0.00019441340782122907, + "loss": 2.131, "step": 20 }, { - "epoch": 0.84, - "learning_rate": 0.0001916, - "loss": 2.126, + "epoch": 0.12, + "learning_rate": 0.0001941340782122905, + "loss": 2.1718, "step": 21 }, { - "epoch": 0.88, - "learning_rate": 0.0001912, - "loss": 2.0959, + "epoch": 0.12, + "learning_rate": 0.00019385474860335195, + "loss": 2.2446, "step": 22 }, { - "epoch": 0.92, - "learning_rate": 0.0001908, - "loss": 2.2446, + "epoch": 0.13, + "learning_rate": 0.0001935754189944134, + "loss": 2.306, "step": 23 }, { - "epoch": 0.96, - "learning_rate": 0.0001904, - "loss": 1.9448, + "epoch": 0.13, + "learning_rate": 0.00019329608938547488, + "loss": 2.1908, "step": 24 }, { - "epoch": 1.0, - "learning_rate": 0.00019, - "loss": 2.0939, + "epoch": 0.14, + "learning_rate": 0.00019301675977653632, + "loss": 2.2844, "step": 25 }, { - "epoch": 1.04, - "learning_rate": 0.0001896, - "loss": 2.1247, + "epoch": 0.15, + "learning_rate": 0.00019273743016759779, + "loss": 2.2235, "step": 26 }, { - "epoch": 1.08, - "learning_rate": 0.0001892, - "loss": 1.9417, + "epoch": 0.15, + "learning_rate": 0.00019245810055865922, + "loss": 2.1842, "step": 27 }, { - "epoch": 1.12, - "learning_rate": 0.0001888, - "loss": 2.105, + "epoch": 0.16, + "learning_rate": 0.00019217877094972066, + "loss": 2.2675, "step": 28 }, { - "epoch": 1.16, - "learning_rate": 0.0001884, - "loss": 2.1595, + "epoch": 0.16, + "learning_rate": 0.00019189944134078213, + "loss": 2.2532, "step": 29 }, { - "epoch": 1.2, - "learning_rate": 0.000188, - "loss": 2.2009, + "epoch": 0.17, + "learning_rate": 0.00019162011173184357, + "loss": 2.1788, "step": 30 }, { - "epoch": 1.24, - "learning_rate": 0.0001876, - "loss": 2.0784, + "epoch": 0.17, + "learning_rate": 0.00019134078212290504, + "loss": 2.2494, "step": 31 }, { - "epoch": 1.28, - "learning_rate": 0.00018720000000000002, - "loss": 2.2173, + "epoch": 0.18, + "learning_rate": 0.0001910614525139665, + "loss": 2.1995, "step": 32 }, { - "epoch": 1.32, - "learning_rate": 0.00018680000000000001, - "loss": 2.1185, + "epoch": 0.18, + "learning_rate": 0.00019078212290502794, + "loss": 2.1451, "step": 33 }, { - "epoch": 1.36, - "learning_rate": 0.00018640000000000003, - "loss": 1.988, + "epoch": 0.19, + "learning_rate": 0.0001905027932960894, + "loss": 2.223, "step": 34 }, { - "epoch": 1.4, - "learning_rate": 0.00018600000000000002, - "loss": 1.9493, + "epoch": 0.2, + "learning_rate": 0.00019022346368715085, + "loss": 2.2854, "step": 35 }, { - "epoch": 1.44, - "learning_rate": 0.0001856, - "loss": 1.9947, + "epoch": 0.2, + "learning_rate": 0.0001899441340782123, + "loss": 2.2265, "step": 36 }, { - "epoch": 1.48, - "learning_rate": 0.00018520000000000003, - "loss": 2.0506, + "epoch": 0.21, + "learning_rate": 0.00018966480446927375, + "loss": 2.1214, "step": 37 }, { - "epoch": 1.52, - "learning_rate": 0.00018480000000000002, - "loss": 2.0829, + "epoch": 0.21, + "learning_rate": 0.0001893854748603352, + "loss": 2.1898, "step": 38 }, { - "epoch": 1.56, - "learning_rate": 0.0001844, - "loss": 2.1455, + "epoch": 0.22, + "learning_rate": 0.00018910614525139666, + "loss": 2.1974, "step": 39 }, { - "epoch": 1.6, - "learning_rate": 0.00018400000000000003, - "loss": 2.0304, + "epoch": 0.22, + "learning_rate": 0.0001888268156424581, + "loss": 2.2259, "step": 40 }, { - "epoch": 1.64, - "learning_rate": 
0.00018360000000000002, - "loss": 2.006, + "epoch": 0.23, + "learning_rate": 0.00018854748603351957, + "loss": 2.2094, "step": 41 }, { - "epoch": 1.68, - "learning_rate": 0.0001832, - "loss": 2.1759, + "epoch": 0.23, + "learning_rate": 0.00018826815642458103, + "loss": 2.1731, "step": 42 }, { - "epoch": 1.72, - "learning_rate": 0.00018280000000000003, - "loss": 2.0093, + "epoch": 0.24, + "learning_rate": 0.00018798882681564247, + "loss": 2.2373, "step": 43 }, { - "epoch": 1.76, - "learning_rate": 0.00018240000000000002, - "loss": 2.0683, + "epoch": 0.25, + "learning_rate": 0.0001877094972067039, + "loss": 2.2295, "step": 44 }, { - "epoch": 1.8, - "learning_rate": 0.000182, - "loss": 2.1352, + "epoch": 0.25, + "learning_rate": 0.00018743016759776538, + "loss": 2.1947, "step": 45 }, { - "epoch": 1.84, - "learning_rate": 0.00018160000000000002, - "loss": 1.9736, + "epoch": 0.26, + "learning_rate": 0.00018715083798882682, + "loss": 2.2115, "step": 46 }, { - "epoch": 1.88, - "learning_rate": 0.0001812, - "loss": 1.9631, + "epoch": 0.26, + "learning_rate": 0.00018687150837988828, + "loss": 2.1224, "step": 47 }, { - "epoch": 1.92, - "learning_rate": 0.0001808, - "loss": 2.1024, + "epoch": 0.27, + "learning_rate": 0.00018659217877094972, + "loss": 2.2137, "step": 48 }, { - "epoch": 1.96, - "learning_rate": 0.00018040000000000002, - "loss": 1.9895, + "epoch": 0.27, + "learning_rate": 0.00018631284916201116, + "loss": 2.2338, "step": 49 }, { - "epoch": 2.0, - "learning_rate": 0.00018, - "loss": 2.0067, + "epoch": 0.28, + "learning_rate": 0.00018603351955307266, + "loss": 2.1298, "step": 50 }, { - "epoch": 2.04, - "learning_rate": 0.0001796, - "loss": 2.0509, + "epoch": 0.28, + "learning_rate": 0.0001857541899441341, + "loss": 2.0883, "step": 51 }, { - "epoch": 2.08, - "learning_rate": 0.00017920000000000002, - "loss": 1.8795, + "epoch": 0.29, + "learning_rate": 0.00018547486033519553, + "loss": 2.1216, "step": 52 }, { - "epoch": 2.12, - "learning_rate": 0.0001788, - "loss": 2.0633, + "epoch": 0.3, + "learning_rate": 0.000185195530726257, + "loss": 2.2112, "step": 53 }, { - "epoch": 2.16, - "learning_rate": 0.0001784, - "loss": 1.9998, + "epoch": 0.3, + "learning_rate": 0.00018491620111731844, + "loss": 2.1224, "step": 54 }, { - "epoch": 2.2, - "learning_rate": 0.00017800000000000002, - "loss": 2.0675, + "epoch": 0.31, + "learning_rate": 0.0001846368715083799, + "loss": 2.2375, "step": 55 }, { - "epoch": 2.24, - "learning_rate": 0.0001776, - "loss": 2.0129, + "epoch": 0.31, + "learning_rate": 0.00018435754189944135, + "loss": 2.2235, "step": 56 }, { - "epoch": 2.28, - "learning_rate": 0.0001772, - "loss": 1.9302, + "epoch": 0.32, + "learning_rate": 0.00018407821229050279, + "loss": 2.1682, "step": 57 }, { - "epoch": 2.32, - "learning_rate": 0.00017680000000000001, - "loss": 2.0472, + "epoch": 0.32, + "learning_rate": 0.00018379888268156425, + "loss": 2.2077, "step": 58 }, { - "epoch": 2.36, - "learning_rate": 0.0001764, - "loss": 2.0031, + "epoch": 0.33, + "learning_rate": 0.00018351955307262572, + "loss": 2.1596, "step": 59 }, { - "epoch": 2.4, - "learning_rate": 0.00017600000000000002, - "loss": 2.0231, + "epoch": 0.34, + "learning_rate": 0.00018324022346368716, + "loss": 2.1311, "step": 60 }, { - "epoch": 2.44, - "learning_rate": 0.0001756, - "loss": 2.0192, + "epoch": 0.34, + "learning_rate": 0.00018296089385474862, + "loss": 2.1333, "step": 61 }, { - "epoch": 2.48, - "learning_rate": 0.0001752, - "loss": 2.1336, + "epoch": 0.35, + "learning_rate": 0.00018268156424581006, + "loss": 2.0901, 
"step": 62 }, { - "epoch": 2.52, - "learning_rate": 0.00017480000000000002, - "loss": 1.7753, + "epoch": 0.35, + "learning_rate": 0.00018240223463687153, + "loss": 2.1971, "step": 63 }, { - "epoch": 2.56, - "learning_rate": 0.0001744, - "loss": 2.0274, + "epoch": 0.36, + "learning_rate": 0.00018212290502793297, + "loss": 2.2602, "step": 64 }, { - "epoch": 2.6, - "learning_rate": 0.000174, - "loss": 1.8893, + "epoch": 0.36, + "learning_rate": 0.0001818435754189944, + "loss": 2.2194, "step": 65 }, { - "epoch": 2.64, - "learning_rate": 0.00017360000000000002, - "loss": 2.0341, + "epoch": 0.37, + "learning_rate": 0.00018156424581005588, + "loss": 2.1218, "step": 66 }, { - "epoch": 2.68, - "learning_rate": 0.0001732, - "loss": 2.1081, + "epoch": 0.37, + "learning_rate": 0.00018128491620111731, + "loss": 2.2049, "step": 67 }, { - "epoch": 2.72, - "learning_rate": 0.0001728, - "loss": 1.9463, + "epoch": 0.38, + "learning_rate": 0.00018100558659217878, + "loss": 2.1521, "step": 68 }, { - "epoch": 2.76, - "learning_rate": 0.00017240000000000002, - "loss": 2.0607, + "epoch": 0.39, + "learning_rate": 0.00018072625698324025, + "loss": 2.112, "step": 69 }, { - "epoch": 2.8, - "learning_rate": 0.000172, - "loss": 1.9803, + "epoch": 0.39, + "learning_rate": 0.0001804469273743017, + "loss": 2.1906, "step": 70 }, { - "epoch": 2.84, - "learning_rate": 0.0001716, - "loss": 1.7663, + "epoch": 0.4, + "learning_rate": 0.00018016759776536313, + "loss": 2.1717, "step": 71 }, { - "epoch": 2.88, - "learning_rate": 0.00017120000000000001, - "loss": 2.1116, + "epoch": 0.4, + "learning_rate": 0.0001798882681564246, + "loss": 2.0712, "step": 72 }, { - "epoch": 2.92, - "learning_rate": 0.0001708, - "loss": 1.9039, + "epoch": 0.41, + "learning_rate": 0.00017960893854748603, + "loss": 2.141, "step": 73 }, { - "epoch": 2.96, - "learning_rate": 0.0001704, - "loss": 2.049, + "epoch": 0.41, + "learning_rate": 0.0001793296089385475, + "loss": 2.0656, "step": 74 }, { - "epoch": 3.0, - "learning_rate": 0.00017, - "loss": 1.991, + "epoch": 0.42, + "learning_rate": 0.00017905027932960894, + "loss": 2.1125, "step": 75 }, { - "epoch": 3.04, - "learning_rate": 0.0001696, - "loss": 1.8755, + "epoch": 0.42, + "learning_rate": 0.00017877094972067038, + "loss": 2.0869, "step": 76 }, { - "epoch": 3.08, - "learning_rate": 0.0001692, - "loss": 2.0367, + "epoch": 0.43, + "learning_rate": 0.00017849162011173187, + "loss": 2.2478, "step": 77 }, { - "epoch": 3.12, - "learning_rate": 0.0001688, - "loss": 1.9959, + "epoch": 0.44, + "learning_rate": 0.0001782122905027933, + "loss": 2.1535, "step": 78 }, { - "epoch": 3.16, - "learning_rate": 0.0001684, - "loss": 1.8051, + "epoch": 0.44, + "learning_rate": 0.00017793296089385475, + "loss": 2.1927, "step": 79 }, { - "epoch": 3.2, - "learning_rate": 0.000168, - "loss": 2.0446, + "epoch": 0.45, + "learning_rate": 0.00017765363128491622, + "loss": 2.1213, "step": 80 }, { - "epoch": 3.24, - "learning_rate": 0.0001676, - "loss": 1.7873, + "epoch": 0.45, + "learning_rate": 0.00017737430167597766, + "loss": 2.0981, "step": 81 }, { - "epoch": 3.28, - "learning_rate": 0.0001672, - "loss": 1.9264, + "epoch": 0.46, + "learning_rate": 0.00017709497206703912, + "loss": 2.1828, "step": 82 }, { - "epoch": 3.32, - "learning_rate": 0.0001668, - "loss": 1.8595, + "epoch": 0.46, + "learning_rate": 0.00017681564245810056, + "loss": 2.0562, "step": 83 }, { - "epoch": 3.36, - "learning_rate": 0.0001664, - "loss": 1.9992, + "epoch": 0.47, + "learning_rate": 0.000176536312849162, + "loss": 2.1334, "step": 84 }, { - "epoch": 
3.4, - "learning_rate": 0.000166, - "loss": 1.8783, + "epoch": 0.47, + "learning_rate": 0.00017625698324022347, + "loss": 2.1225, "step": 85 }, { - "epoch": 3.44, - "learning_rate": 0.0001656, - "loss": 2.0216, + "epoch": 0.48, + "learning_rate": 0.00017597765363128493, + "loss": 2.2098, "step": 86 }, { - "epoch": 3.48, - "learning_rate": 0.0001652, - "loss": 1.947, + "epoch": 0.49, + "learning_rate": 0.00017569832402234637, + "loss": 2.1519, "step": 87 }, { - "epoch": 3.52, - "learning_rate": 0.0001648, - "loss": 1.9554, + "epoch": 0.49, + "learning_rate": 0.00017541899441340784, + "loss": 2.1132, "step": 88 }, { - "epoch": 3.56, - "learning_rate": 0.0001644, - "loss": 1.8563, + "epoch": 0.5, + "learning_rate": 0.00017513966480446928, + "loss": 2.0333, "step": 89 }, { - "epoch": 3.6, - "learning_rate": 0.000164, - "loss": 1.7795, + "epoch": 0.5, + "learning_rate": 0.00017486033519553075, + "loss": 2.2764, "step": 90 }, { - "epoch": 3.64, - "learning_rate": 0.0001636, - "loss": 1.9347, + "epoch": 0.51, + "learning_rate": 0.00017458100558659218, + "loss": 2.1838, "step": 91 }, { - "epoch": 3.68, - "learning_rate": 0.0001632, - "loss": 2.0078, + "epoch": 0.51, + "learning_rate": 0.00017430167597765362, + "loss": 2.1386, "step": 92 }, { - "epoch": 3.72, - "learning_rate": 0.0001628, - "loss": 1.9964, + "epoch": 0.52, + "learning_rate": 0.0001740223463687151, + "loss": 2.1034, "step": 93 }, { - "epoch": 3.76, - "learning_rate": 0.00016240000000000002, - "loss": 1.9004, + "epoch": 0.53, + "learning_rate": 0.00017374301675977656, + "loss": 2.0346, "step": 94 }, { - "epoch": 3.8, - "learning_rate": 0.000162, - "loss": 1.9446, + "epoch": 0.53, + "learning_rate": 0.000173463687150838, + "loss": 2.0274, "step": 95 }, { - "epoch": 3.84, - "learning_rate": 0.00016160000000000002, - "loss": 1.8857, + "epoch": 0.54, + "learning_rate": 0.00017318435754189946, + "loss": 2.1036, "step": 96 }, { - "epoch": 3.88, - "learning_rate": 0.00016120000000000002, - "loss": 1.8797, + "epoch": 0.54, + "learning_rate": 0.0001729050279329609, + "loss": 2.1208, "step": 97 }, { - "epoch": 3.92, - "learning_rate": 0.0001608, - "loss": 1.7643, + "epoch": 0.55, + "learning_rate": 0.00017262569832402237, + "loss": 2.0572, "step": 98 }, { - "epoch": 3.96, - "learning_rate": 0.00016040000000000002, - "loss": 1.9724, + "epoch": 0.55, + "learning_rate": 0.0001723463687150838, + "loss": 2.1702, "step": 99 }, { - "epoch": 4.0, - "learning_rate": 0.00016, - "loss": 1.9058, + "epoch": 0.56, + "learning_rate": 0.00017206703910614525, + "loss": 2.1302, "step": 100 }, { - "epoch": 4.04, - "learning_rate": 0.0001596, - "loss": 1.8663, + "epoch": 0.56, + "learning_rate": 0.0001717877094972067, + "loss": 2.0175, "step": 101 }, { - "epoch": 4.08, - "learning_rate": 0.00015920000000000002, - "loss": 1.8457, + "epoch": 0.57, + "learning_rate": 0.00017150837988826815, + "loss": 2.1006, "step": 102 }, { - "epoch": 4.12, - "learning_rate": 0.0001588, - "loss": 1.8394, + "epoch": 0.58, + "learning_rate": 0.00017122905027932962, + "loss": 2.0662, "step": 103 }, { - "epoch": 4.16, - "learning_rate": 0.00015840000000000003, - "loss": 1.8941, + "epoch": 0.58, + "learning_rate": 0.00017094972067039109, + "loss": 1.988, "step": 104 }, { - "epoch": 4.2, - "learning_rate": 0.00015800000000000002, - "loss": 1.8601, + "epoch": 0.59, + "learning_rate": 0.00017067039106145253, + "loss": 2.1008, "step": 105 }, { - "epoch": 4.24, - "learning_rate": 0.0001576, - "loss": 1.7365, + "epoch": 0.59, + "learning_rate": 0.00017039106145251396, + "loss": 2.1482, 
"step": 106 }, { - "epoch": 4.28, - "learning_rate": 0.00015720000000000003, - "loss": 1.8809, + "epoch": 0.6, + "learning_rate": 0.00017011173184357543, + "loss": 2.1052, "step": 107 }, { - "epoch": 4.32, - "learning_rate": 0.00015680000000000002, - "loss": 2.0206, + "epoch": 0.6, + "learning_rate": 0.00016983240223463687, + "loss": 2.0978, "step": 108 }, { - "epoch": 4.36, - "learning_rate": 0.0001564, - "loss": 1.7776, + "epoch": 0.61, + "learning_rate": 0.00016955307262569834, + "loss": 2.1303, "step": 109 }, { - "epoch": 4.4, - "learning_rate": 0.00015600000000000002, - "loss": 1.9089, + "epoch": 0.61, + "learning_rate": 0.00016927374301675978, + "loss": 2.0794, "step": 110 }, { - "epoch": 4.44, - "learning_rate": 0.00015560000000000001, - "loss": 1.8464, + "epoch": 0.62, + "learning_rate": 0.00016899441340782122, + "loss": 2.1059, "step": 111 }, { - "epoch": 4.48, - "learning_rate": 0.0001552, - "loss": 1.7943, + "epoch": 0.63, + "learning_rate": 0.0001687150837988827, + "loss": 1.9642, "step": 112 }, { - "epoch": 4.52, - "learning_rate": 0.00015480000000000002, - "loss": 1.8081, + "epoch": 0.63, + "learning_rate": 0.00016843575418994415, + "loss": 2.0415, "step": 113 }, { - "epoch": 4.56, - "learning_rate": 0.0001544, - "loss": 1.734, + "epoch": 0.64, + "learning_rate": 0.0001681564245810056, + "loss": 2.0795, "step": 114 }, { - "epoch": 4.6, - "learning_rate": 0.000154, - "loss": 1.8379, + "epoch": 0.64, + "learning_rate": 0.00016787709497206705, + "loss": 2.0238, "step": 115 }, { - "epoch": 4.64, - "learning_rate": 0.00015360000000000002, - "loss": 1.6841, + "epoch": 0.65, + "learning_rate": 0.0001675977653631285, + "loss": 2.078, "step": 116 }, { - "epoch": 4.68, - "learning_rate": 0.0001532, - "loss": 1.8253, + "epoch": 0.65, + "learning_rate": 0.00016731843575418996, + "loss": 2.1362, "step": 117 }, { - "epoch": 4.72, - "learning_rate": 0.0001528, - "loss": 1.7607, + "epoch": 0.66, + "learning_rate": 0.0001670391061452514, + "loss": 2.0552, "step": 118 }, { - "epoch": 4.76, - "learning_rate": 0.00015240000000000002, - "loss": 1.8205, + "epoch": 0.66, + "learning_rate": 0.00016675977653631284, + "loss": 2.213, "step": 119 }, { - "epoch": 4.8, - "learning_rate": 0.000152, - "loss": 1.8443, + "epoch": 0.67, + "learning_rate": 0.0001664804469273743, + "loss": 1.9999, "step": 120 }, { - "epoch": 4.84, - "learning_rate": 0.0001516, - "loss": 1.8824, + "epoch": 0.68, + "learning_rate": 0.00016620111731843577, + "loss": 2.06, "step": 121 }, { - "epoch": 4.88, - "learning_rate": 0.00015120000000000002, - "loss": 1.8369, + "epoch": 0.68, + "learning_rate": 0.0001659217877094972, + "loss": 2.0177, "step": 122 }, { - "epoch": 4.92, - "learning_rate": 0.0001508, - "loss": 1.8017, + "epoch": 0.69, + "learning_rate": 0.00016564245810055868, + "loss": 2.0504, "step": 123 }, { - "epoch": 4.96, - "learning_rate": 0.0001504, - "loss": 1.7858, + "epoch": 0.69, + "learning_rate": 0.00016536312849162012, + "loss": 2.0585, "step": 124 }, { - "epoch": 5.0, - "learning_rate": 0.00015000000000000001, - "loss": 1.8366, + "epoch": 0.7, + "learning_rate": 0.00016508379888268158, + "loss": 2.0273, "step": 125 }, { - "epoch": 5.04, - "learning_rate": 0.0001496, - "loss": 1.7353, + "epoch": 0.7, + "learning_rate": 0.00016480446927374302, + "loss": 2.0549, "step": 126 }, { - "epoch": 5.08, - "learning_rate": 0.0001492, - "loss": 1.6017, + "epoch": 0.71, + "learning_rate": 0.00016452513966480446, + "loss": 2.044, "step": 127 }, { - "epoch": 5.12, - "learning_rate": 0.0001488, - "loss": 1.6645, + "epoch": 0.72, 
+ "learning_rate": 0.00016424581005586593, + "loss": 2.0731, "step": 128 }, { - "epoch": 5.16, - "learning_rate": 0.0001484, - "loss": 1.8663, + "epoch": 0.72, + "learning_rate": 0.00016396648044692737, + "loss": 2.0568, "step": 129 }, { - "epoch": 5.2, - "learning_rate": 0.000148, - "loss": 1.7009, + "epoch": 0.73, + "learning_rate": 0.00016368715083798883, + "loss": 2.007, "step": 130 }, { - "epoch": 5.24, - "learning_rate": 0.0001476, - "loss": 1.7371, + "epoch": 0.73, + "learning_rate": 0.0001634078212290503, + "loss": 2.0707, "step": 131 }, { - "epoch": 5.28, - "learning_rate": 0.0001472, - "loss": 1.8555, + "epoch": 0.74, + "learning_rate": 0.00016312849162011174, + "loss": 1.9793, "step": 132 }, { - "epoch": 5.32, - "learning_rate": 0.00014680000000000002, - "loss": 1.6373, + "epoch": 0.74, + "learning_rate": 0.0001628491620111732, + "loss": 2.1311, "step": 133 }, { - "epoch": 5.36, - "learning_rate": 0.0001464, - "loss": 1.7211, + "epoch": 0.75, + "learning_rate": 0.00016256983240223465, + "loss": 2.0016, "step": 134 }, { - "epoch": 5.4, - "learning_rate": 0.000146, - "loss": 1.614, + "epoch": 0.75, + "learning_rate": 0.00016229050279329609, + "loss": 1.9945, "step": 135 }, { - "epoch": 5.44, - "learning_rate": 0.00014560000000000002, - "loss": 1.8736, + "epoch": 0.76, + "learning_rate": 0.00016201117318435755, + "loss": 2.0186, "step": 136 }, { - "epoch": 5.48, - "learning_rate": 0.0001452, - "loss": 1.7229, + "epoch": 0.77, + "learning_rate": 0.000161731843575419, + "loss": 2.0971, "step": 137 }, { - "epoch": 5.52, - "learning_rate": 0.0001448, - "loss": 1.7315, + "epoch": 0.77, + "learning_rate": 0.00016145251396648046, + "loss": 2.0883, "step": 138 }, { - "epoch": 5.56, - "learning_rate": 0.0001444, - "loss": 1.7259, + "epoch": 0.78, + "learning_rate": 0.00016117318435754192, + "loss": 2.0803, "step": 139 }, { - "epoch": 5.6, - "learning_rate": 0.000144, - "loss": 1.7032, + "epoch": 0.78, + "learning_rate": 0.00016089385474860336, + "loss": 2.0617, "step": 140 }, { - "epoch": 5.64, - "learning_rate": 0.0001436, - "loss": 1.9237, + "epoch": 0.79, + "learning_rate": 0.00016061452513966483, + "loss": 2.1265, "step": 141 }, { - "epoch": 5.68, - "learning_rate": 0.0001432, - "loss": 1.7821, + "epoch": 0.79, + "learning_rate": 0.00016033519553072627, + "loss": 2.0151, "step": 142 }, { - "epoch": 5.72, - "learning_rate": 0.0001428, - "loss": 1.6081, + "epoch": 0.8, + "learning_rate": 0.0001600558659217877, + "loss": 1.996, "step": 143 }, { - "epoch": 5.76, - "learning_rate": 0.0001424, - "loss": 1.7979, + "epoch": 0.8, + "learning_rate": 0.00015977653631284918, + "loss": 2.0164, "step": 144 }, { - "epoch": 5.8, - "learning_rate": 0.000142, - "loss": 1.6675, + "epoch": 0.81, + "learning_rate": 0.00015949720670391061, + "loss": 2.0314, "step": 145 }, { - "epoch": 5.84, - "learning_rate": 0.0001416, - "loss": 1.6758, + "epoch": 0.82, + "learning_rate": 0.00015921787709497208, + "loss": 1.9501, "step": 146 }, { - "epoch": 5.88, - "learning_rate": 0.0001412, - "loss": 1.7783, + "epoch": 0.82, + "learning_rate": 0.00015893854748603352, + "loss": 2.087, "step": 147 }, { - "epoch": 5.92, - "learning_rate": 0.0001408, - "loss": 1.5935, + "epoch": 0.83, + "learning_rate": 0.000158659217877095, + "loss": 2.0262, "step": 148 }, { - "epoch": 5.96, - "learning_rate": 0.0001404, - "loss": 1.7372, + "epoch": 0.83, + "learning_rate": 0.00015837988826815643, + "loss": 2.0765, "step": 149 }, { - "epoch": 6.0, - "learning_rate": 0.00014, - "loss": 1.6308, + "epoch": 0.84, + "learning_rate": 
0.0001581005586592179, + "loss": 2.105, "step": 150 }, { - "epoch": 6.04, - "learning_rate": 0.0001396, - "loss": 1.5829, + "epoch": 0.84, + "learning_rate": 0.00015782122905027933, + "loss": 1.9863, "step": 151 }, { - "epoch": 6.08, - "learning_rate": 0.0001392, - "loss": 1.6538, + "epoch": 0.85, + "learning_rate": 0.0001575418994413408, + "loss": 1.9873, "step": 152 }, { - "epoch": 6.12, - "learning_rate": 0.00013879999999999999, - "loss": 1.7624, + "epoch": 0.85, + "learning_rate": 0.00015726256983240224, + "loss": 2.0094, "step": 153 }, { - "epoch": 6.16, - "learning_rate": 0.0001384, - "loss": 1.4968, + "epoch": 0.86, + "learning_rate": 0.00015698324022346368, + "loss": 1.9141, "step": 154 }, { - "epoch": 6.2, - "learning_rate": 0.000138, - "loss": 1.7305, + "epoch": 0.87, + "learning_rate": 0.00015670391061452514, + "loss": 1.917, "step": 155 }, { - "epoch": 6.24, - "learning_rate": 0.00013759999999999998, - "loss": 1.6536, + "epoch": 0.87, + "learning_rate": 0.00015642458100558658, + "loss": 2.109, "step": 156 }, { - "epoch": 6.28, - "learning_rate": 0.00013720000000000003, - "loss": 1.654, + "epoch": 0.88, + "learning_rate": 0.00015614525139664805, + "loss": 1.9799, "step": 157 }, { - "epoch": 6.32, - "learning_rate": 0.00013680000000000002, - "loss": 1.7163, + "epoch": 0.88, + "learning_rate": 0.00015586592178770952, + "loss": 1.9571, "step": 158 }, { - "epoch": 6.36, - "learning_rate": 0.0001364, - "loss": 1.6599, + "epoch": 0.89, + "learning_rate": 0.00015558659217877096, + "loss": 1.9931, "step": 159 }, { - "epoch": 6.4, - "learning_rate": 0.00013600000000000003, - "loss": 1.5729, + "epoch": 0.89, + "learning_rate": 0.00015530726256983242, + "loss": 2.1004, "step": 160 }, { - "epoch": 6.44, - "learning_rate": 0.00013560000000000002, - "loss": 1.5291, + "epoch": 0.9, + "learning_rate": 0.00015502793296089386, + "loss": 2.0385, "step": 161 }, { - "epoch": 6.48, - "learning_rate": 0.0001352, - "loss": 1.5666, + "epoch": 0.91, + "learning_rate": 0.0001547486033519553, + "loss": 1.9751, "step": 162 }, { - "epoch": 6.52, - "learning_rate": 0.00013480000000000002, - "loss": 1.7576, + "epoch": 0.91, + "learning_rate": 0.00015446927374301677, + "loss": 2.0544, "step": 163 }, { - "epoch": 6.56, - "learning_rate": 0.00013440000000000001, - "loss": 1.4462, + "epoch": 0.92, + "learning_rate": 0.0001541899441340782, + "loss": 2.0069, "step": 164 }, { - "epoch": 6.6, - "learning_rate": 0.000134, - "loss": 1.5659, + "epoch": 0.92, + "learning_rate": 0.00015391061452513967, + "loss": 1.9576, "step": 165 }, { - "epoch": 6.64, - "learning_rate": 0.00013360000000000002, - "loss": 1.6234, + "epoch": 0.93, + "learning_rate": 0.00015363128491620114, + "loss": 1.8991, "step": 166 }, { - "epoch": 6.68, - "learning_rate": 0.0001332, - "loss": 1.5176, + "epoch": 0.93, + "learning_rate": 0.00015335195530726258, + "loss": 1.9336, "step": 167 }, { - "epoch": 6.72, - "learning_rate": 0.0001328, - "loss": 1.667, + "epoch": 0.94, + "learning_rate": 0.00015307262569832405, + "loss": 1.9736, "step": 168 }, { - "epoch": 6.76, - "learning_rate": 0.00013240000000000002, - "loss": 1.6514, + "epoch": 0.94, + "learning_rate": 0.00015279329608938548, + "loss": 1.9702, "step": 169 }, { - "epoch": 6.8, - "learning_rate": 0.000132, - "loss": 1.7179, + "epoch": 0.95, + "learning_rate": 0.00015251396648044692, + "loss": 1.9055, "step": 170 }, { - "epoch": 6.84, - "learning_rate": 0.0001316, - "loss": 1.4373, + "epoch": 0.96, + "learning_rate": 0.0001522346368715084, + "loss": 2.0503, "step": 171 }, { - "epoch": 6.88, - 
"learning_rate": 0.00013120000000000002, - "loss": 1.7044, + "epoch": 0.96, + "learning_rate": 0.00015195530726256983, + "loss": 2.0039, "step": 172 }, { - "epoch": 6.92, - "learning_rate": 0.0001308, - "loss": 1.5673, + "epoch": 0.97, + "learning_rate": 0.0001516759776536313, + "loss": 1.9406, "step": 173 }, { - "epoch": 6.96, - "learning_rate": 0.0001304, - "loss": 1.6303, + "epoch": 0.97, + "learning_rate": 0.00015139664804469274, + "loss": 2.0525, "step": 174 }, { - "epoch": 7.0, - "learning_rate": 0.00013000000000000002, - "loss": 1.4828, + "epoch": 0.98, + "learning_rate": 0.0001511173184357542, + "loss": 1.9234, "step": 175 }, { - "epoch": 7.04, - "learning_rate": 0.0001296, - "loss": 1.6204, + "epoch": 0.98, + "learning_rate": 0.00015083798882681567, + "loss": 1.8614, "step": 176 }, { - "epoch": 7.08, - "learning_rate": 0.00012920000000000002, - "loss": 1.4955, + "epoch": 0.99, + "learning_rate": 0.0001505586592178771, + "loss": 1.9616, "step": 177 }, { - "epoch": 7.12, - "learning_rate": 0.00012880000000000001, - "loss": 1.6109, + "epoch": 0.99, + "learning_rate": 0.00015027932960893855, + "loss": 1.9509, "step": 178 }, { - "epoch": 7.16, - "learning_rate": 0.0001284, - "loss": 1.5318, + "epoch": 1.0, + "learning_rate": 0.00015000000000000001, + "loss": 1.9592, "step": 179 }, { - "epoch": 7.2, - "learning_rate": 0.00012800000000000002, - "loss": 1.5702, + "epoch": 1.01, + "learning_rate": 0.00014972067039106145, + "loss": 1.8991, "step": 180 }, { - "epoch": 7.24, - "learning_rate": 0.0001276, - "loss": 1.4598, + "epoch": 1.01, + "learning_rate": 0.00014944134078212292, + "loss": 1.9127, "step": 181 }, { - "epoch": 7.28, - "learning_rate": 0.0001272, - "loss": 1.6145, + "epoch": 1.02, + "learning_rate": 0.00014916201117318436, + "loss": 1.8982, "step": 182 }, { - "epoch": 7.32, - "learning_rate": 0.00012680000000000002, - "loss": 1.3628, + "epoch": 1.02, + "learning_rate": 0.0001488826815642458, + "loss": 1.9534, "step": 183 }, { - "epoch": 7.36, - "learning_rate": 0.0001264, - "loss": 1.4578, + "epoch": 1.03, + "learning_rate": 0.0001486033519553073, + "loss": 1.7794, "step": 184 }, { - "epoch": 7.4, - "learning_rate": 0.000126, - "loss": 1.7094, + "epoch": 1.03, + "learning_rate": 0.00014832402234636873, + "loss": 1.7958, "step": 185 }, { - "epoch": 7.44, - "learning_rate": 0.00012560000000000002, - "loss": 1.4376, + "epoch": 1.04, + "learning_rate": 0.00014804469273743017, + "loss": 1.8282, "step": 186 }, { - "epoch": 7.48, - "learning_rate": 0.0001252, - "loss": 1.3393, + "epoch": 1.04, + "learning_rate": 0.00014776536312849164, + "loss": 2.0423, "step": 187 }, { - "epoch": 7.52, - "learning_rate": 0.0001248, - "loss": 1.4273, + "epoch": 1.05, + "learning_rate": 0.00014748603351955308, + "loss": 1.9282, "step": 188 }, { - "epoch": 7.56, - "learning_rate": 0.00012440000000000002, - "loss": 1.5506, + "epoch": 1.06, + "learning_rate": 0.00014720670391061454, + "loss": 1.9072, "step": 189 }, { - "epoch": 7.6, - "learning_rate": 0.000124, - "loss": 1.4774, + "epoch": 1.06, + "learning_rate": 0.00014692737430167598, + "loss": 1.8665, "step": 190 }, { - "epoch": 7.64, - "learning_rate": 0.0001236, - "loss": 1.4632, + "epoch": 1.07, + "learning_rate": 0.00014664804469273742, + "loss": 1.9021, "step": 191 }, { - "epoch": 7.68, - "learning_rate": 0.0001232, - "loss": 1.4568, + "epoch": 1.07, + "learning_rate": 0.0001463687150837989, + "loss": 1.7308, "step": 192 }, { - "epoch": 7.72, - "learning_rate": 0.0001228, - "loss": 1.6106, + "epoch": 1.08, + "learning_rate": 
0.00014608938547486035, + "loss": 1.9165, "step": 193 }, { - "epoch": 7.76, - "learning_rate": 0.0001224, - "loss": 1.577, + "epoch": 1.08, + "learning_rate": 0.0001458100558659218, + "loss": 1.842, "step": 194 }, { - "epoch": 7.8, - "learning_rate": 0.000122, - "loss": 1.4805, + "epoch": 1.09, + "learning_rate": 0.00014553072625698326, + "loss": 1.9128, "step": 195 }, { - "epoch": 7.84, - "learning_rate": 0.0001216, - "loss": 1.4546, + "epoch": 1.09, + "learning_rate": 0.0001452513966480447, + "loss": 1.8005, "step": 196 }, { - "epoch": 7.88, - "learning_rate": 0.0001212, - "loss": 1.5164, + "epoch": 1.1, + "learning_rate": 0.00014497206703910614, + "loss": 1.8547, "step": 197 }, { - "epoch": 7.92, - "learning_rate": 0.0001208, - "loss": 1.5061, + "epoch": 1.11, + "learning_rate": 0.0001446927374301676, + "loss": 1.9042, "step": 198 }, { - "epoch": 7.96, - "learning_rate": 0.0001204, - "loss": 1.5312, + "epoch": 1.11, + "learning_rate": 0.00014441340782122905, + "loss": 1.8609, "step": 199 }, { - "epoch": 8.0, - "learning_rate": 0.00012, - "loss": 1.4463, + "epoch": 1.12, + "learning_rate": 0.0001441340782122905, + "loss": 1.9591, "step": 200 }, { - "epoch": 8.04, - "learning_rate": 0.00011960000000000001, - "loss": 1.4207, + "epoch": 1.12, + "learning_rate": 0.00014385474860335195, + "loss": 1.8722, "step": 201 }, { - "epoch": 8.08, - "learning_rate": 0.0001192, - "loss": 1.4688, + "epoch": 1.13, + "learning_rate": 0.00014357541899441342, + "loss": 1.8535, "step": 202 }, { - "epoch": 8.12, - "learning_rate": 0.0001188, - "loss": 1.4445, + "epoch": 1.13, + "learning_rate": 0.00014329608938547488, + "loss": 1.8676, "step": 203 }, { - "epoch": 8.16, - "learning_rate": 0.0001184, - "loss": 1.4158, + "epoch": 1.14, + "learning_rate": 0.00014301675977653632, + "loss": 1.8976, "step": 204 }, { - "epoch": 8.2, - "learning_rate": 0.000118, - "loss": 1.3483, + "epoch": 1.15, + "learning_rate": 0.00014273743016759776, + "loss": 1.7723, "step": 205 }, { - "epoch": 8.24, - "learning_rate": 0.0001176, - "loss": 1.4117, + "epoch": 1.15, + "learning_rate": 0.00014245810055865923, + "loss": 1.8165, "step": 206 }, { - "epoch": 8.28, - "learning_rate": 0.0001172, - "loss": 1.4267, + "epoch": 1.16, + "learning_rate": 0.00014217877094972067, + "loss": 1.7811, "step": 207 }, { - "epoch": 8.32, - "learning_rate": 0.00011679999999999999, - "loss": 1.5547, + "epoch": 1.16, + "learning_rate": 0.00014189944134078214, + "loss": 1.908, "step": 208 }, { - "epoch": 8.36, - "learning_rate": 0.0001164, - "loss": 1.4883, + "epoch": 1.17, + "learning_rate": 0.00014162011173184357, + "loss": 1.7663, "step": 209 }, { - "epoch": 8.4, - "learning_rate": 0.000116, - "loss": 1.414, + "epoch": 1.17, + "learning_rate": 0.00014134078212290501, + "loss": 1.6779, "step": 210 }, { - "epoch": 8.44, - "learning_rate": 0.00011559999999999999, - "loss": 1.3254, + "epoch": 1.18, + "learning_rate": 0.0001410614525139665, + "loss": 1.9039, "step": 211 }, { - "epoch": 8.48, - "learning_rate": 0.0001152, - "loss": 1.22, + "epoch": 1.18, + "learning_rate": 0.00014078212290502795, + "loss": 1.8033, "step": 212 }, { - "epoch": 8.52, - "learning_rate": 0.0001148, - "loss": 1.3224, + "epoch": 1.19, + "learning_rate": 0.00014050279329608939, + "loss": 1.8251, "step": 213 }, { - "epoch": 8.56, - "learning_rate": 0.0001144, - "loss": 1.6158, + "epoch": 1.2, + "learning_rate": 0.00014022346368715085, + "loss": 1.8505, "step": 214 }, { - "epoch": 8.6, - "learning_rate": 0.00011399999999999999, - "loss": 1.2817, + "epoch": 1.2, + "learning_rate": 
0.0001399441340782123, + "loss": 1.8147, "step": 215 }, { - "epoch": 8.64, - "learning_rate": 0.0001136, - "loss": 1.4077, + "epoch": 1.21, + "learning_rate": 0.00013966480446927376, + "loss": 1.8419, "step": 216 }, { - "epoch": 8.68, - "learning_rate": 0.0001132, - "loss": 1.5311, + "epoch": 1.21, + "learning_rate": 0.0001393854748603352, + "loss": 1.8401, "step": 217 }, { - "epoch": 8.72, - "learning_rate": 0.00011279999999999999, - "loss": 1.5289, + "epoch": 1.22, + "learning_rate": 0.00013910614525139664, + "loss": 1.8912, "step": 218 }, { - "epoch": 8.76, - "learning_rate": 0.00011240000000000002, - "loss": 1.4576, + "epoch": 1.22, + "learning_rate": 0.0001388268156424581, + "loss": 1.7548, "step": 219 }, { - "epoch": 8.8, - "learning_rate": 0.00011200000000000001, - "loss": 1.2063, + "epoch": 1.23, + "learning_rate": 0.00013854748603351957, + "loss": 1.8741, "step": 220 }, { - "epoch": 8.84, - "learning_rate": 0.00011160000000000002, - "loss": 1.3543, + "epoch": 1.23, + "learning_rate": 0.000138268156424581, + "loss": 1.9549, "step": 221 }, { - "epoch": 8.88, - "learning_rate": 0.00011120000000000002, - "loss": 1.4352, + "epoch": 1.24, + "learning_rate": 0.00013798882681564248, + "loss": 1.9093, "step": 222 }, { - "epoch": 8.92, - "learning_rate": 0.00011080000000000001, - "loss": 1.3058, + "epoch": 1.25, + "learning_rate": 0.00013770949720670392, + "loss": 1.7896, "step": 223 }, { - "epoch": 8.96, - "learning_rate": 0.00011040000000000001, - "loss": 1.3475, + "epoch": 1.25, + "learning_rate": 0.00013743016759776538, + "loss": 1.8491, "step": 224 }, { - "epoch": 9.0, - "learning_rate": 0.00011000000000000002, - "loss": 1.1293, + "epoch": 1.26, + "learning_rate": 0.00013715083798882682, + "loss": 1.7851, "step": 225 }, { - "epoch": 9.04, - "learning_rate": 0.00010960000000000001, - "loss": 1.1671, + "epoch": 1.26, + "learning_rate": 0.00013687150837988826, + "loss": 1.6992, "step": 226 }, { - "epoch": 9.08, - "learning_rate": 0.00010920000000000001, - "loss": 1.4726, + "epoch": 1.27, + "learning_rate": 0.00013659217877094973, + "loss": 1.9765, "step": 227 }, { - "epoch": 9.12, - "learning_rate": 0.00010880000000000002, - "loss": 1.3986, + "epoch": 1.27, + "learning_rate": 0.00013631284916201117, + "loss": 1.8179, "step": 228 }, { - "epoch": 9.16, - "learning_rate": 0.00010840000000000002, - "loss": 1.2731, + "epoch": 1.28, + "learning_rate": 0.00013603351955307263, + "loss": 1.8548, "step": 229 }, { - "epoch": 9.2, - "learning_rate": 0.00010800000000000001, - "loss": 1.2212, + "epoch": 1.28, + "learning_rate": 0.0001357541899441341, + "loss": 1.8843, "step": 230 }, { - "epoch": 9.24, - "learning_rate": 0.00010760000000000001, - "loss": 1.3067, + "epoch": 1.29, + "learning_rate": 0.00013547486033519554, + "loss": 1.9105, "step": 231 }, { - "epoch": 9.28, - "learning_rate": 0.00010720000000000002, - "loss": 1.278, + "epoch": 1.3, + "learning_rate": 0.00013519553072625698, + "loss": 1.8748, "step": 232 }, { - "epoch": 9.32, - "learning_rate": 0.00010680000000000001, - "loss": 1.4141, + "epoch": 1.3, + "learning_rate": 0.00013491620111731844, + "loss": 1.7976, "step": 233 }, { - "epoch": 9.36, - "learning_rate": 0.00010640000000000001, - "loss": 1.2885, + "epoch": 1.31, + "learning_rate": 0.00013463687150837988, + "loss": 1.7369, "step": 234 }, { - "epoch": 9.4, - "learning_rate": 0.00010600000000000002, - "loss": 1.466, + "epoch": 1.31, + "learning_rate": 0.00013435754189944135, + "loss": 1.7808, "step": 235 }, { - "epoch": 9.44, - "learning_rate": 0.0001056, - "loss": 1.2979, + "epoch": 
1.32, + "learning_rate": 0.0001340782122905028, + "loss": 1.8385, "step": 236 }, { - "epoch": 9.48, - "learning_rate": 0.00010520000000000001, - "loss": 1.1622, + "epoch": 1.32, + "learning_rate": 0.00013379888268156423, + "loss": 1.8295, "step": 237 }, { - "epoch": 9.52, - "learning_rate": 0.00010480000000000001, - "loss": 1.2504, + "epoch": 1.33, + "learning_rate": 0.00013351955307262572, + "loss": 1.757, "step": 238 }, { - "epoch": 9.56, - "learning_rate": 0.0001044, - "loss": 1.228, + "epoch": 1.34, + "learning_rate": 0.00013324022346368716, + "loss": 1.7904, "step": 239 }, { - "epoch": 9.6, - "learning_rate": 0.00010400000000000001, - "loss": 1.3607, + "epoch": 1.34, + "learning_rate": 0.0001329608938547486, + "loss": 1.7632, "step": 240 }, { - "epoch": 9.64, - "learning_rate": 0.00010360000000000001, - "loss": 1.1921, + "epoch": 1.35, + "learning_rate": 0.00013268156424581007, + "loss": 1.7867, "step": 241 }, { - "epoch": 9.68, - "learning_rate": 0.0001032, - "loss": 1.2114, + "epoch": 1.35, + "learning_rate": 0.0001324022346368715, + "loss": 1.8259, "step": 242 }, { - "epoch": 9.72, - "learning_rate": 0.0001028, - "loss": 1.2385, + "epoch": 1.36, + "learning_rate": 0.00013212290502793297, + "loss": 1.6655, "step": 243 }, { - "epoch": 9.76, - "learning_rate": 0.00010240000000000001, - "loss": 1.3227, + "epoch": 1.36, + "learning_rate": 0.0001318435754189944, + "loss": 1.6848, "step": 244 }, { - "epoch": 9.8, - "learning_rate": 0.00010200000000000001, - "loss": 1.3935, + "epoch": 1.37, + "learning_rate": 0.00013156424581005585, + "loss": 1.7931, "step": 245 }, { - "epoch": 9.84, - "learning_rate": 0.0001016, - "loss": 1.2209, + "epoch": 1.37, + "learning_rate": 0.00013128491620111732, + "loss": 1.7868, "step": 246 }, { - "epoch": 9.88, - "learning_rate": 0.00010120000000000001, - "loss": 1.1541, + "epoch": 1.38, + "learning_rate": 0.00013100558659217879, + "loss": 1.7732, "step": 247 }, { - "epoch": 9.92, - "learning_rate": 0.00010080000000000001, - "loss": 1.4049, + "epoch": 1.39, + "learning_rate": 0.00013072625698324022, + "loss": 1.7851, "step": 248 }, { - "epoch": 9.96, - "learning_rate": 0.0001004, - "loss": 1.13, + "epoch": 1.39, + "learning_rate": 0.0001304469273743017, + "loss": 1.7406, "step": 249 }, { - "epoch": 10.0, - "learning_rate": 0.0001, - "loss": 1.3783, + "epoch": 1.4, + "learning_rate": 0.00013016759776536313, + "loss": 1.5853, "step": 250 }, { - "epoch": 10.04, - "learning_rate": 9.960000000000001e-05, - "loss": 1.3207, + "epoch": 1.4, + "learning_rate": 0.0001298882681564246, + "loss": 1.8271, "step": 251 }, { - "epoch": 10.08, - "learning_rate": 9.92e-05, - "loss": 1.2697, + "epoch": 1.41, + "learning_rate": 0.00012960893854748604, + "loss": 1.6054, "step": 252 }, { - "epoch": 10.12, - "learning_rate": 9.88e-05, - "loss": 1.091, + "epoch": 1.41, + "learning_rate": 0.00012932960893854748, + "loss": 1.6884, "step": 253 }, { - "epoch": 10.16, - "learning_rate": 9.84e-05, - "loss": 1.0463, + "epoch": 1.42, + "learning_rate": 0.00012905027932960894, + "loss": 1.7333, "step": 254 }, { - "epoch": 10.2, - "learning_rate": 9.8e-05, - "loss": 1.3686, + "epoch": 1.42, + "learning_rate": 0.00012877094972067038, + "loss": 1.803, "step": 255 }, { - "epoch": 10.24, - "learning_rate": 9.76e-05, - "loss": 1.3109, + "epoch": 1.43, + "learning_rate": 0.00012849162011173185, + "loss": 1.63, "step": 256 }, { - "epoch": 10.28, - "learning_rate": 9.72e-05, - "loss": 1.2338, + "epoch": 1.44, + "learning_rate": 0.00012821229050279331, + "loss": 1.8023, "step": 257 }, { - "epoch": 
10.32, - "learning_rate": 9.680000000000001e-05, - "loss": 1.12, + "epoch": 1.44, + "learning_rate": 0.00012793296089385475, + "loss": 1.762, "step": 258 }, { - "epoch": 10.36, - "learning_rate": 9.64e-05, - "loss": 1.1864, + "epoch": 1.45, + "learning_rate": 0.00012765363128491622, + "loss": 1.7565, "step": 259 }, { - "epoch": 10.4, - "learning_rate": 9.6e-05, - "loss": 1.1062, + "epoch": 1.45, + "learning_rate": 0.00012737430167597766, + "loss": 1.5937, "step": 260 }, { - "epoch": 10.44, - "learning_rate": 9.56e-05, - "loss": 1.2924, + "epoch": 1.46, + "learning_rate": 0.0001270949720670391, + "loss": 1.7208, "step": 261 }, { - "epoch": 10.48, - "learning_rate": 9.52e-05, - "loss": 1.1931, + "epoch": 1.46, + "learning_rate": 0.00012681564245810057, + "loss": 1.8097, "step": 262 }, { - "epoch": 10.52, - "learning_rate": 9.48e-05, - "loss": 1.2366, + "epoch": 1.47, + "learning_rate": 0.000126536312849162, + "loss": 1.7042, "step": 263 }, { - "epoch": 10.56, - "learning_rate": 9.44e-05, - "loss": 1.1589, + "epoch": 1.47, + "learning_rate": 0.00012625698324022347, + "loss": 1.7892, "step": 264 }, { - "epoch": 10.6, - "learning_rate": 9.4e-05, - "loss": 1.1197, + "epoch": 1.48, + "learning_rate": 0.00012597765363128494, + "loss": 1.752, "step": 265 }, { - "epoch": 10.64, - "learning_rate": 9.360000000000001e-05, - "loss": 1.3072, + "epoch": 1.49, + "learning_rate": 0.00012569832402234638, + "loss": 1.7333, "step": 266 }, { - "epoch": 10.68, - "learning_rate": 9.320000000000002e-05, - "loss": 1.1173, + "epoch": 1.49, + "learning_rate": 0.00012541899441340784, + "loss": 1.7492, "step": 267 }, { - "epoch": 10.72, - "learning_rate": 9.28e-05, - "loss": 1.0378, + "epoch": 1.5, + "learning_rate": 0.00012513966480446928, + "loss": 1.818, "step": 268 }, { - "epoch": 10.76, - "learning_rate": 9.240000000000001e-05, - "loss": 1.1746, + "epoch": 1.5, + "learning_rate": 0.00012486033519553072, + "loss": 1.7146, "step": 269 }, { - "epoch": 10.8, - "learning_rate": 9.200000000000001e-05, - "loss": 1.1144, + "epoch": 1.51, + "learning_rate": 0.0001245810055865922, + "loss": 1.7958, "step": 270 }, { - "epoch": 10.84, - "learning_rate": 9.16e-05, - "loss": 1.1862, + "epoch": 1.51, + "learning_rate": 0.00012430167597765363, + "loss": 1.7212, "step": 271 }, { - "epoch": 10.88, - "learning_rate": 9.120000000000001e-05, - "loss": 1.0816, + "epoch": 1.52, + "learning_rate": 0.0001240223463687151, + "loss": 1.8835, "step": 272 }, { - "epoch": 10.92, - "learning_rate": 9.080000000000001e-05, - "loss": 1.2288, + "epoch": 1.53, + "learning_rate": 0.00012374301675977656, + "loss": 1.7486, "step": 273 }, { - "epoch": 10.96, - "learning_rate": 9.04e-05, - "loss": 1.3031, + "epoch": 1.53, + "learning_rate": 0.000123463687150838, + "loss": 1.8103, "step": 274 }, { - "epoch": 11.0, - "learning_rate": 9e-05, - "loss": 1.1646, + "epoch": 1.54, + "learning_rate": 0.00012318435754189944, + "loss": 1.7557, "step": 275 }, { - "epoch": 11.04, - "learning_rate": 8.960000000000001e-05, - "loss": 1.2088, + "epoch": 1.54, + "learning_rate": 0.0001229050279329609, + "loss": 1.6198, "step": 276 }, { - "epoch": 11.08, - "learning_rate": 8.92e-05, - "loss": 1.1465, + "epoch": 1.55, + "learning_rate": 0.00012262569832402235, + "loss": 1.6971, "step": 277 }, { - "epoch": 11.12, - "learning_rate": 8.88e-05, - "loss": 1.0313, + "epoch": 1.55, + "learning_rate": 0.0001223463687150838, + "loss": 1.668, "step": 278 }, { - "epoch": 11.16, - "learning_rate": 8.840000000000001e-05, - "loss": 0.9662, + "epoch": 1.56, + "learning_rate": 
0.00012206703910614525, + "loss": 1.8795, "step": 279 }, { - "epoch": 11.2, - "learning_rate": 8.800000000000001e-05, - "loss": 1.051, + "epoch": 1.56, + "learning_rate": 0.0001217877094972067, + "loss": 1.6412, "step": 280 }, { - "epoch": 11.24, - "learning_rate": 8.76e-05, - "loss": 1.1708, + "epoch": 1.57, + "learning_rate": 0.00012150837988826816, + "loss": 1.7497, "step": 281 }, { - "epoch": 11.28, - "learning_rate": 8.72e-05, - "loss": 1.0236, + "epoch": 1.58, + "learning_rate": 0.00012122905027932962, + "loss": 1.5577, "step": 282 }, { - "epoch": 11.32, - "learning_rate": 8.680000000000001e-05, - "loss": 1.1359, + "epoch": 1.58, + "learning_rate": 0.00012094972067039108, + "loss": 1.8049, "step": 283 }, { - "epoch": 11.36, - "learning_rate": 8.64e-05, - "loss": 1.1398, + "epoch": 1.59, + "learning_rate": 0.00012067039106145253, + "loss": 1.6834, "step": 284 }, { - "epoch": 11.4, - "learning_rate": 8.6e-05, - "loss": 1.0213, + "epoch": 1.59, + "learning_rate": 0.00012039106145251397, + "loss": 1.7978, "step": 285 }, { - "epoch": 11.44, - "learning_rate": 8.560000000000001e-05, - "loss": 1.1788, + "epoch": 1.6, + "learning_rate": 0.00012011173184357542, + "loss": 1.6558, "step": 286 }, { - "epoch": 11.48, - "learning_rate": 8.52e-05, - "loss": 1.0387, + "epoch": 1.6, + "learning_rate": 0.00011983240223463687, + "loss": 1.7561, "step": 287 }, { - "epoch": 11.52, - "learning_rate": 8.48e-05, - "loss": 0.9476, + "epoch": 1.61, + "learning_rate": 0.00011955307262569833, + "loss": 1.6807, "step": 288 }, { - "epoch": 11.56, - "learning_rate": 8.44e-05, - "loss": 1.1954, + "epoch": 1.61, + "learning_rate": 0.00011927374301675978, + "loss": 1.6937, "step": 289 }, { - "epoch": 11.6, - "learning_rate": 8.4e-05, - "loss": 1.0309, + "epoch": 1.62, + "learning_rate": 0.00011899441340782122, + "loss": 1.6591, "step": 290 }, { - "epoch": 11.64, - "learning_rate": 8.36e-05, - "loss": 1.1321, + "epoch": 1.63, + "learning_rate": 0.0001187150837988827, + "loss": 1.6771, "step": 291 }, { - "epoch": 11.68, - "learning_rate": 8.32e-05, - "loss": 1.0914, + "epoch": 1.63, + "learning_rate": 0.00011843575418994415, + "loss": 1.7743, "step": 292 }, { - "epoch": 11.72, - "learning_rate": 8.28e-05, - "loss": 1.222, + "epoch": 1.64, + "learning_rate": 0.00011815642458100559, + "loss": 1.5857, "step": 293 }, { - "epoch": 11.76, - "learning_rate": 8.24e-05, - "loss": 1.0127, + "epoch": 1.64, + "learning_rate": 0.00011787709497206705, + "loss": 1.6999, "step": 294 }, { - "epoch": 11.8, - "learning_rate": 8.2e-05, - "loss": 1.1977, + "epoch": 1.65, + "learning_rate": 0.0001175977653631285, + "loss": 1.5661, "step": 295 }, { - "epoch": 11.84, - "learning_rate": 8.16e-05, - "loss": 1.122, + "epoch": 1.65, + "learning_rate": 0.00011731843575418995, + "loss": 1.7235, "step": 296 }, { - "epoch": 11.88, - "learning_rate": 8.120000000000001e-05, - "loss": 1.1543, + "epoch": 1.66, + "learning_rate": 0.0001170391061452514, + "loss": 1.607, "step": 297 }, { - "epoch": 11.92, - "learning_rate": 8.080000000000001e-05, - "loss": 1.1767, + "epoch": 1.66, + "learning_rate": 0.00011675977653631284, + "loss": 1.68, "step": 298 }, { - "epoch": 11.96, - "learning_rate": 8.04e-05, - "loss": 1.1341, + "epoch": 1.67, + "learning_rate": 0.0001164804469273743, + "loss": 1.6938, "step": 299 }, { - "epoch": 12.0, - "learning_rate": 8e-05, - "loss": 1.0107, + "epoch": 1.68, + "learning_rate": 0.00011620111731843578, + "loss": 1.6315, "step": 300 }, { - "epoch": 12.04, - "learning_rate": 7.960000000000001e-05, - "loss": 1.1455, + "epoch": 1.68, 
+ "learning_rate": 0.00011592178770949722, + "loss": 1.6802, "step": 301 }, { - "epoch": 12.08, - "learning_rate": 7.920000000000001e-05, - "loss": 1.0091, + "epoch": 1.69, + "learning_rate": 0.00011564245810055867, + "loss": 1.7174, "step": 302 }, { - "epoch": 12.12, - "learning_rate": 7.88e-05, - "loss": 0.9888, + "epoch": 1.69, + "learning_rate": 0.00011536312849162012, + "loss": 1.5212, "step": 303 }, { - "epoch": 12.16, - "learning_rate": 7.840000000000001e-05, - "loss": 0.9894, + "epoch": 1.7, + "learning_rate": 0.00011508379888268157, + "loss": 1.5808, "step": 304 }, { - "epoch": 12.2, - "learning_rate": 7.800000000000001e-05, - "loss": 1.0095, + "epoch": 1.7, + "learning_rate": 0.00011480446927374303, + "loss": 1.6152, "step": 305 }, { - "epoch": 12.24, - "learning_rate": 7.76e-05, - "loss": 1.1163, + "epoch": 1.71, + "learning_rate": 0.00011452513966480447, + "loss": 1.5435, "step": 306 }, { - "epoch": 12.28, - "learning_rate": 7.72e-05, - "loss": 1.0345, + "epoch": 1.72, + "learning_rate": 0.00011424581005586592, + "loss": 1.6603, "step": 307 }, { - "epoch": 12.32, - "learning_rate": 7.680000000000001e-05, - "loss": 0.9662, + "epoch": 1.72, + "learning_rate": 0.00011396648044692737, + "loss": 1.685, "step": 308 }, { - "epoch": 12.36, - "learning_rate": 7.64e-05, - "loss": 0.9064, + "epoch": 1.73, + "learning_rate": 0.00011368715083798884, + "loss": 1.6002, "step": 309 }, { - "epoch": 12.4, - "learning_rate": 7.6e-05, - "loss": 0.9448, + "epoch": 1.73, + "learning_rate": 0.00011340782122905029, + "loss": 1.6046, "step": 310 }, { - "epoch": 12.44, - "learning_rate": 7.560000000000001e-05, - "loss": 0.9509, + "epoch": 1.74, + "learning_rate": 0.00011312849162011174, + "loss": 1.5969, "step": 311 }, { - "epoch": 12.48, - "learning_rate": 7.52e-05, - "loss": 1.123, + "epoch": 1.74, + "learning_rate": 0.0001128491620111732, + "loss": 1.5845, "step": 312 }, { - "epoch": 12.52, - "learning_rate": 7.48e-05, - "loss": 1.1011, + "epoch": 1.75, + "learning_rate": 0.00011256983240223464, + "loss": 1.8183, "step": 313 }, { - "epoch": 12.56, - "learning_rate": 7.44e-05, - "loss": 0.9398, + "epoch": 1.75, + "learning_rate": 0.00011229050279329609, + "loss": 1.6953, "step": 314 }, { - "epoch": 12.6, - "learning_rate": 7.4e-05, - "loss": 1.0498, + "epoch": 1.76, + "learning_rate": 0.00011201117318435754, + "loss": 1.7787, "step": 315 }, { - "epoch": 12.64, - "learning_rate": 7.36e-05, - "loss": 1.0442, + "epoch": 1.77, + "learning_rate": 0.000111731843575419, + "loss": 1.6422, "step": 316 }, { - "epoch": 12.68, - "learning_rate": 7.32e-05, - "loss": 0.9599, + "epoch": 1.77, + "learning_rate": 0.00011145251396648045, + "loss": 1.7034, "step": 317 }, { - "epoch": 12.72, - "learning_rate": 7.280000000000001e-05, - "loss": 0.8778, + "epoch": 1.78, + "learning_rate": 0.00011117318435754192, + "loss": 1.7301, "step": 318 }, { - "epoch": 12.76, - "learning_rate": 7.24e-05, - "loss": 0.8727, + "epoch": 1.78, + "learning_rate": 0.00011089385474860337, + "loss": 1.7084, "step": 319 }, { - "epoch": 12.8, - "learning_rate": 7.2e-05, - "loss": 1.0189, + "epoch": 1.79, + "learning_rate": 0.00011061452513966482, + "loss": 1.772, "step": 320 }, { - "epoch": 12.84, - "learning_rate": 7.16e-05, - "loss": 1.0543, + "epoch": 1.79, + "learning_rate": 0.00011033519553072626, + "loss": 1.5733, "step": 321 }, { - "epoch": 12.88, - "learning_rate": 7.12e-05, - "loss": 1.1282, + "epoch": 1.8, + "learning_rate": 0.00011005586592178771, + "loss": 1.6423, "step": 322 }, { - "epoch": 12.92, - "learning_rate": 7.08e-05, - 
"loss": 1.1018, + "epoch": 1.8, + "learning_rate": 0.00010977653631284917, + "loss": 1.5809, "step": 323 }, { - "epoch": 12.96, - "learning_rate": 7.04e-05, - "loss": 1.1827, + "epoch": 1.81, + "learning_rate": 0.00010949720670391062, + "loss": 1.6781, "step": 324 }, { - "epoch": 13.0, - "learning_rate": 7e-05, - "loss": 1.108, + "epoch": 1.82, + "learning_rate": 0.00010921787709497207, + "loss": 1.6788, "step": 325 }, { - "epoch": 13.04, - "learning_rate": 6.96e-05, - "loss": 0.9618, + "epoch": 1.82, + "learning_rate": 0.00010893854748603351, + "loss": 1.6346, "step": 326 }, { - "epoch": 13.08, - "learning_rate": 6.92e-05, - "loss": 1.0666, + "epoch": 1.83, + "learning_rate": 0.00010865921787709499, + "loss": 1.6634, "step": 327 }, { - "epoch": 13.12, - "learning_rate": 6.879999999999999e-05, - "loss": 0.9973, + "epoch": 1.83, + "learning_rate": 0.00010837988826815643, + "loss": 1.7561, "step": 328 }, { - "epoch": 13.16, - "learning_rate": 6.840000000000001e-05, - "loss": 1.0103, + "epoch": 1.84, + "learning_rate": 0.00010810055865921788, + "loss": 1.66, "step": 329 }, { - "epoch": 13.2, - "learning_rate": 6.800000000000001e-05, - "loss": 0.9974, + "epoch": 1.84, + "learning_rate": 0.00010782122905027934, + "loss": 1.7298, "step": 330 }, { - "epoch": 13.24, - "learning_rate": 6.76e-05, - "loss": 1.2283, + "epoch": 1.85, + "learning_rate": 0.00010754189944134079, + "loss": 1.6893, "step": 331 }, { - "epoch": 13.28, - "learning_rate": 6.720000000000001e-05, - "loss": 0.7485, + "epoch": 1.85, + "learning_rate": 0.00010726256983240224, + "loss": 1.7631, "step": 332 }, { - "epoch": 13.32, - "learning_rate": 6.680000000000001e-05, - "loss": 0.8072, + "epoch": 1.86, + "learning_rate": 0.00010698324022346368, + "loss": 1.6633, "step": 333 }, { - "epoch": 13.36, - "learning_rate": 6.64e-05, - "loss": 1.1106, + "epoch": 1.87, + "learning_rate": 0.00010670391061452513, + "loss": 1.5388, "step": 334 }, { - "epoch": 13.4, - "learning_rate": 6.6e-05, - "loss": 0.8216, + "epoch": 1.87, + "learning_rate": 0.00010642458100558659, + "loss": 1.6718, "step": 335 }, { - "epoch": 13.44, - "learning_rate": 6.560000000000001e-05, - "loss": 0.959, + "epoch": 1.88, + "learning_rate": 0.00010614525139664805, + "loss": 1.5536, "step": 336 }, { - "epoch": 13.48, - "learning_rate": 6.52e-05, - "loss": 1.0767, + "epoch": 1.88, + "learning_rate": 0.00010586592178770951, + "loss": 1.6483, "step": 337 }, { - "epoch": 13.52, - "learning_rate": 6.48e-05, - "loss": 1.0395, + "epoch": 1.89, + "learning_rate": 0.00010558659217877096, + "loss": 1.5774, "step": 338 }, { - "epoch": 13.56, - "learning_rate": 6.440000000000001e-05, - "loss": 0.9102, + "epoch": 1.89, + "learning_rate": 0.00010530726256983241, + "loss": 1.6366, "step": 339 }, { - "epoch": 13.6, - "learning_rate": 6.400000000000001e-05, - "loss": 0.8875, + "epoch": 1.9, + "learning_rate": 0.00010502793296089387, + "loss": 1.5567, "step": 340 }, { - "epoch": 13.64, - "learning_rate": 6.36e-05, - "loss": 1.0005, + "epoch": 1.91, + "learning_rate": 0.0001047486033519553, + "loss": 1.5323, "step": 341 }, { - "epoch": 13.68, - "learning_rate": 6.32e-05, - "loss": 0.9208, + "epoch": 1.91, + "learning_rate": 0.00010446927374301676, + "loss": 1.4608, "step": 342 }, { - "epoch": 13.72, - "learning_rate": 6.280000000000001e-05, - "loss": 0.8943, + "epoch": 1.92, + "learning_rate": 0.00010418994413407821, + "loss": 1.5933, "step": 343 }, { - "epoch": 13.76, - "learning_rate": 6.24e-05, - "loss": 0.8441, + "epoch": 1.92, + "learning_rate": 0.00010391061452513966, + "loss": 
1.6625, "step": 344 }, { - "epoch": 13.8, - "learning_rate": 6.2e-05, - "loss": 0.9953, + "epoch": 1.93, + "learning_rate": 0.00010363128491620113, + "loss": 1.7236, "step": 345 }, { - "epoch": 13.84, - "learning_rate": 6.16e-05, - "loss": 0.796, + "epoch": 1.93, + "learning_rate": 0.00010335195530726258, + "loss": 1.759, "step": 346 }, { - "epoch": 13.88, - "learning_rate": 6.12e-05, - "loss": 1.0745, + "epoch": 1.94, + "learning_rate": 0.00010307262569832404, + "loss": 1.7248, "step": 347 }, { - "epoch": 13.92, - "learning_rate": 6.08e-05, - "loss": 0.9898, + "epoch": 1.94, + "learning_rate": 0.00010279329608938548, + "loss": 1.5144, "step": 348 }, { - "epoch": 13.96, - "learning_rate": 6.04e-05, - "loss": 0.9559, + "epoch": 1.95, + "learning_rate": 0.00010251396648044693, + "loss": 1.6905, "step": 349 }, { - "epoch": 14.0, - "learning_rate": 6e-05, - "loss": 0.8236, + "epoch": 1.96, + "learning_rate": 0.00010223463687150838, + "loss": 1.6119, "step": 350 }, { - "epoch": 14.04, - "learning_rate": 5.96e-05, - "loss": 0.7318, + "epoch": 1.96, + "learning_rate": 0.00010195530726256983, + "loss": 1.5464, "step": 351 }, { - "epoch": 14.08, - "learning_rate": 5.92e-05, - "loss": 0.8994, + "epoch": 1.97, + "learning_rate": 0.00010167597765363129, + "loss": 1.6901, "step": 352 }, { - "epoch": 14.12, - "learning_rate": 5.88e-05, - "loss": 0.9707, + "epoch": 1.97, + "learning_rate": 0.00010139664804469273, + "loss": 1.3511, "step": 353 }, { - "epoch": 14.16, - "learning_rate": 5.8399999999999997e-05, - "loss": 0.806, + "epoch": 1.98, + "learning_rate": 0.0001011173184357542, + "loss": 1.5434, "step": 354 }, { - "epoch": 14.2, - "learning_rate": 5.8e-05, - "loss": 1.0525, + "epoch": 1.98, + "learning_rate": 0.00010083798882681566, + "loss": 1.5891, "step": 355 }, { - "epoch": 14.24, - "learning_rate": 5.76e-05, - "loss": 0.7589, + "epoch": 1.99, + "learning_rate": 0.0001005586592178771, + "loss": 1.6658, "step": 356 }, { - "epoch": 14.28, - "learning_rate": 5.72e-05, - "loss": 0.7703, + "epoch": 1.99, + "learning_rate": 0.00010027932960893855, + "loss": 1.5657, "step": 357 }, { - "epoch": 14.32, - "learning_rate": 5.68e-05, - "loss": 0.9869, + "epoch": 2.0, + "learning_rate": 0.0001, + "loss": 1.7005, "step": 358 }, { - "epoch": 14.36, - "learning_rate": 5.6399999999999995e-05, - "loss": 0.9673, + "epoch": 2.01, + "learning_rate": 9.972067039106146e-05, + "loss": 1.4202, "step": 359 }, { - "epoch": 14.4, - "learning_rate": 5.6000000000000006e-05, - "loss": 0.9913, + "epoch": 2.01, + "learning_rate": 9.944134078212291e-05, + "loss": 1.5262, "step": 360 }, { - "epoch": 14.44, - "learning_rate": 5.560000000000001e-05, - "loss": 1.0219, + "epoch": 2.02, + "learning_rate": 9.916201117318436e-05, + "loss": 1.6323, "step": 361 }, { - "epoch": 14.48, - "learning_rate": 5.520000000000001e-05, - "loss": 0.8672, + "epoch": 2.02, + "learning_rate": 9.888268156424582e-05, + "loss": 1.5521, "step": 362 }, { - "epoch": 14.52, - "learning_rate": 5.4800000000000004e-05, - "loss": 1.0464, + "epoch": 2.03, + "learning_rate": 9.860335195530727e-05, + "loss": 1.5762, "step": 363 }, { - "epoch": 14.56, - "learning_rate": 5.440000000000001e-05, - "loss": 1.0109, + "epoch": 2.03, + "learning_rate": 9.832402234636872e-05, + "loss": 1.613, "step": 364 }, { - "epoch": 14.6, - "learning_rate": 5.4000000000000005e-05, - "loss": 0.9913, + "epoch": 2.04, + "learning_rate": 9.804469273743018e-05, + "loss": 1.4231, "step": 365 }, { - "epoch": 14.64, - "learning_rate": 5.360000000000001e-05, - "loss": 0.779, + "epoch": 2.04, + 
"learning_rate": 9.776536312849163e-05, + "loss": 1.5706, "step": 366 }, { - "epoch": 14.68, - "learning_rate": 5.3200000000000006e-05, - "loss": 0.8675, + "epoch": 2.05, + "learning_rate": 9.748603351955308e-05, + "loss": 1.5245, "step": 367 }, { - "epoch": 14.72, - "learning_rate": 5.28e-05, - "loss": 0.987, + "epoch": 2.06, + "learning_rate": 9.720670391061453e-05, + "loss": 1.4771, "step": 368 }, { - "epoch": 14.76, - "learning_rate": 5.2400000000000007e-05, - "loss": 0.741, + "epoch": 2.06, + "learning_rate": 9.692737430167597e-05, + "loss": 1.596, "step": 369 }, { - "epoch": 14.8, - "learning_rate": 5.2000000000000004e-05, - "loss": 0.9585, + "epoch": 2.07, + "learning_rate": 9.664804469273744e-05, + "loss": 1.537, "step": 370 }, { - "epoch": 14.84, - "learning_rate": 5.16e-05, - "loss": 0.8977, + "epoch": 2.07, + "learning_rate": 9.636871508379889e-05, + "loss": 1.4276, "step": 371 }, { - "epoch": 14.88, - "learning_rate": 5.1200000000000004e-05, - "loss": 0.7836, + "epoch": 2.08, + "learning_rate": 9.608938547486033e-05, + "loss": 1.4746, "step": 372 }, { - "epoch": 14.92, - "learning_rate": 5.08e-05, - "loss": 0.8461, + "epoch": 2.08, + "learning_rate": 9.581005586592178e-05, + "loss": 1.4374, "step": 373 }, { - "epoch": 14.96, - "learning_rate": 5.0400000000000005e-05, - "loss": 0.7531, + "epoch": 2.09, + "learning_rate": 9.553072625698325e-05, + "loss": 1.4704, "step": 374 }, { - "epoch": 15.0, - "learning_rate": 5e-05, - "loss": 0.8906, + "epoch": 2.09, + "learning_rate": 9.52513966480447e-05, + "loss": 1.5997, "step": 375 }, { - "epoch": 15.04, - "learning_rate": 4.96e-05, - "loss": 0.8793, + "epoch": 2.1, + "learning_rate": 9.497206703910614e-05, + "loss": 1.5034, "step": 376 }, { - "epoch": 15.08, - "learning_rate": 4.92e-05, - "loss": 0.8596, + "epoch": 2.11, + "learning_rate": 9.46927374301676e-05, + "loss": 1.6392, "step": 377 }, { - "epoch": 15.12, - "learning_rate": 4.88e-05, - "loss": 0.7947, + "epoch": 2.11, + "learning_rate": 9.441340782122905e-05, + "loss": 1.5611, "step": 378 }, { - "epoch": 15.16, - "learning_rate": 4.8400000000000004e-05, - "loss": 0.7976, + "epoch": 2.12, + "learning_rate": 9.413407821229052e-05, + "loss": 1.3384, "step": 379 }, { - "epoch": 15.2, - "learning_rate": 4.8e-05, - "loss": 0.9037, + "epoch": 2.12, + "learning_rate": 9.385474860335196e-05, + "loss": 1.5732, "step": 380 }, { - "epoch": 15.24, - "learning_rate": 4.76e-05, - "loss": 0.9863, + "epoch": 2.13, + "learning_rate": 9.357541899441341e-05, + "loss": 1.3874, "step": 381 }, { - "epoch": 15.28, - "learning_rate": 4.72e-05, - "loss": 0.7735, + "epoch": 2.13, + "learning_rate": 9.329608938547486e-05, + "loss": 1.4139, "step": 382 }, { - "epoch": 15.32, - "learning_rate": 4.6800000000000006e-05, - "loss": 0.6723, + "epoch": 2.14, + "learning_rate": 9.301675977653633e-05, + "loss": 1.5195, "step": 383 }, { - "epoch": 15.36, - "learning_rate": 4.64e-05, - "loss": 0.7002, + "epoch": 2.15, + "learning_rate": 9.273743016759777e-05, + "loss": 1.4371, "step": 384 }, { - "epoch": 15.4, - "learning_rate": 4.600000000000001e-05, - "loss": 0.9479, + "epoch": 2.15, + "learning_rate": 9.245810055865922e-05, + "loss": 1.4411, "step": 385 }, { - "epoch": 15.44, - "learning_rate": 4.5600000000000004e-05, - "loss": 0.8159, + "epoch": 2.16, + "learning_rate": 9.217877094972067e-05, + "loss": 1.5214, "step": 386 }, { - "epoch": 15.48, - "learning_rate": 4.52e-05, - "loss": 0.7954, + "epoch": 2.16, + "learning_rate": 9.189944134078213e-05, + "loss": 1.4971, "step": 387 }, { - "epoch": 15.52, - 
"learning_rate": 4.4800000000000005e-05, - "loss": 0.8016, + "epoch": 2.17, + "learning_rate": 9.162011173184358e-05, + "loss": 1.3453, "step": 388 }, { - "epoch": 15.56, - "learning_rate": 4.44e-05, - "loss": 0.7995, + "epoch": 2.17, + "learning_rate": 9.134078212290503e-05, + "loss": 1.394, "step": 389 }, { - "epoch": 15.6, - "learning_rate": 4.4000000000000006e-05, - "loss": 0.8243, + "epoch": 2.18, + "learning_rate": 9.106145251396648e-05, + "loss": 1.5058, "step": 390 }, { - "epoch": 15.64, - "learning_rate": 4.36e-05, - "loss": 0.8335, + "epoch": 2.18, + "learning_rate": 9.078212290502794e-05, + "loss": 1.4855, "step": 391 }, { - "epoch": 15.68, - "learning_rate": 4.32e-05, - "loss": 1.001, + "epoch": 2.19, + "learning_rate": 9.050279329608939e-05, + "loss": 1.4647, "step": 392 }, { - "epoch": 15.72, - "learning_rate": 4.2800000000000004e-05, - "loss": 0.9385, + "epoch": 2.2, + "learning_rate": 9.022346368715084e-05, + "loss": 1.3435, "step": 393 }, { - "epoch": 15.76, - "learning_rate": 4.24e-05, - "loss": 0.8796, + "epoch": 2.2, + "learning_rate": 8.99441340782123e-05, + "loss": 1.5815, "step": 394 }, { - "epoch": 15.8, - "learning_rate": 4.2e-05, - "loss": 0.98, + "epoch": 2.21, + "learning_rate": 8.966480446927375e-05, + "loss": 1.4742, "step": 395 }, { - "epoch": 15.84, - "learning_rate": 4.16e-05, - "loss": 0.7547, + "epoch": 2.21, + "learning_rate": 8.938547486033519e-05, + "loss": 1.5389, "step": 396 }, { - "epoch": 15.88, - "learning_rate": 4.12e-05, - "loss": 0.7943, + "epoch": 2.22, + "learning_rate": 8.910614525139666e-05, + "loss": 1.384, "step": 397 }, { - "epoch": 15.92, - "learning_rate": 4.08e-05, - "loss": 0.8135, + "epoch": 2.22, + "learning_rate": 8.882681564245811e-05, + "loss": 1.3967, "step": 398 }, { - "epoch": 15.96, - "learning_rate": 4.0400000000000006e-05, - "loss": 0.87, + "epoch": 2.23, + "learning_rate": 8.854748603351956e-05, + "loss": 1.442, "step": 399 }, { - "epoch": 16.0, - "learning_rate": 4e-05, - "loss": 0.8818, + "epoch": 2.23, + "learning_rate": 8.8268156424581e-05, + "loss": 1.396, "step": 400 } ], "logging_steps": 1, - "max_steps": 500, - "num_train_epochs": 20, + "max_steps": 716, + "num_train_epochs": 4, "save_steps": 100, - "total_flos": 1.361597073887232e+17, + "total_flos": 2.056923590572032e+17, "trial_name": null, "trial_params": null } diff --git a/checkpoint-400/training_args.bin b/checkpoint-400/training_args.bin index 4de6572a838c337c9990635a9406ebf46c0ec336..c8672c716e925d0028b4938db147703f58656ff7 100644 --- a/checkpoint-400/training_args.bin +++ b/checkpoint-400/training_args.bin @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:6ef74b31950ae6c8955316bed48c343fb06cda0cb6a8a54ca46ca3cb681c8736 +oid sha256:188ae1c421cc0c6435d1f71d8d3423ac4abc7dba0e6fc2efcbc4dbe77c741317 size 4027 diff --git a/checkpoint-500/README.md b/checkpoint-500/README.md index 08371015f02382e6fcba318f4aaea54ae52cd3c4..2f257a448caef1c59022426ad3ec9ba80acb3821 100644 --- a/checkpoint-500/README.md +++ b/checkpoint-500/README.md @@ -4,6 +4,30 @@ library_name: peft ## Training procedure +The following `bitsandbytes` quantization config was used during training: +- quant_method: bitsandbytes +- load_in_8bit: True +- load_in_4bit: False +- llm_int8_threshold: 6.0 +- llm_int8_skip_modules: None +- llm_int8_enable_fp32_cpu_offload: False +- llm_int8_has_fp16_weight: False +- bnb_4bit_quant_type: fp4 +- bnb_4bit_use_double_quant: False +- bnb_4bit_compute_dtype: float32 + +The following `bitsandbytes` quantization config was used during 
training: +- quant_method: bitsandbytes +- load_in_8bit: True +- load_in_4bit: False +- llm_int8_threshold: 6.0 +- llm_int8_skip_modules: None +- llm_int8_enable_fp32_cpu_offload: False +- llm_int8_has_fp16_weight: False +- bnb_4bit_quant_type: fp4 +- bnb_4bit_use_double_quant: False +- bnb_4bit_compute_dtype: float32 + The following `bitsandbytes` quantization config was used during training: - quant_method: bitsandbytes - load_in_8bit: True @@ -29,6 +53,8 @@ The following `bitsandbytes` quantization config was used during training: - bnb_4bit_compute_dtype: float32 ### Framework versions +- PEFT 0.6.0.dev0 +- PEFT 0.6.0.dev0 - PEFT 0.6.0.dev0 - PEFT 0.6.0.dev0 diff --git a/checkpoint-500/adapter_model.bin b/checkpoint-500/adapter_model.bin index c591858f8c3078c2a2cb20f492f65b893f93f7e6..6198683070fc9df0fa08204fb385a9c9cf4fb8a2 100644 --- a/checkpoint-500/adapter_model.bin +++ b/checkpoint-500/adapter_model.bin @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:5a125cc125172a0c45ae8a435cdae72b0c657f9b899e914953f9df21478011a8 +oid sha256:fbc5f8a93e505087cecc049ff476019df96de3cba21db5d67029ade06c844216 size 39409357 diff --git a/checkpoint-500/optimizer.pt b/checkpoint-500/optimizer.pt index 77340f17ffa5f66b1643f14304ad12b7af091dbc..dec2baaffc2c1b79896eb5f10b68cc8630ce5592 100644 --- a/checkpoint-500/optimizer.pt +++ b/checkpoint-500/optimizer.pt @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:20490071892c8d3160a456d6e86f6905a6d5f8dfd4a5099f884f3c570dfd5be2 +oid sha256:cbab141106d84467be5a7a978a93b8492dd00bcc4a497115fb4124bf8beaa046 size 78844421 diff --git a/checkpoint-500/rng_state.pth b/checkpoint-500/rng_state.pth index b10130f910d9e6a0f56a41b5d4b3e6c851381a72..e0df6592e27bef84a4beb9293e7b49666a4d652e 100644 --- a/checkpoint-500/rng_state.pth +++ b/checkpoint-500/rng_state.pth @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:a3464e9c253485ee0e45ad721cb8a719f3a43712377f5b9e04d190fe903bbfbe +oid sha256:7508d4b8dd267de5cc58e972da25236687927651336a28f292c92f7f23951475 size 14575 diff --git a/checkpoint-500/scheduler.pt b/checkpoint-500/scheduler.pt index b29b9b24abffbaf84031523df986bd3982387246..53ba5c95b7d8f86bbe40d20004a07e2e74748d22 100644 --- a/checkpoint-500/scheduler.pt +++ b/checkpoint-500/scheduler.pt @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:becc6ce1d910c1484b60d785c856d3d18b0420d3e43c97ef16af3312337b9491 +oid sha256:b841ff782e5772fca220707441f952d8fcec6aefcd65e69bf2fe08db08edfce1 size 627 diff --git a/checkpoint-500/tokenizer.json b/checkpoint-500/tokenizer.json index dbf002cafbd4818dcff2abc9156c088d681b4533..673c31abdeadf6576c3c754df86459e1ad64e207 100644 --- a/checkpoint-500/tokenizer.json +++ b/checkpoint-500/tokenizer.json @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:17a208233d2ee8d8c83b23bc214df737c44806a1919f444e89b31e586cd956ba -size 14500471 +oid sha256:85b00d7db4df5df2e3f01cacc3feda246002a672f3356eec7f4b04a22eb0dfbe +size 14500570 diff --git a/checkpoint-500/trainer_state.json b/checkpoint-500/trainer_state.json index 581f1e94b337ced6715385f7977bf3cafe178e58..121cade410f9ee4e2a798857772481a6700d2a71 100644 --- a/checkpoint-500/trainer_state.json +++ b/checkpoint-500/trainer_state.json @@ -1,7 +1,7 @@ { "best_metric": null, "best_model_checkpoint": null, - "epoch": 20.0, + "epoch": 2.793296089385475, "eval_steps": 500, "global_step": 500, "is_hyper_param_search": false, @@ -9,3011 +9,3011 @@ "is_world_process_zero": true, "log_history": [ { - "epoch": 
0.04, - "learning_rate": 0.0001996, - "loss": 2.4683, + "epoch": 0.01, + "learning_rate": 0.00019972067039106145, + "loss": 2.6443, "step": 1 }, { - "epoch": 0.08, - "learning_rate": 0.00019920000000000002, - "loss": 2.2023, + "epoch": 0.01, + "learning_rate": 0.00019944134078212292, + "loss": 2.4104, "step": 2 }, { - "epoch": 0.12, - "learning_rate": 0.0001988, - "loss": 2.3039, + "epoch": 0.02, + "learning_rate": 0.00019916201117318435, + "loss": 2.4975, "step": 3 }, { - "epoch": 0.16, - "learning_rate": 0.0001984, - "loss": 2.1855, + "epoch": 0.02, + "learning_rate": 0.00019888268156424582, + "loss": 2.3513, "step": 4 }, { - "epoch": 0.2, - "learning_rate": 0.00019800000000000002, - "loss": 2.328, + "epoch": 0.03, + "learning_rate": 0.0001986033519553073, + "loss": 2.4274, "step": 5 }, { - "epoch": 0.24, - "learning_rate": 0.0001976, - "loss": 2.3407, + "epoch": 0.03, + "learning_rate": 0.00019832402234636873, + "loss": 2.3628, "step": 6 }, { - "epoch": 0.28, - "learning_rate": 0.0001972, - "loss": 2.3432, + "epoch": 0.04, + "learning_rate": 0.0001980446927374302, + "loss": 2.3567, "step": 7 }, { - "epoch": 0.32, - "learning_rate": 0.0001968, - "loss": 2.2333, + "epoch": 0.04, + "learning_rate": 0.00019776536312849163, + "loss": 2.4121, "step": 8 }, { - "epoch": 0.36, - "learning_rate": 0.0001964, - "loss": 2.1761, + "epoch": 0.05, + "learning_rate": 0.00019748603351955307, + "loss": 2.4033, "step": 9 }, { - "epoch": 0.4, - "learning_rate": 0.000196, - "loss": 2.1473, + "epoch": 0.06, + "learning_rate": 0.00019720670391061454, + "loss": 2.2805, "step": 10 }, { - "epoch": 0.44, - "learning_rate": 0.0001956, - "loss": 2.2076, + "epoch": 0.06, + "learning_rate": 0.00019692737430167598, + "loss": 2.2639, "step": 11 }, { - "epoch": 0.48, - "learning_rate": 0.0001952, - "loss": 2.1925, + "epoch": 0.07, + "learning_rate": 0.00019664804469273744, + "loss": 2.2724, "step": 12 }, { - "epoch": 0.52, - "learning_rate": 0.0001948, - "loss": 2.1694, + "epoch": 0.07, + "learning_rate": 0.00019636871508379888, + "loss": 2.332, "step": 13 }, { - "epoch": 0.56, - "learning_rate": 0.0001944, - "loss": 2.1056, + "epoch": 0.08, + "learning_rate": 0.00019608938547486035, + "loss": 2.2261, "step": 14 }, { - "epoch": 0.6, - "learning_rate": 0.000194, - "loss": 2.2426, + "epoch": 0.08, + "learning_rate": 0.00019581005586592182, + "loss": 2.2208, "step": 15 }, { - "epoch": 0.64, - "learning_rate": 0.00019360000000000002, - "loss": 2.2635, + "epoch": 0.09, + "learning_rate": 0.00019553072625698326, + "loss": 2.3351, "step": 16 }, { - "epoch": 0.68, - "learning_rate": 0.0001932, - "loss": 2.251, + "epoch": 0.09, + "learning_rate": 0.0001952513966480447, + "loss": 2.2475, "step": 17 }, { - "epoch": 0.72, - "learning_rate": 0.0001928, - "loss": 2.224, + "epoch": 0.1, + "learning_rate": 0.00019497206703910616, + "loss": 2.3283, "step": 18 }, { - "epoch": 0.76, - "learning_rate": 0.00019240000000000001, - "loss": 2.1848, + "epoch": 0.11, + "learning_rate": 0.0001946927374301676, + "loss": 2.1346, "step": 19 }, { - "epoch": 0.8, - "learning_rate": 0.000192, - "loss": 2.1341, + "epoch": 0.11, + "learning_rate": 0.00019441340782122907, + "loss": 2.131, "step": 20 }, { - "epoch": 0.84, - "learning_rate": 0.0001916, - "loss": 2.126, + "epoch": 0.12, + "learning_rate": 0.0001941340782122905, + "loss": 2.1718, "step": 21 }, { - "epoch": 0.88, - "learning_rate": 0.0001912, - "loss": 2.0959, + "epoch": 0.12, + "learning_rate": 0.00019385474860335195, + "loss": 2.2446, "step": 22 }, { - "epoch": 0.92, - "learning_rate": 
0.0001908, - "loss": 2.2446, + "epoch": 0.13, + "learning_rate": 0.0001935754189944134, + "loss": 2.306, "step": 23 }, { - "epoch": 0.96, - "learning_rate": 0.0001904, - "loss": 1.9448, + "epoch": 0.13, + "learning_rate": 0.00019329608938547488, + "loss": 2.1908, "step": 24 }, { - "epoch": 1.0, - "learning_rate": 0.00019, - "loss": 2.0939, + "epoch": 0.14, + "learning_rate": 0.00019301675977653632, + "loss": 2.2844, "step": 25 }, { - "epoch": 1.04, - "learning_rate": 0.0001896, - "loss": 2.1247, + "epoch": 0.15, + "learning_rate": 0.00019273743016759779, + "loss": 2.2235, "step": 26 }, { - "epoch": 1.08, - "learning_rate": 0.0001892, - "loss": 1.9417, + "epoch": 0.15, + "learning_rate": 0.00019245810055865922, + "loss": 2.1842, "step": 27 }, { - "epoch": 1.12, - "learning_rate": 0.0001888, - "loss": 2.105, + "epoch": 0.16, + "learning_rate": 0.00019217877094972066, + "loss": 2.2675, "step": 28 }, { - "epoch": 1.16, - "learning_rate": 0.0001884, - "loss": 2.1595, + "epoch": 0.16, + "learning_rate": 0.00019189944134078213, + "loss": 2.2532, "step": 29 }, { - "epoch": 1.2, - "learning_rate": 0.000188, - "loss": 2.2009, + "epoch": 0.17, + "learning_rate": 0.00019162011173184357, + "loss": 2.1788, "step": 30 }, { - "epoch": 1.24, - "learning_rate": 0.0001876, - "loss": 2.0784, + "epoch": 0.17, + "learning_rate": 0.00019134078212290504, + "loss": 2.2494, "step": 31 }, { - "epoch": 1.28, - "learning_rate": 0.00018720000000000002, - "loss": 2.2173, + "epoch": 0.18, + "learning_rate": 0.0001910614525139665, + "loss": 2.1995, "step": 32 }, { - "epoch": 1.32, - "learning_rate": 0.00018680000000000001, - "loss": 2.1185, + "epoch": 0.18, + "learning_rate": 0.00019078212290502794, + "loss": 2.1451, "step": 33 }, { - "epoch": 1.36, - "learning_rate": 0.00018640000000000003, - "loss": 1.988, + "epoch": 0.19, + "learning_rate": 0.0001905027932960894, + "loss": 2.223, "step": 34 }, { - "epoch": 1.4, - "learning_rate": 0.00018600000000000002, - "loss": 1.9493, + "epoch": 0.2, + "learning_rate": 0.00019022346368715085, + "loss": 2.2854, "step": 35 }, { - "epoch": 1.44, - "learning_rate": 0.0001856, - "loss": 1.9947, + "epoch": 0.2, + "learning_rate": 0.0001899441340782123, + "loss": 2.2265, "step": 36 }, { - "epoch": 1.48, - "learning_rate": 0.00018520000000000003, - "loss": 2.0506, + "epoch": 0.21, + "learning_rate": 0.00018966480446927375, + "loss": 2.1214, "step": 37 }, { - "epoch": 1.52, - "learning_rate": 0.00018480000000000002, - "loss": 2.0829, + "epoch": 0.21, + "learning_rate": 0.0001893854748603352, + "loss": 2.1898, "step": 38 }, { - "epoch": 1.56, - "learning_rate": 0.0001844, - "loss": 2.1455, + "epoch": 0.22, + "learning_rate": 0.00018910614525139666, + "loss": 2.1974, "step": 39 }, { - "epoch": 1.6, - "learning_rate": 0.00018400000000000003, - "loss": 2.0304, + "epoch": 0.22, + "learning_rate": 0.0001888268156424581, + "loss": 2.2259, "step": 40 }, { - "epoch": 1.64, - "learning_rate": 0.00018360000000000002, - "loss": 2.006, + "epoch": 0.23, + "learning_rate": 0.00018854748603351957, + "loss": 2.2094, "step": 41 }, { - "epoch": 1.68, - "learning_rate": 0.0001832, - "loss": 2.1759, + "epoch": 0.23, + "learning_rate": 0.00018826815642458103, + "loss": 2.1731, "step": 42 }, { - "epoch": 1.72, - "learning_rate": 0.00018280000000000003, - "loss": 2.0093, + "epoch": 0.24, + "learning_rate": 0.00018798882681564247, + "loss": 2.2373, "step": 43 }, { - "epoch": 1.76, - "learning_rate": 0.00018240000000000002, - "loss": 2.0683, + "epoch": 0.25, + "learning_rate": 0.0001877094972067039, + "loss": 2.2295, 
"step": 44 }, { - "epoch": 1.8, - "learning_rate": 0.000182, - "loss": 2.1352, + "epoch": 0.25, + "learning_rate": 0.00018743016759776538, + "loss": 2.1947, "step": 45 }, { - "epoch": 1.84, - "learning_rate": 0.00018160000000000002, - "loss": 1.9736, + "epoch": 0.26, + "learning_rate": 0.00018715083798882682, + "loss": 2.2115, "step": 46 }, { - "epoch": 1.88, - "learning_rate": 0.0001812, - "loss": 1.9631, + "epoch": 0.26, + "learning_rate": 0.00018687150837988828, + "loss": 2.1224, "step": 47 }, { - "epoch": 1.92, - "learning_rate": 0.0001808, - "loss": 2.1024, + "epoch": 0.27, + "learning_rate": 0.00018659217877094972, + "loss": 2.2137, "step": 48 }, { - "epoch": 1.96, - "learning_rate": 0.00018040000000000002, - "loss": 1.9895, + "epoch": 0.27, + "learning_rate": 0.00018631284916201116, + "loss": 2.2338, "step": 49 }, { - "epoch": 2.0, - "learning_rate": 0.00018, - "loss": 2.0067, + "epoch": 0.28, + "learning_rate": 0.00018603351955307266, + "loss": 2.1298, "step": 50 }, { - "epoch": 2.04, - "learning_rate": 0.0001796, - "loss": 2.0509, + "epoch": 0.28, + "learning_rate": 0.0001857541899441341, + "loss": 2.0883, "step": 51 }, { - "epoch": 2.08, - "learning_rate": 0.00017920000000000002, - "loss": 1.8795, + "epoch": 0.29, + "learning_rate": 0.00018547486033519553, + "loss": 2.1216, "step": 52 }, { - "epoch": 2.12, - "learning_rate": 0.0001788, - "loss": 2.0633, + "epoch": 0.3, + "learning_rate": 0.000185195530726257, + "loss": 2.2112, "step": 53 }, { - "epoch": 2.16, - "learning_rate": 0.0001784, - "loss": 1.9998, + "epoch": 0.3, + "learning_rate": 0.00018491620111731844, + "loss": 2.1224, "step": 54 }, { - "epoch": 2.2, - "learning_rate": 0.00017800000000000002, - "loss": 2.0675, + "epoch": 0.31, + "learning_rate": 0.0001846368715083799, + "loss": 2.2375, "step": 55 }, { - "epoch": 2.24, - "learning_rate": 0.0001776, - "loss": 2.0129, + "epoch": 0.31, + "learning_rate": 0.00018435754189944135, + "loss": 2.2235, "step": 56 }, { - "epoch": 2.28, - "learning_rate": 0.0001772, - "loss": 1.9302, + "epoch": 0.32, + "learning_rate": 0.00018407821229050279, + "loss": 2.1682, "step": 57 }, { - "epoch": 2.32, - "learning_rate": 0.00017680000000000001, - "loss": 2.0472, + "epoch": 0.32, + "learning_rate": 0.00018379888268156425, + "loss": 2.2077, "step": 58 }, { - "epoch": 2.36, - "learning_rate": 0.0001764, - "loss": 2.0031, + "epoch": 0.33, + "learning_rate": 0.00018351955307262572, + "loss": 2.1596, "step": 59 }, { - "epoch": 2.4, - "learning_rate": 0.00017600000000000002, - "loss": 2.0231, + "epoch": 0.34, + "learning_rate": 0.00018324022346368716, + "loss": 2.1311, "step": 60 }, { - "epoch": 2.44, - "learning_rate": 0.0001756, - "loss": 2.0192, + "epoch": 0.34, + "learning_rate": 0.00018296089385474862, + "loss": 2.1333, "step": 61 }, { - "epoch": 2.48, - "learning_rate": 0.0001752, - "loss": 2.1336, + "epoch": 0.35, + "learning_rate": 0.00018268156424581006, + "loss": 2.0901, "step": 62 }, { - "epoch": 2.52, - "learning_rate": 0.00017480000000000002, - "loss": 1.7753, + "epoch": 0.35, + "learning_rate": 0.00018240223463687153, + "loss": 2.1971, "step": 63 }, { - "epoch": 2.56, - "learning_rate": 0.0001744, - "loss": 2.0274, + "epoch": 0.36, + "learning_rate": 0.00018212290502793297, + "loss": 2.2602, "step": 64 }, { - "epoch": 2.6, - "learning_rate": 0.000174, - "loss": 1.8893, + "epoch": 0.36, + "learning_rate": 0.0001818435754189944, + "loss": 2.2194, "step": 65 }, { - "epoch": 2.64, - "learning_rate": 0.00017360000000000002, - "loss": 2.0341, + "epoch": 0.37, + "learning_rate": 
0.00018156424581005588, + "loss": 2.1218, "step": 66 }, { - "epoch": 2.68, - "learning_rate": 0.0001732, - "loss": 2.1081, + "epoch": 0.37, + "learning_rate": 0.00018128491620111731, + "loss": 2.2049, "step": 67 }, { - "epoch": 2.72, - "learning_rate": 0.0001728, - "loss": 1.9463, + "epoch": 0.38, + "learning_rate": 0.00018100558659217878, + "loss": 2.1521, "step": 68 }, { - "epoch": 2.76, - "learning_rate": 0.00017240000000000002, - "loss": 2.0607, + "epoch": 0.39, + "learning_rate": 0.00018072625698324025, + "loss": 2.112, "step": 69 }, { - "epoch": 2.8, - "learning_rate": 0.000172, - "loss": 1.9803, + "epoch": 0.39, + "learning_rate": 0.0001804469273743017, + "loss": 2.1906, "step": 70 }, { - "epoch": 2.84, - "learning_rate": 0.0001716, - "loss": 1.7663, + "epoch": 0.4, + "learning_rate": 0.00018016759776536313, + "loss": 2.1717, "step": 71 }, { - "epoch": 2.88, - "learning_rate": 0.00017120000000000001, - "loss": 2.1116, + "epoch": 0.4, + "learning_rate": 0.0001798882681564246, + "loss": 2.0712, "step": 72 }, { - "epoch": 2.92, - "learning_rate": 0.0001708, - "loss": 1.9039, + "epoch": 0.41, + "learning_rate": 0.00017960893854748603, + "loss": 2.141, "step": 73 }, { - "epoch": 2.96, - "learning_rate": 0.0001704, - "loss": 2.049, + "epoch": 0.41, + "learning_rate": 0.0001793296089385475, + "loss": 2.0656, "step": 74 }, { - "epoch": 3.0, - "learning_rate": 0.00017, - "loss": 1.991, + "epoch": 0.42, + "learning_rate": 0.00017905027932960894, + "loss": 2.1125, "step": 75 }, { - "epoch": 3.04, - "learning_rate": 0.0001696, - "loss": 1.8755, + "epoch": 0.42, + "learning_rate": 0.00017877094972067038, + "loss": 2.0869, "step": 76 }, { - "epoch": 3.08, - "learning_rate": 0.0001692, - "loss": 2.0367, + "epoch": 0.43, + "learning_rate": 0.00017849162011173187, + "loss": 2.2478, "step": 77 }, { - "epoch": 3.12, - "learning_rate": 0.0001688, - "loss": 1.9959, + "epoch": 0.44, + "learning_rate": 0.0001782122905027933, + "loss": 2.1535, "step": 78 }, { - "epoch": 3.16, - "learning_rate": 0.0001684, - "loss": 1.8051, + "epoch": 0.44, + "learning_rate": 0.00017793296089385475, + "loss": 2.1927, "step": 79 }, { - "epoch": 3.2, - "learning_rate": 0.000168, - "loss": 2.0446, + "epoch": 0.45, + "learning_rate": 0.00017765363128491622, + "loss": 2.1213, "step": 80 }, { - "epoch": 3.24, - "learning_rate": 0.0001676, - "loss": 1.7873, + "epoch": 0.45, + "learning_rate": 0.00017737430167597766, + "loss": 2.0981, "step": 81 }, { - "epoch": 3.28, - "learning_rate": 0.0001672, - "loss": 1.9264, + "epoch": 0.46, + "learning_rate": 0.00017709497206703912, + "loss": 2.1828, "step": 82 }, { - "epoch": 3.32, - "learning_rate": 0.0001668, - "loss": 1.8595, + "epoch": 0.46, + "learning_rate": 0.00017681564245810056, + "loss": 2.0562, "step": 83 }, { - "epoch": 3.36, - "learning_rate": 0.0001664, - "loss": 1.9992, + "epoch": 0.47, + "learning_rate": 0.000176536312849162, + "loss": 2.1334, "step": 84 }, { - "epoch": 3.4, - "learning_rate": 0.000166, - "loss": 1.8783, + "epoch": 0.47, + "learning_rate": 0.00017625698324022347, + "loss": 2.1225, "step": 85 }, { - "epoch": 3.44, - "learning_rate": 0.0001656, - "loss": 2.0216, + "epoch": 0.48, + "learning_rate": 0.00017597765363128493, + "loss": 2.2098, "step": 86 }, { - "epoch": 3.48, - "learning_rate": 0.0001652, - "loss": 1.947, + "epoch": 0.49, + "learning_rate": 0.00017569832402234637, + "loss": 2.1519, "step": 87 }, { - "epoch": 3.52, - "learning_rate": 0.0001648, - "loss": 1.9554, + "epoch": 0.49, + "learning_rate": 0.00017541899441340784, + "loss": 2.1132, "step": 88 
}, { - "epoch": 3.56, - "learning_rate": 0.0001644, - "loss": 1.8563, + "epoch": 0.5, + "learning_rate": 0.00017513966480446928, + "loss": 2.0333, "step": 89 }, { - "epoch": 3.6, - "learning_rate": 0.000164, - "loss": 1.7795, + "epoch": 0.5, + "learning_rate": 0.00017486033519553075, + "loss": 2.2764, "step": 90 }, { - "epoch": 3.64, - "learning_rate": 0.0001636, - "loss": 1.9347, + "epoch": 0.51, + "learning_rate": 0.00017458100558659218, + "loss": 2.1838, "step": 91 }, { - "epoch": 3.68, - "learning_rate": 0.0001632, - "loss": 2.0078, + "epoch": 0.51, + "learning_rate": 0.00017430167597765362, + "loss": 2.1386, "step": 92 }, { - "epoch": 3.72, - "learning_rate": 0.0001628, - "loss": 1.9964, + "epoch": 0.52, + "learning_rate": 0.0001740223463687151, + "loss": 2.1034, "step": 93 }, { - "epoch": 3.76, - "learning_rate": 0.00016240000000000002, - "loss": 1.9004, + "epoch": 0.53, + "learning_rate": 0.00017374301675977656, + "loss": 2.0346, "step": 94 }, { - "epoch": 3.8, - "learning_rate": 0.000162, - "loss": 1.9446, + "epoch": 0.53, + "learning_rate": 0.000173463687150838, + "loss": 2.0274, "step": 95 }, { - "epoch": 3.84, - "learning_rate": 0.00016160000000000002, - "loss": 1.8857, + "epoch": 0.54, + "learning_rate": 0.00017318435754189946, + "loss": 2.1036, "step": 96 }, { - "epoch": 3.88, - "learning_rate": 0.00016120000000000002, - "loss": 1.8797, + "epoch": 0.54, + "learning_rate": 0.0001729050279329609, + "loss": 2.1208, "step": 97 }, { - "epoch": 3.92, - "learning_rate": 0.0001608, - "loss": 1.7643, + "epoch": 0.55, + "learning_rate": 0.00017262569832402237, + "loss": 2.0572, "step": 98 }, { - "epoch": 3.96, - "learning_rate": 0.00016040000000000002, - "loss": 1.9724, + "epoch": 0.55, + "learning_rate": 0.0001723463687150838, + "loss": 2.1702, "step": 99 }, { - "epoch": 4.0, - "learning_rate": 0.00016, - "loss": 1.9058, + "epoch": 0.56, + "learning_rate": 0.00017206703910614525, + "loss": 2.1302, "step": 100 }, { - "epoch": 4.04, - "learning_rate": 0.0001596, - "loss": 1.8663, + "epoch": 0.56, + "learning_rate": 0.0001717877094972067, + "loss": 2.0175, "step": 101 }, { - "epoch": 4.08, - "learning_rate": 0.00015920000000000002, - "loss": 1.8457, + "epoch": 0.57, + "learning_rate": 0.00017150837988826815, + "loss": 2.1006, "step": 102 }, { - "epoch": 4.12, - "learning_rate": 0.0001588, - "loss": 1.8394, + "epoch": 0.58, + "learning_rate": 0.00017122905027932962, + "loss": 2.0662, "step": 103 }, { - "epoch": 4.16, - "learning_rate": 0.00015840000000000003, - "loss": 1.8941, + "epoch": 0.58, + "learning_rate": 0.00017094972067039109, + "loss": 1.988, "step": 104 }, { - "epoch": 4.2, - "learning_rate": 0.00015800000000000002, - "loss": 1.8601, + "epoch": 0.59, + "learning_rate": 0.00017067039106145253, + "loss": 2.1008, "step": 105 }, { - "epoch": 4.24, - "learning_rate": 0.0001576, - "loss": 1.7365, + "epoch": 0.59, + "learning_rate": 0.00017039106145251396, + "loss": 2.1482, "step": 106 }, { - "epoch": 4.28, - "learning_rate": 0.00015720000000000003, - "loss": 1.8809, + "epoch": 0.6, + "learning_rate": 0.00017011173184357543, + "loss": 2.1052, "step": 107 }, { - "epoch": 4.32, - "learning_rate": 0.00015680000000000002, - "loss": 2.0206, + "epoch": 0.6, + "learning_rate": 0.00016983240223463687, + "loss": 2.0978, "step": 108 }, { - "epoch": 4.36, - "learning_rate": 0.0001564, - "loss": 1.7776, + "epoch": 0.61, + "learning_rate": 0.00016955307262569834, + "loss": 2.1303, "step": 109 }, { - "epoch": 4.4, - "learning_rate": 0.00015600000000000002, - "loss": 1.9089, + "epoch": 0.61, + 
"learning_rate": 0.00016927374301675978, + "loss": 2.0794, "step": 110 }, { - "epoch": 4.44, - "learning_rate": 0.00015560000000000001, - "loss": 1.8464, + "epoch": 0.62, + "learning_rate": 0.00016899441340782122, + "loss": 2.1059, "step": 111 }, { - "epoch": 4.48, - "learning_rate": 0.0001552, - "loss": 1.7943, + "epoch": 0.63, + "learning_rate": 0.0001687150837988827, + "loss": 1.9642, "step": 112 }, { - "epoch": 4.52, - "learning_rate": 0.00015480000000000002, - "loss": 1.8081, + "epoch": 0.63, + "learning_rate": 0.00016843575418994415, + "loss": 2.0415, "step": 113 }, { - "epoch": 4.56, - "learning_rate": 0.0001544, - "loss": 1.734, + "epoch": 0.64, + "learning_rate": 0.0001681564245810056, + "loss": 2.0795, "step": 114 }, { - "epoch": 4.6, - "learning_rate": 0.000154, - "loss": 1.8379, + "epoch": 0.64, + "learning_rate": 0.00016787709497206705, + "loss": 2.0238, "step": 115 }, { - "epoch": 4.64, - "learning_rate": 0.00015360000000000002, - "loss": 1.6841, + "epoch": 0.65, + "learning_rate": 0.0001675977653631285, + "loss": 2.078, "step": 116 }, { - "epoch": 4.68, - "learning_rate": 0.0001532, - "loss": 1.8253, + "epoch": 0.65, + "learning_rate": 0.00016731843575418996, + "loss": 2.1362, "step": 117 }, { - "epoch": 4.72, - "learning_rate": 0.0001528, - "loss": 1.7607, + "epoch": 0.66, + "learning_rate": 0.0001670391061452514, + "loss": 2.0552, "step": 118 }, { - "epoch": 4.76, - "learning_rate": 0.00015240000000000002, - "loss": 1.8205, + "epoch": 0.66, + "learning_rate": 0.00016675977653631284, + "loss": 2.213, "step": 119 }, { - "epoch": 4.8, - "learning_rate": 0.000152, - "loss": 1.8443, + "epoch": 0.67, + "learning_rate": 0.0001664804469273743, + "loss": 1.9999, "step": 120 }, { - "epoch": 4.84, - "learning_rate": 0.0001516, - "loss": 1.8824, + "epoch": 0.68, + "learning_rate": 0.00016620111731843577, + "loss": 2.06, "step": 121 }, { - "epoch": 4.88, - "learning_rate": 0.00015120000000000002, - "loss": 1.8369, + "epoch": 0.68, + "learning_rate": 0.0001659217877094972, + "loss": 2.0177, "step": 122 }, { - "epoch": 4.92, - "learning_rate": 0.0001508, - "loss": 1.8017, + "epoch": 0.69, + "learning_rate": 0.00016564245810055868, + "loss": 2.0504, "step": 123 }, { - "epoch": 4.96, - "learning_rate": 0.0001504, - "loss": 1.7858, + "epoch": 0.69, + "learning_rate": 0.00016536312849162012, + "loss": 2.0585, "step": 124 }, { - "epoch": 5.0, - "learning_rate": 0.00015000000000000001, - "loss": 1.8366, + "epoch": 0.7, + "learning_rate": 0.00016508379888268158, + "loss": 2.0273, "step": 125 }, { - "epoch": 5.04, - "learning_rate": 0.0001496, - "loss": 1.7353, + "epoch": 0.7, + "learning_rate": 0.00016480446927374302, + "loss": 2.0549, "step": 126 }, { - "epoch": 5.08, - "learning_rate": 0.0001492, - "loss": 1.6017, + "epoch": 0.71, + "learning_rate": 0.00016452513966480446, + "loss": 2.044, "step": 127 }, { - "epoch": 5.12, - "learning_rate": 0.0001488, - "loss": 1.6645, + "epoch": 0.72, + "learning_rate": 0.00016424581005586593, + "loss": 2.0731, "step": 128 }, { - "epoch": 5.16, - "learning_rate": 0.0001484, - "loss": 1.8663, + "epoch": 0.72, + "learning_rate": 0.00016396648044692737, + "loss": 2.0568, "step": 129 }, { - "epoch": 5.2, - "learning_rate": 0.000148, - "loss": 1.7009, + "epoch": 0.73, + "learning_rate": 0.00016368715083798883, + "loss": 2.007, "step": 130 }, { - "epoch": 5.24, - "learning_rate": 0.0001476, - "loss": 1.7371, + "epoch": 0.73, + "learning_rate": 0.0001634078212290503, + "loss": 2.0707, "step": 131 }, { - "epoch": 5.28, - "learning_rate": 0.0001472, - "loss": 
1.8555, + "epoch": 0.74, + "learning_rate": 0.00016312849162011174, + "loss": 1.9793, "step": 132 }, { - "epoch": 5.32, - "learning_rate": 0.00014680000000000002, - "loss": 1.6373, + "epoch": 0.74, + "learning_rate": 0.0001628491620111732, + "loss": 2.1311, "step": 133 }, { - "epoch": 5.36, - "learning_rate": 0.0001464, - "loss": 1.7211, + "epoch": 0.75, + "learning_rate": 0.00016256983240223465, + "loss": 2.0016, "step": 134 }, { - "epoch": 5.4, - "learning_rate": 0.000146, - "loss": 1.614, + "epoch": 0.75, + "learning_rate": 0.00016229050279329609, + "loss": 1.9945, "step": 135 }, { - "epoch": 5.44, - "learning_rate": 0.00014560000000000002, - "loss": 1.8736, + "epoch": 0.76, + "learning_rate": 0.00016201117318435755, + "loss": 2.0186, "step": 136 }, { - "epoch": 5.48, - "learning_rate": 0.0001452, - "loss": 1.7229, + "epoch": 0.77, + "learning_rate": 0.000161731843575419, + "loss": 2.0971, "step": 137 }, { - "epoch": 5.52, - "learning_rate": 0.0001448, - "loss": 1.7315, + "epoch": 0.77, + "learning_rate": 0.00016145251396648046, + "loss": 2.0883, "step": 138 }, { - "epoch": 5.56, - "learning_rate": 0.0001444, - "loss": 1.7259, + "epoch": 0.78, + "learning_rate": 0.00016117318435754192, + "loss": 2.0803, "step": 139 }, { - "epoch": 5.6, - "learning_rate": 0.000144, - "loss": 1.7032, + "epoch": 0.78, + "learning_rate": 0.00016089385474860336, + "loss": 2.0617, "step": 140 }, { - "epoch": 5.64, - "learning_rate": 0.0001436, - "loss": 1.9237, + "epoch": 0.79, + "learning_rate": 0.00016061452513966483, + "loss": 2.1265, "step": 141 }, { - "epoch": 5.68, - "learning_rate": 0.0001432, - "loss": 1.7821, + "epoch": 0.79, + "learning_rate": 0.00016033519553072627, + "loss": 2.0151, "step": 142 }, { - "epoch": 5.72, - "learning_rate": 0.0001428, - "loss": 1.6081, + "epoch": 0.8, + "learning_rate": 0.0001600558659217877, + "loss": 1.996, "step": 143 }, { - "epoch": 5.76, - "learning_rate": 0.0001424, - "loss": 1.7979, + "epoch": 0.8, + "learning_rate": 0.00015977653631284918, + "loss": 2.0164, "step": 144 }, { - "epoch": 5.8, - "learning_rate": 0.000142, - "loss": 1.6675, + "epoch": 0.81, + "learning_rate": 0.00015949720670391061, + "loss": 2.0314, "step": 145 }, { - "epoch": 5.84, - "learning_rate": 0.0001416, - "loss": 1.6758, + "epoch": 0.82, + "learning_rate": 0.00015921787709497208, + "loss": 1.9501, "step": 146 }, { - "epoch": 5.88, - "learning_rate": 0.0001412, - "loss": 1.7783, + "epoch": 0.82, + "learning_rate": 0.00015893854748603352, + "loss": 2.087, "step": 147 }, { - "epoch": 5.92, - "learning_rate": 0.0001408, - "loss": 1.5935, + "epoch": 0.83, + "learning_rate": 0.000158659217877095, + "loss": 2.0262, "step": 148 }, { - "epoch": 5.96, - "learning_rate": 0.0001404, - "loss": 1.7372, + "epoch": 0.83, + "learning_rate": 0.00015837988826815643, + "loss": 2.0765, "step": 149 }, { - "epoch": 6.0, - "learning_rate": 0.00014, - "loss": 1.6308, + "epoch": 0.84, + "learning_rate": 0.0001581005586592179, + "loss": 2.105, "step": 150 }, { - "epoch": 6.04, - "learning_rate": 0.0001396, - "loss": 1.5829, + "epoch": 0.84, + "learning_rate": 0.00015782122905027933, + "loss": 1.9863, "step": 151 }, { - "epoch": 6.08, - "learning_rate": 0.0001392, - "loss": 1.6538, + "epoch": 0.85, + "learning_rate": 0.0001575418994413408, + "loss": 1.9873, "step": 152 }, { - "epoch": 6.12, - "learning_rate": 0.00013879999999999999, - "loss": 1.7624, + "epoch": 0.85, + "learning_rate": 0.00015726256983240224, + "loss": 2.0094, "step": 153 }, { - "epoch": 6.16, - "learning_rate": 0.0001384, - "loss": 1.4968, + "epoch": 
0.86, + "learning_rate": 0.00015698324022346368, + "loss": 1.9141, "step": 154 }, { - "epoch": 6.2, - "learning_rate": 0.000138, - "loss": 1.7305, + "epoch": 0.87, + "learning_rate": 0.00015670391061452514, + "loss": 1.917, "step": 155 }, { - "epoch": 6.24, - "learning_rate": 0.00013759999999999998, - "loss": 1.6536, + "epoch": 0.87, + "learning_rate": 0.00015642458100558658, + "loss": 2.109, "step": 156 }, { - "epoch": 6.28, - "learning_rate": 0.00013720000000000003, - "loss": 1.654, + "epoch": 0.88, + "learning_rate": 0.00015614525139664805, + "loss": 1.9799, "step": 157 }, { - "epoch": 6.32, - "learning_rate": 0.00013680000000000002, - "loss": 1.7163, + "epoch": 0.88, + "learning_rate": 0.00015586592178770952, + "loss": 1.9571, "step": 158 }, { - "epoch": 6.36, - "learning_rate": 0.0001364, - "loss": 1.6599, + "epoch": 0.89, + "learning_rate": 0.00015558659217877096, + "loss": 1.9931, "step": 159 }, { - "epoch": 6.4, - "learning_rate": 0.00013600000000000003, - "loss": 1.5729, + "epoch": 0.89, + "learning_rate": 0.00015530726256983242, + "loss": 2.1004, "step": 160 }, { - "epoch": 6.44, - "learning_rate": 0.00013560000000000002, - "loss": 1.5291, + "epoch": 0.9, + "learning_rate": 0.00015502793296089386, + "loss": 2.0385, "step": 161 }, { - "epoch": 6.48, - "learning_rate": 0.0001352, - "loss": 1.5666, + "epoch": 0.91, + "learning_rate": 0.0001547486033519553, + "loss": 1.9751, "step": 162 }, { - "epoch": 6.52, - "learning_rate": 0.00013480000000000002, - "loss": 1.7576, + "epoch": 0.91, + "learning_rate": 0.00015446927374301677, + "loss": 2.0544, "step": 163 }, { - "epoch": 6.56, - "learning_rate": 0.00013440000000000001, - "loss": 1.4462, + "epoch": 0.92, + "learning_rate": 0.0001541899441340782, + "loss": 2.0069, "step": 164 }, { - "epoch": 6.6, - "learning_rate": 0.000134, - "loss": 1.5659, + "epoch": 0.92, + "learning_rate": 0.00015391061452513967, + "loss": 1.9576, "step": 165 }, { - "epoch": 6.64, - "learning_rate": 0.00013360000000000002, - "loss": 1.6234, + "epoch": 0.93, + "learning_rate": 0.00015363128491620114, + "loss": 1.8991, "step": 166 }, { - "epoch": 6.68, - "learning_rate": 0.0001332, - "loss": 1.5176, + "epoch": 0.93, + "learning_rate": 0.00015335195530726258, + "loss": 1.9336, "step": 167 }, { - "epoch": 6.72, - "learning_rate": 0.0001328, - "loss": 1.667, + "epoch": 0.94, + "learning_rate": 0.00015307262569832405, + "loss": 1.9736, "step": 168 }, { - "epoch": 6.76, - "learning_rate": 0.00013240000000000002, - "loss": 1.6514, + "epoch": 0.94, + "learning_rate": 0.00015279329608938548, + "loss": 1.9702, "step": 169 }, { - "epoch": 6.8, - "learning_rate": 0.000132, - "loss": 1.7179, + "epoch": 0.95, + "learning_rate": 0.00015251396648044692, + "loss": 1.9055, "step": 170 }, { - "epoch": 6.84, - "learning_rate": 0.0001316, - "loss": 1.4373, + "epoch": 0.96, + "learning_rate": 0.0001522346368715084, + "loss": 2.0503, "step": 171 }, { - "epoch": 6.88, - "learning_rate": 0.00013120000000000002, - "loss": 1.7044, + "epoch": 0.96, + "learning_rate": 0.00015195530726256983, + "loss": 2.0039, "step": 172 }, { - "epoch": 6.92, - "learning_rate": 0.0001308, - "loss": 1.5673, + "epoch": 0.97, + "learning_rate": 0.0001516759776536313, + "loss": 1.9406, "step": 173 }, { - "epoch": 6.96, - "learning_rate": 0.0001304, - "loss": 1.6303, + "epoch": 0.97, + "learning_rate": 0.00015139664804469274, + "loss": 2.0525, "step": 174 }, { - "epoch": 7.0, - "learning_rate": 0.00013000000000000002, - "loss": 1.4828, + "epoch": 0.98, + "learning_rate": 0.0001511173184357542, + "loss": 1.9234, 
"step": 175 }, { - "epoch": 7.04, - "learning_rate": 0.0001296, - "loss": 1.6204, + "epoch": 0.98, + "learning_rate": 0.00015083798882681567, + "loss": 1.8614, "step": 176 }, { - "epoch": 7.08, - "learning_rate": 0.00012920000000000002, - "loss": 1.4955, + "epoch": 0.99, + "learning_rate": 0.0001505586592178771, + "loss": 1.9616, "step": 177 }, { - "epoch": 7.12, - "learning_rate": 0.00012880000000000001, - "loss": 1.6109, + "epoch": 0.99, + "learning_rate": 0.00015027932960893855, + "loss": 1.9509, "step": 178 }, { - "epoch": 7.16, - "learning_rate": 0.0001284, - "loss": 1.5318, + "epoch": 1.0, + "learning_rate": 0.00015000000000000001, + "loss": 1.9592, "step": 179 }, { - "epoch": 7.2, - "learning_rate": 0.00012800000000000002, - "loss": 1.5702, + "epoch": 1.01, + "learning_rate": 0.00014972067039106145, + "loss": 1.8991, "step": 180 }, { - "epoch": 7.24, - "learning_rate": 0.0001276, - "loss": 1.4598, + "epoch": 1.01, + "learning_rate": 0.00014944134078212292, + "loss": 1.9127, "step": 181 }, { - "epoch": 7.28, - "learning_rate": 0.0001272, - "loss": 1.6145, + "epoch": 1.02, + "learning_rate": 0.00014916201117318436, + "loss": 1.8982, "step": 182 }, { - "epoch": 7.32, - "learning_rate": 0.00012680000000000002, - "loss": 1.3628, + "epoch": 1.02, + "learning_rate": 0.0001488826815642458, + "loss": 1.9534, "step": 183 }, { - "epoch": 7.36, - "learning_rate": 0.0001264, - "loss": 1.4578, + "epoch": 1.03, + "learning_rate": 0.0001486033519553073, + "loss": 1.7794, "step": 184 }, { - "epoch": 7.4, - "learning_rate": 0.000126, - "loss": 1.7094, + "epoch": 1.03, + "learning_rate": 0.00014832402234636873, + "loss": 1.7958, "step": 185 }, { - "epoch": 7.44, - "learning_rate": 0.00012560000000000002, - "loss": 1.4376, + "epoch": 1.04, + "learning_rate": 0.00014804469273743017, + "loss": 1.8282, "step": 186 }, { - "epoch": 7.48, - "learning_rate": 0.0001252, - "loss": 1.3393, + "epoch": 1.04, + "learning_rate": 0.00014776536312849164, + "loss": 2.0423, "step": 187 }, { - "epoch": 7.52, - "learning_rate": 0.0001248, - "loss": 1.4273, + "epoch": 1.05, + "learning_rate": 0.00014748603351955308, + "loss": 1.9282, "step": 188 }, { - "epoch": 7.56, - "learning_rate": 0.00012440000000000002, - "loss": 1.5506, + "epoch": 1.06, + "learning_rate": 0.00014720670391061454, + "loss": 1.9072, "step": 189 }, { - "epoch": 7.6, - "learning_rate": 0.000124, - "loss": 1.4774, + "epoch": 1.06, + "learning_rate": 0.00014692737430167598, + "loss": 1.8665, "step": 190 }, { - "epoch": 7.64, - "learning_rate": 0.0001236, - "loss": 1.4632, + "epoch": 1.07, + "learning_rate": 0.00014664804469273742, + "loss": 1.9021, "step": 191 }, { - "epoch": 7.68, - "learning_rate": 0.0001232, - "loss": 1.4568, + "epoch": 1.07, + "learning_rate": 0.0001463687150837989, + "loss": 1.7308, "step": 192 }, { - "epoch": 7.72, - "learning_rate": 0.0001228, - "loss": 1.6106, + "epoch": 1.08, + "learning_rate": 0.00014608938547486035, + "loss": 1.9165, "step": 193 }, { - "epoch": 7.76, - "learning_rate": 0.0001224, - "loss": 1.577, + "epoch": 1.08, + "learning_rate": 0.0001458100558659218, + "loss": 1.842, "step": 194 }, { - "epoch": 7.8, - "learning_rate": 0.000122, - "loss": 1.4805, + "epoch": 1.09, + "learning_rate": 0.00014553072625698326, + "loss": 1.9128, "step": 195 }, { - "epoch": 7.84, - "learning_rate": 0.0001216, - "loss": 1.4546, + "epoch": 1.09, + "learning_rate": 0.0001452513966480447, + "loss": 1.8005, "step": 196 }, { - "epoch": 7.88, - "learning_rate": 0.0001212, - "loss": 1.5164, + "epoch": 1.1, + "learning_rate": 
0.00014497206703910614, + "loss": 1.8547, "step": 197 }, { - "epoch": 7.92, - "learning_rate": 0.0001208, - "loss": 1.5061, + "epoch": 1.11, + "learning_rate": 0.0001446927374301676, + "loss": 1.9042, "step": 198 }, { - "epoch": 7.96, - "learning_rate": 0.0001204, - "loss": 1.5312, + "epoch": 1.11, + "learning_rate": 0.00014441340782122905, + "loss": 1.8609, "step": 199 }, { - "epoch": 8.0, - "learning_rate": 0.00012, - "loss": 1.4463, + "epoch": 1.12, + "learning_rate": 0.0001441340782122905, + "loss": 1.9591, "step": 200 }, { - "epoch": 8.04, - "learning_rate": 0.00011960000000000001, - "loss": 1.4207, + "epoch": 1.12, + "learning_rate": 0.00014385474860335195, + "loss": 1.8722, "step": 201 }, { - "epoch": 8.08, - "learning_rate": 0.0001192, - "loss": 1.4688, + "epoch": 1.13, + "learning_rate": 0.00014357541899441342, + "loss": 1.8535, "step": 202 }, { - "epoch": 8.12, - "learning_rate": 0.0001188, - "loss": 1.4445, + "epoch": 1.13, + "learning_rate": 0.00014329608938547488, + "loss": 1.8676, "step": 203 }, { - "epoch": 8.16, - "learning_rate": 0.0001184, - "loss": 1.4158, + "epoch": 1.14, + "learning_rate": 0.00014301675977653632, + "loss": 1.8976, "step": 204 }, { - "epoch": 8.2, - "learning_rate": 0.000118, - "loss": 1.3483, + "epoch": 1.15, + "learning_rate": 0.00014273743016759776, + "loss": 1.7723, "step": 205 }, { - "epoch": 8.24, - "learning_rate": 0.0001176, - "loss": 1.4117, + "epoch": 1.15, + "learning_rate": 0.00014245810055865923, + "loss": 1.8165, "step": 206 }, { - "epoch": 8.28, - "learning_rate": 0.0001172, - "loss": 1.4267, + "epoch": 1.16, + "learning_rate": 0.00014217877094972067, + "loss": 1.7811, "step": 207 }, { - "epoch": 8.32, - "learning_rate": 0.00011679999999999999, - "loss": 1.5547, + "epoch": 1.16, + "learning_rate": 0.00014189944134078214, + "loss": 1.908, "step": 208 }, { - "epoch": 8.36, - "learning_rate": 0.0001164, - "loss": 1.4883, + "epoch": 1.17, + "learning_rate": 0.00014162011173184357, + "loss": 1.7663, "step": 209 }, { - "epoch": 8.4, - "learning_rate": 0.000116, - "loss": 1.414, + "epoch": 1.17, + "learning_rate": 0.00014134078212290501, + "loss": 1.6779, "step": 210 }, { - "epoch": 8.44, - "learning_rate": 0.00011559999999999999, - "loss": 1.3254, + "epoch": 1.18, + "learning_rate": 0.0001410614525139665, + "loss": 1.9039, "step": 211 }, { - "epoch": 8.48, - "learning_rate": 0.0001152, - "loss": 1.22, + "epoch": 1.18, + "learning_rate": 0.00014078212290502795, + "loss": 1.8033, "step": 212 }, { - "epoch": 8.52, - "learning_rate": 0.0001148, - "loss": 1.3224, + "epoch": 1.19, + "learning_rate": 0.00014050279329608939, + "loss": 1.8251, "step": 213 }, { - "epoch": 8.56, - "learning_rate": 0.0001144, - "loss": 1.6158, + "epoch": 1.2, + "learning_rate": 0.00014022346368715085, + "loss": 1.8505, "step": 214 }, { - "epoch": 8.6, - "learning_rate": 0.00011399999999999999, - "loss": 1.2817, + "epoch": 1.2, + "learning_rate": 0.0001399441340782123, + "loss": 1.8147, "step": 215 }, { - "epoch": 8.64, - "learning_rate": 0.0001136, - "loss": 1.4077, + "epoch": 1.21, + "learning_rate": 0.00013966480446927376, + "loss": 1.8419, "step": 216 }, { - "epoch": 8.68, - "learning_rate": 0.0001132, - "loss": 1.5311, + "epoch": 1.21, + "learning_rate": 0.0001393854748603352, + "loss": 1.8401, "step": 217 }, { - "epoch": 8.72, - "learning_rate": 0.00011279999999999999, - "loss": 1.5289, + "epoch": 1.22, + "learning_rate": 0.00013910614525139664, + "loss": 1.8912, "step": 218 }, { - "epoch": 8.76, - "learning_rate": 0.00011240000000000002, - "loss": 1.4576, + "epoch": 
1.22, + "learning_rate": 0.0001388268156424581, + "loss": 1.7548, "step": 219 }, { - "epoch": 8.8, - "learning_rate": 0.00011200000000000001, - "loss": 1.2063, + "epoch": 1.23, + "learning_rate": 0.00013854748603351957, + "loss": 1.8741, "step": 220 }, { - "epoch": 8.84, - "learning_rate": 0.00011160000000000002, - "loss": 1.3543, + "epoch": 1.23, + "learning_rate": 0.000138268156424581, + "loss": 1.9549, "step": 221 }, { - "epoch": 8.88, - "learning_rate": 0.00011120000000000002, - "loss": 1.4352, + "epoch": 1.24, + "learning_rate": 0.00013798882681564248, + "loss": 1.9093, "step": 222 }, { - "epoch": 8.92, - "learning_rate": 0.00011080000000000001, - "loss": 1.3058, + "epoch": 1.25, + "learning_rate": 0.00013770949720670392, + "loss": 1.7896, "step": 223 }, { - "epoch": 8.96, - "learning_rate": 0.00011040000000000001, - "loss": 1.3475, + "epoch": 1.25, + "learning_rate": 0.00013743016759776538, + "loss": 1.8491, "step": 224 }, { - "epoch": 9.0, - "learning_rate": 0.00011000000000000002, - "loss": 1.1293, + "epoch": 1.26, + "learning_rate": 0.00013715083798882682, + "loss": 1.7851, "step": 225 }, { - "epoch": 9.04, - "learning_rate": 0.00010960000000000001, - "loss": 1.1671, + "epoch": 1.26, + "learning_rate": 0.00013687150837988826, + "loss": 1.6992, "step": 226 }, { - "epoch": 9.08, - "learning_rate": 0.00010920000000000001, - "loss": 1.4726, + "epoch": 1.27, + "learning_rate": 0.00013659217877094973, + "loss": 1.9765, "step": 227 }, { - "epoch": 9.12, - "learning_rate": 0.00010880000000000002, - "loss": 1.3986, + "epoch": 1.27, + "learning_rate": 0.00013631284916201117, + "loss": 1.8179, "step": 228 }, { - "epoch": 9.16, - "learning_rate": 0.00010840000000000002, - "loss": 1.2731, + "epoch": 1.28, + "learning_rate": 0.00013603351955307263, + "loss": 1.8548, "step": 229 }, { - "epoch": 9.2, - "learning_rate": 0.00010800000000000001, - "loss": 1.2212, + "epoch": 1.28, + "learning_rate": 0.0001357541899441341, + "loss": 1.8843, "step": 230 }, { - "epoch": 9.24, - "learning_rate": 0.00010760000000000001, - "loss": 1.3067, + "epoch": 1.29, + "learning_rate": 0.00013547486033519554, + "loss": 1.9105, "step": 231 }, { - "epoch": 9.28, - "learning_rate": 0.00010720000000000002, - "loss": 1.278, + "epoch": 1.3, + "learning_rate": 0.00013519553072625698, + "loss": 1.8748, "step": 232 }, { - "epoch": 9.32, - "learning_rate": 0.00010680000000000001, - "loss": 1.4141, + "epoch": 1.3, + "learning_rate": 0.00013491620111731844, + "loss": 1.7976, "step": 233 }, { - "epoch": 9.36, - "learning_rate": 0.00010640000000000001, - "loss": 1.2885, + "epoch": 1.31, + "learning_rate": 0.00013463687150837988, + "loss": 1.7369, "step": 234 }, { - "epoch": 9.4, - "learning_rate": 0.00010600000000000002, - "loss": 1.466, + "epoch": 1.31, + "learning_rate": 0.00013435754189944135, + "loss": 1.7808, "step": 235 }, { - "epoch": 9.44, - "learning_rate": 0.0001056, - "loss": 1.2979, + "epoch": 1.32, + "learning_rate": 0.0001340782122905028, + "loss": 1.8385, "step": 236 }, { - "epoch": 9.48, - "learning_rate": 0.00010520000000000001, - "loss": 1.1622, + "epoch": 1.32, + "learning_rate": 0.00013379888268156423, + "loss": 1.8295, "step": 237 }, { - "epoch": 9.52, - "learning_rate": 0.00010480000000000001, - "loss": 1.2504, + "epoch": 1.33, + "learning_rate": 0.00013351955307262572, + "loss": 1.757, "step": 238 }, { - "epoch": 9.56, - "learning_rate": 0.0001044, - "loss": 1.228, + "epoch": 1.34, + "learning_rate": 0.00013324022346368716, + "loss": 1.7904, "step": 239 }, { - "epoch": 9.6, - "learning_rate": 
0.00010400000000000001, - "loss": 1.3607, + "epoch": 1.34, + "learning_rate": 0.0001329608938547486, + "loss": 1.7632, "step": 240 }, { - "epoch": 9.64, - "learning_rate": 0.00010360000000000001, - "loss": 1.1921, + "epoch": 1.35, + "learning_rate": 0.00013268156424581007, + "loss": 1.7867, "step": 241 }, { - "epoch": 9.68, - "learning_rate": 0.0001032, - "loss": 1.2114, + "epoch": 1.35, + "learning_rate": 0.0001324022346368715, + "loss": 1.8259, "step": 242 }, { - "epoch": 9.72, - "learning_rate": 0.0001028, - "loss": 1.2385, + "epoch": 1.36, + "learning_rate": 0.00013212290502793297, + "loss": 1.6655, "step": 243 }, { - "epoch": 9.76, - "learning_rate": 0.00010240000000000001, - "loss": 1.3227, + "epoch": 1.36, + "learning_rate": 0.0001318435754189944, + "loss": 1.6848, "step": 244 }, { - "epoch": 9.8, - "learning_rate": 0.00010200000000000001, - "loss": 1.3935, + "epoch": 1.37, + "learning_rate": 0.00013156424581005585, + "loss": 1.7931, "step": 245 }, { - "epoch": 9.84, - "learning_rate": 0.0001016, - "loss": 1.2209, + "epoch": 1.37, + "learning_rate": 0.00013128491620111732, + "loss": 1.7868, "step": 246 }, { - "epoch": 9.88, - "learning_rate": 0.00010120000000000001, - "loss": 1.1541, + "epoch": 1.38, + "learning_rate": 0.00013100558659217879, + "loss": 1.7732, "step": 247 }, { - "epoch": 9.92, - "learning_rate": 0.00010080000000000001, - "loss": 1.4049, + "epoch": 1.39, + "learning_rate": 0.00013072625698324022, + "loss": 1.7851, "step": 248 }, { - "epoch": 9.96, - "learning_rate": 0.0001004, - "loss": 1.13, + "epoch": 1.39, + "learning_rate": 0.0001304469273743017, + "loss": 1.7406, "step": 249 }, { - "epoch": 10.0, - "learning_rate": 0.0001, - "loss": 1.3783, + "epoch": 1.4, + "learning_rate": 0.00013016759776536313, + "loss": 1.5853, "step": 250 }, { - "epoch": 10.04, - "learning_rate": 9.960000000000001e-05, - "loss": 1.3207, + "epoch": 1.4, + "learning_rate": 0.0001298882681564246, + "loss": 1.8271, "step": 251 }, { - "epoch": 10.08, - "learning_rate": 9.92e-05, - "loss": 1.2697, + "epoch": 1.41, + "learning_rate": 0.00012960893854748604, + "loss": 1.6054, "step": 252 }, { - "epoch": 10.12, - "learning_rate": 9.88e-05, - "loss": 1.091, + "epoch": 1.41, + "learning_rate": 0.00012932960893854748, + "loss": 1.6884, "step": 253 }, { - "epoch": 10.16, - "learning_rate": 9.84e-05, - "loss": 1.0463, + "epoch": 1.42, + "learning_rate": 0.00012905027932960894, + "loss": 1.7333, "step": 254 }, { - "epoch": 10.2, - "learning_rate": 9.8e-05, - "loss": 1.3686, + "epoch": 1.42, + "learning_rate": 0.00012877094972067038, + "loss": 1.803, "step": 255 }, { - "epoch": 10.24, - "learning_rate": 9.76e-05, - "loss": 1.3109, + "epoch": 1.43, + "learning_rate": 0.00012849162011173185, + "loss": 1.63, "step": 256 }, { - "epoch": 10.28, - "learning_rate": 9.72e-05, - "loss": 1.2338, + "epoch": 1.44, + "learning_rate": 0.00012821229050279331, + "loss": 1.8023, "step": 257 }, { - "epoch": 10.32, - "learning_rate": 9.680000000000001e-05, - "loss": 1.12, + "epoch": 1.44, + "learning_rate": 0.00012793296089385475, + "loss": 1.762, "step": 258 }, { - "epoch": 10.36, - "learning_rate": 9.64e-05, - "loss": 1.1864, + "epoch": 1.45, + "learning_rate": 0.00012765363128491622, + "loss": 1.7565, "step": 259 }, { - "epoch": 10.4, - "learning_rate": 9.6e-05, - "loss": 1.1062, + "epoch": 1.45, + "learning_rate": 0.00012737430167597766, + "loss": 1.5937, "step": 260 }, { - "epoch": 10.44, - "learning_rate": 9.56e-05, - "loss": 1.2924, + "epoch": 1.46, + "learning_rate": 0.0001270949720670391, + "loss": 1.7208, "step": 
261 }, { - "epoch": 10.48, - "learning_rate": 9.52e-05, - "loss": 1.1931, + "epoch": 1.46, + "learning_rate": 0.00012681564245810057, + "loss": 1.8097, "step": 262 }, { - "epoch": 10.52, - "learning_rate": 9.48e-05, - "loss": 1.2366, + "epoch": 1.47, + "learning_rate": 0.000126536312849162, + "loss": 1.7042, "step": 263 }, { - "epoch": 10.56, - "learning_rate": 9.44e-05, - "loss": 1.1589, + "epoch": 1.47, + "learning_rate": 0.00012625698324022347, + "loss": 1.7892, "step": 264 }, { - "epoch": 10.6, - "learning_rate": 9.4e-05, - "loss": 1.1197, + "epoch": 1.48, + "learning_rate": 0.00012597765363128494, + "loss": 1.752, "step": 265 }, { - "epoch": 10.64, - "learning_rate": 9.360000000000001e-05, - "loss": 1.3072, + "epoch": 1.49, + "learning_rate": 0.00012569832402234638, + "loss": 1.7333, "step": 266 }, { - "epoch": 10.68, - "learning_rate": 9.320000000000002e-05, - "loss": 1.1173, + "epoch": 1.49, + "learning_rate": 0.00012541899441340784, + "loss": 1.7492, "step": 267 }, { - "epoch": 10.72, - "learning_rate": 9.28e-05, - "loss": 1.0378, + "epoch": 1.5, + "learning_rate": 0.00012513966480446928, + "loss": 1.818, "step": 268 }, { - "epoch": 10.76, - "learning_rate": 9.240000000000001e-05, - "loss": 1.1746, + "epoch": 1.5, + "learning_rate": 0.00012486033519553072, + "loss": 1.7146, "step": 269 }, { - "epoch": 10.8, - "learning_rate": 9.200000000000001e-05, - "loss": 1.1144, + "epoch": 1.51, + "learning_rate": 0.0001245810055865922, + "loss": 1.7958, "step": 270 }, { - "epoch": 10.84, - "learning_rate": 9.16e-05, - "loss": 1.1862, + "epoch": 1.51, + "learning_rate": 0.00012430167597765363, + "loss": 1.7212, "step": 271 }, { - "epoch": 10.88, - "learning_rate": 9.120000000000001e-05, - "loss": 1.0816, + "epoch": 1.52, + "learning_rate": 0.0001240223463687151, + "loss": 1.8835, "step": 272 }, { - "epoch": 10.92, - "learning_rate": 9.080000000000001e-05, - "loss": 1.2288, + "epoch": 1.53, + "learning_rate": 0.00012374301675977656, + "loss": 1.7486, "step": 273 }, { - "epoch": 10.96, - "learning_rate": 9.04e-05, - "loss": 1.3031, + "epoch": 1.53, + "learning_rate": 0.000123463687150838, + "loss": 1.8103, "step": 274 }, { - "epoch": 11.0, - "learning_rate": 9e-05, - "loss": 1.1646, + "epoch": 1.54, + "learning_rate": 0.00012318435754189944, + "loss": 1.7557, "step": 275 }, { - "epoch": 11.04, - "learning_rate": 8.960000000000001e-05, - "loss": 1.2088, + "epoch": 1.54, + "learning_rate": 0.0001229050279329609, + "loss": 1.6198, "step": 276 }, { - "epoch": 11.08, - "learning_rate": 8.92e-05, - "loss": 1.1465, + "epoch": 1.55, + "learning_rate": 0.00012262569832402235, + "loss": 1.6971, "step": 277 }, { - "epoch": 11.12, - "learning_rate": 8.88e-05, - "loss": 1.0313, + "epoch": 1.55, + "learning_rate": 0.0001223463687150838, + "loss": 1.668, "step": 278 }, { - "epoch": 11.16, - "learning_rate": 8.840000000000001e-05, - "loss": 0.9662, + "epoch": 1.56, + "learning_rate": 0.00012206703910614525, + "loss": 1.8795, "step": 279 }, { - "epoch": 11.2, - "learning_rate": 8.800000000000001e-05, - "loss": 1.051, + "epoch": 1.56, + "learning_rate": 0.0001217877094972067, + "loss": 1.6412, "step": 280 }, { - "epoch": 11.24, - "learning_rate": 8.76e-05, - "loss": 1.1708, + "epoch": 1.57, + "learning_rate": 0.00012150837988826816, + "loss": 1.7497, "step": 281 }, { - "epoch": 11.28, - "learning_rate": 8.72e-05, - "loss": 1.0236, + "epoch": 1.58, + "learning_rate": 0.00012122905027932962, + "loss": 1.5577, "step": 282 }, { - "epoch": 11.32, - "learning_rate": 8.680000000000001e-05, - "loss": 1.1359, + "epoch": 
1.58, + "learning_rate": 0.00012094972067039108, + "loss": 1.8049, "step": 283 }, { - "epoch": 11.36, - "learning_rate": 8.64e-05, - "loss": 1.1398, + "epoch": 1.59, + "learning_rate": 0.00012067039106145253, + "loss": 1.6834, "step": 284 }, { - "epoch": 11.4, - "learning_rate": 8.6e-05, - "loss": 1.0213, + "epoch": 1.59, + "learning_rate": 0.00012039106145251397, + "loss": 1.7978, "step": 285 }, { - "epoch": 11.44, - "learning_rate": 8.560000000000001e-05, - "loss": 1.1788, + "epoch": 1.6, + "learning_rate": 0.00012011173184357542, + "loss": 1.6558, "step": 286 }, { - "epoch": 11.48, - "learning_rate": 8.52e-05, - "loss": 1.0387, + "epoch": 1.6, + "learning_rate": 0.00011983240223463687, + "loss": 1.7561, "step": 287 }, { - "epoch": 11.52, - "learning_rate": 8.48e-05, - "loss": 0.9476, + "epoch": 1.61, + "learning_rate": 0.00011955307262569833, + "loss": 1.6807, "step": 288 }, { - "epoch": 11.56, - "learning_rate": 8.44e-05, - "loss": 1.1954, + "epoch": 1.61, + "learning_rate": 0.00011927374301675978, + "loss": 1.6937, "step": 289 }, { - "epoch": 11.6, - "learning_rate": 8.4e-05, - "loss": 1.0309, + "epoch": 1.62, + "learning_rate": 0.00011899441340782122, + "loss": 1.6591, "step": 290 }, { - "epoch": 11.64, - "learning_rate": 8.36e-05, - "loss": 1.1321, + "epoch": 1.63, + "learning_rate": 0.0001187150837988827, + "loss": 1.6771, "step": 291 }, { - "epoch": 11.68, - "learning_rate": 8.32e-05, - "loss": 1.0914, + "epoch": 1.63, + "learning_rate": 0.00011843575418994415, + "loss": 1.7743, "step": 292 }, { - "epoch": 11.72, - "learning_rate": 8.28e-05, - "loss": 1.222, + "epoch": 1.64, + "learning_rate": 0.00011815642458100559, + "loss": 1.5857, "step": 293 }, { - "epoch": 11.76, - "learning_rate": 8.24e-05, - "loss": 1.0127, + "epoch": 1.64, + "learning_rate": 0.00011787709497206705, + "loss": 1.6999, "step": 294 }, { - "epoch": 11.8, - "learning_rate": 8.2e-05, - "loss": 1.1977, + "epoch": 1.65, + "learning_rate": 0.0001175977653631285, + "loss": 1.5661, "step": 295 }, { - "epoch": 11.84, - "learning_rate": 8.16e-05, - "loss": 1.122, + "epoch": 1.65, + "learning_rate": 0.00011731843575418995, + "loss": 1.7235, "step": 296 }, { - "epoch": 11.88, - "learning_rate": 8.120000000000001e-05, - "loss": 1.1543, + "epoch": 1.66, + "learning_rate": 0.0001170391061452514, + "loss": 1.607, "step": 297 }, { - "epoch": 11.92, - "learning_rate": 8.080000000000001e-05, - "loss": 1.1767, + "epoch": 1.66, + "learning_rate": 0.00011675977653631284, + "loss": 1.68, "step": 298 }, { - "epoch": 11.96, - "learning_rate": 8.04e-05, - "loss": 1.1341, + "epoch": 1.67, + "learning_rate": 0.0001164804469273743, + "loss": 1.6938, "step": 299 }, { - "epoch": 12.0, - "learning_rate": 8e-05, - "loss": 1.0107, + "epoch": 1.68, + "learning_rate": 0.00011620111731843578, + "loss": 1.6315, "step": 300 }, { - "epoch": 12.04, - "learning_rate": 7.960000000000001e-05, - "loss": 1.1455, + "epoch": 1.68, + "learning_rate": 0.00011592178770949722, + "loss": 1.6802, "step": 301 }, { - "epoch": 12.08, - "learning_rate": 7.920000000000001e-05, - "loss": 1.0091, + "epoch": 1.69, + "learning_rate": 0.00011564245810055867, + "loss": 1.7174, "step": 302 }, { - "epoch": 12.12, - "learning_rate": 7.88e-05, - "loss": 0.9888, + "epoch": 1.69, + "learning_rate": 0.00011536312849162012, + "loss": 1.5212, "step": 303 }, { - "epoch": 12.16, - "learning_rate": 7.840000000000001e-05, - "loss": 0.9894, + "epoch": 1.7, + "learning_rate": 0.00011508379888268157, + "loss": 1.5808, "step": 304 }, { - "epoch": 12.2, - "learning_rate": 
7.800000000000001e-05, - "loss": 1.0095, + "epoch": 1.7, + "learning_rate": 0.00011480446927374303, + "loss": 1.6152, "step": 305 }, { - "epoch": 12.24, - "learning_rate": 7.76e-05, - "loss": 1.1163, + "epoch": 1.71, + "learning_rate": 0.00011452513966480447, + "loss": 1.5435, "step": 306 }, { - "epoch": 12.28, - "learning_rate": 7.72e-05, - "loss": 1.0345, + "epoch": 1.72, + "learning_rate": 0.00011424581005586592, + "loss": 1.6603, "step": 307 }, { - "epoch": 12.32, - "learning_rate": 7.680000000000001e-05, - "loss": 0.9662, + "epoch": 1.72, + "learning_rate": 0.00011396648044692737, + "loss": 1.685, "step": 308 }, { - "epoch": 12.36, - "learning_rate": 7.64e-05, - "loss": 0.9064, + "epoch": 1.73, + "learning_rate": 0.00011368715083798884, + "loss": 1.6002, "step": 309 }, { - "epoch": 12.4, - "learning_rate": 7.6e-05, - "loss": 0.9448, + "epoch": 1.73, + "learning_rate": 0.00011340782122905029, + "loss": 1.6046, "step": 310 }, { - "epoch": 12.44, - "learning_rate": 7.560000000000001e-05, - "loss": 0.9509, + "epoch": 1.74, + "learning_rate": 0.00011312849162011174, + "loss": 1.5969, "step": 311 }, { - "epoch": 12.48, - "learning_rate": 7.52e-05, - "loss": 1.123, + "epoch": 1.74, + "learning_rate": 0.0001128491620111732, + "loss": 1.5845, "step": 312 }, { - "epoch": 12.52, - "learning_rate": 7.48e-05, - "loss": 1.1011, + "epoch": 1.75, + "learning_rate": 0.00011256983240223464, + "loss": 1.8183, "step": 313 }, { - "epoch": 12.56, - "learning_rate": 7.44e-05, - "loss": 0.9398, + "epoch": 1.75, + "learning_rate": 0.00011229050279329609, + "loss": 1.6953, "step": 314 }, { - "epoch": 12.6, - "learning_rate": 7.4e-05, - "loss": 1.0498, + "epoch": 1.76, + "learning_rate": 0.00011201117318435754, + "loss": 1.7787, "step": 315 }, { - "epoch": 12.64, - "learning_rate": 7.36e-05, - "loss": 1.0442, + "epoch": 1.77, + "learning_rate": 0.000111731843575419, + "loss": 1.6422, "step": 316 }, { - "epoch": 12.68, - "learning_rate": 7.32e-05, - "loss": 0.9599, + "epoch": 1.77, + "learning_rate": 0.00011145251396648045, + "loss": 1.7034, "step": 317 }, { - "epoch": 12.72, - "learning_rate": 7.280000000000001e-05, - "loss": 0.8778, + "epoch": 1.78, + "learning_rate": 0.00011117318435754192, + "loss": 1.7301, "step": 318 }, { - "epoch": 12.76, - "learning_rate": 7.24e-05, - "loss": 0.8727, + "epoch": 1.78, + "learning_rate": 0.00011089385474860337, + "loss": 1.7084, "step": 319 }, { - "epoch": 12.8, - "learning_rate": 7.2e-05, - "loss": 1.0189, + "epoch": 1.79, + "learning_rate": 0.00011061452513966482, + "loss": 1.772, "step": 320 }, { - "epoch": 12.84, - "learning_rate": 7.16e-05, - "loss": 1.0543, + "epoch": 1.79, + "learning_rate": 0.00011033519553072626, + "loss": 1.5733, "step": 321 }, { - "epoch": 12.88, - "learning_rate": 7.12e-05, - "loss": 1.1282, + "epoch": 1.8, + "learning_rate": 0.00011005586592178771, + "loss": 1.6423, "step": 322 }, { - "epoch": 12.92, - "learning_rate": 7.08e-05, - "loss": 1.1018, + "epoch": 1.8, + "learning_rate": 0.00010977653631284917, + "loss": 1.5809, "step": 323 }, { - "epoch": 12.96, - "learning_rate": 7.04e-05, - "loss": 1.1827, + "epoch": 1.81, + "learning_rate": 0.00010949720670391062, + "loss": 1.6781, "step": 324 }, { - "epoch": 13.0, - "learning_rate": 7e-05, - "loss": 1.108, + "epoch": 1.82, + "learning_rate": 0.00010921787709497207, + "loss": 1.6788, "step": 325 }, { - "epoch": 13.04, - "learning_rate": 6.96e-05, - "loss": 0.9618, + "epoch": 1.82, + "learning_rate": 0.00010893854748603351, + "loss": 1.6346, "step": 326 }, { - "epoch": 13.08, - "learning_rate": 
6.92e-05, - "loss": 1.0666, + "epoch": 1.83, + "learning_rate": 0.00010865921787709499, + "loss": 1.6634, "step": 327 }, { - "epoch": 13.12, - "learning_rate": 6.879999999999999e-05, - "loss": 0.9973, + "epoch": 1.83, + "learning_rate": 0.00010837988826815643, + "loss": 1.7561, "step": 328 }, { - "epoch": 13.16, - "learning_rate": 6.840000000000001e-05, - "loss": 1.0103, + "epoch": 1.84, + "learning_rate": 0.00010810055865921788, + "loss": 1.66, "step": 329 }, { - "epoch": 13.2, - "learning_rate": 6.800000000000001e-05, - "loss": 0.9974, + "epoch": 1.84, + "learning_rate": 0.00010782122905027934, + "loss": 1.7298, "step": 330 }, { - "epoch": 13.24, - "learning_rate": 6.76e-05, - "loss": 1.2283, + "epoch": 1.85, + "learning_rate": 0.00010754189944134079, + "loss": 1.6893, "step": 331 }, { - "epoch": 13.28, - "learning_rate": 6.720000000000001e-05, - "loss": 0.7485, + "epoch": 1.85, + "learning_rate": 0.00010726256983240224, + "loss": 1.7631, "step": 332 }, { - "epoch": 13.32, - "learning_rate": 6.680000000000001e-05, - "loss": 0.8072, + "epoch": 1.86, + "learning_rate": 0.00010698324022346368, + "loss": 1.6633, "step": 333 }, { - "epoch": 13.36, - "learning_rate": 6.64e-05, - "loss": 1.1106, + "epoch": 1.87, + "learning_rate": 0.00010670391061452513, + "loss": 1.5388, "step": 334 }, { - "epoch": 13.4, - "learning_rate": 6.6e-05, - "loss": 0.8216, + "epoch": 1.87, + "learning_rate": 0.00010642458100558659, + "loss": 1.6718, "step": 335 }, { - "epoch": 13.44, - "learning_rate": 6.560000000000001e-05, - "loss": 0.959, + "epoch": 1.88, + "learning_rate": 0.00010614525139664805, + "loss": 1.5536, "step": 336 }, { - "epoch": 13.48, - "learning_rate": 6.52e-05, - "loss": 1.0767, + "epoch": 1.88, + "learning_rate": 0.00010586592178770951, + "loss": 1.6483, "step": 337 }, { - "epoch": 13.52, - "learning_rate": 6.48e-05, - "loss": 1.0395, + "epoch": 1.89, + "learning_rate": 0.00010558659217877096, + "loss": 1.5774, "step": 338 }, { - "epoch": 13.56, - "learning_rate": 6.440000000000001e-05, - "loss": 0.9102, + "epoch": 1.89, + "learning_rate": 0.00010530726256983241, + "loss": 1.6366, "step": 339 }, { - "epoch": 13.6, - "learning_rate": 6.400000000000001e-05, - "loss": 0.8875, + "epoch": 1.9, + "learning_rate": 0.00010502793296089387, + "loss": 1.5567, "step": 340 }, { - "epoch": 13.64, - "learning_rate": 6.36e-05, - "loss": 1.0005, + "epoch": 1.91, + "learning_rate": 0.0001047486033519553, + "loss": 1.5323, "step": 341 }, { - "epoch": 13.68, - "learning_rate": 6.32e-05, - "loss": 0.9208, + "epoch": 1.91, + "learning_rate": 0.00010446927374301676, + "loss": 1.4608, "step": 342 }, { - "epoch": 13.72, - "learning_rate": 6.280000000000001e-05, - "loss": 0.8943, + "epoch": 1.92, + "learning_rate": 0.00010418994413407821, + "loss": 1.5933, "step": 343 }, { - "epoch": 13.76, - "learning_rate": 6.24e-05, - "loss": 0.8441, + "epoch": 1.92, + "learning_rate": 0.00010391061452513966, + "loss": 1.6625, "step": 344 }, { - "epoch": 13.8, - "learning_rate": 6.2e-05, - "loss": 0.9953, + "epoch": 1.93, + "learning_rate": 0.00010363128491620113, + "loss": 1.7236, "step": 345 }, { - "epoch": 13.84, - "learning_rate": 6.16e-05, - "loss": 0.796, + "epoch": 1.93, + "learning_rate": 0.00010335195530726258, + "loss": 1.759, "step": 346 }, { - "epoch": 13.88, - "learning_rate": 6.12e-05, - "loss": 1.0745, + "epoch": 1.94, + "learning_rate": 0.00010307262569832404, + "loss": 1.7248, "step": 347 }, { - "epoch": 13.92, - "learning_rate": 6.08e-05, - "loss": 0.9898, + "epoch": 1.94, + "learning_rate": 0.00010279329608938548, + 
"loss": 1.5144, "step": 348 }, { - "epoch": 13.96, - "learning_rate": 6.04e-05, - "loss": 0.9559, + "epoch": 1.95, + "learning_rate": 0.00010251396648044693, + "loss": 1.6905, "step": 349 }, { - "epoch": 14.0, - "learning_rate": 6e-05, - "loss": 0.8236, + "epoch": 1.96, + "learning_rate": 0.00010223463687150838, + "loss": 1.6119, "step": 350 }, { - "epoch": 14.04, - "learning_rate": 5.96e-05, - "loss": 0.7318, + "epoch": 1.96, + "learning_rate": 0.00010195530726256983, + "loss": 1.5464, "step": 351 }, { - "epoch": 14.08, - "learning_rate": 5.92e-05, - "loss": 0.8994, + "epoch": 1.97, + "learning_rate": 0.00010167597765363129, + "loss": 1.6901, "step": 352 }, { - "epoch": 14.12, - "learning_rate": 5.88e-05, - "loss": 0.9707, + "epoch": 1.97, + "learning_rate": 0.00010139664804469273, + "loss": 1.3511, "step": 353 }, { - "epoch": 14.16, - "learning_rate": 5.8399999999999997e-05, - "loss": 0.806, + "epoch": 1.98, + "learning_rate": 0.0001011173184357542, + "loss": 1.5434, "step": 354 }, { - "epoch": 14.2, - "learning_rate": 5.8e-05, - "loss": 1.0525, + "epoch": 1.98, + "learning_rate": 0.00010083798882681566, + "loss": 1.5891, "step": 355 }, { - "epoch": 14.24, - "learning_rate": 5.76e-05, - "loss": 0.7589, + "epoch": 1.99, + "learning_rate": 0.0001005586592178771, + "loss": 1.6658, "step": 356 }, { - "epoch": 14.28, - "learning_rate": 5.72e-05, - "loss": 0.7703, + "epoch": 1.99, + "learning_rate": 0.00010027932960893855, + "loss": 1.5657, "step": 357 }, { - "epoch": 14.32, - "learning_rate": 5.68e-05, - "loss": 0.9869, + "epoch": 2.0, + "learning_rate": 0.0001, + "loss": 1.7005, "step": 358 }, { - "epoch": 14.36, - "learning_rate": 5.6399999999999995e-05, - "loss": 0.9673, + "epoch": 2.01, + "learning_rate": 9.972067039106146e-05, + "loss": 1.4202, "step": 359 }, { - "epoch": 14.4, - "learning_rate": 5.6000000000000006e-05, - "loss": 0.9913, + "epoch": 2.01, + "learning_rate": 9.944134078212291e-05, + "loss": 1.5262, "step": 360 }, { - "epoch": 14.44, - "learning_rate": 5.560000000000001e-05, - "loss": 1.0219, + "epoch": 2.02, + "learning_rate": 9.916201117318436e-05, + "loss": 1.6323, "step": 361 }, { - "epoch": 14.48, - "learning_rate": 5.520000000000001e-05, - "loss": 0.8672, + "epoch": 2.02, + "learning_rate": 9.888268156424582e-05, + "loss": 1.5521, "step": 362 }, { - "epoch": 14.52, - "learning_rate": 5.4800000000000004e-05, - "loss": 1.0464, + "epoch": 2.03, + "learning_rate": 9.860335195530727e-05, + "loss": 1.5762, "step": 363 }, { - "epoch": 14.56, - "learning_rate": 5.440000000000001e-05, - "loss": 1.0109, + "epoch": 2.03, + "learning_rate": 9.832402234636872e-05, + "loss": 1.613, "step": 364 }, { - "epoch": 14.6, - "learning_rate": 5.4000000000000005e-05, - "loss": 0.9913, + "epoch": 2.04, + "learning_rate": 9.804469273743018e-05, + "loss": 1.4231, "step": 365 }, { - "epoch": 14.64, - "learning_rate": 5.360000000000001e-05, - "loss": 0.779, + "epoch": 2.04, + "learning_rate": 9.776536312849163e-05, + "loss": 1.5706, "step": 366 }, { - "epoch": 14.68, - "learning_rate": 5.3200000000000006e-05, - "loss": 0.8675, + "epoch": 2.05, + "learning_rate": 9.748603351955308e-05, + "loss": 1.5245, "step": 367 }, { - "epoch": 14.72, - "learning_rate": 5.28e-05, - "loss": 0.987, + "epoch": 2.06, + "learning_rate": 9.720670391061453e-05, + "loss": 1.4771, "step": 368 }, { - "epoch": 14.76, - "learning_rate": 5.2400000000000007e-05, - "loss": 0.741, + "epoch": 2.06, + "learning_rate": 9.692737430167597e-05, + "loss": 1.596, "step": 369 }, { - "epoch": 14.8, - "learning_rate": 
5.2000000000000004e-05, - "loss": 0.9585, + "epoch": 2.07, + "learning_rate": 9.664804469273744e-05, + "loss": 1.537, "step": 370 }, { - "epoch": 14.84, - "learning_rate": 5.16e-05, - "loss": 0.8977, + "epoch": 2.07, + "learning_rate": 9.636871508379889e-05, + "loss": 1.4276, "step": 371 }, { - "epoch": 14.88, - "learning_rate": 5.1200000000000004e-05, - "loss": 0.7836, + "epoch": 2.08, + "learning_rate": 9.608938547486033e-05, + "loss": 1.4746, "step": 372 }, { - "epoch": 14.92, - "learning_rate": 5.08e-05, - "loss": 0.8461, + "epoch": 2.08, + "learning_rate": 9.581005586592178e-05, + "loss": 1.4374, "step": 373 }, { - "epoch": 14.96, - "learning_rate": 5.0400000000000005e-05, - "loss": 0.7531, + "epoch": 2.09, + "learning_rate": 9.553072625698325e-05, + "loss": 1.4704, "step": 374 }, { - "epoch": 15.0, - "learning_rate": 5e-05, - "loss": 0.8906, + "epoch": 2.09, + "learning_rate": 9.52513966480447e-05, + "loss": 1.5997, "step": 375 }, { - "epoch": 15.04, - "learning_rate": 4.96e-05, - "loss": 0.8793, + "epoch": 2.1, + "learning_rate": 9.497206703910614e-05, + "loss": 1.5034, "step": 376 }, { - "epoch": 15.08, - "learning_rate": 4.92e-05, - "loss": 0.8596, + "epoch": 2.11, + "learning_rate": 9.46927374301676e-05, + "loss": 1.6392, "step": 377 }, { - "epoch": 15.12, - "learning_rate": 4.88e-05, - "loss": 0.7947, + "epoch": 2.11, + "learning_rate": 9.441340782122905e-05, + "loss": 1.5611, "step": 378 }, { - "epoch": 15.16, - "learning_rate": 4.8400000000000004e-05, - "loss": 0.7976, + "epoch": 2.12, + "learning_rate": 9.413407821229052e-05, + "loss": 1.3384, "step": 379 }, { - "epoch": 15.2, - "learning_rate": 4.8e-05, - "loss": 0.9037, + "epoch": 2.12, + "learning_rate": 9.385474860335196e-05, + "loss": 1.5732, "step": 380 }, { - "epoch": 15.24, - "learning_rate": 4.76e-05, - "loss": 0.9863, + "epoch": 2.13, + "learning_rate": 9.357541899441341e-05, + "loss": 1.3874, "step": 381 }, { - "epoch": 15.28, - "learning_rate": 4.72e-05, - "loss": 0.7735, + "epoch": 2.13, + "learning_rate": 9.329608938547486e-05, + "loss": 1.4139, "step": 382 }, { - "epoch": 15.32, - "learning_rate": 4.6800000000000006e-05, - "loss": 0.6723, + "epoch": 2.14, + "learning_rate": 9.301675977653633e-05, + "loss": 1.5195, "step": 383 }, { - "epoch": 15.36, - "learning_rate": 4.64e-05, - "loss": 0.7002, + "epoch": 2.15, + "learning_rate": 9.273743016759777e-05, + "loss": 1.4371, "step": 384 }, { - "epoch": 15.4, - "learning_rate": 4.600000000000001e-05, - "loss": 0.9479, + "epoch": 2.15, + "learning_rate": 9.245810055865922e-05, + "loss": 1.4411, "step": 385 }, { - "epoch": 15.44, - "learning_rate": 4.5600000000000004e-05, - "loss": 0.8159, + "epoch": 2.16, + "learning_rate": 9.217877094972067e-05, + "loss": 1.5214, "step": 386 }, { - "epoch": 15.48, - "learning_rate": 4.52e-05, - "loss": 0.7954, + "epoch": 2.16, + "learning_rate": 9.189944134078213e-05, + "loss": 1.4971, "step": 387 }, { - "epoch": 15.52, - "learning_rate": 4.4800000000000005e-05, - "loss": 0.8016, + "epoch": 2.17, + "learning_rate": 9.162011173184358e-05, + "loss": 1.3453, "step": 388 }, { - "epoch": 15.56, - "learning_rate": 4.44e-05, - "loss": 0.7995, + "epoch": 2.17, + "learning_rate": 9.134078212290503e-05, + "loss": 1.394, "step": 389 }, { - "epoch": 15.6, - "learning_rate": 4.4000000000000006e-05, - "loss": 0.8243, + "epoch": 2.18, + "learning_rate": 9.106145251396648e-05, + "loss": 1.5058, "step": 390 }, { - "epoch": 15.64, - "learning_rate": 4.36e-05, - "loss": 0.8335, + "epoch": 2.18, + "learning_rate": 9.078212290502794e-05, + "loss": 
1.4855, "step": 391 }, { - "epoch": 15.68, - "learning_rate": 4.32e-05, - "loss": 1.001, + "epoch": 2.19, + "learning_rate": 9.050279329608939e-05, + "loss": 1.4647, "step": 392 }, { - "epoch": 15.72, - "learning_rate": 4.2800000000000004e-05, - "loss": 0.9385, + "epoch": 2.2, + "learning_rate": 9.022346368715084e-05, + "loss": 1.3435, "step": 393 }, { - "epoch": 15.76, - "learning_rate": 4.24e-05, - "loss": 0.8796, + "epoch": 2.2, + "learning_rate": 8.99441340782123e-05, + "loss": 1.5815, "step": 394 }, { - "epoch": 15.8, - "learning_rate": 4.2e-05, - "loss": 0.98, + "epoch": 2.21, + "learning_rate": 8.966480446927375e-05, + "loss": 1.4742, "step": 395 }, { - "epoch": 15.84, - "learning_rate": 4.16e-05, - "loss": 0.7547, + "epoch": 2.21, + "learning_rate": 8.938547486033519e-05, + "loss": 1.5389, "step": 396 }, { - "epoch": 15.88, - "learning_rate": 4.12e-05, - "loss": 0.7943, + "epoch": 2.22, + "learning_rate": 8.910614525139666e-05, + "loss": 1.384, "step": 397 }, { - "epoch": 15.92, - "learning_rate": 4.08e-05, - "loss": 0.8135, + "epoch": 2.22, + "learning_rate": 8.882681564245811e-05, + "loss": 1.3967, "step": 398 }, { - "epoch": 15.96, - "learning_rate": 4.0400000000000006e-05, - "loss": 0.87, + "epoch": 2.23, + "learning_rate": 8.854748603351956e-05, + "loss": 1.442, "step": 399 }, { - "epoch": 16.0, - "learning_rate": 4e-05, - "loss": 0.8818, + "epoch": 2.23, + "learning_rate": 8.8268156424581e-05, + "loss": 1.396, "step": 400 }, { - "epoch": 16.04, - "learning_rate": 3.960000000000001e-05, - "loss": 1.0793, + "epoch": 2.24, + "learning_rate": 8.798882681564247e-05, + "loss": 1.4706, "step": 401 }, { - "epoch": 16.08, - "learning_rate": 3.9200000000000004e-05, - "loss": 0.6816, + "epoch": 2.25, + "learning_rate": 8.770949720670392e-05, + "loss": 1.4217, "step": 402 }, { - "epoch": 16.12, - "learning_rate": 3.88e-05, - "loss": 0.8138, + "epoch": 2.25, + "learning_rate": 8.743016759776537e-05, + "loss": 1.5617, "step": 403 }, { - "epoch": 16.16, - "learning_rate": 3.8400000000000005e-05, - "loss": 0.8002, + "epoch": 2.26, + "learning_rate": 8.715083798882681e-05, + "loss": 1.5701, "step": 404 }, { - "epoch": 16.2, - "learning_rate": 3.8e-05, - "loss": 0.7104, + "epoch": 2.26, + "learning_rate": 8.687150837988828e-05, + "loss": 1.3659, "step": 405 }, { - "epoch": 16.24, - "learning_rate": 3.76e-05, - "loss": 0.749, + "epoch": 2.27, + "learning_rate": 8.659217877094973e-05, + "loss": 1.3424, "step": 406 }, { - "epoch": 16.28, - "learning_rate": 3.72e-05, - "loss": 0.9713, + "epoch": 2.27, + "learning_rate": 8.631284916201118e-05, + "loss": 1.5782, "step": 407 }, { - "epoch": 16.32, - "learning_rate": 3.68e-05, - "loss": 0.9039, + "epoch": 2.28, + "learning_rate": 8.603351955307262e-05, + "loss": 1.3355, "step": 408 }, { - "epoch": 16.36, - "learning_rate": 3.6400000000000004e-05, - "loss": 0.7958, + "epoch": 2.28, + "learning_rate": 8.575418994413408e-05, + "loss": 1.4798, "step": 409 }, { - "epoch": 16.4, - "learning_rate": 3.6e-05, - "loss": 0.9242, + "epoch": 2.29, + "learning_rate": 8.547486033519554e-05, + "loss": 1.3813, "step": 410 }, { - "epoch": 16.44, - "learning_rate": 3.56e-05, - "loss": 0.7224, + "epoch": 2.3, + "learning_rate": 8.519553072625698e-05, + "loss": 1.6011, "step": 411 }, { - "epoch": 16.48, - "learning_rate": 3.52e-05, - "loss": 0.8227, + "epoch": 2.3, + "learning_rate": 8.491620111731844e-05, + "loss": 1.4596, "step": 412 }, { - "epoch": 16.52, - "learning_rate": 3.48e-05, - "loss": 0.7336, + "epoch": 2.31, + "learning_rate": 8.463687150837989e-05, + "loss": 
1.4997, "step": 413 }, { - "epoch": 16.56, - "learning_rate": 3.4399999999999996e-05, - "loss": 0.801, + "epoch": 2.31, + "learning_rate": 8.435754189944135e-05, + "loss": 1.5255, "step": 414 }, { - "epoch": 16.6, - "learning_rate": 3.4000000000000007e-05, - "loss": 0.7649, + "epoch": 2.32, + "learning_rate": 8.40782122905028e-05, + "loss": 1.5305, "step": 415 }, { - "epoch": 16.64, - "learning_rate": 3.3600000000000004e-05, - "loss": 0.859, + "epoch": 2.32, + "learning_rate": 8.379888268156425e-05, + "loss": 1.3029, "step": 416 }, { - "epoch": 16.68, - "learning_rate": 3.32e-05, - "loss": 0.7702, + "epoch": 2.33, + "learning_rate": 8.35195530726257e-05, + "loss": 1.2046, "step": 417 }, { - "epoch": 16.72, - "learning_rate": 3.2800000000000004e-05, - "loss": 0.7585, + "epoch": 2.34, + "learning_rate": 8.324022346368715e-05, + "loss": 1.4985, "step": 418 }, { - "epoch": 16.76, - "learning_rate": 3.24e-05, - "loss": 0.6829, + "epoch": 2.34, + "learning_rate": 8.29608938547486e-05, + "loss": 1.3486, "step": 419 }, { - "epoch": 16.8, - "learning_rate": 3.2000000000000005e-05, - "loss": 0.8186, + "epoch": 2.35, + "learning_rate": 8.268156424581006e-05, + "loss": 1.2839, "step": 420 }, { - "epoch": 16.84, - "learning_rate": 3.16e-05, - "loss": 0.7649, + "epoch": 2.35, + "learning_rate": 8.240223463687151e-05, + "loss": 1.4286, "step": 421 }, { - "epoch": 16.88, - "learning_rate": 3.12e-05, - "loss": 0.8019, + "epoch": 2.36, + "learning_rate": 8.212290502793296e-05, + "loss": 1.3893, "step": 422 }, { - "epoch": 16.92, - "learning_rate": 3.08e-05, - "loss": 0.82, + "epoch": 2.36, + "learning_rate": 8.184357541899442e-05, + "loss": 1.5477, "step": 423 }, { - "epoch": 16.96, - "learning_rate": 3.04e-05, - "loss": 0.7077, + "epoch": 2.37, + "learning_rate": 8.156424581005587e-05, + "loss": 1.4973, "step": 424 }, { - "epoch": 17.0, - "learning_rate": 3e-05, - "loss": 0.7803, + "epoch": 2.37, + "learning_rate": 8.128491620111732e-05, + "loss": 1.3549, "step": 425 }, { - "epoch": 17.04, - "learning_rate": 2.96e-05, - "loss": 0.8311, + "epoch": 2.38, + "learning_rate": 8.100558659217878e-05, + "loss": 1.3966, "step": 426 }, { - "epoch": 17.08, - "learning_rate": 2.9199999999999998e-05, - "loss": 0.8245, + "epoch": 2.39, + "learning_rate": 8.072625698324023e-05, + "loss": 1.3974, "step": 427 }, { - "epoch": 17.12, - "learning_rate": 2.88e-05, - "loss": 0.6627, + "epoch": 2.39, + "learning_rate": 8.044692737430168e-05, + "loss": 1.5211, "step": 428 }, { - "epoch": 17.16, - "learning_rate": 2.84e-05, - "loss": 0.6721, + "epoch": 2.4, + "learning_rate": 8.016759776536313e-05, + "loss": 1.4751, "step": 429 }, { - "epoch": 17.2, - "learning_rate": 2.8000000000000003e-05, - "loss": 0.9004, + "epoch": 2.4, + "learning_rate": 7.988826815642459e-05, + "loss": 1.4322, "step": 430 }, { - "epoch": 17.24, - "learning_rate": 2.7600000000000003e-05, - "loss": 0.7606, + "epoch": 2.41, + "learning_rate": 7.960893854748604e-05, + "loss": 1.5991, "step": 431 }, { - "epoch": 17.28, - "learning_rate": 2.7200000000000004e-05, - "loss": 0.7235, + "epoch": 2.41, + "learning_rate": 7.93296089385475e-05, + "loss": 1.3288, "step": 432 }, { - "epoch": 17.32, - "learning_rate": 2.6800000000000004e-05, - "loss": 0.797, + "epoch": 2.42, + "learning_rate": 7.905027932960895e-05, + "loss": 1.4098, "step": 433 }, { - "epoch": 17.36, - "learning_rate": 2.64e-05, - "loss": 0.6312, + "epoch": 2.42, + "learning_rate": 7.87709497206704e-05, + "loss": 1.4819, "step": 434 }, { - "epoch": 17.4, - "learning_rate": 2.6000000000000002e-05, - "loss": 
0.869, + "epoch": 2.43, + "learning_rate": 7.849162011173184e-05, + "loss": 1.4003, "step": 435 }, { - "epoch": 17.44, - "learning_rate": 2.5600000000000002e-05, - "loss": 0.7968, + "epoch": 2.44, + "learning_rate": 7.821229050279329e-05, + "loss": 1.3475, "step": 436 }, { - "epoch": 17.48, - "learning_rate": 2.5200000000000003e-05, - "loss": 0.6872, + "epoch": 2.44, + "learning_rate": 7.793296089385476e-05, + "loss": 1.3354, "step": 437 }, { - "epoch": 17.52, - "learning_rate": 2.48e-05, - "loss": 0.7272, + "epoch": 2.45, + "learning_rate": 7.765363128491621e-05, + "loss": 1.3144, "step": 438 }, { - "epoch": 17.56, - "learning_rate": 2.44e-05, - "loss": 0.7695, + "epoch": 2.45, + "learning_rate": 7.737430167597765e-05, + "loss": 1.4118, "step": 439 }, { - "epoch": 17.6, - "learning_rate": 2.4e-05, - "loss": 0.6806, + "epoch": 2.46, + "learning_rate": 7.70949720670391e-05, + "loss": 1.3716, "step": 440 }, { - "epoch": 17.64, - "learning_rate": 2.36e-05, - "loss": 0.9562, + "epoch": 2.46, + "learning_rate": 7.681564245810057e-05, + "loss": 1.4191, "step": 441 }, { - "epoch": 17.68, - "learning_rate": 2.32e-05, - "loss": 0.8659, + "epoch": 2.47, + "learning_rate": 7.653631284916202e-05, + "loss": 1.5173, "step": 442 }, { - "epoch": 17.72, - "learning_rate": 2.2800000000000002e-05, - "loss": 0.8259, + "epoch": 2.47, + "learning_rate": 7.625698324022346e-05, + "loss": 1.3005, "step": 443 }, { - "epoch": 17.76, - "learning_rate": 2.2400000000000002e-05, - "loss": 0.7056, + "epoch": 2.48, + "learning_rate": 7.597765363128491e-05, + "loss": 1.4586, "step": 444 }, { - "epoch": 17.8, - "learning_rate": 2.2000000000000003e-05, - "loss": 0.7251, + "epoch": 2.49, + "learning_rate": 7.569832402234637e-05, + "loss": 1.5086, "step": 445 }, { - "epoch": 17.84, - "learning_rate": 2.16e-05, - "loss": 0.8674, + "epoch": 2.49, + "learning_rate": 7.541899441340783e-05, + "loss": 1.5446, "step": 446 }, { - "epoch": 17.88, - "learning_rate": 2.12e-05, - "loss": 0.7127, + "epoch": 2.5, + "learning_rate": 7.513966480446927e-05, + "loss": 1.3274, "step": 447 }, { - "epoch": 17.92, - "learning_rate": 2.08e-05, - "loss": 0.849, + "epoch": 2.5, + "learning_rate": 7.486033519553073e-05, + "loss": 1.4424, "step": 448 }, { - "epoch": 17.96, - "learning_rate": 2.04e-05, - "loss": 0.7506, + "epoch": 2.51, + "learning_rate": 7.458100558659218e-05, + "loss": 1.2836, "step": 449 }, { - "epoch": 18.0, - "learning_rate": 2e-05, - "loss": 0.7262, + "epoch": 2.51, + "learning_rate": 7.430167597765365e-05, + "loss": 1.4322, "step": 450 }, { - "epoch": 18.04, - "learning_rate": 1.9600000000000002e-05, - "loss": 0.8102, + "epoch": 2.52, + "learning_rate": 7.402234636871509e-05, + "loss": 1.4168, "step": 451 }, { - "epoch": 18.08, - "learning_rate": 1.9200000000000003e-05, - "loss": 0.7843, + "epoch": 2.53, + "learning_rate": 7.374301675977654e-05, + "loss": 1.3892, "step": 452 }, { - "epoch": 18.12, - "learning_rate": 1.88e-05, - "loss": 0.6902, + "epoch": 2.53, + "learning_rate": 7.346368715083799e-05, + "loss": 1.6538, "step": 453 }, { - "epoch": 18.16, - "learning_rate": 1.84e-05, - "loss": 0.749, + "epoch": 2.54, + "learning_rate": 7.318435754189944e-05, + "loss": 1.4573, "step": 454 }, { - "epoch": 18.2, - "learning_rate": 1.8e-05, - "loss": 0.722, + "epoch": 2.54, + "learning_rate": 7.29050279329609e-05, + "loss": 1.4593, "step": 455 }, { - "epoch": 18.24, - "learning_rate": 1.76e-05, - "loss": 0.7015, + "epoch": 2.55, + "learning_rate": 7.262569832402235e-05, + "loss": 1.4316, "step": 456 }, { - "epoch": 18.28, - 
"learning_rate": 1.7199999999999998e-05, - "loss": 0.7451, + "epoch": 2.55, + "learning_rate": 7.23463687150838e-05, + "loss": 1.2702, "step": 457 }, { - "epoch": 18.32, - "learning_rate": 1.6800000000000002e-05, - "loss": 0.8411, + "epoch": 2.56, + "learning_rate": 7.206703910614526e-05, + "loss": 1.5458, "step": 458 }, { - "epoch": 18.36, - "learning_rate": 1.6400000000000002e-05, - "loss": 0.8594, + "epoch": 2.56, + "learning_rate": 7.178770949720671e-05, + "loss": 1.5179, "step": 459 }, { - "epoch": 18.4, - "learning_rate": 1.6000000000000003e-05, - "loss": 0.7394, + "epoch": 2.57, + "learning_rate": 7.150837988826816e-05, + "loss": 1.3844, "step": 460 }, { - "epoch": 18.44, - "learning_rate": 1.56e-05, - "loss": 0.8071, + "epoch": 2.58, + "learning_rate": 7.122905027932961e-05, + "loss": 1.3698, "step": 461 }, { - "epoch": 18.48, - "learning_rate": 1.52e-05, - "loss": 0.5883, + "epoch": 2.58, + "learning_rate": 7.094972067039107e-05, + "loss": 1.4396, "step": 462 }, { - "epoch": 18.52, - "learning_rate": 1.48e-05, - "loss": 0.5356, + "epoch": 2.59, + "learning_rate": 7.067039106145251e-05, + "loss": 1.5486, "step": 463 }, { - "epoch": 18.56, - "learning_rate": 1.44e-05, - "loss": 0.6916, + "epoch": 2.59, + "learning_rate": 7.039106145251397e-05, + "loss": 1.2032, "step": 464 }, { - "epoch": 18.6, - "learning_rate": 1.4000000000000001e-05, - "loss": 0.7899, + "epoch": 2.6, + "learning_rate": 7.011173184357543e-05, + "loss": 1.3434, "step": 465 }, { - "epoch": 18.64, - "learning_rate": 1.3600000000000002e-05, - "loss": 0.7651, + "epoch": 2.6, + "learning_rate": 6.983240223463688e-05, + "loss": 1.4065, "step": 466 }, { - "epoch": 18.68, - "learning_rate": 1.32e-05, - "loss": 0.7883, + "epoch": 2.61, + "learning_rate": 6.955307262569832e-05, + "loss": 1.3362, "step": 467 }, { - "epoch": 18.72, - "learning_rate": 1.2800000000000001e-05, - "loss": 0.8655, + "epoch": 2.61, + "learning_rate": 6.927374301675979e-05, + "loss": 1.375, "step": 468 }, { - "epoch": 18.76, - "learning_rate": 1.24e-05, - "loss": 0.6875, + "epoch": 2.62, + "learning_rate": 6.899441340782124e-05, + "loss": 1.4549, "step": 469 }, { - "epoch": 18.8, - "learning_rate": 1.2e-05, - "loss": 0.7629, + "epoch": 2.63, + "learning_rate": 6.871508379888269e-05, + "loss": 1.3724, "step": 470 }, { - "epoch": 18.84, - "learning_rate": 1.16e-05, - "loss": 0.5825, + "epoch": 2.63, + "learning_rate": 6.843575418994413e-05, + "loss": 1.1602, "step": 471 }, { - "epoch": 18.88, - "learning_rate": 1.1200000000000001e-05, - "loss": 0.6979, + "epoch": 2.64, + "learning_rate": 6.815642458100558e-05, + "loss": 1.4346, "step": 472 }, { - "epoch": 18.92, - "learning_rate": 1.08e-05, - "loss": 0.8777, + "epoch": 2.64, + "learning_rate": 6.787709497206705e-05, + "loss": 1.2877, "step": 473 }, { - "epoch": 18.96, - "learning_rate": 1.04e-05, - "loss": 0.7298, + "epoch": 2.65, + "learning_rate": 6.759776536312849e-05, + "loss": 1.394, "step": 474 }, { - "epoch": 19.0, - "learning_rate": 1e-05, - "loss": 0.8324, + "epoch": 2.65, + "learning_rate": 6.731843575418994e-05, + "loss": 1.3387, "step": 475 }, { - "epoch": 19.04, - "learning_rate": 9.600000000000001e-06, - "loss": 0.6186, + "epoch": 2.66, + "learning_rate": 6.70391061452514e-05, + "loss": 1.41, "step": 476 }, { - "epoch": 19.08, - "learning_rate": 9.2e-06, - "loss": 0.7853, + "epoch": 2.66, + "learning_rate": 6.675977653631286e-05, + "loss": 1.4299, "step": 477 }, { - "epoch": 19.12, - "learning_rate": 8.8e-06, - "loss": 0.5731, + "epoch": 2.67, + "learning_rate": 6.64804469273743e-05, + 
"loss": 1.3824, "step": 478 }, { - "epoch": 19.16, - "learning_rate": 8.400000000000001e-06, - "loss": 0.7915, + "epoch": 2.68, + "learning_rate": 6.620111731843575e-05, + "loss": 1.2656, "step": 479 }, { - "epoch": 19.2, - "learning_rate": 8.000000000000001e-06, - "loss": 0.83, + "epoch": 2.68, + "learning_rate": 6.59217877094972e-05, + "loss": 1.2811, "step": 480 }, { - "epoch": 19.24, - "learning_rate": 7.6e-06, - "loss": 0.7768, + "epoch": 2.69, + "learning_rate": 6.564245810055866e-05, + "loss": 1.6839, "step": 481 }, { - "epoch": 19.28, - "learning_rate": 7.2e-06, - "loss": 0.6634, + "epoch": 2.69, + "learning_rate": 6.536312849162011e-05, + "loss": 1.3089, "step": 482 }, { - "epoch": 19.32, - "learning_rate": 6.800000000000001e-06, - "loss": 0.657, + "epoch": 2.7, + "learning_rate": 6.508379888268157e-05, + "loss": 1.4323, "step": 483 }, { - "epoch": 19.36, - "learning_rate": 6.4000000000000006e-06, - "loss": 0.8127, + "epoch": 2.7, + "learning_rate": 6.480446927374302e-05, + "loss": 1.3439, "step": 484 }, { - "epoch": 19.4, - "learning_rate": 6e-06, - "loss": 0.7326, + "epoch": 2.71, + "learning_rate": 6.452513966480447e-05, + "loss": 1.3614, "step": 485 }, { - "epoch": 19.44, - "learning_rate": 5.600000000000001e-06, - "loss": 0.6168, + "epoch": 2.72, + "learning_rate": 6.424581005586592e-05, + "loss": 1.4974, "step": 486 }, { - "epoch": 19.48, - "learning_rate": 5.2e-06, - "loss": 0.7808, + "epoch": 2.72, + "learning_rate": 6.396648044692738e-05, + "loss": 1.4963, "step": 487 }, { - "epoch": 19.52, - "learning_rate": 4.800000000000001e-06, - "loss": 0.7509, + "epoch": 2.73, + "learning_rate": 6.368715083798883e-05, + "loss": 1.2902, "step": 488 }, { - "epoch": 19.56, - "learning_rate": 4.4e-06, - "loss": 0.7304, + "epoch": 2.73, + "learning_rate": 6.340782122905028e-05, + "loss": 1.3226, "step": 489 }, { - "epoch": 19.6, - "learning_rate": 4.000000000000001e-06, - "loss": 0.77, + "epoch": 2.74, + "learning_rate": 6.312849162011174e-05, + "loss": 1.4353, "step": 490 }, { - "epoch": 19.64, - "learning_rate": 3.6e-06, - "loss": 0.6577, + "epoch": 2.74, + "learning_rate": 6.284916201117319e-05, + "loss": 1.281, "step": 491 }, { - "epoch": 19.68, - "learning_rate": 3.2000000000000003e-06, - "loss": 0.6797, + "epoch": 2.75, + "learning_rate": 6.256983240223464e-05, + "loss": 1.3755, "step": 492 }, { - "epoch": 19.72, - "learning_rate": 2.8000000000000003e-06, - "loss": 0.807, + "epoch": 2.75, + "learning_rate": 6.22905027932961e-05, + "loss": 1.3811, "step": 493 }, { - "epoch": 19.76, - "learning_rate": 2.4000000000000003e-06, - "loss": 0.7484, + "epoch": 2.76, + "learning_rate": 6.201117318435755e-05, + "loss": 1.4627, "step": 494 }, { - "epoch": 19.8, - "learning_rate": 2.0000000000000003e-06, - "loss": 0.7582, + "epoch": 2.77, + "learning_rate": 6.1731843575419e-05, + "loss": 1.3482, "step": 495 }, { - "epoch": 19.84, - "learning_rate": 1.6000000000000001e-06, - "loss": 0.7102, + "epoch": 2.77, + "learning_rate": 6.145251396648045e-05, + "loss": 1.4274, "step": 496 }, { - "epoch": 19.88, - "learning_rate": 1.2000000000000002e-06, - "loss": 0.7078, + "epoch": 2.78, + "learning_rate": 6.11731843575419e-05, + "loss": 1.3465, "step": 497 }, { - "epoch": 19.92, - "learning_rate": 8.000000000000001e-07, - "loss": 0.7877, + "epoch": 2.78, + "learning_rate": 6.089385474860335e-05, + "loss": 1.2319, "step": 498 }, { - "epoch": 19.96, - "learning_rate": 4.0000000000000003e-07, - "loss": 0.8055, + "epoch": 2.79, + "learning_rate": 6.061452513966481e-05, + "loss": 1.2835, "step": 499 }, { - 
"epoch": 20.0, - "learning_rate": 0.0, - "loss": 0.6905, + "epoch": 2.79, + "learning_rate": 6.0335195530726265e-05, + "loss": 1.4836, "step": 500 } ], "logging_steps": 1, - "max_steps": 500, - "num_train_epochs": 20, + "max_steps": 716, + "num_train_epochs": 4, "save_steps": 100, - "total_flos": 1.701892530444288e+17, + "total_flos": 2.572486267094016e+17, "trial_name": null, "trial_params": null } diff --git a/checkpoint-500/training_args.bin b/checkpoint-500/training_args.bin index 4de6572a838c337c9990635a9406ebf46c0ec336..c8672c716e925d0028b4938db147703f58656ff7 100644 --- a/checkpoint-500/training_args.bin +++ b/checkpoint-500/training_args.bin @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:6ef74b31950ae6c8955316bed48c343fb06cda0cb6a8a54ca46ca3cb681c8736 +oid sha256:188ae1c421cc0c6435d1f71d8d3423ac4abc7dba0e6fc2efcbc4dbe77c741317 size 4027 diff --git a/checkpoint-600/README.md b/checkpoint-600/README.md new file mode 100644 index 0000000000000000000000000000000000000000..08371015f02382e6fcba318f4aaea54ae52cd3c4 --- /dev/null +++ b/checkpoint-600/README.md @@ -0,0 +1,34 @@ +--- +library_name: peft +--- +## Training procedure + + +The following `bitsandbytes` quantization config was used during training: +- quant_method: bitsandbytes +- load_in_8bit: True +- load_in_4bit: False +- llm_int8_threshold: 6.0 +- llm_int8_skip_modules: None +- llm_int8_enable_fp32_cpu_offload: False +- llm_int8_has_fp16_weight: False +- bnb_4bit_quant_type: fp4 +- bnb_4bit_use_double_quant: False +- bnb_4bit_compute_dtype: float32 + +The following `bitsandbytes` quantization config was used during training: +- quant_method: bitsandbytes +- load_in_8bit: True +- load_in_4bit: False +- llm_int8_threshold: 6.0 +- llm_int8_skip_modules: None +- llm_int8_enable_fp32_cpu_offload: False +- llm_int8_has_fp16_weight: False +- bnb_4bit_quant_type: fp4 +- bnb_4bit_use_double_quant: False +- bnb_4bit_compute_dtype: float32 +### Framework versions + +- PEFT 0.6.0.dev0 + +- PEFT 0.6.0.dev0 diff --git a/checkpoint-600/adapter_config.json b/checkpoint-600/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..c661f74e40394c079de68f07861d73c341803b67 --- /dev/null +++ b/checkpoint-600/adapter_config.json @@ -0,0 +1,25 @@ +{ + "alpha_pattern": {}, + "auto_mapping": null, + "base_model_name_or_path": "bigscience/bloomz-3b", + "bias": "none", + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": true, + "layers_pattern": null, + "layers_to_transform": null, + "lora_alpha": 16, + "lora_dropout": 0.0, + "modules_to_save": null, + "peft_type": "LORA", + "r": 8, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "dense_4h_to_h", + "dense", + "dense_h_to_4h", + "query_key_value" + ], + "task_type": "CAUSAL_LM" +} \ No newline at end of file diff --git a/checkpoint-600/adapter_model.bin b/checkpoint-600/adapter_model.bin new file mode 100644 index 0000000000000000000000000000000000000000..8fbb1832de137e033b6dbf7ffc70284e5becb659 --- /dev/null +++ b/checkpoint-600/adapter_model.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:435ffb177831605f5479223de5dfef343a64e6bc9f2d4e05fba2ab86e3d42c9b +size 39409357 diff --git a/checkpoint-600/optimizer.pt b/checkpoint-600/optimizer.pt new file mode 100644 index 0000000000000000000000000000000000000000..64d125cfb1d1abce95ae8935c5159c5c4afc4d85 --- /dev/null +++ b/checkpoint-600/optimizer.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:a67c0a0a04a16e60dc6df9ce52ee43c4b51d5e34c198de23f6afed8ff5bd6d3e
+size 78844421
diff --git a/checkpoint-600/rng_state.pth b/checkpoint-600/rng_state.pth
new file mode 100644
index 0000000000000000000000000000000000000000..452f99ac534b9117d836494d73222e3d44e1523b
--- /dev/null
+++ b/checkpoint-600/rng_state.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c6869750f95a25c4e970298a33adf90e2d7ab52680bf3317239bff1b10103235
+size 14575
diff --git a/checkpoint-600/scheduler.pt b/checkpoint-600/scheduler.pt
new file mode 100644
index 0000000000000000000000000000000000000000..dc91f19f9994d9dc5e2cb196fcfd876e3f7fe0c8
--- /dev/null
+++ b/checkpoint-600/scheduler.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:df189dbec1d9da6456fed4321f8fb7b9b04184c73d2cc55b1372ecebfa9aaaff
+size 627
diff --git a/checkpoint-600/special_tokens_map.json b/checkpoint-600/special_tokens_map.json
new file mode 100644
index 0000000000000000000000000000000000000000..fdafe480f024ff444c7492147536765ce5d55a2d
--- /dev/null
+++ b/checkpoint-600/special_tokens_map.json
@@ -0,0 +1,6 @@
+{
+  "bos_token": "<s>",
+  "eos_token": "</s>",
+  "pad_token": "<pad>",
+  "unk_token": "<unk>"
+}
diff --git a/checkpoint-600/tokenizer.json b/checkpoint-600/tokenizer.json
new file mode 100644
index 0000000000000000000000000000000000000000..673c31abdeadf6576c3c754df86459e1ad64e207
--- /dev/null
+++ b/checkpoint-600/tokenizer.json
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:85b00d7db4df5df2e3f01cacc3feda246002a672f3356eec7f4b04a22eb0dfbe
+size 14500570
diff --git a/checkpoint-600/tokenizer_config.json b/checkpoint-600/tokenizer_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..4b56cc9c2965c07132c35df3e2972e93d98c82c3
--- /dev/null
+++ b/checkpoint-600/tokenizer_config.json
@@ -0,0 +1,10 @@
+{
+  "add_prefix_space": false,
+  "bos_token": "<s>",
+  "clean_up_tokenization_spaces": false,
+  "eos_token": "</s>",
+  "model_max_length": 1000000000000000019884624838656,
+  "pad_token": "<pad>",
+  "tokenizer_class": "BloomTokenizer",
+  "unk_token": "<unk>"
+}
diff --git a/checkpoint-600/trainer_state.json b/checkpoint-600/trainer_state.json
new file mode 100644
index 0000000000000000000000000000000000000000..779b4f6d8a9d155187db4987b1d9f9ebcb34f156
--- /dev/null
+++ b/checkpoint-600/trainer_state.json
@@ -0,0 +1,3619 @@
+{
+  "best_metric": null,
+  "best_model_checkpoint": null,
+  "epoch": 3.35195530726257,
+  "eval_steps": 500,
+  "global_step": 600,
+  "is_hyper_param_search": false,
+  "is_local_process_zero": true,
+  "is_world_process_zero": true,
+  "log_history": [
+    {
+      "epoch": 0.01,
+      "learning_rate": 0.00019972067039106145,
+      "loss": 2.6443,
+      "step": 1
+    },
+    {
+      "epoch": 0.01,
+      "learning_rate": 0.00019944134078212292,
+      "loss": 2.4104,
+      "step": 2
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 0.00019916201117318435,
+      "loss": 2.4975,
+      "step": 3
+    },
+    {
+      "epoch": 0.02,
+      "learning_rate": 0.00019888268156424582,
+      "loss": 2.3513,
+      "step": 4
+    },
+    {
+      "epoch": 0.03,
+      "learning_rate": 0.0001986033519553073,
+      "loss": 2.4274,
+      "step": 5
+    },
+    {
+      "epoch": 0.03,
+      "learning_rate": 0.00019832402234636873,
+      "loss": 2.3628,
+      "step": 6
+    },
+    {
+      "epoch": 0.04,
+      "learning_rate": 0.0001980446927374302,
+      "loss": 2.3567,
+      "step": 7
+    },
+    {
+      "epoch": 0.04,
+      "learning_rate": 0.00019776536312849163,
+      "loss": 2.4121,
+      "step": 8
+    },
+    {
+      "epoch": 0.05,
+      "learning_rate": 0.00019748603351955307,
+      "loss": 2.4033,
+      "step": 9
+    },
+    {
+      "epoch": 0.06,
+      "learning_rate": 
0.00019720670391061454, + "loss": 2.2805, + "step": 10 + }, + { + "epoch": 0.06, + "learning_rate": 0.00019692737430167598, + "loss": 2.2639, + "step": 11 + }, + { + "epoch": 0.07, + "learning_rate": 0.00019664804469273744, + "loss": 2.2724, + "step": 12 + }, + { + "epoch": 0.07, + "learning_rate": 0.00019636871508379888, + "loss": 2.332, + "step": 13 + }, + { + "epoch": 0.08, + "learning_rate": 0.00019608938547486035, + "loss": 2.2261, + "step": 14 + }, + { + "epoch": 0.08, + "learning_rate": 0.00019581005586592182, + "loss": 2.2208, + "step": 15 + }, + { + "epoch": 0.09, + "learning_rate": 0.00019553072625698326, + "loss": 2.3351, + "step": 16 + }, + { + "epoch": 0.09, + "learning_rate": 0.0001952513966480447, + "loss": 2.2475, + "step": 17 + }, + { + "epoch": 0.1, + "learning_rate": 0.00019497206703910616, + "loss": 2.3283, + "step": 18 + }, + { + "epoch": 0.11, + "learning_rate": 0.0001946927374301676, + "loss": 2.1346, + "step": 19 + }, + { + "epoch": 0.11, + "learning_rate": 0.00019441340782122907, + "loss": 2.131, + "step": 20 + }, + { + "epoch": 0.12, + "learning_rate": 0.0001941340782122905, + "loss": 2.1718, + "step": 21 + }, + { + "epoch": 0.12, + "learning_rate": 0.00019385474860335195, + "loss": 2.2446, + "step": 22 + }, + { + "epoch": 0.13, + "learning_rate": 0.0001935754189944134, + "loss": 2.306, + "step": 23 + }, + { + "epoch": 0.13, + "learning_rate": 0.00019329608938547488, + "loss": 2.1908, + "step": 24 + }, + { + "epoch": 0.14, + "learning_rate": 0.00019301675977653632, + "loss": 2.2844, + "step": 25 + }, + { + "epoch": 0.15, + "learning_rate": 0.00019273743016759779, + "loss": 2.2235, + "step": 26 + }, + { + "epoch": 0.15, + "learning_rate": 0.00019245810055865922, + "loss": 2.1842, + "step": 27 + }, + { + "epoch": 0.16, + "learning_rate": 0.00019217877094972066, + "loss": 2.2675, + "step": 28 + }, + { + "epoch": 0.16, + "learning_rate": 0.00019189944134078213, + "loss": 2.2532, + "step": 29 + }, + { + "epoch": 0.17, + "learning_rate": 0.00019162011173184357, + "loss": 2.1788, + "step": 30 + }, + { + "epoch": 0.17, + "learning_rate": 0.00019134078212290504, + "loss": 2.2494, + "step": 31 + }, + { + "epoch": 0.18, + "learning_rate": 0.0001910614525139665, + "loss": 2.1995, + "step": 32 + }, + { + "epoch": 0.18, + "learning_rate": 0.00019078212290502794, + "loss": 2.1451, + "step": 33 + }, + { + "epoch": 0.19, + "learning_rate": 0.0001905027932960894, + "loss": 2.223, + "step": 34 + }, + { + "epoch": 0.2, + "learning_rate": 0.00019022346368715085, + "loss": 2.2854, + "step": 35 + }, + { + "epoch": 0.2, + "learning_rate": 0.0001899441340782123, + "loss": 2.2265, + "step": 36 + }, + { + "epoch": 0.21, + "learning_rate": 0.00018966480446927375, + "loss": 2.1214, + "step": 37 + }, + { + "epoch": 0.21, + "learning_rate": 0.0001893854748603352, + "loss": 2.1898, + "step": 38 + }, + { + "epoch": 0.22, + "learning_rate": 0.00018910614525139666, + "loss": 2.1974, + "step": 39 + }, + { + "epoch": 0.22, + "learning_rate": 0.0001888268156424581, + "loss": 2.2259, + "step": 40 + }, + { + "epoch": 0.23, + "learning_rate": 0.00018854748603351957, + "loss": 2.2094, + "step": 41 + }, + { + "epoch": 0.23, + "learning_rate": 0.00018826815642458103, + "loss": 2.1731, + "step": 42 + }, + { + "epoch": 0.24, + "learning_rate": 0.00018798882681564247, + "loss": 2.2373, + "step": 43 + }, + { + "epoch": 0.25, + "learning_rate": 0.0001877094972067039, + "loss": 2.2295, + "step": 44 + }, + { + "epoch": 0.25, + "learning_rate": 0.00018743016759776538, + "loss": 2.1947, + "step": 45 + }, + { + 
"epoch": 0.26, + "learning_rate": 0.00018715083798882682, + "loss": 2.2115, + "step": 46 + }, + { + "epoch": 0.26, + "learning_rate": 0.00018687150837988828, + "loss": 2.1224, + "step": 47 + }, + { + "epoch": 0.27, + "learning_rate": 0.00018659217877094972, + "loss": 2.2137, + "step": 48 + }, + { + "epoch": 0.27, + "learning_rate": 0.00018631284916201116, + "loss": 2.2338, + "step": 49 + }, + { + "epoch": 0.28, + "learning_rate": 0.00018603351955307266, + "loss": 2.1298, + "step": 50 + }, + { + "epoch": 0.28, + "learning_rate": 0.0001857541899441341, + "loss": 2.0883, + "step": 51 + }, + { + "epoch": 0.29, + "learning_rate": 0.00018547486033519553, + "loss": 2.1216, + "step": 52 + }, + { + "epoch": 0.3, + "learning_rate": 0.000185195530726257, + "loss": 2.2112, + "step": 53 + }, + { + "epoch": 0.3, + "learning_rate": 0.00018491620111731844, + "loss": 2.1224, + "step": 54 + }, + { + "epoch": 0.31, + "learning_rate": 0.0001846368715083799, + "loss": 2.2375, + "step": 55 + }, + { + "epoch": 0.31, + "learning_rate": 0.00018435754189944135, + "loss": 2.2235, + "step": 56 + }, + { + "epoch": 0.32, + "learning_rate": 0.00018407821229050279, + "loss": 2.1682, + "step": 57 + }, + { + "epoch": 0.32, + "learning_rate": 0.00018379888268156425, + "loss": 2.2077, + "step": 58 + }, + { + "epoch": 0.33, + "learning_rate": 0.00018351955307262572, + "loss": 2.1596, + "step": 59 + }, + { + "epoch": 0.34, + "learning_rate": 0.00018324022346368716, + "loss": 2.1311, + "step": 60 + }, + { + "epoch": 0.34, + "learning_rate": 0.00018296089385474862, + "loss": 2.1333, + "step": 61 + }, + { + "epoch": 0.35, + "learning_rate": 0.00018268156424581006, + "loss": 2.0901, + "step": 62 + }, + { + "epoch": 0.35, + "learning_rate": 0.00018240223463687153, + "loss": 2.1971, + "step": 63 + }, + { + "epoch": 0.36, + "learning_rate": 0.00018212290502793297, + "loss": 2.2602, + "step": 64 + }, + { + "epoch": 0.36, + "learning_rate": 0.0001818435754189944, + "loss": 2.2194, + "step": 65 + }, + { + "epoch": 0.37, + "learning_rate": 0.00018156424581005588, + "loss": 2.1218, + "step": 66 + }, + { + "epoch": 0.37, + "learning_rate": 0.00018128491620111731, + "loss": 2.2049, + "step": 67 + }, + { + "epoch": 0.38, + "learning_rate": 0.00018100558659217878, + "loss": 2.1521, + "step": 68 + }, + { + "epoch": 0.39, + "learning_rate": 0.00018072625698324025, + "loss": 2.112, + "step": 69 + }, + { + "epoch": 0.39, + "learning_rate": 0.0001804469273743017, + "loss": 2.1906, + "step": 70 + }, + { + "epoch": 0.4, + "learning_rate": 0.00018016759776536313, + "loss": 2.1717, + "step": 71 + }, + { + "epoch": 0.4, + "learning_rate": 0.0001798882681564246, + "loss": 2.0712, + "step": 72 + }, + { + "epoch": 0.41, + "learning_rate": 0.00017960893854748603, + "loss": 2.141, + "step": 73 + }, + { + "epoch": 0.41, + "learning_rate": 0.0001793296089385475, + "loss": 2.0656, + "step": 74 + }, + { + "epoch": 0.42, + "learning_rate": 0.00017905027932960894, + "loss": 2.1125, + "step": 75 + }, + { + "epoch": 0.42, + "learning_rate": 0.00017877094972067038, + "loss": 2.0869, + "step": 76 + }, + { + "epoch": 0.43, + "learning_rate": 0.00017849162011173187, + "loss": 2.2478, + "step": 77 + }, + { + "epoch": 0.44, + "learning_rate": 0.0001782122905027933, + "loss": 2.1535, + "step": 78 + }, + { + "epoch": 0.44, + "learning_rate": 0.00017793296089385475, + "loss": 2.1927, + "step": 79 + }, + { + "epoch": 0.45, + "learning_rate": 0.00017765363128491622, + "loss": 2.1213, + "step": 80 + }, + { + "epoch": 0.45, + "learning_rate": 0.00017737430167597766, + "loss": 
2.0981, + "step": 81 + }, + { + "epoch": 0.46, + "learning_rate": 0.00017709497206703912, + "loss": 2.1828, + "step": 82 + }, + { + "epoch": 0.46, + "learning_rate": 0.00017681564245810056, + "loss": 2.0562, + "step": 83 + }, + { + "epoch": 0.47, + "learning_rate": 0.000176536312849162, + "loss": 2.1334, + "step": 84 + }, + { + "epoch": 0.47, + "learning_rate": 0.00017625698324022347, + "loss": 2.1225, + "step": 85 + }, + { + "epoch": 0.48, + "learning_rate": 0.00017597765363128493, + "loss": 2.2098, + "step": 86 + }, + { + "epoch": 0.49, + "learning_rate": 0.00017569832402234637, + "loss": 2.1519, + "step": 87 + }, + { + "epoch": 0.49, + "learning_rate": 0.00017541899441340784, + "loss": 2.1132, + "step": 88 + }, + { + "epoch": 0.5, + "learning_rate": 0.00017513966480446928, + "loss": 2.0333, + "step": 89 + }, + { + "epoch": 0.5, + "learning_rate": 0.00017486033519553075, + "loss": 2.2764, + "step": 90 + }, + { + "epoch": 0.51, + "learning_rate": 0.00017458100558659218, + "loss": 2.1838, + "step": 91 + }, + { + "epoch": 0.51, + "learning_rate": 0.00017430167597765362, + "loss": 2.1386, + "step": 92 + }, + { + "epoch": 0.52, + "learning_rate": 0.0001740223463687151, + "loss": 2.1034, + "step": 93 + }, + { + "epoch": 0.53, + "learning_rate": 0.00017374301675977656, + "loss": 2.0346, + "step": 94 + }, + { + "epoch": 0.53, + "learning_rate": 0.000173463687150838, + "loss": 2.0274, + "step": 95 + }, + { + "epoch": 0.54, + "learning_rate": 0.00017318435754189946, + "loss": 2.1036, + "step": 96 + }, + { + "epoch": 0.54, + "learning_rate": 0.0001729050279329609, + "loss": 2.1208, + "step": 97 + }, + { + "epoch": 0.55, + "learning_rate": 0.00017262569832402237, + "loss": 2.0572, + "step": 98 + }, + { + "epoch": 0.55, + "learning_rate": 0.0001723463687150838, + "loss": 2.1702, + "step": 99 + }, + { + "epoch": 0.56, + "learning_rate": 0.00017206703910614525, + "loss": 2.1302, + "step": 100 + }, + { + "epoch": 0.56, + "learning_rate": 0.0001717877094972067, + "loss": 2.0175, + "step": 101 + }, + { + "epoch": 0.57, + "learning_rate": 0.00017150837988826815, + "loss": 2.1006, + "step": 102 + }, + { + "epoch": 0.58, + "learning_rate": 0.00017122905027932962, + "loss": 2.0662, + "step": 103 + }, + { + "epoch": 0.58, + "learning_rate": 0.00017094972067039109, + "loss": 1.988, + "step": 104 + }, + { + "epoch": 0.59, + "learning_rate": 0.00017067039106145253, + "loss": 2.1008, + "step": 105 + }, + { + "epoch": 0.59, + "learning_rate": 0.00017039106145251396, + "loss": 2.1482, + "step": 106 + }, + { + "epoch": 0.6, + "learning_rate": 0.00017011173184357543, + "loss": 2.1052, + "step": 107 + }, + { + "epoch": 0.6, + "learning_rate": 0.00016983240223463687, + "loss": 2.0978, + "step": 108 + }, + { + "epoch": 0.61, + "learning_rate": 0.00016955307262569834, + "loss": 2.1303, + "step": 109 + }, + { + "epoch": 0.61, + "learning_rate": 0.00016927374301675978, + "loss": 2.0794, + "step": 110 + }, + { + "epoch": 0.62, + "learning_rate": 0.00016899441340782122, + "loss": 2.1059, + "step": 111 + }, + { + "epoch": 0.63, + "learning_rate": 0.0001687150837988827, + "loss": 1.9642, + "step": 112 + }, + { + "epoch": 0.63, + "learning_rate": 0.00016843575418994415, + "loss": 2.0415, + "step": 113 + }, + { + "epoch": 0.64, + "learning_rate": 0.0001681564245810056, + "loss": 2.0795, + "step": 114 + }, + { + "epoch": 0.64, + "learning_rate": 0.00016787709497206705, + "loss": 2.0238, + "step": 115 + }, + { + "epoch": 0.65, + "learning_rate": 0.0001675977653631285, + "loss": 2.078, + "step": 116 + }, + { + "epoch": 0.65, + 
"learning_rate": 0.00016731843575418996, + "loss": 2.1362, + "step": 117 + }, + { + "epoch": 0.66, + "learning_rate": 0.0001670391061452514, + "loss": 2.0552, + "step": 118 + }, + { + "epoch": 0.66, + "learning_rate": 0.00016675977653631284, + "loss": 2.213, + "step": 119 + }, + { + "epoch": 0.67, + "learning_rate": 0.0001664804469273743, + "loss": 1.9999, + "step": 120 + }, + { + "epoch": 0.68, + "learning_rate": 0.00016620111731843577, + "loss": 2.06, + "step": 121 + }, + { + "epoch": 0.68, + "learning_rate": 0.0001659217877094972, + "loss": 2.0177, + "step": 122 + }, + { + "epoch": 0.69, + "learning_rate": 0.00016564245810055868, + "loss": 2.0504, + "step": 123 + }, + { + "epoch": 0.69, + "learning_rate": 0.00016536312849162012, + "loss": 2.0585, + "step": 124 + }, + { + "epoch": 0.7, + "learning_rate": 0.00016508379888268158, + "loss": 2.0273, + "step": 125 + }, + { + "epoch": 0.7, + "learning_rate": 0.00016480446927374302, + "loss": 2.0549, + "step": 126 + }, + { + "epoch": 0.71, + "learning_rate": 0.00016452513966480446, + "loss": 2.044, + "step": 127 + }, + { + "epoch": 0.72, + "learning_rate": 0.00016424581005586593, + "loss": 2.0731, + "step": 128 + }, + { + "epoch": 0.72, + "learning_rate": 0.00016396648044692737, + "loss": 2.0568, + "step": 129 + }, + { + "epoch": 0.73, + "learning_rate": 0.00016368715083798883, + "loss": 2.007, + "step": 130 + }, + { + "epoch": 0.73, + "learning_rate": 0.0001634078212290503, + "loss": 2.0707, + "step": 131 + }, + { + "epoch": 0.74, + "learning_rate": 0.00016312849162011174, + "loss": 1.9793, + "step": 132 + }, + { + "epoch": 0.74, + "learning_rate": 0.0001628491620111732, + "loss": 2.1311, + "step": 133 + }, + { + "epoch": 0.75, + "learning_rate": 0.00016256983240223465, + "loss": 2.0016, + "step": 134 + }, + { + "epoch": 0.75, + "learning_rate": 0.00016229050279329609, + "loss": 1.9945, + "step": 135 + }, + { + "epoch": 0.76, + "learning_rate": 0.00016201117318435755, + "loss": 2.0186, + "step": 136 + }, + { + "epoch": 0.77, + "learning_rate": 0.000161731843575419, + "loss": 2.0971, + "step": 137 + }, + { + "epoch": 0.77, + "learning_rate": 0.00016145251396648046, + "loss": 2.0883, + "step": 138 + }, + { + "epoch": 0.78, + "learning_rate": 0.00016117318435754192, + "loss": 2.0803, + "step": 139 + }, + { + "epoch": 0.78, + "learning_rate": 0.00016089385474860336, + "loss": 2.0617, + "step": 140 + }, + { + "epoch": 0.79, + "learning_rate": 0.00016061452513966483, + "loss": 2.1265, + "step": 141 + }, + { + "epoch": 0.79, + "learning_rate": 0.00016033519553072627, + "loss": 2.0151, + "step": 142 + }, + { + "epoch": 0.8, + "learning_rate": 0.0001600558659217877, + "loss": 1.996, + "step": 143 + }, + { + "epoch": 0.8, + "learning_rate": 0.00015977653631284918, + "loss": 2.0164, + "step": 144 + }, + { + "epoch": 0.81, + "learning_rate": 0.00015949720670391061, + "loss": 2.0314, + "step": 145 + }, + { + "epoch": 0.82, + "learning_rate": 0.00015921787709497208, + "loss": 1.9501, + "step": 146 + }, + { + "epoch": 0.82, + "learning_rate": 0.00015893854748603352, + "loss": 2.087, + "step": 147 + }, + { + "epoch": 0.83, + "learning_rate": 0.000158659217877095, + "loss": 2.0262, + "step": 148 + }, + { + "epoch": 0.83, + "learning_rate": 0.00015837988826815643, + "loss": 2.0765, + "step": 149 + }, + { + "epoch": 0.84, + "learning_rate": 0.0001581005586592179, + "loss": 2.105, + "step": 150 + }, + { + "epoch": 0.84, + "learning_rate": 0.00015782122905027933, + "loss": 1.9863, + "step": 151 + }, + { + "epoch": 0.85, + "learning_rate": 0.0001575418994413408, + 
"loss": 1.9873, + "step": 152 + }, + { + "epoch": 0.85, + "learning_rate": 0.00015726256983240224, + "loss": 2.0094, + "step": 153 + }, + { + "epoch": 0.86, + "learning_rate": 0.00015698324022346368, + "loss": 1.9141, + "step": 154 + }, + { + "epoch": 0.87, + "learning_rate": 0.00015670391061452514, + "loss": 1.917, + "step": 155 + }, + { + "epoch": 0.87, + "learning_rate": 0.00015642458100558658, + "loss": 2.109, + "step": 156 + }, + { + "epoch": 0.88, + "learning_rate": 0.00015614525139664805, + "loss": 1.9799, + "step": 157 + }, + { + "epoch": 0.88, + "learning_rate": 0.00015586592178770952, + "loss": 1.9571, + "step": 158 + }, + { + "epoch": 0.89, + "learning_rate": 0.00015558659217877096, + "loss": 1.9931, + "step": 159 + }, + { + "epoch": 0.89, + "learning_rate": 0.00015530726256983242, + "loss": 2.1004, + "step": 160 + }, + { + "epoch": 0.9, + "learning_rate": 0.00015502793296089386, + "loss": 2.0385, + "step": 161 + }, + { + "epoch": 0.91, + "learning_rate": 0.0001547486033519553, + "loss": 1.9751, + "step": 162 + }, + { + "epoch": 0.91, + "learning_rate": 0.00015446927374301677, + "loss": 2.0544, + "step": 163 + }, + { + "epoch": 0.92, + "learning_rate": 0.0001541899441340782, + "loss": 2.0069, + "step": 164 + }, + { + "epoch": 0.92, + "learning_rate": 0.00015391061452513967, + "loss": 1.9576, + "step": 165 + }, + { + "epoch": 0.93, + "learning_rate": 0.00015363128491620114, + "loss": 1.8991, + "step": 166 + }, + { + "epoch": 0.93, + "learning_rate": 0.00015335195530726258, + "loss": 1.9336, + "step": 167 + }, + { + "epoch": 0.94, + "learning_rate": 0.00015307262569832405, + "loss": 1.9736, + "step": 168 + }, + { + "epoch": 0.94, + "learning_rate": 0.00015279329608938548, + "loss": 1.9702, + "step": 169 + }, + { + "epoch": 0.95, + "learning_rate": 0.00015251396648044692, + "loss": 1.9055, + "step": 170 + }, + { + "epoch": 0.96, + "learning_rate": 0.0001522346368715084, + "loss": 2.0503, + "step": 171 + }, + { + "epoch": 0.96, + "learning_rate": 0.00015195530726256983, + "loss": 2.0039, + "step": 172 + }, + { + "epoch": 0.97, + "learning_rate": 0.0001516759776536313, + "loss": 1.9406, + "step": 173 + }, + { + "epoch": 0.97, + "learning_rate": 0.00015139664804469274, + "loss": 2.0525, + "step": 174 + }, + { + "epoch": 0.98, + "learning_rate": 0.0001511173184357542, + "loss": 1.9234, + "step": 175 + }, + { + "epoch": 0.98, + "learning_rate": 0.00015083798882681567, + "loss": 1.8614, + "step": 176 + }, + { + "epoch": 0.99, + "learning_rate": 0.0001505586592178771, + "loss": 1.9616, + "step": 177 + }, + { + "epoch": 0.99, + "learning_rate": 0.00015027932960893855, + "loss": 1.9509, + "step": 178 + }, + { + "epoch": 1.0, + "learning_rate": 0.00015000000000000001, + "loss": 1.9592, + "step": 179 + }, + { + "epoch": 1.01, + "learning_rate": 0.00014972067039106145, + "loss": 1.8991, + "step": 180 + }, + { + "epoch": 1.01, + "learning_rate": 0.00014944134078212292, + "loss": 1.9127, + "step": 181 + }, + { + "epoch": 1.02, + "learning_rate": 0.00014916201117318436, + "loss": 1.8982, + "step": 182 + }, + { + "epoch": 1.02, + "learning_rate": 0.0001488826815642458, + "loss": 1.9534, + "step": 183 + }, + { + "epoch": 1.03, + "learning_rate": 0.0001486033519553073, + "loss": 1.7794, + "step": 184 + }, + { + "epoch": 1.03, + "learning_rate": 0.00014832402234636873, + "loss": 1.7958, + "step": 185 + }, + { + "epoch": 1.04, + "learning_rate": 0.00014804469273743017, + "loss": 1.8282, + "step": 186 + }, + { + "epoch": 1.04, + "learning_rate": 0.00014776536312849164, + "loss": 2.0423, + "step": 187 + 
}, + { + "epoch": 1.05, + "learning_rate": 0.00014748603351955308, + "loss": 1.9282, + "step": 188 + }, + { + "epoch": 1.06, + "learning_rate": 0.00014720670391061454, + "loss": 1.9072, + "step": 189 + }, + { + "epoch": 1.06, + "learning_rate": 0.00014692737430167598, + "loss": 1.8665, + "step": 190 + }, + { + "epoch": 1.07, + "learning_rate": 0.00014664804469273742, + "loss": 1.9021, + "step": 191 + }, + { + "epoch": 1.07, + "learning_rate": 0.0001463687150837989, + "loss": 1.7308, + "step": 192 + }, + { + "epoch": 1.08, + "learning_rate": 0.00014608938547486035, + "loss": 1.9165, + "step": 193 + }, + { + "epoch": 1.08, + "learning_rate": 0.0001458100558659218, + "loss": 1.842, + "step": 194 + }, + { + "epoch": 1.09, + "learning_rate": 0.00014553072625698326, + "loss": 1.9128, + "step": 195 + }, + { + "epoch": 1.09, + "learning_rate": 0.0001452513966480447, + "loss": 1.8005, + "step": 196 + }, + { + "epoch": 1.1, + "learning_rate": 0.00014497206703910614, + "loss": 1.8547, + "step": 197 + }, + { + "epoch": 1.11, + "learning_rate": 0.0001446927374301676, + "loss": 1.9042, + "step": 198 + }, + { + "epoch": 1.11, + "learning_rate": 0.00014441340782122905, + "loss": 1.8609, + "step": 199 + }, + { + "epoch": 1.12, + "learning_rate": 0.0001441340782122905, + "loss": 1.9591, + "step": 200 + }, + { + "epoch": 1.12, + "learning_rate": 0.00014385474860335195, + "loss": 1.8722, + "step": 201 + }, + { + "epoch": 1.13, + "learning_rate": 0.00014357541899441342, + "loss": 1.8535, + "step": 202 + }, + { + "epoch": 1.13, + "learning_rate": 0.00014329608938547488, + "loss": 1.8676, + "step": 203 + }, + { + "epoch": 1.14, + "learning_rate": 0.00014301675977653632, + "loss": 1.8976, + "step": 204 + }, + { + "epoch": 1.15, + "learning_rate": 0.00014273743016759776, + "loss": 1.7723, + "step": 205 + }, + { + "epoch": 1.15, + "learning_rate": 0.00014245810055865923, + "loss": 1.8165, + "step": 206 + }, + { + "epoch": 1.16, + "learning_rate": 0.00014217877094972067, + "loss": 1.7811, + "step": 207 + }, + { + "epoch": 1.16, + "learning_rate": 0.00014189944134078214, + "loss": 1.908, + "step": 208 + }, + { + "epoch": 1.17, + "learning_rate": 0.00014162011173184357, + "loss": 1.7663, + "step": 209 + }, + { + "epoch": 1.17, + "learning_rate": 0.00014134078212290501, + "loss": 1.6779, + "step": 210 + }, + { + "epoch": 1.18, + "learning_rate": 0.0001410614525139665, + "loss": 1.9039, + "step": 211 + }, + { + "epoch": 1.18, + "learning_rate": 0.00014078212290502795, + "loss": 1.8033, + "step": 212 + }, + { + "epoch": 1.19, + "learning_rate": 0.00014050279329608939, + "loss": 1.8251, + "step": 213 + }, + { + "epoch": 1.2, + "learning_rate": 0.00014022346368715085, + "loss": 1.8505, + "step": 214 + }, + { + "epoch": 1.2, + "learning_rate": 0.0001399441340782123, + "loss": 1.8147, + "step": 215 + }, + { + "epoch": 1.21, + "learning_rate": 0.00013966480446927376, + "loss": 1.8419, + "step": 216 + }, + { + "epoch": 1.21, + "learning_rate": 0.0001393854748603352, + "loss": 1.8401, + "step": 217 + }, + { + "epoch": 1.22, + "learning_rate": 0.00013910614525139664, + "loss": 1.8912, + "step": 218 + }, + { + "epoch": 1.22, + "learning_rate": 0.0001388268156424581, + "loss": 1.7548, + "step": 219 + }, + { + "epoch": 1.23, + "learning_rate": 0.00013854748603351957, + "loss": 1.8741, + "step": 220 + }, + { + "epoch": 1.23, + "learning_rate": 0.000138268156424581, + "loss": 1.9549, + "step": 221 + }, + { + "epoch": 1.24, + "learning_rate": 0.00013798882681564248, + "loss": 1.9093, + "step": 222 + }, + { + "epoch": 1.25, + 
"learning_rate": 0.00013770949720670392, + "loss": 1.7896, + "step": 223 + }, + { + "epoch": 1.25, + "learning_rate": 0.00013743016759776538, + "loss": 1.8491, + "step": 224 + }, + { + "epoch": 1.26, + "learning_rate": 0.00013715083798882682, + "loss": 1.7851, + "step": 225 + }, + { + "epoch": 1.26, + "learning_rate": 0.00013687150837988826, + "loss": 1.6992, + "step": 226 + }, + { + "epoch": 1.27, + "learning_rate": 0.00013659217877094973, + "loss": 1.9765, + "step": 227 + }, + { + "epoch": 1.27, + "learning_rate": 0.00013631284916201117, + "loss": 1.8179, + "step": 228 + }, + { + "epoch": 1.28, + "learning_rate": 0.00013603351955307263, + "loss": 1.8548, + "step": 229 + }, + { + "epoch": 1.28, + "learning_rate": 0.0001357541899441341, + "loss": 1.8843, + "step": 230 + }, + { + "epoch": 1.29, + "learning_rate": 0.00013547486033519554, + "loss": 1.9105, + "step": 231 + }, + { + "epoch": 1.3, + "learning_rate": 0.00013519553072625698, + "loss": 1.8748, + "step": 232 + }, + { + "epoch": 1.3, + "learning_rate": 0.00013491620111731844, + "loss": 1.7976, + "step": 233 + }, + { + "epoch": 1.31, + "learning_rate": 0.00013463687150837988, + "loss": 1.7369, + "step": 234 + }, + { + "epoch": 1.31, + "learning_rate": 0.00013435754189944135, + "loss": 1.7808, + "step": 235 + }, + { + "epoch": 1.32, + "learning_rate": 0.0001340782122905028, + "loss": 1.8385, + "step": 236 + }, + { + "epoch": 1.32, + "learning_rate": 0.00013379888268156423, + "loss": 1.8295, + "step": 237 + }, + { + "epoch": 1.33, + "learning_rate": 0.00013351955307262572, + "loss": 1.757, + "step": 238 + }, + { + "epoch": 1.34, + "learning_rate": 0.00013324022346368716, + "loss": 1.7904, + "step": 239 + }, + { + "epoch": 1.34, + "learning_rate": 0.0001329608938547486, + "loss": 1.7632, + "step": 240 + }, + { + "epoch": 1.35, + "learning_rate": 0.00013268156424581007, + "loss": 1.7867, + "step": 241 + }, + { + "epoch": 1.35, + "learning_rate": 0.0001324022346368715, + "loss": 1.8259, + "step": 242 + }, + { + "epoch": 1.36, + "learning_rate": 0.00013212290502793297, + "loss": 1.6655, + "step": 243 + }, + { + "epoch": 1.36, + "learning_rate": 0.0001318435754189944, + "loss": 1.6848, + "step": 244 + }, + { + "epoch": 1.37, + "learning_rate": 0.00013156424581005585, + "loss": 1.7931, + "step": 245 + }, + { + "epoch": 1.37, + "learning_rate": 0.00013128491620111732, + "loss": 1.7868, + "step": 246 + }, + { + "epoch": 1.38, + "learning_rate": 0.00013100558659217879, + "loss": 1.7732, + "step": 247 + }, + { + "epoch": 1.39, + "learning_rate": 0.00013072625698324022, + "loss": 1.7851, + "step": 248 + }, + { + "epoch": 1.39, + "learning_rate": 0.0001304469273743017, + "loss": 1.7406, + "step": 249 + }, + { + "epoch": 1.4, + "learning_rate": 0.00013016759776536313, + "loss": 1.5853, + "step": 250 + }, + { + "epoch": 1.4, + "learning_rate": 0.0001298882681564246, + "loss": 1.8271, + "step": 251 + }, + { + "epoch": 1.41, + "learning_rate": 0.00012960893854748604, + "loss": 1.6054, + "step": 252 + }, + { + "epoch": 1.41, + "learning_rate": 0.00012932960893854748, + "loss": 1.6884, + "step": 253 + }, + { + "epoch": 1.42, + "learning_rate": 0.00012905027932960894, + "loss": 1.7333, + "step": 254 + }, + { + "epoch": 1.42, + "learning_rate": 0.00012877094972067038, + "loss": 1.803, + "step": 255 + }, + { + "epoch": 1.43, + "learning_rate": 0.00012849162011173185, + "loss": 1.63, + "step": 256 + }, + { + "epoch": 1.44, + "learning_rate": 0.00012821229050279331, + "loss": 1.8023, + "step": 257 + }, + { + "epoch": 1.44, + "learning_rate": 
0.00012793296089385475, + "loss": 1.762, + "step": 258 + }, + { + "epoch": 1.45, + "learning_rate": 0.00012765363128491622, + "loss": 1.7565, + "step": 259 + }, + { + "epoch": 1.45, + "learning_rate": 0.00012737430167597766, + "loss": 1.5937, + "step": 260 + }, + { + "epoch": 1.46, + "learning_rate": 0.0001270949720670391, + "loss": 1.7208, + "step": 261 + }, + { + "epoch": 1.46, + "learning_rate": 0.00012681564245810057, + "loss": 1.8097, + "step": 262 + }, + { + "epoch": 1.47, + "learning_rate": 0.000126536312849162, + "loss": 1.7042, + "step": 263 + }, + { + "epoch": 1.47, + "learning_rate": 0.00012625698324022347, + "loss": 1.7892, + "step": 264 + }, + { + "epoch": 1.48, + "learning_rate": 0.00012597765363128494, + "loss": 1.752, + "step": 265 + }, + { + "epoch": 1.49, + "learning_rate": 0.00012569832402234638, + "loss": 1.7333, + "step": 266 + }, + { + "epoch": 1.49, + "learning_rate": 0.00012541899441340784, + "loss": 1.7492, + "step": 267 + }, + { + "epoch": 1.5, + "learning_rate": 0.00012513966480446928, + "loss": 1.818, + "step": 268 + }, + { + "epoch": 1.5, + "learning_rate": 0.00012486033519553072, + "loss": 1.7146, + "step": 269 + }, + { + "epoch": 1.51, + "learning_rate": 0.0001245810055865922, + "loss": 1.7958, + "step": 270 + }, + { + "epoch": 1.51, + "learning_rate": 0.00012430167597765363, + "loss": 1.7212, + "step": 271 + }, + { + "epoch": 1.52, + "learning_rate": 0.0001240223463687151, + "loss": 1.8835, + "step": 272 + }, + { + "epoch": 1.53, + "learning_rate": 0.00012374301675977656, + "loss": 1.7486, + "step": 273 + }, + { + "epoch": 1.53, + "learning_rate": 0.000123463687150838, + "loss": 1.8103, + "step": 274 + }, + { + "epoch": 1.54, + "learning_rate": 0.00012318435754189944, + "loss": 1.7557, + "step": 275 + }, + { + "epoch": 1.54, + "learning_rate": 0.0001229050279329609, + "loss": 1.6198, + "step": 276 + }, + { + "epoch": 1.55, + "learning_rate": 0.00012262569832402235, + "loss": 1.6971, + "step": 277 + }, + { + "epoch": 1.55, + "learning_rate": 0.0001223463687150838, + "loss": 1.668, + "step": 278 + }, + { + "epoch": 1.56, + "learning_rate": 0.00012206703910614525, + "loss": 1.8795, + "step": 279 + }, + { + "epoch": 1.56, + "learning_rate": 0.0001217877094972067, + "loss": 1.6412, + "step": 280 + }, + { + "epoch": 1.57, + "learning_rate": 0.00012150837988826816, + "loss": 1.7497, + "step": 281 + }, + { + "epoch": 1.58, + "learning_rate": 0.00012122905027932962, + "loss": 1.5577, + "step": 282 + }, + { + "epoch": 1.58, + "learning_rate": 0.00012094972067039108, + "loss": 1.8049, + "step": 283 + }, + { + "epoch": 1.59, + "learning_rate": 0.00012067039106145253, + "loss": 1.6834, + "step": 284 + }, + { + "epoch": 1.59, + "learning_rate": 0.00012039106145251397, + "loss": 1.7978, + "step": 285 + }, + { + "epoch": 1.6, + "learning_rate": 0.00012011173184357542, + "loss": 1.6558, + "step": 286 + }, + { + "epoch": 1.6, + "learning_rate": 0.00011983240223463687, + "loss": 1.7561, + "step": 287 + }, + { + "epoch": 1.61, + "learning_rate": 0.00011955307262569833, + "loss": 1.6807, + "step": 288 + }, + { + "epoch": 1.61, + "learning_rate": 0.00011927374301675978, + "loss": 1.6937, + "step": 289 + }, + { + "epoch": 1.62, + "learning_rate": 0.00011899441340782122, + "loss": 1.6591, + "step": 290 + }, + { + "epoch": 1.63, + "learning_rate": 0.0001187150837988827, + "loss": 1.6771, + "step": 291 + }, + { + "epoch": 1.63, + "learning_rate": 0.00011843575418994415, + "loss": 1.7743, + "step": 292 + }, + { + "epoch": 1.64, + "learning_rate": 0.00011815642458100559, + "loss": 
1.5857, + "step": 293 + }, + { + "epoch": 1.64, + "learning_rate": 0.00011787709497206705, + "loss": 1.6999, + "step": 294 + }, + { + "epoch": 1.65, + "learning_rate": 0.0001175977653631285, + "loss": 1.5661, + "step": 295 + }, + { + "epoch": 1.65, + "learning_rate": 0.00011731843575418995, + "loss": 1.7235, + "step": 296 + }, + { + "epoch": 1.66, + "learning_rate": 0.0001170391061452514, + "loss": 1.607, + "step": 297 + }, + { + "epoch": 1.66, + "learning_rate": 0.00011675977653631284, + "loss": 1.68, + "step": 298 + }, + { + "epoch": 1.67, + "learning_rate": 0.0001164804469273743, + "loss": 1.6938, + "step": 299 + }, + { + "epoch": 1.68, + "learning_rate": 0.00011620111731843578, + "loss": 1.6315, + "step": 300 + }, + { + "epoch": 1.68, + "learning_rate": 0.00011592178770949722, + "loss": 1.6802, + "step": 301 + }, + { + "epoch": 1.69, + "learning_rate": 0.00011564245810055867, + "loss": 1.7174, + "step": 302 + }, + { + "epoch": 1.69, + "learning_rate": 0.00011536312849162012, + "loss": 1.5212, + "step": 303 + }, + { + "epoch": 1.7, + "learning_rate": 0.00011508379888268157, + "loss": 1.5808, + "step": 304 + }, + { + "epoch": 1.7, + "learning_rate": 0.00011480446927374303, + "loss": 1.6152, + "step": 305 + }, + { + "epoch": 1.71, + "learning_rate": 0.00011452513966480447, + "loss": 1.5435, + "step": 306 + }, + { + "epoch": 1.72, + "learning_rate": 0.00011424581005586592, + "loss": 1.6603, + "step": 307 + }, + { + "epoch": 1.72, + "learning_rate": 0.00011396648044692737, + "loss": 1.685, + "step": 308 + }, + { + "epoch": 1.73, + "learning_rate": 0.00011368715083798884, + "loss": 1.6002, + "step": 309 + }, + { + "epoch": 1.73, + "learning_rate": 0.00011340782122905029, + "loss": 1.6046, + "step": 310 + }, + { + "epoch": 1.74, + "learning_rate": 0.00011312849162011174, + "loss": 1.5969, + "step": 311 + }, + { + "epoch": 1.74, + "learning_rate": 0.0001128491620111732, + "loss": 1.5845, + "step": 312 + }, + { + "epoch": 1.75, + "learning_rate": 0.00011256983240223464, + "loss": 1.8183, + "step": 313 + }, + { + "epoch": 1.75, + "learning_rate": 0.00011229050279329609, + "loss": 1.6953, + "step": 314 + }, + { + "epoch": 1.76, + "learning_rate": 0.00011201117318435754, + "loss": 1.7787, + "step": 315 + }, + { + "epoch": 1.77, + "learning_rate": 0.000111731843575419, + "loss": 1.6422, + "step": 316 + }, + { + "epoch": 1.77, + "learning_rate": 0.00011145251396648045, + "loss": 1.7034, + "step": 317 + }, + { + "epoch": 1.78, + "learning_rate": 0.00011117318435754192, + "loss": 1.7301, + "step": 318 + }, + { + "epoch": 1.78, + "learning_rate": 0.00011089385474860337, + "loss": 1.7084, + "step": 319 + }, + { + "epoch": 1.79, + "learning_rate": 0.00011061452513966482, + "loss": 1.772, + "step": 320 + }, + { + "epoch": 1.79, + "learning_rate": 0.00011033519553072626, + "loss": 1.5733, + "step": 321 + }, + { + "epoch": 1.8, + "learning_rate": 0.00011005586592178771, + "loss": 1.6423, + "step": 322 + }, + { + "epoch": 1.8, + "learning_rate": 0.00010977653631284917, + "loss": 1.5809, + "step": 323 + }, + { + "epoch": 1.81, + "learning_rate": 0.00010949720670391062, + "loss": 1.6781, + "step": 324 + }, + { + "epoch": 1.82, + "learning_rate": 0.00010921787709497207, + "loss": 1.6788, + "step": 325 + }, + { + "epoch": 1.82, + "learning_rate": 0.00010893854748603351, + "loss": 1.6346, + "step": 326 + }, + { + "epoch": 1.83, + "learning_rate": 0.00010865921787709499, + "loss": 1.6634, + "step": 327 + }, + { + "epoch": 1.83, + "learning_rate": 0.00010837988826815643, + "loss": 1.7561, + "step": 328 + }, + { + 
"epoch": 1.84, + "learning_rate": 0.00010810055865921788, + "loss": 1.66, + "step": 329 + }, + { + "epoch": 1.84, + "learning_rate": 0.00010782122905027934, + "loss": 1.7298, + "step": 330 + }, + { + "epoch": 1.85, + "learning_rate": 0.00010754189944134079, + "loss": 1.6893, + "step": 331 + }, + { + "epoch": 1.85, + "learning_rate": 0.00010726256983240224, + "loss": 1.7631, + "step": 332 + }, + { + "epoch": 1.86, + "learning_rate": 0.00010698324022346368, + "loss": 1.6633, + "step": 333 + }, + { + "epoch": 1.87, + "learning_rate": 0.00010670391061452513, + "loss": 1.5388, + "step": 334 + }, + { + "epoch": 1.87, + "learning_rate": 0.00010642458100558659, + "loss": 1.6718, + "step": 335 + }, + { + "epoch": 1.88, + "learning_rate": 0.00010614525139664805, + "loss": 1.5536, + "step": 336 + }, + { + "epoch": 1.88, + "learning_rate": 0.00010586592178770951, + "loss": 1.6483, + "step": 337 + }, + { + "epoch": 1.89, + "learning_rate": 0.00010558659217877096, + "loss": 1.5774, + "step": 338 + }, + { + "epoch": 1.89, + "learning_rate": 0.00010530726256983241, + "loss": 1.6366, + "step": 339 + }, + { + "epoch": 1.9, + "learning_rate": 0.00010502793296089387, + "loss": 1.5567, + "step": 340 + }, + { + "epoch": 1.91, + "learning_rate": 0.0001047486033519553, + "loss": 1.5323, + "step": 341 + }, + { + "epoch": 1.91, + "learning_rate": 0.00010446927374301676, + "loss": 1.4608, + "step": 342 + }, + { + "epoch": 1.92, + "learning_rate": 0.00010418994413407821, + "loss": 1.5933, + "step": 343 + }, + { + "epoch": 1.92, + "learning_rate": 0.00010391061452513966, + "loss": 1.6625, + "step": 344 + }, + { + "epoch": 1.93, + "learning_rate": 0.00010363128491620113, + "loss": 1.7236, + "step": 345 + }, + { + "epoch": 1.93, + "learning_rate": 0.00010335195530726258, + "loss": 1.759, + "step": 346 + }, + { + "epoch": 1.94, + "learning_rate": 0.00010307262569832404, + "loss": 1.7248, + "step": 347 + }, + { + "epoch": 1.94, + "learning_rate": 0.00010279329608938548, + "loss": 1.5144, + "step": 348 + }, + { + "epoch": 1.95, + "learning_rate": 0.00010251396648044693, + "loss": 1.6905, + "step": 349 + }, + { + "epoch": 1.96, + "learning_rate": 0.00010223463687150838, + "loss": 1.6119, + "step": 350 + }, + { + "epoch": 1.96, + "learning_rate": 0.00010195530726256983, + "loss": 1.5464, + "step": 351 + }, + { + "epoch": 1.97, + "learning_rate": 0.00010167597765363129, + "loss": 1.6901, + "step": 352 + }, + { + "epoch": 1.97, + "learning_rate": 0.00010139664804469273, + "loss": 1.3511, + "step": 353 + }, + { + "epoch": 1.98, + "learning_rate": 0.0001011173184357542, + "loss": 1.5434, + "step": 354 + }, + { + "epoch": 1.98, + "learning_rate": 0.00010083798882681566, + "loss": 1.5891, + "step": 355 + }, + { + "epoch": 1.99, + "learning_rate": 0.0001005586592178771, + "loss": 1.6658, + "step": 356 + }, + { + "epoch": 1.99, + "learning_rate": 0.00010027932960893855, + "loss": 1.5657, + "step": 357 + }, + { + "epoch": 2.0, + "learning_rate": 0.0001, + "loss": 1.7005, + "step": 358 + }, + { + "epoch": 2.01, + "learning_rate": 9.972067039106146e-05, + "loss": 1.4202, + "step": 359 + }, + { + "epoch": 2.01, + "learning_rate": 9.944134078212291e-05, + "loss": 1.5262, + "step": 360 + }, + { + "epoch": 2.02, + "learning_rate": 9.916201117318436e-05, + "loss": 1.6323, + "step": 361 + }, + { + "epoch": 2.02, + "learning_rate": 9.888268156424582e-05, + "loss": 1.5521, + "step": 362 + }, + { + "epoch": 2.03, + "learning_rate": 9.860335195530727e-05, + "loss": 1.5762, + "step": 363 + }, + { + "epoch": 2.03, + "learning_rate": 
9.832402234636872e-05, + "loss": 1.613, + "step": 364 + }, + { + "epoch": 2.04, + "learning_rate": 9.804469273743018e-05, + "loss": 1.4231, + "step": 365 + }, + { + "epoch": 2.04, + "learning_rate": 9.776536312849163e-05, + "loss": 1.5706, + "step": 366 + }, + { + "epoch": 2.05, + "learning_rate": 9.748603351955308e-05, + "loss": 1.5245, + "step": 367 + }, + { + "epoch": 2.06, + "learning_rate": 9.720670391061453e-05, + "loss": 1.4771, + "step": 368 + }, + { + "epoch": 2.06, + "learning_rate": 9.692737430167597e-05, + "loss": 1.596, + "step": 369 + }, + { + "epoch": 2.07, + "learning_rate": 9.664804469273744e-05, + "loss": 1.537, + "step": 370 + }, + { + "epoch": 2.07, + "learning_rate": 9.636871508379889e-05, + "loss": 1.4276, + "step": 371 + }, + { + "epoch": 2.08, + "learning_rate": 9.608938547486033e-05, + "loss": 1.4746, + "step": 372 + }, + { + "epoch": 2.08, + "learning_rate": 9.581005586592178e-05, + "loss": 1.4374, + "step": 373 + }, + { + "epoch": 2.09, + "learning_rate": 9.553072625698325e-05, + "loss": 1.4704, + "step": 374 + }, + { + "epoch": 2.09, + "learning_rate": 9.52513966480447e-05, + "loss": 1.5997, + "step": 375 + }, + { + "epoch": 2.1, + "learning_rate": 9.497206703910614e-05, + "loss": 1.5034, + "step": 376 + }, + { + "epoch": 2.11, + "learning_rate": 9.46927374301676e-05, + "loss": 1.6392, + "step": 377 + }, + { + "epoch": 2.11, + "learning_rate": 9.441340782122905e-05, + "loss": 1.5611, + "step": 378 + }, + { + "epoch": 2.12, + "learning_rate": 9.413407821229052e-05, + "loss": 1.3384, + "step": 379 + }, + { + "epoch": 2.12, + "learning_rate": 9.385474860335196e-05, + "loss": 1.5732, + "step": 380 + }, + { + "epoch": 2.13, + "learning_rate": 9.357541899441341e-05, + "loss": 1.3874, + "step": 381 + }, + { + "epoch": 2.13, + "learning_rate": 9.329608938547486e-05, + "loss": 1.4139, + "step": 382 + }, + { + "epoch": 2.14, + "learning_rate": 9.301675977653633e-05, + "loss": 1.5195, + "step": 383 + }, + { + "epoch": 2.15, + "learning_rate": 9.273743016759777e-05, + "loss": 1.4371, + "step": 384 + }, + { + "epoch": 2.15, + "learning_rate": 9.245810055865922e-05, + "loss": 1.4411, + "step": 385 + }, + { + "epoch": 2.16, + "learning_rate": 9.217877094972067e-05, + "loss": 1.5214, + "step": 386 + }, + { + "epoch": 2.16, + "learning_rate": 9.189944134078213e-05, + "loss": 1.4971, + "step": 387 + }, + { + "epoch": 2.17, + "learning_rate": 9.162011173184358e-05, + "loss": 1.3453, + "step": 388 + }, + { + "epoch": 2.17, + "learning_rate": 9.134078212290503e-05, + "loss": 1.394, + "step": 389 + }, + { + "epoch": 2.18, + "learning_rate": 9.106145251396648e-05, + "loss": 1.5058, + "step": 390 + }, + { + "epoch": 2.18, + "learning_rate": 9.078212290502794e-05, + "loss": 1.4855, + "step": 391 + }, + { + "epoch": 2.19, + "learning_rate": 9.050279329608939e-05, + "loss": 1.4647, + "step": 392 + }, + { + "epoch": 2.2, + "learning_rate": 9.022346368715084e-05, + "loss": 1.3435, + "step": 393 + }, + { + "epoch": 2.2, + "learning_rate": 8.99441340782123e-05, + "loss": 1.5815, + "step": 394 + }, + { + "epoch": 2.21, + "learning_rate": 8.966480446927375e-05, + "loss": 1.4742, + "step": 395 + }, + { + "epoch": 2.21, + "learning_rate": 8.938547486033519e-05, + "loss": 1.5389, + "step": 396 + }, + { + "epoch": 2.22, + "learning_rate": 8.910614525139666e-05, + "loss": 1.384, + "step": 397 + }, + { + "epoch": 2.22, + "learning_rate": 8.882681564245811e-05, + "loss": 1.3967, + "step": 398 + }, + { + "epoch": 2.23, + "learning_rate": 8.854748603351956e-05, + "loss": 1.442, + "step": 399 + }, + { + 
"epoch": 2.23, + "learning_rate": 8.8268156424581e-05, + "loss": 1.396, + "step": 400 + }, + { + "epoch": 2.24, + "learning_rate": 8.798882681564247e-05, + "loss": 1.4706, + "step": 401 + }, + { + "epoch": 2.25, + "learning_rate": 8.770949720670392e-05, + "loss": 1.4217, + "step": 402 + }, + { + "epoch": 2.25, + "learning_rate": 8.743016759776537e-05, + "loss": 1.5617, + "step": 403 + }, + { + "epoch": 2.26, + "learning_rate": 8.715083798882681e-05, + "loss": 1.5701, + "step": 404 + }, + { + "epoch": 2.26, + "learning_rate": 8.687150837988828e-05, + "loss": 1.3659, + "step": 405 + }, + { + "epoch": 2.27, + "learning_rate": 8.659217877094973e-05, + "loss": 1.3424, + "step": 406 + }, + { + "epoch": 2.27, + "learning_rate": 8.631284916201118e-05, + "loss": 1.5782, + "step": 407 + }, + { + "epoch": 2.28, + "learning_rate": 8.603351955307262e-05, + "loss": 1.3355, + "step": 408 + }, + { + "epoch": 2.28, + "learning_rate": 8.575418994413408e-05, + "loss": 1.4798, + "step": 409 + }, + { + "epoch": 2.29, + "learning_rate": 8.547486033519554e-05, + "loss": 1.3813, + "step": 410 + }, + { + "epoch": 2.3, + "learning_rate": 8.519553072625698e-05, + "loss": 1.6011, + "step": 411 + }, + { + "epoch": 2.3, + "learning_rate": 8.491620111731844e-05, + "loss": 1.4596, + "step": 412 + }, + { + "epoch": 2.31, + "learning_rate": 8.463687150837989e-05, + "loss": 1.4997, + "step": 413 + }, + { + "epoch": 2.31, + "learning_rate": 8.435754189944135e-05, + "loss": 1.5255, + "step": 414 + }, + { + "epoch": 2.32, + "learning_rate": 8.40782122905028e-05, + "loss": 1.5305, + "step": 415 + }, + { + "epoch": 2.32, + "learning_rate": 8.379888268156425e-05, + "loss": 1.3029, + "step": 416 + }, + { + "epoch": 2.33, + "learning_rate": 8.35195530726257e-05, + "loss": 1.2046, + "step": 417 + }, + { + "epoch": 2.34, + "learning_rate": 8.324022346368715e-05, + "loss": 1.4985, + "step": 418 + }, + { + "epoch": 2.34, + "learning_rate": 8.29608938547486e-05, + "loss": 1.3486, + "step": 419 + }, + { + "epoch": 2.35, + "learning_rate": 8.268156424581006e-05, + "loss": 1.2839, + "step": 420 + }, + { + "epoch": 2.35, + "learning_rate": 8.240223463687151e-05, + "loss": 1.4286, + "step": 421 + }, + { + "epoch": 2.36, + "learning_rate": 8.212290502793296e-05, + "loss": 1.3893, + "step": 422 + }, + { + "epoch": 2.36, + "learning_rate": 8.184357541899442e-05, + "loss": 1.5477, + "step": 423 + }, + { + "epoch": 2.37, + "learning_rate": 8.156424581005587e-05, + "loss": 1.4973, + "step": 424 + }, + { + "epoch": 2.37, + "learning_rate": 8.128491620111732e-05, + "loss": 1.3549, + "step": 425 + }, + { + "epoch": 2.38, + "learning_rate": 8.100558659217878e-05, + "loss": 1.3966, + "step": 426 + }, + { + "epoch": 2.39, + "learning_rate": 8.072625698324023e-05, + "loss": 1.3974, + "step": 427 + }, + { + "epoch": 2.39, + "learning_rate": 8.044692737430168e-05, + "loss": 1.5211, + "step": 428 + }, + { + "epoch": 2.4, + "learning_rate": 8.016759776536313e-05, + "loss": 1.4751, + "step": 429 + }, + { + "epoch": 2.4, + "learning_rate": 7.988826815642459e-05, + "loss": 1.4322, + "step": 430 + }, + { + "epoch": 2.41, + "learning_rate": 7.960893854748604e-05, + "loss": 1.5991, + "step": 431 + }, + { + "epoch": 2.41, + "learning_rate": 7.93296089385475e-05, + "loss": 1.3288, + "step": 432 + }, + { + "epoch": 2.42, + "learning_rate": 7.905027932960895e-05, + "loss": 1.4098, + "step": 433 + }, + { + "epoch": 2.42, + "learning_rate": 7.87709497206704e-05, + "loss": 1.4819, + "step": 434 + }, + { + "epoch": 2.43, + "learning_rate": 7.849162011173184e-05, + "loss": 
1.4003, + "step": 435 + }, + { + "epoch": 2.44, + "learning_rate": 7.821229050279329e-05, + "loss": 1.3475, + "step": 436 + }, + { + "epoch": 2.44, + "learning_rate": 7.793296089385476e-05, + "loss": 1.3354, + "step": 437 + }, + { + "epoch": 2.45, + "learning_rate": 7.765363128491621e-05, + "loss": 1.3144, + "step": 438 + }, + { + "epoch": 2.45, + "learning_rate": 7.737430167597765e-05, + "loss": 1.4118, + "step": 439 + }, + { + "epoch": 2.46, + "learning_rate": 7.70949720670391e-05, + "loss": 1.3716, + "step": 440 + }, + { + "epoch": 2.46, + "learning_rate": 7.681564245810057e-05, + "loss": 1.4191, + "step": 441 + }, + { + "epoch": 2.47, + "learning_rate": 7.653631284916202e-05, + "loss": 1.5173, + "step": 442 + }, + { + "epoch": 2.47, + "learning_rate": 7.625698324022346e-05, + "loss": 1.3005, + "step": 443 + }, + { + "epoch": 2.48, + "learning_rate": 7.597765363128491e-05, + "loss": 1.4586, + "step": 444 + }, + { + "epoch": 2.49, + "learning_rate": 7.569832402234637e-05, + "loss": 1.5086, + "step": 445 + }, + { + "epoch": 2.49, + "learning_rate": 7.541899441340783e-05, + "loss": 1.5446, + "step": 446 + }, + { + "epoch": 2.5, + "learning_rate": 7.513966480446927e-05, + "loss": 1.3274, + "step": 447 + }, + { + "epoch": 2.5, + "learning_rate": 7.486033519553073e-05, + "loss": 1.4424, + "step": 448 + }, + { + "epoch": 2.51, + "learning_rate": 7.458100558659218e-05, + "loss": 1.2836, + "step": 449 + }, + { + "epoch": 2.51, + "learning_rate": 7.430167597765365e-05, + "loss": 1.4322, + "step": 450 + }, + { + "epoch": 2.52, + "learning_rate": 7.402234636871509e-05, + "loss": 1.4168, + "step": 451 + }, + { + "epoch": 2.53, + "learning_rate": 7.374301675977654e-05, + "loss": 1.3892, + "step": 452 + }, + { + "epoch": 2.53, + "learning_rate": 7.346368715083799e-05, + "loss": 1.6538, + "step": 453 + }, + { + "epoch": 2.54, + "learning_rate": 7.318435754189944e-05, + "loss": 1.4573, + "step": 454 + }, + { + "epoch": 2.54, + "learning_rate": 7.29050279329609e-05, + "loss": 1.4593, + "step": 455 + }, + { + "epoch": 2.55, + "learning_rate": 7.262569832402235e-05, + "loss": 1.4316, + "step": 456 + }, + { + "epoch": 2.55, + "learning_rate": 7.23463687150838e-05, + "loss": 1.2702, + "step": 457 + }, + { + "epoch": 2.56, + "learning_rate": 7.206703910614526e-05, + "loss": 1.5458, + "step": 458 + }, + { + "epoch": 2.56, + "learning_rate": 7.178770949720671e-05, + "loss": 1.5179, + "step": 459 + }, + { + "epoch": 2.57, + "learning_rate": 7.150837988826816e-05, + "loss": 1.3844, + "step": 460 + }, + { + "epoch": 2.58, + "learning_rate": 7.122905027932961e-05, + "loss": 1.3698, + "step": 461 + }, + { + "epoch": 2.58, + "learning_rate": 7.094972067039107e-05, + "loss": 1.4396, + "step": 462 + }, + { + "epoch": 2.59, + "learning_rate": 7.067039106145251e-05, + "loss": 1.5486, + "step": 463 + }, + { + "epoch": 2.59, + "learning_rate": 7.039106145251397e-05, + "loss": 1.2032, + "step": 464 + }, + { + "epoch": 2.6, + "learning_rate": 7.011173184357543e-05, + "loss": 1.3434, + "step": 465 + }, + { + "epoch": 2.6, + "learning_rate": 6.983240223463688e-05, + "loss": 1.4065, + "step": 466 + }, + { + "epoch": 2.61, + "learning_rate": 6.955307262569832e-05, + "loss": 1.3362, + "step": 467 + }, + { + "epoch": 2.61, + "learning_rate": 6.927374301675979e-05, + "loss": 1.375, + "step": 468 + }, + { + "epoch": 2.62, + "learning_rate": 6.899441340782124e-05, + "loss": 1.4549, + "step": 469 + }, + { + "epoch": 2.63, + "learning_rate": 6.871508379888269e-05, + "loss": 1.3724, + "step": 470 + }, + { + "epoch": 2.63, + 
"learning_rate": 6.843575418994413e-05, + "loss": 1.1602, + "step": 471 + }, + { + "epoch": 2.64, + "learning_rate": 6.815642458100558e-05, + "loss": 1.4346, + "step": 472 + }, + { + "epoch": 2.64, + "learning_rate": 6.787709497206705e-05, + "loss": 1.2877, + "step": 473 + }, + { + "epoch": 2.65, + "learning_rate": 6.759776536312849e-05, + "loss": 1.394, + "step": 474 + }, + { + "epoch": 2.65, + "learning_rate": 6.731843575418994e-05, + "loss": 1.3387, + "step": 475 + }, + { + "epoch": 2.66, + "learning_rate": 6.70391061452514e-05, + "loss": 1.41, + "step": 476 + }, + { + "epoch": 2.66, + "learning_rate": 6.675977653631286e-05, + "loss": 1.4299, + "step": 477 + }, + { + "epoch": 2.67, + "learning_rate": 6.64804469273743e-05, + "loss": 1.3824, + "step": 478 + }, + { + "epoch": 2.68, + "learning_rate": 6.620111731843575e-05, + "loss": 1.2656, + "step": 479 + }, + { + "epoch": 2.68, + "learning_rate": 6.59217877094972e-05, + "loss": 1.2811, + "step": 480 + }, + { + "epoch": 2.69, + "learning_rate": 6.564245810055866e-05, + "loss": 1.6839, + "step": 481 + }, + { + "epoch": 2.69, + "learning_rate": 6.536312849162011e-05, + "loss": 1.3089, + "step": 482 + }, + { + "epoch": 2.7, + "learning_rate": 6.508379888268157e-05, + "loss": 1.4323, + "step": 483 + }, + { + "epoch": 2.7, + "learning_rate": 6.480446927374302e-05, + "loss": 1.3439, + "step": 484 + }, + { + "epoch": 2.71, + "learning_rate": 6.452513966480447e-05, + "loss": 1.3614, + "step": 485 + }, + { + "epoch": 2.72, + "learning_rate": 6.424581005586592e-05, + "loss": 1.4974, + "step": 486 + }, + { + "epoch": 2.72, + "learning_rate": 6.396648044692738e-05, + "loss": 1.4963, + "step": 487 + }, + { + "epoch": 2.73, + "learning_rate": 6.368715083798883e-05, + "loss": 1.2902, + "step": 488 + }, + { + "epoch": 2.73, + "learning_rate": 6.340782122905028e-05, + "loss": 1.3226, + "step": 489 + }, + { + "epoch": 2.74, + "learning_rate": 6.312849162011174e-05, + "loss": 1.4353, + "step": 490 + }, + { + "epoch": 2.74, + "learning_rate": 6.284916201117319e-05, + "loss": 1.281, + "step": 491 + }, + { + "epoch": 2.75, + "learning_rate": 6.256983240223464e-05, + "loss": 1.3755, + "step": 492 + }, + { + "epoch": 2.75, + "learning_rate": 6.22905027932961e-05, + "loss": 1.3811, + "step": 493 + }, + { + "epoch": 2.76, + "learning_rate": 6.201117318435755e-05, + "loss": 1.4627, + "step": 494 + }, + { + "epoch": 2.77, + "learning_rate": 6.1731843575419e-05, + "loss": 1.3482, + "step": 495 + }, + { + "epoch": 2.77, + "learning_rate": 6.145251396648045e-05, + "loss": 1.4274, + "step": 496 + }, + { + "epoch": 2.78, + "learning_rate": 6.11731843575419e-05, + "loss": 1.3465, + "step": 497 + }, + { + "epoch": 2.78, + "learning_rate": 6.089385474860335e-05, + "loss": 1.2319, + "step": 498 + }, + { + "epoch": 2.79, + "learning_rate": 6.061452513966481e-05, + "loss": 1.2835, + "step": 499 + }, + { + "epoch": 2.79, + "learning_rate": 6.0335195530726265e-05, + "loss": 1.4836, + "step": 500 + }, + { + "epoch": 2.8, + "learning_rate": 6.005586592178771e-05, + "loss": 1.3878, + "step": 501 + }, + { + "epoch": 2.8, + "learning_rate": 5.9776536312849164e-05, + "loss": 1.4367, + "step": 502 + }, + { + "epoch": 2.81, + "learning_rate": 5.949720670391061e-05, + "loss": 1.2994, + "step": 503 + }, + { + "epoch": 2.82, + "learning_rate": 5.9217877094972076e-05, + "loss": 1.3599, + "step": 504 + }, + { + "epoch": 2.82, + "learning_rate": 5.893854748603352e-05, + "loss": 1.5357, + "step": 505 + }, + { + "epoch": 2.83, + "learning_rate": 5.8659217877094976e-05, + "loss": 1.4497, + 
"step": 506 + }, + { + "epoch": 2.83, + "learning_rate": 5.837988826815642e-05, + "loss": 1.3496, + "step": 507 + }, + { + "epoch": 2.84, + "learning_rate": 5.810055865921789e-05, + "loss": 1.294, + "step": 508 + }, + { + "epoch": 2.84, + "learning_rate": 5.7821229050279334e-05, + "loss": 1.4204, + "step": 509 + }, + { + "epoch": 2.85, + "learning_rate": 5.754189944134079e-05, + "loss": 1.5231, + "step": 510 + }, + { + "epoch": 2.85, + "learning_rate": 5.726256983240223e-05, + "loss": 1.3165, + "step": 511 + }, + { + "epoch": 2.86, + "learning_rate": 5.6983240223463686e-05, + "loss": 1.0796, + "step": 512 + }, + { + "epoch": 2.87, + "learning_rate": 5.6703910614525146e-05, + "loss": 1.4086, + "step": 513 + }, + { + "epoch": 2.87, + "learning_rate": 5.64245810055866e-05, + "loss": 1.2895, + "step": 514 + }, + { + "epoch": 2.88, + "learning_rate": 5.6145251396648045e-05, + "loss": 1.3024, + "step": 515 + }, + { + "epoch": 2.88, + "learning_rate": 5.58659217877095e-05, + "loss": 1.2479, + "step": 516 + }, + { + "epoch": 2.89, + "learning_rate": 5.558659217877096e-05, + "loss": 1.5232, + "step": 517 + }, + { + "epoch": 2.89, + "learning_rate": 5.530726256983241e-05, + "loss": 1.2654, + "step": 518 + }, + { + "epoch": 2.9, + "learning_rate": 5.502793296089386e-05, + "loss": 1.3018, + "step": 519 + }, + { + "epoch": 2.91, + "learning_rate": 5.474860335195531e-05, + "loss": 1.3077, + "step": 520 + }, + { + "epoch": 2.91, + "learning_rate": 5.4469273743016756e-05, + "loss": 1.3501, + "step": 521 + }, + { + "epoch": 2.92, + "learning_rate": 5.4189944134078215e-05, + "loss": 1.2607, + "step": 522 + }, + { + "epoch": 2.92, + "learning_rate": 5.391061452513967e-05, + "loss": 1.3819, + "step": 523 + }, + { + "epoch": 2.93, + "learning_rate": 5.363128491620112e-05, + "loss": 1.321, + "step": 524 + }, + { + "epoch": 2.93, + "learning_rate": 5.335195530726257e-05, + "loss": 1.3351, + "step": 525 + }, + { + "epoch": 2.94, + "learning_rate": 5.307262569832403e-05, + "loss": 1.2205, + "step": 526 + }, + { + "epoch": 2.94, + "learning_rate": 5.279329608938548e-05, + "loss": 1.2579, + "step": 527 + }, + { + "epoch": 2.95, + "learning_rate": 5.251396648044693e-05, + "loss": 1.2771, + "step": 528 + }, + { + "epoch": 2.96, + "learning_rate": 5.223463687150838e-05, + "loss": 1.3492, + "step": 529 + }, + { + "epoch": 2.96, + "learning_rate": 5.195530726256983e-05, + "loss": 1.3232, + "step": 530 + }, + { + "epoch": 2.97, + "learning_rate": 5.167597765363129e-05, + "loss": 1.2223, + "step": 531 + }, + { + "epoch": 2.97, + "learning_rate": 5.139664804469274e-05, + "loss": 1.3983, + "step": 532 + }, + { + "epoch": 2.98, + "learning_rate": 5.111731843575419e-05, + "loss": 1.2987, + "step": 533 + }, + { + "epoch": 2.98, + "learning_rate": 5.0837988826815644e-05, + "loss": 1.3717, + "step": 534 + }, + { + "epoch": 2.99, + "learning_rate": 5.05586592178771e-05, + "loss": 1.4467, + "step": 535 + }, + { + "epoch": 2.99, + "learning_rate": 5.027932960893855e-05, + "loss": 1.4314, + "step": 536 + }, + { + "epoch": 3.0, + "learning_rate": 5e-05, + "loss": 1.6593, + "step": 537 + }, + { + "epoch": 3.01, + "learning_rate": 4.9720670391061455e-05, + "loss": 1.3425, + "step": 538 + }, + { + "epoch": 3.01, + "learning_rate": 4.944134078212291e-05, + "loss": 1.4171, + "step": 539 + }, + { + "epoch": 3.02, + "learning_rate": 4.916201117318436e-05, + "loss": 1.3766, + "step": 540 + }, + { + "epoch": 3.02, + "learning_rate": 4.8882681564245814e-05, + "loss": 1.1605, + "step": 541 + }, + { + "epoch": 3.03, + "learning_rate": 
4.860335195530727e-05, + "loss": 1.3132, + "step": 542 + }, + { + "epoch": 3.03, + "learning_rate": 4.832402234636872e-05, + "loss": 1.4128, + "step": 543 + }, + { + "epoch": 3.04, + "learning_rate": 4.8044692737430166e-05, + "loss": 1.4902, + "step": 544 + }, + { + "epoch": 3.04, + "learning_rate": 4.7765363128491626e-05, + "loss": 1.1832, + "step": 545 + }, + { + "epoch": 3.05, + "learning_rate": 4.748603351955307e-05, + "loss": 1.2019, + "step": 546 + }, + { + "epoch": 3.06, + "learning_rate": 4.7206703910614525e-05, + "loss": 1.2753, + "step": 547 + }, + { + "epoch": 3.06, + "learning_rate": 4.692737430167598e-05, + "loss": 1.2745, + "step": 548 + }, + { + "epoch": 3.07, + "learning_rate": 4.664804469273743e-05, + "loss": 1.3944, + "step": 549 + }, + { + "epoch": 3.07, + "learning_rate": 4.6368715083798884e-05, + "loss": 1.3676, + "step": 550 + }, + { + "epoch": 3.08, + "learning_rate": 4.6089385474860336e-05, + "loss": 1.3226, + "step": 551 + }, + { + "epoch": 3.08, + "learning_rate": 4.581005586592179e-05, + "loss": 1.134, + "step": 552 + }, + { + "epoch": 3.09, + "learning_rate": 4.553072625698324e-05, + "loss": 1.2143, + "step": 553 + }, + { + "epoch": 3.09, + "learning_rate": 4.5251396648044695e-05, + "loss": 1.2543, + "step": 554 + }, + { + "epoch": 3.1, + "learning_rate": 4.497206703910615e-05, + "loss": 1.2136, + "step": 555 + }, + { + "epoch": 3.11, + "learning_rate": 4.4692737430167594e-05, + "loss": 1.5303, + "step": 556 + }, + { + "epoch": 3.11, + "learning_rate": 4.4413407821229054e-05, + "loss": 1.2461, + "step": 557 + }, + { + "epoch": 3.12, + "learning_rate": 4.41340782122905e-05, + "loss": 1.3282, + "step": 558 + }, + { + "epoch": 3.12, + "learning_rate": 4.385474860335196e-05, + "loss": 1.201, + "step": 559 + }, + { + "epoch": 3.13, + "learning_rate": 4.3575418994413406e-05, + "loss": 1.188, + "step": 560 + }, + { + "epoch": 3.13, + "learning_rate": 4.3296089385474866e-05, + "loss": 1.2299, + "step": 561 + }, + { + "epoch": 3.14, + "learning_rate": 4.301675977653631e-05, + "loss": 1.2713, + "step": 562 + }, + { + "epoch": 3.15, + "learning_rate": 4.273743016759777e-05, + "loss": 1.2606, + "step": 563 + }, + { + "epoch": 3.15, + "learning_rate": 4.245810055865922e-05, + "loss": 1.2021, + "step": 564 + }, + { + "epoch": 3.16, + "learning_rate": 4.217877094972068e-05, + "loss": 1.243, + "step": 565 + }, + { + "epoch": 3.16, + "learning_rate": 4.1899441340782123e-05, + "loss": 1.2973, + "step": 566 + }, + { + "epoch": 3.17, + "learning_rate": 4.1620111731843576e-05, + "loss": 1.2369, + "step": 567 + }, + { + "epoch": 3.17, + "learning_rate": 4.134078212290503e-05, + "loss": 1.2729, + "step": 568 + }, + { + "epoch": 3.18, + "learning_rate": 4.106145251396648e-05, + "loss": 1.1511, + "step": 569 + }, + { + "epoch": 3.18, + "learning_rate": 4.0782122905027935e-05, + "loss": 1.3134, + "step": 570 + }, + { + "epoch": 3.19, + "learning_rate": 4.050279329608939e-05, + "loss": 1.3388, + "step": 571 + }, + { + "epoch": 3.2, + "learning_rate": 4.022346368715084e-05, + "loss": 1.3224, + "step": 572 + }, + { + "epoch": 3.2, + "learning_rate": 3.9944134078212294e-05, + "loss": 1.2827, + "step": 573 + }, + { + "epoch": 3.21, + "learning_rate": 3.966480446927375e-05, + "loss": 1.5265, + "step": 574 + }, + { + "epoch": 3.21, + "learning_rate": 3.93854748603352e-05, + "loss": 1.4008, + "step": 575 + }, + { + "epoch": 3.22, + "learning_rate": 3.9106145251396646e-05, + "loss": 1.1727, + "step": 576 + }, + { + "epoch": 3.22, + "learning_rate": 3.8826815642458106e-05, + "loss": 1.1999, + 
"step": 577 + }, + { + "epoch": 3.23, + "learning_rate": 3.854748603351955e-05, + "loss": 1.2031, + "step": 578 + }, + { + "epoch": 3.23, + "learning_rate": 3.826815642458101e-05, + "loss": 1.1469, + "step": 579 + }, + { + "epoch": 3.24, + "learning_rate": 3.798882681564246e-05, + "loss": 1.2784, + "step": 580 + }, + { + "epoch": 3.25, + "learning_rate": 3.770949720670392e-05, + "loss": 1.262, + "step": 581 + }, + { + "epoch": 3.25, + "learning_rate": 3.743016759776536e-05, + "loss": 1.224, + "step": 582 + }, + { + "epoch": 3.26, + "learning_rate": 3.715083798882682e-05, + "loss": 1.3554, + "step": 583 + }, + { + "epoch": 3.26, + "learning_rate": 3.687150837988827e-05, + "loss": 1.5137, + "step": 584 + }, + { + "epoch": 3.27, + "learning_rate": 3.659217877094972e-05, + "loss": 1.2462, + "step": 585 + }, + { + "epoch": 3.27, + "learning_rate": 3.6312849162011175e-05, + "loss": 1.2217, + "step": 586 + }, + { + "epoch": 3.28, + "learning_rate": 3.603351955307263e-05, + "loss": 1.2346, + "step": 587 + }, + { + "epoch": 3.28, + "learning_rate": 3.575418994413408e-05, + "loss": 1.3059, + "step": 588 + }, + { + "epoch": 3.29, + "learning_rate": 3.5474860335195534e-05, + "loss": 1.2355, + "step": 589 + }, + { + "epoch": 3.3, + "learning_rate": 3.519553072625699e-05, + "loss": 1.2233, + "step": 590 + }, + { + "epoch": 3.3, + "learning_rate": 3.491620111731844e-05, + "loss": 1.2014, + "step": 591 + }, + { + "epoch": 3.31, + "learning_rate": 3.463687150837989e-05, + "loss": 1.1919, + "step": 592 + }, + { + "epoch": 3.31, + "learning_rate": 3.4357541899441345e-05, + "loss": 1.2905, + "step": 593 + }, + { + "epoch": 3.32, + "learning_rate": 3.407821229050279e-05, + "loss": 1.0858, + "step": 594 + }, + { + "epoch": 3.32, + "learning_rate": 3.3798882681564244e-05, + "loss": 1.087, + "step": 595 + }, + { + "epoch": 3.33, + "learning_rate": 3.35195530726257e-05, + "loss": 1.1205, + "step": 596 + }, + { + "epoch": 3.34, + "learning_rate": 3.324022346368715e-05, + "loss": 1.1657, + "step": 597 + }, + { + "epoch": 3.34, + "learning_rate": 3.29608938547486e-05, + "loss": 1.5024, + "step": 598 + }, + { + "epoch": 3.35, + "learning_rate": 3.2681564245810056e-05, + "loss": 1.246, + "step": 599 + }, + { + "epoch": 3.35, + "learning_rate": 3.240223463687151e-05, + "loss": 1.4441, + "step": 600 + } + ], + "logging_steps": 1, + "max_steps": 716, + "num_train_epochs": 4, + "save_steps": 100, + "total_flos": 3.0851814594392064e+17, + "trial_name": null, + "trial_params": null +} diff --git a/checkpoint-600/training_args.bin b/checkpoint-600/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..c8672c716e925d0028b4938db147703f58656ff7 --- /dev/null +++ b/checkpoint-600/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:188ae1c421cc0c6435d1f71d8d3423ac4abc7dba0e6fc2efcbc4dbe77c741317 +size 4027 diff --git a/checkpoint-700/README.md b/checkpoint-700/README.md new file mode 100644 index 0000000000000000000000000000000000000000..08371015f02382e6fcba318f4aaea54ae52cd3c4 --- /dev/null +++ b/checkpoint-700/README.md @@ -0,0 +1,34 @@ +--- +library_name: peft +--- +## Training procedure + + +The following `bitsandbytes` quantization config was used during training: +- quant_method: bitsandbytes +- load_in_8bit: True +- load_in_4bit: False +- llm_int8_threshold: 6.0 +- llm_int8_skip_modules: None +- llm_int8_enable_fp32_cpu_offload: False +- llm_int8_has_fp16_weight: False +- bnb_4bit_quant_type: fp4 +- bnb_4bit_use_double_quant: False +- 
bnb_4bit_compute_dtype: float32 + +The following `bitsandbytes` quantization config was used during training: +- quant_method: bitsandbytes +- load_in_8bit: True +- load_in_4bit: False +- llm_int8_threshold: 6.0 +- llm_int8_skip_modules: None +- llm_int8_enable_fp32_cpu_offload: False +- llm_int8_has_fp16_weight: False +- bnb_4bit_quant_type: fp4 +- bnb_4bit_use_double_quant: False +- bnb_4bit_compute_dtype: float32 +### Framework versions + +- PEFT 0.6.0.dev0 + +- PEFT 0.6.0.dev0 diff --git a/checkpoint-700/adapter_config.json b/checkpoint-700/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..c661f74e40394c079de68f07861d73c341803b67 --- /dev/null +++ b/checkpoint-700/adapter_config.json @@ -0,0 +1,25 @@ +{ + "alpha_pattern": {}, + "auto_mapping": null, + "base_model_name_or_path": "bigscience/bloomz-3b", + "bias": "none", + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": true, + "layers_pattern": null, + "layers_to_transform": null, + "lora_alpha": 16, + "lora_dropout": 0.0, + "modules_to_save": null, + "peft_type": "LORA", + "r": 8, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "dense_4h_to_h", + "dense", + "dense_h_to_4h", + "query_key_value" + ], + "task_type": "CAUSAL_LM" +} \ No newline at end of file diff --git a/checkpoint-700/adapter_model.bin b/checkpoint-700/adapter_model.bin new file mode 100644 index 0000000000000000000000000000000000000000..0caec0282fb2027bbfc211849f9ba41039610438 --- /dev/null +++ b/checkpoint-700/adapter_model.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3d1b6626d0b27d106d068977d6a30f5fc34a5a358b55dafa50ff163993945015 +size 39409357 diff --git a/checkpoint-700/optimizer.pt b/checkpoint-700/optimizer.pt new file mode 100644 index 0000000000000000000000000000000000000000..d65c53f0bec067303125e293857bdec3e9a6e893 --- /dev/null +++ b/checkpoint-700/optimizer.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5ae4fe25030e7252e0627af5e03d7f17a8d31b5da0e6730e68e43565c8cd8867 +size 78844421 diff --git a/checkpoint-700/rng_state.pth b/checkpoint-700/rng_state.pth new file mode 100644 index 0000000000000000000000000000000000000000..452f99ac534b9117d836494d73222e3d44e1523b --- /dev/null +++ b/checkpoint-700/rng_state.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c6869750f95a25c4e970298a33adf90e2d7ab52680bf3317239bff1b10103235 +size 14575 diff --git a/checkpoint-700/scheduler.pt b/checkpoint-700/scheduler.pt new file mode 100644 index 0000000000000000000000000000000000000000..1358aafe4baa4670638ee88564b818147a3ea246 --- /dev/null +++ b/checkpoint-700/scheduler.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ed66937ddb6bbdab2dab1a50e3279ef0e50829c8752c931e65a10f8a8aed77af +size 627 diff --git a/checkpoint-700/special_tokens_map.json b/checkpoint-700/special_tokens_map.json new file mode 100644 index 0000000000000000000000000000000000000000..fdafe480f024ff444c7492147536765ce5d55a2d --- /dev/null +++ b/checkpoint-700/special_tokens_map.json @@ -0,0 +1,6 @@ +{ + "bos_token": "", + "eos_token": "", + "pad_token": "", + "unk_token": "" +} diff --git a/checkpoint-700/tokenizer.json b/checkpoint-700/tokenizer.json new file mode 100644 index 0000000000000000000000000000000000000000..673c31abdeadf6576c3c754df86459e1ad64e207 --- /dev/null +++ b/checkpoint-700/tokenizer.json @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:85b00d7db4df5df2e3f01cacc3feda246002a672f3356eec7f4b04a22eb0dfbe +size 14500570 diff --git a/checkpoint-700/tokenizer_config.json b/checkpoint-700/tokenizer_config.json new file mode 100644 index 0000000000000000000000000000000000000000..4b56cc9c2965c07132c35df3e2972e93d98c82c3 --- /dev/null +++ b/checkpoint-700/tokenizer_config.json @@ -0,0 +1,10 @@ +{ + "add_prefix_space": false, + "bos_token": "", + "clean_up_tokenization_spaces": false, + "eos_token": "", + "model_max_length": 1000000000000000019884624838656, + "pad_token": "", + "tokenizer_class": "BloomTokenizer", + "unk_token": "" +} diff --git a/checkpoint-700/trainer_state.json b/checkpoint-700/trainer_state.json new file mode 100644 index 0000000000000000000000000000000000000000..9168ec3620e1259528cd292a3d325413a2bd0a9a --- /dev/null +++ b/checkpoint-700/trainer_state.json @@ -0,0 +1,4219 @@ +{ + "best_metric": null, + "best_model_checkpoint": null, + "epoch": 3.910614525139665, + "eval_steps": 500, + "global_step": 700, + "is_hyper_param_search": false, + "is_local_process_zero": true, + "is_world_process_zero": true, + "log_history": [ + { + "epoch": 0.01, + "learning_rate": 0.00019972067039106145, + "loss": 2.6443, + "step": 1 + }, + { + "epoch": 0.01, + "learning_rate": 0.00019944134078212292, + "loss": 2.4104, + "step": 2 + }, + { + "epoch": 0.02, + "learning_rate": 0.00019916201117318435, + "loss": 2.4975, + "step": 3 + }, + { + "epoch": 0.02, + "learning_rate": 0.00019888268156424582, + "loss": 2.3513, + "step": 4 + }, + { + "epoch": 0.03, + "learning_rate": 0.0001986033519553073, + "loss": 2.4274, + "step": 5 + }, + { + "epoch": 0.03, + "learning_rate": 0.00019832402234636873, + "loss": 2.3628, + "step": 6 + }, + { + "epoch": 0.04, + "learning_rate": 0.0001980446927374302, + "loss": 2.3567, + "step": 7 + }, + { + "epoch": 0.04, + "learning_rate": 0.00019776536312849163, + "loss": 2.4121, + "step": 8 + }, + { + "epoch": 0.05, + "learning_rate": 0.00019748603351955307, + "loss": 2.4033, + "step": 9 + }, + { + "epoch": 0.06, + "learning_rate": 0.00019720670391061454, + "loss": 2.2805, + "step": 10 + }, + { + "epoch": 0.06, + "learning_rate": 0.00019692737430167598, + "loss": 2.2639, + "step": 11 + }, + { + "epoch": 0.07, + "learning_rate": 0.00019664804469273744, + "loss": 2.2724, + "step": 12 + }, + { + "epoch": 0.07, + "learning_rate": 0.00019636871508379888, + "loss": 2.332, + "step": 13 + }, + { + "epoch": 0.08, + "learning_rate": 0.00019608938547486035, + "loss": 2.2261, + "step": 14 + }, + { + "epoch": 0.08, + "learning_rate": 0.00019581005586592182, + "loss": 2.2208, + "step": 15 + }, + { + "epoch": 0.09, + "learning_rate": 0.00019553072625698326, + "loss": 2.3351, + "step": 16 + }, + { + "epoch": 0.09, + "learning_rate": 0.0001952513966480447, + "loss": 2.2475, + "step": 17 + }, + { + "epoch": 0.1, + "learning_rate": 0.00019497206703910616, + "loss": 2.3283, + "step": 18 + }, + { + "epoch": 0.11, + "learning_rate": 0.0001946927374301676, + "loss": 2.1346, + "step": 19 + }, + { + "epoch": 0.11, + "learning_rate": 0.00019441340782122907, + "loss": 2.131, + "step": 20 + }, + { + "epoch": 0.12, + "learning_rate": 0.0001941340782122905, + "loss": 2.1718, + "step": 21 + }, + { + "epoch": 0.12, + "learning_rate": 0.00019385474860335195, + "loss": 2.2446, + "step": 22 + }, + { + "epoch": 0.13, + "learning_rate": 0.0001935754189944134, + "loss": 2.306, + "step": 23 + }, + { + "epoch": 0.13, + "learning_rate": 0.00019329608938547488, + "loss": 2.1908, + "step": 24 + }, + { + "epoch": 0.14, + "learning_rate": 
0.00019301675977653632, + "loss": 2.2844, + "step": 25 + }, + { + "epoch": 0.15, + "learning_rate": 0.00019273743016759779, + "loss": 2.2235, + "step": 26 + }, + { + "epoch": 0.15, + "learning_rate": 0.00019245810055865922, + "loss": 2.1842, + "step": 27 + }, + { + "epoch": 0.16, + "learning_rate": 0.00019217877094972066, + "loss": 2.2675, + "step": 28 + }, + { + "epoch": 0.16, + "learning_rate": 0.00019189944134078213, + "loss": 2.2532, + "step": 29 + }, + { + "epoch": 0.17, + "learning_rate": 0.00019162011173184357, + "loss": 2.1788, + "step": 30 + }, + { + "epoch": 0.17, + "learning_rate": 0.00019134078212290504, + "loss": 2.2494, + "step": 31 + }, + { + "epoch": 0.18, + "learning_rate": 0.0001910614525139665, + "loss": 2.1995, + "step": 32 + }, + { + "epoch": 0.18, + "learning_rate": 0.00019078212290502794, + "loss": 2.1451, + "step": 33 + }, + { + "epoch": 0.19, + "learning_rate": 0.0001905027932960894, + "loss": 2.223, + "step": 34 + }, + { + "epoch": 0.2, + "learning_rate": 0.00019022346368715085, + "loss": 2.2854, + "step": 35 + }, + { + "epoch": 0.2, + "learning_rate": 0.0001899441340782123, + "loss": 2.2265, + "step": 36 + }, + { + "epoch": 0.21, + "learning_rate": 0.00018966480446927375, + "loss": 2.1214, + "step": 37 + }, + { + "epoch": 0.21, + "learning_rate": 0.0001893854748603352, + "loss": 2.1898, + "step": 38 + }, + { + "epoch": 0.22, + "learning_rate": 0.00018910614525139666, + "loss": 2.1974, + "step": 39 + }, + { + "epoch": 0.22, + "learning_rate": 0.0001888268156424581, + "loss": 2.2259, + "step": 40 + }, + { + "epoch": 0.23, + "learning_rate": 0.00018854748603351957, + "loss": 2.2094, + "step": 41 + }, + { + "epoch": 0.23, + "learning_rate": 0.00018826815642458103, + "loss": 2.1731, + "step": 42 + }, + { + "epoch": 0.24, + "learning_rate": 0.00018798882681564247, + "loss": 2.2373, + "step": 43 + }, + { + "epoch": 0.25, + "learning_rate": 0.0001877094972067039, + "loss": 2.2295, + "step": 44 + }, + { + "epoch": 0.25, + "learning_rate": 0.00018743016759776538, + "loss": 2.1947, + "step": 45 + }, + { + "epoch": 0.26, + "learning_rate": 0.00018715083798882682, + "loss": 2.2115, + "step": 46 + }, + { + "epoch": 0.26, + "learning_rate": 0.00018687150837988828, + "loss": 2.1224, + "step": 47 + }, + { + "epoch": 0.27, + "learning_rate": 0.00018659217877094972, + "loss": 2.2137, + "step": 48 + }, + { + "epoch": 0.27, + "learning_rate": 0.00018631284916201116, + "loss": 2.2338, + "step": 49 + }, + { + "epoch": 0.28, + "learning_rate": 0.00018603351955307266, + "loss": 2.1298, + "step": 50 + }, + { + "epoch": 0.28, + "learning_rate": 0.0001857541899441341, + "loss": 2.0883, + "step": 51 + }, + { + "epoch": 0.29, + "learning_rate": 0.00018547486033519553, + "loss": 2.1216, + "step": 52 + }, + { + "epoch": 0.3, + "learning_rate": 0.000185195530726257, + "loss": 2.2112, + "step": 53 + }, + { + "epoch": 0.3, + "learning_rate": 0.00018491620111731844, + "loss": 2.1224, + "step": 54 + }, + { + "epoch": 0.31, + "learning_rate": 0.0001846368715083799, + "loss": 2.2375, + "step": 55 + }, + { + "epoch": 0.31, + "learning_rate": 0.00018435754189944135, + "loss": 2.2235, + "step": 56 + }, + { + "epoch": 0.32, + "learning_rate": 0.00018407821229050279, + "loss": 2.1682, + "step": 57 + }, + { + "epoch": 0.32, + "learning_rate": 0.00018379888268156425, + "loss": 2.2077, + "step": 58 + }, + { + "epoch": 0.33, + "learning_rate": 0.00018351955307262572, + "loss": 2.1596, + "step": 59 + }, + { + "epoch": 0.34, + "learning_rate": 0.00018324022346368716, + "loss": 2.1311, + "step": 60 + }, + { + 
"epoch": 0.34, + "learning_rate": 0.00018296089385474862, + "loss": 2.1333, + "step": 61 + }, + { + "epoch": 0.35, + "learning_rate": 0.00018268156424581006, + "loss": 2.0901, + "step": 62 + }, + { + "epoch": 0.35, + "learning_rate": 0.00018240223463687153, + "loss": 2.1971, + "step": 63 + }, + { + "epoch": 0.36, + "learning_rate": 0.00018212290502793297, + "loss": 2.2602, + "step": 64 + }, + { + "epoch": 0.36, + "learning_rate": 0.0001818435754189944, + "loss": 2.2194, + "step": 65 + }, + { + "epoch": 0.37, + "learning_rate": 0.00018156424581005588, + "loss": 2.1218, + "step": 66 + }, + { + "epoch": 0.37, + "learning_rate": 0.00018128491620111731, + "loss": 2.2049, + "step": 67 + }, + { + "epoch": 0.38, + "learning_rate": 0.00018100558659217878, + "loss": 2.1521, + "step": 68 + }, + { + "epoch": 0.39, + "learning_rate": 0.00018072625698324025, + "loss": 2.112, + "step": 69 + }, + { + "epoch": 0.39, + "learning_rate": 0.0001804469273743017, + "loss": 2.1906, + "step": 70 + }, + { + "epoch": 0.4, + "learning_rate": 0.00018016759776536313, + "loss": 2.1717, + "step": 71 + }, + { + "epoch": 0.4, + "learning_rate": 0.0001798882681564246, + "loss": 2.0712, + "step": 72 + }, + { + "epoch": 0.41, + "learning_rate": 0.00017960893854748603, + "loss": 2.141, + "step": 73 + }, + { + "epoch": 0.41, + "learning_rate": 0.0001793296089385475, + "loss": 2.0656, + "step": 74 + }, + { + "epoch": 0.42, + "learning_rate": 0.00017905027932960894, + "loss": 2.1125, + "step": 75 + }, + { + "epoch": 0.42, + "learning_rate": 0.00017877094972067038, + "loss": 2.0869, + "step": 76 + }, + { + "epoch": 0.43, + "learning_rate": 0.00017849162011173187, + "loss": 2.2478, + "step": 77 + }, + { + "epoch": 0.44, + "learning_rate": 0.0001782122905027933, + "loss": 2.1535, + "step": 78 + }, + { + "epoch": 0.44, + "learning_rate": 0.00017793296089385475, + "loss": 2.1927, + "step": 79 + }, + { + "epoch": 0.45, + "learning_rate": 0.00017765363128491622, + "loss": 2.1213, + "step": 80 + }, + { + "epoch": 0.45, + "learning_rate": 0.00017737430167597766, + "loss": 2.0981, + "step": 81 + }, + { + "epoch": 0.46, + "learning_rate": 0.00017709497206703912, + "loss": 2.1828, + "step": 82 + }, + { + "epoch": 0.46, + "learning_rate": 0.00017681564245810056, + "loss": 2.0562, + "step": 83 + }, + { + "epoch": 0.47, + "learning_rate": 0.000176536312849162, + "loss": 2.1334, + "step": 84 + }, + { + "epoch": 0.47, + "learning_rate": 0.00017625698324022347, + "loss": 2.1225, + "step": 85 + }, + { + "epoch": 0.48, + "learning_rate": 0.00017597765363128493, + "loss": 2.2098, + "step": 86 + }, + { + "epoch": 0.49, + "learning_rate": 0.00017569832402234637, + "loss": 2.1519, + "step": 87 + }, + { + "epoch": 0.49, + "learning_rate": 0.00017541899441340784, + "loss": 2.1132, + "step": 88 + }, + { + "epoch": 0.5, + "learning_rate": 0.00017513966480446928, + "loss": 2.0333, + "step": 89 + }, + { + "epoch": 0.5, + "learning_rate": 0.00017486033519553075, + "loss": 2.2764, + "step": 90 + }, + { + "epoch": 0.51, + "learning_rate": 0.00017458100558659218, + "loss": 2.1838, + "step": 91 + }, + { + "epoch": 0.51, + "learning_rate": 0.00017430167597765362, + "loss": 2.1386, + "step": 92 + }, + { + "epoch": 0.52, + "learning_rate": 0.0001740223463687151, + "loss": 2.1034, + "step": 93 + }, + { + "epoch": 0.53, + "learning_rate": 0.00017374301675977656, + "loss": 2.0346, + "step": 94 + }, + { + "epoch": 0.53, + "learning_rate": 0.000173463687150838, + "loss": 2.0274, + "step": 95 + }, + { + "epoch": 0.54, + "learning_rate": 0.00017318435754189946, + "loss": 
2.1036, + "step": 96 + }, + { + "epoch": 0.54, + "learning_rate": 0.0001729050279329609, + "loss": 2.1208, + "step": 97 + }, + { + "epoch": 0.55, + "learning_rate": 0.00017262569832402237, + "loss": 2.0572, + "step": 98 + }, + { + "epoch": 0.55, + "learning_rate": 0.0001723463687150838, + "loss": 2.1702, + "step": 99 + }, + { + "epoch": 0.56, + "learning_rate": 0.00017206703910614525, + "loss": 2.1302, + "step": 100 + }, + { + "epoch": 0.56, + "learning_rate": 0.0001717877094972067, + "loss": 2.0175, + "step": 101 + }, + { + "epoch": 0.57, + "learning_rate": 0.00017150837988826815, + "loss": 2.1006, + "step": 102 + }, + { + "epoch": 0.58, + "learning_rate": 0.00017122905027932962, + "loss": 2.0662, + "step": 103 + }, + { + "epoch": 0.58, + "learning_rate": 0.00017094972067039109, + "loss": 1.988, + "step": 104 + }, + { + "epoch": 0.59, + "learning_rate": 0.00017067039106145253, + "loss": 2.1008, + "step": 105 + }, + { + "epoch": 0.59, + "learning_rate": 0.00017039106145251396, + "loss": 2.1482, + "step": 106 + }, + { + "epoch": 0.6, + "learning_rate": 0.00017011173184357543, + "loss": 2.1052, + "step": 107 + }, + { + "epoch": 0.6, + "learning_rate": 0.00016983240223463687, + "loss": 2.0978, + "step": 108 + }, + { + "epoch": 0.61, + "learning_rate": 0.00016955307262569834, + "loss": 2.1303, + "step": 109 + }, + { + "epoch": 0.61, + "learning_rate": 0.00016927374301675978, + "loss": 2.0794, + "step": 110 + }, + { + "epoch": 0.62, + "learning_rate": 0.00016899441340782122, + "loss": 2.1059, + "step": 111 + }, + { + "epoch": 0.63, + "learning_rate": 0.0001687150837988827, + "loss": 1.9642, + "step": 112 + }, + { + "epoch": 0.63, + "learning_rate": 0.00016843575418994415, + "loss": 2.0415, + "step": 113 + }, + { + "epoch": 0.64, + "learning_rate": 0.0001681564245810056, + "loss": 2.0795, + "step": 114 + }, + { + "epoch": 0.64, + "learning_rate": 0.00016787709497206705, + "loss": 2.0238, + "step": 115 + }, + { + "epoch": 0.65, + "learning_rate": 0.0001675977653631285, + "loss": 2.078, + "step": 116 + }, + { + "epoch": 0.65, + "learning_rate": 0.00016731843575418996, + "loss": 2.1362, + "step": 117 + }, + { + "epoch": 0.66, + "learning_rate": 0.0001670391061452514, + "loss": 2.0552, + "step": 118 + }, + { + "epoch": 0.66, + "learning_rate": 0.00016675977653631284, + "loss": 2.213, + "step": 119 + }, + { + "epoch": 0.67, + "learning_rate": 0.0001664804469273743, + "loss": 1.9999, + "step": 120 + }, + { + "epoch": 0.68, + "learning_rate": 0.00016620111731843577, + "loss": 2.06, + "step": 121 + }, + { + "epoch": 0.68, + "learning_rate": 0.0001659217877094972, + "loss": 2.0177, + "step": 122 + }, + { + "epoch": 0.69, + "learning_rate": 0.00016564245810055868, + "loss": 2.0504, + "step": 123 + }, + { + "epoch": 0.69, + "learning_rate": 0.00016536312849162012, + "loss": 2.0585, + "step": 124 + }, + { + "epoch": 0.7, + "learning_rate": 0.00016508379888268158, + "loss": 2.0273, + "step": 125 + }, + { + "epoch": 0.7, + "learning_rate": 0.00016480446927374302, + "loss": 2.0549, + "step": 126 + }, + { + "epoch": 0.71, + "learning_rate": 0.00016452513966480446, + "loss": 2.044, + "step": 127 + }, + { + "epoch": 0.72, + "learning_rate": 0.00016424581005586593, + "loss": 2.0731, + "step": 128 + }, + { + "epoch": 0.72, + "learning_rate": 0.00016396648044692737, + "loss": 2.0568, + "step": 129 + }, + { + "epoch": 0.73, + "learning_rate": 0.00016368715083798883, + "loss": 2.007, + "step": 130 + }, + { + "epoch": 0.73, + "learning_rate": 0.0001634078212290503, + "loss": 2.0707, + "step": 131 + }, + { + "epoch": 
0.74, + "learning_rate": 0.00016312849162011174, + "loss": 1.9793, + "step": 132 + }, + { + "epoch": 0.74, + "learning_rate": 0.0001628491620111732, + "loss": 2.1311, + "step": 133 + }, + { + "epoch": 0.75, + "learning_rate": 0.00016256983240223465, + "loss": 2.0016, + "step": 134 + }, + { + "epoch": 0.75, + "learning_rate": 0.00016229050279329609, + "loss": 1.9945, + "step": 135 + }, + { + "epoch": 0.76, + "learning_rate": 0.00016201117318435755, + "loss": 2.0186, + "step": 136 + }, + { + "epoch": 0.77, + "learning_rate": 0.000161731843575419, + "loss": 2.0971, + "step": 137 + }, + { + "epoch": 0.77, + "learning_rate": 0.00016145251396648046, + "loss": 2.0883, + "step": 138 + }, + { + "epoch": 0.78, + "learning_rate": 0.00016117318435754192, + "loss": 2.0803, + "step": 139 + }, + { + "epoch": 0.78, + "learning_rate": 0.00016089385474860336, + "loss": 2.0617, + "step": 140 + }, + { + "epoch": 0.79, + "learning_rate": 0.00016061452513966483, + "loss": 2.1265, + "step": 141 + }, + { + "epoch": 0.79, + "learning_rate": 0.00016033519553072627, + "loss": 2.0151, + "step": 142 + }, + { + "epoch": 0.8, + "learning_rate": 0.0001600558659217877, + "loss": 1.996, + "step": 143 + }, + { + "epoch": 0.8, + "learning_rate": 0.00015977653631284918, + "loss": 2.0164, + "step": 144 + }, + { + "epoch": 0.81, + "learning_rate": 0.00015949720670391061, + "loss": 2.0314, + "step": 145 + }, + { + "epoch": 0.82, + "learning_rate": 0.00015921787709497208, + "loss": 1.9501, + "step": 146 + }, + { + "epoch": 0.82, + "learning_rate": 0.00015893854748603352, + "loss": 2.087, + "step": 147 + }, + { + "epoch": 0.83, + "learning_rate": 0.000158659217877095, + "loss": 2.0262, + "step": 148 + }, + { + "epoch": 0.83, + "learning_rate": 0.00015837988826815643, + "loss": 2.0765, + "step": 149 + }, + { + "epoch": 0.84, + "learning_rate": 0.0001581005586592179, + "loss": 2.105, + "step": 150 + }, + { + "epoch": 0.84, + "learning_rate": 0.00015782122905027933, + "loss": 1.9863, + "step": 151 + }, + { + "epoch": 0.85, + "learning_rate": 0.0001575418994413408, + "loss": 1.9873, + "step": 152 + }, + { + "epoch": 0.85, + "learning_rate": 0.00015726256983240224, + "loss": 2.0094, + "step": 153 + }, + { + "epoch": 0.86, + "learning_rate": 0.00015698324022346368, + "loss": 1.9141, + "step": 154 + }, + { + "epoch": 0.87, + "learning_rate": 0.00015670391061452514, + "loss": 1.917, + "step": 155 + }, + { + "epoch": 0.87, + "learning_rate": 0.00015642458100558658, + "loss": 2.109, + "step": 156 + }, + { + "epoch": 0.88, + "learning_rate": 0.00015614525139664805, + "loss": 1.9799, + "step": 157 + }, + { + "epoch": 0.88, + "learning_rate": 0.00015586592178770952, + "loss": 1.9571, + "step": 158 + }, + { + "epoch": 0.89, + "learning_rate": 0.00015558659217877096, + "loss": 1.9931, + "step": 159 + }, + { + "epoch": 0.89, + "learning_rate": 0.00015530726256983242, + "loss": 2.1004, + "step": 160 + }, + { + "epoch": 0.9, + "learning_rate": 0.00015502793296089386, + "loss": 2.0385, + "step": 161 + }, + { + "epoch": 0.91, + "learning_rate": 0.0001547486033519553, + "loss": 1.9751, + "step": 162 + }, + { + "epoch": 0.91, + "learning_rate": 0.00015446927374301677, + "loss": 2.0544, + "step": 163 + }, + { + "epoch": 0.92, + "learning_rate": 0.0001541899441340782, + "loss": 2.0069, + "step": 164 + }, + { + "epoch": 0.92, + "learning_rate": 0.00015391061452513967, + "loss": 1.9576, + "step": 165 + }, + { + "epoch": 0.93, + "learning_rate": 0.00015363128491620114, + "loss": 1.8991, + "step": 166 + }, + { + "epoch": 0.93, + "learning_rate": 
0.00015335195530726258, + "loss": 1.9336, + "step": 167 + }, + { + "epoch": 0.94, + "learning_rate": 0.00015307262569832405, + "loss": 1.9736, + "step": 168 + }, + { + "epoch": 0.94, + "learning_rate": 0.00015279329608938548, + "loss": 1.9702, + "step": 169 + }, + { + "epoch": 0.95, + "learning_rate": 0.00015251396648044692, + "loss": 1.9055, + "step": 170 + }, + { + "epoch": 0.96, + "learning_rate": 0.0001522346368715084, + "loss": 2.0503, + "step": 171 + }, + { + "epoch": 0.96, + "learning_rate": 0.00015195530726256983, + "loss": 2.0039, + "step": 172 + }, + { + "epoch": 0.97, + "learning_rate": 0.0001516759776536313, + "loss": 1.9406, + "step": 173 + }, + { + "epoch": 0.97, + "learning_rate": 0.00015139664804469274, + "loss": 2.0525, + "step": 174 + }, + { + "epoch": 0.98, + "learning_rate": 0.0001511173184357542, + "loss": 1.9234, + "step": 175 + }, + { + "epoch": 0.98, + "learning_rate": 0.00015083798882681567, + "loss": 1.8614, + "step": 176 + }, + { + "epoch": 0.99, + "learning_rate": 0.0001505586592178771, + "loss": 1.9616, + "step": 177 + }, + { + "epoch": 0.99, + "learning_rate": 0.00015027932960893855, + "loss": 1.9509, + "step": 178 + }, + { + "epoch": 1.0, + "learning_rate": 0.00015000000000000001, + "loss": 1.9592, + "step": 179 + }, + { + "epoch": 1.01, + "learning_rate": 0.00014972067039106145, + "loss": 1.8991, + "step": 180 + }, + { + "epoch": 1.01, + "learning_rate": 0.00014944134078212292, + "loss": 1.9127, + "step": 181 + }, + { + "epoch": 1.02, + "learning_rate": 0.00014916201117318436, + "loss": 1.8982, + "step": 182 + }, + { + "epoch": 1.02, + "learning_rate": 0.0001488826815642458, + "loss": 1.9534, + "step": 183 + }, + { + "epoch": 1.03, + "learning_rate": 0.0001486033519553073, + "loss": 1.7794, + "step": 184 + }, + { + "epoch": 1.03, + "learning_rate": 0.00014832402234636873, + "loss": 1.7958, + "step": 185 + }, + { + "epoch": 1.04, + "learning_rate": 0.00014804469273743017, + "loss": 1.8282, + "step": 186 + }, + { + "epoch": 1.04, + "learning_rate": 0.00014776536312849164, + "loss": 2.0423, + "step": 187 + }, + { + "epoch": 1.05, + "learning_rate": 0.00014748603351955308, + "loss": 1.9282, + "step": 188 + }, + { + "epoch": 1.06, + "learning_rate": 0.00014720670391061454, + "loss": 1.9072, + "step": 189 + }, + { + "epoch": 1.06, + "learning_rate": 0.00014692737430167598, + "loss": 1.8665, + "step": 190 + }, + { + "epoch": 1.07, + "learning_rate": 0.00014664804469273742, + "loss": 1.9021, + "step": 191 + }, + { + "epoch": 1.07, + "learning_rate": 0.0001463687150837989, + "loss": 1.7308, + "step": 192 + }, + { + "epoch": 1.08, + "learning_rate": 0.00014608938547486035, + "loss": 1.9165, + "step": 193 + }, + { + "epoch": 1.08, + "learning_rate": 0.0001458100558659218, + "loss": 1.842, + "step": 194 + }, + { + "epoch": 1.09, + "learning_rate": 0.00014553072625698326, + "loss": 1.9128, + "step": 195 + }, + { + "epoch": 1.09, + "learning_rate": 0.0001452513966480447, + "loss": 1.8005, + "step": 196 + }, + { + "epoch": 1.1, + "learning_rate": 0.00014497206703910614, + "loss": 1.8547, + "step": 197 + }, + { + "epoch": 1.11, + "learning_rate": 0.0001446927374301676, + "loss": 1.9042, + "step": 198 + }, + { + "epoch": 1.11, + "learning_rate": 0.00014441340782122905, + "loss": 1.8609, + "step": 199 + }, + { + "epoch": 1.12, + "learning_rate": 0.0001441340782122905, + "loss": 1.9591, + "step": 200 + }, + { + "epoch": 1.12, + "learning_rate": 0.00014385474860335195, + "loss": 1.8722, + "step": 201 + }, + { + "epoch": 1.13, + "learning_rate": 0.00014357541899441342, + "loss": 
1.8535, + "step": 202 + }, + { + "epoch": 1.13, + "learning_rate": 0.00014329608938547488, + "loss": 1.8676, + "step": 203 + }, + { + "epoch": 1.14, + "learning_rate": 0.00014301675977653632, + "loss": 1.8976, + "step": 204 + }, + { + "epoch": 1.15, + "learning_rate": 0.00014273743016759776, + "loss": 1.7723, + "step": 205 + }, + { + "epoch": 1.15, + "learning_rate": 0.00014245810055865923, + "loss": 1.8165, + "step": 206 + }, + { + "epoch": 1.16, + "learning_rate": 0.00014217877094972067, + "loss": 1.7811, + "step": 207 + }, + { + "epoch": 1.16, + "learning_rate": 0.00014189944134078214, + "loss": 1.908, + "step": 208 + }, + { + "epoch": 1.17, + "learning_rate": 0.00014162011173184357, + "loss": 1.7663, + "step": 209 + }, + { + "epoch": 1.17, + "learning_rate": 0.00014134078212290501, + "loss": 1.6779, + "step": 210 + }, + { + "epoch": 1.18, + "learning_rate": 0.0001410614525139665, + "loss": 1.9039, + "step": 211 + }, + { + "epoch": 1.18, + "learning_rate": 0.00014078212290502795, + "loss": 1.8033, + "step": 212 + }, + { + "epoch": 1.19, + "learning_rate": 0.00014050279329608939, + "loss": 1.8251, + "step": 213 + }, + { + "epoch": 1.2, + "learning_rate": 0.00014022346368715085, + "loss": 1.8505, + "step": 214 + }, + { + "epoch": 1.2, + "learning_rate": 0.0001399441340782123, + "loss": 1.8147, + "step": 215 + }, + { + "epoch": 1.21, + "learning_rate": 0.00013966480446927376, + "loss": 1.8419, + "step": 216 + }, + { + "epoch": 1.21, + "learning_rate": 0.0001393854748603352, + "loss": 1.8401, + "step": 217 + }, + { + "epoch": 1.22, + "learning_rate": 0.00013910614525139664, + "loss": 1.8912, + "step": 218 + }, + { + "epoch": 1.22, + "learning_rate": 0.0001388268156424581, + "loss": 1.7548, + "step": 219 + }, + { + "epoch": 1.23, + "learning_rate": 0.00013854748603351957, + "loss": 1.8741, + "step": 220 + }, + { + "epoch": 1.23, + "learning_rate": 0.000138268156424581, + "loss": 1.9549, + "step": 221 + }, + { + "epoch": 1.24, + "learning_rate": 0.00013798882681564248, + "loss": 1.9093, + "step": 222 + }, + { + "epoch": 1.25, + "learning_rate": 0.00013770949720670392, + "loss": 1.7896, + "step": 223 + }, + { + "epoch": 1.25, + "learning_rate": 0.00013743016759776538, + "loss": 1.8491, + "step": 224 + }, + { + "epoch": 1.26, + "learning_rate": 0.00013715083798882682, + "loss": 1.7851, + "step": 225 + }, + { + "epoch": 1.26, + "learning_rate": 0.00013687150837988826, + "loss": 1.6992, + "step": 226 + }, + { + "epoch": 1.27, + "learning_rate": 0.00013659217877094973, + "loss": 1.9765, + "step": 227 + }, + { + "epoch": 1.27, + "learning_rate": 0.00013631284916201117, + "loss": 1.8179, + "step": 228 + }, + { + "epoch": 1.28, + "learning_rate": 0.00013603351955307263, + "loss": 1.8548, + "step": 229 + }, + { + "epoch": 1.28, + "learning_rate": 0.0001357541899441341, + "loss": 1.8843, + "step": 230 + }, + { + "epoch": 1.29, + "learning_rate": 0.00013547486033519554, + "loss": 1.9105, + "step": 231 + }, + { + "epoch": 1.3, + "learning_rate": 0.00013519553072625698, + "loss": 1.8748, + "step": 232 + }, + { + "epoch": 1.3, + "learning_rate": 0.00013491620111731844, + "loss": 1.7976, + "step": 233 + }, + { + "epoch": 1.31, + "learning_rate": 0.00013463687150837988, + "loss": 1.7369, + "step": 234 + }, + { + "epoch": 1.31, + "learning_rate": 0.00013435754189944135, + "loss": 1.7808, + "step": 235 + }, + { + "epoch": 1.32, + "learning_rate": 0.0001340782122905028, + "loss": 1.8385, + "step": 236 + }, + { + "epoch": 1.32, + "learning_rate": 0.00013379888268156423, + "loss": 1.8295, + "step": 237 + }, + { + 
"epoch": 1.33, + "learning_rate": 0.00013351955307262572, + "loss": 1.757, + "step": 238 + }, + { + "epoch": 1.34, + "learning_rate": 0.00013324022346368716, + "loss": 1.7904, + "step": 239 + }, + { + "epoch": 1.34, + "learning_rate": 0.0001329608938547486, + "loss": 1.7632, + "step": 240 + }, + { + "epoch": 1.35, + "learning_rate": 0.00013268156424581007, + "loss": 1.7867, + "step": 241 + }, + { + "epoch": 1.35, + "learning_rate": 0.0001324022346368715, + "loss": 1.8259, + "step": 242 + }, + { + "epoch": 1.36, + "learning_rate": 0.00013212290502793297, + "loss": 1.6655, + "step": 243 + }, + { + "epoch": 1.36, + "learning_rate": 0.0001318435754189944, + "loss": 1.6848, + "step": 244 + }, + { + "epoch": 1.37, + "learning_rate": 0.00013156424581005585, + "loss": 1.7931, + "step": 245 + }, + { + "epoch": 1.37, + "learning_rate": 0.00013128491620111732, + "loss": 1.7868, + "step": 246 + }, + { + "epoch": 1.38, + "learning_rate": 0.00013100558659217879, + "loss": 1.7732, + "step": 247 + }, + { + "epoch": 1.39, + "learning_rate": 0.00013072625698324022, + "loss": 1.7851, + "step": 248 + }, + { + "epoch": 1.39, + "learning_rate": 0.0001304469273743017, + "loss": 1.7406, + "step": 249 + }, + { + "epoch": 1.4, + "learning_rate": 0.00013016759776536313, + "loss": 1.5853, + "step": 250 + }, + { + "epoch": 1.4, + "learning_rate": 0.0001298882681564246, + "loss": 1.8271, + "step": 251 + }, + { + "epoch": 1.41, + "learning_rate": 0.00012960893854748604, + "loss": 1.6054, + "step": 252 + }, + { + "epoch": 1.41, + "learning_rate": 0.00012932960893854748, + "loss": 1.6884, + "step": 253 + }, + { + "epoch": 1.42, + "learning_rate": 0.00012905027932960894, + "loss": 1.7333, + "step": 254 + }, + { + "epoch": 1.42, + "learning_rate": 0.00012877094972067038, + "loss": 1.803, + "step": 255 + }, + { + "epoch": 1.43, + "learning_rate": 0.00012849162011173185, + "loss": 1.63, + "step": 256 + }, + { + "epoch": 1.44, + "learning_rate": 0.00012821229050279331, + "loss": 1.8023, + "step": 257 + }, + { + "epoch": 1.44, + "learning_rate": 0.00012793296089385475, + "loss": 1.762, + "step": 258 + }, + { + "epoch": 1.45, + "learning_rate": 0.00012765363128491622, + "loss": 1.7565, + "step": 259 + }, + { + "epoch": 1.45, + "learning_rate": 0.00012737430167597766, + "loss": 1.5937, + "step": 260 + }, + { + "epoch": 1.46, + "learning_rate": 0.0001270949720670391, + "loss": 1.7208, + "step": 261 + }, + { + "epoch": 1.46, + "learning_rate": 0.00012681564245810057, + "loss": 1.8097, + "step": 262 + }, + { + "epoch": 1.47, + "learning_rate": 0.000126536312849162, + "loss": 1.7042, + "step": 263 + }, + { + "epoch": 1.47, + "learning_rate": 0.00012625698324022347, + "loss": 1.7892, + "step": 264 + }, + { + "epoch": 1.48, + "learning_rate": 0.00012597765363128494, + "loss": 1.752, + "step": 265 + }, + { + "epoch": 1.49, + "learning_rate": 0.00012569832402234638, + "loss": 1.7333, + "step": 266 + }, + { + "epoch": 1.49, + "learning_rate": 0.00012541899441340784, + "loss": 1.7492, + "step": 267 + }, + { + "epoch": 1.5, + "learning_rate": 0.00012513966480446928, + "loss": 1.818, + "step": 268 + }, + { + "epoch": 1.5, + "learning_rate": 0.00012486033519553072, + "loss": 1.7146, + "step": 269 + }, + { + "epoch": 1.51, + "learning_rate": 0.0001245810055865922, + "loss": 1.7958, + "step": 270 + }, + { + "epoch": 1.51, + "learning_rate": 0.00012430167597765363, + "loss": 1.7212, + "step": 271 + }, + { + "epoch": 1.52, + "learning_rate": 0.0001240223463687151, + "loss": 1.8835, + "step": 272 + }, + { + "epoch": 1.53, + "learning_rate": 
0.00012374301675977656, + "loss": 1.7486, + "step": 273 + }, + { + "epoch": 1.53, + "learning_rate": 0.000123463687150838, + "loss": 1.8103, + "step": 274 + }, + { + "epoch": 1.54, + "learning_rate": 0.00012318435754189944, + "loss": 1.7557, + "step": 275 + }, + { + "epoch": 1.54, + "learning_rate": 0.0001229050279329609, + "loss": 1.6198, + "step": 276 + }, + { + "epoch": 1.55, + "learning_rate": 0.00012262569832402235, + "loss": 1.6971, + "step": 277 + }, + { + "epoch": 1.55, + "learning_rate": 0.0001223463687150838, + "loss": 1.668, + "step": 278 + }, + { + "epoch": 1.56, + "learning_rate": 0.00012206703910614525, + "loss": 1.8795, + "step": 279 + }, + { + "epoch": 1.56, + "learning_rate": 0.0001217877094972067, + "loss": 1.6412, + "step": 280 + }, + { + "epoch": 1.57, + "learning_rate": 0.00012150837988826816, + "loss": 1.7497, + "step": 281 + }, + { + "epoch": 1.58, + "learning_rate": 0.00012122905027932962, + "loss": 1.5577, + "step": 282 + }, + { + "epoch": 1.58, + "learning_rate": 0.00012094972067039108, + "loss": 1.8049, + "step": 283 + }, + { + "epoch": 1.59, + "learning_rate": 0.00012067039106145253, + "loss": 1.6834, + "step": 284 + }, + { + "epoch": 1.59, + "learning_rate": 0.00012039106145251397, + "loss": 1.7978, + "step": 285 + }, + { + "epoch": 1.6, + "learning_rate": 0.00012011173184357542, + "loss": 1.6558, + "step": 286 + }, + { + "epoch": 1.6, + "learning_rate": 0.00011983240223463687, + "loss": 1.7561, + "step": 287 + }, + { + "epoch": 1.61, + "learning_rate": 0.00011955307262569833, + "loss": 1.6807, + "step": 288 + }, + { + "epoch": 1.61, + "learning_rate": 0.00011927374301675978, + "loss": 1.6937, + "step": 289 + }, + { + "epoch": 1.62, + "learning_rate": 0.00011899441340782122, + "loss": 1.6591, + "step": 290 + }, + { + "epoch": 1.63, + "learning_rate": 0.0001187150837988827, + "loss": 1.6771, + "step": 291 + }, + { + "epoch": 1.63, + "learning_rate": 0.00011843575418994415, + "loss": 1.7743, + "step": 292 + }, + { + "epoch": 1.64, + "learning_rate": 0.00011815642458100559, + "loss": 1.5857, + "step": 293 + }, + { + "epoch": 1.64, + "learning_rate": 0.00011787709497206705, + "loss": 1.6999, + "step": 294 + }, + { + "epoch": 1.65, + "learning_rate": 0.0001175977653631285, + "loss": 1.5661, + "step": 295 + }, + { + "epoch": 1.65, + "learning_rate": 0.00011731843575418995, + "loss": 1.7235, + "step": 296 + }, + { + "epoch": 1.66, + "learning_rate": 0.0001170391061452514, + "loss": 1.607, + "step": 297 + }, + { + "epoch": 1.66, + "learning_rate": 0.00011675977653631284, + "loss": 1.68, + "step": 298 + }, + { + "epoch": 1.67, + "learning_rate": 0.0001164804469273743, + "loss": 1.6938, + "step": 299 + }, + { + "epoch": 1.68, + "learning_rate": 0.00011620111731843578, + "loss": 1.6315, + "step": 300 + }, + { + "epoch": 1.68, + "learning_rate": 0.00011592178770949722, + "loss": 1.6802, + "step": 301 + }, + { + "epoch": 1.69, + "learning_rate": 0.00011564245810055867, + "loss": 1.7174, + "step": 302 + }, + { + "epoch": 1.69, + "learning_rate": 0.00011536312849162012, + "loss": 1.5212, + "step": 303 + }, + { + "epoch": 1.7, + "learning_rate": 0.00011508379888268157, + "loss": 1.5808, + "step": 304 + }, + { + "epoch": 1.7, + "learning_rate": 0.00011480446927374303, + "loss": 1.6152, + "step": 305 + }, + { + "epoch": 1.71, + "learning_rate": 0.00011452513966480447, + "loss": 1.5435, + "step": 306 + }, + { + "epoch": 1.72, + "learning_rate": 0.00011424581005586592, + "loss": 1.6603, + "step": 307 + }, + { + "epoch": 1.72, + "learning_rate": 0.00011396648044692737, + "loss": 
1.685, + "step": 308 + }, + { + "epoch": 1.73, + "learning_rate": 0.00011368715083798884, + "loss": 1.6002, + "step": 309 + }, + { + "epoch": 1.73, + "learning_rate": 0.00011340782122905029, + "loss": 1.6046, + "step": 310 + }, + { + "epoch": 1.74, + "learning_rate": 0.00011312849162011174, + "loss": 1.5969, + "step": 311 + }, + { + "epoch": 1.74, + "learning_rate": 0.0001128491620111732, + "loss": 1.5845, + "step": 312 + }, + { + "epoch": 1.75, + "learning_rate": 0.00011256983240223464, + "loss": 1.8183, + "step": 313 + }, + { + "epoch": 1.75, + "learning_rate": 0.00011229050279329609, + "loss": 1.6953, + "step": 314 + }, + { + "epoch": 1.76, + "learning_rate": 0.00011201117318435754, + "loss": 1.7787, + "step": 315 + }, + { + "epoch": 1.77, + "learning_rate": 0.000111731843575419, + "loss": 1.6422, + "step": 316 + }, + { + "epoch": 1.77, + "learning_rate": 0.00011145251396648045, + "loss": 1.7034, + "step": 317 + }, + { + "epoch": 1.78, + "learning_rate": 0.00011117318435754192, + "loss": 1.7301, + "step": 318 + }, + { + "epoch": 1.78, + "learning_rate": 0.00011089385474860337, + "loss": 1.7084, + "step": 319 + }, + { + "epoch": 1.79, + "learning_rate": 0.00011061452513966482, + "loss": 1.772, + "step": 320 + }, + { + "epoch": 1.79, + "learning_rate": 0.00011033519553072626, + "loss": 1.5733, + "step": 321 + }, + { + "epoch": 1.8, + "learning_rate": 0.00011005586592178771, + "loss": 1.6423, + "step": 322 + }, + { + "epoch": 1.8, + "learning_rate": 0.00010977653631284917, + "loss": 1.5809, + "step": 323 + }, + { + "epoch": 1.81, + "learning_rate": 0.00010949720670391062, + "loss": 1.6781, + "step": 324 + }, + { + "epoch": 1.82, + "learning_rate": 0.00010921787709497207, + "loss": 1.6788, + "step": 325 + }, + { + "epoch": 1.82, + "learning_rate": 0.00010893854748603351, + "loss": 1.6346, + "step": 326 + }, + { + "epoch": 1.83, + "learning_rate": 0.00010865921787709499, + "loss": 1.6634, + "step": 327 + }, + { + "epoch": 1.83, + "learning_rate": 0.00010837988826815643, + "loss": 1.7561, + "step": 328 + }, + { + "epoch": 1.84, + "learning_rate": 0.00010810055865921788, + "loss": 1.66, + "step": 329 + }, + { + "epoch": 1.84, + "learning_rate": 0.00010782122905027934, + "loss": 1.7298, + "step": 330 + }, + { + "epoch": 1.85, + "learning_rate": 0.00010754189944134079, + "loss": 1.6893, + "step": 331 + }, + { + "epoch": 1.85, + "learning_rate": 0.00010726256983240224, + "loss": 1.7631, + "step": 332 + }, + { + "epoch": 1.86, + "learning_rate": 0.00010698324022346368, + "loss": 1.6633, + "step": 333 + }, + { + "epoch": 1.87, + "learning_rate": 0.00010670391061452513, + "loss": 1.5388, + "step": 334 + }, + { + "epoch": 1.87, + "learning_rate": 0.00010642458100558659, + "loss": 1.6718, + "step": 335 + }, + { + "epoch": 1.88, + "learning_rate": 0.00010614525139664805, + "loss": 1.5536, + "step": 336 + }, + { + "epoch": 1.88, + "learning_rate": 0.00010586592178770951, + "loss": 1.6483, + "step": 337 + }, + { + "epoch": 1.89, + "learning_rate": 0.00010558659217877096, + "loss": 1.5774, + "step": 338 + }, + { + "epoch": 1.89, + "learning_rate": 0.00010530726256983241, + "loss": 1.6366, + "step": 339 + }, + { + "epoch": 1.9, + "learning_rate": 0.00010502793296089387, + "loss": 1.5567, + "step": 340 + }, + { + "epoch": 1.91, + "learning_rate": 0.0001047486033519553, + "loss": 1.5323, + "step": 341 + }, + { + "epoch": 1.91, + "learning_rate": 0.00010446927374301676, + "loss": 1.4608, + "step": 342 + }, + { + "epoch": 1.92, + "learning_rate": 0.00010418994413407821, + "loss": 1.5933, + "step": 343 + }, + { 
+ "epoch": 1.92, + "learning_rate": 0.00010391061452513966, + "loss": 1.6625, + "step": 344 + }, + { + "epoch": 1.93, + "learning_rate": 0.00010363128491620113, + "loss": 1.7236, + "step": 345 + }, + { + "epoch": 1.93, + "learning_rate": 0.00010335195530726258, + "loss": 1.759, + "step": 346 + }, + { + "epoch": 1.94, + "learning_rate": 0.00010307262569832404, + "loss": 1.7248, + "step": 347 + }, + { + "epoch": 1.94, + "learning_rate": 0.00010279329608938548, + "loss": 1.5144, + "step": 348 + }, + { + "epoch": 1.95, + "learning_rate": 0.00010251396648044693, + "loss": 1.6905, + "step": 349 + }, + { + "epoch": 1.96, + "learning_rate": 0.00010223463687150838, + "loss": 1.6119, + "step": 350 + }, + { + "epoch": 1.96, + "learning_rate": 0.00010195530726256983, + "loss": 1.5464, + "step": 351 + }, + { + "epoch": 1.97, + "learning_rate": 0.00010167597765363129, + "loss": 1.6901, + "step": 352 + }, + { + "epoch": 1.97, + "learning_rate": 0.00010139664804469273, + "loss": 1.3511, + "step": 353 + }, + { + "epoch": 1.98, + "learning_rate": 0.0001011173184357542, + "loss": 1.5434, + "step": 354 + }, + { + "epoch": 1.98, + "learning_rate": 0.00010083798882681566, + "loss": 1.5891, + "step": 355 + }, + { + "epoch": 1.99, + "learning_rate": 0.0001005586592178771, + "loss": 1.6658, + "step": 356 + }, + { + "epoch": 1.99, + "learning_rate": 0.00010027932960893855, + "loss": 1.5657, + "step": 357 + }, + { + "epoch": 2.0, + "learning_rate": 0.0001, + "loss": 1.7005, + "step": 358 + }, + { + "epoch": 2.01, + "learning_rate": 9.972067039106146e-05, + "loss": 1.4202, + "step": 359 + }, + { + "epoch": 2.01, + "learning_rate": 9.944134078212291e-05, + "loss": 1.5262, + "step": 360 + }, + { + "epoch": 2.02, + "learning_rate": 9.916201117318436e-05, + "loss": 1.6323, + "step": 361 + }, + { + "epoch": 2.02, + "learning_rate": 9.888268156424582e-05, + "loss": 1.5521, + "step": 362 + }, + { + "epoch": 2.03, + "learning_rate": 9.860335195530727e-05, + "loss": 1.5762, + "step": 363 + }, + { + "epoch": 2.03, + "learning_rate": 9.832402234636872e-05, + "loss": 1.613, + "step": 364 + }, + { + "epoch": 2.04, + "learning_rate": 9.804469273743018e-05, + "loss": 1.4231, + "step": 365 + }, + { + "epoch": 2.04, + "learning_rate": 9.776536312849163e-05, + "loss": 1.5706, + "step": 366 + }, + { + "epoch": 2.05, + "learning_rate": 9.748603351955308e-05, + "loss": 1.5245, + "step": 367 + }, + { + "epoch": 2.06, + "learning_rate": 9.720670391061453e-05, + "loss": 1.4771, + "step": 368 + }, + { + "epoch": 2.06, + "learning_rate": 9.692737430167597e-05, + "loss": 1.596, + "step": 369 + }, + { + "epoch": 2.07, + "learning_rate": 9.664804469273744e-05, + "loss": 1.537, + "step": 370 + }, + { + "epoch": 2.07, + "learning_rate": 9.636871508379889e-05, + "loss": 1.4276, + "step": 371 + }, + { + "epoch": 2.08, + "learning_rate": 9.608938547486033e-05, + "loss": 1.4746, + "step": 372 + }, + { + "epoch": 2.08, + "learning_rate": 9.581005586592178e-05, + "loss": 1.4374, + "step": 373 + }, + { + "epoch": 2.09, + "learning_rate": 9.553072625698325e-05, + "loss": 1.4704, + "step": 374 + }, + { + "epoch": 2.09, + "learning_rate": 9.52513966480447e-05, + "loss": 1.5997, + "step": 375 + }, + { + "epoch": 2.1, + "learning_rate": 9.497206703910614e-05, + "loss": 1.5034, + "step": 376 + }, + { + "epoch": 2.11, + "learning_rate": 9.46927374301676e-05, + "loss": 1.6392, + "step": 377 + }, + { + "epoch": 2.11, + "learning_rate": 9.441340782122905e-05, + "loss": 1.5611, + "step": 378 + }, + { + "epoch": 2.12, + "learning_rate": 9.413407821229052e-05, + 
"loss": 1.3384, + "step": 379 + }, + { + "epoch": 2.12, + "learning_rate": 9.385474860335196e-05, + "loss": 1.5732, + "step": 380 + }, + { + "epoch": 2.13, + "learning_rate": 9.357541899441341e-05, + "loss": 1.3874, + "step": 381 + }, + { + "epoch": 2.13, + "learning_rate": 9.329608938547486e-05, + "loss": 1.4139, + "step": 382 + }, + { + "epoch": 2.14, + "learning_rate": 9.301675977653633e-05, + "loss": 1.5195, + "step": 383 + }, + { + "epoch": 2.15, + "learning_rate": 9.273743016759777e-05, + "loss": 1.4371, + "step": 384 + }, + { + "epoch": 2.15, + "learning_rate": 9.245810055865922e-05, + "loss": 1.4411, + "step": 385 + }, + { + "epoch": 2.16, + "learning_rate": 9.217877094972067e-05, + "loss": 1.5214, + "step": 386 + }, + { + "epoch": 2.16, + "learning_rate": 9.189944134078213e-05, + "loss": 1.4971, + "step": 387 + }, + { + "epoch": 2.17, + "learning_rate": 9.162011173184358e-05, + "loss": 1.3453, + "step": 388 + }, + { + "epoch": 2.17, + "learning_rate": 9.134078212290503e-05, + "loss": 1.394, + "step": 389 + }, + { + "epoch": 2.18, + "learning_rate": 9.106145251396648e-05, + "loss": 1.5058, + "step": 390 + }, + { + "epoch": 2.18, + "learning_rate": 9.078212290502794e-05, + "loss": 1.4855, + "step": 391 + }, + { + "epoch": 2.19, + "learning_rate": 9.050279329608939e-05, + "loss": 1.4647, + "step": 392 + }, + { + "epoch": 2.2, + "learning_rate": 9.022346368715084e-05, + "loss": 1.3435, + "step": 393 + }, + { + "epoch": 2.2, + "learning_rate": 8.99441340782123e-05, + "loss": 1.5815, + "step": 394 + }, + { + "epoch": 2.21, + "learning_rate": 8.966480446927375e-05, + "loss": 1.4742, + "step": 395 + }, + { + "epoch": 2.21, + "learning_rate": 8.938547486033519e-05, + "loss": 1.5389, + "step": 396 + }, + { + "epoch": 2.22, + "learning_rate": 8.910614525139666e-05, + "loss": 1.384, + "step": 397 + }, + { + "epoch": 2.22, + "learning_rate": 8.882681564245811e-05, + "loss": 1.3967, + "step": 398 + }, + { + "epoch": 2.23, + "learning_rate": 8.854748603351956e-05, + "loss": 1.442, + "step": 399 + }, + { + "epoch": 2.23, + "learning_rate": 8.8268156424581e-05, + "loss": 1.396, + "step": 400 + }, + { + "epoch": 2.24, + "learning_rate": 8.798882681564247e-05, + "loss": 1.4706, + "step": 401 + }, + { + "epoch": 2.25, + "learning_rate": 8.770949720670392e-05, + "loss": 1.4217, + "step": 402 + }, + { + "epoch": 2.25, + "learning_rate": 8.743016759776537e-05, + "loss": 1.5617, + "step": 403 + }, + { + "epoch": 2.26, + "learning_rate": 8.715083798882681e-05, + "loss": 1.5701, + "step": 404 + }, + { + "epoch": 2.26, + "learning_rate": 8.687150837988828e-05, + "loss": 1.3659, + "step": 405 + }, + { + "epoch": 2.27, + "learning_rate": 8.659217877094973e-05, + "loss": 1.3424, + "step": 406 + }, + { + "epoch": 2.27, + "learning_rate": 8.631284916201118e-05, + "loss": 1.5782, + "step": 407 + }, + { + "epoch": 2.28, + "learning_rate": 8.603351955307262e-05, + "loss": 1.3355, + "step": 408 + }, + { + "epoch": 2.28, + "learning_rate": 8.575418994413408e-05, + "loss": 1.4798, + "step": 409 + }, + { + "epoch": 2.29, + "learning_rate": 8.547486033519554e-05, + "loss": 1.3813, + "step": 410 + }, + { + "epoch": 2.3, + "learning_rate": 8.519553072625698e-05, + "loss": 1.6011, + "step": 411 + }, + { + "epoch": 2.3, + "learning_rate": 8.491620111731844e-05, + "loss": 1.4596, + "step": 412 + }, + { + "epoch": 2.31, + "learning_rate": 8.463687150837989e-05, + "loss": 1.4997, + "step": 413 + }, + { + "epoch": 2.31, + "learning_rate": 8.435754189944135e-05, + "loss": 1.5255, + "step": 414 + }, + { + "epoch": 2.32, + 
"learning_rate": 8.40782122905028e-05, + "loss": 1.5305, + "step": 415 + }, + { + "epoch": 2.32, + "learning_rate": 8.379888268156425e-05, + "loss": 1.3029, + "step": 416 + }, + { + "epoch": 2.33, + "learning_rate": 8.35195530726257e-05, + "loss": 1.2046, + "step": 417 + }, + { + "epoch": 2.34, + "learning_rate": 8.324022346368715e-05, + "loss": 1.4985, + "step": 418 + }, + { + "epoch": 2.34, + "learning_rate": 8.29608938547486e-05, + "loss": 1.3486, + "step": 419 + }, + { + "epoch": 2.35, + "learning_rate": 8.268156424581006e-05, + "loss": 1.2839, + "step": 420 + }, + { + "epoch": 2.35, + "learning_rate": 8.240223463687151e-05, + "loss": 1.4286, + "step": 421 + }, + { + "epoch": 2.36, + "learning_rate": 8.212290502793296e-05, + "loss": 1.3893, + "step": 422 + }, + { + "epoch": 2.36, + "learning_rate": 8.184357541899442e-05, + "loss": 1.5477, + "step": 423 + }, + { + "epoch": 2.37, + "learning_rate": 8.156424581005587e-05, + "loss": 1.4973, + "step": 424 + }, + { + "epoch": 2.37, + "learning_rate": 8.128491620111732e-05, + "loss": 1.3549, + "step": 425 + }, + { + "epoch": 2.38, + "learning_rate": 8.100558659217878e-05, + "loss": 1.3966, + "step": 426 + }, + { + "epoch": 2.39, + "learning_rate": 8.072625698324023e-05, + "loss": 1.3974, + "step": 427 + }, + { + "epoch": 2.39, + "learning_rate": 8.044692737430168e-05, + "loss": 1.5211, + "step": 428 + }, + { + "epoch": 2.4, + "learning_rate": 8.016759776536313e-05, + "loss": 1.4751, + "step": 429 + }, + { + "epoch": 2.4, + "learning_rate": 7.988826815642459e-05, + "loss": 1.4322, + "step": 430 + }, + { + "epoch": 2.41, + "learning_rate": 7.960893854748604e-05, + "loss": 1.5991, + "step": 431 + }, + { + "epoch": 2.41, + "learning_rate": 7.93296089385475e-05, + "loss": 1.3288, + "step": 432 + }, + { + "epoch": 2.42, + "learning_rate": 7.905027932960895e-05, + "loss": 1.4098, + "step": 433 + }, + { + "epoch": 2.42, + "learning_rate": 7.87709497206704e-05, + "loss": 1.4819, + "step": 434 + }, + { + "epoch": 2.43, + "learning_rate": 7.849162011173184e-05, + "loss": 1.4003, + "step": 435 + }, + { + "epoch": 2.44, + "learning_rate": 7.821229050279329e-05, + "loss": 1.3475, + "step": 436 + }, + { + "epoch": 2.44, + "learning_rate": 7.793296089385476e-05, + "loss": 1.3354, + "step": 437 + }, + { + "epoch": 2.45, + "learning_rate": 7.765363128491621e-05, + "loss": 1.3144, + "step": 438 + }, + { + "epoch": 2.45, + "learning_rate": 7.737430167597765e-05, + "loss": 1.4118, + "step": 439 + }, + { + "epoch": 2.46, + "learning_rate": 7.70949720670391e-05, + "loss": 1.3716, + "step": 440 + }, + { + "epoch": 2.46, + "learning_rate": 7.681564245810057e-05, + "loss": 1.4191, + "step": 441 + }, + { + "epoch": 2.47, + "learning_rate": 7.653631284916202e-05, + "loss": 1.5173, + "step": 442 + }, + { + "epoch": 2.47, + "learning_rate": 7.625698324022346e-05, + "loss": 1.3005, + "step": 443 + }, + { + "epoch": 2.48, + "learning_rate": 7.597765363128491e-05, + "loss": 1.4586, + "step": 444 + }, + { + "epoch": 2.49, + "learning_rate": 7.569832402234637e-05, + "loss": 1.5086, + "step": 445 + }, + { + "epoch": 2.49, + "learning_rate": 7.541899441340783e-05, + "loss": 1.5446, + "step": 446 + }, + { + "epoch": 2.5, + "learning_rate": 7.513966480446927e-05, + "loss": 1.3274, + "step": 447 + }, + { + "epoch": 2.5, + "learning_rate": 7.486033519553073e-05, + "loss": 1.4424, + "step": 448 + }, + { + "epoch": 2.51, + "learning_rate": 7.458100558659218e-05, + "loss": 1.2836, + "step": 449 + }, + { + "epoch": 2.51, + "learning_rate": 7.430167597765365e-05, + "loss": 1.4322, + 
"step": 450 + }, + { + "epoch": 2.52, + "learning_rate": 7.402234636871509e-05, + "loss": 1.4168, + "step": 451 + }, + { + "epoch": 2.53, + "learning_rate": 7.374301675977654e-05, + "loss": 1.3892, + "step": 452 + }, + { + "epoch": 2.53, + "learning_rate": 7.346368715083799e-05, + "loss": 1.6538, + "step": 453 + }, + { + "epoch": 2.54, + "learning_rate": 7.318435754189944e-05, + "loss": 1.4573, + "step": 454 + }, + { + "epoch": 2.54, + "learning_rate": 7.29050279329609e-05, + "loss": 1.4593, + "step": 455 + }, + { + "epoch": 2.55, + "learning_rate": 7.262569832402235e-05, + "loss": 1.4316, + "step": 456 + }, + { + "epoch": 2.55, + "learning_rate": 7.23463687150838e-05, + "loss": 1.2702, + "step": 457 + }, + { + "epoch": 2.56, + "learning_rate": 7.206703910614526e-05, + "loss": 1.5458, + "step": 458 + }, + { + "epoch": 2.56, + "learning_rate": 7.178770949720671e-05, + "loss": 1.5179, + "step": 459 + }, + { + "epoch": 2.57, + "learning_rate": 7.150837988826816e-05, + "loss": 1.3844, + "step": 460 + }, + { + "epoch": 2.58, + "learning_rate": 7.122905027932961e-05, + "loss": 1.3698, + "step": 461 + }, + { + "epoch": 2.58, + "learning_rate": 7.094972067039107e-05, + "loss": 1.4396, + "step": 462 + }, + { + "epoch": 2.59, + "learning_rate": 7.067039106145251e-05, + "loss": 1.5486, + "step": 463 + }, + { + "epoch": 2.59, + "learning_rate": 7.039106145251397e-05, + "loss": 1.2032, + "step": 464 + }, + { + "epoch": 2.6, + "learning_rate": 7.011173184357543e-05, + "loss": 1.3434, + "step": 465 + }, + { + "epoch": 2.6, + "learning_rate": 6.983240223463688e-05, + "loss": 1.4065, + "step": 466 + }, + { + "epoch": 2.61, + "learning_rate": 6.955307262569832e-05, + "loss": 1.3362, + "step": 467 + }, + { + "epoch": 2.61, + "learning_rate": 6.927374301675979e-05, + "loss": 1.375, + "step": 468 + }, + { + "epoch": 2.62, + "learning_rate": 6.899441340782124e-05, + "loss": 1.4549, + "step": 469 + }, + { + "epoch": 2.63, + "learning_rate": 6.871508379888269e-05, + "loss": 1.3724, + "step": 470 + }, + { + "epoch": 2.63, + "learning_rate": 6.843575418994413e-05, + "loss": 1.1602, + "step": 471 + }, + { + "epoch": 2.64, + "learning_rate": 6.815642458100558e-05, + "loss": 1.4346, + "step": 472 + }, + { + "epoch": 2.64, + "learning_rate": 6.787709497206705e-05, + "loss": 1.2877, + "step": 473 + }, + { + "epoch": 2.65, + "learning_rate": 6.759776536312849e-05, + "loss": 1.394, + "step": 474 + }, + { + "epoch": 2.65, + "learning_rate": 6.731843575418994e-05, + "loss": 1.3387, + "step": 475 + }, + { + "epoch": 2.66, + "learning_rate": 6.70391061452514e-05, + "loss": 1.41, + "step": 476 + }, + { + "epoch": 2.66, + "learning_rate": 6.675977653631286e-05, + "loss": 1.4299, + "step": 477 + }, + { + "epoch": 2.67, + "learning_rate": 6.64804469273743e-05, + "loss": 1.3824, + "step": 478 + }, + { + "epoch": 2.68, + "learning_rate": 6.620111731843575e-05, + "loss": 1.2656, + "step": 479 + }, + { + "epoch": 2.68, + "learning_rate": 6.59217877094972e-05, + "loss": 1.2811, + "step": 480 + }, + { + "epoch": 2.69, + "learning_rate": 6.564245810055866e-05, + "loss": 1.6839, + "step": 481 + }, + { + "epoch": 2.69, + "learning_rate": 6.536312849162011e-05, + "loss": 1.3089, + "step": 482 + }, + { + "epoch": 2.7, + "learning_rate": 6.508379888268157e-05, + "loss": 1.4323, + "step": 483 + }, + { + "epoch": 2.7, + "learning_rate": 6.480446927374302e-05, + "loss": 1.3439, + "step": 484 + }, + { + "epoch": 2.71, + "learning_rate": 6.452513966480447e-05, + "loss": 1.3614, + "step": 485 + }, + { + "epoch": 2.72, + "learning_rate": 
6.424581005586592e-05, + "loss": 1.4974, + "step": 486 + }, + { + "epoch": 2.72, + "learning_rate": 6.396648044692738e-05, + "loss": 1.4963, + "step": 487 + }, + { + "epoch": 2.73, + "learning_rate": 6.368715083798883e-05, + "loss": 1.2902, + "step": 488 + }, + { + "epoch": 2.73, + "learning_rate": 6.340782122905028e-05, + "loss": 1.3226, + "step": 489 + }, + { + "epoch": 2.74, + "learning_rate": 6.312849162011174e-05, + "loss": 1.4353, + "step": 490 + }, + { + "epoch": 2.74, + "learning_rate": 6.284916201117319e-05, + "loss": 1.281, + "step": 491 + }, + { + "epoch": 2.75, + "learning_rate": 6.256983240223464e-05, + "loss": 1.3755, + "step": 492 + }, + { + "epoch": 2.75, + "learning_rate": 6.22905027932961e-05, + "loss": 1.3811, + "step": 493 + }, + { + "epoch": 2.76, + "learning_rate": 6.201117318435755e-05, + "loss": 1.4627, + "step": 494 + }, + { + "epoch": 2.77, + "learning_rate": 6.1731843575419e-05, + "loss": 1.3482, + "step": 495 + }, + { + "epoch": 2.77, + "learning_rate": 6.145251396648045e-05, + "loss": 1.4274, + "step": 496 + }, + { + "epoch": 2.78, + "learning_rate": 6.11731843575419e-05, + "loss": 1.3465, + "step": 497 + }, + { + "epoch": 2.78, + "learning_rate": 6.089385474860335e-05, + "loss": 1.2319, + "step": 498 + }, + { + "epoch": 2.79, + "learning_rate": 6.061452513966481e-05, + "loss": 1.2835, + "step": 499 + }, + { + "epoch": 2.79, + "learning_rate": 6.0335195530726265e-05, + "loss": 1.4836, + "step": 500 + }, + { + "epoch": 2.8, + "learning_rate": 6.005586592178771e-05, + "loss": 1.3878, + "step": 501 + }, + { + "epoch": 2.8, + "learning_rate": 5.9776536312849164e-05, + "loss": 1.4367, + "step": 502 + }, + { + "epoch": 2.81, + "learning_rate": 5.949720670391061e-05, + "loss": 1.2994, + "step": 503 + }, + { + "epoch": 2.82, + "learning_rate": 5.9217877094972076e-05, + "loss": 1.3599, + "step": 504 + }, + { + "epoch": 2.82, + "learning_rate": 5.893854748603352e-05, + "loss": 1.5357, + "step": 505 + }, + { + "epoch": 2.83, + "learning_rate": 5.8659217877094976e-05, + "loss": 1.4497, + "step": 506 + }, + { + "epoch": 2.83, + "learning_rate": 5.837988826815642e-05, + "loss": 1.3496, + "step": 507 + }, + { + "epoch": 2.84, + "learning_rate": 5.810055865921789e-05, + "loss": 1.294, + "step": 508 + }, + { + "epoch": 2.84, + "learning_rate": 5.7821229050279334e-05, + "loss": 1.4204, + "step": 509 + }, + { + "epoch": 2.85, + "learning_rate": 5.754189944134079e-05, + "loss": 1.5231, + "step": 510 + }, + { + "epoch": 2.85, + "learning_rate": 5.726256983240223e-05, + "loss": 1.3165, + "step": 511 + }, + { + "epoch": 2.86, + "learning_rate": 5.6983240223463686e-05, + "loss": 1.0796, + "step": 512 + }, + { + "epoch": 2.87, + "learning_rate": 5.6703910614525146e-05, + "loss": 1.4086, + "step": 513 + }, + { + "epoch": 2.87, + "learning_rate": 5.64245810055866e-05, + "loss": 1.2895, + "step": 514 + }, + { + "epoch": 2.88, + "learning_rate": 5.6145251396648045e-05, + "loss": 1.3024, + "step": 515 + }, + { + "epoch": 2.88, + "learning_rate": 5.58659217877095e-05, + "loss": 1.2479, + "step": 516 + }, + { + "epoch": 2.89, + "learning_rate": 5.558659217877096e-05, + "loss": 1.5232, + "step": 517 + }, + { + "epoch": 2.89, + "learning_rate": 5.530726256983241e-05, + "loss": 1.2654, + "step": 518 + }, + { + "epoch": 2.9, + "learning_rate": 5.502793296089386e-05, + "loss": 1.3018, + "step": 519 + }, + { + "epoch": 2.91, + "learning_rate": 5.474860335195531e-05, + "loss": 1.3077, + "step": 520 + }, + { + "epoch": 2.91, + "learning_rate": 5.4469273743016756e-05, + "loss": 1.3501, + "step": 521 + 
}, + { + "epoch": 2.92, + "learning_rate": 5.4189944134078215e-05, + "loss": 1.2607, + "step": 522 + }, + { + "epoch": 2.92, + "learning_rate": 5.391061452513967e-05, + "loss": 1.3819, + "step": 523 + }, + { + "epoch": 2.93, + "learning_rate": 5.363128491620112e-05, + "loss": 1.321, + "step": 524 + }, + { + "epoch": 2.93, + "learning_rate": 5.335195530726257e-05, + "loss": 1.3351, + "step": 525 + }, + { + "epoch": 2.94, + "learning_rate": 5.307262569832403e-05, + "loss": 1.2205, + "step": 526 + }, + { + "epoch": 2.94, + "learning_rate": 5.279329608938548e-05, + "loss": 1.2579, + "step": 527 + }, + { + "epoch": 2.95, + "learning_rate": 5.251396648044693e-05, + "loss": 1.2771, + "step": 528 + }, + { + "epoch": 2.96, + "learning_rate": 5.223463687150838e-05, + "loss": 1.3492, + "step": 529 + }, + { + "epoch": 2.96, + "learning_rate": 5.195530726256983e-05, + "loss": 1.3232, + "step": 530 + }, + { + "epoch": 2.97, + "learning_rate": 5.167597765363129e-05, + "loss": 1.2223, + "step": 531 + }, + { + "epoch": 2.97, + "learning_rate": 5.139664804469274e-05, + "loss": 1.3983, + "step": 532 + }, + { + "epoch": 2.98, + "learning_rate": 5.111731843575419e-05, + "loss": 1.2987, + "step": 533 + }, + { + "epoch": 2.98, + "learning_rate": 5.0837988826815644e-05, + "loss": 1.3717, + "step": 534 + }, + { + "epoch": 2.99, + "learning_rate": 5.05586592178771e-05, + "loss": 1.4467, + "step": 535 + }, + { + "epoch": 2.99, + "learning_rate": 5.027932960893855e-05, + "loss": 1.4314, + "step": 536 + }, + { + "epoch": 3.0, + "learning_rate": 5e-05, + "loss": 1.6593, + "step": 537 + }, + { + "epoch": 3.01, + "learning_rate": 4.9720670391061455e-05, + "loss": 1.3425, + "step": 538 + }, + { + "epoch": 3.01, + "learning_rate": 4.944134078212291e-05, + "loss": 1.4171, + "step": 539 + }, + { + "epoch": 3.02, + "learning_rate": 4.916201117318436e-05, + "loss": 1.3766, + "step": 540 + }, + { + "epoch": 3.02, + "learning_rate": 4.8882681564245814e-05, + "loss": 1.1605, + "step": 541 + }, + { + "epoch": 3.03, + "learning_rate": 4.860335195530727e-05, + "loss": 1.3132, + "step": 542 + }, + { + "epoch": 3.03, + "learning_rate": 4.832402234636872e-05, + "loss": 1.4128, + "step": 543 + }, + { + "epoch": 3.04, + "learning_rate": 4.8044692737430166e-05, + "loss": 1.4902, + "step": 544 + }, + { + "epoch": 3.04, + "learning_rate": 4.7765363128491626e-05, + "loss": 1.1832, + "step": 545 + }, + { + "epoch": 3.05, + "learning_rate": 4.748603351955307e-05, + "loss": 1.2019, + "step": 546 + }, + { + "epoch": 3.06, + "learning_rate": 4.7206703910614525e-05, + "loss": 1.2753, + "step": 547 + }, + { + "epoch": 3.06, + "learning_rate": 4.692737430167598e-05, + "loss": 1.2745, + "step": 548 + }, + { + "epoch": 3.07, + "learning_rate": 4.664804469273743e-05, + "loss": 1.3944, + "step": 549 + }, + { + "epoch": 3.07, + "learning_rate": 4.6368715083798884e-05, + "loss": 1.3676, + "step": 550 + }, + { + "epoch": 3.08, + "learning_rate": 4.6089385474860336e-05, + "loss": 1.3226, + "step": 551 + }, + { + "epoch": 3.08, + "learning_rate": 4.581005586592179e-05, + "loss": 1.134, + "step": 552 + }, + { + "epoch": 3.09, + "learning_rate": 4.553072625698324e-05, + "loss": 1.2143, + "step": 553 + }, + { + "epoch": 3.09, + "learning_rate": 4.5251396648044695e-05, + "loss": 1.2543, + "step": 554 + }, + { + "epoch": 3.1, + "learning_rate": 4.497206703910615e-05, + "loss": 1.2136, + "step": 555 + }, + { + "epoch": 3.11, + "learning_rate": 4.4692737430167594e-05, + "loss": 1.5303, + "step": 556 + }, + { + "epoch": 3.11, + "learning_rate": 
4.4413407821229054e-05, + "loss": 1.2461, + "step": 557 + }, + { + "epoch": 3.12, + "learning_rate": 4.41340782122905e-05, + "loss": 1.3282, + "step": 558 + }, + { + "epoch": 3.12, + "learning_rate": 4.385474860335196e-05, + "loss": 1.201, + "step": 559 + }, + { + "epoch": 3.13, + "learning_rate": 4.3575418994413406e-05, + "loss": 1.188, + "step": 560 + }, + { + "epoch": 3.13, + "learning_rate": 4.3296089385474866e-05, + "loss": 1.2299, + "step": 561 + }, + { + "epoch": 3.14, + "learning_rate": 4.301675977653631e-05, + "loss": 1.2713, + "step": 562 + }, + { + "epoch": 3.15, + "learning_rate": 4.273743016759777e-05, + "loss": 1.2606, + "step": 563 + }, + { + "epoch": 3.15, + "learning_rate": 4.245810055865922e-05, + "loss": 1.2021, + "step": 564 + }, + { + "epoch": 3.16, + "learning_rate": 4.217877094972068e-05, + "loss": 1.243, + "step": 565 + }, + { + "epoch": 3.16, + "learning_rate": 4.1899441340782123e-05, + "loss": 1.2973, + "step": 566 + }, + { + "epoch": 3.17, + "learning_rate": 4.1620111731843576e-05, + "loss": 1.2369, + "step": 567 + }, + { + "epoch": 3.17, + "learning_rate": 4.134078212290503e-05, + "loss": 1.2729, + "step": 568 + }, + { + "epoch": 3.18, + "learning_rate": 4.106145251396648e-05, + "loss": 1.1511, + "step": 569 + }, + { + "epoch": 3.18, + "learning_rate": 4.0782122905027935e-05, + "loss": 1.3134, + "step": 570 + }, + { + "epoch": 3.19, + "learning_rate": 4.050279329608939e-05, + "loss": 1.3388, + "step": 571 + }, + { + "epoch": 3.2, + "learning_rate": 4.022346368715084e-05, + "loss": 1.3224, + "step": 572 + }, + { + "epoch": 3.2, + "learning_rate": 3.9944134078212294e-05, + "loss": 1.2827, + "step": 573 + }, + { + "epoch": 3.21, + "learning_rate": 3.966480446927375e-05, + "loss": 1.5265, + "step": 574 + }, + { + "epoch": 3.21, + "learning_rate": 3.93854748603352e-05, + "loss": 1.4008, + "step": 575 + }, + { + "epoch": 3.22, + "learning_rate": 3.9106145251396646e-05, + "loss": 1.1727, + "step": 576 + }, + { + "epoch": 3.22, + "learning_rate": 3.8826815642458106e-05, + "loss": 1.1999, + "step": 577 + }, + { + "epoch": 3.23, + "learning_rate": 3.854748603351955e-05, + "loss": 1.2031, + "step": 578 + }, + { + "epoch": 3.23, + "learning_rate": 3.826815642458101e-05, + "loss": 1.1469, + "step": 579 + }, + { + "epoch": 3.24, + "learning_rate": 3.798882681564246e-05, + "loss": 1.2784, + "step": 580 + }, + { + "epoch": 3.25, + "learning_rate": 3.770949720670392e-05, + "loss": 1.262, + "step": 581 + }, + { + "epoch": 3.25, + "learning_rate": 3.743016759776536e-05, + "loss": 1.224, + "step": 582 + }, + { + "epoch": 3.26, + "learning_rate": 3.715083798882682e-05, + "loss": 1.3554, + "step": 583 + }, + { + "epoch": 3.26, + "learning_rate": 3.687150837988827e-05, + "loss": 1.5137, + "step": 584 + }, + { + "epoch": 3.27, + "learning_rate": 3.659217877094972e-05, + "loss": 1.2462, + "step": 585 + }, + { + "epoch": 3.27, + "learning_rate": 3.6312849162011175e-05, + "loss": 1.2217, + "step": 586 + }, + { + "epoch": 3.28, + "learning_rate": 3.603351955307263e-05, + "loss": 1.2346, + "step": 587 + }, + { + "epoch": 3.28, + "learning_rate": 3.575418994413408e-05, + "loss": 1.3059, + "step": 588 + }, + { + "epoch": 3.29, + "learning_rate": 3.5474860335195534e-05, + "loss": 1.2355, + "step": 589 + }, + { + "epoch": 3.3, + "learning_rate": 3.519553072625699e-05, + "loss": 1.2233, + "step": 590 + }, + { + "epoch": 3.3, + "learning_rate": 3.491620111731844e-05, + "loss": 1.2014, + "step": 591 + }, + { + "epoch": 3.31, + "learning_rate": 3.463687150837989e-05, + "loss": 1.1919, + "step": 592 
+ }, + { + "epoch": 3.31, + "learning_rate": 3.4357541899441345e-05, + "loss": 1.2905, + "step": 593 + }, + { + "epoch": 3.32, + "learning_rate": 3.407821229050279e-05, + "loss": 1.0858, + "step": 594 + }, + { + "epoch": 3.32, + "learning_rate": 3.3798882681564244e-05, + "loss": 1.087, + "step": 595 + }, + { + "epoch": 3.33, + "learning_rate": 3.35195530726257e-05, + "loss": 1.1205, + "step": 596 + }, + { + "epoch": 3.34, + "learning_rate": 3.324022346368715e-05, + "loss": 1.1657, + "step": 597 + }, + { + "epoch": 3.34, + "learning_rate": 3.29608938547486e-05, + "loss": 1.5024, + "step": 598 + }, + { + "epoch": 3.35, + "learning_rate": 3.2681564245810056e-05, + "loss": 1.246, + "step": 599 + }, + { + "epoch": 3.35, + "learning_rate": 3.240223463687151e-05, + "loss": 1.4441, + "step": 600 + }, + { + "epoch": 3.36, + "learning_rate": 3.212290502793296e-05, + "loss": 1.19, + "step": 601 + }, + { + "epoch": 3.36, + "learning_rate": 3.1843575418994415e-05, + "loss": 1.2521, + "step": 602 + }, + { + "epoch": 3.37, + "learning_rate": 3.156424581005587e-05, + "loss": 1.2051, + "step": 603 + }, + { + "epoch": 3.37, + "learning_rate": 3.128491620111732e-05, + "loss": 1.4349, + "step": 604 + }, + { + "epoch": 3.38, + "learning_rate": 3.1005586592178774e-05, + "loss": 0.9633, + "step": 605 + }, + { + "epoch": 3.39, + "learning_rate": 3.0726256983240227e-05, + "loss": 1.4547, + "step": 606 + }, + { + "epoch": 3.39, + "learning_rate": 3.0446927374301676e-05, + "loss": 1.2496, + "step": 607 + }, + { + "epoch": 3.4, + "learning_rate": 3.0167597765363132e-05, + "loss": 1.4193, + "step": 608 + }, + { + "epoch": 3.4, + "learning_rate": 2.9888268156424582e-05, + "loss": 1.3427, + "step": 609 + }, + { + "epoch": 3.41, + "learning_rate": 2.9608938547486038e-05, + "loss": 1.3162, + "step": 610 + }, + { + "epoch": 3.41, + "learning_rate": 2.9329608938547488e-05, + "loss": 1.2194, + "step": 611 + }, + { + "epoch": 3.42, + "learning_rate": 2.9050279329608944e-05, + "loss": 1.3005, + "step": 612 + }, + { + "epoch": 3.42, + "learning_rate": 2.8770949720670394e-05, + "loss": 1.2581, + "step": 613 + }, + { + "epoch": 3.43, + "learning_rate": 2.8491620111731843e-05, + "loss": 1.1465, + "step": 614 + }, + { + "epoch": 3.44, + "learning_rate": 2.82122905027933e-05, + "loss": 1.1334, + "step": 615 + }, + { + "epoch": 3.44, + "learning_rate": 2.793296089385475e-05, + "loss": 1.3151, + "step": 616 + }, + { + "epoch": 3.45, + "learning_rate": 2.7653631284916205e-05, + "loss": 1.3295, + "step": 617 + }, + { + "epoch": 3.45, + "learning_rate": 2.7374301675977655e-05, + "loss": 1.0037, + "step": 618 + }, + { + "epoch": 3.46, + "learning_rate": 2.7094972067039108e-05, + "loss": 1.2604, + "step": 619 + }, + { + "epoch": 3.46, + "learning_rate": 2.681564245810056e-05, + "loss": 1.0984, + "step": 620 + }, + { + "epoch": 3.47, + "learning_rate": 2.6536312849162014e-05, + "loss": 1.3646, + "step": 621 + }, + { + "epoch": 3.47, + "learning_rate": 2.6256983240223466e-05, + "loss": 1.3652, + "step": 622 + }, + { + "epoch": 3.48, + "learning_rate": 2.5977653631284916e-05, + "loss": 1.355, + "step": 623 + }, + { + "epoch": 3.49, + "learning_rate": 2.569832402234637e-05, + "loss": 1.2907, + "step": 624 + }, + { + "epoch": 3.49, + "learning_rate": 2.5418994413407822e-05, + "loss": 1.2126, + "step": 625 + }, + { + "epoch": 3.5, + "learning_rate": 2.5139664804469275e-05, + "loss": 1.1342, + "step": 626 + }, + { + "epoch": 3.5, + "learning_rate": 2.4860335195530728e-05, + "loss": 1.1031, + "step": 627 + }, + { + "epoch": 3.51, + 
"learning_rate": 2.458100558659218e-05, + "loss": 1.2255, + "step": 628 + }, + { + "epoch": 3.51, + "learning_rate": 2.4301675977653633e-05, + "loss": 1.2866, + "step": 629 + }, + { + "epoch": 3.52, + "learning_rate": 2.4022346368715083e-05, + "loss": 1.1543, + "step": 630 + }, + { + "epoch": 3.53, + "learning_rate": 2.3743016759776536e-05, + "loss": 1.2531, + "step": 631 + }, + { + "epoch": 3.53, + "learning_rate": 2.346368715083799e-05, + "loss": 1.2455, + "step": 632 + }, + { + "epoch": 3.54, + "learning_rate": 2.3184357541899442e-05, + "loss": 1.2035, + "step": 633 + }, + { + "epoch": 3.54, + "learning_rate": 2.2905027932960895e-05, + "loss": 1.3482, + "step": 634 + }, + { + "epoch": 3.55, + "learning_rate": 2.2625698324022348e-05, + "loss": 1.1586, + "step": 635 + }, + { + "epoch": 3.55, + "learning_rate": 2.2346368715083797e-05, + "loss": 1.1665, + "step": 636 + }, + { + "epoch": 3.56, + "learning_rate": 2.206703910614525e-05, + "loss": 1.2767, + "step": 637 + }, + { + "epoch": 3.56, + "learning_rate": 2.1787709497206703e-05, + "loss": 1.302, + "step": 638 + }, + { + "epoch": 3.57, + "learning_rate": 2.1508379888268156e-05, + "loss": 1.2224, + "step": 639 + }, + { + "epoch": 3.58, + "learning_rate": 2.122905027932961e-05, + "loss": 1.1817, + "step": 640 + }, + { + "epoch": 3.58, + "learning_rate": 2.0949720670391062e-05, + "loss": 1.4949, + "step": 641 + }, + { + "epoch": 3.59, + "learning_rate": 2.0670391061452515e-05, + "loss": 1.2552, + "step": 642 + }, + { + "epoch": 3.59, + "learning_rate": 2.0391061452513968e-05, + "loss": 1.3075, + "step": 643 + }, + { + "epoch": 3.6, + "learning_rate": 2.011173184357542e-05, + "loss": 1.1825, + "step": 644 + }, + { + "epoch": 3.6, + "learning_rate": 1.9832402234636873e-05, + "loss": 1.484, + "step": 645 + }, + { + "epoch": 3.61, + "learning_rate": 1.9553072625698323e-05, + "loss": 1.1765, + "step": 646 + }, + { + "epoch": 3.61, + "learning_rate": 1.9273743016759776e-05, + "loss": 1.381, + "step": 647 + }, + { + "epoch": 3.62, + "learning_rate": 1.899441340782123e-05, + "loss": 1.2835, + "step": 648 + }, + { + "epoch": 3.63, + "learning_rate": 1.871508379888268e-05, + "loss": 1.235, + "step": 649 + }, + { + "epoch": 3.63, + "learning_rate": 1.8435754189944135e-05, + "loss": 1.2041, + "step": 650 + }, + { + "epoch": 3.64, + "learning_rate": 1.8156424581005588e-05, + "loss": 0.9949, + "step": 651 + }, + { + "epoch": 3.64, + "learning_rate": 1.787709497206704e-05, + "loss": 1.2147, + "step": 652 + }, + { + "epoch": 3.65, + "learning_rate": 1.7597765363128493e-05, + "loss": 1.3876, + "step": 653 + }, + { + "epoch": 3.65, + "learning_rate": 1.7318435754189946e-05, + "loss": 1.1255, + "step": 654 + }, + { + "epoch": 3.66, + "learning_rate": 1.7039106145251396e-05, + "loss": 1.2252, + "step": 655 + }, + { + "epoch": 3.66, + "learning_rate": 1.675977653631285e-05, + "loss": 1.303, + "step": 656 + }, + { + "epoch": 3.67, + "learning_rate": 1.64804469273743e-05, + "loss": 1.1023, + "step": 657 + }, + { + "epoch": 3.68, + "learning_rate": 1.6201117318435755e-05, + "loss": 1.1444, + "step": 658 + }, + { + "epoch": 3.68, + "learning_rate": 1.5921787709497207e-05, + "loss": 1.1893, + "step": 659 + }, + { + "epoch": 3.69, + "learning_rate": 1.564245810055866e-05, + "loss": 1.2557, + "step": 660 + }, + { + "epoch": 3.69, + "learning_rate": 1.5363128491620113e-05, + "loss": 1.3431, + "step": 661 + }, + { + "epoch": 3.7, + "learning_rate": 1.5083798882681566e-05, + "loss": 1.389, + "step": 662 + }, + { + "epoch": 3.7, + "learning_rate": 1.4804469273743019e-05, 
+ "loss": 1.0841, + "step": 663 + }, + { + "epoch": 3.71, + "learning_rate": 1.4525139664804472e-05, + "loss": 1.1225, + "step": 664 + }, + { + "epoch": 3.72, + "learning_rate": 1.4245810055865922e-05, + "loss": 1.2914, + "step": 665 + }, + { + "epoch": 3.72, + "learning_rate": 1.3966480446927374e-05, + "loss": 1.2876, + "step": 666 + }, + { + "epoch": 3.73, + "learning_rate": 1.3687150837988827e-05, + "loss": 1.2285, + "step": 667 + }, + { + "epoch": 3.73, + "learning_rate": 1.340782122905028e-05, + "loss": 1.0809, + "step": 668 + }, + { + "epoch": 3.74, + "learning_rate": 1.3128491620111733e-05, + "loss": 1.0506, + "step": 669 + }, + { + "epoch": 3.74, + "learning_rate": 1.2849162011173184e-05, + "loss": 1.2041, + "step": 670 + }, + { + "epoch": 3.75, + "learning_rate": 1.2569832402234637e-05, + "loss": 1.1061, + "step": 671 + }, + { + "epoch": 3.75, + "learning_rate": 1.229050279329609e-05, + "loss": 1.2219, + "step": 672 + }, + { + "epoch": 3.76, + "learning_rate": 1.2011173184357542e-05, + "loss": 1.0946, + "step": 673 + }, + { + "epoch": 3.77, + "learning_rate": 1.1731843575418994e-05, + "loss": 1.089, + "step": 674 + }, + { + "epoch": 3.77, + "learning_rate": 1.1452513966480447e-05, + "loss": 1.237, + "step": 675 + }, + { + "epoch": 3.78, + "learning_rate": 1.1173184357541899e-05, + "loss": 1.4085, + "step": 676 + }, + { + "epoch": 3.78, + "learning_rate": 1.0893854748603351e-05, + "loss": 1.2196, + "step": 677 + }, + { + "epoch": 3.79, + "learning_rate": 1.0614525139664804e-05, + "loss": 1.1506, + "step": 678 + }, + { + "epoch": 3.79, + "learning_rate": 1.0335195530726257e-05, + "loss": 1.2791, + "step": 679 + }, + { + "epoch": 3.8, + "learning_rate": 1.005586592178771e-05, + "loss": 1.3344, + "step": 680 + }, + { + "epoch": 3.8, + "learning_rate": 9.776536312849161e-06, + "loss": 1.3458, + "step": 681 + }, + { + "epoch": 3.81, + "learning_rate": 9.497206703910614e-06, + "loss": 1.1622, + "step": 682 + }, + { + "epoch": 3.82, + "learning_rate": 9.217877094972067e-06, + "loss": 1.1179, + "step": 683 + }, + { + "epoch": 3.82, + "learning_rate": 8.93854748603352e-06, + "loss": 1.2485, + "step": 684 + }, + { + "epoch": 3.83, + "learning_rate": 8.659217877094973e-06, + "loss": 1.1207, + "step": 685 + }, + { + "epoch": 3.83, + "learning_rate": 8.379888268156424e-06, + "loss": 1.3791, + "step": 686 + }, + { + "epoch": 3.84, + "learning_rate": 8.100558659217877e-06, + "loss": 1.2976, + "step": 687 + }, + { + "epoch": 3.84, + "learning_rate": 7.82122905027933e-06, + "loss": 1.2041, + "step": 688 + }, + { + "epoch": 3.85, + "learning_rate": 7.541899441340783e-06, + "loss": 1.1002, + "step": 689 + }, + { + "epoch": 3.85, + "learning_rate": 7.262569832402236e-06, + "loss": 1.2982, + "step": 690 + }, + { + "epoch": 3.86, + "learning_rate": 6.983240223463687e-06, + "loss": 1.3469, + "step": 691 + }, + { + "epoch": 3.87, + "learning_rate": 6.70391061452514e-06, + "loss": 1.2923, + "step": 692 + }, + { + "epoch": 3.87, + "learning_rate": 6.424581005586592e-06, + "loss": 1.3308, + "step": 693 + }, + { + "epoch": 3.88, + "learning_rate": 6.145251396648045e-06, + "loss": 1.2588, + "step": 694 + }, + { + "epoch": 3.88, + "learning_rate": 5.865921787709497e-06, + "loss": 1.0669, + "step": 695 + }, + { + "epoch": 3.89, + "learning_rate": 5.586592178770949e-06, + "loss": 1.4808, + "step": 696 + }, + { + "epoch": 3.89, + "learning_rate": 5.307262569832402e-06, + "loss": 1.2714, + "step": 697 + }, + { + "epoch": 3.9, + "learning_rate": 5.027932960893855e-06, + "loss": 1.1979, + "step": 698 + }, + { + 
"epoch": 3.91, + "learning_rate": 4.748603351955307e-06, + "loss": 1.2548, + "step": 699 + }, + { + "epoch": 3.91, + "learning_rate": 4.46927374301676e-06, + "loss": 1.2473, + "step": 700 + } + ], + "logging_steps": 1, + "max_steps": 716, + "num_train_epochs": 4, + "save_steps": 100, + "total_flos": 3.6028388613095424e+17, + "trial_name": null, + "trial_params": null +} diff --git a/checkpoint-700/training_args.bin b/checkpoint-700/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..c8672c716e925d0028b4938db147703f58656ff7 --- /dev/null +++ b/checkpoint-700/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:188ae1c421cc0c6435d1f71d8d3423ac4abc7dba0e6fc2efcbc4dbe77c741317 +size 4027