diff --git a/README.md b/README.md
index 08371015f02382e6fcba318f4aaea54ae52cd3c4..5f4f5625039e17bc7326cc10fbbc7896bb06453b 100644
--- a/README.md
+++ b/README.md
@@ -4,18 +4,6 @@ library_name: peft
 ## Training procedure
 
 
-The following `bitsandbytes` quantization config was used during training:
-- quant_method: bitsandbytes
-- load_in_8bit: True
-- load_in_4bit: False
-- llm_int8_threshold: 6.0
-- llm_int8_skip_modules: None
-- llm_int8_enable_fp32_cpu_offload: False
-- llm_int8_has_fp16_weight: False
-- bnb_4bit_quant_type: fp4
-- bnb_4bit_use_double_quant: False
-- bnb_4bit_compute_dtype: float32
-
 The following `bitsandbytes` quantization config was used during training:
 - quant_method: bitsandbytes
 - load_in_8bit: True
@@ -29,6 +17,5 @@ The following `bitsandbytes` quantization config was used during training:
 - bnb_4bit_compute_dtype: float32
 ### Framework versions
 
-- PEFT 0.6.0.dev0
 
 - PEFT 0.6.0.dev0
diff --git a/adapter_model.bin b/adapter_model.bin
index 34fbbd2c7ce064739b832ea4f04685aa6d41faa0..40421e31736760bee0f697f3c9dbac7e00d9c192 100644
--- a/adapter_model.bin
+++ b/adapter_model.bin
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:f453889b7b5f1f21797413708a1c82782ca563c8173a223b50fb5e004837c160
+oid sha256:cc8aa748553051f477f8856a04dab6c16386e77b50764d21abc20a51512c7c1e
 size 39409357
diff --git a/checkpoint-1000/README.md b/checkpoint-1000/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..08371015f02382e6fcba318f4aaea54ae52cd3c4
--- /dev/null
+++ b/checkpoint-1000/README.md
@@ -0,0 +1,34 @@
+---
+library_name: peft
+---
+## Training procedure
+
+
+The following `bitsandbytes` quantization config was used during training:
+- quant_method: bitsandbytes
+- load_in_8bit: True
+- load_in_4bit: False
+- llm_int8_threshold: 6.0
+- llm_int8_skip_modules: None
+- llm_int8_enable_fp32_cpu_offload: False
+- llm_int8_has_fp16_weight: False
+- bnb_4bit_quant_type: fp4
+- bnb_4bit_use_double_quant: False
+- bnb_4bit_compute_dtype: float32
+
+The following `bitsandbytes` quantization config was used during training:
+- quant_method: bitsandbytes
+- load_in_8bit: True
+- load_in_4bit: False
+- llm_int8_threshold: 6.0
+- llm_int8_skip_modules: None
+- llm_int8_enable_fp32_cpu_offload: False
+- llm_int8_has_fp16_weight: False
+- bnb_4bit_quant_type: fp4
+- bnb_4bit_use_double_quant: False
+- bnb_4bit_compute_dtype: float32
+### Framework versions
+
+- PEFT 0.6.0.dev0
+
+- PEFT 0.6.0.dev0
diff --git a/checkpoint-1000/adapter_config.json b/checkpoint-1000/adapter_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..751d838ac0c1ae5ca71ca448b25d7a8a0173f01b
--- /dev/null
+++ b/checkpoint-1000/adapter_config.json
@@ -0,0 +1,23 @@
+{
+  "auto_mapping": null,
+  "base_model_name_or_path": "bigscience/bloomz-3b",
+  "bias": "none",
+  "fan_in_fan_out": false,
+  "inference_mode": true,
+  "init_lora_weights": true,
+  "layers_pattern": null,
+  "layers_to_transform": null,
+  "lora_alpha": 16,
+  "lora_dropout": 0.0,
+  "modules_to_save": null,
+  "peft_type": "LORA",
+  "r": 8,
+  "revision": null,
+  "target_modules": [
+    "dense_4h_to_h",
+    "dense",
+    "dense_h_to_4h",
+    "query_key_value"
+  ],
+  "task_type": "CAUSAL_LM"
+}
\ No newline at end of file
diff --git a/checkpoint-1000/adapter_model.bin b/checkpoint-1000/adapter_model.bin
new file mode 100644
index 0000000000000000000000000000000000000000..34fbbd2c7ce064739b832ea4f04685aa6d41faa0
--- /dev/null
+++ b/checkpoint-1000/adapter_model.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f453889b7b5f1f21797413708a1c82782ca563c8173a223b50fb5e004837c160
+size 39409357
diff --git a/checkpoint-1000/optimizer.pt b/checkpoint-1000/optimizer.pt
new file mode 100644
index 0000000000000000000000000000000000000000..efd8cd5608cddb3c4edda6e0d5da2052af77a93d
--- /dev/null
+++ b/checkpoint-1000/optimizer.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:980ce4082f86e8643d829ecc076ea00335d9e2d80d18c421ae802d241b9e3197
+size 78844421
diff --git a/checkpoint-1000/rng_state.pth b/checkpoint-1000/rng_state.pth
new file mode 100644
index 0000000000000000000000000000000000000000..faca5241da3506eadd57588fb689a90f390f5558
--- /dev/null
+++ b/checkpoint-1000/rng_state.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a5432f961e7c82cbe7b6ec30d027cf3db4d17372f65774be8cf037a208cbc1ac
+size 14575
diff --git a/checkpoint-1000/scheduler.pt b/checkpoint-1000/scheduler.pt
new file mode 100644
index 0000000000000000000000000000000000000000..df7757b8324bccfaf0b6d6a0d952f9ebd94027b5
--- /dev/null
+++ b/checkpoint-1000/scheduler.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:203097d159784262bd5c95fedbda5673a26bd26ba8483bc4d9972c3ce0a53781
+size 627
diff --git a/checkpoint-1000/special_tokens_map.json b/checkpoint-1000/special_tokens_map.json
new file mode 100644
index 0000000000000000000000000000000000000000..fdafe480f024ff444c7492147536765ce5d55a2d
--- /dev/null
+++ b/checkpoint-1000/special_tokens_map.json
@@ -0,0 +1,6 @@
+{
+  "bos_token": "<s>",
+  "eos_token": "</s>",
+  "pad_token": "<pad>",
+  "unk_token": "<unk>"
+}
diff --git a/checkpoint-1000/tokenizer.json b/checkpoint-1000/tokenizer.json
new file mode 100644
index 0000000000000000000000000000000000000000..dbf002cafbd4818dcff2abc9156c088d681b4533
--- /dev/null
+++ b/checkpoint-1000/tokenizer.json
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:17a208233d2ee8d8c83b23bc214df737c44806a1919f444e89b31e586cd956ba
+size 14500471
diff --git a/checkpoint-1000/tokenizer_config.json b/checkpoint-1000/tokenizer_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..4b56cc9c2965c07132c35df3e2972e93d98c82c3
--- /dev/null
+++ b/checkpoint-1000/tokenizer_config.json
@@ -0,0 +1,10 @@
+{
+  "add_prefix_space": false,
+  "bos_token": "<s>",
+  "clean_up_tokenization_spaces": false,
+  "eos_token": "</s>",
+  "model_max_length": 1000000000000000019884624838656,
+  "pad_token": "<pad>",
+  "tokenizer_class": "BloomTokenizer",
+  "unk_token": "<unk>"
+}
diff --git a/checkpoint-1000/trainer_state.json b/checkpoint-1000/trainer_state.json
new file mode 100644
index 0000000000000000000000000000000000000000..e75c2b4bd63a8b9fb8aec5addf777c7352df2c99
--- /dev/null
+++ b/checkpoint-1000/trainer_state.json
@@ -0,0 +1,6019 @@
+{
+  "best_metric": null,
+  "best_model_checkpoint": null,
+  "epoch": 20.23395510591211,
+  "eval_steps": 500,
+  "global_step": 1000,
+  "is_hyper_param_search": false,
+  "is_local_process_zero": true,
+  "is_world_process_zero": true,
+  "log_history": [
+    {
+      "epoch": 0.02,
+      "learning_rate": 0.00019987244897959184,
+      "loss": 3.2215,
+      "step": 1
+    },
+    {
+      "epoch": 0.04,
+      "learning_rate": 0.00019974489795918367,
+      "loss": 2.8365,
+      "step": 2
+    },
+    {
+      "epoch": 0.06,
+      "learning_rate": 0.00019961734693877553,
+      "loss": 2.602,
+      "step": 3
+    },
+    {
+      "epoch": 0.08,
+      "learning_rate": 0.00019948979591836736,
+      "loss": 2.4196,
+      "step": 4
+    },
+    {
+      "epoch": 0.1,
+      "learning_rate": 0.0001993622448979592,
+      "loss": 2.2574,
+      "step": 5
+ }, + { + "epoch": 0.12, + "learning_rate": 0.00019923469387755102, + "loss": 2.2239, + "step": 6 + }, + { + "epoch": 0.14, + "learning_rate": 0.00019910714285714288, + "loss": 2.1661, + "step": 7 + }, + { + "epoch": 0.16, + "learning_rate": 0.0001989795918367347, + "loss": 2.0987, + "step": 8 + }, + { + "epoch": 0.18, + "learning_rate": 0.00019885204081632654, + "loss": 2.015, + "step": 9 + }, + { + "epoch": 0.2, + "learning_rate": 0.00019872448979591837, + "loss": 1.9771, + "step": 10 + }, + { + "epoch": 0.22, + "learning_rate": 0.00019859693877551023, + "loss": 2.0271, + "step": 11 + }, + { + "epoch": 0.24, + "learning_rate": 0.00019846938775510203, + "loss": 1.9812, + "step": 12 + }, + { + "epoch": 0.26, + "learning_rate": 0.0001983418367346939, + "loss": 2.0834, + "step": 13 + }, + { + "epoch": 0.28, + "learning_rate": 0.00019821428571428572, + "loss": 1.9174, + "step": 14 + }, + { + "epoch": 0.3, + "learning_rate": 0.00019808673469387755, + "loss": 1.8409, + "step": 15 + }, + { + "epoch": 0.32, + "learning_rate": 0.00019795918367346938, + "loss": 1.929, + "step": 16 + }, + { + "epoch": 0.34, + "learning_rate": 0.00019783163265306124, + "loss": 2.0041, + "step": 17 + }, + { + "epoch": 0.36, + "learning_rate": 0.00019770408163265305, + "loss": 1.9385, + "step": 18 + }, + { + "epoch": 0.38, + "learning_rate": 0.0001975765306122449, + "loss": 1.9592, + "step": 19 + }, + { + "epoch": 0.4, + "learning_rate": 0.00019744897959183674, + "loss": 1.9701, + "step": 20 + }, + { + "epoch": 0.42, + "learning_rate": 0.0001973214285714286, + "loss": 1.9277, + "step": 21 + }, + { + "epoch": 0.45, + "learning_rate": 0.00019719387755102042, + "loss": 1.8394, + "step": 22 + }, + { + "epoch": 0.47, + "learning_rate": 0.00019706632653061226, + "loss": 1.8666, + "step": 23 + }, + { + "epoch": 0.49, + "learning_rate": 0.00019693877551020409, + "loss": 1.8997, + "step": 24 + }, + { + "epoch": 0.51, + "learning_rate": 0.00019681122448979592, + "loss": 1.9432, + "step": 25 + }, + { + "epoch": 0.53, + "learning_rate": 0.00019668367346938777, + "loss": 1.9137, + "step": 26 + }, + { + "epoch": 0.55, + "learning_rate": 0.0001965561224489796, + "loss": 1.905, + "step": 27 + }, + { + "epoch": 0.57, + "learning_rate": 0.00019642857142857144, + "loss": 1.8708, + "step": 28 + }, + { + "epoch": 0.59, + "learning_rate": 0.00019630102040816327, + "loss": 1.9097, + "step": 29 + }, + { + "epoch": 0.61, + "learning_rate": 0.00019617346938775513, + "loss": 1.896, + "step": 30 + }, + { + "epoch": 0.63, + "learning_rate": 0.00019604591836734696, + "loss": 1.8834, + "step": 31 + }, + { + "epoch": 0.65, + "learning_rate": 0.0001959183673469388, + "loss": 1.8323, + "step": 32 + }, + { + "epoch": 0.67, + "learning_rate": 0.00019579081632653062, + "loss": 1.804, + "step": 33 + }, + { + "epoch": 0.69, + "learning_rate": 0.00019566326530612248, + "loss": 1.8906, + "step": 34 + }, + { + "epoch": 0.71, + "learning_rate": 0.00019553571428571428, + "loss": 1.8693, + "step": 35 + }, + { + "epoch": 0.73, + "learning_rate": 0.00019540816326530614, + "loss": 1.9308, + "step": 36 + }, + { + "epoch": 0.75, + "learning_rate": 0.00019528061224489797, + "loss": 1.8082, + "step": 37 + }, + { + "epoch": 0.77, + "learning_rate": 0.0001951530612244898, + "loss": 1.848, + "step": 38 + }, + { + "epoch": 0.79, + "learning_rate": 0.00019502551020408163, + "loss": 1.8866, + "step": 39 + }, + { + "epoch": 0.81, + "learning_rate": 0.0001948979591836735, + "loss": 1.7844, + "step": 40 + }, + { + "epoch": 0.83, + "learning_rate": 0.0001947704081632653, + 
"loss": 1.8485, + "step": 41 + }, + { + "epoch": 0.85, + "learning_rate": 0.00019464285714285715, + "loss": 1.7917, + "step": 42 + }, + { + "epoch": 0.87, + "learning_rate": 0.00019451530612244898, + "loss": 1.7342, + "step": 43 + }, + { + "epoch": 0.89, + "learning_rate": 0.00019438775510204084, + "loss": 1.8479, + "step": 44 + }, + { + "epoch": 0.91, + "learning_rate": 0.00019426020408163267, + "loss": 1.8639, + "step": 45 + }, + { + "epoch": 0.93, + "learning_rate": 0.0001941326530612245, + "loss": 1.8166, + "step": 46 + }, + { + "epoch": 0.95, + "learning_rate": 0.00019400510204081633, + "loss": 1.7566, + "step": 47 + }, + { + "epoch": 0.97, + "learning_rate": 0.00019387755102040816, + "loss": 1.8071, + "step": 48 + }, + { + "epoch": 0.99, + "learning_rate": 0.00019375000000000002, + "loss": 1.8612, + "step": 49 + }, + { + "epoch": 1.01, + "learning_rate": 0.00019362244897959185, + "loss": 1.7819, + "step": 50 + }, + { + "epoch": 1.03, + "learning_rate": 0.00019349489795918368, + "loss": 1.8647, + "step": 51 + }, + { + "epoch": 1.05, + "learning_rate": 0.0001933673469387755, + "loss": 1.8196, + "step": 52 + }, + { + "epoch": 1.07, + "learning_rate": 0.00019323979591836737, + "loss": 1.8027, + "step": 53 + }, + { + "epoch": 1.09, + "learning_rate": 0.00019311224489795917, + "loss": 1.8927, + "step": 54 + }, + { + "epoch": 1.11, + "learning_rate": 0.00019298469387755103, + "loss": 1.8481, + "step": 55 + }, + { + "epoch": 1.13, + "learning_rate": 0.00019285714285714286, + "loss": 1.7781, + "step": 56 + }, + { + "epoch": 1.15, + "learning_rate": 0.00019272959183673472, + "loss": 1.8101, + "step": 57 + }, + { + "epoch": 1.17, + "learning_rate": 0.00019260204081632653, + "loss": 1.7257, + "step": 58 + }, + { + "epoch": 1.19, + "learning_rate": 0.00019247448979591838, + "loss": 1.8185, + "step": 59 + }, + { + "epoch": 1.21, + "learning_rate": 0.00019234693877551021, + "loss": 1.8557, + "step": 60 + }, + { + "epoch": 1.23, + "learning_rate": 0.00019221938775510204, + "loss": 1.7418, + "step": 61 + }, + { + "epoch": 1.25, + "learning_rate": 0.00019209183673469388, + "loss": 1.6879, + "step": 62 + }, + { + "epoch": 1.27, + "learning_rate": 0.00019196428571428573, + "loss": 1.7651, + "step": 63 + }, + { + "epoch": 1.29, + "learning_rate": 0.00019183673469387756, + "loss": 1.7759, + "step": 64 + }, + { + "epoch": 1.32, + "learning_rate": 0.0001917091836734694, + "loss": 1.7691, + "step": 65 + }, + { + "epoch": 1.34, + "learning_rate": 0.00019158163265306123, + "loss": 1.7794, + "step": 66 + }, + { + "epoch": 1.36, + "learning_rate": 0.00019145408163265306, + "loss": 1.8152, + "step": 67 + }, + { + "epoch": 1.38, + "learning_rate": 0.00019132653061224492, + "loss": 1.8052, + "step": 68 + }, + { + "epoch": 1.4, + "learning_rate": 0.00019119897959183675, + "loss": 1.8054, + "step": 69 + }, + { + "epoch": 1.42, + "learning_rate": 0.00019107142857142858, + "loss": 1.8114, + "step": 70 + }, + { + "epoch": 1.44, + "learning_rate": 0.0001909438775510204, + "loss": 1.7749, + "step": 71 + }, + { + "epoch": 1.46, + "learning_rate": 0.00019081632653061227, + "loss": 1.777, + "step": 72 + }, + { + "epoch": 1.48, + "learning_rate": 0.0001906887755102041, + "loss": 1.7896, + "step": 73 + }, + { + "epoch": 1.5, + "learning_rate": 0.00019056122448979593, + "loss": 1.8335, + "step": 74 + }, + { + "epoch": 1.52, + "learning_rate": 0.00019043367346938776, + "loss": 1.8155, + "step": 75 + }, + { + "epoch": 1.54, + "learning_rate": 0.00019030612244897962, + "loss": 1.8224, + "step": 76 + }, + { + "epoch": 1.56, + 
"learning_rate": 0.00019017857142857142, + "loss": 1.7889, + "step": 77 + }, + { + "epoch": 1.58, + "learning_rate": 0.00019005102040816328, + "loss": 1.8866, + "step": 78 + }, + { + "epoch": 1.6, + "learning_rate": 0.0001899234693877551, + "loss": 1.8439, + "step": 79 + }, + { + "epoch": 1.62, + "learning_rate": 0.00018979591836734697, + "loss": 1.7906, + "step": 80 + }, + { + "epoch": 1.64, + "learning_rate": 0.00018966836734693877, + "loss": 1.8627, + "step": 81 + }, + { + "epoch": 1.66, + "learning_rate": 0.00018954081632653063, + "loss": 1.7497, + "step": 82 + }, + { + "epoch": 1.68, + "learning_rate": 0.00018941326530612246, + "loss": 1.7936, + "step": 83 + }, + { + "epoch": 1.7, + "learning_rate": 0.0001892857142857143, + "loss": 1.8341, + "step": 84 + }, + { + "epoch": 1.72, + "learning_rate": 0.00018915816326530612, + "loss": 1.7868, + "step": 85 + }, + { + "epoch": 1.74, + "learning_rate": 0.00018903061224489798, + "loss": 1.7493, + "step": 86 + }, + { + "epoch": 1.76, + "learning_rate": 0.0001889030612244898, + "loss": 1.7926, + "step": 87 + }, + { + "epoch": 1.78, + "learning_rate": 0.00018877551020408164, + "loss": 1.8278, + "step": 88 + }, + { + "epoch": 1.8, + "learning_rate": 0.00018864795918367347, + "loss": 1.7387, + "step": 89 + }, + { + "epoch": 1.82, + "learning_rate": 0.0001885204081632653, + "loss": 1.7669, + "step": 90 + }, + { + "epoch": 1.84, + "learning_rate": 0.00018839285714285716, + "loss": 1.7686, + "step": 91 + }, + { + "epoch": 1.86, + "learning_rate": 0.000188265306122449, + "loss": 1.7759, + "step": 92 + }, + { + "epoch": 1.88, + "learning_rate": 0.00018813775510204082, + "loss": 1.7016, + "step": 93 + }, + { + "epoch": 1.9, + "learning_rate": 0.00018801020408163265, + "loss": 1.8123, + "step": 94 + }, + { + "epoch": 1.92, + "learning_rate": 0.0001878826530612245, + "loss": 1.8315, + "step": 95 + }, + { + "epoch": 1.94, + "learning_rate": 0.00018775510204081634, + "loss": 1.7679, + "step": 96 + }, + { + "epoch": 1.96, + "learning_rate": 0.00018762755102040817, + "loss": 1.7874, + "step": 97 + }, + { + "epoch": 1.98, + "learning_rate": 0.0001875, + "loss": 1.8008, + "step": 98 + }, + { + "epoch": 2.0, + "learning_rate": 0.00018737244897959186, + "loss": 1.7177, + "step": 99 + }, + { + "epoch": 2.02, + "learning_rate": 0.00018724489795918367, + "loss": 1.7272, + "step": 100 + }, + { + "epoch": 2.04, + "learning_rate": 0.00018711734693877552, + "loss": 1.7848, + "step": 101 + }, + { + "epoch": 2.06, + "learning_rate": 0.00018698979591836735, + "loss": 1.744, + "step": 102 + }, + { + "epoch": 2.08, + "learning_rate": 0.00018686224489795919, + "loss": 1.7005, + "step": 103 + }, + { + "epoch": 2.1, + "learning_rate": 0.00018673469387755102, + "loss": 1.8247, + "step": 104 + }, + { + "epoch": 2.12, + "learning_rate": 0.00018660714285714287, + "loss": 1.6855, + "step": 105 + }, + { + "epoch": 2.14, + "learning_rate": 0.0001864795918367347, + "loss": 1.7627, + "step": 106 + }, + { + "epoch": 2.17, + "learning_rate": 0.00018635204081632654, + "loss": 1.7564, + "step": 107 + }, + { + "epoch": 2.19, + "learning_rate": 0.00018622448979591837, + "loss": 1.8237, + "step": 108 + }, + { + "epoch": 2.21, + "learning_rate": 0.00018609693877551022, + "loss": 1.7421, + "step": 109 + }, + { + "epoch": 2.23, + "learning_rate": 0.00018596938775510206, + "loss": 1.7517, + "step": 110 + }, + { + "epoch": 2.25, + "learning_rate": 0.0001858418367346939, + "loss": 1.7515, + "step": 111 + }, + { + "epoch": 2.27, + "learning_rate": 0.00018571428571428572, + "loss": 1.7842, + "step": 
112 + }, + { + "epoch": 2.29, + "learning_rate": 0.00018558673469387755, + "loss": 1.8001, + "step": 113 + }, + { + "epoch": 2.31, + "learning_rate": 0.0001854591836734694, + "loss": 1.7653, + "step": 114 + }, + { + "epoch": 2.33, + "learning_rate": 0.00018533163265306124, + "loss": 1.694, + "step": 115 + }, + { + "epoch": 2.35, + "learning_rate": 0.00018520408163265307, + "loss": 1.7457, + "step": 116 + }, + { + "epoch": 2.37, + "learning_rate": 0.0001850765306122449, + "loss": 1.7899, + "step": 117 + }, + { + "epoch": 2.39, + "learning_rate": 0.00018494897959183676, + "loss": 1.7473, + "step": 118 + }, + { + "epoch": 2.41, + "learning_rate": 0.0001848214285714286, + "loss": 1.6639, + "step": 119 + }, + { + "epoch": 2.43, + "learning_rate": 0.00018469387755102042, + "loss": 1.762, + "step": 120 + }, + { + "epoch": 2.45, + "learning_rate": 0.00018456632653061225, + "loss": 1.7378, + "step": 121 + }, + { + "epoch": 2.47, + "learning_rate": 0.0001844387755102041, + "loss": 1.672, + "step": 122 + }, + { + "epoch": 2.49, + "learning_rate": 0.0001843112244897959, + "loss": 1.7267, + "step": 123 + }, + { + "epoch": 2.51, + "learning_rate": 0.00018418367346938777, + "loss": 1.7825, + "step": 124 + }, + { + "epoch": 2.53, + "learning_rate": 0.0001840561224489796, + "loss": 1.7566, + "step": 125 + }, + { + "epoch": 2.55, + "learning_rate": 0.00018392857142857143, + "loss": 1.8169, + "step": 126 + }, + { + "epoch": 2.57, + "learning_rate": 0.00018380102040816326, + "loss": 1.6801, + "step": 127 + }, + { + "epoch": 2.59, + "learning_rate": 0.00018367346938775512, + "loss": 1.7292, + "step": 128 + }, + { + "epoch": 2.61, + "learning_rate": 0.00018354591836734695, + "loss": 1.737, + "step": 129 + }, + { + "epoch": 2.63, + "learning_rate": 0.00018341836734693878, + "loss": 1.7696, + "step": 130 + }, + { + "epoch": 2.65, + "learning_rate": 0.0001832908163265306, + "loss": 1.7239, + "step": 131 + }, + { + "epoch": 2.67, + "learning_rate": 0.00018316326530612247, + "loss": 1.7441, + "step": 132 + }, + { + "epoch": 2.69, + "learning_rate": 0.0001830357142857143, + "loss": 1.7825, + "step": 133 + }, + { + "epoch": 2.71, + "learning_rate": 0.00018290816326530613, + "loss": 1.7411, + "step": 134 + }, + { + "epoch": 2.73, + "learning_rate": 0.00018278061224489796, + "loss": 1.7119, + "step": 135 + }, + { + "epoch": 2.75, + "learning_rate": 0.0001826530612244898, + "loss": 1.7443, + "step": 136 + }, + { + "epoch": 2.77, + "learning_rate": 0.00018252551020408165, + "loss": 1.7197, + "step": 137 + }, + { + "epoch": 2.79, + "learning_rate": 0.00018239795918367348, + "loss": 1.7273, + "step": 138 + }, + { + "epoch": 2.81, + "learning_rate": 0.0001822704081632653, + "loss": 1.7681, + "step": 139 + }, + { + "epoch": 2.83, + "learning_rate": 0.00018214285714285714, + "loss": 1.8088, + "step": 140 + }, + { + "epoch": 2.85, + "learning_rate": 0.000182015306122449, + "loss": 1.7301, + "step": 141 + }, + { + "epoch": 2.87, + "learning_rate": 0.00018188775510204083, + "loss": 1.6853, + "step": 142 + }, + { + "epoch": 2.89, + "learning_rate": 0.00018176020408163266, + "loss": 1.6966, + "step": 143 + }, + { + "epoch": 2.91, + "learning_rate": 0.0001816326530612245, + "loss": 1.7938, + "step": 144 + }, + { + "epoch": 2.93, + "learning_rate": 0.00018150510204081635, + "loss": 1.7639, + "step": 145 + }, + { + "epoch": 2.95, + "learning_rate": 0.00018137755102040816, + "loss": 1.7527, + "step": 146 + }, + { + "epoch": 2.97, + "learning_rate": 0.00018125000000000001, + "loss": 1.7386, + "step": 147 + }, + { + "epoch": 2.99, + 
"learning_rate": 0.00018112244897959185, + "loss": 1.7223, + "step": 148 + }, + { + "epoch": 3.01, + "learning_rate": 0.00018099489795918368, + "loss": 1.7571, + "step": 149 + }, + { + "epoch": 3.04, + "learning_rate": 0.0001808673469387755, + "loss": 1.7054, + "step": 150 + }, + { + "epoch": 3.06, + "learning_rate": 0.00018073979591836737, + "loss": 1.6581, + "step": 151 + }, + { + "epoch": 3.08, + "learning_rate": 0.00018061224489795917, + "loss": 1.681, + "step": 152 + }, + { + "epoch": 3.1, + "learning_rate": 0.00018048469387755103, + "loss": 1.7425, + "step": 153 + }, + { + "epoch": 3.12, + "learning_rate": 0.00018035714285714286, + "loss": 1.7108, + "step": 154 + }, + { + "epoch": 3.14, + "learning_rate": 0.00018022959183673472, + "loss": 1.7194, + "step": 155 + }, + { + "epoch": 3.16, + "learning_rate": 0.00018010204081632655, + "loss": 1.6953, + "step": 156 + }, + { + "epoch": 3.18, + "learning_rate": 0.00017997448979591838, + "loss": 1.669, + "step": 157 + }, + { + "epoch": 3.2, + "learning_rate": 0.0001798469387755102, + "loss": 1.744, + "step": 158 + }, + { + "epoch": 3.22, + "learning_rate": 0.00017971938775510204, + "loss": 1.6467, + "step": 159 + }, + { + "epoch": 3.24, + "learning_rate": 0.0001795918367346939, + "loss": 1.7103, + "step": 160 + }, + { + "epoch": 3.26, + "learning_rate": 0.00017946428571428573, + "loss": 1.6662, + "step": 161 + }, + { + "epoch": 3.28, + "learning_rate": 0.00017933673469387756, + "loss": 1.6657, + "step": 162 + }, + { + "epoch": 3.3, + "learning_rate": 0.0001792091836734694, + "loss": 1.791, + "step": 163 + }, + { + "epoch": 3.32, + "learning_rate": 0.00017908163265306125, + "loss": 1.7704, + "step": 164 + }, + { + "epoch": 3.34, + "learning_rate": 0.00017895408163265305, + "loss": 1.7229, + "step": 165 + }, + { + "epoch": 3.36, + "learning_rate": 0.0001788265306122449, + "loss": 1.76, + "step": 166 + }, + { + "epoch": 3.38, + "learning_rate": 0.00017869897959183674, + "loss": 1.6482, + "step": 167 + }, + { + "epoch": 3.4, + "learning_rate": 0.0001785714285714286, + "loss": 1.8076, + "step": 168 + }, + { + "epoch": 3.42, + "learning_rate": 0.0001784438775510204, + "loss": 1.7368, + "step": 169 + }, + { + "epoch": 3.44, + "learning_rate": 0.00017831632653061226, + "loss": 1.6264, + "step": 170 + }, + { + "epoch": 3.46, + "learning_rate": 0.0001781887755102041, + "loss": 1.6289, + "step": 171 + }, + { + "epoch": 3.48, + "learning_rate": 0.00017806122448979592, + "loss": 1.7913, + "step": 172 + }, + { + "epoch": 3.5, + "learning_rate": 0.00017793367346938775, + "loss": 1.6985, + "step": 173 + }, + { + "epoch": 3.52, + "learning_rate": 0.0001778061224489796, + "loss": 1.6936, + "step": 174 + }, + { + "epoch": 3.54, + "learning_rate": 0.00017767857142857141, + "loss": 1.8068, + "step": 175 + }, + { + "epoch": 3.56, + "learning_rate": 0.00017755102040816327, + "loss": 1.7243, + "step": 176 + }, + { + "epoch": 3.58, + "learning_rate": 0.0001774234693877551, + "loss": 1.6893, + "step": 177 + }, + { + "epoch": 3.6, + "learning_rate": 0.00017729591836734696, + "loss": 1.8122, + "step": 178 + }, + { + "epoch": 3.62, + "learning_rate": 0.0001771683673469388, + "loss": 1.6562, + "step": 179 + }, + { + "epoch": 3.64, + "learning_rate": 0.00017704081632653062, + "loss": 1.6999, + "step": 180 + }, + { + "epoch": 3.66, + "learning_rate": 0.00017691326530612245, + "loss": 1.7229, + "step": 181 + }, + { + "epoch": 3.68, + "learning_rate": 0.00017678571428571428, + "loss": 1.6764, + "step": 182 + }, + { + "epoch": 3.7, + "learning_rate": 0.00017665816326530614, + 
"loss": 1.6982, + "step": 183 + }, + { + "epoch": 3.72, + "learning_rate": 0.00017653061224489797, + "loss": 1.696, + "step": 184 + }, + { + "epoch": 3.74, + "learning_rate": 0.0001764030612244898, + "loss": 1.6797, + "step": 185 + }, + { + "epoch": 3.76, + "learning_rate": 0.00017627551020408164, + "loss": 1.637, + "step": 186 + }, + { + "epoch": 3.78, + "learning_rate": 0.0001761479591836735, + "loss": 1.7074, + "step": 187 + }, + { + "epoch": 3.8, + "learning_rate": 0.0001760204081632653, + "loss": 1.705, + "step": 188 + }, + { + "epoch": 3.82, + "learning_rate": 0.00017589285714285716, + "loss": 1.6153, + "step": 189 + }, + { + "epoch": 3.84, + "learning_rate": 0.00017576530612244899, + "loss": 1.7354, + "step": 190 + }, + { + "epoch": 3.86, + "learning_rate": 0.00017563775510204084, + "loss": 1.6941, + "step": 191 + }, + { + "epoch": 3.88, + "learning_rate": 0.00017551020408163265, + "loss": 1.7231, + "step": 192 + }, + { + "epoch": 3.91, + "learning_rate": 0.0001753826530612245, + "loss": 1.7663, + "step": 193 + }, + { + "epoch": 3.93, + "learning_rate": 0.00017525510204081634, + "loss": 1.6532, + "step": 194 + }, + { + "epoch": 3.95, + "learning_rate": 0.00017512755102040817, + "loss": 1.7115, + "step": 195 + }, + { + "epoch": 3.97, + "learning_rate": 0.000175, + "loss": 1.6955, + "step": 196 + }, + { + "epoch": 3.99, + "learning_rate": 0.00017487244897959186, + "loss": 1.6863, + "step": 197 + }, + { + "epoch": 4.01, + "learning_rate": 0.00017474489795918366, + "loss": 1.7012, + "step": 198 + }, + { + "epoch": 4.03, + "learning_rate": 0.00017461734693877552, + "loss": 1.5927, + "step": 199 + }, + { + "epoch": 4.05, + "learning_rate": 0.00017448979591836735, + "loss": 1.6272, + "step": 200 + }, + { + "epoch": 4.07, + "learning_rate": 0.00017436224489795918, + "loss": 1.5994, + "step": 201 + }, + { + "epoch": 4.09, + "learning_rate": 0.00017423469387755104, + "loss": 1.7141, + "step": 202 + }, + { + "epoch": 4.11, + "learning_rate": 0.00017410714285714287, + "loss": 1.7547, + "step": 203 + }, + { + "epoch": 4.13, + "learning_rate": 0.0001739795918367347, + "loss": 1.6254, + "step": 204 + }, + { + "epoch": 4.15, + "learning_rate": 0.00017385204081632653, + "loss": 1.6686, + "step": 205 + }, + { + "epoch": 4.17, + "learning_rate": 0.0001737244897959184, + "loss": 1.6684, + "step": 206 + }, + { + "epoch": 4.19, + "learning_rate": 0.00017359693877551022, + "loss": 1.6724, + "step": 207 + }, + { + "epoch": 4.21, + "learning_rate": 0.00017346938775510205, + "loss": 1.7361, + "step": 208 + }, + { + "epoch": 4.23, + "learning_rate": 0.00017334183673469388, + "loss": 1.7167, + "step": 209 + }, + { + "epoch": 4.25, + "learning_rate": 0.00017321428571428574, + "loss": 1.7226, + "step": 210 + }, + { + "epoch": 4.27, + "learning_rate": 0.00017308673469387754, + "loss": 1.7133, + "step": 211 + }, + { + "epoch": 4.29, + "learning_rate": 0.0001729591836734694, + "loss": 1.649, + "step": 212 + }, + { + "epoch": 4.31, + "learning_rate": 0.00017283163265306123, + "loss": 1.7104, + "step": 213 + }, + { + "epoch": 4.33, + "learning_rate": 0.00017270408163265306, + "loss": 1.6861, + "step": 214 + }, + { + "epoch": 4.35, + "learning_rate": 0.0001725765306122449, + "loss": 1.648, + "step": 215 + }, + { + "epoch": 4.37, + "learning_rate": 0.00017244897959183675, + "loss": 1.6215, + "step": 216 + }, + { + "epoch": 4.39, + "learning_rate": 0.00017232142857142858, + "loss": 1.6334, + "step": 217 + }, + { + "epoch": 4.41, + "learning_rate": 0.0001721938775510204, + "loss": 1.6283, + "step": 218 + }, + { + 
"epoch": 4.43, + "learning_rate": 0.00017206632653061224, + "loss": 1.6462, + "step": 219 + }, + { + "epoch": 4.45, + "learning_rate": 0.0001719387755102041, + "loss": 1.7233, + "step": 220 + }, + { + "epoch": 4.47, + "learning_rate": 0.0001718112244897959, + "loss": 1.7839, + "step": 221 + }, + { + "epoch": 4.49, + "learning_rate": 0.00017168367346938776, + "loss": 1.7204, + "step": 222 + }, + { + "epoch": 4.51, + "learning_rate": 0.0001715561224489796, + "loss": 1.7671, + "step": 223 + }, + { + "epoch": 4.53, + "learning_rate": 0.00017142857142857143, + "loss": 1.6824, + "step": 224 + }, + { + "epoch": 4.55, + "learning_rate": 0.00017130102040816328, + "loss": 1.7068, + "step": 225 + }, + { + "epoch": 4.57, + "learning_rate": 0.00017117346938775511, + "loss": 1.6515, + "step": 226 + }, + { + "epoch": 4.59, + "learning_rate": 0.00017104591836734694, + "loss": 1.6586, + "step": 227 + }, + { + "epoch": 4.61, + "learning_rate": 0.00017091836734693878, + "loss": 1.6355, + "step": 228 + }, + { + "epoch": 4.63, + "learning_rate": 0.00017079081632653063, + "loss": 1.7173, + "step": 229 + }, + { + "epoch": 4.65, + "learning_rate": 0.00017066326530612246, + "loss": 1.6585, + "step": 230 + }, + { + "epoch": 4.67, + "learning_rate": 0.0001705357142857143, + "loss": 1.5856, + "step": 231 + }, + { + "epoch": 4.69, + "learning_rate": 0.00017040816326530613, + "loss": 1.5923, + "step": 232 + }, + { + "epoch": 4.71, + "learning_rate": 0.00017028061224489798, + "loss": 1.7128, + "step": 233 + }, + { + "epoch": 4.73, + "learning_rate": 0.0001701530612244898, + "loss": 1.6971, + "step": 234 + }, + { + "epoch": 4.75, + "learning_rate": 0.00017002551020408165, + "loss": 1.6416, + "step": 235 + }, + { + "epoch": 4.78, + "learning_rate": 0.00016989795918367348, + "loss": 1.645, + "step": 236 + }, + { + "epoch": 4.8, + "learning_rate": 0.0001697704081632653, + "loss": 1.6792, + "step": 237 + }, + { + "epoch": 4.82, + "learning_rate": 0.00016964285714285714, + "loss": 1.6522, + "step": 238 + }, + { + "epoch": 4.84, + "learning_rate": 0.000169515306122449, + "loss": 1.6315, + "step": 239 + }, + { + "epoch": 4.86, + "learning_rate": 0.00016938775510204083, + "loss": 1.6622, + "step": 240 + }, + { + "epoch": 4.88, + "learning_rate": 0.00016926020408163266, + "loss": 1.6566, + "step": 241 + }, + { + "epoch": 4.9, + "learning_rate": 0.0001691326530612245, + "loss": 1.7141, + "step": 242 + }, + { + "epoch": 4.92, + "learning_rate": 0.00016900510204081635, + "loss": 1.5873, + "step": 243 + }, + { + "epoch": 4.94, + "learning_rate": 0.00016887755102040818, + "loss": 1.6571, + "step": 244 + }, + { + "epoch": 4.96, + "learning_rate": 0.00016875, + "loss": 1.6829, + "step": 245 + }, + { + "epoch": 4.98, + "learning_rate": 0.00016862244897959184, + "loss": 1.6935, + "step": 246 + }, + { + "epoch": 5.0, + "learning_rate": 0.00016849489795918367, + "loss": 1.6782, + "step": 247 + }, + { + "epoch": 5.02, + "learning_rate": 0.00016836734693877553, + "loss": 1.622, + "step": 248 + }, + { + "epoch": 5.04, + "learning_rate": 0.00016823979591836736, + "loss": 1.6596, + "step": 249 + }, + { + "epoch": 5.06, + "learning_rate": 0.0001681122448979592, + "loss": 1.5821, + "step": 250 + }, + { + "epoch": 5.08, + "learning_rate": 0.00016798469387755102, + "loss": 1.7292, + "step": 251 + }, + { + "epoch": 5.1, + "learning_rate": 0.00016785714285714288, + "loss": 1.646, + "step": 252 + }, + { + "epoch": 5.12, + "learning_rate": 0.0001677295918367347, + "loss": 1.6969, + "step": 253 + }, + { + "epoch": 5.14, + "learning_rate": 
0.00016760204081632654, + "loss": 1.6082, + "step": 254 + }, + { + "epoch": 5.16, + "learning_rate": 0.00016747448979591837, + "loss": 1.5843, + "step": 255 + }, + { + "epoch": 5.18, + "learning_rate": 0.00016734693877551023, + "loss": 1.6827, + "step": 256 + }, + { + "epoch": 5.2, + "learning_rate": 0.00016721938775510203, + "loss": 1.5824, + "step": 257 + }, + { + "epoch": 5.22, + "learning_rate": 0.0001670918367346939, + "loss": 1.6795, + "step": 258 + }, + { + "epoch": 5.24, + "learning_rate": 0.00016696428571428572, + "loss": 1.5639, + "step": 259 + }, + { + "epoch": 5.26, + "learning_rate": 0.00016683673469387755, + "loss": 1.592, + "step": 260 + }, + { + "epoch": 5.28, + "learning_rate": 0.00016670918367346938, + "loss": 1.65, + "step": 261 + }, + { + "epoch": 5.3, + "learning_rate": 0.00016658163265306124, + "loss": 1.5592, + "step": 262 + }, + { + "epoch": 5.32, + "learning_rate": 0.00016645408163265305, + "loss": 1.5091, + "step": 263 + }, + { + "epoch": 5.34, + "learning_rate": 0.0001663265306122449, + "loss": 1.6138, + "step": 264 + }, + { + "epoch": 5.36, + "learning_rate": 0.00016619897959183673, + "loss": 1.625, + "step": 265 + }, + { + "epoch": 5.38, + "learning_rate": 0.0001660714285714286, + "loss": 1.5757, + "step": 266 + }, + { + "epoch": 5.4, + "learning_rate": 0.00016594387755102042, + "loss": 1.6372, + "step": 267 + }, + { + "epoch": 5.42, + "learning_rate": 0.00016581632653061225, + "loss": 1.5891, + "step": 268 + }, + { + "epoch": 5.44, + "learning_rate": 0.00016568877551020409, + "loss": 1.6893, + "step": 269 + }, + { + "epoch": 5.46, + "learning_rate": 0.00016556122448979592, + "loss": 1.6662, + "step": 270 + }, + { + "epoch": 5.48, + "learning_rate": 0.00016543367346938777, + "loss": 1.7132, + "step": 271 + }, + { + "epoch": 5.5, + "learning_rate": 0.0001653061224489796, + "loss": 1.5835, + "step": 272 + }, + { + "epoch": 5.52, + "learning_rate": 0.00016517857142857144, + "loss": 1.6342, + "step": 273 + }, + { + "epoch": 5.54, + "learning_rate": 0.00016505102040816327, + "loss": 1.6717, + "step": 274 + }, + { + "epoch": 5.56, + "learning_rate": 0.00016492346938775512, + "loss": 1.6248, + "step": 275 + }, + { + "epoch": 5.58, + "learning_rate": 0.00016479591836734696, + "loss": 1.6117, + "step": 276 + }, + { + "epoch": 5.6, + "learning_rate": 0.0001646683673469388, + "loss": 1.6798, + "step": 277 + }, + { + "epoch": 5.63, + "learning_rate": 0.00016454081632653062, + "loss": 1.6406, + "step": 278 + }, + { + "epoch": 5.65, + "learning_rate": 0.00016441326530612248, + "loss": 1.6512, + "step": 279 + }, + { + "epoch": 5.67, + "learning_rate": 0.00016428571428571428, + "loss": 1.6102, + "step": 280 + }, + { + "epoch": 5.69, + "learning_rate": 0.00016415816326530614, + "loss": 1.6113, + "step": 281 + }, + { + "epoch": 5.71, + "learning_rate": 0.00016403061224489797, + "loss": 1.7116, + "step": 282 + }, + { + "epoch": 5.73, + "learning_rate": 0.0001639030612244898, + "loss": 1.6846, + "step": 283 + }, + { + "epoch": 5.75, + "learning_rate": 0.00016377551020408163, + "loss": 1.6911, + "step": 284 + }, + { + "epoch": 5.77, + "learning_rate": 0.0001636479591836735, + "loss": 1.6202, + "step": 285 + }, + { + "epoch": 5.79, + "learning_rate": 0.0001635204081632653, + "loss": 1.5715, + "step": 286 + }, + { + "epoch": 5.81, + "learning_rate": 0.00016339285714285715, + "loss": 1.6461, + "step": 287 + }, + { + "epoch": 5.83, + "learning_rate": 0.00016326530612244898, + "loss": 1.6624, + "step": 288 + }, + { + "epoch": 5.85, + "learning_rate": 0.00016313775510204084, + "loss": 
1.6535, + "step": 289 + }, + { + "epoch": 5.87, + "learning_rate": 0.00016301020408163267, + "loss": 1.6275, + "step": 290 + }, + { + "epoch": 5.89, + "learning_rate": 0.0001628826530612245, + "loss": 1.6636, + "step": 291 + }, + { + "epoch": 5.91, + "learning_rate": 0.00016275510204081633, + "loss": 1.6546, + "step": 292 + }, + { + "epoch": 5.93, + "learning_rate": 0.00016262755102040816, + "loss": 1.7274, + "step": 293 + }, + { + "epoch": 5.95, + "learning_rate": 0.00016250000000000002, + "loss": 1.5901, + "step": 294 + }, + { + "epoch": 5.97, + "learning_rate": 0.00016237244897959185, + "loss": 1.6046, + "step": 295 + }, + { + "epoch": 5.99, + "learning_rate": 0.00016224489795918368, + "loss": 1.5828, + "step": 296 + }, + { + "epoch": 6.01, + "learning_rate": 0.0001621173469387755, + "loss": 1.6435, + "step": 297 + }, + { + "epoch": 6.03, + "learning_rate": 0.00016198979591836737, + "loss": 1.6263, + "step": 298 + }, + { + "epoch": 6.05, + "learning_rate": 0.00016186224489795917, + "loss": 1.4944, + "step": 299 + }, + { + "epoch": 6.07, + "learning_rate": 0.00016173469387755103, + "loss": 1.6286, + "step": 300 + }, + { + "epoch": 6.09, + "learning_rate": 0.00016160714285714286, + "loss": 1.694, + "step": 301 + }, + { + "epoch": 6.11, + "learning_rate": 0.00016147959183673472, + "loss": 1.6197, + "step": 302 + }, + { + "epoch": 6.13, + "learning_rate": 0.00016135204081632652, + "loss": 1.5597, + "step": 303 + }, + { + "epoch": 6.15, + "learning_rate": 0.00016122448979591838, + "loss": 1.5487, + "step": 304 + }, + { + "epoch": 6.17, + "learning_rate": 0.0001610969387755102, + "loss": 1.5769, + "step": 305 + }, + { + "epoch": 6.19, + "learning_rate": 0.00016096938775510204, + "loss": 1.6367, + "step": 306 + }, + { + "epoch": 6.21, + "learning_rate": 0.00016084183673469388, + "loss": 1.583, + "step": 307 + }, + { + "epoch": 6.23, + "learning_rate": 0.00016071428571428573, + "loss": 1.6201, + "step": 308 + }, + { + "epoch": 6.25, + "learning_rate": 0.00016058673469387754, + "loss": 1.6586, + "step": 309 + }, + { + "epoch": 6.27, + "learning_rate": 0.0001604591836734694, + "loss": 1.6711, + "step": 310 + }, + { + "epoch": 6.29, + "learning_rate": 0.00016033163265306123, + "loss": 1.6402, + "step": 311 + }, + { + "epoch": 6.31, + "learning_rate": 0.00016020408163265306, + "loss": 1.5247, + "step": 312 + }, + { + "epoch": 6.33, + "learning_rate": 0.00016007653061224491, + "loss": 1.5356, + "step": 313 + }, + { + "epoch": 6.35, + "learning_rate": 0.00015994897959183675, + "loss": 1.564, + "step": 314 + }, + { + "epoch": 6.37, + "learning_rate": 0.00015982142857142858, + "loss": 1.563, + "step": 315 + }, + { + "epoch": 6.39, + "learning_rate": 0.0001596938775510204, + "loss": 1.5198, + "step": 316 + }, + { + "epoch": 6.41, + "learning_rate": 0.00015956632653061227, + "loss": 1.6558, + "step": 317 + }, + { + "epoch": 6.43, + "learning_rate": 0.0001594387755102041, + "loss": 1.5534, + "step": 318 + }, + { + "epoch": 6.45, + "learning_rate": 0.00015931122448979593, + "loss": 1.6239, + "step": 319 + }, + { + "epoch": 6.47, + "learning_rate": 0.00015918367346938776, + "loss": 1.5645, + "step": 320 + }, + { + "epoch": 6.5, + "learning_rate": 0.00015905612244897962, + "loss": 1.5713, + "step": 321 + }, + { + "epoch": 6.52, + "learning_rate": 0.00015892857142857142, + "loss": 1.6176, + "step": 322 + }, + { + "epoch": 6.54, + "learning_rate": 0.00015880102040816328, + "loss": 1.502, + "step": 323 + }, + { + "epoch": 6.56, + "learning_rate": 0.0001586734693877551, + "loss": 1.645, + "step": 324 + }, + { + 
"epoch": 6.58, + "learning_rate": 0.00015854591836734697, + "loss": 1.5904, + "step": 325 + }, + { + "epoch": 6.6, + "learning_rate": 0.00015841836734693877, + "loss": 1.6149, + "step": 326 + }, + { + "epoch": 6.62, + "learning_rate": 0.00015829081632653063, + "loss": 1.6757, + "step": 327 + }, + { + "epoch": 6.64, + "learning_rate": 0.00015816326530612246, + "loss": 1.541, + "step": 328 + }, + { + "epoch": 6.66, + "learning_rate": 0.0001580357142857143, + "loss": 1.5898, + "step": 329 + }, + { + "epoch": 6.68, + "learning_rate": 0.00015790816326530612, + "loss": 1.5441, + "step": 330 + }, + { + "epoch": 6.7, + "learning_rate": 0.00015778061224489798, + "loss": 1.61, + "step": 331 + }, + { + "epoch": 6.72, + "learning_rate": 0.00015765306122448978, + "loss": 1.615, + "step": 332 + }, + { + "epoch": 6.74, + "learning_rate": 0.00015752551020408164, + "loss": 1.6575, + "step": 333 + }, + { + "epoch": 6.76, + "learning_rate": 0.00015739795918367347, + "loss": 1.6702, + "step": 334 + }, + { + "epoch": 6.78, + "learning_rate": 0.0001572704081632653, + "loss": 1.6009, + "step": 335 + }, + { + "epoch": 6.8, + "learning_rate": 0.00015714285714285716, + "loss": 1.5568, + "step": 336 + }, + { + "epoch": 6.82, + "learning_rate": 0.000157015306122449, + "loss": 1.619, + "step": 337 + }, + { + "epoch": 6.84, + "learning_rate": 0.00015688775510204082, + "loss": 1.5563, + "step": 338 + }, + { + "epoch": 6.86, + "learning_rate": 0.00015676020408163265, + "loss": 1.6328, + "step": 339 + }, + { + "epoch": 6.88, + "learning_rate": 0.0001566326530612245, + "loss": 1.5726, + "step": 340 + }, + { + "epoch": 6.9, + "learning_rate": 0.00015650510204081634, + "loss": 1.6199, + "step": 341 + }, + { + "epoch": 6.92, + "learning_rate": 0.00015637755102040817, + "loss": 1.5722, + "step": 342 + }, + { + "epoch": 6.94, + "learning_rate": 0.00015625, + "loss": 1.5685, + "step": 343 + }, + { + "epoch": 6.96, + "learning_rate": 0.00015612244897959186, + "loss": 1.5615, + "step": 344 + }, + { + "epoch": 6.98, + "learning_rate": 0.00015599489795918366, + "loss": 1.5994, + "step": 345 + }, + { + "epoch": 7.0, + "learning_rate": 0.00015586734693877552, + "loss": 1.5579, + "step": 346 + }, + { + "epoch": 7.02, + "learning_rate": 0.00015573979591836735, + "loss": 1.547, + "step": 347 + }, + { + "epoch": 7.04, + "learning_rate": 0.00015561224489795918, + "loss": 1.5292, + "step": 348 + }, + { + "epoch": 7.06, + "learning_rate": 0.00015548469387755102, + "loss": 1.6032, + "step": 349 + }, + { + "epoch": 7.08, + "learning_rate": 0.00015535714285714287, + "loss": 1.5149, + "step": 350 + }, + { + "epoch": 7.1, + "learning_rate": 0.0001552295918367347, + "loss": 1.6093, + "step": 351 + }, + { + "epoch": 7.12, + "learning_rate": 0.00015510204081632654, + "loss": 1.5421, + "step": 352 + }, + { + "epoch": 7.14, + "learning_rate": 0.00015497448979591837, + "loss": 1.5733, + "step": 353 + }, + { + "epoch": 7.16, + "learning_rate": 0.00015484693877551022, + "loss": 1.5703, + "step": 354 + }, + { + "epoch": 7.18, + "learning_rate": 0.00015471938775510203, + "loss": 1.6141, + "step": 355 + }, + { + "epoch": 7.2, + "learning_rate": 0.00015459183673469389, + "loss": 1.5526, + "step": 356 + }, + { + "epoch": 7.22, + "learning_rate": 0.00015446428571428572, + "loss": 1.5347, + "step": 357 + }, + { + "epoch": 7.24, + "learning_rate": 0.00015433673469387755, + "loss": 1.5682, + "step": 358 + }, + { + "epoch": 7.26, + "learning_rate": 0.0001542091836734694, + "loss": 1.5292, + "step": 359 + }, + { + "epoch": 7.28, + "learning_rate": 
0.00015408163265306124, + "loss": 1.499, + "step": 360 + }, + { + "epoch": 7.3, + "learning_rate": 0.00015395408163265307, + "loss": 1.5624, + "step": 361 + }, + { + "epoch": 7.32, + "learning_rate": 0.0001538265306122449, + "loss": 1.627, + "step": 362 + }, + { + "epoch": 7.34, + "learning_rate": 0.00015369897959183676, + "loss": 1.5327, + "step": 363 + }, + { + "epoch": 7.37, + "learning_rate": 0.0001535714285714286, + "loss": 1.5622, + "step": 364 + }, + { + "epoch": 7.39, + "learning_rate": 0.00015344387755102042, + "loss": 1.5659, + "step": 365 + }, + { + "epoch": 7.41, + "learning_rate": 0.00015331632653061225, + "loss": 1.5019, + "step": 366 + }, + { + "epoch": 7.43, + "learning_rate": 0.0001531887755102041, + "loss": 1.5921, + "step": 367 + }, + { + "epoch": 7.45, + "learning_rate": 0.0001530612244897959, + "loss": 1.5914, + "step": 368 + }, + { + "epoch": 7.47, + "learning_rate": 0.00015293367346938777, + "loss": 1.5045, + "step": 369 + }, + { + "epoch": 7.49, + "learning_rate": 0.0001528061224489796, + "loss": 1.6209, + "step": 370 + }, + { + "epoch": 7.51, + "learning_rate": 0.00015267857142857143, + "loss": 1.5198, + "step": 371 + }, + { + "epoch": 7.53, + "learning_rate": 0.00015255102040816326, + "loss": 1.5363, + "step": 372 + }, + { + "epoch": 7.55, + "learning_rate": 0.00015242346938775512, + "loss": 1.5391, + "step": 373 + }, + { + "epoch": 7.57, + "learning_rate": 0.00015229591836734695, + "loss": 1.4546, + "step": 374 + }, + { + "epoch": 7.59, + "learning_rate": 0.00015216836734693878, + "loss": 1.5546, + "step": 375 + }, + { + "epoch": 7.61, + "learning_rate": 0.0001520408163265306, + "loss": 1.5629, + "step": 376 + }, + { + "epoch": 7.63, + "learning_rate": 0.00015191326530612247, + "loss": 1.6002, + "step": 377 + }, + { + "epoch": 7.65, + "learning_rate": 0.00015178571428571427, + "loss": 1.5543, + "step": 378 + }, + { + "epoch": 7.67, + "learning_rate": 0.00015165816326530613, + "loss": 1.5925, + "step": 379 + }, + { + "epoch": 7.69, + "learning_rate": 0.00015153061224489796, + "loss": 1.5631, + "step": 380 + }, + { + "epoch": 7.71, + "learning_rate": 0.0001514030612244898, + "loss": 1.5677, + "step": 381 + }, + { + "epoch": 7.73, + "learning_rate": 0.00015127551020408165, + "loss": 1.5828, + "step": 382 + }, + { + "epoch": 7.75, + "learning_rate": 0.00015114795918367348, + "loss": 1.6494, + "step": 383 + }, + { + "epoch": 7.77, + "learning_rate": 0.0001510204081632653, + "loss": 1.553, + "step": 384 + }, + { + "epoch": 7.79, + "learning_rate": 0.00015089285714285714, + "loss": 1.6156, + "step": 385 + }, + { + "epoch": 7.81, + "learning_rate": 0.000150765306122449, + "loss": 1.5001, + "step": 386 + }, + { + "epoch": 7.83, + "learning_rate": 0.00015063775510204083, + "loss": 1.5321, + "step": 387 + }, + { + "epoch": 7.85, + "learning_rate": 0.00015051020408163266, + "loss": 1.5307, + "step": 388 + }, + { + "epoch": 7.87, + "learning_rate": 0.0001503826530612245, + "loss": 1.5639, + "step": 389 + }, + { + "epoch": 7.89, + "learning_rate": 0.00015025510204081635, + "loss": 1.517, + "step": 390 + }, + { + "epoch": 7.91, + "learning_rate": 0.00015012755102040816, + "loss": 1.4776, + "step": 391 + }, + { + "epoch": 7.93, + "learning_rate": 0.00015000000000000001, + "loss": 1.5368, + "step": 392 + }, + { + "epoch": 7.95, + "learning_rate": 0.00014987244897959184, + "loss": 1.5636, + "step": 393 + }, + { + "epoch": 7.97, + "learning_rate": 0.00014974489795918368, + "loss": 1.6004, + "step": 394 + }, + { + "epoch": 7.99, + "learning_rate": 0.0001496173469387755, + "loss": 
1.5524, + "step": 395 + }, + { + "epoch": 8.01, + "learning_rate": 0.00014948979591836736, + "loss": 1.5307, + "step": 396 + }, + { + "epoch": 8.03, + "learning_rate": 0.00014936224489795917, + "loss": 1.5123, + "step": 397 + }, + { + "epoch": 8.05, + "learning_rate": 0.00014923469387755103, + "loss": 1.5132, + "step": 398 + }, + { + "epoch": 8.07, + "learning_rate": 0.00014910714285714286, + "loss": 1.5109, + "step": 399 + }, + { + "epoch": 8.09, + "learning_rate": 0.00014897959183673472, + "loss": 1.5302, + "step": 400 + }, + { + "epoch": 8.11, + "learning_rate": 0.00014885204081632652, + "loss": 1.5238, + "step": 401 + }, + { + "epoch": 8.13, + "learning_rate": 0.00014872448979591838, + "loss": 1.4781, + "step": 402 + }, + { + "epoch": 8.15, + "learning_rate": 0.0001485969387755102, + "loss": 1.5446, + "step": 403 + }, + { + "epoch": 8.17, + "learning_rate": 0.00014846938775510204, + "loss": 1.5, + "step": 404 + }, + { + "epoch": 8.19, + "learning_rate": 0.0001483418367346939, + "loss": 1.5458, + "step": 405 + }, + { + "epoch": 8.21, + "learning_rate": 0.00014821428571428573, + "loss": 1.5257, + "step": 406 + }, + { + "epoch": 8.24, + "learning_rate": 0.00014808673469387756, + "loss": 1.4607, + "step": 407 + }, + { + "epoch": 8.26, + "learning_rate": 0.0001479591836734694, + "loss": 1.4282, + "step": 408 + }, + { + "epoch": 8.28, + "learning_rate": 0.00014783163265306125, + "loss": 1.4519, + "step": 409 + }, + { + "epoch": 8.3, + "learning_rate": 0.00014770408163265305, + "loss": 1.475, + "step": 410 + }, + { + "epoch": 8.32, + "learning_rate": 0.0001475765306122449, + "loss": 1.5425, + "step": 411 + }, + { + "epoch": 8.34, + "learning_rate": 0.00014744897959183674, + "loss": 1.5407, + "step": 412 + }, + { + "epoch": 8.36, + "learning_rate": 0.0001473214285714286, + "loss": 1.5698, + "step": 413 + }, + { + "epoch": 8.38, + "learning_rate": 0.0001471938775510204, + "loss": 1.4282, + "step": 414 + }, + { + "epoch": 8.4, + "learning_rate": 0.00014706632653061226, + "loss": 1.5301, + "step": 415 + }, + { + "epoch": 8.42, + "learning_rate": 0.0001469387755102041, + "loss": 1.5083, + "step": 416 + }, + { + "epoch": 8.44, + "learning_rate": 0.00014681122448979592, + "loss": 1.5712, + "step": 417 + }, + { + "epoch": 8.46, + "learning_rate": 0.00014668367346938775, + "loss": 1.4363, + "step": 418 + }, + { + "epoch": 8.48, + "learning_rate": 0.0001465561224489796, + "loss": 1.4463, + "step": 419 + }, + { + "epoch": 8.5, + "learning_rate": 0.00014642857142857141, + "loss": 1.4738, + "step": 420 + }, + { + "epoch": 8.52, + "learning_rate": 0.00014630102040816327, + "loss": 1.5396, + "step": 421 + }, + { + "epoch": 8.54, + "learning_rate": 0.0001461734693877551, + "loss": 1.4384, + "step": 422 + }, + { + "epoch": 8.56, + "learning_rate": 0.00014604591836734696, + "loss": 1.5345, + "step": 423 + }, + { + "epoch": 8.58, + "learning_rate": 0.0001459183673469388, + "loss": 1.5355, + "step": 424 + }, + { + "epoch": 8.6, + "learning_rate": 0.00014579081632653062, + "loss": 1.5188, + "step": 425 + }, + { + "epoch": 8.62, + "learning_rate": 0.00014566326530612245, + "loss": 1.5575, + "step": 426 + }, + { + "epoch": 8.64, + "learning_rate": 0.00014553571428571428, + "loss": 1.5279, + "step": 427 + }, + { + "epoch": 8.66, + "learning_rate": 0.00014540816326530614, + "loss": 1.5484, + "step": 428 + }, + { + "epoch": 8.68, + "learning_rate": 0.00014528061224489797, + "loss": 1.4878, + "step": 429 + }, + { + "epoch": 8.7, + "learning_rate": 0.0001451530612244898, + "loss": 1.503, + "step": 430 + }, + { + 
"epoch": 8.72, + "learning_rate": 0.00014502551020408163, + "loss": 1.4723, + "step": 431 + }, + { + "epoch": 8.74, + "learning_rate": 0.0001448979591836735, + "loss": 1.5579, + "step": 432 + }, + { + "epoch": 8.76, + "learning_rate": 0.0001447704081632653, + "loss": 1.4789, + "step": 433 + }, + { + "epoch": 8.78, + "learning_rate": 0.00014464285714285715, + "loss": 1.5501, + "step": 434 + }, + { + "epoch": 8.8, + "learning_rate": 0.00014451530612244899, + "loss": 1.5204, + "step": 435 + }, + { + "epoch": 8.82, + "learning_rate": 0.00014438775510204084, + "loss": 1.5489, + "step": 436 + }, + { + "epoch": 8.84, + "learning_rate": 0.00014426020408163265, + "loss": 1.5464, + "step": 437 + }, + { + "epoch": 8.86, + "learning_rate": 0.0001441326530612245, + "loss": 1.5896, + "step": 438 + }, + { + "epoch": 8.88, + "learning_rate": 0.00014400510204081634, + "loss": 1.5465, + "step": 439 + }, + { + "epoch": 8.9, + "learning_rate": 0.00014387755102040817, + "loss": 1.5094, + "step": 440 + }, + { + "epoch": 8.92, + "learning_rate": 0.00014375, + "loss": 1.5144, + "step": 441 + }, + { + "epoch": 8.94, + "learning_rate": 0.00014362244897959186, + "loss": 1.4919, + "step": 442 + }, + { + "epoch": 8.96, + "learning_rate": 0.00014349489795918366, + "loss": 1.4702, + "step": 443 + }, + { + "epoch": 8.98, + "learning_rate": 0.00014336734693877552, + "loss": 1.4996, + "step": 444 + }, + { + "epoch": 9.0, + "learning_rate": 0.00014323979591836735, + "loss": 1.5503, + "step": 445 + }, + { + "epoch": 9.02, + "learning_rate": 0.00014311224489795918, + "loss": 1.4125, + "step": 446 + }, + { + "epoch": 9.04, + "learning_rate": 0.00014298469387755104, + "loss": 1.4722, + "step": 447 + }, + { + "epoch": 9.06, + "learning_rate": 0.00014285714285714287, + "loss": 1.5199, + "step": 448 + }, + { + "epoch": 9.09, + "learning_rate": 0.0001427295918367347, + "loss": 1.4571, + "step": 449 + }, + { + "epoch": 9.11, + "learning_rate": 0.00014260204081632653, + "loss": 1.4996, + "step": 450 + }, + { + "epoch": 9.13, + "learning_rate": 0.0001424744897959184, + "loss": 1.4092, + "step": 451 + }, + { + "epoch": 9.15, + "learning_rate": 0.00014234693877551022, + "loss": 1.4198, + "step": 452 + }, + { + "epoch": 9.17, + "learning_rate": 0.00014221938775510205, + "loss": 1.4916, + "step": 453 + }, + { + "epoch": 9.19, + "learning_rate": 0.00014209183673469388, + "loss": 1.5051, + "step": 454 + }, + { + "epoch": 9.21, + "learning_rate": 0.00014196428571428574, + "loss": 1.4321, + "step": 455 + }, + { + "epoch": 9.23, + "learning_rate": 0.00014183673469387754, + "loss": 1.4097, + "step": 456 + }, + { + "epoch": 9.25, + "learning_rate": 0.0001417091836734694, + "loss": 1.4853, + "step": 457 + }, + { + "epoch": 9.27, + "learning_rate": 0.00014158163265306123, + "loss": 1.4593, + "step": 458 + }, + { + "epoch": 9.29, + "learning_rate": 0.00014145408163265306, + "loss": 1.3729, + "step": 459 + }, + { + "epoch": 9.31, + "learning_rate": 0.0001413265306122449, + "loss": 1.4467, + "step": 460 + }, + { + "epoch": 9.33, + "learning_rate": 0.00014119897959183675, + "loss": 1.4467, + "step": 461 + }, + { + "epoch": 9.35, + "learning_rate": 0.00014107142857142858, + "loss": 1.4785, + "step": 462 + }, + { + "epoch": 9.37, + "learning_rate": 0.0001409438775510204, + "loss": 1.4089, + "step": 463 + }, + { + "epoch": 9.39, + "learning_rate": 0.00014081632653061224, + "loss": 1.5026, + "step": 464 + }, + { + "epoch": 9.41, + "learning_rate": 0.0001406887755102041, + "loss": 1.4857, + "step": 465 + }, + { + "epoch": 9.43, + "learning_rate": 
0.0001405612244897959, + "loss": 1.3745, + "step": 466 + }, + { + "epoch": 9.45, + "learning_rate": 0.00014043367346938776, + "loss": 1.4733, + "step": 467 + }, + { + "epoch": 9.47, + "learning_rate": 0.0001403061224489796, + "loss": 1.5212, + "step": 468 + }, + { + "epoch": 9.49, + "learning_rate": 0.00014017857142857142, + "loss": 1.5398, + "step": 469 + }, + { + "epoch": 9.51, + "learning_rate": 0.00014005102040816328, + "loss": 1.478, + "step": 470 + }, + { + "epoch": 9.53, + "learning_rate": 0.0001399234693877551, + "loss": 1.496, + "step": 471 + }, + { + "epoch": 9.55, + "learning_rate": 0.00013979591836734694, + "loss": 1.4837, + "step": 472 + }, + { + "epoch": 9.57, + "learning_rate": 0.00013966836734693878, + "loss": 1.4724, + "step": 473 + }, + { + "epoch": 9.59, + "learning_rate": 0.00013954081632653063, + "loss": 1.4828, + "step": 474 + }, + { + "epoch": 9.61, + "learning_rate": 0.00013941326530612246, + "loss": 1.5012, + "step": 475 + }, + { + "epoch": 9.63, + "learning_rate": 0.0001392857142857143, + "loss": 1.4879, + "step": 476 + }, + { + "epoch": 9.65, + "learning_rate": 0.00013915816326530613, + "loss": 1.4196, + "step": 477 + }, + { + "epoch": 9.67, + "learning_rate": 0.00013903061224489798, + "loss": 1.4915, + "step": 478 + }, + { + "epoch": 9.69, + "learning_rate": 0.0001389030612244898, + "loss": 1.3878, + "step": 479 + }, + { + "epoch": 9.71, + "learning_rate": 0.00013877551020408165, + "loss": 1.466, + "step": 480 + }, + { + "epoch": 9.73, + "learning_rate": 0.00013864795918367348, + "loss": 1.4582, + "step": 481 + }, + { + "epoch": 9.75, + "learning_rate": 0.0001385204081632653, + "loss": 1.533, + "step": 482 + }, + { + "epoch": 9.77, + "learning_rate": 0.00013839285714285714, + "loss": 1.4697, + "step": 483 + }, + { + "epoch": 9.79, + "learning_rate": 0.000138265306122449, + "loss": 1.3989, + "step": 484 + }, + { + "epoch": 9.81, + "learning_rate": 0.00013813775510204083, + "loss": 1.4361, + "step": 485 + }, + { + "epoch": 9.83, + "learning_rate": 0.00013801020408163266, + "loss": 1.5271, + "step": 486 + }, + { + "epoch": 9.85, + "learning_rate": 0.0001378826530612245, + "loss": 1.4905, + "step": 487 + }, + { + "epoch": 9.87, + "learning_rate": 0.00013775510204081635, + "loss": 1.4757, + "step": 488 + }, + { + "epoch": 9.89, + "learning_rate": 0.00013762755102040815, + "loss": 1.5485, + "step": 489 + }, + { + "epoch": 9.91, + "learning_rate": 0.0001375, + "loss": 1.4783, + "step": 490 + }, + { + "epoch": 9.93, + "learning_rate": 0.00013737244897959184, + "loss": 1.4849, + "step": 491 + }, + { + "epoch": 9.96, + "learning_rate": 0.00013724489795918367, + "loss": 1.5382, + "step": 492 + }, + { + "epoch": 9.98, + "learning_rate": 0.00013711734693877553, + "loss": 1.4902, + "step": 493 + }, + { + "epoch": 10.0, + "learning_rate": 0.00013698979591836736, + "loss": 1.4865, + "step": 494 + }, + { + "epoch": 10.02, + "learning_rate": 0.0001368622448979592, + "loss": 1.4436, + "step": 495 + }, + { + "epoch": 10.04, + "learning_rate": 0.00013673469387755102, + "loss": 1.408, + "step": 496 + }, + { + "epoch": 10.06, + "learning_rate": 0.00013660714285714288, + "loss": 1.4764, + "step": 497 + }, + { + "epoch": 10.08, + "learning_rate": 0.0001364795918367347, + "loss": 1.4646, + "step": 498 + }, + { + "epoch": 10.1, + "learning_rate": 0.00013635204081632654, + "loss": 1.406, + "step": 499 + }, + { + "epoch": 10.12, + "learning_rate": 0.00013622448979591837, + "loss": 1.4785, + "step": 500 + }, + { + "epoch": 10.14, + "learning_rate": 0.00013609693877551023, + "loss": 1.4117, + 
"step": 501 + }, + { + "epoch": 10.16, + "learning_rate": 0.00013596938775510203, + "loss": 1.4108, + "step": 502 + }, + { + "epoch": 10.18, + "learning_rate": 0.0001358418367346939, + "loss": 1.4155, + "step": 503 + }, + { + "epoch": 10.2, + "learning_rate": 0.00013571428571428572, + "loss": 1.4021, + "step": 504 + }, + { + "epoch": 10.22, + "learning_rate": 0.00013558673469387755, + "loss": 1.411, + "step": 505 + }, + { + "epoch": 10.24, + "learning_rate": 0.00013545918367346938, + "loss": 1.3851, + "step": 506 + }, + { + "epoch": 10.26, + "learning_rate": 0.00013533163265306124, + "loss": 1.387, + "step": 507 + }, + { + "epoch": 10.28, + "learning_rate": 0.00013520408163265305, + "loss": 1.4163, + "step": 508 + }, + { + "epoch": 10.3, + "learning_rate": 0.0001350765306122449, + "loss": 1.3343, + "step": 509 + }, + { + "epoch": 10.32, + "learning_rate": 0.00013494897959183673, + "loss": 1.4811, + "step": 510 + }, + { + "epoch": 10.34, + "learning_rate": 0.0001348214285714286, + "loss": 1.4086, + "step": 511 + }, + { + "epoch": 10.36, + "learning_rate": 0.0001346938775510204, + "loss": 1.3879, + "step": 512 + }, + { + "epoch": 10.38, + "learning_rate": 0.00013456632653061225, + "loss": 1.4204, + "step": 513 + }, + { + "epoch": 10.4, + "learning_rate": 0.00013443877551020408, + "loss": 1.4158, + "step": 514 + }, + { + "epoch": 10.42, + "learning_rate": 0.00013431122448979592, + "loss": 1.4521, + "step": 515 + }, + { + "epoch": 10.44, + "learning_rate": 0.00013418367346938777, + "loss": 1.4196, + "step": 516 + }, + { + "epoch": 10.46, + "learning_rate": 0.0001340561224489796, + "loss": 1.4361, + "step": 517 + }, + { + "epoch": 10.48, + "learning_rate": 0.00013392857142857144, + "loss": 1.4482, + "step": 518 + }, + { + "epoch": 10.5, + "learning_rate": 0.00013380102040816327, + "loss": 1.4801, + "step": 519 + }, + { + "epoch": 10.52, + "learning_rate": 0.00013367346938775512, + "loss": 1.4556, + "step": 520 + }, + { + "epoch": 10.54, + "learning_rate": 0.00013354591836734695, + "loss": 1.3902, + "step": 521 + }, + { + "epoch": 10.56, + "learning_rate": 0.00013341836734693879, + "loss": 1.4269, + "step": 522 + }, + { + "epoch": 10.58, + "learning_rate": 0.00013329081632653062, + "loss": 1.4899, + "step": 523 + }, + { + "epoch": 10.6, + "learning_rate": 0.00013316326530612247, + "loss": 1.3952, + "step": 524 + }, + { + "epoch": 10.62, + "learning_rate": 0.00013303571428571428, + "loss": 1.4116, + "step": 525 + }, + { + "epoch": 10.64, + "learning_rate": 0.00013290816326530614, + "loss": 1.4583, + "step": 526 + }, + { + "epoch": 10.66, + "learning_rate": 0.00013278061224489797, + "loss": 1.4466, + "step": 527 + }, + { + "epoch": 10.68, + "learning_rate": 0.0001326530612244898, + "loss": 1.4242, + "step": 528 + }, + { + "epoch": 10.7, + "learning_rate": 0.00013252551020408163, + "loss": 1.3717, + "step": 529 + }, + { + "epoch": 10.72, + "learning_rate": 0.0001323979591836735, + "loss": 1.4583, + "step": 530 + }, + { + "epoch": 10.74, + "learning_rate": 0.0001322704081632653, + "loss": 1.4185, + "step": 531 + }, + { + "epoch": 10.76, + "learning_rate": 0.00013214285714285715, + "loss": 1.4287, + "step": 532 + }, + { + "epoch": 10.78, + "learning_rate": 0.00013201530612244898, + "loss": 1.4385, + "step": 533 + }, + { + "epoch": 10.8, + "learning_rate": 0.00013188775510204084, + "loss": 1.453, + "step": 534 + }, + { + "epoch": 10.83, + "learning_rate": 0.00013176020408163264, + "loss": 1.4161, + "step": 535 + }, + { + "epoch": 10.85, + "learning_rate": 0.0001316326530612245, + "loss": 1.457, + 
"step": 536 + }, + { + "epoch": 10.87, + "learning_rate": 0.00013150510204081633, + "loss": 1.4367, + "step": 537 + }, + { + "epoch": 10.89, + "learning_rate": 0.00013137755102040816, + "loss": 1.4256, + "step": 538 + }, + { + "epoch": 10.91, + "learning_rate": 0.00013125000000000002, + "loss": 1.424, + "step": 539 + }, + { + "epoch": 10.93, + "learning_rate": 0.00013112244897959185, + "loss": 1.3923, + "step": 540 + }, + { + "epoch": 10.95, + "learning_rate": 0.00013099489795918368, + "loss": 1.4225, + "step": 541 + }, + { + "epoch": 10.97, + "learning_rate": 0.0001308673469387755, + "loss": 1.3969, + "step": 542 + }, + { + "epoch": 10.99, + "learning_rate": 0.00013073979591836737, + "loss": 1.4446, + "step": 543 + }, + { + "epoch": 11.01, + "learning_rate": 0.00013061224489795917, + "loss": 1.4375, + "step": 544 + }, + { + "epoch": 11.03, + "learning_rate": 0.00013048469387755103, + "loss": 1.4064, + "step": 545 + }, + { + "epoch": 11.05, + "learning_rate": 0.00013035714285714286, + "loss": 1.3454, + "step": 546 + }, + { + "epoch": 11.07, + "learning_rate": 0.00013022959183673472, + "loss": 1.3234, + "step": 547 + }, + { + "epoch": 11.09, + "learning_rate": 0.00013010204081632652, + "loss": 1.3759, + "step": 548 + }, + { + "epoch": 11.11, + "learning_rate": 0.00012997448979591838, + "loss": 1.4221, + "step": 549 + }, + { + "epoch": 11.13, + "learning_rate": 0.0001298469387755102, + "loss": 1.4261, + "step": 550 + }, + { + "epoch": 11.15, + "learning_rate": 0.00012971938775510204, + "loss": 1.3341, + "step": 551 + }, + { + "epoch": 11.17, + "learning_rate": 0.00012959183673469387, + "loss": 1.3994, + "step": 552 + }, + { + "epoch": 11.19, + "learning_rate": 0.00012946428571428573, + "loss": 1.3894, + "step": 553 + }, + { + "epoch": 11.21, + "learning_rate": 0.00012933673469387754, + "loss": 1.3585, + "step": 554 + }, + { + "epoch": 11.23, + "learning_rate": 0.0001292091836734694, + "loss": 1.3763, + "step": 555 + }, + { + "epoch": 11.25, + "learning_rate": 0.00012908163265306123, + "loss": 1.3623, + "step": 556 + }, + { + "epoch": 11.27, + "learning_rate": 0.00012895408163265306, + "loss": 1.3907, + "step": 557 + }, + { + "epoch": 11.29, + "learning_rate": 0.0001288265306122449, + "loss": 1.3807, + "step": 558 + }, + { + "epoch": 11.31, + "learning_rate": 0.00012869897959183674, + "loss": 1.4045, + "step": 559 + }, + { + "epoch": 11.33, + "learning_rate": 0.00012857142857142858, + "loss": 1.4038, + "step": 560 + }, + { + "epoch": 11.35, + "learning_rate": 0.0001284438775510204, + "loss": 1.3466, + "step": 561 + }, + { + "epoch": 11.37, + "learning_rate": 0.00012831632653061226, + "loss": 1.3449, + "step": 562 + }, + { + "epoch": 11.39, + "learning_rate": 0.0001281887755102041, + "loss": 1.3866, + "step": 563 + }, + { + "epoch": 11.41, + "learning_rate": 0.00012806122448979593, + "loss": 1.3106, + "step": 564 + }, + { + "epoch": 11.43, + "learning_rate": 0.00012793367346938776, + "loss": 1.4414, + "step": 565 + }, + { + "epoch": 11.45, + "learning_rate": 0.00012780612244897962, + "loss": 1.3737, + "step": 566 + }, + { + "epoch": 11.47, + "learning_rate": 0.00012767857142857142, + "loss": 1.4053, + "step": 567 + }, + { + "epoch": 11.49, + "learning_rate": 0.00012755102040816328, + "loss": 1.4561, + "step": 568 + }, + { + "epoch": 11.51, + "learning_rate": 0.0001274234693877551, + "loss": 1.3684, + "step": 569 + }, + { + "epoch": 11.53, + "learning_rate": 0.00012729591836734697, + "loss": 1.3117, + "step": 570 + }, + { + "epoch": 11.55, + "learning_rate": 0.00012716836734693877, + "loss": 
1.3474, + "step": 571 + }, + { + "epoch": 11.57, + "learning_rate": 0.00012704081632653063, + "loss": 1.3804, + "step": 572 + }, + { + "epoch": 11.59, + "learning_rate": 0.00012691326530612246, + "loss": 1.3656, + "step": 573 + }, + { + "epoch": 11.61, + "learning_rate": 0.0001267857142857143, + "loss": 1.3133, + "step": 574 + }, + { + "epoch": 11.63, + "learning_rate": 0.00012665816326530612, + "loss": 1.4077, + "step": 575 + }, + { + "epoch": 11.65, + "learning_rate": 0.00012653061224489798, + "loss": 1.4087, + "step": 576 + }, + { + "epoch": 11.67, + "learning_rate": 0.00012640306122448978, + "loss": 1.3524, + "step": 577 + }, + { + "epoch": 11.7, + "learning_rate": 0.00012627551020408164, + "loss": 1.3481, + "step": 578 + }, + { + "epoch": 11.72, + "learning_rate": 0.00012614795918367347, + "loss": 1.4497, + "step": 579 + }, + { + "epoch": 11.74, + "learning_rate": 0.0001260204081632653, + "loss": 1.3866, + "step": 580 + }, + { + "epoch": 11.76, + "learning_rate": 0.00012589285714285713, + "loss": 1.42, + "step": 581 + }, + { + "epoch": 11.78, + "learning_rate": 0.000125765306122449, + "loss": 1.3562, + "step": 582 + }, + { + "epoch": 11.8, + "learning_rate": 0.00012563775510204082, + "loss": 1.3249, + "step": 583 + }, + { + "epoch": 11.82, + "learning_rate": 0.00012551020408163265, + "loss": 1.4277, + "step": 584 + }, + { + "epoch": 11.84, + "learning_rate": 0.0001253826530612245, + "loss": 1.3734, + "step": 585 + }, + { + "epoch": 11.86, + "learning_rate": 0.00012525510204081634, + "loss": 1.3765, + "step": 586 + }, + { + "epoch": 11.88, + "learning_rate": 0.00012512755102040817, + "loss": 1.4153, + "step": 587 + }, + { + "epoch": 11.9, + "learning_rate": 0.000125, + "loss": 1.3847, + "step": 588 + }, + { + "epoch": 11.92, + "learning_rate": 0.00012487244897959186, + "loss": 1.3824, + "step": 589 + }, + { + "epoch": 11.94, + "learning_rate": 0.00012474489795918366, + "loss": 1.3938, + "step": 590 + }, + { + "epoch": 11.96, + "learning_rate": 0.00012461734693877552, + "loss": 1.4143, + "step": 591 + }, + { + "epoch": 11.98, + "learning_rate": 0.00012448979591836735, + "loss": 1.3794, + "step": 592 + }, + { + "epoch": 12.0, + "learning_rate": 0.00012436224489795918, + "loss": 1.3755, + "step": 593 + }, + { + "epoch": 12.02, + "learning_rate": 0.00012423469387755101, + "loss": 1.3736, + "step": 594 + }, + { + "epoch": 12.04, + "learning_rate": 0.00012410714285714287, + "loss": 1.2957, + "step": 595 + }, + { + "epoch": 12.06, + "learning_rate": 0.0001239795918367347, + "loss": 1.2996, + "step": 596 + }, + { + "epoch": 12.08, + "learning_rate": 0.00012385204081632653, + "loss": 1.3648, + "step": 597 + }, + { + "epoch": 12.1, + "learning_rate": 0.00012372448979591837, + "loss": 1.3031, + "step": 598 + }, + { + "epoch": 12.12, + "learning_rate": 0.00012359693877551022, + "loss": 1.2933, + "step": 599 + }, + { + "epoch": 12.14, + "learning_rate": 0.00012346938775510203, + "loss": 1.322, + "step": 600 + }, + { + "epoch": 12.16, + "learning_rate": 0.00012334183673469389, + "loss": 1.3123, + "step": 601 + }, + { + "epoch": 12.18, + "learning_rate": 0.00012321428571428572, + "loss": 1.3187, + "step": 602 + }, + { + "epoch": 12.2, + "learning_rate": 0.00012308673469387755, + "loss": 1.3353, + "step": 603 + }, + { + "epoch": 12.22, + "learning_rate": 0.0001229591836734694, + "loss": 1.3221, + "step": 604 + }, + { + "epoch": 12.24, + "learning_rate": 0.00012283163265306124, + "loss": 1.3458, + "step": 605 + }, + { + "epoch": 12.26, + "learning_rate": 0.00012270408163265307, + "loss": 1.275, + 
"step": 606 + }, + { + "epoch": 12.28, + "learning_rate": 0.0001225765306122449, + "loss": 1.3455, + "step": 607 + }, + { + "epoch": 12.3, + "learning_rate": 0.00012244897959183676, + "loss": 1.2769, + "step": 608 + }, + { + "epoch": 12.32, + "learning_rate": 0.00012232142857142859, + "loss": 1.3201, + "step": 609 + }, + { + "epoch": 12.34, + "learning_rate": 0.00012219387755102042, + "loss": 1.3073, + "step": 610 + }, + { + "epoch": 12.36, + "learning_rate": 0.00012206632653061225, + "loss": 1.3103, + "step": 611 + }, + { + "epoch": 12.38, + "learning_rate": 0.00012193877551020409, + "loss": 1.4437, + "step": 612 + }, + { + "epoch": 12.4, + "learning_rate": 0.00012181122448979591, + "loss": 1.3086, + "step": 613 + }, + { + "epoch": 12.42, + "learning_rate": 0.00012168367346938775, + "loss": 1.3867, + "step": 614 + }, + { + "epoch": 12.44, + "learning_rate": 0.0001215561224489796, + "loss": 1.2565, + "step": 615 + }, + { + "epoch": 12.46, + "learning_rate": 0.00012142857142857143, + "loss": 1.335, + "step": 616 + }, + { + "epoch": 12.48, + "learning_rate": 0.00012130102040816327, + "loss": 1.3423, + "step": 617 + }, + { + "epoch": 12.5, + "learning_rate": 0.00012117346938775512, + "loss": 1.3433, + "step": 618 + }, + { + "epoch": 12.52, + "learning_rate": 0.00012104591836734695, + "loss": 1.3387, + "step": 619 + }, + { + "epoch": 12.55, + "learning_rate": 0.00012091836734693878, + "loss": 1.3923, + "step": 620 + }, + { + "epoch": 12.57, + "learning_rate": 0.00012079081632653062, + "loss": 1.3774, + "step": 621 + }, + { + "epoch": 12.59, + "learning_rate": 0.00012066326530612247, + "loss": 1.3203, + "step": 622 + }, + { + "epoch": 12.61, + "learning_rate": 0.00012053571428571429, + "loss": 1.2924, + "step": 623 + }, + { + "epoch": 12.63, + "learning_rate": 0.00012040816326530613, + "loss": 1.3292, + "step": 624 + }, + { + "epoch": 12.65, + "learning_rate": 0.00012028061224489798, + "loss": 1.3161, + "step": 625 + }, + { + "epoch": 12.67, + "learning_rate": 0.00012015306122448979, + "loss": 1.352, + "step": 626 + }, + { + "epoch": 12.69, + "learning_rate": 0.00012002551020408164, + "loss": 1.3577, + "step": 627 + }, + { + "epoch": 12.71, + "learning_rate": 0.00011989795918367348, + "loss": 1.3575, + "step": 628 + }, + { + "epoch": 12.73, + "learning_rate": 0.0001197704081632653, + "loss": 1.3727, + "step": 629 + }, + { + "epoch": 12.75, + "learning_rate": 0.00011964285714285714, + "loss": 1.3312, + "step": 630 + }, + { + "epoch": 12.77, + "learning_rate": 0.00011951530612244899, + "loss": 1.3378, + "step": 631 + }, + { + "epoch": 12.79, + "learning_rate": 0.00011938775510204083, + "loss": 1.295, + "step": 632 + }, + { + "epoch": 12.81, + "learning_rate": 0.00011926020408163265, + "loss": 1.3447, + "step": 633 + }, + { + "epoch": 12.83, + "learning_rate": 0.0001191326530612245, + "loss": 1.3835, + "step": 634 + }, + { + "epoch": 12.85, + "learning_rate": 0.00011900510204081634, + "loss": 1.3222, + "step": 635 + }, + { + "epoch": 12.87, + "learning_rate": 0.00011887755102040817, + "loss": 1.2851, + "step": 636 + }, + { + "epoch": 12.89, + "learning_rate": 0.00011875, + "loss": 1.2723, + "step": 637 + }, + { + "epoch": 12.91, + "learning_rate": 0.00011862244897959184, + "loss": 1.3924, + "step": 638 + }, + { + "epoch": 12.93, + "learning_rate": 0.00011849489795918368, + "loss": 1.4625, + "step": 639 + }, + { + "epoch": 12.95, + "learning_rate": 0.00011836734693877552, + "loss": 1.3245, + "step": 640 + }, + { + "epoch": 12.97, + "learning_rate": 0.00011823979591836736, + "loss": 1.4042, + 
"step": 641 + }, + { + "epoch": 12.99, + "learning_rate": 0.00011811224489795918, + "loss": 1.3761, + "step": 642 + }, + { + "epoch": 13.01, + "learning_rate": 0.00011798469387755103, + "loss": 1.3376, + "step": 643 + }, + { + "epoch": 13.03, + "learning_rate": 0.00011785714285714287, + "loss": 1.2174, + "step": 644 + }, + { + "epoch": 13.05, + "learning_rate": 0.00011772959183673471, + "loss": 1.3602, + "step": 645 + }, + { + "epoch": 13.07, + "learning_rate": 0.00011760204081632653, + "loss": 1.3002, + "step": 646 + }, + { + "epoch": 13.09, + "learning_rate": 0.00011747448979591838, + "loss": 1.2262, + "step": 647 + }, + { + "epoch": 13.11, + "learning_rate": 0.00011734693877551022, + "loss": 1.3048, + "step": 648 + }, + { + "epoch": 13.13, + "learning_rate": 0.00011721938775510204, + "loss": 1.2231, + "step": 649 + }, + { + "epoch": 13.15, + "learning_rate": 0.00011709183673469388, + "loss": 1.2996, + "step": 650 + }, + { + "epoch": 13.17, + "learning_rate": 0.00011696428571428573, + "loss": 1.2708, + "step": 651 + }, + { + "epoch": 13.19, + "learning_rate": 0.00011683673469387754, + "loss": 1.2776, + "step": 652 + }, + { + "epoch": 13.21, + "learning_rate": 0.00011670918367346939, + "loss": 1.248, + "step": 653 + }, + { + "epoch": 13.23, + "learning_rate": 0.00011658163265306123, + "loss": 1.2582, + "step": 654 + }, + { + "epoch": 13.25, + "learning_rate": 0.00011645408163265305, + "loss": 1.3011, + "step": 655 + }, + { + "epoch": 13.27, + "learning_rate": 0.0001163265306122449, + "loss": 1.2969, + "step": 656 + }, + { + "epoch": 13.29, + "learning_rate": 0.00011619897959183674, + "loss": 1.2454, + "step": 657 + }, + { + "epoch": 13.31, + "learning_rate": 0.00011607142857142858, + "loss": 1.1914, + "step": 658 + }, + { + "epoch": 13.33, + "learning_rate": 0.00011594387755102041, + "loss": 1.34, + "step": 659 + }, + { + "epoch": 13.35, + "learning_rate": 0.00011581632653061225, + "loss": 1.2828, + "step": 660 + }, + { + "epoch": 13.37, + "learning_rate": 0.00011568877551020409, + "loss": 1.2962, + "step": 661 + }, + { + "epoch": 13.39, + "learning_rate": 0.00011556122448979592, + "loss": 1.3334, + "step": 662 + }, + { + "epoch": 13.42, + "learning_rate": 0.00011543367346938776, + "loss": 1.2832, + "step": 663 + }, + { + "epoch": 13.44, + "learning_rate": 0.00011530612244897961, + "loss": 1.3012, + "step": 664 + }, + { + "epoch": 13.46, + "learning_rate": 0.00011517857142857143, + "loss": 1.2857, + "step": 665 + }, + { + "epoch": 13.48, + "learning_rate": 0.00011505102040816327, + "loss": 1.2855, + "step": 666 + }, + { + "epoch": 13.5, + "learning_rate": 0.00011492346938775512, + "loss": 1.3077, + "step": 667 + }, + { + "epoch": 13.52, + "learning_rate": 0.00011479591836734696, + "loss": 1.3139, + "step": 668 + }, + { + "epoch": 13.54, + "learning_rate": 0.00011466836734693878, + "loss": 1.3138, + "step": 669 + }, + { + "epoch": 13.56, + "learning_rate": 0.00011454081632653062, + "loss": 1.2808, + "step": 670 + }, + { + "epoch": 13.58, + "learning_rate": 0.00011441326530612247, + "loss": 1.2492, + "step": 671 + }, + { + "epoch": 13.6, + "learning_rate": 0.00011428571428571428, + "loss": 1.2027, + "step": 672 + }, + { + "epoch": 13.62, + "learning_rate": 0.00011415816326530613, + "loss": 1.33, + "step": 673 + }, + { + "epoch": 13.64, + "learning_rate": 0.00011403061224489797, + "loss": 1.3112, + "step": 674 + }, + { + "epoch": 13.66, + "learning_rate": 0.00011390306122448979, + "loss": 1.2772, + "step": 675 + }, + { + "epoch": 13.68, + "learning_rate": 0.00011377551020408163, + "loss": 
1.2701, + "step": 676 + }, + { + "epoch": 13.7, + "learning_rate": 0.00011364795918367348, + "loss": 1.1973, + "step": 677 + }, + { + "epoch": 13.72, + "learning_rate": 0.0001135204081632653, + "loss": 1.3124, + "step": 678 + }, + { + "epoch": 13.74, + "learning_rate": 0.00011339285714285714, + "loss": 1.3085, + "step": 679 + }, + { + "epoch": 13.76, + "learning_rate": 0.00011326530612244898, + "loss": 1.3457, + "step": 680 + }, + { + "epoch": 13.78, + "learning_rate": 0.00011313775510204083, + "loss": 1.3338, + "step": 681 + }, + { + "epoch": 13.8, + "learning_rate": 0.00011301020408163266, + "loss": 1.2753, + "step": 682 + }, + { + "epoch": 13.82, + "learning_rate": 0.00011288265306122449, + "loss": 1.2786, + "step": 683 + }, + { + "epoch": 13.84, + "learning_rate": 0.00011275510204081634, + "loss": 1.2584, + "step": 684 + }, + { + "epoch": 13.86, + "learning_rate": 0.00011262755102040817, + "loss": 1.2779, + "step": 685 + }, + { + "epoch": 13.88, + "learning_rate": 0.00011250000000000001, + "loss": 1.3502, + "step": 686 + }, + { + "epoch": 13.9, + "learning_rate": 0.00011237244897959185, + "loss": 1.3251, + "step": 687 + }, + { + "epoch": 13.92, + "learning_rate": 0.00011224489795918367, + "loss": 1.273, + "step": 688 + }, + { + "epoch": 13.94, + "learning_rate": 0.00011211734693877552, + "loss": 1.3341, + "step": 689 + }, + { + "epoch": 13.96, + "learning_rate": 0.00011198979591836736, + "loss": 1.2654, + "step": 690 + }, + { + "epoch": 13.98, + "learning_rate": 0.00011186224489795918, + "loss": 1.3333, + "step": 691 + }, + { + "epoch": 14.0, + "learning_rate": 0.00011173469387755102, + "loss": 1.3246, + "step": 692 + }, + { + "epoch": 14.02, + "learning_rate": 0.00011160714285714287, + "loss": 1.2547, + "step": 693 + }, + { + "epoch": 14.04, + "learning_rate": 0.00011147959183673471, + "loss": 1.208, + "step": 694 + }, + { + "epoch": 14.06, + "learning_rate": 0.00011135204081632653, + "loss": 1.223, + "step": 695 + }, + { + "epoch": 14.08, + "learning_rate": 0.00011122448979591837, + "loss": 1.2483, + "step": 696 + }, + { + "epoch": 14.1, + "learning_rate": 0.00011109693877551022, + "loss": 1.2823, + "step": 697 + }, + { + "epoch": 14.12, + "learning_rate": 0.00011096938775510204, + "loss": 1.2013, + "step": 698 + }, + { + "epoch": 14.14, + "learning_rate": 0.00011084183673469388, + "loss": 1.1883, + "step": 699 + }, + { + "epoch": 14.16, + "learning_rate": 0.00011071428571428572, + "loss": 1.2364, + "step": 700 + }, + { + "epoch": 14.18, + "learning_rate": 0.00011058673469387754, + "loss": 1.2069, + "step": 701 + }, + { + "epoch": 14.2, + "learning_rate": 0.00011045918367346939, + "loss": 1.1968, + "step": 702 + }, + { + "epoch": 14.22, + "learning_rate": 0.00011033163265306123, + "loss": 1.2236, + "step": 703 + }, + { + "epoch": 14.24, + "learning_rate": 0.00011020408163265306, + "loss": 1.1942, + "step": 704 + }, + { + "epoch": 14.26, + "learning_rate": 0.0001100765306122449, + "loss": 1.2561, + "step": 705 + }, + { + "epoch": 14.29, + "learning_rate": 0.00010994897959183674, + "loss": 1.1839, + "step": 706 + }, + { + "epoch": 14.31, + "learning_rate": 0.00010982142857142858, + "loss": 1.2128, + "step": 707 + }, + { + "epoch": 14.33, + "learning_rate": 0.00010969387755102041, + "loss": 1.3086, + "step": 708 + }, + { + "epoch": 14.35, + "learning_rate": 0.00010956632653061226, + "loss": 1.2379, + "step": 709 + }, + { + "epoch": 14.37, + "learning_rate": 0.0001094387755102041, + "loss": 1.176, + "step": 710 + }, + { + "epoch": 14.39, + "learning_rate": 0.00010931122448979592, + 
"loss": 1.2105, + "step": 711 + }, + { + "epoch": 14.41, + "learning_rate": 0.00010918367346938776, + "loss": 1.2149, + "step": 712 + }, + { + "epoch": 14.43, + "learning_rate": 0.0001090561224489796, + "loss": 1.2392, + "step": 713 + }, + { + "epoch": 14.45, + "learning_rate": 0.00010892857142857142, + "loss": 1.2471, + "step": 714 + }, + { + "epoch": 14.47, + "learning_rate": 0.00010880102040816327, + "loss": 1.2561, + "step": 715 + }, + { + "epoch": 14.49, + "learning_rate": 0.00010867346938775511, + "loss": 1.2179, + "step": 716 + }, + { + "epoch": 14.51, + "learning_rate": 0.00010854591836734696, + "loss": 1.2459, + "step": 717 + }, + { + "epoch": 14.53, + "learning_rate": 0.00010841836734693877, + "loss": 1.2933, + "step": 718 + }, + { + "epoch": 14.55, + "learning_rate": 0.00010829081632653062, + "loss": 1.2862, + "step": 719 + }, + { + "epoch": 14.57, + "learning_rate": 0.00010816326530612246, + "loss": 1.2976, + "step": 720 + }, + { + "epoch": 14.59, + "learning_rate": 0.00010803571428571428, + "loss": 1.231, + "step": 721 + }, + { + "epoch": 14.61, + "learning_rate": 0.00010790816326530613, + "loss": 1.2464, + "step": 722 + }, + { + "epoch": 14.63, + "learning_rate": 0.00010778061224489797, + "loss": 1.2181, + "step": 723 + }, + { + "epoch": 14.65, + "learning_rate": 0.00010765306122448979, + "loss": 1.3307, + "step": 724 + }, + { + "epoch": 14.67, + "learning_rate": 0.00010752551020408163, + "loss": 1.1723, + "step": 725 + }, + { + "epoch": 14.69, + "learning_rate": 0.00010739795918367348, + "loss": 1.1528, + "step": 726 + }, + { + "epoch": 14.71, + "learning_rate": 0.0001072704081632653, + "loss": 1.215, + "step": 727 + }, + { + "epoch": 14.73, + "learning_rate": 0.00010714285714285715, + "loss": 1.2624, + "step": 728 + }, + { + "epoch": 14.75, + "learning_rate": 0.00010701530612244898, + "loss": 1.3117, + "step": 729 + }, + { + "epoch": 14.77, + "learning_rate": 0.00010688775510204083, + "loss": 1.2572, + "step": 730 + }, + { + "epoch": 14.79, + "learning_rate": 0.00010676020408163266, + "loss": 1.222, + "step": 731 + }, + { + "epoch": 14.81, + "learning_rate": 0.0001066326530612245, + "loss": 1.2881, + "step": 732 + }, + { + "epoch": 14.83, + "learning_rate": 0.00010650510204081635, + "loss": 1.2676, + "step": 733 + }, + { + "epoch": 14.85, + "learning_rate": 0.00010637755102040816, + "loss": 1.2734, + "step": 734 + }, + { + "epoch": 14.87, + "learning_rate": 0.00010625000000000001, + "loss": 1.2885, + "step": 735 + }, + { + "epoch": 14.89, + "learning_rate": 0.00010612244897959185, + "loss": 1.2764, + "step": 736 + }, + { + "epoch": 14.91, + "learning_rate": 0.00010599489795918367, + "loss": 1.3267, + "step": 737 + }, + { + "epoch": 14.93, + "learning_rate": 0.00010586734693877551, + "loss": 1.2445, + "step": 738 + }, + { + "epoch": 14.95, + "learning_rate": 0.00010573979591836736, + "loss": 1.3359, + "step": 739 + }, + { + "epoch": 14.97, + "learning_rate": 0.00010561224489795918, + "loss": 1.2508, + "step": 740 + }, + { + "epoch": 14.99, + "learning_rate": 0.00010548469387755102, + "loss": 1.2227, + "step": 741 + }, + { + "epoch": 15.01, + "learning_rate": 0.00010535714285714286, + "loss": 1.1889, + "step": 742 + }, + { + "epoch": 15.03, + "learning_rate": 0.00010522959183673471, + "loss": 1.1919, + "step": 743 + }, + { + "epoch": 15.05, + "learning_rate": 0.00010510204081632653, + "loss": 1.2383, + "step": 744 + }, + { + "epoch": 15.07, + "learning_rate": 0.00010497448979591837, + "loss": 1.2401, + "step": 745 + }, + { + "epoch": 15.09, + "learning_rate": 
0.00010484693877551021, + "loss": 1.2015, + "step": 746 + }, + { + "epoch": 15.11, + "learning_rate": 0.00010471938775510203, + "loss": 1.1509, + "step": 747 + }, + { + "epoch": 15.13, + "learning_rate": 0.00010459183673469388, + "loss": 1.1878, + "step": 748 + }, + { + "epoch": 15.16, + "learning_rate": 0.00010446428571428572, + "loss": 1.1706, + "step": 749 + }, + { + "epoch": 15.18, + "learning_rate": 0.00010433673469387755, + "loss": 1.1285, + "step": 750 + }, + { + "epoch": 15.2, + "learning_rate": 0.0001042091836734694, + "loss": 1.1608, + "step": 751 + }, + { + "epoch": 15.22, + "learning_rate": 0.00010408163265306123, + "loss": 1.1178, + "step": 752 + }, + { + "epoch": 15.24, + "learning_rate": 0.00010395408163265306, + "loss": 1.1293, + "step": 753 + }, + { + "epoch": 15.26, + "learning_rate": 0.0001038265306122449, + "loss": 1.2306, + "step": 754 + }, + { + "epoch": 15.28, + "learning_rate": 0.00010369897959183675, + "loss": 1.1541, + "step": 755 + }, + { + "epoch": 15.3, + "learning_rate": 0.00010357142857142859, + "loss": 1.1702, + "step": 756 + }, + { + "epoch": 15.32, + "learning_rate": 0.00010344387755102041, + "loss": 1.2119, + "step": 757 + }, + { + "epoch": 15.34, + "learning_rate": 0.00010331632653061225, + "loss": 1.2239, + "step": 758 + }, + { + "epoch": 15.36, + "learning_rate": 0.0001031887755102041, + "loss": 1.2019, + "step": 759 + }, + { + "epoch": 15.38, + "learning_rate": 0.00010306122448979591, + "loss": 1.2197, + "step": 760 + }, + { + "epoch": 15.4, + "learning_rate": 0.00010293367346938776, + "loss": 1.1769, + "step": 761 + }, + { + "epoch": 15.42, + "learning_rate": 0.0001028061224489796, + "loss": 1.1907, + "step": 762 + }, + { + "epoch": 15.44, + "learning_rate": 0.00010267857142857142, + "loss": 1.2089, + "step": 763 + }, + { + "epoch": 15.46, + "learning_rate": 0.00010255102040816327, + "loss": 1.1335, + "step": 764 + }, + { + "epoch": 15.48, + "learning_rate": 0.00010242346938775511, + "loss": 1.1633, + "step": 765 + }, + { + "epoch": 15.5, + "learning_rate": 0.00010229591836734695, + "loss": 1.1578, + "step": 766 + }, + { + "epoch": 15.52, + "learning_rate": 0.00010216836734693877, + "loss": 1.2236, + "step": 767 + }, + { + "epoch": 15.54, + "learning_rate": 0.00010204081632653062, + "loss": 1.1941, + "step": 768 + }, + { + "epoch": 15.56, + "learning_rate": 0.00010191326530612246, + "loss": 1.2666, + "step": 769 + }, + { + "epoch": 15.58, + "learning_rate": 0.00010178571428571428, + "loss": 1.1232, + "step": 770 + }, + { + "epoch": 15.6, + "learning_rate": 0.00010165816326530612, + "loss": 1.2242, + "step": 771 + }, + { + "epoch": 15.62, + "learning_rate": 0.00010153061224489797, + "loss": 1.1852, + "step": 772 + }, + { + "epoch": 15.64, + "learning_rate": 0.0001014030612244898, + "loss": 1.2626, + "step": 773 + }, + { + "epoch": 15.66, + "learning_rate": 0.00010127551020408164, + "loss": 1.1873, + "step": 774 + }, + { + "epoch": 15.68, + "learning_rate": 0.00010114795918367349, + "loss": 1.3005, + "step": 775 + }, + { + "epoch": 15.7, + "learning_rate": 0.0001010204081632653, + "loss": 1.1904, + "step": 776 + }, + { + "epoch": 15.72, + "learning_rate": 0.00010089285714285715, + "loss": 1.2927, + "step": 777 + }, + { + "epoch": 15.74, + "learning_rate": 0.00010076530612244899, + "loss": 1.179, + "step": 778 + }, + { + "epoch": 15.76, + "learning_rate": 0.00010063775510204084, + "loss": 1.2027, + "step": 779 + }, + { + "epoch": 15.78, + "learning_rate": 0.00010051020408163265, + "loss": 1.2428, + "step": 780 + }, + { + "epoch": 15.8, + 
"learning_rate": 0.0001003826530612245, + "loss": 1.2324, + "step": 781 + }, + { + "epoch": 15.82, + "learning_rate": 0.00010025510204081634, + "loss": 1.1251, + "step": 782 + }, + { + "epoch": 15.84, + "learning_rate": 0.00010012755102040816, + "loss": 1.2405, + "step": 783 + }, + { + "epoch": 15.86, + "learning_rate": 0.0001, + "loss": 1.2005, + "step": 784 + }, + { + "epoch": 15.88, + "learning_rate": 9.987244897959184e-05, + "loss": 1.2259, + "step": 785 + }, + { + "epoch": 15.9, + "learning_rate": 9.974489795918368e-05, + "loss": 1.1576, + "step": 786 + }, + { + "epoch": 15.92, + "learning_rate": 9.961734693877551e-05, + "loss": 1.1834, + "step": 787 + }, + { + "epoch": 15.94, + "learning_rate": 9.948979591836736e-05, + "loss": 1.2396, + "step": 788 + }, + { + "epoch": 15.96, + "learning_rate": 9.936224489795919e-05, + "loss": 1.1865, + "step": 789 + }, + { + "epoch": 15.98, + "learning_rate": 9.923469387755102e-05, + "loss": 1.2356, + "step": 790 + }, + { + "epoch": 16.01, + "learning_rate": 9.910714285714286e-05, + "loss": 1.2639, + "step": 791 + }, + { + "epoch": 16.03, + "learning_rate": 9.897959183673469e-05, + "loss": 1.1216, + "step": 792 + }, + { + "epoch": 16.05, + "learning_rate": 9.885204081632652e-05, + "loss": 1.1051, + "step": 793 + }, + { + "epoch": 16.07, + "learning_rate": 9.872448979591837e-05, + "loss": 1.0864, + "step": 794 + }, + { + "epoch": 16.09, + "learning_rate": 9.859693877551021e-05, + "loss": 1.182, + "step": 795 + }, + { + "epoch": 16.11, + "learning_rate": 9.846938775510204e-05, + "loss": 1.1272, + "step": 796 + }, + { + "epoch": 16.13, + "learning_rate": 9.834183673469389e-05, + "loss": 1.1946, + "step": 797 + }, + { + "epoch": 16.15, + "learning_rate": 9.821428571428572e-05, + "loss": 1.0875, + "step": 798 + }, + { + "epoch": 16.17, + "learning_rate": 9.808673469387756e-05, + "loss": 1.1671, + "step": 799 + }, + { + "epoch": 16.19, + "learning_rate": 9.79591836734694e-05, + "loss": 1.1502, + "step": 800 + }, + { + "epoch": 16.21, + "learning_rate": 9.783163265306124e-05, + "loss": 1.19, + "step": 801 + }, + { + "epoch": 16.23, + "learning_rate": 9.770408163265307e-05, + "loss": 1.1258, + "step": 802 + }, + { + "epoch": 16.25, + "learning_rate": 9.75765306122449e-05, + "loss": 1.1765, + "step": 803 + }, + { + "epoch": 16.27, + "learning_rate": 9.744897959183674e-05, + "loss": 1.1217, + "step": 804 + }, + { + "epoch": 16.29, + "learning_rate": 9.732142857142858e-05, + "loss": 1.1293, + "step": 805 + }, + { + "epoch": 16.31, + "learning_rate": 9.719387755102042e-05, + "loss": 1.17, + "step": 806 + }, + { + "epoch": 16.33, + "learning_rate": 9.706632653061225e-05, + "loss": 1.17, + "step": 807 + }, + { + "epoch": 16.35, + "learning_rate": 9.693877551020408e-05, + "loss": 1.2004, + "step": 808 + }, + { + "epoch": 16.37, + "learning_rate": 9.681122448979593e-05, + "loss": 1.1648, + "step": 809 + }, + { + "epoch": 16.39, + "learning_rate": 9.668367346938776e-05, + "loss": 1.0688, + "step": 810 + }, + { + "epoch": 16.41, + "learning_rate": 9.655612244897959e-05, + "loss": 1.1607, + "step": 811 + }, + { + "epoch": 16.43, + "learning_rate": 9.642857142857143e-05, + "loss": 1.1298, + "step": 812 + }, + { + "epoch": 16.45, + "learning_rate": 9.630102040816326e-05, + "loss": 1.1064, + "step": 813 + }, + { + "epoch": 16.47, + "learning_rate": 9.617346938775511e-05, + "loss": 1.1472, + "step": 814 + }, + { + "epoch": 16.49, + "learning_rate": 9.604591836734694e-05, + "loss": 1.1577, + "step": 815 + }, + { + "epoch": 16.51, + "learning_rate": 9.591836734693878e-05, + 
"loss": 1.1436, + "step": 816 + }, + { + "epoch": 16.53, + "learning_rate": 9.579081632653061e-05, + "loss": 1.1657, + "step": 817 + }, + { + "epoch": 16.55, + "learning_rate": 9.566326530612246e-05, + "loss": 1.1147, + "step": 818 + }, + { + "epoch": 16.57, + "learning_rate": 9.553571428571429e-05, + "loss": 1.1839, + "step": 819 + }, + { + "epoch": 16.59, + "learning_rate": 9.540816326530613e-05, + "loss": 1.1298, + "step": 820 + }, + { + "epoch": 16.61, + "learning_rate": 9.528061224489796e-05, + "loss": 1.2141, + "step": 821 + }, + { + "epoch": 16.63, + "learning_rate": 9.515306122448981e-05, + "loss": 1.2045, + "step": 822 + }, + { + "epoch": 16.65, + "learning_rate": 9.502551020408164e-05, + "loss": 1.1791, + "step": 823 + }, + { + "epoch": 16.67, + "learning_rate": 9.489795918367348e-05, + "loss": 1.1137, + "step": 824 + }, + { + "epoch": 16.69, + "learning_rate": 9.477040816326531e-05, + "loss": 1.1312, + "step": 825 + }, + { + "epoch": 16.71, + "learning_rate": 9.464285714285715e-05, + "loss": 1.1102, + "step": 826 + }, + { + "epoch": 16.73, + "learning_rate": 9.451530612244899e-05, + "loss": 1.1865, + "step": 827 + }, + { + "epoch": 16.75, + "learning_rate": 9.438775510204082e-05, + "loss": 1.1232, + "step": 828 + }, + { + "epoch": 16.77, + "learning_rate": 9.426020408163265e-05, + "loss": 1.2068, + "step": 829 + }, + { + "epoch": 16.79, + "learning_rate": 9.41326530612245e-05, + "loss": 1.1864, + "step": 830 + }, + { + "epoch": 16.81, + "learning_rate": 9.400510204081633e-05, + "loss": 1.2195, + "step": 831 + }, + { + "epoch": 16.83, + "learning_rate": 9.387755102040817e-05, + "loss": 1.2063, + "step": 832 + }, + { + "epoch": 16.85, + "learning_rate": 9.375e-05, + "loss": 1.1455, + "step": 833 + }, + { + "epoch": 16.88, + "learning_rate": 9.362244897959183e-05, + "loss": 1.1819, + "step": 834 + }, + { + "epoch": 16.9, + "learning_rate": 9.349489795918368e-05, + "loss": 1.1887, + "step": 835 + }, + { + "epoch": 16.92, + "learning_rate": 9.336734693877551e-05, + "loss": 1.1557, + "step": 836 + }, + { + "epoch": 16.94, + "learning_rate": 9.323979591836735e-05, + "loss": 1.2094, + "step": 837 + }, + { + "epoch": 16.96, + "learning_rate": 9.311224489795918e-05, + "loss": 1.1512, + "step": 838 + }, + { + "epoch": 16.98, + "learning_rate": 9.298469387755103e-05, + "loss": 1.1463, + "step": 839 + }, + { + "epoch": 17.0, + "learning_rate": 9.285714285714286e-05, + "loss": 1.155, + "step": 840 + }, + { + "epoch": 17.02, + "learning_rate": 9.27295918367347e-05, + "loss": 1.1292, + "step": 841 + }, + { + "epoch": 17.04, + "learning_rate": 9.260204081632653e-05, + "loss": 1.0996, + "step": 842 + }, + { + "epoch": 17.06, + "learning_rate": 9.247448979591838e-05, + "loss": 1.0662, + "step": 843 + }, + { + "epoch": 17.08, + "learning_rate": 9.234693877551021e-05, + "loss": 1.0931, + "step": 844 + }, + { + "epoch": 17.1, + "learning_rate": 9.221938775510205e-05, + "loss": 1.0727, + "step": 845 + }, + { + "epoch": 17.12, + "learning_rate": 9.209183673469388e-05, + "loss": 1.1043, + "step": 846 + }, + { + "epoch": 17.14, + "learning_rate": 9.196428571428572e-05, + "loss": 1.0594, + "step": 847 + }, + { + "epoch": 17.16, + "learning_rate": 9.183673469387756e-05, + "loss": 1.0952, + "step": 848 + }, + { + "epoch": 17.18, + "learning_rate": 9.170918367346939e-05, + "loss": 1.0639, + "step": 849 + }, + { + "epoch": 17.2, + "learning_rate": 9.158163265306124e-05, + "loss": 1.132, + "step": 850 + }, + { + "epoch": 17.22, + "learning_rate": 9.145408163265307e-05, + "loss": 1.1083, + "step": 851 + }, + { 
+ "epoch": 17.24, + "learning_rate": 9.13265306122449e-05, + "loss": 1.1282, + "step": 852 + }, + { + "epoch": 17.26, + "learning_rate": 9.119897959183674e-05, + "loss": 1.0474, + "step": 853 + }, + { + "epoch": 17.28, + "learning_rate": 9.107142857142857e-05, + "loss": 1.1138, + "step": 854 + }, + { + "epoch": 17.3, + "learning_rate": 9.094387755102042e-05, + "loss": 1.1025, + "step": 855 + }, + { + "epoch": 17.32, + "learning_rate": 9.081632653061225e-05, + "loss": 1.0968, + "step": 856 + }, + { + "epoch": 17.34, + "learning_rate": 9.068877551020408e-05, + "loss": 1.1683, + "step": 857 + }, + { + "epoch": 17.36, + "learning_rate": 9.056122448979592e-05, + "loss": 1.0975, + "step": 858 + }, + { + "epoch": 17.38, + "learning_rate": 9.043367346938775e-05, + "loss": 1.1274, + "step": 859 + }, + { + "epoch": 17.4, + "learning_rate": 9.030612244897958e-05, + "loss": 1.0916, + "step": 860 + }, + { + "epoch": 17.42, + "learning_rate": 9.017857142857143e-05, + "loss": 1.0912, + "step": 861 + }, + { + "epoch": 17.44, + "learning_rate": 9.005102040816327e-05, + "loss": 1.0875, + "step": 862 + }, + { + "epoch": 17.46, + "learning_rate": 8.99234693877551e-05, + "loss": 1.05, + "step": 863 + }, + { + "epoch": 17.48, + "learning_rate": 8.979591836734695e-05, + "loss": 1.1418, + "step": 864 + }, + { + "epoch": 17.5, + "learning_rate": 8.966836734693878e-05, + "loss": 1.0609, + "step": 865 + }, + { + "epoch": 17.52, + "learning_rate": 8.954081632653062e-05, + "loss": 1.1611, + "step": 866 + }, + { + "epoch": 17.54, + "learning_rate": 8.941326530612245e-05, + "loss": 1.1065, + "step": 867 + }, + { + "epoch": 17.56, + "learning_rate": 8.92857142857143e-05, + "loss": 1.1611, + "step": 868 + }, + { + "epoch": 17.58, + "learning_rate": 8.915816326530613e-05, + "loss": 1.1398, + "step": 869 + }, + { + "epoch": 17.6, + "learning_rate": 8.903061224489796e-05, + "loss": 1.1055, + "step": 870 + }, + { + "epoch": 17.62, + "learning_rate": 8.89030612244898e-05, + "loss": 1.1314, + "step": 871 + }, + { + "epoch": 17.64, + "learning_rate": 8.877551020408164e-05, + "loss": 1.1084, + "step": 872 + }, + { + "epoch": 17.66, + "learning_rate": 8.864795918367348e-05, + "loss": 1.1254, + "step": 873 + }, + { + "epoch": 17.68, + "learning_rate": 8.852040816326531e-05, + "loss": 1.142, + "step": 874 + }, + { + "epoch": 17.7, + "learning_rate": 8.839285714285714e-05, + "loss": 1.1371, + "step": 875 + }, + { + "epoch": 17.72, + "learning_rate": 8.826530612244899e-05, + "loss": 1.1092, + "step": 876 + }, + { + "epoch": 17.75, + "learning_rate": 8.813775510204082e-05, + "loss": 1.161, + "step": 877 + }, + { + "epoch": 17.77, + "learning_rate": 8.801020408163265e-05, + "loss": 1.1044, + "step": 878 + }, + { + "epoch": 17.79, + "learning_rate": 8.788265306122449e-05, + "loss": 1.117, + "step": 879 + }, + { + "epoch": 17.81, + "learning_rate": 8.775510204081632e-05, + "loss": 1.1262, + "step": 880 + }, + { + "epoch": 17.83, + "learning_rate": 8.762755102040817e-05, + "loss": 1.0829, + "step": 881 + }, + { + "epoch": 17.85, + "learning_rate": 8.75e-05, + "loss": 1.1393, + "step": 882 + }, + { + "epoch": 17.87, + "learning_rate": 8.737244897959183e-05, + "loss": 1.1781, + "step": 883 + }, + { + "epoch": 17.89, + "learning_rate": 8.724489795918367e-05, + "loss": 1.1582, + "step": 884 + }, + { + "epoch": 17.91, + "learning_rate": 8.711734693877552e-05, + "loss": 1.1469, + "step": 885 + }, + { + "epoch": 17.93, + "learning_rate": 8.698979591836735e-05, + "loss": 1.1494, + "step": 886 + }, + { + "epoch": 17.95, + "learning_rate": 
8.68622448979592e-05, + "loss": 1.1251, + "step": 887 + }, + { + "epoch": 17.97, + "learning_rate": 8.673469387755102e-05, + "loss": 1.1624, + "step": 888 + }, + { + "epoch": 17.99, + "learning_rate": 8.660714285714287e-05, + "loss": 1.0842, + "step": 889 + }, + { + "epoch": 18.01, + "learning_rate": 8.64795918367347e-05, + "loss": 1.1944, + "step": 890 + }, + { + "epoch": 18.03, + "learning_rate": 8.635204081632653e-05, + "loss": 1.0642, + "step": 891 + }, + { + "epoch": 18.05, + "learning_rate": 8.622448979591838e-05, + "loss": 1.0459, + "step": 892 + }, + { + "epoch": 18.07, + "learning_rate": 8.60969387755102e-05, + "loss": 1.0941, + "step": 893 + }, + { + "epoch": 18.09, + "learning_rate": 8.596938775510205e-05, + "loss": 1.0457, + "step": 894 + }, + { + "epoch": 18.11, + "learning_rate": 8.584183673469388e-05, + "loss": 1.1033, + "step": 895 + }, + { + "epoch": 18.13, + "learning_rate": 8.571428571428571e-05, + "loss": 1.0756, + "step": 896 + }, + { + "epoch": 18.15, + "learning_rate": 8.558673469387756e-05, + "loss": 1.0615, + "step": 897 + }, + { + "epoch": 18.17, + "learning_rate": 8.545918367346939e-05, + "loss": 1.0828, + "step": 898 + }, + { + "epoch": 18.19, + "learning_rate": 8.533163265306123e-05, + "loss": 1.1158, + "step": 899 + }, + { + "epoch": 18.21, + "learning_rate": 8.520408163265306e-05, + "loss": 1.0133, + "step": 900 + }, + { + "epoch": 18.23, + "learning_rate": 8.50765306122449e-05, + "loss": 1.0437, + "step": 901 + }, + { + "epoch": 18.25, + "learning_rate": 8.494897959183674e-05, + "loss": 1.0372, + "step": 902 + }, + { + "epoch": 18.27, + "learning_rate": 8.482142857142857e-05, + "loss": 1.1012, + "step": 903 + }, + { + "epoch": 18.29, + "learning_rate": 8.469387755102041e-05, + "loss": 1.0777, + "step": 904 + }, + { + "epoch": 18.31, + "learning_rate": 8.456632653061224e-05, + "loss": 1.0799, + "step": 905 + }, + { + "epoch": 18.33, + "learning_rate": 8.443877551020409e-05, + "loss": 0.9846, + "step": 906 + }, + { + "epoch": 18.35, + "learning_rate": 8.431122448979592e-05, + "loss": 1.1, + "step": 907 + }, + { + "epoch": 18.37, + "learning_rate": 8.418367346938776e-05, + "loss": 1.0787, + "step": 908 + }, + { + "epoch": 18.39, + "learning_rate": 8.40561224489796e-05, + "loss": 1.0647, + "step": 909 + }, + { + "epoch": 18.41, + "learning_rate": 8.392857142857144e-05, + "loss": 1.056, + "step": 910 + }, + { + "epoch": 18.43, + "learning_rate": 8.380102040816327e-05, + "loss": 1.1131, + "step": 911 + }, + { + "epoch": 18.45, + "learning_rate": 8.367346938775511e-05, + "loss": 1.0825, + "step": 912 + }, + { + "epoch": 18.47, + "learning_rate": 8.354591836734695e-05, + "loss": 1.0681, + "step": 913 + }, + { + "epoch": 18.49, + "learning_rate": 8.341836734693878e-05, + "loss": 1.0479, + "step": 914 + }, + { + "epoch": 18.51, + "learning_rate": 8.329081632653062e-05, + "loss": 1.0921, + "step": 915 + }, + { + "epoch": 18.53, + "learning_rate": 8.316326530612245e-05, + "loss": 1.0626, + "step": 916 + }, + { + "epoch": 18.55, + "learning_rate": 8.30357142857143e-05, + "loss": 1.0518, + "step": 917 + }, + { + "epoch": 18.57, + "learning_rate": 8.290816326530613e-05, + "loss": 1.0557, + "step": 918 + }, + { + "epoch": 18.6, + "learning_rate": 8.278061224489796e-05, + "loss": 1.0831, + "step": 919 + }, + { + "epoch": 18.62, + "learning_rate": 8.26530612244898e-05, + "loss": 1.0307, + "step": 920 + }, + { + "epoch": 18.64, + "learning_rate": 8.252551020408163e-05, + "loss": 1.0455, + "step": 921 + }, + { + "epoch": 18.66, + "learning_rate": 8.239795918367348e-05, + 
"loss": 1.0667, + "step": 922 + }, + { + "epoch": 18.68, + "learning_rate": 8.227040816326531e-05, + "loss": 1.0736, + "step": 923 + }, + { + "epoch": 18.7, + "learning_rate": 8.214285714285714e-05, + "loss": 1.0108, + "step": 924 + }, + { + "epoch": 18.72, + "learning_rate": 8.201530612244898e-05, + "loss": 1.0458, + "step": 925 + }, + { + "epoch": 18.74, + "learning_rate": 8.188775510204081e-05, + "loss": 1.0852, + "step": 926 + }, + { + "epoch": 18.76, + "learning_rate": 8.176020408163265e-05, + "loss": 1.1207, + "step": 927 + }, + { + "epoch": 18.78, + "learning_rate": 8.163265306122449e-05, + "loss": 1.0914, + "step": 928 + }, + { + "epoch": 18.8, + "learning_rate": 8.150510204081633e-05, + "loss": 1.1108, + "step": 929 + }, + { + "epoch": 18.82, + "learning_rate": 8.137755102040817e-05, + "loss": 1.1394, + "step": 930 + }, + { + "epoch": 18.84, + "learning_rate": 8.125000000000001e-05, + "loss": 1.029, + "step": 931 + }, + { + "epoch": 18.86, + "learning_rate": 8.112244897959184e-05, + "loss": 1.0661, + "step": 932 + }, + { + "epoch": 18.88, + "learning_rate": 8.099489795918369e-05, + "loss": 1.0303, + "step": 933 + }, + { + "epoch": 18.9, + "learning_rate": 8.086734693877552e-05, + "loss": 1.1144, + "step": 934 + }, + { + "epoch": 18.92, + "learning_rate": 8.073979591836736e-05, + "loss": 1.1096, + "step": 935 + }, + { + "epoch": 18.94, + "learning_rate": 8.061224489795919e-05, + "loss": 1.123, + "step": 936 + }, + { + "epoch": 18.96, + "learning_rate": 8.048469387755102e-05, + "loss": 1.1002, + "step": 937 + }, + { + "epoch": 18.98, + "learning_rate": 8.035714285714287e-05, + "loss": 1.1016, + "step": 938 + }, + { + "epoch": 19.0, + "learning_rate": 8.02295918367347e-05, + "loss": 1.0847, + "step": 939 + }, + { + "epoch": 19.02, + "learning_rate": 8.010204081632653e-05, + "loss": 1.1029, + "step": 940 + }, + { + "epoch": 19.04, + "learning_rate": 7.997448979591837e-05, + "loss": 1.041, + "step": 941 + }, + { + "epoch": 19.06, + "learning_rate": 7.98469387755102e-05, + "loss": 1.01, + "step": 942 + }, + { + "epoch": 19.08, + "learning_rate": 7.971938775510205e-05, + "loss": 1.0197, + "step": 943 + }, + { + "epoch": 19.1, + "learning_rate": 7.959183673469388e-05, + "loss": 1.0543, + "step": 944 + }, + { + "epoch": 19.12, + "learning_rate": 7.946428571428571e-05, + "loss": 1.0369, + "step": 945 + }, + { + "epoch": 19.14, + "learning_rate": 7.933673469387755e-05, + "loss": 1.0154, + "step": 946 + }, + { + "epoch": 19.16, + "learning_rate": 7.920918367346939e-05, + "loss": 0.9546, + "step": 947 + }, + { + "epoch": 19.18, + "learning_rate": 7.908163265306123e-05, + "loss": 0.9982, + "step": 948 + }, + { + "epoch": 19.2, + "learning_rate": 7.895408163265306e-05, + "loss": 1.0748, + "step": 949 + }, + { + "epoch": 19.22, + "learning_rate": 7.882653061224489e-05, + "loss": 1.0562, + "step": 950 + }, + { + "epoch": 19.24, + "learning_rate": 7.869897959183674e-05, + "loss": 1.0352, + "step": 951 + }, + { + "epoch": 19.26, + "learning_rate": 7.857142857142858e-05, + "loss": 0.9976, + "step": 952 + }, + { + "epoch": 19.28, + "learning_rate": 7.844387755102041e-05, + "loss": 1.0221, + "step": 953 + }, + { + "epoch": 19.3, + "learning_rate": 7.831632653061226e-05, + "loss": 1.0119, + "step": 954 + }, + { + "epoch": 19.32, + "learning_rate": 7.818877551020409e-05, + "loss": 1.0657, + "step": 955 + }, + { + "epoch": 19.34, + "learning_rate": 7.806122448979593e-05, + "loss": 0.9591, + "step": 956 + }, + { + "epoch": 19.36, + "learning_rate": 7.793367346938776e-05, + "loss": 1.0101, + "step": 957 + 
}, + { + "epoch": 19.38, + "learning_rate": 7.780612244897959e-05, + "loss": 1.0453, + "step": 958 + }, + { + "epoch": 19.4, + "learning_rate": 7.767857142857144e-05, + "loss": 1.0461, + "step": 959 + }, + { + "epoch": 19.42, + "learning_rate": 7.755102040816327e-05, + "loss": 1.0959, + "step": 960 + }, + { + "epoch": 19.44, + "learning_rate": 7.742346938775511e-05, + "loss": 1.0608, + "step": 961 + }, + { + "epoch": 19.47, + "learning_rate": 7.729591836734694e-05, + "loss": 1.1177, + "step": 962 + }, + { + "epoch": 19.49, + "learning_rate": 7.716836734693877e-05, + "loss": 1.0354, + "step": 963 + }, + { + "epoch": 19.51, + "learning_rate": 7.704081632653062e-05, + "loss": 1.0507, + "step": 964 + }, + { + "epoch": 19.53, + "learning_rate": 7.691326530612245e-05, + "loss": 1.0313, + "step": 965 + }, + { + "epoch": 19.55, + "learning_rate": 7.67857142857143e-05, + "loss": 1.0569, + "step": 966 + }, + { + "epoch": 19.57, + "learning_rate": 7.665816326530612e-05, + "loss": 1.0862, + "step": 967 + }, + { + "epoch": 19.59, + "learning_rate": 7.653061224489796e-05, + "loss": 1.0593, + "step": 968 + }, + { + "epoch": 19.61, + "learning_rate": 7.64030612244898e-05, + "loss": 1.0602, + "step": 969 + }, + { + "epoch": 19.63, + "learning_rate": 7.627551020408163e-05, + "loss": 1.0048, + "step": 970 + }, + { + "epoch": 19.65, + "learning_rate": 7.614795918367347e-05, + "loss": 1.0346, + "step": 971 + }, + { + "epoch": 19.67, + "learning_rate": 7.60204081632653e-05, + "loss": 1.0172, + "step": 972 + }, + { + "epoch": 19.69, + "learning_rate": 7.589285714285714e-05, + "loss": 1.02, + "step": 973 + }, + { + "epoch": 19.71, + "learning_rate": 7.576530612244898e-05, + "loss": 1.0028, + "step": 974 + }, + { + "epoch": 19.73, + "learning_rate": 7.563775510204083e-05, + "loss": 1.08, + "step": 975 + }, + { + "epoch": 19.75, + "learning_rate": 7.551020408163266e-05, + "loss": 1.0402, + "step": 976 + }, + { + "epoch": 19.77, + "learning_rate": 7.53826530612245e-05, + "loss": 1.0567, + "step": 977 + }, + { + "epoch": 19.79, + "learning_rate": 7.525510204081633e-05, + "loss": 1.0169, + "step": 978 + }, + { + "epoch": 19.81, + "learning_rate": 7.512755102040818e-05, + "loss": 0.9881, + "step": 979 + }, + { + "epoch": 19.83, + "learning_rate": 7.500000000000001e-05, + "loss": 1.0677, + "step": 980 + }, + { + "epoch": 19.85, + "learning_rate": 7.487244897959184e-05, + "loss": 1.1026, + "step": 981 + }, + { + "epoch": 19.87, + "learning_rate": 7.474489795918368e-05, + "loss": 1.0101, + "step": 982 + }, + { + "epoch": 19.89, + "learning_rate": 7.461734693877551e-05, + "loss": 1.069, + "step": 983 + }, + { + "epoch": 19.91, + "learning_rate": 7.448979591836736e-05, + "loss": 1.0493, + "step": 984 + }, + { + "epoch": 19.93, + "learning_rate": 7.436224489795919e-05, + "loss": 1.0858, + "step": 985 + }, + { + "epoch": 19.95, + "learning_rate": 7.423469387755102e-05, + "loss": 1.0734, + "step": 986 + }, + { + "epoch": 19.97, + "learning_rate": 7.410714285714286e-05, + "loss": 1.0203, + "step": 987 + }, + { + "epoch": 19.99, + "learning_rate": 7.39795918367347e-05, + "loss": 1.0285, + "step": 988 + }, + { + "epoch": 20.01, + "learning_rate": 7.385204081632653e-05, + "loss": 0.9446, + "step": 989 + }, + { + "epoch": 20.03, + "learning_rate": 7.372448979591837e-05, + "loss": 0.9915, + "step": 990 + }, + { + "epoch": 20.05, + "learning_rate": 7.35969387755102e-05, + "loss": 0.9882, + "step": 991 + }, + { + "epoch": 20.07, + "learning_rate": 7.346938775510205e-05, + "loss": 0.9338, + "step": 992 + }, + { + "epoch": 20.09, + 
"learning_rate": 7.334183673469388e-05, + "loss": 0.942, + "step": 993 + }, + { + "epoch": 20.11, + "learning_rate": 7.321428571428571e-05, + "loss": 0.9725, + "step": 994 + }, + { + "epoch": 20.13, + "learning_rate": 7.308673469387755e-05, + "loss": 1.027, + "step": 995 + }, + { + "epoch": 20.15, + "learning_rate": 7.29591836734694e-05, + "loss": 1.0081, + "step": 996 + }, + { + "epoch": 20.17, + "learning_rate": 7.283163265306123e-05, + "loss": 1.0117, + "step": 997 + }, + { + "epoch": 20.19, + "learning_rate": 7.270408163265307e-05, + "loss": 0.969, + "step": 998 + }, + { + "epoch": 20.21, + "learning_rate": 7.25765306122449e-05, + "loss": 1.0024, + "step": 999 + }, + { + "epoch": 20.23, + "learning_rate": 7.244897959183675e-05, + "loss": 0.994, + "step": 1000 + } + ], + "logging_steps": 1, + "max_steps": 1568, + "num_train_epochs": 32, + "save_steps": 100, + "total_flos": 1.4801648710853222e+18, + "trial_name": null, + "trial_params": null +} diff --git a/checkpoint-1000/training_args.bin b/checkpoint-1000/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..db23e07d097c18532e52f58a70eb72d22e39c8c1 --- /dev/null +++ b/checkpoint-1000/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b610cbc4242bb50b4985b00e205994ae514fec6d9e2273f2b545a583a07b154b +size 4155 diff --git a/checkpoint-1100/README.md b/checkpoint-1100/README.md new file mode 100644 index 0000000000000000000000000000000000000000..08371015f02382e6fcba318f4aaea54ae52cd3c4 --- /dev/null +++ b/checkpoint-1100/README.md @@ -0,0 +1,34 @@ +--- +library_name: peft +--- +## Training procedure + + +The following `bitsandbytes` quantization config was used during training: +- quant_method: bitsandbytes +- load_in_8bit: True +- load_in_4bit: False +- llm_int8_threshold: 6.0 +- llm_int8_skip_modules: None +- llm_int8_enable_fp32_cpu_offload: False +- llm_int8_has_fp16_weight: False +- bnb_4bit_quant_type: fp4 +- bnb_4bit_use_double_quant: False +- bnb_4bit_compute_dtype: float32 + +The following `bitsandbytes` quantization config was used during training: +- quant_method: bitsandbytes +- load_in_8bit: True +- load_in_4bit: False +- llm_int8_threshold: 6.0 +- llm_int8_skip_modules: None +- llm_int8_enable_fp32_cpu_offload: False +- llm_int8_has_fp16_weight: False +- bnb_4bit_quant_type: fp4 +- bnb_4bit_use_double_quant: False +- bnb_4bit_compute_dtype: float32 +### Framework versions + +- PEFT 0.6.0.dev0 + +- PEFT 0.6.0.dev0 diff --git a/checkpoint-1100/adapter_config.json b/checkpoint-1100/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..751d838ac0c1ae5ca71ca448b25d7a8a0173f01b --- /dev/null +++ b/checkpoint-1100/adapter_config.json @@ -0,0 +1,23 @@ +{ + "auto_mapping": null, + "base_model_name_or_path": "bigscience/bloomz-3b", + "bias": "none", + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": true, + "layers_pattern": null, + "layers_to_transform": null, + "lora_alpha": 16, + "lora_dropout": 0.0, + "modules_to_save": null, + "peft_type": "LORA", + "r": 8, + "revision": null, + "target_modules": [ + "dense_4h_to_h", + "dense", + "dense_h_to_4h", + "query_key_value" + ], + "task_type": "CAUSAL_LM" +} \ No newline at end of file diff --git a/checkpoint-1100/adapter_model.bin b/checkpoint-1100/adapter_model.bin new file mode 100644 index 0000000000000000000000000000000000000000..7dcf0dc8d5443bd22121bd973b23076054b3ffc1 --- /dev/null +++ b/checkpoint-1100/adapter_model.bin @@ -0,0 +1,3 @@ +version 
https://git-lfs.github.com/spec/v1 +oid sha256:475f2b2ac94b5181337d2afa8e4c9ca58b6bb0a30162d0174dc0d849d31a30b0 +size 39409357 diff --git a/checkpoint-1100/optimizer.pt b/checkpoint-1100/optimizer.pt new file mode 100644 index 0000000000000000000000000000000000000000..b17fb83a70f4d6556bd2414b6993be01bb4b300d --- /dev/null +++ b/checkpoint-1100/optimizer.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c2866f6f91b96afb3514ac044018e0555e8cbf2349cf0707fcab04ed3fddf495 +size 78844421 diff --git a/checkpoint-1100/rng_state.pth b/checkpoint-1100/rng_state.pth new file mode 100644 index 0000000000000000000000000000000000000000..6c28f5beeeb3e640e81e05216ffbd6b8723d5723 --- /dev/null +++ b/checkpoint-1100/rng_state.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8a2ce2e2326ca04fc964861061399d5335411f688384a0f62f8f943f1fff584e +size 14575 diff --git a/checkpoint-1100/scheduler.pt b/checkpoint-1100/scheduler.pt new file mode 100644 index 0000000000000000000000000000000000000000..93a79f6932abedd9808abc7ecd95a072e0a7d19f --- /dev/null +++ b/checkpoint-1100/scheduler.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4088610c769d87bd694b880357ee2736f8d9456a674ba2deac6ae25ba56fffaa +size 627 diff --git a/checkpoint-1100/special_tokens_map.json b/checkpoint-1100/special_tokens_map.json new file mode 100644 index 0000000000000000000000000000000000000000..fdafe480f024ff444c7492147536765ce5d55a2d --- /dev/null +++ b/checkpoint-1100/special_tokens_map.json @@ -0,0 +1,6 @@ +{ + "bos_token": "", + "eos_token": "", + "pad_token": "", + "unk_token": "" +} diff --git a/checkpoint-1100/tokenizer.json b/checkpoint-1100/tokenizer.json new file mode 100644 index 0000000000000000000000000000000000000000..dbf002cafbd4818dcff2abc9156c088d681b4533 --- /dev/null +++ b/checkpoint-1100/tokenizer.json @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:17a208233d2ee8d8c83b23bc214df737c44806a1919f444e89b31e586cd956ba +size 14500471 diff --git a/checkpoint-1100/tokenizer_config.json b/checkpoint-1100/tokenizer_config.json new file mode 100644 index 0000000000000000000000000000000000000000..4b56cc9c2965c07132c35df3e2972e93d98c82c3 --- /dev/null +++ b/checkpoint-1100/tokenizer_config.json @@ -0,0 +1,10 @@ +{ + "add_prefix_space": false, + "bos_token": "", + "clean_up_tokenization_spaces": false, + "eos_token": "", + "model_max_length": 1000000000000000019884624838656, + "pad_token": "", + "tokenizer_class": "BloomTokenizer", + "unk_token": "" +} diff --git a/checkpoint-1100/trainer_state.json b/checkpoint-1100/trainer_state.json new file mode 100644 index 0000000000000000000000000000000000000000..aaf6a541ca340e29c7cd0e489d61a46e1a63d6b3 --- /dev/null +++ b/checkpoint-1100/trainer_state.json @@ -0,0 +1,6619 @@ +{ + "best_metric": null, + "best_model_checkpoint": null, + "epoch": 22.25735061650332, + "eval_steps": 500, + "global_step": 1100, + "is_hyper_param_search": false, + "is_local_process_zero": true, + "is_world_process_zero": true, + "log_history": [ + { + "epoch": 0.02, + "learning_rate": 0.00019987244897959184, + "loss": 3.2215, + "step": 1 + }, + { + "epoch": 0.04, + "learning_rate": 0.00019974489795918367, + "loss": 2.8365, + "step": 2 + }, + { + "epoch": 0.06, + "learning_rate": 0.00019961734693877553, + "loss": 2.602, + "step": 3 + }, + { + "epoch": 0.08, + "learning_rate": 0.00019948979591836736, + "loss": 2.4196, + "step": 4 + }, + { + "epoch": 0.1, + "learning_rate": 0.0001993622448979592, + "loss": 2.2574, + "step": 5 
+ }, + { + "epoch": 0.12, + "learning_rate": 0.00019923469387755102, + "loss": 2.2239, + "step": 6 + }, + { + "epoch": 0.14, + "learning_rate": 0.00019910714285714288, + "loss": 2.1661, + "step": 7 + }, + { + "epoch": 0.16, + "learning_rate": 0.0001989795918367347, + "loss": 2.0987, + "step": 8 + }, + { + "epoch": 0.18, + "learning_rate": 0.00019885204081632654, + "loss": 2.015, + "step": 9 + }, + { + "epoch": 0.2, + "learning_rate": 0.00019872448979591837, + "loss": 1.9771, + "step": 10 + }, + { + "epoch": 0.22, + "learning_rate": 0.00019859693877551023, + "loss": 2.0271, + "step": 11 + }, + { + "epoch": 0.24, + "learning_rate": 0.00019846938775510203, + "loss": 1.9812, + "step": 12 + }, + { + "epoch": 0.26, + "learning_rate": 0.0001983418367346939, + "loss": 2.0834, + "step": 13 + }, + { + "epoch": 0.28, + "learning_rate": 0.00019821428571428572, + "loss": 1.9174, + "step": 14 + }, + { + "epoch": 0.3, + "learning_rate": 0.00019808673469387755, + "loss": 1.8409, + "step": 15 + }, + { + "epoch": 0.32, + "learning_rate": 0.00019795918367346938, + "loss": 1.929, + "step": 16 + }, + { + "epoch": 0.34, + "learning_rate": 0.00019783163265306124, + "loss": 2.0041, + "step": 17 + }, + { + "epoch": 0.36, + "learning_rate": 0.00019770408163265305, + "loss": 1.9385, + "step": 18 + }, + { + "epoch": 0.38, + "learning_rate": 0.0001975765306122449, + "loss": 1.9592, + "step": 19 + }, + { + "epoch": 0.4, + "learning_rate": 0.00019744897959183674, + "loss": 1.9701, + "step": 20 + }, + { + "epoch": 0.42, + "learning_rate": 0.0001973214285714286, + "loss": 1.9277, + "step": 21 + }, + { + "epoch": 0.45, + "learning_rate": 0.00019719387755102042, + "loss": 1.8394, + "step": 22 + }, + { + "epoch": 0.47, + "learning_rate": 0.00019706632653061226, + "loss": 1.8666, + "step": 23 + }, + { + "epoch": 0.49, + "learning_rate": 0.00019693877551020409, + "loss": 1.8997, + "step": 24 + }, + { + "epoch": 0.51, + "learning_rate": 0.00019681122448979592, + "loss": 1.9432, + "step": 25 + }, + { + "epoch": 0.53, + "learning_rate": 0.00019668367346938777, + "loss": 1.9137, + "step": 26 + }, + { + "epoch": 0.55, + "learning_rate": 0.0001965561224489796, + "loss": 1.905, + "step": 27 + }, + { + "epoch": 0.57, + "learning_rate": 0.00019642857142857144, + "loss": 1.8708, + "step": 28 + }, + { + "epoch": 0.59, + "learning_rate": 0.00019630102040816327, + "loss": 1.9097, + "step": 29 + }, + { + "epoch": 0.61, + "learning_rate": 0.00019617346938775513, + "loss": 1.896, + "step": 30 + }, + { + "epoch": 0.63, + "learning_rate": 0.00019604591836734696, + "loss": 1.8834, + "step": 31 + }, + { + "epoch": 0.65, + "learning_rate": 0.0001959183673469388, + "loss": 1.8323, + "step": 32 + }, + { + "epoch": 0.67, + "learning_rate": 0.00019579081632653062, + "loss": 1.804, + "step": 33 + }, + { + "epoch": 0.69, + "learning_rate": 0.00019566326530612248, + "loss": 1.8906, + "step": 34 + }, + { + "epoch": 0.71, + "learning_rate": 0.00019553571428571428, + "loss": 1.8693, + "step": 35 + }, + { + "epoch": 0.73, + "learning_rate": 0.00019540816326530614, + "loss": 1.9308, + "step": 36 + }, + { + "epoch": 0.75, + "learning_rate": 0.00019528061224489797, + "loss": 1.8082, + "step": 37 + }, + { + "epoch": 0.77, + "learning_rate": 0.0001951530612244898, + "loss": 1.848, + "step": 38 + }, + { + "epoch": 0.79, + "learning_rate": 0.00019502551020408163, + "loss": 1.8866, + "step": 39 + }, + { + "epoch": 0.81, + "learning_rate": 0.0001948979591836735, + "loss": 1.7844, + "step": 40 + }, + { + "epoch": 0.83, + "learning_rate": 0.0001947704081632653, + 
"loss": 1.8485, + "step": 41 + }, + { + "epoch": 0.85, + "learning_rate": 0.00019464285714285715, + "loss": 1.7917, + "step": 42 + }, + { + "epoch": 0.87, + "learning_rate": 0.00019451530612244898, + "loss": 1.7342, + "step": 43 + }, + { + "epoch": 0.89, + "learning_rate": 0.00019438775510204084, + "loss": 1.8479, + "step": 44 + }, + { + "epoch": 0.91, + "learning_rate": 0.00019426020408163267, + "loss": 1.8639, + "step": 45 + }, + { + "epoch": 0.93, + "learning_rate": 0.0001941326530612245, + "loss": 1.8166, + "step": 46 + }, + { + "epoch": 0.95, + "learning_rate": 0.00019400510204081633, + "loss": 1.7566, + "step": 47 + }, + { + "epoch": 0.97, + "learning_rate": 0.00019387755102040816, + "loss": 1.8071, + "step": 48 + }, + { + "epoch": 0.99, + "learning_rate": 0.00019375000000000002, + "loss": 1.8612, + "step": 49 + }, + { + "epoch": 1.01, + "learning_rate": 0.00019362244897959185, + "loss": 1.7819, + "step": 50 + }, + { + "epoch": 1.03, + "learning_rate": 0.00019349489795918368, + "loss": 1.8647, + "step": 51 + }, + { + "epoch": 1.05, + "learning_rate": 0.0001933673469387755, + "loss": 1.8196, + "step": 52 + }, + { + "epoch": 1.07, + "learning_rate": 0.00019323979591836737, + "loss": 1.8027, + "step": 53 + }, + { + "epoch": 1.09, + "learning_rate": 0.00019311224489795917, + "loss": 1.8927, + "step": 54 + }, + { + "epoch": 1.11, + "learning_rate": 0.00019298469387755103, + "loss": 1.8481, + "step": 55 + }, + { + "epoch": 1.13, + "learning_rate": 0.00019285714285714286, + "loss": 1.7781, + "step": 56 + }, + { + "epoch": 1.15, + "learning_rate": 0.00019272959183673472, + "loss": 1.8101, + "step": 57 + }, + { + "epoch": 1.17, + "learning_rate": 0.00019260204081632653, + "loss": 1.7257, + "step": 58 + }, + { + "epoch": 1.19, + "learning_rate": 0.00019247448979591838, + "loss": 1.8185, + "step": 59 + }, + { + "epoch": 1.21, + "learning_rate": 0.00019234693877551021, + "loss": 1.8557, + "step": 60 + }, + { + "epoch": 1.23, + "learning_rate": 0.00019221938775510204, + "loss": 1.7418, + "step": 61 + }, + { + "epoch": 1.25, + "learning_rate": 0.00019209183673469388, + "loss": 1.6879, + "step": 62 + }, + { + "epoch": 1.27, + "learning_rate": 0.00019196428571428573, + "loss": 1.7651, + "step": 63 + }, + { + "epoch": 1.29, + "learning_rate": 0.00019183673469387756, + "loss": 1.7759, + "step": 64 + }, + { + "epoch": 1.32, + "learning_rate": 0.0001917091836734694, + "loss": 1.7691, + "step": 65 + }, + { + "epoch": 1.34, + "learning_rate": 0.00019158163265306123, + "loss": 1.7794, + "step": 66 + }, + { + "epoch": 1.36, + "learning_rate": 0.00019145408163265306, + "loss": 1.8152, + "step": 67 + }, + { + "epoch": 1.38, + "learning_rate": 0.00019132653061224492, + "loss": 1.8052, + "step": 68 + }, + { + "epoch": 1.4, + "learning_rate": 0.00019119897959183675, + "loss": 1.8054, + "step": 69 + }, + { + "epoch": 1.42, + "learning_rate": 0.00019107142857142858, + "loss": 1.8114, + "step": 70 + }, + { + "epoch": 1.44, + "learning_rate": 0.0001909438775510204, + "loss": 1.7749, + "step": 71 + }, + { + "epoch": 1.46, + "learning_rate": 0.00019081632653061227, + "loss": 1.777, + "step": 72 + }, + { + "epoch": 1.48, + "learning_rate": 0.0001906887755102041, + "loss": 1.7896, + "step": 73 + }, + { + "epoch": 1.5, + "learning_rate": 0.00019056122448979593, + "loss": 1.8335, + "step": 74 + }, + { + "epoch": 1.52, + "learning_rate": 0.00019043367346938776, + "loss": 1.8155, + "step": 75 + }, + { + "epoch": 1.54, + "learning_rate": 0.00019030612244897962, + "loss": 1.8224, + "step": 76 + }, + { + "epoch": 1.56, + 
"learning_rate": 0.00019017857142857142, + "loss": 1.7889, + "step": 77 + }, + { + "epoch": 1.58, + "learning_rate": 0.00019005102040816328, + "loss": 1.8866, + "step": 78 + }, + { + "epoch": 1.6, + "learning_rate": 0.0001899234693877551, + "loss": 1.8439, + "step": 79 + }, + { + "epoch": 1.62, + "learning_rate": 0.00018979591836734697, + "loss": 1.7906, + "step": 80 + }, + { + "epoch": 1.64, + "learning_rate": 0.00018966836734693877, + "loss": 1.8627, + "step": 81 + }, + { + "epoch": 1.66, + "learning_rate": 0.00018954081632653063, + "loss": 1.7497, + "step": 82 + }, + { + "epoch": 1.68, + "learning_rate": 0.00018941326530612246, + "loss": 1.7936, + "step": 83 + }, + { + "epoch": 1.7, + "learning_rate": 0.0001892857142857143, + "loss": 1.8341, + "step": 84 + }, + { + "epoch": 1.72, + "learning_rate": 0.00018915816326530612, + "loss": 1.7868, + "step": 85 + }, + { + "epoch": 1.74, + "learning_rate": 0.00018903061224489798, + "loss": 1.7493, + "step": 86 + }, + { + "epoch": 1.76, + "learning_rate": 0.0001889030612244898, + "loss": 1.7926, + "step": 87 + }, + { + "epoch": 1.78, + "learning_rate": 0.00018877551020408164, + "loss": 1.8278, + "step": 88 + }, + { + "epoch": 1.8, + "learning_rate": 0.00018864795918367347, + "loss": 1.7387, + "step": 89 + }, + { + "epoch": 1.82, + "learning_rate": 0.0001885204081632653, + "loss": 1.7669, + "step": 90 + }, + { + "epoch": 1.84, + "learning_rate": 0.00018839285714285716, + "loss": 1.7686, + "step": 91 + }, + { + "epoch": 1.86, + "learning_rate": 0.000188265306122449, + "loss": 1.7759, + "step": 92 + }, + { + "epoch": 1.88, + "learning_rate": 0.00018813775510204082, + "loss": 1.7016, + "step": 93 + }, + { + "epoch": 1.9, + "learning_rate": 0.00018801020408163265, + "loss": 1.8123, + "step": 94 + }, + { + "epoch": 1.92, + "learning_rate": 0.0001878826530612245, + "loss": 1.8315, + "step": 95 + }, + { + "epoch": 1.94, + "learning_rate": 0.00018775510204081634, + "loss": 1.7679, + "step": 96 + }, + { + "epoch": 1.96, + "learning_rate": 0.00018762755102040817, + "loss": 1.7874, + "step": 97 + }, + { + "epoch": 1.98, + "learning_rate": 0.0001875, + "loss": 1.8008, + "step": 98 + }, + { + "epoch": 2.0, + "learning_rate": 0.00018737244897959186, + "loss": 1.7177, + "step": 99 + }, + { + "epoch": 2.02, + "learning_rate": 0.00018724489795918367, + "loss": 1.7272, + "step": 100 + }, + { + "epoch": 2.04, + "learning_rate": 0.00018711734693877552, + "loss": 1.7848, + "step": 101 + }, + { + "epoch": 2.06, + "learning_rate": 0.00018698979591836735, + "loss": 1.744, + "step": 102 + }, + { + "epoch": 2.08, + "learning_rate": 0.00018686224489795919, + "loss": 1.7005, + "step": 103 + }, + { + "epoch": 2.1, + "learning_rate": 0.00018673469387755102, + "loss": 1.8247, + "step": 104 + }, + { + "epoch": 2.12, + "learning_rate": 0.00018660714285714287, + "loss": 1.6855, + "step": 105 + }, + { + "epoch": 2.14, + "learning_rate": 0.0001864795918367347, + "loss": 1.7627, + "step": 106 + }, + { + "epoch": 2.17, + "learning_rate": 0.00018635204081632654, + "loss": 1.7564, + "step": 107 + }, + { + "epoch": 2.19, + "learning_rate": 0.00018622448979591837, + "loss": 1.8237, + "step": 108 + }, + { + "epoch": 2.21, + "learning_rate": 0.00018609693877551022, + "loss": 1.7421, + "step": 109 + }, + { + "epoch": 2.23, + "learning_rate": 0.00018596938775510206, + "loss": 1.7517, + "step": 110 + }, + { + "epoch": 2.25, + "learning_rate": 0.0001858418367346939, + "loss": 1.7515, + "step": 111 + }, + { + "epoch": 2.27, + "learning_rate": 0.00018571428571428572, + "loss": 1.7842, + "step": 
112 + }, + { + "epoch": 2.29, + "learning_rate": 0.00018558673469387755, + "loss": 1.8001, + "step": 113 + }, + { + "epoch": 2.31, + "learning_rate": 0.0001854591836734694, + "loss": 1.7653, + "step": 114 + }, + { + "epoch": 2.33, + "learning_rate": 0.00018533163265306124, + "loss": 1.694, + "step": 115 + }, + { + "epoch": 2.35, + "learning_rate": 0.00018520408163265307, + "loss": 1.7457, + "step": 116 + }, + { + "epoch": 2.37, + "learning_rate": 0.0001850765306122449, + "loss": 1.7899, + "step": 117 + }, + { + "epoch": 2.39, + "learning_rate": 0.00018494897959183676, + "loss": 1.7473, + "step": 118 + }, + { + "epoch": 2.41, + "learning_rate": 0.0001848214285714286, + "loss": 1.6639, + "step": 119 + }, + { + "epoch": 2.43, + "learning_rate": 0.00018469387755102042, + "loss": 1.762, + "step": 120 + }, + { + "epoch": 2.45, + "learning_rate": 0.00018456632653061225, + "loss": 1.7378, + "step": 121 + }, + { + "epoch": 2.47, + "learning_rate": 0.0001844387755102041, + "loss": 1.672, + "step": 122 + }, + { + "epoch": 2.49, + "learning_rate": 0.0001843112244897959, + "loss": 1.7267, + "step": 123 + }, + { + "epoch": 2.51, + "learning_rate": 0.00018418367346938777, + "loss": 1.7825, + "step": 124 + }, + { + "epoch": 2.53, + "learning_rate": 0.0001840561224489796, + "loss": 1.7566, + "step": 125 + }, + { + "epoch": 2.55, + "learning_rate": 0.00018392857142857143, + "loss": 1.8169, + "step": 126 + }, + { + "epoch": 2.57, + "learning_rate": 0.00018380102040816326, + "loss": 1.6801, + "step": 127 + }, + { + "epoch": 2.59, + "learning_rate": 0.00018367346938775512, + "loss": 1.7292, + "step": 128 + }, + { + "epoch": 2.61, + "learning_rate": 0.00018354591836734695, + "loss": 1.737, + "step": 129 + }, + { + "epoch": 2.63, + "learning_rate": 0.00018341836734693878, + "loss": 1.7696, + "step": 130 + }, + { + "epoch": 2.65, + "learning_rate": 0.0001832908163265306, + "loss": 1.7239, + "step": 131 + }, + { + "epoch": 2.67, + "learning_rate": 0.00018316326530612247, + "loss": 1.7441, + "step": 132 + }, + { + "epoch": 2.69, + "learning_rate": 0.0001830357142857143, + "loss": 1.7825, + "step": 133 + }, + { + "epoch": 2.71, + "learning_rate": 0.00018290816326530613, + "loss": 1.7411, + "step": 134 + }, + { + "epoch": 2.73, + "learning_rate": 0.00018278061224489796, + "loss": 1.7119, + "step": 135 + }, + { + "epoch": 2.75, + "learning_rate": 0.0001826530612244898, + "loss": 1.7443, + "step": 136 + }, + { + "epoch": 2.77, + "learning_rate": 0.00018252551020408165, + "loss": 1.7197, + "step": 137 + }, + { + "epoch": 2.79, + "learning_rate": 0.00018239795918367348, + "loss": 1.7273, + "step": 138 + }, + { + "epoch": 2.81, + "learning_rate": 0.0001822704081632653, + "loss": 1.7681, + "step": 139 + }, + { + "epoch": 2.83, + "learning_rate": 0.00018214285714285714, + "loss": 1.8088, + "step": 140 + }, + { + "epoch": 2.85, + "learning_rate": 0.000182015306122449, + "loss": 1.7301, + "step": 141 + }, + { + "epoch": 2.87, + "learning_rate": 0.00018188775510204083, + "loss": 1.6853, + "step": 142 + }, + { + "epoch": 2.89, + "learning_rate": 0.00018176020408163266, + "loss": 1.6966, + "step": 143 + }, + { + "epoch": 2.91, + "learning_rate": 0.0001816326530612245, + "loss": 1.7938, + "step": 144 + }, + { + "epoch": 2.93, + "learning_rate": 0.00018150510204081635, + "loss": 1.7639, + "step": 145 + }, + { + "epoch": 2.95, + "learning_rate": 0.00018137755102040816, + "loss": 1.7527, + "step": 146 + }, + { + "epoch": 2.97, + "learning_rate": 0.00018125000000000001, + "loss": 1.7386, + "step": 147 + }, + { + "epoch": 2.99, + 
"learning_rate": 0.00018112244897959185, + "loss": 1.7223, + "step": 148 + }, + { + "epoch": 3.01, + "learning_rate": 0.00018099489795918368, + "loss": 1.7571, + "step": 149 + }, + { + "epoch": 3.04, + "learning_rate": 0.0001808673469387755, + "loss": 1.7054, + "step": 150 + }, + { + "epoch": 3.06, + "learning_rate": 0.00018073979591836737, + "loss": 1.6581, + "step": 151 + }, + { + "epoch": 3.08, + "learning_rate": 0.00018061224489795917, + "loss": 1.681, + "step": 152 + }, + { + "epoch": 3.1, + "learning_rate": 0.00018048469387755103, + "loss": 1.7425, + "step": 153 + }, + { + "epoch": 3.12, + "learning_rate": 0.00018035714285714286, + "loss": 1.7108, + "step": 154 + }, + { + "epoch": 3.14, + "learning_rate": 0.00018022959183673472, + "loss": 1.7194, + "step": 155 + }, + { + "epoch": 3.16, + "learning_rate": 0.00018010204081632655, + "loss": 1.6953, + "step": 156 + }, + { + "epoch": 3.18, + "learning_rate": 0.00017997448979591838, + "loss": 1.669, + "step": 157 + }, + { + "epoch": 3.2, + "learning_rate": 0.0001798469387755102, + "loss": 1.744, + "step": 158 + }, + { + "epoch": 3.22, + "learning_rate": 0.00017971938775510204, + "loss": 1.6467, + "step": 159 + }, + { + "epoch": 3.24, + "learning_rate": 0.0001795918367346939, + "loss": 1.7103, + "step": 160 + }, + { + "epoch": 3.26, + "learning_rate": 0.00017946428571428573, + "loss": 1.6662, + "step": 161 + }, + { + "epoch": 3.28, + "learning_rate": 0.00017933673469387756, + "loss": 1.6657, + "step": 162 + }, + { + "epoch": 3.3, + "learning_rate": 0.0001792091836734694, + "loss": 1.791, + "step": 163 + }, + { + "epoch": 3.32, + "learning_rate": 0.00017908163265306125, + "loss": 1.7704, + "step": 164 + }, + { + "epoch": 3.34, + "learning_rate": 0.00017895408163265305, + "loss": 1.7229, + "step": 165 + }, + { + "epoch": 3.36, + "learning_rate": 0.0001788265306122449, + "loss": 1.76, + "step": 166 + }, + { + "epoch": 3.38, + "learning_rate": 0.00017869897959183674, + "loss": 1.6482, + "step": 167 + }, + { + "epoch": 3.4, + "learning_rate": 0.0001785714285714286, + "loss": 1.8076, + "step": 168 + }, + { + "epoch": 3.42, + "learning_rate": 0.0001784438775510204, + "loss": 1.7368, + "step": 169 + }, + { + "epoch": 3.44, + "learning_rate": 0.00017831632653061226, + "loss": 1.6264, + "step": 170 + }, + { + "epoch": 3.46, + "learning_rate": 0.0001781887755102041, + "loss": 1.6289, + "step": 171 + }, + { + "epoch": 3.48, + "learning_rate": 0.00017806122448979592, + "loss": 1.7913, + "step": 172 + }, + { + "epoch": 3.5, + "learning_rate": 0.00017793367346938775, + "loss": 1.6985, + "step": 173 + }, + { + "epoch": 3.52, + "learning_rate": 0.0001778061224489796, + "loss": 1.6936, + "step": 174 + }, + { + "epoch": 3.54, + "learning_rate": 0.00017767857142857141, + "loss": 1.8068, + "step": 175 + }, + { + "epoch": 3.56, + "learning_rate": 0.00017755102040816327, + "loss": 1.7243, + "step": 176 + }, + { + "epoch": 3.58, + "learning_rate": 0.0001774234693877551, + "loss": 1.6893, + "step": 177 + }, + { + "epoch": 3.6, + "learning_rate": 0.00017729591836734696, + "loss": 1.8122, + "step": 178 + }, + { + "epoch": 3.62, + "learning_rate": 0.0001771683673469388, + "loss": 1.6562, + "step": 179 + }, + { + "epoch": 3.64, + "learning_rate": 0.00017704081632653062, + "loss": 1.6999, + "step": 180 + }, + { + "epoch": 3.66, + "learning_rate": 0.00017691326530612245, + "loss": 1.7229, + "step": 181 + }, + { + "epoch": 3.68, + "learning_rate": 0.00017678571428571428, + "loss": 1.6764, + "step": 182 + }, + { + "epoch": 3.7, + "learning_rate": 0.00017665816326530614, + 
"loss": 1.6982, + "step": 183 + }, + { + "epoch": 3.72, + "learning_rate": 0.00017653061224489797, + "loss": 1.696, + "step": 184 + }, + { + "epoch": 3.74, + "learning_rate": 0.0001764030612244898, + "loss": 1.6797, + "step": 185 + }, + { + "epoch": 3.76, + "learning_rate": 0.00017627551020408164, + "loss": 1.637, + "step": 186 + }, + { + "epoch": 3.78, + "learning_rate": 0.0001761479591836735, + "loss": 1.7074, + "step": 187 + }, + { + "epoch": 3.8, + "learning_rate": 0.0001760204081632653, + "loss": 1.705, + "step": 188 + }, + { + "epoch": 3.82, + "learning_rate": 0.00017589285714285716, + "loss": 1.6153, + "step": 189 + }, + { + "epoch": 3.84, + "learning_rate": 0.00017576530612244899, + "loss": 1.7354, + "step": 190 + }, + { + "epoch": 3.86, + "learning_rate": 0.00017563775510204084, + "loss": 1.6941, + "step": 191 + }, + { + "epoch": 3.88, + "learning_rate": 0.00017551020408163265, + "loss": 1.7231, + "step": 192 + }, + { + "epoch": 3.91, + "learning_rate": 0.0001753826530612245, + "loss": 1.7663, + "step": 193 + }, + { + "epoch": 3.93, + "learning_rate": 0.00017525510204081634, + "loss": 1.6532, + "step": 194 + }, + { + "epoch": 3.95, + "learning_rate": 0.00017512755102040817, + "loss": 1.7115, + "step": 195 + }, + { + "epoch": 3.97, + "learning_rate": 0.000175, + "loss": 1.6955, + "step": 196 + }, + { + "epoch": 3.99, + "learning_rate": 0.00017487244897959186, + "loss": 1.6863, + "step": 197 + }, + { + "epoch": 4.01, + "learning_rate": 0.00017474489795918366, + "loss": 1.7012, + "step": 198 + }, + { + "epoch": 4.03, + "learning_rate": 0.00017461734693877552, + "loss": 1.5927, + "step": 199 + }, + { + "epoch": 4.05, + "learning_rate": 0.00017448979591836735, + "loss": 1.6272, + "step": 200 + }, + { + "epoch": 4.07, + "learning_rate": 0.00017436224489795918, + "loss": 1.5994, + "step": 201 + }, + { + "epoch": 4.09, + "learning_rate": 0.00017423469387755104, + "loss": 1.7141, + "step": 202 + }, + { + "epoch": 4.11, + "learning_rate": 0.00017410714285714287, + "loss": 1.7547, + "step": 203 + }, + { + "epoch": 4.13, + "learning_rate": 0.0001739795918367347, + "loss": 1.6254, + "step": 204 + }, + { + "epoch": 4.15, + "learning_rate": 0.00017385204081632653, + "loss": 1.6686, + "step": 205 + }, + { + "epoch": 4.17, + "learning_rate": 0.0001737244897959184, + "loss": 1.6684, + "step": 206 + }, + { + "epoch": 4.19, + "learning_rate": 0.00017359693877551022, + "loss": 1.6724, + "step": 207 + }, + { + "epoch": 4.21, + "learning_rate": 0.00017346938775510205, + "loss": 1.7361, + "step": 208 + }, + { + "epoch": 4.23, + "learning_rate": 0.00017334183673469388, + "loss": 1.7167, + "step": 209 + }, + { + "epoch": 4.25, + "learning_rate": 0.00017321428571428574, + "loss": 1.7226, + "step": 210 + }, + { + "epoch": 4.27, + "learning_rate": 0.00017308673469387754, + "loss": 1.7133, + "step": 211 + }, + { + "epoch": 4.29, + "learning_rate": 0.0001729591836734694, + "loss": 1.649, + "step": 212 + }, + { + "epoch": 4.31, + "learning_rate": 0.00017283163265306123, + "loss": 1.7104, + "step": 213 + }, + { + "epoch": 4.33, + "learning_rate": 0.00017270408163265306, + "loss": 1.6861, + "step": 214 + }, + { + "epoch": 4.35, + "learning_rate": 0.0001725765306122449, + "loss": 1.648, + "step": 215 + }, + { + "epoch": 4.37, + "learning_rate": 0.00017244897959183675, + "loss": 1.6215, + "step": 216 + }, + { + "epoch": 4.39, + "learning_rate": 0.00017232142857142858, + "loss": 1.6334, + "step": 217 + }, + { + "epoch": 4.41, + "learning_rate": 0.0001721938775510204, + "loss": 1.6283, + "step": 218 + }, + { + 
"epoch": 4.43, + "learning_rate": 0.00017206632653061224, + "loss": 1.6462, + "step": 219 + }, + { + "epoch": 4.45, + "learning_rate": 0.0001719387755102041, + "loss": 1.7233, + "step": 220 + }, + { + "epoch": 4.47, + "learning_rate": 0.0001718112244897959, + "loss": 1.7839, + "step": 221 + }, + { + "epoch": 4.49, + "learning_rate": 0.00017168367346938776, + "loss": 1.7204, + "step": 222 + }, + { + "epoch": 4.51, + "learning_rate": 0.0001715561224489796, + "loss": 1.7671, + "step": 223 + }, + { + "epoch": 4.53, + "learning_rate": 0.00017142857142857143, + "loss": 1.6824, + "step": 224 + }, + { + "epoch": 4.55, + "learning_rate": 0.00017130102040816328, + "loss": 1.7068, + "step": 225 + }, + { + "epoch": 4.57, + "learning_rate": 0.00017117346938775511, + "loss": 1.6515, + "step": 226 + }, + { + "epoch": 4.59, + "learning_rate": 0.00017104591836734694, + "loss": 1.6586, + "step": 227 + }, + { + "epoch": 4.61, + "learning_rate": 0.00017091836734693878, + "loss": 1.6355, + "step": 228 + }, + { + "epoch": 4.63, + "learning_rate": 0.00017079081632653063, + "loss": 1.7173, + "step": 229 + }, + { + "epoch": 4.65, + "learning_rate": 0.00017066326530612246, + "loss": 1.6585, + "step": 230 + }, + { + "epoch": 4.67, + "learning_rate": 0.0001705357142857143, + "loss": 1.5856, + "step": 231 + }, + { + "epoch": 4.69, + "learning_rate": 0.00017040816326530613, + "loss": 1.5923, + "step": 232 + }, + { + "epoch": 4.71, + "learning_rate": 0.00017028061224489798, + "loss": 1.7128, + "step": 233 + }, + { + "epoch": 4.73, + "learning_rate": 0.0001701530612244898, + "loss": 1.6971, + "step": 234 + }, + { + "epoch": 4.75, + "learning_rate": 0.00017002551020408165, + "loss": 1.6416, + "step": 235 + }, + { + "epoch": 4.78, + "learning_rate": 0.00016989795918367348, + "loss": 1.645, + "step": 236 + }, + { + "epoch": 4.8, + "learning_rate": 0.0001697704081632653, + "loss": 1.6792, + "step": 237 + }, + { + "epoch": 4.82, + "learning_rate": 0.00016964285714285714, + "loss": 1.6522, + "step": 238 + }, + { + "epoch": 4.84, + "learning_rate": 0.000169515306122449, + "loss": 1.6315, + "step": 239 + }, + { + "epoch": 4.86, + "learning_rate": 0.00016938775510204083, + "loss": 1.6622, + "step": 240 + }, + { + "epoch": 4.88, + "learning_rate": 0.00016926020408163266, + "loss": 1.6566, + "step": 241 + }, + { + "epoch": 4.9, + "learning_rate": 0.0001691326530612245, + "loss": 1.7141, + "step": 242 + }, + { + "epoch": 4.92, + "learning_rate": 0.00016900510204081635, + "loss": 1.5873, + "step": 243 + }, + { + "epoch": 4.94, + "learning_rate": 0.00016887755102040818, + "loss": 1.6571, + "step": 244 + }, + { + "epoch": 4.96, + "learning_rate": 0.00016875, + "loss": 1.6829, + "step": 245 + }, + { + "epoch": 4.98, + "learning_rate": 0.00016862244897959184, + "loss": 1.6935, + "step": 246 + }, + { + "epoch": 5.0, + "learning_rate": 0.00016849489795918367, + "loss": 1.6782, + "step": 247 + }, + { + "epoch": 5.02, + "learning_rate": 0.00016836734693877553, + "loss": 1.622, + "step": 248 + }, + { + "epoch": 5.04, + "learning_rate": 0.00016823979591836736, + "loss": 1.6596, + "step": 249 + }, + { + "epoch": 5.06, + "learning_rate": 0.0001681122448979592, + "loss": 1.5821, + "step": 250 + }, + { + "epoch": 5.08, + "learning_rate": 0.00016798469387755102, + "loss": 1.7292, + "step": 251 + }, + { + "epoch": 5.1, + "learning_rate": 0.00016785714285714288, + "loss": 1.646, + "step": 252 + }, + { + "epoch": 5.12, + "learning_rate": 0.0001677295918367347, + "loss": 1.6969, + "step": 253 + }, + { + "epoch": 5.14, + "learning_rate": 
0.00016760204081632654, + "loss": 1.6082, + "step": 254 + }, + { + "epoch": 5.16, + "learning_rate": 0.00016747448979591837, + "loss": 1.5843, + "step": 255 + }, + { + "epoch": 5.18, + "learning_rate": 0.00016734693877551023, + "loss": 1.6827, + "step": 256 + }, + { + "epoch": 5.2, + "learning_rate": 0.00016721938775510203, + "loss": 1.5824, + "step": 257 + }, + { + "epoch": 5.22, + "learning_rate": 0.0001670918367346939, + "loss": 1.6795, + "step": 258 + }, + { + "epoch": 5.24, + "learning_rate": 0.00016696428571428572, + "loss": 1.5639, + "step": 259 + }, + { + "epoch": 5.26, + "learning_rate": 0.00016683673469387755, + "loss": 1.592, + "step": 260 + }, + { + "epoch": 5.28, + "learning_rate": 0.00016670918367346938, + "loss": 1.65, + "step": 261 + }, + { + "epoch": 5.3, + "learning_rate": 0.00016658163265306124, + "loss": 1.5592, + "step": 262 + }, + { + "epoch": 5.32, + "learning_rate": 0.00016645408163265305, + "loss": 1.5091, + "step": 263 + }, + { + "epoch": 5.34, + "learning_rate": 0.0001663265306122449, + "loss": 1.6138, + "step": 264 + }, + { + "epoch": 5.36, + "learning_rate": 0.00016619897959183673, + "loss": 1.625, + "step": 265 + }, + { + "epoch": 5.38, + "learning_rate": 0.0001660714285714286, + "loss": 1.5757, + "step": 266 + }, + { + "epoch": 5.4, + "learning_rate": 0.00016594387755102042, + "loss": 1.6372, + "step": 267 + }, + { + "epoch": 5.42, + "learning_rate": 0.00016581632653061225, + "loss": 1.5891, + "step": 268 + }, + { + "epoch": 5.44, + "learning_rate": 0.00016568877551020409, + "loss": 1.6893, + "step": 269 + }, + { + "epoch": 5.46, + "learning_rate": 0.00016556122448979592, + "loss": 1.6662, + "step": 270 + }, + { + "epoch": 5.48, + "learning_rate": 0.00016543367346938777, + "loss": 1.7132, + "step": 271 + }, + { + "epoch": 5.5, + "learning_rate": 0.0001653061224489796, + "loss": 1.5835, + "step": 272 + }, + { + "epoch": 5.52, + "learning_rate": 0.00016517857142857144, + "loss": 1.6342, + "step": 273 + }, + { + "epoch": 5.54, + "learning_rate": 0.00016505102040816327, + "loss": 1.6717, + "step": 274 + }, + { + "epoch": 5.56, + "learning_rate": 0.00016492346938775512, + "loss": 1.6248, + "step": 275 + }, + { + "epoch": 5.58, + "learning_rate": 0.00016479591836734696, + "loss": 1.6117, + "step": 276 + }, + { + "epoch": 5.6, + "learning_rate": 0.0001646683673469388, + "loss": 1.6798, + "step": 277 + }, + { + "epoch": 5.63, + "learning_rate": 0.00016454081632653062, + "loss": 1.6406, + "step": 278 + }, + { + "epoch": 5.65, + "learning_rate": 0.00016441326530612248, + "loss": 1.6512, + "step": 279 + }, + { + "epoch": 5.67, + "learning_rate": 0.00016428571428571428, + "loss": 1.6102, + "step": 280 + }, + { + "epoch": 5.69, + "learning_rate": 0.00016415816326530614, + "loss": 1.6113, + "step": 281 + }, + { + "epoch": 5.71, + "learning_rate": 0.00016403061224489797, + "loss": 1.7116, + "step": 282 + }, + { + "epoch": 5.73, + "learning_rate": 0.0001639030612244898, + "loss": 1.6846, + "step": 283 + }, + { + "epoch": 5.75, + "learning_rate": 0.00016377551020408163, + "loss": 1.6911, + "step": 284 + }, + { + "epoch": 5.77, + "learning_rate": 0.0001636479591836735, + "loss": 1.6202, + "step": 285 + }, + { + "epoch": 5.79, + "learning_rate": 0.0001635204081632653, + "loss": 1.5715, + "step": 286 + }, + { + "epoch": 5.81, + "learning_rate": 0.00016339285714285715, + "loss": 1.6461, + "step": 287 + }, + { + "epoch": 5.83, + "learning_rate": 0.00016326530612244898, + "loss": 1.6624, + "step": 288 + }, + { + "epoch": 5.85, + "learning_rate": 0.00016313775510204084, + "loss": 
1.6535, + "step": 289 + }, + { + "epoch": 5.87, + "learning_rate": 0.00016301020408163267, + "loss": 1.6275, + "step": 290 + }, + { + "epoch": 5.89, + "learning_rate": 0.0001628826530612245, + "loss": 1.6636, + "step": 291 + }, + { + "epoch": 5.91, + "learning_rate": 0.00016275510204081633, + "loss": 1.6546, + "step": 292 + }, + { + "epoch": 5.93, + "learning_rate": 0.00016262755102040816, + "loss": 1.7274, + "step": 293 + }, + { + "epoch": 5.95, + "learning_rate": 0.00016250000000000002, + "loss": 1.5901, + "step": 294 + }, + { + "epoch": 5.97, + "learning_rate": 0.00016237244897959185, + "loss": 1.6046, + "step": 295 + }, + { + "epoch": 5.99, + "learning_rate": 0.00016224489795918368, + "loss": 1.5828, + "step": 296 + }, + { + "epoch": 6.01, + "learning_rate": 0.0001621173469387755, + "loss": 1.6435, + "step": 297 + }, + { + "epoch": 6.03, + "learning_rate": 0.00016198979591836737, + "loss": 1.6263, + "step": 298 + }, + { + "epoch": 6.05, + "learning_rate": 0.00016186224489795917, + "loss": 1.4944, + "step": 299 + }, + { + "epoch": 6.07, + "learning_rate": 0.00016173469387755103, + "loss": 1.6286, + "step": 300 + }, + { + "epoch": 6.09, + "learning_rate": 0.00016160714285714286, + "loss": 1.694, + "step": 301 + }, + { + "epoch": 6.11, + "learning_rate": 0.00016147959183673472, + "loss": 1.6197, + "step": 302 + }, + { + "epoch": 6.13, + "learning_rate": 0.00016135204081632652, + "loss": 1.5597, + "step": 303 + }, + { + "epoch": 6.15, + "learning_rate": 0.00016122448979591838, + "loss": 1.5487, + "step": 304 + }, + { + "epoch": 6.17, + "learning_rate": 0.0001610969387755102, + "loss": 1.5769, + "step": 305 + }, + { + "epoch": 6.19, + "learning_rate": 0.00016096938775510204, + "loss": 1.6367, + "step": 306 + }, + { + "epoch": 6.21, + "learning_rate": 0.00016084183673469388, + "loss": 1.583, + "step": 307 + }, + { + "epoch": 6.23, + "learning_rate": 0.00016071428571428573, + "loss": 1.6201, + "step": 308 + }, + { + "epoch": 6.25, + "learning_rate": 0.00016058673469387754, + "loss": 1.6586, + "step": 309 + }, + { + "epoch": 6.27, + "learning_rate": 0.0001604591836734694, + "loss": 1.6711, + "step": 310 + }, + { + "epoch": 6.29, + "learning_rate": 0.00016033163265306123, + "loss": 1.6402, + "step": 311 + }, + { + "epoch": 6.31, + "learning_rate": 0.00016020408163265306, + "loss": 1.5247, + "step": 312 + }, + { + "epoch": 6.33, + "learning_rate": 0.00016007653061224491, + "loss": 1.5356, + "step": 313 + }, + { + "epoch": 6.35, + "learning_rate": 0.00015994897959183675, + "loss": 1.564, + "step": 314 + }, + { + "epoch": 6.37, + "learning_rate": 0.00015982142857142858, + "loss": 1.563, + "step": 315 + }, + { + "epoch": 6.39, + "learning_rate": 0.0001596938775510204, + "loss": 1.5198, + "step": 316 + }, + { + "epoch": 6.41, + "learning_rate": 0.00015956632653061227, + "loss": 1.6558, + "step": 317 + }, + { + "epoch": 6.43, + "learning_rate": 0.0001594387755102041, + "loss": 1.5534, + "step": 318 + }, + { + "epoch": 6.45, + "learning_rate": 0.00015931122448979593, + "loss": 1.6239, + "step": 319 + }, + { + "epoch": 6.47, + "learning_rate": 0.00015918367346938776, + "loss": 1.5645, + "step": 320 + }, + { + "epoch": 6.5, + "learning_rate": 0.00015905612244897962, + "loss": 1.5713, + "step": 321 + }, + { + "epoch": 6.52, + "learning_rate": 0.00015892857142857142, + "loss": 1.6176, + "step": 322 + }, + { + "epoch": 6.54, + "learning_rate": 0.00015880102040816328, + "loss": 1.502, + "step": 323 + }, + { + "epoch": 6.56, + "learning_rate": 0.0001586734693877551, + "loss": 1.645, + "step": 324 + }, + { + 
"epoch": 6.58, + "learning_rate": 0.00015854591836734697, + "loss": 1.5904, + "step": 325 + }, + { + "epoch": 6.6, + "learning_rate": 0.00015841836734693877, + "loss": 1.6149, + "step": 326 + }, + { + "epoch": 6.62, + "learning_rate": 0.00015829081632653063, + "loss": 1.6757, + "step": 327 + }, + { + "epoch": 6.64, + "learning_rate": 0.00015816326530612246, + "loss": 1.541, + "step": 328 + }, + { + "epoch": 6.66, + "learning_rate": 0.0001580357142857143, + "loss": 1.5898, + "step": 329 + }, + { + "epoch": 6.68, + "learning_rate": 0.00015790816326530612, + "loss": 1.5441, + "step": 330 + }, + { + "epoch": 6.7, + "learning_rate": 0.00015778061224489798, + "loss": 1.61, + "step": 331 + }, + { + "epoch": 6.72, + "learning_rate": 0.00015765306122448978, + "loss": 1.615, + "step": 332 + }, + { + "epoch": 6.74, + "learning_rate": 0.00015752551020408164, + "loss": 1.6575, + "step": 333 + }, + { + "epoch": 6.76, + "learning_rate": 0.00015739795918367347, + "loss": 1.6702, + "step": 334 + }, + { + "epoch": 6.78, + "learning_rate": 0.0001572704081632653, + "loss": 1.6009, + "step": 335 + }, + { + "epoch": 6.8, + "learning_rate": 0.00015714285714285716, + "loss": 1.5568, + "step": 336 + }, + { + "epoch": 6.82, + "learning_rate": 0.000157015306122449, + "loss": 1.619, + "step": 337 + }, + { + "epoch": 6.84, + "learning_rate": 0.00015688775510204082, + "loss": 1.5563, + "step": 338 + }, + { + "epoch": 6.86, + "learning_rate": 0.00015676020408163265, + "loss": 1.6328, + "step": 339 + }, + { + "epoch": 6.88, + "learning_rate": 0.0001566326530612245, + "loss": 1.5726, + "step": 340 + }, + { + "epoch": 6.9, + "learning_rate": 0.00015650510204081634, + "loss": 1.6199, + "step": 341 + }, + { + "epoch": 6.92, + "learning_rate": 0.00015637755102040817, + "loss": 1.5722, + "step": 342 + }, + { + "epoch": 6.94, + "learning_rate": 0.00015625, + "loss": 1.5685, + "step": 343 + }, + { + "epoch": 6.96, + "learning_rate": 0.00015612244897959186, + "loss": 1.5615, + "step": 344 + }, + { + "epoch": 6.98, + "learning_rate": 0.00015599489795918366, + "loss": 1.5994, + "step": 345 + }, + { + "epoch": 7.0, + "learning_rate": 0.00015586734693877552, + "loss": 1.5579, + "step": 346 + }, + { + "epoch": 7.02, + "learning_rate": 0.00015573979591836735, + "loss": 1.547, + "step": 347 + }, + { + "epoch": 7.04, + "learning_rate": 0.00015561224489795918, + "loss": 1.5292, + "step": 348 + }, + { + "epoch": 7.06, + "learning_rate": 0.00015548469387755102, + "loss": 1.6032, + "step": 349 + }, + { + "epoch": 7.08, + "learning_rate": 0.00015535714285714287, + "loss": 1.5149, + "step": 350 + }, + { + "epoch": 7.1, + "learning_rate": 0.0001552295918367347, + "loss": 1.6093, + "step": 351 + }, + { + "epoch": 7.12, + "learning_rate": 0.00015510204081632654, + "loss": 1.5421, + "step": 352 + }, + { + "epoch": 7.14, + "learning_rate": 0.00015497448979591837, + "loss": 1.5733, + "step": 353 + }, + { + "epoch": 7.16, + "learning_rate": 0.00015484693877551022, + "loss": 1.5703, + "step": 354 + }, + { + "epoch": 7.18, + "learning_rate": 0.00015471938775510203, + "loss": 1.6141, + "step": 355 + }, + { + "epoch": 7.2, + "learning_rate": 0.00015459183673469389, + "loss": 1.5526, + "step": 356 + }, + { + "epoch": 7.22, + "learning_rate": 0.00015446428571428572, + "loss": 1.5347, + "step": 357 + }, + { + "epoch": 7.24, + "learning_rate": 0.00015433673469387755, + "loss": 1.5682, + "step": 358 + }, + { + "epoch": 7.26, + "learning_rate": 0.0001542091836734694, + "loss": 1.5292, + "step": 359 + }, + { + "epoch": 7.28, + "learning_rate": 
0.00015408163265306124, + "loss": 1.499, + "step": 360 + }, + { + "epoch": 7.3, + "learning_rate": 0.00015395408163265307, + "loss": 1.5624, + "step": 361 + }, + { + "epoch": 7.32, + "learning_rate": 0.0001538265306122449, + "loss": 1.627, + "step": 362 + }, + { + "epoch": 7.34, + "learning_rate": 0.00015369897959183676, + "loss": 1.5327, + "step": 363 + }, + { + "epoch": 7.37, + "learning_rate": 0.0001535714285714286, + "loss": 1.5622, + "step": 364 + }, + { + "epoch": 7.39, + "learning_rate": 0.00015344387755102042, + "loss": 1.5659, + "step": 365 + }, + { + "epoch": 7.41, + "learning_rate": 0.00015331632653061225, + "loss": 1.5019, + "step": 366 + }, + { + "epoch": 7.43, + "learning_rate": 0.0001531887755102041, + "loss": 1.5921, + "step": 367 + }, + { + "epoch": 7.45, + "learning_rate": 0.0001530612244897959, + "loss": 1.5914, + "step": 368 + }, + { + "epoch": 7.47, + "learning_rate": 0.00015293367346938777, + "loss": 1.5045, + "step": 369 + }, + { + "epoch": 7.49, + "learning_rate": 0.0001528061224489796, + "loss": 1.6209, + "step": 370 + }, + { + "epoch": 7.51, + "learning_rate": 0.00015267857142857143, + "loss": 1.5198, + "step": 371 + }, + { + "epoch": 7.53, + "learning_rate": 0.00015255102040816326, + "loss": 1.5363, + "step": 372 + }, + { + "epoch": 7.55, + "learning_rate": 0.00015242346938775512, + "loss": 1.5391, + "step": 373 + }, + { + "epoch": 7.57, + "learning_rate": 0.00015229591836734695, + "loss": 1.4546, + "step": 374 + }, + { + "epoch": 7.59, + "learning_rate": 0.00015216836734693878, + "loss": 1.5546, + "step": 375 + }, + { + "epoch": 7.61, + "learning_rate": 0.0001520408163265306, + "loss": 1.5629, + "step": 376 + }, + { + "epoch": 7.63, + "learning_rate": 0.00015191326530612247, + "loss": 1.6002, + "step": 377 + }, + { + "epoch": 7.65, + "learning_rate": 0.00015178571428571427, + "loss": 1.5543, + "step": 378 + }, + { + "epoch": 7.67, + "learning_rate": 0.00015165816326530613, + "loss": 1.5925, + "step": 379 + }, + { + "epoch": 7.69, + "learning_rate": 0.00015153061224489796, + "loss": 1.5631, + "step": 380 + }, + { + "epoch": 7.71, + "learning_rate": 0.0001514030612244898, + "loss": 1.5677, + "step": 381 + }, + { + "epoch": 7.73, + "learning_rate": 0.00015127551020408165, + "loss": 1.5828, + "step": 382 + }, + { + "epoch": 7.75, + "learning_rate": 0.00015114795918367348, + "loss": 1.6494, + "step": 383 + }, + { + "epoch": 7.77, + "learning_rate": 0.0001510204081632653, + "loss": 1.553, + "step": 384 + }, + { + "epoch": 7.79, + "learning_rate": 0.00015089285714285714, + "loss": 1.6156, + "step": 385 + }, + { + "epoch": 7.81, + "learning_rate": 0.000150765306122449, + "loss": 1.5001, + "step": 386 + }, + { + "epoch": 7.83, + "learning_rate": 0.00015063775510204083, + "loss": 1.5321, + "step": 387 + }, + { + "epoch": 7.85, + "learning_rate": 0.00015051020408163266, + "loss": 1.5307, + "step": 388 + }, + { + "epoch": 7.87, + "learning_rate": 0.0001503826530612245, + "loss": 1.5639, + "step": 389 + }, + { + "epoch": 7.89, + "learning_rate": 0.00015025510204081635, + "loss": 1.517, + "step": 390 + }, + { + "epoch": 7.91, + "learning_rate": 0.00015012755102040816, + "loss": 1.4776, + "step": 391 + }, + { + "epoch": 7.93, + "learning_rate": 0.00015000000000000001, + "loss": 1.5368, + "step": 392 + }, + { + "epoch": 7.95, + "learning_rate": 0.00014987244897959184, + "loss": 1.5636, + "step": 393 + }, + { + "epoch": 7.97, + "learning_rate": 0.00014974489795918368, + "loss": 1.6004, + "step": 394 + }, + { + "epoch": 7.99, + "learning_rate": 0.0001496173469387755, + "loss": 
1.5524, + "step": 395 + }, + { + "epoch": 8.01, + "learning_rate": 0.00014948979591836736, + "loss": 1.5307, + "step": 396 + }, + { + "epoch": 8.03, + "learning_rate": 0.00014936224489795917, + "loss": 1.5123, + "step": 397 + }, + { + "epoch": 8.05, + "learning_rate": 0.00014923469387755103, + "loss": 1.5132, + "step": 398 + }, + { + "epoch": 8.07, + "learning_rate": 0.00014910714285714286, + "loss": 1.5109, + "step": 399 + }, + { + "epoch": 8.09, + "learning_rate": 0.00014897959183673472, + "loss": 1.5302, + "step": 400 + }, + { + "epoch": 8.11, + "learning_rate": 0.00014885204081632652, + "loss": 1.5238, + "step": 401 + }, + { + "epoch": 8.13, + "learning_rate": 0.00014872448979591838, + "loss": 1.4781, + "step": 402 + }, + { + "epoch": 8.15, + "learning_rate": 0.0001485969387755102, + "loss": 1.5446, + "step": 403 + }, + { + "epoch": 8.17, + "learning_rate": 0.00014846938775510204, + "loss": 1.5, + "step": 404 + }, + { + "epoch": 8.19, + "learning_rate": 0.0001483418367346939, + "loss": 1.5458, + "step": 405 + }, + { + "epoch": 8.21, + "learning_rate": 0.00014821428571428573, + "loss": 1.5257, + "step": 406 + }, + { + "epoch": 8.24, + "learning_rate": 0.00014808673469387756, + "loss": 1.4607, + "step": 407 + }, + { + "epoch": 8.26, + "learning_rate": 0.0001479591836734694, + "loss": 1.4282, + "step": 408 + }, + { + "epoch": 8.28, + "learning_rate": 0.00014783163265306125, + "loss": 1.4519, + "step": 409 + }, + { + "epoch": 8.3, + "learning_rate": 0.00014770408163265305, + "loss": 1.475, + "step": 410 + }, + { + "epoch": 8.32, + "learning_rate": 0.0001475765306122449, + "loss": 1.5425, + "step": 411 + }, + { + "epoch": 8.34, + "learning_rate": 0.00014744897959183674, + "loss": 1.5407, + "step": 412 + }, + { + "epoch": 8.36, + "learning_rate": 0.0001473214285714286, + "loss": 1.5698, + "step": 413 + }, + { + "epoch": 8.38, + "learning_rate": 0.0001471938775510204, + "loss": 1.4282, + "step": 414 + }, + { + "epoch": 8.4, + "learning_rate": 0.00014706632653061226, + "loss": 1.5301, + "step": 415 + }, + { + "epoch": 8.42, + "learning_rate": 0.0001469387755102041, + "loss": 1.5083, + "step": 416 + }, + { + "epoch": 8.44, + "learning_rate": 0.00014681122448979592, + "loss": 1.5712, + "step": 417 + }, + { + "epoch": 8.46, + "learning_rate": 0.00014668367346938775, + "loss": 1.4363, + "step": 418 + }, + { + "epoch": 8.48, + "learning_rate": 0.0001465561224489796, + "loss": 1.4463, + "step": 419 + }, + { + "epoch": 8.5, + "learning_rate": 0.00014642857142857141, + "loss": 1.4738, + "step": 420 + }, + { + "epoch": 8.52, + "learning_rate": 0.00014630102040816327, + "loss": 1.5396, + "step": 421 + }, + { + "epoch": 8.54, + "learning_rate": 0.0001461734693877551, + "loss": 1.4384, + "step": 422 + }, + { + "epoch": 8.56, + "learning_rate": 0.00014604591836734696, + "loss": 1.5345, + "step": 423 + }, + { + "epoch": 8.58, + "learning_rate": 0.0001459183673469388, + "loss": 1.5355, + "step": 424 + }, + { + "epoch": 8.6, + "learning_rate": 0.00014579081632653062, + "loss": 1.5188, + "step": 425 + }, + { + "epoch": 8.62, + "learning_rate": 0.00014566326530612245, + "loss": 1.5575, + "step": 426 + }, + { + "epoch": 8.64, + "learning_rate": 0.00014553571428571428, + "loss": 1.5279, + "step": 427 + }, + { + "epoch": 8.66, + "learning_rate": 0.00014540816326530614, + "loss": 1.5484, + "step": 428 + }, + { + "epoch": 8.68, + "learning_rate": 0.00014528061224489797, + "loss": 1.4878, + "step": 429 + }, + { + "epoch": 8.7, + "learning_rate": 0.0001451530612244898, + "loss": 1.503, + "step": 430 + }, + { + 
"epoch": 8.72, + "learning_rate": 0.00014502551020408163, + "loss": 1.4723, + "step": 431 + }, + { + "epoch": 8.74, + "learning_rate": 0.0001448979591836735, + "loss": 1.5579, + "step": 432 + }, + { + "epoch": 8.76, + "learning_rate": 0.0001447704081632653, + "loss": 1.4789, + "step": 433 + }, + { + "epoch": 8.78, + "learning_rate": 0.00014464285714285715, + "loss": 1.5501, + "step": 434 + }, + { + "epoch": 8.8, + "learning_rate": 0.00014451530612244899, + "loss": 1.5204, + "step": 435 + }, + { + "epoch": 8.82, + "learning_rate": 0.00014438775510204084, + "loss": 1.5489, + "step": 436 + }, + { + "epoch": 8.84, + "learning_rate": 0.00014426020408163265, + "loss": 1.5464, + "step": 437 + }, + { + "epoch": 8.86, + "learning_rate": 0.0001441326530612245, + "loss": 1.5896, + "step": 438 + }, + { + "epoch": 8.88, + "learning_rate": 0.00014400510204081634, + "loss": 1.5465, + "step": 439 + }, + { + "epoch": 8.9, + "learning_rate": 0.00014387755102040817, + "loss": 1.5094, + "step": 440 + }, + { + "epoch": 8.92, + "learning_rate": 0.00014375, + "loss": 1.5144, + "step": 441 + }, + { + "epoch": 8.94, + "learning_rate": 0.00014362244897959186, + "loss": 1.4919, + "step": 442 + }, + { + "epoch": 8.96, + "learning_rate": 0.00014349489795918366, + "loss": 1.4702, + "step": 443 + }, + { + "epoch": 8.98, + "learning_rate": 0.00014336734693877552, + "loss": 1.4996, + "step": 444 + }, + { + "epoch": 9.0, + "learning_rate": 0.00014323979591836735, + "loss": 1.5503, + "step": 445 + }, + { + "epoch": 9.02, + "learning_rate": 0.00014311224489795918, + "loss": 1.4125, + "step": 446 + }, + { + "epoch": 9.04, + "learning_rate": 0.00014298469387755104, + "loss": 1.4722, + "step": 447 + }, + { + "epoch": 9.06, + "learning_rate": 0.00014285714285714287, + "loss": 1.5199, + "step": 448 + }, + { + "epoch": 9.09, + "learning_rate": 0.0001427295918367347, + "loss": 1.4571, + "step": 449 + }, + { + "epoch": 9.11, + "learning_rate": 0.00014260204081632653, + "loss": 1.4996, + "step": 450 + }, + { + "epoch": 9.13, + "learning_rate": 0.0001424744897959184, + "loss": 1.4092, + "step": 451 + }, + { + "epoch": 9.15, + "learning_rate": 0.00014234693877551022, + "loss": 1.4198, + "step": 452 + }, + { + "epoch": 9.17, + "learning_rate": 0.00014221938775510205, + "loss": 1.4916, + "step": 453 + }, + { + "epoch": 9.19, + "learning_rate": 0.00014209183673469388, + "loss": 1.5051, + "step": 454 + }, + { + "epoch": 9.21, + "learning_rate": 0.00014196428571428574, + "loss": 1.4321, + "step": 455 + }, + { + "epoch": 9.23, + "learning_rate": 0.00014183673469387754, + "loss": 1.4097, + "step": 456 + }, + { + "epoch": 9.25, + "learning_rate": 0.0001417091836734694, + "loss": 1.4853, + "step": 457 + }, + { + "epoch": 9.27, + "learning_rate": 0.00014158163265306123, + "loss": 1.4593, + "step": 458 + }, + { + "epoch": 9.29, + "learning_rate": 0.00014145408163265306, + "loss": 1.3729, + "step": 459 + }, + { + "epoch": 9.31, + "learning_rate": 0.0001413265306122449, + "loss": 1.4467, + "step": 460 + }, + { + "epoch": 9.33, + "learning_rate": 0.00014119897959183675, + "loss": 1.4467, + "step": 461 + }, + { + "epoch": 9.35, + "learning_rate": 0.00014107142857142858, + "loss": 1.4785, + "step": 462 + }, + { + "epoch": 9.37, + "learning_rate": 0.0001409438775510204, + "loss": 1.4089, + "step": 463 + }, + { + "epoch": 9.39, + "learning_rate": 0.00014081632653061224, + "loss": 1.5026, + "step": 464 + }, + { + "epoch": 9.41, + "learning_rate": 0.0001406887755102041, + "loss": 1.4857, + "step": 465 + }, + { + "epoch": 9.43, + "learning_rate": 
0.0001405612244897959, + "loss": 1.3745, + "step": 466 + }, + { + "epoch": 9.45, + "learning_rate": 0.00014043367346938776, + "loss": 1.4733, + "step": 467 + }, + { + "epoch": 9.47, + "learning_rate": 0.0001403061224489796, + "loss": 1.5212, + "step": 468 + }, + { + "epoch": 9.49, + "learning_rate": 0.00014017857142857142, + "loss": 1.5398, + "step": 469 + }, + { + "epoch": 9.51, + "learning_rate": 0.00014005102040816328, + "loss": 1.478, + "step": 470 + }, + { + "epoch": 9.53, + "learning_rate": 0.0001399234693877551, + "loss": 1.496, + "step": 471 + }, + { + "epoch": 9.55, + "learning_rate": 0.00013979591836734694, + "loss": 1.4837, + "step": 472 + }, + { + "epoch": 9.57, + "learning_rate": 0.00013966836734693878, + "loss": 1.4724, + "step": 473 + }, + { + "epoch": 9.59, + "learning_rate": 0.00013954081632653063, + "loss": 1.4828, + "step": 474 + }, + { + "epoch": 9.61, + "learning_rate": 0.00013941326530612246, + "loss": 1.5012, + "step": 475 + }, + { + "epoch": 9.63, + "learning_rate": 0.0001392857142857143, + "loss": 1.4879, + "step": 476 + }, + { + "epoch": 9.65, + "learning_rate": 0.00013915816326530613, + "loss": 1.4196, + "step": 477 + }, + { + "epoch": 9.67, + "learning_rate": 0.00013903061224489798, + "loss": 1.4915, + "step": 478 + }, + { + "epoch": 9.69, + "learning_rate": 0.0001389030612244898, + "loss": 1.3878, + "step": 479 + }, + { + "epoch": 9.71, + "learning_rate": 0.00013877551020408165, + "loss": 1.466, + "step": 480 + }, + { + "epoch": 9.73, + "learning_rate": 0.00013864795918367348, + "loss": 1.4582, + "step": 481 + }, + { + "epoch": 9.75, + "learning_rate": 0.0001385204081632653, + "loss": 1.533, + "step": 482 + }, + { + "epoch": 9.77, + "learning_rate": 0.00013839285714285714, + "loss": 1.4697, + "step": 483 + }, + { + "epoch": 9.79, + "learning_rate": 0.000138265306122449, + "loss": 1.3989, + "step": 484 + }, + { + "epoch": 9.81, + "learning_rate": 0.00013813775510204083, + "loss": 1.4361, + "step": 485 + }, + { + "epoch": 9.83, + "learning_rate": 0.00013801020408163266, + "loss": 1.5271, + "step": 486 + }, + { + "epoch": 9.85, + "learning_rate": 0.0001378826530612245, + "loss": 1.4905, + "step": 487 + }, + { + "epoch": 9.87, + "learning_rate": 0.00013775510204081635, + "loss": 1.4757, + "step": 488 + }, + { + "epoch": 9.89, + "learning_rate": 0.00013762755102040815, + "loss": 1.5485, + "step": 489 + }, + { + "epoch": 9.91, + "learning_rate": 0.0001375, + "loss": 1.4783, + "step": 490 + }, + { + "epoch": 9.93, + "learning_rate": 0.00013737244897959184, + "loss": 1.4849, + "step": 491 + }, + { + "epoch": 9.96, + "learning_rate": 0.00013724489795918367, + "loss": 1.5382, + "step": 492 + }, + { + "epoch": 9.98, + "learning_rate": 0.00013711734693877553, + "loss": 1.4902, + "step": 493 + }, + { + "epoch": 10.0, + "learning_rate": 0.00013698979591836736, + "loss": 1.4865, + "step": 494 + }, + { + "epoch": 10.02, + "learning_rate": 0.0001368622448979592, + "loss": 1.4436, + "step": 495 + }, + { + "epoch": 10.04, + "learning_rate": 0.00013673469387755102, + "loss": 1.408, + "step": 496 + }, + { + "epoch": 10.06, + "learning_rate": 0.00013660714285714288, + "loss": 1.4764, + "step": 497 + }, + { + "epoch": 10.08, + "learning_rate": 0.0001364795918367347, + "loss": 1.4646, + "step": 498 + }, + { + "epoch": 10.1, + "learning_rate": 0.00013635204081632654, + "loss": 1.406, + "step": 499 + }, + { + "epoch": 10.12, + "learning_rate": 0.00013622448979591837, + "loss": 1.4785, + "step": 500 + }, + { + "epoch": 10.14, + "learning_rate": 0.00013609693877551023, + "loss": 1.4117, + 
"step": 501 + }, + { + "epoch": 10.16, + "learning_rate": 0.00013596938775510203, + "loss": 1.4108, + "step": 502 + }, + { + "epoch": 10.18, + "learning_rate": 0.0001358418367346939, + "loss": 1.4155, + "step": 503 + }, + { + "epoch": 10.2, + "learning_rate": 0.00013571428571428572, + "loss": 1.4021, + "step": 504 + }, + { + "epoch": 10.22, + "learning_rate": 0.00013558673469387755, + "loss": 1.411, + "step": 505 + }, + { + "epoch": 10.24, + "learning_rate": 0.00013545918367346938, + "loss": 1.3851, + "step": 506 + }, + { + "epoch": 10.26, + "learning_rate": 0.00013533163265306124, + "loss": 1.387, + "step": 507 + }, + { + "epoch": 10.28, + "learning_rate": 0.00013520408163265305, + "loss": 1.4163, + "step": 508 + }, + { + "epoch": 10.3, + "learning_rate": 0.0001350765306122449, + "loss": 1.3343, + "step": 509 + }, + { + "epoch": 10.32, + "learning_rate": 0.00013494897959183673, + "loss": 1.4811, + "step": 510 + }, + { + "epoch": 10.34, + "learning_rate": 0.0001348214285714286, + "loss": 1.4086, + "step": 511 + }, + { + "epoch": 10.36, + "learning_rate": 0.0001346938775510204, + "loss": 1.3879, + "step": 512 + }, + { + "epoch": 10.38, + "learning_rate": 0.00013456632653061225, + "loss": 1.4204, + "step": 513 + }, + { + "epoch": 10.4, + "learning_rate": 0.00013443877551020408, + "loss": 1.4158, + "step": 514 + }, + { + "epoch": 10.42, + "learning_rate": 0.00013431122448979592, + "loss": 1.4521, + "step": 515 + }, + { + "epoch": 10.44, + "learning_rate": 0.00013418367346938777, + "loss": 1.4196, + "step": 516 + }, + { + "epoch": 10.46, + "learning_rate": 0.0001340561224489796, + "loss": 1.4361, + "step": 517 + }, + { + "epoch": 10.48, + "learning_rate": 0.00013392857142857144, + "loss": 1.4482, + "step": 518 + }, + { + "epoch": 10.5, + "learning_rate": 0.00013380102040816327, + "loss": 1.4801, + "step": 519 + }, + { + "epoch": 10.52, + "learning_rate": 0.00013367346938775512, + "loss": 1.4556, + "step": 520 + }, + { + "epoch": 10.54, + "learning_rate": 0.00013354591836734695, + "loss": 1.3902, + "step": 521 + }, + { + "epoch": 10.56, + "learning_rate": 0.00013341836734693879, + "loss": 1.4269, + "step": 522 + }, + { + "epoch": 10.58, + "learning_rate": 0.00013329081632653062, + "loss": 1.4899, + "step": 523 + }, + { + "epoch": 10.6, + "learning_rate": 0.00013316326530612247, + "loss": 1.3952, + "step": 524 + }, + { + "epoch": 10.62, + "learning_rate": 0.00013303571428571428, + "loss": 1.4116, + "step": 525 + }, + { + "epoch": 10.64, + "learning_rate": 0.00013290816326530614, + "loss": 1.4583, + "step": 526 + }, + { + "epoch": 10.66, + "learning_rate": 0.00013278061224489797, + "loss": 1.4466, + "step": 527 + }, + { + "epoch": 10.68, + "learning_rate": 0.0001326530612244898, + "loss": 1.4242, + "step": 528 + }, + { + "epoch": 10.7, + "learning_rate": 0.00013252551020408163, + "loss": 1.3717, + "step": 529 + }, + { + "epoch": 10.72, + "learning_rate": 0.0001323979591836735, + "loss": 1.4583, + "step": 530 + }, + { + "epoch": 10.74, + "learning_rate": 0.0001322704081632653, + "loss": 1.4185, + "step": 531 + }, + { + "epoch": 10.76, + "learning_rate": 0.00013214285714285715, + "loss": 1.4287, + "step": 532 + }, + { + "epoch": 10.78, + "learning_rate": 0.00013201530612244898, + "loss": 1.4385, + "step": 533 + }, + { + "epoch": 10.8, + "learning_rate": 0.00013188775510204084, + "loss": 1.453, + "step": 534 + }, + { + "epoch": 10.83, + "learning_rate": 0.00013176020408163264, + "loss": 1.4161, + "step": 535 + }, + { + "epoch": 10.85, + "learning_rate": 0.0001316326530612245, + "loss": 1.457, + 
"step": 536 + }, + { + "epoch": 10.87, + "learning_rate": 0.00013150510204081633, + "loss": 1.4367, + "step": 537 + }, + { + "epoch": 10.89, + "learning_rate": 0.00013137755102040816, + "loss": 1.4256, + "step": 538 + }, + { + "epoch": 10.91, + "learning_rate": 0.00013125000000000002, + "loss": 1.424, + "step": 539 + }, + { + "epoch": 10.93, + "learning_rate": 0.00013112244897959185, + "loss": 1.3923, + "step": 540 + }, + { + "epoch": 10.95, + "learning_rate": 0.00013099489795918368, + "loss": 1.4225, + "step": 541 + }, + { + "epoch": 10.97, + "learning_rate": 0.0001308673469387755, + "loss": 1.3969, + "step": 542 + }, + { + "epoch": 10.99, + "learning_rate": 0.00013073979591836737, + "loss": 1.4446, + "step": 543 + }, + { + "epoch": 11.01, + "learning_rate": 0.00013061224489795917, + "loss": 1.4375, + "step": 544 + }, + { + "epoch": 11.03, + "learning_rate": 0.00013048469387755103, + "loss": 1.4064, + "step": 545 + }, + { + "epoch": 11.05, + "learning_rate": 0.00013035714285714286, + "loss": 1.3454, + "step": 546 + }, + { + "epoch": 11.07, + "learning_rate": 0.00013022959183673472, + "loss": 1.3234, + "step": 547 + }, + { + "epoch": 11.09, + "learning_rate": 0.00013010204081632652, + "loss": 1.3759, + "step": 548 + }, + { + "epoch": 11.11, + "learning_rate": 0.00012997448979591838, + "loss": 1.4221, + "step": 549 + }, + { + "epoch": 11.13, + "learning_rate": 0.0001298469387755102, + "loss": 1.4261, + "step": 550 + }, + { + "epoch": 11.15, + "learning_rate": 0.00012971938775510204, + "loss": 1.3341, + "step": 551 + }, + { + "epoch": 11.17, + "learning_rate": 0.00012959183673469387, + "loss": 1.3994, + "step": 552 + }, + { + "epoch": 11.19, + "learning_rate": 0.00012946428571428573, + "loss": 1.3894, + "step": 553 + }, + { + "epoch": 11.21, + "learning_rate": 0.00012933673469387754, + "loss": 1.3585, + "step": 554 + }, + { + "epoch": 11.23, + "learning_rate": 0.0001292091836734694, + "loss": 1.3763, + "step": 555 + }, + { + "epoch": 11.25, + "learning_rate": 0.00012908163265306123, + "loss": 1.3623, + "step": 556 + }, + { + "epoch": 11.27, + "learning_rate": 0.00012895408163265306, + "loss": 1.3907, + "step": 557 + }, + { + "epoch": 11.29, + "learning_rate": 0.0001288265306122449, + "loss": 1.3807, + "step": 558 + }, + { + "epoch": 11.31, + "learning_rate": 0.00012869897959183674, + "loss": 1.4045, + "step": 559 + }, + { + "epoch": 11.33, + "learning_rate": 0.00012857142857142858, + "loss": 1.4038, + "step": 560 + }, + { + "epoch": 11.35, + "learning_rate": 0.0001284438775510204, + "loss": 1.3466, + "step": 561 + }, + { + "epoch": 11.37, + "learning_rate": 0.00012831632653061226, + "loss": 1.3449, + "step": 562 + }, + { + "epoch": 11.39, + "learning_rate": 0.0001281887755102041, + "loss": 1.3866, + "step": 563 + }, + { + "epoch": 11.41, + "learning_rate": 0.00012806122448979593, + "loss": 1.3106, + "step": 564 + }, + { + "epoch": 11.43, + "learning_rate": 0.00012793367346938776, + "loss": 1.4414, + "step": 565 + }, + { + "epoch": 11.45, + "learning_rate": 0.00012780612244897962, + "loss": 1.3737, + "step": 566 + }, + { + "epoch": 11.47, + "learning_rate": 0.00012767857142857142, + "loss": 1.4053, + "step": 567 + }, + { + "epoch": 11.49, + "learning_rate": 0.00012755102040816328, + "loss": 1.4561, + "step": 568 + }, + { + "epoch": 11.51, + "learning_rate": 0.0001274234693877551, + "loss": 1.3684, + "step": 569 + }, + { + "epoch": 11.53, + "learning_rate": 0.00012729591836734697, + "loss": 1.3117, + "step": 570 + }, + { + "epoch": 11.55, + "learning_rate": 0.00012716836734693877, + "loss": 
1.3474, + "step": 571 + }, + { + "epoch": 11.57, + "learning_rate": 0.00012704081632653063, + "loss": 1.3804, + "step": 572 + }, + { + "epoch": 11.59, + "learning_rate": 0.00012691326530612246, + "loss": 1.3656, + "step": 573 + }, + { + "epoch": 11.61, + "learning_rate": 0.0001267857142857143, + "loss": 1.3133, + "step": 574 + }, + { + "epoch": 11.63, + "learning_rate": 0.00012665816326530612, + "loss": 1.4077, + "step": 575 + }, + { + "epoch": 11.65, + "learning_rate": 0.00012653061224489798, + "loss": 1.4087, + "step": 576 + }, + { + "epoch": 11.67, + "learning_rate": 0.00012640306122448978, + "loss": 1.3524, + "step": 577 + }, + { + "epoch": 11.7, + "learning_rate": 0.00012627551020408164, + "loss": 1.3481, + "step": 578 + }, + { + "epoch": 11.72, + "learning_rate": 0.00012614795918367347, + "loss": 1.4497, + "step": 579 + }, + { + "epoch": 11.74, + "learning_rate": 0.0001260204081632653, + "loss": 1.3866, + "step": 580 + }, + { + "epoch": 11.76, + "learning_rate": 0.00012589285714285713, + "loss": 1.42, + "step": 581 + }, + { + "epoch": 11.78, + "learning_rate": 0.000125765306122449, + "loss": 1.3562, + "step": 582 + }, + { + "epoch": 11.8, + "learning_rate": 0.00012563775510204082, + "loss": 1.3249, + "step": 583 + }, + { + "epoch": 11.82, + "learning_rate": 0.00012551020408163265, + "loss": 1.4277, + "step": 584 + }, + { + "epoch": 11.84, + "learning_rate": 0.0001253826530612245, + "loss": 1.3734, + "step": 585 + }, + { + "epoch": 11.86, + "learning_rate": 0.00012525510204081634, + "loss": 1.3765, + "step": 586 + }, + { + "epoch": 11.88, + "learning_rate": 0.00012512755102040817, + "loss": 1.4153, + "step": 587 + }, + { + "epoch": 11.9, + "learning_rate": 0.000125, + "loss": 1.3847, + "step": 588 + }, + { + "epoch": 11.92, + "learning_rate": 0.00012487244897959186, + "loss": 1.3824, + "step": 589 + }, + { + "epoch": 11.94, + "learning_rate": 0.00012474489795918366, + "loss": 1.3938, + "step": 590 + }, + { + "epoch": 11.96, + "learning_rate": 0.00012461734693877552, + "loss": 1.4143, + "step": 591 + }, + { + "epoch": 11.98, + "learning_rate": 0.00012448979591836735, + "loss": 1.3794, + "step": 592 + }, + { + "epoch": 12.0, + "learning_rate": 0.00012436224489795918, + "loss": 1.3755, + "step": 593 + }, + { + "epoch": 12.02, + "learning_rate": 0.00012423469387755101, + "loss": 1.3736, + "step": 594 + }, + { + "epoch": 12.04, + "learning_rate": 0.00012410714285714287, + "loss": 1.2957, + "step": 595 + }, + { + "epoch": 12.06, + "learning_rate": 0.0001239795918367347, + "loss": 1.2996, + "step": 596 + }, + { + "epoch": 12.08, + "learning_rate": 0.00012385204081632653, + "loss": 1.3648, + "step": 597 + }, + { + "epoch": 12.1, + "learning_rate": 0.00012372448979591837, + "loss": 1.3031, + "step": 598 + }, + { + "epoch": 12.12, + "learning_rate": 0.00012359693877551022, + "loss": 1.2933, + "step": 599 + }, + { + "epoch": 12.14, + "learning_rate": 0.00012346938775510203, + "loss": 1.322, + "step": 600 + }, + { + "epoch": 12.16, + "learning_rate": 0.00012334183673469389, + "loss": 1.3123, + "step": 601 + }, + { + "epoch": 12.18, + "learning_rate": 0.00012321428571428572, + "loss": 1.3187, + "step": 602 + }, + { + "epoch": 12.2, + "learning_rate": 0.00012308673469387755, + "loss": 1.3353, + "step": 603 + }, + { + "epoch": 12.22, + "learning_rate": 0.0001229591836734694, + "loss": 1.3221, + "step": 604 + }, + { + "epoch": 12.24, + "learning_rate": 0.00012283163265306124, + "loss": 1.3458, + "step": 605 + }, + { + "epoch": 12.26, + "learning_rate": 0.00012270408163265307, + "loss": 1.275, + 
"step": 606 + }, + { + "epoch": 12.28, + "learning_rate": 0.0001225765306122449, + "loss": 1.3455, + "step": 607 + }, + { + "epoch": 12.3, + "learning_rate": 0.00012244897959183676, + "loss": 1.2769, + "step": 608 + }, + { + "epoch": 12.32, + "learning_rate": 0.00012232142857142859, + "loss": 1.3201, + "step": 609 + }, + { + "epoch": 12.34, + "learning_rate": 0.00012219387755102042, + "loss": 1.3073, + "step": 610 + }, + { + "epoch": 12.36, + "learning_rate": 0.00012206632653061225, + "loss": 1.3103, + "step": 611 + }, + { + "epoch": 12.38, + "learning_rate": 0.00012193877551020409, + "loss": 1.4437, + "step": 612 + }, + { + "epoch": 12.4, + "learning_rate": 0.00012181122448979591, + "loss": 1.3086, + "step": 613 + }, + { + "epoch": 12.42, + "learning_rate": 0.00012168367346938775, + "loss": 1.3867, + "step": 614 + }, + { + "epoch": 12.44, + "learning_rate": 0.0001215561224489796, + "loss": 1.2565, + "step": 615 + }, + { + "epoch": 12.46, + "learning_rate": 0.00012142857142857143, + "loss": 1.335, + "step": 616 + }, + { + "epoch": 12.48, + "learning_rate": 0.00012130102040816327, + "loss": 1.3423, + "step": 617 + }, + { + "epoch": 12.5, + "learning_rate": 0.00012117346938775512, + "loss": 1.3433, + "step": 618 + }, + { + "epoch": 12.52, + "learning_rate": 0.00012104591836734695, + "loss": 1.3387, + "step": 619 + }, + { + "epoch": 12.55, + "learning_rate": 0.00012091836734693878, + "loss": 1.3923, + "step": 620 + }, + { + "epoch": 12.57, + "learning_rate": 0.00012079081632653062, + "loss": 1.3774, + "step": 621 + }, + { + "epoch": 12.59, + "learning_rate": 0.00012066326530612247, + "loss": 1.3203, + "step": 622 + }, + { + "epoch": 12.61, + "learning_rate": 0.00012053571428571429, + "loss": 1.2924, + "step": 623 + }, + { + "epoch": 12.63, + "learning_rate": 0.00012040816326530613, + "loss": 1.3292, + "step": 624 + }, + { + "epoch": 12.65, + "learning_rate": 0.00012028061224489798, + "loss": 1.3161, + "step": 625 + }, + { + "epoch": 12.67, + "learning_rate": 0.00012015306122448979, + "loss": 1.352, + "step": 626 + }, + { + "epoch": 12.69, + "learning_rate": 0.00012002551020408164, + "loss": 1.3577, + "step": 627 + }, + { + "epoch": 12.71, + "learning_rate": 0.00011989795918367348, + "loss": 1.3575, + "step": 628 + }, + { + "epoch": 12.73, + "learning_rate": 0.0001197704081632653, + "loss": 1.3727, + "step": 629 + }, + { + "epoch": 12.75, + "learning_rate": 0.00011964285714285714, + "loss": 1.3312, + "step": 630 + }, + { + "epoch": 12.77, + "learning_rate": 0.00011951530612244899, + "loss": 1.3378, + "step": 631 + }, + { + "epoch": 12.79, + "learning_rate": 0.00011938775510204083, + "loss": 1.295, + "step": 632 + }, + { + "epoch": 12.81, + "learning_rate": 0.00011926020408163265, + "loss": 1.3447, + "step": 633 + }, + { + "epoch": 12.83, + "learning_rate": 0.0001191326530612245, + "loss": 1.3835, + "step": 634 + }, + { + "epoch": 12.85, + "learning_rate": 0.00011900510204081634, + "loss": 1.3222, + "step": 635 + }, + { + "epoch": 12.87, + "learning_rate": 0.00011887755102040817, + "loss": 1.2851, + "step": 636 + }, + { + "epoch": 12.89, + "learning_rate": 0.00011875, + "loss": 1.2723, + "step": 637 + }, + { + "epoch": 12.91, + "learning_rate": 0.00011862244897959184, + "loss": 1.3924, + "step": 638 + }, + { + "epoch": 12.93, + "learning_rate": 0.00011849489795918368, + "loss": 1.4625, + "step": 639 + }, + { + "epoch": 12.95, + "learning_rate": 0.00011836734693877552, + "loss": 1.3245, + "step": 640 + }, + { + "epoch": 12.97, + "learning_rate": 0.00011823979591836736, + "loss": 1.4042, + 
"step": 641 + }, + { + "epoch": 12.99, + "learning_rate": 0.00011811224489795918, + "loss": 1.3761, + "step": 642 + }, + { + "epoch": 13.01, + "learning_rate": 0.00011798469387755103, + "loss": 1.3376, + "step": 643 + }, + { + "epoch": 13.03, + "learning_rate": 0.00011785714285714287, + "loss": 1.2174, + "step": 644 + }, + { + "epoch": 13.05, + "learning_rate": 0.00011772959183673471, + "loss": 1.3602, + "step": 645 + }, + { + "epoch": 13.07, + "learning_rate": 0.00011760204081632653, + "loss": 1.3002, + "step": 646 + }, + { + "epoch": 13.09, + "learning_rate": 0.00011747448979591838, + "loss": 1.2262, + "step": 647 + }, + { + "epoch": 13.11, + "learning_rate": 0.00011734693877551022, + "loss": 1.3048, + "step": 648 + }, + { + "epoch": 13.13, + "learning_rate": 0.00011721938775510204, + "loss": 1.2231, + "step": 649 + }, + { + "epoch": 13.15, + "learning_rate": 0.00011709183673469388, + "loss": 1.2996, + "step": 650 + }, + { + "epoch": 13.17, + "learning_rate": 0.00011696428571428573, + "loss": 1.2708, + "step": 651 + }, + { + "epoch": 13.19, + "learning_rate": 0.00011683673469387754, + "loss": 1.2776, + "step": 652 + }, + { + "epoch": 13.21, + "learning_rate": 0.00011670918367346939, + "loss": 1.248, + "step": 653 + }, + { + "epoch": 13.23, + "learning_rate": 0.00011658163265306123, + "loss": 1.2582, + "step": 654 + }, + { + "epoch": 13.25, + "learning_rate": 0.00011645408163265305, + "loss": 1.3011, + "step": 655 + }, + { + "epoch": 13.27, + "learning_rate": 0.0001163265306122449, + "loss": 1.2969, + "step": 656 + }, + { + "epoch": 13.29, + "learning_rate": 0.00011619897959183674, + "loss": 1.2454, + "step": 657 + }, + { + "epoch": 13.31, + "learning_rate": 0.00011607142857142858, + "loss": 1.1914, + "step": 658 + }, + { + "epoch": 13.33, + "learning_rate": 0.00011594387755102041, + "loss": 1.34, + "step": 659 + }, + { + "epoch": 13.35, + "learning_rate": 0.00011581632653061225, + "loss": 1.2828, + "step": 660 + }, + { + "epoch": 13.37, + "learning_rate": 0.00011568877551020409, + "loss": 1.2962, + "step": 661 + }, + { + "epoch": 13.39, + "learning_rate": 0.00011556122448979592, + "loss": 1.3334, + "step": 662 + }, + { + "epoch": 13.42, + "learning_rate": 0.00011543367346938776, + "loss": 1.2832, + "step": 663 + }, + { + "epoch": 13.44, + "learning_rate": 0.00011530612244897961, + "loss": 1.3012, + "step": 664 + }, + { + "epoch": 13.46, + "learning_rate": 0.00011517857142857143, + "loss": 1.2857, + "step": 665 + }, + { + "epoch": 13.48, + "learning_rate": 0.00011505102040816327, + "loss": 1.2855, + "step": 666 + }, + { + "epoch": 13.5, + "learning_rate": 0.00011492346938775512, + "loss": 1.3077, + "step": 667 + }, + { + "epoch": 13.52, + "learning_rate": 0.00011479591836734696, + "loss": 1.3139, + "step": 668 + }, + { + "epoch": 13.54, + "learning_rate": 0.00011466836734693878, + "loss": 1.3138, + "step": 669 + }, + { + "epoch": 13.56, + "learning_rate": 0.00011454081632653062, + "loss": 1.2808, + "step": 670 + }, + { + "epoch": 13.58, + "learning_rate": 0.00011441326530612247, + "loss": 1.2492, + "step": 671 + }, + { + "epoch": 13.6, + "learning_rate": 0.00011428571428571428, + "loss": 1.2027, + "step": 672 + }, + { + "epoch": 13.62, + "learning_rate": 0.00011415816326530613, + "loss": 1.33, + "step": 673 + }, + { + "epoch": 13.64, + "learning_rate": 0.00011403061224489797, + "loss": 1.3112, + "step": 674 + }, + { + "epoch": 13.66, + "learning_rate": 0.00011390306122448979, + "loss": 1.2772, + "step": 675 + }, + { + "epoch": 13.68, + "learning_rate": 0.00011377551020408163, + "loss": 
1.2701, + "step": 676 + }, + { + "epoch": 13.7, + "learning_rate": 0.00011364795918367348, + "loss": 1.1973, + "step": 677 + }, + { + "epoch": 13.72, + "learning_rate": 0.0001135204081632653, + "loss": 1.3124, + "step": 678 + }, + { + "epoch": 13.74, + "learning_rate": 0.00011339285714285714, + "loss": 1.3085, + "step": 679 + }, + { + "epoch": 13.76, + "learning_rate": 0.00011326530612244898, + "loss": 1.3457, + "step": 680 + }, + { + "epoch": 13.78, + "learning_rate": 0.00011313775510204083, + "loss": 1.3338, + "step": 681 + }, + { + "epoch": 13.8, + "learning_rate": 0.00011301020408163266, + "loss": 1.2753, + "step": 682 + }, + { + "epoch": 13.82, + "learning_rate": 0.00011288265306122449, + "loss": 1.2786, + "step": 683 + }, + { + "epoch": 13.84, + "learning_rate": 0.00011275510204081634, + "loss": 1.2584, + "step": 684 + }, + { + "epoch": 13.86, + "learning_rate": 0.00011262755102040817, + "loss": 1.2779, + "step": 685 + }, + { + "epoch": 13.88, + "learning_rate": 0.00011250000000000001, + "loss": 1.3502, + "step": 686 + }, + { + "epoch": 13.9, + "learning_rate": 0.00011237244897959185, + "loss": 1.3251, + "step": 687 + }, + { + "epoch": 13.92, + "learning_rate": 0.00011224489795918367, + "loss": 1.273, + "step": 688 + }, + { + "epoch": 13.94, + "learning_rate": 0.00011211734693877552, + "loss": 1.3341, + "step": 689 + }, + { + "epoch": 13.96, + "learning_rate": 0.00011198979591836736, + "loss": 1.2654, + "step": 690 + }, + { + "epoch": 13.98, + "learning_rate": 0.00011186224489795918, + "loss": 1.3333, + "step": 691 + }, + { + "epoch": 14.0, + "learning_rate": 0.00011173469387755102, + "loss": 1.3246, + "step": 692 + }, + { + "epoch": 14.02, + "learning_rate": 0.00011160714285714287, + "loss": 1.2547, + "step": 693 + }, + { + "epoch": 14.04, + "learning_rate": 0.00011147959183673471, + "loss": 1.208, + "step": 694 + }, + { + "epoch": 14.06, + "learning_rate": 0.00011135204081632653, + "loss": 1.223, + "step": 695 + }, + { + "epoch": 14.08, + "learning_rate": 0.00011122448979591837, + "loss": 1.2483, + "step": 696 + }, + { + "epoch": 14.1, + "learning_rate": 0.00011109693877551022, + "loss": 1.2823, + "step": 697 + }, + { + "epoch": 14.12, + "learning_rate": 0.00011096938775510204, + "loss": 1.2013, + "step": 698 + }, + { + "epoch": 14.14, + "learning_rate": 0.00011084183673469388, + "loss": 1.1883, + "step": 699 + }, + { + "epoch": 14.16, + "learning_rate": 0.00011071428571428572, + "loss": 1.2364, + "step": 700 + }, + { + "epoch": 14.18, + "learning_rate": 0.00011058673469387754, + "loss": 1.2069, + "step": 701 + }, + { + "epoch": 14.2, + "learning_rate": 0.00011045918367346939, + "loss": 1.1968, + "step": 702 + }, + { + "epoch": 14.22, + "learning_rate": 0.00011033163265306123, + "loss": 1.2236, + "step": 703 + }, + { + "epoch": 14.24, + "learning_rate": 0.00011020408163265306, + "loss": 1.1942, + "step": 704 + }, + { + "epoch": 14.26, + "learning_rate": 0.0001100765306122449, + "loss": 1.2561, + "step": 705 + }, + { + "epoch": 14.29, + "learning_rate": 0.00010994897959183674, + "loss": 1.1839, + "step": 706 + }, + { + "epoch": 14.31, + "learning_rate": 0.00010982142857142858, + "loss": 1.2128, + "step": 707 + }, + { + "epoch": 14.33, + "learning_rate": 0.00010969387755102041, + "loss": 1.3086, + "step": 708 + }, + { + "epoch": 14.35, + "learning_rate": 0.00010956632653061226, + "loss": 1.2379, + "step": 709 + }, + { + "epoch": 14.37, + "learning_rate": 0.0001094387755102041, + "loss": 1.176, + "step": 710 + }, + { + "epoch": 14.39, + "learning_rate": 0.00010931122448979592, + 
"loss": 1.2105, + "step": 711 + }, + { + "epoch": 14.41, + "learning_rate": 0.00010918367346938776, + "loss": 1.2149, + "step": 712 + }, + { + "epoch": 14.43, + "learning_rate": 0.0001090561224489796, + "loss": 1.2392, + "step": 713 + }, + { + "epoch": 14.45, + "learning_rate": 0.00010892857142857142, + "loss": 1.2471, + "step": 714 + }, + { + "epoch": 14.47, + "learning_rate": 0.00010880102040816327, + "loss": 1.2561, + "step": 715 + }, + { + "epoch": 14.49, + "learning_rate": 0.00010867346938775511, + "loss": 1.2179, + "step": 716 + }, + { + "epoch": 14.51, + "learning_rate": 0.00010854591836734696, + "loss": 1.2459, + "step": 717 + }, + { + "epoch": 14.53, + "learning_rate": 0.00010841836734693877, + "loss": 1.2933, + "step": 718 + }, + { + "epoch": 14.55, + "learning_rate": 0.00010829081632653062, + "loss": 1.2862, + "step": 719 + }, + { + "epoch": 14.57, + "learning_rate": 0.00010816326530612246, + "loss": 1.2976, + "step": 720 + }, + { + "epoch": 14.59, + "learning_rate": 0.00010803571428571428, + "loss": 1.231, + "step": 721 + }, + { + "epoch": 14.61, + "learning_rate": 0.00010790816326530613, + "loss": 1.2464, + "step": 722 + }, + { + "epoch": 14.63, + "learning_rate": 0.00010778061224489797, + "loss": 1.2181, + "step": 723 + }, + { + "epoch": 14.65, + "learning_rate": 0.00010765306122448979, + "loss": 1.3307, + "step": 724 + }, + { + "epoch": 14.67, + "learning_rate": 0.00010752551020408163, + "loss": 1.1723, + "step": 725 + }, + { + "epoch": 14.69, + "learning_rate": 0.00010739795918367348, + "loss": 1.1528, + "step": 726 + }, + { + "epoch": 14.71, + "learning_rate": 0.0001072704081632653, + "loss": 1.215, + "step": 727 + }, + { + "epoch": 14.73, + "learning_rate": 0.00010714285714285715, + "loss": 1.2624, + "step": 728 + }, + { + "epoch": 14.75, + "learning_rate": 0.00010701530612244898, + "loss": 1.3117, + "step": 729 + }, + { + "epoch": 14.77, + "learning_rate": 0.00010688775510204083, + "loss": 1.2572, + "step": 730 + }, + { + "epoch": 14.79, + "learning_rate": 0.00010676020408163266, + "loss": 1.222, + "step": 731 + }, + { + "epoch": 14.81, + "learning_rate": 0.0001066326530612245, + "loss": 1.2881, + "step": 732 + }, + { + "epoch": 14.83, + "learning_rate": 0.00010650510204081635, + "loss": 1.2676, + "step": 733 + }, + { + "epoch": 14.85, + "learning_rate": 0.00010637755102040816, + "loss": 1.2734, + "step": 734 + }, + { + "epoch": 14.87, + "learning_rate": 0.00010625000000000001, + "loss": 1.2885, + "step": 735 + }, + { + "epoch": 14.89, + "learning_rate": 0.00010612244897959185, + "loss": 1.2764, + "step": 736 + }, + { + "epoch": 14.91, + "learning_rate": 0.00010599489795918367, + "loss": 1.3267, + "step": 737 + }, + { + "epoch": 14.93, + "learning_rate": 0.00010586734693877551, + "loss": 1.2445, + "step": 738 + }, + { + "epoch": 14.95, + "learning_rate": 0.00010573979591836736, + "loss": 1.3359, + "step": 739 + }, + { + "epoch": 14.97, + "learning_rate": 0.00010561224489795918, + "loss": 1.2508, + "step": 740 + }, + { + "epoch": 14.99, + "learning_rate": 0.00010548469387755102, + "loss": 1.2227, + "step": 741 + }, + { + "epoch": 15.01, + "learning_rate": 0.00010535714285714286, + "loss": 1.1889, + "step": 742 + }, + { + "epoch": 15.03, + "learning_rate": 0.00010522959183673471, + "loss": 1.1919, + "step": 743 + }, + { + "epoch": 15.05, + "learning_rate": 0.00010510204081632653, + "loss": 1.2383, + "step": 744 + }, + { + "epoch": 15.07, + "learning_rate": 0.00010497448979591837, + "loss": 1.2401, + "step": 745 + }, + { + "epoch": 15.09, + "learning_rate": 
0.00010484693877551021, + "loss": 1.2015, + "step": 746 + }, + { + "epoch": 15.11, + "learning_rate": 0.00010471938775510203, + "loss": 1.1509, + "step": 747 + }, + { + "epoch": 15.13, + "learning_rate": 0.00010459183673469388, + "loss": 1.1878, + "step": 748 + }, + { + "epoch": 15.16, + "learning_rate": 0.00010446428571428572, + "loss": 1.1706, + "step": 749 + }, + { + "epoch": 15.18, + "learning_rate": 0.00010433673469387755, + "loss": 1.1285, + "step": 750 + }, + { + "epoch": 15.2, + "learning_rate": 0.0001042091836734694, + "loss": 1.1608, + "step": 751 + }, + { + "epoch": 15.22, + "learning_rate": 0.00010408163265306123, + "loss": 1.1178, + "step": 752 + }, + { + "epoch": 15.24, + "learning_rate": 0.00010395408163265306, + "loss": 1.1293, + "step": 753 + }, + { + "epoch": 15.26, + "learning_rate": 0.0001038265306122449, + "loss": 1.2306, + "step": 754 + }, + { + "epoch": 15.28, + "learning_rate": 0.00010369897959183675, + "loss": 1.1541, + "step": 755 + }, + { + "epoch": 15.3, + "learning_rate": 0.00010357142857142859, + "loss": 1.1702, + "step": 756 + }, + { + "epoch": 15.32, + "learning_rate": 0.00010344387755102041, + "loss": 1.2119, + "step": 757 + }, + { + "epoch": 15.34, + "learning_rate": 0.00010331632653061225, + "loss": 1.2239, + "step": 758 + }, + { + "epoch": 15.36, + "learning_rate": 0.0001031887755102041, + "loss": 1.2019, + "step": 759 + }, + { + "epoch": 15.38, + "learning_rate": 0.00010306122448979591, + "loss": 1.2197, + "step": 760 + }, + { + "epoch": 15.4, + "learning_rate": 0.00010293367346938776, + "loss": 1.1769, + "step": 761 + }, + { + "epoch": 15.42, + "learning_rate": 0.0001028061224489796, + "loss": 1.1907, + "step": 762 + }, + { + "epoch": 15.44, + "learning_rate": 0.00010267857142857142, + "loss": 1.2089, + "step": 763 + }, + { + "epoch": 15.46, + "learning_rate": 0.00010255102040816327, + "loss": 1.1335, + "step": 764 + }, + { + "epoch": 15.48, + "learning_rate": 0.00010242346938775511, + "loss": 1.1633, + "step": 765 + }, + { + "epoch": 15.5, + "learning_rate": 0.00010229591836734695, + "loss": 1.1578, + "step": 766 + }, + { + "epoch": 15.52, + "learning_rate": 0.00010216836734693877, + "loss": 1.2236, + "step": 767 + }, + { + "epoch": 15.54, + "learning_rate": 0.00010204081632653062, + "loss": 1.1941, + "step": 768 + }, + { + "epoch": 15.56, + "learning_rate": 0.00010191326530612246, + "loss": 1.2666, + "step": 769 + }, + { + "epoch": 15.58, + "learning_rate": 0.00010178571428571428, + "loss": 1.1232, + "step": 770 + }, + { + "epoch": 15.6, + "learning_rate": 0.00010165816326530612, + "loss": 1.2242, + "step": 771 + }, + { + "epoch": 15.62, + "learning_rate": 0.00010153061224489797, + "loss": 1.1852, + "step": 772 + }, + { + "epoch": 15.64, + "learning_rate": 0.0001014030612244898, + "loss": 1.2626, + "step": 773 + }, + { + "epoch": 15.66, + "learning_rate": 0.00010127551020408164, + "loss": 1.1873, + "step": 774 + }, + { + "epoch": 15.68, + "learning_rate": 0.00010114795918367349, + "loss": 1.3005, + "step": 775 + }, + { + "epoch": 15.7, + "learning_rate": 0.0001010204081632653, + "loss": 1.1904, + "step": 776 + }, + { + "epoch": 15.72, + "learning_rate": 0.00010089285714285715, + "loss": 1.2927, + "step": 777 + }, + { + "epoch": 15.74, + "learning_rate": 0.00010076530612244899, + "loss": 1.179, + "step": 778 + }, + { + "epoch": 15.76, + "learning_rate": 0.00010063775510204084, + "loss": 1.2027, + "step": 779 + }, + { + "epoch": 15.78, + "learning_rate": 0.00010051020408163265, + "loss": 1.2428, + "step": 780 + }, + { + "epoch": 15.8, + 
"learning_rate": 0.0001003826530612245, + "loss": 1.2324, + "step": 781 + }, + { + "epoch": 15.82, + "learning_rate": 0.00010025510204081634, + "loss": 1.1251, + "step": 782 + }, + { + "epoch": 15.84, + "learning_rate": 0.00010012755102040816, + "loss": 1.2405, + "step": 783 + }, + { + "epoch": 15.86, + "learning_rate": 0.0001, + "loss": 1.2005, + "step": 784 + }, + { + "epoch": 15.88, + "learning_rate": 9.987244897959184e-05, + "loss": 1.2259, + "step": 785 + }, + { + "epoch": 15.9, + "learning_rate": 9.974489795918368e-05, + "loss": 1.1576, + "step": 786 + }, + { + "epoch": 15.92, + "learning_rate": 9.961734693877551e-05, + "loss": 1.1834, + "step": 787 + }, + { + "epoch": 15.94, + "learning_rate": 9.948979591836736e-05, + "loss": 1.2396, + "step": 788 + }, + { + "epoch": 15.96, + "learning_rate": 9.936224489795919e-05, + "loss": 1.1865, + "step": 789 + }, + { + "epoch": 15.98, + "learning_rate": 9.923469387755102e-05, + "loss": 1.2356, + "step": 790 + }, + { + "epoch": 16.01, + "learning_rate": 9.910714285714286e-05, + "loss": 1.2639, + "step": 791 + }, + { + "epoch": 16.03, + "learning_rate": 9.897959183673469e-05, + "loss": 1.1216, + "step": 792 + }, + { + "epoch": 16.05, + "learning_rate": 9.885204081632652e-05, + "loss": 1.1051, + "step": 793 + }, + { + "epoch": 16.07, + "learning_rate": 9.872448979591837e-05, + "loss": 1.0864, + "step": 794 + }, + { + "epoch": 16.09, + "learning_rate": 9.859693877551021e-05, + "loss": 1.182, + "step": 795 + }, + { + "epoch": 16.11, + "learning_rate": 9.846938775510204e-05, + "loss": 1.1272, + "step": 796 + }, + { + "epoch": 16.13, + "learning_rate": 9.834183673469389e-05, + "loss": 1.1946, + "step": 797 + }, + { + "epoch": 16.15, + "learning_rate": 9.821428571428572e-05, + "loss": 1.0875, + "step": 798 + }, + { + "epoch": 16.17, + "learning_rate": 9.808673469387756e-05, + "loss": 1.1671, + "step": 799 + }, + { + "epoch": 16.19, + "learning_rate": 9.79591836734694e-05, + "loss": 1.1502, + "step": 800 + }, + { + "epoch": 16.21, + "learning_rate": 9.783163265306124e-05, + "loss": 1.19, + "step": 801 + }, + { + "epoch": 16.23, + "learning_rate": 9.770408163265307e-05, + "loss": 1.1258, + "step": 802 + }, + { + "epoch": 16.25, + "learning_rate": 9.75765306122449e-05, + "loss": 1.1765, + "step": 803 + }, + { + "epoch": 16.27, + "learning_rate": 9.744897959183674e-05, + "loss": 1.1217, + "step": 804 + }, + { + "epoch": 16.29, + "learning_rate": 9.732142857142858e-05, + "loss": 1.1293, + "step": 805 + }, + { + "epoch": 16.31, + "learning_rate": 9.719387755102042e-05, + "loss": 1.17, + "step": 806 + }, + { + "epoch": 16.33, + "learning_rate": 9.706632653061225e-05, + "loss": 1.17, + "step": 807 + }, + { + "epoch": 16.35, + "learning_rate": 9.693877551020408e-05, + "loss": 1.2004, + "step": 808 + }, + { + "epoch": 16.37, + "learning_rate": 9.681122448979593e-05, + "loss": 1.1648, + "step": 809 + }, + { + "epoch": 16.39, + "learning_rate": 9.668367346938776e-05, + "loss": 1.0688, + "step": 810 + }, + { + "epoch": 16.41, + "learning_rate": 9.655612244897959e-05, + "loss": 1.1607, + "step": 811 + }, + { + "epoch": 16.43, + "learning_rate": 9.642857142857143e-05, + "loss": 1.1298, + "step": 812 + }, + { + "epoch": 16.45, + "learning_rate": 9.630102040816326e-05, + "loss": 1.1064, + "step": 813 + }, + { + "epoch": 16.47, + "learning_rate": 9.617346938775511e-05, + "loss": 1.1472, + "step": 814 + }, + { + "epoch": 16.49, + "learning_rate": 9.604591836734694e-05, + "loss": 1.1577, + "step": 815 + }, + { + "epoch": 16.51, + "learning_rate": 9.591836734693878e-05, + 
"loss": 1.1436, + "step": 816 + }, + { + "epoch": 16.53, + "learning_rate": 9.579081632653061e-05, + "loss": 1.1657, + "step": 817 + }, + { + "epoch": 16.55, + "learning_rate": 9.566326530612246e-05, + "loss": 1.1147, + "step": 818 + }, + { + "epoch": 16.57, + "learning_rate": 9.553571428571429e-05, + "loss": 1.1839, + "step": 819 + }, + { + "epoch": 16.59, + "learning_rate": 9.540816326530613e-05, + "loss": 1.1298, + "step": 820 + }, + { + "epoch": 16.61, + "learning_rate": 9.528061224489796e-05, + "loss": 1.2141, + "step": 821 + }, + { + "epoch": 16.63, + "learning_rate": 9.515306122448981e-05, + "loss": 1.2045, + "step": 822 + }, + { + "epoch": 16.65, + "learning_rate": 9.502551020408164e-05, + "loss": 1.1791, + "step": 823 + }, + { + "epoch": 16.67, + "learning_rate": 9.489795918367348e-05, + "loss": 1.1137, + "step": 824 + }, + { + "epoch": 16.69, + "learning_rate": 9.477040816326531e-05, + "loss": 1.1312, + "step": 825 + }, + { + "epoch": 16.71, + "learning_rate": 9.464285714285715e-05, + "loss": 1.1102, + "step": 826 + }, + { + "epoch": 16.73, + "learning_rate": 9.451530612244899e-05, + "loss": 1.1865, + "step": 827 + }, + { + "epoch": 16.75, + "learning_rate": 9.438775510204082e-05, + "loss": 1.1232, + "step": 828 + }, + { + "epoch": 16.77, + "learning_rate": 9.426020408163265e-05, + "loss": 1.2068, + "step": 829 + }, + { + "epoch": 16.79, + "learning_rate": 9.41326530612245e-05, + "loss": 1.1864, + "step": 830 + }, + { + "epoch": 16.81, + "learning_rate": 9.400510204081633e-05, + "loss": 1.2195, + "step": 831 + }, + { + "epoch": 16.83, + "learning_rate": 9.387755102040817e-05, + "loss": 1.2063, + "step": 832 + }, + { + "epoch": 16.85, + "learning_rate": 9.375e-05, + "loss": 1.1455, + "step": 833 + }, + { + "epoch": 16.88, + "learning_rate": 9.362244897959183e-05, + "loss": 1.1819, + "step": 834 + }, + { + "epoch": 16.9, + "learning_rate": 9.349489795918368e-05, + "loss": 1.1887, + "step": 835 + }, + { + "epoch": 16.92, + "learning_rate": 9.336734693877551e-05, + "loss": 1.1557, + "step": 836 + }, + { + "epoch": 16.94, + "learning_rate": 9.323979591836735e-05, + "loss": 1.2094, + "step": 837 + }, + { + "epoch": 16.96, + "learning_rate": 9.311224489795918e-05, + "loss": 1.1512, + "step": 838 + }, + { + "epoch": 16.98, + "learning_rate": 9.298469387755103e-05, + "loss": 1.1463, + "step": 839 + }, + { + "epoch": 17.0, + "learning_rate": 9.285714285714286e-05, + "loss": 1.155, + "step": 840 + }, + { + "epoch": 17.02, + "learning_rate": 9.27295918367347e-05, + "loss": 1.1292, + "step": 841 + }, + { + "epoch": 17.04, + "learning_rate": 9.260204081632653e-05, + "loss": 1.0996, + "step": 842 + }, + { + "epoch": 17.06, + "learning_rate": 9.247448979591838e-05, + "loss": 1.0662, + "step": 843 + }, + { + "epoch": 17.08, + "learning_rate": 9.234693877551021e-05, + "loss": 1.0931, + "step": 844 + }, + { + "epoch": 17.1, + "learning_rate": 9.221938775510205e-05, + "loss": 1.0727, + "step": 845 + }, + { + "epoch": 17.12, + "learning_rate": 9.209183673469388e-05, + "loss": 1.1043, + "step": 846 + }, + { + "epoch": 17.14, + "learning_rate": 9.196428571428572e-05, + "loss": 1.0594, + "step": 847 + }, + { + "epoch": 17.16, + "learning_rate": 9.183673469387756e-05, + "loss": 1.0952, + "step": 848 + }, + { + "epoch": 17.18, + "learning_rate": 9.170918367346939e-05, + "loss": 1.0639, + "step": 849 + }, + { + "epoch": 17.2, + "learning_rate": 9.158163265306124e-05, + "loss": 1.132, + "step": 850 + }, + { + "epoch": 17.22, + "learning_rate": 9.145408163265307e-05, + "loss": 1.1083, + "step": 851 + }, + { 
+ "epoch": 17.24, + "learning_rate": 9.13265306122449e-05, + "loss": 1.1282, + "step": 852 + }, + { + "epoch": 17.26, + "learning_rate": 9.119897959183674e-05, + "loss": 1.0474, + "step": 853 + }, + { + "epoch": 17.28, + "learning_rate": 9.107142857142857e-05, + "loss": 1.1138, + "step": 854 + }, + { + "epoch": 17.3, + "learning_rate": 9.094387755102042e-05, + "loss": 1.1025, + "step": 855 + }, + { + "epoch": 17.32, + "learning_rate": 9.081632653061225e-05, + "loss": 1.0968, + "step": 856 + }, + { + "epoch": 17.34, + "learning_rate": 9.068877551020408e-05, + "loss": 1.1683, + "step": 857 + }, + { + "epoch": 17.36, + "learning_rate": 9.056122448979592e-05, + "loss": 1.0975, + "step": 858 + }, + { + "epoch": 17.38, + "learning_rate": 9.043367346938775e-05, + "loss": 1.1274, + "step": 859 + }, + { + "epoch": 17.4, + "learning_rate": 9.030612244897958e-05, + "loss": 1.0916, + "step": 860 + }, + { + "epoch": 17.42, + "learning_rate": 9.017857142857143e-05, + "loss": 1.0912, + "step": 861 + }, + { + "epoch": 17.44, + "learning_rate": 9.005102040816327e-05, + "loss": 1.0875, + "step": 862 + }, + { + "epoch": 17.46, + "learning_rate": 8.99234693877551e-05, + "loss": 1.05, + "step": 863 + }, + { + "epoch": 17.48, + "learning_rate": 8.979591836734695e-05, + "loss": 1.1418, + "step": 864 + }, + { + "epoch": 17.5, + "learning_rate": 8.966836734693878e-05, + "loss": 1.0609, + "step": 865 + }, + { + "epoch": 17.52, + "learning_rate": 8.954081632653062e-05, + "loss": 1.1611, + "step": 866 + }, + { + "epoch": 17.54, + "learning_rate": 8.941326530612245e-05, + "loss": 1.1065, + "step": 867 + }, + { + "epoch": 17.56, + "learning_rate": 8.92857142857143e-05, + "loss": 1.1611, + "step": 868 + }, + { + "epoch": 17.58, + "learning_rate": 8.915816326530613e-05, + "loss": 1.1398, + "step": 869 + }, + { + "epoch": 17.6, + "learning_rate": 8.903061224489796e-05, + "loss": 1.1055, + "step": 870 + }, + { + "epoch": 17.62, + "learning_rate": 8.89030612244898e-05, + "loss": 1.1314, + "step": 871 + }, + { + "epoch": 17.64, + "learning_rate": 8.877551020408164e-05, + "loss": 1.1084, + "step": 872 + }, + { + "epoch": 17.66, + "learning_rate": 8.864795918367348e-05, + "loss": 1.1254, + "step": 873 + }, + { + "epoch": 17.68, + "learning_rate": 8.852040816326531e-05, + "loss": 1.142, + "step": 874 + }, + { + "epoch": 17.7, + "learning_rate": 8.839285714285714e-05, + "loss": 1.1371, + "step": 875 + }, + { + "epoch": 17.72, + "learning_rate": 8.826530612244899e-05, + "loss": 1.1092, + "step": 876 + }, + { + "epoch": 17.75, + "learning_rate": 8.813775510204082e-05, + "loss": 1.161, + "step": 877 + }, + { + "epoch": 17.77, + "learning_rate": 8.801020408163265e-05, + "loss": 1.1044, + "step": 878 + }, + { + "epoch": 17.79, + "learning_rate": 8.788265306122449e-05, + "loss": 1.117, + "step": 879 + }, + { + "epoch": 17.81, + "learning_rate": 8.775510204081632e-05, + "loss": 1.1262, + "step": 880 + }, + { + "epoch": 17.83, + "learning_rate": 8.762755102040817e-05, + "loss": 1.0829, + "step": 881 + }, + { + "epoch": 17.85, + "learning_rate": 8.75e-05, + "loss": 1.1393, + "step": 882 + }, + { + "epoch": 17.87, + "learning_rate": 8.737244897959183e-05, + "loss": 1.1781, + "step": 883 + }, + { + "epoch": 17.89, + "learning_rate": 8.724489795918367e-05, + "loss": 1.1582, + "step": 884 + }, + { + "epoch": 17.91, + "learning_rate": 8.711734693877552e-05, + "loss": 1.1469, + "step": 885 + }, + { + "epoch": 17.93, + "learning_rate": 8.698979591836735e-05, + "loss": 1.1494, + "step": 886 + }, + { + "epoch": 17.95, + "learning_rate": 
8.68622448979592e-05, + "loss": 1.1251, + "step": 887 + }, + { + "epoch": 17.97, + "learning_rate": 8.673469387755102e-05, + "loss": 1.1624, + "step": 888 + }, + { + "epoch": 17.99, + "learning_rate": 8.660714285714287e-05, + "loss": 1.0842, + "step": 889 + }, + { + "epoch": 18.01, + "learning_rate": 8.64795918367347e-05, + "loss": 1.1944, + "step": 890 + }, + { + "epoch": 18.03, + "learning_rate": 8.635204081632653e-05, + "loss": 1.0642, + "step": 891 + }, + { + "epoch": 18.05, + "learning_rate": 8.622448979591838e-05, + "loss": 1.0459, + "step": 892 + }, + { + "epoch": 18.07, + "learning_rate": 8.60969387755102e-05, + "loss": 1.0941, + "step": 893 + }, + { + "epoch": 18.09, + "learning_rate": 8.596938775510205e-05, + "loss": 1.0457, + "step": 894 + }, + { + "epoch": 18.11, + "learning_rate": 8.584183673469388e-05, + "loss": 1.1033, + "step": 895 + }, + { + "epoch": 18.13, + "learning_rate": 8.571428571428571e-05, + "loss": 1.0756, + "step": 896 + }, + { + "epoch": 18.15, + "learning_rate": 8.558673469387756e-05, + "loss": 1.0615, + "step": 897 + }, + { + "epoch": 18.17, + "learning_rate": 8.545918367346939e-05, + "loss": 1.0828, + "step": 898 + }, + { + "epoch": 18.19, + "learning_rate": 8.533163265306123e-05, + "loss": 1.1158, + "step": 899 + }, + { + "epoch": 18.21, + "learning_rate": 8.520408163265306e-05, + "loss": 1.0133, + "step": 900 + }, + { + "epoch": 18.23, + "learning_rate": 8.50765306122449e-05, + "loss": 1.0437, + "step": 901 + }, + { + "epoch": 18.25, + "learning_rate": 8.494897959183674e-05, + "loss": 1.0372, + "step": 902 + }, + { + "epoch": 18.27, + "learning_rate": 8.482142857142857e-05, + "loss": 1.1012, + "step": 903 + }, + { + "epoch": 18.29, + "learning_rate": 8.469387755102041e-05, + "loss": 1.0777, + "step": 904 + }, + { + "epoch": 18.31, + "learning_rate": 8.456632653061224e-05, + "loss": 1.0799, + "step": 905 + }, + { + "epoch": 18.33, + "learning_rate": 8.443877551020409e-05, + "loss": 0.9846, + "step": 906 + }, + { + "epoch": 18.35, + "learning_rate": 8.431122448979592e-05, + "loss": 1.1, + "step": 907 + }, + { + "epoch": 18.37, + "learning_rate": 8.418367346938776e-05, + "loss": 1.0787, + "step": 908 + }, + { + "epoch": 18.39, + "learning_rate": 8.40561224489796e-05, + "loss": 1.0647, + "step": 909 + }, + { + "epoch": 18.41, + "learning_rate": 8.392857142857144e-05, + "loss": 1.056, + "step": 910 + }, + { + "epoch": 18.43, + "learning_rate": 8.380102040816327e-05, + "loss": 1.1131, + "step": 911 + }, + { + "epoch": 18.45, + "learning_rate": 8.367346938775511e-05, + "loss": 1.0825, + "step": 912 + }, + { + "epoch": 18.47, + "learning_rate": 8.354591836734695e-05, + "loss": 1.0681, + "step": 913 + }, + { + "epoch": 18.49, + "learning_rate": 8.341836734693878e-05, + "loss": 1.0479, + "step": 914 + }, + { + "epoch": 18.51, + "learning_rate": 8.329081632653062e-05, + "loss": 1.0921, + "step": 915 + }, + { + "epoch": 18.53, + "learning_rate": 8.316326530612245e-05, + "loss": 1.0626, + "step": 916 + }, + { + "epoch": 18.55, + "learning_rate": 8.30357142857143e-05, + "loss": 1.0518, + "step": 917 + }, + { + "epoch": 18.57, + "learning_rate": 8.290816326530613e-05, + "loss": 1.0557, + "step": 918 + }, + { + "epoch": 18.6, + "learning_rate": 8.278061224489796e-05, + "loss": 1.0831, + "step": 919 + }, + { + "epoch": 18.62, + "learning_rate": 8.26530612244898e-05, + "loss": 1.0307, + "step": 920 + }, + { + "epoch": 18.64, + "learning_rate": 8.252551020408163e-05, + "loss": 1.0455, + "step": 921 + }, + { + "epoch": 18.66, + "learning_rate": 8.239795918367348e-05, + 
"loss": 1.0667, + "step": 922 + }, + { + "epoch": 18.68, + "learning_rate": 8.227040816326531e-05, + "loss": 1.0736, + "step": 923 + }, + { + "epoch": 18.7, + "learning_rate": 8.214285714285714e-05, + "loss": 1.0108, + "step": 924 + }, + { + "epoch": 18.72, + "learning_rate": 8.201530612244898e-05, + "loss": 1.0458, + "step": 925 + }, + { + "epoch": 18.74, + "learning_rate": 8.188775510204081e-05, + "loss": 1.0852, + "step": 926 + }, + { + "epoch": 18.76, + "learning_rate": 8.176020408163265e-05, + "loss": 1.1207, + "step": 927 + }, + { + "epoch": 18.78, + "learning_rate": 8.163265306122449e-05, + "loss": 1.0914, + "step": 928 + }, + { + "epoch": 18.8, + "learning_rate": 8.150510204081633e-05, + "loss": 1.1108, + "step": 929 + }, + { + "epoch": 18.82, + "learning_rate": 8.137755102040817e-05, + "loss": 1.1394, + "step": 930 + }, + { + "epoch": 18.84, + "learning_rate": 8.125000000000001e-05, + "loss": 1.029, + "step": 931 + }, + { + "epoch": 18.86, + "learning_rate": 8.112244897959184e-05, + "loss": 1.0661, + "step": 932 + }, + { + "epoch": 18.88, + "learning_rate": 8.099489795918369e-05, + "loss": 1.0303, + "step": 933 + }, + { + "epoch": 18.9, + "learning_rate": 8.086734693877552e-05, + "loss": 1.1144, + "step": 934 + }, + { + "epoch": 18.92, + "learning_rate": 8.073979591836736e-05, + "loss": 1.1096, + "step": 935 + }, + { + "epoch": 18.94, + "learning_rate": 8.061224489795919e-05, + "loss": 1.123, + "step": 936 + }, + { + "epoch": 18.96, + "learning_rate": 8.048469387755102e-05, + "loss": 1.1002, + "step": 937 + }, + { + "epoch": 18.98, + "learning_rate": 8.035714285714287e-05, + "loss": 1.1016, + "step": 938 + }, + { + "epoch": 19.0, + "learning_rate": 8.02295918367347e-05, + "loss": 1.0847, + "step": 939 + }, + { + "epoch": 19.02, + "learning_rate": 8.010204081632653e-05, + "loss": 1.1029, + "step": 940 + }, + { + "epoch": 19.04, + "learning_rate": 7.997448979591837e-05, + "loss": 1.041, + "step": 941 + }, + { + "epoch": 19.06, + "learning_rate": 7.98469387755102e-05, + "loss": 1.01, + "step": 942 + }, + { + "epoch": 19.08, + "learning_rate": 7.971938775510205e-05, + "loss": 1.0197, + "step": 943 + }, + { + "epoch": 19.1, + "learning_rate": 7.959183673469388e-05, + "loss": 1.0543, + "step": 944 + }, + { + "epoch": 19.12, + "learning_rate": 7.946428571428571e-05, + "loss": 1.0369, + "step": 945 + }, + { + "epoch": 19.14, + "learning_rate": 7.933673469387755e-05, + "loss": 1.0154, + "step": 946 + }, + { + "epoch": 19.16, + "learning_rate": 7.920918367346939e-05, + "loss": 0.9546, + "step": 947 + }, + { + "epoch": 19.18, + "learning_rate": 7.908163265306123e-05, + "loss": 0.9982, + "step": 948 + }, + { + "epoch": 19.2, + "learning_rate": 7.895408163265306e-05, + "loss": 1.0748, + "step": 949 + }, + { + "epoch": 19.22, + "learning_rate": 7.882653061224489e-05, + "loss": 1.0562, + "step": 950 + }, + { + "epoch": 19.24, + "learning_rate": 7.869897959183674e-05, + "loss": 1.0352, + "step": 951 + }, + { + "epoch": 19.26, + "learning_rate": 7.857142857142858e-05, + "loss": 0.9976, + "step": 952 + }, + { + "epoch": 19.28, + "learning_rate": 7.844387755102041e-05, + "loss": 1.0221, + "step": 953 + }, + { + "epoch": 19.3, + "learning_rate": 7.831632653061226e-05, + "loss": 1.0119, + "step": 954 + }, + { + "epoch": 19.32, + "learning_rate": 7.818877551020409e-05, + "loss": 1.0657, + "step": 955 + }, + { + "epoch": 19.34, + "learning_rate": 7.806122448979593e-05, + "loss": 0.9591, + "step": 956 + }, + { + "epoch": 19.36, + "learning_rate": 7.793367346938776e-05, + "loss": 1.0101, + "step": 957 + 
}, + { + "epoch": 19.38, + "learning_rate": 7.780612244897959e-05, + "loss": 1.0453, + "step": 958 + }, + { + "epoch": 19.4, + "learning_rate": 7.767857142857144e-05, + "loss": 1.0461, + "step": 959 + }, + { + "epoch": 19.42, + "learning_rate": 7.755102040816327e-05, + "loss": 1.0959, + "step": 960 + }, + { + "epoch": 19.44, + "learning_rate": 7.742346938775511e-05, + "loss": 1.0608, + "step": 961 + }, + { + "epoch": 19.47, + "learning_rate": 7.729591836734694e-05, + "loss": 1.1177, + "step": 962 + }, + { + "epoch": 19.49, + "learning_rate": 7.716836734693877e-05, + "loss": 1.0354, + "step": 963 + }, + { + "epoch": 19.51, + "learning_rate": 7.704081632653062e-05, + "loss": 1.0507, + "step": 964 + }, + { + "epoch": 19.53, + "learning_rate": 7.691326530612245e-05, + "loss": 1.0313, + "step": 965 + }, + { + "epoch": 19.55, + "learning_rate": 7.67857142857143e-05, + "loss": 1.0569, + "step": 966 + }, + { + "epoch": 19.57, + "learning_rate": 7.665816326530612e-05, + "loss": 1.0862, + "step": 967 + }, + { + "epoch": 19.59, + "learning_rate": 7.653061224489796e-05, + "loss": 1.0593, + "step": 968 + }, + { + "epoch": 19.61, + "learning_rate": 7.64030612244898e-05, + "loss": 1.0602, + "step": 969 + }, + { + "epoch": 19.63, + "learning_rate": 7.627551020408163e-05, + "loss": 1.0048, + "step": 970 + }, + { + "epoch": 19.65, + "learning_rate": 7.614795918367347e-05, + "loss": 1.0346, + "step": 971 + }, + { + "epoch": 19.67, + "learning_rate": 7.60204081632653e-05, + "loss": 1.0172, + "step": 972 + }, + { + "epoch": 19.69, + "learning_rate": 7.589285714285714e-05, + "loss": 1.02, + "step": 973 + }, + { + "epoch": 19.71, + "learning_rate": 7.576530612244898e-05, + "loss": 1.0028, + "step": 974 + }, + { + "epoch": 19.73, + "learning_rate": 7.563775510204083e-05, + "loss": 1.08, + "step": 975 + }, + { + "epoch": 19.75, + "learning_rate": 7.551020408163266e-05, + "loss": 1.0402, + "step": 976 + }, + { + "epoch": 19.77, + "learning_rate": 7.53826530612245e-05, + "loss": 1.0567, + "step": 977 + }, + { + "epoch": 19.79, + "learning_rate": 7.525510204081633e-05, + "loss": 1.0169, + "step": 978 + }, + { + "epoch": 19.81, + "learning_rate": 7.512755102040818e-05, + "loss": 0.9881, + "step": 979 + }, + { + "epoch": 19.83, + "learning_rate": 7.500000000000001e-05, + "loss": 1.0677, + "step": 980 + }, + { + "epoch": 19.85, + "learning_rate": 7.487244897959184e-05, + "loss": 1.1026, + "step": 981 + }, + { + "epoch": 19.87, + "learning_rate": 7.474489795918368e-05, + "loss": 1.0101, + "step": 982 + }, + { + "epoch": 19.89, + "learning_rate": 7.461734693877551e-05, + "loss": 1.069, + "step": 983 + }, + { + "epoch": 19.91, + "learning_rate": 7.448979591836736e-05, + "loss": 1.0493, + "step": 984 + }, + { + "epoch": 19.93, + "learning_rate": 7.436224489795919e-05, + "loss": 1.0858, + "step": 985 + }, + { + "epoch": 19.95, + "learning_rate": 7.423469387755102e-05, + "loss": 1.0734, + "step": 986 + }, + { + "epoch": 19.97, + "learning_rate": 7.410714285714286e-05, + "loss": 1.0203, + "step": 987 + }, + { + "epoch": 19.99, + "learning_rate": 7.39795918367347e-05, + "loss": 1.0285, + "step": 988 + }, + { + "epoch": 20.01, + "learning_rate": 7.385204081632653e-05, + "loss": 0.9446, + "step": 989 + }, + { + "epoch": 20.03, + "learning_rate": 7.372448979591837e-05, + "loss": 0.9915, + "step": 990 + }, + { + "epoch": 20.05, + "learning_rate": 7.35969387755102e-05, + "loss": 0.9882, + "step": 991 + }, + { + "epoch": 20.07, + "learning_rate": 7.346938775510205e-05, + "loss": 0.9338, + "step": 992 + }, + { + "epoch": 20.09, + 
"learning_rate": 7.334183673469388e-05, + "loss": 0.942, + "step": 993 + }, + { + "epoch": 20.11, + "learning_rate": 7.321428571428571e-05, + "loss": 0.9725, + "step": 994 + }, + { + "epoch": 20.13, + "learning_rate": 7.308673469387755e-05, + "loss": 1.027, + "step": 995 + }, + { + "epoch": 20.15, + "learning_rate": 7.29591836734694e-05, + "loss": 1.0081, + "step": 996 + }, + { + "epoch": 20.17, + "learning_rate": 7.283163265306123e-05, + "loss": 1.0117, + "step": 997 + }, + { + "epoch": 20.19, + "learning_rate": 7.270408163265307e-05, + "loss": 0.969, + "step": 998 + }, + { + "epoch": 20.21, + "learning_rate": 7.25765306122449e-05, + "loss": 1.0024, + "step": 999 + }, + { + "epoch": 20.23, + "learning_rate": 7.244897959183675e-05, + "loss": 0.994, + "step": 1000 + }, + { + "epoch": 20.25, + "learning_rate": 7.232142857142858e-05, + "loss": 1.0248, + "step": 1001 + }, + { + "epoch": 20.27, + "learning_rate": 7.219387755102042e-05, + "loss": 1.0493, + "step": 1002 + }, + { + "epoch": 20.29, + "learning_rate": 7.206632653061225e-05, + "loss": 1.0011, + "step": 1003 + }, + { + "epoch": 20.31, + "learning_rate": 7.193877551020408e-05, + "loss": 0.9874, + "step": 1004 + }, + { + "epoch": 20.34, + "learning_rate": 7.181122448979593e-05, + "loss": 1.0049, + "step": 1005 + }, + { + "epoch": 20.36, + "learning_rate": 7.168367346938776e-05, + "loss": 1.0314, + "step": 1006 + }, + { + "epoch": 20.38, + "learning_rate": 7.155612244897959e-05, + "loss": 0.9742, + "step": 1007 + }, + { + "epoch": 20.4, + "learning_rate": 7.142857142857143e-05, + "loss": 1.0621, + "step": 1008 + }, + { + "epoch": 20.42, + "learning_rate": 7.130102040816326e-05, + "loss": 0.9672, + "step": 1009 + }, + { + "epoch": 20.44, + "learning_rate": 7.117346938775511e-05, + "loss": 1.0018, + "step": 1010 + }, + { + "epoch": 20.46, + "learning_rate": 7.104591836734694e-05, + "loss": 1.0045, + "step": 1011 + }, + { + "epoch": 20.48, + "learning_rate": 7.091836734693877e-05, + "loss": 0.9675, + "step": 1012 + }, + { + "epoch": 20.5, + "learning_rate": 7.079081632653062e-05, + "loss": 0.976, + "step": 1013 + }, + { + "epoch": 20.52, + "learning_rate": 7.066326530612245e-05, + "loss": 1.0523, + "step": 1014 + }, + { + "epoch": 20.54, + "learning_rate": 7.053571428571429e-05, + "loss": 1.052, + "step": 1015 + }, + { + "epoch": 20.56, + "learning_rate": 7.040816326530612e-05, + "loss": 0.9903, + "step": 1016 + }, + { + "epoch": 20.58, + "learning_rate": 7.028061224489795e-05, + "loss": 1.0337, + "step": 1017 + }, + { + "epoch": 20.6, + "learning_rate": 7.01530612244898e-05, + "loss": 1.1122, + "step": 1018 + }, + { + "epoch": 20.62, + "learning_rate": 7.002551020408164e-05, + "loss": 1.0133, + "step": 1019 + }, + { + "epoch": 20.64, + "learning_rate": 6.989795918367347e-05, + "loss": 0.9588, + "step": 1020 + }, + { + "epoch": 20.66, + "learning_rate": 6.977040816326532e-05, + "loss": 0.9892, + "step": 1021 + }, + { + "epoch": 20.68, + "learning_rate": 6.964285714285715e-05, + "loss": 1.025, + "step": 1022 + }, + { + "epoch": 20.7, + "learning_rate": 6.951530612244899e-05, + "loss": 1.0196, + "step": 1023 + }, + { + "epoch": 20.72, + "learning_rate": 6.938775510204082e-05, + "loss": 1.0146, + "step": 1024 + }, + { + "epoch": 20.74, + "learning_rate": 6.926020408163265e-05, + "loss": 1.0656, + "step": 1025 + }, + { + "epoch": 20.76, + "learning_rate": 6.91326530612245e-05, + "loss": 0.9584, + "step": 1026 + }, + { + "epoch": 20.78, + "learning_rate": 6.900510204081633e-05, + "loss": 0.9877, + "step": 1027 + }, + { + "epoch": 20.8, + 
"learning_rate": 6.887755102040817e-05, + "loss": 1.0607, + "step": 1028 + }, + { + "epoch": 20.82, + "learning_rate": 6.875e-05, + "loss": 0.9969, + "step": 1029 + }, + { + "epoch": 20.84, + "learning_rate": 6.862244897959184e-05, + "loss": 0.9506, + "step": 1030 + }, + { + "epoch": 20.86, + "learning_rate": 6.849489795918368e-05, + "loss": 1.0576, + "step": 1031 + }, + { + "epoch": 20.88, + "learning_rate": 6.836734693877551e-05, + "loss": 1.0094, + "step": 1032 + }, + { + "epoch": 20.9, + "learning_rate": 6.823979591836735e-05, + "loss": 0.9872, + "step": 1033 + }, + { + "epoch": 20.92, + "learning_rate": 6.811224489795919e-05, + "loss": 1.0544, + "step": 1034 + }, + { + "epoch": 20.94, + "learning_rate": 6.798469387755102e-05, + "loss": 1.0194, + "step": 1035 + }, + { + "epoch": 20.96, + "learning_rate": 6.785714285714286e-05, + "loss": 1.0009, + "step": 1036 + }, + { + "epoch": 20.98, + "learning_rate": 6.772959183673469e-05, + "loss": 0.9727, + "step": 1037 + }, + { + "epoch": 21.0, + "learning_rate": 6.760204081632652e-05, + "loss": 0.9754, + "step": 1038 + }, + { + "epoch": 21.02, + "learning_rate": 6.747448979591837e-05, + "loss": 0.9953, + "step": 1039 + }, + { + "epoch": 21.04, + "learning_rate": 6.73469387755102e-05, + "loss": 0.9307, + "step": 1040 + }, + { + "epoch": 21.06, + "learning_rate": 6.721938775510204e-05, + "loss": 0.9151, + "step": 1041 + }, + { + "epoch": 21.08, + "learning_rate": 6.709183673469389e-05, + "loss": 0.9474, + "step": 1042 + }, + { + "epoch": 21.1, + "learning_rate": 6.696428571428572e-05, + "loss": 0.9697, + "step": 1043 + }, + { + "epoch": 21.12, + "learning_rate": 6.683673469387756e-05, + "loss": 0.9423, + "step": 1044 + }, + { + "epoch": 21.14, + "learning_rate": 6.670918367346939e-05, + "loss": 0.9797, + "step": 1045 + }, + { + "epoch": 21.16, + "learning_rate": 6.658163265306124e-05, + "loss": 0.919, + "step": 1046 + }, + { + "epoch": 21.18, + "learning_rate": 6.645408163265307e-05, + "loss": 0.9743, + "step": 1047 + }, + { + "epoch": 21.21, + "learning_rate": 6.63265306122449e-05, + "loss": 0.9575, + "step": 1048 + }, + { + "epoch": 21.23, + "learning_rate": 6.619897959183674e-05, + "loss": 0.9861, + "step": 1049 + }, + { + "epoch": 21.25, + "learning_rate": 6.607142857142857e-05, + "loss": 0.9103, + "step": 1050 + }, + { + "epoch": 21.27, + "learning_rate": 6.594387755102042e-05, + "loss": 0.993, + "step": 1051 + }, + { + "epoch": 21.29, + "learning_rate": 6.581632653061225e-05, + "loss": 0.9668, + "step": 1052 + }, + { + "epoch": 21.31, + "learning_rate": 6.568877551020408e-05, + "loss": 1.0008, + "step": 1053 + }, + { + "epoch": 21.33, + "learning_rate": 6.556122448979592e-05, + "loss": 0.9825, + "step": 1054 + }, + { + "epoch": 21.35, + "learning_rate": 6.543367346938776e-05, + "loss": 1.0174, + "step": 1055 + }, + { + "epoch": 21.37, + "learning_rate": 6.530612244897959e-05, + "loss": 0.9685, + "step": 1056 + }, + { + "epoch": 21.39, + "learning_rate": 6.517857142857143e-05, + "loss": 0.9265, + "step": 1057 + }, + { + "epoch": 21.41, + "learning_rate": 6.505102040816326e-05, + "loss": 0.9495, + "step": 1058 + }, + { + "epoch": 21.43, + "learning_rate": 6.49234693877551e-05, + "loss": 0.9541, + "step": 1059 + }, + { + "epoch": 21.45, + "learning_rate": 6.479591836734694e-05, + "loss": 0.9299, + "step": 1060 + }, + { + "epoch": 21.47, + "learning_rate": 6.466836734693877e-05, + "loss": 0.9625, + "step": 1061 + }, + { + "epoch": 21.49, + "learning_rate": 6.454081632653061e-05, + "loss": 1.0054, + "step": 1062 + }, + { + "epoch": 21.51, + 
"learning_rate": 6.441326530612244e-05, + "loss": 0.9893, + "step": 1063 + }, + { + "epoch": 21.53, + "learning_rate": 6.428571428571429e-05, + "loss": 0.9906, + "step": 1064 + }, + { + "epoch": 21.55, + "learning_rate": 6.415816326530613e-05, + "loss": 0.9487, + "step": 1065 + }, + { + "epoch": 21.57, + "learning_rate": 6.403061224489796e-05, + "loss": 0.9728, + "step": 1066 + }, + { + "epoch": 21.59, + "learning_rate": 6.390306122448981e-05, + "loss": 0.9883, + "step": 1067 + }, + { + "epoch": 21.61, + "learning_rate": 6.377551020408164e-05, + "loss": 1.053, + "step": 1068 + }, + { + "epoch": 21.63, + "learning_rate": 6.364795918367348e-05, + "loss": 1.012, + "step": 1069 + }, + { + "epoch": 21.65, + "learning_rate": 6.352040816326531e-05, + "loss": 0.962, + "step": 1070 + }, + { + "epoch": 21.67, + "learning_rate": 6.339285714285714e-05, + "loss": 0.9955, + "step": 1071 + }, + { + "epoch": 21.69, + "learning_rate": 6.326530612244899e-05, + "loss": 0.9908, + "step": 1072 + }, + { + "epoch": 21.71, + "learning_rate": 6.313775510204082e-05, + "loss": 1.0327, + "step": 1073 + }, + { + "epoch": 21.73, + "learning_rate": 6.301020408163265e-05, + "loss": 0.9255, + "step": 1074 + }, + { + "epoch": 21.75, + "learning_rate": 6.28826530612245e-05, + "loss": 0.9268, + "step": 1075 + }, + { + "epoch": 21.77, + "learning_rate": 6.275510204081633e-05, + "loss": 0.9204, + "step": 1076 + }, + { + "epoch": 21.79, + "learning_rate": 6.262755102040817e-05, + "loss": 0.9838, + "step": 1077 + }, + { + "epoch": 21.81, + "learning_rate": 6.25e-05, + "loss": 0.954, + "step": 1078 + }, + { + "epoch": 21.83, + "learning_rate": 6.237244897959183e-05, + "loss": 1.0102, + "step": 1079 + }, + { + "epoch": 21.85, + "learning_rate": 6.224489795918368e-05, + "loss": 0.916, + "step": 1080 + }, + { + "epoch": 21.87, + "learning_rate": 6.211734693877551e-05, + "loss": 0.9939, + "step": 1081 + }, + { + "epoch": 21.89, + "learning_rate": 6.198979591836735e-05, + "loss": 0.9675, + "step": 1082 + }, + { + "epoch": 21.91, + "learning_rate": 6.186224489795918e-05, + "loss": 0.9666, + "step": 1083 + }, + { + "epoch": 21.93, + "learning_rate": 6.173469387755101e-05, + "loss": 0.9919, + "step": 1084 + }, + { + "epoch": 21.95, + "learning_rate": 6.160714285714286e-05, + "loss": 1.0106, + "step": 1085 + }, + { + "epoch": 21.97, + "learning_rate": 6.14795918367347e-05, + "loss": 0.9982, + "step": 1086 + }, + { + "epoch": 21.99, + "learning_rate": 6.135204081632653e-05, + "loss": 1.0137, + "step": 1087 + }, + { + "epoch": 22.01, + "learning_rate": 6.122448979591838e-05, + "loss": 0.9331, + "step": 1088 + }, + { + "epoch": 22.03, + "learning_rate": 6.109693877551021e-05, + "loss": 0.8834, + "step": 1089 + }, + { + "epoch": 22.06, + "learning_rate": 6.0969387755102046e-05, + "loss": 0.9757, + "step": 1090 + }, + { + "epoch": 22.08, + "learning_rate": 6.084183673469388e-05, + "loss": 0.9038, + "step": 1091 + }, + { + "epoch": 22.1, + "learning_rate": 6.0714285714285715e-05, + "loss": 0.9097, + "step": 1092 + }, + { + "epoch": 22.12, + "learning_rate": 6.058673469387756e-05, + "loss": 0.8972, + "step": 1093 + }, + { + "epoch": 22.14, + "learning_rate": 6.045918367346939e-05, + "loss": 0.8825, + "step": 1094 + }, + { + "epoch": 22.16, + "learning_rate": 6.0331632653061234e-05, + "loss": 0.9814, + "step": 1095 + }, + { + "epoch": 22.18, + "learning_rate": 6.0204081632653065e-05, + "loss": 0.9874, + "step": 1096 + }, + { + "epoch": 22.2, + "learning_rate": 6.0076530612244896e-05, + "loss": 0.912, + "step": 1097 + }, + { + "epoch": 22.22, + 
"learning_rate": 5.994897959183674e-05, + "loss": 0.9206, + "step": 1098 + }, + { + "epoch": 22.24, + "learning_rate": 5.982142857142857e-05, + "loss": 0.9497, + "step": 1099 + }, + { + "epoch": 22.26, + "learning_rate": 5.9693877551020416e-05, + "loss": 0.9269, + "step": 1100 + } + ], + "logging_steps": 1, + "max_steps": 1568, + "num_train_epochs": 32, + "save_steps": 100, + "total_flos": 1.628705955351122e+18, + "trial_name": null, + "trial_params": null +} diff --git a/checkpoint-1100/training_args.bin b/checkpoint-1100/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..db23e07d097c18532e52f58a70eb72d22e39c8c1 --- /dev/null +++ b/checkpoint-1100/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b610cbc4242bb50b4985b00e205994ae514fec6d9e2273f2b545a583a07b154b +size 4155 diff --git a/checkpoint-1200/README.md b/checkpoint-1200/README.md new file mode 100644 index 0000000000000000000000000000000000000000..08371015f02382e6fcba318f4aaea54ae52cd3c4 --- /dev/null +++ b/checkpoint-1200/README.md @@ -0,0 +1,34 @@ +--- +library_name: peft +--- +## Training procedure + + +The following `bitsandbytes` quantization config was used during training: +- quant_method: bitsandbytes +- load_in_8bit: True +- load_in_4bit: False +- llm_int8_threshold: 6.0 +- llm_int8_skip_modules: None +- llm_int8_enable_fp32_cpu_offload: False +- llm_int8_has_fp16_weight: False +- bnb_4bit_quant_type: fp4 +- bnb_4bit_use_double_quant: False +- bnb_4bit_compute_dtype: float32 + +The following `bitsandbytes` quantization config was used during training: +- quant_method: bitsandbytes +- load_in_8bit: True +- load_in_4bit: False +- llm_int8_threshold: 6.0 +- llm_int8_skip_modules: None +- llm_int8_enable_fp32_cpu_offload: False +- llm_int8_has_fp16_weight: False +- bnb_4bit_quant_type: fp4 +- bnb_4bit_use_double_quant: False +- bnb_4bit_compute_dtype: float32 +### Framework versions + +- PEFT 0.6.0.dev0 + +- PEFT 0.6.0.dev0 diff --git a/checkpoint-1200/adapter_config.json b/checkpoint-1200/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..751d838ac0c1ae5ca71ca448b25d7a8a0173f01b --- /dev/null +++ b/checkpoint-1200/adapter_config.json @@ -0,0 +1,23 @@ +{ + "auto_mapping": null, + "base_model_name_or_path": "bigscience/bloomz-3b", + "bias": "none", + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": true, + "layers_pattern": null, + "layers_to_transform": null, + "lora_alpha": 16, + "lora_dropout": 0.0, + "modules_to_save": null, + "peft_type": "LORA", + "r": 8, + "revision": null, + "target_modules": [ + "dense_4h_to_h", + "dense", + "dense_h_to_4h", + "query_key_value" + ], + "task_type": "CAUSAL_LM" +} \ No newline at end of file diff --git a/checkpoint-1200/adapter_model.bin b/checkpoint-1200/adapter_model.bin new file mode 100644 index 0000000000000000000000000000000000000000..fe838bfb517b69e550de232754730118ea65bb04 --- /dev/null +++ b/checkpoint-1200/adapter_model.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d13f1953081527a4a05ac5762a6e373fae509922a72baa18bfbc0ea2fdd0db77 +size 39409357 diff --git a/checkpoint-1200/optimizer.pt b/checkpoint-1200/optimizer.pt new file mode 100644 index 0000000000000000000000000000000000000000..2a557a97984c4cc74844fb337ee8e2cc0c298b34 --- /dev/null +++ b/checkpoint-1200/optimizer.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4b4d37b67055d94cd6568b24b76e241acc964153f215407a64e94710d00d1e06 +size 
78844421 diff --git a/checkpoint-1200/rng_state.pth b/checkpoint-1200/rng_state.pth new file mode 100644 index 0000000000000000000000000000000000000000..fead25ae6e1694df464049cc69133704457634ec --- /dev/null +++ b/checkpoint-1200/rng_state.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b32bbef891dbdd3524f32551a03a56bb081b6c86879cea3d24ae28b3c8f4bc5f +size 14575 diff --git a/checkpoint-1200/scheduler.pt b/checkpoint-1200/scheduler.pt new file mode 100644 index 0000000000000000000000000000000000000000..6552b7f80e3e5fe7062d2376620abb1ac8c373f3 --- /dev/null +++ b/checkpoint-1200/scheduler.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:13122a9f43d31f9abd38d193f16e53342905df794d9995e7f5483234c3e54c54 +size 627 diff --git a/checkpoint-1200/special_tokens_map.json b/checkpoint-1200/special_tokens_map.json new file mode 100644 index 0000000000000000000000000000000000000000..fdafe480f024ff444c7492147536765ce5d55a2d --- /dev/null +++ b/checkpoint-1200/special_tokens_map.json @@ -0,0 +1,6 @@ +{ + "bos_token": "<s>", + "eos_token": "</s>", + "pad_token": "<pad>", + "unk_token": "<unk>" +} diff --git a/checkpoint-1200/tokenizer.json b/checkpoint-1200/tokenizer.json new file mode 100644 index 0000000000000000000000000000000000000000..dbf002cafbd4818dcff2abc9156c088d681b4533 --- /dev/null +++ b/checkpoint-1200/tokenizer.json @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:17a208233d2ee8d8c83b23bc214df737c44806a1919f444e89b31e586cd956ba +size 14500471 diff --git a/checkpoint-1200/tokenizer_config.json b/checkpoint-1200/tokenizer_config.json new file mode 100644 index 0000000000000000000000000000000000000000..4b56cc9c2965c07132c35df3e2972e93d98c82c3 --- /dev/null +++ b/checkpoint-1200/tokenizer_config.json @@ -0,0 +1,10 @@ +{ + "add_prefix_space": false, + "bos_token": "<s>", + "clean_up_tokenization_spaces": false, + "eos_token": "</s>", + "model_max_length": 1000000000000000019884624838656, + "pad_token": "<pad>", + "tokenizer_class": "BloomTokenizer", + "unk_token": "<unk>" +} diff --git a/checkpoint-1200/trainer_state.json b/checkpoint-1200/trainer_state.json new file mode 100644 index 0000000000000000000000000000000000000000..e6d8ccf4d007ba938f1e3a0c7b69dfee967403ff --- /dev/null +++ b/checkpoint-1200/trainer_state.json @@ -0,0 +1,7219 @@ +{ + "best_metric": null, + "best_model_checkpoint": null, + "epoch": 24.28074612709453, + "eval_steps": 500, + "global_step": 1200, + "is_hyper_param_search": false, + "is_local_process_zero": true, + "is_world_process_zero": true, + "log_history": [ + { + "epoch": 0.02, + "learning_rate": 0.00019987244897959184, + "loss": 3.2215, + "step": 1 + }, + { + "epoch": 0.04, + "learning_rate": 0.00019974489795918367, + "loss": 2.8365, + "step": 2 + }, + { + "epoch": 0.06, + "learning_rate": 0.00019961734693877553, + "loss": 2.602, + "step": 3 + }, + { + "epoch": 0.08, + "learning_rate": 0.00019948979591836736, + "loss": 2.4196, + "step": 4 + }, + { + "epoch": 0.1, + "learning_rate": 0.0001993622448979592, + "loss": 2.2574, + "step": 5 + }, + { + "epoch": 0.12, + "learning_rate": 0.00019923469387755102, + "loss": 2.2239, + "step": 6 + }, + { + "epoch": 0.14, + "learning_rate": 0.00019910714285714288, + "loss": 2.1661, + "step": 7 + }, + { + "epoch": 0.16, + "learning_rate": 0.0001989795918367347, + "loss": 2.0987, + "step": 8 + }, + { + "epoch": 0.18, + "learning_rate": 0.00019885204081632654, + "loss": 2.015, + "step": 9 + }, + { + "epoch": 0.2, + "learning_rate": 0.00019872448979591837, + "loss": 1.9771, + "step": 10 + }, + {
+ "epoch": 0.22, + "learning_rate": 0.00019859693877551023, + "loss": 2.0271, + "step": 11 + }, + { + "epoch": 0.24, + "learning_rate": 0.00019846938775510203, + "loss": 1.9812, + "step": 12 + }, + { + "epoch": 0.26, + "learning_rate": 0.0001983418367346939, + "loss": 2.0834, + "step": 13 + }, + { + "epoch": 0.28, + "learning_rate": 0.00019821428571428572, + "loss": 1.9174, + "step": 14 + }, + { + "epoch": 0.3, + "learning_rate": 0.00019808673469387755, + "loss": 1.8409, + "step": 15 + }, + { + "epoch": 0.32, + "learning_rate": 0.00019795918367346938, + "loss": 1.929, + "step": 16 + }, + { + "epoch": 0.34, + "learning_rate": 0.00019783163265306124, + "loss": 2.0041, + "step": 17 + }, + { + "epoch": 0.36, + "learning_rate": 0.00019770408163265305, + "loss": 1.9385, + "step": 18 + }, + { + "epoch": 0.38, + "learning_rate": 0.0001975765306122449, + "loss": 1.9592, + "step": 19 + }, + { + "epoch": 0.4, + "learning_rate": 0.00019744897959183674, + "loss": 1.9701, + "step": 20 + }, + { + "epoch": 0.42, + "learning_rate": 0.0001973214285714286, + "loss": 1.9277, + "step": 21 + }, + { + "epoch": 0.45, + "learning_rate": 0.00019719387755102042, + "loss": 1.8394, + "step": 22 + }, + { + "epoch": 0.47, + "learning_rate": 0.00019706632653061226, + "loss": 1.8666, + "step": 23 + }, + { + "epoch": 0.49, + "learning_rate": 0.00019693877551020409, + "loss": 1.8997, + "step": 24 + }, + { + "epoch": 0.51, + "learning_rate": 0.00019681122448979592, + "loss": 1.9432, + "step": 25 + }, + { + "epoch": 0.53, + "learning_rate": 0.00019668367346938777, + "loss": 1.9137, + "step": 26 + }, + { + "epoch": 0.55, + "learning_rate": 0.0001965561224489796, + "loss": 1.905, + "step": 27 + }, + { + "epoch": 0.57, + "learning_rate": 0.00019642857142857144, + "loss": 1.8708, + "step": 28 + }, + { + "epoch": 0.59, + "learning_rate": 0.00019630102040816327, + "loss": 1.9097, + "step": 29 + }, + { + "epoch": 0.61, + "learning_rate": 0.00019617346938775513, + "loss": 1.896, + "step": 30 + }, + { + "epoch": 0.63, + "learning_rate": 0.00019604591836734696, + "loss": 1.8834, + "step": 31 + }, + { + "epoch": 0.65, + "learning_rate": 0.0001959183673469388, + "loss": 1.8323, + "step": 32 + }, + { + "epoch": 0.67, + "learning_rate": 0.00019579081632653062, + "loss": 1.804, + "step": 33 + }, + { + "epoch": 0.69, + "learning_rate": 0.00019566326530612248, + "loss": 1.8906, + "step": 34 + }, + { + "epoch": 0.71, + "learning_rate": 0.00019553571428571428, + "loss": 1.8693, + "step": 35 + }, + { + "epoch": 0.73, + "learning_rate": 0.00019540816326530614, + "loss": 1.9308, + "step": 36 + }, + { + "epoch": 0.75, + "learning_rate": 0.00019528061224489797, + "loss": 1.8082, + "step": 37 + }, + { + "epoch": 0.77, + "learning_rate": 0.0001951530612244898, + "loss": 1.848, + "step": 38 + }, + { + "epoch": 0.79, + "learning_rate": 0.00019502551020408163, + "loss": 1.8866, + "step": 39 + }, + { + "epoch": 0.81, + "learning_rate": 0.0001948979591836735, + "loss": 1.7844, + "step": 40 + }, + { + "epoch": 0.83, + "learning_rate": 0.0001947704081632653, + "loss": 1.8485, + "step": 41 + }, + { + "epoch": 0.85, + "learning_rate": 0.00019464285714285715, + "loss": 1.7917, + "step": 42 + }, + { + "epoch": 0.87, + "learning_rate": 0.00019451530612244898, + "loss": 1.7342, + "step": 43 + }, + { + "epoch": 0.89, + "learning_rate": 0.00019438775510204084, + "loss": 1.8479, + "step": 44 + }, + { + "epoch": 0.91, + "learning_rate": 0.00019426020408163267, + "loss": 1.8639, + "step": 45 + }, + { + "epoch": 0.93, + "learning_rate": 0.0001941326530612245, + "loss": 
1.8166, + "step": 46 + }, + { + "epoch": 0.95, + "learning_rate": 0.00019400510204081633, + "loss": 1.7566, + "step": 47 + }, + { + "epoch": 0.97, + "learning_rate": 0.00019387755102040816, + "loss": 1.8071, + "step": 48 + }, + { + "epoch": 0.99, + "learning_rate": 0.00019375000000000002, + "loss": 1.8612, + "step": 49 + }, + { + "epoch": 1.01, + "learning_rate": 0.00019362244897959185, + "loss": 1.7819, + "step": 50 + }, + { + "epoch": 1.03, + "learning_rate": 0.00019349489795918368, + "loss": 1.8647, + "step": 51 + }, + { + "epoch": 1.05, + "learning_rate": 0.0001933673469387755, + "loss": 1.8196, + "step": 52 + }, + { + "epoch": 1.07, + "learning_rate": 0.00019323979591836737, + "loss": 1.8027, + "step": 53 + }, + { + "epoch": 1.09, + "learning_rate": 0.00019311224489795917, + "loss": 1.8927, + "step": 54 + }, + { + "epoch": 1.11, + "learning_rate": 0.00019298469387755103, + "loss": 1.8481, + "step": 55 + }, + { + "epoch": 1.13, + "learning_rate": 0.00019285714285714286, + "loss": 1.7781, + "step": 56 + }, + { + "epoch": 1.15, + "learning_rate": 0.00019272959183673472, + "loss": 1.8101, + "step": 57 + }, + { + "epoch": 1.17, + "learning_rate": 0.00019260204081632653, + "loss": 1.7257, + "step": 58 + }, + { + "epoch": 1.19, + "learning_rate": 0.00019247448979591838, + "loss": 1.8185, + "step": 59 + }, + { + "epoch": 1.21, + "learning_rate": 0.00019234693877551021, + "loss": 1.8557, + "step": 60 + }, + { + "epoch": 1.23, + "learning_rate": 0.00019221938775510204, + "loss": 1.7418, + "step": 61 + }, + { + "epoch": 1.25, + "learning_rate": 0.00019209183673469388, + "loss": 1.6879, + "step": 62 + }, + { + "epoch": 1.27, + "learning_rate": 0.00019196428571428573, + "loss": 1.7651, + "step": 63 + }, + { + "epoch": 1.29, + "learning_rate": 0.00019183673469387756, + "loss": 1.7759, + "step": 64 + }, + { + "epoch": 1.32, + "learning_rate": 0.0001917091836734694, + "loss": 1.7691, + "step": 65 + }, + { + "epoch": 1.34, + "learning_rate": 0.00019158163265306123, + "loss": 1.7794, + "step": 66 + }, + { + "epoch": 1.36, + "learning_rate": 0.00019145408163265306, + "loss": 1.8152, + "step": 67 + }, + { + "epoch": 1.38, + "learning_rate": 0.00019132653061224492, + "loss": 1.8052, + "step": 68 + }, + { + "epoch": 1.4, + "learning_rate": 0.00019119897959183675, + "loss": 1.8054, + "step": 69 + }, + { + "epoch": 1.42, + "learning_rate": 0.00019107142857142858, + "loss": 1.8114, + "step": 70 + }, + { + "epoch": 1.44, + "learning_rate": 0.0001909438775510204, + "loss": 1.7749, + "step": 71 + }, + { + "epoch": 1.46, + "learning_rate": 0.00019081632653061227, + "loss": 1.777, + "step": 72 + }, + { + "epoch": 1.48, + "learning_rate": 0.0001906887755102041, + "loss": 1.7896, + "step": 73 + }, + { + "epoch": 1.5, + "learning_rate": 0.00019056122448979593, + "loss": 1.8335, + "step": 74 + }, + { + "epoch": 1.52, + "learning_rate": 0.00019043367346938776, + "loss": 1.8155, + "step": 75 + }, + { + "epoch": 1.54, + "learning_rate": 0.00019030612244897962, + "loss": 1.8224, + "step": 76 + }, + { + "epoch": 1.56, + "learning_rate": 0.00019017857142857142, + "loss": 1.7889, + "step": 77 + }, + { + "epoch": 1.58, + "learning_rate": 0.00019005102040816328, + "loss": 1.8866, + "step": 78 + }, + { + "epoch": 1.6, + "learning_rate": 0.0001899234693877551, + "loss": 1.8439, + "step": 79 + }, + { + "epoch": 1.62, + "learning_rate": 0.00018979591836734697, + "loss": 1.7906, + "step": 80 + }, + { + "epoch": 1.64, + "learning_rate": 0.00018966836734693877, + "loss": 1.8627, + "step": 81 + }, + { + "epoch": 1.66, + 
"learning_rate": 0.00018954081632653063, + "loss": 1.7497, + "step": 82 + }, + { + "epoch": 1.68, + "learning_rate": 0.00018941326530612246, + "loss": 1.7936, + "step": 83 + }, + { + "epoch": 1.7, + "learning_rate": 0.0001892857142857143, + "loss": 1.8341, + "step": 84 + }, + { + "epoch": 1.72, + "learning_rate": 0.00018915816326530612, + "loss": 1.7868, + "step": 85 + }, + { + "epoch": 1.74, + "learning_rate": 0.00018903061224489798, + "loss": 1.7493, + "step": 86 + }, + { + "epoch": 1.76, + "learning_rate": 0.0001889030612244898, + "loss": 1.7926, + "step": 87 + }, + { + "epoch": 1.78, + "learning_rate": 0.00018877551020408164, + "loss": 1.8278, + "step": 88 + }, + { + "epoch": 1.8, + "learning_rate": 0.00018864795918367347, + "loss": 1.7387, + "step": 89 + }, + { + "epoch": 1.82, + "learning_rate": 0.0001885204081632653, + "loss": 1.7669, + "step": 90 + }, + { + "epoch": 1.84, + "learning_rate": 0.00018839285714285716, + "loss": 1.7686, + "step": 91 + }, + { + "epoch": 1.86, + "learning_rate": 0.000188265306122449, + "loss": 1.7759, + "step": 92 + }, + { + "epoch": 1.88, + "learning_rate": 0.00018813775510204082, + "loss": 1.7016, + "step": 93 + }, + { + "epoch": 1.9, + "learning_rate": 0.00018801020408163265, + "loss": 1.8123, + "step": 94 + }, + { + "epoch": 1.92, + "learning_rate": 0.0001878826530612245, + "loss": 1.8315, + "step": 95 + }, + { + "epoch": 1.94, + "learning_rate": 0.00018775510204081634, + "loss": 1.7679, + "step": 96 + }, + { + "epoch": 1.96, + "learning_rate": 0.00018762755102040817, + "loss": 1.7874, + "step": 97 + }, + { + "epoch": 1.98, + "learning_rate": 0.0001875, + "loss": 1.8008, + "step": 98 + }, + { + "epoch": 2.0, + "learning_rate": 0.00018737244897959186, + "loss": 1.7177, + "step": 99 + }, + { + "epoch": 2.02, + "learning_rate": 0.00018724489795918367, + "loss": 1.7272, + "step": 100 + }, + { + "epoch": 2.04, + "learning_rate": 0.00018711734693877552, + "loss": 1.7848, + "step": 101 + }, + { + "epoch": 2.06, + "learning_rate": 0.00018698979591836735, + "loss": 1.744, + "step": 102 + }, + { + "epoch": 2.08, + "learning_rate": 0.00018686224489795919, + "loss": 1.7005, + "step": 103 + }, + { + "epoch": 2.1, + "learning_rate": 0.00018673469387755102, + "loss": 1.8247, + "step": 104 + }, + { + "epoch": 2.12, + "learning_rate": 0.00018660714285714287, + "loss": 1.6855, + "step": 105 + }, + { + "epoch": 2.14, + "learning_rate": 0.0001864795918367347, + "loss": 1.7627, + "step": 106 + }, + { + "epoch": 2.17, + "learning_rate": 0.00018635204081632654, + "loss": 1.7564, + "step": 107 + }, + { + "epoch": 2.19, + "learning_rate": 0.00018622448979591837, + "loss": 1.8237, + "step": 108 + }, + { + "epoch": 2.21, + "learning_rate": 0.00018609693877551022, + "loss": 1.7421, + "step": 109 + }, + { + "epoch": 2.23, + "learning_rate": 0.00018596938775510206, + "loss": 1.7517, + "step": 110 + }, + { + "epoch": 2.25, + "learning_rate": 0.0001858418367346939, + "loss": 1.7515, + "step": 111 + }, + { + "epoch": 2.27, + "learning_rate": 0.00018571428571428572, + "loss": 1.7842, + "step": 112 + }, + { + "epoch": 2.29, + "learning_rate": 0.00018558673469387755, + "loss": 1.8001, + "step": 113 + }, + { + "epoch": 2.31, + "learning_rate": 0.0001854591836734694, + "loss": 1.7653, + "step": 114 + }, + { + "epoch": 2.33, + "learning_rate": 0.00018533163265306124, + "loss": 1.694, + "step": 115 + }, + { + "epoch": 2.35, + "learning_rate": 0.00018520408163265307, + "loss": 1.7457, + "step": 116 + }, + { + "epoch": 2.37, + "learning_rate": 0.0001850765306122449, + "loss": 1.7899, + 
"step": 117 + }, + { + "epoch": 2.39, + "learning_rate": 0.00018494897959183676, + "loss": 1.7473, + "step": 118 + }, + { + "epoch": 2.41, + "learning_rate": 0.0001848214285714286, + "loss": 1.6639, + "step": 119 + }, + { + "epoch": 2.43, + "learning_rate": 0.00018469387755102042, + "loss": 1.762, + "step": 120 + }, + { + "epoch": 2.45, + "learning_rate": 0.00018456632653061225, + "loss": 1.7378, + "step": 121 + }, + { + "epoch": 2.47, + "learning_rate": 0.0001844387755102041, + "loss": 1.672, + "step": 122 + }, + { + "epoch": 2.49, + "learning_rate": 0.0001843112244897959, + "loss": 1.7267, + "step": 123 + }, + { + "epoch": 2.51, + "learning_rate": 0.00018418367346938777, + "loss": 1.7825, + "step": 124 + }, + { + "epoch": 2.53, + "learning_rate": 0.0001840561224489796, + "loss": 1.7566, + "step": 125 + }, + { + "epoch": 2.55, + "learning_rate": 0.00018392857142857143, + "loss": 1.8169, + "step": 126 + }, + { + "epoch": 2.57, + "learning_rate": 0.00018380102040816326, + "loss": 1.6801, + "step": 127 + }, + { + "epoch": 2.59, + "learning_rate": 0.00018367346938775512, + "loss": 1.7292, + "step": 128 + }, + { + "epoch": 2.61, + "learning_rate": 0.00018354591836734695, + "loss": 1.737, + "step": 129 + }, + { + "epoch": 2.63, + "learning_rate": 0.00018341836734693878, + "loss": 1.7696, + "step": 130 + }, + { + "epoch": 2.65, + "learning_rate": 0.0001832908163265306, + "loss": 1.7239, + "step": 131 + }, + { + "epoch": 2.67, + "learning_rate": 0.00018316326530612247, + "loss": 1.7441, + "step": 132 + }, + { + "epoch": 2.69, + "learning_rate": 0.0001830357142857143, + "loss": 1.7825, + "step": 133 + }, + { + "epoch": 2.71, + "learning_rate": 0.00018290816326530613, + "loss": 1.7411, + "step": 134 + }, + { + "epoch": 2.73, + "learning_rate": 0.00018278061224489796, + "loss": 1.7119, + "step": 135 + }, + { + "epoch": 2.75, + "learning_rate": 0.0001826530612244898, + "loss": 1.7443, + "step": 136 + }, + { + "epoch": 2.77, + "learning_rate": 0.00018252551020408165, + "loss": 1.7197, + "step": 137 + }, + { + "epoch": 2.79, + "learning_rate": 0.00018239795918367348, + "loss": 1.7273, + "step": 138 + }, + { + "epoch": 2.81, + "learning_rate": 0.0001822704081632653, + "loss": 1.7681, + "step": 139 + }, + { + "epoch": 2.83, + "learning_rate": 0.00018214285714285714, + "loss": 1.8088, + "step": 140 + }, + { + "epoch": 2.85, + "learning_rate": 0.000182015306122449, + "loss": 1.7301, + "step": 141 + }, + { + "epoch": 2.87, + "learning_rate": 0.00018188775510204083, + "loss": 1.6853, + "step": 142 + }, + { + "epoch": 2.89, + "learning_rate": 0.00018176020408163266, + "loss": 1.6966, + "step": 143 + }, + { + "epoch": 2.91, + "learning_rate": 0.0001816326530612245, + "loss": 1.7938, + "step": 144 + }, + { + "epoch": 2.93, + "learning_rate": 0.00018150510204081635, + "loss": 1.7639, + "step": 145 + }, + { + "epoch": 2.95, + "learning_rate": 0.00018137755102040816, + "loss": 1.7527, + "step": 146 + }, + { + "epoch": 2.97, + "learning_rate": 0.00018125000000000001, + "loss": 1.7386, + "step": 147 + }, + { + "epoch": 2.99, + "learning_rate": 0.00018112244897959185, + "loss": 1.7223, + "step": 148 + }, + { + "epoch": 3.01, + "learning_rate": 0.00018099489795918368, + "loss": 1.7571, + "step": 149 + }, + { + "epoch": 3.04, + "learning_rate": 0.0001808673469387755, + "loss": 1.7054, + "step": 150 + }, + { + "epoch": 3.06, + "learning_rate": 0.00018073979591836737, + "loss": 1.6581, + "step": 151 + }, + { + "epoch": 3.08, + "learning_rate": 0.00018061224489795917, + "loss": 1.681, + "step": 152 + }, + { + "epoch": 
3.1, + "learning_rate": 0.00018048469387755103, + "loss": 1.7425, + "step": 153 + }, + { + "epoch": 3.12, + "learning_rate": 0.00018035714285714286, + "loss": 1.7108, + "step": 154 + }, + { + "epoch": 3.14, + "learning_rate": 0.00018022959183673472, + "loss": 1.7194, + "step": 155 + }, + { + "epoch": 3.16, + "learning_rate": 0.00018010204081632655, + "loss": 1.6953, + "step": 156 + }, + { + "epoch": 3.18, + "learning_rate": 0.00017997448979591838, + "loss": 1.669, + "step": 157 + }, + { + "epoch": 3.2, + "learning_rate": 0.0001798469387755102, + "loss": 1.744, + "step": 158 + }, + { + "epoch": 3.22, + "learning_rate": 0.00017971938775510204, + "loss": 1.6467, + "step": 159 + }, + { + "epoch": 3.24, + "learning_rate": 0.0001795918367346939, + "loss": 1.7103, + "step": 160 + }, + { + "epoch": 3.26, + "learning_rate": 0.00017946428571428573, + "loss": 1.6662, + "step": 161 + }, + { + "epoch": 3.28, + "learning_rate": 0.00017933673469387756, + "loss": 1.6657, + "step": 162 + }, + { + "epoch": 3.3, + "learning_rate": 0.0001792091836734694, + "loss": 1.791, + "step": 163 + }, + { + "epoch": 3.32, + "learning_rate": 0.00017908163265306125, + "loss": 1.7704, + "step": 164 + }, + { + "epoch": 3.34, + "learning_rate": 0.00017895408163265305, + "loss": 1.7229, + "step": 165 + }, + { + "epoch": 3.36, + "learning_rate": 0.0001788265306122449, + "loss": 1.76, + "step": 166 + }, + { + "epoch": 3.38, + "learning_rate": 0.00017869897959183674, + "loss": 1.6482, + "step": 167 + }, + { + "epoch": 3.4, + "learning_rate": 0.0001785714285714286, + "loss": 1.8076, + "step": 168 + }, + { + "epoch": 3.42, + "learning_rate": 0.0001784438775510204, + "loss": 1.7368, + "step": 169 + }, + { + "epoch": 3.44, + "learning_rate": 0.00017831632653061226, + "loss": 1.6264, + "step": 170 + }, + { + "epoch": 3.46, + "learning_rate": 0.0001781887755102041, + "loss": 1.6289, + "step": 171 + }, + { + "epoch": 3.48, + "learning_rate": 0.00017806122448979592, + "loss": 1.7913, + "step": 172 + }, + { + "epoch": 3.5, + "learning_rate": 0.00017793367346938775, + "loss": 1.6985, + "step": 173 + }, + { + "epoch": 3.52, + "learning_rate": 0.0001778061224489796, + "loss": 1.6936, + "step": 174 + }, + { + "epoch": 3.54, + "learning_rate": 0.00017767857142857141, + "loss": 1.8068, + "step": 175 + }, + { + "epoch": 3.56, + "learning_rate": 0.00017755102040816327, + "loss": 1.7243, + "step": 176 + }, + { + "epoch": 3.58, + "learning_rate": 0.0001774234693877551, + "loss": 1.6893, + "step": 177 + }, + { + "epoch": 3.6, + "learning_rate": 0.00017729591836734696, + "loss": 1.8122, + "step": 178 + }, + { + "epoch": 3.62, + "learning_rate": 0.0001771683673469388, + "loss": 1.6562, + "step": 179 + }, + { + "epoch": 3.64, + "learning_rate": 0.00017704081632653062, + "loss": 1.6999, + "step": 180 + }, + { + "epoch": 3.66, + "learning_rate": 0.00017691326530612245, + "loss": 1.7229, + "step": 181 + }, + { + "epoch": 3.68, + "learning_rate": 0.00017678571428571428, + "loss": 1.6764, + "step": 182 + }, + { + "epoch": 3.7, + "learning_rate": 0.00017665816326530614, + "loss": 1.6982, + "step": 183 + }, + { + "epoch": 3.72, + "learning_rate": 0.00017653061224489797, + "loss": 1.696, + "step": 184 + }, + { + "epoch": 3.74, + "learning_rate": 0.0001764030612244898, + "loss": 1.6797, + "step": 185 + }, + { + "epoch": 3.76, + "learning_rate": 0.00017627551020408164, + "loss": 1.637, + "step": 186 + }, + { + "epoch": 3.78, + "learning_rate": 0.0001761479591836735, + "loss": 1.7074, + "step": 187 + }, + { + "epoch": 3.8, + "learning_rate": 
0.0001760204081632653, + "loss": 1.705, + "step": 188 + }, + { + "epoch": 3.82, + "learning_rate": 0.00017589285714285716, + "loss": 1.6153, + "step": 189 + }, + { + "epoch": 3.84, + "learning_rate": 0.00017576530612244899, + "loss": 1.7354, + "step": 190 + }, + { + "epoch": 3.86, + "learning_rate": 0.00017563775510204084, + "loss": 1.6941, + "step": 191 + }, + { + "epoch": 3.88, + "learning_rate": 0.00017551020408163265, + "loss": 1.7231, + "step": 192 + }, + { + "epoch": 3.91, + "learning_rate": 0.0001753826530612245, + "loss": 1.7663, + "step": 193 + }, + { + "epoch": 3.93, + "learning_rate": 0.00017525510204081634, + "loss": 1.6532, + "step": 194 + }, + { + "epoch": 3.95, + "learning_rate": 0.00017512755102040817, + "loss": 1.7115, + "step": 195 + }, + { + "epoch": 3.97, + "learning_rate": 0.000175, + "loss": 1.6955, + "step": 196 + }, + { + "epoch": 3.99, + "learning_rate": 0.00017487244897959186, + "loss": 1.6863, + "step": 197 + }, + { + "epoch": 4.01, + "learning_rate": 0.00017474489795918366, + "loss": 1.7012, + "step": 198 + }, + { + "epoch": 4.03, + "learning_rate": 0.00017461734693877552, + "loss": 1.5927, + "step": 199 + }, + { + "epoch": 4.05, + "learning_rate": 0.00017448979591836735, + "loss": 1.6272, + "step": 200 + }, + { + "epoch": 4.07, + "learning_rate": 0.00017436224489795918, + "loss": 1.5994, + "step": 201 + }, + { + "epoch": 4.09, + "learning_rate": 0.00017423469387755104, + "loss": 1.7141, + "step": 202 + }, + { + "epoch": 4.11, + "learning_rate": 0.00017410714285714287, + "loss": 1.7547, + "step": 203 + }, + { + "epoch": 4.13, + "learning_rate": 0.0001739795918367347, + "loss": 1.6254, + "step": 204 + }, + { + "epoch": 4.15, + "learning_rate": 0.00017385204081632653, + "loss": 1.6686, + "step": 205 + }, + { + "epoch": 4.17, + "learning_rate": 0.0001737244897959184, + "loss": 1.6684, + "step": 206 + }, + { + "epoch": 4.19, + "learning_rate": 0.00017359693877551022, + "loss": 1.6724, + "step": 207 + }, + { + "epoch": 4.21, + "learning_rate": 0.00017346938775510205, + "loss": 1.7361, + "step": 208 + }, + { + "epoch": 4.23, + "learning_rate": 0.00017334183673469388, + "loss": 1.7167, + "step": 209 + }, + { + "epoch": 4.25, + "learning_rate": 0.00017321428571428574, + "loss": 1.7226, + "step": 210 + }, + { + "epoch": 4.27, + "learning_rate": 0.00017308673469387754, + "loss": 1.7133, + "step": 211 + }, + { + "epoch": 4.29, + "learning_rate": 0.0001729591836734694, + "loss": 1.649, + "step": 212 + }, + { + "epoch": 4.31, + "learning_rate": 0.00017283163265306123, + "loss": 1.7104, + "step": 213 + }, + { + "epoch": 4.33, + "learning_rate": 0.00017270408163265306, + "loss": 1.6861, + "step": 214 + }, + { + "epoch": 4.35, + "learning_rate": 0.0001725765306122449, + "loss": 1.648, + "step": 215 + }, + { + "epoch": 4.37, + "learning_rate": 0.00017244897959183675, + "loss": 1.6215, + "step": 216 + }, + { + "epoch": 4.39, + "learning_rate": 0.00017232142857142858, + "loss": 1.6334, + "step": 217 + }, + { + "epoch": 4.41, + "learning_rate": 0.0001721938775510204, + "loss": 1.6283, + "step": 218 + }, + { + "epoch": 4.43, + "learning_rate": 0.00017206632653061224, + "loss": 1.6462, + "step": 219 + }, + { + "epoch": 4.45, + "learning_rate": 0.0001719387755102041, + "loss": 1.7233, + "step": 220 + }, + { + "epoch": 4.47, + "learning_rate": 0.0001718112244897959, + "loss": 1.7839, + "step": 221 + }, + { + "epoch": 4.49, + "learning_rate": 0.00017168367346938776, + "loss": 1.7204, + "step": 222 + }, + { + "epoch": 4.51, + "learning_rate": 0.0001715561224489796, + "loss": 1.7671, + 
"step": 223 + }, + { + "epoch": 4.53, + "learning_rate": 0.00017142857142857143, + "loss": 1.6824, + "step": 224 + }, + { + "epoch": 4.55, + "learning_rate": 0.00017130102040816328, + "loss": 1.7068, + "step": 225 + }, + { + "epoch": 4.57, + "learning_rate": 0.00017117346938775511, + "loss": 1.6515, + "step": 226 + }, + { + "epoch": 4.59, + "learning_rate": 0.00017104591836734694, + "loss": 1.6586, + "step": 227 + }, + { + "epoch": 4.61, + "learning_rate": 0.00017091836734693878, + "loss": 1.6355, + "step": 228 + }, + { + "epoch": 4.63, + "learning_rate": 0.00017079081632653063, + "loss": 1.7173, + "step": 229 + }, + { + "epoch": 4.65, + "learning_rate": 0.00017066326530612246, + "loss": 1.6585, + "step": 230 + }, + { + "epoch": 4.67, + "learning_rate": 0.0001705357142857143, + "loss": 1.5856, + "step": 231 + }, + { + "epoch": 4.69, + "learning_rate": 0.00017040816326530613, + "loss": 1.5923, + "step": 232 + }, + { + "epoch": 4.71, + "learning_rate": 0.00017028061224489798, + "loss": 1.7128, + "step": 233 + }, + { + "epoch": 4.73, + "learning_rate": 0.0001701530612244898, + "loss": 1.6971, + "step": 234 + }, + { + "epoch": 4.75, + "learning_rate": 0.00017002551020408165, + "loss": 1.6416, + "step": 235 + }, + { + "epoch": 4.78, + "learning_rate": 0.00016989795918367348, + "loss": 1.645, + "step": 236 + }, + { + "epoch": 4.8, + "learning_rate": 0.0001697704081632653, + "loss": 1.6792, + "step": 237 + }, + { + "epoch": 4.82, + "learning_rate": 0.00016964285714285714, + "loss": 1.6522, + "step": 238 + }, + { + "epoch": 4.84, + "learning_rate": 0.000169515306122449, + "loss": 1.6315, + "step": 239 + }, + { + "epoch": 4.86, + "learning_rate": 0.00016938775510204083, + "loss": 1.6622, + "step": 240 + }, + { + "epoch": 4.88, + "learning_rate": 0.00016926020408163266, + "loss": 1.6566, + "step": 241 + }, + { + "epoch": 4.9, + "learning_rate": 0.0001691326530612245, + "loss": 1.7141, + "step": 242 + }, + { + "epoch": 4.92, + "learning_rate": 0.00016900510204081635, + "loss": 1.5873, + "step": 243 + }, + { + "epoch": 4.94, + "learning_rate": 0.00016887755102040818, + "loss": 1.6571, + "step": 244 + }, + { + "epoch": 4.96, + "learning_rate": 0.00016875, + "loss": 1.6829, + "step": 245 + }, + { + "epoch": 4.98, + "learning_rate": 0.00016862244897959184, + "loss": 1.6935, + "step": 246 + }, + { + "epoch": 5.0, + "learning_rate": 0.00016849489795918367, + "loss": 1.6782, + "step": 247 + }, + { + "epoch": 5.02, + "learning_rate": 0.00016836734693877553, + "loss": 1.622, + "step": 248 + }, + { + "epoch": 5.04, + "learning_rate": 0.00016823979591836736, + "loss": 1.6596, + "step": 249 + }, + { + "epoch": 5.06, + "learning_rate": 0.0001681122448979592, + "loss": 1.5821, + "step": 250 + }, + { + "epoch": 5.08, + "learning_rate": 0.00016798469387755102, + "loss": 1.7292, + "step": 251 + }, + { + "epoch": 5.1, + "learning_rate": 0.00016785714285714288, + "loss": 1.646, + "step": 252 + }, + { + "epoch": 5.12, + "learning_rate": 0.0001677295918367347, + "loss": 1.6969, + "step": 253 + }, + { + "epoch": 5.14, + "learning_rate": 0.00016760204081632654, + "loss": 1.6082, + "step": 254 + }, + { + "epoch": 5.16, + "learning_rate": 0.00016747448979591837, + "loss": 1.5843, + "step": 255 + }, + { + "epoch": 5.18, + "learning_rate": 0.00016734693877551023, + "loss": 1.6827, + "step": 256 + }, + { + "epoch": 5.2, + "learning_rate": 0.00016721938775510203, + "loss": 1.5824, + "step": 257 + }, + { + "epoch": 5.22, + "learning_rate": 0.0001670918367346939, + "loss": 1.6795, + "step": 258 + }, + { + "epoch": 5.24, + 
"learning_rate": 0.00016696428571428572, + "loss": 1.5639, + "step": 259 + }, + { + "epoch": 5.26, + "learning_rate": 0.00016683673469387755, + "loss": 1.592, + "step": 260 + }, + { + "epoch": 5.28, + "learning_rate": 0.00016670918367346938, + "loss": 1.65, + "step": 261 + }, + { + "epoch": 5.3, + "learning_rate": 0.00016658163265306124, + "loss": 1.5592, + "step": 262 + }, + { + "epoch": 5.32, + "learning_rate": 0.00016645408163265305, + "loss": 1.5091, + "step": 263 + }, + { + "epoch": 5.34, + "learning_rate": 0.0001663265306122449, + "loss": 1.6138, + "step": 264 + }, + { + "epoch": 5.36, + "learning_rate": 0.00016619897959183673, + "loss": 1.625, + "step": 265 + }, + { + "epoch": 5.38, + "learning_rate": 0.0001660714285714286, + "loss": 1.5757, + "step": 266 + }, + { + "epoch": 5.4, + "learning_rate": 0.00016594387755102042, + "loss": 1.6372, + "step": 267 + }, + { + "epoch": 5.42, + "learning_rate": 0.00016581632653061225, + "loss": 1.5891, + "step": 268 + }, + { + "epoch": 5.44, + "learning_rate": 0.00016568877551020409, + "loss": 1.6893, + "step": 269 + }, + { + "epoch": 5.46, + "learning_rate": 0.00016556122448979592, + "loss": 1.6662, + "step": 270 + }, + { + "epoch": 5.48, + "learning_rate": 0.00016543367346938777, + "loss": 1.7132, + "step": 271 + }, + { + "epoch": 5.5, + "learning_rate": 0.0001653061224489796, + "loss": 1.5835, + "step": 272 + }, + { + "epoch": 5.52, + "learning_rate": 0.00016517857142857144, + "loss": 1.6342, + "step": 273 + }, + { + "epoch": 5.54, + "learning_rate": 0.00016505102040816327, + "loss": 1.6717, + "step": 274 + }, + { + "epoch": 5.56, + "learning_rate": 0.00016492346938775512, + "loss": 1.6248, + "step": 275 + }, + { + "epoch": 5.58, + "learning_rate": 0.00016479591836734696, + "loss": 1.6117, + "step": 276 + }, + { + "epoch": 5.6, + "learning_rate": 0.0001646683673469388, + "loss": 1.6798, + "step": 277 + }, + { + "epoch": 5.63, + "learning_rate": 0.00016454081632653062, + "loss": 1.6406, + "step": 278 + }, + { + "epoch": 5.65, + "learning_rate": 0.00016441326530612248, + "loss": 1.6512, + "step": 279 + }, + { + "epoch": 5.67, + "learning_rate": 0.00016428571428571428, + "loss": 1.6102, + "step": 280 + }, + { + "epoch": 5.69, + "learning_rate": 0.00016415816326530614, + "loss": 1.6113, + "step": 281 + }, + { + "epoch": 5.71, + "learning_rate": 0.00016403061224489797, + "loss": 1.7116, + "step": 282 + }, + { + "epoch": 5.73, + "learning_rate": 0.0001639030612244898, + "loss": 1.6846, + "step": 283 + }, + { + "epoch": 5.75, + "learning_rate": 0.00016377551020408163, + "loss": 1.6911, + "step": 284 + }, + { + "epoch": 5.77, + "learning_rate": 0.0001636479591836735, + "loss": 1.6202, + "step": 285 + }, + { + "epoch": 5.79, + "learning_rate": 0.0001635204081632653, + "loss": 1.5715, + "step": 286 + }, + { + "epoch": 5.81, + "learning_rate": 0.00016339285714285715, + "loss": 1.6461, + "step": 287 + }, + { + "epoch": 5.83, + "learning_rate": 0.00016326530612244898, + "loss": 1.6624, + "step": 288 + }, + { + "epoch": 5.85, + "learning_rate": 0.00016313775510204084, + "loss": 1.6535, + "step": 289 + }, + { + "epoch": 5.87, + "learning_rate": 0.00016301020408163267, + "loss": 1.6275, + "step": 290 + }, + { + "epoch": 5.89, + "learning_rate": 0.0001628826530612245, + "loss": 1.6636, + "step": 291 + }, + { + "epoch": 5.91, + "learning_rate": 0.00016275510204081633, + "loss": 1.6546, + "step": 292 + }, + { + "epoch": 5.93, + "learning_rate": 0.00016262755102040816, + "loss": 1.7274, + "step": 293 + }, + { + "epoch": 5.95, + "learning_rate": 
0.00016250000000000002, + "loss": 1.5901, + "step": 294 + }, + { + "epoch": 5.97, + "learning_rate": 0.00016237244897959185, + "loss": 1.6046, + "step": 295 + }, + { + "epoch": 5.99, + "learning_rate": 0.00016224489795918368, + "loss": 1.5828, + "step": 296 + }, + { + "epoch": 6.01, + "learning_rate": 0.0001621173469387755, + "loss": 1.6435, + "step": 297 + }, + { + "epoch": 6.03, + "learning_rate": 0.00016198979591836737, + "loss": 1.6263, + "step": 298 + }, + { + "epoch": 6.05, + "learning_rate": 0.00016186224489795917, + "loss": 1.4944, + "step": 299 + }, + { + "epoch": 6.07, + "learning_rate": 0.00016173469387755103, + "loss": 1.6286, + "step": 300 + }, + { + "epoch": 6.09, + "learning_rate": 0.00016160714285714286, + "loss": 1.694, + "step": 301 + }, + { + "epoch": 6.11, + "learning_rate": 0.00016147959183673472, + "loss": 1.6197, + "step": 302 + }, + { + "epoch": 6.13, + "learning_rate": 0.00016135204081632652, + "loss": 1.5597, + "step": 303 + }, + { + "epoch": 6.15, + "learning_rate": 0.00016122448979591838, + "loss": 1.5487, + "step": 304 + }, + { + "epoch": 6.17, + "learning_rate": 0.0001610969387755102, + "loss": 1.5769, + "step": 305 + }, + { + "epoch": 6.19, + "learning_rate": 0.00016096938775510204, + "loss": 1.6367, + "step": 306 + }, + { + "epoch": 6.21, + "learning_rate": 0.00016084183673469388, + "loss": 1.583, + "step": 307 + }, + { + "epoch": 6.23, + "learning_rate": 0.00016071428571428573, + "loss": 1.6201, + "step": 308 + }, + { + "epoch": 6.25, + "learning_rate": 0.00016058673469387754, + "loss": 1.6586, + "step": 309 + }, + { + "epoch": 6.27, + "learning_rate": 0.0001604591836734694, + "loss": 1.6711, + "step": 310 + }, + { + "epoch": 6.29, + "learning_rate": 0.00016033163265306123, + "loss": 1.6402, + "step": 311 + }, + { + "epoch": 6.31, + "learning_rate": 0.00016020408163265306, + "loss": 1.5247, + "step": 312 + }, + { + "epoch": 6.33, + "learning_rate": 0.00016007653061224491, + "loss": 1.5356, + "step": 313 + }, + { + "epoch": 6.35, + "learning_rate": 0.00015994897959183675, + "loss": 1.564, + "step": 314 + }, + { + "epoch": 6.37, + "learning_rate": 0.00015982142857142858, + "loss": 1.563, + "step": 315 + }, + { + "epoch": 6.39, + "learning_rate": 0.0001596938775510204, + "loss": 1.5198, + "step": 316 + }, + { + "epoch": 6.41, + "learning_rate": 0.00015956632653061227, + "loss": 1.6558, + "step": 317 + }, + { + "epoch": 6.43, + "learning_rate": 0.0001594387755102041, + "loss": 1.5534, + "step": 318 + }, + { + "epoch": 6.45, + "learning_rate": 0.00015931122448979593, + "loss": 1.6239, + "step": 319 + }, + { + "epoch": 6.47, + "learning_rate": 0.00015918367346938776, + "loss": 1.5645, + "step": 320 + }, + { + "epoch": 6.5, + "learning_rate": 0.00015905612244897962, + "loss": 1.5713, + "step": 321 + }, + { + "epoch": 6.52, + "learning_rate": 0.00015892857142857142, + "loss": 1.6176, + "step": 322 + }, + { + "epoch": 6.54, + "learning_rate": 0.00015880102040816328, + "loss": 1.502, + "step": 323 + }, + { + "epoch": 6.56, + "learning_rate": 0.0001586734693877551, + "loss": 1.645, + "step": 324 + }, + { + "epoch": 6.58, + "learning_rate": 0.00015854591836734697, + "loss": 1.5904, + "step": 325 + }, + { + "epoch": 6.6, + "learning_rate": 0.00015841836734693877, + "loss": 1.6149, + "step": 326 + }, + { + "epoch": 6.62, + "learning_rate": 0.00015829081632653063, + "loss": 1.6757, + "step": 327 + }, + { + "epoch": 6.64, + "learning_rate": 0.00015816326530612246, + "loss": 1.541, + "step": 328 + }, + { + "epoch": 6.66, + "learning_rate": 0.0001580357142857143, + "loss": 
1.5898, + "step": 329 + }, + { + "epoch": 6.68, + "learning_rate": 0.00015790816326530612, + "loss": 1.5441, + "step": 330 + }, + { + "epoch": 6.7, + "learning_rate": 0.00015778061224489798, + "loss": 1.61, + "step": 331 + }, + { + "epoch": 6.72, + "learning_rate": 0.00015765306122448978, + "loss": 1.615, + "step": 332 + }, + { + "epoch": 6.74, + "learning_rate": 0.00015752551020408164, + "loss": 1.6575, + "step": 333 + }, + { + "epoch": 6.76, + "learning_rate": 0.00015739795918367347, + "loss": 1.6702, + "step": 334 + }, + { + "epoch": 6.78, + "learning_rate": 0.0001572704081632653, + "loss": 1.6009, + "step": 335 + }, + { + "epoch": 6.8, + "learning_rate": 0.00015714285714285716, + "loss": 1.5568, + "step": 336 + }, + { + "epoch": 6.82, + "learning_rate": 0.000157015306122449, + "loss": 1.619, + "step": 337 + }, + { + "epoch": 6.84, + "learning_rate": 0.00015688775510204082, + "loss": 1.5563, + "step": 338 + }, + { + "epoch": 6.86, + "learning_rate": 0.00015676020408163265, + "loss": 1.6328, + "step": 339 + }, + { + "epoch": 6.88, + "learning_rate": 0.0001566326530612245, + "loss": 1.5726, + "step": 340 + }, + { + "epoch": 6.9, + "learning_rate": 0.00015650510204081634, + "loss": 1.6199, + "step": 341 + }, + { + "epoch": 6.92, + "learning_rate": 0.00015637755102040817, + "loss": 1.5722, + "step": 342 + }, + { + "epoch": 6.94, + "learning_rate": 0.00015625, + "loss": 1.5685, + "step": 343 + }, + { + "epoch": 6.96, + "learning_rate": 0.00015612244897959186, + "loss": 1.5615, + "step": 344 + }, + { + "epoch": 6.98, + "learning_rate": 0.00015599489795918366, + "loss": 1.5994, + "step": 345 + }, + { + "epoch": 7.0, + "learning_rate": 0.00015586734693877552, + "loss": 1.5579, + "step": 346 + }, + { + "epoch": 7.02, + "learning_rate": 0.00015573979591836735, + "loss": 1.547, + "step": 347 + }, + { + "epoch": 7.04, + "learning_rate": 0.00015561224489795918, + "loss": 1.5292, + "step": 348 + }, + { + "epoch": 7.06, + "learning_rate": 0.00015548469387755102, + "loss": 1.6032, + "step": 349 + }, + { + "epoch": 7.08, + "learning_rate": 0.00015535714285714287, + "loss": 1.5149, + "step": 350 + }, + { + "epoch": 7.1, + "learning_rate": 0.0001552295918367347, + "loss": 1.6093, + "step": 351 + }, + { + "epoch": 7.12, + "learning_rate": 0.00015510204081632654, + "loss": 1.5421, + "step": 352 + }, + { + "epoch": 7.14, + "learning_rate": 0.00015497448979591837, + "loss": 1.5733, + "step": 353 + }, + { + "epoch": 7.16, + "learning_rate": 0.00015484693877551022, + "loss": 1.5703, + "step": 354 + }, + { + "epoch": 7.18, + "learning_rate": 0.00015471938775510203, + "loss": 1.6141, + "step": 355 + }, + { + "epoch": 7.2, + "learning_rate": 0.00015459183673469389, + "loss": 1.5526, + "step": 356 + }, + { + "epoch": 7.22, + "learning_rate": 0.00015446428571428572, + "loss": 1.5347, + "step": 357 + }, + { + "epoch": 7.24, + "learning_rate": 0.00015433673469387755, + "loss": 1.5682, + "step": 358 + }, + { + "epoch": 7.26, + "learning_rate": 0.0001542091836734694, + "loss": 1.5292, + "step": 359 + }, + { + "epoch": 7.28, + "learning_rate": 0.00015408163265306124, + "loss": 1.499, + "step": 360 + }, + { + "epoch": 7.3, + "learning_rate": 0.00015395408163265307, + "loss": 1.5624, + "step": 361 + }, + { + "epoch": 7.32, + "learning_rate": 0.0001538265306122449, + "loss": 1.627, + "step": 362 + }, + { + "epoch": 7.34, + "learning_rate": 0.00015369897959183676, + "loss": 1.5327, + "step": 363 + }, + { + "epoch": 7.37, + "learning_rate": 0.0001535714285714286, + "loss": 1.5622, + "step": 364 + }, + { + "epoch": 7.39, + 
"learning_rate": 0.00015344387755102042, + "loss": 1.5659, + "step": 365 + }, + { + "epoch": 7.41, + "learning_rate": 0.00015331632653061225, + "loss": 1.5019, + "step": 366 + }, + { + "epoch": 7.43, + "learning_rate": 0.0001531887755102041, + "loss": 1.5921, + "step": 367 + }, + { + "epoch": 7.45, + "learning_rate": 0.0001530612244897959, + "loss": 1.5914, + "step": 368 + }, + { + "epoch": 7.47, + "learning_rate": 0.00015293367346938777, + "loss": 1.5045, + "step": 369 + }, + { + "epoch": 7.49, + "learning_rate": 0.0001528061224489796, + "loss": 1.6209, + "step": 370 + }, + { + "epoch": 7.51, + "learning_rate": 0.00015267857142857143, + "loss": 1.5198, + "step": 371 + }, + { + "epoch": 7.53, + "learning_rate": 0.00015255102040816326, + "loss": 1.5363, + "step": 372 + }, + { + "epoch": 7.55, + "learning_rate": 0.00015242346938775512, + "loss": 1.5391, + "step": 373 + }, + { + "epoch": 7.57, + "learning_rate": 0.00015229591836734695, + "loss": 1.4546, + "step": 374 + }, + { + "epoch": 7.59, + "learning_rate": 0.00015216836734693878, + "loss": 1.5546, + "step": 375 + }, + { + "epoch": 7.61, + "learning_rate": 0.0001520408163265306, + "loss": 1.5629, + "step": 376 + }, + { + "epoch": 7.63, + "learning_rate": 0.00015191326530612247, + "loss": 1.6002, + "step": 377 + }, + { + "epoch": 7.65, + "learning_rate": 0.00015178571428571427, + "loss": 1.5543, + "step": 378 + }, + { + "epoch": 7.67, + "learning_rate": 0.00015165816326530613, + "loss": 1.5925, + "step": 379 + }, + { + "epoch": 7.69, + "learning_rate": 0.00015153061224489796, + "loss": 1.5631, + "step": 380 + }, + { + "epoch": 7.71, + "learning_rate": 0.0001514030612244898, + "loss": 1.5677, + "step": 381 + }, + { + "epoch": 7.73, + "learning_rate": 0.00015127551020408165, + "loss": 1.5828, + "step": 382 + }, + { + "epoch": 7.75, + "learning_rate": 0.00015114795918367348, + "loss": 1.6494, + "step": 383 + }, + { + "epoch": 7.77, + "learning_rate": 0.0001510204081632653, + "loss": 1.553, + "step": 384 + }, + { + "epoch": 7.79, + "learning_rate": 0.00015089285714285714, + "loss": 1.6156, + "step": 385 + }, + { + "epoch": 7.81, + "learning_rate": 0.000150765306122449, + "loss": 1.5001, + "step": 386 + }, + { + "epoch": 7.83, + "learning_rate": 0.00015063775510204083, + "loss": 1.5321, + "step": 387 + }, + { + "epoch": 7.85, + "learning_rate": 0.00015051020408163266, + "loss": 1.5307, + "step": 388 + }, + { + "epoch": 7.87, + "learning_rate": 0.0001503826530612245, + "loss": 1.5639, + "step": 389 + }, + { + "epoch": 7.89, + "learning_rate": 0.00015025510204081635, + "loss": 1.517, + "step": 390 + }, + { + "epoch": 7.91, + "learning_rate": 0.00015012755102040816, + "loss": 1.4776, + "step": 391 + }, + { + "epoch": 7.93, + "learning_rate": 0.00015000000000000001, + "loss": 1.5368, + "step": 392 + }, + { + "epoch": 7.95, + "learning_rate": 0.00014987244897959184, + "loss": 1.5636, + "step": 393 + }, + { + "epoch": 7.97, + "learning_rate": 0.00014974489795918368, + "loss": 1.6004, + "step": 394 + }, + { + "epoch": 7.99, + "learning_rate": 0.0001496173469387755, + "loss": 1.5524, + "step": 395 + }, + { + "epoch": 8.01, + "learning_rate": 0.00014948979591836736, + "loss": 1.5307, + "step": 396 + }, + { + "epoch": 8.03, + "learning_rate": 0.00014936224489795917, + "loss": 1.5123, + "step": 397 + }, + { + "epoch": 8.05, + "learning_rate": 0.00014923469387755103, + "loss": 1.5132, + "step": 398 + }, + { + "epoch": 8.07, + "learning_rate": 0.00014910714285714286, + "loss": 1.5109, + "step": 399 + }, + { + "epoch": 8.09, + "learning_rate": 
0.00014897959183673472, + "loss": 1.5302, + "step": 400 + }, + { + "epoch": 8.11, + "learning_rate": 0.00014885204081632652, + "loss": 1.5238, + "step": 401 + }, + { + "epoch": 8.13, + "learning_rate": 0.00014872448979591838, + "loss": 1.4781, + "step": 402 + }, + { + "epoch": 8.15, + "learning_rate": 0.0001485969387755102, + "loss": 1.5446, + "step": 403 + }, + { + "epoch": 8.17, + "learning_rate": 0.00014846938775510204, + "loss": 1.5, + "step": 404 + }, + { + "epoch": 8.19, + "learning_rate": 0.0001483418367346939, + "loss": 1.5458, + "step": 405 + }, + { + "epoch": 8.21, + "learning_rate": 0.00014821428571428573, + "loss": 1.5257, + "step": 406 + }, + { + "epoch": 8.24, + "learning_rate": 0.00014808673469387756, + "loss": 1.4607, + "step": 407 + }, + { + "epoch": 8.26, + "learning_rate": 0.0001479591836734694, + "loss": 1.4282, + "step": 408 + }, + { + "epoch": 8.28, + "learning_rate": 0.00014783163265306125, + "loss": 1.4519, + "step": 409 + }, + { + "epoch": 8.3, + "learning_rate": 0.00014770408163265305, + "loss": 1.475, + "step": 410 + }, + { + "epoch": 8.32, + "learning_rate": 0.0001475765306122449, + "loss": 1.5425, + "step": 411 + }, + { + "epoch": 8.34, + "learning_rate": 0.00014744897959183674, + "loss": 1.5407, + "step": 412 + }, + { + "epoch": 8.36, + "learning_rate": 0.0001473214285714286, + "loss": 1.5698, + "step": 413 + }, + { + "epoch": 8.38, + "learning_rate": 0.0001471938775510204, + "loss": 1.4282, + "step": 414 + }, + { + "epoch": 8.4, + "learning_rate": 0.00014706632653061226, + "loss": 1.5301, + "step": 415 + }, + { + "epoch": 8.42, + "learning_rate": 0.0001469387755102041, + "loss": 1.5083, + "step": 416 + }, + { + "epoch": 8.44, + "learning_rate": 0.00014681122448979592, + "loss": 1.5712, + "step": 417 + }, + { + "epoch": 8.46, + "learning_rate": 0.00014668367346938775, + "loss": 1.4363, + "step": 418 + }, + { + "epoch": 8.48, + "learning_rate": 0.0001465561224489796, + "loss": 1.4463, + "step": 419 + }, + { + "epoch": 8.5, + "learning_rate": 0.00014642857142857141, + "loss": 1.4738, + "step": 420 + }, + { + "epoch": 8.52, + "learning_rate": 0.00014630102040816327, + "loss": 1.5396, + "step": 421 + }, + { + "epoch": 8.54, + "learning_rate": 0.0001461734693877551, + "loss": 1.4384, + "step": 422 + }, + { + "epoch": 8.56, + "learning_rate": 0.00014604591836734696, + "loss": 1.5345, + "step": 423 + }, + { + "epoch": 8.58, + "learning_rate": 0.0001459183673469388, + "loss": 1.5355, + "step": 424 + }, + { + "epoch": 8.6, + "learning_rate": 0.00014579081632653062, + "loss": 1.5188, + "step": 425 + }, + { + "epoch": 8.62, + "learning_rate": 0.00014566326530612245, + "loss": 1.5575, + "step": 426 + }, + { + "epoch": 8.64, + "learning_rate": 0.00014553571428571428, + "loss": 1.5279, + "step": 427 + }, + { + "epoch": 8.66, + "learning_rate": 0.00014540816326530614, + "loss": 1.5484, + "step": 428 + }, + { + "epoch": 8.68, + "learning_rate": 0.00014528061224489797, + "loss": 1.4878, + "step": 429 + }, + { + "epoch": 8.7, + "learning_rate": 0.0001451530612244898, + "loss": 1.503, + "step": 430 + }, + { + "epoch": 8.72, + "learning_rate": 0.00014502551020408163, + "loss": 1.4723, + "step": 431 + }, + { + "epoch": 8.74, + "learning_rate": 0.0001448979591836735, + "loss": 1.5579, + "step": 432 + }, + { + "epoch": 8.76, + "learning_rate": 0.0001447704081632653, + "loss": 1.4789, + "step": 433 + }, + { + "epoch": 8.78, + "learning_rate": 0.00014464285714285715, + "loss": 1.5501, + "step": 434 + }, + { + "epoch": 8.8, + "learning_rate": 0.00014451530612244899, + "loss": 1.5204, + 
"step": 435 + }, + { + "epoch": 8.82, + "learning_rate": 0.00014438775510204084, + "loss": 1.5489, + "step": 436 + }, + { + "epoch": 8.84, + "learning_rate": 0.00014426020408163265, + "loss": 1.5464, + "step": 437 + }, + { + "epoch": 8.86, + "learning_rate": 0.0001441326530612245, + "loss": 1.5896, + "step": 438 + }, + { + "epoch": 8.88, + "learning_rate": 0.00014400510204081634, + "loss": 1.5465, + "step": 439 + }, + { + "epoch": 8.9, + "learning_rate": 0.00014387755102040817, + "loss": 1.5094, + "step": 440 + }, + { + "epoch": 8.92, + "learning_rate": 0.00014375, + "loss": 1.5144, + "step": 441 + }, + { + "epoch": 8.94, + "learning_rate": 0.00014362244897959186, + "loss": 1.4919, + "step": 442 + }, + { + "epoch": 8.96, + "learning_rate": 0.00014349489795918366, + "loss": 1.4702, + "step": 443 + }, + { + "epoch": 8.98, + "learning_rate": 0.00014336734693877552, + "loss": 1.4996, + "step": 444 + }, + { + "epoch": 9.0, + "learning_rate": 0.00014323979591836735, + "loss": 1.5503, + "step": 445 + }, + { + "epoch": 9.02, + "learning_rate": 0.00014311224489795918, + "loss": 1.4125, + "step": 446 + }, + { + "epoch": 9.04, + "learning_rate": 0.00014298469387755104, + "loss": 1.4722, + "step": 447 + }, + { + "epoch": 9.06, + "learning_rate": 0.00014285714285714287, + "loss": 1.5199, + "step": 448 + }, + { + "epoch": 9.09, + "learning_rate": 0.0001427295918367347, + "loss": 1.4571, + "step": 449 + }, + { + "epoch": 9.11, + "learning_rate": 0.00014260204081632653, + "loss": 1.4996, + "step": 450 + }, + { + "epoch": 9.13, + "learning_rate": 0.0001424744897959184, + "loss": 1.4092, + "step": 451 + }, + { + "epoch": 9.15, + "learning_rate": 0.00014234693877551022, + "loss": 1.4198, + "step": 452 + }, + { + "epoch": 9.17, + "learning_rate": 0.00014221938775510205, + "loss": 1.4916, + "step": 453 + }, + { + "epoch": 9.19, + "learning_rate": 0.00014209183673469388, + "loss": 1.5051, + "step": 454 + }, + { + "epoch": 9.21, + "learning_rate": 0.00014196428571428574, + "loss": 1.4321, + "step": 455 + }, + { + "epoch": 9.23, + "learning_rate": 0.00014183673469387754, + "loss": 1.4097, + "step": 456 + }, + { + "epoch": 9.25, + "learning_rate": 0.0001417091836734694, + "loss": 1.4853, + "step": 457 + }, + { + "epoch": 9.27, + "learning_rate": 0.00014158163265306123, + "loss": 1.4593, + "step": 458 + }, + { + "epoch": 9.29, + "learning_rate": 0.00014145408163265306, + "loss": 1.3729, + "step": 459 + }, + { + "epoch": 9.31, + "learning_rate": 0.0001413265306122449, + "loss": 1.4467, + "step": 460 + }, + { + "epoch": 9.33, + "learning_rate": 0.00014119897959183675, + "loss": 1.4467, + "step": 461 + }, + { + "epoch": 9.35, + "learning_rate": 0.00014107142857142858, + "loss": 1.4785, + "step": 462 + }, + { + "epoch": 9.37, + "learning_rate": 0.0001409438775510204, + "loss": 1.4089, + "step": 463 + }, + { + "epoch": 9.39, + "learning_rate": 0.00014081632653061224, + "loss": 1.5026, + "step": 464 + }, + { + "epoch": 9.41, + "learning_rate": 0.0001406887755102041, + "loss": 1.4857, + "step": 465 + }, + { + "epoch": 9.43, + "learning_rate": 0.0001405612244897959, + "loss": 1.3745, + "step": 466 + }, + { + "epoch": 9.45, + "learning_rate": 0.00014043367346938776, + "loss": 1.4733, + "step": 467 + }, + { + "epoch": 9.47, + "learning_rate": 0.0001403061224489796, + "loss": 1.5212, + "step": 468 + }, + { + "epoch": 9.49, + "learning_rate": 0.00014017857142857142, + "loss": 1.5398, + "step": 469 + }, + { + "epoch": 9.51, + "learning_rate": 0.00014005102040816328, + "loss": 1.478, + "step": 470 + }, + { + "epoch": 9.53, + 
"learning_rate": 0.0001399234693877551, + "loss": 1.496, + "step": 471 + }, + { + "epoch": 9.55, + "learning_rate": 0.00013979591836734694, + "loss": 1.4837, + "step": 472 + }, + { + "epoch": 9.57, + "learning_rate": 0.00013966836734693878, + "loss": 1.4724, + "step": 473 + }, + { + "epoch": 9.59, + "learning_rate": 0.00013954081632653063, + "loss": 1.4828, + "step": 474 + }, + { + "epoch": 9.61, + "learning_rate": 0.00013941326530612246, + "loss": 1.5012, + "step": 475 + }, + { + "epoch": 9.63, + "learning_rate": 0.0001392857142857143, + "loss": 1.4879, + "step": 476 + }, + { + "epoch": 9.65, + "learning_rate": 0.00013915816326530613, + "loss": 1.4196, + "step": 477 + }, + { + "epoch": 9.67, + "learning_rate": 0.00013903061224489798, + "loss": 1.4915, + "step": 478 + }, + { + "epoch": 9.69, + "learning_rate": 0.0001389030612244898, + "loss": 1.3878, + "step": 479 + }, + { + "epoch": 9.71, + "learning_rate": 0.00013877551020408165, + "loss": 1.466, + "step": 480 + }, + { + "epoch": 9.73, + "learning_rate": 0.00013864795918367348, + "loss": 1.4582, + "step": 481 + }, + { + "epoch": 9.75, + "learning_rate": 0.0001385204081632653, + "loss": 1.533, + "step": 482 + }, + { + "epoch": 9.77, + "learning_rate": 0.00013839285714285714, + "loss": 1.4697, + "step": 483 + }, + { + "epoch": 9.79, + "learning_rate": 0.000138265306122449, + "loss": 1.3989, + "step": 484 + }, + { + "epoch": 9.81, + "learning_rate": 0.00013813775510204083, + "loss": 1.4361, + "step": 485 + }, + { + "epoch": 9.83, + "learning_rate": 0.00013801020408163266, + "loss": 1.5271, + "step": 486 + }, + { + "epoch": 9.85, + "learning_rate": 0.0001378826530612245, + "loss": 1.4905, + "step": 487 + }, + { + "epoch": 9.87, + "learning_rate": 0.00013775510204081635, + "loss": 1.4757, + "step": 488 + }, + { + "epoch": 9.89, + "learning_rate": 0.00013762755102040815, + "loss": 1.5485, + "step": 489 + }, + { + "epoch": 9.91, + "learning_rate": 0.0001375, + "loss": 1.4783, + "step": 490 + }, + { + "epoch": 9.93, + "learning_rate": 0.00013737244897959184, + "loss": 1.4849, + "step": 491 + }, + { + "epoch": 9.96, + "learning_rate": 0.00013724489795918367, + "loss": 1.5382, + "step": 492 + }, + { + "epoch": 9.98, + "learning_rate": 0.00013711734693877553, + "loss": 1.4902, + "step": 493 + }, + { + "epoch": 10.0, + "learning_rate": 0.00013698979591836736, + "loss": 1.4865, + "step": 494 + }, + { + "epoch": 10.02, + "learning_rate": 0.0001368622448979592, + "loss": 1.4436, + "step": 495 + }, + { + "epoch": 10.04, + "learning_rate": 0.00013673469387755102, + "loss": 1.408, + "step": 496 + }, + { + "epoch": 10.06, + "learning_rate": 0.00013660714285714288, + "loss": 1.4764, + "step": 497 + }, + { + "epoch": 10.08, + "learning_rate": 0.0001364795918367347, + "loss": 1.4646, + "step": 498 + }, + { + "epoch": 10.1, + "learning_rate": 0.00013635204081632654, + "loss": 1.406, + "step": 499 + }, + { + "epoch": 10.12, + "learning_rate": 0.00013622448979591837, + "loss": 1.4785, + "step": 500 + }, + { + "epoch": 10.14, + "learning_rate": 0.00013609693877551023, + "loss": 1.4117, + "step": 501 + }, + { + "epoch": 10.16, + "learning_rate": 0.00013596938775510203, + "loss": 1.4108, + "step": 502 + }, + { + "epoch": 10.18, + "learning_rate": 0.0001358418367346939, + "loss": 1.4155, + "step": 503 + }, + { + "epoch": 10.2, + "learning_rate": 0.00013571428571428572, + "loss": 1.4021, + "step": 504 + }, + { + "epoch": 10.22, + "learning_rate": 0.00013558673469387755, + "loss": 1.411, + "step": 505 + }, + { + "epoch": 10.24, + "learning_rate": 
0.00013545918367346938, + "loss": 1.3851, + "step": 506 + }, + { + "epoch": 10.26, + "learning_rate": 0.00013533163265306124, + "loss": 1.387, + "step": 507 + }, + { + "epoch": 10.28, + "learning_rate": 0.00013520408163265305, + "loss": 1.4163, + "step": 508 + }, + { + "epoch": 10.3, + "learning_rate": 0.0001350765306122449, + "loss": 1.3343, + "step": 509 + }, + { + "epoch": 10.32, + "learning_rate": 0.00013494897959183673, + "loss": 1.4811, + "step": 510 + }, + { + "epoch": 10.34, + "learning_rate": 0.0001348214285714286, + "loss": 1.4086, + "step": 511 + }, + { + "epoch": 10.36, + "learning_rate": 0.0001346938775510204, + "loss": 1.3879, + "step": 512 + }, + { + "epoch": 10.38, + "learning_rate": 0.00013456632653061225, + "loss": 1.4204, + "step": 513 + }, + { + "epoch": 10.4, + "learning_rate": 0.00013443877551020408, + "loss": 1.4158, + "step": 514 + }, + { + "epoch": 10.42, + "learning_rate": 0.00013431122448979592, + "loss": 1.4521, + "step": 515 + }, + { + "epoch": 10.44, + "learning_rate": 0.00013418367346938777, + "loss": 1.4196, + "step": 516 + }, + { + "epoch": 10.46, + "learning_rate": 0.0001340561224489796, + "loss": 1.4361, + "step": 517 + }, + { + "epoch": 10.48, + "learning_rate": 0.00013392857142857144, + "loss": 1.4482, + "step": 518 + }, + { + "epoch": 10.5, + "learning_rate": 0.00013380102040816327, + "loss": 1.4801, + "step": 519 + }, + { + "epoch": 10.52, + "learning_rate": 0.00013367346938775512, + "loss": 1.4556, + "step": 520 + }, + { + "epoch": 10.54, + "learning_rate": 0.00013354591836734695, + "loss": 1.3902, + "step": 521 + }, + { + "epoch": 10.56, + "learning_rate": 0.00013341836734693879, + "loss": 1.4269, + "step": 522 + }, + { + "epoch": 10.58, + "learning_rate": 0.00013329081632653062, + "loss": 1.4899, + "step": 523 + }, + { + "epoch": 10.6, + "learning_rate": 0.00013316326530612247, + "loss": 1.3952, + "step": 524 + }, + { + "epoch": 10.62, + "learning_rate": 0.00013303571428571428, + "loss": 1.4116, + "step": 525 + }, + { + "epoch": 10.64, + "learning_rate": 0.00013290816326530614, + "loss": 1.4583, + "step": 526 + }, + { + "epoch": 10.66, + "learning_rate": 0.00013278061224489797, + "loss": 1.4466, + "step": 527 + }, + { + "epoch": 10.68, + "learning_rate": 0.0001326530612244898, + "loss": 1.4242, + "step": 528 + }, + { + "epoch": 10.7, + "learning_rate": 0.00013252551020408163, + "loss": 1.3717, + "step": 529 + }, + { + "epoch": 10.72, + "learning_rate": 0.0001323979591836735, + "loss": 1.4583, + "step": 530 + }, + { + "epoch": 10.74, + "learning_rate": 0.0001322704081632653, + "loss": 1.4185, + "step": 531 + }, + { + "epoch": 10.76, + "learning_rate": 0.00013214285714285715, + "loss": 1.4287, + "step": 532 + }, + { + "epoch": 10.78, + "learning_rate": 0.00013201530612244898, + "loss": 1.4385, + "step": 533 + }, + { + "epoch": 10.8, + "learning_rate": 0.00013188775510204084, + "loss": 1.453, + "step": 534 + }, + { + "epoch": 10.83, + "learning_rate": 0.00013176020408163264, + "loss": 1.4161, + "step": 535 + }, + { + "epoch": 10.85, + "learning_rate": 0.0001316326530612245, + "loss": 1.457, + "step": 536 + }, + { + "epoch": 10.87, + "learning_rate": 0.00013150510204081633, + "loss": 1.4367, + "step": 537 + }, + { + "epoch": 10.89, + "learning_rate": 0.00013137755102040816, + "loss": 1.4256, + "step": 538 + }, + { + "epoch": 10.91, + "learning_rate": 0.00013125000000000002, + "loss": 1.424, + "step": 539 + }, + { + "epoch": 10.93, + "learning_rate": 0.00013112244897959185, + "loss": 1.3923, + "step": 540 + }, + { + "epoch": 10.95, + "learning_rate": 
0.00013099489795918368, + "loss": 1.4225, + "step": 541 + }, + { + "epoch": 10.97, + "learning_rate": 0.0001308673469387755, + "loss": 1.3969, + "step": 542 + }, + { + "epoch": 10.99, + "learning_rate": 0.00013073979591836737, + "loss": 1.4446, + "step": 543 + }, + { + "epoch": 11.01, + "learning_rate": 0.00013061224489795917, + "loss": 1.4375, + "step": 544 + }, + { + "epoch": 11.03, + "learning_rate": 0.00013048469387755103, + "loss": 1.4064, + "step": 545 + }, + { + "epoch": 11.05, + "learning_rate": 0.00013035714285714286, + "loss": 1.3454, + "step": 546 + }, + { + "epoch": 11.07, + "learning_rate": 0.00013022959183673472, + "loss": 1.3234, + "step": 547 + }, + { + "epoch": 11.09, + "learning_rate": 0.00013010204081632652, + "loss": 1.3759, + "step": 548 + }, + { + "epoch": 11.11, + "learning_rate": 0.00012997448979591838, + "loss": 1.4221, + "step": 549 + }, + { + "epoch": 11.13, + "learning_rate": 0.0001298469387755102, + "loss": 1.4261, + "step": 550 + }, + { + "epoch": 11.15, + "learning_rate": 0.00012971938775510204, + "loss": 1.3341, + "step": 551 + }, + { + "epoch": 11.17, + "learning_rate": 0.00012959183673469387, + "loss": 1.3994, + "step": 552 + }, + { + "epoch": 11.19, + "learning_rate": 0.00012946428571428573, + "loss": 1.3894, + "step": 553 + }, + { + "epoch": 11.21, + "learning_rate": 0.00012933673469387754, + "loss": 1.3585, + "step": 554 + }, + { + "epoch": 11.23, + "learning_rate": 0.0001292091836734694, + "loss": 1.3763, + "step": 555 + }, + { + "epoch": 11.25, + "learning_rate": 0.00012908163265306123, + "loss": 1.3623, + "step": 556 + }, + { + "epoch": 11.27, + "learning_rate": 0.00012895408163265306, + "loss": 1.3907, + "step": 557 + }, + { + "epoch": 11.29, + "learning_rate": 0.0001288265306122449, + "loss": 1.3807, + "step": 558 + }, + { + "epoch": 11.31, + "learning_rate": 0.00012869897959183674, + "loss": 1.4045, + "step": 559 + }, + { + "epoch": 11.33, + "learning_rate": 0.00012857142857142858, + "loss": 1.4038, + "step": 560 + }, + { + "epoch": 11.35, + "learning_rate": 0.0001284438775510204, + "loss": 1.3466, + "step": 561 + }, + { + "epoch": 11.37, + "learning_rate": 0.00012831632653061226, + "loss": 1.3449, + "step": 562 + }, + { + "epoch": 11.39, + "learning_rate": 0.0001281887755102041, + "loss": 1.3866, + "step": 563 + }, + { + "epoch": 11.41, + "learning_rate": 0.00012806122448979593, + "loss": 1.3106, + "step": 564 + }, + { + "epoch": 11.43, + "learning_rate": 0.00012793367346938776, + "loss": 1.4414, + "step": 565 + }, + { + "epoch": 11.45, + "learning_rate": 0.00012780612244897962, + "loss": 1.3737, + "step": 566 + }, + { + "epoch": 11.47, + "learning_rate": 0.00012767857142857142, + "loss": 1.4053, + "step": 567 + }, + { + "epoch": 11.49, + "learning_rate": 0.00012755102040816328, + "loss": 1.4561, + "step": 568 + }, + { + "epoch": 11.51, + "learning_rate": 0.0001274234693877551, + "loss": 1.3684, + "step": 569 + }, + { + "epoch": 11.53, + "learning_rate": 0.00012729591836734697, + "loss": 1.3117, + "step": 570 + }, + { + "epoch": 11.55, + "learning_rate": 0.00012716836734693877, + "loss": 1.3474, + "step": 571 + }, + { + "epoch": 11.57, + "learning_rate": 0.00012704081632653063, + "loss": 1.3804, + "step": 572 + }, + { + "epoch": 11.59, + "learning_rate": 0.00012691326530612246, + "loss": 1.3656, + "step": 573 + }, + { + "epoch": 11.61, + "learning_rate": 0.0001267857142857143, + "loss": 1.3133, + "step": 574 + }, + { + "epoch": 11.63, + "learning_rate": 0.00012665816326530612, + "loss": 1.4077, + "step": 575 + }, + { + "epoch": 11.65, + 
"learning_rate": 0.00012653061224489798, + "loss": 1.4087, + "step": 576 + }, + { + "epoch": 11.67, + "learning_rate": 0.00012640306122448978, + "loss": 1.3524, + "step": 577 + }, + { + "epoch": 11.7, + "learning_rate": 0.00012627551020408164, + "loss": 1.3481, + "step": 578 + }, + { + "epoch": 11.72, + "learning_rate": 0.00012614795918367347, + "loss": 1.4497, + "step": 579 + }, + { + "epoch": 11.74, + "learning_rate": 0.0001260204081632653, + "loss": 1.3866, + "step": 580 + }, + { + "epoch": 11.76, + "learning_rate": 0.00012589285714285713, + "loss": 1.42, + "step": 581 + }, + { + "epoch": 11.78, + "learning_rate": 0.000125765306122449, + "loss": 1.3562, + "step": 582 + }, + { + "epoch": 11.8, + "learning_rate": 0.00012563775510204082, + "loss": 1.3249, + "step": 583 + }, + { + "epoch": 11.82, + "learning_rate": 0.00012551020408163265, + "loss": 1.4277, + "step": 584 + }, + { + "epoch": 11.84, + "learning_rate": 0.0001253826530612245, + "loss": 1.3734, + "step": 585 + }, + { + "epoch": 11.86, + "learning_rate": 0.00012525510204081634, + "loss": 1.3765, + "step": 586 + }, + { + "epoch": 11.88, + "learning_rate": 0.00012512755102040817, + "loss": 1.4153, + "step": 587 + }, + { + "epoch": 11.9, + "learning_rate": 0.000125, + "loss": 1.3847, + "step": 588 + }, + { + "epoch": 11.92, + "learning_rate": 0.00012487244897959186, + "loss": 1.3824, + "step": 589 + }, + { + "epoch": 11.94, + "learning_rate": 0.00012474489795918366, + "loss": 1.3938, + "step": 590 + }, + { + "epoch": 11.96, + "learning_rate": 0.00012461734693877552, + "loss": 1.4143, + "step": 591 + }, + { + "epoch": 11.98, + "learning_rate": 0.00012448979591836735, + "loss": 1.3794, + "step": 592 + }, + { + "epoch": 12.0, + "learning_rate": 0.00012436224489795918, + "loss": 1.3755, + "step": 593 + }, + { + "epoch": 12.02, + "learning_rate": 0.00012423469387755101, + "loss": 1.3736, + "step": 594 + }, + { + "epoch": 12.04, + "learning_rate": 0.00012410714285714287, + "loss": 1.2957, + "step": 595 + }, + { + "epoch": 12.06, + "learning_rate": 0.0001239795918367347, + "loss": 1.2996, + "step": 596 + }, + { + "epoch": 12.08, + "learning_rate": 0.00012385204081632653, + "loss": 1.3648, + "step": 597 + }, + { + "epoch": 12.1, + "learning_rate": 0.00012372448979591837, + "loss": 1.3031, + "step": 598 + }, + { + "epoch": 12.12, + "learning_rate": 0.00012359693877551022, + "loss": 1.2933, + "step": 599 + }, + { + "epoch": 12.14, + "learning_rate": 0.00012346938775510203, + "loss": 1.322, + "step": 600 + }, + { + "epoch": 12.16, + "learning_rate": 0.00012334183673469389, + "loss": 1.3123, + "step": 601 + }, + { + "epoch": 12.18, + "learning_rate": 0.00012321428571428572, + "loss": 1.3187, + "step": 602 + }, + { + "epoch": 12.2, + "learning_rate": 0.00012308673469387755, + "loss": 1.3353, + "step": 603 + }, + { + "epoch": 12.22, + "learning_rate": 0.0001229591836734694, + "loss": 1.3221, + "step": 604 + }, + { + "epoch": 12.24, + "learning_rate": 0.00012283163265306124, + "loss": 1.3458, + "step": 605 + }, + { + "epoch": 12.26, + "learning_rate": 0.00012270408163265307, + "loss": 1.275, + "step": 606 + }, + { + "epoch": 12.28, + "learning_rate": 0.0001225765306122449, + "loss": 1.3455, + "step": 607 + }, + { + "epoch": 12.3, + "learning_rate": 0.00012244897959183676, + "loss": 1.2769, + "step": 608 + }, + { + "epoch": 12.32, + "learning_rate": 0.00012232142857142859, + "loss": 1.3201, + "step": 609 + }, + { + "epoch": 12.34, + "learning_rate": 0.00012219387755102042, + "loss": 1.3073, + "step": 610 + }, + { + "epoch": 12.36, + "learning_rate": 
0.00012206632653061225, + "loss": 1.3103, + "step": 611 + }, + { + "epoch": 12.38, + "learning_rate": 0.00012193877551020409, + "loss": 1.4437, + "step": 612 + }, + { + "epoch": 12.4, + "learning_rate": 0.00012181122448979591, + "loss": 1.3086, + "step": 613 + }, + { + "epoch": 12.42, + "learning_rate": 0.00012168367346938775, + "loss": 1.3867, + "step": 614 + }, + { + "epoch": 12.44, + "learning_rate": 0.0001215561224489796, + "loss": 1.2565, + "step": 615 + }, + { + "epoch": 12.46, + "learning_rate": 0.00012142857142857143, + "loss": 1.335, + "step": 616 + }, + { + "epoch": 12.48, + "learning_rate": 0.00012130102040816327, + "loss": 1.3423, + "step": 617 + }, + { + "epoch": 12.5, + "learning_rate": 0.00012117346938775512, + "loss": 1.3433, + "step": 618 + }, + { + "epoch": 12.52, + "learning_rate": 0.00012104591836734695, + "loss": 1.3387, + "step": 619 + }, + { + "epoch": 12.55, + "learning_rate": 0.00012091836734693878, + "loss": 1.3923, + "step": 620 + }, + { + "epoch": 12.57, + "learning_rate": 0.00012079081632653062, + "loss": 1.3774, + "step": 621 + }, + { + "epoch": 12.59, + "learning_rate": 0.00012066326530612247, + "loss": 1.3203, + "step": 622 + }, + { + "epoch": 12.61, + "learning_rate": 0.00012053571428571429, + "loss": 1.2924, + "step": 623 + }, + { + "epoch": 12.63, + "learning_rate": 0.00012040816326530613, + "loss": 1.3292, + "step": 624 + }, + { + "epoch": 12.65, + "learning_rate": 0.00012028061224489798, + "loss": 1.3161, + "step": 625 + }, + { + "epoch": 12.67, + "learning_rate": 0.00012015306122448979, + "loss": 1.352, + "step": 626 + }, + { + "epoch": 12.69, + "learning_rate": 0.00012002551020408164, + "loss": 1.3577, + "step": 627 + }, + { + "epoch": 12.71, + "learning_rate": 0.00011989795918367348, + "loss": 1.3575, + "step": 628 + }, + { + "epoch": 12.73, + "learning_rate": 0.0001197704081632653, + "loss": 1.3727, + "step": 629 + }, + { + "epoch": 12.75, + "learning_rate": 0.00011964285714285714, + "loss": 1.3312, + "step": 630 + }, + { + "epoch": 12.77, + "learning_rate": 0.00011951530612244899, + "loss": 1.3378, + "step": 631 + }, + { + "epoch": 12.79, + "learning_rate": 0.00011938775510204083, + "loss": 1.295, + "step": 632 + }, + { + "epoch": 12.81, + "learning_rate": 0.00011926020408163265, + "loss": 1.3447, + "step": 633 + }, + { + "epoch": 12.83, + "learning_rate": 0.0001191326530612245, + "loss": 1.3835, + "step": 634 + }, + { + "epoch": 12.85, + "learning_rate": 0.00011900510204081634, + "loss": 1.3222, + "step": 635 + }, + { + "epoch": 12.87, + "learning_rate": 0.00011887755102040817, + "loss": 1.2851, + "step": 636 + }, + { + "epoch": 12.89, + "learning_rate": 0.00011875, + "loss": 1.2723, + "step": 637 + }, + { + "epoch": 12.91, + "learning_rate": 0.00011862244897959184, + "loss": 1.3924, + "step": 638 + }, + { + "epoch": 12.93, + "learning_rate": 0.00011849489795918368, + "loss": 1.4625, + "step": 639 + }, + { + "epoch": 12.95, + "learning_rate": 0.00011836734693877552, + "loss": 1.3245, + "step": 640 + }, + { + "epoch": 12.97, + "learning_rate": 0.00011823979591836736, + "loss": 1.4042, + "step": 641 + }, + { + "epoch": 12.99, + "learning_rate": 0.00011811224489795918, + "loss": 1.3761, + "step": 642 + }, + { + "epoch": 13.01, + "learning_rate": 0.00011798469387755103, + "loss": 1.3376, + "step": 643 + }, + { + "epoch": 13.03, + "learning_rate": 0.00011785714285714287, + "loss": 1.2174, + "step": 644 + }, + { + "epoch": 13.05, + "learning_rate": 0.00011772959183673471, + "loss": 1.3602, + "step": 645 + }, + { + "epoch": 13.07, + "learning_rate": 
0.00011760204081632653, + "loss": 1.3002, + "step": 646 + }, + { + "epoch": 13.09, + "learning_rate": 0.00011747448979591838, + "loss": 1.2262, + "step": 647 + }, + { + "epoch": 13.11, + "learning_rate": 0.00011734693877551022, + "loss": 1.3048, + "step": 648 + }, + { + "epoch": 13.13, + "learning_rate": 0.00011721938775510204, + "loss": 1.2231, + "step": 649 + }, + { + "epoch": 13.15, + "learning_rate": 0.00011709183673469388, + "loss": 1.2996, + "step": 650 + }, + { + "epoch": 13.17, + "learning_rate": 0.00011696428571428573, + "loss": 1.2708, + "step": 651 + }, + { + "epoch": 13.19, + "learning_rate": 0.00011683673469387754, + "loss": 1.2776, + "step": 652 + }, + { + "epoch": 13.21, + "learning_rate": 0.00011670918367346939, + "loss": 1.248, + "step": 653 + }, + { + "epoch": 13.23, + "learning_rate": 0.00011658163265306123, + "loss": 1.2582, + "step": 654 + }, + { + "epoch": 13.25, + "learning_rate": 0.00011645408163265305, + "loss": 1.3011, + "step": 655 + }, + { + "epoch": 13.27, + "learning_rate": 0.0001163265306122449, + "loss": 1.2969, + "step": 656 + }, + { + "epoch": 13.29, + "learning_rate": 0.00011619897959183674, + "loss": 1.2454, + "step": 657 + }, + { + "epoch": 13.31, + "learning_rate": 0.00011607142857142858, + "loss": 1.1914, + "step": 658 + }, + { + "epoch": 13.33, + "learning_rate": 0.00011594387755102041, + "loss": 1.34, + "step": 659 + }, + { + "epoch": 13.35, + "learning_rate": 0.00011581632653061225, + "loss": 1.2828, + "step": 660 + }, + { + "epoch": 13.37, + "learning_rate": 0.00011568877551020409, + "loss": 1.2962, + "step": 661 + }, + { + "epoch": 13.39, + "learning_rate": 0.00011556122448979592, + "loss": 1.3334, + "step": 662 + }, + { + "epoch": 13.42, + "learning_rate": 0.00011543367346938776, + "loss": 1.2832, + "step": 663 + }, + { + "epoch": 13.44, + "learning_rate": 0.00011530612244897961, + "loss": 1.3012, + "step": 664 + }, + { + "epoch": 13.46, + "learning_rate": 0.00011517857142857143, + "loss": 1.2857, + "step": 665 + }, + { + "epoch": 13.48, + "learning_rate": 0.00011505102040816327, + "loss": 1.2855, + "step": 666 + }, + { + "epoch": 13.5, + "learning_rate": 0.00011492346938775512, + "loss": 1.3077, + "step": 667 + }, + { + "epoch": 13.52, + "learning_rate": 0.00011479591836734696, + "loss": 1.3139, + "step": 668 + }, + { + "epoch": 13.54, + "learning_rate": 0.00011466836734693878, + "loss": 1.3138, + "step": 669 + }, + { + "epoch": 13.56, + "learning_rate": 0.00011454081632653062, + "loss": 1.2808, + "step": 670 + }, + { + "epoch": 13.58, + "learning_rate": 0.00011441326530612247, + "loss": 1.2492, + "step": 671 + }, + { + "epoch": 13.6, + "learning_rate": 0.00011428571428571428, + "loss": 1.2027, + "step": 672 + }, + { + "epoch": 13.62, + "learning_rate": 0.00011415816326530613, + "loss": 1.33, + "step": 673 + }, + { + "epoch": 13.64, + "learning_rate": 0.00011403061224489797, + "loss": 1.3112, + "step": 674 + }, + { + "epoch": 13.66, + "learning_rate": 0.00011390306122448979, + "loss": 1.2772, + "step": 675 + }, + { + "epoch": 13.68, + "learning_rate": 0.00011377551020408163, + "loss": 1.2701, + "step": 676 + }, + { + "epoch": 13.7, + "learning_rate": 0.00011364795918367348, + "loss": 1.1973, + "step": 677 + }, + { + "epoch": 13.72, + "learning_rate": 0.0001135204081632653, + "loss": 1.3124, + "step": 678 + }, + { + "epoch": 13.74, + "learning_rate": 0.00011339285714285714, + "loss": 1.3085, + "step": 679 + }, + { + "epoch": 13.76, + "learning_rate": 0.00011326530612244898, + "loss": 1.3457, + "step": 680 + }, + { + "epoch": 13.78, + 
"learning_rate": 0.00011313775510204083, + "loss": 1.3338, + "step": 681 + }, + { + "epoch": 13.8, + "learning_rate": 0.00011301020408163266, + "loss": 1.2753, + "step": 682 + }, + { + "epoch": 13.82, + "learning_rate": 0.00011288265306122449, + "loss": 1.2786, + "step": 683 + }, + { + "epoch": 13.84, + "learning_rate": 0.00011275510204081634, + "loss": 1.2584, + "step": 684 + }, + { + "epoch": 13.86, + "learning_rate": 0.00011262755102040817, + "loss": 1.2779, + "step": 685 + }, + { + "epoch": 13.88, + "learning_rate": 0.00011250000000000001, + "loss": 1.3502, + "step": 686 + }, + { + "epoch": 13.9, + "learning_rate": 0.00011237244897959185, + "loss": 1.3251, + "step": 687 + }, + { + "epoch": 13.92, + "learning_rate": 0.00011224489795918367, + "loss": 1.273, + "step": 688 + }, + { + "epoch": 13.94, + "learning_rate": 0.00011211734693877552, + "loss": 1.3341, + "step": 689 + }, + { + "epoch": 13.96, + "learning_rate": 0.00011198979591836736, + "loss": 1.2654, + "step": 690 + }, + { + "epoch": 13.98, + "learning_rate": 0.00011186224489795918, + "loss": 1.3333, + "step": 691 + }, + { + "epoch": 14.0, + "learning_rate": 0.00011173469387755102, + "loss": 1.3246, + "step": 692 + }, + { + "epoch": 14.02, + "learning_rate": 0.00011160714285714287, + "loss": 1.2547, + "step": 693 + }, + { + "epoch": 14.04, + "learning_rate": 0.00011147959183673471, + "loss": 1.208, + "step": 694 + }, + { + "epoch": 14.06, + "learning_rate": 0.00011135204081632653, + "loss": 1.223, + "step": 695 + }, + { + "epoch": 14.08, + "learning_rate": 0.00011122448979591837, + "loss": 1.2483, + "step": 696 + }, + { + "epoch": 14.1, + "learning_rate": 0.00011109693877551022, + "loss": 1.2823, + "step": 697 + }, + { + "epoch": 14.12, + "learning_rate": 0.00011096938775510204, + "loss": 1.2013, + "step": 698 + }, + { + "epoch": 14.14, + "learning_rate": 0.00011084183673469388, + "loss": 1.1883, + "step": 699 + }, + { + "epoch": 14.16, + "learning_rate": 0.00011071428571428572, + "loss": 1.2364, + "step": 700 + }, + { + "epoch": 14.18, + "learning_rate": 0.00011058673469387754, + "loss": 1.2069, + "step": 701 + }, + { + "epoch": 14.2, + "learning_rate": 0.00011045918367346939, + "loss": 1.1968, + "step": 702 + }, + { + "epoch": 14.22, + "learning_rate": 0.00011033163265306123, + "loss": 1.2236, + "step": 703 + }, + { + "epoch": 14.24, + "learning_rate": 0.00011020408163265306, + "loss": 1.1942, + "step": 704 + }, + { + "epoch": 14.26, + "learning_rate": 0.0001100765306122449, + "loss": 1.2561, + "step": 705 + }, + { + "epoch": 14.29, + "learning_rate": 0.00010994897959183674, + "loss": 1.1839, + "step": 706 + }, + { + "epoch": 14.31, + "learning_rate": 0.00010982142857142858, + "loss": 1.2128, + "step": 707 + }, + { + "epoch": 14.33, + "learning_rate": 0.00010969387755102041, + "loss": 1.3086, + "step": 708 + }, + { + "epoch": 14.35, + "learning_rate": 0.00010956632653061226, + "loss": 1.2379, + "step": 709 + }, + { + "epoch": 14.37, + "learning_rate": 0.0001094387755102041, + "loss": 1.176, + "step": 710 + }, + { + "epoch": 14.39, + "learning_rate": 0.00010931122448979592, + "loss": 1.2105, + "step": 711 + }, + { + "epoch": 14.41, + "learning_rate": 0.00010918367346938776, + "loss": 1.2149, + "step": 712 + }, + { + "epoch": 14.43, + "learning_rate": 0.0001090561224489796, + "loss": 1.2392, + "step": 713 + }, + { + "epoch": 14.45, + "learning_rate": 0.00010892857142857142, + "loss": 1.2471, + "step": 714 + }, + { + "epoch": 14.47, + "learning_rate": 0.00010880102040816327, + "loss": 1.2561, + "step": 715 + }, + { + "epoch": 
14.49, + "learning_rate": 0.00010867346938775511, + "loss": 1.2179, + "step": 716 + }, + { + "epoch": 14.51, + "learning_rate": 0.00010854591836734696, + "loss": 1.2459, + "step": 717 + }, + { + "epoch": 14.53, + "learning_rate": 0.00010841836734693877, + "loss": 1.2933, + "step": 718 + }, + { + "epoch": 14.55, + "learning_rate": 0.00010829081632653062, + "loss": 1.2862, + "step": 719 + }, + { + "epoch": 14.57, + "learning_rate": 0.00010816326530612246, + "loss": 1.2976, + "step": 720 + }, + { + "epoch": 14.59, + "learning_rate": 0.00010803571428571428, + "loss": 1.231, + "step": 721 + }, + { + "epoch": 14.61, + "learning_rate": 0.00010790816326530613, + "loss": 1.2464, + "step": 722 + }, + { + "epoch": 14.63, + "learning_rate": 0.00010778061224489797, + "loss": 1.2181, + "step": 723 + }, + { + "epoch": 14.65, + "learning_rate": 0.00010765306122448979, + "loss": 1.3307, + "step": 724 + }, + { + "epoch": 14.67, + "learning_rate": 0.00010752551020408163, + "loss": 1.1723, + "step": 725 + }, + { + "epoch": 14.69, + "learning_rate": 0.00010739795918367348, + "loss": 1.1528, + "step": 726 + }, + { + "epoch": 14.71, + "learning_rate": 0.0001072704081632653, + "loss": 1.215, + "step": 727 + }, + { + "epoch": 14.73, + "learning_rate": 0.00010714285714285715, + "loss": 1.2624, + "step": 728 + }, + { + "epoch": 14.75, + "learning_rate": 0.00010701530612244898, + "loss": 1.3117, + "step": 729 + }, + { + "epoch": 14.77, + "learning_rate": 0.00010688775510204083, + "loss": 1.2572, + "step": 730 + }, + { + "epoch": 14.79, + "learning_rate": 0.00010676020408163266, + "loss": 1.222, + "step": 731 + }, + { + "epoch": 14.81, + "learning_rate": 0.0001066326530612245, + "loss": 1.2881, + "step": 732 + }, + { + "epoch": 14.83, + "learning_rate": 0.00010650510204081635, + "loss": 1.2676, + "step": 733 + }, + { + "epoch": 14.85, + "learning_rate": 0.00010637755102040816, + "loss": 1.2734, + "step": 734 + }, + { + "epoch": 14.87, + "learning_rate": 0.00010625000000000001, + "loss": 1.2885, + "step": 735 + }, + { + "epoch": 14.89, + "learning_rate": 0.00010612244897959185, + "loss": 1.2764, + "step": 736 + }, + { + "epoch": 14.91, + "learning_rate": 0.00010599489795918367, + "loss": 1.3267, + "step": 737 + }, + { + "epoch": 14.93, + "learning_rate": 0.00010586734693877551, + "loss": 1.2445, + "step": 738 + }, + { + "epoch": 14.95, + "learning_rate": 0.00010573979591836736, + "loss": 1.3359, + "step": 739 + }, + { + "epoch": 14.97, + "learning_rate": 0.00010561224489795918, + "loss": 1.2508, + "step": 740 + }, + { + "epoch": 14.99, + "learning_rate": 0.00010548469387755102, + "loss": 1.2227, + "step": 741 + }, + { + "epoch": 15.01, + "learning_rate": 0.00010535714285714286, + "loss": 1.1889, + "step": 742 + }, + { + "epoch": 15.03, + "learning_rate": 0.00010522959183673471, + "loss": 1.1919, + "step": 743 + }, + { + "epoch": 15.05, + "learning_rate": 0.00010510204081632653, + "loss": 1.2383, + "step": 744 + }, + { + "epoch": 15.07, + "learning_rate": 0.00010497448979591837, + "loss": 1.2401, + "step": 745 + }, + { + "epoch": 15.09, + "learning_rate": 0.00010484693877551021, + "loss": 1.2015, + "step": 746 + }, + { + "epoch": 15.11, + "learning_rate": 0.00010471938775510203, + "loss": 1.1509, + "step": 747 + }, + { + "epoch": 15.13, + "learning_rate": 0.00010459183673469388, + "loss": 1.1878, + "step": 748 + }, + { + "epoch": 15.16, + "learning_rate": 0.00010446428571428572, + "loss": 1.1706, + "step": 749 + }, + { + "epoch": 15.18, + "learning_rate": 0.00010433673469387755, + "loss": 1.1285, + "step": 750 + }, + { 
+ "epoch": 15.2, + "learning_rate": 0.0001042091836734694, + "loss": 1.1608, + "step": 751 + }, + { + "epoch": 15.22, + "learning_rate": 0.00010408163265306123, + "loss": 1.1178, + "step": 752 + }, + { + "epoch": 15.24, + "learning_rate": 0.00010395408163265306, + "loss": 1.1293, + "step": 753 + }, + { + "epoch": 15.26, + "learning_rate": 0.0001038265306122449, + "loss": 1.2306, + "step": 754 + }, + { + "epoch": 15.28, + "learning_rate": 0.00010369897959183675, + "loss": 1.1541, + "step": 755 + }, + { + "epoch": 15.3, + "learning_rate": 0.00010357142857142859, + "loss": 1.1702, + "step": 756 + }, + { + "epoch": 15.32, + "learning_rate": 0.00010344387755102041, + "loss": 1.2119, + "step": 757 + }, + { + "epoch": 15.34, + "learning_rate": 0.00010331632653061225, + "loss": 1.2239, + "step": 758 + }, + { + "epoch": 15.36, + "learning_rate": 0.0001031887755102041, + "loss": 1.2019, + "step": 759 + }, + { + "epoch": 15.38, + "learning_rate": 0.00010306122448979591, + "loss": 1.2197, + "step": 760 + }, + { + "epoch": 15.4, + "learning_rate": 0.00010293367346938776, + "loss": 1.1769, + "step": 761 + }, + { + "epoch": 15.42, + "learning_rate": 0.0001028061224489796, + "loss": 1.1907, + "step": 762 + }, + { + "epoch": 15.44, + "learning_rate": 0.00010267857142857142, + "loss": 1.2089, + "step": 763 + }, + { + "epoch": 15.46, + "learning_rate": 0.00010255102040816327, + "loss": 1.1335, + "step": 764 + }, + { + "epoch": 15.48, + "learning_rate": 0.00010242346938775511, + "loss": 1.1633, + "step": 765 + }, + { + "epoch": 15.5, + "learning_rate": 0.00010229591836734695, + "loss": 1.1578, + "step": 766 + }, + { + "epoch": 15.52, + "learning_rate": 0.00010216836734693877, + "loss": 1.2236, + "step": 767 + }, + { + "epoch": 15.54, + "learning_rate": 0.00010204081632653062, + "loss": 1.1941, + "step": 768 + }, + { + "epoch": 15.56, + "learning_rate": 0.00010191326530612246, + "loss": 1.2666, + "step": 769 + }, + { + "epoch": 15.58, + "learning_rate": 0.00010178571428571428, + "loss": 1.1232, + "step": 770 + }, + { + "epoch": 15.6, + "learning_rate": 0.00010165816326530612, + "loss": 1.2242, + "step": 771 + }, + { + "epoch": 15.62, + "learning_rate": 0.00010153061224489797, + "loss": 1.1852, + "step": 772 + }, + { + "epoch": 15.64, + "learning_rate": 0.0001014030612244898, + "loss": 1.2626, + "step": 773 + }, + { + "epoch": 15.66, + "learning_rate": 0.00010127551020408164, + "loss": 1.1873, + "step": 774 + }, + { + "epoch": 15.68, + "learning_rate": 0.00010114795918367349, + "loss": 1.3005, + "step": 775 + }, + { + "epoch": 15.7, + "learning_rate": 0.0001010204081632653, + "loss": 1.1904, + "step": 776 + }, + { + "epoch": 15.72, + "learning_rate": 0.00010089285714285715, + "loss": 1.2927, + "step": 777 + }, + { + "epoch": 15.74, + "learning_rate": 0.00010076530612244899, + "loss": 1.179, + "step": 778 + }, + { + "epoch": 15.76, + "learning_rate": 0.00010063775510204084, + "loss": 1.2027, + "step": 779 + }, + { + "epoch": 15.78, + "learning_rate": 0.00010051020408163265, + "loss": 1.2428, + "step": 780 + }, + { + "epoch": 15.8, + "learning_rate": 0.0001003826530612245, + "loss": 1.2324, + "step": 781 + }, + { + "epoch": 15.82, + "learning_rate": 0.00010025510204081634, + "loss": 1.1251, + "step": 782 + }, + { + "epoch": 15.84, + "learning_rate": 0.00010012755102040816, + "loss": 1.2405, + "step": 783 + }, + { + "epoch": 15.86, + "learning_rate": 0.0001, + "loss": 1.2005, + "step": 784 + }, + { + "epoch": 15.88, + "learning_rate": 9.987244897959184e-05, + "loss": 1.2259, + "step": 785 + }, + { + "epoch": 15.9, 
+ "learning_rate": 9.974489795918368e-05, + "loss": 1.1576, + "step": 786 + }, + { + "epoch": 15.92, + "learning_rate": 9.961734693877551e-05, + "loss": 1.1834, + "step": 787 + }, + { + "epoch": 15.94, + "learning_rate": 9.948979591836736e-05, + "loss": 1.2396, + "step": 788 + }, + { + "epoch": 15.96, + "learning_rate": 9.936224489795919e-05, + "loss": 1.1865, + "step": 789 + }, + { + "epoch": 15.98, + "learning_rate": 9.923469387755102e-05, + "loss": 1.2356, + "step": 790 + }, + { + "epoch": 16.01, + "learning_rate": 9.910714285714286e-05, + "loss": 1.2639, + "step": 791 + }, + { + "epoch": 16.03, + "learning_rate": 9.897959183673469e-05, + "loss": 1.1216, + "step": 792 + }, + { + "epoch": 16.05, + "learning_rate": 9.885204081632652e-05, + "loss": 1.1051, + "step": 793 + }, + { + "epoch": 16.07, + "learning_rate": 9.872448979591837e-05, + "loss": 1.0864, + "step": 794 + }, + { + "epoch": 16.09, + "learning_rate": 9.859693877551021e-05, + "loss": 1.182, + "step": 795 + }, + { + "epoch": 16.11, + "learning_rate": 9.846938775510204e-05, + "loss": 1.1272, + "step": 796 + }, + { + "epoch": 16.13, + "learning_rate": 9.834183673469389e-05, + "loss": 1.1946, + "step": 797 + }, + { + "epoch": 16.15, + "learning_rate": 9.821428571428572e-05, + "loss": 1.0875, + "step": 798 + }, + { + "epoch": 16.17, + "learning_rate": 9.808673469387756e-05, + "loss": 1.1671, + "step": 799 + }, + { + "epoch": 16.19, + "learning_rate": 9.79591836734694e-05, + "loss": 1.1502, + "step": 800 + }, + { + "epoch": 16.21, + "learning_rate": 9.783163265306124e-05, + "loss": 1.19, + "step": 801 + }, + { + "epoch": 16.23, + "learning_rate": 9.770408163265307e-05, + "loss": 1.1258, + "step": 802 + }, + { + "epoch": 16.25, + "learning_rate": 9.75765306122449e-05, + "loss": 1.1765, + "step": 803 + }, + { + "epoch": 16.27, + "learning_rate": 9.744897959183674e-05, + "loss": 1.1217, + "step": 804 + }, + { + "epoch": 16.29, + "learning_rate": 9.732142857142858e-05, + "loss": 1.1293, + "step": 805 + }, + { + "epoch": 16.31, + "learning_rate": 9.719387755102042e-05, + "loss": 1.17, + "step": 806 + }, + { + "epoch": 16.33, + "learning_rate": 9.706632653061225e-05, + "loss": 1.17, + "step": 807 + }, + { + "epoch": 16.35, + "learning_rate": 9.693877551020408e-05, + "loss": 1.2004, + "step": 808 + }, + { + "epoch": 16.37, + "learning_rate": 9.681122448979593e-05, + "loss": 1.1648, + "step": 809 + }, + { + "epoch": 16.39, + "learning_rate": 9.668367346938776e-05, + "loss": 1.0688, + "step": 810 + }, + { + "epoch": 16.41, + "learning_rate": 9.655612244897959e-05, + "loss": 1.1607, + "step": 811 + }, + { + "epoch": 16.43, + "learning_rate": 9.642857142857143e-05, + "loss": 1.1298, + "step": 812 + }, + { + "epoch": 16.45, + "learning_rate": 9.630102040816326e-05, + "loss": 1.1064, + "step": 813 + }, + { + "epoch": 16.47, + "learning_rate": 9.617346938775511e-05, + "loss": 1.1472, + "step": 814 + }, + { + "epoch": 16.49, + "learning_rate": 9.604591836734694e-05, + "loss": 1.1577, + "step": 815 + }, + { + "epoch": 16.51, + "learning_rate": 9.591836734693878e-05, + "loss": 1.1436, + "step": 816 + }, + { + "epoch": 16.53, + "learning_rate": 9.579081632653061e-05, + "loss": 1.1657, + "step": 817 + }, + { + "epoch": 16.55, + "learning_rate": 9.566326530612246e-05, + "loss": 1.1147, + "step": 818 + }, + { + "epoch": 16.57, + "learning_rate": 9.553571428571429e-05, + "loss": 1.1839, + "step": 819 + }, + { + "epoch": 16.59, + "learning_rate": 9.540816326530613e-05, + "loss": 1.1298, + "step": 820 + }, + { + "epoch": 16.61, + "learning_rate": 
9.528061224489796e-05, + "loss": 1.2141, + "step": 821 + }, + { + "epoch": 16.63, + "learning_rate": 9.515306122448981e-05, + "loss": 1.2045, + "step": 822 + }, + { + "epoch": 16.65, + "learning_rate": 9.502551020408164e-05, + "loss": 1.1791, + "step": 823 + }, + { + "epoch": 16.67, + "learning_rate": 9.489795918367348e-05, + "loss": 1.1137, + "step": 824 + }, + { + "epoch": 16.69, + "learning_rate": 9.477040816326531e-05, + "loss": 1.1312, + "step": 825 + }, + { + "epoch": 16.71, + "learning_rate": 9.464285714285715e-05, + "loss": 1.1102, + "step": 826 + }, + { + "epoch": 16.73, + "learning_rate": 9.451530612244899e-05, + "loss": 1.1865, + "step": 827 + }, + { + "epoch": 16.75, + "learning_rate": 9.438775510204082e-05, + "loss": 1.1232, + "step": 828 + }, + { + "epoch": 16.77, + "learning_rate": 9.426020408163265e-05, + "loss": 1.2068, + "step": 829 + }, + { + "epoch": 16.79, + "learning_rate": 9.41326530612245e-05, + "loss": 1.1864, + "step": 830 + }, + { + "epoch": 16.81, + "learning_rate": 9.400510204081633e-05, + "loss": 1.2195, + "step": 831 + }, + { + "epoch": 16.83, + "learning_rate": 9.387755102040817e-05, + "loss": 1.2063, + "step": 832 + }, + { + "epoch": 16.85, + "learning_rate": 9.375e-05, + "loss": 1.1455, + "step": 833 + }, + { + "epoch": 16.88, + "learning_rate": 9.362244897959183e-05, + "loss": 1.1819, + "step": 834 + }, + { + "epoch": 16.9, + "learning_rate": 9.349489795918368e-05, + "loss": 1.1887, + "step": 835 + }, + { + "epoch": 16.92, + "learning_rate": 9.336734693877551e-05, + "loss": 1.1557, + "step": 836 + }, + { + "epoch": 16.94, + "learning_rate": 9.323979591836735e-05, + "loss": 1.2094, + "step": 837 + }, + { + "epoch": 16.96, + "learning_rate": 9.311224489795918e-05, + "loss": 1.1512, + "step": 838 + }, + { + "epoch": 16.98, + "learning_rate": 9.298469387755103e-05, + "loss": 1.1463, + "step": 839 + }, + { + "epoch": 17.0, + "learning_rate": 9.285714285714286e-05, + "loss": 1.155, + "step": 840 + }, + { + "epoch": 17.02, + "learning_rate": 9.27295918367347e-05, + "loss": 1.1292, + "step": 841 + }, + { + "epoch": 17.04, + "learning_rate": 9.260204081632653e-05, + "loss": 1.0996, + "step": 842 + }, + { + "epoch": 17.06, + "learning_rate": 9.247448979591838e-05, + "loss": 1.0662, + "step": 843 + }, + { + "epoch": 17.08, + "learning_rate": 9.234693877551021e-05, + "loss": 1.0931, + "step": 844 + }, + { + "epoch": 17.1, + "learning_rate": 9.221938775510205e-05, + "loss": 1.0727, + "step": 845 + }, + { + "epoch": 17.12, + "learning_rate": 9.209183673469388e-05, + "loss": 1.1043, + "step": 846 + }, + { + "epoch": 17.14, + "learning_rate": 9.196428571428572e-05, + "loss": 1.0594, + "step": 847 + }, + { + "epoch": 17.16, + "learning_rate": 9.183673469387756e-05, + "loss": 1.0952, + "step": 848 + }, + { + "epoch": 17.18, + "learning_rate": 9.170918367346939e-05, + "loss": 1.0639, + "step": 849 + }, + { + "epoch": 17.2, + "learning_rate": 9.158163265306124e-05, + "loss": 1.132, + "step": 850 + }, + { + "epoch": 17.22, + "learning_rate": 9.145408163265307e-05, + "loss": 1.1083, + "step": 851 + }, + { + "epoch": 17.24, + "learning_rate": 9.13265306122449e-05, + "loss": 1.1282, + "step": 852 + }, + { + "epoch": 17.26, + "learning_rate": 9.119897959183674e-05, + "loss": 1.0474, + "step": 853 + }, + { + "epoch": 17.28, + "learning_rate": 9.107142857142857e-05, + "loss": 1.1138, + "step": 854 + }, + { + "epoch": 17.3, + "learning_rate": 9.094387755102042e-05, + "loss": 1.1025, + "step": 855 + }, + { + "epoch": 17.32, + "learning_rate": 9.081632653061225e-05, + "loss": 1.0968, 
+ "step": 856 + }, + { + "epoch": 17.34, + "learning_rate": 9.068877551020408e-05, + "loss": 1.1683, + "step": 857 + }, + { + "epoch": 17.36, + "learning_rate": 9.056122448979592e-05, + "loss": 1.0975, + "step": 858 + }, + { + "epoch": 17.38, + "learning_rate": 9.043367346938775e-05, + "loss": 1.1274, + "step": 859 + }, + { + "epoch": 17.4, + "learning_rate": 9.030612244897958e-05, + "loss": 1.0916, + "step": 860 + }, + { + "epoch": 17.42, + "learning_rate": 9.017857142857143e-05, + "loss": 1.0912, + "step": 861 + }, + { + "epoch": 17.44, + "learning_rate": 9.005102040816327e-05, + "loss": 1.0875, + "step": 862 + }, + { + "epoch": 17.46, + "learning_rate": 8.99234693877551e-05, + "loss": 1.05, + "step": 863 + }, + { + "epoch": 17.48, + "learning_rate": 8.979591836734695e-05, + "loss": 1.1418, + "step": 864 + }, + { + "epoch": 17.5, + "learning_rate": 8.966836734693878e-05, + "loss": 1.0609, + "step": 865 + }, + { + "epoch": 17.52, + "learning_rate": 8.954081632653062e-05, + "loss": 1.1611, + "step": 866 + }, + { + "epoch": 17.54, + "learning_rate": 8.941326530612245e-05, + "loss": 1.1065, + "step": 867 + }, + { + "epoch": 17.56, + "learning_rate": 8.92857142857143e-05, + "loss": 1.1611, + "step": 868 + }, + { + "epoch": 17.58, + "learning_rate": 8.915816326530613e-05, + "loss": 1.1398, + "step": 869 + }, + { + "epoch": 17.6, + "learning_rate": 8.903061224489796e-05, + "loss": 1.1055, + "step": 870 + }, + { + "epoch": 17.62, + "learning_rate": 8.89030612244898e-05, + "loss": 1.1314, + "step": 871 + }, + { + "epoch": 17.64, + "learning_rate": 8.877551020408164e-05, + "loss": 1.1084, + "step": 872 + }, + { + "epoch": 17.66, + "learning_rate": 8.864795918367348e-05, + "loss": 1.1254, + "step": 873 + }, + { + "epoch": 17.68, + "learning_rate": 8.852040816326531e-05, + "loss": 1.142, + "step": 874 + }, + { + "epoch": 17.7, + "learning_rate": 8.839285714285714e-05, + "loss": 1.1371, + "step": 875 + }, + { + "epoch": 17.72, + "learning_rate": 8.826530612244899e-05, + "loss": 1.1092, + "step": 876 + }, + { + "epoch": 17.75, + "learning_rate": 8.813775510204082e-05, + "loss": 1.161, + "step": 877 + }, + { + "epoch": 17.77, + "learning_rate": 8.801020408163265e-05, + "loss": 1.1044, + "step": 878 + }, + { + "epoch": 17.79, + "learning_rate": 8.788265306122449e-05, + "loss": 1.117, + "step": 879 + }, + { + "epoch": 17.81, + "learning_rate": 8.775510204081632e-05, + "loss": 1.1262, + "step": 880 + }, + { + "epoch": 17.83, + "learning_rate": 8.762755102040817e-05, + "loss": 1.0829, + "step": 881 + }, + { + "epoch": 17.85, + "learning_rate": 8.75e-05, + "loss": 1.1393, + "step": 882 + }, + { + "epoch": 17.87, + "learning_rate": 8.737244897959183e-05, + "loss": 1.1781, + "step": 883 + }, + { + "epoch": 17.89, + "learning_rate": 8.724489795918367e-05, + "loss": 1.1582, + "step": 884 + }, + { + "epoch": 17.91, + "learning_rate": 8.711734693877552e-05, + "loss": 1.1469, + "step": 885 + }, + { + "epoch": 17.93, + "learning_rate": 8.698979591836735e-05, + "loss": 1.1494, + "step": 886 + }, + { + "epoch": 17.95, + "learning_rate": 8.68622448979592e-05, + "loss": 1.1251, + "step": 887 + }, + { + "epoch": 17.97, + "learning_rate": 8.673469387755102e-05, + "loss": 1.1624, + "step": 888 + }, + { + "epoch": 17.99, + "learning_rate": 8.660714285714287e-05, + "loss": 1.0842, + "step": 889 + }, + { + "epoch": 18.01, + "learning_rate": 8.64795918367347e-05, + "loss": 1.1944, + "step": 890 + }, + { + "epoch": 18.03, + "learning_rate": 8.635204081632653e-05, + "loss": 1.0642, + "step": 891 + }, + { + "epoch": 18.05, + 
"learning_rate": 8.622448979591838e-05, + "loss": 1.0459, + "step": 892 + }, + { + "epoch": 18.07, + "learning_rate": 8.60969387755102e-05, + "loss": 1.0941, + "step": 893 + }, + { + "epoch": 18.09, + "learning_rate": 8.596938775510205e-05, + "loss": 1.0457, + "step": 894 + }, + { + "epoch": 18.11, + "learning_rate": 8.584183673469388e-05, + "loss": 1.1033, + "step": 895 + }, + { + "epoch": 18.13, + "learning_rate": 8.571428571428571e-05, + "loss": 1.0756, + "step": 896 + }, + { + "epoch": 18.15, + "learning_rate": 8.558673469387756e-05, + "loss": 1.0615, + "step": 897 + }, + { + "epoch": 18.17, + "learning_rate": 8.545918367346939e-05, + "loss": 1.0828, + "step": 898 + }, + { + "epoch": 18.19, + "learning_rate": 8.533163265306123e-05, + "loss": 1.1158, + "step": 899 + }, + { + "epoch": 18.21, + "learning_rate": 8.520408163265306e-05, + "loss": 1.0133, + "step": 900 + }, + { + "epoch": 18.23, + "learning_rate": 8.50765306122449e-05, + "loss": 1.0437, + "step": 901 + }, + { + "epoch": 18.25, + "learning_rate": 8.494897959183674e-05, + "loss": 1.0372, + "step": 902 + }, + { + "epoch": 18.27, + "learning_rate": 8.482142857142857e-05, + "loss": 1.1012, + "step": 903 + }, + { + "epoch": 18.29, + "learning_rate": 8.469387755102041e-05, + "loss": 1.0777, + "step": 904 + }, + { + "epoch": 18.31, + "learning_rate": 8.456632653061224e-05, + "loss": 1.0799, + "step": 905 + }, + { + "epoch": 18.33, + "learning_rate": 8.443877551020409e-05, + "loss": 0.9846, + "step": 906 + }, + { + "epoch": 18.35, + "learning_rate": 8.431122448979592e-05, + "loss": 1.1, + "step": 907 + }, + { + "epoch": 18.37, + "learning_rate": 8.418367346938776e-05, + "loss": 1.0787, + "step": 908 + }, + { + "epoch": 18.39, + "learning_rate": 8.40561224489796e-05, + "loss": 1.0647, + "step": 909 + }, + { + "epoch": 18.41, + "learning_rate": 8.392857142857144e-05, + "loss": 1.056, + "step": 910 + }, + { + "epoch": 18.43, + "learning_rate": 8.380102040816327e-05, + "loss": 1.1131, + "step": 911 + }, + { + "epoch": 18.45, + "learning_rate": 8.367346938775511e-05, + "loss": 1.0825, + "step": 912 + }, + { + "epoch": 18.47, + "learning_rate": 8.354591836734695e-05, + "loss": 1.0681, + "step": 913 + }, + { + "epoch": 18.49, + "learning_rate": 8.341836734693878e-05, + "loss": 1.0479, + "step": 914 + }, + { + "epoch": 18.51, + "learning_rate": 8.329081632653062e-05, + "loss": 1.0921, + "step": 915 + }, + { + "epoch": 18.53, + "learning_rate": 8.316326530612245e-05, + "loss": 1.0626, + "step": 916 + }, + { + "epoch": 18.55, + "learning_rate": 8.30357142857143e-05, + "loss": 1.0518, + "step": 917 + }, + { + "epoch": 18.57, + "learning_rate": 8.290816326530613e-05, + "loss": 1.0557, + "step": 918 + }, + { + "epoch": 18.6, + "learning_rate": 8.278061224489796e-05, + "loss": 1.0831, + "step": 919 + }, + { + "epoch": 18.62, + "learning_rate": 8.26530612244898e-05, + "loss": 1.0307, + "step": 920 + }, + { + "epoch": 18.64, + "learning_rate": 8.252551020408163e-05, + "loss": 1.0455, + "step": 921 + }, + { + "epoch": 18.66, + "learning_rate": 8.239795918367348e-05, + "loss": 1.0667, + "step": 922 + }, + { + "epoch": 18.68, + "learning_rate": 8.227040816326531e-05, + "loss": 1.0736, + "step": 923 + }, + { + "epoch": 18.7, + "learning_rate": 8.214285714285714e-05, + "loss": 1.0108, + "step": 924 + }, + { + "epoch": 18.72, + "learning_rate": 8.201530612244898e-05, + "loss": 1.0458, + "step": 925 + }, + { + "epoch": 18.74, + "learning_rate": 8.188775510204081e-05, + "loss": 1.0852, + "step": 926 + }, + { + "epoch": 18.76, + "learning_rate": 
8.176020408163265e-05, + "loss": 1.1207, + "step": 927 + }, + { + "epoch": 18.78, + "learning_rate": 8.163265306122449e-05, + "loss": 1.0914, + "step": 928 + }, + { + "epoch": 18.8, + "learning_rate": 8.150510204081633e-05, + "loss": 1.1108, + "step": 929 + }, + { + "epoch": 18.82, + "learning_rate": 8.137755102040817e-05, + "loss": 1.1394, + "step": 930 + }, + { + "epoch": 18.84, + "learning_rate": 8.125000000000001e-05, + "loss": 1.029, + "step": 931 + }, + { + "epoch": 18.86, + "learning_rate": 8.112244897959184e-05, + "loss": 1.0661, + "step": 932 + }, + { + "epoch": 18.88, + "learning_rate": 8.099489795918369e-05, + "loss": 1.0303, + "step": 933 + }, + { + "epoch": 18.9, + "learning_rate": 8.086734693877552e-05, + "loss": 1.1144, + "step": 934 + }, + { + "epoch": 18.92, + "learning_rate": 8.073979591836736e-05, + "loss": 1.1096, + "step": 935 + }, + { + "epoch": 18.94, + "learning_rate": 8.061224489795919e-05, + "loss": 1.123, + "step": 936 + }, + { + "epoch": 18.96, + "learning_rate": 8.048469387755102e-05, + "loss": 1.1002, + "step": 937 + }, + { + "epoch": 18.98, + "learning_rate": 8.035714285714287e-05, + "loss": 1.1016, + "step": 938 + }, + { + "epoch": 19.0, + "learning_rate": 8.02295918367347e-05, + "loss": 1.0847, + "step": 939 + }, + { + "epoch": 19.02, + "learning_rate": 8.010204081632653e-05, + "loss": 1.1029, + "step": 940 + }, + { + "epoch": 19.04, + "learning_rate": 7.997448979591837e-05, + "loss": 1.041, + "step": 941 + }, + { + "epoch": 19.06, + "learning_rate": 7.98469387755102e-05, + "loss": 1.01, + "step": 942 + }, + { + "epoch": 19.08, + "learning_rate": 7.971938775510205e-05, + "loss": 1.0197, + "step": 943 + }, + { + "epoch": 19.1, + "learning_rate": 7.959183673469388e-05, + "loss": 1.0543, + "step": 944 + }, + { + "epoch": 19.12, + "learning_rate": 7.946428571428571e-05, + "loss": 1.0369, + "step": 945 + }, + { + "epoch": 19.14, + "learning_rate": 7.933673469387755e-05, + "loss": 1.0154, + "step": 946 + }, + { + "epoch": 19.16, + "learning_rate": 7.920918367346939e-05, + "loss": 0.9546, + "step": 947 + }, + { + "epoch": 19.18, + "learning_rate": 7.908163265306123e-05, + "loss": 0.9982, + "step": 948 + }, + { + "epoch": 19.2, + "learning_rate": 7.895408163265306e-05, + "loss": 1.0748, + "step": 949 + }, + { + "epoch": 19.22, + "learning_rate": 7.882653061224489e-05, + "loss": 1.0562, + "step": 950 + }, + { + "epoch": 19.24, + "learning_rate": 7.869897959183674e-05, + "loss": 1.0352, + "step": 951 + }, + { + "epoch": 19.26, + "learning_rate": 7.857142857142858e-05, + "loss": 0.9976, + "step": 952 + }, + { + "epoch": 19.28, + "learning_rate": 7.844387755102041e-05, + "loss": 1.0221, + "step": 953 + }, + { + "epoch": 19.3, + "learning_rate": 7.831632653061226e-05, + "loss": 1.0119, + "step": 954 + }, + { + "epoch": 19.32, + "learning_rate": 7.818877551020409e-05, + "loss": 1.0657, + "step": 955 + }, + { + "epoch": 19.34, + "learning_rate": 7.806122448979593e-05, + "loss": 0.9591, + "step": 956 + }, + { + "epoch": 19.36, + "learning_rate": 7.793367346938776e-05, + "loss": 1.0101, + "step": 957 + }, + { + "epoch": 19.38, + "learning_rate": 7.780612244897959e-05, + "loss": 1.0453, + "step": 958 + }, + { + "epoch": 19.4, + "learning_rate": 7.767857142857144e-05, + "loss": 1.0461, + "step": 959 + }, + { + "epoch": 19.42, + "learning_rate": 7.755102040816327e-05, + "loss": 1.0959, + "step": 960 + }, + { + "epoch": 19.44, + "learning_rate": 7.742346938775511e-05, + "loss": 1.0608, + "step": 961 + }, + { + "epoch": 19.47, + "learning_rate": 7.729591836734694e-05, + "loss": 
1.1177, + "step": 962 + }, + { + "epoch": 19.49, + "learning_rate": 7.716836734693877e-05, + "loss": 1.0354, + "step": 963 + }, + { + "epoch": 19.51, + "learning_rate": 7.704081632653062e-05, + "loss": 1.0507, + "step": 964 + }, + { + "epoch": 19.53, + "learning_rate": 7.691326530612245e-05, + "loss": 1.0313, + "step": 965 + }, + { + "epoch": 19.55, + "learning_rate": 7.67857142857143e-05, + "loss": 1.0569, + "step": 966 + }, + { + "epoch": 19.57, + "learning_rate": 7.665816326530612e-05, + "loss": 1.0862, + "step": 967 + }, + { + "epoch": 19.59, + "learning_rate": 7.653061224489796e-05, + "loss": 1.0593, + "step": 968 + }, + { + "epoch": 19.61, + "learning_rate": 7.64030612244898e-05, + "loss": 1.0602, + "step": 969 + }, + { + "epoch": 19.63, + "learning_rate": 7.627551020408163e-05, + "loss": 1.0048, + "step": 970 + }, + { + "epoch": 19.65, + "learning_rate": 7.614795918367347e-05, + "loss": 1.0346, + "step": 971 + }, + { + "epoch": 19.67, + "learning_rate": 7.60204081632653e-05, + "loss": 1.0172, + "step": 972 + }, + { + "epoch": 19.69, + "learning_rate": 7.589285714285714e-05, + "loss": 1.02, + "step": 973 + }, + { + "epoch": 19.71, + "learning_rate": 7.576530612244898e-05, + "loss": 1.0028, + "step": 974 + }, + { + "epoch": 19.73, + "learning_rate": 7.563775510204083e-05, + "loss": 1.08, + "step": 975 + }, + { + "epoch": 19.75, + "learning_rate": 7.551020408163266e-05, + "loss": 1.0402, + "step": 976 + }, + { + "epoch": 19.77, + "learning_rate": 7.53826530612245e-05, + "loss": 1.0567, + "step": 977 + }, + { + "epoch": 19.79, + "learning_rate": 7.525510204081633e-05, + "loss": 1.0169, + "step": 978 + }, + { + "epoch": 19.81, + "learning_rate": 7.512755102040818e-05, + "loss": 0.9881, + "step": 979 + }, + { + "epoch": 19.83, + "learning_rate": 7.500000000000001e-05, + "loss": 1.0677, + "step": 980 + }, + { + "epoch": 19.85, + "learning_rate": 7.487244897959184e-05, + "loss": 1.1026, + "step": 981 + }, + { + "epoch": 19.87, + "learning_rate": 7.474489795918368e-05, + "loss": 1.0101, + "step": 982 + }, + { + "epoch": 19.89, + "learning_rate": 7.461734693877551e-05, + "loss": 1.069, + "step": 983 + }, + { + "epoch": 19.91, + "learning_rate": 7.448979591836736e-05, + "loss": 1.0493, + "step": 984 + }, + { + "epoch": 19.93, + "learning_rate": 7.436224489795919e-05, + "loss": 1.0858, + "step": 985 + }, + { + "epoch": 19.95, + "learning_rate": 7.423469387755102e-05, + "loss": 1.0734, + "step": 986 + }, + { + "epoch": 19.97, + "learning_rate": 7.410714285714286e-05, + "loss": 1.0203, + "step": 987 + }, + { + "epoch": 19.99, + "learning_rate": 7.39795918367347e-05, + "loss": 1.0285, + "step": 988 + }, + { + "epoch": 20.01, + "learning_rate": 7.385204081632653e-05, + "loss": 0.9446, + "step": 989 + }, + { + "epoch": 20.03, + "learning_rate": 7.372448979591837e-05, + "loss": 0.9915, + "step": 990 + }, + { + "epoch": 20.05, + "learning_rate": 7.35969387755102e-05, + "loss": 0.9882, + "step": 991 + }, + { + "epoch": 20.07, + "learning_rate": 7.346938775510205e-05, + "loss": 0.9338, + "step": 992 + }, + { + "epoch": 20.09, + "learning_rate": 7.334183673469388e-05, + "loss": 0.942, + "step": 993 + }, + { + "epoch": 20.11, + "learning_rate": 7.321428571428571e-05, + "loss": 0.9725, + "step": 994 + }, + { + "epoch": 20.13, + "learning_rate": 7.308673469387755e-05, + "loss": 1.027, + "step": 995 + }, + { + "epoch": 20.15, + "learning_rate": 7.29591836734694e-05, + "loss": 1.0081, + "step": 996 + }, + { + "epoch": 20.17, + "learning_rate": 7.283163265306123e-05, + "loss": 1.0117, + "step": 997 + }, + { + 
"epoch": 20.19, + "learning_rate": 7.270408163265307e-05, + "loss": 0.969, + "step": 998 + }, + { + "epoch": 20.21, + "learning_rate": 7.25765306122449e-05, + "loss": 1.0024, + "step": 999 + }, + { + "epoch": 20.23, + "learning_rate": 7.244897959183675e-05, + "loss": 0.994, + "step": 1000 + }, + { + "epoch": 20.25, + "learning_rate": 7.232142857142858e-05, + "loss": 1.0248, + "step": 1001 + }, + { + "epoch": 20.27, + "learning_rate": 7.219387755102042e-05, + "loss": 1.0493, + "step": 1002 + }, + { + "epoch": 20.29, + "learning_rate": 7.206632653061225e-05, + "loss": 1.0011, + "step": 1003 + }, + { + "epoch": 20.31, + "learning_rate": 7.193877551020408e-05, + "loss": 0.9874, + "step": 1004 + }, + { + "epoch": 20.34, + "learning_rate": 7.181122448979593e-05, + "loss": 1.0049, + "step": 1005 + }, + { + "epoch": 20.36, + "learning_rate": 7.168367346938776e-05, + "loss": 1.0314, + "step": 1006 + }, + { + "epoch": 20.38, + "learning_rate": 7.155612244897959e-05, + "loss": 0.9742, + "step": 1007 + }, + { + "epoch": 20.4, + "learning_rate": 7.142857142857143e-05, + "loss": 1.0621, + "step": 1008 + }, + { + "epoch": 20.42, + "learning_rate": 7.130102040816326e-05, + "loss": 0.9672, + "step": 1009 + }, + { + "epoch": 20.44, + "learning_rate": 7.117346938775511e-05, + "loss": 1.0018, + "step": 1010 + }, + { + "epoch": 20.46, + "learning_rate": 7.104591836734694e-05, + "loss": 1.0045, + "step": 1011 + }, + { + "epoch": 20.48, + "learning_rate": 7.091836734693877e-05, + "loss": 0.9675, + "step": 1012 + }, + { + "epoch": 20.5, + "learning_rate": 7.079081632653062e-05, + "loss": 0.976, + "step": 1013 + }, + { + "epoch": 20.52, + "learning_rate": 7.066326530612245e-05, + "loss": 1.0523, + "step": 1014 + }, + { + "epoch": 20.54, + "learning_rate": 7.053571428571429e-05, + "loss": 1.052, + "step": 1015 + }, + { + "epoch": 20.56, + "learning_rate": 7.040816326530612e-05, + "loss": 0.9903, + "step": 1016 + }, + { + "epoch": 20.58, + "learning_rate": 7.028061224489795e-05, + "loss": 1.0337, + "step": 1017 + }, + { + "epoch": 20.6, + "learning_rate": 7.01530612244898e-05, + "loss": 1.1122, + "step": 1018 + }, + { + "epoch": 20.62, + "learning_rate": 7.002551020408164e-05, + "loss": 1.0133, + "step": 1019 + }, + { + "epoch": 20.64, + "learning_rate": 6.989795918367347e-05, + "loss": 0.9588, + "step": 1020 + }, + { + "epoch": 20.66, + "learning_rate": 6.977040816326532e-05, + "loss": 0.9892, + "step": 1021 + }, + { + "epoch": 20.68, + "learning_rate": 6.964285714285715e-05, + "loss": 1.025, + "step": 1022 + }, + { + "epoch": 20.7, + "learning_rate": 6.951530612244899e-05, + "loss": 1.0196, + "step": 1023 + }, + { + "epoch": 20.72, + "learning_rate": 6.938775510204082e-05, + "loss": 1.0146, + "step": 1024 + }, + { + "epoch": 20.74, + "learning_rate": 6.926020408163265e-05, + "loss": 1.0656, + "step": 1025 + }, + { + "epoch": 20.76, + "learning_rate": 6.91326530612245e-05, + "loss": 0.9584, + "step": 1026 + }, + { + "epoch": 20.78, + "learning_rate": 6.900510204081633e-05, + "loss": 0.9877, + "step": 1027 + }, + { + "epoch": 20.8, + "learning_rate": 6.887755102040817e-05, + "loss": 1.0607, + "step": 1028 + }, + { + "epoch": 20.82, + "learning_rate": 6.875e-05, + "loss": 0.9969, + "step": 1029 + }, + { + "epoch": 20.84, + "learning_rate": 6.862244897959184e-05, + "loss": 0.9506, + "step": 1030 + }, + { + "epoch": 20.86, + "learning_rate": 6.849489795918368e-05, + "loss": 1.0576, + "step": 1031 + }, + { + "epoch": 20.88, + "learning_rate": 6.836734693877551e-05, + "loss": 1.0094, + "step": 1032 + }, + { + "epoch": 
20.9, + "learning_rate": 6.823979591836735e-05, + "loss": 0.9872, + "step": 1033 + }, + { + "epoch": 20.92, + "learning_rate": 6.811224489795919e-05, + "loss": 1.0544, + "step": 1034 + }, + { + "epoch": 20.94, + "learning_rate": 6.798469387755102e-05, + "loss": 1.0194, + "step": 1035 + }, + { + "epoch": 20.96, + "learning_rate": 6.785714285714286e-05, + "loss": 1.0009, + "step": 1036 + }, + { + "epoch": 20.98, + "learning_rate": 6.772959183673469e-05, + "loss": 0.9727, + "step": 1037 + }, + { + "epoch": 21.0, + "learning_rate": 6.760204081632652e-05, + "loss": 0.9754, + "step": 1038 + }, + { + "epoch": 21.02, + "learning_rate": 6.747448979591837e-05, + "loss": 0.9953, + "step": 1039 + }, + { + "epoch": 21.04, + "learning_rate": 6.73469387755102e-05, + "loss": 0.9307, + "step": 1040 + }, + { + "epoch": 21.06, + "learning_rate": 6.721938775510204e-05, + "loss": 0.9151, + "step": 1041 + }, + { + "epoch": 21.08, + "learning_rate": 6.709183673469389e-05, + "loss": 0.9474, + "step": 1042 + }, + { + "epoch": 21.1, + "learning_rate": 6.696428571428572e-05, + "loss": 0.9697, + "step": 1043 + }, + { + "epoch": 21.12, + "learning_rate": 6.683673469387756e-05, + "loss": 0.9423, + "step": 1044 + }, + { + "epoch": 21.14, + "learning_rate": 6.670918367346939e-05, + "loss": 0.9797, + "step": 1045 + }, + { + "epoch": 21.16, + "learning_rate": 6.658163265306124e-05, + "loss": 0.919, + "step": 1046 + }, + { + "epoch": 21.18, + "learning_rate": 6.645408163265307e-05, + "loss": 0.9743, + "step": 1047 + }, + { + "epoch": 21.21, + "learning_rate": 6.63265306122449e-05, + "loss": 0.9575, + "step": 1048 + }, + { + "epoch": 21.23, + "learning_rate": 6.619897959183674e-05, + "loss": 0.9861, + "step": 1049 + }, + { + "epoch": 21.25, + "learning_rate": 6.607142857142857e-05, + "loss": 0.9103, + "step": 1050 + }, + { + "epoch": 21.27, + "learning_rate": 6.594387755102042e-05, + "loss": 0.993, + "step": 1051 + }, + { + "epoch": 21.29, + "learning_rate": 6.581632653061225e-05, + "loss": 0.9668, + "step": 1052 + }, + { + "epoch": 21.31, + "learning_rate": 6.568877551020408e-05, + "loss": 1.0008, + "step": 1053 + }, + { + "epoch": 21.33, + "learning_rate": 6.556122448979592e-05, + "loss": 0.9825, + "step": 1054 + }, + { + "epoch": 21.35, + "learning_rate": 6.543367346938776e-05, + "loss": 1.0174, + "step": 1055 + }, + { + "epoch": 21.37, + "learning_rate": 6.530612244897959e-05, + "loss": 0.9685, + "step": 1056 + }, + { + "epoch": 21.39, + "learning_rate": 6.517857142857143e-05, + "loss": 0.9265, + "step": 1057 + }, + { + "epoch": 21.41, + "learning_rate": 6.505102040816326e-05, + "loss": 0.9495, + "step": 1058 + }, + { + "epoch": 21.43, + "learning_rate": 6.49234693877551e-05, + "loss": 0.9541, + "step": 1059 + }, + { + "epoch": 21.45, + "learning_rate": 6.479591836734694e-05, + "loss": 0.9299, + "step": 1060 + }, + { + "epoch": 21.47, + "learning_rate": 6.466836734693877e-05, + "loss": 0.9625, + "step": 1061 + }, + { + "epoch": 21.49, + "learning_rate": 6.454081632653061e-05, + "loss": 1.0054, + "step": 1062 + }, + { + "epoch": 21.51, + "learning_rate": 6.441326530612244e-05, + "loss": 0.9893, + "step": 1063 + }, + { + "epoch": 21.53, + "learning_rate": 6.428571428571429e-05, + "loss": 0.9906, + "step": 1064 + }, + { + "epoch": 21.55, + "learning_rate": 6.415816326530613e-05, + "loss": 0.9487, + "step": 1065 + }, + { + "epoch": 21.57, + "learning_rate": 6.403061224489796e-05, + "loss": 0.9728, + "step": 1066 + }, + { + "epoch": 21.59, + "learning_rate": 6.390306122448981e-05, + "loss": 0.9883, + "step": 1067 + }, + { + 
"epoch": 21.61, + "learning_rate": 6.377551020408164e-05, + "loss": 1.053, + "step": 1068 + }, + { + "epoch": 21.63, + "learning_rate": 6.364795918367348e-05, + "loss": 1.012, + "step": 1069 + }, + { + "epoch": 21.65, + "learning_rate": 6.352040816326531e-05, + "loss": 0.962, + "step": 1070 + }, + { + "epoch": 21.67, + "learning_rate": 6.339285714285714e-05, + "loss": 0.9955, + "step": 1071 + }, + { + "epoch": 21.69, + "learning_rate": 6.326530612244899e-05, + "loss": 0.9908, + "step": 1072 + }, + { + "epoch": 21.71, + "learning_rate": 6.313775510204082e-05, + "loss": 1.0327, + "step": 1073 + }, + { + "epoch": 21.73, + "learning_rate": 6.301020408163265e-05, + "loss": 0.9255, + "step": 1074 + }, + { + "epoch": 21.75, + "learning_rate": 6.28826530612245e-05, + "loss": 0.9268, + "step": 1075 + }, + { + "epoch": 21.77, + "learning_rate": 6.275510204081633e-05, + "loss": 0.9204, + "step": 1076 + }, + { + "epoch": 21.79, + "learning_rate": 6.262755102040817e-05, + "loss": 0.9838, + "step": 1077 + }, + { + "epoch": 21.81, + "learning_rate": 6.25e-05, + "loss": 0.954, + "step": 1078 + }, + { + "epoch": 21.83, + "learning_rate": 6.237244897959183e-05, + "loss": 1.0102, + "step": 1079 + }, + { + "epoch": 21.85, + "learning_rate": 6.224489795918368e-05, + "loss": 0.916, + "step": 1080 + }, + { + "epoch": 21.87, + "learning_rate": 6.211734693877551e-05, + "loss": 0.9939, + "step": 1081 + }, + { + "epoch": 21.89, + "learning_rate": 6.198979591836735e-05, + "loss": 0.9675, + "step": 1082 + }, + { + "epoch": 21.91, + "learning_rate": 6.186224489795918e-05, + "loss": 0.9666, + "step": 1083 + }, + { + "epoch": 21.93, + "learning_rate": 6.173469387755101e-05, + "loss": 0.9919, + "step": 1084 + }, + { + "epoch": 21.95, + "learning_rate": 6.160714285714286e-05, + "loss": 1.0106, + "step": 1085 + }, + { + "epoch": 21.97, + "learning_rate": 6.14795918367347e-05, + "loss": 0.9982, + "step": 1086 + }, + { + "epoch": 21.99, + "learning_rate": 6.135204081632653e-05, + "loss": 1.0137, + "step": 1087 + }, + { + "epoch": 22.01, + "learning_rate": 6.122448979591838e-05, + "loss": 0.9331, + "step": 1088 + }, + { + "epoch": 22.03, + "learning_rate": 6.109693877551021e-05, + "loss": 0.8834, + "step": 1089 + }, + { + "epoch": 22.06, + "learning_rate": 6.0969387755102046e-05, + "loss": 0.9757, + "step": 1090 + }, + { + "epoch": 22.08, + "learning_rate": 6.084183673469388e-05, + "loss": 0.9038, + "step": 1091 + }, + { + "epoch": 22.1, + "learning_rate": 6.0714285714285715e-05, + "loss": 0.9097, + "step": 1092 + }, + { + "epoch": 22.12, + "learning_rate": 6.058673469387756e-05, + "loss": 0.8972, + "step": 1093 + }, + { + "epoch": 22.14, + "learning_rate": 6.045918367346939e-05, + "loss": 0.8825, + "step": 1094 + }, + { + "epoch": 22.16, + "learning_rate": 6.0331632653061234e-05, + "loss": 0.9814, + "step": 1095 + }, + { + "epoch": 22.18, + "learning_rate": 6.0204081632653065e-05, + "loss": 0.9874, + "step": 1096 + }, + { + "epoch": 22.2, + "learning_rate": 6.0076530612244896e-05, + "loss": 0.912, + "step": 1097 + }, + { + "epoch": 22.22, + "learning_rate": 5.994897959183674e-05, + "loss": 0.9206, + "step": 1098 + }, + { + "epoch": 22.24, + "learning_rate": 5.982142857142857e-05, + "loss": 0.9497, + "step": 1099 + }, + { + "epoch": 22.26, + "learning_rate": 5.9693877551020416e-05, + "loss": 0.9269, + "step": 1100 + }, + { + "epoch": 22.28, + "learning_rate": 5.956632653061225e-05, + "loss": 0.9452, + "step": 1101 + }, + { + "epoch": 22.3, + "learning_rate": 5.9438775510204084e-05, + "loss": 0.9548, + "step": 1102 + }, + { + 
"epoch": 22.32, + "learning_rate": 5.931122448979592e-05, + "loss": 0.9689, + "step": 1103 + }, + { + "epoch": 22.34, + "learning_rate": 5.918367346938776e-05, + "loss": 0.9455, + "step": 1104 + }, + { + "epoch": 22.36, + "learning_rate": 5.905612244897959e-05, + "loss": 0.9409, + "step": 1105 + }, + { + "epoch": 22.38, + "learning_rate": 5.8928571428571435e-05, + "loss": 0.9093, + "step": 1106 + }, + { + "epoch": 22.4, + "learning_rate": 5.8801020408163266e-05, + "loss": 0.921, + "step": 1107 + }, + { + "epoch": 22.42, + "learning_rate": 5.867346938775511e-05, + "loss": 0.9368, + "step": 1108 + }, + { + "epoch": 22.44, + "learning_rate": 5.854591836734694e-05, + "loss": 0.907, + "step": 1109 + }, + { + "epoch": 22.46, + "learning_rate": 5.841836734693877e-05, + "loss": 0.9126, + "step": 1110 + }, + { + "epoch": 22.48, + "learning_rate": 5.8290816326530616e-05, + "loss": 0.9161, + "step": 1111 + }, + { + "epoch": 22.5, + "learning_rate": 5.816326530612245e-05, + "loss": 0.9542, + "step": 1112 + }, + { + "epoch": 22.52, + "learning_rate": 5.803571428571429e-05, + "loss": 0.9775, + "step": 1113 + }, + { + "epoch": 22.54, + "learning_rate": 5.790816326530612e-05, + "loss": 1.0006, + "step": 1114 + }, + { + "epoch": 22.56, + "learning_rate": 5.778061224489796e-05, + "loss": 0.8965, + "step": 1115 + }, + { + "epoch": 22.58, + "learning_rate": 5.7653061224489805e-05, + "loss": 0.944, + "step": 1116 + }, + { + "epoch": 22.6, + "learning_rate": 5.7525510204081636e-05, + "loss": 0.9162, + "step": 1117 + }, + { + "epoch": 22.62, + "learning_rate": 5.739795918367348e-05, + "loss": 0.9325, + "step": 1118 + }, + { + "epoch": 22.64, + "learning_rate": 5.727040816326531e-05, + "loss": 0.8998, + "step": 1119 + }, + { + "epoch": 22.66, + "learning_rate": 5.714285714285714e-05, + "loss": 0.9362, + "step": 1120 + }, + { + "epoch": 22.68, + "learning_rate": 5.7015306122448986e-05, + "loss": 0.9969, + "step": 1121 + }, + { + "epoch": 22.7, + "learning_rate": 5.688775510204082e-05, + "loss": 0.9104, + "step": 1122 + }, + { + "epoch": 22.72, + "learning_rate": 5.676020408163265e-05, + "loss": 0.9746, + "step": 1123 + }, + { + "epoch": 22.74, + "learning_rate": 5.663265306122449e-05, + "loss": 0.9821, + "step": 1124 + }, + { + "epoch": 22.76, + "learning_rate": 5.650510204081633e-05, + "loss": 0.9526, + "step": 1125 + }, + { + "epoch": 22.78, + "learning_rate": 5.637755102040817e-05, + "loss": 0.871, + "step": 1126 + }, + { + "epoch": 22.8, + "learning_rate": 5.6250000000000005e-05, + "loss": 0.9534, + "step": 1127 + }, + { + "epoch": 22.82, + "learning_rate": 5.6122448979591836e-05, + "loss": 0.9616, + "step": 1128 + }, + { + "epoch": 22.84, + "learning_rate": 5.599489795918368e-05, + "loss": 0.9627, + "step": 1129 + }, + { + "epoch": 22.86, + "learning_rate": 5.586734693877551e-05, + "loss": 0.9704, + "step": 1130 + }, + { + "epoch": 22.88, + "learning_rate": 5.5739795918367356e-05, + "loss": 0.9506, + "step": 1131 + }, + { + "epoch": 22.9, + "learning_rate": 5.561224489795919e-05, + "loss": 0.9553, + "step": 1132 + }, + { + "epoch": 22.93, + "learning_rate": 5.548469387755102e-05, + "loss": 0.9294, + "step": 1133 + }, + { + "epoch": 22.95, + "learning_rate": 5.535714285714286e-05, + "loss": 0.8979, + "step": 1134 + }, + { + "epoch": 22.97, + "learning_rate": 5.522959183673469e-05, + "loss": 1.0004, + "step": 1135 + }, + { + "epoch": 22.99, + "learning_rate": 5.510204081632653e-05, + "loss": 0.9821, + "step": 1136 + }, + { + "epoch": 23.01, + "learning_rate": 5.497448979591837e-05, + "loss": 0.9607, + "step": 
1137 + }, + { + "epoch": 23.03, + "learning_rate": 5.4846938775510206e-05, + "loss": 0.9757, + "step": 1138 + }, + { + "epoch": 23.05, + "learning_rate": 5.471938775510205e-05, + "loss": 0.9096, + "step": 1139 + }, + { + "epoch": 23.07, + "learning_rate": 5.459183673469388e-05, + "loss": 0.9144, + "step": 1140 + }, + { + "epoch": 23.09, + "learning_rate": 5.446428571428571e-05, + "loss": 0.8667, + "step": 1141 + }, + { + "epoch": 23.11, + "learning_rate": 5.4336734693877556e-05, + "loss": 0.8993, + "step": 1142 + }, + { + "epoch": 23.13, + "learning_rate": 5.420918367346939e-05, + "loss": 0.8964, + "step": 1143 + }, + { + "epoch": 23.15, + "learning_rate": 5.408163265306123e-05, + "loss": 0.9173, + "step": 1144 + }, + { + "epoch": 23.17, + "learning_rate": 5.395408163265306e-05, + "loss": 0.9019, + "step": 1145 + }, + { + "epoch": 23.19, + "learning_rate": 5.382653061224489e-05, + "loss": 0.9303, + "step": 1146 + }, + { + "epoch": 23.21, + "learning_rate": 5.369897959183674e-05, + "loss": 0.9268, + "step": 1147 + }, + { + "epoch": 23.23, + "learning_rate": 5.3571428571428575e-05, + "loss": 0.8803, + "step": 1148 + }, + { + "epoch": 23.25, + "learning_rate": 5.344387755102041e-05, + "loss": 0.9197, + "step": 1149 + }, + { + "epoch": 23.27, + "learning_rate": 5.331632653061225e-05, + "loss": 0.9204, + "step": 1150 + }, + { + "epoch": 23.29, + "learning_rate": 5.318877551020408e-05, + "loss": 0.8802, + "step": 1151 + }, + { + "epoch": 23.31, + "learning_rate": 5.3061224489795926e-05, + "loss": 0.9044, + "step": 1152 + }, + { + "epoch": 23.33, + "learning_rate": 5.293367346938776e-05, + "loss": 0.8893, + "step": 1153 + }, + { + "epoch": 23.35, + "learning_rate": 5.280612244897959e-05, + "loss": 0.8928, + "step": 1154 + }, + { + "epoch": 23.37, + "learning_rate": 5.267857142857143e-05, + "loss": 0.9353, + "step": 1155 + }, + { + "epoch": 23.39, + "learning_rate": 5.255102040816326e-05, + "loss": 0.9345, + "step": 1156 + }, + { + "epoch": 23.41, + "learning_rate": 5.242346938775511e-05, + "loss": 0.9372, + "step": 1157 + }, + { + "epoch": 23.43, + "learning_rate": 5.229591836734694e-05, + "loss": 0.9234, + "step": 1158 + }, + { + "epoch": 23.45, + "learning_rate": 5.2168367346938776e-05, + "loss": 0.9177, + "step": 1159 + }, + { + "epoch": 23.47, + "learning_rate": 5.2040816326530614e-05, + "loss": 0.8757, + "step": 1160 + }, + { + "epoch": 23.49, + "learning_rate": 5.191326530612245e-05, + "loss": 0.9048, + "step": 1161 + }, + { + "epoch": 23.51, + "learning_rate": 5.1785714285714296e-05, + "loss": 0.9248, + "step": 1162 + }, + { + "epoch": 23.53, + "learning_rate": 5.1658163265306127e-05, + "loss": 0.9379, + "step": 1163 + }, + { + "epoch": 23.55, + "learning_rate": 5.153061224489796e-05, + "loss": 0.8596, + "step": 1164 + }, + { + "epoch": 23.57, + "learning_rate": 5.14030612244898e-05, + "loss": 0.9751, + "step": 1165 + }, + { + "epoch": 23.59, + "learning_rate": 5.127551020408163e-05, + "loss": 0.8842, + "step": 1166 + }, + { + "epoch": 23.61, + "learning_rate": 5.114795918367348e-05, + "loss": 0.8765, + "step": 1167 + }, + { + "epoch": 23.63, + "learning_rate": 5.102040816326531e-05, + "loss": 0.8942, + "step": 1168 + }, + { + "epoch": 23.65, + "learning_rate": 5.089285714285714e-05, + "loss": 0.938, + "step": 1169 + }, + { + "epoch": 23.67, + "learning_rate": 5.076530612244898e-05, + "loss": 0.8993, + "step": 1170 + }, + { + "epoch": 23.69, + "learning_rate": 5.063775510204082e-05, + "loss": 0.9362, + "step": 1171 + }, + { + "epoch": 23.71, + "learning_rate": 5.051020408163265e-05, + 
"loss": 0.9249, + "step": 1172 + }, + { + "epoch": 23.73, + "learning_rate": 5.0382653061224496e-05, + "loss": 0.9055, + "step": 1173 + }, + { + "epoch": 23.75, + "learning_rate": 5.025510204081633e-05, + "loss": 0.8967, + "step": 1174 + }, + { + "epoch": 23.77, + "learning_rate": 5.012755102040817e-05, + "loss": 0.8795, + "step": 1175 + }, + { + "epoch": 23.8, + "learning_rate": 5e-05, + "loss": 0.9452, + "step": 1176 + }, + { + "epoch": 23.82, + "learning_rate": 4.987244897959184e-05, + "loss": 0.926, + "step": 1177 + }, + { + "epoch": 23.84, + "learning_rate": 4.974489795918368e-05, + "loss": 0.8948, + "step": 1178 + }, + { + "epoch": 23.86, + "learning_rate": 4.961734693877551e-05, + "loss": 0.8926, + "step": 1179 + }, + { + "epoch": 23.88, + "learning_rate": 4.9489795918367346e-05, + "loss": 0.8949, + "step": 1180 + }, + { + "epoch": 23.9, + "learning_rate": 4.9362244897959184e-05, + "loss": 0.9648, + "step": 1181 + }, + { + "epoch": 23.92, + "learning_rate": 4.923469387755102e-05, + "loss": 0.9599, + "step": 1182 + }, + { + "epoch": 23.94, + "learning_rate": 4.910714285714286e-05, + "loss": 0.9603, + "step": 1183 + }, + { + "epoch": 23.96, + "learning_rate": 4.89795918367347e-05, + "loss": 0.9302, + "step": 1184 + }, + { + "epoch": 23.98, + "learning_rate": 4.8852040816326534e-05, + "loss": 0.9261, + "step": 1185 + }, + { + "epoch": 24.0, + "learning_rate": 4.872448979591837e-05, + "loss": 0.9257, + "step": 1186 + }, + { + "epoch": 24.02, + "learning_rate": 4.859693877551021e-05, + "loss": 0.8725, + "step": 1187 + }, + { + "epoch": 24.04, + "learning_rate": 4.846938775510204e-05, + "loss": 0.8486, + "step": 1188 + }, + { + "epoch": 24.06, + "learning_rate": 4.834183673469388e-05, + "loss": 0.8457, + "step": 1189 + }, + { + "epoch": 24.08, + "learning_rate": 4.8214285714285716e-05, + "loss": 0.7848, + "step": 1190 + }, + { + "epoch": 24.1, + "learning_rate": 4.8086734693877554e-05, + "loss": 0.8885, + "step": 1191 + }, + { + "epoch": 24.12, + "learning_rate": 4.795918367346939e-05, + "loss": 0.9099, + "step": 1192 + }, + { + "epoch": 24.14, + "learning_rate": 4.783163265306123e-05, + "loss": 0.9147, + "step": 1193 + }, + { + "epoch": 24.16, + "learning_rate": 4.7704081632653066e-05, + "loss": 0.8781, + "step": 1194 + }, + { + "epoch": 24.18, + "learning_rate": 4.7576530612244904e-05, + "loss": 0.8847, + "step": 1195 + }, + { + "epoch": 24.2, + "learning_rate": 4.744897959183674e-05, + "loss": 0.9041, + "step": 1196 + }, + { + "epoch": 24.22, + "learning_rate": 4.732142857142857e-05, + "loss": 0.8639, + "step": 1197 + }, + { + "epoch": 24.24, + "learning_rate": 4.719387755102041e-05, + "loss": 0.8831, + "step": 1198 + }, + { + "epoch": 24.26, + "learning_rate": 4.706632653061225e-05, + "loss": 0.9063, + "step": 1199 + }, + { + "epoch": 24.28, + "learning_rate": 4.6938775510204086e-05, + "loss": 0.8753, + "step": 1200 + } + ], + "logging_steps": 1, + "max_steps": 1568, + "num_train_epochs": 32, + "save_steps": 100, + "total_flos": 1.7762072708554138e+18, + "trial_name": null, + "trial_params": null +} diff --git a/checkpoint-1200/training_args.bin b/checkpoint-1200/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..db23e07d097c18532e52f58a70eb72d22e39c8c1 --- /dev/null +++ b/checkpoint-1200/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b610cbc4242bb50b4985b00e205994ae514fec6d9e2273f2b545a583a07b154b +size 4155 diff --git a/checkpoint-1300/README.md b/checkpoint-1300/README.md new file mode 100644 index 
0000000000000000000000000000000000000000..08371015f02382e6fcba318f4aaea54ae52cd3c4 --- /dev/null +++ b/checkpoint-1300/README.md @@ -0,0 +1,34 @@ +--- +library_name: peft +--- +## Training procedure + + +The following `bitsandbytes` quantization config was used during training: +- quant_method: bitsandbytes +- load_in_8bit: True +- load_in_4bit: False +- llm_int8_threshold: 6.0 +- llm_int8_skip_modules: None +- llm_int8_enable_fp32_cpu_offload: False +- llm_int8_has_fp16_weight: False +- bnb_4bit_quant_type: fp4 +- bnb_4bit_use_double_quant: False +- bnb_4bit_compute_dtype: float32 + +The following `bitsandbytes` quantization config was used during training: +- quant_method: bitsandbytes +- load_in_8bit: True +- load_in_4bit: False +- llm_int8_threshold: 6.0 +- llm_int8_skip_modules: None +- llm_int8_enable_fp32_cpu_offload: False +- llm_int8_has_fp16_weight: False +- bnb_4bit_quant_type: fp4 +- bnb_4bit_use_double_quant: False +- bnb_4bit_compute_dtype: float32 +### Framework versions + +- PEFT 0.6.0.dev0 + +- PEFT 0.6.0.dev0 diff --git a/checkpoint-1300/adapter_config.json b/checkpoint-1300/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..751d838ac0c1ae5ca71ca448b25d7a8a0173f01b --- /dev/null +++ b/checkpoint-1300/adapter_config.json @@ -0,0 +1,23 @@ +{ + "auto_mapping": null, + "base_model_name_or_path": "bigscience/bloomz-3b", + "bias": "none", + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": true, + "layers_pattern": null, + "layers_to_transform": null, + "lora_alpha": 16, + "lora_dropout": 0.0, + "modules_to_save": null, + "peft_type": "LORA", + "r": 8, + "revision": null, + "target_modules": [ + "dense_4h_to_h", + "dense", + "dense_h_to_4h", + "query_key_value" + ], + "task_type": "CAUSAL_LM" +} \ No newline at end of file diff --git a/checkpoint-1300/adapter_model.bin b/checkpoint-1300/adapter_model.bin new file mode 100644 index 0000000000000000000000000000000000000000..2a46567e8a873934c32b5f6716c212c0583d11c5 --- /dev/null +++ b/checkpoint-1300/adapter_model.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:557e20c98ce81a1b2174b31161d704a79e1057d7cba9a8a3a044698f748e1a96 +size 39409357 diff --git a/checkpoint-1300/optimizer.pt b/checkpoint-1300/optimizer.pt new file mode 100644 index 0000000000000000000000000000000000000000..dc193c111e311e6c8b784c0d8a731044dca46428 --- /dev/null +++ b/checkpoint-1300/optimizer.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d2311ed46f1164ff3772d9c69e8f7347964538333edd4438abef82e7683876dc +size 78844421 diff --git a/checkpoint-1300/rng_state.pth b/checkpoint-1300/rng_state.pth new file mode 100644 index 0000000000000000000000000000000000000000..c16fb5959f3252446d7952ca7b642c0f6b79609e --- /dev/null +++ b/checkpoint-1300/rng_state.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e2233b71d28d82b8331de6224e1abb884bd9108ce746c680fd1c4d641c935668 +size 14575 diff --git a/checkpoint-1300/scheduler.pt b/checkpoint-1300/scheduler.pt new file mode 100644 index 0000000000000000000000000000000000000000..6d80e9e6b9dff077ceec581106fad9de9f7ae96c --- /dev/null +++ b/checkpoint-1300/scheduler.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:af8fb4309008de879e60055efbf110ed70c676d2270a50eb4a6b5b40910d4878 +size 627 diff --git a/checkpoint-1300/special_tokens_map.json b/checkpoint-1300/special_tokens_map.json new file mode 100644 index 
0000000000000000000000000000000000000000..fdafe480f024ff444c7492147536765ce5d55a2d --- /dev/null +++ b/checkpoint-1300/special_tokens_map.json @@ -0,0 +1,6 @@ +{ + "bos_token": "", + "eos_token": "", + "pad_token": "", + "unk_token": "" +} diff --git a/checkpoint-1300/tokenizer.json b/checkpoint-1300/tokenizer.json new file mode 100644 index 0000000000000000000000000000000000000000..dbf002cafbd4818dcff2abc9156c088d681b4533 --- /dev/null +++ b/checkpoint-1300/tokenizer.json @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:17a208233d2ee8d8c83b23bc214df737c44806a1919f444e89b31e586cd956ba +size 14500471 diff --git a/checkpoint-1300/tokenizer_config.json b/checkpoint-1300/tokenizer_config.json new file mode 100644 index 0000000000000000000000000000000000000000..4b56cc9c2965c07132c35df3e2972e93d98c82c3 --- /dev/null +++ b/checkpoint-1300/tokenizer_config.json @@ -0,0 +1,10 @@ +{ + "add_prefix_space": false, + "bos_token": "", + "clean_up_tokenization_spaces": false, + "eos_token": "", + "model_max_length": 1000000000000000019884624838656, + "pad_token": "", + "tokenizer_class": "BloomTokenizer", + "unk_token": "" +} diff --git a/checkpoint-1300/trainer_state.json b/checkpoint-1300/trainer_state.json new file mode 100644 index 0000000000000000000000000000000000000000..b03ed0161efaed231027c7690ba0e05c13ddb02c --- /dev/null +++ b/checkpoint-1300/trainer_state.json @@ -0,0 +1,7819 @@ +{ + "best_metric": null, + "best_model_checkpoint": null, + "epoch": 26.304141637685742, + "eval_steps": 500, + "global_step": 1300, + "is_hyper_param_search": false, + "is_local_process_zero": true, + "is_world_process_zero": true, + "log_history": [ + { + "epoch": 0.02, + "learning_rate": 0.00019987244897959184, + "loss": 3.2215, + "step": 1 + }, + { + "epoch": 0.04, + "learning_rate": 0.00019974489795918367, + "loss": 2.8365, + "step": 2 + }, + { + "epoch": 0.06, + "learning_rate": 0.00019961734693877553, + "loss": 2.602, + "step": 3 + }, + { + "epoch": 0.08, + "learning_rate": 0.00019948979591836736, + "loss": 2.4196, + "step": 4 + }, + { + "epoch": 0.1, + "learning_rate": 0.0001993622448979592, + "loss": 2.2574, + "step": 5 + }, + { + "epoch": 0.12, + "learning_rate": 0.00019923469387755102, + "loss": 2.2239, + "step": 6 + }, + { + "epoch": 0.14, + "learning_rate": 0.00019910714285714288, + "loss": 2.1661, + "step": 7 + }, + { + "epoch": 0.16, + "learning_rate": 0.0001989795918367347, + "loss": 2.0987, + "step": 8 + }, + { + "epoch": 0.18, + "learning_rate": 0.00019885204081632654, + "loss": 2.015, + "step": 9 + }, + { + "epoch": 0.2, + "learning_rate": 0.00019872448979591837, + "loss": 1.9771, + "step": 10 + }, + { + "epoch": 0.22, + "learning_rate": 0.00019859693877551023, + "loss": 2.0271, + "step": 11 + }, + { + "epoch": 0.24, + "learning_rate": 0.00019846938775510203, + "loss": 1.9812, + "step": 12 + }, + { + "epoch": 0.26, + "learning_rate": 0.0001983418367346939, + "loss": 2.0834, + "step": 13 + }, + { + "epoch": 0.28, + "learning_rate": 0.00019821428571428572, + "loss": 1.9174, + "step": 14 + }, + { + "epoch": 0.3, + "learning_rate": 0.00019808673469387755, + "loss": 1.8409, + "step": 15 + }, + { + "epoch": 0.32, + "learning_rate": 0.00019795918367346938, + "loss": 1.929, + "step": 16 + }, + { + "epoch": 0.34, + "learning_rate": 0.00019783163265306124, + "loss": 2.0041, + "step": 17 + }, + { + "epoch": 0.36, + "learning_rate": 0.00019770408163265305, + "loss": 1.9385, + "step": 18 + }, + { + "epoch": 0.38, + "learning_rate": 0.0001975765306122449, + "loss": 1.9592, + "step": 19 + }, 
+ { + "epoch": 0.4, + "learning_rate": 0.00019744897959183674, + "loss": 1.9701, + "step": 20 + }, + { + "epoch": 0.42, + "learning_rate": 0.0001973214285714286, + "loss": 1.9277, + "step": 21 + }, + { + "epoch": 0.45, + "learning_rate": 0.00019719387755102042, + "loss": 1.8394, + "step": 22 + }, + { + "epoch": 0.47, + "learning_rate": 0.00019706632653061226, + "loss": 1.8666, + "step": 23 + }, + { + "epoch": 0.49, + "learning_rate": 0.00019693877551020409, + "loss": 1.8997, + "step": 24 + }, + { + "epoch": 0.51, + "learning_rate": 0.00019681122448979592, + "loss": 1.9432, + "step": 25 + }, + { + "epoch": 0.53, + "learning_rate": 0.00019668367346938777, + "loss": 1.9137, + "step": 26 + }, + { + "epoch": 0.55, + "learning_rate": 0.0001965561224489796, + "loss": 1.905, + "step": 27 + }, + { + "epoch": 0.57, + "learning_rate": 0.00019642857142857144, + "loss": 1.8708, + "step": 28 + }, + { + "epoch": 0.59, + "learning_rate": 0.00019630102040816327, + "loss": 1.9097, + "step": 29 + }, + { + "epoch": 0.61, + "learning_rate": 0.00019617346938775513, + "loss": 1.896, + "step": 30 + }, + { + "epoch": 0.63, + "learning_rate": 0.00019604591836734696, + "loss": 1.8834, + "step": 31 + }, + { + "epoch": 0.65, + "learning_rate": 0.0001959183673469388, + "loss": 1.8323, + "step": 32 + }, + { + "epoch": 0.67, + "learning_rate": 0.00019579081632653062, + "loss": 1.804, + "step": 33 + }, + { + "epoch": 0.69, + "learning_rate": 0.00019566326530612248, + "loss": 1.8906, + "step": 34 + }, + { + "epoch": 0.71, + "learning_rate": 0.00019553571428571428, + "loss": 1.8693, + "step": 35 + }, + { + "epoch": 0.73, + "learning_rate": 0.00019540816326530614, + "loss": 1.9308, + "step": 36 + }, + { + "epoch": 0.75, + "learning_rate": 0.00019528061224489797, + "loss": 1.8082, + "step": 37 + }, + { + "epoch": 0.77, + "learning_rate": 0.0001951530612244898, + "loss": 1.848, + "step": 38 + }, + { + "epoch": 0.79, + "learning_rate": 0.00019502551020408163, + "loss": 1.8866, + "step": 39 + }, + { + "epoch": 0.81, + "learning_rate": 0.0001948979591836735, + "loss": 1.7844, + "step": 40 + }, + { + "epoch": 0.83, + "learning_rate": 0.0001947704081632653, + "loss": 1.8485, + "step": 41 + }, + { + "epoch": 0.85, + "learning_rate": 0.00019464285714285715, + "loss": 1.7917, + "step": 42 + }, + { + "epoch": 0.87, + "learning_rate": 0.00019451530612244898, + "loss": 1.7342, + "step": 43 + }, + { + "epoch": 0.89, + "learning_rate": 0.00019438775510204084, + "loss": 1.8479, + "step": 44 + }, + { + "epoch": 0.91, + "learning_rate": 0.00019426020408163267, + "loss": 1.8639, + "step": 45 + }, + { + "epoch": 0.93, + "learning_rate": 0.0001941326530612245, + "loss": 1.8166, + "step": 46 + }, + { + "epoch": 0.95, + "learning_rate": 0.00019400510204081633, + "loss": 1.7566, + "step": 47 + }, + { + "epoch": 0.97, + "learning_rate": 0.00019387755102040816, + "loss": 1.8071, + "step": 48 + }, + { + "epoch": 0.99, + "learning_rate": 0.00019375000000000002, + "loss": 1.8612, + "step": 49 + }, + { + "epoch": 1.01, + "learning_rate": 0.00019362244897959185, + "loss": 1.7819, + "step": 50 + }, + { + "epoch": 1.03, + "learning_rate": 0.00019349489795918368, + "loss": 1.8647, + "step": 51 + }, + { + "epoch": 1.05, + "learning_rate": 0.0001933673469387755, + "loss": 1.8196, + "step": 52 + }, + { + "epoch": 1.07, + "learning_rate": 0.00019323979591836737, + "loss": 1.8027, + "step": 53 + }, + { + "epoch": 1.09, + "learning_rate": 0.00019311224489795917, + "loss": 1.8927, + "step": 54 + }, + { + "epoch": 1.11, + "learning_rate": 0.00019298469387755103, + 
"loss": 1.8481, + "step": 55 + }, + { + "epoch": 1.13, + "learning_rate": 0.00019285714285714286, + "loss": 1.7781, + "step": 56 + }, + { + "epoch": 1.15, + "learning_rate": 0.00019272959183673472, + "loss": 1.8101, + "step": 57 + }, + { + "epoch": 1.17, + "learning_rate": 0.00019260204081632653, + "loss": 1.7257, + "step": 58 + }, + { + "epoch": 1.19, + "learning_rate": 0.00019247448979591838, + "loss": 1.8185, + "step": 59 + }, + { + "epoch": 1.21, + "learning_rate": 0.00019234693877551021, + "loss": 1.8557, + "step": 60 + }, + { + "epoch": 1.23, + "learning_rate": 0.00019221938775510204, + "loss": 1.7418, + "step": 61 + }, + { + "epoch": 1.25, + "learning_rate": 0.00019209183673469388, + "loss": 1.6879, + "step": 62 + }, + { + "epoch": 1.27, + "learning_rate": 0.00019196428571428573, + "loss": 1.7651, + "step": 63 + }, + { + "epoch": 1.29, + "learning_rate": 0.00019183673469387756, + "loss": 1.7759, + "step": 64 + }, + { + "epoch": 1.32, + "learning_rate": 0.0001917091836734694, + "loss": 1.7691, + "step": 65 + }, + { + "epoch": 1.34, + "learning_rate": 0.00019158163265306123, + "loss": 1.7794, + "step": 66 + }, + { + "epoch": 1.36, + "learning_rate": 0.00019145408163265306, + "loss": 1.8152, + "step": 67 + }, + { + "epoch": 1.38, + "learning_rate": 0.00019132653061224492, + "loss": 1.8052, + "step": 68 + }, + { + "epoch": 1.4, + "learning_rate": 0.00019119897959183675, + "loss": 1.8054, + "step": 69 + }, + { + "epoch": 1.42, + "learning_rate": 0.00019107142857142858, + "loss": 1.8114, + "step": 70 + }, + { + "epoch": 1.44, + "learning_rate": 0.0001909438775510204, + "loss": 1.7749, + "step": 71 + }, + { + "epoch": 1.46, + "learning_rate": 0.00019081632653061227, + "loss": 1.777, + "step": 72 + }, + { + "epoch": 1.48, + "learning_rate": 0.0001906887755102041, + "loss": 1.7896, + "step": 73 + }, + { + "epoch": 1.5, + "learning_rate": 0.00019056122448979593, + "loss": 1.8335, + "step": 74 + }, + { + "epoch": 1.52, + "learning_rate": 0.00019043367346938776, + "loss": 1.8155, + "step": 75 + }, + { + "epoch": 1.54, + "learning_rate": 0.00019030612244897962, + "loss": 1.8224, + "step": 76 + }, + { + "epoch": 1.56, + "learning_rate": 0.00019017857142857142, + "loss": 1.7889, + "step": 77 + }, + { + "epoch": 1.58, + "learning_rate": 0.00019005102040816328, + "loss": 1.8866, + "step": 78 + }, + { + "epoch": 1.6, + "learning_rate": 0.0001899234693877551, + "loss": 1.8439, + "step": 79 + }, + { + "epoch": 1.62, + "learning_rate": 0.00018979591836734697, + "loss": 1.7906, + "step": 80 + }, + { + "epoch": 1.64, + "learning_rate": 0.00018966836734693877, + "loss": 1.8627, + "step": 81 + }, + { + "epoch": 1.66, + "learning_rate": 0.00018954081632653063, + "loss": 1.7497, + "step": 82 + }, + { + "epoch": 1.68, + "learning_rate": 0.00018941326530612246, + "loss": 1.7936, + "step": 83 + }, + { + "epoch": 1.7, + "learning_rate": 0.0001892857142857143, + "loss": 1.8341, + "step": 84 + }, + { + "epoch": 1.72, + "learning_rate": 0.00018915816326530612, + "loss": 1.7868, + "step": 85 + }, + { + "epoch": 1.74, + "learning_rate": 0.00018903061224489798, + "loss": 1.7493, + "step": 86 + }, + { + "epoch": 1.76, + "learning_rate": 0.0001889030612244898, + "loss": 1.7926, + "step": 87 + }, + { + "epoch": 1.78, + "learning_rate": 0.00018877551020408164, + "loss": 1.8278, + "step": 88 + }, + { + "epoch": 1.8, + "learning_rate": 0.00018864795918367347, + "loss": 1.7387, + "step": 89 + }, + { + "epoch": 1.82, + "learning_rate": 0.0001885204081632653, + "loss": 1.7669, + "step": 90 + }, + { + "epoch": 1.84, + 
"learning_rate": 0.00018839285714285716, + "loss": 1.7686, + "step": 91 + }, + { + "epoch": 1.86, + "learning_rate": 0.000188265306122449, + "loss": 1.7759, + "step": 92 + }, + { + "epoch": 1.88, + "learning_rate": 0.00018813775510204082, + "loss": 1.7016, + "step": 93 + }, + { + "epoch": 1.9, + "learning_rate": 0.00018801020408163265, + "loss": 1.8123, + "step": 94 + }, + { + "epoch": 1.92, + "learning_rate": 0.0001878826530612245, + "loss": 1.8315, + "step": 95 + }, + { + "epoch": 1.94, + "learning_rate": 0.00018775510204081634, + "loss": 1.7679, + "step": 96 + }, + { + "epoch": 1.96, + "learning_rate": 0.00018762755102040817, + "loss": 1.7874, + "step": 97 + }, + { + "epoch": 1.98, + "learning_rate": 0.0001875, + "loss": 1.8008, + "step": 98 + }, + { + "epoch": 2.0, + "learning_rate": 0.00018737244897959186, + "loss": 1.7177, + "step": 99 + }, + { + "epoch": 2.02, + "learning_rate": 0.00018724489795918367, + "loss": 1.7272, + "step": 100 + }, + { + "epoch": 2.04, + "learning_rate": 0.00018711734693877552, + "loss": 1.7848, + "step": 101 + }, + { + "epoch": 2.06, + "learning_rate": 0.00018698979591836735, + "loss": 1.744, + "step": 102 + }, + { + "epoch": 2.08, + "learning_rate": 0.00018686224489795919, + "loss": 1.7005, + "step": 103 + }, + { + "epoch": 2.1, + "learning_rate": 0.00018673469387755102, + "loss": 1.8247, + "step": 104 + }, + { + "epoch": 2.12, + "learning_rate": 0.00018660714285714287, + "loss": 1.6855, + "step": 105 + }, + { + "epoch": 2.14, + "learning_rate": 0.0001864795918367347, + "loss": 1.7627, + "step": 106 + }, + { + "epoch": 2.17, + "learning_rate": 0.00018635204081632654, + "loss": 1.7564, + "step": 107 + }, + { + "epoch": 2.19, + "learning_rate": 0.00018622448979591837, + "loss": 1.8237, + "step": 108 + }, + { + "epoch": 2.21, + "learning_rate": 0.00018609693877551022, + "loss": 1.7421, + "step": 109 + }, + { + "epoch": 2.23, + "learning_rate": 0.00018596938775510206, + "loss": 1.7517, + "step": 110 + }, + { + "epoch": 2.25, + "learning_rate": 0.0001858418367346939, + "loss": 1.7515, + "step": 111 + }, + { + "epoch": 2.27, + "learning_rate": 0.00018571428571428572, + "loss": 1.7842, + "step": 112 + }, + { + "epoch": 2.29, + "learning_rate": 0.00018558673469387755, + "loss": 1.8001, + "step": 113 + }, + { + "epoch": 2.31, + "learning_rate": 0.0001854591836734694, + "loss": 1.7653, + "step": 114 + }, + { + "epoch": 2.33, + "learning_rate": 0.00018533163265306124, + "loss": 1.694, + "step": 115 + }, + { + "epoch": 2.35, + "learning_rate": 0.00018520408163265307, + "loss": 1.7457, + "step": 116 + }, + { + "epoch": 2.37, + "learning_rate": 0.0001850765306122449, + "loss": 1.7899, + "step": 117 + }, + { + "epoch": 2.39, + "learning_rate": 0.00018494897959183676, + "loss": 1.7473, + "step": 118 + }, + { + "epoch": 2.41, + "learning_rate": 0.0001848214285714286, + "loss": 1.6639, + "step": 119 + }, + { + "epoch": 2.43, + "learning_rate": 0.00018469387755102042, + "loss": 1.762, + "step": 120 + }, + { + "epoch": 2.45, + "learning_rate": 0.00018456632653061225, + "loss": 1.7378, + "step": 121 + }, + { + "epoch": 2.47, + "learning_rate": 0.0001844387755102041, + "loss": 1.672, + "step": 122 + }, + { + "epoch": 2.49, + "learning_rate": 0.0001843112244897959, + "loss": 1.7267, + "step": 123 + }, + { + "epoch": 2.51, + "learning_rate": 0.00018418367346938777, + "loss": 1.7825, + "step": 124 + }, + { + "epoch": 2.53, + "learning_rate": 0.0001840561224489796, + "loss": 1.7566, + "step": 125 + }, + { + "epoch": 2.55, + "learning_rate": 0.00018392857142857143, + "loss": 1.8169, 
+ "step": 126 + }, + { + "epoch": 2.57, + "learning_rate": 0.00018380102040816326, + "loss": 1.6801, + "step": 127 + }, + { + "epoch": 2.59, + "learning_rate": 0.00018367346938775512, + "loss": 1.7292, + "step": 128 + }, + { + "epoch": 2.61, + "learning_rate": 0.00018354591836734695, + "loss": 1.737, + "step": 129 + }, + { + "epoch": 2.63, + "learning_rate": 0.00018341836734693878, + "loss": 1.7696, + "step": 130 + }, + { + "epoch": 2.65, + "learning_rate": 0.0001832908163265306, + "loss": 1.7239, + "step": 131 + }, + { + "epoch": 2.67, + "learning_rate": 0.00018316326530612247, + "loss": 1.7441, + "step": 132 + }, + { + "epoch": 2.69, + "learning_rate": 0.0001830357142857143, + "loss": 1.7825, + "step": 133 + }, + { + "epoch": 2.71, + "learning_rate": 0.00018290816326530613, + "loss": 1.7411, + "step": 134 + }, + { + "epoch": 2.73, + "learning_rate": 0.00018278061224489796, + "loss": 1.7119, + "step": 135 + }, + { + "epoch": 2.75, + "learning_rate": 0.0001826530612244898, + "loss": 1.7443, + "step": 136 + }, + { + "epoch": 2.77, + "learning_rate": 0.00018252551020408165, + "loss": 1.7197, + "step": 137 + }, + { + "epoch": 2.79, + "learning_rate": 0.00018239795918367348, + "loss": 1.7273, + "step": 138 + }, + { + "epoch": 2.81, + "learning_rate": 0.0001822704081632653, + "loss": 1.7681, + "step": 139 + }, + { + "epoch": 2.83, + "learning_rate": 0.00018214285714285714, + "loss": 1.8088, + "step": 140 + }, + { + "epoch": 2.85, + "learning_rate": 0.000182015306122449, + "loss": 1.7301, + "step": 141 + }, + { + "epoch": 2.87, + "learning_rate": 0.00018188775510204083, + "loss": 1.6853, + "step": 142 + }, + { + "epoch": 2.89, + "learning_rate": 0.00018176020408163266, + "loss": 1.6966, + "step": 143 + }, + { + "epoch": 2.91, + "learning_rate": 0.0001816326530612245, + "loss": 1.7938, + "step": 144 + }, + { + "epoch": 2.93, + "learning_rate": 0.00018150510204081635, + "loss": 1.7639, + "step": 145 + }, + { + "epoch": 2.95, + "learning_rate": 0.00018137755102040816, + "loss": 1.7527, + "step": 146 + }, + { + "epoch": 2.97, + "learning_rate": 0.00018125000000000001, + "loss": 1.7386, + "step": 147 + }, + { + "epoch": 2.99, + "learning_rate": 0.00018112244897959185, + "loss": 1.7223, + "step": 148 + }, + { + "epoch": 3.01, + "learning_rate": 0.00018099489795918368, + "loss": 1.7571, + "step": 149 + }, + { + "epoch": 3.04, + "learning_rate": 0.0001808673469387755, + "loss": 1.7054, + "step": 150 + }, + { + "epoch": 3.06, + "learning_rate": 0.00018073979591836737, + "loss": 1.6581, + "step": 151 + }, + { + "epoch": 3.08, + "learning_rate": 0.00018061224489795917, + "loss": 1.681, + "step": 152 + }, + { + "epoch": 3.1, + "learning_rate": 0.00018048469387755103, + "loss": 1.7425, + "step": 153 + }, + { + "epoch": 3.12, + "learning_rate": 0.00018035714285714286, + "loss": 1.7108, + "step": 154 + }, + { + "epoch": 3.14, + "learning_rate": 0.00018022959183673472, + "loss": 1.7194, + "step": 155 + }, + { + "epoch": 3.16, + "learning_rate": 0.00018010204081632655, + "loss": 1.6953, + "step": 156 + }, + { + "epoch": 3.18, + "learning_rate": 0.00017997448979591838, + "loss": 1.669, + "step": 157 + }, + { + "epoch": 3.2, + "learning_rate": 0.0001798469387755102, + "loss": 1.744, + "step": 158 + }, + { + "epoch": 3.22, + "learning_rate": 0.00017971938775510204, + "loss": 1.6467, + "step": 159 + }, + { + "epoch": 3.24, + "learning_rate": 0.0001795918367346939, + "loss": 1.7103, + "step": 160 + }, + { + "epoch": 3.26, + "learning_rate": 0.00017946428571428573, + "loss": 1.6662, + "step": 161 + }, + { + "epoch": 
3.28, + "learning_rate": 0.00017933673469387756, + "loss": 1.6657, + "step": 162 + }, + { + "epoch": 3.3, + "learning_rate": 0.0001792091836734694, + "loss": 1.791, + "step": 163 + }, + { + "epoch": 3.32, + "learning_rate": 0.00017908163265306125, + "loss": 1.7704, + "step": 164 + }, + { + "epoch": 3.34, + "learning_rate": 0.00017895408163265305, + "loss": 1.7229, + "step": 165 + }, + { + "epoch": 3.36, + "learning_rate": 0.0001788265306122449, + "loss": 1.76, + "step": 166 + }, + { + "epoch": 3.38, + "learning_rate": 0.00017869897959183674, + "loss": 1.6482, + "step": 167 + }, + { + "epoch": 3.4, + "learning_rate": 0.0001785714285714286, + "loss": 1.8076, + "step": 168 + }, + { + "epoch": 3.42, + "learning_rate": 0.0001784438775510204, + "loss": 1.7368, + "step": 169 + }, + { + "epoch": 3.44, + "learning_rate": 0.00017831632653061226, + "loss": 1.6264, + "step": 170 + }, + { + "epoch": 3.46, + "learning_rate": 0.0001781887755102041, + "loss": 1.6289, + "step": 171 + }, + { + "epoch": 3.48, + "learning_rate": 0.00017806122448979592, + "loss": 1.7913, + "step": 172 + }, + { + "epoch": 3.5, + "learning_rate": 0.00017793367346938775, + "loss": 1.6985, + "step": 173 + }, + { + "epoch": 3.52, + "learning_rate": 0.0001778061224489796, + "loss": 1.6936, + "step": 174 + }, + { + "epoch": 3.54, + "learning_rate": 0.00017767857142857141, + "loss": 1.8068, + "step": 175 + }, + { + "epoch": 3.56, + "learning_rate": 0.00017755102040816327, + "loss": 1.7243, + "step": 176 + }, + { + "epoch": 3.58, + "learning_rate": 0.0001774234693877551, + "loss": 1.6893, + "step": 177 + }, + { + "epoch": 3.6, + "learning_rate": 0.00017729591836734696, + "loss": 1.8122, + "step": 178 + }, + { + "epoch": 3.62, + "learning_rate": 0.0001771683673469388, + "loss": 1.6562, + "step": 179 + }, + { + "epoch": 3.64, + "learning_rate": 0.00017704081632653062, + "loss": 1.6999, + "step": 180 + }, + { + "epoch": 3.66, + "learning_rate": 0.00017691326530612245, + "loss": 1.7229, + "step": 181 + }, + { + "epoch": 3.68, + "learning_rate": 0.00017678571428571428, + "loss": 1.6764, + "step": 182 + }, + { + "epoch": 3.7, + "learning_rate": 0.00017665816326530614, + "loss": 1.6982, + "step": 183 + }, + { + "epoch": 3.72, + "learning_rate": 0.00017653061224489797, + "loss": 1.696, + "step": 184 + }, + { + "epoch": 3.74, + "learning_rate": 0.0001764030612244898, + "loss": 1.6797, + "step": 185 + }, + { + "epoch": 3.76, + "learning_rate": 0.00017627551020408164, + "loss": 1.637, + "step": 186 + }, + { + "epoch": 3.78, + "learning_rate": 0.0001761479591836735, + "loss": 1.7074, + "step": 187 + }, + { + "epoch": 3.8, + "learning_rate": 0.0001760204081632653, + "loss": 1.705, + "step": 188 + }, + { + "epoch": 3.82, + "learning_rate": 0.00017589285714285716, + "loss": 1.6153, + "step": 189 + }, + { + "epoch": 3.84, + "learning_rate": 0.00017576530612244899, + "loss": 1.7354, + "step": 190 + }, + { + "epoch": 3.86, + "learning_rate": 0.00017563775510204084, + "loss": 1.6941, + "step": 191 + }, + { + "epoch": 3.88, + "learning_rate": 0.00017551020408163265, + "loss": 1.7231, + "step": 192 + }, + { + "epoch": 3.91, + "learning_rate": 0.0001753826530612245, + "loss": 1.7663, + "step": 193 + }, + { + "epoch": 3.93, + "learning_rate": 0.00017525510204081634, + "loss": 1.6532, + "step": 194 + }, + { + "epoch": 3.95, + "learning_rate": 0.00017512755102040817, + "loss": 1.7115, + "step": 195 + }, + { + "epoch": 3.97, + "learning_rate": 0.000175, + "loss": 1.6955, + "step": 196 + }, + { + "epoch": 3.99, + "learning_rate": 0.00017487244897959186, + 
"loss": 1.6863, + "step": 197 + }, + { + "epoch": 4.01, + "learning_rate": 0.00017474489795918366, + "loss": 1.7012, + "step": 198 + }, + { + "epoch": 4.03, + "learning_rate": 0.00017461734693877552, + "loss": 1.5927, + "step": 199 + }, + { + "epoch": 4.05, + "learning_rate": 0.00017448979591836735, + "loss": 1.6272, + "step": 200 + }, + { + "epoch": 4.07, + "learning_rate": 0.00017436224489795918, + "loss": 1.5994, + "step": 201 + }, + { + "epoch": 4.09, + "learning_rate": 0.00017423469387755104, + "loss": 1.7141, + "step": 202 + }, + { + "epoch": 4.11, + "learning_rate": 0.00017410714285714287, + "loss": 1.7547, + "step": 203 + }, + { + "epoch": 4.13, + "learning_rate": 0.0001739795918367347, + "loss": 1.6254, + "step": 204 + }, + { + "epoch": 4.15, + "learning_rate": 0.00017385204081632653, + "loss": 1.6686, + "step": 205 + }, + { + "epoch": 4.17, + "learning_rate": 0.0001737244897959184, + "loss": 1.6684, + "step": 206 + }, + { + "epoch": 4.19, + "learning_rate": 0.00017359693877551022, + "loss": 1.6724, + "step": 207 + }, + { + "epoch": 4.21, + "learning_rate": 0.00017346938775510205, + "loss": 1.7361, + "step": 208 + }, + { + "epoch": 4.23, + "learning_rate": 0.00017334183673469388, + "loss": 1.7167, + "step": 209 + }, + { + "epoch": 4.25, + "learning_rate": 0.00017321428571428574, + "loss": 1.7226, + "step": 210 + }, + { + "epoch": 4.27, + "learning_rate": 0.00017308673469387754, + "loss": 1.7133, + "step": 211 + }, + { + "epoch": 4.29, + "learning_rate": 0.0001729591836734694, + "loss": 1.649, + "step": 212 + }, + { + "epoch": 4.31, + "learning_rate": 0.00017283163265306123, + "loss": 1.7104, + "step": 213 + }, + { + "epoch": 4.33, + "learning_rate": 0.00017270408163265306, + "loss": 1.6861, + "step": 214 + }, + { + "epoch": 4.35, + "learning_rate": 0.0001725765306122449, + "loss": 1.648, + "step": 215 + }, + { + "epoch": 4.37, + "learning_rate": 0.00017244897959183675, + "loss": 1.6215, + "step": 216 + }, + { + "epoch": 4.39, + "learning_rate": 0.00017232142857142858, + "loss": 1.6334, + "step": 217 + }, + { + "epoch": 4.41, + "learning_rate": 0.0001721938775510204, + "loss": 1.6283, + "step": 218 + }, + { + "epoch": 4.43, + "learning_rate": 0.00017206632653061224, + "loss": 1.6462, + "step": 219 + }, + { + "epoch": 4.45, + "learning_rate": 0.0001719387755102041, + "loss": 1.7233, + "step": 220 + }, + { + "epoch": 4.47, + "learning_rate": 0.0001718112244897959, + "loss": 1.7839, + "step": 221 + }, + { + "epoch": 4.49, + "learning_rate": 0.00017168367346938776, + "loss": 1.7204, + "step": 222 + }, + { + "epoch": 4.51, + "learning_rate": 0.0001715561224489796, + "loss": 1.7671, + "step": 223 + }, + { + "epoch": 4.53, + "learning_rate": 0.00017142857142857143, + "loss": 1.6824, + "step": 224 + }, + { + "epoch": 4.55, + "learning_rate": 0.00017130102040816328, + "loss": 1.7068, + "step": 225 + }, + { + "epoch": 4.57, + "learning_rate": 0.00017117346938775511, + "loss": 1.6515, + "step": 226 + }, + { + "epoch": 4.59, + "learning_rate": 0.00017104591836734694, + "loss": 1.6586, + "step": 227 + }, + { + "epoch": 4.61, + "learning_rate": 0.00017091836734693878, + "loss": 1.6355, + "step": 228 + }, + { + "epoch": 4.63, + "learning_rate": 0.00017079081632653063, + "loss": 1.7173, + "step": 229 + }, + { + "epoch": 4.65, + "learning_rate": 0.00017066326530612246, + "loss": 1.6585, + "step": 230 + }, + { + "epoch": 4.67, + "learning_rate": 0.0001705357142857143, + "loss": 1.5856, + "step": 231 + }, + { + "epoch": 4.69, + "learning_rate": 0.00017040816326530613, + "loss": 1.5923, + "step": 232 
+ }, + { + "epoch": 4.71, + "learning_rate": 0.00017028061224489798, + "loss": 1.7128, + "step": 233 + }, + { + "epoch": 4.73, + "learning_rate": 0.0001701530612244898, + "loss": 1.6971, + "step": 234 + }, + { + "epoch": 4.75, + "learning_rate": 0.00017002551020408165, + "loss": 1.6416, + "step": 235 + }, + { + "epoch": 4.78, + "learning_rate": 0.00016989795918367348, + "loss": 1.645, + "step": 236 + }, + { + "epoch": 4.8, + "learning_rate": 0.0001697704081632653, + "loss": 1.6792, + "step": 237 + }, + { + "epoch": 4.82, + "learning_rate": 0.00016964285714285714, + "loss": 1.6522, + "step": 238 + }, + { + "epoch": 4.84, + "learning_rate": 0.000169515306122449, + "loss": 1.6315, + "step": 239 + }, + { + "epoch": 4.86, + "learning_rate": 0.00016938775510204083, + "loss": 1.6622, + "step": 240 + }, + { + "epoch": 4.88, + "learning_rate": 0.00016926020408163266, + "loss": 1.6566, + "step": 241 + }, + { + "epoch": 4.9, + "learning_rate": 0.0001691326530612245, + "loss": 1.7141, + "step": 242 + }, + { + "epoch": 4.92, + "learning_rate": 0.00016900510204081635, + "loss": 1.5873, + "step": 243 + }, + { + "epoch": 4.94, + "learning_rate": 0.00016887755102040818, + "loss": 1.6571, + "step": 244 + }, + { + "epoch": 4.96, + "learning_rate": 0.00016875, + "loss": 1.6829, + "step": 245 + }, + { + "epoch": 4.98, + "learning_rate": 0.00016862244897959184, + "loss": 1.6935, + "step": 246 + }, + { + "epoch": 5.0, + "learning_rate": 0.00016849489795918367, + "loss": 1.6782, + "step": 247 + }, + { + "epoch": 5.02, + "learning_rate": 0.00016836734693877553, + "loss": 1.622, + "step": 248 + }, + { + "epoch": 5.04, + "learning_rate": 0.00016823979591836736, + "loss": 1.6596, + "step": 249 + }, + { + "epoch": 5.06, + "learning_rate": 0.0001681122448979592, + "loss": 1.5821, + "step": 250 + }, + { + "epoch": 5.08, + "learning_rate": 0.00016798469387755102, + "loss": 1.7292, + "step": 251 + }, + { + "epoch": 5.1, + "learning_rate": 0.00016785714285714288, + "loss": 1.646, + "step": 252 + }, + { + "epoch": 5.12, + "learning_rate": 0.0001677295918367347, + "loss": 1.6969, + "step": 253 + }, + { + "epoch": 5.14, + "learning_rate": 0.00016760204081632654, + "loss": 1.6082, + "step": 254 + }, + { + "epoch": 5.16, + "learning_rate": 0.00016747448979591837, + "loss": 1.5843, + "step": 255 + }, + { + "epoch": 5.18, + "learning_rate": 0.00016734693877551023, + "loss": 1.6827, + "step": 256 + }, + { + "epoch": 5.2, + "learning_rate": 0.00016721938775510203, + "loss": 1.5824, + "step": 257 + }, + { + "epoch": 5.22, + "learning_rate": 0.0001670918367346939, + "loss": 1.6795, + "step": 258 + }, + { + "epoch": 5.24, + "learning_rate": 0.00016696428571428572, + "loss": 1.5639, + "step": 259 + }, + { + "epoch": 5.26, + "learning_rate": 0.00016683673469387755, + "loss": 1.592, + "step": 260 + }, + { + "epoch": 5.28, + "learning_rate": 0.00016670918367346938, + "loss": 1.65, + "step": 261 + }, + { + "epoch": 5.3, + "learning_rate": 0.00016658163265306124, + "loss": 1.5592, + "step": 262 + }, + { + "epoch": 5.32, + "learning_rate": 0.00016645408163265305, + "loss": 1.5091, + "step": 263 + }, + { + "epoch": 5.34, + "learning_rate": 0.0001663265306122449, + "loss": 1.6138, + "step": 264 + }, + { + "epoch": 5.36, + "learning_rate": 0.00016619897959183673, + "loss": 1.625, + "step": 265 + }, + { + "epoch": 5.38, + "learning_rate": 0.0001660714285714286, + "loss": 1.5757, + "step": 266 + }, + { + "epoch": 5.4, + "learning_rate": 0.00016594387755102042, + "loss": 1.6372, + "step": 267 + }, + { + "epoch": 5.42, + "learning_rate": 
0.00016581632653061225, + "loss": 1.5891, + "step": 268 + }, + { + "epoch": 5.44, + "learning_rate": 0.00016568877551020409, + "loss": 1.6893, + "step": 269 + }, + { + "epoch": 5.46, + "learning_rate": 0.00016556122448979592, + "loss": 1.6662, + "step": 270 + }, + { + "epoch": 5.48, + "learning_rate": 0.00016543367346938777, + "loss": 1.7132, + "step": 271 + }, + { + "epoch": 5.5, + "learning_rate": 0.0001653061224489796, + "loss": 1.5835, + "step": 272 + }, + { + "epoch": 5.52, + "learning_rate": 0.00016517857142857144, + "loss": 1.6342, + "step": 273 + }, + { + "epoch": 5.54, + "learning_rate": 0.00016505102040816327, + "loss": 1.6717, + "step": 274 + }, + { + "epoch": 5.56, + "learning_rate": 0.00016492346938775512, + "loss": 1.6248, + "step": 275 + }, + { + "epoch": 5.58, + "learning_rate": 0.00016479591836734696, + "loss": 1.6117, + "step": 276 + }, + { + "epoch": 5.6, + "learning_rate": 0.0001646683673469388, + "loss": 1.6798, + "step": 277 + }, + { + "epoch": 5.63, + "learning_rate": 0.00016454081632653062, + "loss": 1.6406, + "step": 278 + }, + { + "epoch": 5.65, + "learning_rate": 0.00016441326530612248, + "loss": 1.6512, + "step": 279 + }, + { + "epoch": 5.67, + "learning_rate": 0.00016428571428571428, + "loss": 1.6102, + "step": 280 + }, + { + "epoch": 5.69, + "learning_rate": 0.00016415816326530614, + "loss": 1.6113, + "step": 281 + }, + { + "epoch": 5.71, + "learning_rate": 0.00016403061224489797, + "loss": 1.7116, + "step": 282 + }, + { + "epoch": 5.73, + "learning_rate": 0.0001639030612244898, + "loss": 1.6846, + "step": 283 + }, + { + "epoch": 5.75, + "learning_rate": 0.00016377551020408163, + "loss": 1.6911, + "step": 284 + }, + { + "epoch": 5.77, + "learning_rate": 0.0001636479591836735, + "loss": 1.6202, + "step": 285 + }, + { + "epoch": 5.79, + "learning_rate": 0.0001635204081632653, + "loss": 1.5715, + "step": 286 + }, + { + "epoch": 5.81, + "learning_rate": 0.00016339285714285715, + "loss": 1.6461, + "step": 287 + }, + { + "epoch": 5.83, + "learning_rate": 0.00016326530612244898, + "loss": 1.6624, + "step": 288 + }, + { + "epoch": 5.85, + "learning_rate": 0.00016313775510204084, + "loss": 1.6535, + "step": 289 + }, + { + "epoch": 5.87, + "learning_rate": 0.00016301020408163267, + "loss": 1.6275, + "step": 290 + }, + { + "epoch": 5.89, + "learning_rate": 0.0001628826530612245, + "loss": 1.6636, + "step": 291 + }, + { + "epoch": 5.91, + "learning_rate": 0.00016275510204081633, + "loss": 1.6546, + "step": 292 + }, + { + "epoch": 5.93, + "learning_rate": 0.00016262755102040816, + "loss": 1.7274, + "step": 293 + }, + { + "epoch": 5.95, + "learning_rate": 0.00016250000000000002, + "loss": 1.5901, + "step": 294 + }, + { + "epoch": 5.97, + "learning_rate": 0.00016237244897959185, + "loss": 1.6046, + "step": 295 + }, + { + "epoch": 5.99, + "learning_rate": 0.00016224489795918368, + "loss": 1.5828, + "step": 296 + }, + { + "epoch": 6.01, + "learning_rate": 0.0001621173469387755, + "loss": 1.6435, + "step": 297 + }, + { + "epoch": 6.03, + "learning_rate": 0.00016198979591836737, + "loss": 1.6263, + "step": 298 + }, + { + "epoch": 6.05, + "learning_rate": 0.00016186224489795917, + "loss": 1.4944, + "step": 299 + }, + { + "epoch": 6.07, + "learning_rate": 0.00016173469387755103, + "loss": 1.6286, + "step": 300 + }, + { + "epoch": 6.09, + "learning_rate": 0.00016160714285714286, + "loss": 1.694, + "step": 301 + }, + { + "epoch": 6.11, + "learning_rate": 0.00016147959183673472, + "loss": 1.6197, + "step": 302 + }, + { + "epoch": 6.13, + "learning_rate": 0.00016135204081632652, + 
"loss": 1.5597, + "step": 303 + }, + { + "epoch": 6.15, + "learning_rate": 0.00016122448979591838, + "loss": 1.5487, + "step": 304 + }, + { + "epoch": 6.17, + "learning_rate": 0.0001610969387755102, + "loss": 1.5769, + "step": 305 + }, + { + "epoch": 6.19, + "learning_rate": 0.00016096938775510204, + "loss": 1.6367, + "step": 306 + }, + { + "epoch": 6.21, + "learning_rate": 0.00016084183673469388, + "loss": 1.583, + "step": 307 + }, + { + "epoch": 6.23, + "learning_rate": 0.00016071428571428573, + "loss": 1.6201, + "step": 308 + }, + { + "epoch": 6.25, + "learning_rate": 0.00016058673469387754, + "loss": 1.6586, + "step": 309 + }, + { + "epoch": 6.27, + "learning_rate": 0.0001604591836734694, + "loss": 1.6711, + "step": 310 + }, + { + "epoch": 6.29, + "learning_rate": 0.00016033163265306123, + "loss": 1.6402, + "step": 311 + }, + { + "epoch": 6.31, + "learning_rate": 0.00016020408163265306, + "loss": 1.5247, + "step": 312 + }, + { + "epoch": 6.33, + "learning_rate": 0.00016007653061224491, + "loss": 1.5356, + "step": 313 + }, + { + "epoch": 6.35, + "learning_rate": 0.00015994897959183675, + "loss": 1.564, + "step": 314 + }, + { + "epoch": 6.37, + "learning_rate": 0.00015982142857142858, + "loss": 1.563, + "step": 315 + }, + { + "epoch": 6.39, + "learning_rate": 0.0001596938775510204, + "loss": 1.5198, + "step": 316 + }, + { + "epoch": 6.41, + "learning_rate": 0.00015956632653061227, + "loss": 1.6558, + "step": 317 + }, + { + "epoch": 6.43, + "learning_rate": 0.0001594387755102041, + "loss": 1.5534, + "step": 318 + }, + { + "epoch": 6.45, + "learning_rate": 0.00015931122448979593, + "loss": 1.6239, + "step": 319 + }, + { + "epoch": 6.47, + "learning_rate": 0.00015918367346938776, + "loss": 1.5645, + "step": 320 + }, + { + "epoch": 6.5, + "learning_rate": 0.00015905612244897962, + "loss": 1.5713, + "step": 321 + }, + { + "epoch": 6.52, + "learning_rate": 0.00015892857142857142, + "loss": 1.6176, + "step": 322 + }, + { + "epoch": 6.54, + "learning_rate": 0.00015880102040816328, + "loss": 1.502, + "step": 323 + }, + { + "epoch": 6.56, + "learning_rate": 0.0001586734693877551, + "loss": 1.645, + "step": 324 + }, + { + "epoch": 6.58, + "learning_rate": 0.00015854591836734697, + "loss": 1.5904, + "step": 325 + }, + { + "epoch": 6.6, + "learning_rate": 0.00015841836734693877, + "loss": 1.6149, + "step": 326 + }, + { + "epoch": 6.62, + "learning_rate": 0.00015829081632653063, + "loss": 1.6757, + "step": 327 + }, + { + "epoch": 6.64, + "learning_rate": 0.00015816326530612246, + "loss": 1.541, + "step": 328 + }, + { + "epoch": 6.66, + "learning_rate": 0.0001580357142857143, + "loss": 1.5898, + "step": 329 + }, + { + "epoch": 6.68, + "learning_rate": 0.00015790816326530612, + "loss": 1.5441, + "step": 330 + }, + { + "epoch": 6.7, + "learning_rate": 0.00015778061224489798, + "loss": 1.61, + "step": 331 + }, + { + "epoch": 6.72, + "learning_rate": 0.00015765306122448978, + "loss": 1.615, + "step": 332 + }, + { + "epoch": 6.74, + "learning_rate": 0.00015752551020408164, + "loss": 1.6575, + "step": 333 + }, + { + "epoch": 6.76, + "learning_rate": 0.00015739795918367347, + "loss": 1.6702, + "step": 334 + }, + { + "epoch": 6.78, + "learning_rate": 0.0001572704081632653, + "loss": 1.6009, + "step": 335 + }, + { + "epoch": 6.8, + "learning_rate": 0.00015714285714285716, + "loss": 1.5568, + "step": 336 + }, + { + "epoch": 6.82, + "learning_rate": 0.000157015306122449, + "loss": 1.619, + "step": 337 + }, + { + "epoch": 6.84, + "learning_rate": 0.00015688775510204082, + "loss": 1.5563, + "step": 338 + }, + { + 
"epoch": 6.86, + "learning_rate": 0.00015676020408163265, + "loss": 1.6328, + "step": 339 + }, + { + "epoch": 6.88, + "learning_rate": 0.0001566326530612245, + "loss": 1.5726, + "step": 340 + }, + { + "epoch": 6.9, + "learning_rate": 0.00015650510204081634, + "loss": 1.6199, + "step": 341 + }, + { + "epoch": 6.92, + "learning_rate": 0.00015637755102040817, + "loss": 1.5722, + "step": 342 + }, + { + "epoch": 6.94, + "learning_rate": 0.00015625, + "loss": 1.5685, + "step": 343 + }, + { + "epoch": 6.96, + "learning_rate": 0.00015612244897959186, + "loss": 1.5615, + "step": 344 + }, + { + "epoch": 6.98, + "learning_rate": 0.00015599489795918366, + "loss": 1.5994, + "step": 345 + }, + { + "epoch": 7.0, + "learning_rate": 0.00015586734693877552, + "loss": 1.5579, + "step": 346 + }, + { + "epoch": 7.02, + "learning_rate": 0.00015573979591836735, + "loss": 1.547, + "step": 347 + }, + { + "epoch": 7.04, + "learning_rate": 0.00015561224489795918, + "loss": 1.5292, + "step": 348 + }, + { + "epoch": 7.06, + "learning_rate": 0.00015548469387755102, + "loss": 1.6032, + "step": 349 + }, + { + "epoch": 7.08, + "learning_rate": 0.00015535714285714287, + "loss": 1.5149, + "step": 350 + }, + { + "epoch": 7.1, + "learning_rate": 0.0001552295918367347, + "loss": 1.6093, + "step": 351 + }, + { + "epoch": 7.12, + "learning_rate": 0.00015510204081632654, + "loss": 1.5421, + "step": 352 + }, + { + "epoch": 7.14, + "learning_rate": 0.00015497448979591837, + "loss": 1.5733, + "step": 353 + }, + { + "epoch": 7.16, + "learning_rate": 0.00015484693877551022, + "loss": 1.5703, + "step": 354 + }, + { + "epoch": 7.18, + "learning_rate": 0.00015471938775510203, + "loss": 1.6141, + "step": 355 + }, + { + "epoch": 7.2, + "learning_rate": 0.00015459183673469389, + "loss": 1.5526, + "step": 356 + }, + { + "epoch": 7.22, + "learning_rate": 0.00015446428571428572, + "loss": 1.5347, + "step": 357 + }, + { + "epoch": 7.24, + "learning_rate": 0.00015433673469387755, + "loss": 1.5682, + "step": 358 + }, + { + "epoch": 7.26, + "learning_rate": 0.0001542091836734694, + "loss": 1.5292, + "step": 359 + }, + { + "epoch": 7.28, + "learning_rate": 0.00015408163265306124, + "loss": 1.499, + "step": 360 + }, + { + "epoch": 7.3, + "learning_rate": 0.00015395408163265307, + "loss": 1.5624, + "step": 361 + }, + { + "epoch": 7.32, + "learning_rate": 0.0001538265306122449, + "loss": 1.627, + "step": 362 + }, + { + "epoch": 7.34, + "learning_rate": 0.00015369897959183676, + "loss": 1.5327, + "step": 363 + }, + { + "epoch": 7.37, + "learning_rate": 0.0001535714285714286, + "loss": 1.5622, + "step": 364 + }, + { + "epoch": 7.39, + "learning_rate": 0.00015344387755102042, + "loss": 1.5659, + "step": 365 + }, + { + "epoch": 7.41, + "learning_rate": 0.00015331632653061225, + "loss": 1.5019, + "step": 366 + }, + { + "epoch": 7.43, + "learning_rate": 0.0001531887755102041, + "loss": 1.5921, + "step": 367 + }, + { + "epoch": 7.45, + "learning_rate": 0.0001530612244897959, + "loss": 1.5914, + "step": 368 + }, + { + "epoch": 7.47, + "learning_rate": 0.00015293367346938777, + "loss": 1.5045, + "step": 369 + }, + { + "epoch": 7.49, + "learning_rate": 0.0001528061224489796, + "loss": 1.6209, + "step": 370 + }, + { + "epoch": 7.51, + "learning_rate": 0.00015267857142857143, + "loss": 1.5198, + "step": 371 + }, + { + "epoch": 7.53, + "learning_rate": 0.00015255102040816326, + "loss": 1.5363, + "step": 372 + }, + { + "epoch": 7.55, + "learning_rate": 0.00015242346938775512, + "loss": 1.5391, + "step": 373 + }, + { + "epoch": 7.57, + "learning_rate": 
0.00015229591836734695, + "loss": 1.4546, + "step": 374 + }, + { + "epoch": 7.59, + "learning_rate": 0.00015216836734693878, + "loss": 1.5546, + "step": 375 + }, + { + "epoch": 7.61, + "learning_rate": 0.0001520408163265306, + "loss": 1.5629, + "step": 376 + }, + { + "epoch": 7.63, + "learning_rate": 0.00015191326530612247, + "loss": 1.6002, + "step": 377 + }, + { + "epoch": 7.65, + "learning_rate": 0.00015178571428571427, + "loss": 1.5543, + "step": 378 + }, + { + "epoch": 7.67, + "learning_rate": 0.00015165816326530613, + "loss": 1.5925, + "step": 379 + }, + { + "epoch": 7.69, + "learning_rate": 0.00015153061224489796, + "loss": 1.5631, + "step": 380 + }, + { + "epoch": 7.71, + "learning_rate": 0.0001514030612244898, + "loss": 1.5677, + "step": 381 + }, + { + "epoch": 7.73, + "learning_rate": 0.00015127551020408165, + "loss": 1.5828, + "step": 382 + }, + { + "epoch": 7.75, + "learning_rate": 0.00015114795918367348, + "loss": 1.6494, + "step": 383 + }, + { + "epoch": 7.77, + "learning_rate": 0.0001510204081632653, + "loss": 1.553, + "step": 384 + }, + { + "epoch": 7.79, + "learning_rate": 0.00015089285714285714, + "loss": 1.6156, + "step": 385 + }, + { + "epoch": 7.81, + "learning_rate": 0.000150765306122449, + "loss": 1.5001, + "step": 386 + }, + { + "epoch": 7.83, + "learning_rate": 0.00015063775510204083, + "loss": 1.5321, + "step": 387 + }, + { + "epoch": 7.85, + "learning_rate": 0.00015051020408163266, + "loss": 1.5307, + "step": 388 + }, + { + "epoch": 7.87, + "learning_rate": 0.0001503826530612245, + "loss": 1.5639, + "step": 389 + }, + { + "epoch": 7.89, + "learning_rate": 0.00015025510204081635, + "loss": 1.517, + "step": 390 + }, + { + "epoch": 7.91, + "learning_rate": 0.00015012755102040816, + "loss": 1.4776, + "step": 391 + }, + { + "epoch": 7.93, + "learning_rate": 0.00015000000000000001, + "loss": 1.5368, + "step": 392 + }, + { + "epoch": 7.95, + "learning_rate": 0.00014987244897959184, + "loss": 1.5636, + "step": 393 + }, + { + "epoch": 7.97, + "learning_rate": 0.00014974489795918368, + "loss": 1.6004, + "step": 394 + }, + { + "epoch": 7.99, + "learning_rate": 0.0001496173469387755, + "loss": 1.5524, + "step": 395 + }, + { + "epoch": 8.01, + "learning_rate": 0.00014948979591836736, + "loss": 1.5307, + "step": 396 + }, + { + "epoch": 8.03, + "learning_rate": 0.00014936224489795917, + "loss": 1.5123, + "step": 397 + }, + { + "epoch": 8.05, + "learning_rate": 0.00014923469387755103, + "loss": 1.5132, + "step": 398 + }, + { + "epoch": 8.07, + "learning_rate": 0.00014910714285714286, + "loss": 1.5109, + "step": 399 + }, + { + "epoch": 8.09, + "learning_rate": 0.00014897959183673472, + "loss": 1.5302, + "step": 400 + }, + { + "epoch": 8.11, + "learning_rate": 0.00014885204081632652, + "loss": 1.5238, + "step": 401 + }, + { + "epoch": 8.13, + "learning_rate": 0.00014872448979591838, + "loss": 1.4781, + "step": 402 + }, + { + "epoch": 8.15, + "learning_rate": 0.0001485969387755102, + "loss": 1.5446, + "step": 403 + }, + { + "epoch": 8.17, + "learning_rate": 0.00014846938775510204, + "loss": 1.5, + "step": 404 + }, + { + "epoch": 8.19, + "learning_rate": 0.0001483418367346939, + "loss": 1.5458, + "step": 405 + }, + { + "epoch": 8.21, + "learning_rate": 0.00014821428571428573, + "loss": 1.5257, + "step": 406 + }, + { + "epoch": 8.24, + "learning_rate": 0.00014808673469387756, + "loss": 1.4607, + "step": 407 + }, + { + "epoch": 8.26, + "learning_rate": 0.0001479591836734694, + "loss": 1.4282, + "step": 408 + }, + { + "epoch": 8.28, + "learning_rate": 0.00014783163265306125, + "loss": 
1.4519, + "step": 409 + }, + { + "epoch": 8.3, + "learning_rate": 0.00014770408163265305, + "loss": 1.475, + "step": 410 + }, + { + "epoch": 8.32, + "learning_rate": 0.0001475765306122449, + "loss": 1.5425, + "step": 411 + }, + { + "epoch": 8.34, + "learning_rate": 0.00014744897959183674, + "loss": 1.5407, + "step": 412 + }, + { + "epoch": 8.36, + "learning_rate": 0.0001473214285714286, + "loss": 1.5698, + "step": 413 + }, + { + "epoch": 8.38, + "learning_rate": 0.0001471938775510204, + "loss": 1.4282, + "step": 414 + }, + { + "epoch": 8.4, + "learning_rate": 0.00014706632653061226, + "loss": 1.5301, + "step": 415 + }, + { + "epoch": 8.42, + "learning_rate": 0.0001469387755102041, + "loss": 1.5083, + "step": 416 + }, + { + "epoch": 8.44, + "learning_rate": 0.00014681122448979592, + "loss": 1.5712, + "step": 417 + }, + { + "epoch": 8.46, + "learning_rate": 0.00014668367346938775, + "loss": 1.4363, + "step": 418 + }, + { + "epoch": 8.48, + "learning_rate": 0.0001465561224489796, + "loss": 1.4463, + "step": 419 + }, + { + "epoch": 8.5, + "learning_rate": 0.00014642857142857141, + "loss": 1.4738, + "step": 420 + }, + { + "epoch": 8.52, + "learning_rate": 0.00014630102040816327, + "loss": 1.5396, + "step": 421 + }, + { + "epoch": 8.54, + "learning_rate": 0.0001461734693877551, + "loss": 1.4384, + "step": 422 + }, + { + "epoch": 8.56, + "learning_rate": 0.00014604591836734696, + "loss": 1.5345, + "step": 423 + }, + { + "epoch": 8.58, + "learning_rate": 0.0001459183673469388, + "loss": 1.5355, + "step": 424 + }, + { + "epoch": 8.6, + "learning_rate": 0.00014579081632653062, + "loss": 1.5188, + "step": 425 + }, + { + "epoch": 8.62, + "learning_rate": 0.00014566326530612245, + "loss": 1.5575, + "step": 426 + }, + { + "epoch": 8.64, + "learning_rate": 0.00014553571428571428, + "loss": 1.5279, + "step": 427 + }, + { + "epoch": 8.66, + "learning_rate": 0.00014540816326530614, + "loss": 1.5484, + "step": 428 + }, + { + "epoch": 8.68, + "learning_rate": 0.00014528061224489797, + "loss": 1.4878, + "step": 429 + }, + { + "epoch": 8.7, + "learning_rate": 0.0001451530612244898, + "loss": 1.503, + "step": 430 + }, + { + "epoch": 8.72, + "learning_rate": 0.00014502551020408163, + "loss": 1.4723, + "step": 431 + }, + { + "epoch": 8.74, + "learning_rate": 0.0001448979591836735, + "loss": 1.5579, + "step": 432 + }, + { + "epoch": 8.76, + "learning_rate": 0.0001447704081632653, + "loss": 1.4789, + "step": 433 + }, + { + "epoch": 8.78, + "learning_rate": 0.00014464285714285715, + "loss": 1.5501, + "step": 434 + }, + { + "epoch": 8.8, + "learning_rate": 0.00014451530612244899, + "loss": 1.5204, + "step": 435 + }, + { + "epoch": 8.82, + "learning_rate": 0.00014438775510204084, + "loss": 1.5489, + "step": 436 + }, + { + "epoch": 8.84, + "learning_rate": 0.00014426020408163265, + "loss": 1.5464, + "step": 437 + }, + { + "epoch": 8.86, + "learning_rate": 0.0001441326530612245, + "loss": 1.5896, + "step": 438 + }, + { + "epoch": 8.88, + "learning_rate": 0.00014400510204081634, + "loss": 1.5465, + "step": 439 + }, + { + "epoch": 8.9, + "learning_rate": 0.00014387755102040817, + "loss": 1.5094, + "step": 440 + }, + { + "epoch": 8.92, + "learning_rate": 0.00014375, + "loss": 1.5144, + "step": 441 + }, + { + "epoch": 8.94, + "learning_rate": 0.00014362244897959186, + "loss": 1.4919, + "step": 442 + }, + { + "epoch": 8.96, + "learning_rate": 0.00014349489795918366, + "loss": 1.4702, + "step": 443 + }, + { + "epoch": 8.98, + "learning_rate": 0.00014336734693877552, + "loss": 1.4996, + "step": 444 + }, + { + "epoch": 9.0, + 
"learning_rate": 0.00014323979591836735, + "loss": 1.5503, + "step": 445 + }, + { + "epoch": 9.02, + "learning_rate": 0.00014311224489795918, + "loss": 1.4125, + "step": 446 + }, + { + "epoch": 9.04, + "learning_rate": 0.00014298469387755104, + "loss": 1.4722, + "step": 447 + }, + { + "epoch": 9.06, + "learning_rate": 0.00014285714285714287, + "loss": 1.5199, + "step": 448 + }, + { + "epoch": 9.09, + "learning_rate": 0.0001427295918367347, + "loss": 1.4571, + "step": 449 + }, + { + "epoch": 9.11, + "learning_rate": 0.00014260204081632653, + "loss": 1.4996, + "step": 450 + }, + { + "epoch": 9.13, + "learning_rate": 0.0001424744897959184, + "loss": 1.4092, + "step": 451 + }, + { + "epoch": 9.15, + "learning_rate": 0.00014234693877551022, + "loss": 1.4198, + "step": 452 + }, + { + "epoch": 9.17, + "learning_rate": 0.00014221938775510205, + "loss": 1.4916, + "step": 453 + }, + { + "epoch": 9.19, + "learning_rate": 0.00014209183673469388, + "loss": 1.5051, + "step": 454 + }, + { + "epoch": 9.21, + "learning_rate": 0.00014196428571428574, + "loss": 1.4321, + "step": 455 + }, + { + "epoch": 9.23, + "learning_rate": 0.00014183673469387754, + "loss": 1.4097, + "step": 456 + }, + { + "epoch": 9.25, + "learning_rate": 0.0001417091836734694, + "loss": 1.4853, + "step": 457 + }, + { + "epoch": 9.27, + "learning_rate": 0.00014158163265306123, + "loss": 1.4593, + "step": 458 + }, + { + "epoch": 9.29, + "learning_rate": 0.00014145408163265306, + "loss": 1.3729, + "step": 459 + }, + { + "epoch": 9.31, + "learning_rate": 0.0001413265306122449, + "loss": 1.4467, + "step": 460 + }, + { + "epoch": 9.33, + "learning_rate": 0.00014119897959183675, + "loss": 1.4467, + "step": 461 + }, + { + "epoch": 9.35, + "learning_rate": 0.00014107142857142858, + "loss": 1.4785, + "step": 462 + }, + { + "epoch": 9.37, + "learning_rate": 0.0001409438775510204, + "loss": 1.4089, + "step": 463 + }, + { + "epoch": 9.39, + "learning_rate": 0.00014081632653061224, + "loss": 1.5026, + "step": 464 + }, + { + "epoch": 9.41, + "learning_rate": 0.0001406887755102041, + "loss": 1.4857, + "step": 465 + }, + { + "epoch": 9.43, + "learning_rate": 0.0001405612244897959, + "loss": 1.3745, + "step": 466 + }, + { + "epoch": 9.45, + "learning_rate": 0.00014043367346938776, + "loss": 1.4733, + "step": 467 + }, + { + "epoch": 9.47, + "learning_rate": 0.0001403061224489796, + "loss": 1.5212, + "step": 468 + }, + { + "epoch": 9.49, + "learning_rate": 0.00014017857142857142, + "loss": 1.5398, + "step": 469 + }, + { + "epoch": 9.51, + "learning_rate": 0.00014005102040816328, + "loss": 1.478, + "step": 470 + }, + { + "epoch": 9.53, + "learning_rate": 0.0001399234693877551, + "loss": 1.496, + "step": 471 + }, + { + "epoch": 9.55, + "learning_rate": 0.00013979591836734694, + "loss": 1.4837, + "step": 472 + }, + { + "epoch": 9.57, + "learning_rate": 0.00013966836734693878, + "loss": 1.4724, + "step": 473 + }, + { + "epoch": 9.59, + "learning_rate": 0.00013954081632653063, + "loss": 1.4828, + "step": 474 + }, + { + "epoch": 9.61, + "learning_rate": 0.00013941326530612246, + "loss": 1.5012, + "step": 475 + }, + { + "epoch": 9.63, + "learning_rate": 0.0001392857142857143, + "loss": 1.4879, + "step": 476 + }, + { + "epoch": 9.65, + "learning_rate": 0.00013915816326530613, + "loss": 1.4196, + "step": 477 + }, + { + "epoch": 9.67, + "learning_rate": 0.00013903061224489798, + "loss": 1.4915, + "step": 478 + }, + { + "epoch": 9.69, + "learning_rate": 0.0001389030612244898, + "loss": 1.3878, + "step": 479 + }, + { + "epoch": 9.71, + "learning_rate": 
0.00013877551020408165, + "loss": 1.466, + "step": 480 + }, + { + "epoch": 9.73, + "learning_rate": 0.00013864795918367348, + "loss": 1.4582, + "step": 481 + }, + { + "epoch": 9.75, + "learning_rate": 0.0001385204081632653, + "loss": 1.533, + "step": 482 + }, + { + "epoch": 9.77, + "learning_rate": 0.00013839285714285714, + "loss": 1.4697, + "step": 483 + }, + { + "epoch": 9.79, + "learning_rate": 0.000138265306122449, + "loss": 1.3989, + "step": 484 + }, + { + "epoch": 9.81, + "learning_rate": 0.00013813775510204083, + "loss": 1.4361, + "step": 485 + }, + { + "epoch": 9.83, + "learning_rate": 0.00013801020408163266, + "loss": 1.5271, + "step": 486 + }, + { + "epoch": 9.85, + "learning_rate": 0.0001378826530612245, + "loss": 1.4905, + "step": 487 + }, + { + "epoch": 9.87, + "learning_rate": 0.00013775510204081635, + "loss": 1.4757, + "step": 488 + }, + { + "epoch": 9.89, + "learning_rate": 0.00013762755102040815, + "loss": 1.5485, + "step": 489 + }, + { + "epoch": 9.91, + "learning_rate": 0.0001375, + "loss": 1.4783, + "step": 490 + }, + { + "epoch": 9.93, + "learning_rate": 0.00013737244897959184, + "loss": 1.4849, + "step": 491 + }, + { + "epoch": 9.96, + "learning_rate": 0.00013724489795918367, + "loss": 1.5382, + "step": 492 + }, + { + "epoch": 9.98, + "learning_rate": 0.00013711734693877553, + "loss": 1.4902, + "step": 493 + }, + { + "epoch": 10.0, + "learning_rate": 0.00013698979591836736, + "loss": 1.4865, + "step": 494 + }, + { + "epoch": 10.02, + "learning_rate": 0.0001368622448979592, + "loss": 1.4436, + "step": 495 + }, + { + "epoch": 10.04, + "learning_rate": 0.00013673469387755102, + "loss": 1.408, + "step": 496 + }, + { + "epoch": 10.06, + "learning_rate": 0.00013660714285714288, + "loss": 1.4764, + "step": 497 + }, + { + "epoch": 10.08, + "learning_rate": 0.0001364795918367347, + "loss": 1.4646, + "step": 498 + }, + { + "epoch": 10.1, + "learning_rate": 0.00013635204081632654, + "loss": 1.406, + "step": 499 + }, + { + "epoch": 10.12, + "learning_rate": 0.00013622448979591837, + "loss": 1.4785, + "step": 500 + }, + { + "epoch": 10.14, + "learning_rate": 0.00013609693877551023, + "loss": 1.4117, + "step": 501 + }, + { + "epoch": 10.16, + "learning_rate": 0.00013596938775510203, + "loss": 1.4108, + "step": 502 + }, + { + "epoch": 10.18, + "learning_rate": 0.0001358418367346939, + "loss": 1.4155, + "step": 503 + }, + { + "epoch": 10.2, + "learning_rate": 0.00013571428571428572, + "loss": 1.4021, + "step": 504 + }, + { + "epoch": 10.22, + "learning_rate": 0.00013558673469387755, + "loss": 1.411, + "step": 505 + }, + { + "epoch": 10.24, + "learning_rate": 0.00013545918367346938, + "loss": 1.3851, + "step": 506 + }, + { + "epoch": 10.26, + "learning_rate": 0.00013533163265306124, + "loss": 1.387, + "step": 507 + }, + { + "epoch": 10.28, + "learning_rate": 0.00013520408163265305, + "loss": 1.4163, + "step": 508 + }, + { + "epoch": 10.3, + "learning_rate": 0.0001350765306122449, + "loss": 1.3343, + "step": 509 + }, + { + "epoch": 10.32, + "learning_rate": 0.00013494897959183673, + "loss": 1.4811, + "step": 510 + }, + { + "epoch": 10.34, + "learning_rate": 0.0001348214285714286, + "loss": 1.4086, + "step": 511 + }, + { + "epoch": 10.36, + "learning_rate": 0.0001346938775510204, + "loss": 1.3879, + "step": 512 + }, + { + "epoch": 10.38, + "learning_rate": 0.00013456632653061225, + "loss": 1.4204, + "step": 513 + }, + { + "epoch": 10.4, + "learning_rate": 0.00013443877551020408, + "loss": 1.4158, + "step": 514 + }, + { + "epoch": 10.42, + "learning_rate": 0.00013431122448979592, + 
"loss": 1.4521, + "step": 515 + }, + { + "epoch": 10.44, + "learning_rate": 0.00013418367346938777, + "loss": 1.4196, + "step": 516 + }, + { + "epoch": 10.46, + "learning_rate": 0.0001340561224489796, + "loss": 1.4361, + "step": 517 + }, + { + "epoch": 10.48, + "learning_rate": 0.00013392857142857144, + "loss": 1.4482, + "step": 518 + }, + { + "epoch": 10.5, + "learning_rate": 0.00013380102040816327, + "loss": 1.4801, + "step": 519 + }, + { + "epoch": 10.52, + "learning_rate": 0.00013367346938775512, + "loss": 1.4556, + "step": 520 + }, + { + "epoch": 10.54, + "learning_rate": 0.00013354591836734695, + "loss": 1.3902, + "step": 521 + }, + { + "epoch": 10.56, + "learning_rate": 0.00013341836734693879, + "loss": 1.4269, + "step": 522 + }, + { + "epoch": 10.58, + "learning_rate": 0.00013329081632653062, + "loss": 1.4899, + "step": 523 + }, + { + "epoch": 10.6, + "learning_rate": 0.00013316326530612247, + "loss": 1.3952, + "step": 524 + }, + { + "epoch": 10.62, + "learning_rate": 0.00013303571428571428, + "loss": 1.4116, + "step": 525 + }, + { + "epoch": 10.64, + "learning_rate": 0.00013290816326530614, + "loss": 1.4583, + "step": 526 + }, + { + "epoch": 10.66, + "learning_rate": 0.00013278061224489797, + "loss": 1.4466, + "step": 527 + }, + { + "epoch": 10.68, + "learning_rate": 0.0001326530612244898, + "loss": 1.4242, + "step": 528 + }, + { + "epoch": 10.7, + "learning_rate": 0.00013252551020408163, + "loss": 1.3717, + "step": 529 + }, + { + "epoch": 10.72, + "learning_rate": 0.0001323979591836735, + "loss": 1.4583, + "step": 530 + }, + { + "epoch": 10.74, + "learning_rate": 0.0001322704081632653, + "loss": 1.4185, + "step": 531 + }, + { + "epoch": 10.76, + "learning_rate": 0.00013214285714285715, + "loss": 1.4287, + "step": 532 + }, + { + "epoch": 10.78, + "learning_rate": 0.00013201530612244898, + "loss": 1.4385, + "step": 533 + }, + { + "epoch": 10.8, + "learning_rate": 0.00013188775510204084, + "loss": 1.453, + "step": 534 + }, + { + "epoch": 10.83, + "learning_rate": 0.00013176020408163264, + "loss": 1.4161, + "step": 535 + }, + { + "epoch": 10.85, + "learning_rate": 0.0001316326530612245, + "loss": 1.457, + "step": 536 + }, + { + "epoch": 10.87, + "learning_rate": 0.00013150510204081633, + "loss": 1.4367, + "step": 537 + }, + { + "epoch": 10.89, + "learning_rate": 0.00013137755102040816, + "loss": 1.4256, + "step": 538 + }, + { + "epoch": 10.91, + "learning_rate": 0.00013125000000000002, + "loss": 1.424, + "step": 539 + }, + { + "epoch": 10.93, + "learning_rate": 0.00013112244897959185, + "loss": 1.3923, + "step": 540 + }, + { + "epoch": 10.95, + "learning_rate": 0.00013099489795918368, + "loss": 1.4225, + "step": 541 + }, + { + "epoch": 10.97, + "learning_rate": 0.0001308673469387755, + "loss": 1.3969, + "step": 542 + }, + { + "epoch": 10.99, + "learning_rate": 0.00013073979591836737, + "loss": 1.4446, + "step": 543 + }, + { + "epoch": 11.01, + "learning_rate": 0.00013061224489795917, + "loss": 1.4375, + "step": 544 + }, + { + "epoch": 11.03, + "learning_rate": 0.00013048469387755103, + "loss": 1.4064, + "step": 545 + }, + { + "epoch": 11.05, + "learning_rate": 0.00013035714285714286, + "loss": 1.3454, + "step": 546 + }, + { + "epoch": 11.07, + "learning_rate": 0.00013022959183673472, + "loss": 1.3234, + "step": 547 + }, + { + "epoch": 11.09, + "learning_rate": 0.00013010204081632652, + "loss": 1.3759, + "step": 548 + }, + { + "epoch": 11.11, + "learning_rate": 0.00012997448979591838, + "loss": 1.4221, + "step": 549 + }, + { + "epoch": 11.13, + "learning_rate": 0.0001298469387755102, 
+ "loss": 1.4261, + "step": 550 + }, + { + "epoch": 11.15, + "learning_rate": 0.00012971938775510204, + "loss": 1.3341, + "step": 551 + }, + { + "epoch": 11.17, + "learning_rate": 0.00012959183673469387, + "loss": 1.3994, + "step": 552 + }, + { + "epoch": 11.19, + "learning_rate": 0.00012946428571428573, + "loss": 1.3894, + "step": 553 + }, + { + "epoch": 11.21, + "learning_rate": 0.00012933673469387754, + "loss": 1.3585, + "step": 554 + }, + { + "epoch": 11.23, + "learning_rate": 0.0001292091836734694, + "loss": 1.3763, + "step": 555 + }, + { + "epoch": 11.25, + "learning_rate": 0.00012908163265306123, + "loss": 1.3623, + "step": 556 + }, + { + "epoch": 11.27, + "learning_rate": 0.00012895408163265306, + "loss": 1.3907, + "step": 557 + }, + { + "epoch": 11.29, + "learning_rate": 0.0001288265306122449, + "loss": 1.3807, + "step": 558 + }, + { + "epoch": 11.31, + "learning_rate": 0.00012869897959183674, + "loss": 1.4045, + "step": 559 + }, + { + "epoch": 11.33, + "learning_rate": 0.00012857142857142858, + "loss": 1.4038, + "step": 560 + }, + { + "epoch": 11.35, + "learning_rate": 0.0001284438775510204, + "loss": 1.3466, + "step": 561 + }, + { + "epoch": 11.37, + "learning_rate": 0.00012831632653061226, + "loss": 1.3449, + "step": 562 + }, + { + "epoch": 11.39, + "learning_rate": 0.0001281887755102041, + "loss": 1.3866, + "step": 563 + }, + { + "epoch": 11.41, + "learning_rate": 0.00012806122448979593, + "loss": 1.3106, + "step": 564 + }, + { + "epoch": 11.43, + "learning_rate": 0.00012793367346938776, + "loss": 1.4414, + "step": 565 + }, + { + "epoch": 11.45, + "learning_rate": 0.00012780612244897962, + "loss": 1.3737, + "step": 566 + }, + { + "epoch": 11.47, + "learning_rate": 0.00012767857142857142, + "loss": 1.4053, + "step": 567 + }, + { + "epoch": 11.49, + "learning_rate": 0.00012755102040816328, + "loss": 1.4561, + "step": 568 + }, + { + "epoch": 11.51, + "learning_rate": 0.0001274234693877551, + "loss": 1.3684, + "step": 569 + }, + { + "epoch": 11.53, + "learning_rate": 0.00012729591836734697, + "loss": 1.3117, + "step": 570 + }, + { + "epoch": 11.55, + "learning_rate": 0.00012716836734693877, + "loss": 1.3474, + "step": 571 + }, + { + "epoch": 11.57, + "learning_rate": 0.00012704081632653063, + "loss": 1.3804, + "step": 572 + }, + { + "epoch": 11.59, + "learning_rate": 0.00012691326530612246, + "loss": 1.3656, + "step": 573 + }, + { + "epoch": 11.61, + "learning_rate": 0.0001267857142857143, + "loss": 1.3133, + "step": 574 + }, + { + "epoch": 11.63, + "learning_rate": 0.00012665816326530612, + "loss": 1.4077, + "step": 575 + }, + { + "epoch": 11.65, + "learning_rate": 0.00012653061224489798, + "loss": 1.4087, + "step": 576 + }, + { + "epoch": 11.67, + "learning_rate": 0.00012640306122448978, + "loss": 1.3524, + "step": 577 + }, + { + "epoch": 11.7, + "learning_rate": 0.00012627551020408164, + "loss": 1.3481, + "step": 578 + }, + { + "epoch": 11.72, + "learning_rate": 0.00012614795918367347, + "loss": 1.4497, + "step": 579 + }, + { + "epoch": 11.74, + "learning_rate": 0.0001260204081632653, + "loss": 1.3866, + "step": 580 + }, + { + "epoch": 11.76, + "learning_rate": 0.00012589285714285713, + "loss": 1.42, + "step": 581 + }, + { + "epoch": 11.78, + "learning_rate": 0.000125765306122449, + "loss": 1.3562, + "step": 582 + }, + { + "epoch": 11.8, + "learning_rate": 0.00012563775510204082, + "loss": 1.3249, + "step": 583 + }, + { + "epoch": 11.82, + "learning_rate": 0.00012551020408163265, + "loss": 1.4277, + "step": 584 + }, + { + "epoch": 11.84, + "learning_rate": 
0.0001253826530612245, + "loss": 1.3734, + "step": 585 + }, + { + "epoch": 11.86, + "learning_rate": 0.00012525510204081634, + "loss": 1.3765, + "step": 586 + }, + { + "epoch": 11.88, + "learning_rate": 0.00012512755102040817, + "loss": 1.4153, + "step": 587 + }, + { + "epoch": 11.9, + "learning_rate": 0.000125, + "loss": 1.3847, + "step": 588 + }, + { + "epoch": 11.92, + "learning_rate": 0.00012487244897959186, + "loss": 1.3824, + "step": 589 + }, + { + "epoch": 11.94, + "learning_rate": 0.00012474489795918366, + "loss": 1.3938, + "step": 590 + }, + { + "epoch": 11.96, + "learning_rate": 0.00012461734693877552, + "loss": 1.4143, + "step": 591 + }, + { + "epoch": 11.98, + "learning_rate": 0.00012448979591836735, + "loss": 1.3794, + "step": 592 + }, + { + "epoch": 12.0, + "learning_rate": 0.00012436224489795918, + "loss": 1.3755, + "step": 593 + }, + { + "epoch": 12.02, + "learning_rate": 0.00012423469387755101, + "loss": 1.3736, + "step": 594 + }, + { + "epoch": 12.04, + "learning_rate": 0.00012410714285714287, + "loss": 1.2957, + "step": 595 + }, + { + "epoch": 12.06, + "learning_rate": 0.0001239795918367347, + "loss": 1.2996, + "step": 596 + }, + { + "epoch": 12.08, + "learning_rate": 0.00012385204081632653, + "loss": 1.3648, + "step": 597 + }, + { + "epoch": 12.1, + "learning_rate": 0.00012372448979591837, + "loss": 1.3031, + "step": 598 + }, + { + "epoch": 12.12, + "learning_rate": 0.00012359693877551022, + "loss": 1.2933, + "step": 599 + }, + { + "epoch": 12.14, + "learning_rate": 0.00012346938775510203, + "loss": 1.322, + "step": 600 + }, + { + "epoch": 12.16, + "learning_rate": 0.00012334183673469389, + "loss": 1.3123, + "step": 601 + }, + { + "epoch": 12.18, + "learning_rate": 0.00012321428571428572, + "loss": 1.3187, + "step": 602 + }, + { + "epoch": 12.2, + "learning_rate": 0.00012308673469387755, + "loss": 1.3353, + "step": 603 + }, + { + "epoch": 12.22, + "learning_rate": 0.0001229591836734694, + "loss": 1.3221, + "step": 604 + }, + { + "epoch": 12.24, + "learning_rate": 0.00012283163265306124, + "loss": 1.3458, + "step": 605 + }, + { + "epoch": 12.26, + "learning_rate": 0.00012270408163265307, + "loss": 1.275, + "step": 606 + }, + { + "epoch": 12.28, + "learning_rate": 0.0001225765306122449, + "loss": 1.3455, + "step": 607 + }, + { + "epoch": 12.3, + "learning_rate": 0.00012244897959183676, + "loss": 1.2769, + "step": 608 + }, + { + "epoch": 12.32, + "learning_rate": 0.00012232142857142859, + "loss": 1.3201, + "step": 609 + }, + { + "epoch": 12.34, + "learning_rate": 0.00012219387755102042, + "loss": 1.3073, + "step": 610 + }, + { + "epoch": 12.36, + "learning_rate": 0.00012206632653061225, + "loss": 1.3103, + "step": 611 + }, + { + "epoch": 12.38, + "learning_rate": 0.00012193877551020409, + "loss": 1.4437, + "step": 612 + }, + { + "epoch": 12.4, + "learning_rate": 0.00012181122448979591, + "loss": 1.3086, + "step": 613 + }, + { + "epoch": 12.42, + "learning_rate": 0.00012168367346938775, + "loss": 1.3867, + "step": 614 + }, + { + "epoch": 12.44, + "learning_rate": 0.0001215561224489796, + "loss": 1.2565, + "step": 615 + }, + { + "epoch": 12.46, + "learning_rate": 0.00012142857142857143, + "loss": 1.335, + "step": 616 + }, + { + "epoch": 12.48, + "learning_rate": 0.00012130102040816327, + "loss": 1.3423, + "step": 617 + }, + { + "epoch": 12.5, + "learning_rate": 0.00012117346938775512, + "loss": 1.3433, + "step": 618 + }, + { + "epoch": 12.52, + "learning_rate": 0.00012104591836734695, + "loss": 1.3387, + "step": 619 + }, + { + "epoch": 12.55, + "learning_rate": 
0.00012091836734693878, + "loss": 1.3923, + "step": 620 + }, + { + "epoch": 12.57, + "learning_rate": 0.00012079081632653062, + "loss": 1.3774, + "step": 621 + }, + { + "epoch": 12.59, + "learning_rate": 0.00012066326530612247, + "loss": 1.3203, + "step": 622 + }, + { + "epoch": 12.61, + "learning_rate": 0.00012053571428571429, + "loss": 1.2924, + "step": 623 + }, + { + "epoch": 12.63, + "learning_rate": 0.00012040816326530613, + "loss": 1.3292, + "step": 624 + }, + { + "epoch": 12.65, + "learning_rate": 0.00012028061224489798, + "loss": 1.3161, + "step": 625 + }, + { + "epoch": 12.67, + "learning_rate": 0.00012015306122448979, + "loss": 1.352, + "step": 626 + }, + { + "epoch": 12.69, + "learning_rate": 0.00012002551020408164, + "loss": 1.3577, + "step": 627 + }, + { + "epoch": 12.71, + "learning_rate": 0.00011989795918367348, + "loss": 1.3575, + "step": 628 + }, + { + "epoch": 12.73, + "learning_rate": 0.0001197704081632653, + "loss": 1.3727, + "step": 629 + }, + { + "epoch": 12.75, + "learning_rate": 0.00011964285714285714, + "loss": 1.3312, + "step": 630 + }, + { + "epoch": 12.77, + "learning_rate": 0.00011951530612244899, + "loss": 1.3378, + "step": 631 + }, + { + "epoch": 12.79, + "learning_rate": 0.00011938775510204083, + "loss": 1.295, + "step": 632 + }, + { + "epoch": 12.81, + "learning_rate": 0.00011926020408163265, + "loss": 1.3447, + "step": 633 + }, + { + "epoch": 12.83, + "learning_rate": 0.0001191326530612245, + "loss": 1.3835, + "step": 634 + }, + { + "epoch": 12.85, + "learning_rate": 0.00011900510204081634, + "loss": 1.3222, + "step": 635 + }, + { + "epoch": 12.87, + "learning_rate": 0.00011887755102040817, + "loss": 1.2851, + "step": 636 + }, + { + "epoch": 12.89, + "learning_rate": 0.00011875, + "loss": 1.2723, + "step": 637 + }, + { + "epoch": 12.91, + "learning_rate": 0.00011862244897959184, + "loss": 1.3924, + "step": 638 + }, + { + "epoch": 12.93, + "learning_rate": 0.00011849489795918368, + "loss": 1.4625, + "step": 639 + }, + { + "epoch": 12.95, + "learning_rate": 0.00011836734693877552, + "loss": 1.3245, + "step": 640 + }, + { + "epoch": 12.97, + "learning_rate": 0.00011823979591836736, + "loss": 1.4042, + "step": 641 + }, + { + "epoch": 12.99, + "learning_rate": 0.00011811224489795918, + "loss": 1.3761, + "step": 642 + }, + { + "epoch": 13.01, + "learning_rate": 0.00011798469387755103, + "loss": 1.3376, + "step": 643 + }, + { + "epoch": 13.03, + "learning_rate": 0.00011785714285714287, + "loss": 1.2174, + "step": 644 + }, + { + "epoch": 13.05, + "learning_rate": 0.00011772959183673471, + "loss": 1.3602, + "step": 645 + }, + { + "epoch": 13.07, + "learning_rate": 0.00011760204081632653, + "loss": 1.3002, + "step": 646 + }, + { + "epoch": 13.09, + "learning_rate": 0.00011747448979591838, + "loss": 1.2262, + "step": 647 + }, + { + "epoch": 13.11, + "learning_rate": 0.00011734693877551022, + "loss": 1.3048, + "step": 648 + }, + { + "epoch": 13.13, + "learning_rate": 0.00011721938775510204, + "loss": 1.2231, + "step": 649 + }, + { + "epoch": 13.15, + "learning_rate": 0.00011709183673469388, + "loss": 1.2996, + "step": 650 + }, + { + "epoch": 13.17, + "learning_rate": 0.00011696428571428573, + "loss": 1.2708, + "step": 651 + }, + { + "epoch": 13.19, + "learning_rate": 0.00011683673469387754, + "loss": 1.2776, + "step": 652 + }, + { + "epoch": 13.21, + "learning_rate": 0.00011670918367346939, + "loss": 1.248, + "step": 653 + }, + { + "epoch": 13.23, + "learning_rate": 0.00011658163265306123, + "loss": 1.2582, + "step": 654 + }, + { + "epoch": 13.25, + "learning_rate": 
0.00011645408163265305, + "loss": 1.3011, + "step": 655 + }, + { + "epoch": 13.27, + "learning_rate": 0.0001163265306122449, + "loss": 1.2969, + "step": 656 + }, + { + "epoch": 13.29, + "learning_rate": 0.00011619897959183674, + "loss": 1.2454, + "step": 657 + }, + { + "epoch": 13.31, + "learning_rate": 0.00011607142857142858, + "loss": 1.1914, + "step": 658 + }, + { + "epoch": 13.33, + "learning_rate": 0.00011594387755102041, + "loss": 1.34, + "step": 659 + }, + { + "epoch": 13.35, + "learning_rate": 0.00011581632653061225, + "loss": 1.2828, + "step": 660 + }, + { + "epoch": 13.37, + "learning_rate": 0.00011568877551020409, + "loss": 1.2962, + "step": 661 + }, + { + "epoch": 13.39, + "learning_rate": 0.00011556122448979592, + "loss": 1.3334, + "step": 662 + }, + { + "epoch": 13.42, + "learning_rate": 0.00011543367346938776, + "loss": 1.2832, + "step": 663 + }, + { + "epoch": 13.44, + "learning_rate": 0.00011530612244897961, + "loss": 1.3012, + "step": 664 + }, + { + "epoch": 13.46, + "learning_rate": 0.00011517857142857143, + "loss": 1.2857, + "step": 665 + }, + { + "epoch": 13.48, + "learning_rate": 0.00011505102040816327, + "loss": 1.2855, + "step": 666 + }, + { + "epoch": 13.5, + "learning_rate": 0.00011492346938775512, + "loss": 1.3077, + "step": 667 + }, + { + "epoch": 13.52, + "learning_rate": 0.00011479591836734696, + "loss": 1.3139, + "step": 668 + }, + { + "epoch": 13.54, + "learning_rate": 0.00011466836734693878, + "loss": 1.3138, + "step": 669 + }, + { + "epoch": 13.56, + "learning_rate": 0.00011454081632653062, + "loss": 1.2808, + "step": 670 + }, + { + "epoch": 13.58, + "learning_rate": 0.00011441326530612247, + "loss": 1.2492, + "step": 671 + }, + { + "epoch": 13.6, + "learning_rate": 0.00011428571428571428, + "loss": 1.2027, + "step": 672 + }, + { + "epoch": 13.62, + "learning_rate": 0.00011415816326530613, + "loss": 1.33, + "step": 673 + }, + { + "epoch": 13.64, + "learning_rate": 0.00011403061224489797, + "loss": 1.3112, + "step": 674 + }, + { + "epoch": 13.66, + "learning_rate": 0.00011390306122448979, + "loss": 1.2772, + "step": 675 + }, + { + "epoch": 13.68, + "learning_rate": 0.00011377551020408163, + "loss": 1.2701, + "step": 676 + }, + { + "epoch": 13.7, + "learning_rate": 0.00011364795918367348, + "loss": 1.1973, + "step": 677 + }, + { + "epoch": 13.72, + "learning_rate": 0.0001135204081632653, + "loss": 1.3124, + "step": 678 + }, + { + "epoch": 13.74, + "learning_rate": 0.00011339285714285714, + "loss": 1.3085, + "step": 679 + }, + { + "epoch": 13.76, + "learning_rate": 0.00011326530612244898, + "loss": 1.3457, + "step": 680 + }, + { + "epoch": 13.78, + "learning_rate": 0.00011313775510204083, + "loss": 1.3338, + "step": 681 + }, + { + "epoch": 13.8, + "learning_rate": 0.00011301020408163266, + "loss": 1.2753, + "step": 682 + }, + { + "epoch": 13.82, + "learning_rate": 0.00011288265306122449, + "loss": 1.2786, + "step": 683 + }, + { + "epoch": 13.84, + "learning_rate": 0.00011275510204081634, + "loss": 1.2584, + "step": 684 + }, + { + "epoch": 13.86, + "learning_rate": 0.00011262755102040817, + "loss": 1.2779, + "step": 685 + }, + { + "epoch": 13.88, + "learning_rate": 0.00011250000000000001, + "loss": 1.3502, + "step": 686 + }, + { + "epoch": 13.9, + "learning_rate": 0.00011237244897959185, + "loss": 1.3251, + "step": 687 + }, + { + "epoch": 13.92, + "learning_rate": 0.00011224489795918367, + "loss": 1.273, + "step": 688 + }, + { + "epoch": 13.94, + "learning_rate": 0.00011211734693877552, + "loss": 1.3341, + "step": 689 + }, + { + "epoch": 13.96, + 
"learning_rate": 0.00011198979591836736, + "loss": 1.2654, + "step": 690 + }, + { + "epoch": 13.98, + "learning_rate": 0.00011186224489795918, + "loss": 1.3333, + "step": 691 + }, + { + "epoch": 14.0, + "learning_rate": 0.00011173469387755102, + "loss": 1.3246, + "step": 692 + }, + { + "epoch": 14.02, + "learning_rate": 0.00011160714285714287, + "loss": 1.2547, + "step": 693 + }, + { + "epoch": 14.04, + "learning_rate": 0.00011147959183673471, + "loss": 1.208, + "step": 694 + }, + { + "epoch": 14.06, + "learning_rate": 0.00011135204081632653, + "loss": 1.223, + "step": 695 + }, + { + "epoch": 14.08, + "learning_rate": 0.00011122448979591837, + "loss": 1.2483, + "step": 696 + }, + { + "epoch": 14.1, + "learning_rate": 0.00011109693877551022, + "loss": 1.2823, + "step": 697 + }, + { + "epoch": 14.12, + "learning_rate": 0.00011096938775510204, + "loss": 1.2013, + "step": 698 + }, + { + "epoch": 14.14, + "learning_rate": 0.00011084183673469388, + "loss": 1.1883, + "step": 699 + }, + { + "epoch": 14.16, + "learning_rate": 0.00011071428571428572, + "loss": 1.2364, + "step": 700 + }, + { + "epoch": 14.18, + "learning_rate": 0.00011058673469387754, + "loss": 1.2069, + "step": 701 + }, + { + "epoch": 14.2, + "learning_rate": 0.00011045918367346939, + "loss": 1.1968, + "step": 702 + }, + { + "epoch": 14.22, + "learning_rate": 0.00011033163265306123, + "loss": 1.2236, + "step": 703 + }, + { + "epoch": 14.24, + "learning_rate": 0.00011020408163265306, + "loss": 1.1942, + "step": 704 + }, + { + "epoch": 14.26, + "learning_rate": 0.0001100765306122449, + "loss": 1.2561, + "step": 705 + }, + { + "epoch": 14.29, + "learning_rate": 0.00010994897959183674, + "loss": 1.1839, + "step": 706 + }, + { + "epoch": 14.31, + "learning_rate": 0.00010982142857142858, + "loss": 1.2128, + "step": 707 + }, + { + "epoch": 14.33, + "learning_rate": 0.00010969387755102041, + "loss": 1.3086, + "step": 708 + }, + { + "epoch": 14.35, + "learning_rate": 0.00010956632653061226, + "loss": 1.2379, + "step": 709 + }, + { + "epoch": 14.37, + "learning_rate": 0.0001094387755102041, + "loss": 1.176, + "step": 710 + }, + { + "epoch": 14.39, + "learning_rate": 0.00010931122448979592, + "loss": 1.2105, + "step": 711 + }, + { + "epoch": 14.41, + "learning_rate": 0.00010918367346938776, + "loss": 1.2149, + "step": 712 + }, + { + "epoch": 14.43, + "learning_rate": 0.0001090561224489796, + "loss": 1.2392, + "step": 713 + }, + { + "epoch": 14.45, + "learning_rate": 0.00010892857142857142, + "loss": 1.2471, + "step": 714 + }, + { + "epoch": 14.47, + "learning_rate": 0.00010880102040816327, + "loss": 1.2561, + "step": 715 + }, + { + "epoch": 14.49, + "learning_rate": 0.00010867346938775511, + "loss": 1.2179, + "step": 716 + }, + { + "epoch": 14.51, + "learning_rate": 0.00010854591836734696, + "loss": 1.2459, + "step": 717 + }, + { + "epoch": 14.53, + "learning_rate": 0.00010841836734693877, + "loss": 1.2933, + "step": 718 + }, + { + "epoch": 14.55, + "learning_rate": 0.00010829081632653062, + "loss": 1.2862, + "step": 719 + }, + { + "epoch": 14.57, + "learning_rate": 0.00010816326530612246, + "loss": 1.2976, + "step": 720 + }, + { + "epoch": 14.59, + "learning_rate": 0.00010803571428571428, + "loss": 1.231, + "step": 721 + }, + { + "epoch": 14.61, + "learning_rate": 0.00010790816326530613, + "loss": 1.2464, + "step": 722 + }, + { + "epoch": 14.63, + "learning_rate": 0.00010778061224489797, + "loss": 1.2181, + "step": 723 + }, + { + "epoch": 14.65, + "learning_rate": 0.00010765306122448979, + "loss": 1.3307, + "step": 724 + }, + { + "epoch": 
14.67, + "learning_rate": 0.00010752551020408163, + "loss": 1.1723, + "step": 725 + }, + { + "epoch": 14.69, + "learning_rate": 0.00010739795918367348, + "loss": 1.1528, + "step": 726 + }, + { + "epoch": 14.71, + "learning_rate": 0.0001072704081632653, + "loss": 1.215, + "step": 727 + }, + { + "epoch": 14.73, + "learning_rate": 0.00010714285714285715, + "loss": 1.2624, + "step": 728 + }, + { + "epoch": 14.75, + "learning_rate": 0.00010701530612244898, + "loss": 1.3117, + "step": 729 + }, + { + "epoch": 14.77, + "learning_rate": 0.00010688775510204083, + "loss": 1.2572, + "step": 730 + }, + { + "epoch": 14.79, + "learning_rate": 0.00010676020408163266, + "loss": 1.222, + "step": 731 + }, + { + "epoch": 14.81, + "learning_rate": 0.0001066326530612245, + "loss": 1.2881, + "step": 732 + }, + { + "epoch": 14.83, + "learning_rate": 0.00010650510204081635, + "loss": 1.2676, + "step": 733 + }, + { + "epoch": 14.85, + "learning_rate": 0.00010637755102040816, + "loss": 1.2734, + "step": 734 + }, + { + "epoch": 14.87, + "learning_rate": 0.00010625000000000001, + "loss": 1.2885, + "step": 735 + }, + { + "epoch": 14.89, + "learning_rate": 0.00010612244897959185, + "loss": 1.2764, + "step": 736 + }, + { + "epoch": 14.91, + "learning_rate": 0.00010599489795918367, + "loss": 1.3267, + "step": 737 + }, + { + "epoch": 14.93, + "learning_rate": 0.00010586734693877551, + "loss": 1.2445, + "step": 738 + }, + { + "epoch": 14.95, + "learning_rate": 0.00010573979591836736, + "loss": 1.3359, + "step": 739 + }, + { + "epoch": 14.97, + "learning_rate": 0.00010561224489795918, + "loss": 1.2508, + "step": 740 + }, + { + "epoch": 14.99, + "learning_rate": 0.00010548469387755102, + "loss": 1.2227, + "step": 741 + }, + { + "epoch": 15.01, + "learning_rate": 0.00010535714285714286, + "loss": 1.1889, + "step": 742 + }, + { + "epoch": 15.03, + "learning_rate": 0.00010522959183673471, + "loss": 1.1919, + "step": 743 + }, + { + "epoch": 15.05, + "learning_rate": 0.00010510204081632653, + "loss": 1.2383, + "step": 744 + }, + { + "epoch": 15.07, + "learning_rate": 0.00010497448979591837, + "loss": 1.2401, + "step": 745 + }, + { + "epoch": 15.09, + "learning_rate": 0.00010484693877551021, + "loss": 1.2015, + "step": 746 + }, + { + "epoch": 15.11, + "learning_rate": 0.00010471938775510203, + "loss": 1.1509, + "step": 747 + }, + { + "epoch": 15.13, + "learning_rate": 0.00010459183673469388, + "loss": 1.1878, + "step": 748 + }, + { + "epoch": 15.16, + "learning_rate": 0.00010446428571428572, + "loss": 1.1706, + "step": 749 + }, + { + "epoch": 15.18, + "learning_rate": 0.00010433673469387755, + "loss": 1.1285, + "step": 750 + }, + { + "epoch": 15.2, + "learning_rate": 0.0001042091836734694, + "loss": 1.1608, + "step": 751 + }, + { + "epoch": 15.22, + "learning_rate": 0.00010408163265306123, + "loss": 1.1178, + "step": 752 + }, + { + "epoch": 15.24, + "learning_rate": 0.00010395408163265306, + "loss": 1.1293, + "step": 753 + }, + { + "epoch": 15.26, + "learning_rate": 0.0001038265306122449, + "loss": 1.2306, + "step": 754 + }, + { + "epoch": 15.28, + "learning_rate": 0.00010369897959183675, + "loss": 1.1541, + "step": 755 + }, + { + "epoch": 15.3, + "learning_rate": 0.00010357142857142859, + "loss": 1.1702, + "step": 756 + }, + { + "epoch": 15.32, + "learning_rate": 0.00010344387755102041, + "loss": 1.2119, + "step": 757 + }, + { + "epoch": 15.34, + "learning_rate": 0.00010331632653061225, + "loss": 1.2239, + "step": 758 + }, + { + "epoch": 15.36, + "learning_rate": 0.0001031887755102041, + "loss": 1.2019, + "step": 759 + }, + { + 
"epoch": 15.38, + "learning_rate": 0.00010306122448979591, + "loss": 1.2197, + "step": 760 + }, + { + "epoch": 15.4, + "learning_rate": 0.00010293367346938776, + "loss": 1.1769, + "step": 761 + }, + { + "epoch": 15.42, + "learning_rate": 0.0001028061224489796, + "loss": 1.1907, + "step": 762 + }, + { + "epoch": 15.44, + "learning_rate": 0.00010267857142857142, + "loss": 1.2089, + "step": 763 + }, + { + "epoch": 15.46, + "learning_rate": 0.00010255102040816327, + "loss": 1.1335, + "step": 764 + }, + { + "epoch": 15.48, + "learning_rate": 0.00010242346938775511, + "loss": 1.1633, + "step": 765 + }, + { + "epoch": 15.5, + "learning_rate": 0.00010229591836734695, + "loss": 1.1578, + "step": 766 + }, + { + "epoch": 15.52, + "learning_rate": 0.00010216836734693877, + "loss": 1.2236, + "step": 767 + }, + { + "epoch": 15.54, + "learning_rate": 0.00010204081632653062, + "loss": 1.1941, + "step": 768 + }, + { + "epoch": 15.56, + "learning_rate": 0.00010191326530612246, + "loss": 1.2666, + "step": 769 + }, + { + "epoch": 15.58, + "learning_rate": 0.00010178571428571428, + "loss": 1.1232, + "step": 770 + }, + { + "epoch": 15.6, + "learning_rate": 0.00010165816326530612, + "loss": 1.2242, + "step": 771 + }, + { + "epoch": 15.62, + "learning_rate": 0.00010153061224489797, + "loss": 1.1852, + "step": 772 + }, + { + "epoch": 15.64, + "learning_rate": 0.0001014030612244898, + "loss": 1.2626, + "step": 773 + }, + { + "epoch": 15.66, + "learning_rate": 0.00010127551020408164, + "loss": 1.1873, + "step": 774 + }, + { + "epoch": 15.68, + "learning_rate": 0.00010114795918367349, + "loss": 1.3005, + "step": 775 + }, + { + "epoch": 15.7, + "learning_rate": 0.0001010204081632653, + "loss": 1.1904, + "step": 776 + }, + { + "epoch": 15.72, + "learning_rate": 0.00010089285714285715, + "loss": 1.2927, + "step": 777 + }, + { + "epoch": 15.74, + "learning_rate": 0.00010076530612244899, + "loss": 1.179, + "step": 778 + }, + { + "epoch": 15.76, + "learning_rate": 0.00010063775510204084, + "loss": 1.2027, + "step": 779 + }, + { + "epoch": 15.78, + "learning_rate": 0.00010051020408163265, + "loss": 1.2428, + "step": 780 + }, + { + "epoch": 15.8, + "learning_rate": 0.0001003826530612245, + "loss": 1.2324, + "step": 781 + }, + { + "epoch": 15.82, + "learning_rate": 0.00010025510204081634, + "loss": 1.1251, + "step": 782 + }, + { + "epoch": 15.84, + "learning_rate": 0.00010012755102040816, + "loss": 1.2405, + "step": 783 + }, + { + "epoch": 15.86, + "learning_rate": 0.0001, + "loss": 1.2005, + "step": 784 + }, + { + "epoch": 15.88, + "learning_rate": 9.987244897959184e-05, + "loss": 1.2259, + "step": 785 + }, + { + "epoch": 15.9, + "learning_rate": 9.974489795918368e-05, + "loss": 1.1576, + "step": 786 + }, + { + "epoch": 15.92, + "learning_rate": 9.961734693877551e-05, + "loss": 1.1834, + "step": 787 + }, + { + "epoch": 15.94, + "learning_rate": 9.948979591836736e-05, + "loss": 1.2396, + "step": 788 + }, + { + "epoch": 15.96, + "learning_rate": 9.936224489795919e-05, + "loss": 1.1865, + "step": 789 + }, + { + "epoch": 15.98, + "learning_rate": 9.923469387755102e-05, + "loss": 1.2356, + "step": 790 + }, + { + "epoch": 16.01, + "learning_rate": 9.910714285714286e-05, + "loss": 1.2639, + "step": 791 + }, + { + "epoch": 16.03, + "learning_rate": 9.897959183673469e-05, + "loss": 1.1216, + "step": 792 + }, + { + "epoch": 16.05, + "learning_rate": 9.885204081632652e-05, + "loss": 1.1051, + "step": 793 + }, + { + "epoch": 16.07, + "learning_rate": 9.872448979591837e-05, + "loss": 1.0864, + "step": 794 + }, + { + "epoch": 16.09, + 
"learning_rate": 9.859693877551021e-05, + "loss": 1.182, + "step": 795 + }, + { + "epoch": 16.11, + "learning_rate": 9.846938775510204e-05, + "loss": 1.1272, + "step": 796 + }, + { + "epoch": 16.13, + "learning_rate": 9.834183673469389e-05, + "loss": 1.1946, + "step": 797 + }, + { + "epoch": 16.15, + "learning_rate": 9.821428571428572e-05, + "loss": 1.0875, + "step": 798 + }, + { + "epoch": 16.17, + "learning_rate": 9.808673469387756e-05, + "loss": 1.1671, + "step": 799 + }, + { + "epoch": 16.19, + "learning_rate": 9.79591836734694e-05, + "loss": 1.1502, + "step": 800 + }, + { + "epoch": 16.21, + "learning_rate": 9.783163265306124e-05, + "loss": 1.19, + "step": 801 + }, + { + "epoch": 16.23, + "learning_rate": 9.770408163265307e-05, + "loss": 1.1258, + "step": 802 + }, + { + "epoch": 16.25, + "learning_rate": 9.75765306122449e-05, + "loss": 1.1765, + "step": 803 + }, + { + "epoch": 16.27, + "learning_rate": 9.744897959183674e-05, + "loss": 1.1217, + "step": 804 + }, + { + "epoch": 16.29, + "learning_rate": 9.732142857142858e-05, + "loss": 1.1293, + "step": 805 + }, + { + "epoch": 16.31, + "learning_rate": 9.719387755102042e-05, + "loss": 1.17, + "step": 806 + }, + { + "epoch": 16.33, + "learning_rate": 9.706632653061225e-05, + "loss": 1.17, + "step": 807 + }, + { + "epoch": 16.35, + "learning_rate": 9.693877551020408e-05, + "loss": 1.2004, + "step": 808 + }, + { + "epoch": 16.37, + "learning_rate": 9.681122448979593e-05, + "loss": 1.1648, + "step": 809 + }, + { + "epoch": 16.39, + "learning_rate": 9.668367346938776e-05, + "loss": 1.0688, + "step": 810 + }, + { + "epoch": 16.41, + "learning_rate": 9.655612244897959e-05, + "loss": 1.1607, + "step": 811 + }, + { + "epoch": 16.43, + "learning_rate": 9.642857142857143e-05, + "loss": 1.1298, + "step": 812 + }, + { + "epoch": 16.45, + "learning_rate": 9.630102040816326e-05, + "loss": 1.1064, + "step": 813 + }, + { + "epoch": 16.47, + "learning_rate": 9.617346938775511e-05, + "loss": 1.1472, + "step": 814 + }, + { + "epoch": 16.49, + "learning_rate": 9.604591836734694e-05, + "loss": 1.1577, + "step": 815 + }, + { + "epoch": 16.51, + "learning_rate": 9.591836734693878e-05, + "loss": 1.1436, + "step": 816 + }, + { + "epoch": 16.53, + "learning_rate": 9.579081632653061e-05, + "loss": 1.1657, + "step": 817 + }, + { + "epoch": 16.55, + "learning_rate": 9.566326530612246e-05, + "loss": 1.1147, + "step": 818 + }, + { + "epoch": 16.57, + "learning_rate": 9.553571428571429e-05, + "loss": 1.1839, + "step": 819 + }, + { + "epoch": 16.59, + "learning_rate": 9.540816326530613e-05, + "loss": 1.1298, + "step": 820 + }, + { + "epoch": 16.61, + "learning_rate": 9.528061224489796e-05, + "loss": 1.2141, + "step": 821 + }, + { + "epoch": 16.63, + "learning_rate": 9.515306122448981e-05, + "loss": 1.2045, + "step": 822 + }, + { + "epoch": 16.65, + "learning_rate": 9.502551020408164e-05, + "loss": 1.1791, + "step": 823 + }, + { + "epoch": 16.67, + "learning_rate": 9.489795918367348e-05, + "loss": 1.1137, + "step": 824 + }, + { + "epoch": 16.69, + "learning_rate": 9.477040816326531e-05, + "loss": 1.1312, + "step": 825 + }, + { + "epoch": 16.71, + "learning_rate": 9.464285714285715e-05, + "loss": 1.1102, + "step": 826 + }, + { + "epoch": 16.73, + "learning_rate": 9.451530612244899e-05, + "loss": 1.1865, + "step": 827 + }, + { + "epoch": 16.75, + "learning_rate": 9.438775510204082e-05, + "loss": 1.1232, + "step": 828 + }, + { + "epoch": 16.77, + "learning_rate": 9.426020408163265e-05, + "loss": 1.2068, + "step": 829 + }, + { + "epoch": 16.79, + "learning_rate": 
9.41326530612245e-05, + "loss": 1.1864, + "step": 830 + }, + { + "epoch": 16.81, + "learning_rate": 9.400510204081633e-05, + "loss": 1.2195, + "step": 831 + }, + { + "epoch": 16.83, + "learning_rate": 9.387755102040817e-05, + "loss": 1.2063, + "step": 832 + }, + { + "epoch": 16.85, + "learning_rate": 9.375e-05, + "loss": 1.1455, + "step": 833 + }, + { + "epoch": 16.88, + "learning_rate": 9.362244897959183e-05, + "loss": 1.1819, + "step": 834 + }, + { + "epoch": 16.9, + "learning_rate": 9.349489795918368e-05, + "loss": 1.1887, + "step": 835 + }, + { + "epoch": 16.92, + "learning_rate": 9.336734693877551e-05, + "loss": 1.1557, + "step": 836 + }, + { + "epoch": 16.94, + "learning_rate": 9.323979591836735e-05, + "loss": 1.2094, + "step": 837 + }, + { + "epoch": 16.96, + "learning_rate": 9.311224489795918e-05, + "loss": 1.1512, + "step": 838 + }, + { + "epoch": 16.98, + "learning_rate": 9.298469387755103e-05, + "loss": 1.1463, + "step": 839 + }, + { + "epoch": 17.0, + "learning_rate": 9.285714285714286e-05, + "loss": 1.155, + "step": 840 + }, + { + "epoch": 17.02, + "learning_rate": 9.27295918367347e-05, + "loss": 1.1292, + "step": 841 + }, + { + "epoch": 17.04, + "learning_rate": 9.260204081632653e-05, + "loss": 1.0996, + "step": 842 + }, + { + "epoch": 17.06, + "learning_rate": 9.247448979591838e-05, + "loss": 1.0662, + "step": 843 + }, + { + "epoch": 17.08, + "learning_rate": 9.234693877551021e-05, + "loss": 1.0931, + "step": 844 + }, + { + "epoch": 17.1, + "learning_rate": 9.221938775510205e-05, + "loss": 1.0727, + "step": 845 + }, + { + "epoch": 17.12, + "learning_rate": 9.209183673469388e-05, + "loss": 1.1043, + "step": 846 + }, + { + "epoch": 17.14, + "learning_rate": 9.196428571428572e-05, + "loss": 1.0594, + "step": 847 + }, + { + "epoch": 17.16, + "learning_rate": 9.183673469387756e-05, + "loss": 1.0952, + "step": 848 + }, + { + "epoch": 17.18, + "learning_rate": 9.170918367346939e-05, + "loss": 1.0639, + "step": 849 + }, + { + "epoch": 17.2, + "learning_rate": 9.158163265306124e-05, + "loss": 1.132, + "step": 850 + }, + { + "epoch": 17.22, + "learning_rate": 9.145408163265307e-05, + "loss": 1.1083, + "step": 851 + }, + { + "epoch": 17.24, + "learning_rate": 9.13265306122449e-05, + "loss": 1.1282, + "step": 852 + }, + { + "epoch": 17.26, + "learning_rate": 9.119897959183674e-05, + "loss": 1.0474, + "step": 853 + }, + { + "epoch": 17.28, + "learning_rate": 9.107142857142857e-05, + "loss": 1.1138, + "step": 854 + }, + { + "epoch": 17.3, + "learning_rate": 9.094387755102042e-05, + "loss": 1.1025, + "step": 855 + }, + { + "epoch": 17.32, + "learning_rate": 9.081632653061225e-05, + "loss": 1.0968, + "step": 856 + }, + { + "epoch": 17.34, + "learning_rate": 9.068877551020408e-05, + "loss": 1.1683, + "step": 857 + }, + { + "epoch": 17.36, + "learning_rate": 9.056122448979592e-05, + "loss": 1.0975, + "step": 858 + }, + { + "epoch": 17.38, + "learning_rate": 9.043367346938775e-05, + "loss": 1.1274, + "step": 859 + }, + { + "epoch": 17.4, + "learning_rate": 9.030612244897958e-05, + "loss": 1.0916, + "step": 860 + }, + { + "epoch": 17.42, + "learning_rate": 9.017857142857143e-05, + "loss": 1.0912, + "step": 861 + }, + { + "epoch": 17.44, + "learning_rate": 9.005102040816327e-05, + "loss": 1.0875, + "step": 862 + }, + { + "epoch": 17.46, + "learning_rate": 8.99234693877551e-05, + "loss": 1.05, + "step": 863 + }, + { + "epoch": 17.48, + "learning_rate": 8.979591836734695e-05, + "loss": 1.1418, + "step": 864 + }, + { + "epoch": 17.5, + "learning_rate": 8.966836734693878e-05, + "loss": 1.0609, + 
"step": 865 + }, + { + "epoch": 17.52, + "learning_rate": 8.954081632653062e-05, + "loss": 1.1611, + "step": 866 + }, + { + "epoch": 17.54, + "learning_rate": 8.941326530612245e-05, + "loss": 1.1065, + "step": 867 + }, + { + "epoch": 17.56, + "learning_rate": 8.92857142857143e-05, + "loss": 1.1611, + "step": 868 + }, + { + "epoch": 17.58, + "learning_rate": 8.915816326530613e-05, + "loss": 1.1398, + "step": 869 + }, + { + "epoch": 17.6, + "learning_rate": 8.903061224489796e-05, + "loss": 1.1055, + "step": 870 + }, + { + "epoch": 17.62, + "learning_rate": 8.89030612244898e-05, + "loss": 1.1314, + "step": 871 + }, + { + "epoch": 17.64, + "learning_rate": 8.877551020408164e-05, + "loss": 1.1084, + "step": 872 + }, + { + "epoch": 17.66, + "learning_rate": 8.864795918367348e-05, + "loss": 1.1254, + "step": 873 + }, + { + "epoch": 17.68, + "learning_rate": 8.852040816326531e-05, + "loss": 1.142, + "step": 874 + }, + { + "epoch": 17.7, + "learning_rate": 8.839285714285714e-05, + "loss": 1.1371, + "step": 875 + }, + { + "epoch": 17.72, + "learning_rate": 8.826530612244899e-05, + "loss": 1.1092, + "step": 876 + }, + { + "epoch": 17.75, + "learning_rate": 8.813775510204082e-05, + "loss": 1.161, + "step": 877 + }, + { + "epoch": 17.77, + "learning_rate": 8.801020408163265e-05, + "loss": 1.1044, + "step": 878 + }, + { + "epoch": 17.79, + "learning_rate": 8.788265306122449e-05, + "loss": 1.117, + "step": 879 + }, + { + "epoch": 17.81, + "learning_rate": 8.775510204081632e-05, + "loss": 1.1262, + "step": 880 + }, + { + "epoch": 17.83, + "learning_rate": 8.762755102040817e-05, + "loss": 1.0829, + "step": 881 + }, + { + "epoch": 17.85, + "learning_rate": 8.75e-05, + "loss": 1.1393, + "step": 882 + }, + { + "epoch": 17.87, + "learning_rate": 8.737244897959183e-05, + "loss": 1.1781, + "step": 883 + }, + { + "epoch": 17.89, + "learning_rate": 8.724489795918367e-05, + "loss": 1.1582, + "step": 884 + }, + { + "epoch": 17.91, + "learning_rate": 8.711734693877552e-05, + "loss": 1.1469, + "step": 885 + }, + { + "epoch": 17.93, + "learning_rate": 8.698979591836735e-05, + "loss": 1.1494, + "step": 886 + }, + { + "epoch": 17.95, + "learning_rate": 8.68622448979592e-05, + "loss": 1.1251, + "step": 887 + }, + { + "epoch": 17.97, + "learning_rate": 8.673469387755102e-05, + "loss": 1.1624, + "step": 888 + }, + { + "epoch": 17.99, + "learning_rate": 8.660714285714287e-05, + "loss": 1.0842, + "step": 889 + }, + { + "epoch": 18.01, + "learning_rate": 8.64795918367347e-05, + "loss": 1.1944, + "step": 890 + }, + { + "epoch": 18.03, + "learning_rate": 8.635204081632653e-05, + "loss": 1.0642, + "step": 891 + }, + { + "epoch": 18.05, + "learning_rate": 8.622448979591838e-05, + "loss": 1.0459, + "step": 892 + }, + { + "epoch": 18.07, + "learning_rate": 8.60969387755102e-05, + "loss": 1.0941, + "step": 893 + }, + { + "epoch": 18.09, + "learning_rate": 8.596938775510205e-05, + "loss": 1.0457, + "step": 894 + }, + { + "epoch": 18.11, + "learning_rate": 8.584183673469388e-05, + "loss": 1.1033, + "step": 895 + }, + { + "epoch": 18.13, + "learning_rate": 8.571428571428571e-05, + "loss": 1.0756, + "step": 896 + }, + { + "epoch": 18.15, + "learning_rate": 8.558673469387756e-05, + "loss": 1.0615, + "step": 897 + }, + { + "epoch": 18.17, + "learning_rate": 8.545918367346939e-05, + "loss": 1.0828, + "step": 898 + }, + { + "epoch": 18.19, + "learning_rate": 8.533163265306123e-05, + "loss": 1.1158, + "step": 899 + }, + { + "epoch": 18.21, + "learning_rate": 8.520408163265306e-05, + "loss": 1.0133, + "step": 900 + }, + { + "epoch": 18.23, + 
"learning_rate": 8.50765306122449e-05, + "loss": 1.0437, + "step": 901 + }, + { + "epoch": 18.25, + "learning_rate": 8.494897959183674e-05, + "loss": 1.0372, + "step": 902 + }, + { + "epoch": 18.27, + "learning_rate": 8.482142857142857e-05, + "loss": 1.1012, + "step": 903 + }, + { + "epoch": 18.29, + "learning_rate": 8.469387755102041e-05, + "loss": 1.0777, + "step": 904 + }, + { + "epoch": 18.31, + "learning_rate": 8.456632653061224e-05, + "loss": 1.0799, + "step": 905 + }, + { + "epoch": 18.33, + "learning_rate": 8.443877551020409e-05, + "loss": 0.9846, + "step": 906 + }, + { + "epoch": 18.35, + "learning_rate": 8.431122448979592e-05, + "loss": 1.1, + "step": 907 + }, + { + "epoch": 18.37, + "learning_rate": 8.418367346938776e-05, + "loss": 1.0787, + "step": 908 + }, + { + "epoch": 18.39, + "learning_rate": 8.40561224489796e-05, + "loss": 1.0647, + "step": 909 + }, + { + "epoch": 18.41, + "learning_rate": 8.392857142857144e-05, + "loss": 1.056, + "step": 910 + }, + { + "epoch": 18.43, + "learning_rate": 8.380102040816327e-05, + "loss": 1.1131, + "step": 911 + }, + { + "epoch": 18.45, + "learning_rate": 8.367346938775511e-05, + "loss": 1.0825, + "step": 912 + }, + { + "epoch": 18.47, + "learning_rate": 8.354591836734695e-05, + "loss": 1.0681, + "step": 913 + }, + { + "epoch": 18.49, + "learning_rate": 8.341836734693878e-05, + "loss": 1.0479, + "step": 914 + }, + { + "epoch": 18.51, + "learning_rate": 8.329081632653062e-05, + "loss": 1.0921, + "step": 915 + }, + { + "epoch": 18.53, + "learning_rate": 8.316326530612245e-05, + "loss": 1.0626, + "step": 916 + }, + { + "epoch": 18.55, + "learning_rate": 8.30357142857143e-05, + "loss": 1.0518, + "step": 917 + }, + { + "epoch": 18.57, + "learning_rate": 8.290816326530613e-05, + "loss": 1.0557, + "step": 918 + }, + { + "epoch": 18.6, + "learning_rate": 8.278061224489796e-05, + "loss": 1.0831, + "step": 919 + }, + { + "epoch": 18.62, + "learning_rate": 8.26530612244898e-05, + "loss": 1.0307, + "step": 920 + }, + { + "epoch": 18.64, + "learning_rate": 8.252551020408163e-05, + "loss": 1.0455, + "step": 921 + }, + { + "epoch": 18.66, + "learning_rate": 8.239795918367348e-05, + "loss": 1.0667, + "step": 922 + }, + { + "epoch": 18.68, + "learning_rate": 8.227040816326531e-05, + "loss": 1.0736, + "step": 923 + }, + { + "epoch": 18.7, + "learning_rate": 8.214285714285714e-05, + "loss": 1.0108, + "step": 924 + }, + { + "epoch": 18.72, + "learning_rate": 8.201530612244898e-05, + "loss": 1.0458, + "step": 925 + }, + { + "epoch": 18.74, + "learning_rate": 8.188775510204081e-05, + "loss": 1.0852, + "step": 926 + }, + { + "epoch": 18.76, + "learning_rate": 8.176020408163265e-05, + "loss": 1.1207, + "step": 927 + }, + { + "epoch": 18.78, + "learning_rate": 8.163265306122449e-05, + "loss": 1.0914, + "step": 928 + }, + { + "epoch": 18.8, + "learning_rate": 8.150510204081633e-05, + "loss": 1.1108, + "step": 929 + }, + { + "epoch": 18.82, + "learning_rate": 8.137755102040817e-05, + "loss": 1.1394, + "step": 930 + }, + { + "epoch": 18.84, + "learning_rate": 8.125000000000001e-05, + "loss": 1.029, + "step": 931 + }, + { + "epoch": 18.86, + "learning_rate": 8.112244897959184e-05, + "loss": 1.0661, + "step": 932 + }, + { + "epoch": 18.88, + "learning_rate": 8.099489795918369e-05, + "loss": 1.0303, + "step": 933 + }, + { + "epoch": 18.9, + "learning_rate": 8.086734693877552e-05, + "loss": 1.1144, + "step": 934 + }, + { + "epoch": 18.92, + "learning_rate": 8.073979591836736e-05, + "loss": 1.1096, + "step": 935 + }, + { + "epoch": 18.94, + "learning_rate": 
8.061224489795919e-05, + "loss": 1.123, + "step": 936 + }, + { + "epoch": 18.96, + "learning_rate": 8.048469387755102e-05, + "loss": 1.1002, + "step": 937 + }, + { + "epoch": 18.98, + "learning_rate": 8.035714285714287e-05, + "loss": 1.1016, + "step": 938 + }, + { + "epoch": 19.0, + "learning_rate": 8.02295918367347e-05, + "loss": 1.0847, + "step": 939 + }, + { + "epoch": 19.02, + "learning_rate": 8.010204081632653e-05, + "loss": 1.1029, + "step": 940 + }, + { + "epoch": 19.04, + "learning_rate": 7.997448979591837e-05, + "loss": 1.041, + "step": 941 + }, + { + "epoch": 19.06, + "learning_rate": 7.98469387755102e-05, + "loss": 1.01, + "step": 942 + }, + { + "epoch": 19.08, + "learning_rate": 7.971938775510205e-05, + "loss": 1.0197, + "step": 943 + }, + { + "epoch": 19.1, + "learning_rate": 7.959183673469388e-05, + "loss": 1.0543, + "step": 944 + }, + { + "epoch": 19.12, + "learning_rate": 7.946428571428571e-05, + "loss": 1.0369, + "step": 945 + }, + { + "epoch": 19.14, + "learning_rate": 7.933673469387755e-05, + "loss": 1.0154, + "step": 946 + }, + { + "epoch": 19.16, + "learning_rate": 7.920918367346939e-05, + "loss": 0.9546, + "step": 947 + }, + { + "epoch": 19.18, + "learning_rate": 7.908163265306123e-05, + "loss": 0.9982, + "step": 948 + }, + { + "epoch": 19.2, + "learning_rate": 7.895408163265306e-05, + "loss": 1.0748, + "step": 949 + }, + { + "epoch": 19.22, + "learning_rate": 7.882653061224489e-05, + "loss": 1.0562, + "step": 950 + }, + { + "epoch": 19.24, + "learning_rate": 7.869897959183674e-05, + "loss": 1.0352, + "step": 951 + }, + { + "epoch": 19.26, + "learning_rate": 7.857142857142858e-05, + "loss": 0.9976, + "step": 952 + }, + { + "epoch": 19.28, + "learning_rate": 7.844387755102041e-05, + "loss": 1.0221, + "step": 953 + }, + { + "epoch": 19.3, + "learning_rate": 7.831632653061226e-05, + "loss": 1.0119, + "step": 954 + }, + { + "epoch": 19.32, + "learning_rate": 7.818877551020409e-05, + "loss": 1.0657, + "step": 955 + }, + { + "epoch": 19.34, + "learning_rate": 7.806122448979593e-05, + "loss": 0.9591, + "step": 956 + }, + { + "epoch": 19.36, + "learning_rate": 7.793367346938776e-05, + "loss": 1.0101, + "step": 957 + }, + { + "epoch": 19.38, + "learning_rate": 7.780612244897959e-05, + "loss": 1.0453, + "step": 958 + }, + { + "epoch": 19.4, + "learning_rate": 7.767857142857144e-05, + "loss": 1.0461, + "step": 959 + }, + { + "epoch": 19.42, + "learning_rate": 7.755102040816327e-05, + "loss": 1.0959, + "step": 960 + }, + { + "epoch": 19.44, + "learning_rate": 7.742346938775511e-05, + "loss": 1.0608, + "step": 961 + }, + { + "epoch": 19.47, + "learning_rate": 7.729591836734694e-05, + "loss": 1.1177, + "step": 962 + }, + { + "epoch": 19.49, + "learning_rate": 7.716836734693877e-05, + "loss": 1.0354, + "step": 963 + }, + { + "epoch": 19.51, + "learning_rate": 7.704081632653062e-05, + "loss": 1.0507, + "step": 964 + }, + { + "epoch": 19.53, + "learning_rate": 7.691326530612245e-05, + "loss": 1.0313, + "step": 965 + }, + { + "epoch": 19.55, + "learning_rate": 7.67857142857143e-05, + "loss": 1.0569, + "step": 966 + }, + { + "epoch": 19.57, + "learning_rate": 7.665816326530612e-05, + "loss": 1.0862, + "step": 967 + }, + { + "epoch": 19.59, + "learning_rate": 7.653061224489796e-05, + "loss": 1.0593, + "step": 968 + }, + { + "epoch": 19.61, + "learning_rate": 7.64030612244898e-05, + "loss": 1.0602, + "step": 969 + }, + { + "epoch": 19.63, + "learning_rate": 7.627551020408163e-05, + "loss": 1.0048, + "step": 970 + }, + { + "epoch": 19.65, + "learning_rate": 7.614795918367347e-05, + "loss": 
1.0346, + "step": 971 + }, + { + "epoch": 19.67, + "learning_rate": 7.60204081632653e-05, + "loss": 1.0172, + "step": 972 + }, + { + "epoch": 19.69, + "learning_rate": 7.589285714285714e-05, + "loss": 1.02, + "step": 973 + }, + { + "epoch": 19.71, + "learning_rate": 7.576530612244898e-05, + "loss": 1.0028, + "step": 974 + }, + { + "epoch": 19.73, + "learning_rate": 7.563775510204083e-05, + "loss": 1.08, + "step": 975 + }, + { + "epoch": 19.75, + "learning_rate": 7.551020408163266e-05, + "loss": 1.0402, + "step": 976 + }, + { + "epoch": 19.77, + "learning_rate": 7.53826530612245e-05, + "loss": 1.0567, + "step": 977 + }, + { + "epoch": 19.79, + "learning_rate": 7.525510204081633e-05, + "loss": 1.0169, + "step": 978 + }, + { + "epoch": 19.81, + "learning_rate": 7.512755102040818e-05, + "loss": 0.9881, + "step": 979 + }, + { + "epoch": 19.83, + "learning_rate": 7.500000000000001e-05, + "loss": 1.0677, + "step": 980 + }, + { + "epoch": 19.85, + "learning_rate": 7.487244897959184e-05, + "loss": 1.1026, + "step": 981 + }, + { + "epoch": 19.87, + "learning_rate": 7.474489795918368e-05, + "loss": 1.0101, + "step": 982 + }, + { + "epoch": 19.89, + "learning_rate": 7.461734693877551e-05, + "loss": 1.069, + "step": 983 + }, + { + "epoch": 19.91, + "learning_rate": 7.448979591836736e-05, + "loss": 1.0493, + "step": 984 + }, + { + "epoch": 19.93, + "learning_rate": 7.436224489795919e-05, + "loss": 1.0858, + "step": 985 + }, + { + "epoch": 19.95, + "learning_rate": 7.423469387755102e-05, + "loss": 1.0734, + "step": 986 + }, + { + "epoch": 19.97, + "learning_rate": 7.410714285714286e-05, + "loss": 1.0203, + "step": 987 + }, + { + "epoch": 19.99, + "learning_rate": 7.39795918367347e-05, + "loss": 1.0285, + "step": 988 + }, + { + "epoch": 20.01, + "learning_rate": 7.385204081632653e-05, + "loss": 0.9446, + "step": 989 + }, + { + "epoch": 20.03, + "learning_rate": 7.372448979591837e-05, + "loss": 0.9915, + "step": 990 + }, + { + "epoch": 20.05, + "learning_rate": 7.35969387755102e-05, + "loss": 0.9882, + "step": 991 + }, + { + "epoch": 20.07, + "learning_rate": 7.346938775510205e-05, + "loss": 0.9338, + "step": 992 + }, + { + "epoch": 20.09, + "learning_rate": 7.334183673469388e-05, + "loss": 0.942, + "step": 993 + }, + { + "epoch": 20.11, + "learning_rate": 7.321428571428571e-05, + "loss": 0.9725, + "step": 994 + }, + { + "epoch": 20.13, + "learning_rate": 7.308673469387755e-05, + "loss": 1.027, + "step": 995 + }, + { + "epoch": 20.15, + "learning_rate": 7.29591836734694e-05, + "loss": 1.0081, + "step": 996 + }, + { + "epoch": 20.17, + "learning_rate": 7.283163265306123e-05, + "loss": 1.0117, + "step": 997 + }, + { + "epoch": 20.19, + "learning_rate": 7.270408163265307e-05, + "loss": 0.969, + "step": 998 + }, + { + "epoch": 20.21, + "learning_rate": 7.25765306122449e-05, + "loss": 1.0024, + "step": 999 + }, + { + "epoch": 20.23, + "learning_rate": 7.244897959183675e-05, + "loss": 0.994, + "step": 1000 + }, + { + "epoch": 20.25, + "learning_rate": 7.232142857142858e-05, + "loss": 1.0248, + "step": 1001 + }, + { + "epoch": 20.27, + "learning_rate": 7.219387755102042e-05, + "loss": 1.0493, + "step": 1002 + }, + { + "epoch": 20.29, + "learning_rate": 7.206632653061225e-05, + "loss": 1.0011, + "step": 1003 + }, + { + "epoch": 20.31, + "learning_rate": 7.193877551020408e-05, + "loss": 0.9874, + "step": 1004 + }, + { + "epoch": 20.34, + "learning_rate": 7.181122448979593e-05, + "loss": 1.0049, + "step": 1005 + }, + { + "epoch": 20.36, + "learning_rate": 7.168367346938776e-05, + "loss": 1.0314, + "step": 1006 + }, 
+ { + "epoch": 20.38, + "learning_rate": 7.155612244897959e-05, + "loss": 0.9742, + "step": 1007 + }, + { + "epoch": 20.4, + "learning_rate": 7.142857142857143e-05, + "loss": 1.0621, + "step": 1008 + }, + { + "epoch": 20.42, + "learning_rate": 7.130102040816326e-05, + "loss": 0.9672, + "step": 1009 + }, + { + "epoch": 20.44, + "learning_rate": 7.117346938775511e-05, + "loss": 1.0018, + "step": 1010 + }, + { + "epoch": 20.46, + "learning_rate": 7.104591836734694e-05, + "loss": 1.0045, + "step": 1011 + }, + { + "epoch": 20.48, + "learning_rate": 7.091836734693877e-05, + "loss": 0.9675, + "step": 1012 + }, + { + "epoch": 20.5, + "learning_rate": 7.079081632653062e-05, + "loss": 0.976, + "step": 1013 + }, + { + "epoch": 20.52, + "learning_rate": 7.066326530612245e-05, + "loss": 1.0523, + "step": 1014 + }, + { + "epoch": 20.54, + "learning_rate": 7.053571428571429e-05, + "loss": 1.052, + "step": 1015 + }, + { + "epoch": 20.56, + "learning_rate": 7.040816326530612e-05, + "loss": 0.9903, + "step": 1016 + }, + { + "epoch": 20.58, + "learning_rate": 7.028061224489795e-05, + "loss": 1.0337, + "step": 1017 + }, + { + "epoch": 20.6, + "learning_rate": 7.01530612244898e-05, + "loss": 1.1122, + "step": 1018 + }, + { + "epoch": 20.62, + "learning_rate": 7.002551020408164e-05, + "loss": 1.0133, + "step": 1019 + }, + { + "epoch": 20.64, + "learning_rate": 6.989795918367347e-05, + "loss": 0.9588, + "step": 1020 + }, + { + "epoch": 20.66, + "learning_rate": 6.977040816326532e-05, + "loss": 0.9892, + "step": 1021 + }, + { + "epoch": 20.68, + "learning_rate": 6.964285714285715e-05, + "loss": 1.025, + "step": 1022 + }, + { + "epoch": 20.7, + "learning_rate": 6.951530612244899e-05, + "loss": 1.0196, + "step": 1023 + }, + { + "epoch": 20.72, + "learning_rate": 6.938775510204082e-05, + "loss": 1.0146, + "step": 1024 + }, + { + "epoch": 20.74, + "learning_rate": 6.926020408163265e-05, + "loss": 1.0656, + "step": 1025 + }, + { + "epoch": 20.76, + "learning_rate": 6.91326530612245e-05, + "loss": 0.9584, + "step": 1026 + }, + { + "epoch": 20.78, + "learning_rate": 6.900510204081633e-05, + "loss": 0.9877, + "step": 1027 + }, + { + "epoch": 20.8, + "learning_rate": 6.887755102040817e-05, + "loss": 1.0607, + "step": 1028 + }, + { + "epoch": 20.82, + "learning_rate": 6.875e-05, + "loss": 0.9969, + "step": 1029 + }, + { + "epoch": 20.84, + "learning_rate": 6.862244897959184e-05, + "loss": 0.9506, + "step": 1030 + }, + { + "epoch": 20.86, + "learning_rate": 6.849489795918368e-05, + "loss": 1.0576, + "step": 1031 + }, + { + "epoch": 20.88, + "learning_rate": 6.836734693877551e-05, + "loss": 1.0094, + "step": 1032 + }, + { + "epoch": 20.9, + "learning_rate": 6.823979591836735e-05, + "loss": 0.9872, + "step": 1033 + }, + { + "epoch": 20.92, + "learning_rate": 6.811224489795919e-05, + "loss": 1.0544, + "step": 1034 + }, + { + "epoch": 20.94, + "learning_rate": 6.798469387755102e-05, + "loss": 1.0194, + "step": 1035 + }, + { + "epoch": 20.96, + "learning_rate": 6.785714285714286e-05, + "loss": 1.0009, + "step": 1036 + }, + { + "epoch": 20.98, + "learning_rate": 6.772959183673469e-05, + "loss": 0.9727, + "step": 1037 + }, + { + "epoch": 21.0, + "learning_rate": 6.760204081632652e-05, + "loss": 0.9754, + "step": 1038 + }, + { + "epoch": 21.02, + "learning_rate": 6.747448979591837e-05, + "loss": 0.9953, + "step": 1039 + }, + { + "epoch": 21.04, + "learning_rate": 6.73469387755102e-05, + "loss": 0.9307, + "step": 1040 + }, + { + "epoch": 21.06, + "learning_rate": 6.721938775510204e-05, + "loss": 0.9151, + "step": 1041 + }, + { + 
"epoch": 21.08, + "learning_rate": 6.709183673469389e-05, + "loss": 0.9474, + "step": 1042 + }, + { + "epoch": 21.1, + "learning_rate": 6.696428571428572e-05, + "loss": 0.9697, + "step": 1043 + }, + { + "epoch": 21.12, + "learning_rate": 6.683673469387756e-05, + "loss": 0.9423, + "step": 1044 + }, + { + "epoch": 21.14, + "learning_rate": 6.670918367346939e-05, + "loss": 0.9797, + "step": 1045 + }, + { + "epoch": 21.16, + "learning_rate": 6.658163265306124e-05, + "loss": 0.919, + "step": 1046 + }, + { + "epoch": 21.18, + "learning_rate": 6.645408163265307e-05, + "loss": 0.9743, + "step": 1047 + }, + { + "epoch": 21.21, + "learning_rate": 6.63265306122449e-05, + "loss": 0.9575, + "step": 1048 + }, + { + "epoch": 21.23, + "learning_rate": 6.619897959183674e-05, + "loss": 0.9861, + "step": 1049 + }, + { + "epoch": 21.25, + "learning_rate": 6.607142857142857e-05, + "loss": 0.9103, + "step": 1050 + }, + { + "epoch": 21.27, + "learning_rate": 6.594387755102042e-05, + "loss": 0.993, + "step": 1051 + }, + { + "epoch": 21.29, + "learning_rate": 6.581632653061225e-05, + "loss": 0.9668, + "step": 1052 + }, + { + "epoch": 21.31, + "learning_rate": 6.568877551020408e-05, + "loss": 1.0008, + "step": 1053 + }, + { + "epoch": 21.33, + "learning_rate": 6.556122448979592e-05, + "loss": 0.9825, + "step": 1054 + }, + { + "epoch": 21.35, + "learning_rate": 6.543367346938776e-05, + "loss": 1.0174, + "step": 1055 + }, + { + "epoch": 21.37, + "learning_rate": 6.530612244897959e-05, + "loss": 0.9685, + "step": 1056 + }, + { + "epoch": 21.39, + "learning_rate": 6.517857142857143e-05, + "loss": 0.9265, + "step": 1057 + }, + { + "epoch": 21.41, + "learning_rate": 6.505102040816326e-05, + "loss": 0.9495, + "step": 1058 + }, + { + "epoch": 21.43, + "learning_rate": 6.49234693877551e-05, + "loss": 0.9541, + "step": 1059 + }, + { + "epoch": 21.45, + "learning_rate": 6.479591836734694e-05, + "loss": 0.9299, + "step": 1060 + }, + { + "epoch": 21.47, + "learning_rate": 6.466836734693877e-05, + "loss": 0.9625, + "step": 1061 + }, + { + "epoch": 21.49, + "learning_rate": 6.454081632653061e-05, + "loss": 1.0054, + "step": 1062 + }, + { + "epoch": 21.51, + "learning_rate": 6.441326530612244e-05, + "loss": 0.9893, + "step": 1063 + }, + { + "epoch": 21.53, + "learning_rate": 6.428571428571429e-05, + "loss": 0.9906, + "step": 1064 + }, + { + "epoch": 21.55, + "learning_rate": 6.415816326530613e-05, + "loss": 0.9487, + "step": 1065 + }, + { + "epoch": 21.57, + "learning_rate": 6.403061224489796e-05, + "loss": 0.9728, + "step": 1066 + }, + { + "epoch": 21.59, + "learning_rate": 6.390306122448981e-05, + "loss": 0.9883, + "step": 1067 + }, + { + "epoch": 21.61, + "learning_rate": 6.377551020408164e-05, + "loss": 1.053, + "step": 1068 + }, + { + "epoch": 21.63, + "learning_rate": 6.364795918367348e-05, + "loss": 1.012, + "step": 1069 + }, + { + "epoch": 21.65, + "learning_rate": 6.352040816326531e-05, + "loss": 0.962, + "step": 1070 + }, + { + "epoch": 21.67, + "learning_rate": 6.339285714285714e-05, + "loss": 0.9955, + "step": 1071 + }, + { + "epoch": 21.69, + "learning_rate": 6.326530612244899e-05, + "loss": 0.9908, + "step": 1072 + }, + { + "epoch": 21.71, + "learning_rate": 6.313775510204082e-05, + "loss": 1.0327, + "step": 1073 + }, + { + "epoch": 21.73, + "learning_rate": 6.301020408163265e-05, + "loss": 0.9255, + "step": 1074 + }, + { + "epoch": 21.75, + "learning_rate": 6.28826530612245e-05, + "loss": 0.9268, + "step": 1075 + }, + { + "epoch": 21.77, + "learning_rate": 6.275510204081633e-05, + "loss": 0.9204, + "step": 1076 + }, 
+ { + "epoch": 21.79, + "learning_rate": 6.262755102040817e-05, + "loss": 0.9838, + "step": 1077 + }, + { + "epoch": 21.81, + "learning_rate": 6.25e-05, + "loss": 0.954, + "step": 1078 + }, + { + "epoch": 21.83, + "learning_rate": 6.237244897959183e-05, + "loss": 1.0102, + "step": 1079 + }, + { + "epoch": 21.85, + "learning_rate": 6.224489795918368e-05, + "loss": 0.916, + "step": 1080 + }, + { + "epoch": 21.87, + "learning_rate": 6.211734693877551e-05, + "loss": 0.9939, + "step": 1081 + }, + { + "epoch": 21.89, + "learning_rate": 6.198979591836735e-05, + "loss": 0.9675, + "step": 1082 + }, + { + "epoch": 21.91, + "learning_rate": 6.186224489795918e-05, + "loss": 0.9666, + "step": 1083 + }, + { + "epoch": 21.93, + "learning_rate": 6.173469387755101e-05, + "loss": 0.9919, + "step": 1084 + }, + { + "epoch": 21.95, + "learning_rate": 6.160714285714286e-05, + "loss": 1.0106, + "step": 1085 + }, + { + "epoch": 21.97, + "learning_rate": 6.14795918367347e-05, + "loss": 0.9982, + "step": 1086 + }, + { + "epoch": 21.99, + "learning_rate": 6.135204081632653e-05, + "loss": 1.0137, + "step": 1087 + }, + { + "epoch": 22.01, + "learning_rate": 6.122448979591838e-05, + "loss": 0.9331, + "step": 1088 + }, + { + "epoch": 22.03, + "learning_rate": 6.109693877551021e-05, + "loss": 0.8834, + "step": 1089 + }, + { + "epoch": 22.06, + "learning_rate": 6.0969387755102046e-05, + "loss": 0.9757, + "step": 1090 + }, + { + "epoch": 22.08, + "learning_rate": 6.084183673469388e-05, + "loss": 0.9038, + "step": 1091 + }, + { + "epoch": 22.1, + "learning_rate": 6.0714285714285715e-05, + "loss": 0.9097, + "step": 1092 + }, + { + "epoch": 22.12, + "learning_rate": 6.058673469387756e-05, + "loss": 0.8972, + "step": 1093 + }, + { + "epoch": 22.14, + "learning_rate": 6.045918367346939e-05, + "loss": 0.8825, + "step": 1094 + }, + { + "epoch": 22.16, + "learning_rate": 6.0331632653061234e-05, + "loss": 0.9814, + "step": 1095 + }, + { + "epoch": 22.18, + "learning_rate": 6.0204081632653065e-05, + "loss": 0.9874, + "step": 1096 + }, + { + "epoch": 22.2, + "learning_rate": 6.0076530612244896e-05, + "loss": 0.912, + "step": 1097 + }, + { + "epoch": 22.22, + "learning_rate": 5.994897959183674e-05, + "loss": 0.9206, + "step": 1098 + }, + { + "epoch": 22.24, + "learning_rate": 5.982142857142857e-05, + "loss": 0.9497, + "step": 1099 + }, + { + "epoch": 22.26, + "learning_rate": 5.9693877551020416e-05, + "loss": 0.9269, + "step": 1100 + }, + { + "epoch": 22.28, + "learning_rate": 5.956632653061225e-05, + "loss": 0.9452, + "step": 1101 + }, + { + "epoch": 22.3, + "learning_rate": 5.9438775510204084e-05, + "loss": 0.9548, + "step": 1102 + }, + { + "epoch": 22.32, + "learning_rate": 5.931122448979592e-05, + "loss": 0.9689, + "step": 1103 + }, + { + "epoch": 22.34, + "learning_rate": 5.918367346938776e-05, + "loss": 0.9455, + "step": 1104 + }, + { + "epoch": 22.36, + "learning_rate": 5.905612244897959e-05, + "loss": 0.9409, + "step": 1105 + }, + { + "epoch": 22.38, + "learning_rate": 5.8928571428571435e-05, + "loss": 0.9093, + "step": 1106 + }, + { + "epoch": 22.4, + "learning_rate": 5.8801020408163266e-05, + "loss": 0.921, + "step": 1107 + }, + { + "epoch": 22.42, + "learning_rate": 5.867346938775511e-05, + "loss": 0.9368, + "step": 1108 + }, + { + "epoch": 22.44, + "learning_rate": 5.854591836734694e-05, + "loss": 0.907, + "step": 1109 + }, + { + "epoch": 22.46, + "learning_rate": 5.841836734693877e-05, + "loss": 0.9126, + "step": 1110 + }, + { + "epoch": 22.48, + "learning_rate": 5.8290816326530616e-05, + "loss": 0.9161, + "step": 1111 + 
}, + { + "epoch": 22.5, + "learning_rate": 5.816326530612245e-05, + "loss": 0.9542, + "step": 1112 + }, + { + "epoch": 22.52, + "learning_rate": 5.803571428571429e-05, + "loss": 0.9775, + "step": 1113 + }, + { + "epoch": 22.54, + "learning_rate": 5.790816326530612e-05, + "loss": 1.0006, + "step": 1114 + }, + { + "epoch": 22.56, + "learning_rate": 5.778061224489796e-05, + "loss": 0.8965, + "step": 1115 + }, + { + "epoch": 22.58, + "learning_rate": 5.7653061224489805e-05, + "loss": 0.944, + "step": 1116 + }, + { + "epoch": 22.6, + "learning_rate": 5.7525510204081636e-05, + "loss": 0.9162, + "step": 1117 + }, + { + "epoch": 22.62, + "learning_rate": 5.739795918367348e-05, + "loss": 0.9325, + "step": 1118 + }, + { + "epoch": 22.64, + "learning_rate": 5.727040816326531e-05, + "loss": 0.8998, + "step": 1119 + }, + { + "epoch": 22.66, + "learning_rate": 5.714285714285714e-05, + "loss": 0.9362, + "step": 1120 + }, + { + "epoch": 22.68, + "learning_rate": 5.7015306122448986e-05, + "loss": 0.9969, + "step": 1121 + }, + { + "epoch": 22.7, + "learning_rate": 5.688775510204082e-05, + "loss": 0.9104, + "step": 1122 + }, + { + "epoch": 22.72, + "learning_rate": 5.676020408163265e-05, + "loss": 0.9746, + "step": 1123 + }, + { + "epoch": 22.74, + "learning_rate": 5.663265306122449e-05, + "loss": 0.9821, + "step": 1124 + }, + { + "epoch": 22.76, + "learning_rate": 5.650510204081633e-05, + "loss": 0.9526, + "step": 1125 + }, + { + "epoch": 22.78, + "learning_rate": 5.637755102040817e-05, + "loss": 0.871, + "step": 1126 + }, + { + "epoch": 22.8, + "learning_rate": 5.6250000000000005e-05, + "loss": 0.9534, + "step": 1127 + }, + { + "epoch": 22.82, + "learning_rate": 5.6122448979591836e-05, + "loss": 0.9616, + "step": 1128 + }, + { + "epoch": 22.84, + "learning_rate": 5.599489795918368e-05, + "loss": 0.9627, + "step": 1129 + }, + { + "epoch": 22.86, + "learning_rate": 5.586734693877551e-05, + "loss": 0.9704, + "step": 1130 + }, + { + "epoch": 22.88, + "learning_rate": 5.5739795918367356e-05, + "loss": 0.9506, + "step": 1131 + }, + { + "epoch": 22.9, + "learning_rate": 5.561224489795919e-05, + "loss": 0.9553, + "step": 1132 + }, + { + "epoch": 22.93, + "learning_rate": 5.548469387755102e-05, + "loss": 0.9294, + "step": 1133 + }, + { + "epoch": 22.95, + "learning_rate": 5.535714285714286e-05, + "loss": 0.8979, + "step": 1134 + }, + { + "epoch": 22.97, + "learning_rate": 5.522959183673469e-05, + "loss": 1.0004, + "step": 1135 + }, + { + "epoch": 22.99, + "learning_rate": 5.510204081632653e-05, + "loss": 0.9821, + "step": 1136 + }, + { + "epoch": 23.01, + "learning_rate": 5.497448979591837e-05, + "loss": 0.9607, + "step": 1137 + }, + { + "epoch": 23.03, + "learning_rate": 5.4846938775510206e-05, + "loss": 0.9757, + "step": 1138 + }, + { + "epoch": 23.05, + "learning_rate": 5.471938775510205e-05, + "loss": 0.9096, + "step": 1139 + }, + { + "epoch": 23.07, + "learning_rate": 5.459183673469388e-05, + "loss": 0.9144, + "step": 1140 + }, + { + "epoch": 23.09, + "learning_rate": 5.446428571428571e-05, + "loss": 0.8667, + "step": 1141 + }, + { + "epoch": 23.11, + "learning_rate": 5.4336734693877556e-05, + "loss": 0.8993, + "step": 1142 + }, + { + "epoch": 23.13, + "learning_rate": 5.420918367346939e-05, + "loss": 0.8964, + "step": 1143 + }, + { + "epoch": 23.15, + "learning_rate": 5.408163265306123e-05, + "loss": 0.9173, + "step": 1144 + }, + { + "epoch": 23.17, + "learning_rate": 5.395408163265306e-05, + "loss": 0.9019, + "step": 1145 + }, + { + "epoch": 23.19, + "learning_rate": 5.382653061224489e-05, + "loss": 0.9303, 
+ "step": 1146 + }, + { + "epoch": 23.21, + "learning_rate": 5.369897959183674e-05, + "loss": 0.9268, + "step": 1147 + }, + { + "epoch": 23.23, + "learning_rate": 5.3571428571428575e-05, + "loss": 0.8803, + "step": 1148 + }, + { + "epoch": 23.25, + "learning_rate": 5.344387755102041e-05, + "loss": 0.9197, + "step": 1149 + }, + { + "epoch": 23.27, + "learning_rate": 5.331632653061225e-05, + "loss": 0.9204, + "step": 1150 + }, + { + "epoch": 23.29, + "learning_rate": 5.318877551020408e-05, + "loss": 0.8802, + "step": 1151 + }, + { + "epoch": 23.31, + "learning_rate": 5.3061224489795926e-05, + "loss": 0.9044, + "step": 1152 + }, + { + "epoch": 23.33, + "learning_rate": 5.293367346938776e-05, + "loss": 0.8893, + "step": 1153 + }, + { + "epoch": 23.35, + "learning_rate": 5.280612244897959e-05, + "loss": 0.8928, + "step": 1154 + }, + { + "epoch": 23.37, + "learning_rate": 5.267857142857143e-05, + "loss": 0.9353, + "step": 1155 + }, + { + "epoch": 23.39, + "learning_rate": 5.255102040816326e-05, + "loss": 0.9345, + "step": 1156 + }, + { + "epoch": 23.41, + "learning_rate": 5.242346938775511e-05, + "loss": 0.9372, + "step": 1157 + }, + { + "epoch": 23.43, + "learning_rate": 5.229591836734694e-05, + "loss": 0.9234, + "step": 1158 + }, + { + "epoch": 23.45, + "learning_rate": 5.2168367346938776e-05, + "loss": 0.9177, + "step": 1159 + }, + { + "epoch": 23.47, + "learning_rate": 5.2040816326530614e-05, + "loss": 0.8757, + "step": 1160 + }, + { + "epoch": 23.49, + "learning_rate": 5.191326530612245e-05, + "loss": 0.9048, + "step": 1161 + }, + { + "epoch": 23.51, + "learning_rate": 5.1785714285714296e-05, + "loss": 0.9248, + "step": 1162 + }, + { + "epoch": 23.53, + "learning_rate": 5.1658163265306127e-05, + "loss": 0.9379, + "step": 1163 + }, + { + "epoch": 23.55, + "learning_rate": 5.153061224489796e-05, + "loss": 0.8596, + "step": 1164 + }, + { + "epoch": 23.57, + "learning_rate": 5.14030612244898e-05, + "loss": 0.9751, + "step": 1165 + }, + { + "epoch": 23.59, + "learning_rate": 5.127551020408163e-05, + "loss": 0.8842, + "step": 1166 + }, + { + "epoch": 23.61, + "learning_rate": 5.114795918367348e-05, + "loss": 0.8765, + "step": 1167 + }, + { + "epoch": 23.63, + "learning_rate": 5.102040816326531e-05, + "loss": 0.8942, + "step": 1168 + }, + { + "epoch": 23.65, + "learning_rate": 5.089285714285714e-05, + "loss": 0.938, + "step": 1169 + }, + { + "epoch": 23.67, + "learning_rate": 5.076530612244898e-05, + "loss": 0.8993, + "step": 1170 + }, + { + "epoch": 23.69, + "learning_rate": 5.063775510204082e-05, + "loss": 0.9362, + "step": 1171 + }, + { + "epoch": 23.71, + "learning_rate": 5.051020408163265e-05, + "loss": 0.9249, + "step": 1172 + }, + { + "epoch": 23.73, + "learning_rate": 5.0382653061224496e-05, + "loss": 0.9055, + "step": 1173 + }, + { + "epoch": 23.75, + "learning_rate": 5.025510204081633e-05, + "loss": 0.8967, + "step": 1174 + }, + { + "epoch": 23.77, + "learning_rate": 5.012755102040817e-05, + "loss": 0.8795, + "step": 1175 + }, + { + "epoch": 23.8, + "learning_rate": 5e-05, + "loss": 0.9452, + "step": 1176 + }, + { + "epoch": 23.82, + "learning_rate": 4.987244897959184e-05, + "loss": 0.926, + "step": 1177 + }, + { + "epoch": 23.84, + "learning_rate": 4.974489795918368e-05, + "loss": 0.8948, + "step": 1178 + }, + { + "epoch": 23.86, + "learning_rate": 4.961734693877551e-05, + "loss": 0.8926, + "step": 1179 + }, + { + "epoch": 23.88, + "learning_rate": 4.9489795918367346e-05, + "loss": 0.8949, + "step": 1180 + }, + { + "epoch": 23.9, + "learning_rate": 4.9362244897959184e-05, + "loss": 
0.9648, + "step": 1181 + }, + { + "epoch": 23.92, + "learning_rate": 4.923469387755102e-05, + "loss": 0.9599, + "step": 1182 + }, + { + "epoch": 23.94, + "learning_rate": 4.910714285714286e-05, + "loss": 0.9603, + "step": 1183 + }, + { + "epoch": 23.96, + "learning_rate": 4.89795918367347e-05, + "loss": 0.9302, + "step": 1184 + }, + { + "epoch": 23.98, + "learning_rate": 4.8852040816326534e-05, + "loss": 0.9261, + "step": 1185 + }, + { + "epoch": 24.0, + "learning_rate": 4.872448979591837e-05, + "loss": 0.9257, + "step": 1186 + }, + { + "epoch": 24.02, + "learning_rate": 4.859693877551021e-05, + "loss": 0.8725, + "step": 1187 + }, + { + "epoch": 24.04, + "learning_rate": 4.846938775510204e-05, + "loss": 0.8486, + "step": 1188 + }, + { + "epoch": 24.06, + "learning_rate": 4.834183673469388e-05, + "loss": 0.8457, + "step": 1189 + }, + { + "epoch": 24.08, + "learning_rate": 4.8214285714285716e-05, + "loss": 0.7848, + "step": 1190 + }, + { + "epoch": 24.1, + "learning_rate": 4.8086734693877554e-05, + "loss": 0.8885, + "step": 1191 + }, + { + "epoch": 24.12, + "learning_rate": 4.795918367346939e-05, + "loss": 0.9099, + "step": 1192 + }, + { + "epoch": 24.14, + "learning_rate": 4.783163265306123e-05, + "loss": 0.9147, + "step": 1193 + }, + { + "epoch": 24.16, + "learning_rate": 4.7704081632653066e-05, + "loss": 0.8781, + "step": 1194 + }, + { + "epoch": 24.18, + "learning_rate": 4.7576530612244904e-05, + "loss": 0.8847, + "step": 1195 + }, + { + "epoch": 24.2, + "learning_rate": 4.744897959183674e-05, + "loss": 0.9041, + "step": 1196 + }, + { + "epoch": 24.22, + "learning_rate": 4.732142857142857e-05, + "loss": 0.8639, + "step": 1197 + }, + { + "epoch": 24.24, + "learning_rate": 4.719387755102041e-05, + "loss": 0.8831, + "step": 1198 + }, + { + "epoch": 24.26, + "learning_rate": 4.706632653061225e-05, + "loss": 0.9063, + "step": 1199 + }, + { + "epoch": 24.28, + "learning_rate": 4.6938775510204086e-05, + "loss": 0.8753, + "step": 1200 + }, + { + "epoch": 24.3, + "learning_rate": 4.6811224489795916e-05, + "loss": 0.8977, + "step": 1201 + }, + { + "epoch": 24.32, + "learning_rate": 4.6683673469387754e-05, + "loss": 0.8729, + "step": 1202 + }, + { + "epoch": 24.34, + "learning_rate": 4.655612244897959e-05, + "loss": 0.898, + "step": 1203 + }, + { + "epoch": 24.36, + "learning_rate": 4.642857142857143e-05, + "loss": 0.8521, + "step": 1204 + }, + { + "epoch": 24.38, + "learning_rate": 4.630102040816327e-05, + "loss": 0.8563, + "step": 1205 + }, + { + "epoch": 24.4, + "learning_rate": 4.6173469387755105e-05, + "loss": 0.8462, + "step": 1206 + }, + { + "epoch": 24.42, + "learning_rate": 4.604591836734694e-05, + "loss": 0.8929, + "step": 1207 + }, + { + "epoch": 24.44, + "learning_rate": 4.591836734693878e-05, + "loss": 0.9154, + "step": 1208 + }, + { + "epoch": 24.46, + "learning_rate": 4.579081632653062e-05, + "loss": 0.811, + "step": 1209 + }, + { + "epoch": 24.48, + "learning_rate": 4.566326530612245e-05, + "loss": 0.8667, + "step": 1210 + }, + { + "epoch": 24.5, + "learning_rate": 4.5535714285714286e-05, + "loss": 0.9179, + "step": 1211 + }, + { + "epoch": 24.52, + "learning_rate": 4.5408163265306124e-05, + "loss": 0.8757, + "step": 1212 + }, + { + "epoch": 24.54, + "learning_rate": 4.528061224489796e-05, + "loss": 0.8519, + "step": 1213 + }, + { + "epoch": 24.56, + "learning_rate": 4.515306122448979e-05, + "loss": 0.9335, + "step": 1214 + }, + { + "epoch": 24.58, + "learning_rate": 4.502551020408164e-05, + "loss": 0.8785, + "step": 1215 + }, + { + "epoch": 24.6, + "learning_rate": 
4.4897959183673474e-05, + "loss": 0.9022, + "step": 1216 + }, + { + "epoch": 24.62, + "learning_rate": 4.477040816326531e-05, + "loss": 0.9532, + "step": 1217 + }, + { + "epoch": 24.64, + "learning_rate": 4.464285714285715e-05, + "loss": 0.8956, + "step": 1218 + }, + { + "epoch": 24.67, + "learning_rate": 4.451530612244898e-05, + "loss": 0.8739, + "step": 1219 + }, + { + "epoch": 24.69, + "learning_rate": 4.438775510204082e-05, + "loss": 0.9312, + "step": 1220 + }, + { + "epoch": 24.71, + "learning_rate": 4.4260204081632656e-05, + "loss": 0.8536, + "step": 1221 + }, + { + "epoch": 24.73, + "learning_rate": 4.4132653061224493e-05, + "loss": 0.8984, + "step": 1222 + }, + { + "epoch": 24.75, + "learning_rate": 4.4005102040816324e-05, + "loss": 0.8949, + "step": 1223 + }, + { + "epoch": 24.77, + "learning_rate": 4.387755102040816e-05, + "loss": 0.9389, + "step": 1224 + }, + { + "epoch": 24.79, + "learning_rate": 4.375e-05, + "loss": 0.8703, + "step": 1225 + }, + { + "epoch": 24.81, + "learning_rate": 4.362244897959184e-05, + "loss": 0.9407, + "step": 1226 + }, + { + "epoch": 24.83, + "learning_rate": 4.3494897959183675e-05, + "loss": 0.9016, + "step": 1227 + }, + { + "epoch": 24.85, + "learning_rate": 4.336734693877551e-05, + "loss": 0.9025, + "step": 1228 + }, + { + "epoch": 24.87, + "learning_rate": 4.323979591836735e-05, + "loss": 0.9415, + "step": 1229 + }, + { + "epoch": 24.89, + "learning_rate": 4.311224489795919e-05, + "loss": 0.9146, + "step": 1230 + }, + { + "epoch": 24.91, + "learning_rate": 4.2984693877551025e-05, + "loss": 0.9144, + "step": 1231 + }, + { + "epoch": 24.93, + "learning_rate": 4.2857142857142856e-05, + "loss": 0.9138, + "step": 1232 + }, + { + "epoch": 24.95, + "learning_rate": 4.2729591836734694e-05, + "loss": 0.9372, + "step": 1233 + }, + { + "epoch": 24.97, + "learning_rate": 4.260204081632653e-05, + "loss": 0.8701, + "step": 1234 + }, + { + "epoch": 24.99, + "learning_rate": 4.247448979591837e-05, + "loss": 0.9278, + "step": 1235 + }, + { + "epoch": 25.01, + "learning_rate": 4.234693877551021e-05, + "loss": 0.9157, + "step": 1236 + }, + { + "epoch": 25.03, + "learning_rate": 4.2219387755102045e-05, + "loss": 0.8852, + "step": 1237 + }, + { + "epoch": 25.05, + "learning_rate": 4.209183673469388e-05, + "loss": 0.855, + "step": 1238 + }, + { + "epoch": 25.07, + "learning_rate": 4.196428571428572e-05, + "loss": 0.8547, + "step": 1239 + }, + { + "epoch": 25.09, + "learning_rate": 4.183673469387756e-05, + "loss": 0.8691, + "step": 1240 + }, + { + "epoch": 25.11, + "learning_rate": 4.170918367346939e-05, + "loss": 0.9101, + "step": 1241 + }, + { + "epoch": 25.13, + "learning_rate": 4.1581632653061226e-05, + "loss": 0.8408, + "step": 1242 + }, + { + "epoch": 25.15, + "learning_rate": 4.1454081632653064e-05, + "loss": 0.9008, + "step": 1243 + }, + { + "epoch": 25.17, + "learning_rate": 4.13265306122449e-05, + "loss": 0.859, + "step": 1244 + }, + { + "epoch": 25.19, + "learning_rate": 4.119897959183674e-05, + "loss": 0.8525, + "step": 1245 + }, + { + "epoch": 25.21, + "learning_rate": 4.107142857142857e-05, + "loss": 0.8682, + "step": 1246 + }, + { + "epoch": 25.23, + "learning_rate": 4.094387755102041e-05, + "loss": 0.8426, + "step": 1247 + }, + { + "epoch": 25.25, + "learning_rate": 4.0816326530612245e-05, + "loss": 0.8948, + "step": 1248 + }, + { + "epoch": 25.27, + "learning_rate": 4.068877551020408e-05, + "loss": 0.8333, + "step": 1249 + }, + { + "epoch": 25.29, + "learning_rate": 4.056122448979592e-05, + "loss": 0.87, + "step": 1250 + }, + { + "epoch": 25.31, + 
"learning_rate": 4.043367346938776e-05, + "loss": 0.8215, + "step": 1251 + }, + { + "epoch": 25.33, + "learning_rate": 4.0306122448979596e-05, + "loss": 0.862, + "step": 1252 + }, + { + "epoch": 25.35, + "learning_rate": 4.017857142857143e-05, + "loss": 0.8607, + "step": 1253 + }, + { + "epoch": 25.37, + "learning_rate": 4.0051020408163264e-05, + "loss": 0.8218, + "step": 1254 + }, + { + "epoch": 25.39, + "learning_rate": 3.99234693877551e-05, + "loss": 0.806, + "step": 1255 + }, + { + "epoch": 25.41, + "learning_rate": 3.979591836734694e-05, + "loss": 0.8929, + "step": 1256 + }, + { + "epoch": 25.43, + "learning_rate": 3.966836734693878e-05, + "loss": 0.8551, + "step": 1257 + }, + { + "epoch": 25.45, + "learning_rate": 3.9540816326530615e-05, + "loss": 0.8408, + "step": 1258 + }, + { + "epoch": 25.47, + "learning_rate": 3.9413265306122446e-05, + "loss": 0.8819, + "step": 1259 + }, + { + "epoch": 25.49, + "learning_rate": 3.928571428571429e-05, + "loss": 0.8757, + "step": 1260 + }, + { + "epoch": 25.52, + "learning_rate": 3.915816326530613e-05, + "loss": 0.8778, + "step": 1261 + }, + { + "epoch": 25.54, + "learning_rate": 3.9030612244897965e-05, + "loss": 0.8524, + "step": 1262 + }, + { + "epoch": 25.56, + "learning_rate": 3.8903061224489796e-05, + "loss": 0.846, + "step": 1263 + }, + { + "epoch": 25.58, + "learning_rate": 3.8775510204081634e-05, + "loss": 0.8757, + "step": 1264 + }, + { + "epoch": 25.6, + "learning_rate": 3.864795918367347e-05, + "loss": 0.9084, + "step": 1265 + }, + { + "epoch": 25.62, + "learning_rate": 3.852040816326531e-05, + "loss": 0.8826, + "step": 1266 + }, + { + "epoch": 25.64, + "learning_rate": 3.839285714285715e-05, + "loss": 0.8619, + "step": 1267 + }, + { + "epoch": 25.66, + "learning_rate": 3.826530612244898e-05, + "loss": 0.8942, + "step": 1268 + }, + { + "epoch": 25.68, + "learning_rate": 3.8137755102040815e-05, + "loss": 0.8342, + "step": 1269 + }, + { + "epoch": 25.7, + "learning_rate": 3.801020408163265e-05, + "loss": 0.8512, + "step": 1270 + }, + { + "epoch": 25.72, + "learning_rate": 3.788265306122449e-05, + "loss": 0.8393, + "step": 1271 + }, + { + "epoch": 25.74, + "learning_rate": 3.775510204081633e-05, + "loss": 0.8508, + "step": 1272 + }, + { + "epoch": 25.76, + "learning_rate": 3.7627551020408166e-05, + "loss": 0.9094, + "step": 1273 + }, + { + "epoch": 25.78, + "learning_rate": 3.7500000000000003e-05, + "loss": 0.9175, + "step": 1274 + }, + { + "epoch": 25.8, + "learning_rate": 3.737244897959184e-05, + "loss": 0.9179, + "step": 1275 + }, + { + "epoch": 25.82, + "learning_rate": 3.724489795918368e-05, + "loss": 0.869, + "step": 1276 + }, + { + "epoch": 25.84, + "learning_rate": 3.711734693877551e-05, + "loss": 0.8568, + "step": 1277 + }, + { + "epoch": 25.86, + "learning_rate": 3.698979591836735e-05, + "loss": 0.9104, + "step": 1278 + }, + { + "epoch": 25.88, + "learning_rate": 3.6862244897959185e-05, + "loss": 0.8912, + "step": 1279 + }, + { + "epoch": 25.9, + "learning_rate": 3.673469387755102e-05, + "loss": 0.878, + "step": 1280 + }, + { + "epoch": 25.92, + "learning_rate": 3.6607142857142853e-05, + "loss": 0.8711, + "step": 1281 + }, + { + "epoch": 25.94, + "learning_rate": 3.64795918367347e-05, + "loss": 0.9404, + "step": 1282 + }, + { + "epoch": 25.96, + "learning_rate": 3.6352040816326536e-05, + "loss": 0.9302, + "step": 1283 + }, + { + "epoch": 25.98, + "learning_rate": 3.622448979591837e-05, + "loss": 0.8907, + "step": 1284 + }, + { + "epoch": 26.0, + "learning_rate": 3.609693877551021e-05, + "loss": 0.8473, + "step": 1285 + }, + { + 
"epoch": 26.02, + "learning_rate": 3.596938775510204e-05, + "loss": 0.8482, + "step": 1286 + }, + { + "epoch": 26.04, + "learning_rate": 3.584183673469388e-05, + "loss": 0.8683, + "step": 1287 + }, + { + "epoch": 26.06, + "learning_rate": 3.571428571428572e-05, + "loss": 0.8443, + "step": 1288 + }, + { + "epoch": 26.08, + "learning_rate": 3.5586734693877555e-05, + "loss": 0.8462, + "step": 1289 + }, + { + "epoch": 26.1, + "learning_rate": 3.5459183673469385e-05, + "loss": 0.8204, + "step": 1290 + }, + { + "epoch": 26.12, + "learning_rate": 3.533163265306122e-05, + "loss": 0.8632, + "step": 1291 + }, + { + "epoch": 26.14, + "learning_rate": 3.520408163265306e-05, + "loss": 0.8883, + "step": 1292 + }, + { + "epoch": 26.16, + "learning_rate": 3.50765306122449e-05, + "loss": 0.8369, + "step": 1293 + }, + { + "epoch": 26.18, + "learning_rate": 3.4948979591836736e-05, + "loss": 0.8369, + "step": 1294 + }, + { + "epoch": 26.2, + "learning_rate": 3.4821428571428574e-05, + "loss": 0.8506, + "step": 1295 + }, + { + "epoch": 26.22, + "learning_rate": 3.469387755102041e-05, + "loss": 0.839, + "step": 1296 + }, + { + "epoch": 26.24, + "learning_rate": 3.456632653061225e-05, + "loss": 0.8421, + "step": 1297 + }, + { + "epoch": 26.26, + "learning_rate": 3.443877551020409e-05, + "loss": 0.8292, + "step": 1298 + }, + { + "epoch": 26.28, + "learning_rate": 3.431122448979592e-05, + "loss": 0.8412, + "step": 1299 + }, + { + "epoch": 26.3, + "learning_rate": 3.4183673469387755e-05, + "loss": 0.8305, + "step": 1300 + } + ], + "logging_steps": 1, + "max_steps": 1568, + "num_train_epochs": 32, + "save_steps": 100, + "total_flos": 1.9253105037500006e+18, + "trial_name": null, + "trial_params": null +} diff --git a/checkpoint-1300/training_args.bin b/checkpoint-1300/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..db23e07d097c18532e52f58a70eb72d22e39c8c1 --- /dev/null +++ b/checkpoint-1300/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b610cbc4242bb50b4985b00e205994ae514fec6d9e2273f2b545a583a07b154b +size 4155 diff --git a/checkpoint-1400/README.md b/checkpoint-1400/README.md new file mode 100644 index 0000000000000000000000000000000000000000..08371015f02382e6fcba318f4aaea54ae52cd3c4 --- /dev/null +++ b/checkpoint-1400/README.md @@ -0,0 +1,34 @@ +--- +library_name: peft +--- +## Training procedure + + +The following `bitsandbytes` quantization config was used during training: +- quant_method: bitsandbytes +- load_in_8bit: True +- load_in_4bit: False +- llm_int8_threshold: 6.0 +- llm_int8_skip_modules: None +- llm_int8_enable_fp32_cpu_offload: False +- llm_int8_has_fp16_weight: False +- bnb_4bit_quant_type: fp4 +- bnb_4bit_use_double_quant: False +- bnb_4bit_compute_dtype: float32 + +The following `bitsandbytes` quantization config was used during training: +- quant_method: bitsandbytes +- load_in_8bit: True +- load_in_4bit: False +- llm_int8_threshold: 6.0 +- llm_int8_skip_modules: None +- llm_int8_enable_fp32_cpu_offload: False +- llm_int8_has_fp16_weight: False +- bnb_4bit_quant_type: fp4 +- bnb_4bit_use_double_quant: False +- bnb_4bit_compute_dtype: float32 +### Framework versions + +- PEFT 0.6.0.dev0 + +- PEFT 0.6.0.dev0 diff --git a/checkpoint-1400/adapter_config.json b/checkpoint-1400/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..751d838ac0c1ae5ca71ca448b25d7a8a0173f01b --- /dev/null +++ b/checkpoint-1400/adapter_config.json @@ -0,0 +1,23 @@ +{ + "auto_mapping": null, + 
"base_model_name_or_path": "bigscience/bloomz-3b", + "bias": "none", + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": true, + "layers_pattern": null, + "layers_to_transform": null, + "lora_alpha": 16, + "lora_dropout": 0.0, + "modules_to_save": null, + "peft_type": "LORA", + "r": 8, + "revision": null, + "target_modules": [ + "dense_4h_to_h", + "dense", + "dense_h_to_4h", + "query_key_value" + ], + "task_type": "CAUSAL_LM" +} \ No newline at end of file diff --git a/checkpoint-1400/adapter_model.bin b/checkpoint-1400/adapter_model.bin new file mode 100644 index 0000000000000000000000000000000000000000..4745aee31110084e19714506670916f3c62d1f94 --- /dev/null +++ b/checkpoint-1400/adapter_model.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:019860e9604c2bce3e1eb82f3a576a7272afd222d602f5fba5c803b83043ee76 +size 39409357 diff --git a/checkpoint-1400/optimizer.pt b/checkpoint-1400/optimizer.pt new file mode 100644 index 0000000000000000000000000000000000000000..1a5d1001a74ae6e7fcb4afa45ee820537e022650 --- /dev/null +++ b/checkpoint-1400/optimizer.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:dff811fb285c320d3235ecb226a73a05ba2c9fa3c8dcea292fb556c245eba3c7 +size 78844421 diff --git a/checkpoint-1400/rng_state.pth b/checkpoint-1400/rng_state.pth new file mode 100644 index 0000000000000000000000000000000000000000..55458251cf9b9dd5303788f73cb2ddd459a88907 --- /dev/null +++ b/checkpoint-1400/rng_state.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:feeeda336ac9687793442afff9ff589b66dc74c1c2b27bafbcd1072d4b5fa37d +size 14575 diff --git a/checkpoint-1400/scheduler.pt b/checkpoint-1400/scheduler.pt new file mode 100644 index 0000000000000000000000000000000000000000..8c0bb19c7480c9e99af2b0ca613475fbdd4e6141 --- /dev/null +++ b/checkpoint-1400/scheduler.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:fc463b7a5519b8283bc8f09760b55dbcf1be2f24e13b19140789bdc09c3e1d5d +size 627 diff --git a/checkpoint-1400/special_tokens_map.json b/checkpoint-1400/special_tokens_map.json new file mode 100644 index 0000000000000000000000000000000000000000..fdafe480f024ff444c7492147536765ce5d55a2d --- /dev/null +++ b/checkpoint-1400/special_tokens_map.json @@ -0,0 +1,6 @@ +{ + "bos_token": "", + "eos_token": "", + "pad_token": "", + "unk_token": "" +} diff --git a/checkpoint-1400/tokenizer.json b/checkpoint-1400/tokenizer.json new file mode 100644 index 0000000000000000000000000000000000000000..dbf002cafbd4818dcff2abc9156c088d681b4533 --- /dev/null +++ b/checkpoint-1400/tokenizer.json @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:17a208233d2ee8d8c83b23bc214df737c44806a1919f444e89b31e586cd956ba +size 14500471 diff --git a/checkpoint-1400/tokenizer_config.json b/checkpoint-1400/tokenizer_config.json new file mode 100644 index 0000000000000000000000000000000000000000..4b56cc9c2965c07132c35df3e2972e93d98c82c3 --- /dev/null +++ b/checkpoint-1400/tokenizer_config.json @@ -0,0 +1,10 @@ +{ + "add_prefix_space": false, + "bos_token": "", + "clean_up_tokenization_spaces": false, + "eos_token": "", + "model_max_length": 1000000000000000019884624838656, + "pad_token": "", + "tokenizer_class": "BloomTokenizer", + "unk_token": "" +} diff --git a/checkpoint-1400/trainer_state.json b/checkpoint-1400/trainer_state.json new file mode 100644 index 0000000000000000000000000000000000000000..8d441a440c3a045965abb373837074c2d4d12482 --- /dev/null +++ b/checkpoint-1400/trainer_state.json 
@@ -0,0 +1,8419 @@ +{ + "best_metric": null, + "best_model_checkpoint": null, + "epoch": 28.327537148276953, + "eval_steps": 500, + "global_step": 1400, + "is_hyper_param_search": false, + "is_local_process_zero": true, + "is_world_process_zero": true, + "log_history": [ + { + "epoch": 0.02, + "learning_rate": 0.00019987244897959184, + "loss": 3.2215, + "step": 1 + }, + { + "epoch": 0.04, + "learning_rate": 0.00019974489795918367, + "loss": 2.8365, + "step": 2 + }, + { + "epoch": 0.06, + "learning_rate": 0.00019961734693877553, + "loss": 2.602, + "step": 3 + }, + { + "epoch": 0.08, + "learning_rate": 0.00019948979591836736, + "loss": 2.4196, + "step": 4 + }, + { + "epoch": 0.1, + "learning_rate": 0.0001993622448979592, + "loss": 2.2574, + "step": 5 + }, + { + "epoch": 0.12, + "learning_rate": 0.00019923469387755102, + "loss": 2.2239, + "step": 6 + }, + { + "epoch": 0.14, + "learning_rate": 0.00019910714285714288, + "loss": 2.1661, + "step": 7 + }, + { + "epoch": 0.16, + "learning_rate": 0.0001989795918367347, + "loss": 2.0987, + "step": 8 + }, + { + "epoch": 0.18, + "learning_rate": 0.00019885204081632654, + "loss": 2.015, + "step": 9 + }, + { + "epoch": 0.2, + "learning_rate": 0.00019872448979591837, + "loss": 1.9771, + "step": 10 + }, + { + "epoch": 0.22, + "learning_rate": 0.00019859693877551023, + "loss": 2.0271, + "step": 11 + }, + { + "epoch": 0.24, + "learning_rate": 0.00019846938775510203, + "loss": 1.9812, + "step": 12 + }, + { + "epoch": 0.26, + "learning_rate": 0.0001983418367346939, + "loss": 2.0834, + "step": 13 + }, + { + "epoch": 0.28, + "learning_rate": 0.00019821428571428572, + "loss": 1.9174, + "step": 14 + }, + { + "epoch": 0.3, + "learning_rate": 0.00019808673469387755, + "loss": 1.8409, + "step": 15 + }, + { + "epoch": 0.32, + "learning_rate": 0.00019795918367346938, + "loss": 1.929, + "step": 16 + }, + { + "epoch": 0.34, + "learning_rate": 0.00019783163265306124, + "loss": 2.0041, + "step": 17 + }, + { + "epoch": 0.36, + "learning_rate": 0.00019770408163265305, + "loss": 1.9385, + "step": 18 + }, + { + "epoch": 0.38, + "learning_rate": 0.0001975765306122449, + "loss": 1.9592, + "step": 19 + }, + { + "epoch": 0.4, + "learning_rate": 0.00019744897959183674, + "loss": 1.9701, + "step": 20 + }, + { + "epoch": 0.42, + "learning_rate": 0.0001973214285714286, + "loss": 1.9277, + "step": 21 + }, + { + "epoch": 0.45, + "learning_rate": 0.00019719387755102042, + "loss": 1.8394, + "step": 22 + }, + { + "epoch": 0.47, + "learning_rate": 0.00019706632653061226, + "loss": 1.8666, + "step": 23 + }, + { + "epoch": 0.49, + "learning_rate": 0.00019693877551020409, + "loss": 1.8997, + "step": 24 + }, + { + "epoch": 0.51, + "learning_rate": 0.00019681122448979592, + "loss": 1.9432, + "step": 25 + }, + { + "epoch": 0.53, + "learning_rate": 0.00019668367346938777, + "loss": 1.9137, + "step": 26 + }, + { + "epoch": 0.55, + "learning_rate": 0.0001965561224489796, + "loss": 1.905, + "step": 27 + }, + { + "epoch": 0.57, + "learning_rate": 0.00019642857142857144, + "loss": 1.8708, + "step": 28 + }, + { + "epoch": 0.59, + "learning_rate": 0.00019630102040816327, + "loss": 1.9097, + "step": 29 + }, + { + "epoch": 0.61, + "learning_rate": 0.00019617346938775513, + "loss": 1.896, + "step": 30 + }, + { + "epoch": 0.63, + "learning_rate": 0.00019604591836734696, + "loss": 1.8834, + "step": 31 + }, + { + "epoch": 0.65, + "learning_rate": 0.0001959183673469388, + "loss": 1.8323, + "step": 32 + }, + { + "epoch": 0.67, + "learning_rate": 0.00019579081632653062, + "loss": 1.804, + "step": 33 + }, + { + 
"epoch": 0.69, + "learning_rate": 0.00019566326530612248, + "loss": 1.8906, + "step": 34 + }, + { + "epoch": 0.71, + "learning_rate": 0.00019553571428571428, + "loss": 1.8693, + "step": 35 + }, + { + "epoch": 0.73, + "learning_rate": 0.00019540816326530614, + "loss": 1.9308, + "step": 36 + }, + { + "epoch": 0.75, + "learning_rate": 0.00019528061224489797, + "loss": 1.8082, + "step": 37 + }, + { + "epoch": 0.77, + "learning_rate": 0.0001951530612244898, + "loss": 1.848, + "step": 38 + }, + { + "epoch": 0.79, + "learning_rate": 0.00019502551020408163, + "loss": 1.8866, + "step": 39 + }, + { + "epoch": 0.81, + "learning_rate": 0.0001948979591836735, + "loss": 1.7844, + "step": 40 + }, + { + "epoch": 0.83, + "learning_rate": 0.0001947704081632653, + "loss": 1.8485, + "step": 41 + }, + { + "epoch": 0.85, + "learning_rate": 0.00019464285714285715, + "loss": 1.7917, + "step": 42 + }, + { + "epoch": 0.87, + "learning_rate": 0.00019451530612244898, + "loss": 1.7342, + "step": 43 + }, + { + "epoch": 0.89, + "learning_rate": 0.00019438775510204084, + "loss": 1.8479, + "step": 44 + }, + { + "epoch": 0.91, + "learning_rate": 0.00019426020408163267, + "loss": 1.8639, + "step": 45 + }, + { + "epoch": 0.93, + "learning_rate": 0.0001941326530612245, + "loss": 1.8166, + "step": 46 + }, + { + "epoch": 0.95, + "learning_rate": 0.00019400510204081633, + "loss": 1.7566, + "step": 47 + }, + { + "epoch": 0.97, + "learning_rate": 0.00019387755102040816, + "loss": 1.8071, + "step": 48 + }, + { + "epoch": 0.99, + "learning_rate": 0.00019375000000000002, + "loss": 1.8612, + "step": 49 + }, + { + "epoch": 1.01, + "learning_rate": 0.00019362244897959185, + "loss": 1.7819, + "step": 50 + }, + { + "epoch": 1.03, + "learning_rate": 0.00019349489795918368, + "loss": 1.8647, + "step": 51 + }, + { + "epoch": 1.05, + "learning_rate": 0.0001933673469387755, + "loss": 1.8196, + "step": 52 + }, + { + "epoch": 1.07, + "learning_rate": 0.00019323979591836737, + "loss": 1.8027, + "step": 53 + }, + { + "epoch": 1.09, + "learning_rate": 0.00019311224489795917, + "loss": 1.8927, + "step": 54 + }, + { + "epoch": 1.11, + "learning_rate": 0.00019298469387755103, + "loss": 1.8481, + "step": 55 + }, + { + "epoch": 1.13, + "learning_rate": 0.00019285714285714286, + "loss": 1.7781, + "step": 56 + }, + { + "epoch": 1.15, + "learning_rate": 0.00019272959183673472, + "loss": 1.8101, + "step": 57 + }, + { + "epoch": 1.17, + "learning_rate": 0.00019260204081632653, + "loss": 1.7257, + "step": 58 + }, + { + "epoch": 1.19, + "learning_rate": 0.00019247448979591838, + "loss": 1.8185, + "step": 59 + }, + { + "epoch": 1.21, + "learning_rate": 0.00019234693877551021, + "loss": 1.8557, + "step": 60 + }, + { + "epoch": 1.23, + "learning_rate": 0.00019221938775510204, + "loss": 1.7418, + "step": 61 + }, + { + "epoch": 1.25, + "learning_rate": 0.00019209183673469388, + "loss": 1.6879, + "step": 62 + }, + { + "epoch": 1.27, + "learning_rate": 0.00019196428571428573, + "loss": 1.7651, + "step": 63 + }, + { + "epoch": 1.29, + "learning_rate": 0.00019183673469387756, + "loss": 1.7759, + "step": 64 + }, + { + "epoch": 1.32, + "learning_rate": 0.0001917091836734694, + "loss": 1.7691, + "step": 65 + }, + { + "epoch": 1.34, + "learning_rate": 0.00019158163265306123, + "loss": 1.7794, + "step": 66 + }, + { + "epoch": 1.36, + "learning_rate": 0.00019145408163265306, + "loss": 1.8152, + "step": 67 + }, + { + "epoch": 1.38, + "learning_rate": 0.00019132653061224492, + "loss": 1.8052, + "step": 68 + }, + { + "epoch": 1.4, + "learning_rate": 0.00019119897959183675, + 
"loss": 1.8054, + "step": 69 + }, + { + "epoch": 1.42, + "learning_rate": 0.00019107142857142858, + "loss": 1.8114, + "step": 70 + }, + { + "epoch": 1.44, + "learning_rate": 0.0001909438775510204, + "loss": 1.7749, + "step": 71 + }, + { + "epoch": 1.46, + "learning_rate": 0.00019081632653061227, + "loss": 1.777, + "step": 72 + }, + { + "epoch": 1.48, + "learning_rate": 0.0001906887755102041, + "loss": 1.7896, + "step": 73 + }, + { + "epoch": 1.5, + "learning_rate": 0.00019056122448979593, + "loss": 1.8335, + "step": 74 + }, + { + "epoch": 1.52, + "learning_rate": 0.00019043367346938776, + "loss": 1.8155, + "step": 75 + }, + { + "epoch": 1.54, + "learning_rate": 0.00019030612244897962, + "loss": 1.8224, + "step": 76 + }, + { + "epoch": 1.56, + "learning_rate": 0.00019017857142857142, + "loss": 1.7889, + "step": 77 + }, + { + "epoch": 1.58, + "learning_rate": 0.00019005102040816328, + "loss": 1.8866, + "step": 78 + }, + { + "epoch": 1.6, + "learning_rate": 0.0001899234693877551, + "loss": 1.8439, + "step": 79 + }, + { + "epoch": 1.62, + "learning_rate": 0.00018979591836734697, + "loss": 1.7906, + "step": 80 + }, + { + "epoch": 1.64, + "learning_rate": 0.00018966836734693877, + "loss": 1.8627, + "step": 81 + }, + { + "epoch": 1.66, + "learning_rate": 0.00018954081632653063, + "loss": 1.7497, + "step": 82 + }, + { + "epoch": 1.68, + "learning_rate": 0.00018941326530612246, + "loss": 1.7936, + "step": 83 + }, + { + "epoch": 1.7, + "learning_rate": 0.0001892857142857143, + "loss": 1.8341, + "step": 84 + }, + { + "epoch": 1.72, + "learning_rate": 0.00018915816326530612, + "loss": 1.7868, + "step": 85 + }, + { + "epoch": 1.74, + "learning_rate": 0.00018903061224489798, + "loss": 1.7493, + "step": 86 + }, + { + "epoch": 1.76, + "learning_rate": 0.0001889030612244898, + "loss": 1.7926, + "step": 87 + }, + { + "epoch": 1.78, + "learning_rate": 0.00018877551020408164, + "loss": 1.8278, + "step": 88 + }, + { + "epoch": 1.8, + "learning_rate": 0.00018864795918367347, + "loss": 1.7387, + "step": 89 + }, + { + "epoch": 1.82, + "learning_rate": 0.0001885204081632653, + "loss": 1.7669, + "step": 90 + }, + { + "epoch": 1.84, + "learning_rate": 0.00018839285714285716, + "loss": 1.7686, + "step": 91 + }, + { + "epoch": 1.86, + "learning_rate": 0.000188265306122449, + "loss": 1.7759, + "step": 92 + }, + { + "epoch": 1.88, + "learning_rate": 0.00018813775510204082, + "loss": 1.7016, + "step": 93 + }, + { + "epoch": 1.9, + "learning_rate": 0.00018801020408163265, + "loss": 1.8123, + "step": 94 + }, + { + "epoch": 1.92, + "learning_rate": 0.0001878826530612245, + "loss": 1.8315, + "step": 95 + }, + { + "epoch": 1.94, + "learning_rate": 0.00018775510204081634, + "loss": 1.7679, + "step": 96 + }, + { + "epoch": 1.96, + "learning_rate": 0.00018762755102040817, + "loss": 1.7874, + "step": 97 + }, + { + "epoch": 1.98, + "learning_rate": 0.0001875, + "loss": 1.8008, + "step": 98 + }, + { + "epoch": 2.0, + "learning_rate": 0.00018737244897959186, + "loss": 1.7177, + "step": 99 + }, + { + "epoch": 2.02, + "learning_rate": 0.00018724489795918367, + "loss": 1.7272, + "step": 100 + }, + { + "epoch": 2.04, + "learning_rate": 0.00018711734693877552, + "loss": 1.7848, + "step": 101 + }, + { + "epoch": 2.06, + "learning_rate": 0.00018698979591836735, + "loss": 1.744, + "step": 102 + }, + { + "epoch": 2.08, + "learning_rate": 0.00018686224489795919, + "loss": 1.7005, + "step": 103 + }, + { + "epoch": 2.1, + "learning_rate": 0.00018673469387755102, + "loss": 1.8247, + "step": 104 + }, + { + "epoch": 2.12, + "learning_rate": 
0.00018660714285714287, + "loss": 1.6855, + "step": 105 + }, + { + "epoch": 2.14, + "learning_rate": 0.0001864795918367347, + "loss": 1.7627, + "step": 106 + }, + { + "epoch": 2.17, + "learning_rate": 0.00018635204081632654, + "loss": 1.7564, + "step": 107 + }, + { + "epoch": 2.19, + "learning_rate": 0.00018622448979591837, + "loss": 1.8237, + "step": 108 + }, + { + "epoch": 2.21, + "learning_rate": 0.00018609693877551022, + "loss": 1.7421, + "step": 109 + }, + { + "epoch": 2.23, + "learning_rate": 0.00018596938775510206, + "loss": 1.7517, + "step": 110 + }, + { + "epoch": 2.25, + "learning_rate": 0.0001858418367346939, + "loss": 1.7515, + "step": 111 + }, + { + "epoch": 2.27, + "learning_rate": 0.00018571428571428572, + "loss": 1.7842, + "step": 112 + }, + { + "epoch": 2.29, + "learning_rate": 0.00018558673469387755, + "loss": 1.8001, + "step": 113 + }, + { + "epoch": 2.31, + "learning_rate": 0.0001854591836734694, + "loss": 1.7653, + "step": 114 + }, + { + "epoch": 2.33, + "learning_rate": 0.00018533163265306124, + "loss": 1.694, + "step": 115 + }, + { + "epoch": 2.35, + "learning_rate": 0.00018520408163265307, + "loss": 1.7457, + "step": 116 + }, + { + "epoch": 2.37, + "learning_rate": 0.0001850765306122449, + "loss": 1.7899, + "step": 117 + }, + { + "epoch": 2.39, + "learning_rate": 0.00018494897959183676, + "loss": 1.7473, + "step": 118 + }, + { + "epoch": 2.41, + "learning_rate": 0.0001848214285714286, + "loss": 1.6639, + "step": 119 + }, + { + "epoch": 2.43, + "learning_rate": 0.00018469387755102042, + "loss": 1.762, + "step": 120 + }, + { + "epoch": 2.45, + "learning_rate": 0.00018456632653061225, + "loss": 1.7378, + "step": 121 + }, + { + "epoch": 2.47, + "learning_rate": 0.0001844387755102041, + "loss": 1.672, + "step": 122 + }, + { + "epoch": 2.49, + "learning_rate": 0.0001843112244897959, + "loss": 1.7267, + "step": 123 + }, + { + "epoch": 2.51, + "learning_rate": 0.00018418367346938777, + "loss": 1.7825, + "step": 124 + }, + { + "epoch": 2.53, + "learning_rate": 0.0001840561224489796, + "loss": 1.7566, + "step": 125 + }, + { + "epoch": 2.55, + "learning_rate": 0.00018392857142857143, + "loss": 1.8169, + "step": 126 + }, + { + "epoch": 2.57, + "learning_rate": 0.00018380102040816326, + "loss": 1.6801, + "step": 127 + }, + { + "epoch": 2.59, + "learning_rate": 0.00018367346938775512, + "loss": 1.7292, + "step": 128 + }, + { + "epoch": 2.61, + "learning_rate": 0.00018354591836734695, + "loss": 1.737, + "step": 129 + }, + { + "epoch": 2.63, + "learning_rate": 0.00018341836734693878, + "loss": 1.7696, + "step": 130 + }, + { + "epoch": 2.65, + "learning_rate": 0.0001832908163265306, + "loss": 1.7239, + "step": 131 + }, + { + "epoch": 2.67, + "learning_rate": 0.00018316326530612247, + "loss": 1.7441, + "step": 132 + }, + { + "epoch": 2.69, + "learning_rate": 0.0001830357142857143, + "loss": 1.7825, + "step": 133 + }, + { + "epoch": 2.71, + "learning_rate": 0.00018290816326530613, + "loss": 1.7411, + "step": 134 + }, + { + "epoch": 2.73, + "learning_rate": 0.00018278061224489796, + "loss": 1.7119, + "step": 135 + }, + { + "epoch": 2.75, + "learning_rate": 0.0001826530612244898, + "loss": 1.7443, + "step": 136 + }, + { + "epoch": 2.77, + "learning_rate": 0.00018252551020408165, + "loss": 1.7197, + "step": 137 + }, + { + "epoch": 2.79, + "learning_rate": 0.00018239795918367348, + "loss": 1.7273, + "step": 138 + }, + { + "epoch": 2.81, + "learning_rate": 0.0001822704081632653, + "loss": 1.7681, + "step": 139 + }, + { + "epoch": 2.83, + "learning_rate": 0.00018214285714285714, + "loss": 
1.8088, + "step": 140 + }, + { + "epoch": 2.85, + "learning_rate": 0.000182015306122449, + "loss": 1.7301, + "step": 141 + }, + { + "epoch": 2.87, + "learning_rate": 0.00018188775510204083, + "loss": 1.6853, + "step": 142 + }, + { + "epoch": 2.89, + "learning_rate": 0.00018176020408163266, + "loss": 1.6966, + "step": 143 + }, + { + "epoch": 2.91, + "learning_rate": 0.0001816326530612245, + "loss": 1.7938, + "step": 144 + }, + { + "epoch": 2.93, + "learning_rate": 0.00018150510204081635, + "loss": 1.7639, + "step": 145 + }, + { + "epoch": 2.95, + "learning_rate": 0.00018137755102040816, + "loss": 1.7527, + "step": 146 + }, + { + "epoch": 2.97, + "learning_rate": 0.00018125000000000001, + "loss": 1.7386, + "step": 147 + }, + { + "epoch": 2.99, + "learning_rate": 0.00018112244897959185, + "loss": 1.7223, + "step": 148 + }, + { + "epoch": 3.01, + "learning_rate": 0.00018099489795918368, + "loss": 1.7571, + "step": 149 + }, + { + "epoch": 3.04, + "learning_rate": 0.0001808673469387755, + "loss": 1.7054, + "step": 150 + }, + { + "epoch": 3.06, + "learning_rate": 0.00018073979591836737, + "loss": 1.6581, + "step": 151 + }, + { + "epoch": 3.08, + "learning_rate": 0.00018061224489795917, + "loss": 1.681, + "step": 152 + }, + { + "epoch": 3.1, + "learning_rate": 0.00018048469387755103, + "loss": 1.7425, + "step": 153 + }, + { + "epoch": 3.12, + "learning_rate": 0.00018035714285714286, + "loss": 1.7108, + "step": 154 + }, + { + "epoch": 3.14, + "learning_rate": 0.00018022959183673472, + "loss": 1.7194, + "step": 155 + }, + { + "epoch": 3.16, + "learning_rate": 0.00018010204081632655, + "loss": 1.6953, + "step": 156 + }, + { + "epoch": 3.18, + "learning_rate": 0.00017997448979591838, + "loss": 1.669, + "step": 157 + }, + { + "epoch": 3.2, + "learning_rate": 0.0001798469387755102, + "loss": 1.744, + "step": 158 + }, + { + "epoch": 3.22, + "learning_rate": 0.00017971938775510204, + "loss": 1.6467, + "step": 159 + }, + { + "epoch": 3.24, + "learning_rate": 0.0001795918367346939, + "loss": 1.7103, + "step": 160 + }, + { + "epoch": 3.26, + "learning_rate": 0.00017946428571428573, + "loss": 1.6662, + "step": 161 + }, + { + "epoch": 3.28, + "learning_rate": 0.00017933673469387756, + "loss": 1.6657, + "step": 162 + }, + { + "epoch": 3.3, + "learning_rate": 0.0001792091836734694, + "loss": 1.791, + "step": 163 + }, + { + "epoch": 3.32, + "learning_rate": 0.00017908163265306125, + "loss": 1.7704, + "step": 164 + }, + { + "epoch": 3.34, + "learning_rate": 0.00017895408163265305, + "loss": 1.7229, + "step": 165 + }, + { + "epoch": 3.36, + "learning_rate": 0.0001788265306122449, + "loss": 1.76, + "step": 166 + }, + { + "epoch": 3.38, + "learning_rate": 0.00017869897959183674, + "loss": 1.6482, + "step": 167 + }, + { + "epoch": 3.4, + "learning_rate": 0.0001785714285714286, + "loss": 1.8076, + "step": 168 + }, + { + "epoch": 3.42, + "learning_rate": 0.0001784438775510204, + "loss": 1.7368, + "step": 169 + }, + { + "epoch": 3.44, + "learning_rate": 0.00017831632653061226, + "loss": 1.6264, + "step": 170 + }, + { + "epoch": 3.46, + "learning_rate": 0.0001781887755102041, + "loss": 1.6289, + "step": 171 + }, + { + "epoch": 3.48, + "learning_rate": 0.00017806122448979592, + "loss": 1.7913, + "step": 172 + }, + { + "epoch": 3.5, + "learning_rate": 0.00017793367346938775, + "loss": 1.6985, + "step": 173 + }, + { + "epoch": 3.52, + "learning_rate": 0.0001778061224489796, + "loss": 1.6936, + "step": 174 + }, + { + "epoch": 3.54, + "learning_rate": 0.00017767857142857141, + "loss": 1.8068, + "step": 175 + }, + { + "epoch": 
3.56, + "learning_rate": 0.00017755102040816327, + "loss": 1.7243, + "step": 176 + }, + { + "epoch": 3.58, + "learning_rate": 0.0001774234693877551, + "loss": 1.6893, + "step": 177 + }, + { + "epoch": 3.6, + "learning_rate": 0.00017729591836734696, + "loss": 1.8122, + "step": 178 + }, + { + "epoch": 3.62, + "learning_rate": 0.0001771683673469388, + "loss": 1.6562, + "step": 179 + }, + { + "epoch": 3.64, + "learning_rate": 0.00017704081632653062, + "loss": 1.6999, + "step": 180 + }, + { + "epoch": 3.66, + "learning_rate": 0.00017691326530612245, + "loss": 1.7229, + "step": 181 + }, + { + "epoch": 3.68, + "learning_rate": 0.00017678571428571428, + "loss": 1.6764, + "step": 182 + }, + { + "epoch": 3.7, + "learning_rate": 0.00017665816326530614, + "loss": 1.6982, + "step": 183 + }, + { + "epoch": 3.72, + "learning_rate": 0.00017653061224489797, + "loss": 1.696, + "step": 184 + }, + { + "epoch": 3.74, + "learning_rate": 0.0001764030612244898, + "loss": 1.6797, + "step": 185 + }, + { + "epoch": 3.76, + "learning_rate": 0.00017627551020408164, + "loss": 1.637, + "step": 186 + }, + { + "epoch": 3.78, + "learning_rate": 0.0001761479591836735, + "loss": 1.7074, + "step": 187 + }, + { + "epoch": 3.8, + "learning_rate": 0.0001760204081632653, + "loss": 1.705, + "step": 188 + }, + { + "epoch": 3.82, + "learning_rate": 0.00017589285714285716, + "loss": 1.6153, + "step": 189 + }, + { + "epoch": 3.84, + "learning_rate": 0.00017576530612244899, + "loss": 1.7354, + "step": 190 + }, + { + "epoch": 3.86, + "learning_rate": 0.00017563775510204084, + "loss": 1.6941, + "step": 191 + }, + { + "epoch": 3.88, + "learning_rate": 0.00017551020408163265, + "loss": 1.7231, + "step": 192 + }, + { + "epoch": 3.91, + "learning_rate": 0.0001753826530612245, + "loss": 1.7663, + "step": 193 + }, + { + "epoch": 3.93, + "learning_rate": 0.00017525510204081634, + "loss": 1.6532, + "step": 194 + }, + { + "epoch": 3.95, + "learning_rate": 0.00017512755102040817, + "loss": 1.7115, + "step": 195 + }, + { + "epoch": 3.97, + "learning_rate": 0.000175, + "loss": 1.6955, + "step": 196 + }, + { + "epoch": 3.99, + "learning_rate": 0.00017487244897959186, + "loss": 1.6863, + "step": 197 + }, + { + "epoch": 4.01, + "learning_rate": 0.00017474489795918366, + "loss": 1.7012, + "step": 198 + }, + { + "epoch": 4.03, + "learning_rate": 0.00017461734693877552, + "loss": 1.5927, + "step": 199 + }, + { + "epoch": 4.05, + "learning_rate": 0.00017448979591836735, + "loss": 1.6272, + "step": 200 + }, + { + "epoch": 4.07, + "learning_rate": 0.00017436224489795918, + "loss": 1.5994, + "step": 201 + }, + { + "epoch": 4.09, + "learning_rate": 0.00017423469387755104, + "loss": 1.7141, + "step": 202 + }, + { + "epoch": 4.11, + "learning_rate": 0.00017410714285714287, + "loss": 1.7547, + "step": 203 + }, + { + "epoch": 4.13, + "learning_rate": 0.0001739795918367347, + "loss": 1.6254, + "step": 204 + }, + { + "epoch": 4.15, + "learning_rate": 0.00017385204081632653, + "loss": 1.6686, + "step": 205 + }, + { + "epoch": 4.17, + "learning_rate": 0.0001737244897959184, + "loss": 1.6684, + "step": 206 + }, + { + "epoch": 4.19, + "learning_rate": 0.00017359693877551022, + "loss": 1.6724, + "step": 207 + }, + { + "epoch": 4.21, + "learning_rate": 0.00017346938775510205, + "loss": 1.7361, + "step": 208 + }, + { + "epoch": 4.23, + "learning_rate": 0.00017334183673469388, + "loss": 1.7167, + "step": 209 + }, + { + "epoch": 4.25, + "learning_rate": 0.00017321428571428574, + "loss": 1.7226, + "step": 210 + }, + { + "epoch": 4.27, + "learning_rate": 
0.00017308673469387754, + "loss": 1.7133, + "step": 211 + }, + { + "epoch": 4.29, + "learning_rate": 0.0001729591836734694, + "loss": 1.649, + "step": 212 + }, + { + "epoch": 4.31, + "learning_rate": 0.00017283163265306123, + "loss": 1.7104, + "step": 213 + }, + { + "epoch": 4.33, + "learning_rate": 0.00017270408163265306, + "loss": 1.6861, + "step": 214 + }, + { + "epoch": 4.35, + "learning_rate": 0.0001725765306122449, + "loss": 1.648, + "step": 215 + }, + { + "epoch": 4.37, + "learning_rate": 0.00017244897959183675, + "loss": 1.6215, + "step": 216 + }, + { + "epoch": 4.39, + "learning_rate": 0.00017232142857142858, + "loss": 1.6334, + "step": 217 + }, + { + "epoch": 4.41, + "learning_rate": 0.0001721938775510204, + "loss": 1.6283, + "step": 218 + }, + { + "epoch": 4.43, + "learning_rate": 0.00017206632653061224, + "loss": 1.6462, + "step": 219 + }, + { + "epoch": 4.45, + "learning_rate": 0.0001719387755102041, + "loss": 1.7233, + "step": 220 + }, + { + "epoch": 4.47, + "learning_rate": 0.0001718112244897959, + "loss": 1.7839, + "step": 221 + }, + { + "epoch": 4.49, + "learning_rate": 0.00017168367346938776, + "loss": 1.7204, + "step": 222 + }, + { + "epoch": 4.51, + "learning_rate": 0.0001715561224489796, + "loss": 1.7671, + "step": 223 + }, + { + "epoch": 4.53, + "learning_rate": 0.00017142857142857143, + "loss": 1.6824, + "step": 224 + }, + { + "epoch": 4.55, + "learning_rate": 0.00017130102040816328, + "loss": 1.7068, + "step": 225 + }, + { + "epoch": 4.57, + "learning_rate": 0.00017117346938775511, + "loss": 1.6515, + "step": 226 + }, + { + "epoch": 4.59, + "learning_rate": 0.00017104591836734694, + "loss": 1.6586, + "step": 227 + }, + { + "epoch": 4.61, + "learning_rate": 0.00017091836734693878, + "loss": 1.6355, + "step": 228 + }, + { + "epoch": 4.63, + "learning_rate": 0.00017079081632653063, + "loss": 1.7173, + "step": 229 + }, + { + "epoch": 4.65, + "learning_rate": 0.00017066326530612246, + "loss": 1.6585, + "step": 230 + }, + { + "epoch": 4.67, + "learning_rate": 0.0001705357142857143, + "loss": 1.5856, + "step": 231 + }, + { + "epoch": 4.69, + "learning_rate": 0.00017040816326530613, + "loss": 1.5923, + "step": 232 + }, + { + "epoch": 4.71, + "learning_rate": 0.00017028061224489798, + "loss": 1.7128, + "step": 233 + }, + { + "epoch": 4.73, + "learning_rate": 0.0001701530612244898, + "loss": 1.6971, + "step": 234 + }, + { + "epoch": 4.75, + "learning_rate": 0.00017002551020408165, + "loss": 1.6416, + "step": 235 + }, + { + "epoch": 4.78, + "learning_rate": 0.00016989795918367348, + "loss": 1.645, + "step": 236 + }, + { + "epoch": 4.8, + "learning_rate": 0.0001697704081632653, + "loss": 1.6792, + "step": 237 + }, + { + "epoch": 4.82, + "learning_rate": 0.00016964285714285714, + "loss": 1.6522, + "step": 238 + }, + { + "epoch": 4.84, + "learning_rate": 0.000169515306122449, + "loss": 1.6315, + "step": 239 + }, + { + "epoch": 4.86, + "learning_rate": 0.00016938775510204083, + "loss": 1.6622, + "step": 240 + }, + { + "epoch": 4.88, + "learning_rate": 0.00016926020408163266, + "loss": 1.6566, + "step": 241 + }, + { + "epoch": 4.9, + "learning_rate": 0.0001691326530612245, + "loss": 1.7141, + "step": 242 + }, + { + "epoch": 4.92, + "learning_rate": 0.00016900510204081635, + "loss": 1.5873, + "step": 243 + }, + { + "epoch": 4.94, + "learning_rate": 0.00016887755102040818, + "loss": 1.6571, + "step": 244 + }, + { + "epoch": 4.96, + "learning_rate": 0.00016875, + "loss": 1.6829, + "step": 245 + }, + { + "epoch": 4.98, + "learning_rate": 0.00016862244897959184, + "loss": 1.6935, + 
"step": 246 + }, + { + "epoch": 5.0, + "learning_rate": 0.00016849489795918367, + "loss": 1.6782, + "step": 247 + }, + { + "epoch": 5.02, + "learning_rate": 0.00016836734693877553, + "loss": 1.622, + "step": 248 + }, + { + "epoch": 5.04, + "learning_rate": 0.00016823979591836736, + "loss": 1.6596, + "step": 249 + }, + { + "epoch": 5.06, + "learning_rate": 0.0001681122448979592, + "loss": 1.5821, + "step": 250 + }, + { + "epoch": 5.08, + "learning_rate": 0.00016798469387755102, + "loss": 1.7292, + "step": 251 + }, + { + "epoch": 5.1, + "learning_rate": 0.00016785714285714288, + "loss": 1.646, + "step": 252 + }, + { + "epoch": 5.12, + "learning_rate": 0.0001677295918367347, + "loss": 1.6969, + "step": 253 + }, + { + "epoch": 5.14, + "learning_rate": 0.00016760204081632654, + "loss": 1.6082, + "step": 254 + }, + { + "epoch": 5.16, + "learning_rate": 0.00016747448979591837, + "loss": 1.5843, + "step": 255 + }, + { + "epoch": 5.18, + "learning_rate": 0.00016734693877551023, + "loss": 1.6827, + "step": 256 + }, + { + "epoch": 5.2, + "learning_rate": 0.00016721938775510203, + "loss": 1.5824, + "step": 257 + }, + { + "epoch": 5.22, + "learning_rate": 0.0001670918367346939, + "loss": 1.6795, + "step": 258 + }, + { + "epoch": 5.24, + "learning_rate": 0.00016696428571428572, + "loss": 1.5639, + "step": 259 + }, + { + "epoch": 5.26, + "learning_rate": 0.00016683673469387755, + "loss": 1.592, + "step": 260 + }, + { + "epoch": 5.28, + "learning_rate": 0.00016670918367346938, + "loss": 1.65, + "step": 261 + }, + { + "epoch": 5.3, + "learning_rate": 0.00016658163265306124, + "loss": 1.5592, + "step": 262 + }, + { + "epoch": 5.32, + "learning_rate": 0.00016645408163265305, + "loss": 1.5091, + "step": 263 + }, + { + "epoch": 5.34, + "learning_rate": 0.0001663265306122449, + "loss": 1.6138, + "step": 264 + }, + { + "epoch": 5.36, + "learning_rate": 0.00016619897959183673, + "loss": 1.625, + "step": 265 + }, + { + "epoch": 5.38, + "learning_rate": 0.0001660714285714286, + "loss": 1.5757, + "step": 266 + }, + { + "epoch": 5.4, + "learning_rate": 0.00016594387755102042, + "loss": 1.6372, + "step": 267 + }, + { + "epoch": 5.42, + "learning_rate": 0.00016581632653061225, + "loss": 1.5891, + "step": 268 + }, + { + "epoch": 5.44, + "learning_rate": 0.00016568877551020409, + "loss": 1.6893, + "step": 269 + }, + { + "epoch": 5.46, + "learning_rate": 0.00016556122448979592, + "loss": 1.6662, + "step": 270 + }, + { + "epoch": 5.48, + "learning_rate": 0.00016543367346938777, + "loss": 1.7132, + "step": 271 + }, + { + "epoch": 5.5, + "learning_rate": 0.0001653061224489796, + "loss": 1.5835, + "step": 272 + }, + { + "epoch": 5.52, + "learning_rate": 0.00016517857142857144, + "loss": 1.6342, + "step": 273 + }, + { + "epoch": 5.54, + "learning_rate": 0.00016505102040816327, + "loss": 1.6717, + "step": 274 + }, + { + "epoch": 5.56, + "learning_rate": 0.00016492346938775512, + "loss": 1.6248, + "step": 275 + }, + { + "epoch": 5.58, + "learning_rate": 0.00016479591836734696, + "loss": 1.6117, + "step": 276 + }, + { + "epoch": 5.6, + "learning_rate": 0.0001646683673469388, + "loss": 1.6798, + "step": 277 + }, + { + "epoch": 5.63, + "learning_rate": 0.00016454081632653062, + "loss": 1.6406, + "step": 278 + }, + { + "epoch": 5.65, + "learning_rate": 0.00016441326530612248, + "loss": 1.6512, + "step": 279 + }, + { + "epoch": 5.67, + "learning_rate": 0.00016428571428571428, + "loss": 1.6102, + "step": 280 + }, + { + "epoch": 5.69, + "learning_rate": 0.00016415816326530614, + "loss": 1.6113, + "step": 281 + }, + { + "epoch": 5.71, + 
"learning_rate": 0.00016403061224489797, + "loss": 1.7116, + "step": 282 + }, + { + "epoch": 5.73, + "learning_rate": 0.0001639030612244898, + "loss": 1.6846, + "step": 283 + }, + { + "epoch": 5.75, + "learning_rate": 0.00016377551020408163, + "loss": 1.6911, + "step": 284 + }, + { + "epoch": 5.77, + "learning_rate": 0.0001636479591836735, + "loss": 1.6202, + "step": 285 + }, + { + "epoch": 5.79, + "learning_rate": 0.0001635204081632653, + "loss": 1.5715, + "step": 286 + }, + { + "epoch": 5.81, + "learning_rate": 0.00016339285714285715, + "loss": 1.6461, + "step": 287 + }, + { + "epoch": 5.83, + "learning_rate": 0.00016326530612244898, + "loss": 1.6624, + "step": 288 + }, + { + "epoch": 5.85, + "learning_rate": 0.00016313775510204084, + "loss": 1.6535, + "step": 289 + }, + { + "epoch": 5.87, + "learning_rate": 0.00016301020408163267, + "loss": 1.6275, + "step": 290 + }, + { + "epoch": 5.89, + "learning_rate": 0.0001628826530612245, + "loss": 1.6636, + "step": 291 + }, + { + "epoch": 5.91, + "learning_rate": 0.00016275510204081633, + "loss": 1.6546, + "step": 292 + }, + { + "epoch": 5.93, + "learning_rate": 0.00016262755102040816, + "loss": 1.7274, + "step": 293 + }, + { + "epoch": 5.95, + "learning_rate": 0.00016250000000000002, + "loss": 1.5901, + "step": 294 + }, + { + "epoch": 5.97, + "learning_rate": 0.00016237244897959185, + "loss": 1.6046, + "step": 295 + }, + { + "epoch": 5.99, + "learning_rate": 0.00016224489795918368, + "loss": 1.5828, + "step": 296 + }, + { + "epoch": 6.01, + "learning_rate": 0.0001621173469387755, + "loss": 1.6435, + "step": 297 + }, + { + "epoch": 6.03, + "learning_rate": 0.00016198979591836737, + "loss": 1.6263, + "step": 298 + }, + { + "epoch": 6.05, + "learning_rate": 0.00016186224489795917, + "loss": 1.4944, + "step": 299 + }, + { + "epoch": 6.07, + "learning_rate": 0.00016173469387755103, + "loss": 1.6286, + "step": 300 + }, + { + "epoch": 6.09, + "learning_rate": 0.00016160714285714286, + "loss": 1.694, + "step": 301 + }, + { + "epoch": 6.11, + "learning_rate": 0.00016147959183673472, + "loss": 1.6197, + "step": 302 + }, + { + "epoch": 6.13, + "learning_rate": 0.00016135204081632652, + "loss": 1.5597, + "step": 303 + }, + { + "epoch": 6.15, + "learning_rate": 0.00016122448979591838, + "loss": 1.5487, + "step": 304 + }, + { + "epoch": 6.17, + "learning_rate": 0.0001610969387755102, + "loss": 1.5769, + "step": 305 + }, + { + "epoch": 6.19, + "learning_rate": 0.00016096938775510204, + "loss": 1.6367, + "step": 306 + }, + { + "epoch": 6.21, + "learning_rate": 0.00016084183673469388, + "loss": 1.583, + "step": 307 + }, + { + "epoch": 6.23, + "learning_rate": 0.00016071428571428573, + "loss": 1.6201, + "step": 308 + }, + { + "epoch": 6.25, + "learning_rate": 0.00016058673469387754, + "loss": 1.6586, + "step": 309 + }, + { + "epoch": 6.27, + "learning_rate": 0.0001604591836734694, + "loss": 1.6711, + "step": 310 + }, + { + "epoch": 6.29, + "learning_rate": 0.00016033163265306123, + "loss": 1.6402, + "step": 311 + }, + { + "epoch": 6.31, + "learning_rate": 0.00016020408163265306, + "loss": 1.5247, + "step": 312 + }, + { + "epoch": 6.33, + "learning_rate": 0.00016007653061224491, + "loss": 1.5356, + "step": 313 + }, + { + "epoch": 6.35, + "learning_rate": 0.00015994897959183675, + "loss": 1.564, + "step": 314 + }, + { + "epoch": 6.37, + "learning_rate": 0.00015982142857142858, + "loss": 1.563, + "step": 315 + }, + { + "epoch": 6.39, + "learning_rate": 0.0001596938775510204, + "loss": 1.5198, + "step": 316 + }, + { + "epoch": 6.41, + "learning_rate": 
0.00015956632653061227, + "loss": 1.6558, + "step": 317 + }, + { + "epoch": 6.43, + "learning_rate": 0.0001594387755102041, + "loss": 1.5534, + "step": 318 + }, + { + "epoch": 6.45, + "learning_rate": 0.00015931122448979593, + "loss": 1.6239, + "step": 319 + }, + { + "epoch": 6.47, + "learning_rate": 0.00015918367346938776, + "loss": 1.5645, + "step": 320 + }, + { + "epoch": 6.5, + "learning_rate": 0.00015905612244897962, + "loss": 1.5713, + "step": 321 + }, + { + "epoch": 6.52, + "learning_rate": 0.00015892857142857142, + "loss": 1.6176, + "step": 322 + }, + { + "epoch": 6.54, + "learning_rate": 0.00015880102040816328, + "loss": 1.502, + "step": 323 + }, + { + "epoch": 6.56, + "learning_rate": 0.0001586734693877551, + "loss": 1.645, + "step": 324 + }, + { + "epoch": 6.58, + "learning_rate": 0.00015854591836734697, + "loss": 1.5904, + "step": 325 + }, + { + "epoch": 6.6, + "learning_rate": 0.00015841836734693877, + "loss": 1.6149, + "step": 326 + }, + { + "epoch": 6.62, + "learning_rate": 0.00015829081632653063, + "loss": 1.6757, + "step": 327 + }, + { + "epoch": 6.64, + "learning_rate": 0.00015816326530612246, + "loss": 1.541, + "step": 328 + }, + { + "epoch": 6.66, + "learning_rate": 0.0001580357142857143, + "loss": 1.5898, + "step": 329 + }, + { + "epoch": 6.68, + "learning_rate": 0.00015790816326530612, + "loss": 1.5441, + "step": 330 + }, + { + "epoch": 6.7, + "learning_rate": 0.00015778061224489798, + "loss": 1.61, + "step": 331 + }, + { + "epoch": 6.72, + "learning_rate": 0.00015765306122448978, + "loss": 1.615, + "step": 332 + }, + { + "epoch": 6.74, + "learning_rate": 0.00015752551020408164, + "loss": 1.6575, + "step": 333 + }, + { + "epoch": 6.76, + "learning_rate": 0.00015739795918367347, + "loss": 1.6702, + "step": 334 + }, + { + "epoch": 6.78, + "learning_rate": 0.0001572704081632653, + "loss": 1.6009, + "step": 335 + }, + { + "epoch": 6.8, + "learning_rate": 0.00015714285714285716, + "loss": 1.5568, + "step": 336 + }, + { + "epoch": 6.82, + "learning_rate": 0.000157015306122449, + "loss": 1.619, + "step": 337 + }, + { + "epoch": 6.84, + "learning_rate": 0.00015688775510204082, + "loss": 1.5563, + "step": 338 + }, + { + "epoch": 6.86, + "learning_rate": 0.00015676020408163265, + "loss": 1.6328, + "step": 339 + }, + { + "epoch": 6.88, + "learning_rate": 0.0001566326530612245, + "loss": 1.5726, + "step": 340 + }, + { + "epoch": 6.9, + "learning_rate": 0.00015650510204081634, + "loss": 1.6199, + "step": 341 + }, + { + "epoch": 6.92, + "learning_rate": 0.00015637755102040817, + "loss": 1.5722, + "step": 342 + }, + { + "epoch": 6.94, + "learning_rate": 0.00015625, + "loss": 1.5685, + "step": 343 + }, + { + "epoch": 6.96, + "learning_rate": 0.00015612244897959186, + "loss": 1.5615, + "step": 344 + }, + { + "epoch": 6.98, + "learning_rate": 0.00015599489795918366, + "loss": 1.5994, + "step": 345 + }, + { + "epoch": 7.0, + "learning_rate": 0.00015586734693877552, + "loss": 1.5579, + "step": 346 + }, + { + "epoch": 7.02, + "learning_rate": 0.00015573979591836735, + "loss": 1.547, + "step": 347 + }, + { + "epoch": 7.04, + "learning_rate": 0.00015561224489795918, + "loss": 1.5292, + "step": 348 + }, + { + "epoch": 7.06, + "learning_rate": 0.00015548469387755102, + "loss": 1.6032, + "step": 349 + }, + { + "epoch": 7.08, + "learning_rate": 0.00015535714285714287, + "loss": 1.5149, + "step": 350 + }, + { + "epoch": 7.1, + "learning_rate": 0.0001552295918367347, + "loss": 1.6093, + "step": 351 + }, + { + "epoch": 7.12, + "learning_rate": 0.00015510204081632654, + "loss": 1.5421, + "step": 
352 + }, + { + "epoch": 7.14, + "learning_rate": 0.00015497448979591837, + "loss": 1.5733, + "step": 353 + }, + { + "epoch": 7.16, + "learning_rate": 0.00015484693877551022, + "loss": 1.5703, + "step": 354 + }, + { + "epoch": 7.18, + "learning_rate": 0.00015471938775510203, + "loss": 1.6141, + "step": 355 + }, + { + "epoch": 7.2, + "learning_rate": 0.00015459183673469389, + "loss": 1.5526, + "step": 356 + }, + { + "epoch": 7.22, + "learning_rate": 0.00015446428571428572, + "loss": 1.5347, + "step": 357 + }, + { + "epoch": 7.24, + "learning_rate": 0.00015433673469387755, + "loss": 1.5682, + "step": 358 + }, + { + "epoch": 7.26, + "learning_rate": 0.0001542091836734694, + "loss": 1.5292, + "step": 359 + }, + { + "epoch": 7.28, + "learning_rate": 0.00015408163265306124, + "loss": 1.499, + "step": 360 + }, + { + "epoch": 7.3, + "learning_rate": 0.00015395408163265307, + "loss": 1.5624, + "step": 361 + }, + { + "epoch": 7.32, + "learning_rate": 0.0001538265306122449, + "loss": 1.627, + "step": 362 + }, + { + "epoch": 7.34, + "learning_rate": 0.00015369897959183676, + "loss": 1.5327, + "step": 363 + }, + { + "epoch": 7.37, + "learning_rate": 0.0001535714285714286, + "loss": 1.5622, + "step": 364 + }, + { + "epoch": 7.39, + "learning_rate": 0.00015344387755102042, + "loss": 1.5659, + "step": 365 + }, + { + "epoch": 7.41, + "learning_rate": 0.00015331632653061225, + "loss": 1.5019, + "step": 366 + }, + { + "epoch": 7.43, + "learning_rate": 0.0001531887755102041, + "loss": 1.5921, + "step": 367 + }, + { + "epoch": 7.45, + "learning_rate": 0.0001530612244897959, + "loss": 1.5914, + "step": 368 + }, + { + "epoch": 7.47, + "learning_rate": 0.00015293367346938777, + "loss": 1.5045, + "step": 369 + }, + { + "epoch": 7.49, + "learning_rate": 0.0001528061224489796, + "loss": 1.6209, + "step": 370 + }, + { + "epoch": 7.51, + "learning_rate": 0.00015267857142857143, + "loss": 1.5198, + "step": 371 + }, + { + "epoch": 7.53, + "learning_rate": 0.00015255102040816326, + "loss": 1.5363, + "step": 372 + }, + { + "epoch": 7.55, + "learning_rate": 0.00015242346938775512, + "loss": 1.5391, + "step": 373 + }, + { + "epoch": 7.57, + "learning_rate": 0.00015229591836734695, + "loss": 1.4546, + "step": 374 + }, + { + "epoch": 7.59, + "learning_rate": 0.00015216836734693878, + "loss": 1.5546, + "step": 375 + }, + { + "epoch": 7.61, + "learning_rate": 0.0001520408163265306, + "loss": 1.5629, + "step": 376 + }, + { + "epoch": 7.63, + "learning_rate": 0.00015191326530612247, + "loss": 1.6002, + "step": 377 + }, + { + "epoch": 7.65, + "learning_rate": 0.00015178571428571427, + "loss": 1.5543, + "step": 378 + }, + { + "epoch": 7.67, + "learning_rate": 0.00015165816326530613, + "loss": 1.5925, + "step": 379 + }, + { + "epoch": 7.69, + "learning_rate": 0.00015153061224489796, + "loss": 1.5631, + "step": 380 + }, + { + "epoch": 7.71, + "learning_rate": 0.0001514030612244898, + "loss": 1.5677, + "step": 381 + }, + { + "epoch": 7.73, + "learning_rate": 0.00015127551020408165, + "loss": 1.5828, + "step": 382 + }, + { + "epoch": 7.75, + "learning_rate": 0.00015114795918367348, + "loss": 1.6494, + "step": 383 + }, + { + "epoch": 7.77, + "learning_rate": 0.0001510204081632653, + "loss": 1.553, + "step": 384 + }, + { + "epoch": 7.79, + "learning_rate": 0.00015089285714285714, + "loss": 1.6156, + "step": 385 + }, + { + "epoch": 7.81, + "learning_rate": 0.000150765306122449, + "loss": 1.5001, + "step": 386 + }, + { + "epoch": 7.83, + "learning_rate": 0.00015063775510204083, + "loss": 1.5321, + "step": 387 + }, + { + "epoch": 7.85, + 
"learning_rate": 0.00015051020408163266, + "loss": 1.5307, + "step": 388 + }, + { + "epoch": 7.87, + "learning_rate": 0.0001503826530612245, + "loss": 1.5639, + "step": 389 + }, + { + "epoch": 7.89, + "learning_rate": 0.00015025510204081635, + "loss": 1.517, + "step": 390 + }, + { + "epoch": 7.91, + "learning_rate": 0.00015012755102040816, + "loss": 1.4776, + "step": 391 + }, + { + "epoch": 7.93, + "learning_rate": 0.00015000000000000001, + "loss": 1.5368, + "step": 392 + }, + { + "epoch": 7.95, + "learning_rate": 0.00014987244897959184, + "loss": 1.5636, + "step": 393 + }, + { + "epoch": 7.97, + "learning_rate": 0.00014974489795918368, + "loss": 1.6004, + "step": 394 + }, + { + "epoch": 7.99, + "learning_rate": 0.0001496173469387755, + "loss": 1.5524, + "step": 395 + }, + { + "epoch": 8.01, + "learning_rate": 0.00014948979591836736, + "loss": 1.5307, + "step": 396 + }, + { + "epoch": 8.03, + "learning_rate": 0.00014936224489795917, + "loss": 1.5123, + "step": 397 + }, + { + "epoch": 8.05, + "learning_rate": 0.00014923469387755103, + "loss": 1.5132, + "step": 398 + }, + { + "epoch": 8.07, + "learning_rate": 0.00014910714285714286, + "loss": 1.5109, + "step": 399 + }, + { + "epoch": 8.09, + "learning_rate": 0.00014897959183673472, + "loss": 1.5302, + "step": 400 + }, + { + "epoch": 8.11, + "learning_rate": 0.00014885204081632652, + "loss": 1.5238, + "step": 401 + }, + { + "epoch": 8.13, + "learning_rate": 0.00014872448979591838, + "loss": 1.4781, + "step": 402 + }, + { + "epoch": 8.15, + "learning_rate": 0.0001485969387755102, + "loss": 1.5446, + "step": 403 + }, + { + "epoch": 8.17, + "learning_rate": 0.00014846938775510204, + "loss": 1.5, + "step": 404 + }, + { + "epoch": 8.19, + "learning_rate": 0.0001483418367346939, + "loss": 1.5458, + "step": 405 + }, + { + "epoch": 8.21, + "learning_rate": 0.00014821428571428573, + "loss": 1.5257, + "step": 406 + }, + { + "epoch": 8.24, + "learning_rate": 0.00014808673469387756, + "loss": 1.4607, + "step": 407 + }, + { + "epoch": 8.26, + "learning_rate": 0.0001479591836734694, + "loss": 1.4282, + "step": 408 + }, + { + "epoch": 8.28, + "learning_rate": 0.00014783163265306125, + "loss": 1.4519, + "step": 409 + }, + { + "epoch": 8.3, + "learning_rate": 0.00014770408163265305, + "loss": 1.475, + "step": 410 + }, + { + "epoch": 8.32, + "learning_rate": 0.0001475765306122449, + "loss": 1.5425, + "step": 411 + }, + { + "epoch": 8.34, + "learning_rate": 0.00014744897959183674, + "loss": 1.5407, + "step": 412 + }, + { + "epoch": 8.36, + "learning_rate": 0.0001473214285714286, + "loss": 1.5698, + "step": 413 + }, + { + "epoch": 8.38, + "learning_rate": 0.0001471938775510204, + "loss": 1.4282, + "step": 414 + }, + { + "epoch": 8.4, + "learning_rate": 0.00014706632653061226, + "loss": 1.5301, + "step": 415 + }, + { + "epoch": 8.42, + "learning_rate": 0.0001469387755102041, + "loss": 1.5083, + "step": 416 + }, + { + "epoch": 8.44, + "learning_rate": 0.00014681122448979592, + "loss": 1.5712, + "step": 417 + }, + { + "epoch": 8.46, + "learning_rate": 0.00014668367346938775, + "loss": 1.4363, + "step": 418 + }, + { + "epoch": 8.48, + "learning_rate": 0.0001465561224489796, + "loss": 1.4463, + "step": 419 + }, + { + "epoch": 8.5, + "learning_rate": 0.00014642857142857141, + "loss": 1.4738, + "step": 420 + }, + { + "epoch": 8.52, + "learning_rate": 0.00014630102040816327, + "loss": 1.5396, + "step": 421 + }, + { + "epoch": 8.54, + "learning_rate": 0.0001461734693877551, + "loss": 1.4384, + "step": 422 + }, + { + "epoch": 8.56, + "learning_rate": 
0.00014604591836734696, + "loss": 1.5345, + "step": 423 + }, + { + "epoch": 8.58, + "learning_rate": 0.0001459183673469388, + "loss": 1.5355, + "step": 424 + }, + { + "epoch": 8.6, + "learning_rate": 0.00014579081632653062, + "loss": 1.5188, + "step": 425 + }, + { + "epoch": 8.62, + "learning_rate": 0.00014566326530612245, + "loss": 1.5575, + "step": 426 + }, + { + "epoch": 8.64, + "learning_rate": 0.00014553571428571428, + "loss": 1.5279, + "step": 427 + }, + { + "epoch": 8.66, + "learning_rate": 0.00014540816326530614, + "loss": 1.5484, + "step": 428 + }, + { + "epoch": 8.68, + "learning_rate": 0.00014528061224489797, + "loss": 1.4878, + "step": 429 + }, + { + "epoch": 8.7, + "learning_rate": 0.0001451530612244898, + "loss": 1.503, + "step": 430 + }, + { + "epoch": 8.72, + "learning_rate": 0.00014502551020408163, + "loss": 1.4723, + "step": 431 + }, + { + "epoch": 8.74, + "learning_rate": 0.0001448979591836735, + "loss": 1.5579, + "step": 432 + }, + { + "epoch": 8.76, + "learning_rate": 0.0001447704081632653, + "loss": 1.4789, + "step": 433 + }, + { + "epoch": 8.78, + "learning_rate": 0.00014464285714285715, + "loss": 1.5501, + "step": 434 + }, + { + "epoch": 8.8, + "learning_rate": 0.00014451530612244899, + "loss": 1.5204, + "step": 435 + }, + { + "epoch": 8.82, + "learning_rate": 0.00014438775510204084, + "loss": 1.5489, + "step": 436 + }, + { + "epoch": 8.84, + "learning_rate": 0.00014426020408163265, + "loss": 1.5464, + "step": 437 + }, + { + "epoch": 8.86, + "learning_rate": 0.0001441326530612245, + "loss": 1.5896, + "step": 438 + }, + { + "epoch": 8.88, + "learning_rate": 0.00014400510204081634, + "loss": 1.5465, + "step": 439 + }, + { + "epoch": 8.9, + "learning_rate": 0.00014387755102040817, + "loss": 1.5094, + "step": 440 + }, + { + "epoch": 8.92, + "learning_rate": 0.00014375, + "loss": 1.5144, + "step": 441 + }, + { + "epoch": 8.94, + "learning_rate": 0.00014362244897959186, + "loss": 1.4919, + "step": 442 + }, + { + "epoch": 8.96, + "learning_rate": 0.00014349489795918366, + "loss": 1.4702, + "step": 443 + }, + { + "epoch": 8.98, + "learning_rate": 0.00014336734693877552, + "loss": 1.4996, + "step": 444 + }, + { + "epoch": 9.0, + "learning_rate": 0.00014323979591836735, + "loss": 1.5503, + "step": 445 + }, + { + "epoch": 9.02, + "learning_rate": 0.00014311224489795918, + "loss": 1.4125, + "step": 446 + }, + { + "epoch": 9.04, + "learning_rate": 0.00014298469387755104, + "loss": 1.4722, + "step": 447 + }, + { + "epoch": 9.06, + "learning_rate": 0.00014285714285714287, + "loss": 1.5199, + "step": 448 + }, + { + "epoch": 9.09, + "learning_rate": 0.0001427295918367347, + "loss": 1.4571, + "step": 449 + }, + { + "epoch": 9.11, + "learning_rate": 0.00014260204081632653, + "loss": 1.4996, + "step": 450 + }, + { + "epoch": 9.13, + "learning_rate": 0.0001424744897959184, + "loss": 1.4092, + "step": 451 + }, + { + "epoch": 9.15, + "learning_rate": 0.00014234693877551022, + "loss": 1.4198, + "step": 452 + }, + { + "epoch": 9.17, + "learning_rate": 0.00014221938775510205, + "loss": 1.4916, + "step": 453 + }, + { + "epoch": 9.19, + "learning_rate": 0.00014209183673469388, + "loss": 1.5051, + "step": 454 + }, + { + "epoch": 9.21, + "learning_rate": 0.00014196428571428574, + "loss": 1.4321, + "step": 455 + }, + { + "epoch": 9.23, + "learning_rate": 0.00014183673469387754, + "loss": 1.4097, + "step": 456 + }, + { + "epoch": 9.25, + "learning_rate": 0.0001417091836734694, + "loss": 1.4853, + "step": 457 + }, + { + "epoch": 9.27, + "learning_rate": 0.00014158163265306123, + "loss": 1.4593, + 
"step": 458 + }, + { + "epoch": 9.29, + "learning_rate": 0.00014145408163265306, + "loss": 1.3729, + "step": 459 + }, + { + "epoch": 9.31, + "learning_rate": 0.0001413265306122449, + "loss": 1.4467, + "step": 460 + }, + { + "epoch": 9.33, + "learning_rate": 0.00014119897959183675, + "loss": 1.4467, + "step": 461 + }, + { + "epoch": 9.35, + "learning_rate": 0.00014107142857142858, + "loss": 1.4785, + "step": 462 + }, + { + "epoch": 9.37, + "learning_rate": 0.0001409438775510204, + "loss": 1.4089, + "step": 463 + }, + { + "epoch": 9.39, + "learning_rate": 0.00014081632653061224, + "loss": 1.5026, + "step": 464 + }, + { + "epoch": 9.41, + "learning_rate": 0.0001406887755102041, + "loss": 1.4857, + "step": 465 + }, + { + "epoch": 9.43, + "learning_rate": 0.0001405612244897959, + "loss": 1.3745, + "step": 466 + }, + { + "epoch": 9.45, + "learning_rate": 0.00014043367346938776, + "loss": 1.4733, + "step": 467 + }, + { + "epoch": 9.47, + "learning_rate": 0.0001403061224489796, + "loss": 1.5212, + "step": 468 + }, + { + "epoch": 9.49, + "learning_rate": 0.00014017857142857142, + "loss": 1.5398, + "step": 469 + }, + { + "epoch": 9.51, + "learning_rate": 0.00014005102040816328, + "loss": 1.478, + "step": 470 + }, + { + "epoch": 9.53, + "learning_rate": 0.0001399234693877551, + "loss": 1.496, + "step": 471 + }, + { + "epoch": 9.55, + "learning_rate": 0.00013979591836734694, + "loss": 1.4837, + "step": 472 + }, + { + "epoch": 9.57, + "learning_rate": 0.00013966836734693878, + "loss": 1.4724, + "step": 473 + }, + { + "epoch": 9.59, + "learning_rate": 0.00013954081632653063, + "loss": 1.4828, + "step": 474 + }, + { + "epoch": 9.61, + "learning_rate": 0.00013941326530612246, + "loss": 1.5012, + "step": 475 + }, + { + "epoch": 9.63, + "learning_rate": 0.0001392857142857143, + "loss": 1.4879, + "step": 476 + }, + { + "epoch": 9.65, + "learning_rate": 0.00013915816326530613, + "loss": 1.4196, + "step": 477 + }, + { + "epoch": 9.67, + "learning_rate": 0.00013903061224489798, + "loss": 1.4915, + "step": 478 + }, + { + "epoch": 9.69, + "learning_rate": 0.0001389030612244898, + "loss": 1.3878, + "step": 479 + }, + { + "epoch": 9.71, + "learning_rate": 0.00013877551020408165, + "loss": 1.466, + "step": 480 + }, + { + "epoch": 9.73, + "learning_rate": 0.00013864795918367348, + "loss": 1.4582, + "step": 481 + }, + { + "epoch": 9.75, + "learning_rate": 0.0001385204081632653, + "loss": 1.533, + "step": 482 + }, + { + "epoch": 9.77, + "learning_rate": 0.00013839285714285714, + "loss": 1.4697, + "step": 483 + }, + { + "epoch": 9.79, + "learning_rate": 0.000138265306122449, + "loss": 1.3989, + "step": 484 + }, + { + "epoch": 9.81, + "learning_rate": 0.00013813775510204083, + "loss": 1.4361, + "step": 485 + }, + { + "epoch": 9.83, + "learning_rate": 0.00013801020408163266, + "loss": 1.5271, + "step": 486 + }, + { + "epoch": 9.85, + "learning_rate": 0.0001378826530612245, + "loss": 1.4905, + "step": 487 + }, + { + "epoch": 9.87, + "learning_rate": 0.00013775510204081635, + "loss": 1.4757, + "step": 488 + }, + { + "epoch": 9.89, + "learning_rate": 0.00013762755102040815, + "loss": 1.5485, + "step": 489 + }, + { + "epoch": 9.91, + "learning_rate": 0.0001375, + "loss": 1.4783, + "step": 490 + }, + { + "epoch": 9.93, + "learning_rate": 0.00013737244897959184, + "loss": 1.4849, + "step": 491 + }, + { + "epoch": 9.96, + "learning_rate": 0.00013724489795918367, + "loss": 1.5382, + "step": 492 + }, + { + "epoch": 9.98, + "learning_rate": 0.00013711734693877553, + "loss": 1.4902, + "step": 493 + }, + { + "epoch": 10.0, + 
"learning_rate": 0.00013698979591836736, + "loss": 1.4865, + "step": 494 + }, + { + "epoch": 10.02, + "learning_rate": 0.0001368622448979592, + "loss": 1.4436, + "step": 495 + }, + { + "epoch": 10.04, + "learning_rate": 0.00013673469387755102, + "loss": 1.408, + "step": 496 + }, + { + "epoch": 10.06, + "learning_rate": 0.00013660714285714288, + "loss": 1.4764, + "step": 497 + }, + { + "epoch": 10.08, + "learning_rate": 0.0001364795918367347, + "loss": 1.4646, + "step": 498 + }, + { + "epoch": 10.1, + "learning_rate": 0.00013635204081632654, + "loss": 1.406, + "step": 499 + }, + { + "epoch": 10.12, + "learning_rate": 0.00013622448979591837, + "loss": 1.4785, + "step": 500 + }, + { + "epoch": 10.14, + "learning_rate": 0.00013609693877551023, + "loss": 1.4117, + "step": 501 + }, + { + "epoch": 10.16, + "learning_rate": 0.00013596938775510203, + "loss": 1.4108, + "step": 502 + }, + { + "epoch": 10.18, + "learning_rate": 0.0001358418367346939, + "loss": 1.4155, + "step": 503 + }, + { + "epoch": 10.2, + "learning_rate": 0.00013571428571428572, + "loss": 1.4021, + "step": 504 + }, + { + "epoch": 10.22, + "learning_rate": 0.00013558673469387755, + "loss": 1.411, + "step": 505 + }, + { + "epoch": 10.24, + "learning_rate": 0.00013545918367346938, + "loss": 1.3851, + "step": 506 + }, + { + "epoch": 10.26, + "learning_rate": 0.00013533163265306124, + "loss": 1.387, + "step": 507 + }, + { + "epoch": 10.28, + "learning_rate": 0.00013520408163265305, + "loss": 1.4163, + "step": 508 + }, + { + "epoch": 10.3, + "learning_rate": 0.0001350765306122449, + "loss": 1.3343, + "step": 509 + }, + { + "epoch": 10.32, + "learning_rate": 0.00013494897959183673, + "loss": 1.4811, + "step": 510 + }, + { + "epoch": 10.34, + "learning_rate": 0.0001348214285714286, + "loss": 1.4086, + "step": 511 + }, + { + "epoch": 10.36, + "learning_rate": 0.0001346938775510204, + "loss": 1.3879, + "step": 512 + }, + { + "epoch": 10.38, + "learning_rate": 0.00013456632653061225, + "loss": 1.4204, + "step": 513 + }, + { + "epoch": 10.4, + "learning_rate": 0.00013443877551020408, + "loss": 1.4158, + "step": 514 + }, + { + "epoch": 10.42, + "learning_rate": 0.00013431122448979592, + "loss": 1.4521, + "step": 515 + }, + { + "epoch": 10.44, + "learning_rate": 0.00013418367346938777, + "loss": 1.4196, + "step": 516 + }, + { + "epoch": 10.46, + "learning_rate": 0.0001340561224489796, + "loss": 1.4361, + "step": 517 + }, + { + "epoch": 10.48, + "learning_rate": 0.00013392857142857144, + "loss": 1.4482, + "step": 518 + }, + { + "epoch": 10.5, + "learning_rate": 0.00013380102040816327, + "loss": 1.4801, + "step": 519 + }, + { + "epoch": 10.52, + "learning_rate": 0.00013367346938775512, + "loss": 1.4556, + "step": 520 + }, + { + "epoch": 10.54, + "learning_rate": 0.00013354591836734695, + "loss": 1.3902, + "step": 521 + }, + { + "epoch": 10.56, + "learning_rate": 0.00013341836734693879, + "loss": 1.4269, + "step": 522 + }, + { + "epoch": 10.58, + "learning_rate": 0.00013329081632653062, + "loss": 1.4899, + "step": 523 + }, + { + "epoch": 10.6, + "learning_rate": 0.00013316326530612247, + "loss": 1.3952, + "step": 524 + }, + { + "epoch": 10.62, + "learning_rate": 0.00013303571428571428, + "loss": 1.4116, + "step": 525 + }, + { + "epoch": 10.64, + "learning_rate": 0.00013290816326530614, + "loss": 1.4583, + "step": 526 + }, + { + "epoch": 10.66, + "learning_rate": 0.00013278061224489797, + "loss": 1.4466, + "step": 527 + }, + { + "epoch": 10.68, + "learning_rate": 0.0001326530612244898, + "loss": 1.4242, + "step": 528 + }, + { + "epoch": 10.7, + 
"learning_rate": 0.00013252551020408163, + "loss": 1.3717, + "step": 529 + }, + { + "epoch": 10.72, + "learning_rate": 0.0001323979591836735, + "loss": 1.4583, + "step": 530 + }, + { + "epoch": 10.74, + "learning_rate": 0.0001322704081632653, + "loss": 1.4185, + "step": 531 + }, + { + "epoch": 10.76, + "learning_rate": 0.00013214285714285715, + "loss": 1.4287, + "step": 532 + }, + { + "epoch": 10.78, + "learning_rate": 0.00013201530612244898, + "loss": 1.4385, + "step": 533 + }, + { + "epoch": 10.8, + "learning_rate": 0.00013188775510204084, + "loss": 1.453, + "step": 534 + }, + { + "epoch": 10.83, + "learning_rate": 0.00013176020408163264, + "loss": 1.4161, + "step": 535 + }, + { + "epoch": 10.85, + "learning_rate": 0.0001316326530612245, + "loss": 1.457, + "step": 536 + }, + { + "epoch": 10.87, + "learning_rate": 0.00013150510204081633, + "loss": 1.4367, + "step": 537 + }, + { + "epoch": 10.89, + "learning_rate": 0.00013137755102040816, + "loss": 1.4256, + "step": 538 + }, + { + "epoch": 10.91, + "learning_rate": 0.00013125000000000002, + "loss": 1.424, + "step": 539 + }, + { + "epoch": 10.93, + "learning_rate": 0.00013112244897959185, + "loss": 1.3923, + "step": 540 + }, + { + "epoch": 10.95, + "learning_rate": 0.00013099489795918368, + "loss": 1.4225, + "step": 541 + }, + { + "epoch": 10.97, + "learning_rate": 0.0001308673469387755, + "loss": 1.3969, + "step": 542 + }, + { + "epoch": 10.99, + "learning_rate": 0.00013073979591836737, + "loss": 1.4446, + "step": 543 + }, + { + "epoch": 11.01, + "learning_rate": 0.00013061224489795917, + "loss": 1.4375, + "step": 544 + }, + { + "epoch": 11.03, + "learning_rate": 0.00013048469387755103, + "loss": 1.4064, + "step": 545 + }, + { + "epoch": 11.05, + "learning_rate": 0.00013035714285714286, + "loss": 1.3454, + "step": 546 + }, + { + "epoch": 11.07, + "learning_rate": 0.00013022959183673472, + "loss": 1.3234, + "step": 547 + }, + { + "epoch": 11.09, + "learning_rate": 0.00013010204081632652, + "loss": 1.3759, + "step": 548 + }, + { + "epoch": 11.11, + "learning_rate": 0.00012997448979591838, + "loss": 1.4221, + "step": 549 + }, + { + "epoch": 11.13, + "learning_rate": 0.0001298469387755102, + "loss": 1.4261, + "step": 550 + }, + { + "epoch": 11.15, + "learning_rate": 0.00012971938775510204, + "loss": 1.3341, + "step": 551 + }, + { + "epoch": 11.17, + "learning_rate": 0.00012959183673469387, + "loss": 1.3994, + "step": 552 + }, + { + "epoch": 11.19, + "learning_rate": 0.00012946428571428573, + "loss": 1.3894, + "step": 553 + }, + { + "epoch": 11.21, + "learning_rate": 0.00012933673469387754, + "loss": 1.3585, + "step": 554 + }, + { + "epoch": 11.23, + "learning_rate": 0.0001292091836734694, + "loss": 1.3763, + "step": 555 + }, + { + "epoch": 11.25, + "learning_rate": 0.00012908163265306123, + "loss": 1.3623, + "step": 556 + }, + { + "epoch": 11.27, + "learning_rate": 0.00012895408163265306, + "loss": 1.3907, + "step": 557 + }, + { + "epoch": 11.29, + "learning_rate": 0.0001288265306122449, + "loss": 1.3807, + "step": 558 + }, + { + "epoch": 11.31, + "learning_rate": 0.00012869897959183674, + "loss": 1.4045, + "step": 559 + }, + { + "epoch": 11.33, + "learning_rate": 0.00012857142857142858, + "loss": 1.4038, + "step": 560 + }, + { + "epoch": 11.35, + "learning_rate": 0.0001284438775510204, + "loss": 1.3466, + "step": 561 + }, + { + "epoch": 11.37, + "learning_rate": 0.00012831632653061226, + "loss": 1.3449, + "step": 562 + }, + { + "epoch": 11.39, + "learning_rate": 0.0001281887755102041, + "loss": 1.3866, + "step": 563 + }, + { + "epoch": 11.41, 
+ "learning_rate": 0.00012806122448979593, + "loss": 1.3106, + "step": 564 + }, + { + "epoch": 11.43, + "learning_rate": 0.00012793367346938776, + "loss": 1.4414, + "step": 565 + }, + { + "epoch": 11.45, + "learning_rate": 0.00012780612244897962, + "loss": 1.3737, + "step": 566 + }, + { + "epoch": 11.47, + "learning_rate": 0.00012767857142857142, + "loss": 1.4053, + "step": 567 + }, + { + "epoch": 11.49, + "learning_rate": 0.00012755102040816328, + "loss": 1.4561, + "step": 568 + }, + { + "epoch": 11.51, + "learning_rate": 0.0001274234693877551, + "loss": 1.3684, + "step": 569 + }, + { + "epoch": 11.53, + "learning_rate": 0.00012729591836734697, + "loss": 1.3117, + "step": 570 + }, + { + "epoch": 11.55, + "learning_rate": 0.00012716836734693877, + "loss": 1.3474, + "step": 571 + }, + { + "epoch": 11.57, + "learning_rate": 0.00012704081632653063, + "loss": 1.3804, + "step": 572 + }, + { + "epoch": 11.59, + "learning_rate": 0.00012691326530612246, + "loss": 1.3656, + "step": 573 + }, + { + "epoch": 11.61, + "learning_rate": 0.0001267857142857143, + "loss": 1.3133, + "step": 574 + }, + { + "epoch": 11.63, + "learning_rate": 0.00012665816326530612, + "loss": 1.4077, + "step": 575 + }, + { + "epoch": 11.65, + "learning_rate": 0.00012653061224489798, + "loss": 1.4087, + "step": 576 + }, + { + "epoch": 11.67, + "learning_rate": 0.00012640306122448978, + "loss": 1.3524, + "step": 577 + }, + { + "epoch": 11.7, + "learning_rate": 0.00012627551020408164, + "loss": 1.3481, + "step": 578 + }, + { + "epoch": 11.72, + "learning_rate": 0.00012614795918367347, + "loss": 1.4497, + "step": 579 + }, + { + "epoch": 11.74, + "learning_rate": 0.0001260204081632653, + "loss": 1.3866, + "step": 580 + }, + { + "epoch": 11.76, + "learning_rate": 0.00012589285714285713, + "loss": 1.42, + "step": 581 + }, + { + "epoch": 11.78, + "learning_rate": 0.000125765306122449, + "loss": 1.3562, + "step": 582 + }, + { + "epoch": 11.8, + "learning_rate": 0.00012563775510204082, + "loss": 1.3249, + "step": 583 + }, + { + "epoch": 11.82, + "learning_rate": 0.00012551020408163265, + "loss": 1.4277, + "step": 584 + }, + { + "epoch": 11.84, + "learning_rate": 0.0001253826530612245, + "loss": 1.3734, + "step": 585 + }, + { + "epoch": 11.86, + "learning_rate": 0.00012525510204081634, + "loss": 1.3765, + "step": 586 + }, + { + "epoch": 11.88, + "learning_rate": 0.00012512755102040817, + "loss": 1.4153, + "step": 587 + }, + { + "epoch": 11.9, + "learning_rate": 0.000125, + "loss": 1.3847, + "step": 588 + }, + { + "epoch": 11.92, + "learning_rate": 0.00012487244897959186, + "loss": 1.3824, + "step": 589 + }, + { + "epoch": 11.94, + "learning_rate": 0.00012474489795918366, + "loss": 1.3938, + "step": 590 + }, + { + "epoch": 11.96, + "learning_rate": 0.00012461734693877552, + "loss": 1.4143, + "step": 591 + }, + { + "epoch": 11.98, + "learning_rate": 0.00012448979591836735, + "loss": 1.3794, + "step": 592 + }, + { + "epoch": 12.0, + "learning_rate": 0.00012436224489795918, + "loss": 1.3755, + "step": 593 + }, + { + "epoch": 12.02, + "learning_rate": 0.00012423469387755101, + "loss": 1.3736, + "step": 594 + }, + { + "epoch": 12.04, + "learning_rate": 0.00012410714285714287, + "loss": 1.2957, + "step": 595 + }, + { + "epoch": 12.06, + "learning_rate": 0.0001239795918367347, + "loss": 1.2996, + "step": 596 + }, + { + "epoch": 12.08, + "learning_rate": 0.00012385204081632653, + "loss": 1.3648, + "step": 597 + }, + { + "epoch": 12.1, + "learning_rate": 0.00012372448979591837, + "loss": 1.3031, + "step": 598 + }, + { + "epoch": 12.12, + 
"learning_rate": 0.00012359693877551022, + "loss": 1.2933, + "step": 599 + }, + { + "epoch": 12.14, + "learning_rate": 0.00012346938775510203, + "loss": 1.322, + "step": 600 + }, + { + "epoch": 12.16, + "learning_rate": 0.00012334183673469389, + "loss": 1.3123, + "step": 601 + }, + { + "epoch": 12.18, + "learning_rate": 0.00012321428571428572, + "loss": 1.3187, + "step": 602 + }, + { + "epoch": 12.2, + "learning_rate": 0.00012308673469387755, + "loss": 1.3353, + "step": 603 + }, + { + "epoch": 12.22, + "learning_rate": 0.0001229591836734694, + "loss": 1.3221, + "step": 604 + }, + { + "epoch": 12.24, + "learning_rate": 0.00012283163265306124, + "loss": 1.3458, + "step": 605 + }, + { + "epoch": 12.26, + "learning_rate": 0.00012270408163265307, + "loss": 1.275, + "step": 606 + }, + { + "epoch": 12.28, + "learning_rate": 0.0001225765306122449, + "loss": 1.3455, + "step": 607 + }, + { + "epoch": 12.3, + "learning_rate": 0.00012244897959183676, + "loss": 1.2769, + "step": 608 + }, + { + "epoch": 12.32, + "learning_rate": 0.00012232142857142859, + "loss": 1.3201, + "step": 609 + }, + { + "epoch": 12.34, + "learning_rate": 0.00012219387755102042, + "loss": 1.3073, + "step": 610 + }, + { + "epoch": 12.36, + "learning_rate": 0.00012206632653061225, + "loss": 1.3103, + "step": 611 + }, + { + "epoch": 12.38, + "learning_rate": 0.00012193877551020409, + "loss": 1.4437, + "step": 612 + }, + { + "epoch": 12.4, + "learning_rate": 0.00012181122448979591, + "loss": 1.3086, + "step": 613 + }, + { + "epoch": 12.42, + "learning_rate": 0.00012168367346938775, + "loss": 1.3867, + "step": 614 + }, + { + "epoch": 12.44, + "learning_rate": 0.0001215561224489796, + "loss": 1.2565, + "step": 615 + }, + { + "epoch": 12.46, + "learning_rate": 0.00012142857142857143, + "loss": 1.335, + "step": 616 + }, + { + "epoch": 12.48, + "learning_rate": 0.00012130102040816327, + "loss": 1.3423, + "step": 617 + }, + { + "epoch": 12.5, + "learning_rate": 0.00012117346938775512, + "loss": 1.3433, + "step": 618 + }, + { + "epoch": 12.52, + "learning_rate": 0.00012104591836734695, + "loss": 1.3387, + "step": 619 + }, + { + "epoch": 12.55, + "learning_rate": 0.00012091836734693878, + "loss": 1.3923, + "step": 620 + }, + { + "epoch": 12.57, + "learning_rate": 0.00012079081632653062, + "loss": 1.3774, + "step": 621 + }, + { + "epoch": 12.59, + "learning_rate": 0.00012066326530612247, + "loss": 1.3203, + "step": 622 + }, + { + "epoch": 12.61, + "learning_rate": 0.00012053571428571429, + "loss": 1.2924, + "step": 623 + }, + { + "epoch": 12.63, + "learning_rate": 0.00012040816326530613, + "loss": 1.3292, + "step": 624 + }, + { + "epoch": 12.65, + "learning_rate": 0.00012028061224489798, + "loss": 1.3161, + "step": 625 + }, + { + "epoch": 12.67, + "learning_rate": 0.00012015306122448979, + "loss": 1.352, + "step": 626 + }, + { + "epoch": 12.69, + "learning_rate": 0.00012002551020408164, + "loss": 1.3577, + "step": 627 + }, + { + "epoch": 12.71, + "learning_rate": 0.00011989795918367348, + "loss": 1.3575, + "step": 628 + }, + { + "epoch": 12.73, + "learning_rate": 0.0001197704081632653, + "loss": 1.3727, + "step": 629 + }, + { + "epoch": 12.75, + "learning_rate": 0.00011964285714285714, + "loss": 1.3312, + "step": 630 + }, + { + "epoch": 12.77, + "learning_rate": 0.00011951530612244899, + "loss": 1.3378, + "step": 631 + }, + { + "epoch": 12.79, + "learning_rate": 0.00011938775510204083, + "loss": 1.295, + "step": 632 + }, + { + "epoch": 12.81, + "learning_rate": 0.00011926020408163265, + "loss": 1.3447, + "step": 633 + }, + { + "epoch": 12.83, 
+ "learning_rate": 0.0001191326530612245, + "loss": 1.3835, + "step": 634 + }, + { + "epoch": 12.85, + "learning_rate": 0.00011900510204081634, + "loss": 1.3222, + "step": 635 + }, + { + "epoch": 12.87, + "learning_rate": 0.00011887755102040817, + "loss": 1.2851, + "step": 636 + }, + { + "epoch": 12.89, + "learning_rate": 0.00011875, + "loss": 1.2723, + "step": 637 + }, + { + "epoch": 12.91, + "learning_rate": 0.00011862244897959184, + "loss": 1.3924, + "step": 638 + }, + { + "epoch": 12.93, + "learning_rate": 0.00011849489795918368, + "loss": 1.4625, + "step": 639 + }, + { + "epoch": 12.95, + "learning_rate": 0.00011836734693877552, + "loss": 1.3245, + "step": 640 + }, + { + "epoch": 12.97, + "learning_rate": 0.00011823979591836736, + "loss": 1.4042, + "step": 641 + }, + { + "epoch": 12.99, + "learning_rate": 0.00011811224489795918, + "loss": 1.3761, + "step": 642 + }, + { + "epoch": 13.01, + "learning_rate": 0.00011798469387755103, + "loss": 1.3376, + "step": 643 + }, + { + "epoch": 13.03, + "learning_rate": 0.00011785714285714287, + "loss": 1.2174, + "step": 644 + }, + { + "epoch": 13.05, + "learning_rate": 0.00011772959183673471, + "loss": 1.3602, + "step": 645 + }, + { + "epoch": 13.07, + "learning_rate": 0.00011760204081632653, + "loss": 1.3002, + "step": 646 + }, + { + "epoch": 13.09, + "learning_rate": 0.00011747448979591838, + "loss": 1.2262, + "step": 647 + }, + { + "epoch": 13.11, + "learning_rate": 0.00011734693877551022, + "loss": 1.3048, + "step": 648 + }, + { + "epoch": 13.13, + "learning_rate": 0.00011721938775510204, + "loss": 1.2231, + "step": 649 + }, + { + "epoch": 13.15, + "learning_rate": 0.00011709183673469388, + "loss": 1.2996, + "step": 650 + }, + { + "epoch": 13.17, + "learning_rate": 0.00011696428571428573, + "loss": 1.2708, + "step": 651 + }, + { + "epoch": 13.19, + "learning_rate": 0.00011683673469387754, + "loss": 1.2776, + "step": 652 + }, + { + "epoch": 13.21, + "learning_rate": 0.00011670918367346939, + "loss": 1.248, + "step": 653 + }, + { + "epoch": 13.23, + "learning_rate": 0.00011658163265306123, + "loss": 1.2582, + "step": 654 + }, + { + "epoch": 13.25, + "learning_rate": 0.00011645408163265305, + "loss": 1.3011, + "step": 655 + }, + { + "epoch": 13.27, + "learning_rate": 0.0001163265306122449, + "loss": 1.2969, + "step": 656 + }, + { + "epoch": 13.29, + "learning_rate": 0.00011619897959183674, + "loss": 1.2454, + "step": 657 + }, + { + "epoch": 13.31, + "learning_rate": 0.00011607142857142858, + "loss": 1.1914, + "step": 658 + }, + { + "epoch": 13.33, + "learning_rate": 0.00011594387755102041, + "loss": 1.34, + "step": 659 + }, + { + "epoch": 13.35, + "learning_rate": 0.00011581632653061225, + "loss": 1.2828, + "step": 660 + }, + { + "epoch": 13.37, + "learning_rate": 0.00011568877551020409, + "loss": 1.2962, + "step": 661 + }, + { + "epoch": 13.39, + "learning_rate": 0.00011556122448979592, + "loss": 1.3334, + "step": 662 + }, + { + "epoch": 13.42, + "learning_rate": 0.00011543367346938776, + "loss": 1.2832, + "step": 663 + }, + { + "epoch": 13.44, + "learning_rate": 0.00011530612244897961, + "loss": 1.3012, + "step": 664 + }, + { + "epoch": 13.46, + "learning_rate": 0.00011517857142857143, + "loss": 1.2857, + "step": 665 + }, + { + "epoch": 13.48, + "learning_rate": 0.00011505102040816327, + "loss": 1.2855, + "step": 666 + }, + { + "epoch": 13.5, + "learning_rate": 0.00011492346938775512, + "loss": 1.3077, + "step": 667 + }, + { + "epoch": 13.52, + "learning_rate": 0.00011479591836734696, + "loss": 1.3139, + "step": 668 + }, + { + "epoch": 13.54, + 
"learning_rate": 0.00011466836734693878, + "loss": 1.3138, + "step": 669 + }, + { + "epoch": 13.56, + "learning_rate": 0.00011454081632653062, + "loss": 1.2808, + "step": 670 + }, + { + "epoch": 13.58, + "learning_rate": 0.00011441326530612247, + "loss": 1.2492, + "step": 671 + }, + { + "epoch": 13.6, + "learning_rate": 0.00011428571428571428, + "loss": 1.2027, + "step": 672 + }, + { + "epoch": 13.62, + "learning_rate": 0.00011415816326530613, + "loss": 1.33, + "step": 673 + }, + { + "epoch": 13.64, + "learning_rate": 0.00011403061224489797, + "loss": 1.3112, + "step": 674 + }, + { + "epoch": 13.66, + "learning_rate": 0.00011390306122448979, + "loss": 1.2772, + "step": 675 + }, + { + "epoch": 13.68, + "learning_rate": 0.00011377551020408163, + "loss": 1.2701, + "step": 676 + }, + { + "epoch": 13.7, + "learning_rate": 0.00011364795918367348, + "loss": 1.1973, + "step": 677 + }, + { + "epoch": 13.72, + "learning_rate": 0.0001135204081632653, + "loss": 1.3124, + "step": 678 + }, + { + "epoch": 13.74, + "learning_rate": 0.00011339285714285714, + "loss": 1.3085, + "step": 679 + }, + { + "epoch": 13.76, + "learning_rate": 0.00011326530612244898, + "loss": 1.3457, + "step": 680 + }, + { + "epoch": 13.78, + "learning_rate": 0.00011313775510204083, + "loss": 1.3338, + "step": 681 + }, + { + "epoch": 13.8, + "learning_rate": 0.00011301020408163266, + "loss": 1.2753, + "step": 682 + }, + { + "epoch": 13.82, + "learning_rate": 0.00011288265306122449, + "loss": 1.2786, + "step": 683 + }, + { + "epoch": 13.84, + "learning_rate": 0.00011275510204081634, + "loss": 1.2584, + "step": 684 + }, + { + "epoch": 13.86, + "learning_rate": 0.00011262755102040817, + "loss": 1.2779, + "step": 685 + }, + { + "epoch": 13.88, + "learning_rate": 0.00011250000000000001, + "loss": 1.3502, + "step": 686 + }, + { + "epoch": 13.9, + "learning_rate": 0.00011237244897959185, + "loss": 1.3251, + "step": 687 + }, + { + "epoch": 13.92, + "learning_rate": 0.00011224489795918367, + "loss": 1.273, + "step": 688 + }, + { + "epoch": 13.94, + "learning_rate": 0.00011211734693877552, + "loss": 1.3341, + "step": 689 + }, + { + "epoch": 13.96, + "learning_rate": 0.00011198979591836736, + "loss": 1.2654, + "step": 690 + }, + { + "epoch": 13.98, + "learning_rate": 0.00011186224489795918, + "loss": 1.3333, + "step": 691 + }, + { + "epoch": 14.0, + "learning_rate": 0.00011173469387755102, + "loss": 1.3246, + "step": 692 + }, + { + "epoch": 14.02, + "learning_rate": 0.00011160714285714287, + "loss": 1.2547, + "step": 693 + }, + { + "epoch": 14.04, + "learning_rate": 0.00011147959183673471, + "loss": 1.208, + "step": 694 + }, + { + "epoch": 14.06, + "learning_rate": 0.00011135204081632653, + "loss": 1.223, + "step": 695 + }, + { + "epoch": 14.08, + "learning_rate": 0.00011122448979591837, + "loss": 1.2483, + "step": 696 + }, + { + "epoch": 14.1, + "learning_rate": 0.00011109693877551022, + "loss": 1.2823, + "step": 697 + }, + { + "epoch": 14.12, + "learning_rate": 0.00011096938775510204, + "loss": 1.2013, + "step": 698 + }, + { + "epoch": 14.14, + "learning_rate": 0.00011084183673469388, + "loss": 1.1883, + "step": 699 + }, + { + "epoch": 14.16, + "learning_rate": 0.00011071428571428572, + "loss": 1.2364, + "step": 700 + }, + { + "epoch": 14.18, + "learning_rate": 0.00011058673469387754, + "loss": 1.2069, + "step": 701 + }, + { + "epoch": 14.2, + "learning_rate": 0.00011045918367346939, + "loss": 1.1968, + "step": 702 + }, + { + "epoch": 14.22, + "learning_rate": 0.00011033163265306123, + "loss": 1.2236, + "step": 703 + }, + { + "epoch": 14.24, 
+ "learning_rate": 0.00011020408163265306, + "loss": 1.1942, + "step": 704 + }, + { + "epoch": 14.26, + "learning_rate": 0.0001100765306122449, + "loss": 1.2561, + "step": 705 + }, + { + "epoch": 14.29, + "learning_rate": 0.00010994897959183674, + "loss": 1.1839, + "step": 706 + }, + { + "epoch": 14.31, + "learning_rate": 0.00010982142857142858, + "loss": 1.2128, + "step": 707 + }, + { + "epoch": 14.33, + "learning_rate": 0.00010969387755102041, + "loss": 1.3086, + "step": 708 + }, + { + "epoch": 14.35, + "learning_rate": 0.00010956632653061226, + "loss": 1.2379, + "step": 709 + }, + { + "epoch": 14.37, + "learning_rate": 0.0001094387755102041, + "loss": 1.176, + "step": 710 + }, + { + "epoch": 14.39, + "learning_rate": 0.00010931122448979592, + "loss": 1.2105, + "step": 711 + }, + { + "epoch": 14.41, + "learning_rate": 0.00010918367346938776, + "loss": 1.2149, + "step": 712 + }, + { + "epoch": 14.43, + "learning_rate": 0.0001090561224489796, + "loss": 1.2392, + "step": 713 + }, + { + "epoch": 14.45, + "learning_rate": 0.00010892857142857142, + "loss": 1.2471, + "step": 714 + }, + { + "epoch": 14.47, + "learning_rate": 0.00010880102040816327, + "loss": 1.2561, + "step": 715 + }, + { + "epoch": 14.49, + "learning_rate": 0.00010867346938775511, + "loss": 1.2179, + "step": 716 + }, + { + "epoch": 14.51, + "learning_rate": 0.00010854591836734696, + "loss": 1.2459, + "step": 717 + }, + { + "epoch": 14.53, + "learning_rate": 0.00010841836734693877, + "loss": 1.2933, + "step": 718 + }, + { + "epoch": 14.55, + "learning_rate": 0.00010829081632653062, + "loss": 1.2862, + "step": 719 + }, + { + "epoch": 14.57, + "learning_rate": 0.00010816326530612246, + "loss": 1.2976, + "step": 720 + }, + { + "epoch": 14.59, + "learning_rate": 0.00010803571428571428, + "loss": 1.231, + "step": 721 + }, + { + "epoch": 14.61, + "learning_rate": 0.00010790816326530613, + "loss": 1.2464, + "step": 722 + }, + { + "epoch": 14.63, + "learning_rate": 0.00010778061224489797, + "loss": 1.2181, + "step": 723 + }, + { + "epoch": 14.65, + "learning_rate": 0.00010765306122448979, + "loss": 1.3307, + "step": 724 + }, + { + "epoch": 14.67, + "learning_rate": 0.00010752551020408163, + "loss": 1.1723, + "step": 725 + }, + { + "epoch": 14.69, + "learning_rate": 0.00010739795918367348, + "loss": 1.1528, + "step": 726 + }, + { + "epoch": 14.71, + "learning_rate": 0.0001072704081632653, + "loss": 1.215, + "step": 727 + }, + { + "epoch": 14.73, + "learning_rate": 0.00010714285714285715, + "loss": 1.2624, + "step": 728 + }, + { + "epoch": 14.75, + "learning_rate": 0.00010701530612244898, + "loss": 1.3117, + "step": 729 + }, + { + "epoch": 14.77, + "learning_rate": 0.00010688775510204083, + "loss": 1.2572, + "step": 730 + }, + { + "epoch": 14.79, + "learning_rate": 0.00010676020408163266, + "loss": 1.222, + "step": 731 + }, + { + "epoch": 14.81, + "learning_rate": 0.0001066326530612245, + "loss": 1.2881, + "step": 732 + }, + { + "epoch": 14.83, + "learning_rate": 0.00010650510204081635, + "loss": 1.2676, + "step": 733 + }, + { + "epoch": 14.85, + "learning_rate": 0.00010637755102040816, + "loss": 1.2734, + "step": 734 + }, + { + "epoch": 14.87, + "learning_rate": 0.00010625000000000001, + "loss": 1.2885, + "step": 735 + }, + { + "epoch": 14.89, + "learning_rate": 0.00010612244897959185, + "loss": 1.2764, + "step": 736 + }, + { + "epoch": 14.91, + "learning_rate": 0.00010599489795918367, + "loss": 1.3267, + "step": 737 + }, + { + "epoch": 14.93, + "learning_rate": 0.00010586734693877551, + "loss": 1.2445, + "step": 738 + }, + { + "epoch": 
14.95, + "learning_rate": 0.00010573979591836736, + "loss": 1.3359, + "step": 739 + }, + { + "epoch": 14.97, + "learning_rate": 0.00010561224489795918, + "loss": 1.2508, + "step": 740 + }, + { + "epoch": 14.99, + "learning_rate": 0.00010548469387755102, + "loss": 1.2227, + "step": 741 + }, + { + "epoch": 15.01, + "learning_rate": 0.00010535714285714286, + "loss": 1.1889, + "step": 742 + }, + { + "epoch": 15.03, + "learning_rate": 0.00010522959183673471, + "loss": 1.1919, + "step": 743 + }, + { + "epoch": 15.05, + "learning_rate": 0.00010510204081632653, + "loss": 1.2383, + "step": 744 + }, + { + "epoch": 15.07, + "learning_rate": 0.00010497448979591837, + "loss": 1.2401, + "step": 745 + }, + { + "epoch": 15.09, + "learning_rate": 0.00010484693877551021, + "loss": 1.2015, + "step": 746 + }, + { + "epoch": 15.11, + "learning_rate": 0.00010471938775510203, + "loss": 1.1509, + "step": 747 + }, + { + "epoch": 15.13, + "learning_rate": 0.00010459183673469388, + "loss": 1.1878, + "step": 748 + }, + { + "epoch": 15.16, + "learning_rate": 0.00010446428571428572, + "loss": 1.1706, + "step": 749 + }, + { + "epoch": 15.18, + "learning_rate": 0.00010433673469387755, + "loss": 1.1285, + "step": 750 + }, + { + "epoch": 15.2, + "learning_rate": 0.0001042091836734694, + "loss": 1.1608, + "step": 751 + }, + { + "epoch": 15.22, + "learning_rate": 0.00010408163265306123, + "loss": 1.1178, + "step": 752 + }, + { + "epoch": 15.24, + "learning_rate": 0.00010395408163265306, + "loss": 1.1293, + "step": 753 + }, + { + "epoch": 15.26, + "learning_rate": 0.0001038265306122449, + "loss": 1.2306, + "step": 754 + }, + { + "epoch": 15.28, + "learning_rate": 0.00010369897959183675, + "loss": 1.1541, + "step": 755 + }, + { + "epoch": 15.3, + "learning_rate": 0.00010357142857142859, + "loss": 1.1702, + "step": 756 + }, + { + "epoch": 15.32, + "learning_rate": 0.00010344387755102041, + "loss": 1.2119, + "step": 757 + }, + { + "epoch": 15.34, + "learning_rate": 0.00010331632653061225, + "loss": 1.2239, + "step": 758 + }, + { + "epoch": 15.36, + "learning_rate": 0.0001031887755102041, + "loss": 1.2019, + "step": 759 + }, + { + "epoch": 15.38, + "learning_rate": 0.00010306122448979591, + "loss": 1.2197, + "step": 760 + }, + { + "epoch": 15.4, + "learning_rate": 0.00010293367346938776, + "loss": 1.1769, + "step": 761 + }, + { + "epoch": 15.42, + "learning_rate": 0.0001028061224489796, + "loss": 1.1907, + "step": 762 + }, + { + "epoch": 15.44, + "learning_rate": 0.00010267857142857142, + "loss": 1.2089, + "step": 763 + }, + { + "epoch": 15.46, + "learning_rate": 0.00010255102040816327, + "loss": 1.1335, + "step": 764 + }, + { + "epoch": 15.48, + "learning_rate": 0.00010242346938775511, + "loss": 1.1633, + "step": 765 + }, + { + "epoch": 15.5, + "learning_rate": 0.00010229591836734695, + "loss": 1.1578, + "step": 766 + }, + { + "epoch": 15.52, + "learning_rate": 0.00010216836734693877, + "loss": 1.2236, + "step": 767 + }, + { + "epoch": 15.54, + "learning_rate": 0.00010204081632653062, + "loss": 1.1941, + "step": 768 + }, + { + "epoch": 15.56, + "learning_rate": 0.00010191326530612246, + "loss": 1.2666, + "step": 769 + }, + { + "epoch": 15.58, + "learning_rate": 0.00010178571428571428, + "loss": 1.1232, + "step": 770 + }, + { + "epoch": 15.6, + "learning_rate": 0.00010165816326530612, + "loss": 1.2242, + "step": 771 + }, + { + "epoch": 15.62, + "learning_rate": 0.00010153061224489797, + "loss": 1.1852, + "step": 772 + }, + { + "epoch": 15.64, + "learning_rate": 0.0001014030612244898, + "loss": 1.2626, + "step": 773 + }, + { + 
"epoch": 15.66, + "learning_rate": 0.00010127551020408164, + "loss": 1.1873, + "step": 774 + }, + { + "epoch": 15.68, + "learning_rate": 0.00010114795918367349, + "loss": 1.3005, + "step": 775 + }, + { + "epoch": 15.7, + "learning_rate": 0.0001010204081632653, + "loss": 1.1904, + "step": 776 + }, + { + "epoch": 15.72, + "learning_rate": 0.00010089285714285715, + "loss": 1.2927, + "step": 777 + }, + { + "epoch": 15.74, + "learning_rate": 0.00010076530612244899, + "loss": 1.179, + "step": 778 + }, + { + "epoch": 15.76, + "learning_rate": 0.00010063775510204084, + "loss": 1.2027, + "step": 779 + }, + { + "epoch": 15.78, + "learning_rate": 0.00010051020408163265, + "loss": 1.2428, + "step": 780 + }, + { + "epoch": 15.8, + "learning_rate": 0.0001003826530612245, + "loss": 1.2324, + "step": 781 + }, + { + "epoch": 15.82, + "learning_rate": 0.00010025510204081634, + "loss": 1.1251, + "step": 782 + }, + { + "epoch": 15.84, + "learning_rate": 0.00010012755102040816, + "loss": 1.2405, + "step": 783 + }, + { + "epoch": 15.86, + "learning_rate": 0.0001, + "loss": 1.2005, + "step": 784 + }, + { + "epoch": 15.88, + "learning_rate": 9.987244897959184e-05, + "loss": 1.2259, + "step": 785 + }, + { + "epoch": 15.9, + "learning_rate": 9.974489795918368e-05, + "loss": 1.1576, + "step": 786 + }, + { + "epoch": 15.92, + "learning_rate": 9.961734693877551e-05, + "loss": 1.1834, + "step": 787 + }, + { + "epoch": 15.94, + "learning_rate": 9.948979591836736e-05, + "loss": 1.2396, + "step": 788 + }, + { + "epoch": 15.96, + "learning_rate": 9.936224489795919e-05, + "loss": 1.1865, + "step": 789 + }, + { + "epoch": 15.98, + "learning_rate": 9.923469387755102e-05, + "loss": 1.2356, + "step": 790 + }, + { + "epoch": 16.01, + "learning_rate": 9.910714285714286e-05, + "loss": 1.2639, + "step": 791 + }, + { + "epoch": 16.03, + "learning_rate": 9.897959183673469e-05, + "loss": 1.1216, + "step": 792 + }, + { + "epoch": 16.05, + "learning_rate": 9.885204081632652e-05, + "loss": 1.1051, + "step": 793 + }, + { + "epoch": 16.07, + "learning_rate": 9.872448979591837e-05, + "loss": 1.0864, + "step": 794 + }, + { + "epoch": 16.09, + "learning_rate": 9.859693877551021e-05, + "loss": 1.182, + "step": 795 + }, + { + "epoch": 16.11, + "learning_rate": 9.846938775510204e-05, + "loss": 1.1272, + "step": 796 + }, + { + "epoch": 16.13, + "learning_rate": 9.834183673469389e-05, + "loss": 1.1946, + "step": 797 + }, + { + "epoch": 16.15, + "learning_rate": 9.821428571428572e-05, + "loss": 1.0875, + "step": 798 + }, + { + "epoch": 16.17, + "learning_rate": 9.808673469387756e-05, + "loss": 1.1671, + "step": 799 + }, + { + "epoch": 16.19, + "learning_rate": 9.79591836734694e-05, + "loss": 1.1502, + "step": 800 + }, + { + "epoch": 16.21, + "learning_rate": 9.783163265306124e-05, + "loss": 1.19, + "step": 801 + }, + { + "epoch": 16.23, + "learning_rate": 9.770408163265307e-05, + "loss": 1.1258, + "step": 802 + }, + { + "epoch": 16.25, + "learning_rate": 9.75765306122449e-05, + "loss": 1.1765, + "step": 803 + }, + { + "epoch": 16.27, + "learning_rate": 9.744897959183674e-05, + "loss": 1.1217, + "step": 804 + }, + { + "epoch": 16.29, + "learning_rate": 9.732142857142858e-05, + "loss": 1.1293, + "step": 805 + }, + { + "epoch": 16.31, + "learning_rate": 9.719387755102042e-05, + "loss": 1.17, + "step": 806 + }, + { + "epoch": 16.33, + "learning_rate": 9.706632653061225e-05, + "loss": 1.17, + "step": 807 + }, + { + "epoch": 16.35, + "learning_rate": 9.693877551020408e-05, + "loss": 1.2004, + "step": 808 + }, + { + "epoch": 16.37, + "learning_rate": 
9.681122448979593e-05, + "loss": 1.1648, + "step": 809 + }, + { + "epoch": 16.39, + "learning_rate": 9.668367346938776e-05, + "loss": 1.0688, + "step": 810 + }, + { + "epoch": 16.41, + "learning_rate": 9.655612244897959e-05, + "loss": 1.1607, + "step": 811 + }, + { + "epoch": 16.43, + "learning_rate": 9.642857142857143e-05, + "loss": 1.1298, + "step": 812 + }, + { + "epoch": 16.45, + "learning_rate": 9.630102040816326e-05, + "loss": 1.1064, + "step": 813 + }, + { + "epoch": 16.47, + "learning_rate": 9.617346938775511e-05, + "loss": 1.1472, + "step": 814 + }, + { + "epoch": 16.49, + "learning_rate": 9.604591836734694e-05, + "loss": 1.1577, + "step": 815 + }, + { + "epoch": 16.51, + "learning_rate": 9.591836734693878e-05, + "loss": 1.1436, + "step": 816 + }, + { + "epoch": 16.53, + "learning_rate": 9.579081632653061e-05, + "loss": 1.1657, + "step": 817 + }, + { + "epoch": 16.55, + "learning_rate": 9.566326530612246e-05, + "loss": 1.1147, + "step": 818 + }, + { + "epoch": 16.57, + "learning_rate": 9.553571428571429e-05, + "loss": 1.1839, + "step": 819 + }, + { + "epoch": 16.59, + "learning_rate": 9.540816326530613e-05, + "loss": 1.1298, + "step": 820 + }, + { + "epoch": 16.61, + "learning_rate": 9.528061224489796e-05, + "loss": 1.2141, + "step": 821 + }, + { + "epoch": 16.63, + "learning_rate": 9.515306122448981e-05, + "loss": 1.2045, + "step": 822 + }, + { + "epoch": 16.65, + "learning_rate": 9.502551020408164e-05, + "loss": 1.1791, + "step": 823 + }, + { + "epoch": 16.67, + "learning_rate": 9.489795918367348e-05, + "loss": 1.1137, + "step": 824 + }, + { + "epoch": 16.69, + "learning_rate": 9.477040816326531e-05, + "loss": 1.1312, + "step": 825 + }, + { + "epoch": 16.71, + "learning_rate": 9.464285714285715e-05, + "loss": 1.1102, + "step": 826 + }, + { + "epoch": 16.73, + "learning_rate": 9.451530612244899e-05, + "loss": 1.1865, + "step": 827 + }, + { + "epoch": 16.75, + "learning_rate": 9.438775510204082e-05, + "loss": 1.1232, + "step": 828 + }, + { + "epoch": 16.77, + "learning_rate": 9.426020408163265e-05, + "loss": 1.2068, + "step": 829 + }, + { + "epoch": 16.79, + "learning_rate": 9.41326530612245e-05, + "loss": 1.1864, + "step": 830 + }, + { + "epoch": 16.81, + "learning_rate": 9.400510204081633e-05, + "loss": 1.2195, + "step": 831 + }, + { + "epoch": 16.83, + "learning_rate": 9.387755102040817e-05, + "loss": 1.2063, + "step": 832 + }, + { + "epoch": 16.85, + "learning_rate": 9.375e-05, + "loss": 1.1455, + "step": 833 + }, + { + "epoch": 16.88, + "learning_rate": 9.362244897959183e-05, + "loss": 1.1819, + "step": 834 + }, + { + "epoch": 16.9, + "learning_rate": 9.349489795918368e-05, + "loss": 1.1887, + "step": 835 + }, + { + "epoch": 16.92, + "learning_rate": 9.336734693877551e-05, + "loss": 1.1557, + "step": 836 + }, + { + "epoch": 16.94, + "learning_rate": 9.323979591836735e-05, + "loss": 1.2094, + "step": 837 + }, + { + "epoch": 16.96, + "learning_rate": 9.311224489795918e-05, + "loss": 1.1512, + "step": 838 + }, + { + "epoch": 16.98, + "learning_rate": 9.298469387755103e-05, + "loss": 1.1463, + "step": 839 + }, + { + "epoch": 17.0, + "learning_rate": 9.285714285714286e-05, + "loss": 1.155, + "step": 840 + }, + { + "epoch": 17.02, + "learning_rate": 9.27295918367347e-05, + "loss": 1.1292, + "step": 841 + }, + { + "epoch": 17.04, + "learning_rate": 9.260204081632653e-05, + "loss": 1.0996, + "step": 842 + }, + { + "epoch": 17.06, + "learning_rate": 9.247448979591838e-05, + "loss": 1.0662, + "step": 843 + }, + { + "epoch": 17.08, + "learning_rate": 9.234693877551021e-05, + "loss": 
1.0931, + "step": 844 + }, + { + "epoch": 17.1, + "learning_rate": 9.221938775510205e-05, + "loss": 1.0727, + "step": 845 + }, + { + "epoch": 17.12, + "learning_rate": 9.209183673469388e-05, + "loss": 1.1043, + "step": 846 + }, + { + "epoch": 17.14, + "learning_rate": 9.196428571428572e-05, + "loss": 1.0594, + "step": 847 + }, + { + "epoch": 17.16, + "learning_rate": 9.183673469387756e-05, + "loss": 1.0952, + "step": 848 + }, + { + "epoch": 17.18, + "learning_rate": 9.170918367346939e-05, + "loss": 1.0639, + "step": 849 + }, + { + "epoch": 17.2, + "learning_rate": 9.158163265306124e-05, + "loss": 1.132, + "step": 850 + }, + { + "epoch": 17.22, + "learning_rate": 9.145408163265307e-05, + "loss": 1.1083, + "step": 851 + }, + { + "epoch": 17.24, + "learning_rate": 9.13265306122449e-05, + "loss": 1.1282, + "step": 852 + }, + { + "epoch": 17.26, + "learning_rate": 9.119897959183674e-05, + "loss": 1.0474, + "step": 853 + }, + { + "epoch": 17.28, + "learning_rate": 9.107142857142857e-05, + "loss": 1.1138, + "step": 854 + }, + { + "epoch": 17.3, + "learning_rate": 9.094387755102042e-05, + "loss": 1.1025, + "step": 855 + }, + { + "epoch": 17.32, + "learning_rate": 9.081632653061225e-05, + "loss": 1.0968, + "step": 856 + }, + { + "epoch": 17.34, + "learning_rate": 9.068877551020408e-05, + "loss": 1.1683, + "step": 857 + }, + { + "epoch": 17.36, + "learning_rate": 9.056122448979592e-05, + "loss": 1.0975, + "step": 858 + }, + { + "epoch": 17.38, + "learning_rate": 9.043367346938775e-05, + "loss": 1.1274, + "step": 859 + }, + { + "epoch": 17.4, + "learning_rate": 9.030612244897958e-05, + "loss": 1.0916, + "step": 860 + }, + { + "epoch": 17.42, + "learning_rate": 9.017857142857143e-05, + "loss": 1.0912, + "step": 861 + }, + { + "epoch": 17.44, + "learning_rate": 9.005102040816327e-05, + "loss": 1.0875, + "step": 862 + }, + { + "epoch": 17.46, + "learning_rate": 8.99234693877551e-05, + "loss": 1.05, + "step": 863 + }, + { + "epoch": 17.48, + "learning_rate": 8.979591836734695e-05, + "loss": 1.1418, + "step": 864 + }, + { + "epoch": 17.5, + "learning_rate": 8.966836734693878e-05, + "loss": 1.0609, + "step": 865 + }, + { + "epoch": 17.52, + "learning_rate": 8.954081632653062e-05, + "loss": 1.1611, + "step": 866 + }, + { + "epoch": 17.54, + "learning_rate": 8.941326530612245e-05, + "loss": 1.1065, + "step": 867 + }, + { + "epoch": 17.56, + "learning_rate": 8.92857142857143e-05, + "loss": 1.1611, + "step": 868 + }, + { + "epoch": 17.58, + "learning_rate": 8.915816326530613e-05, + "loss": 1.1398, + "step": 869 + }, + { + "epoch": 17.6, + "learning_rate": 8.903061224489796e-05, + "loss": 1.1055, + "step": 870 + }, + { + "epoch": 17.62, + "learning_rate": 8.89030612244898e-05, + "loss": 1.1314, + "step": 871 + }, + { + "epoch": 17.64, + "learning_rate": 8.877551020408164e-05, + "loss": 1.1084, + "step": 872 + }, + { + "epoch": 17.66, + "learning_rate": 8.864795918367348e-05, + "loss": 1.1254, + "step": 873 + }, + { + "epoch": 17.68, + "learning_rate": 8.852040816326531e-05, + "loss": 1.142, + "step": 874 + }, + { + "epoch": 17.7, + "learning_rate": 8.839285714285714e-05, + "loss": 1.1371, + "step": 875 + }, + { + "epoch": 17.72, + "learning_rate": 8.826530612244899e-05, + "loss": 1.1092, + "step": 876 + }, + { + "epoch": 17.75, + "learning_rate": 8.813775510204082e-05, + "loss": 1.161, + "step": 877 + }, + { + "epoch": 17.77, + "learning_rate": 8.801020408163265e-05, + "loss": 1.1044, + "step": 878 + }, + { + "epoch": 17.79, + "learning_rate": 8.788265306122449e-05, + "loss": 1.117, + "step": 879 + }, + { + 
"epoch": 17.81, + "learning_rate": 8.775510204081632e-05, + "loss": 1.1262, + "step": 880 + }, + { + "epoch": 17.83, + "learning_rate": 8.762755102040817e-05, + "loss": 1.0829, + "step": 881 + }, + { + "epoch": 17.85, + "learning_rate": 8.75e-05, + "loss": 1.1393, + "step": 882 + }, + { + "epoch": 17.87, + "learning_rate": 8.737244897959183e-05, + "loss": 1.1781, + "step": 883 + }, + { + "epoch": 17.89, + "learning_rate": 8.724489795918367e-05, + "loss": 1.1582, + "step": 884 + }, + { + "epoch": 17.91, + "learning_rate": 8.711734693877552e-05, + "loss": 1.1469, + "step": 885 + }, + { + "epoch": 17.93, + "learning_rate": 8.698979591836735e-05, + "loss": 1.1494, + "step": 886 + }, + { + "epoch": 17.95, + "learning_rate": 8.68622448979592e-05, + "loss": 1.1251, + "step": 887 + }, + { + "epoch": 17.97, + "learning_rate": 8.673469387755102e-05, + "loss": 1.1624, + "step": 888 + }, + { + "epoch": 17.99, + "learning_rate": 8.660714285714287e-05, + "loss": 1.0842, + "step": 889 + }, + { + "epoch": 18.01, + "learning_rate": 8.64795918367347e-05, + "loss": 1.1944, + "step": 890 + }, + { + "epoch": 18.03, + "learning_rate": 8.635204081632653e-05, + "loss": 1.0642, + "step": 891 + }, + { + "epoch": 18.05, + "learning_rate": 8.622448979591838e-05, + "loss": 1.0459, + "step": 892 + }, + { + "epoch": 18.07, + "learning_rate": 8.60969387755102e-05, + "loss": 1.0941, + "step": 893 + }, + { + "epoch": 18.09, + "learning_rate": 8.596938775510205e-05, + "loss": 1.0457, + "step": 894 + }, + { + "epoch": 18.11, + "learning_rate": 8.584183673469388e-05, + "loss": 1.1033, + "step": 895 + }, + { + "epoch": 18.13, + "learning_rate": 8.571428571428571e-05, + "loss": 1.0756, + "step": 896 + }, + { + "epoch": 18.15, + "learning_rate": 8.558673469387756e-05, + "loss": 1.0615, + "step": 897 + }, + { + "epoch": 18.17, + "learning_rate": 8.545918367346939e-05, + "loss": 1.0828, + "step": 898 + }, + { + "epoch": 18.19, + "learning_rate": 8.533163265306123e-05, + "loss": 1.1158, + "step": 899 + }, + { + "epoch": 18.21, + "learning_rate": 8.520408163265306e-05, + "loss": 1.0133, + "step": 900 + }, + { + "epoch": 18.23, + "learning_rate": 8.50765306122449e-05, + "loss": 1.0437, + "step": 901 + }, + { + "epoch": 18.25, + "learning_rate": 8.494897959183674e-05, + "loss": 1.0372, + "step": 902 + }, + { + "epoch": 18.27, + "learning_rate": 8.482142857142857e-05, + "loss": 1.1012, + "step": 903 + }, + { + "epoch": 18.29, + "learning_rate": 8.469387755102041e-05, + "loss": 1.0777, + "step": 904 + }, + { + "epoch": 18.31, + "learning_rate": 8.456632653061224e-05, + "loss": 1.0799, + "step": 905 + }, + { + "epoch": 18.33, + "learning_rate": 8.443877551020409e-05, + "loss": 0.9846, + "step": 906 + }, + { + "epoch": 18.35, + "learning_rate": 8.431122448979592e-05, + "loss": 1.1, + "step": 907 + }, + { + "epoch": 18.37, + "learning_rate": 8.418367346938776e-05, + "loss": 1.0787, + "step": 908 + }, + { + "epoch": 18.39, + "learning_rate": 8.40561224489796e-05, + "loss": 1.0647, + "step": 909 + }, + { + "epoch": 18.41, + "learning_rate": 8.392857142857144e-05, + "loss": 1.056, + "step": 910 + }, + { + "epoch": 18.43, + "learning_rate": 8.380102040816327e-05, + "loss": 1.1131, + "step": 911 + }, + { + "epoch": 18.45, + "learning_rate": 8.367346938775511e-05, + "loss": 1.0825, + "step": 912 + }, + { + "epoch": 18.47, + "learning_rate": 8.354591836734695e-05, + "loss": 1.0681, + "step": 913 + }, + { + "epoch": 18.49, + "learning_rate": 8.341836734693878e-05, + "loss": 1.0479, + "step": 914 + }, + { + "epoch": 18.51, + "learning_rate": 
8.329081632653062e-05, + "loss": 1.0921, + "step": 915 + }, + { + "epoch": 18.53, + "learning_rate": 8.316326530612245e-05, + "loss": 1.0626, + "step": 916 + }, + { + "epoch": 18.55, + "learning_rate": 8.30357142857143e-05, + "loss": 1.0518, + "step": 917 + }, + { + "epoch": 18.57, + "learning_rate": 8.290816326530613e-05, + "loss": 1.0557, + "step": 918 + }, + { + "epoch": 18.6, + "learning_rate": 8.278061224489796e-05, + "loss": 1.0831, + "step": 919 + }, + { + "epoch": 18.62, + "learning_rate": 8.26530612244898e-05, + "loss": 1.0307, + "step": 920 + }, + { + "epoch": 18.64, + "learning_rate": 8.252551020408163e-05, + "loss": 1.0455, + "step": 921 + }, + { + "epoch": 18.66, + "learning_rate": 8.239795918367348e-05, + "loss": 1.0667, + "step": 922 + }, + { + "epoch": 18.68, + "learning_rate": 8.227040816326531e-05, + "loss": 1.0736, + "step": 923 + }, + { + "epoch": 18.7, + "learning_rate": 8.214285714285714e-05, + "loss": 1.0108, + "step": 924 + }, + { + "epoch": 18.72, + "learning_rate": 8.201530612244898e-05, + "loss": 1.0458, + "step": 925 + }, + { + "epoch": 18.74, + "learning_rate": 8.188775510204081e-05, + "loss": 1.0852, + "step": 926 + }, + { + "epoch": 18.76, + "learning_rate": 8.176020408163265e-05, + "loss": 1.1207, + "step": 927 + }, + { + "epoch": 18.78, + "learning_rate": 8.163265306122449e-05, + "loss": 1.0914, + "step": 928 + }, + { + "epoch": 18.8, + "learning_rate": 8.150510204081633e-05, + "loss": 1.1108, + "step": 929 + }, + { + "epoch": 18.82, + "learning_rate": 8.137755102040817e-05, + "loss": 1.1394, + "step": 930 + }, + { + "epoch": 18.84, + "learning_rate": 8.125000000000001e-05, + "loss": 1.029, + "step": 931 + }, + { + "epoch": 18.86, + "learning_rate": 8.112244897959184e-05, + "loss": 1.0661, + "step": 932 + }, + { + "epoch": 18.88, + "learning_rate": 8.099489795918369e-05, + "loss": 1.0303, + "step": 933 + }, + { + "epoch": 18.9, + "learning_rate": 8.086734693877552e-05, + "loss": 1.1144, + "step": 934 + }, + { + "epoch": 18.92, + "learning_rate": 8.073979591836736e-05, + "loss": 1.1096, + "step": 935 + }, + { + "epoch": 18.94, + "learning_rate": 8.061224489795919e-05, + "loss": 1.123, + "step": 936 + }, + { + "epoch": 18.96, + "learning_rate": 8.048469387755102e-05, + "loss": 1.1002, + "step": 937 + }, + { + "epoch": 18.98, + "learning_rate": 8.035714285714287e-05, + "loss": 1.1016, + "step": 938 + }, + { + "epoch": 19.0, + "learning_rate": 8.02295918367347e-05, + "loss": 1.0847, + "step": 939 + }, + { + "epoch": 19.02, + "learning_rate": 8.010204081632653e-05, + "loss": 1.1029, + "step": 940 + }, + { + "epoch": 19.04, + "learning_rate": 7.997448979591837e-05, + "loss": 1.041, + "step": 941 + }, + { + "epoch": 19.06, + "learning_rate": 7.98469387755102e-05, + "loss": 1.01, + "step": 942 + }, + { + "epoch": 19.08, + "learning_rate": 7.971938775510205e-05, + "loss": 1.0197, + "step": 943 + }, + { + "epoch": 19.1, + "learning_rate": 7.959183673469388e-05, + "loss": 1.0543, + "step": 944 + }, + { + "epoch": 19.12, + "learning_rate": 7.946428571428571e-05, + "loss": 1.0369, + "step": 945 + }, + { + "epoch": 19.14, + "learning_rate": 7.933673469387755e-05, + "loss": 1.0154, + "step": 946 + }, + { + "epoch": 19.16, + "learning_rate": 7.920918367346939e-05, + "loss": 0.9546, + "step": 947 + }, + { + "epoch": 19.18, + "learning_rate": 7.908163265306123e-05, + "loss": 0.9982, + "step": 948 + }, + { + "epoch": 19.2, + "learning_rate": 7.895408163265306e-05, + "loss": 1.0748, + "step": 949 + }, + { + "epoch": 19.22, + "learning_rate": 7.882653061224489e-05, + "loss": 
1.0562, + "step": 950 + }, + { + "epoch": 19.24, + "learning_rate": 7.869897959183674e-05, + "loss": 1.0352, + "step": 951 + }, + { + "epoch": 19.26, + "learning_rate": 7.857142857142858e-05, + "loss": 0.9976, + "step": 952 + }, + { + "epoch": 19.28, + "learning_rate": 7.844387755102041e-05, + "loss": 1.0221, + "step": 953 + }, + { + "epoch": 19.3, + "learning_rate": 7.831632653061226e-05, + "loss": 1.0119, + "step": 954 + }, + { + "epoch": 19.32, + "learning_rate": 7.818877551020409e-05, + "loss": 1.0657, + "step": 955 + }, + { + "epoch": 19.34, + "learning_rate": 7.806122448979593e-05, + "loss": 0.9591, + "step": 956 + }, + { + "epoch": 19.36, + "learning_rate": 7.793367346938776e-05, + "loss": 1.0101, + "step": 957 + }, + { + "epoch": 19.38, + "learning_rate": 7.780612244897959e-05, + "loss": 1.0453, + "step": 958 + }, + { + "epoch": 19.4, + "learning_rate": 7.767857142857144e-05, + "loss": 1.0461, + "step": 959 + }, + { + "epoch": 19.42, + "learning_rate": 7.755102040816327e-05, + "loss": 1.0959, + "step": 960 + }, + { + "epoch": 19.44, + "learning_rate": 7.742346938775511e-05, + "loss": 1.0608, + "step": 961 + }, + { + "epoch": 19.47, + "learning_rate": 7.729591836734694e-05, + "loss": 1.1177, + "step": 962 + }, + { + "epoch": 19.49, + "learning_rate": 7.716836734693877e-05, + "loss": 1.0354, + "step": 963 + }, + { + "epoch": 19.51, + "learning_rate": 7.704081632653062e-05, + "loss": 1.0507, + "step": 964 + }, + { + "epoch": 19.53, + "learning_rate": 7.691326530612245e-05, + "loss": 1.0313, + "step": 965 + }, + { + "epoch": 19.55, + "learning_rate": 7.67857142857143e-05, + "loss": 1.0569, + "step": 966 + }, + { + "epoch": 19.57, + "learning_rate": 7.665816326530612e-05, + "loss": 1.0862, + "step": 967 + }, + { + "epoch": 19.59, + "learning_rate": 7.653061224489796e-05, + "loss": 1.0593, + "step": 968 + }, + { + "epoch": 19.61, + "learning_rate": 7.64030612244898e-05, + "loss": 1.0602, + "step": 969 + }, + { + "epoch": 19.63, + "learning_rate": 7.627551020408163e-05, + "loss": 1.0048, + "step": 970 + }, + { + "epoch": 19.65, + "learning_rate": 7.614795918367347e-05, + "loss": 1.0346, + "step": 971 + }, + { + "epoch": 19.67, + "learning_rate": 7.60204081632653e-05, + "loss": 1.0172, + "step": 972 + }, + { + "epoch": 19.69, + "learning_rate": 7.589285714285714e-05, + "loss": 1.02, + "step": 973 + }, + { + "epoch": 19.71, + "learning_rate": 7.576530612244898e-05, + "loss": 1.0028, + "step": 974 + }, + { + "epoch": 19.73, + "learning_rate": 7.563775510204083e-05, + "loss": 1.08, + "step": 975 + }, + { + "epoch": 19.75, + "learning_rate": 7.551020408163266e-05, + "loss": 1.0402, + "step": 976 + }, + { + "epoch": 19.77, + "learning_rate": 7.53826530612245e-05, + "loss": 1.0567, + "step": 977 + }, + { + "epoch": 19.79, + "learning_rate": 7.525510204081633e-05, + "loss": 1.0169, + "step": 978 + }, + { + "epoch": 19.81, + "learning_rate": 7.512755102040818e-05, + "loss": 0.9881, + "step": 979 + }, + { + "epoch": 19.83, + "learning_rate": 7.500000000000001e-05, + "loss": 1.0677, + "step": 980 + }, + { + "epoch": 19.85, + "learning_rate": 7.487244897959184e-05, + "loss": 1.1026, + "step": 981 + }, + { + "epoch": 19.87, + "learning_rate": 7.474489795918368e-05, + "loss": 1.0101, + "step": 982 + }, + { + "epoch": 19.89, + "learning_rate": 7.461734693877551e-05, + "loss": 1.069, + "step": 983 + }, + { + "epoch": 19.91, + "learning_rate": 7.448979591836736e-05, + "loss": 1.0493, + "step": 984 + }, + { + "epoch": 19.93, + "learning_rate": 7.436224489795919e-05, + "loss": 1.0858, + "step": 985 + }, + { 
+ "epoch": 19.95, + "learning_rate": 7.423469387755102e-05, + "loss": 1.0734, + "step": 986 + }, + { + "epoch": 19.97, + "learning_rate": 7.410714285714286e-05, + "loss": 1.0203, + "step": 987 + }, + { + "epoch": 19.99, + "learning_rate": 7.39795918367347e-05, + "loss": 1.0285, + "step": 988 + }, + { + "epoch": 20.01, + "learning_rate": 7.385204081632653e-05, + "loss": 0.9446, + "step": 989 + }, + { + "epoch": 20.03, + "learning_rate": 7.372448979591837e-05, + "loss": 0.9915, + "step": 990 + }, + { + "epoch": 20.05, + "learning_rate": 7.35969387755102e-05, + "loss": 0.9882, + "step": 991 + }, + { + "epoch": 20.07, + "learning_rate": 7.346938775510205e-05, + "loss": 0.9338, + "step": 992 + }, + { + "epoch": 20.09, + "learning_rate": 7.334183673469388e-05, + "loss": 0.942, + "step": 993 + }, + { + "epoch": 20.11, + "learning_rate": 7.321428571428571e-05, + "loss": 0.9725, + "step": 994 + }, + { + "epoch": 20.13, + "learning_rate": 7.308673469387755e-05, + "loss": 1.027, + "step": 995 + }, + { + "epoch": 20.15, + "learning_rate": 7.29591836734694e-05, + "loss": 1.0081, + "step": 996 + }, + { + "epoch": 20.17, + "learning_rate": 7.283163265306123e-05, + "loss": 1.0117, + "step": 997 + }, + { + "epoch": 20.19, + "learning_rate": 7.270408163265307e-05, + "loss": 0.969, + "step": 998 + }, + { + "epoch": 20.21, + "learning_rate": 7.25765306122449e-05, + "loss": 1.0024, + "step": 999 + }, + { + "epoch": 20.23, + "learning_rate": 7.244897959183675e-05, + "loss": 0.994, + "step": 1000 + }, + { + "epoch": 20.25, + "learning_rate": 7.232142857142858e-05, + "loss": 1.0248, + "step": 1001 + }, + { + "epoch": 20.27, + "learning_rate": 7.219387755102042e-05, + "loss": 1.0493, + "step": 1002 + }, + { + "epoch": 20.29, + "learning_rate": 7.206632653061225e-05, + "loss": 1.0011, + "step": 1003 + }, + { + "epoch": 20.31, + "learning_rate": 7.193877551020408e-05, + "loss": 0.9874, + "step": 1004 + }, + { + "epoch": 20.34, + "learning_rate": 7.181122448979593e-05, + "loss": 1.0049, + "step": 1005 + }, + { + "epoch": 20.36, + "learning_rate": 7.168367346938776e-05, + "loss": 1.0314, + "step": 1006 + }, + { + "epoch": 20.38, + "learning_rate": 7.155612244897959e-05, + "loss": 0.9742, + "step": 1007 + }, + { + "epoch": 20.4, + "learning_rate": 7.142857142857143e-05, + "loss": 1.0621, + "step": 1008 + }, + { + "epoch": 20.42, + "learning_rate": 7.130102040816326e-05, + "loss": 0.9672, + "step": 1009 + }, + { + "epoch": 20.44, + "learning_rate": 7.117346938775511e-05, + "loss": 1.0018, + "step": 1010 + }, + { + "epoch": 20.46, + "learning_rate": 7.104591836734694e-05, + "loss": 1.0045, + "step": 1011 + }, + { + "epoch": 20.48, + "learning_rate": 7.091836734693877e-05, + "loss": 0.9675, + "step": 1012 + }, + { + "epoch": 20.5, + "learning_rate": 7.079081632653062e-05, + "loss": 0.976, + "step": 1013 + }, + { + "epoch": 20.52, + "learning_rate": 7.066326530612245e-05, + "loss": 1.0523, + "step": 1014 + }, + { + "epoch": 20.54, + "learning_rate": 7.053571428571429e-05, + "loss": 1.052, + "step": 1015 + }, + { + "epoch": 20.56, + "learning_rate": 7.040816326530612e-05, + "loss": 0.9903, + "step": 1016 + }, + { + "epoch": 20.58, + "learning_rate": 7.028061224489795e-05, + "loss": 1.0337, + "step": 1017 + }, + { + "epoch": 20.6, + "learning_rate": 7.01530612244898e-05, + "loss": 1.1122, + "step": 1018 + }, + { + "epoch": 20.62, + "learning_rate": 7.002551020408164e-05, + "loss": 1.0133, + "step": 1019 + }, + { + "epoch": 20.64, + "learning_rate": 6.989795918367347e-05, + "loss": 0.9588, + "step": 1020 + }, + { + "epoch": 
20.66, + "learning_rate": 6.977040816326532e-05, + "loss": 0.9892, + "step": 1021 + }, + { + "epoch": 20.68, + "learning_rate": 6.964285714285715e-05, + "loss": 1.025, + "step": 1022 + }, + { + "epoch": 20.7, + "learning_rate": 6.951530612244899e-05, + "loss": 1.0196, + "step": 1023 + }, + { + "epoch": 20.72, + "learning_rate": 6.938775510204082e-05, + "loss": 1.0146, + "step": 1024 + }, + { + "epoch": 20.74, + "learning_rate": 6.926020408163265e-05, + "loss": 1.0656, + "step": 1025 + }, + { + "epoch": 20.76, + "learning_rate": 6.91326530612245e-05, + "loss": 0.9584, + "step": 1026 + }, + { + "epoch": 20.78, + "learning_rate": 6.900510204081633e-05, + "loss": 0.9877, + "step": 1027 + }, + { + "epoch": 20.8, + "learning_rate": 6.887755102040817e-05, + "loss": 1.0607, + "step": 1028 + }, + { + "epoch": 20.82, + "learning_rate": 6.875e-05, + "loss": 0.9969, + "step": 1029 + }, + { + "epoch": 20.84, + "learning_rate": 6.862244897959184e-05, + "loss": 0.9506, + "step": 1030 + }, + { + "epoch": 20.86, + "learning_rate": 6.849489795918368e-05, + "loss": 1.0576, + "step": 1031 + }, + { + "epoch": 20.88, + "learning_rate": 6.836734693877551e-05, + "loss": 1.0094, + "step": 1032 + }, + { + "epoch": 20.9, + "learning_rate": 6.823979591836735e-05, + "loss": 0.9872, + "step": 1033 + }, + { + "epoch": 20.92, + "learning_rate": 6.811224489795919e-05, + "loss": 1.0544, + "step": 1034 + }, + { + "epoch": 20.94, + "learning_rate": 6.798469387755102e-05, + "loss": 1.0194, + "step": 1035 + }, + { + "epoch": 20.96, + "learning_rate": 6.785714285714286e-05, + "loss": 1.0009, + "step": 1036 + }, + { + "epoch": 20.98, + "learning_rate": 6.772959183673469e-05, + "loss": 0.9727, + "step": 1037 + }, + { + "epoch": 21.0, + "learning_rate": 6.760204081632652e-05, + "loss": 0.9754, + "step": 1038 + }, + { + "epoch": 21.02, + "learning_rate": 6.747448979591837e-05, + "loss": 0.9953, + "step": 1039 + }, + { + "epoch": 21.04, + "learning_rate": 6.73469387755102e-05, + "loss": 0.9307, + "step": 1040 + }, + { + "epoch": 21.06, + "learning_rate": 6.721938775510204e-05, + "loss": 0.9151, + "step": 1041 + }, + { + "epoch": 21.08, + "learning_rate": 6.709183673469389e-05, + "loss": 0.9474, + "step": 1042 + }, + { + "epoch": 21.1, + "learning_rate": 6.696428571428572e-05, + "loss": 0.9697, + "step": 1043 + }, + { + "epoch": 21.12, + "learning_rate": 6.683673469387756e-05, + "loss": 0.9423, + "step": 1044 + }, + { + "epoch": 21.14, + "learning_rate": 6.670918367346939e-05, + "loss": 0.9797, + "step": 1045 + }, + { + "epoch": 21.16, + "learning_rate": 6.658163265306124e-05, + "loss": 0.919, + "step": 1046 + }, + { + "epoch": 21.18, + "learning_rate": 6.645408163265307e-05, + "loss": 0.9743, + "step": 1047 + }, + { + "epoch": 21.21, + "learning_rate": 6.63265306122449e-05, + "loss": 0.9575, + "step": 1048 + }, + { + "epoch": 21.23, + "learning_rate": 6.619897959183674e-05, + "loss": 0.9861, + "step": 1049 + }, + { + "epoch": 21.25, + "learning_rate": 6.607142857142857e-05, + "loss": 0.9103, + "step": 1050 + }, + { + "epoch": 21.27, + "learning_rate": 6.594387755102042e-05, + "loss": 0.993, + "step": 1051 + }, + { + "epoch": 21.29, + "learning_rate": 6.581632653061225e-05, + "loss": 0.9668, + "step": 1052 + }, + { + "epoch": 21.31, + "learning_rate": 6.568877551020408e-05, + "loss": 1.0008, + "step": 1053 + }, + { + "epoch": 21.33, + "learning_rate": 6.556122448979592e-05, + "loss": 0.9825, + "step": 1054 + }, + { + "epoch": 21.35, + "learning_rate": 6.543367346938776e-05, + "loss": 1.0174, + "step": 1055 + }, + { + "epoch": 21.37, 
+ "learning_rate": 6.530612244897959e-05, + "loss": 0.9685, + "step": 1056 + }, + { + "epoch": 21.39, + "learning_rate": 6.517857142857143e-05, + "loss": 0.9265, + "step": 1057 + }, + { + "epoch": 21.41, + "learning_rate": 6.505102040816326e-05, + "loss": 0.9495, + "step": 1058 + }, + { + "epoch": 21.43, + "learning_rate": 6.49234693877551e-05, + "loss": 0.9541, + "step": 1059 + }, + { + "epoch": 21.45, + "learning_rate": 6.479591836734694e-05, + "loss": 0.9299, + "step": 1060 + }, + { + "epoch": 21.47, + "learning_rate": 6.466836734693877e-05, + "loss": 0.9625, + "step": 1061 + }, + { + "epoch": 21.49, + "learning_rate": 6.454081632653061e-05, + "loss": 1.0054, + "step": 1062 + }, + { + "epoch": 21.51, + "learning_rate": 6.441326530612244e-05, + "loss": 0.9893, + "step": 1063 + }, + { + "epoch": 21.53, + "learning_rate": 6.428571428571429e-05, + "loss": 0.9906, + "step": 1064 + }, + { + "epoch": 21.55, + "learning_rate": 6.415816326530613e-05, + "loss": 0.9487, + "step": 1065 + }, + { + "epoch": 21.57, + "learning_rate": 6.403061224489796e-05, + "loss": 0.9728, + "step": 1066 + }, + { + "epoch": 21.59, + "learning_rate": 6.390306122448981e-05, + "loss": 0.9883, + "step": 1067 + }, + { + "epoch": 21.61, + "learning_rate": 6.377551020408164e-05, + "loss": 1.053, + "step": 1068 + }, + { + "epoch": 21.63, + "learning_rate": 6.364795918367348e-05, + "loss": 1.012, + "step": 1069 + }, + { + "epoch": 21.65, + "learning_rate": 6.352040816326531e-05, + "loss": 0.962, + "step": 1070 + }, + { + "epoch": 21.67, + "learning_rate": 6.339285714285714e-05, + "loss": 0.9955, + "step": 1071 + }, + { + "epoch": 21.69, + "learning_rate": 6.326530612244899e-05, + "loss": 0.9908, + "step": 1072 + }, + { + "epoch": 21.71, + "learning_rate": 6.313775510204082e-05, + "loss": 1.0327, + "step": 1073 + }, + { + "epoch": 21.73, + "learning_rate": 6.301020408163265e-05, + "loss": 0.9255, + "step": 1074 + }, + { + "epoch": 21.75, + "learning_rate": 6.28826530612245e-05, + "loss": 0.9268, + "step": 1075 + }, + { + "epoch": 21.77, + "learning_rate": 6.275510204081633e-05, + "loss": 0.9204, + "step": 1076 + }, + { + "epoch": 21.79, + "learning_rate": 6.262755102040817e-05, + "loss": 0.9838, + "step": 1077 + }, + { + "epoch": 21.81, + "learning_rate": 6.25e-05, + "loss": 0.954, + "step": 1078 + }, + { + "epoch": 21.83, + "learning_rate": 6.237244897959183e-05, + "loss": 1.0102, + "step": 1079 + }, + { + "epoch": 21.85, + "learning_rate": 6.224489795918368e-05, + "loss": 0.916, + "step": 1080 + }, + { + "epoch": 21.87, + "learning_rate": 6.211734693877551e-05, + "loss": 0.9939, + "step": 1081 + }, + { + "epoch": 21.89, + "learning_rate": 6.198979591836735e-05, + "loss": 0.9675, + "step": 1082 + }, + { + "epoch": 21.91, + "learning_rate": 6.186224489795918e-05, + "loss": 0.9666, + "step": 1083 + }, + { + "epoch": 21.93, + "learning_rate": 6.173469387755101e-05, + "loss": 0.9919, + "step": 1084 + }, + { + "epoch": 21.95, + "learning_rate": 6.160714285714286e-05, + "loss": 1.0106, + "step": 1085 + }, + { + "epoch": 21.97, + "learning_rate": 6.14795918367347e-05, + "loss": 0.9982, + "step": 1086 + }, + { + "epoch": 21.99, + "learning_rate": 6.135204081632653e-05, + "loss": 1.0137, + "step": 1087 + }, + { + "epoch": 22.01, + "learning_rate": 6.122448979591838e-05, + "loss": 0.9331, + "step": 1088 + }, + { + "epoch": 22.03, + "learning_rate": 6.109693877551021e-05, + "loss": 0.8834, + "step": 1089 + }, + { + "epoch": 22.06, + "learning_rate": 6.0969387755102046e-05, + "loss": 0.9757, + "step": 1090 + }, + { + "epoch": 22.08, + 
"learning_rate": 6.084183673469388e-05, + "loss": 0.9038, + "step": 1091 + }, + { + "epoch": 22.1, + "learning_rate": 6.0714285714285715e-05, + "loss": 0.9097, + "step": 1092 + }, + { + "epoch": 22.12, + "learning_rate": 6.058673469387756e-05, + "loss": 0.8972, + "step": 1093 + }, + { + "epoch": 22.14, + "learning_rate": 6.045918367346939e-05, + "loss": 0.8825, + "step": 1094 + }, + { + "epoch": 22.16, + "learning_rate": 6.0331632653061234e-05, + "loss": 0.9814, + "step": 1095 + }, + { + "epoch": 22.18, + "learning_rate": 6.0204081632653065e-05, + "loss": 0.9874, + "step": 1096 + }, + { + "epoch": 22.2, + "learning_rate": 6.0076530612244896e-05, + "loss": 0.912, + "step": 1097 + }, + { + "epoch": 22.22, + "learning_rate": 5.994897959183674e-05, + "loss": 0.9206, + "step": 1098 + }, + { + "epoch": 22.24, + "learning_rate": 5.982142857142857e-05, + "loss": 0.9497, + "step": 1099 + }, + { + "epoch": 22.26, + "learning_rate": 5.9693877551020416e-05, + "loss": 0.9269, + "step": 1100 + }, + { + "epoch": 22.28, + "learning_rate": 5.956632653061225e-05, + "loss": 0.9452, + "step": 1101 + }, + { + "epoch": 22.3, + "learning_rate": 5.9438775510204084e-05, + "loss": 0.9548, + "step": 1102 + }, + { + "epoch": 22.32, + "learning_rate": 5.931122448979592e-05, + "loss": 0.9689, + "step": 1103 + }, + { + "epoch": 22.34, + "learning_rate": 5.918367346938776e-05, + "loss": 0.9455, + "step": 1104 + }, + { + "epoch": 22.36, + "learning_rate": 5.905612244897959e-05, + "loss": 0.9409, + "step": 1105 + }, + { + "epoch": 22.38, + "learning_rate": 5.8928571428571435e-05, + "loss": 0.9093, + "step": 1106 + }, + { + "epoch": 22.4, + "learning_rate": 5.8801020408163266e-05, + "loss": 0.921, + "step": 1107 + }, + { + "epoch": 22.42, + "learning_rate": 5.867346938775511e-05, + "loss": 0.9368, + "step": 1108 + }, + { + "epoch": 22.44, + "learning_rate": 5.854591836734694e-05, + "loss": 0.907, + "step": 1109 + }, + { + "epoch": 22.46, + "learning_rate": 5.841836734693877e-05, + "loss": 0.9126, + "step": 1110 + }, + { + "epoch": 22.48, + "learning_rate": 5.8290816326530616e-05, + "loss": 0.9161, + "step": 1111 + }, + { + "epoch": 22.5, + "learning_rate": 5.816326530612245e-05, + "loss": 0.9542, + "step": 1112 + }, + { + "epoch": 22.52, + "learning_rate": 5.803571428571429e-05, + "loss": 0.9775, + "step": 1113 + }, + { + "epoch": 22.54, + "learning_rate": 5.790816326530612e-05, + "loss": 1.0006, + "step": 1114 + }, + { + "epoch": 22.56, + "learning_rate": 5.778061224489796e-05, + "loss": 0.8965, + "step": 1115 + }, + { + "epoch": 22.58, + "learning_rate": 5.7653061224489805e-05, + "loss": 0.944, + "step": 1116 + }, + { + "epoch": 22.6, + "learning_rate": 5.7525510204081636e-05, + "loss": 0.9162, + "step": 1117 + }, + { + "epoch": 22.62, + "learning_rate": 5.739795918367348e-05, + "loss": 0.9325, + "step": 1118 + }, + { + "epoch": 22.64, + "learning_rate": 5.727040816326531e-05, + "loss": 0.8998, + "step": 1119 + }, + { + "epoch": 22.66, + "learning_rate": 5.714285714285714e-05, + "loss": 0.9362, + "step": 1120 + }, + { + "epoch": 22.68, + "learning_rate": 5.7015306122448986e-05, + "loss": 0.9969, + "step": 1121 + }, + { + "epoch": 22.7, + "learning_rate": 5.688775510204082e-05, + "loss": 0.9104, + "step": 1122 + }, + { + "epoch": 22.72, + "learning_rate": 5.676020408163265e-05, + "loss": 0.9746, + "step": 1123 + }, + { + "epoch": 22.74, + "learning_rate": 5.663265306122449e-05, + "loss": 0.9821, + "step": 1124 + }, + { + "epoch": 22.76, + "learning_rate": 5.650510204081633e-05, + "loss": 0.9526, + "step": 1125 + }, + { + 
"epoch": 22.78, + "learning_rate": 5.637755102040817e-05, + "loss": 0.871, + "step": 1126 + }, + { + "epoch": 22.8, + "learning_rate": 5.6250000000000005e-05, + "loss": 0.9534, + "step": 1127 + }, + { + "epoch": 22.82, + "learning_rate": 5.6122448979591836e-05, + "loss": 0.9616, + "step": 1128 + }, + { + "epoch": 22.84, + "learning_rate": 5.599489795918368e-05, + "loss": 0.9627, + "step": 1129 + }, + { + "epoch": 22.86, + "learning_rate": 5.586734693877551e-05, + "loss": 0.9704, + "step": 1130 + }, + { + "epoch": 22.88, + "learning_rate": 5.5739795918367356e-05, + "loss": 0.9506, + "step": 1131 + }, + { + "epoch": 22.9, + "learning_rate": 5.561224489795919e-05, + "loss": 0.9553, + "step": 1132 + }, + { + "epoch": 22.93, + "learning_rate": 5.548469387755102e-05, + "loss": 0.9294, + "step": 1133 + }, + { + "epoch": 22.95, + "learning_rate": 5.535714285714286e-05, + "loss": 0.8979, + "step": 1134 + }, + { + "epoch": 22.97, + "learning_rate": 5.522959183673469e-05, + "loss": 1.0004, + "step": 1135 + }, + { + "epoch": 22.99, + "learning_rate": 5.510204081632653e-05, + "loss": 0.9821, + "step": 1136 + }, + { + "epoch": 23.01, + "learning_rate": 5.497448979591837e-05, + "loss": 0.9607, + "step": 1137 + }, + { + "epoch": 23.03, + "learning_rate": 5.4846938775510206e-05, + "loss": 0.9757, + "step": 1138 + }, + { + "epoch": 23.05, + "learning_rate": 5.471938775510205e-05, + "loss": 0.9096, + "step": 1139 + }, + { + "epoch": 23.07, + "learning_rate": 5.459183673469388e-05, + "loss": 0.9144, + "step": 1140 + }, + { + "epoch": 23.09, + "learning_rate": 5.446428571428571e-05, + "loss": 0.8667, + "step": 1141 + }, + { + "epoch": 23.11, + "learning_rate": 5.4336734693877556e-05, + "loss": 0.8993, + "step": 1142 + }, + { + "epoch": 23.13, + "learning_rate": 5.420918367346939e-05, + "loss": 0.8964, + "step": 1143 + }, + { + "epoch": 23.15, + "learning_rate": 5.408163265306123e-05, + "loss": 0.9173, + "step": 1144 + }, + { + "epoch": 23.17, + "learning_rate": 5.395408163265306e-05, + "loss": 0.9019, + "step": 1145 + }, + { + "epoch": 23.19, + "learning_rate": 5.382653061224489e-05, + "loss": 0.9303, + "step": 1146 + }, + { + "epoch": 23.21, + "learning_rate": 5.369897959183674e-05, + "loss": 0.9268, + "step": 1147 + }, + { + "epoch": 23.23, + "learning_rate": 5.3571428571428575e-05, + "loss": 0.8803, + "step": 1148 + }, + { + "epoch": 23.25, + "learning_rate": 5.344387755102041e-05, + "loss": 0.9197, + "step": 1149 + }, + { + "epoch": 23.27, + "learning_rate": 5.331632653061225e-05, + "loss": 0.9204, + "step": 1150 + }, + { + "epoch": 23.29, + "learning_rate": 5.318877551020408e-05, + "loss": 0.8802, + "step": 1151 + }, + { + "epoch": 23.31, + "learning_rate": 5.3061224489795926e-05, + "loss": 0.9044, + "step": 1152 + }, + { + "epoch": 23.33, + "learning_rate": 5.293367346938776e-05, + "loss": 0.8893, + "step": 1153 + }, + { + "epoch": 23.35, + "learning_rate": 5.280612244897959e-05, + "loss": 0.8928, + "step": 1154 + }, + { + "epoch": 23.37, + "learning_rate": 5.267857142857143e-05, + "loss": 0.9353, + "step": 1155 + }, + { + "epoch": 23.39, + "learning_rate": 5.255102040816326e-05, + "loss": 0.9345, + "step": 1156 + }, + { + "epoch": 23.41, + "learning_rate": 5.242346938775511e-05, + "loss": 0.9372, + "step": 1157 + }, + { + "epoch": 23.43, + "learning_rate": 5.229591836734694e-05, + "loss": 0.9234, + "step": 1158 + }, + { + "epoch": 23.45, + "learning_rate": 5.2168367346938776e-05, + "loss": 0.9177, + "step": 1159 + }, + { + "epoch": 23.47, + "learning_rate": 5.2040816326530614e-05, + "loss": 0.8757, + 
"step": 1160 + }, + { + "epoch": 23.49, + "learning_rate": 5.191326530612245e-05, + "loss": 0.9048, + "step": 1161 + }, + { + "epoch": 23.51, + "learning_rate": 5.1785714285714296e-05, + "loss": 0.9248, + "step": 1162 + }, + { + "epoch": 23.53, + "learning_rate": 5.1658163265306127e-05, + "loss": 0.9379, + "step": 1163 + }, + { + "epoch": 23.55, + "learning_rate": 5.153061224489796e-05, + "loss": 0.8596, + "step": 1164 + }, + { + "epoch": 23.57, + "learning_rate": 5.14030612244898e-05, + "loss": 0.9751, + "step": 1165 + }, + { + "epoch": 23.59, + "learning_rate": 5.127551020408163e-05, + "loss": 0.8842, + "step": 1166 + }, + { + "epoch": 23.61, + "learning_rate": 5.114795918367348e-05, + "loss": 0.8765, + "step": 1167 + }, + { + "epoch": 23.63, + "learning_rate": 5.102040816326531e-05, + "loss": 0.8942, + "step": 1168 + }, + { + "epoch": 23.65, + "learning_rate": 5.089285714285714e-05, + "loss": 0.938, + "step": 1169 + }, + { + "epoch": 23.67, + "learning_rate": 5.076530612244898e-05, + "loss": 0.8993, + "step": 1170 + }, + { + "epoch": 23.69, + "learning_rate": 5.063775510204082e-05, + "loss": 0.9362, + "step": 1171 + }, + { + "epoch": 23.71, + "learning_rate": 5.051020408163265e-05, + "loss": 0.9249, + "step": 1172 + }, + { + "epoch": 23.73, + "learning_rate": 5.0382653061224496e-05, + "loss": 0.9055, + "step": 1173 + }, + { + "epoch": 23.75, + "learning_rate": 5.025510204081633e-05, + "loss": 0.8967, + "step": 1174 + }, + { + "epoch": 23.77, + "learning_rate": 5.012755102040817e-05, + "loss": 0.8795, + "step": 1175 + }, + { + "epoch": 23.8, + "learning_rate": 5e-05, + "loss": 0.9452, + "step": 1176 + }, + { + "epoch": 23.82, + "learning_rate": 4.987244897959184e-05, + "loss": 0.926, + "step": 1177 + }, + { + "epoch": 23.84, + "learning_rate": 4.974489795918368e-05, + "loss": 0.8948, + "step": 1178 + }, + { + "epoch": 23.86, + "learning_rate": 4.961734693877551e-05, + "loss": 0.8926, + "step": 1179 + }, + { + "epoch": 23.88, + "learning_rate": 4.9489795918367346e-05, + "loss": 0.8949, + "step": 1180 + }, + { + "epoch": 23.9, + "learning_rate": 4.9362244897959184e-05, + "loss": 0.9648, + "step": 1181 + }, + { + "epoch": 23.92, + "learning_rate": 4.923469387755102e-05, + "loss": 0.9599, + "step": 1182 + }, + { + "epoch": 23.94, + "learning_rate": 4.910714285714286e-05, + "loss": 0.9603, + "step": 1183 + }, + { + "epoch": 23.96, + "learning_rate": 4.89795918367347e-05, + "loss": 0.9302, + "step": 1184 + }, + { + "epoch": 23.98, + "learning_rate": 4.8852040816326534e-05, + "loss": 0.9261, + "step": 1185 + }, + { + "epoch": 24.0, + "learning_rate": 4.872448979591837e-05, + "loss": 0.9257, + "step": 1186 + }, + { + "epoch": 24.02, + "learning_rate": 4.859693877551021e-05, + "loss": 0.8725, + "step": 1187 + }, + { + "epoch": 24.04, + "learning_rate": 4.846938775510204e-05, + "loss": 0.8486, + "step": 1188 + }, + { + "epoch": 24.06, + "learning_rate": 4.834183673469388e-05, + "loss": 0.8457, + "step": 1189 + }, + { + "epoch": 24.08, + "learning_rate": 4.8214285714285716e-05, + "loss": 0.7848, + "step": 1190 + }, + { + "epoch": 24.1, + "learning_rate": 4.8086734693877554e-05, + "loss": 0.8885, + "step": 1191 + }, + { + "epoch": 24.12, + "learning_rate": 4.795918367346939e-05, + "loss": 0.9099, + "step": 1192 + }, + { + "epoch": 24.14, + "learning_rate": 4.783163265306123e-05, + "loss": 0.9147, + "step": 1193 + }, + { + "epoch": 24.16, + "learning_rate": 4.7704081632653066e-05, + "loss": 0.8781, + "step": 1194 + }, + { + "epoch": 24.18, + "learning_rate": 4.7576530612244904e-05, + "loss": 0.8847, 
+ "step": 1195 + }, + { + "epoch": 24.2, + "learning_rate": 4.744897959183674e-05, + "loss": 0.9041, + "step": 1196 + }, + { + "epoch": 24.22, + "learning_rate": 4.732142857142857e-05, + "loss": 0.8639, + "step": 1197 + }, + { + "epoch": 24.24, + "learning_rate": 4.719387755102041e-05, + "loss": 0.8831, + "step": 1198 + }, + { + "epoch": 24.26, + "learning_rate": 4.706632653061225e-05, + "loss": 0.9063, + "step": 1199 + }, + { + "epoch": 24.28, + "learning_rate": 4.6938775510204086e-05, + "loss": 0.8753, + "step": 1200 + }, + { + "epoch": 24.3, + "learning_rate": 4.6811224489795916e-05, + "loss": 0.8977, + "step": 1201 + }, + { + "epoch": 24.32, + "learning_rate": 4.6683673469387754e-05, + "loss": 0.8729, + "step": 1202 + }, + { + "epoch": 24.34, + "learning_rate": 4.655612244897959e-05, + "loss": 0.898, + "step": 1203 + }, + { + "epoch": 24.36, + "learning_rate": 4.642857142857143e-05, + "loss": 0.8521, + "step": 1204 + }, + { + "epoch": 24.38, + "learning_rate": 4.630102040816327e-05, + "loss": 0.8563, + "step": 1205 + }, + { + "epoch": 24.4, + "learning_rate": 4.6173469387755105e-05, + "loss": 0.8462, + "step": 1206 + }, + { + "epoch": 24.42, + "learning_rate": 4.604591836734694e-05, + "loss": 0.8929, + "step": 1207 + }, + { + "epoch": 24.44, + "learning_rate": 4.591836734693878e-05, + "loss": 0.9154, + "step": 1208 + }, + { + "epoch": 24.46, + "learning_rate": 4.579081632653062e-05, + "loss": 0.811, + "step": 1209 + }, + { + "epoch": 24.48, + "learning_rate": 4.566326530612245e-05, + "loss": 0.8667, + "step": 1210 + }, + { + "epoch": 24.5, + "learning_rate": 4.5535714285714286e-05, + "loss": 0.9179, + "step": 1211 + }, + { + "epoch": 24.52, + "learning_rate": 4.5408163265306124e-05, + "loss": 0.8757, + "step": 1212 + }, + { + "epoch": 24.54, + "learning_rate": 4.528061224489796e-05, + "loss": 0.8519, + "step": 1213 + }, + { + "epoch": 24.56, + "learning_rate": 4.515306122448979e-05, + "loss": 0.9335, + "step": 1214 + }, + { + "epoch": 24.58, + "learning_rate": 4.502551020408164e-05, + "loss": 0.8785, + "step": 1215 + }, + { + "epoch": 24.6, + "learning_rate": 4.4897959183673474e-05, + "loss": 0.9022, + "step": 1216 + }, + { + "epoch": 24.62, + "learning_rate": 4.477040816326531e-05, + "loss": 0.9532, + "step": 1217 + }, + { + "epoch": 24.64, + "learning_rate": 4.464285714285715e-05, + "loss": 0.8956, + "step": 1218 + }, + { + "epoch": 24.67, + "learning_rate": 4.451530612244898e-05, + "loss": 0.8739, + "step": 1219 + }, + { + "epoch": 24.69, + "learning_rate": 4.438775510204082e-05, + "loss": 0.9312, + "step": 1220 + }, + { + "epoch": 24.71, + "learning_rate": 4.4260204081632656e-05, + "loss": 0.8536, + "step": 1221 + }, + { + "epoch": 24.73, + "learning_rate": 4.4132653061224493e-05, + "loss": 0.8984, + "step": 1222 + }, + { + "epoch": 24.75, + "learning_rate": 4.4005102040816324e-05, + "loss": 0.8949, + "step": 1223 + }, + { + "epoch": 24.77, + "learning_rate": 4.387755102040816e-05, + "loss": 0.9389, + "step": 1224 + }, + { + "epoch": 24.79, + "learning_rate": 4.375e-05, + "loss": 0.8703, + "step": 1225 + }, + { + "epoch": 24.81, + "learning_rate": 4.362244897959184e-05, + "loss": 0.9407, + "step": 1226 + }, + { + "epoch": 24.83, + "learning_rate": 4.3494897959183675e-05, + "loss": 0.9016, + "step": 1227 + }, + { + "epoch": 24.85, + "learning_rate": 4.336734693877551e-05, + "loss": 0.9025, + "step": 1228 + }, + { + "epoch": 24.87, + "learning_rate": 4.323979591836735e-05, + "loss": 0.9415, + "step": 1229 + }, + { + "epoch": 24.89, + "learning_rate": 4.311224489795919e-05, + "loss": 
0.9146, + "step": 1230 + }, + { + "epoch": 24.91, + "learning_rate": 4.2984693877551025e-05, + "loss": 0.9144, + "step": 1231 + }, + { + "epoch": 24.93, + "learning_rate": 4.2857142857142856e-05, + "loss": 0.9138, + "step": 1232 + }, + { + "epoch": 24.95, + "learning_rate": 4.2729591836734694e-05, + "loss": 0.9372, + "step": 1233 + }, + { + "epoch": 24.97, + "learning_rate": 4.260204081632653e-05, + "loss": 0.8701, + "step": 1234 + }, + { + "epoch": 24.99, + "learning_rate": 4.247448979591837e-05, + "loss": 0.9278, + "step": 1235 + }, + { + "epoch": 25.01, + "learning_rate": 4.234693877551021e-05, + "loss": 0.9157, + "step": 1236 + }, + { + "epoch": 25.03, + "learning_rate": 4.2219387755102045e-05, + "loss": 0.8852, + "step": 1237 + }, + { + "epoch": 25.05, + "learning_rate": 4.209183673469388e-05, + "loss": 0.855, + "step": 1238 + }, + { + "epoch": 25.07, + "learning_rate": 4.196428571428572e-05, + "loss": 0.8547, + "step": 1239 + }, + { + "epoch": 25.09, + "learning_rate": 4.183673469387756e-05, + "loss": 0.8691, + "step": 1240 + }, + { + "epoch": 25.11, + "learning_rate": 4.170918367346939e-05, + "loss": 0.9101, + "step": 1241 + }, + { + "epoch": 25.13, + "learning_rate": 4.1581632653061226e-05, + "loss": 0.8408, + "step": 1242 + }, + { + "epoch": 25.15, + "learning_rate": 4.1454081632653064e-05, + "loss": 0.9008, + "step": 1243 + }, + { + "epoch": 25.17, + "learning_rate": 4.13265306122449e-05, + "loss": 0.859, + "step": 1244 + }, + { + "epoch": 25.19, + "learning_rate": 4.119897959183674e-05, + "loss": 0.8525, + "step": 1245 + }, + { + "epoch": 25.21, + "learning_rate": 4.107142857142857e-05, + "loss": 0.8682, + "step": 1246 + }, + { + "epoch": 25.23, + "learning_rate": 4.094387755102041e-05, + "loss": 0.8426, + "step": 1247 + }, + { + "epoch": 25.25, + "learning_rate": 4.0816326530612245e-05, + "loss": 0.8948, + "step": 1248 + }, + { + "epoch": 25.27, + "learning_rate": 4.068877551020408e-05, + "loss": 0.8333, + "step": 1249 + }, + { + "epoch": 25.29, + "learning_rate": 4.056122448979592e-05, + "loss": 0.87, + "step": 1250 + }, + { + "epoch": 25.31, + "learning_rate": 4.043367346938776e-05, + "loss": 0.8215, + "step": 1251 + }, + { + "epoch": 25.33, + "learning_rate": 4.0306122448979596e-05, + "loss": 0.862, + "step": 1252 + }, + { + "epoch": 25.35, + "learning_rate": 4.017857142857143e-05, + "loss": 0.8607, + "step": 1253 + }, + { + "epoch": 25.37, + "learning_rate": 4.0051020408163264e-05, + "loss": 0.8218, + "step": 1254 + }, + { + "epoch": 25.39, + "learning_rate": 3.99234693877551e-05, + "loss": 0.806, + "step": 1255 + }, + { + "epoch": 25.41, + "learning_rate": 3.979591836734694e-05, + "loss": 0.8929, + "step": 1256 + }, + { + "epoch": 25.43, + "learning_rate": 3.966836734693878e-05, + "loss": 0.8551, + "step": 1257 + }, + { + "epoch": 25.45, + "learning_rate": 3.9540816326530615e-05, + "loss": 0.8408, + "step": 1258 + }, + { + "epoch": 25.47, + "learning_rate": 3.9413265306122446e-05, + "loss": 0.8819, + "step": 1259 + }, + { + "epoch": 25.49, + "learning_rate": 3.928571428571429e-05, + "loss": 0.8757, + "step": 1260 + }, + { + "epoch": 25.52, + "learning_rate": 3.915816326530613e-05, + "loss": 0.8778, + "step": 1261 + }, + { + "epoch": 25.54, + "learning_rate": 3.9030612244897965e-05, + "loss": 0.8524, + "step": 1262 + }, + { + "epoch": 25.56, + "learning_rate": 3.8903061224489796e-05, + "loss": 0.846, + "step": 1263 + }, + { + "epoch": 25.58, + "learning_rate": 3.8775510204081634e-05, + "loss": 0.8757, + "step": 1264 + }, + { + "epoch": 25.6, + "learning_rate": 
3.864795918367347e-05, + "loss": 0.9084, + "step": 1265 + }, + { + "epoch": 25.62, + "learning_rate": 3.852040816326531e-05, + "loss": 0.8826, + "step": 1266 + }, + { + "epoch": 25.64, + "learning_rate": 3.839285714285715e-05, + "loss": 0.8619, + "step": 1267 + }, + { + "epoch": 25.66, + "learning_rate": 3.826530612244898e-05, + "loss": 0.8942, + "step": 1268 + }, + { + "epoch": 25.68, + "learning_rate": 3.8137755102040815e-05, + "loss": 0.8342, + "step": 1269 + }, + { + "epoch": 25.7, + "learning_rate": 3.801020408163265e-05, + "loss": 0.8512, + "step": 1270 + }, + { + "epoch": 25.72, + "learning_rate": 3.788265306122449e-05, + "loss": 0.8393, + "step": 1271 + }, + { + "epoch": 25.74, + "learning_rate": 3.775510204081633e-05, + "loss": 0.8508, + "step": 1272 + }, + { + "epoch": 25.76, + "learning_rate": 3.7627551020408166e-05, + "loss": 0.9094, + "step": 1273 + }, + { + "epoch": 25.78, + "learning_rate": 3.7500000000000003e-05, + "loss": 0.9175, + "step": 1274 + }, + { + "epoch": 25.8, + "learning_rate": 3.737244897959184e-05, + "loss": 0.9179, + "step": 1275 + }, + { + "epoch": 25.82, + "learning_rate": 3.724489795918368e-05, + "loss": 0.869, + "step": 1276 + }, + { + "epoch": 25.84, + "learning_rate": 3.711734693877551e-05, + "loss": 0.8568, + "step": 1277 + }, + { + "epoch": 25.86, + "learning_rate": 3.698979591836735e-05, + "loss": 0.9104, + "step": 1278 + }, + { + "epoch": 25.88, + "learning_rate": 3.6862244897959185e-05, + "loss": 0.8912, + "step": 1279 + }, + { + "epoch": 25.9, + "learning_rate": 3.673469387755102e-05, + "loss": 0.878, + "step": 1280 + }, + { + "epoch": 25.92, + "learning_rate": 3.6607142857142853e-05, + "loss": 0.8711, + "step": 1281 + }, + { + "epoch": 25.94, + "learning_rate": 3.64795918367347e-05, + "loss": 0.9404, + "step": 1282 + }, + { + "epoch": 25.96, + "learning_rate": 3.6352040816326536e-05, + "loss": 0.9302, + "step": 1283 + }, + { + "epoch": 25.98, + "learning_rate": 3.622448979591837e-05, + "loss": 0.8907, + "step": 1284 + }, + { + "epoch": 26.0, + "learning_rate": 3.609693877551021e-05, + "loss": 0.8473, + "step": 1285 + }, + { + "epoch": 26.02, + "learning_rate": 3.596938775510204e-05, + "loss": 0.8482, + "step": 1286 + }, + { + "epoch": 26.04, + "learning_rate": 3.584183673469388e-05, + "loss": 0.8683, + "step": 1287 + }, + { + "epoch": 26.06, + "learning_rate": 3.571428571428572e-05, + "loss": 0.8443, + "step": 1288 + }, + { + "epoch": 26.08, + "learning_rate": 3.5586734693877555e-05, + "loss": 0.8462, + "step": 1289 + }, + { + "epoch": 26.1, + "learning_rate": 3.5459183673469385e-05, + "loss": 0.8204, + "step": 1290 + }, + { + "epoch": 26.12, + "learning_rate": 3.533163265306122e-05, + "loss": 0.8632, + "step": 1291 + }, + { + "epoch": 26.14, + "learning_rate": 3.520408163265306e-05, + "loss": 0.8883, + "step": 1292 + }, + { + "epoch": 26.16, + "learning_rate": 3.50765306122449e-05, + "loss": 0.8369, + "step": 1293 + }, + { + "epoch": 26.18, + "learning_rate": 3.4948979591836736e-05, + "loss": 0.8369, + "step": 1294 + }, + { + "epoch": 26.2, + "learning_rate": 3.4821428571428574e-05, + "loss": 0.8506, + "step": 1295 + }, + { + "epoch": 26.22, + "learning_rate": 3.469387755102041e-05, + "loss": 0.839, + "step": 1296 + }, + { + "epoch": 26.24, + "learning_rate": 3.456632653061225e-05, + "loss": 0.8421, + "step": 1297 + }, + { + "epoch": 26.26, + "learning_rate": 3.443877551020409e-05, + "loss": 0.8292, + "step": 1298 + }, + { + "epoch": 26.28, + "learning_rate": 3.431122448979592e-05, + "loss": 0.8412, + "step": 1299 + }, + { + "epoch": 26.3, + 
"learning_rate": 3.4183673469387755e-05, + "loss": 0.8305, + "step": 1300 + }, + { + "epoch": 26.32, + "learning_rate": 3.405612244897959e-05, + "loss": 0.8181, + "step": 1301 + }, + { + "epoch": 26.34, + "learning_rate": 3.392857142857143e-05, + "loss": 0.8588, + "step": 1302 + }, + { + "epoch": 26.36, + "learning_rate": 3.380102040816326e-05, + "loss": 0.8528, + "step": 1303 + }, + { + "epoch": 26.39, + "learning_rate": 3.36734693877551e-05, + "loss": 0.9055, + "step": 1304 + }, + { + "epoch": 26.41, + "learning_rate": 3.354591836734694e-05, + "loss": 0.8762, + "step": 1305 + }, + { + "epoch": 26.43, + "learning_rate": 3.341836734693878e-05, + "loss": 0.8507, + "step": 1306 + }, + { + "epoch": 26.45, + "learning_rate": 3.329081632653062e-05, + "loss": 0.8541, + "step": 1307 + }, + { + "epoch": 26.47, + "learning_rate": 3.316326530612245e-05, + "loss": 0.8205, + "step": 1308 + }, + { + "epoch": 26.49, + "learning_rate": 3.303571428571429e-05, + "loss": 0.8133, + "step": 1309 + }, + { + "epoch": 26.51, + "learning_rate": 3.2908163265306125e-05, + "loss": 0.8854, + "step": 1310 + }, + { + "epoch": 26.53, + "learning_rate": 3.278061224489796e-05, + "loss": 0.9397, + "step": 1311 + }, + { + "epoch": 26.55, + "learning_rate": 3.265306122448979e-05, + "loss": 0.854, + "step": 1312 + }, + { + "epoch": 26.57, + "learning_rate": 3.252551020408163e-05, + "loss": 0.8617, + "step": 1313 + }, + { + "epoch": 26.59, + "learning_rate": 3.239795918367347e-05, + "loss": 0.8739, + "step": 1314 + }, + { + "epoch": 26.61, + "learning_rate": 3.2270408163265306e-05, + "loss": 0.8139, + "step": 1315 + }, + { + "epoch": 26.63, + "learning_rate": 3.2142857142857144e-05, + "loss": 0.7575, + "step": 1316 + }, + { + "epoch": 26.65, + "learning_rate": 3.201530612244898e-05, + "loss": 0.846, + "step": 1317 + }, + { + "epoch": 26.67, + "learning_rate": 3.188775510204082e-05, + "loss": 0.8797, + "step": 1318 + }, + { + "epoch": 26.69, + "learning_rate": 3.176020408163266e-05, + "loss": 0.8525, + "step": 1319 + }, + { + "epoch": 26.71, + "learning_rate": 3.1632653061224494e-05, + "loss": 0.8276, + "step": 1320 + }, + { + "epoch": 26.73, + "learning_rate": 3.1505102040816325e-05, + "loss": 0.8734, + "step": 1321 + }, + { + "epoch": 26.75, + "learning_rate": 3.137755102040816e-05, + "loss": 0.8663, + "step": 1322 + }, + { + "epoch": 26.77, + "learning_rate": 3.125e-05, + "loss": 0.8354, + "step": 1323 + }, + { + "epoch": 26.79, + "learning_rate": 3.112244897959184e-05, + "loss": 0.8374, + "step": 1324 + }, + { + "epoch": 26.81, + "learning_rate": 3.0994897959183676e-05, + "loss": 0.9025, + "step": 1325 + }, + { + "epoch": 26.83, + "learning_rate": 3.086734693877551e-05, + "loss": 0.8618, + "step": 1326 + }, + { + "epoch": 26.85, + "learning_rate": 3.073979591836735e-05, + "loss": 0.8867, + "step": 1327 + }, + { + "epoch": 26.87, + "learning_rate": 3.061224489795919e-05, + "loss": 0.7864, + "step": 1328 + }, + { + "epoch": 26.89, + "learning_rate": 3.0484693877551023e-05, + "loss": 0.7706, + "step": 1329 + }, + { + "epoch": 26.91, + "learning_rate": 3.0357142857142857e-05, + "loss": 0.8677, + "step": 1330 + }, + { + "epoch": 26.93, + "learning_rate": 3.0229591836734695e-05, + "loss": 0.8619, + "step": 1331 + }, + { + "epoch": 26.95, + "learning_rate": 3.0102040816326533e-05, + "loss": 0.8487, + "step": 1332 + }, + { + "epoch": 26.97, + "learning_rate": 2.997448979591837e-05, + "loss": 0.8644, + "step": 1333 + }, + { + "epoch": 26.99, + "learning_rate": 2.9846938775510208e-05, + "loss": 0.8779, + "step": 1334 + }, + { + 
"epoch": 27.01, + "learning_rate": 2.9719387755102042e-05, + "loss": 0.8589, + "step": 1335 + }, + { + "epoch": 27.03, + "learning_rate": 2.959183673469388e-05, + "loss": 0.8214, + "step": 1336 + }, + { + "epoch": 27.05, + "learning_rate": 2.9464285714285718e-05, + "loss": 0.7907, + "step": 1337 + }, + { + "epoch": 27.07, + "learning_rate": 2.9336734693877555e-05, + "loss": 0.8493, + "step": 1338 + }, + { + "epoch": 27.09, + "learning_rate": 2.9209183673469386e-05, + "loss": 0.8423, + "step": 1339 + }, + { + "epoch": 27.11, + "learning_rate": 2.9081632653061224e-05, + "loss": 0.7737, + "step": 1340 + }, + { + "epoch": 27.13, + "learning_rate": 2.895408163265306e-05, + "loss": 0.792, + "step": 1341 + }, + { + "epoch": 27.15, + "learning_rate": 2.8826530612244902e-05, + "loss": 0.8145, + "step": 1342 + }, + { + "epoch": 27.17, + "learning_rate": 2.869897959183674e-05, + "loss": 0.8356, + "step": 1343 + }, + { + "epoch": 27.19, + "learning_rate": 2.857142857142857e-05, + "loss": 0.815, + "step": 1344 + }, + { + "epoch": 27.21, + "learning_rate": 2.844387755102041e-05, + "loss": 0.7806, + "step": 1345 + }, + { + "epoch": 27.23, + "learning_rate": 2.8316326530612246e-05, + "loss": 0.835, + "step": 1346 + }, + { + "epoch": 27.26, + "learning_rate": 2.8188775510204084e-05, + "loss": 0.8514, + "step": 1347 + }, + { + "epoch": 27.28, + "learning_rate": 2.8061224489795918e-05, + "loss": 0.8251, + "step": 1348 + }, + { + "epoch": 27.3, + "learning_rate": 2.7933673469387756e-05, + "loss": 0.8456, + "step": 1349 + }, + { + "epoch": 27.32, + "learning_rate": 2.7806122448979593e-05, + "loss": 0.8925, + "step": 1350 + }, + { + "epoch": 27.34, + "learning_rate": 2.767857142857143e-05, + "loss": 0.8284, + "step": 1351 + }, + { + "epoch": 27.36, + "learning_rate": 2.7551020408163265e-05, + "loss": 0.8471, + "step": 1352 + }, + { + "epoch": 27.38, + "learning_rate": 2.7423469387755103e-05, + "loss": 0.819, + "step": 1353 + }, + { + "epoch": 27.4, + "learning_rate": 2.729591836734694e-05, + "loss": 0.8474, + "step": 1354 + }, + { + "epoch": 27.42, + "learning_rate": 2.7168367346938778e-05, + "loss": 0.8378, + "step": 1355 + }, + { + "epoch": 27.44, + "learning_rate": 2.7040816326530616e-05, + "loss": 0.8383, + "step": 1356 + }, + { + "epoch": 27.46, + "learning_rate": 2.6913265306122447e-05, + "loss": 0.8534, + "step": 1357 + }, + { + "epoch": 27.48, + "learning_rate": 2.6785714285714288e-05, + "loss": 0.8243, + "step": 1358 + }, + { + "epoch": 27.5, + "learning_rate": 2.6658163265306125e-05, + "loss": 0.8467, + "step": 1359 + }, + { + "epoch": 27.52, + "learning_rate": 2.6530612244897963e-05, + "loss": 0.8503, + "step": 1360 + }, + { + "epoch": 27.54, + "learning_rate": 2.6403061224489794e-05, + "loss": 0.7655, + "step": 1361 + }, + { + "epoch": 27.56, + "learning_rate": 2.627551020408163e-05, + "loss": 0.854, + "step": 1362 + }, + { + "epoch": 27.58, + "learning_rate": 2.614795918367347e-05, + "loss": 0.838, + "step": 1363 + }, + { + "epoch": 27.6, + "learning_rate": 2.6020408163265307e-05, + "loss": 0.8275, + "step": 1364 + }, + { + "epoch": 27.62, + "learning_rate": 2.5892857142857148e-05, + "loss": 0.8494, + "step": 1365 + }, + { + "epoch": 27.64, + "learning_rate": 2.576530612244898e-05, + "loss": 0.842, + "step": 1366 + }, + { + "epoch": 27.66, + "learning_rate": 2.5637755102040816e-05, + "loss": 0.8176, + "step": 1367 + }, + { + "epoch": 27.68, + "learning_rate": 2.5510204081632654e-05, + "loss": 0.8301, + "step": 1368 + }, + { + "epoch": 27.7, + "learning_rate": 2.538265306122449e-05, + "loss": 
0.8182, + "step": 1369 + }, + { + "epoch": 27.72, + "learning_rate": 2.5255102040816326e-05, + "loss": 0.8067, + "step": 1370 + }, + { + "epoch": 27.74, + "learning_rate": 2.5127551020408164e-05, + "loss": 0.8322, + "step": 1371 + }, + { + "epoch": 27.76, + "learning_rate": 2.5e-05, + "loss": 0.828, + "step": 1372 + }, + { + "epoch": 27.78, + "learning_rate": 2.487244897959184e-05, + "loss": 0.8583, + "step": 1373 + }, + { + "epoch": 27.8, + "learning_rate": 2.4744897959183673e-05, + "loss": 0.8273, + "step": 1374 + }, + { + "epoch": 27.82, + "learning_rate": 2.461734693877551e-05, + "loss": 0.8292, + "step": 1375 + }, + { + "epoch": 27.84, + "learning_rate": 2.448979591836735e-05, + "loss": 0.9004, + "step": 1376 + }, + { + "epoch": 27.86, + "learning_rate": 2.4362244897959186e-05, + "loss": 0.8589, + "step": 1377 + }, + { + "epoch": 27.88, + "learning_rate": 2.423469387755102e-05, + "loss": 0.8559, + "step": 1378 + }, + { + "epoch": 27.9, + "learning_rate": 2.4107142857142858e-05, + "loss": 0.8224, + "step": 1379 + }, + { + "epoch": 27.92, + "learning_rate": 2.3979591836734696e-05, + "loss": 0.8438, + "step": 1380 + }, + { + "epoch": 27.94, + "learning_rate": 2.3852040816326533e-05, + "loss": 0.8267, + "step": 1381 + }, + { + "epoch": 27.96, + "learning_rate": 2.372448979591837e-05, + "loss": 0.8472, + "step": 1382 + }, + { + "epoch": 27.98, + "learning_rate": 2.3596938775510205e-05, + "loss": 0.835, + "step": 1383 + }, + { + "epoch": 28.0, + "learning_rate": 2.3469387755102043e-05, + "loss": 0.847, + "step": 1384 + }, + { + "epoch": 28.02, + "learning_rate": 2.3341836734693877e-05, + "loss": 0.865, + "step": 1385 + }, + { + "epoch": 28.04, + "learning_rate": 2.3214285714285715e-05, + "loss": 0.807, + "step": 1386 + }, + { + "epoch": 28.06, + "learning_rate": 2.3086734693877552e-05, + "loss": 0.8133, + "step": 1387 + }, + { + "epoch": 28.08, + "learning_rate": 2.295918367346939e-05, + "loss": 0.8242, + "step": 1388 + }, + { + "epoch": 28.1, + "learning_rate": 2.2831632653061224e-05, + "loss": 0.8142, + "step": 1389 + }, + { + "epoch": 28.13, + "learning_rate": 2.2704081632653062e-05, + "loss": 0.7772, + "step": 1390 + }, + { + "epoch": 28.15, + "learning_rate": 2.2576530612244896e-05, + "loss": 0.7885, + "step": 1391 + }, + { + "epoch": 28.17, + "learning_rate": 2.2448979591836737e-05, + "loss": 0.8096, + "step": 1392 + }, + { + "epoch": 28.19, + "learning_rate": 2.2321428571428575e-05, + "loss": 0.8497, + "step": 1393 + }, + { + "epoch": 28.21, + "learning_rate": 2.219387755102041e-05, + "loss": 0.8814, + "step": 1394 + }, + { + "epoch": 28.23, + "learning_rate": 2.2066326530612247e-05, + "loss": 0.8634, + "step": 1395 + }, + { + "epoch": 28.25, + "learning_rate": 2.193877551020408e-05, + "loss": 0.8084, + "step": 1396 + }, + { + "epoch": 28.27, + "learning_rate": 2.181122448979592e-05, + "loss": 0.7792, + "step": 1397 + }, + { + "epoch": 28.29, + "learning_rate": 2.1683673469387756e-05, + "loss": 0.7998, + "step": 1398 + }, + { + "epoch": 28.31, + "learning_rate": 2.1556122448979594e-05, + "loss": 0.7963, + "step": 1399 + }, + { + "epoch": 28.33, + "learning_rate": 2.1428571428571428e-05, + "loss": 0.8058, + "step": 1400 + } + ], + "logging_steps": 1, + "max_steps": 1568, + "num_train_epochs": 32, + "save_steps": 100, + "total_flos": 2.0726121875201434e+18, + "trial_name": null, + "trial_params": null +} diff --git a/checkpoint-1400/training_args.bin b/checkpoint-1400/training_args.bin new file mode 100644 index 
0000000000000000000000000000000000000000..db23e07d097c18532e52f58a70eb72d22e39c8c1 --- /dev/null +++ b/checkpoint-1400/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b610cbc4242bb50b4985b00e205994ae514fec6d9e2273f2b545a583a07b154b +size 4155 diff --git a/checkpoint-1500/README.md b/checkpoint-1500/README.md new file mode 100644 index 0000000000000000000000000000000000000000..08371015f02382e6fcba318f4aaea54ae52cd3c4 --- /dev/null +++ b/checkpoint-1500/README.md @@ -0,0 +1,34 @@ +--- +library_name: peft +--- +## Training procedure + + +The following `bitsandbytes` quantization config was used during training: +- quant_method: bitsandbytes +- load_in_8bit: True +- load_in_4bit: False +- llm_int8_threshold: 6.0 +- llm_int8_skip_modules: None +- llm_int8_enable_fp32_cpu_offload: False +- llm_int8_has_fp16_weight: False +- bnb_4bit_quant_type: fp4 +- bnb_4bit_use_double_quant: False +- bnb_4bit_compute_dtype: float32 + +The following `bitsandbytes` quantization config was used during training: +- quant_method: bitsandbytes +- load_in_8bit: True +- load_in_4bit: False +- llm_int8_threshold: 6.0 +- llm_int8_skip_modules: None +- llm_int8_enable_fp32_cpu_offload: False +- llm_int8_has_fp16_weight: False +- bnb_4bit_quant_type: fp4 +- bnb_4bit_use_double_quant: False +- bnb_4bit_compute_dtype: float32 +### Framework versions + +- PEFT 0.6.0.dev0 + +- PEFT 0.6.0.dev0 diff --git a/checkpoint-1500/adapter_config.json b/checkpoint-1500/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..751d838ac0c1ae5ca71ca448b25d7a8a0173f01b --- /dev/null +++ b/checkpoint-1500/adapter_config.json @@ -0,0 +1,23 @@ +{ + "auto_mapping": null, + "base_model_name_or_path": "bigscience/bloomz-3b", + "bias": "none", + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": true, + "layers_pattern": null, + "layers_to_transform": null, + "lora_alpha": 16, + "lora_dropout": 0.0, + "modules_to_save": null, + "peft_type": "LORA", + "r": 8, + "revision": null, + "target_modules": [ + "dense_4h_to_h", + "dense", + "dense_h_to_4h", + "query_key_value" + ], + "task_type": "CAUSAL_LM" +} \ No newline at end of file diff --git a/checkpoint-1500/adapter_model.bin b/checkpoint-1500/adapter_model.bin new file mode 100644 index 0000000000000000000000000000000000000000..a54ef173d463fbaf57af88becaba0148a83f440c --- /dev/null +++ b/checkpoint-1500/adapter_model.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:05c58591db5338161dc5efc6d86246b821e471c8a3358428e51649d2072b2575 +size 39409357 diff --git a/checkpoint-1500/optimizer.pt b/checkpoint-1500/optimizer.pt new file mode 100644 index 0000000000000000000000000000000000000000..1409a7543904f8b214da7709e6e9b9ba9b4852de --- /dev/null +++ b/checkpoint-1500/optimizer.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:12b96929e734f9083d1f6be11dba845f54c010dca8edd68a875fe686f90e5891 +size 78844421 diff --git a/checkpoint-1500/rng_state.pth b/checkpoint-1500/rng_state.pth new file mode 100644 index 0000000000000000000000000000000000000000..d6f0f64e8115144f892f2b588cbb58afc5152fb1 --- /dev/null +++ b/checkpoint-1500/rng_state.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:91db4cb8b4c509d633633237f7fc5865ed0256714ee41edb2e45f150f9672a02 +size 14575 diff --git a/checkpoint-1500/scheduler.pt b/checkpoint-1500/scheduler.pt new file mode 100644 index 
0000000000000000000000000000000000000000..8a98039d2baf48d02abeb1652e29e136cea7416e --- /dev/null +++ b/checkpoint-1500/scheduler.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9ba3724142851a49b42c448d0d165013e44b8dc1b4b3045a373eeb2f22eeefb4 +size 627 diff --git a/checkpoint-1500/special_tokens_map.json b/checkpoint-1500/special_tokens_map.json new file mode 100644 index 0000000000000000000000000000000000000000..fdafe480f024ff444c7492147536765ce5d55a2d --- /dev/null +++ b/checkpoint-1500/special_tokens_map.json @@ -0,0 +1,6 @@ +{ + "bos_token": "", + "eos_token": "", + "pad_token": "", + "unk_token": "" +} diff --git a/checkpoint-1500/tokenizer.json b/checkpoint-1500/tokenizer.json new file mode 100644 index 0000000000000000000000000000000000000000..dbf002cafbd4818dcff2abc9156c088d681b4533 --- /dev/null +++ b/checkpoint-1500/tokenizer.json @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:17a208233d2ee8d8c83b23bc214df737c44806a1919f444e89b31e586cd956ba +size 14500471 diff --git a/checkpoint-1500/tokenizer_config.json b/checkpoint-1500/tokenizer_config.json new file mode 100644 index 0000000000000000000000000000000000000000..4b56cc9c2965c07132c35df3e2972e93d98c82c3 --- /dev/null +++ b/checkpoint-1500/tokenizer_config.json @@ -0,0 +1,10 @@ +{ + "add_prefix_space": false, + "bos_token": "", + "clean_up_tokenization_spaces": false, + "eos_token": "", + "model_max_length": 1000000000000000019884624838656, + "pad_token": "", + "tokenizer_class": "BloomTokenizer", + "unk_token": "" +} diff --git a/checkpoint-1500/trainer_state.json b/checkpoint-1500/trainer_state.json new file mode 100644 index 0000000000000000000000000000000000000000..93bbab40f0d15b7f566bc093e10aab0d70811d66 --- /dev/null +++ b/checkpoint-1500/trainer_state.json @@ -0,0 +1,9019 @@ +{ + "best_metric": null, + "best_model_checkpoint": null, + "epoch": 30.350932658868164, + "eval_steps": 500, + "global_step": 1500, + "is_hyper_param_search": false, + "is_local_process_zero": true, + "is_world_process_zero": true, + "log_history": [ + { + "epoch": 0.02, + "learning_rate": 0.00019987244897959184, + "loss": 3.2215, + "step": 1 + }, + { + "epoch": 0.04, + "learning_rate": 0.00019974489795918367, + "loss": 2.8365, + "step": 2 + }, + { + "epoch": 0.06, + "learning_rate": 0.00019961734693877553, + "loss": 2.602, + "step": 3 + }, + { + "epoch": 0.08, + "learning_rate": 0.00019948979591836736, + "loss": 2.4196, + "step": 4 + }, + { + "epoch": 0.1, + "learning_rate": 0.0001993622448979592, + "loss": 2.2574, + "step": 5 + }, + { + "epoch": 0.12, + "learning_rate": 0.00019923469387755102, + "loss": 2.2239, + "step": 6 + }, + { + "epoch": 0.14, + "learning_rate": 0.00019910714285714288, + "loss": 2.1661, + "step": 7 + }, + { + "epoch": 0.16, + "learning_rate": 0.0001989795918367347, + "loss": 2.0987, + "step": 8 + }, + { + "epoch": 0.18, + "learning_rate": 0.00019885204081632654, + "loss": 2.015, + "step": 9 + }, + { + "epoch": 0.2, + "learning_rate": 0.00019872448979591837, + "loss": 1.9771, + "step": 10 + }, + { + "epoch": 0.22, + "learning_rate": 0.00019859693877551023, + "loss": 2.0271, + "step": 11 + }, + { + "epoch": 0.24, + "learning_rate": 0.00019846938775510203, + "loss": 1.9812, + "step": 12 + }, + { + "epoch": 0.26, + "learning_rate": 0.0001983418367346939, + "loss": 2.0834, + "step": 13 + }, + { + "epoch": 0.28, + "learning_rate": 0.00019821428571428572, + "loss": 1.9174, + "step": 14 + }, + { + "epoch": 0.3, + "learning_rate": 0.00019808673469387755, + "loss": 1.8409, + "step": 15 + 
}, + { + "epoch": 0.32, + "learning_rate": 0.00019795918367346938, + "loss": 1.929, + "step": 16 + }, + { + "epoch": 0.34, + "learning_rate": 0.00019783163265306124, + "loss": 2.0041, + "step": 17 + }, + { + "epoch": 0.36, + "learning_rate": 0.00019770408163265305, + "loss": 1.9385, + "step": 18 + }, + { + "epoch": 0.38, + "learning_rate": 0.0001975765306122449, + "loss": 1.9592, + "step": 19 + }, + { + "epoch": 0.4, + "learning_rate": 0.00019744897959183674, + "loss": 1.9701, + "step": 20 + }, + { + "epoch": 0.42, + "learning_rate": 0.0001973214285714286, + "loss": 1.9277, + "step": 21 + }, + { + "epoch": 0.45, + "learning_rate": 0.00019719387755102042, + "loss": 1.8394, + "step": 22 + }, + { + "epoch": 0.47, + "learning_rate": 0.00019706632653061226, + "loss": 1.8666, + "step": 23 + }, + { + "epoch": 0.49, + "learning_rate": 0.00019693877551020409, + "loss": 1.8997, + "step": 24 + }, + { + "epoch": 0.51, + "learning_rate": 0.00019681122448979592, + "loss": 1.9432, + "step": 25 + }, + { + "epoch": 0.53, + "learning_rate": 0.00019668367346938777, + "loss": 1.9137, + "step": 26 + }, + { + "epoch": 0.55, + "learning_rate": 0.0001965561224489796, + "loss": 1.905, + "step": 27 + }, + { + "epoch": 0.57, + "learning_rate": 0.00019642857142857144, + "loss": 1.8708, + "step": 28 + }, + { + "epoch": 0.59, + "learning_rate": 0.00019630102040816327, + "loss": 1.9097, + "step": 29 + }, + { + "epoch": 0.61, + "learning_rate": 0.00019617346938775513, + "loss": 1.896, + "step": 30 + }, + { + "epoch": 0.63, + "learning_rate": 0.00019604591836734696, + "loss": 1.8834, + "step": 31 + }, + { + "epoch": 0.65, + "learning_rate": 0.0001959183673469388, + "loss": 1.8323, + "step": 32 + }, + { + "epoch": 0.67, + "learning_rate": 0.00019579081632653062, + "loss": 1.804, + "step": 33 + }, + { + "epoch": 0.69, + "learning_rate": 0.00019566326530612248, + "loss": 1.8906, + "step": 34 + }, + { + "epoch": 0.71, + "learning_rate": 0.00019553571428571428, + "loss": 1.8693, + "step": 35 + }, + { + "epoch": 0.73, + "learning_rate": 0.00019540816326530614, + "loss": 1.9308, + "step": 36 + }, + { + "epoch": 0.75, + "learning_rate": 0.00019528061224489797, + "loss": 1.8082, + "step": 37 + }, + { + "epoch": 0.77, + "learning_rate": 0.0001951530612244898, + "loss": 1.848, + "step": 38 + }, + { + "epoch": 0.79, + "learning_rate": 0.00019502551020408163, + "loss": 1.8866, + "step": 39 + }, + { + "epoch": 0.81, + "learning_rate": 0.0001948979591836735, + "loss": 1.7844, + "step": 40 + }, + { + "epoch": 0.83, + "learning_rate": 0.0001947704081632653, + "loss": 1.8485, + "step": 41 + }, + { + "epoch": 0.85, + "learning_rate": 0.00019464285714285715, + "loss": 1.7917, + "step": 42 + }, + { + "epoch": 0.87, + "learning_rate": 0.00019451530612244898, + "loss": 1.7342, + "step": 43 + }, + { + "epoch": 0.89, + "learning_rate": 0.00019438775510204084, + "loss": 1.8479, + "step": 44 + }, + { + "epoch": 0.91, + "learning_rate": 0.00019426020408163267, + "loss": 1.8639, + "step": 45 + }, + { + "epoch": 0.93, + "learning_rate": 0.0001941326530612245, + "loss": 1.8166, + "step": 46 + }, + { + "epoch": 0.95, + "learning_rate": 0.00019400510204081633, + "loss": 1.7566, + "step": 47 + }, + { + "epoch": 0.97, + "learning_rate": 0.00019387755102040816, + "loss": 1.8071, + "step": 48 + }, + { + "epoch": 0.99, + "learning_rate": 0.00019375000000000002, + "loss": 1.8612, + "step": 49 + }, + { + "epoch": 1.01, + "learning_rate": 0.00019362244897959185, + "loss": 1.7819, + "step": 50 + }, + { + "epoch": 1.03, + "learning_rate": 0.00019349489795918368, + 
"loss": 1.8647, + "step": 51 + }, + { + "epoch": 1.05, + "learning_rate": 0.0001933673469387755, + "loss": 1.8196, + "step": 52 + }, + { + "epoch": 1.07, + "learning_rate": 0.00019323979591836737, + "loss": 1.8027, + "step": 53 + }, + { + "epoch": 1.09, + "learning_rate": 0.00019311224489795917, + "loss": 1.8927, + "step": 54 + }, + { + "epoch": 1.11, + "learning_rate": 0.00019298469387755103, + "loss": 1.8481, + "step": 55 + }, + { + "epoch": 1.13, + "learning_rate": 0.00019285714285714286, + "loss": 1.7781, + "step": 56 + }, + { + "epoch": 1.15, + "learning_rate": 0.00019272959183673472, + "loss": 1.8101, + "step": 57 + }, + { + "epoch": 1.17, + "learning_rate": 0.00019260204081632653, + "loss": 1.7257, + "step": 58 + }, + { + "epoch": 1.19, + "learning_rate": 0.00019247448979591838, + "loss": 1.8185, + "step": 59 + }, + { + "epoch": 1.21, + "learning_rate": 0.00019234693877551021, + "loss": 1.8557, + "step": 60 + }, + { + "epoch": 1.23, + "learning_rate": 0.00019221938775510204, + "loss": 1.7418, + "step": 61 + }, + { + "epoch": 1.25, + "learning_rate": 0.00019209183673469388, + "loss": 1.6879, + "step": 62 + }, + { + "epoch": 1.27, + "learning_rate": 0.00019196428571428573, + "loss": 1.7651, + "step": 63 + }, + { + "epoch": 1.29, + "learning_rate": 0.00019183673469387756, + "loss": 1.7759, + "step": 64 + }, + { + "epoch": 1.32, + "learning_rate": 0.0001917091836734694, + "loss": 1.7691, + "step": 65 + }, + { + "epoch": 1.34, + "learning_rate": 0.00019158163265306123, + "loss": 1.7794, + "step": 66 + }, + { + "epoch": 1.36, + "learning_rate": 0.00019145408163265306, + "loss": 1.8152, + "step": 67 + }, + { + "epoch": 1.38, + "learning_rate": 0.00019132653061224492, + "loss": 1.8052, + "step": 68 + }, + { + "epoch": 1.4, + "learning_rate": 0.00019119897959183675, + "loss": 1.8054, + "step": 69 + }, + { + "epoch": 1.42, + "learning_rate": 0.00019107142857142858, + "loss": 1.8114, + "step": 70 + }, + { + "epoch": 1.44, + "learning_rate": 0.0001909438775510204, + "loss": 1.7749, + "step": 71 + }, + { + "epoch": 1.46, + "learning_rate": 0.00019081632653061227, + "loss": 1.777, + "step": 72 + }, + { + "epoch": 1.48, + "learning_rate": 0.0001906887755102041, + "loss": 1.7896, + "step": 73 + }, + { + "epoch": 1.5, + "learning_rate": 0.00019056122448979593, + "loss": 1.8335, + "step": 74 + }, + { + "epoch": 1.52, + "learning_rate": 0.00019043367346938776, + "loss": 1.8155, + "step": 75 + }, + { + "epoch": 1.54, + "learning_rate": 0.00019030612244897962, + "loss": 1.8224, + "step": 76 + }, + { + "epoch": 1.56, + "learning_rate": 0.00019017857142857142, + "loss": 1.7889, + "step": 77 + }, + { + "epoch": 1.58, + "learning_rate": 0.00019005102040816328, + "loss": 1.8866, + "step": 78 + }, + { + "epoch": 1.6, + "learning_rate": 0.0001899234693877551, + "loss": 1.8439, + "step": 79 + }, + { + "epoch": 1.62, + "learning_rate": 0.00018979591836734697, + "loss": 1.7906, + "step": 80 + }, + { + "epoch": 1.64, + "learning_rate": 0.00018966836734693877, + "loss": 1.8627, + "step": 81 + }, + { + "epoch": 1.66, + "learning_rate": 0.00018954081632653063, + "loss": 1.7497, + "step": 82 + }, + { + "epoch": 1.68, + "learning_rate": 0.00018941326530612246, + "loss": 1.7936, + "step": 83 + }, + { + "epoch": 1.7, + "learning_rate": 0.0001892857142857143, + "loss": 1.8341, + "step": 84 + }, + { + "epoch": 1.72, + "learning_rate": 0.00018915816326530612, + "loss": 1.7868, + "step": 85 + }, + { + "epoch": 1.74, + "learning_rate": 0.00018903061224489798, + "loss": 1.7493, + "step": 86 + }, + { + "epoch": 1.76, + 
"learning_rate": 0.0001889030612244898, + "loss": 1.7926, + "step": 87 + }, + { + "epoch": 1.78, + "learning_rate": 0.00018877551020408164, + "loss": 1.8278, + "step": 88 + }, + { + "epoch": 1.8, + "learning_rate": 0.00018864795918367347, + "loss": 1.7387, + "step": 89 + }, + { + "epoch": 1.82, + "learning_rate": 0.0001885204081632653, + "loss": 1.7669, + "step": 90 + }, + { + "epoch": 1.84, + "learning_rate": 0.00018839285714285716, + "loss": 1.7686, + "step": 91 + }, + { + "epoch": 1.86, + "learning_rate": 0.000188265306122449, + "loss": 1.7759, + "step": 92 + }, + { + "epoch": 1.88, + "learning_rate": 0.00018813775510204082, + "loss": 1.7016, + "step": 93 + }, + { + "epoch": 1.9, + "learning_rate": 0.00018801020408163265, + "loss": 1.8123, + "step": 94 + }, + { + "epoch": 1.92, + "learning_rate": 0.0001878826530612245, + "loss": 1.8315, + "step": 95 + }, + { + "epoch": 1.94, + "learning_rate": 0.00018775510204081634, + "loss": 1.7679, + "step": 96 + }, + { + "epoch": 1.96, + "learning_rate": 0.00018762755102040817, + "loss": 1.7874, + "step": 97 + }, + { + "epoch": 1.98, + "learning_rate": 0.0001875, + "loss": 1.8008, + "step": 98 + }, + { + "epoch": 2.0, + "learning_rate": 0.00018737244897959186, + "loss": 1.7177, + "step": 99 + }, + { + "epoch": 2.02, + "learning_rate": 0.00018724489795918367, + "loss": 1.7272, + "step": 100 + }, + { + "epoch": 2.04, + "learning_rate": 0.00018711734693877552, + "loss": 1.7848, + "step": 101 + }, + { + "epoch": 2.06, + "learning_rate": 0.00018698979591836735, + "loss": 1.744, + "step": 102 + }, + { + "epoch": 2.08, + "learning_rate": 0.00018686224489795919, + "loss": 1.7005, + "step": 103 + }, + { + "epoch": 2.1, + "learning_rate": 0.00018673469387755102, + "loss": 1.8247, + "step": 104 + }, + { + "epoch": 2.12, + "learning_rate": 0.00018660714285714287, + "loss": 1.6855, + "step": 105 + }, + { + "epoch": 2.14, + "learning_rate": 0.0001864795918367347, + "loss": 1.7627, + "step": 106 + }, + { + "epoch": 2.17, + "learning_rate": 0.00018635204081632654, + "loss": 1.7564, + "step": 107 + }, + { + "epoch": 2.19, + "learning_rate": 0.00018622448979591837, + "loss": 1.8237, + "step": 108 + }, + { + "epoch": 2.21, + "learning_rate": 0.00018609693877551022, + "loss": 1.7421, + "step": 109 + }, + { + "epoch": 2.23, + "learning_rate": 0.00018596938775510206, + "loss": 1.7517, + "step": 110 + }, + { + "epoch": 2.25, + "learning_rate": 0.0001858418367346939, + "loss": 1.7515, + "step": 111 + }, + { + "epoch": 2.27, + "learning_rate": 0.00018571428571428572, + "loss": 1.7842, + "step": 112 + }, + { + "epoch": 2.29, + "learning_rate": 0.00018558673469387755, + "loss": 1.8001, + "step": 113 + }, + { + "epoch": 2.31, + "learning_rate": 0.0001854591836734694, + "loss": 1.7653, + "step": 114 + }, + { + "epoch": 2.33, + "learning_rate": 0.00018533163265306124, + "loss": 1.694, + "step": 115 + }, + { + "epoch": 2.35, + "learning_rate": 0.00018520408163265307, + "loss": 1.7457, + "step": 116 + }, + { + "epoch": 2.37, + "learning_rate": 0.0001850765306122449, + "loss": 1.7899, + "step": 117 + }, + { + "epoch": 2.39, + "learning_rate": 0.00018494897959183676, + "loss": 1.7473, + "step": 118 + }, + { + "epoch": 2.41, + "learning_rate": 0.0001848214285714286, + "loss": 1.6639, + "step": 119 + }, + { + "epoch": 2.43, + "learning_rate": 0.00018469387755102042, + "loss": 1.762, + "step": 120 + }, + { + "epoch": 2.45, + "learning_rate": 0.00018456632653061225, + "loss": 1.7378, + "step": 121 + }, + { + "epoch": 2.47, + "learning_rate": 0.0001844387755102041, + "loss": 1.672, + 
"step": 122 + }, + { + "epoch": 2.49, + "learning_rate": 0.0001843112244897959, + "loss": 1.7267, + "step": 123 + }, + { + "epoch": 2.51, + "learning_rate": 0.00018418367346938777, + "loss": 1.7825, + "step": 124 + }, + { + "epoch": 2.53, + "learning_rate": 0.0001840561224489796, + "loss": 1.7566, + "step": 125 + }, + { + "epoch": 2.55, + "learning_rate": 0.00018392857142857143, + "loss": 1.8169, + "step": 126 + }, + { + "epoch": 2.57, + "learning_rate": 0.00018380102040816326, + "loss": 1.6801, + "step": 127 + }, + { + "epoch": 2.59, + "learning_rate": 0.00018367346938775512, + "loss": 1.7292, + "step": 128 + }, + { + "epoch": 2.61, + "learning_rate": 0.00018354591836734695, + "loss": 1.737, + "step": 129 + }, + { + "epoch": 2.63, + "learning_rate": 0.00018341836734693878, + "loss": 1.7696, + "step": 130 + }, + { + "epoch": 2.65, + "learning_rate": 0.0001832908163265306, + "loss": 1.7239, + "step": 131 + }, + { + "epoch": 2.67, + "learning_rate": 0.00018316326530612247, + "loss": 1.7441, + "step": 132 + }, + { + "epoch": 2.69, + "learning_rate": 0.0001830357142857143, + "loss": 1.7825, + "step": 133 + }, + { + "epoch": 2.71, + "learning_rate": 0.00018290816326530613, + "loss": 1.7411, + "step": 134 + }, + { + "epoch": 2.73, + "learning_rate": 0.00018278061224489796, + "loss": 1.7119, + "step": 135 + }, + { + "epoch": 2.75, + "learning_rate": 0.0001826530612244898, + "loss": 1.7443, + "step": 136 + }, + { + "epoch": 2.77, + "learning_rate": 0.00018252551020408165, + "loss": 1.7197, + "step": 137 + }, + { + "epoch": 2.79, + "learning_rate": 0.00018239795918367348, + "loss": 1.7273, + "step": 138 + }, + { + "epoch": 2.81, + "learning_rate": 0.0001822704081632653, + "loss": 1.7681, + "step": 139 + }, + { + "epoch": 2.83, + "learning_rate": 0.00018214285714285714, + "loss": 1.8088, + "step": 140 + }, + { + "epoch": 2.85, + "learning_rate": 0.000182015306122449, + "loss": 1.7301, + "step": 141 + }, + { + "epoch": 2.87, + "learning_rate": 0.00018188775510204083, + "loss": 1.6853, + "step": 142 + }, + { + "epoch": 2.89, + "learning_rate": 0.00018176020408163266, + "loss": 1.6966, + "step": 143 + }, + { + "epoch": 2.91, + "learning_rate": 0.0001816326530612245, + "loss": 1.7938, + "step": 144 + }, + { + "epoch": 2.93, + "learning_rate": 0.00018150510204081635, + "loss": 1.7639, + "step": 145 + }, + { + "epoch": 2.95, + "learning_rate": 0.00018137755102040816, + "loss": 1.7527, + "step": 146 + }, + { + "epoch": 2.97, + "learning_rate": 0.00018125000000000001, + "loss": 1.7386, + "step": 147 + }, + { + "epoch": 2.99, + "learning_rate": 0.00018112244897959185, + "loss": 1.7223, + "step": 148 + }, + { + "epoch": 3.01, + "learning_rate": 0.00018099489795918368, + "loss": 1.7571, + "step": 149 + }, + { + "epoch": 3.04, + "learning_rate": 0.0001808673469387755, + "loss": 1.7054, + "step": 150 + }, + { + "epoch": 3.06, + "learning_rate": 0.00018073979591836737, + "loss": 1.6581, + "step": 151 + }, + { + "epoch": 3.08, + "learning_rate": 0.00018061224489795917, + "loss": 1.681, + "step": 152 + }, + { + "epoch": 3.1, + "learning_rate": 0.00018048469387755103, + "loss": 1.7425, + "step": 153 + }, + { + "epoch": 3.12, + "learning_rate": 0.00018035714285714286, + "loss": 1.7108, + "step": 154 + }, + { + "epoch": 3.14, + "learning_rate": 0.00018022959183673472, + "loss": 1.7194, + "step": 155 + }, + { + "epoch": 3.16, + "learning_rate": 0.00018010204081632655, + "loss": 1.6953, + "step": 156 + }, + { + "epoch": 3.18, + "learning_rate": 0.00017997448979591838, + "loss": 1.669, + "step": 157 + }, + { + "epoch": 
3.2, + "learning_rate": 0.0001798469387755102, + "loss": 1.744, + "step": 158 + }, + { + "epoch": 3.22, + "learning_rate": 0.00017971938775510204, + "loss": 1.6467, + "step": 159 + }, + { + "epoch": 3.24, + "learning_rate": 0.0001795918367346939, + "loss": 1.7103, + "step": 160 + }, + { + "epoch": 3.26, + "learning_rate": 0.00017946428571428573, + "loss": 1.6662, + "step": 161 + }, + { + "epoch": 3.28, + "learning_rate": 0.00017933673469387756, + "loss": 1.6657, + "step": 162 + }, + { + "epoch": 3.3, + "learning_rate": 0.0001792091836734694, + "loss": 1.791, + "step": 163 + }, + { + "epoch": 3.32, + "learning_rate": 0.00017908163265306125, + "loss": 1.7704, + "step": 164 + }, + { + "epoch": 3.34, + "learning_rate": 0.00017895408163265305, + "loss": 1.7229, + "step": 165 + }, + { + "epoch": 3.36, + "learning_rate": 0.0001788265306122449, + "loss": 1.76, + "step": 166 + }, + { + "epoch": 3.38, + "learning_rate": 0.00017869897959183674, + "loss": 1.6482, + "step": 167 + }, + { + "epoch": 3.4, + "learning_rate": 0.0001785714285714286, + "loss": 1.8076, + "step": 168 + }, + { + "epoch": 3.42, + "learning_rate": 0.0001784438775510204, + "loss": 1.7368, + "step": 169 + }, + { + "epoch": 3.44, + "learning_rate": 0.00017831632653061226, + "loss": 1.6264, + "step": 170 + }, + { + "epoch": 3.46, + "learning_rate": 0.0001781887755102041, + "loss": 1.6289, + "step": 171 + }, + { + "epoch": 3.48, + "learning_rate": 0.00017806122448979592, + "loss": 1.7913, + "step": 172 + }, + { + "epoch": 3.5, + "learning_rate": 0.00017793367346938775, + "loss": 1.6985, + "step": 173 + }, + { + "epoch": 3.52, + "learning_rate": 0.0001778061224489796, + "loss": 1.6936, + "step": 174 + }, + { + "epoch": 3.54, + "learning_rate": 0.00017767857142857141, + "loss": 1.8068, + "step": 175 + }, + { + "epoch": 3.56, + "learning_rate": 0.00017755102040816327, + "loss": 1.7243, + "step": 176 + }, + { + "epoch": 3.58, + "learning_rate": 0.0001774234693877551, + "loss": 1.6893, + "step": 177 + }, + { + "epoch": 3.6, + "learning_rate": 0.00017729591836734696, + "loss": 1.8122, + "step": 178 + }, + { + "epoch": 3.62, + "learning_rate": 0.0001771683673469388, + "loss": 1.6562, + "step": 179 + }, + { + "epoch": 3.64, + "learning_rate": 0.00017704081632653062, + "loss": 1.6999, + "step": 180 + }, + { + "epoch": 3.66, + "learning_rate": 0.00017691326530612245, + "loss": 1.7229, + "step": 181 + }, + { + "epoch": 3.68, + "learning_rate": 0.00017678571428571428, + "loss": 1.6764, + "step": 182 + }, + { + "epoch": 3.7, + "learning_rate": 0.00017665816326530614, + "loss": 1.6982, + "step": 183 + }, + { + "epoch": 3.72, + "learning_rate": 0.00017653061224489797, + "loss": 1.696, + "step": 184 + }, + { + "epoch": 3.74, + "learning_rate": 0.0001764030612244898, + "loss": 1.6797, + "step": 185 + }, + { + "epoch": 3.76, + "learning_rate": 0.00017627551020408164, + "loss": 1.637, + "step": 186 + }, + { + "epoch": 3.78, + "learning_rate": 0.0001761479591836735, + "loss": 1.7074, + "step": 187 + }, + { + "epoch": 3.8, + "learning_rate": 0.0001760204081632653, + "loss": 1.705, + "step": 188 + }, + { + "epoch": 3.82, + "learning_rate": 0.00017589285714285716, + "loss": 1.6153, + "step": 189 + }, + { + "epoch": 3.84, + "learning_rate": 0.00017576530612244899, + "loss": 1.7354, + "step": 190 + }, + { + "epoch": 3.86, + "learning_rate": 0.00017563775510204084, + "loss": 1.6941, + "step": 191 + }, + { + "epoch": 3.88, + "learning_rate": 0.00017551020408163265, + "loss": 1.7231, + "step": 192 + }, + { + "epoch": 3.91, + "learning_rate": 
0.0001753826530612245, + "loss": 1.7663, + "step": 193 + }, + { + "epoch": 3.93, + "learning_rate": 0.00017525510204081634, + "loss": 1.6532, + "step": 194 + }, + { + "epoch": 3.95, + "learning_rate": 0.00017512755102040817, + "loss": 1.7115, + "step": 195 + }, + { + "epoch": 3.97, + "learning_rate": 0.000175, + "loss": 1.6955, + "step": 196 + }, + { + "epoch": 3.99, + "learning_rate": 0.00017487244897959186, + "loss": 1.6863, + "step": 197 + }, + { + "epoch": 4.01, + "learning_rate": 0.00017474489795918366, + "loss": 1.7012, + "step": 198 + }, + { + "epoch": 4.03, + "learning_rate": 0.00017461734693877552, + "loss": 1.5927, + "step": 199 + }, + { + "epoch": 4.05, + "learning_rate": 0.00017448979591836735, + "loss": 1.6272, + "step": 200 + }, + { + "epoch": 4.07, + "learning_rate": 0.00017436224489795918, + "loss": 1.5994, + "step": 201 + }, + { + "epoch": 4.09, + "learning_rate": 0.00017423469387755104, + "loss": 1.7141, + "step": 202 + }, + { + "epoch": 4.11, + "learning_rate": 0.00017410714285714287, + "loss": 1.7547, + "step": 203 + }, + { + "epoch": 4.13, + "learning_rate": 0.0001739795918367347, + "loss": 1.6254, + "step": 204 + }, + { + "epoch": 4.15, + "learning_rate": 0.00017385204081632653, + "loss": 1.6686, + "step": 205 + }, + { + "epoch": 4.17, + "learning_rate": 0.0001737244897959184, + "loss": 1.6684, + "step": 206 + }, + { + "epoch": 4.19, + "learning_rate": 0.00017359693877551022, + "loss": 1.6724, + "step": 207 + }, + { + "epoch": 4.21, + "learning_rate": 0.00017346938775510205, + "loss": 1.7361, + "step": 208 + }, + { + "epoch": 4.23, + "learning_rate": 0.00017334183673469388, + "loss": 1.7167, + "step": 209 + }, + { + "epoch": 4.25, + "learning_rate": 0.00017321428571428574, + "loss": 1.7226, + "step": 210 + }, + { + "epoch": 4.27, + "learning_rate": 0.00017308673469387754, + "loss": 1.7133, + "step": 211 + }, + { + "epoch": 4.29, + "learning_rate": 0.0001729591836734694, + "loss": 1.649, + "step": 212 + }, + { + "epoch": 4.31, + "learning_rate": 0.00017283163265306123, + "loss": 1.7104, + "step": 213 + }, + { + "epoch": 4.33, + "learning_rate": 0.00017270408163265306, + "loss": 1.6861, + "step": 214 + }, + { + "epoch": 4.35, + "learning_rate": 0.0001725765306122449, + "loss": 1.648, + "step": 215 + }, + { + "epoch": 4.37, + "learning_rate": 0.00017244897959183675, + "loss": 1.6215, + "step": 216 + }, + { + "epoch": 4.39, + "learning_rate": 0.00017232142857142858, + "loss": 1.6334, + "step": 217 + }, + { + "epoch": 4.41, + "learning_rate": 0.0001721938775510204, + "loss": 1.6283, + "step": 218 + }, + { + "epoch": 4.43, + "learning_rate": 0.00017206632653061224, + "loss": 1.6462, + "step": 219 + }, + { + "epoch": 4.45, + "learning_rate": 0.0001719387755102041, + "loss": 1.7233, + "step": 220 + }, + { + "epoch": 4.47, + "learning_rate": 0.0001718112244897959, + "loss": 1.7839, + "step": 221 + }, + { + "epoch": 4.49, + "learning_rate": 0.00017168367346938776, + "loss": 1.7204, + "step": 222 + }, + { + "epoch": 4.51, + "learning_rate": 0.0001715561224489796, + "loss": 1.7671, + "step": 223 + }, + { + "epoch": 4.53, + "learning_rate": 0.00017142857142857143, + "loss": 1.6824, + "step": 224 + }, + { + "epoch": 4.55, + "learning_rate": 0.00017130102040816328, + "loss": 1.7068, + "step": 225 + }, + { + "epoch": 4.57, + "learning_rate": 0.00017117346938775511, + "loss": 1.6515, + "step": 226 + }, + { + "epoch": 4.59, + "learning_rate": 0.00017104591836734694, + "loss": 1.6586, + "step": 227 + }, + { + "epoch": 4.61, + "learning_rate": 0.00017091836734693878, + "loss": 1.6355, + 
"step": 228 + }, + { + "epoch": 4.63, + "learning_rate": 0.00017079081632653063, + "loss": 1.7173, + "step": 229 + }, + { + "epoch": 4.65, + "learning_rate": 0.00017066326530612246, + "loss": 1.6585, + "step": 230 + }, + { + "epoch": 4.67, + "learning_rate": 0.0001705357142857143, + "loss": 1.5856, + "step": 231 + }, + { + "epoch": 4.69, + "learning_rate": 0.00017040816326530613, + "loss": 1.5923, + "step": 232 + }, + { + "epoch": 4.71, + "learning_rate": 0.00017028061224489798, + "loss": 1.7128, + "step": 233 + }, + { + "epoch": 4.73, + "learning_rate": 0.0001701530612244898, + "loss": 1.6971, + "step": 234 + }, + { + "epoch": 4.75, + "learning_rate": 0.00017002551020408165, + "loss": 1.6416, + "step": 235 + }, + { + "epoch": 4.78, + "learning_rate": 0.00016989795918367348, + "loss": 1.645, + "step": 236 + }, + { + "epoch": 4.8, + "learning_rate": 0.0001697704081632653, + "loss": 1.6792, + "step": 237 + }, + { + "epoch": 4.82, + "learning_rate": 0.00016964285714285714, + "loss": 1.6522, + "step": 238 + }, + { + "epoch": 4.84, + "learning_rate": 0.000169515306122449, + "loss": 1.6315, + "step": 239 + }, + { + "epoch": 4.86, + "learning_rate": 0.00016938775510204083, + "loss": 1.6622, + "step": 240 + }, + { + "epoch": 4.88, + "learning_rate": 0.00016926020408163266, + "loss": 1.6566, + "step": 241 + }, + { + "epoch": 4.9, + "learning_rate": 0.0001691326530612245, + "loss": 1.7141, + "step": 242 + }, + { + "epoch": 4.92, + "learning_rate": 0.00016900510204081635, + "loss": 1.5873, + "step": 243 + }, + { + "epoch": 4.94, + "learning_rate": 0.00016887755102040818, + "loss": 1.6571, + "step": 244 + }, + { + "epoch": 4.96, + "learning_rate": 0.00016875, + "loss": 1.6829, + "step": 245 + }, + { + "epoch": 4.98, + "learning_rate": 0.00016862244897959184, + "loss": 1.6935, + "step": 246 + }, + { + "epoch": 5.0, + "learning_rate": 0.00016849489795918367, + "loss": 1.6782, + "step": 247 + }, + { + "epoch": 5.02, + "learning_rate": 0.00016836734693877553, + "loss": 1.622, + "step": 248 + }, + { + "epoch": 5.04, + "learning_rate": 0.00016823979591836736, + "loss": 1.6596, + "step": 249 + }, + { + "epoch": 5.06, + "learning_rate": 0.0001681122448979592, + "loss": 1.5821, + "step": 250 + }, + { + "epoch": 5.08, + "learning_rate": 0.00016798469387755102, + "loss": 1.7292, + "step": 251 + }, + { + "epoch": 5.1, + "learning_rate": 0.00016785714285714288, + "loss": 1.646, + "step": 252 + }, + { + "epoch": 5.12, + "learning_rate": 0.0001677295918367347, + "loss": 1.6969, + "step": 253 + }, + { + "epoch": 5.14, + "learning_rate": 0.00016760204081632654, + "loss": 1.6082, + "step": 254 + }, + { + "epoch": 5.16, + "learning_rate": 0.00016747448979591837, + "loss": 1.5843, + "step": 255 + }, + { + "epoch": 5.18, + "learning_rate": 0.00016734693877551023, + "loss": 1.6827, + "step": 256 + }, + { + "epoch": 5.2, + "learning_rate": 0.00016721938775510203, + "loss": 1.5824, + "step": 257 + }, + { + "epoch": 5.22, + "learning_rate": 0.0001670918367346939, + "loss": 1.6795, + "step": 258 + }, + { + "epoch": 5.24, + "learning_rate": 0.00016696428571428572, + "loss": 1.5639, + "step": 259 + }, + { + "epoch": 5.26, + "learning_rate": 0.00016683673469387755, + "loss": 1.592, + "step": 260 + }, + { + "epoch": 5.28, + "learning_rate": 0.00016670918367346938, + "loss": 1.65, + "step": 261 + }, + { + "epoch": 5.3, + "learning_rate": 0.00016658163265306124, + "loss": 1.5592, + "step": 262 + }, + { + "epoch": 5.32, + "learning_rate": 0.00016645408163265305, + "loss": 1.5091, + "step": 263 + }, + { + "epoch": 5.34, + 
"learning_rate": 0.0001663265306122449, + "loss": 1.6138, + "step": 264 + }, + { + "epoch": 5.36, + "learning_rate": 0.00016619897959183673, + "loss": 1.625, + "step": 265 + }, + { + "epoch": 5.38, + "learning_rate": 0.0001660714285714286, + "loss": 1.5757, + "step": 266 + }, + { + "epoch": 5.4, + "learning_rate": 0.00016594387755102042, + "loss": 1.6372, + "step": 267 + }, + { + "epoch": 5.42, + "learning_rate": 0.00016581632653061225, + "loss": 1.5891, + "step": 268 + }, + { + "epoch": 5.44, + "learning_rate": 0.00016568877551020409, + "loss": 1.6893, + "step": 269 + }, + { + "epoch": 5.46, + "learning_rate": 0.00016556122448979592, + "loss": 1.6662, + "step": 270 + }, + { + "epoch": 5.48, + "learning_rate": 0.00016543367346938777, + "loss": 1.7132, + "step": 271 + }, + { + "epoch": 5.5, + "learning_rate": 0.0001653061224489796, + "loss": 1.5835, + "step": 272 + }, + { + "epoch": 5.52, + "learning_rate": 0.00016517857142857144, + "loss": 1.6342, + "step": 273 + }, + { + "epoch": 5.54, + "learning_rate": 0.00016505102040816327, + "loss": 1.6717, + "step": 274 + }, + { + "epoch": 5.56, + "learning_rate": 0.00016492346938775512, + "loss": 1.6248, + "step": 275 + }, + { + "epoch": 5.58, + "learning_rate": 0.00016479591836734696, + "loss": 1.6117, + "step": 276 + }, + { + "epoch": 5.6, + "learning_rate": 0.0001646683673469388, + "loss": 1.6798, + "step": 277 + }, + { + "epoch": 5.63, + "learning_rate": 0.00016454081632653062, + "loss": 1.6406, + "step": 278 + }, + { + "epoch": 5.65, + "learning_rate": 0.00016441326530612248, + "loss": 1.6512, + "step": 279 + }, + { + "epoch": 5.67, + "learning_rate": 0.00016428571428571428, + "loss": 1.6102, + "step": 280 + }, + { + "epoch": 5.69, + "learning_rate": 0.00016415816326530614, + "loss": 1.6113, + "step": 281 + }, + { + "epoch": 5.71, + "learning_rate": 0.00016403061224489797, + "loss": 1.7116, + "step": 282 + }, + { + "epoch": 5.73, + "learning_rate": 0.0001639030612244898, + "loss": 1.6846, + "step": 283 + }, + { + "epoch": 5.75, + "learning_rate": 0.00016377551020408163, + "loss": 1.6911, + "step": 284 + }, + { + "epoch": 5.77, + "learning_rate": 0.0001636479591836735, + "loss": 1.6202, + "step": 285 + }, + { + "epoch": 5.79, + "learning_rate": 0.0001635204081632653, + "loss": 1.5715, + "step": 286 + }, + { + "epoch": 5.81, + "learning_rate": 0.00016339285714285715, + "loss": 1.6461, + "step": 287 + }, + { + "epoch": 5.83, + "learning_rate": 0.00016326530612244898, + "loss": 1.6624, + "step": 288 + }, + { + "epoch": 5.85, + "learning_rate": 0.00016313775510204084, + "loss": 1.6535, + "step": 289 + }, + { + "epoch": 5.87, + "learning_rate": 0.00016301020408163267, + "loss": 1.6275, + "step": 290 + }, + { + "epoch": 5.89, + "learning_rate": 0.0001628826530612245, + "loss": 1.6636, + "step": 291 + }, + { + "epoch": 5.91, + "learning_rate": 0.00016275510204081633, + "loss": 1.6546, + "step": 292 + }, + { + "epoch": 5.93, + "learning_rate": 0.00016262755102040816, + "loss": 1.7274, + "step": 293 + }, + { + "epoch": 5.95, + "learning_rate": 0.00016250000000000002, + "loss": 1.5901, + "step": 294 + }, + { + "epoch": 5.97, + "learning_rate": 0.00016237244897959185, + "loss": 1.6046, + "step": 295 + }, + { + "epoch": 5.99, + "learning_rate": 0.00016224489795918368, + "loss": 1.5828, + "step": 296 + }, + { + "epoch": 6.01, + "learning_rate": 0.0001621173469387755, + "loss": 1.6435, + "step": 297 + }, + { + "epoch": 6.03, + "learning_rate": 0.00016198979591836737, + "loss": 1.6263, + "step": 298 + }, + { + "epoch": 6.05, + "learning_rate": 
0.00016186224489795917, + "loss": 1.4944, + "step": 299 + }, + { + "epoch": 6.07, + "learning_rate": 0.00016173469387755103, + "loss": 1.6286, + "step": 300 + }, + { + "epoch": 6.09, + "learning_rate": 0.00016160714285714286, + "loss": 1.694, + "step": 301 + }, + { + "epoch": 6.11, + "learning_rate": 0.00016147959183673472, + "loss": 1.6197, + "step": 302 + }, + { + "epoch": 6.13, + "learning_rate": 0.00016135204081632652, + "loss": 1.5597, + "step": 303 + }, + { + "epoch": 6.15, + "learning_rate": 0.00016122448979591838, + "loss": 1.5487, + "step": 304 + }, + { + "epoch": 6.17, + "learning_rate": 0.0001610969387755102, + "loss": 1.5769, + "step": 305 + }, + { + "epoch": 6.19, + "learning_rate": 0.00016096938775510204, + "loss": 1.6367, + "step": 306 + }, + { + "epoch": 6.21, + "learning_rate": 0.00016084183673469388, + "loss": 1.583, + "step": 307 + }, + { + "epoch": 6.23, + "learning_rate": 0.00016071428571428573, + "loss": 1.6201, + "step": 308 + }, + { + "epoch": 6.25, + "learning_rate": 0.00016058673469387754, + "loss": 1.6586, + "step": 309 + }, + { + "epoch": 6.27, + "learning_rate": 0.0001604591836734694, + "loss": 1.6711, + "step": 310 + }, + { + "epoch": 6.29, + "learning_rate": 0.00016033163265306123, + "loss": 1.6402, + "step": 311 + }, + { + "epoch": 6.31, + "learning_rate": 0.00016020408163265306, + "loss": 1.5247, + "step": 312 + }, + { + "epoch": 6.33, + "learning_rate": 0.00016007653061224491, + "loss": 1.5356, + "step": 313 + }, + { + "epoch": 6.35, + "learning_rate": 0.00015994897959183675, + "loss": 1.564, + "step": 314 + }, + { + "epoch": 6.37, + "learning_rate": 0.00015982142857142858, + "loss": 1.563, + "step": 315 + }, + { + "epoch": 6.39, + "learning_rate": 0.0001596938775510204, + "loss": 1.5198, + "step": 316 + }, + { + "epoch": 6.41, + "learning_rate": 0.00015956632653061227, + "loss": 1.6558, + "step": 317 + }, + { + "epoch": 6.43, + "learning_rate": 0.0001594387755102041, + "loss": 1.5534, + "step": 318 + }, + { + "epoch": 6.45, + "learning_rate": 0.00015931122448979593, + "loss": 1.6239, + "step": 319 + }, + { + "epoch": 6.47, + "learning_rate": 0.00015918367346938776, + "loss": 1.5645, + "step": 320 + }, + { + "epoch": 6.5, + "learning_rate": 0.00015905612244897962, + "loss": 1.5713, + "step": 321 + }, + { + "epoch": 6.52, + "learning_rate": 0.00015892857142857142, + "loss": 1.6176, + "step": 322 + }, + { + "epoch": 6.54, + "learning_rate": 0.00015880102040816328, + "loss": 1.502, + "step": 323 + }, + { + "epoch": 6.56, + "learning_rate": 0.0001586734693877551, + "loss": 1.645, + "step": 324 + }, + { + "epoch": 6.58, + "learning_rate": 0.00015854591836734697, + "loss": 1.5904, + "step": 325 + }, + { + "epoch": 6.6, + "learning_rate": 0.00015841836734693877, + "loss": 1.6149, + "step": 326 + }, + { + "epoch": 6.62, + "learning_rate": 0.00015829081632653063, + "loss": 1.6757, + "step": 327 + }, + { + "epoch": 6.64, + "learning_rate": 0.00015816326530612246, + "loss": 1.541, + "step": 328 + }, + { + "epoch": 6.66, + "learning_rate": 0.0001580357142857143, + "loss": 1.5898, + "step": 329 + }, + { + "epoch": 6.68, + "learning_rate": 0.00015790816326530612, + "loss": 1.5441, + "step": 330 + }, + { + "epoch": 6.7, + "learning_rate": 0.00015778061224489798, + "loss": 1.61, + "step": 331 + }, + { + "epoch": 6.72, + "learning_rate": 0.00015765306122448978, + "loss": 1.615, + "step": 332 + }, + { + "epoch": 6.74, + "learning_rate": 0.00015752551020408164, + "loss": 1.6575, + "step": 333 + }, + { + "epoch": 6.76, + "learning_rate": 0.00015739795918367347, + "loss": 
1.6702, + "step": 334 + }, + { + "epoch": 6.78, + "learning_rate": 0.0001572704081632653, + "loss": 1.6009, + "step": 335 + }, + { + "epoch": 6.8, + "learning_rate": 0.00015714285714285716, + "loss": 1.5568, + "step": 336 + }, + { + "epoch": 6.82, + "learning_rate": 0.000157015306122449, + "loss": 1.619, + "step": 337 + }, + { + "epoch": 6.84, + "learning_rate": 0.00015688775510204082, + "loss": 1.5563, + "step": 338 + }, + { + "epoch": 6.86, + "learning_rate": 0.00015676020408163265, + "loss": 1.6328, + "step": 339 + }, + { + "epoch": 6.88, + "learning_rate": 0.0001566326530612245, + "loss": 1.5726, + "step": 340 + }, + { + "epoch": 6.9, + "learning_rate": 0.00015650510204081634, + "loss": 1.6199, + "step": 341 + }, + { + "epoch": 6.92, + "learning_rate": 0.00015637755102040817, + "loss": 1.5722, + "step": 342 + }, + { + "epoch": 6.94, + "learning_rate": 0.00015625, + "loss": 1.5685, + "step": 343 + }, + { + "epoch": 6.96, + "learning_rate": 0.00015612244897959186, + "loss": 1.5615, + "step": 344 + }, + { + "epoch": 6.98, + "learning_rate": 0.00015599489795918366, + "loss": 1.5994, + "step": 345 + }, + { + "epoch": 7.0, + "learning_rate": 0.00015586734693877552, + "loss": 1.5579, + "step": 346 + }, + { + "epoch": 7.02, + "learning_rate": 0.00015573979591836735, + "loss": 1.547, + "step": 347 + }, + { + "epoch": 7.04, + "learning_rate": 0.00015561224489795918, + "loss": 1.5292, + "step": 348 + }, + { + "epoch": 7.06, + "learning_rate": 0.00015548469387755102, + "loss": 1.6032, + "step": 349 + }, + { + "epoch": 7.08, + "learning_rate": 0.00015535714285714287, + "loss": 1.5149, + "step": 350 + }, + { + "epoch": 7.1, + "learning_rate": 0.0001552295918367347, + "loss": 1.6093, + "step": 351 + }, + { + "epoch": 7.12, + "learning_rate": 0.00015510204081632654, + "loss": 1.5421, + "step": 352 + }, + { + "epoch": 7.14, + "learning_rate": 0.00015497448979591837, + "loss": 1.5733, + "step": 353 + }, + { + "epoch": 7.16, + "learning_rate": 0.00015484693877551022, + "loss": 1.5703, + "step": 354 + }, + { + "epoch": 7.18, + "learning_rate": 0.00015471938775510203, + "loss": 1.6141, + "step": 355 + }, + { + "epoch": 7.2, + "learning_rate": 0.00015459183673469389, + "loss": 1.5526, + "step": 356 + }, + { + "epoch": 7.22, + "learning_rate": 0.00015446428571428572, + "loss": 1.5347, + "step": 357 + }, + { + "epoch": 7.24, + "learning_rate": 0.00015433673469387755, + "loss": 1.5682, + "step": 358 + }, + { + "epoch": 7.26, + "learning_rate": 0.0001542091836734694, + "loss": 1.5292, + "step": 359 + }, + { + "epoch": 7.28, + "learning_rate": 0.00015408163265306124, + "loss": 1.499, + "step": 360 + }, + { + "epoch": 7.3, + "learning_rate": 0.00015395408163265307, + "loss": 1.5624, + "step": 361 + }, + { + "epoch": 7.32, + "learning_rate": 0.0001538265306122449, + "loss": 1.627, + "step": 362 + }, + { + "epoch": 7.34, + "learning_rate": 0.00015369897959183676, + "loss": 1.5327, + "step": 363 + }, + { + "epoch": 7.37, + "learning_rate": 0.0001535714285714286, + "loss": 1.5622, + "step": 364 + }, + { + "epoch": 7.39, + "learning_rate": 0.00015344387755102042, + "loss": 1.5659, + "step": 365 + }, + { + "epoch": 7.41, + "learning_rate": 0.00015331632653061225, + "loss": 1.5019, + "step": 366 + }, + { + "epoch": 7.43, + "learning_rate": 0.0001531887755102041, + "loss": 1.5921, + "step": 367 + }, + { + "epoch": 7.45, + "learning_rate": 0.0001530612244897959, + "loss": 1.5914, + "step": 368 + }, + { + "epoch": 7.47, + "learning_rate": 0.00015293367346938777, + "loss": 1.5045, + "step": 369 + }, + { + "epoch": 7.49, + 
"learning_rate": 0.0001528061224489796, + "loss": 1.6209, + "step": 370 + }, + { + "epoch": 7.51, + "learning_rate": 0.00015267857142857143, + "loss": 1.5198, + "step": 371 + }, + { + "epoch": 7.53, + "learning_rate": 0.00015255102040816326, + "loss": 1.5363, + "step": 372 + }, + { + "epoch": 7.55, + "learning_rate": 0.00015242346938775512, + "loss": 1.5391, + "step": 373 + }, + { + "epoch": 7.57, + "learning_rate": 0.00015229591836734695, + "loss": 1.4546, + "step": 374 + }, + { + "epoch": 7.59, + "learning_rate": 0.00015216836734693878, + "loss": 1.5546, + "step": 375 + }, + { + "epoch": 7.61, + "learning_rate": 0.0001520408163265306, + "loss": 1.5629, + "step": 376 + }, + { + "epoch": 7.63, + "learning_rate": 0.00015191326530612247, + "loss": 1.6002, + "step": 377 + }, + { + "epoch": 7.65, + "learning_rate": 0.00015178571428571427, + "loss": 1.5543, + "step": 378 + }, + { + "epoch": 7.67, + "learning_rate": 0.00015165816326530613, + "loss": 1.5925, + "step": 379 + }, + { + "epoch": 7.69, + "learning_rate": 0.00015153061224489796, + "loss": 1.5631, + "step": 380 + }, + { + "epoch": 7.71, + "learning_rate": 0.0001514030612244898, + "loss": 1.5677, + "step": 381 + }, + { + "epoch": 7.73, + "learning_rate": 0.00015127551020408165, + "loss": 1.5828, + "step": 382 + }, + { + "epoch": 7.75, + "learning_rate": 0.00015114795918367348, + "loss": 1.6494, + "step": 383 + }, + { + "epoch": 7.77, + "learning_rate": 0.0001510204081632653, + "loss": 1.553, + "step": 384 + }, + { + "epoch": 7.79, + "learning_rate": 0.00015089285714285714, + "loss": 1.6156, + "step": 385 + }, + { + "epoch": 7.81, + "learning_rate": 0.000150765306122449, + "loss": 1.5001, + "step": 386 + }, + { + "epoch": 7.83, + "learning_rate": 0.00015063775510204083, + "loss": 1.5321, + "step": 387 + }, + { + "epoch": 7.85, + "learning_rate": 0.00015051020408163266, + "loss": 1.5307, + "step": 388 + }, + { + "epoch": 7.87, + "learning_rate": 0.0001503826530612245, + "loss": 1.5639, + "step": 389 + }, + { + "epoch": 7.89, + "learning_rate": 0.00015025510204081635, + "loss": 1.517, + "step": 390 + }, + { + "epoch": 7.91, + "learning_rate": 0.00015012755102040816, + "loss": 1.4776, + "step": 391 + }, + { + "epoch": 7.93, + "learning_rate": 0.00015000000000000001, + "loss": 1.5368, + "step": 392 + }, + { + "epoch": 7.95, + "learning_rate": 0.00014987244897959184, + "loss": 1.5636, + "step": 393 + }, + { + "epoch": 7.97, + "learning_rate": 0.00014974489795918368, + "loss": 1.6004, + "step": 394 + }, + { + "epoch": 7.99, + "learning_rate": 0.0001496173469387755, + "loss": 1.5524, + "step": 395 + }, + { + "epoch": 8.01, + "learning_rate": 0.00014948979591836736, + "loss": 1.5307, + "step": 396 + }, + { + "epoch": 8.03, + "learning_rate": 0.00014936224489795917, + "loss": 1.5123, + "step": 397 + }, + { + "epoch": 8.05, + "learning_rate": 0.00014923469387755103, + "loss": 1.5132, + "step": 398 + }, + { + "epoch": 8.07, + "learning_rate": 0.00014910714285714286, + "loss": 1.5109, + "step": 399 + }, + { + "epoch": 8.09, + "learning_rate": 0.00014897959183673472, + "loss": 1.5302, + "step": 400 + }, + { + "epoch": 8.11, + "learning_rate": 0.00014885204081632652, + "loss": 1.5238, + "step": 401 + }, + { + "epoch": 8.13, + "learning_rate": 0.00014872448979591838, + "loss": 1.4781, + "step": 402 + }, + { + "epoch": 8.15, + "learning_rate": 0.0001485969387755102, + "loss": 1.5446, + "step": 403 + }, + { + "epoch": 8.17, + "learning_rate": 0.00014846938775510204, + "loss": 1.5, + "step": 404 + }, + { + "epoch": 8.19, + "learning_rate": 
0.0001483418367346939, + "loss": 1.5458, + "step": 405 + }, + { + "epoch": 8.21, + "learning_rate": 0.00014821428571428573, + "loss": 1.5257, + "step": 406 + }, + { + "epoch": 8.24, + "learning_rate": 0.00014808673469387756, + "loss": 1.4607, + "step": 407 + }, + { + "epoch": 8.26, + "learning_rate": 0.0001479591836734694, + "loss": 1.4282, + "step": 408 + }, + { + "epoch": 8.28, + "learning_rate": 0.00014783163265306125, + "loss": 1.4519, + "step": 409 + }, + { + "epoch": 8.3, + "learning_rate": 0.00014770408163265305, + "loss": 1.475, + "step": 410 + }, + { + "epoch": 8.32, + "learning_rate": 0.0001475765306122449, + "loss": 1.5425, + "step": 411 + }, + { + "epoch": 8.34, + "learning_rate": 0.00014744897959183674, + "loss": 1.5407, + "step": 412 + }, + { + "epoch": 8.36, + "learning_rate": 0.0001473214285714286, + "loss": 1.5698, + "step": 413 + }, + { + "epoch": 8.38, + "learning_rate": 0.0001471938775510204, + "loss": 1.4282, + "step": 414 + }, + { + "epoch": 8.4, + "learning_rate": 0.00014706632653061226, + "loss": 1.5301, + "step": 415 + }, + { + "epoch": 8.42, + "learning_rate": 0.0001469387755102041, + "loss": 1.5083, + "step": 416 + }, + { + "epoch": 8.44, + "learning_rate": 0.00014681122448979592, + "loss": 1.5712, + "step": 417 + }, + { + "epoch": 8.46, + "learning_rate": 0.00014668367346938775, + "loss": 1.4363, + "step": 418 + }, + { + "epoch": 8.48, + "learning_rate": 0.0001465561224489796, + "loss": 1.4463, + "step": 419 + }, + { + "epoch": 8.5, + "learning_rate": 0.00014642857142857141, + "loss": 1.4738, + "step": 420 + }, + { + "epoch": 8.52, + "learning_rate": 0.00014630102040816327, + "loss": 1.5396, + "step": 421 + }, + { + "epoch": 8.54, + "learning_rate": 0.0001461734693877551, + "loss": 1.4384, + "step": 422 + }, + { + "epoch": 8.56, + "learning_rate": 0.00014604591836734696, + "loss": 1.5345, + "step": 423 + }, + { + "epoch": 8.58, + "learning_rate": 0.0001459183673469388, + "loss": 1.5355, + "step": 424 + }, + { + "epoch": 8.6, + "learning_rate": 0.00014579081632653062, + "loss": 1.5188, + "step": 425 + }, + { + "epoch": 8.62, + "learning_rate": 0.00014566326530612245, + "loss": 1.5575, + "step": 426 + }, + { + "epoch": 8.64, + "learning_rate": 0.00014553571428571428, + "loss": 1.5279, + "step": 427 + }, + { + "epoch": 8.66, + "learning_rate": 0.00014540816326530614, + "loss": 1.5484, + "step": 428 + }, + { + "epoch": 8.68, + "learning_rate": 0.00014528061224489797, + "loss": 1.4878, + "step": 429 + }, + { + "epoch": 8.7, + "learning_rate": 0.0001451530612244898, + "loss": 1.503, + "step": 430 + }, + { + "epoch": 8.72, + "learning_rate": 0.00014502551020408163, + "loss": 1.4723, + "step": 431 + }, + { + "epoch": 8.74, + "learning_rate": 0.0001448979591836735, + "loss": 1.5579, + "step": 432 + }, + { + "epoch": 8.76, + "learning_rate": 0.0001447704081632653, + "loss": 1.4789, + "step": 433 + }, + { + "epoch": 8.78, + "learning_rate": 0.00014464285714285715, + "loss": 1.5501, + "step": 434 + }, + { + "epoch": 8.8, + "learning_rate": 0.00014451530612244899, + "loss": 1.5204, + "step": 435 + }, + { + "epoch": 8.82, + "learning_rate": 0.00014438775510204084, + "loss": 1.5489, + "step": 436 + }, + { + "epoch": 8.84, + "learning_rate": 0.00014426020408163265, + "loss": 1.5464, + "step": 437 + }, + { + "epoch": 8.86, + "learning_rate": 0.0001441326530612245, + "loss": 1.5896, + "step": 438 + }, + { + "epoch": 8.88, + "learning_rate": 0.00014400510204081634, + "loss": 1.5465, + "step": 439 + }, + { + "epoch": 8.9, + "learning_rate": 0.00014387755102040817, + "loss": 1.5094, 
+ "step": 440 + }, + { + "epoch": 8.92, + "learning_rate": 0.00014375, + "loss": 1.5144, + "step": 441 + }, + { + "epoch": 8.94, + "learning_rate": 0.00014362244897959186, + "loss": 1.4919, + "step": 442 + }, + { + "epoch": 8.96, + "learning_rate": 0.00014349489795918366, + "loss": 1.4702, + "step": 443 + }, + { + "epoch": 8.98, + "learning_rate": 0.00014336734693877552, + "loss": 1.4996, + "step": 444 + }, + { + "epoch": 9.0, + "learning_rate": 0.00014323979591836735, + "loss": 1.5503, + "step": 445 + }, + { + "epoch": 9.02, + "learning_rate": 0.00014311224489795918, + "loss": 1.4125, + "step": 446 + }, + { + "epoch": 9.04, + "learning_rate": 0.00014298469387755104, + "loss": 1.4722, + "step": 447 + }, + { + "epoch": 9.06, + "learning_rate": 0.00014285714285714287, + "loss": 1.5199, + "step": 448 + }, + { + "epoch": 9.09, + "learning_rate": 0.0001427295918367347, + "loss": 1.4571, + "step": 449 + }, + { + "epoch": 9.11, + "learning_rate": 0.00014260204081632653, + "loss": 1.4996, + "step": 450 + }, + { + "epoch": 9.13, + "learning_rate": 0.0001424744897959184, + "loss": 1.4092, + "step": 451 + }, + { + "epoch": 9.15, + "learning_rate": 0.00014234693877551022, + "loss": 1.4198, + "step": 452 + }, + { + "epoch": 9.17, + "learning_rate": 0.00014221938775510205, + "loss": 1.4916, + "step": 453 + }, + { + "epoch": 9.19, + "learning_rate": 0.00014209183673469388, + "loss": 1.5051, + "step": 454 + }, + { + "epoch": 9.21, + "learning_rate": 0.00014196428571428574, + "loss": 1.4321, + "step": 455 + }, + { + "epoch": 9.23, + "learning_rate": 0.00014183673469387754, + "loss": 1.4097, + "step": 456 + }, + { + "epoch": 9.25, + "learning_rate": 0.0001417091836734694, + "loss": 1.4853, + "step": 457 + }, + { + "epoch": 9.27, + "learning_rate": 0.00014158163265306123, + "loss": 1.4593, + "step": 458 + }, + { + "epoch": 9.29, + "learning_rate": 0.00014145408163265306, + "loss": 1.3729, + "step": 459 + }, + { + "epoch": 9.31, + "learning_rate": 0.0001413265306122449, + "loss": 1.4467, + "step": 460 + }, + { + "epoch": 9.33, + "learning_rate": 0.00014119897959183675, + "loss": 1.4467, + "step": 461 + }, + { + "epoch": 9.35, + "learning_rate": 0.00014107142857142858, + "loss": 1.4785, + "step": 462 + }, + { + "epoch": 9.37, + "learning_rate": 0.0001409438775510204, + "loss": 1.4089, + "step": 463 + }, + { + "epoch": 9.39, + "learning_rate": 0.00014081632653061224, + "loss": 1.5026, + "step": 464 + }, + { + "epoch": 9.41, + "learning_rate": 0.0001406887755102041, + "loss": 1.4857, + "step": 465 + }, + { + "epoch": 9.43, + "learning_rate": 0.0001405612244897959, + "loss": 1.3745, + "step": 466 + }, + { + "epoch": 9.45, + "learning_rate": 0.00014043367346938776, + "loss": 1.4733, + "step": 467 + }, + { + "epoch": 9.47, + "learning_rate": 0.0001403061224489796, + "loss": 1.5212, + "step": 468 + }, + { + "epoch": 9.49, + "learning_rate": 0.00014017857142857142, + "loss": 1.5398, + "step": 469 + }, + { + "epoch": 9.51, + "learning_rate": 0.00014005102040816328, + "loss": 1.478, + "step": 470 + }, + { + "epoch": 9.53, + "learning_rate": 0.0001399234693877551, + "loss": 1.496, + "step": 471 + }, + { + "epoch": 9.55, + "learning_rate": 0.00013979591836734694, + "loss": 1.4837, + "step": 472 + }, + { + "epoch": 9.57, + "learning_rate": 0.00013966836734693878, + "loss": 1.4724, + "step": 473 + }, + { + "epoch": 9.59, + "learning_rate": 0.00013954081632653063, + "loss": 1.4828, + "step": 474 + }, + { + "epoch": 9.61, + "learning_rate": 0.00013941326530612246, + "loss": 1.5012, + "step": 475 + }, + { + "epoch": 9.63, + 
"learning_rate": 0.0001392857142857143, + "loss": 1.4879, + "step": 476 + }, + { + "epoch": 9.65, + "learning_rate": 0.00013915816326530613, + "loss": 1.4196, + "step": 477 + }, + { + "epoch": 9.67, + "learning_rate": 0.00013903061224489798, + "loss": 1.4915, + "step": 478 + }, + { + "epoch": 9.69, + "learning_rate": 0.0001389030612244898, + "loss": 1.3878, + "step": 479 + }, + { + "epoch": 9.71, + "learning_rate": 0.00013877551020408165, + "loss": 1.466, + "step": 480 + }, + { + "epoch": 9.73, + "learning_rate": 0.00013864795918367348, + "loss": 1.4582, + "step": 481 + }, + { + "epoch": 9.75, + "learning_rate": 0.0001385204081632653, + "loss": 1.533, + "step": 482 + }, + { + "epoch": 9.77, + "learning_rate": 0.00013839285714285714, + "loss": 1.4697, + "step": 483 + }, + { + "epoch": 9.79, + "learning_rate": 0.000138265306122449, + "loss": 1.3989, + "step": 484 + }, + { + "epoch": 9.81, + "learning_rate": 0.00013813775510204083, + "loss": 1.4361, + "step": 485 + }, + { + "epoch": 9.83, + "learning_rate": 0.00013801020408163266, + "loss": 1.5271, + "step": 486 + }, + { + "epoch": 9.85, + "learning_rate": 0.0001378826530612245, + "loss": 1.4905, + "step": 487 + }, + { + "epoch": 9.87, + "learning_rate": 0.00013775510204081635, + "loss": 1.4757, + "step": 488 + }, + { + "epoch": 9.89, + "learning_rate": 0.00013762755102040815, + "loss": 1.5485, + "step": 489 + }, + { + "epoch": 9.91, + "learning_rate": 0.0001375, + "loss": 1.4783, + "step": 490 + }, + { + "epoch": 9.93, + "learning_rate": 0.00013737244897959184, + "loss": 1.4849, + "step": 491 + }, + { + "epoch": 9.96, + "learning_rate": 0.00013724489795918367, + "loss": 1.5382, + "step": 492 + }, + { + "epoch": 9.98, + "learning_rate": 0.00013711734693877553, + "loss": 1.4902, + "step": 493 + }, + { + "epoch": 10.0, + "learning_rate": 0.00013698979591836736, + "loss": 1.4865, + "step": 494 + }, + { + "epoch": 10.02, + "learning_rate": 0.0001368622448979592, + "loss": 1.4436, + "step": 495 + }, + { + "epoch": 10.04, + "learning_rate": 0.00013673469387755102, + "loss": 1.408, + "step": 496 + }, + { + "epoch": 10.06, + "learning_rate": 0.00013660714285714288, + "loss": 1.4764, + "step": 497 + }, + { + "epoch": 10.08, + "learning_rate": 0.0001364795918367347, + "loss": 1.4646, + "step": 498 + }, + { + "epoch": 10.1, + "learning_rate": 0.00013635204081632654, + "loss": 1.406, + "step": 499 + }, + { + "epoch": 10.12, + "learning_rate": 0.00013622448979591837, + "loss": 1.4785, + "step": 500 + }, + { + "epoch": 10.14, + "learning_rate": 0.00013609693877551023, + "loss": 1.4117, + "step": 501 + }, + { + "epoch": 10.16, + "learning_rate": 0.00013596938775510203, + "loss": 1.4108, + "step": 502 + }, + { + "epoch": 10.18, + "learning_rate": 0.0001358418367346939, + "loss": 1.4155, + "step": 503 + }, + { + "epoch": 10.2, + "learning_rate": 0.00013571428571428572, + "loss": 1.4021, + "step": 504 + }, + { + "epoch": 10.22, + "learning_rate": 0.00013558673469387755, + "loss": 1.411, + "step": 505 + }, + { + "epoch": 10.24, + "learning_rate": 0.00013545918367346938, + "loss": 1.3851, + "step": 506 + }, + { + "epoch": 10.26, + "learning_rate": 0.00013533163265306124, + "loss": 1.387, + "step": 507 + }, + { + "epoch": 10.28, + "learning_rate": 0.00013520408163265305, + "loss": 1.4163, + "step": 508 + }, + { + "epoch": 10.3, + "learning_rate": 0.0001350765306122449, + "loss": 1.3343, + "step": 509 + }, + { + "epoch": 10.32, + "learning_rate": 0.00013494897959183673, + "loss": 1.4811, + "step": 510 + }, + { + "epoch": 10.34, + "learning_rate": 
0.0001348214285714286, + "loss": 1.4086, + "step": 511 + }, + { + "epoch": 10.36, + "learning_rate": 0.0001346938775510204, + "loss": 1.3879, + "step": 512 + }, + { + "epoch": 10.38, + "learning_rate": 0.00013456632653061225, + "loss": 1.4204, + "step": 513 + }, + { + "epoch": 10.4, + "learning_rate": 0.00013443877551020408, + "loss": 1.4158, + "step": 514 + }, + { + "epoch": 10.42, + "learning_rate": 0.00013431122448979592, + "loss": 1.4521, + "step": 515 + }, + { + "epoch": 10.44, + "learning_rate": 0.00013418367346938777, + "loss": 1.4196, + "step": 516 + }, + { + "epoch": 10.46, + "learning_rate": 0.0001340561224489796, + "loss": 1.4361, + "step": 517 + }, + { + "epoch": 10.48, + "learning_rate": 0.00013392857142857144, + "loss": 1.4482, + "step": 518 + }, + { + "epoch": 10.5, + "learning_rate": 0.00013380102040816327, + "loss": 1.4801, + "step": 519 + }, + { + "epoch": 10.52, + "learning_rate": 0.00013367346938775512, + "loss": 1.4556, + "step": 520 + }, + { + "epoch": 10.54, + "learning_rate": 0.00013354591836734695, + "loss": 1.3902, + "step": 521 + }, + { + "epoch": 10.56, + "learning_rate": 0.00013341836734693879, + "loss": 1.4269, + "step": 522 + }, + { + "epoch": 10.58, + "learning_rate": 0.00013329081632653062, + "loss": 1.4899, + "step": 523 + }, + { + "epoch": 10.6, + "learning_rate": 0.00013316326530612247, + "loss": 1.3952, + "step": 524 + }, + { + "epoch": 10.62, + "learning_rate": 0.00013303571428571428, + "loss": 1.4116, + "step": 525 + }, + { + "epoch": 10.64, + "learning_rate": 0.00013290816326530614, + "loss": 1.4583, + "step": 526 + }, + { + "epoch": 10.66, + "learning_rate": 0.00013278061224489797, + "loss": 1.4466, + "step": 527 + }, + { + "epoch": 10.68, + "learning_rate": 0.0001326530612244898, + "loss": 1.4242, + "step": 528 + }, + { + "epoch": 10.7, + "learning_rate": 0.00013252551020408163, + "loss": 1.3717, + "step": 529 + }, + { + "epoch": 10.72, + "learning_rate": 0.0001323979591836735, + "loss": 1.4583, + "step": 530 + }, + { + "epoch": 10.74, + "learning_rate": 0.0001322704081632653, + "loss": 1.4185, + "step": 531 + }, + { + "epoch": 10.76, + "learning_rate": 0.00013214285714285715, + "loss": 1.4287, + "step": 532 + }, + { + "epoch": 10.78, + "learning_rate": 0.00013201530612244898, + "loss": 1.4385, + "step": 533 + }, + { + "epoch": 10.8, + "learning_rate": 0.00013188775510204084, + "loss": 1.453, + "step": 534 + }, + { + "epoch": 10.83, + "learning_rate": 0.00013176020408163264, + "loss": 1.4161, + "step": 535 + }, + { + "epoch": 10.85, + "learning_rate": 0.0001316326530612245, + "loss": 1.457, + "step": 536 + }, + { + "epoch": 10.87, + "learning_rate": 0.00013150510204081633, + "loss": 1.4367, + "step": 537 + }, + { + "epoch": 10.89, + "learning_rate": 0.00013137755102040816, + "loss": 1.4256, + "step": 538 + }, + { + "epoch": 10.91, + "learning_rate": 0.00013125000000000002, + "loss": 1.424, + "step": 539 + }, + { + "epoch": 10.93, + "learning_rate": 0.00013112244897959185, + "loss": 1.3923, + "step": 540 + }, + { + "epoch": 10.95, + "learning_rate": 0.00013099489795918368, + "loss": 1.4225, + "step": 541 + }, + { + "epoch": 10.97, + "learning_rate": 0.0001308673469387755, + "loss": 1.3969, + "step": 542 + }, + { + "epoch": 10.99, + "learning_rate": 0.00013073979591836737, + "loss": 1.4446, + "step": 543 + }, + { + "epoch": 11.01, + "learning_rate": 0.00013061224489795917, + "loss": 1.4375, + "step": 544 + }, + { + "epoch": 11.03, + "learning_rate": 0.00013048469387755103, + "loss": 1.4064, + "step": 545 + }, + { + "epoch": 11.05, + "learning_rate": 
0.00013035714285714286, + "loss": 1.3454, + "step": 546 + }, + { + "epoch": 11.07, + "learning_rate": 0.00013022959183673472, + "loss": 1.3234, + "step": 547 + }, + { + "epoch": 11.09, + "learning_rate": 0.00013010204081632652, + "loss": 1.3759, + "step": 548 + }, + { + "epoch": 11.11, + "learning_rate": 0.00012997448979591838, + "loss": 1.4221, + "step": 549 + }, + { + "epoch": 11.13, + "learning_rate": 0.0001298469387755102, + "loss": 1.4261, + "step": 550 + }, + { + "epoch": 11.15, + "learning_rate": 0.00012971938775510204, + "loss": 1.3341, + "step": 551 + }, + { + "epoch": 11.17, + "learning_rate": 0.00012959183673469387, + "loss": 1.3994, + "step": 552 + }, + { + "epoch": 11.19, + "learning_rate": 0.00012946428571428573, + "loss": 1.3894, + "step": 553 + }, + { + "epoch": 11.21, + "learning_rate": 0.00012933673469387754, + "loss": 1.3585, + "step": 554 + }, + { + "epoch": 11.23, + "learning_rate": 0.0001292091836734694, + "loss": 1.3763, + "step": 555 + }, + { + "epoch": 11.25, + "learning_rate": 0.00012908163265306123, + "loss": 1.3623, + "step": 556 + }, + { + "epoch": 11.27, + "learning_rate": 0.00012895408163265306, + "loss": 1.3907, + "step": 557 + }, + { + "epoch": 11.29, + "learning_rate": 0.0001288265306122449, + "loss": 1.3807, + "step": 558 + }, + { + "epoch": 11.31, + "learning_rate": 0.00012869897959183674, + "loss": 1.4045, + "step": 559 + }, + { + "epoch": 11.33, + "learning_rate": 0.00012857142857142858, + "loss": 1.4038, + "step": 560 + }, + { + "epoch": 11.35, + "learning_rate": 0.0001284438775510204, + "loss": 1.3466, + "step": 561 + }, + { + "epoch": 11.37, + "learning_rate": 0.00012831632653061226, + "loss": 1.3449, + "step": 562 + }, + { + "epoch": 11.39, + "learning_rate": 0.0001281887755102041, + "loss": 1.3866, + "step": 563 + }, + { + "epoch": 11.41, + "learning_rate": 0.00012806122448979593, + "loss": 1.3106, + "step": 564 + }, + { + "epoch": 11.43, + "learning_rate": 0.00012793367346938776, + "loss": 1.4414, + "step": 565 + }, + { + "epoch": 11.45, + "learning_rate": 0.00012780612244897962, + "loss": 1.3737, + "step": 566 + }, + { + "epoch": 11.47, + "learning_rate": 0.00012767857142857142, + "loss": 1.4053, + "step": 567 + }, + { + "epoch": 11.49, + "learning_rate": 0.00012755102040816328, + "loss": 1.4561, + "step": 568 + }, + { + "epoch": 11.51, + "learning_rate": 0.0001274234693877551, + "loss": 1.3684, + "step": 569 + }, + { + "epoch": 11.53, + "learning_rate": 0.00012729591836734697, + "loss": 1.3117, + "step": 570 + }, + { + "epoch": 11.55, + "learning_rate": 0.00012716836734693877, + "loss": 1.3474, + "step": 571 + }, + { + "epoch": 11.57, + "learning_rate": 0.00012704081632653063, + "loss": 1.3804, + "step": 572 + }, + { + "epoch": 11.59, + "learning_rate": 0.00012691326530612246, + "loss": 1.3656, + "step": 573 + }, + { + "epoch": 11.61, + "learning_rate": 0.0001267857142857143, + "loss": 1.3133, + "step": 574 + }, + { + "epoch": 11.63, + "learning_rate": 0.00012665816326530612, + "loss": 1.4077, + "step": 575 + }, + { + "epoch": 11.65, + "learning_rate": 0.00012653061224489798, + "loss": 1.4087, + "step": 576 + }, + { + "epoch": 11.67, + "learning_rate": 0.00012640306122448978, + "loss": 1.3524, + "step": 577 + }, + { + "epoch": 11.7, + "learning_rate": 0.00012627551020408164, + "loss": 1.3481, + "step": 578 + }, + { + "epoch": 11.72, + "learning_rate": 0.00012614795918367347, + "loss": 1.4497, + "step": 579 + }, + { + "epoch": 11.74, + "learning_rate": 0.0001260204081632653, + "loss": 1.3866, + "step": 580 + }, + { + "epoch": 11.76, + 
"learning_rate": 0.00012589285714285713, + "loss": 1.42, + "step": 581 + }, + { + "epoch": 11.78, + "learning_rate": 0.000125765306122449, + "loss": 1.3562, + "step": 582 + }, + { + "epoch": 11.8, + "learning_rate": 0.00012563775510204082, + "loss": 1.3249, + "step": 583 + }, + { + "epoch": 11.82, + "learning_rate": 0.00012551020408163265, + "loss": 1.4277, + "step": 584 + }, + { + "epoch": 11.84, + "learning_rate": 0.0001253826530612245, + "loss": 1.3734, + "step": 585 + }, + { + "epoch": 11.86, + "learning_rate": 0.00012525510204081634, + "loss": 1.3765, + "step": 586 + }, + { + "epoch": 11.88, + "learning_rate": 0.00012512755102040817, + "loss": 1.4153, + "step": 587 + }, + { + "epoch": 11.9, + "learning_rate": 0.000125, + "loss": 1.3847, + "step": 588 + }, + { + "epoch": 11.92, + "learning_rate": 0.00012487244897959186, + "loss": 1.3824, + "step": 589 + }, + { + "epoch": 11.94, + "learning_rate": 0.00012474489795918366, + "loss": 1.3938, + "step": 590 + }, + { + "epoch": 11.96, + "learning_rate": 0.00012461734693877552, + "loss": 1.4143, + "step": 591 + }, + { + "epoch": 11.98, + "learning_rate": 0.00012448979591836735, + "loss": 1.3794, + "step": 592 + }, + { + "epoch": 12.0, + "learning_rate": 0.00012436224489795918, + "loss": 1.3755, + "step": 593 + }, + { + "epoch": 12.02, + "learning_rate": 0.00012423469387755101, + "loss": 1.3736, + "step": 594 + }, + { + "epoch": 12.04, + "learning_rate": 0.00012410714285714287, + "loss": 1.2957, + "step": 595 + }, + { + "epoch": 12.06, + "learning_rate": 0.0001239795918367347, + "loss": 1.2996, + "step": 596 + }, + { + "epoch": 12.08, + "learning_rate": 0.00012385204081632653, + "loss": 1.3648, + "step": 597 + }, + { + "epoch": 12.1, + "learning_rate": 0.00012372448979591837, + "loss": 1.3031, + "step": 598 + }, + { + "epoch": 12.12, + "learning_rate": 0.00012359693877551022, + "loss": 1.2933, + "step": 599 + }, + { + "epoch": 12.14, + "learning_rate": 0.00012346938775510203, + "loss": 1.322, + "step": 600 + }, + { + "epoch": 12.16, + "learning_rate": 0.00012334183673469389, + "loss": 1.3123, + "step": 601 + }, + { + "epoch": 12.18, + "learning_rate": 0.00012321428571428572, + "loss": 1.3187, + "step": 602 + }, + { + "epoch": 12.2, + "learning_rate": 0.00012308673469387755, + "loss": 1.3353, + "step": 603 + }, + { + "epoch": 12.22, + "learning_rate": 0.0001229591836734694, + "loss": 1.3221, + "step": 604 + }, + { + "epoch": 12.24, + "learning_rate": 0.00012283163265306124, + "loss": 1.3458, + "step": 605 + }, + { + "epoch": 12.26, + "learning_rate": 0.00012270408163265307, + "loss": 1.275, + "step": 606 + }, + { + "epoch": 12.28, + "learning_rate": 0.0001225765306122449, + "loss": 1.3455, + "step": 607 + }, + { + "epoch": 12.3, + "learning_rate": 0.00012244897959183676, + "loss": 1.2769, + "step": 608 + }, + { + "epoch": 12.32, + "learning_rate": 0.00012232142857142859, + "loss": 1.3201, + "step": 609 + }, + { + "epoch": 12.34, + "learning_rate": 0.00012219387755102042, + "loss": 1.3073, + "step": 610 + }, + { + "epoch": 12.36, + "learning_rate": 0.00012206632653061225, + "loss": 1.3103, + "step": 611 + }, + { + "epoch": 12.38, + "learning_rate": 0.00012193877551020409, + "loss": 1.4437, + "step": 612 + }, + { + "epoch": 12.4, + "learning_rate": 0.00012181122448979591, + "loss": 1.3086, + "step": 613 + }, + { + "epoch": 12.42, + "learning_rate": 0.00012168367346938775, + "loss": 1.3867, + "step": 614 + }, + { + "epoch": 12.44, + "learning_rate": 0.0001215561224489796, + "loss": 1.2565, + "step": 615 + }, + { + "epoch": 12.46, + "learning_rate": 
0.00012142857142857143, + "loss": 1.335, + "step": 616 + }, + { + "epoch": 12.48, + "learning_rate": 0.00012130102040816327, + "loss": 1.3423, + "step": 617 + }, + { + "epoch": 12.5, + "learning_rate": 0.00012117346938775512, + "loss": 1.3433, + "step": 618 + }, + { + "epoch": 12.52, + "learning_rate": 0.00012104591836734695, + "loss": 1.3387, + "step": 619 + }, + { + "epoch": 12.55, + "learning_rate": 0.00012091836734693878, + "loss": 1.3923, + "step": 620 + }, + { + "epoch": 12.57, + "learning_rate": 0.00012079081632653062, + "loss": 1.3774, + "step": 621 + }, + { + "epoch": 12.59, + "learning_rate": 0.00012066326530612247, + "loss": 1.3203, + "step": 622 + }, + { + "epoch": 12.61, + "learning_rate": 0.00012053571428571429, + "loss": 1.2924, + "step": 623 + }, + { + "epoch": 12.63, + "learning_rate": 0.00012040816326530613, + "loss": 1.3292, + "step": 624 + }, + { + "epoch": 12.65, + "learning_rate": 0.00012028061224489798, + "loss": 1.3161, + "step": 625 + }, + { + "epoch": 12.67, + "learning_rate": 0.00012015306122448979, + "loss": 1.352, + "step": 626 + }, + { + "epoch": 12.69, + "learning_rate": 0.00012002551020408164, + "loss": 1.3577, + "step": 627 + }, + { + "epoch": 12.71, + "learning_rate": 0.00011989795918367348, + "loss": 1.3575, + "step": 628 + }, + { + "epoch": 12.73, + "learning_rate": 0.0001197704081632653, + "loss": 1.3727, + "step": 629 + }, + { + "epoch": 12.75, + "learning_rate": 0.00011964285714285714, + "loss": 1.3312, + "step": 630 + }, + { + "epoch": 12.77, + "learning_rate": 0.00011951530612244899, + "loss": 1.3378, + "step": 631 + }, + { + "epoch": 12.79, + "learning_rate": 0.00011938775510204083, + "loss": 1.295, + "step": 632 + }, + { + "epoch": 12.81, + "learning_rate": 0.00011926020408163265, + "loss": 1.3447, + "step": 633 + }, + { + "epoch": 12.83, + "learning_rate": 0.0001191326530612245, + "loss": 1.3835, + "step": 634 + }, + { + "epoch": 12.85, + "learning_rate": 0.00011900510204081634, + "loss": 1.3222, + "step": 635 + }, + { + "epoch": 12.87, + "learning_rate": 0.00011887755102040817, + "loss": 1.2851, + "step": 636 + }, + { + "epoch": 12.89, + "learning_rate": 0.00011875, + "loss": 1.2723, + "step": 637 + }, + { + "epoch": 12.91, + "learning_rate": 0.00011862244897959184, + "loss": 1.3924, + "step": 638 + }, + { + "epoch": 12.93, + "learning_rate": 0.00011849489795918368, + "loss": 1.4625, + "step": 639 + }, + { + "epoch": 12.95, + "learning_rate": 0.00011836734693877552, + "loss": 1.3245, + "step": 640 + }, + { + "epoch": 12.97, + "learning_rate": 0.00011823979591836736, + "loss": 1.4042, + "step": 641 + }, + { + "epoch": 12.99, + "learning_rate": 0.00011811224489795918, + "loss": 1.3761, + "step": 642 + }, + { + "epoch": 13.01, + "learning_rate": 0.00011798469387755103, + "loss": 1.3376, + "step": 643 + }, + { + "epoch": 13.03, + "learning_rate": 0.00011785714285714287, + "loss": 1.2174, + "step": 644 + }, + { + "epoch": 13.05, + "learning_rate": 0.00011772959183673471, + "loss": 1.3602, + "step": 645 + }, + { + "epoch": 13.07, + "learning_rate": 0.00011760204081632653, + "loss": 1.3002, + "step": 646 + }, + { + "epoch": 13.09, + "learning_rate": 0.00011747448979591838, + "loss": 1.2262, + "step": 647 + }, + { + "epoch": 13.11, + "learning_rate": 0.00011734693877551022, + "loss": 1.3048, + "step": 648 + }, + { + "epoch": 13.13, + "learning_rate": 0.00011721938775510204, + "loss": 1.2231, + "step": 649 + }, + { + "epoch": 13.15, + "learning_rate": 0.00011709183673469388, + "loss": 1.2996, + "step": 650 + }, + { + "epoch": 13.17, + "learning_rate": 
0.00011696428571428573, + "loss": 1.2708, + "step": 651 + }, + { + "epoch": 13.19, + "learning_rate": 0.00011683673469387754, + "loss": 1.2776, + "step": 652 + }, + { + "epoch": 13.21, + "learning_rate": 0.00011670918367346939, + "loss": 1.248, + "step": 653 + }, + { + "epoch": 13.23, + "learning_rate": 0.00011658163265306123, + "loss": 1.2582, + "step": 654 + }, + { + "epoch": 13.25, + "learning_rate": 0.00011645408163265305, + "loss": 1.3011, + "step": 655 + }, + { + "epoch": 13.27, + "learning_rate": 0.0001163265306122449, + "loss": 1.2969, + "step": 656 + }, + { + "epoch": 13.29, + "learning_rate": 0.00011619897959183674, + "loss": 1.2454, + "step": 657 + }, + { + "epoch": 13.31, + "learning_rate": 0.00011607142857142858, + "loss": 1.1914, + "step": 658 + }, + { + "epoch": 13.33, + "learning_rate": 0.00011594387755102041, + "loss": 1.34, + "step": 659 + }, + { + "epoch": 13.35, + "learning_rate": 0.00011581632653061225, + "loss": 1.2828, + "step": 660 + }, + { + "epoch": 13.37, + "learning_rate": 0.00011568877551020409, + "loss": 1.2962, + "step": 661 + }, + { + "epoch": 13.39, + "learning_rate": 0.00011556122448979592, + "loss": 1.3334, + "step": 662 + }, + { + "epoch": 13.42, + "learning_rate": 0.00011543367346938776, + "loss": 1.2832, + "step": 663 + }, + { + "epoch": 13.44, + "learning_rate": 0.00011530612244897961, + "loss": 1.3012, + "step": 664 + }, + { + "epoch": 13.46, + "learning_rate": 0.00011517857142857143, + "loss": 1.2857, + "step": 665 + }, + { + "epoch": 13.48, + "learning_rate": 0.00011505102040816327, + "loss": 1.2855, + "step": 666 + }, + { + "epoch": 13.5, + "learning_rate": 0.00011492346938775512, + "loss": 1.3077, + "step": 667 + }, + { + "epoch": 13.52, + "learning_rate": 0.00011479591836734696, + "loss": 1.3139, + "step": 668 + }, + { + "epoch": 13.54, + "learning_rate": 0.00011466836734693878, + "loss": 1.3138, + "step": 669 + }, + { + "epoch": 13.56, + "learning_rate": 0.00011454081632653062, + "loss": 1.2808, + "step": 670 + }, + { + "epoch": 13.58, + "learning_rate": 0.00011441326530612247, + "loss": 1.2492, + "step": 671 + }, + { + "epoch": 13.6, + "learning_rate": 0.00011428571428571428, + "loss": 1.2027, + "step": 672 + }, + { + "epoch": 13.62, + "learning_rate": 0.00011415816326530613, + "loss": 1.33, + "step": 673 + }, + { + "epoch": 13.64, + "learning_rate": 0.00011403061224489797, + "loss": 1.3112, + "step": 674 + }, + { + "epoch": 13.66, + "learning_rate": 0.00011390306122448979, + "loss": 1.2772, + "step": 675 + }, + { + "epoch": 13.68, + "learning_rate": 0.00011377551020408163, + "loss": 1.2701, + "step": 676 + }, + { + "epoch": 13.7, + "learning_rate": 0.00011364795918367348, + "loss": 1.1973, + "step": 677 + }, + { + "epoch": 13.72, + "learning_rate": 0.0001135204081632653, + "loss": 1.3124, + "step": 678 + }, + { + "epoch": 13.74, + "learning_rate": 0.00011339285714285714, + "loss": 1.3085, + "step": 679 + }, + { + "epoch": 13.76, + "learning_rate": 0.00011326530612244898, + "loss": 1.3457, + "step": 680 + }, + { + "epoch": 13.78, + "learning_rate": 0.00011313775510204083, + "loss": 1.3338, + "step": 681 + }, + { + "epoch": 13.8, + "learning_rate": 0.00011301020408163266, + "loss": 1.2753, + "step": 682 + }, + { + "epoch": 13.82, + "learning_rate": 0.00011288265306122449, + "loss": 1.2786, + "step": 683 + }, + { + "epoch": 13.84, + "learning_rate": 0.00011275510204081634, + "loss": 1.2584, + "step": 684 + }, + { + "epoch": 13.86, + "learning_rate": 0.00011262755102040817, + "loss": 1.2779, + "step": 685 + }, + { + "epoch": 13.88, + 
"learning_rate": 0.00011250000000000001, + "loss": 1.3502, + "step": 686 + }, + { + "epoch": 13.9, + "learning_rate": 0.00011237244897959185, + "loss": 1.3251, + "step": 687 + }, + { + "epoch": 13.92, + "learning_rate": 0.00011224489795918367, + "loss": 1.273, + "step": 688 + }, + { + "epoch": 13.94, + "learning_rate": 0.00011211734693877552, + "loss": 1.3341, + "step": 689 + }, + { + "epoch": 13.96, + "learning_rate": 0.00011198979591836736, + "loss": 1.2654, + "step": 690 + }, + { + "epoch": 13.98, + "learning_rate": 0.00011186224489795918, + "loss": 1.3333, + "step": 691 + }, + { + "epoch": 14.0, + "learning_rate": 0.00011173469387755102, + "loss": 1.3246, + "step": 692 + }, + { + "epoch": 14.02, + "learning_rate": 0.00011160714285714287, + "loss": 1.2547, + "step": 693 + }, + { + "epoch": 14.04, + "learning_rate": 0.00011147959183673471, + "loss": 1.208, + "step": 694 + }, + { + "epoch": 14.06, + "learning_rate": 0.00011135204081632653, + "loss": 1.223, + "step": 695 + }, + { + "epoch": 14.08, + "learning_rate": 0.00011122448979591837, + "loss": 1.2483, + "step": 696 + }, + { + "epoch": 14.1, + "learning_rate": 0.00011109693877551022, + "loss": 1.2823, + "step": 697 + }, + { + "epoch": 14.12, + "learning_rate": 0.00011096938775510204, + "loss": 1.2013, + "step": 698 + }, + { + "epoch": 14.14, + "learning_rate": 0.00011084183673469388, + "loss": 1.1883, + "step": 699 + }, + { + "epoch": 14.16, + "learning_rate": 0.00011071428571428572, + "loss": 1.2364, + "step": 700 + }, + { + "epoch": 14.18, + "learning_rate": 0.00011058673469387754, + "loss": 1.2069, + "step": 701 + }, + { + "epoch": 14.2, + "learning_rate": 0.00011045918367346939, + "loss": 1.1968, + "step": 702 + }, + { + "epoch": 14.22, + "learning_rate": 0.00011033163265306123, + "loss": 1.2236, + "step": 703 + }, + { + "epoch": 14.24, + "learning_rate": 0.00011020408163265306, + "loss": 1.1942, + "step": 704 + }, + { + "epoch": 14.26, + "learning_rate": 0.0001100765306122449, + "loss": 1.2561, + "step": 705 + }, + { + "epoch": 14.29, + "learning_rate": 0.00010994897959183674, + "loss": 1.1839, + "step": 706 + }, + { + "epoch": 14.31, + "learning_rate": 0.00010982142857142858, + "loss": 1.2128, + "step": 707 + }, + { + "epoch": 14.33, + "learning_rate": 0.00010969387755102041, + "loss": 1.3086, + "step": 708 + }, + { + "epoch": 14.35, + "learning_rate": 0.00010956632653061226, + "loss": 1.2379, + "step": 709 + }, + { + "epoch": 14.37, + "learning_rate": 0.0001094387755102041, + "loss": 1.176, + "step": 710 + }, + { + "epoch": 14.39, + "learning_rate": 0.00010931122448979592, + "loss": 1.2105, + "step": 711 + }, + { + "epoch": 14.41, + "learning_rate": 0.00010918367346938776, + "loss": 1.2149, + "step": 712 + }, + { + "epoch": 14.43, + "learning_rate": 0.0001090561224489796, + "loss": 1.2392, + "step": 713 + }, + { + "epoch": 14.45, + "learning_rate": 0.00010892857142857142, + "loss": 1.2471, + "step": 714 + }, + { + "epoch": 14.47, + "learning_rate": 0.00010880102040816327, + "loss": 1.2561, + "step": 715 + }, + { + "epoch": 14.49, + "learning_rate": 0.00010867346938775511, + "loss": 1.2179, + "step": 716 + }, + { + "epoch": 14.51, + "learning_rate": 0.00010854591836734696, + "loss": 1.2459, + "step": 717 + }, + { + "epoch": 14.53, + "learning_rate": 0.00010841836734693877, + "loss": 1.2933, + "step": 718 + }, + { + "epoch": 14.55, + "learning_rate": 0.00010829081632653062, + "loss": 1.2862, + "step": 719 + }, + { + "epoch": 14.57, + "learning_rate": 0.00010816326530612246, + "loss": 1.2976, + "step": 720 + }, + { + "epoch": 
14.59, + "learning_rate": 0.00010803571428571428, + "loss": 1.231, + "step": 721 + }, + { + "epoch": 14.61, + "learning_rate": 0.00010790816326530613, + "loss": 1.2464, + "step": 722 + }, + { + "epoch": 14.63, + "learning_rate": 0.00010778061224489797, + "loss": 1.2181, + "step": 723 + }, + { + "epoch": 14.65, + "learning_rate": 0.00010765306122448979, + "loss": 1.3307, + "step": 724 + }, + { + "epoch": 14.67, + "learning_rate": 0.00010752551020408163, + "loss": 1.1723, + "step": 725 + }, + { + "epoch": 14.69, + "learning_rate": 0.00010739795918367348, + "loss": 1.1528, + "step": 726 + }, + { + "epoch": 14.71, + "learning_rate": 0.0001072704081632653, + "loss": 1.215, + "step": 727 + }, + { + "epoch": 14.73, + "learning_rate": 0.00010714285714285715, + "loss": 1.2624, + "step": 728 + }, + { + "epoch": 14.75, + "learning_rate": 0.00010701530612244898, + "loss": 1.3117, + "step": 729 + }, + { + "epoch": 14.77, + "learning_rate": 0.00010688775510204083, + "loss": 1.2572, + "step": 730 + }, + { + "epoch": 14.79, + "learning_rate": 0.00010676020408163266, + "loss": 1.222, + "step": 731 + }, + { + "epoch": 14.81, + "learning_rate": 0.0001066326530612245, + "loss": 1.2881, + "step": 732 + }, + { + "epoch": 14.83, + "learning_rate": 0.00010650510204081635, + "loss": 1.2676, + "step": 733 + }, + { + "epoch": 14.85, + "learning_rate": 0.00010637755102040816, + "loss": 1.2734, + "step": 734 + }, + { + "epoch": 14.87, + "learning_rate": 0.00010625000000000001, + "loss": 1.2885, + "step": 735 + }, + { + "epoch": 14.89, + "learning_rate": 0.00010612244897959185, + "loss": 1.2764, + "step": 736 + }, + { + "epoch": 14.91, + "learning_rate": 0.00010599489795918367, + "loss": 1.3267, + "step": 737 + }, + { + "epoch": 14.93, + "learning_rate": 0.00010586734693877551, + "loss": 1.2445, + "step": 738 + }, + { + "epoch": 14.95, + "learning_rate": 0.00010573979591836736, + "loss": 1.3359, + "step": 739 + }, + { + "epoch": 14.97, + "learning_rate": 0.00010561224489795918, + "loss": 1.2508, + "step": 740 + }, + { + "epoch": 14.99, + "learning_rate": 0.00010548469387755102, + "loss": 1.2227, + "step": 741 + }, + { + "epoch": 15.01, + "learning_rate": 0.00010535714285714286, + "loss": 1.1889, + "step": 742 + }, + { + "epoch": 15.03, + "learning_rate": 0.00010522959183673471, + "loss": 1.1919, + "step": 743 + }, + { + "epoch": 15.05, + "learning_rate": 0.00010510204081632653, + "loss": 1.2383, + "step": 744 + }, + { + "epoch": 15.07, + "learning_rate": 0.00010497448979591837, + "loss": 1.2401, + "step": 745 + }, + { + "epoch": 15.09, + "learning_rate": 0.00010484693877551021, + "loss": 1.2015, + "step": 746 + }, + { + "epoch": 15.11, + "learning_rate": 0.00010471938775510203, + "loss": 1.1509, + "step": 747 + }, + { + "epoch": 15.13, + "learning_rate": 0.00010459183673469388, + "loss": 1.1878, + "step": 748 + }, + { + "epoch": 15.16, + "learning_rate": 0.00010446428571428572, + "loss": 1.1706, + "step": 749 + }, + { + "epoch": 15.18, + "learning_rate": 0.00010433673469387755, + "loss": 1.1285, + "step": 750 + }, + { + "epoch": 15.2, + "learning_rate": 0.0001042091836734694, + "loss": 1.1608, + "step": 751 + }, + { + "epoch": 15.22, + "learning_rate": 0.00010408163265306123, + "loss": 1.1178, + "step": 752 + }, + { + "epoch": 15.24, + "learning_rate": 0.00010395408163265306, + "loss": 1.1293, + "step": 753 + }, + { + "epoch": 15.26, + "learning_rate": 0.0001038265306122449, + "loss": 1.2306, + "step": 754 + }, + { + "epoch": 15.28, + "learning_rate": 0.00010369897959183675, + "loss": 1.1541, + "step": 755 + }, + { + 
"epoch": 15.3, + "learning_rate": 0.00010357142857142859, + "loss": 1.1702, + "step": 756 + }, + { + "epoch": 15.32, + "learning_rate": 0.00010344387755102041, + "loss": 1.2119, + "step": 757 + }, + { + "epoch": 15.34, + "learning_rate": 0.00010331632653061225, + "loss": 1.2239, + "step": 758 + }, + { + "epoch": 15.36, + "learning_rate": 0.0001031887755102041, + "loss": 1.2019, + "step": 759 + }, + { + "epoch": 15.38, + "learning_rate": 0.00010306122448979591, + "loss": 1.2197, + "step": 760 + }, + { + "epoch": 15.4, + "learning_rate": 0.00010293367346938776, + "loss": 1.1769, + "step": 761 + }, + { + "epoch": 15.42, + "learning_rate": 0.0001028061224489796, + "loss": 1.1907, + "step": 762 + }, + { + "epoch": 15.44, + "learning_rate": 0.00010267857142857142, + "loss": 1.2089, + "step": 763 + }, + { + "epoch": 15.46, + "learning_rate": 0.00010255102040816327, + "loss": 1.1335, + "step": 764 + }, + { + "epoch": 15.48, + "learning_rate": 0.00010242346938775511, + "loss": 1.1633, + "step": 765 + }, + { + "epoch": 15.5, + "learning_rate": 0.00010229591836734695, + "loss": 1.1578, + "step": 766 + }, + { + "epoch": 15.52, + "learning_rate": 0.00010216836734693877, + "loss": 1.2236, + "step": 767 + }, + { + "epoch": 15.54, + "learning_rate": 0.00010204081632653062, + "loss": 1.1941, + "step": 768 + }, + { + "epoch": 15.56, + "learning_rate": 0.00010191326530612246, + "loss": 1.2666, + "step": 769 + }, + { + "epoch": 15.58, + "learning_rate": 0.00010178571428571428, + "loss": 1.1232, + "step": 770 + }, + { + "epoch": 15.6, + "learning_rate": 0.00010165816326530612, + "loss": 1.2242, + "step": 771 + }, + { + "epoch": 15.62, + "learning_rate": 0.00010153061224489797, + "loss": 1.1852, + "step": 772 + }, + { + "epoch": 15.64, + "learning_rate": 0.0001014030612244898, + "loss": 1.2626, + "step": 773 + }, + { + "epoch": 15.66, + "learning_rate": 0.00010127551020408164, + "loss": 1.1873, + "step": 774 + }, + { + "epoch": 15.68, + "learning_rate": 0.00010114795918367349, + "loss": 1.3005, + "step": 775 + }, + { + "epoch": 15.7, + "learning_rate": 0.0001010204081632653, + "loss": 1.1904, + "step": 776 + }, + { + "epoch": 15.72, + "learning_rate": 0.00010089285714285715, + "loss": 1.2927, + "step": 777 + }, + { + "epoch": 15.74, + "learning_rate": 0.00010076530612244899, + "loss": 1.179, + "step": 778 + }, + { + "epoch": 15.76, + "learning_rate": 0.00010063775510204084, + "loss": 1.2027, + "step": 779 + }, + { + "epoch": 15.78, + "learning_rate": 0.00010051020408163265, + "loss": 1.2428, + "step": 780 + }, + { + "epoch": 15.8, + "learning_rate": 0.0001003826530612245, + "loss": 1.2324, + "step": 781 + }, + { + "epoch": 15.82, + "learning_rate": 0.00010025510204081634, + "loss": 1.1251, + "step": 782 + }, + { + "epoch": 15.84, + "learning_rate": 0.00010012755102040816, + "loss": 1.2405, + "step": 783 + }, + { + "epoch": 15.86, + "learning_rate": 0.0001, + "loss": 1.2005, + "step": 784 + }, + { + "epoch": 15.88, + "learning_rate": 9.987244897959184e-05, + "loss": 1.2259, + "step": 785 + }, + { + "epoch": 15.9, + "learning_rate": 9.974489795918368e-05, + "loss": 1.1576, + "step": 786 + }, + { + "epoch": 15.92, + "learning_rate": 9.961734693877551e-05, + "loss": 1.1834, + "step": 787 + }, + { + "epoch": 15.94, + "learning_rate": 9.948979591836736e-05, + "loss": 1.2396, + "step": 788 + }, + { + "epoch": 15.96, + "learning_rate": 9.936224489795919e-05, + "loss": 1.1865, + "step": 789 + }, + { + "epoch": 15.98, + "learning_rate": 9.923469387755102e-05, + "loss": 1.2356, + "step": 790 + }, + { + "epoch": 16.01, + 
"learning_rate": 9.910714285714286e-05, + "loss": 1.2639, + "step": 791 + }, + { + "epoch": 16.03, + "learning_rate": 9.897959183673469e-05, + "loss": 1.1216, + "step": 792 + }, + { + "epoch": 16.05, + "learning_rate": 9.885204081632652e-05, + "loss": 1.1051, + "step": 793 + }, + { + "epoch": 16.07, + "learning_rate": 9.872448979591837e-05, + "loss": 1.0864, + "step": 794 + }, + { + "epoch": 16.09, + "learning_rate": 9.859693877551021e-05, + "loss": 1.182, + "step": 795 + }, + { + "epoch": 16.11, + "learning_rate": 9.846938775510204e-05, + "loss": 1.1272, + "step": 796 + }, + { + "epoch": 16.13, + "learning_rate": 9.834183673469389e-05, + "loss": 1.1946, + "step": 797 + }, + { + "epoch": 16.15, + "learning_rate": 9.821428571428572e-05, + "loss": 1.0875, + "step": 798 + }, + { + "epoch": 16.17, + "learning_rate": 9.808673469387756e-05, + "loss": 1.1671, + "step": 799 + }, + { + "epoch": 16.19, + "learning_rate": 9.79591836734694e-05, + "loss": 1.1502, + "step": 800 + }, + { + "epoch": 16.21, + "learning_rate": 9.783163265306124e-05, + "loss": 1.19, + "step": 801 + }, + { + "epoch": 16.23, + "learning_rate": 9.770408163265307e-05, + "loss": 1.1258, + "step": 802 + }, + { + "epoch": 16.25, + "learning_rate": 9.75765306122449e-05, + "loss": 1.1765, + "step": 803 + }, + { + "epoch": 16.27, + "learning_rate": 9.744897959183674e-05, + "loss": 1.1217, + "step": 804 + }, + { + "epoch": 16.29, + "learning_rate": 9.732142857142858e-05, + "loss": 1.1293, + "step": 805 + }, + { + "epoch": 16.31, + "learning_rate": 9.719387755102042e-05, + "loss": 1.17, + "step": 806 + }, + { + "epoch": 16.33, + "learning_rate": 9.706632653061225e-05, + "loss": 1.17, + "step": 807 + }, + { + "epoch": 16.35, + "learning_rate": 9.693877551020408e-05, + "loss": 1.2004, + "step": 808 + }, + { + "epoch": 16.37, + "learning_rate": 9.681122448979593e-05, + "loss": 1.1648, + "step": 809 + }, + { + "epoch": 16.39, + "learning_rate": 9.668367346938776e-05, + "loss": 1.0688, + "step": 810 + }, + { + "epoch": 16.41, + "learning_rate": 9.655612244897959e-05, + "loss": 1.1607, + "step": 811 + }, + { + "epoch": 16.43, + "learning_rate": 9.642857142857143e-05, + "loss": 1.1298, + "step": 812 + }, + { + "epoch": 16.45, + "learning_rate": 9.630102040816326e-05, + "loss": 1.1064, + "step": 813 + }, + { + "epoch": 16.47, + "learning_rate": 9.617346938775511e-05, + "loss": 1.1472, + "step": 814 + }, + { + "epoch": 16.49, + "learning_rate": 9.604591836734694e-05, + "loss": 1.1577, + "step": 815 + }, + { + "epoch": 16.51, + "learning_rate": 9.591836734693878e-05, + "loss": 1.1436, + "step": 816 + }, + { + "epoch": 16.53, + "learning_rate": 9.579081632653061e-05, + "loss": 1.1657, + "step": 817 + }, + { + "epoch": 16.55, + "learning_rate": 9.566326530612246e-05, + "loss": 1.1147, + "step": 818 + }, + { + "epoch": 16.57, + "learning_rate": 9.553571428571429e-05, + "loss": 1.1839, + "step": 819 + }, + { + "epoch": 16.59, + "learning_rate": 9.540816326530613e-05, + "loss": 1.1298, + "step": 820 + }, + { + "epoch": 16.61, + "learning_rate": 9.528061224489796e-05, + "loss": 1.2141, + "step": 821 + }, + { + "epoch": 16.63, + "learning_rate": 9.515306122448981e-05, + "loss": 1.2045, + "step": 822 + }, + { + "epoch": 16.65, + "learning_rate": 9.502551020408164e-05, + "loss": 1.1791, + "step": 823 + }, + { + "epoch": 16.67, + "learning_rate": 9.489795918367348e-05, + "loss": 1.1137, + "step": 824 + }, + { + "epoch": 16.69, + "learning_rate": 9.477040816326531e-05, + "loss": 1.1312, + "step": 825 + }, + { + "epoch": 16.71, + "learning_rate": 
9.464285714285715e-05, + "loss": 1.1102, + "step": 826 + }, + { + "epoch": 16.73, + "learning_rate": 9.451530612244899e-05, + "loss": 1.1865, + "step": 827 + }, + { + "epoch": 16.75, + "learning_rate": 9.438775510204082e-05, + "loss": 1.1232, + "step": 828 + }, + { + "epoch": 16.77, + "learning_rate": 9.426020408163265e-05, + "loss": 1.2068, + "step": 829 + }, + { + "epoch": 16.79, + "learning_rate": 9.41326530612245e-05, + "loss": 1.1864, + "step": 830 + }, + { + "epoch": 16.81, + "learning_rate": 9.400510204081633e-05, + "loss": 1.2195, + "step": 831 + }, + { + "epoch": 16.83, + "learning_rate": 9.387755102040817e-05, + "loss": 1.2063, + "step": 832 + }, + { + "epoch": 16.85, + "learning_rate": 9.375e-05, + "loss": 1.1455, + "step": 833 + }, + { + "epoch": 16.88, + "learning_rate": 9.362244897959183e-05, + "loss": 1.1819, + "step": 834 + }, + { + "epoch": 16.9, + "learning_rate": 9.349489795918368e-05, + "loss": 1.1887, + "step": 835 + }, + { + "epoch": 16.92, + "learning_rate": 9.336734693877551e-05, + "loss": 1.1557, + "step": 836 + }, + { + "epoch": 16.94, + "learning_rate": 9.323979591836735e-05, + "loss": 1.2094, + "step": 837 + }, + { + "epoch": 16.96, + "learning_rate": 9.311224489795918e-05, + "loss": 1.1512, + "step": 838 + }, + { + "epoch": 16.98, + "learning_rate": 9.298469387755103e-05, + "loss": 1.1463, + "step": 839 + }, + { + "epoch": 17.0, + "learning_rate": 9.285714285714286e-05, + "loss": 1.155, + "step": 840 + }, + { + "epoch": 17.02, + "learning_rate": 9.27295918367347e-05, + "loss": 1.1292, + "step": 841 + }, + { + "epoch": 17.04, + "learning_rate": 9.260204081632653e-05, + "loss": 1.0996, + "step": 842 + }, + { + "epoch": 17.06, + "learning_rate": 9.247448979591838e-05, + "loss": 1.0662, + "step": 843 + }, + { + "epoch": 17.08, + "learning_rate": 9.234693877551021e-05, + "loss": 1.0931, + "step": 844 + }, + { + "epoch": 17.1, + "learning_rate": 9.221938775510205e-05, + "loss": 1.0727, + "step": 845 + }, + { + "epoch": 17.12, + "learning_rate": 9.209183673469388e-05, + "loss": 1.1043, + "step": 846 + }, + { + "epoch": 17.14, + "learning_rate": 9.196428571428572e-05, + "loss": 1.0594, + "step": 847 + }, + { + "epoch": 17.16, + "learning_rate": 9.183673469387756e-05, + "loss": 1.0952, + "step": 848 + }, + { + "epoch": 17.18, + "learning_rate": 9.170918367346939e-05, + "loss": 1.0639, + "step": 849 + }, + { + "epoch": 17.2, + "learning_rate": 9.158163265306124e-05, + "loss": 1.132, + "step": 850 + }, + { + "epoch": 17.22, + "learning_rate": 9.145408163265307e-05, + "loss": 1.1083, + "step": 851 + }, + { + "epoch": 17.24, + "learning_rate": 9.13265306122449e-05, + "loss": 1.1282, + "step": 852 + }, + { + "epoch": 17.26, + "learning_rate": 9.119897959183674e-05, + "loss": 1.0474, + "step": 853 + }, + { + "epoch": 17.28, + "learning_rate": 9.107142857142857e-05, + "loss": 1.1138, + "step": 854 + }, + { + "epoch": 17.3, + "learning_rate": 9.094387755102042e-05, + "loss": 1.1025, + "step": 855 + }, + { + "epoch": 17.32, + "learning_rate": 9.081632653061225e-05, + "loss": 1.0968, + "step": 856 + }, + { + "epoch": 17.34, + "learning_rate": 9.068877551020408e-05, + "loss": 1.1683, + "step": 857 + }, + { + "epoch": 17.36, + "learning_rate": 9.056122448979592e-05, + "loss": 1.0975, + "step": 858 + }, + { + "epoch": 17.38, + "learning_rate": 9.043367346938775e-05, + "loss": 1.1274, + "step": 859 + }, + { + "epoch": 17.4, + "learning_rate": 9.030612244897958e-05, + "loss": 1.0916, + "step": 860 + }, + { + "epoch": 17.42, + "learning_rate": 9.017857142857143e-05, + "loss": 1.0912, + 
"step": 861 + }, + { + "epoch": 17.44, + "learning_rate": 9.005102040816327e-05, + "loss": 1.0875, + "step": 862 + }, + { + "epoch": 17.46, + "learning_rate": 8.99234693877551e-05, + "loss": 1.05, + "step": 863 + }, + { + "epoch": 17.48, + "learning_rate": 8.979591836734695e-05, + "loss": 1.1418, + "step": 864 + }, + { + "epoch": 17.5, + "learning_rate": 8.966836734693878e-05, + "loss": 1.0609, + "step": 865 + }, + { + "epoch": 17.52, + "learning_rate": 8.954081632653062e-05, + "loss": 1.1611, + "step": 866 + }, + { + "epoch": 17.54, + "learning_rate": 8.941326530612245e-05, + "loss": 1.1065, + "step": 867 + }, + { + "epoch": 17.56, + "learning_rate": 8.92857142857143e-05, + "loss": 1.1611, + "step": 868 + }, + { + "epoch": 17.58, + "learning_rate": 8.915816326530613e-05, + "loss": 1.1398, + "step": 869 + }, + { + "epoch": 17.6, + "learning_rate": 8.903061224489796e-05, + "loss": 1.1055, + "step": 870 + }, + { + "epoch": 17.62, + "learning_rate": 8.89030612244898e-05, + "loss": 1.1314, + "step": 871 + }, + { + "epoch": 17.64, + "learning_rate": 8.877551020408164e-05, + "loss": 1.1084, + "step": 872 + }, + { + "epoch": 17.66, + "learning_rate": 8.864795918367348e-05, + "loss": 1.1254, + "step": 873 + }, + { + "epoch": 17.68, + "learning_rate": 8.852040816326531e-05, + "loss": 1.142, + "step": 874 + }, + { + "epoch": 17.7, + "learning_rate": 8.839285714285714e-05, + "loss": 1.1371, + "step": 875 + }, + { + "epoch": 17.72, + "learning_rate": 8.826530612244899e-05, + "loss": 1.1092, + "step": 876 + }, + { + "epoch": 17.75, + "learning_rate": 8.813775510204082e-05, + "loss": 1.161, + "step": 877 + }, + { + "epoch": 17.77, + "learning_rate": 8.801020408163265e-05, + "loss": 1.1044, + "step": 878 + }, + { + "epoch": 17.79, + "learning_rate": 8.788265306122449e-05, + "loss": 1.117, + "step": 879 + }, + { + "epoch": 17.81, + "learning_rate": 8.775510204081632e-05, + "loss": 1.1262, + "step": 880 + }, + { + "epoch": 17.83, + "learning_rate": 8.762755102040817e-05, + "loss": 1.0829, + "step": 881 + }, + { + "epoch": 17.85, + "learning_rate": 8.75e-05, + "loss": 1.1393, + "step": 882 + }, + { + "epoch": 17.87, + "learning_rate": 8.737244897959183e-05, + "loss": 1.1781, + "step": 883 + }, + { + "epoch": 17.89, + "learning_rate": 8.724489795918367e-05, + "loss": 1.1582, + "step": 884 + }, + { + "epoch": 17.91, + "learning_rate": 8.711734693877552e-05, + "loss": 1.1469, + "step": 885 + }, + { + "epoch": 17.93, + "learning_rate": 8.698979591836735e-05, + "loss": 1.1494, + "step": 886 + }, + { + "epoch": 17.95, + "learning_rate": 8.68622448979592e-05, + "loss": 1.1251, + "step": 887 + }, + { + "epoch": 17.97, + "learning_rate": 8.673469387755102e-05, + "loss": 1.1624, + "step": 888 + }, + { + "epoch": 17.99, + "learning_rate": 8.660714285714287e-05, + "loss": 1.0842, + "step": 889 + }, + { + "epoch": 18.01, + "learning_rate": 8.64795918367347e-05, + "loss": 1.1944, + "step": 890 + }, + { + "epoch": 18.03, + "learning_rate": 8.635204081632653e-05, + "loss": 1.0642, + "step": 891 + }, + { + "epoch": 18.05, + "learning_rate": 8.622448979591838e-05, + "loss": 1.0459, + "step": 892 + }, + { + "epoch": 18.07, + "learning_rate": 8.60969387755102e-05, + "loss": 1.0941, + "step": 893 + }, + { + "epoch": 18.09, + "learning_rate": 8.596938775510205e-05, + "loss": 1.0457, + "step": 894 + }, + { + "epoch": 18.11, + "learning_rate": 8.584183673469388e-05, + "loss": 1.1033, + "step": 895 + }, + { + "epoch": 18.13, + "learning_rate": 8.571428571428571e-05, + "loss": 1.0756, + "step": 896 + }, + { + "epoch": 18.15, + 
"learning_rate": 8.558673469387756e-05, + "loss": 1.0615, + "step": 897 + }, + { + "epoch": 18.17, + "learning_rate": 8.545918367346939e-05, + "loss": 1.0828, + "step": 898 + }, + { + "epoch": 18.19, + "learning_rate": 8.533163265306123e-05, + "loss": 1.1158, + "step": 899 + }, + { + "epoch": 18.21, + "learning_rate": 8.520408163265306e-05, + "loss": 1.0133, + "step": 900 + }, + { + "epoch": 18.23, + "learning_rate": 8.50765306122449e-05, + "loss": 1.0437, + "step": 901 + }, + { + "epoch": 18.25, + "learning_rate": 8.494897959183674e-05, + "loss": 1.0372, + "step": 902 + }, + { + "epoch": 18.27, + "learning_rate": 8.482142857142857e-05, + "loss": 1.1012, + "step": 903 + }, + { + "epoch": 18.29, + "learning_rate": 8.469387755102041e-05, + "loss": 1.0777, + "step": 904 + }, + { + "epoch": 18.31, + "learning_rate": 8.456632653061224e-05, + "loss": 1.0799, + "step": 905 + }, + { + "epoch": 18.33, + "learning_rate": 8.443877551020409e-05, + "loss": 0.9846, + "step": 906 + }, + { + "epoch": 18.35, + "learning_rate": 8.431122448979592e-05, + "loss": 1.1, + "step": 907 + }, + { + "epoch": 18.37, + "learning_rate": 8.418367346938776e-05, + "loss": 1.0787, + "step": 908 + }, + { + "epoch": 18.39, + "learning_rate": 8.40561224489796e-05, + "loss": 1.0647, + "step": 909 + }, + { + "epoch": 18.41, + "learning_rate": 8.392857142857144e-05, + "loss": 1.056, + "step": 910 + }, + { + "epoch": 18.43, + "learning_rate": 8.380102040816327e-05, + "loss": 1.1131, + "step": 911 + }, + { + "epoch": 18.45, + "learning_rate": 8.367346938775511e-05, + "loss": 1.0825, + "step": 912 + }, + { + "epoch": 18.47, + "learning_rate": 8.354591836734695e-05, + "loss": 1.0681, + "step": 913 + }, + { + "epoch": 18.49, + "learning_rate": 8.341836734693878e-05, + "loss": 1.0479, + "step": 914 + }, + { + "epoch": 18.51, + "learning_rate": 8.329081632653062e-05, + "loss": 1.0921, + "step": 915 + }, + { + "epoch": 18.53, + "learning_rate": 8.316326530612245e-05, + "loss": 1.0626, + "step": 916 + }, + { + "epoch": 18.55, + "learning_rate": 8.30357142857143e-05, + "loss": 1.0518, + "step": 917 + }, + { + "epoch": 18.57, + "learning_rate": 8.290816326530613e-05, + "loss": 1.0557, + "step": 918 + }, + { + "epoch": 18.6, + "learning_rate": 8.278061224489796e-05, + "loss": 1.0831, + "step": 919 + }, + { + "epoch": 18.62, + "learning_rate": 8.26530612244898e-05, + "loss": 1.0307, + "step": 920 + }, + { + "epoch": 18.64, + "learning_rate": 8.252551020408163e-05, + "loss": 1.0455, + "step": 921 + }, + { + "epoch": 18.66, + "learning_rate": 8.239795918367348e-05, + "loss": 1.0667, + "step": 922 + }, + { + "epoch": 18.68, + "learning_rate": 8.227040816326531e-05, + "loss": 1.0736, + "step": 923 + }, + { + "epoch": 18.7, + "learning_rate": 8.214285714285714e-05, + "loss": 1.0108, + "step": 924 + }, + { + "epoch": 18.72, + "learning_rate": 8.201530612244898e-05, + "loss": 1.0458, + "step": 925 + }, + { + "epoch": 18.74, + "learning_rate": 8.188775510204081e-05, + "loss": 1.0852, + "step": 926 + }, + { + "epoch": 18.76, + "learning_rate": 8.176020408163265e-05, + "loss": 1.1207, + "step": 927 + }, + { + "epoch": 18.78, + "learning_rate": 8.163265306122449e-05, + "loss": 1.0914, + "step": 928 + }, + { + "epoch": 18.8, + "learning_rate": 8.150510204081633e-05, + "loss": 1.1108, + "step": 929 + }, + { + "epoch": 18.82, + "learning_rate": 8.137755102040817e-05, + "loss": 1.1394, + "step": 930 + }, + { + "epoch": 18.84, + "learning_rate": 8.125000000000001e-05, + "loss": 1.029, + "step": 931 + }, + { + "epoch": 18.86, + "learning_rate": 
8.112244897959184e-05, + "loss": 1.0661, + "step": 932 + }, + { + "epoch": 18.88, + "learning_rate": 8.099489795918369e-05, + "loss": 1.0303, + "step": 933 + }, + { + "epoch": 18.9, + "learning_rate": 8.086734693877552e-05, + "loss": 1.1144, + "step": 934 + }, + { + "epoch": 18.92, + "learning_rate": 8.073979591836736e-05, + "loss": 1.1096, + "step": 935 + }, + { + "epoch": 18.94, + "learning_rate": 8.061224489795919e-05, + "loss": 1.123, + "step": 936 + }, + { + "epoch": 18.96, + "learning_rate": 8.048469387755102e-05, + "loss": 1.1002, + "step": 937 + }, + { + "epoch": 18.98, + "learning_rate": 8.035714285714287e-05, + "loss": 1.1016, + "step": 938 + }, + { + "epoch": 19.0, + "learning_rate": 8.02295918367347e-05, + "loss": 1.0847, + "step": 939 + }, + { + "epoch": 19.02, + "learning_rate": 8.010204081632653e-05, + "loss": 1.1029, + "step": 940 + }, + { + "epoch": 19.04, + "learning_rate": 7.997448979591837e-05, + "loss": 1.041, + "step": 941 + }, + { + "epoch": 19.06, + "learning_rate": 7.98469387755102e-05, + "loss": 1.01, + "step": 942 + }, + { + "epoch": 19.08, + "learning_rate": 7.971938775510205e-05, + "loss": 1.0197, + "step": 943 + }, + { + "epoch": 19.1, + "learning_rate": 7.959183673469388e-05, + "loss": 1.0543, + "step": 944 + }, + { + "epoch": 19.12, + "learning_rate": 7.946428571428571e-05, + "loss": 1.0369, + "step": 945 + }, + { + "epoch": 19.14, + "learning_rate": 7.933673469387755e-05, + "loss": 1.0154, + "step": 946 + }, + { + "epoch": 19.16, + "learning_rate": 7.920918367346939e-05, + "loss": 0.9546, + "step": 947 + }, + { + "epoch": 19.18, + "learning_rate": 7.908163265306123e-05, + "loss": 0.9982, + "step": 948 + }, + { + "epoch": 19.2, + "learning_rate": 7.895408163265306e-05, + "loss": 1.0748, + "step": 949 + }, + { + "epoch": 19.22, + "learning_rate": 7.882653061224489e-05, + "loss": 1.0562, + "step": 950 + }, + { + "epoch": 19.24, + "learning_rate": 7.869897959183674e-05, + "loss": 1.0352, + "step": 951 + }, + { + "epoch": 19.26, + "learning_rate": 7.857142857142858e-05, + "loss": 0.9976, + "step": 952 + }, + { + "epoch": 19.28, + "learning_rate": 7.844387755102041e-05, + "loss": 1.0221, + "step": 953 + }, + { + "epoch": 19.3, + "learning_rate": 7.831632653061226e-05, + "loss": 1.0119, + "step": 954 + }, + { + "epoch": 19.32, + "learning_rate": 7.818877551020409e-05, + "loss": 1.0657, + "step": 955 + }, + { + "epoch": 19.34, + "learning_rate": 7.806122448979593e-05, + "loss": 0.9591, + "step": 956 + }, + { + "epoch": 19.36, + "learning_rate": 7.793367346938776e-05, + "loss": 1.0101, + "step": 957 + }, + { + "epoch": 19.38, + "learning_rate": 7.780612244897959e-05, + "loss": 1.0453, + "step": 958 + }, + { + "epoch": 19.4, + "learning_rate": 7.767857142857144e-05, + "loss": 1.0461, + "step": 959 + }, + { + "epoch": 19.42, + "learning_rate": 7.755102040816327e-05, + "loss": 1.0959, + "step": 960 + }, + { + "epoch": 19.44, + "learning_rate": 7.742346938775511e-05, + "loss": 1.0608, + "step": 961 + }, + { + "epoch": 19.47, + "learning_rate": 7.729591836734694e-05, + "loss": 1.1177, + "step": 962 + }, + { + "epoch": 19.49, + "learning_rate": 7.716836734693877e-05, + "loss": 1.0354, + "step": 963 + }, + { + "epoch": 19.51, + "learning_rate": 7.704081632653062e-05, + "loss": 1.0507, + "step": 964 + }, + { + "epoch": 19.53, + "learning_rate": 7.691326530612245e-05, + "loss": 1.0313, + "step": 965 + }, + { + "epoch": 19.55, + "learning_rate": 7.67857142857143e-05, + "loss": 1.0569, + "step": 966 + }, + { + "epoch": 19.57, + "learning_rate": 7.665816326530612e-05, + "loss": 
1.0862, + "step": 967 + }, + { + "epoch": 19.59, + "learning_rate": 7.653061224489796e-05, + "loss": 1.0593, + "step": 968 + }, + { + "epoch": 19.61, + "learning_rate": 7.64030612244898e-05, + "loss": 1.0602, + "step": 969 + }, + { + "epoch": 19.63, + "learning_rate": 7.627551020408163e-05, + "loss": 1.0048, + "step": 970 + }, + { + "epoch": 19.65, + "learning_rate": 7.614795918367347e-05, + "loss": 1.0346, + "step": 971 + }, + { + "epoch": 19.67, + "learning_rate": 7.60204081632653e-05, + "loss": 1.0172, + "step": 972 + }, + { + "epoch": 19.69, + "learning_rate": 7.589285714285714e-05, + "loss": 1.02, + "step": 973 + }, + { + "epoch": 19.71, + "learning_rate": 7.576530612244898e-05, + "loss": 1.0028, + "step": 974 + }, + { + "epoch": 19.73, + "learning_rate": 7.563775510204083e-05, + "loss": 1.08, + "step": 975 + }, + { + "epoch": 19.75, + "learning_rate": 7.551020408163266e-05, + "loss": 1.0402, + "step": 976 + }, + { + "epoch": 19.77, + "learning_rate": 7.53826530612245e-05, + "loss": 1.0567, + "step": 977 + }, + { + "epoch": 19.79, + "learning_rate": 7.525510204081633e-05, + "loss": 1.0169, + "step": 978 + }, + { + "epoch": 19.81, + "learning_rate": 7.512755102040818e-05, + "loss": 0.9881, + "step": 979 + }, + { + "epoch": 19.83, + "learning_rate": 7.500000000000001e-05, + "loss": 1.0677, + "step": 980 + }, + { + "epoch": 19.85, + "learning_rate": 7.487244897959184e-05, + "loss": 1.1026, + "step": 981 + }, + { + "epoch": 19.87, + "learning_rate": 7.474489795918368e-05, + "loss": 1.0101, + "step": 982 + }, + { + "epoch": 19.89, + "learning_rate": 7.461734693877551e-05, + "loss": 1.069, + "step": 983 + }, + { + "epoch": 19.91, + "learning_rate": 7.448979591836736e-05, + "loss": 1.0493, + "step": 984 + }, + { + "epoch": 19.93, + "learning_rate": 7.436224489795919e-05, + "loss": 1.0858, + "step": 985 + }, + { + "epoch": 19.95, + "learning_rate": 7.423469387755102e-05, + "loss": 1.0734, + "step": 986 + }, + { + "epoch": 19.97, + "learning_rate": 7.410714285714286e-05, + "loss": 1.0203, + "step": 987 + }, + { + "epoch": 19.99, + "learning_rate": 7.39795918367347e-05, + "loss": 1.0285, + "step": 988 + }, + { + "epoch": 20.01, + "learning_rate": 7.385204081632653e-05, + "loss": 0.9446, + "step": 989 + }, + { + "epoch": 20.03, + "learning_rate": 7.372448979591837e-05, + "loss": 0.9915, + "step": 990 + }, + { + "epoch": 20.05, + "learning_rate": 7.35969387755102e-05, + "loss": 0.9882, + "step": 991 + }, + { + "epoch": 20.07, + "learning_rate": 7.346938775510205e-05, + "loss": 0.9338, + "step": 992 + }, + { + "epoch": 20.09, + "learning_rate": 7.334183673469388e-05, + "loss": 0.942, + "step": 993 + }, + { + "epoch": 20.11, + "learning_rate": 7.321428571428571e-05, + "loss": 0.9725, + "step": 994 + }, + { + "epoch": 20.13, + "learning_rate": 7.308673469387755e-05, + "loss": 1.027, + "step": 995 + }, + { + "epoch": 20.15, + "learning_rate": 7.29591836734694e-05, + "loss": 1.0081, + "step": 996 + }, + { + "epoch": 20.17, + "learning_rate": 7.283163265306123e-05, + "loss": 1.0117, + "step": 997 + }, + { + "epoch": 20.19, + "learning_rate": 7.270408163265307e-05, + "loss": 0.969, + "step": 998 + }, + { + "epoch": 20.21, + "learning_rate": 7.25765306122449e-05, + "loss": 1.0024, + "step": 999 + }, + { + "epoch": 20.23, + "learning_rate": 7.244897959183675e-05, + "loss": 0.994, + "step": 1000 + }, + { + "epoch": 20.25, + "learning_rate": 7.232142857142858e-05, + "loss": 1.0248, + "step": 1001 + }, + { + "epoch": 20.27, + "learning_rate": 7.219387755102042e-05, + "loss": 1.0493, + "step": 1002 + }, + { + 
"epoch": 20.29, + "learning_rate": 7.206632653061225e-05, + "loss": 1.0011, + "step": 1003 + }, + { + "epoch": 20.31, + "learning_rate": 7.193877551020408e-05, + "loss": 0.9874, + "step": 1004 + }, + { + "epoch": 20.34, + "learning_rate": 7.181122448979593e-05, + "loss": 1.0049, + "step": 1005 + }, + { + "epoch": 20.36, + "learning_rate": 7.168367346938776e-05, + "loss": 1.0314, + "step": 1006 + }, + { + "epoch": 20.38, + "learning_rate": 7.155612244897959e-05, + "loss": 0.9742, + "step": 1007 + }, + { + "epoch": 20.4, + "learning_rate": 7.142857142857143e-05, + "loss": 1.0621, + "step": 1008 + }, + { + "epoch": 20.42, + "learning_rate": 7.130102040816326e-05, + "loss": 0.9672, + "step": 1009 + }, + { + "epoch": 20.44, + "learning_rate": 7.117346938775511e-05, + "loss": 1.0018, + "step": 1010 + }, + { + "epoch": 20.46, + "learning_rate": 7.104591836734694e-05, + "loss": 1.0045, + "step": 1011 + }, + { + "epoch": 20.48, + "learning_rate": 7.091836734693877e-05, + "loss": 0.9675, + "step": 1012 + }, + { + "epoch": 20.5, + "learning_rate": 7.079081632653062e-05, + "loss": 0.976, + "step": 1013 + }, + { + "epoch": 20.52, + "learning_rate": 7.066326530612245e-05, + "loss": 1.0523, + "step": 1014 + }, + { + "epoch": 20.54, + "learning_rate": 7.053571428571429e-05, + "loss": 1.052, + "step": 1015 + }, + { + "epoch": 20.56, + "learning_rate": 7.040816326530612e-05, + "loss": 0.9903, + "step": 1016 + }, + { + "epoch": 20.58, + "learning_rate": 7.028061224489795e-05, + "loss": 1.0337, + "step": 1017 + }, + { + "epoch": 20.6, + "learning_rate": 7.01530612244898e-05, + "loss": 1.1122, + "step": 1018 + }, + { + "epoch": 20.62, + "learning_rate": 7.002551020408164e-05, + "loss": 1.0133, + "step": 1019 + }, + { + "epoch": 20.64, + "learning_rate": 6.989795918367347e-05, + "loss": 0.9588, + "step": 1020 + }, + { + "epoch": 20.66, + "learning_rate": 6.977040816326532e-05, + "loss": 0.9892, + "step": 1021 + }, + { + "epoch": 20.68, + "learning_rate": 6.964285714285715e-05, + "loss": 1.025, + "step": 1022 + }, + { + "epoch": 20.7, + "learning_rate": 6.951530612244899e-05, + "loss": 1.0196, + "step": 1023 + }, + { + "epoch": 20.72, + "learning_rate": 6.938775510204082e-05, + "loss": 1.0146, + "step": 1024 + }, + { + "epoch": 20.74, + "learning_rate": 6.926020408163265e-05, + "loss": 1.0656, + "step": 1025 + }, + { + "epoch": 20.76, + "learning_rate": 6.91326530612245e-05, + "loss": 0.9584, + "step": 1026 + }, + { + "epoch": 20.78, + "learning_rate": 6.900510204081633e-05, + "loss": 0.9877, + "step": 1027 + }, + { + "epoch": 20.8, + "learning_rate": 6.887755102040817e-05, + "loss": 1.0607, + "step": 1028 + }, + { + "epoch": 20.82, + "learning_rate": 6.875e-05, + "loss": 0.9969, + "step": 1029 + }, + { + "epoch": 20.84, + "learning_rate": 6.862244897959184e-05, + "loss": 0.9506, + "step": 1030 + }, + { + "epoch": 20.86, + "learning_rate": 6.849489795918368e-05, + "loss": 1.0576, + "step": 1031 + }, + { + "epoch": 20.88, + "learning_rate": 6.836734693877551e-05, + "loss": 1.0094, + "step": 1032 + }, + { + "epoch": 20.9, + "learning_rate": 6.823979591836735e-05, + "loss": 0.9872, + "step": 1033 + }, + { + "epoch": 20.92, + "learning_rate": 6.811224489795919e-05, + "loss": 1.0544, + "step": 1034 + }, + { + "epoch": 20.94, + "learning_rate": 6.798469387755102e-05, + "loss": 1.0194, + "step": 1035 + }, + { + "epoch": 20.96, + "learning_rate": 6.785714285714286e-05, + "loss": 1.0009, + "step": 1036 + }, + { + "epoch": 20.98, + "learning_rate": 6.772959183673469e-05, + "loss": 0.9727, + "step": 1037 + }, + { + 
"epoch": 21.0, + "learning_rate": 6.760204081632652e-05, + "loss": 0.9754, + "step": 1038 + }, + { + "epoch": 21.02, + "learning_rate": 6.747448979591837e-05, + "loss": 0.9953, + "step": 1039 + }, + { + "epoch": 21.04, + "learning_rate": 6.73469387755102e-05, + "loss": 0.9307, + "step": 1040 + }, + { + "epoch": 21.06, + "learning_rate": 6.721938775510204e-05, + "loss": 0.9151, + "step": 1041 + }, + { + "epoch": 21.08, + "learning_rate": 6.709183673469389e-05, + "loss": 0.9474, + "step": 1042 + }, + { + "epoch": 21.1, + "learning_rate": 6.696428571428572e-05, + "loss": 0.9697, + "step": 1043 + }, + { + "epoch": 21.12, + "learning_rate": 6.683673469387756e-05, + "loss": 0.9423, + "step": 1044 + }, + { + "epoch": 21.14, + "learning_rate": 6.670918367346939e-05, + "loss": 0.9797, + "step": 1045 + }, + { + "epoch": 21.16, + "learning_rate": 6.658163265306124e-05, + "loss": 0.919, + "step": 1046 + }, + { + "epoch": 21.18, + "learning_rate": 6.645408163265307e-05, + "loss": 0.9743, + "step": 1047 + }, + { + "epoch": 21.21, + "learning_rate": 6.63265306122449e-05, + "loss": 0.9575, + "step": 1048 + }, + { + "epoch": 21.23, + "learning_rate": 6.619897959183674e-05, + "loss": 0.9861, + "step": 1049 + }, + { + "epoch": 21.25, + "learning_rate": 6.607142857142857e-05, + "loss": 0.9103, + "step": 1050 + }, + { + "epoch": 21.27, + "learning_rate": 6.594387755102042e-05, + "loss": 0.993, + "step": 1051 + }, + { + "epoch": 21.29, + "learning_rate": 6.581632653061225e-05, + "loss": 0.9668, + "step": 1052 + }, + { + "epoch": 21.31, + "learning_rate": 6.568877551020408e-05, + "loss": 1.0008, + "step": 1053 + }, + { + "epoch": 21.33, + "learning_rate": 6.556122448979592e-05, + "loss": 0.9825, + "step": 1054 + }, + { + "epoch": 21.35, + "learning_rate": 6.543367346938776e-05, + "loss": 1.0174, + "step": 1055 + }, + { + "epoch": 21.37, + "learning_rate": 6.530612244897959e-05, + "loss": 0.9685, + "step": 1056 + }, + { + "epoch": 21.39, + "learning_rate": 6.517857142857143e-05, + "loss": 0.9265, + "step": 1057 + }, + { + "epoch": 21.41, + "learning_rate": 6.505102040816326e-05, + "loss": 0.9495, + "step": 1058 + }, + { + "epoch": 21.43, + "learning_rate": 6.49234693877551e-05, + "loss": 0.9541, + "step": 1059 + }, + { + "epoch": 21.45, + "learning_rate": 6.479591836734694e-05, + "loss": 0.9299, + "step": 1060 + }, + { + "epoch": 21.47, + "learning_rate": 6.466836734693877e-05, + "loss": 0.9625, + "step": 1061 + }, + { + "epoch": 21.49, + "learning_rate": 6.454081632653061e-05, + "loss": 1.0054, + "step": 1062 + }, + { + "epoch": 21.51, + "learning_rate": 6.441326530612244e-05, + "loss": 0.9893, + "step": 1063 + }, + { + "epoch": 21.53, + "learning_rate": 6.428571428571429e-05, + "loss": 0.9906, + "step": 1064 + }, + { + "epoch": 21.55, + "learning_rate": 6.415816326530613e-05, + "loss": 0.9487, + "step": 1065 + }, + { + "epoch": 21.57, + "learning_rate": 6.403061224489796e-05, + "loss": 0.9728, + "step": 1066 + }, + { + "epoch": 21.59, + "learning_rate": 6.390306122448981e-05, + "loss": 0.9883, + "step": 1067 + }, + { + "epoch": 21.61, + "learning_rate": 6.377551020408164e-05, + "loss": 1.053, + "step": 1068 + }, + { + "epoch": 21.63, + "learning_rate": 6.364795918367348e-05, + "loss": 1.012, + "step": 1069 + }, + { + "epoch": 21.65, + "learning_rate": 6.352040816326531e-05, + "loss": 0.962, + "step": 1070 + }, + { + "epoch": 21.67, + "learning_rate": 6.339285714285714e-05, + "loss": 0.9955, + "step": 1071 + }, + { + "epoch": 21.69, + "learning_rate": 6.326530612244899e-05, + "loss": 0.9908, + "step": 1072 + }, 
+ { + "epoch": 21.71, + "learning_rate": 6.313775510204082e-05, + "loss": 1.0327, + "step": 1073 + }, + { + "epoch": 21.73, + "learning_rate": 6.301020408163265e-05, + "loss": 0.9255, + "step": 1074 + }, + { + "epoch": 21.75, + "learning_rate": 6.28826530612245e-05, + "loss": 0.9268, + "step": 1075 + }, + { + "epoch": 21.77, + "learning_rate": 6.275510204081633e-05, + "loss": 0.9204, + "step": 1076 + }, + { + "epoch": 21.79, + "learning_rate": 6.262755102040817e-05, + "loss": 0.9838, + "step": 1077 + }, + { + "epoch": 21.81, + "learning_rate": 6.25e-05, + "loss": 0.954, + "step": 1078 + }, + { + "epoch": 21.83, + "learning_rate": 6.237244897959183e-05, + "loss": 1.0102, + "step": 1079 + }, + { + "epoch": 21.85, + "learning_rate": 6.224489795918368e-05, + "loss": 0.916, + "step": 1080 + }, + { + "epoch": 21.87, + "learning_rate": 6.211734693877551e-05, + "loss": 0.9939, + "step": 1081 + }, + { + "epoch": 21.89, + "learning_rate": 6.198979591836735e-05, + "loss": 0.9675, + "step": 1082 + }, + { + "epoch": 21.91, + "learning_rate": 6.186224489795918e-05, + "loss": 0.9666, + "step": 1083 + }, + { + "epoch": 21.93, + "learning_rate": 6.173469387755101e-05, + "loss": 0.9919, + "step": 1084 + }, + { + "epoch": 21.95, + "learning_rate": 6.160714285714286e-05, + "loss": 1.0106, + "step": 1085 + }, + { + "epoch": 21.97, + "learning_rate": 6.14795918367347e-05, + "loss": 0.9982, + "step": 1086 + }, + { + "epoch": 21.99, + "learning_rate": 6.135204081632653e-05, + "loss": 1.0137, + "step": 1087 + }, + { + "epoch": 22.01, + "learning_rate": 6.122448979591838e-05, + "loss": 0.9331, + "step": 1088 + }, + { + "epoch": 22.03, + "learning_rate": 6.109693877551021e-05, + "loss": 0.8834, + "step": 1089 + }, + { + "epoch": 22.06, + "learning_rate": 6.0969387755102046e-05, + "loss": 0.9757, + "step": 1090 + }, + { + "epoch": 22.08, + "learning_rate": 6.084183673469388e-05, + "loss": 0.9038, + "step": 1091 + }, + { + "epoch": 22.1, + "learning_rate": 6.0714285714285715e-05, + "loss": 0.9097, + "step": 1092 + }, + { + "epoch": 22.12, + "learning_rate": 6.058673469387756e-05, + "loss": 0.8972, + "step": 1093 + }, + { + "epoch": 22.14, + "learning_rate": 6.045918367346939e-05, + "loss": 0.8825, + "step": 1094 + }, + { + "epoch": 22.16, + "learning_rate": 6.0331632653061234e-05, + "loss": 0.9814, + "step": 1095 + }, + { + "epoch": 22.18, + "learning_rate": 6.0204081632653065e-05, + "loss": 0.9874, + "step": 1096 + }, + { + "epoch": 22.2, + "learning_rate": 6.0076530612244896e-05, + "loss": 0.912, + "step": 1097 + }, + { + "epoch": 22.22, + "learning_rate": 5.994897959183674e-05, + "loss": 0.9206, + "step": 1098 + }, + { + "epoch": 22.24, + "learning_rate": 5.982142857142857e-05, + "loss": 0.9497, + "step": 1099 + }, + { + "epoch": 22.26, + "learning_rate": 5.9693877551020416e-05, + "loss": 0.9269, + "step": 1100 + }, + { + "epoch": 22.28, + "learning_rate": 5.956632653061225e-05, + "loss": 0.9452, + "step": 1101 + }, + { + "epoch": 22.3, + "learning_rate": 5.9438775510204084e-05, + "loss": 0.9548, + "step": 1102 + }, + { + "epoch": 22.32, + "learning_rate": 5.931122448979592e-05, + "loss": 0.9689, + "step": 1103 + }, + { + "epoch": 22.34, + "learning_rate": 5.918367346938776e-05, + "loss": 0.9455, + "step": 1104 + }, + { + "epoch": 22.36, + "learning_rate": 5.905612244897959e-05, + "loss": 0.9409, + "step": 1105 + }, + { + "epoch": 22.38, + "learning_rate": 5.8928571428571435e-05, + "loss": 0.9093, + "step": 1106 + }, + { + "epoch": 22.4, + "learning_rate": 5.8801020408163266e-05, + "loss": 0.921, + "step": 1107 + 
}, + { + "epoch": 22.42, + "learning_rate": 5.867346938775511e-05, + "loss": 0.9368, + "step": 1108 + }, + { + "epoch": 22.44, + "learning_rate": 5.854591836734694e-05, + "loss": 0.907, + "step": 1109 + }, + { + "epoch": 22.46, + "learning_rate": 5.841836734693877e-05, + "loss": 0.9126, + "step": 1110 + }, + { + "epoch": 22.48, + "learning_rate": 5.8290816326530616e-05, + "loss": 0.9161, + "step": 1111 + }, + { + "epoch": 22.5, + "learning_rate": 5.816326530612245e-05, + "loss": 0.9542, + "step": 1112 + }, + { + "epoch": 22.52, + "learning_rate": 5.803571428571429e-05, + "loss": 0.9775, + "step": 1113 + }, + { + "epoch": 22.54, + "learning_rate": 5.790816326530612e-05, + "loss": 1.0006, + "step": 1114 + }, + { + "epoch": 22.56, + "learning_rate": 5.778061224489796e-05, + "loss": 0.8965, + "step": 1115 + }, + { + "epoch": 22.58, + "learning_rate": 5.7653061224489805e-05, + "loss": 0.944, + "step": 1116 + }, + { + "epoch": 22.6, + "learning_rate": 5.7525510204081636e-05, + "loss": 0.9162, + "step": 1117 + }, + { + "epoch": 22.62, + "learning_rate": 5.739795918367348e-05, + "loss": 0.9325, + "step": 1118 + }, + { + "epoch": 22.64, + "learning_rate": 5.727040816326531e-05, + "loss": 0.8998, + "step": 1119 + }, + { + "epoch": 22.66, + "learning_rate": 5.714285714285714e-05, + "loss": 0.9362, + "step": 1120 + }, + { + "epoch": 22.68, + "learning_rate": 5.7015306122448986e-05, + "loss": 0.9969, + "step": 1121 + }, + { + "epoch": 22.7, + "learning_rate": 5.688775510204082e-05, + "loss": 0.9104, + "step": 1122 + }, + { + "epoch": 22.72, + "learning_rate": 5.676020408163265e-05, + "loss": 0.9746, + "step": 1123 + }, + { + "epoch": 22.74, + "learning_rate": 5.663265306122449e-05, + "loss": 0.9821, + "step": 1124 + }, + { + "epoch": 22.76, + "learning_rate": 5.650510204081633e-05, + "loss": 0.9526, + "step": 1125 + }, + { + "epoch": 22.78, + "learning_rate": 5.637755102040817e-05, + "loss": 0.871, + "step": 1126 + }, + { + "epoch": 22.8, + "learning_rate": 5.6250000000000005e-05, + "loss": 0.9534, + "step": 1127 + }, + { + "epoch": 22.82, + "learning_rate": 5.6122448979591836e-05, + "loss": 0.9616, + "step": 1128 + }, + { + "epoch": 22.84, + "learning_rate": 5.599489795918368e-05, + "loss": 0.9627, + "step": 1129 + }, + { + "epoch": 22.86, + "learning_rate": 5.586734693877551e-05, + "loss": 0.9704, + "step": 1130 + }, + { + "epoch": 22.88, + "learning_rate": 5.5739795918367356e-05, + "loss": 0.9506, + "step": 1131 + }, + { + "epoch": 22.9, + "learning_rate": 5.561224489795919e-05, + "loss": 0.9553, + "step": 1132 + }, + { + "epoch": 22.93, + "learning_rate": 5.548469387755102e-05, + "loss": 0.9294, + "step": 1133 + }, + { + "epoch": 22.95, + "learning_rate": 5.535714285714286e-05, + "loss": 0.8979, + "step": 1134 + }, + { + "epoch": 22.97, + "learning_rate": 5.522959183673469e-05, + "loss": 1.0004, + "step": 1135 + }, + { + "epoch": 22.99, + "learning_rate": 5.510204081632653e-05, + "loss": 0.9821, + "step": 1136 + }, + { + "epoch": 23.01, + "learning_rate": 5.497448979591837e-05, + "loss": 0.9607, + "step": 1137 + }, + { + "epoch": 23.03, + "learning_rate": 5.4846938775510206e-05, + "loss": 0.9757, + "step": 1138 + }, + { + "epoch": 23.05, + "learning_rate": 5.471938775510205e-05, + "loss": 0.9096, + "step": 1139 + }, + { + "epoch": 23.07, + "learning_rate": 5.459183673469388e-05, + "loss": 0.9144, + "step": 1140 + }, + { + "epoch": 23.09, + "learning_rate": 5.446428571428571e-05, + "loss": 0.8667, + "step": 1141 + }, + { + "epoch": 23.11, + "learning_rate": 5.4336734693877556e-05, + "loss": 0.8993, 
+ "step": 1142 + }, + { + "epoch": 23.13, + "learning_rate": 5.420918367346939e-05, + "loss": 0.8964, + "step": 1143 + }, + { + "epoch": 23.15, + "learning_rate": 5.408163265306123e-05, + "loss": 0.9173, + "step": 1144 + }, + { + "epoch": 23.17, + "learning_rate": 5.395408163265306e-05, + "loss": 0.9019, + "step": 1145 + }, + { + "epoch": 23.19, + "learning_rate": 5.382653061224489e-05, + "loss": 0.9303, + "step": 1146 + }, + { + "epoch": 23.21, + "learning_rate": 5.369897959183674e-05, + "loss": 0.9268, + "step": 1147 + }, + { + "epoch": 23.23, + "learning_rate": 5.3571428571428575e-05, + "loss": 0.8803, + "step": 1148 + }, + { + "epoch": 23.25, + "learning_rate": 5.344387755102041e-05, + "loss": 0.9197, + "step": 1149 + }, + { + "epoch": 23.27, + "learning_rate": 5.331632653061225e-05, + "loss": 0.9204, + "step": 1150 + }, + { + "epoch": 23.29, + "learning_rate": 5.318877551020408e-05, + "loss": 0.8802, + "step": 1151 + }, + { + "epoch": 23.31, + "learning_rate": 5.3061224489795926e-05, + "loss": 0.9044, + "step": 1152 + }, + { + "epoch": 23.33, + "learning_rate": 5.293367346938776e-05, + "loss": 0.8893, + "step": 1153 + }, + { + "epoch": 23.35, + "learning_rate": 5.280612244897959e-05, + "loss": 0.8928, + "step": 1154 + }, + { + "epoch": 23.37, + "learning_rate": 5.267857142857143e-05, + "loss": 0.9353, + "step": 1155 + }, + { + "epoch": 23.39, + "learning_rate": 5.255102040816326e-05, + "loss": 0.9345, + "step": 1156 + }, + { + "epoch": 23.41, + "learning_rate": 5.242346938775511e-05, + "loss": 0.9372, + "step": 1157 + }, + { + "epoch": 23.43, + "learning_rate": 5.229591836734694e-05, + "loss": 0.9234, + "step": 1158 + }, + { + "epoch": 23.45, + "learning_rate": 5.2168367346938776e-05, + "loss": 0.9177, + "step": 1159 + }, + { + "epoch": 23.47, + "learning_rate": 5.2040816326530614e-05, + "loss": 0.8757, + "step": 1160 + }, + { + "epoch": 23.49, + "learning_rate": 5.191326530612245e-05, + "loss": 0.9048, + "step": 1161 + }, + { + "epoch": 23.51, + "learning_rate": 5.1785714285714296e-05, + "loss": 0.9248, + "step": 1162 + }, + { + "epoch": 23.53, + "learning_rate": 5.1658163265306127e-05, + "loss": 0.9379, + "step": 1163 + }, + { + "epoch": 23.55, + "learning_rate": 5.153061224489796e-05, + "loss": 0.8596, + "step": 1164 + }, + { + "epoch": 23.57, + "learning_rate": 5.14030612244898e-05, + "loss": 0.9751, + "step": 1165 + }, + { + "epoch": 23.59, + "learning_rate": 5.127551020408163e-05, + "loss": 0.8842, + "step": 1166 + }, + { + "epoch": 23.61, + "learning_rate": 5.114795918367348e-05, + "loss": 0.8765, + "step": 1167 + }, + { + "epoch": 23.63, + "learning_rate": 5.102040816326531e-05, + "loss": 0.8942, + "step": 1168 + }, + { + "epoch": 23.65, + "learning_rate": 5.089285714285714e-05, + "loss": 0.938, + "step": 1169 + }, + { + "epoch": 23.67, + "learning_rate": 5.076530612244898e-05, + "loss": 0.8993, + "step": 1170 + }, + { + "epoch": 23.69, + "learning_rate": 5.063775510204082e-05, + "loss": 0.9362, + "step": 1171 + }, + { + "epoch": 23.71, + "learning_rate": 5.051020408163265e-05, + "loss": 0.9249, + "step": 1172 + }, + { + "epoch": 23.73, + "learning_rate": 5.0382653061224496e-05, + "loss": 0.9055, + "step": 1173 + }, + { + "epoch": 23.75, + "learning_rate": 5.025510204081633e-05, + "loss": 0.8967, + "step": 1174 + }, + { + "epoch": 23.77, + "learning_rate": 5.012755102040817e-05, + "loss": 0.8795, + "step": 1175 + }, + { + "epoch": 23.8, + "learning_rate": 5e-05, + "loss": 0.9452, + "step": 1176 + }, + { + "epoch": 23.82, + "learning_rate": 4.987244897959184e-05, + "loss": 
0.926, + "step": 1177 + }, + { + "epoch": 23.84, + "learning_rate": 4.974489795918368e-05, + "loss": 0.8948, + "step": 1178 + }, + { + "epoch": 23.86, + "learning_rate": 4.961734693877551e-05, + "loss": 0.8926, + "step": 1179 + }, + { + "epoch": 23.88, + "learning_rate": 4.9489795918367346e-05, + "loss": 0.8949, + "step": 1180 + }, + { + "epoch": 23.9, + "learning_rate": 4.9362244897959184e-05, + "loss": 0.9648, + "step": 1181 + }, + { + "epoch": 23.92, + "learning_rate": 4.923469387755102e-05, + "loss": 0.9599, + "step": 1182 + }, + { + "epoch": 23.94, + "learning_rate": 4.910714285714286e-05, + "loss": 0.9603, + "step": 1183 + }, + { + "epoch": 23.96, + "learning_rate": 4.89795918367347e-05, + "loss": 0.9302, + "step": 1184 + }, + { + "epoch": 23.98, + "learning_rate": 4.8852040816326534e-05, + "loss": 0.9261, + "step": 1185 + }, + { + "epoch": 24.0, + "learning_rate": 4.872448979591837e-05, + "loss": 0.9257, + "step": 1186 + }, + { + "epoch": 24.02, + "learning_rate": 4.859693877551021e-05, + "loss": 0.8725, + "step": 1187 + }, + { + "epoch": 24.04, + "learning_rate": 4.846938775510204e-05, + "loss": 0.8486, + "step": 1188 + }, + { + "epoch": 24.06, + "learning_rate": 4.834183673469388e-05, + "loss": 0.8457, + "step": 1189 + }, + { + "epoch": 24.08, + "learning_rate": 4.8214285714285716e-05, + "loss": 0.7848, + "step": 1190 + }, + { + "epoch": 24.1, + "learning_rate": 4.8086734693877554e-05, + "loss": 0.8885, + "step": 1191 + }, + { + "epoch": 24.12, + "learning_rate": 4.795918367346939e-05, + "loss": 0.9099, + "step": 1192 + }, + { + "epoch": 24.14, + "learning_rate": 4.783163265306123e-05, + "loss": 0.9147, + "step": 1193 + }, + { + "epoch": 24.16, + "learning_rate": 4.7704081632653066e-05, + "loss": 0.8781, + "step": 1194 + }, + { + "epoch": 24.18, + "learning_rate": 4.7576530612244904e-05, + "loss": 0.8847, + "step": 1195 + }, + { + "epoch": 24.2, + "learning_rate": 4.744897959183674e-05, + "loss": 0.9041, + "step": 1196 + }, + { + "epoch": 24.22, + "learning_rate": 4.732142857142857e-05, + "loss": 0.8639, + "step": 1197 + }, + { + "epoch": 24.24, + "learning_rate": 4.719387755102041e-05, + "loss": 0.8831, + "step": 1198 + }, + { + "epoch": 24.26, + "learning_rate": 4.706632653061225e-05, + "loss": 0.9063, + "step": 1199 + }, + { + "epoch": 24.28, + "learning_rate": 4.6938775510204086e-05, + "loss": 0.8753, + "step": 1200 + }, + { + "epoch": 24.3, + "learning_rate": 4.6811224489795916e-05, + "loss": 0.8977, + "step": 1201 + }, + { + "epoch": 24.32, + "learning_rate": 4.6683673469387754e-05, + "loss": 0.8729, + "step": 1202 + }, + { + "epoch": 24.34, + "learning_rate": 4.655612244897959e-05, + "loss": 0.898, + "step": 1203 + }, + { + "epoch": 24.36, + "learning_rate": 4.642857142857143e-05, + "loss": 0.8521, + "step": 1204 + }, + { + "epoch": 24.38, + "learning_rate": 4.630102040816327e-05, + "loss": 0.8563, + "step": 1205 + }, + { + "epoch": 24.4, + "learning_rate": 4.6173469387755105e-05, + "loss": 0.8462, + "step": 1206 + }, + { + "epoch": 24.42, + "learning_rate": 4.604591836734694e-05, + "loss": 0.8929, + "step": 1207 + }, + { + "epoch": 24.44, + "learning_rate": 4.591836734693878e-05, + "loss": 0.9154, + "step": 1208 + }, + { + "epoch": 24.46, + "learning_rate": 4.579081632653062e-05, + "loss": 0.811, + "step": 1209 + }, + { + "epoch": 24.48, + "learning_rate": 4.566326530612245e-05, + "loss": 0.8667, + "step": 1210 + }, + { + "epoch": 24.5, + "learning_rate": 4.5535714285714286e-05, + "loss": 0.9179, + "step": 1211 + }, + { + "epoch": 24.52, + "learning_rate": 
4.5408163265306124e-05, + "loss": 0.8757, + "step": 1212 + }, + { + "epoch": 24.54, + "learning_rate": 4.528061224489796e-05, + "loss": 0.8519, + "step": 1213 + }, + { + "epoch": 24.56, + "learning_rate": 4.515306122448979e-05, + "loss": 0.9335, + "step": 1214 + }, + { + "epoch": 24.58, + "learning_rate": 4.502551020408164e-05, + "loss": 0.8785, + "step": 1215 + }, + { + "epoch": 24.6, + "learning_rate": 4.4897959183673474e-05, + "loss": 0.9022, + "step": 1216 + }, + { + "epoch": 24.62, + "learning_rate": 4.477040816326531e-05, + "loss": 0.9532, + "step": 1217 + }, + { + "epoch": 24.64, + "learning_rate": 4.464285714285715e-05, + "loss": 0.8956, + "step": 1218 + }, + { + "epoch": 24.67, + "learning_rate": 4.451530612244898e-05, + "loss": 0.8739, + "step": 1219 + }, + { + "epoch": 24.69, + "learning_rate": 4.438775510204082e-05, + "loss": 0.9312, + "step": 1220 + }, + { + "epoch": 24.71, + "learning_rate": 4.4260204081632656e-05, + "loss": 0.8536, + "step": 1221 + }, + { + "epoch": 24.73, + "learning_rate": 4.4132653061224493e-05, + "loss": 0.8984, + "step": 1222 + }, + { + "epoch": 24.75, + "learning_rate": 4.4005102040816324e-05, + "loss": 0.8949, + "step": 1223 + }, + { + "epoch": 24.77, + "learning_rate": 4.387755102040816e-05, + "loss": 0.9389, + "step": 1224 + }, + { + "epoch": 24.79, + "learning_rate": 4.375e-05, + "loss": 0.8703, + "step": 1225 + }, + { + "epoch": 24.81, + "learning_rate": 4.362244897959184e-05, + "loss": 0.9407, + "step": 1226 + }, + { + "epoch": 24.83, + "learning_rate": 4.3494897959183675e-05, + "loss": 0.9016, + "step": 1227 + }, + { + "epoch": 24.85, + "learning_rate": 4.336734693877551e-05, + "loss": 0.9025, + "step": 1228 + }, + { + "epoch": 24.87, + "learning_rate": 4.323979591836735e-05, + "loss": 0.9415, + "step": 1229 + }, + { + "epoch": 24.89, + "learning_rate": 4.311224489795919e-05, + "loss": 0.9146, + "step": 1230 + }, + { + "epoch": 24.91, + "learning_rate": 4.2984693877551025e-05, + "loss": 0.9144, + "step": 1231 + }, + { + "epoch": 24.93, + "learning_rate": 4.2857142857142856e-05, + "loss": 0.9138, + "step": 1232 + }, + { + "epoch": 24.95, + "learning_rate": 4.2729591836734694e-05, + "loss": 0.9372, + "step": 1233 + }, + { + "epoch": 24.97, + "learning_rate": 4.260204081632653e-05, + "loss": 0.8701, + "step": 1234 + }, + { + "epoch": 24.99, + "learning_rate": 4.247448979591837e-05, + "loss": 0.9278, + "step": 1235 + }, + { + "epoch": 25.01, + "learning_rate": 4.234693877551021e-05, + "loss": 0.9157, + "step": 1236 + }, + { + "epoch": 25.03, + "learning_rate": 4.2219387755102045e-05, + "loss": 0.8852, + "step": 1237 + }, + { + "epoch": 25.05, + "learning_rate": 4.209183673469388e-05, + "loss": 0.855, + "step": 1238 + }, + { + "epoch": 25.07, + "learning_rate": 4.196428571428572e-05, + "loss": 0.8547, + "step": 1239 + }, + { + "epoch": 25.09, + "learning_rate": 4.183673469387756e-05, + "loss": 0.8691, + "step": 1240 + }, + { + "epoch": 25.11, + "learning_rate": 4.170918367346939e-05, + "loss": 0.9101, + "step": 1241 + }, + { + "epoch": 25.13, + "learning_rate": 4.1581632653061226e-05, + "loss": 0.8408, + "step": 1242 + }, + { + "epoch": 25.15, + "learning_rate": 4.1454081632653064e-05, + "loss": 0.9008, + "step": 1243 + }, + { + "epoch": 25.17, + "learning_rate": 4.13265306122449e-05, + "loss": 0.859, + "step": 1244 + }, + { + "epoch": 25.19, + "learning_rate": 4.119897959183674e-05, + "loss": 0.8525, + "step": 1245 + }, + { + "epoch": 25.21, + "learning_rate": 4.107142857142857e-05, + "loss": 0.8682, + "step": 1246 + }, + { + "epoch": 25.23, + 
"learning_rate": 4.094387755102041e-05, + "loss": 0.8426, + "step": 1247 + }, + { + "epoch": 25.25, + "learning_rate": 4.0816326530612245e-05, + "loss": 0.8948, + "step": 1248 + }, + { + "epoch": 25.27, + "learning_rate": 4.068877551020408e-05, + "loss": 0.8333, + "step": 1249 + }, + { + "epoch": 25.29, + "learning_rate": 4.056122448979592e-05, + "loss": 0.87, + "step": 1250 + }, + { + "epoch": 25.31, + "learning_rate": 4.043367346938776e-05, + "loss": 0.8215, + "step": 1251 + }, + { + "epoch": 25.33, + "learning_rate": 4.0306122448979596e-05, + "loss": 0.862, + "step": 1252 + }, + { + "epoch": 25.35, + "learning_rate": 4.017857142857143e-05, + "loss": 0.8607, + "step": 1253 + }, + { + "epoch": 25.37, + "learning_rate": 4.0051020408163264e-05, + "loss": 0.8218, + "step": 1254 + }, + { + "epoch": 25.39, + "learning_rate": 3.99234693877551e-05, + "loss": 0.806, + "step": 1255 + }, + { + "epoch": 25.41, + "learning_rate": 3.979591836734694e-05, + "loss": 0.8929, + "step": 1256 + }, + { + "epoch": 25.43, + "learning_rate": 3.966836734693878e-05, + "loss": 0.8551, + "step": 1257 + }, + { + "epoch": 25.45, + "learning_rate": 3.9540816326530615e-05, + "loss": 0.8408, + "step": 1258 + }, + { + "epoch": 25.47, + "learning_rate": 3.9413265306122446e-05, + "loss": 0.8819, + "step": 1259 + }, + { + "epoch": 25.49, + "learning_rate": 3.928571428571429e-05, + "loss": 0.8757, + "step": 1260 + }, + { + "epoch": 25.52, + "learning_rate": 3.915816326530613e-05, + "loss": 0.8778, + "step": 1261 + }, + { + "epoch": 25.54, + "learning_rate": 3.9030612244897965e-05, + "loss": 0.8524, + "step": 1262 + }, + { + "epoch": 25.56, + "learning_rate": 3.8903061224489796e-05, + "loss": 0.846, + "step": 1263 + }, + { + "epoch": 25.58, + "learning_rate": 3.8775510204081634e-05, + "loss": 0.8757, + "step": 1264 + }, + { + "epoch": 25.6, + "learning_rate": 3.864795918367347e-05, + "loss": 0.9084, + "step": 1265 + }, + { + "epoch": 25.62, + "learning_rate": 3.852040816326531e-05, + "loss": 0.8826, + "step": 1266 + }, + { + "epoch": 25.64, + "learning_rate": 3.839285714285715e-05, + "loss": 0.8619, + "step": 1267 + }, + { + "epoch": 25.66, + "learning_rate": 3.826530612244898e-05, + "loss": 0.8942, + "step": 1268 + }, + { + "epoch": 25.68, + "learning_rate": 3.8137755102040815e-05, + "loss": 0.8342, + "step": 1269 + }, + { + "epoch": 25.7, + "learning_rate": 3.801020408163265e-05, + "loss": 0.8512, + "step": 1270 + }, + { + "epoch": 25.72, + "learning_rate": 3.788265306122449e-05, + "loss": 0.8393, + "step": 1271 + }, + { + "epoch": 25.74, + "learning_rate": 3.775510204081633e-05, + "loss": 0.8508, + "step": 1272 + }, + { + "epoch": 25.76, + "learning_rate": 3.7627551020408166e-05, + "loss": 0.9094, + "step": 1273 + }, + { + "epoch": 25.78, + "learning_rate": 3.7500000000000003e-05, + "loss": 0.9175, + "step": 1274 + }, + { + "epoch": 25.8, + "learning_rate": 3.737244897959184e-05, + "loss": 0.9179, + "step": 1275 + }, + { + "epoch": 25.82, + "learning_rate": 3.724489795918368e-05, + "loss": 0.869, + "step": 1276 + }, + { + "epoch": 25.84, + "learning_rate": 3.711734693877551e-05, + "loss": 0.8568, + "step": 1277 + }, + { + "epoch": 25.86, + "learning_rate": 3.698979591836735e-05, + "loss": 0.9104, + "step": 1278 + }, + { + "epoch": 25.88, + "learning_rate": 3.6862244897959185e-05, + "loss": 0.8912, + "step": 1279 + }, + { + "epoch": 25.9, + "learning_rate": 3.673469387755102e-05, + "loss": 0.878, + "step": 1280 + }, + { + "epoch": 25.92, + "learning_rate": 3.6607142857142853e-05, + "loss": 0.8711, + "step": 1281 + }, + { + 
"epoch": 25.94, + "learning_rate": 3.64795918367347e-05, + "loss": 0.9404, + "step": 1282 + }, + { + "epoch": 25.96, + "learning_rate": 3.6352040816326536e-05, + "loss": 0.9302, + "step": 1283 + }, + { + "epoch": 25.98, + "learning_rate": 3.622448979591837e-05, + "loss": 0.8907, + "step": 1284 + }, + { + "epoch": 26.0, + "learning_rate": 3.609693877551021e-05, + "loss": 0.8473, + "step": 1285 + }, + { + "epoch": 26.02, + "learning_rate": 3.596938775510204e-05, + "loss": 0.8482, + "step": 1286 + }, + { + "epoch": 26.04, + "learning_rate": 3.584183673469388e-05, + "loss": 0.8683, + "step": 1287 + }, + { + "epoch": 26.06, + "learning_rate": 3.571428571428572e-05, + "loss": 0.8443, + "step": 1288 + }, + { + "epoch": 26.08, + "learning_rate": 3.5586734693877555e-05, + "loss": 0.8462, + "step": 1289 + }, + { + "epoch": 26.1, + "learning_rate": 3.5459183673469385e-05, + "loss": 0.8204, + "step": 1290 + }, + { + "epoch": 26.12, + "learning_rate": 3.533163265306122e-05, + "loss": 0.8632, + "step": 1291 + }, + { + "epoch": 26.14, + "learning_rate": 3.520408163265306e-05, + "loss": 0.8883, + "step": 1292 + }, + { + "epoch": 26.16, + "learning_rate": 3.50765306122449e-05, + "loss": 0.8369, + "step": 1293 + }, + { + "epoch": 26.18, + "learning_rate": 3.4948979591836736e-05, + "loss": 0.8369, + "step": 1294 + }, + { + "epoch": 26.2, + "learning_rate": 3.4821428571428574e-05, + "loss": 0.8506, + "step": 1295 + }, + { + "epoch": 26.22, + "learning_rate": 3.469387755102041e-05, + "loss": 0.839, + "step": 1296 + }, + { + "epoch": 26.24, + "learning_rate": 3.456632653061225e-05, + "loss": 0.8421, + "step": 1297 + }, + { + "epoch": 26.26, + "learning_rate": 3.443877551020409e-05, + "loss": 0.8292, + "step": 1298 + }, + { + "epoch": 26.28, + "learning_rate": 3.431122448979592e-05, + "loss": 0.8412, + "step": 1299 + }, + { + "epoch": 26.3, + "learning_rate": 3.4183673469387755e-05, + "loss": 0.8305, + "step": 1300 + }, + { + "epoch": 26.32, + "learning_rate": 3.405612244897959e-05, + "loss": 0.8181, + "step": 1301 + }, + { + "epoch": 26.34, + "learning_rate": 3.392857142857143e-05, + "loss": 0.8588, + "step": 1302 + }, + { + "epoch": 26.36, + "learning_rate": 3.380102040816326e-05, + "loss": 0.8528, + "step": 1303 + }, + { + "epoch": 26.39, + "learning_rate": 3.36734693877551e-05, + "loss": 0.9055, + "step": 1304 + }, + { + "epoch": 26.41, + "learning_rate": 3.354591836734694e-05, + "loss": 0.8762, + "step": 1305 + }, + { + "epoch": 26.43, + "learning_rate": 3.341836734693878e-05, + "loss": 0.8507, + "step": 1306 + }, + { + "epoch": 26.45, + "learning_rate": 3.329081632653062e-05, + "loss": 0.8541, + "step": 1307 + }, + { + "epoch": 26.47, + "learning_rate": 3.316326530612245e-05, + "loss": 0.8205, + "step": 1308 + }, + { + "epoch": 26.49, + "learning_rate": 3.303571428571429e-05, + "loss": 0.8133, + "step": 1309 + }, + { + "epoch": 26.51, + "learning_rate": 3.2908163265306125e-05, + "loss": 0.8854, + "step": 1310 + }, + { + "epoch": 26.53, + "learning_rate": 3.278061224489796e-05, + "loss": 0.9397, + "step": 1311 + }, + { + "epoch": 26.55, + "learning_rate": 3.265306122448979e-05, + "loss": 0.854, + "step": 1312 + }, + { + "epoch": 26.57, + "learning_rate": 3.252551020408163e-05, + "loss": 0.8617, + "step": 1313 + }, + { + "epoch": 26.59, + "learning_rate": 3.239795918367347e-05, + "loss": 0.8739, + "step": 1314 + }, + { + "epoch": 26.61, + "learning_rate": 3.2270408163265306e-05, + "loss": 0.8139, + "step": 1315 + }, + { + "epoch": 26.63, + "learning_rate": 3.2142857142857144e-05, + "loss": 0.7575, + "step": 
1316 + }, + { + "epoch": 26.65, + "learning_rate": 3.201530612244898e-05, + "loss": 0.846, + "step": 1317 + }, + { + "epoch": 26.67, + "learning_rate": 3.188775510204082e-05, + "loss": 0.8797, + "step": 1318 + }, + { + "epoch": 26.69, + "learning_rate": 3.176020408163266e-05, + "loss": 0.8525, + "step": 1319 + }, + { + "epoch": 26.71, + "learning_rate": 3.1632653061224494e-05, + "loss": 0.8276, + "step": 1320 + }, + { + "epoch": 26.73, + "learning_rate": 3.1505102040816325e-05, + "loss": 0.8734, + "step": 1321 + }, + { + "epoch": 26.75, + "learning_rate": 3.137755102040816e-05, + "loss": 0.8663, + "step": 1322 + }, + { + "epoch": 26.77, + "learning_rate": 3.125e-05, + "loss": 0.8354, + "step": 1323 + }, + { + "epoch": 26.79, + "learning_rate": 3.112244897959184e-05, + "loss": 0.8374, + "step": 1324 + }, + { + "epoch": 26.81, + "learning_rate": 3.0994897959183676e-05, + "loss": 0.9025, + "step": 1325 + }, + { + "epoch": 26.83, + "learning_rate": 3.086734693877551e-05, + "loss": 0.8618, + "step": 1326 + }, + { + "epoch": 26.85, + "learning_rate": 3.073979591836735e-05, + "loss": 0.8867, + "step": 1327 + }, + { + "epoch": 26.87, + "learning_rate": 3.061224489795919e-05, + "loss": 0.7864, + "step": 1328 + }, + { + "epoch": 26.89, + "learning_rate": 3.0484693877551023e-05, + "loss": 0.7706, + "step": 1329 + }, + { + "epoch": 26.91, + "learning_rate": 3.0357142857142857e-05, + "loss": 0.8677, + "step": 1330 + }, + { + "epoch": 26.93, + "learning_rate": 3.0229591836734695e-05, + "loss": 0.8619, + "step": 1331 + }, + { + "epoch": 26.95, + "learning_rate": 3.0102040816326533e-05, + "loss": 0.8487, + "step": 1332 + }, + { + "epoch": 26.97, + "learning_rate": 2.997448979591837e-05, + "loss": 0.8644, + "step": 1333 + }, + { + "epoch": 26.99, + "learning_rate": 2.9846938775510208e-05, + "loss": 0.8779, + "step": 1334 + }, + { + "epoch": 27.01, + "learning_rate": 2.9719387755102042e-05, + "loss": 0.8589, + "step": 1335 + }, + { + "epoch": 27.03, + "learning_rate": 2.959183673469388e-05, + "loss": 0.8214, + "step": 1336 + }, + { + "epoch": 27.05, + "learning_rate": 2.9464285714285718e-05, + "loss": 0.7907, + "step": 1337 + }, + { + "epoch": 27.07, + "learning_rate": 2.9336734693877555e-05, + "loss": 0.8493, + "step": 1338 + }, + { + "epoch": 27.09, + "learning_rate": 2.9209183673469386e-05, + "loss": 0.8423, + "step": 1339 + }, + { + "epoch": 27.11, + "learning_rate": 2.9081632653061224e-05, + "loss": 0.7737, + "step": 1340 + }, + { + "epoch": 27.13, + "learning_rate": 2.895408163265306e-05, + "loss": 0.792, + "step": 1341 + }, + { + "epoch": 27.15, + "learning_rate": 2.8826530612244902e-05, + "loss": 0.8145, + "step": 1342 + }, + { + "epoch": 27.17, + "learning_rate": 2.869897959183674e-05, + "loss": 0.8356, + "step": 1343 + }, + { + "epoch": 27.19, + "learning_rate": 2.857142857142857e-05, + "loss": 0.815, + "step": 1344 + }, + { + "epoch": 27.21, + "learning_rate": 2.844387755102041e-05, + "loss": 0.7806, + "step": 1345 + }, + { + "epoch": 27.23, + "learning_rate": 2.8316326530612246e-05, + "loss": 0.835, + "step": 1346 + }, + { + "epoch": 27.26, + "learning_rate": 2.8188775510204084e-05, + "loss": 0.8514, + "step": 1347 + }, + { + "epoch": 27.28, + "learning_rate": 2.8061224489795918e-05, + "loss": 0.8251, + "step": 1348 + }, + { + "epoch": 27.3, + "learning_rate": 2.7933673469387756e-05, + "loss": 0.8456, + "step": 1349 + }, + { + "epoch": 27.32, + "learning_rate": 2.7806122448979593e-05, + "loss": 0.8925, + "step": 1350 + }, + { + "epoch": 27.34, + "learning_rate": 2.767857142857143e-05, + "loss": 
0.8284, + "step": 1351 + }, + { + "epoch": 27.36, + "learning_rate": 2.7551020408163265e-05, + "loss": 0.8471, + "step": 1352 + }, + { + "epoch": 27.38, + "learning_rate": 2.7423469387755103e-05, + "loss": 0.819, + "step": 1353 + }, + { + "epoch": 27.4, + "learning_rate": 2.729591836734694e-05, + "loss": 0.8474, + "step": 1354 + }, + { + "epoch": 27.42, + "learning_rate": 2.7168367346938778e-05, + "loss": 0.8378, + "step": 1355 + }, + { + "epoch": 27.44, + "learning_rate": 2.7040816326530616e-05, + "loss": 0.8383, + "step": 1356 + }, + { + "epoch": 27.46, + "learning_rate": 2.6913265306122447e-05, + "loss": 0.8534, + "step": 1357 + }, + { + "epoch": 27.48, + "learning_rate": 2.6785714285714288e-05, + "loss": 0.8243, + "step": 1358 + }, + { + "epoch": 27.5, + "learning_rate": 2.6658163265306125e-05, + "loss": 0.8467, + "step": 1359 + }, + { + "epoch": 27.52, + "learning_rate": 2.6530612244897963e-05, + "loss": 0.8503, + "step": 1360 + }, + { + "epoch": 27.54, + "learning_rate": 2.6403061224489794e-05, + "loss": 0.7655, + "step": 1361 + }, + { + "epoch": 27.56, + "learning_rate": 2.627551020408163e-05, + "loss": 0.854, + "step": 1362 + }, + { + "epoch": 27.58, + "learning_rate": 2.614795918367347e-05, + "loss": 0.838, + "step": 1363 + }, + { + "epoch": 27.6, + "learning_rate": 2.6020408163265307e-05, + "loss": 0.8275, + "step": 1364 + }, + { + "epoch": 27.62, + "learning_rate": 2.5892857142857148e-05, + "loss": 0.8494, + "step": 1365 + }, + { + "epoch": 27.64, + "learning_rate": 2.576530612244898e-05, + "loss": 0.842, + "step": 1366 + }, + { + "epoch": 27.66, + "learning_rate": 2.5637755102040816e-05, + "loss": 0.8176, + "step": 1367 + }, + { + "epoch": 27.68, + "learning_rate": 2.5510204081632654e-05, + "loss": 0.8301, + "step": 1368 + }, + { + "epoch": 27.7, + "learning_rate": 2.538265306122449e-05, + "loss": 0.8182, + "step": 1369 + }, + { + "epoch": 27.72, + "learning_rate": 2.5255102040816326e-05, + "loss": 0.8067, + "step": 1370 + }, + { + "epoch": 27.74, + "learning_rate": 2.5127551020408164e-05, + "loss": 0.8322, + "step": 1371 + }, + { + "epoch": 27.76, + "learning_rate": 2.5e-05, + "loss": 0.828, + "step": 1372 + }, + { + "epoch": 27.78, + "learning_rate": 2.487244897959184e-05, + "loss": 0.8583, + "step": 1373 + }, + { + "epoch": 27.8, + "learning_rate": 2.4744897959183673e-05, + "loss": 0.8273, + "step": 1374 + }, + { + "epoch": 27.82, + "learning_rate": 2.461734693877551e-05, + "loss": 0.8292, + "step": 1375 + }, + { + "epoch": 27.84, + "learning_rate": 2.448979591836735e-05, + "loss": 0.9004, + "step": 1376 + }, + { + "epoch": 27.86, + "learning_rate": 2.4362244897959186e-05, + "loss": 0.8589, + "step": 1377 + }, + { + "epoch": 27.88, + "learning_rate": 2.423469387755102e-05, + "loss": 0.8559, + "step": 1378 + }, + { + "epoch": 27.9, + "learning_rate": 2.4107142857142858e-05, + "loss": 0.8224, + "step": 1379 + }, + { + "epoch": 27.92, + "learning_rate": 2.3979591836734696e-05, + "loss": 0.8438, + "step": 1380 + }, + { + "epoch": 27.94, + "learning_rate": 2.3852040816326533e-05, + "loss": 0.8267, + "step": 1381 + }, + { + "epoch": 27.96, + "learning_rate": 2.372448979591837e-05, + "loss": 0.8472, + "step": 1382 + }, + { + "epoch": 27.98, + "learning_rate": 2.3596938775510205e-05, + "loss": 0.835, + "step": 1383 + }, + { + "epoch": 28.0, + "learning_rate": 2.3469387755102043e-05, + "loss": 0.847, + "step": 1384 + }, + { + "epoch": 28.02, + "learning_rate": 2.3341836734693877e-05, + "loss": 0.865, + "step": 1385 + }, + { + "epoch": 28.04, + "learning_rate": 2.3214285714285715e-05, 
+ "loss": 0.807, + "step": 1386 + }, + { + "epoch": 28.06, + "learning_rate": 2.3086734693877552e-05, + "loss": 0.8133, + "step": 1387 + }, + { + "epoch": 28.08, + "learning_rate": 2.295918367346939e-05, + "loss": 0.8242, + "step": 1388 + }, + { + "epoch": 28.1, + "learning_rate": 2.2831632653061224e-05, + "loss": 0.8142, + "step": 1389 + }, + { + "epoch": 28.13, + "learning_rate": 2.2704081632653062e-05, + "loss": 0.7772, + "step": 1390 + }, + { + "epoch": 28.15, + "learning_rate": 2.2576530612244896e-05, + "loss": 0.7885, + "step": 1391 + }, + { + "epoch": 28.17, + "learning_rate": 2.2448979591836737e-05, + "loss": 0.8096, + "step": 1392 + }, + { + "epoch": 28.19, + "learning_rate": 2.2321428571428575e-05, + "loss": 0.8497, + "step": 1393 + }, + { + "epoch": 28.21, + "learning_rate": 2.219387755102041e-05, + "loss": 0.8814, + "step": 1394 + }, + { + "epoch": 28.23, + "learning_rate": 2.2066326530612247e-05, + "loss": 0.8634, + "step": 1395 + }, + { + "epoch": 28.25, + "learning_rate": 2.193877551020408e-05, + "loss": 0.8084, + "step": 1396 + }, + { + "epoch": 28.27, + "learning_rate": 2.181122448979592e-05, + "loss": 0.7792, + "step": 1397 + }, + { + "epoch": 28.29, + "learning_rate": 2.1683673469387756e-05, + "loss": 0.7998, + "step": 1398 + }, + { + "epoch": 28.31, + "learning_rate": 2.1556122448979594e-05, + "loss": 0.7963, + "step": 1399 + }, + { + "epoch": 28.33, + "learning_rate": 2.1428571428571428e-05, + "loss": 0.8058, + "step": 1400 + }, + { + "epoch": 28.35, + "learning_rate": 2.1301020408163266e-05, + "loss": 0.7777, + "step": 1401 + }, + { + "epoch": 28.37, + "learning_rate": 2.1173469387755103e-05, + "loss": 0.8472, + "step": 1402 + }, + { + "epoch": 28.39, + "learning_rate": 2.104591836734694e-05, + "loss": 0.818, + "step": 1403 + }, + { + "epoch": 28.41, + "learning_rate": 2.091836734693878e-05, + "loss": 0.8171, + "step": 1404 + }, + { + "epoch": 28.43, + "learning_rate": 2.0790816326530613e-05, + "loss": 0.858, + "step": 1405 + }, + { + "epoch": 28.45, + "learning_rate": 2.066326530612245e-05, + "loss": 0.7636, + "step": 1406 + }, + { + "epoch": 28.47, + "learning_rate": 2.0535714285714285e-05, + "loss": 0.8519, + "step": 1407 + }, + { + "epoch": 28.49, + "learning_rate": 2.0408163265306123e-05, + "loss": 0.8529, + "step": 1408 + }, + { + "epoch": 28.51, + "learning_rate": 2.028061224489796e-05, + "loss": 0.8043, + "step": 1409 + }, + { + "epoch": 28.53, + "learning_rate": 2.0153061224489798e-05, + "loss": 0.7734, + "step": 1410 + }, + { + "epoch": 28.55, + "learning_rate": 2.0025510204081632e-05, + "loss": 0.8228, + "step": 1411 + }, + { + "epoch": 28.57, + "learning_rate": 1.989795918367347e-05, + "loss": 0.7956, + "step": 1412 + }, + { + "epoch": 28.59, + "learning_rate": 1.9770408163265307e-05, + "loss": 0.8682, + "step": 1413 + }, + { + "epoch": 28.61, + "learning_rate": 1.9642857142857145e-05, + "loss": 0.8637, + "step": 1414 + }, + { + "epoch": 28.63, + "learning_rate": 1.9515306122448983e-05, + "loss": 0.8287, + "step": 1415 + }, + { + "epoch": 28.65, + "learning_rate": 1.9387755102040817e-05, + "loss": 0.8668, + "step": 1416 + }, + { + "epoch": 28.67, + "learning_rate": 1.9260204081632655e-05, + "loss": 0.8462, + "step": 1417 + }, + { + "epoch": 28.69, + "learning_rate": 1.913265306122449e-05, + "loss": 0.7887, + "step": 1418 + }, + { + "epoch": 28.71, + "learning_rate": 1.9005102040816326e-05, + "loss": 0.8296, + "step": 1419 + }, + { + "epoch": 28.73, + "learning_rate": 1.8877551020408164e-05, + "loss": 0.8078, + "step": 1420 + }, + { + "epoch": 28.75, + 
"learning_rate": 1.8750000000000002e-05, + "loss": 0.8447, + "step": 1421 + }, + { + "epoch": 28.77, + "learning_rate": 1.862244897959184e-05, + "loss": 0.8507, + "step": 1422 + }, + { + "epoch": 28.79, + "learning_rate": 1.8494897959183674e-05, + "loss": 0.8074, + "step": 1423 + }, + { + "epoch": 28.81, + "learning_rate": 1.836734693877551e-05, + "loss": 0.8837, + "step": 1424 + }, + { + "epoch": 28.83, + "learning_rate": 1.823979591836735e-05, + "loss": 0.8696, + "step": 1425 + }, + { + "epoch": 28.85, + "learning_rate": 1.8112244897959187e-05, + "loss": 0.758, + "step": 1426 + }, + { + "epoch": 28.87, + "learning_rate": 1.798469387755102e-05, + "loss": 0.8471, + "step": 1427 + }, + { + "epoch": 28.89, + "learning_rate": 1.785714285714286e-05, + "loss": 0.8379, + "step": 1428 + }, + { + "epoch": 28.91, + "learning_rate": 1.7729591836734693e-05, + "loss": 0.8304, + "step": 1429 + }, + { + "epoch": 28.93, + "learning_rate": 1.760204081632653e-05, + "loss": 0.8281, + "step": 1430 + }, + { + "epoch": 28.95, + "learning_rate": 1.7474489795918368e-05, + "loss": 0.7635, + "step": 1431 + }, + { + "epoch": 28.98, + "learning_rate": 1.7346938775510206e-05, + "loss": 0.7618, + "step": 1432 + }, + { + "epoch": 29.0, + "learning_rate": 1.7219387755102043e-05, + "loss": 0.8288, + "step": 1433 + }, + { + "epoch": 29.02, + "learning_rate": 1.7091836734693878e-05, + "loss": 0.8453, + "step": 1434 + }, + { + "epoch": 29.04, + "learning_rate": 1.6964285714285715e-05, + "loss": 0.8312, + "step": 1435 + }, + { + "epoch": 29.06, + "learning_rate": 1.683673469387755e-05, + "loss": 0.8354, + "step": 1436 + }, + { + "epoch": 29.08, + "learning_rate": 1.670918367346939e-05, + "loss": 0.8417, + "step": 1437 + }, + { + "epoch": 29.1, + "learning_rate": 1.6581632653061225e-05, + "loss": 0.7835, + "step": 1438 + }, + { + "epoch": 29.12, + "learning_rate": 1.6454081632653062e-05, + "loss": 0.7782, + "step": 1439 + }, + { + "epoch": 29.14, + "learning_rate": 1.6326530612244897e-05, + "loss": 0.8389, + "step": 1440 + }, + { + "epoch": 29.16, + "learning_rate": 1.6198979591836734e-05, + "loss": 0.7897, + "step": 1441 + }, + { + "epoch": 29.18, + "learning_rate": 1.6071428571428572e-05, + "loss": 0.832, + "step": 1442 + }, + { + "epoch": 29.2, + "learning_rate": 1.594387755102041e-05, + "loss": 0.8016, + "step": 1443 + }, + { + "epoch": 29.22, + "learning_rate": 1.5816326530612247e-05, + "loss": 0.8481, + "step": 1444 + }, + { + "epoch": 29.24, + "learning_rate": 1.568877551020408e-05, + "loss": 0.8243, + "step": 1445 + }, + { + "epoch": 29.26, + "learning_rate": 1.556122448979592e-05, + "loss": 0.806, + "step": 1446 + }, + { + "epoch": 29.28, + "learning_rate": 1.5433673469387753e-05, + "loss": 0.7738, + "step": 1447 + }, + { + "epoch": 29.3, + "learning_rate": 1.5306122448979594e-05, + "loss": 0.8573, + "step": 1448 + }, + { + "epoch": 29.32, + "learning_rate": 1.5178571428571429e-05, + "loss": 0.7937, + "step": 1449 + }, + { + "epoch": 29.34, + "learning_rate": 1.5051020408163266e-05, + "loss": 0.8163, + "step": 1450 + }, + { + "epoch": 29.36, + "learning_rate": 1.4923469387755104e-05, + "loss": 0.8614, + "step": 1451 + }, + { + "epoch": 29.38, + "learning_rate": 1.479591836734694e-05, + "loss": 0.7731, + "step": 1452 + }, + { + "epoch": 29.4, + "learning_rate": 1.4668367346938778e-05, + "loss": 0.805, + "step": 1453 + }, + { + "epoch": 29.42, + "learning_rate": 1.4540816326530612e-05, + "loss": 0.7942, + "step": 1454 + }, + { + "epoch": 29.44, + "learning_rate": 1.4413265306122451e-05, + "loss": 0.7983, + "step": 1455 
+ }, + { + "epoch": 29.46, + "learning_rate": 1.4285714285714285e-05, + "loss": 0.8119, + "step": 1456 + }, + { + "epoch": 29.48, + "learning_rate": 1.4158163265306123e-05, + "loss": 0.7567, + "step": 1457 + }, + { + "epoch": 29.5, + "learning_rate": 1.4030612244897959e-05, + "loss": 0.7317, + "step": 1458 + }, + { + "epoch": 29.52, + "learning_rate": 1.3903061224489797e-05, + "loss": 0.8269, + "step": 1459 + }, + { + "epoch": 29.54, + "learning_rate": 1.3775510204081633e-05, + "loss": 0.812, + "step": 1460 + }, + { + "epoch": 29.56, + "learning_rate": 1.364795918367347e-05, + "loss": 0.7838, + "step": 1461 + }, + { + "epoch": 29.58, + "learning_rate": 1.3520408163265308e-05, + "loss": 0.829, + "step": 1462 + }, + { + "epoch": 29.6, + "learning_rate": 1.3392857142857144e-05, + "loss": 0.8102, + "step": 1463 + }, + { + "epoch": 29.62, + "learning_rate": 1.3265306122448982e-05, + "loss": 0.8186, + "step": 1464 + }, + { + "epoch": 29.64, + "learning_rate": 1.3137755102040816e-05, + "loss": 0.8352, + "step": 1465 + }, + { + "epoch": 29.66, + "learning_rate": 1.3010204081632653e-05, + "loss": 0.8249, + "step": 1466 + }, + { + "epoch": 29.68, + "learning_rate": 1.288265306122449e-05, + "loss": 0.8082, + "step": 1467 + }, + { + "epoch": 29.7, + "learning_rate": 1.2755102040816327e-05, + "loss": 0.7936, + "step": 1468 + }, + { + "epoch": 29.72, + "learning_rate": 1.2627551020408163e-05, + "loss": 0.8288, + "step": 1469 + }, + { + "epoch": 29.74, + "learning_rate": 1.25e-05, + "loss": 0.7974, + "step": 1470 + }, + { + "epoch": 29.76, + "learning_rate": 1.2372448979591837e-05, + "loss": 0.8425, + "step": 1471 + }, + { + "epoch": 29.78, + "learning_rate": 1.2244897959183674e-05, + "loss": 0.8488, + "step": 1472 + }, + { + "epoch": 29.8, + "learning_rate": 1.211734693877551e-05, + "loss": 0.8154, + "step": 1473 + }, + { + "epoch": 29.82, + "learning_rate": 1.1989795918367348e-05, + "loss": 0.839, + "step": 1474 + }, + { + "epoch": 29.85, + "learning_rate": 1.1862244897959185e-05, + "loss": 0.7959, + "step": 1475 + }, + { + "epoch": 29.87, + "learning_rate": 1.1734693877551021e-05, + "loss": 0.8123, + "step": 1476 + }, + { + "epoch": 29.89, + "learning_rate": 1.1607142857142857e-05, + "loss": 0.8426, + "step": 1477 + }, + { + "epoch": 29.91, + "learning_rate": 1.1479591836734695e-05, + "loss": 0.8093, + "step": 1478 + }, + { + "epoch": 29.93, + "learning_rate": 1.1352040816326531e-05, + "loss": 0.8086, + "step": 1479 + }, + { + "epoch": 29.95, + "learning_rate": 1.1224489795918369e-05, + "loss": 0.8346, + "step": 1480 + }, + { + "epoch": 29.97, + "learning_rate": 1.1096938775510205e-05, + "loss": 0.7846, + "step": 1481 + }, + { + "epoch": 29.99, + "learning_rate": 1.096938775510204e-05, + "loss": 0.8098, + "step": 1482 + }, + { + "epoch": 30.01, + "learning_rate": 1.0841836734693878e-05, + "loss": 0.7874, + "step": 1483 + }, + { + "epoch": 30.03, + "learning_rate": 1.0714285714285714e-05, + "loss": 0.7854, + "step": 1484 + }, + { + "epoch": 30.05, + "learning_rate": 1.0586734693877552e-05, + "loss": 0.8542, + "step": 1485 + }, + { + "epoch": 30.07, + "learning_rate": 1.045918367346939e-05, + "loss": 0.8136, + "step": 1486 + }, + { + "epoch": 30.09, + "learning_rate": 1.0331632653061225e-05, + "loss": 0.8391, + "step": 1487 + }, + { + "epoch": 30.11, + "learning_rate": 1.0204081632653061e-05, + "loss": 0.8057, + "step": 1488 + }, + { + "epoch": 30.13, + "learning_rate": 1.0076530612244899e-05, + "loss": 0.7967, + "step": 1489 + }, + { + "epoch": 30.15, + "learning_rate": 9.948979591836735e-06, + "loss": 
0.774, + "step": 1490 + }, + { + "epoch": 30.17, + "learning_rate": 9.821428571428573e-06, + "loss": 0.7764, + "step": 1491 + }, + { + "epoch": 30.19, + "learning_rate": 9.693877551020408e-06, + "loss": 0.7785, + "step": 1492 + }, + { + "epoch": 30.21, + "learning_rate": 9.566326530612244e-06, + "loss": 0.8099, + "step": 1493 + }, + { + "epoch": 30.23, + "learning_rate": 9.438775510204082e-06, + "loss": 0.7936, + "step": 1494 + }, + { + "epoch": 30.25, + "learning_rate": 9.31122448979592e-06, + "loss": 0.7829, + "step": 1495 + }, + { + "epoch": 30.27, + "learning_rate": 9.183673469387756e-06, + "loss": 0.7447, + "step": 1496 + }, + { + "epoch": 30.29, + "learning_rate": 9.056122448979593e-06, + "loss": 0.808, + "step": 1497 + }, + { + "epoch": 30.31, + "learning_rate": 8.92857142857143e-06, + "loss": 0.7925, + "step": 1498 + }, + { + "epoch": 30.33, + "learning_rate": 8.801020408163265e-06, + "loss": 0.7776, + "step": 1499 + }, + { + "epoch": 30.35, + "learning_rate": 8.673469387755103e-06, + "loss": 0.7887, + "step": 1500 + } + ], + "logging_steps": 1, + "max_steps": 1568, + "num_train_epochs": 32, + "save_steps": 100, + "total_flos": 2.2208537530806682e+18, + "trial_name": null, + "trial_params": null +} diff --git a/checkpoint-1500/training_args.bin b/checkpoint-1500/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..db23e07d097c18532e52f58a70eb72d22e39c8c1 --- /dev/null +++ b/checkpoint-1500/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b610cbc4242bb50b4985b00e205994ae514fec6d9e2273f2b545a583a07b154b +size 4155 diff --git a/checkpoint-600/README.md b/checkpoint-600/README.md new file mode 100644 index 0000000000000000000000000000000000000000..08371015f02382e6fcba318f4aaea54ae52cd3c4 --- /dev/null +++ b/checkpoint-600/README.md @@ -0,0 +1,34 @@ +--- +library_name: peft +--- +## Training procedure + + +The following `bitsandbytes` quantization config was used during training: +- quant_method: bitsandbytes +- load_in_8bit: True +- load_in_4bit: False +- llm_int8_threshold: 6.0 +- llm_int8_skip_modules: None +- llm_int8_enable_fp32_cpu_offload: False +- llm_int8_has_fp16_weight: False +- bnb_4bit_quant_type: fp4 +- bnb_4bit_use_double_quant: False +- bnb_4bit_compute_dtype: float32 + +The following `bitsandbytes` quantization config was used during training: +- quant_method: bitsandbytes +- load_in_8bit: True +- load_in_4bit: False +- llm_int8_threshold: 6.0 +- llm_int8_skip_modules: None +- llm_int8_enable_fp32_cpu_offload: False +- llm_int8_has_fp16_weight: False +- bnb_4bit_quant_type: fp4 +- bnb_4bit_use_double_quant: False +- bnb_4bit_compute_dtype: float32 +### Framework versions + +- PEFT 0.6.0.dev0 + +- PEFT 0.6.0.dev0 diff --git a/checkpoint-600/adapter_config.json b/checkpoint-600/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..751d838ac0c1ae5ca71ca448b25d7a8a0173f01b --- /dev/null +++ b/checkpoint-600/adapter_config.json @@ -0,0 +1,23 @@ +{ + "auto_mapping": null, + "base_model_name_or_path": "bigscience/bloomz-3b", + "bias": "none", + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": true, + "layers_pattern": null, + "layers_to_transform": null, + "lora_alpha": 16, + "lora_dropout": 0.0, + "modules_to_save": null, + "peft_type": "LORA", + "r": 8, + "revision": null, + "target_modules": [ + "dense_4h_to_h", + "dense", + "dense_h_to_4h", + "query_key_value" + ], + "task_type": "CAUSAL_LM" +} \ No newline at end of file diff --git 
a/checkpoint-600/adapter_model.bin b/checkpoint-600/adapter_model.bin new file mode 100644 index 0000000000000000000000000000000000000000..9187ebbdb23fe0566e9b1bc80bf61091168bec53 --- /dev/null +++ b/checkpoint-600/adapter_model.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8bdafbf712a13caac2eff2baadedfa7d69fb1ba1a1618c0652bba7fda3e99d02 +size 39409357 diff --git a/checkpoint-600/optimizer.pt b/checkpoint-600/optimizer.pt new file mode 100644 index 0000000000000000000000000000000000000000..ff5066c6b6e3961344dee396ea0cf019fe2c8008 --- /dev/null +++ b/checkpoint-600/optimizer.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:2e98eee29adfb5b0c5dfcad4773c74d2234215ec17cf81e21ff40b5dcdd0980f +size 78844421 diff --git a/checkpoint-600/rng_state.pth b/checkpoint-600/rng_state.pth new file mode 100644 index 0000000000000000000000000000000000000000..961069ec281e351c01eb6735b97b75dd3d8d33b7 --- /dev/null +++ b/checkpoint-600/rng_state.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:696b4cef517e79a03215d5c5fccde0c73d2d962fe70ebab1e472f650136142b2 +size 14575 diff --git a/checkpoint-600/scheduler.pt b/checkpoint-600/scheduler.pt new file mode 100644 index 0000000000000000000000000000000000000000..85ac73474f178c78555b00fc82c696e29cc9f8ca --- /dev/null +++ b/checkpoint-600/scheduler.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b6ec2d50f574aac2aa9116fabbb4b97fdb31897bc783b4e235f34f9907d573b9 +size 627 diff --git a/checkpoint-600/special_tokens_map.json b/checkpoint-600/special_tokens_map.json new file mode 100644 index 0000000000000000000000000000000000000000..fdafe480f024ff444c7492147536765ce5d55a2d --- /dev/null +++ b/checkpoint-600/special_tokens_map.json @@ -0,0 +1,6 @@ +{ + "bos_token": "<s>", + "eos_token": "</s>", + "pad_token": "<pad>", + "unk_token": "<unk>" +} diff --git a/checkpoint-600/tokenizer.json b/checkpoint-600/tokenizer.json new file mode 100644 index 0000000000000000000000000000000000000000..dbf002cafbd4818dcff2abc9156c088d681b4533 --- /dev/null +++ b/checkpoint-600/tokenizer.json @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:17a208233d2ee8d8c83b23bc214df737c44806a1919f444e89b31e586cd956ba +size 14500471 diff --git a/checkpoint-600/tokenizer_config.json b/checkpoint-600/tokenizer_config.json new file mode 100644 index 0000000000000000000000000000000000000000..4b56cc9c2965c07132c35df3e2972e93d98c82c3 --- /dev/null +++ b/checkpoint-600/tokenizer_config.json @@ -0,0 +1,10 @@ +{ + "add_prefix_space": false, + "bos_token": "<s>", + "clean_up_tokenization_spaces": false, + "eos_token": "</s>", + "model_max_length": 1000000000000000019884624838656, + "pad_token": "<pad>", + "tokenizer_class": "BloomTokenizer", + "unk_token": "<unk>" +} diff --git a/checkpoint-600/trainer_state.json b/checkpoint-600/trainer_state.json new file mode 100644 index 0000000000000000000000000000000000000000..ceb031af3f1beb812ee9cf0893a1d35d18d1d2fc --- /dev/null +++ b/checkpoint-600/trainer_state.json @@ -0,0 +1,3619 @@ +{ + "best_metric": null, + "best_model_checkpoint": null, + "epoch": 12.140373063547266, + "eval_steps": 500, + "global_step": 600, + "is_hyper_param_search": false, + "is_local_process_zero": true, + "is_world_process_zero": true, + "log_history": [ + { + "epoch": 0.02, + "learning_rate": 0.00019987244897959184, + "loss": 3.2215, + "step": 1 + }, + { + "epoch": 0.04, + "learning_rate": 0.00019974489795918367, + "loss": 2.8365, + "step": 2 + }, + { + "epoch": 0.06, + "learning_rate": 
0.00019961734693877553, + "loss": 2.602, + "step": 3 + }, + { + "epoch": 0.08, + "learning_rate": 0.00019948979591836736, + "loss": 2.4196, + "step": 4 + }, + { + "epoch": 0.1, + "learning_rate": 0.0001993622448979592, + "loss": 2.2574, + "step": 5 + }, + { + "epoch": 0.12, + "learning_rate": 0.00019923469387755102, + "loss": 2.2239, + "step": 6 + }, + { + "epoch": 0.14, + "learning_rate": 0.00019910714285714288, + "loss": 2.1661, + "step": 7 + }, + { + "epoch": 0.16, + "learning_rate": 0.0001989795918367347, + "loss": 2.0987, + "step": 8 + }, + { + "epoch": 0.18, + "learning_rate": 0.00019885204081632654, + "loss": 2.015, + "step": 9 + }, + { + "epoch": 0.2, + "learning_rate": 0.00019872448979591837, + "loss": 1.9771, + "step": 10 + }, + { + "epoch": 0.22, + "learning_rate": 0.00019859693877551023, + "loss": 2.0271, + "step": 11 + }, + { + "epoch": 0.24, + "learning_rate": 0.00019846938775510203, + "loss": 1.9812, + "step": 12 + }, + { + "epoch": 0.26, + "learning_rate": 0.0001983418367346939, + "loss": 2.0834, + "step": 13 + }, + { + "epoch": 0.28, + "learning_rate": 0.00019821428571428572, + "loss": 1.9174, + "step": 14 + }, + { + "epoch": 0.3, + "learning_rate": 0.00019808673469387755, + "loss": 1.8409, + "step": 15 + }, + { + "epoch": 0.32, + "learning_rate": 0.00019795918367346938, + "loss": 1.929, + "step": 16 + }, + { + "epoch": 0.34, + "learning_rate": 0.00019783163265306124, + "loss": 2.0041, + "step": 17 + }, + { + "epoch": 0.36, + "learning_rate": 0.00019770408163265305, + "loss": 1.9385, + "step": 18 + }, + { + "epoch": 0.38, + "learning_rate": 0.0001975765306122449, + "loss": 1.9592, + "step": 19 + }, + { + "epoch": 0.4, + "learning_rate": 0.00019744897959183674, + "loss": 1.9701, + "step": 20 + }, + { + "epoch": 0.42, + "learning_rate": 0.0001973214285714286, + "loss": 1.9277, + "step": 21 + }, + { + "epoch": 0.45, + "learning_rate": 0.00019719387755102042, + "loss": 1.8394, + "step": 22 + }, + { + "epoch": 0.47, + "learning_rate": 0.00019706632653061226, + "loss": 1.8666, + "step": 23 + }, + { + "epoch": 0.49, + "learning_rate": 0.00019693877551020409, + "loss": 1.8997, + "step": 24 + }, + { + "epoch": 0.51, + "learning_rate": 0.00019681122448979592, + "loss": 1.9432, + "step": 25 + }, + { + "epoch": 0.53, + "learning_rate": 0.00019668367346938777, + "loss": 1.9137, + "step": 26 + }, + { + "epoch": 0.55, + "learning_rate": 0.0001965561224489796, + "loss": 1.905, + "step": 27 + }, + { + "epoch": 0.57, + "learning_rate": 0.00019642857142857144, + "loss": 1.8708, + "step": 28 + }, + { + "epoch": 0.59, + "learning_rate": 0.00019630102040816327, + "loss": 1.9097, + "step": 29 + }, + { + "epoch": 0.61, + "learning_rate": 0.00019617346938775513, + "loss": 1.896, + "step": 30 + }, + { + "epoch": 0.63, + "learning_rate": 0.00019604591836734696, + "loss": 1.8834, + "step": 31 + }, + { + "epoch": 0.65, + "learning_rate": 0.0001959183673469388, + "loss": 1.8323, + "step": 32 + }, + { + "epoch": 0.67, + "learning_rate": 0.00019579081632653062, + "loss": 1.804, + "step": 33 + }, + { + "epoch": 0.69, + "learning_rate": 0.00019566326530612248, + "loss": 1.8906, + "step": 34 + }, + { + "epoch": 0.71, + "learning_rate": 0.00019553571428571428, + "loss": 1.8693, + "step": 35 + }, + { + "epoch": 0.73, + "learning_rate": 0.00019540816326530614, + "loss": 1.9308, + "step": 36 + }, + { + "epoch": 0.75, + "learning_rate": 0.00019528061224489797, + "loss": 1.8082, + "step": 37 + }, + { + "epoch": 0.77, + "learning_rate": 0.0001951530612244898, + "loss": 1.848, + "step": 38 + }, + { + "epoch": 0.79, 
+ "learning_rate": 0.00019502551020408163, + "loss": 1.8866, + "step": 39 + }, + { + "epoch": 0.81, + "learning_rate": 0.0001948979591836735, + "loss": 1.7844, + "step": 40 + }, + { + "epoch": 0.83, + "learning_rate": 0.0001947704081632653, + "loss": 1.8485, + "step": 41 + }, + { + "epoch": 0.85, + "learning_rate": 0.00019464285714285715, + "loss": 1.7917, + "step": 42 + }, + { + "epoch": 0.87, + "learning_rate": 0.00019451530612244898, + "loss": 1.7342, + "step": 43 + }, + { + "epoch": 0.89, + "learning_rate": 0.00019438775510204084, + "loss": 1.8479, + "step": 44 + }, + { + "epoch": 0.91, + "learning_rate": 0.00019426020408163267, + "loss": 1.8639, + "step": 45 + }, + { + "epoch": 0.93, + "learning_rate": 0.0001941326530612245, + "loss": 1.8166, + "step": 46 + }, + { + "epoch": 0.95, + "learning_rate": 0.00019400510204081633, + "loss": 1.7566, + "step": 47 + }, + { + "epoch": 0.97, + "learning_rate": 0.00019387755102040816, + "loss": 1.8071, + "step": 48 + }, + { + "epoch": 0.99, + "learning_rate": 0.00019375000000000002, + "loss": 1.8612, + "step": 49 + }, + { + "epoch": 1.01, + "learning_rate": 0.00019362244897959185, + "loss": 1.7819, + "step": 50 + }, + { + "epoch": 1.03, + "learning_rate": 0.00019349489795918368, + "loss": 1.8647, + "step": 51 + }, + { + "epoch": 1.05, + "learning_rate": 0.0001933673469387755, + "loss": 1.8196, + "step": 52 + }, + { + "epoch": 1.07, + "learning_rate": 0.00019323979591836737, + "loss": 1.8027, + "step": 53 + }, + { + "epoch": 1.09, + "learning_rate": 0.00019311224489795917, + "loss": 1.8927, + "step": 54 + }, + { + "epoch": 1.11, + "learning_rate": 0.00019298469387755103, + "loss": 1.8481, + "step": 55 + }, + { + "epoch": 1.13, + "learning_rate": 0.00019285714285714286, + "loss": 1.7781, + "step": 56 + }, + { + "epoch": 1.15, + "learning_rate": 0.00019272959183673472, + "loss": 1.8101, + "step": 57 + }, + { + "epoch": 1.17, + "learning_rate": 0.00019260204081632653, + "loss": 1.7257, + "step": 58 + }, + { + "epoch": 1.19, + "learning_rate": 0.00019247448979591838, + "loss": 1.8185, + "step": 59 + }, + { + "epoch": 1.21, + "learning_rate": 0.00019234693877551021, + "loss": 1.8557, + "step": 60 + }, + { + "epoch": 1.23, + "learning_rate": 0.00019221938775510204, + "loss": 1.7418, + "step": 61 + }, + { + "epoch": 1.25, + "learning_rate": 0.00019209183673469388, + "loss": 1.6879, + "step": 62 + }, + { + "epoch": 1.27, + "learning_rate": 0.00019196428571428573, + "loss": 1.7651, + "step": 63 + }, + { + "epoch": 1.29, + "learning_rate": 0.00019183673469387756, + "loss": 1.7759, + "step": 64 + }, + { + "epoch": 1.32, + "learning_rate": 0.0001917091836734694, + "loss": 1.7691, + "step": 65 + }, + { + "epoch": 1.34, + "learning_rate": 0.00019158163265306123, + "loss": 1.7794, + "step": 66 + }, + { + "epoch": 1.36, + "learning_rate": 0.00019145408163265306, + "loss": 1.8152, + "step": 67 + }, + { + "epoch": 1.38, + "learning_rate": 0.00019132653061224492, + "loss": 1.8052, + "step": 68 + }, + { + "epoch": 1.4, + "learning_rate": 0.00019119897959183675, + "loss": 1.8054, + "step": 69 + }, + { + "epoch": 1.42, + "learning_rate": 0.00019107142857142858, + "loss": 1.8114, + "step": 70 + }, + { + "epoch": 1.44, + "learning_rate": 0.0001909438775510204, + "loss": 1.7749, + "step": 71 + }, + { + "epoch": 1.46, + "learning_rate": 0.00019081632653061227, + "loss": 1.777, + "step": 72 + }, + { + "epoch": 1.48, + "learning_rate": 0.0001906887755102041, + "loss": 1.7896, + "step": 73 + }, + { + "epoch": 1.5, + "learning_rate": 0.00019056122448979593, + "loss": 1.8335, + 
"step": 74 + }, + { + "epoch": 1.52, + "learning_rate": 0.00019043367346938776, + "loss": 1.8155, + "step": 75 + }, + { + "epoch": 1.54, + "learning_rate": 0.00019030612244897962, + "loss": 1.8224, + "step": 76 + }, + { + "epoch": 1.56, + "learning_rate": 0.00019017857142857142, + "loss": 1.7889, + "step": 77 + }, + { + "epoch": 1.58, + "learning_rate": 0.00019005102040816328, + "loss": 1.8866, + "step": 78 + }, + { + "epoch": 1.6, + "learning_rate": 0.0001899234693877551, + "loss": 1.8439, + "step": 79 + }, + { + "epoch": 1.62, + "learning_rate": 0.00018979591836734697, + "loss": 1.7906, + "step": 80 + }, + { + "epoch": 1.64, + "learning_rate": 0.00018966836734693877, + "loss": 1.8627, + "step": 81 + }, + { + "epoch": 1.66, + "learning_rate": 0.00018954081632653063, + "loss": 1.7497, + "step": 82 + }, + { + "epoch": 1.68, + "learning_rate": 0.00018941326530612246, + "loss": 1.7936, + "step": 83 + }, + { + "epoch": 1.7, + "learning_rate": 0.0001892857142857143, + "loss": 1.8341, + "step": 84 + }, + { + "epoch": 1.72, + "learning_rate": 0.00018915816326530612, + "loss": 1.7868, + "step": 85 + }, + { + "epoch": 1.74, + "learning_rate": 0.00018903061224489798, + "loss": 1.7493, + "step": 86 + }, + { + "epoch": 1.76, + "learning_rate": 0.0001889030612244898, + "loss": 1.7926, + "step": 87 + }, + { + "epoch": 1.78, + "learning_rate": 0.00018877551020408164, + "loss": 1.8278, + "step": 88 + }, + { + "epoch": 1.8, + "learning_rate": 0.00018864795918367347, + "loss": 1.7387, + "step": 89 + }, + { + "epoch": 1.82, + "learning_rate": 0.0001885204081632653, + "loss": 1.7669, + "step": 90 + }, + { + "epoch": 1.84, + "learning_rate": 0.00018839285714285716, + "loss": 1.7686, + "step": 91 + }, + { + "epoch": 1.86, + "learning_rate": 0.000188265306122449, + "loss": 1.7759, + "step": 92 + }, + { + "epoch": 1.88, + "learning_rate": 0.00018813775510204082, + "loss": 1.7016, + "step": 93 + }, + { + "epoch": 1.9, + "learning_rate": 0.00018801020408163265, + "loss": 1.8123, + "step": 94 + }, + { + "epoch": 1.92, + "learning_rate": 0.0001878826530612245, + "loss": 1.8315, + "step": 95 + }, + { + "epoch": 1.94, + "learning_rate": 0.00018775510204081634, + "loss": 1.7679, + "step": 96 + }, + { + "epoch": 1.96, + "learning_rate": 0.00018762755102040817, + "loss": 1.7874, + "step": 97 + }, + { + "epoch": 1.98, + "learning_rate": 0.0001875, + "loss": 1.8008, + "step": 98 + }, + { + "epoch": 2.0, + "learning_rate": 0.00018737244897959186, + "loss": 1.7177, + "step": 99 + }, + { + "epoch": 2.02, + "learning_rate": 0.00018724489795918367, + "loss": 1.7272, + "step": 100 + }, + { + "epoch": 2.04, + "learning_rate": 0.00018711734693877552, + "loss": 1.7848, + "step": 101 + }, + { + "epoch": 2.06, + "learning_rate": 0.00018698979591836735, + "loss": 1.744, + "step": 102 + }, + { + "epoch": 2.08, + "learning_rate": 0.00018686224489795919, + "loss": 1.7005, + "step": 103 + }, + { + "epoch": 2.1, + "learning_rate": 0.00018673469387755102, + "loss": 1.8247, + "step": 104 + }, + { + "epoch": 2.12, + "learning_rate": 0.00018660714285714287, + "loss": 1.6855, + "step": 105 + }, + { + "epoch": 2.14, + "learning_rate": 0.0001864795918367347, + "loss": 1.7627, + "step": 106 + }, + { + "epoch": 2.17, + "learning_rate": 0.00018635204081632654, + "loss": 1.7564, + "step": 107 + }, + { + "epoch": 2.19, + "learning_rate": 0.00018622448979591837, + "loss": 1.8237, + "step": 108 + }, + { + "epoch": 2.21, + "learning_rate": 0.00018609693877551022, + "loss": 1.7421, + "step": 109 + }, + { + "epoch": 2.23, + "learning_rate": 
0.00018596938775510206, + "loss": 1.7517, + "step": 110 + }, + { + "epoch": 2.25, + "learning_rate": 0.0001858418367346939, + "loss": 1.7515, + "step": 111 + }, + { + "epoch": 2.27, + "learning_rate": 0.00018571428571428572, + "loss": 1.7842, + "step": 112 + }, + { + "epoch": 2.29, + "learning_rate": 0.00018558673469387755, + "loss": 1.8001, + "step": 113 + }, + { + "epoch": 2.31, + "learning_rate": 0.0001854591836734694, + "loss": 1.7653, + "step": 114 + }, + { + "epoch": 2.33, + "learning_rate": 0.00018533163265306124, + "loss": 1.694, + "step": 115 + }, + { + "epoch": 2.35, + "learning_rate": 0.00018520408163265307, + "loss": 1.7457, + "step": 116 + }, + { + "epoch": 2.37, + "learning_rate": 0.0001850765306122449, + "loss": 1.7899, + "step": 117 + }, + { + "epoch": 2.39, + "learning_rate": 0.00018494897959183676, + "loss": 1.7473, + "step": 118 + }, + { + "epoch": 2.41, + "learning_rate": 0.0001848214285714286, + "loss": 1.6639, + "step": 119 + }, + { + "epoch": 2.43, + "learning_rate": 0.00018469387755102042, + "loss": 1.762, + "step": 120 + }, + { + "epoch": 2.45, + "learning_rate": 0.00018456632653061225, + "loss": 1.7378, + "step": 121 + }, + { + "epoch": 2.47, + "learning_rate": 0.0001844387755102041, + "loss": 1.672, + "step": 122 + }, + { + "epoch": 2.49, + "learning_rate": 0.0001843112244897959, + "loss": 1.7267, + "step": 123 + }, + { + "epoch": 2.51, + "learning_rate": 0.00018418367346938777, + "loss": 1.7825, + "step": 124 + }, + { + "epoch": 2.53, + "learning_rate": 0.0001840561224489796, + "loss": 1.7566, + "step": 125 + }, + { + "epoch": 2.55, + "learning_rate": 0.00018392857142857143, + "loss": 1.8169, + "step": 126 + }, + { + "epoch": 2.57, + "learning_rate": 0.00018380102040816326, + "loss": 1.6801, + "step": 127 + }, + { + "epoch": 2.59, + "learning_rate": 0.00018367346938775512, + "loss": 1.7292, + "step": 128 + }, + { + "epoch": 2.61, + "learning_rate": 0.00018354591836734695, + "loss": 1.737, + "step": 129 + }, + { + "epoch": 2.63, + "learning_rate": 0.00018341836734693878, + "loss": 1.7696, + "step": 130 + }, + { + "epoch": 2.65, + "learning_rate": 0.0001832908163265306, + "loss": 1.7239, + "step": 131 + }, + { + "epoch": 2.67, + "learning_rate": 0.00018316326530612247, + "loss": 1.7441, + "step": 132 + }, + { + "epoch": 2.69, + "learning_rate": 0.0001830357142857143, + "loss": 1.7825, + "step": 133 + }, + { + "epoch": 2.71, + "learning_rate": 0.00018290816326530613, + "loss": 1.7411, + "step": 134 + }, + { + "epoch": 2.73, + "learning_rate": 0.00018278061224489796, + "loss": 1.7119, + "step": 135 + }, + { + "epoch": 2.75, + "learning_rate": 0.0001826530612244898, + "loss": 1.7443, + "step": 136 + }, + { + "epoch": 2.77, + "learning_rate": 0.00018252551020408165, + "loss": 1.7197, + "step": 137 + }, + { + "epoch": 2.79, + "learning_rate": 0.00018239795918367348, + "loss": 1.7273, + "step": 138 + }, + { + "epoch": 2.81, + "learning_rate": 0.0001822704081632653, + "loss": 1.7681, + "step": 139 + }, + { + "epoch": 2.83, + "learning_rate": 0.00018214285714285714, + "loss": 1.8088, + "step": 140 + }, + { + "epoch": 2.85, + "learning_rate": 0.000182015306122449, + "loss": 1.7301, + "step": 141 + }, + { + "epoch": 2.87, + "learning_rate": 0.00018188775510204083, + "loss": 1.6853, + "step": 142 + }, + { + "epoch": 2.89, + "learning_rate": 0.00018176020408163266, + "loss": 1.6966, + "step": 143 + }, + { + "epoch": 2.91, + "learning_rate": 0.0001816326530612245, + "loss": 1.7938, + "step": 144 + }, + { + "epoch": 2.93, + "learning_rate": 0.00018150510204081635, + "loss": 
1.7639, + "step": 145 + }, + { + "epoch": 2.95, + "learning_rate": 0.00018137755102040816, + "loss": 1.7527, + "step": 146 + }, + { + "epoch": 2.97, + "learning_rate": 0.00018125000000000001, + "loss": 1.7386, + "step": 147 + }, + { + "epoch": 2.99, + "learning_rate": 0.00018112244897959185, + "loss": 1.7223, + "step": 148 + }, + { + "epoch": 3.01, + "learning_rate": 0.00018099489795918368, + "loss": 1.7571, + "step": 149 + }, + { + "epoch": 3.04, + "learning_rate": 0.0001808673469387755, + "loss": 1.7054, + "step": 150 + }, + { + "epoch": 3.06, + "learning_rate": 0.00018073979591836737, + "loss": 1.6581, + "step": 151 + }, + { + "epoch": 3.08, + "learning_rate": 0.00018061224489795917, + "loss": 1.681, + "step": 152 + }, + { + "epoch": 3.1, + "learning_rate": 0.00018048469387755103, + "loss": 1.7425, + "step": 153 + }, + { + "epoch": 3.12, + "learning_rate": 0.00018035714285714286, + "loss": 1.7108, + "step": 154 + }, + { + "epoch": 3.14, + "learning_rate": 0.00018022959183673472, + "loss": 1.7194, + "step": 155 + }, + { + "epoch": 3.16, + "learning_rate": 0.00018010204081632655, + "loss": 1.6953, + "step": 156 + }, + { + "epoch": 3.18, + "learning_rate": 0.00017997448979591838, + "loss": 1.669, + "step": 157 + }, + { + "epoch": 3.2, + "learning_rate": 0.0001798469387755102, + "loss": 1.744, + "step": 158 + }, + { + "epoch": 3.22, + "learning_rate": 0.00017971938775510204, + "loss": 1.6467, + "step": 159 + }, + { + "epoch": 3.24, + "learning_rate": 0.0001795918367346939, + "loss": 1.7103, + "step": 160 + }, + { + "epoch": 3.26, + "learning_rate": 0.00017946428571428573, + "loss": 1.6662, + "step": 161 + }, + { + "epoch": 3.28, + "learning_rate": 0.00017933673469387756, + "loss": 1.6657, + "step": 162 + }, + { + "epoch": 3.3, + "learning_rate": 0.0001792091836734694, + "loss": 1.791, + "step": 163 + }, + { + "epoch": 3.32, + "learning_rate": 0.00017908163265306125, + "loss": 1.7704, + "step": 164 + }, + { + "epoch": 3.34, + "learning_rate": 0.00017895408163265305, + "loss": 1.7229, + "step": 165 + }, + { + "epoch": 3.36, + "learning_rate": 0.0001788265306122449, + "loss": 1.76, + "step": 166 + }, + { + "epoch": 3.38, + "learning_rate": 0.00017869897959183674, + "loss": 1.6482, + "step": 167 + }, + { + "epoch": 3.4, + "learning_rate": 0.0001785714285714286, + "loss": 1.8076, + "step": 168 + }, + { + "epoch": 3.42, + "learning_rate": 0.0001784438775510204, + "loss": 1.7368, + "step": 169 + }, + { + "epoch": 3.44, + "learning_rate": 0.00017831632653061226, + "loss": 1.6264, + "step": 170 + }, + { + "epoch": 3.46, + "learning_rate": 0.0001781887755102041, + "loss": 1.6289, + "step": 171 + }, + { + "epoch": 3.48, + "learning_rate": 0.00017806122448979592, + "loss": 1.7913, + "step": 172 + }, + { + "epoch": 3.5, + "learning_rate": 0.00017793367346938775, + "loss": 1.6985, + "step": 173 + }, + { + "epoch": 3.52, + "learning_rate": 0.0001778061224489796, + "loss": 1.6936, + "step": 174 + }, + { + "epoch": 3.54, + "learning_rate": 0.00017767857142857141, + "loss": 1.8068, + "step": 175 + }, + { + "epoch": 3.56, + "learning_rate": 0.00017755102040816327, + "loss": 1.7243, + "step": 176 + }, + { + "epoch": 3.58, + "learning_rate": 0.0001774234693877551, + "loss": 1.6893, + "step": 177 + }, + { + "epoch": 3.6, + "learning_rate": 0.00017729591836734696, + "loss": 1.8122, + "step": 178 + }, + { + "epoch": 3.62, + "learning_rate": 0.0001771683673469388, + "loss": 1.6562, + "step": 179 + }, + { + "epoch": 3.64, + "learning_rate": 0.00017704081632653062, + "loss": 1.6999, + "step": 180 + }, + { + "epoch": 
3.66, + "learning_rate": 0.00017691326530612245, + "loss": 1.7229, + "step": 181 + }, + { + "epoch": 3.68, + "learning_rate": 0.00017678571428571428, + "loss": 1.6764, + "step": 182 + }, + { + "epoch": 3.7, + "learning_rate": 0.00017665816326530614, + "loss": 1.6982, + "step": 183 + }, + { + "epoch": 3.72, + "learning_rate": 0.00017653061224489797, + "loss": 1.696, + "step": 184 + }, + { + "epoch": 3.74, + "learning_rate": 0.0001764030612244898, + "loss": 1.6797, + "step": 185 + }, + { + "epoch": 3.76, + "learning_rate": 0.00017627551020408164, + "loss": 1.637, + "step": 186 + }, + { + "epoch": 3.78, + "learning_rate": 0.0001761479591836735, + "loss": 1.7074, + "step": 187 + }, + { + "epoch": 3.8, + "learning_rate": 0.0001760204081632653, + "loss": 1.705, + "step": 188 + }, + { + "epoch": 3.82, + "learning_rate": 0.00017589285714285716, + "loss": 1.6153, + "step": 189 + }, + { + "epoch": 3.84, + "learning_rate": 0.00017576530612244899, + "loss": 1.7354, + "step": 190 + }, + { + "epoch": 3.86, + "learning_rate": 0.00017563775510204084, + "loss": 1.6941, + "step": 191 + }, + { + "epoch": 3.88, + "learning_rate": 0.00017551020408163265, + "loss": 1.7231, + "step": 192 + }, + { + "epoch": 3.91, + "learning_rate": 0.0001753826530612245, + "loss": 1.7663, + "step": 193 + }, + { + "epoch": 3.93, + "learning_rate": 0.00017525510204081634, + "loss": 1.6532, + "step": 194 + }, + { + "epoch": 3.95, + "learning_rate": 0.00017512755102040817, + "loss": 1.7115, + "step": 195 + }, + { + "epoch": 3.97, + "learning_rate": 0.000175, + "loss": 1.6955, + "step": 196 + }, + { + "epoch": 3.99, + "learning_rate": 0.00017487244897959186, + "loss": 1.6863, + "step": 197 + }, + { + "epoch": 4.01, + "learning_rate": 0.00017474489795918366, + "loss": 1.7012, + "step": 198 + }, + { + "epoch": 4.03, + "learning_rate": 0.00017461734693877552, + "loss": 1.5927, + "step": 199 + }, + { + "epoch": 4.05, + "learning_rate": 0.00017448979591836735, + "loss": 1.6272, + "step": 200 + }, + { + "epoch": 4.07, + "learning_rate": 0.00017436224489795918, + "loss": 1.5994, + "step": 201 + }, + { + "epoch": 4.09, + "learning_rate": 0.00017423469387755104, + "loss": 1.7141, + "step": 202 + }, + { + "epoch": 4.11, + "learning_rate": 0.00017410714285714287, + "loss": 1.7547, + "step": 203 + }, + { + "epoch": 4.13, + "learning_rate": 0.0001739795918367347, + "loss": 1.6254, + "step": 204 + }, + { + "epoch": 4.15, + "learning_rate": 0.00017385204081632653, + "loss": 1.6686, + "step": 205 + }, + { + "epoch": 4.17, + "learning_rate": 0.0001737244897959184, + "loss": 1.6684, + "step": 206 + }, + { + "epoch": 4.19, + "learning_rate": 0.00017359693877551022, + "loss": 1.6724, + "step": 207 + }, + { + "epoch": 4.21, + "learning_rate": 0.00017346938775510205, + "loss": 1.7361, + "step": 208 + }, + { + "epoch": 4.23, + "learning_rate": 0.00017334183673469388, + "loss": 1.7167, + "step": 209 + }, + { + "epoch": 4.25, + "learning_rate": 0.00017321428571428574, + "loss": 1.7226, + "step": 210 + }, + { + "epoch": 4.27, + "learning_rate": 0.00017308673469387754, + "loss": 1.7133, + "step": 211 + }, + { + "epoch": 4.29, + "learning_rate": 0.0001729591836734694, + "loss": 1.649, + "step": 212 + }, + { + "epoch": 4.31, + "learning_rate": 0.00017283163265306123, + "loss": 1.7104, + "step": 213 + }, + { + "epoch": 4.33, + "learning_rate": 0.00017270408163265306, + "loss": 1.6861, + "step": 214 + }, + { + "epoch": 4.35, + "learning_rate": 0.0001725765306122449, + "loss": 1.648, + "step": 215 + }, + { + "epoch": 4.37, + "learning_rate": 0.00017244897959183675, 
+ "loss": 1.6215, + "step": 216 + }, + { + "epoch": 4.39, + "learning_rate": 0.00017232142857142858, + "loss": 1.6334, + "step": 217 + }, + { + "epoch": 4.41, + "learning_rate": 0.0001721938775510204, + "loss": 1.6283, + "step": 218 + }, + { + "epoch": 4.43, + "learning_rate": 0.00017206632653061224, + "loss": 1.6462, + "step": 219 + }, + { + "epoch": 4.45, + "learning_rate": 0.0001719387755102041, + "loss": 1.7233, + "step": 220 + }, + { + "epoch": 4.47, + "learning_rate": 0.0001718112244897959, + "loss": 1.7839, + "step": 221 + }, + { + "epoch": 4.49, + "learning_rate": 0.00017168367346938776, + "loss": 1.7204, + "step": 222 + }, + { + "epoch": 4.51, + "learning_rate": 0.0001715561224489796, + "loss": 1.7671, + "step": 223 + }, + { + "epoch": 4.53, + "learning_rate": 0.00017142857142857143, + "loss": 1.6824, + "step": 224 + }, + { + "epoch": 4.55, + "learning_rate": 0.00017130102040816328, + "loss": 1.7068, + "step": 225 + }, + { + "epoch": 4.57, + "learning_rate": 0.00017117346938775511, + "loss": 1.6515, + "step": 226 + }, + { + "epoch": 4.59, + "learning_rate": 0.00017104591836734694, + "loss": 1.6586, + "step": 227 + }, + { + "epoch": 4.61, + "learning_rate": 0.00017091836734693878, + "loss": 1.6355, + "step": 228 + }, + { + "epoch": 4.63, + "learning_rate": 0.00017079081632653063, + "loss": 1.7173, + "step": 229 + }, + { + "epoch": 4.65, + "learning_rate": 0.00017066326530612246, + "loss": 1.6585, + "step": 230 + }, + { + "epoch": 4.67, + "learning_rate": 0.0001705357142857143, + "loss": 1.5856, + "step": 231 + }, + { + "epoch": 4.69, + "learning_rate": 0.00017040816326530613, + "loss": 1.5923, + "step": 232 + }, + { + "epoch": 4.71, + "learning_rate": 0.00017028061224489798, + "loss": 1.7128, + "step": 233 + }, + { + "epoch": 4.73, + "learning_rate": 0.0001701530612244898, + "loss": 1.6971, + "step": 234 + }, + { + "epoch": 4.75, + "learning_rate": 0.00017002551020408165, + "loss": 1.6416, + "step": 235 + }, + { + "epoch": 4.78, + "learning_rate": 0.00016989795918367348, + "loss": 1.645, + "step": 236 + }, + { + "epoch": 4.8, + "learning_rate": 0.0001697704081632653, + "loss": 1.6792, + "step": 237 + }, + { + "epoch": 4.82, + "learning_rate": 0.00016964285714285714, + "loss": 1.6522, + "step": 238 + }, + { + "epoch": 4.84, + "learning_rate": 0.000169515306122449, + "loss": 1.6315, + "step": 239 + }, + { + "epoch": 4.86, + "learning_rate": 0.00016938775510204083, + "loss": 1.6622, + "step": 240 + }, + { + "epoch": 4.88, + "learning_rate": 0.00016926020408163266, + "loss": 1.6566, + "step": 241 + }, + { + "epoch": 4.9, + "learning_rate": 0.0001691326530612245, + "loss": 1.7141, + "step": 242 + }, + { + "epoch": 4.92, + "learning_rate": 0.00016900510204081635, + "loss": 1.5873, + "step": 243 + }, + { + "epoch": 4.94, + "learning_rate": 0.00016887755102040818, + "loss": 1.6571, + "step": 244 + }, + { + "epoch": 4.96, + "learning_rate": 0.00016875, + "loss": 1.6829, + "step": 245 + }, + { + "epoch": 4.98, + "learning_rate": 0.00016862244897959184, + "loss": 1.6935, + "step": 246 + }, + { + "epoch": 5.0, + "learning_rate": 0.00016849489795918367, + "loss": 1.6782, + "step": 247 + }, + { + "epoch": 5.02, + "learning_rate": 0.00016836734693877553, + "loss": 1.622, + "step": 248 + }, + { + "epoch": 5.04, + "learning_rate": 0.00016823979591836736, + "loss": 1.6596, + "step": 249 + }, + { + "epoch": 5.06, + "learning_rate": 0.0001681122448979592, + "loss": 1.5821, + "step": 250 + }, + { + "epoch": 5.08, + "learning_rate": 0.00016798469387755102, + "loss": 1.7292, + "step": 251 + }, + { + 
"epoch": 5.1, + "learning_rate": 0.00016785714285714288, + "loss": 1.646, + "step": 252 + }, + { + "epoch": 5.12, + "learning_rate": 0.0001677295918367347, + "loss": 1.6969, + "step": 253 + }, + { + "epoch": 5.14, + "learning_rate": 0.00016760204081632654, + "loss": 1.6082, + "step": 254 + }, + { + "epoch": 5.16, + "learning_rate": 0.00016747448979591837, + "loss": 1.5843, + "step": 255 + }, + { + "epoch": 5.18, + "learning_rate": 0.00016734693877551023, + "loss": 1.6827, + "step": 256 + }, + { + "epoch": 5.2, + "learning_rate": 0.00016721938775510203, + "loss": 1.5824, + "step": 257 + }, + { + "epoch": 5.22, + "learning_rate": 0.0001670918367346939, + "loss": 1.6795, + "step": 258 + }, + { + "epoch": 5.24, + "learning_rate": 0.00016696428571428572, + "loss": 1.5639, + "step": 259 + }, + { + "epoch": 5.26, + "learning_rate": 0.00016683673469387755, + "loss": 1.592, + "step": 260 + }, + { + "epoch": 5.28, + "learning_rate": 0.00016670918367346938, + "loss": 1.65, + "step": 261 + }, + { + "epoch": 5.3, + "learning_rate": 0.00016658163265306124, + "loss": 1.5592, + "step": 262 + }, + { + "epoch": 5.32, + "learning_rate": 0.00016645408163265305, + "loss": 1.5091, + "step": 263 + }, + { + "epoch": 5.34, + "learning_rate": 0.0001663265306122449, + "loss": 1.6138, + "step": 264 + }, + { + "epoch": 5.36, + "learning_rate": 0.00016619897959183673, + "loss": 1.625, + "step": 265 + }, + { + "epoch": 5.38, + "learning_rate": 0.0001660714285714286, + "loss": 1.5757, + "step": 266 + }, + { + "epoch": 5.4, + "learning_rate": 0.00016594387755102042, + "loss": 1.6372, + "step": 267 + }, + { + "epoch": 5.42, + "learning_rate": 0.00016581632653061225, + "loss": 1.5891, + "step": 268 + }, + { + "epoch": 5.44, + "learning_rate": 0.00016568877551020409, + "loss": 1.6893, + "step": 269 + }, + { + "epoch": 5.46, + "learning_rate": 0.00016556122448979592, + "loss": 1.6662, + "step": 270 + }, + { + "epoch": 5.48, + "learning_rate": 0.00016543367346938777, + "loss": 1.7132, + "step": 271 + }, + { + "epoch": 5.5, + "learning_rate": 0.0001653061224489796, + "loss": 1.5835, + "step": 272 + }, + { + "epoch": 5.52, + "learning_rate": 0.00016517857142857144, + "loss": 1.6342, + "step": 273 + }, + { + "epoch": 5.54, + "learning_rate": 0.00016505102040816327, + "loss": 1.6717, + "step": 274 + }, + { + "epoch": 5.56, + "learning_rate": 0.00016492346938775512, + "loss": 1.6248, + "step": 275 + }, + { + "epoch": 5.58, + "learning_rate": 0.00016479591836734696, + "loss": 1.6117, + "step": 276 + }, + { + "epoch": 5.6, + "learning_rate": 0.0001646683673469388, + "loss": 1.6798, + "step": 277 + }, + { + "epoch": 5.63, + "learning_rate": 0.00016454081632653062, + "loss": 1.6406, + "step": 278 + }, + { + "epoch": 5.65, + "learning_rate": 0.00016441326530612248, + "loss": 1.6512, + "step": 279 + }, + { + "epoch": 5.67, + "learning_rate": 0.00016428571428571428, + "loss": 1.6102, + "step": 280 + }, + { + "epoch": 5.69, + "learning_rate": 0.00016415816326530614, + "loss": 1.6113, + "step": 281 + }, + { + "epoch": 5.71, + "learning_rate": 0.00016403061224489797, + "loss": 1.7116, + "step": 282 + }, + { + "epoch": 5.73, + "learning_rate": 0.0001639030612244898, + "loss": 1.6846, + "step": 283 + }, + { + "epoch": 5.75, + "learning_rate": 0.00016377551020408163, + "loss": 1.6911, + "step": 284 + }, + { + "epoch": 5.77, + "learning_rate": 0.0001636479591836735, + "loss": 1.6202, + "step": 285 + }, + { + "epoch": 5.79, + "learning_rate": 0.0001635204081632653, + "loss": 1.5715, + "step": 286 + }, + { + "epoch": 5.81, + "learning_rate": 
0.00016339285714285715, + "loss": 1.6461, + "step": 287 + }, + { + "epoch": 5.83, + "learning_rate": 0.00016326530612244898, + "loss": 1.6624, + "step": 288 + }, + { + "epoch": 5.85, + "learning_rate": 0.00016313775510204084, + "loss": 1.6535, + "step": 289 + }, + { + "epoch": 5.87, + "learning_rate": 0.00016301020408163267, + "loss": 1.6275, + "step": 290 + }, + { + "epoch": 5.89, + "learning_rate": 0.0001628826530612245, + "loss": 1.6636, + "step": 291 + }, + { + "epoch": 5.91, + "learning_rate": 0.00016275510204081633, + "loss": 1.6546, + "step": 292 + }, + { + "epoch": 5.93, + "learning_rate": 0.00016262755102040816, + "loss": 1.7274, + "step": 293 + }, + { + "epoch": 5.95, + "learning_rate": 0.00016250000000000002, + "loss": 1.5901, + "step": 294 + }, + { + "epoch": 5.97, + "learning_rate": 0.00016237244897959185, + "loss": 1.6046, + "step": 295 + }, + { + "epoch": 5.99, + "learning_rate": 0.00016224489795918368, + "loss": 1.5828, + "step": 296 + }, + { + "epoch": 6.01, + "learning_rate": 0.0001621173469387755, + "loss": 1.6435, + "step": 297 + }, + { + "epoch": 6.03, + "learning_rate": 0.00016198979591836737, + "loss": 1.6263, + "step": 298 + }, + { + "epoch": 6.05, + "learning_rate": 0.00016186224489795917, + "loss": 1.4944, + "step": 299 + }, + { + "epoch": 6.07, + "learning_rate": 0.00016173469387755103, + "loss": 1.6286, + "step": 300 + }, + { + "epoch": 6.09, + "learning_rate": 0.00016160714285714286, + "loss": 1.694, + "step": 301 + }, + { + "epoch": 6.11, + "learning_rate": 0.00016147959183673472, + "loss": 1.6197, + "step": 302 + }, + { + "epoch": 6.13, + "learning_rate": 0.00016135204081632652, + "loss": 1.5597, + "step": 303 + }, + { + "epoch": 6.15, + "learning_rate": 0.00016122448979591838, + "loss": 1.5487, + "step": 304 + }, + { + "epoch": 6.17, + "learning_rate": 0.0001610969387755102, + "loss": 1.5769, + "step": 305 + }, + { + "epoch": 6.19, + "learning_rate": 0.00016096938775510204, + "loss": 1.6367, + "step": 306 + }, + { + "epoch": 6.21, + "learning_rate": 0.00016084183673469388, + "loss": 1.583, + "step": 307 + }, + { + "epoch": 6.23, + "learning_rate": 0.00016071428571428573, + "loss": 1.6201, + "step": 308 + }, + { + "epoch": 6.25, + "learning_rate": 0.00016058673469387754, + "loss": 1.6586, + "step": 309 + }, + { + "epoch": 6.27, + "learning_rate": 0.0001604591836734694, + "loss": 1.6711, + "step": 310 + }, + { + "epoch": 6.29, + "learning_rate": 0.00016033163265306123, + "loss": 1.6402, + "step": 311 + }, + { + "epoch": 6.31, + "learning_rate": 0.00016020408163265306, + "loss": 1.5247, + "step": 312 + }, + { + "epoch": 6.33, + "learning_rate": 0.00016007653061224491, + "loss": 1.5356, + "step": 313 + }, + { + "epoch": 6.35, + "learning_rate": 0.00015994897959183675, + "loss": 1.564, + "step": 314 + }, + { + "epoch": 6.37, + "learning_rate": 0.00015982142857142858, + "loss": 1.563, + "step": 315 + }, + { + "epoch": 6.39, + "learning_rate": 0.0001596938775510204, + "loss": 1.5198, + "step": 316 + }, + { + "epoch": 6.41, + "learning_rate": 0.00015956632653061227, + "loss": 1.6558, + "step": 317 + }, + { + "epoch": 6.43, + "learning_rate": 0.0001594387755102041, + "loss": 1.5534, + "step": 318 + }, + { + "epoch": 6.45, + "learning_rate": 0.00015931122448979593, + "loss": 1.6239, + "step": 319 + }, + { + "epoch": 6.47, + "learning_rate": 0.00015918367346938776, + "loss": 1.5645, + "step": 320 + }, + { + "epoch": 6.5, + "learning_rate": 0.00015905612244897962, + "loss": 1.5713, + "step": 321 + }, + { + "epoch": 6.52, + "learning_rate": 0.00015892857142857142, + 
"loss": 1.6176, + "step": 322 + }, + { + "epoch": 6.54, + "learning_rate": 0.00015880102040816328, + "loss": 1.502, + "step": 323 + }, + { + "epoch": 6.56, + "learning_rate": 0.0001586734693877551, + "loss": 1.645, + "step": 324 + }, + { + "epoch": 6.58, + "learning_rate": 0.00015854591836734697, + "loss": 1.5904, + "step": 325 + }, + { + "epoch": 6.6, + "learning_rate": 0.00015841836734693877, + "loss": 1.6149, + "step": 326 + }, + { + "epoch": 6.62, + "learning_rate": 0.00015829081632653063, + "loss": 1.6757, + "step": 327 + }, + { + "epoch": 6.64, + "learning_rate": 0.00015816326530612246, + "loss": 1.541, + "step": 328 + }, + { + "epoch": 6.66, + "learning_rate": 0.0001580357142857143, + "loss": 1.5898, + "step": 329 + }, + { + "epoch": 6.68, + "learning_rate": 0.00015790816326530612, + "loss": 1.5441, + "step": 330 + }, + { + "epoch": 6.7, + "learning_rate": 0.00015778061224489798, + "loss": 1.61, + "step": 331 + }, + { + "epoch": 6.72, + "learning_rate": 0.00015765306122448978, + "loss": 1.615, + "step": 332 + }, + { + "epoch": 6.74, + "learning_rate": 0.00015752551020408164, + "loss": 1.6575, + "step": 333 + }, + { + "epoch": 6.76, + "learning_rate": 0.00015739795918367347, + "loss": 1.6702, + "step": 334 + }, + { + "epoch": 6.78, + "learning_rate": 0.0001572704081632653, + "loss": 1.6009, + "step": 335 + }, + { + "epoch": 6.8, + "learning_rate": 0.00015714285714285716, + "loss": 1.5568, + "step": 336 + }, + { + "epoch": 6.82, + "learning_rate": 0.000157015306122449, + "loss": 1.619, + "step": 337 + }, + { + "epoch": 6.84, + "learning_rate": 0.00015688775510204082, + "loss": 1.5563, + "step": 338 + }, + { + "epoch": 6.86, + "learning_rate": 0.00015676020408163265, + "loss": 1.6328, + "step": 339 + }, + { + "epoch": 6.88, + "learning_rate": 0.0001566326530612245, + "loss": 1.5726, + "step": 340 + }, + { + "epoch": 6.9, + "learning_rate": 0.00015650510204081634, + "loss": 1.6199, + "step": 341 + }, + { + "epoch": 6.92, + "learning_rate": 0.00015637755102040817, + "loss": 1.5722, + "step": 342 + }, + { + "epoch": 6.94, + "learning_rate": 0.00015625, + "loss": 1.5685, + "step": 343 + }, + { + "epoch": 6.96, + "learning_rate": 0.00015612244897959186, + "loss": 1.5615, + "step": 344 + }, + { + "epoch": 6.98, + "learning_rate": 0.00015599489795918366, + "loss": 1.5994, + "step": 345 + }, + { + "epoch": 7.0, + "learning_rate": 0.00015586734693877552, + "loss": 1.5579, + "step": 346 + }, + { + "epoch": 7.02, + "learning_rate": 0.00015573979591836735, + "loss": 1.547, + "step": 347 + }, + { + "epoch": 7.04, + "learning_rate": 0.00015561224489795918, + "loss": 1.5292, + "step": 348 + }, + { + "epoch": 7.06, + "learning_rate": 0.00015548469387755102, + "loss": 1.6032, + "step": 349 + }, + { + "epoch": 7.08, + "learning_rate": 0.00015535714285714287, + "loss": 1.5149, + "step": 350 + }, + { + "epoch": 7.1, + "learning_rate": 0.0001552295918367347, + "loss": 1.6093, + "step": 351 + }, + { + "epoch": 7.12, + "learning_rate": 0.00015510204081632654, + "loss": 1.5421, + "step": 352 + }, + { + "epoch": 7.14, + "learning_rate": 0.00015497448979591837, + "loss": 1.5733, + "step": 353 + }, + { + "epoch": 7.16, + "learning_rate": 0.00015484693877551022, + "loss": 1.5703, + "step": 354 + }, + { + "epoch": 7.18, + "learning_rate": 0.00015471938775510203, + "loss": 1.6141, + "step": 355 + }, + { + "epoch": 7.2, + "learning_rate": 0.00015459183673469389, + "loss": 1.5526, + "step": 356 + }, + { + "epoch": 7.22, + "learning_rate": 0.00015446428571428572, + "loss": 1.5347, + "step": 357 + }, + { + "epoch": 
7.24, + "learning_rate": 0.00015433673469387755, + "loss": 1.5682, + "step": 358 + }, + { + "epoch": 7.26, + "learning_rate": 0.0001542091836734694, + "loss": 1.5292, + "step": 359 + }, + { + "epoch": 7.28, + "learning_rate": 0.00015408163265306124, + "loss": 1.499, + "step": 360 + }, + { + "epoch": 7.3, + "learning_rate": 0.00015395408163265307, + "loss": 1.5624, + "step": 361 + }, + { + "epoch": 7.32, + "learning_rate": 0.0001538265306122449, + "loss": 1.627, + "step": 362 + }, + { + "epoch": 7.34, + "learning_rate": 0.00015369897959183676, + "loss": 1.5327, + "step": 363 + }, + { + "epoch": 7.37, + "learning_rate": 0.0001535714285714286, + "loss": 1.5622, + "step": 364 + }, + { + "epoch": 7.39, + "learning_rate": 0.00015344387755102042, + "loss": 1.5659, + "step": 365 + }, + { + "epoch": 7.41, + "learning_rate": 0.00015331632653061225, + "loss": 1.5019, + "step": 366 + }, + { + "epoch": 7.43, + "learning_rate": 0.0001531887755102041, + "loss": 1.5921, + "step": 367 + }, + { + "epoch": 7.45, + "learning_rate": 0.0001530612244897959, + "loss": 1.5914, + "step": 368 + }, + { + "epoch": 7.47, + "learning_rate": 0.00015293367346938777, + "loss": 1.5045, + "step": 369 + }, + { + "epoch": 7.49, + "learning_rate": 0.0001528061224489796, + "loss": 1.6209, + "step": 370 + }, + { + "epoch": 7.51, + "learning_rate": 0.00015267857142857143, + "loss": 1.5198, + "step": 371 + }, + { + "epoch": 7.53, + "learning_rate": 0.00015255102040816326, + "loss": 1.5363, + "step": 372 + }, + { + "epoch": 7.55, + "learning_rate": 0.00015242346938775512, + "loss": 1.5391, + "step": 373 + }, + { + "epoch": 7.57, + "learning_rate": 0.00015229591836734695, + "loss": 1.4546, + "step": 374 + }, + { + "epoch": 7.59, + "learning_rate": 0.00015216836734693878, + "loss": 1.5546, + "step": 375 + }, + { + "epoch": 7.61, + "learning_rate": 0.0001520408163265306, + "loss": 1.5629, + "step": 376 + }, + { + "epoch": 7.63, + "learning_rate": 0.00015191326530612247, + "loss": 1.6002, + "step": 377 + }, + { + "epoch": 7.65, + "learning_rate": 0.00015178571428571427, + "loss": 1.5543, + "step": 378 + }, + { + "epoch": 7.67, + "learning_rate": 0.00015165816326530613, + "loss": 1.5925, + "step": 379 + }, + { + "epoch": 7.69, + "learning_rate": 0.00015153061224489796, + "loss": 1.5631, + "step": 380 + }, + { + "epoch": 7.71, + "learning_rate": 0.0001514030612244898, + "loss": 1.5677, + "step": 381 + }, + { + "epoch": 7.73, + "learning_rate": 0.00015127551020408165, + "loss": 1.5828, + "step": 382 + }, + { + "epoch": 7.75, + "learning_rate": 0.00015114795918367348, + "loss": 1.6494, + "step": 383 + }, + { + "epoch": 7.77, + "learning_rate": 0.0001510204081632653, + "loss": 1.553, + "step": 384 + }, + { + "epoch": 7.79, + "learning_rate": 0.00015089285714285714, + "loss": 1.6156, + "step": 385 + }, + { + "epoch": 7.81, + "learning_rate": 0.000150765306122449, + "loss": 1.5001, + "step": 386 + }, + { + "epoch": 7.83, + "learning_rate": 0.00015063775510204083, + "loss": 1.5321, + "step": 387 + }, + { + "epoch": 7.85, + "learning_rate": 0.00015051020408163266, + "loss": 1.5307, + "step": 388 + }, + { + "epoch": 7.87, + "learning_rate": 0.0001503826530612245, + "loss": 1.5639, + "step": 389 + }, + { + "epoch": 7.89, + "learning_rate": 0.00015025510204081635, + "loss": 1.517, + "step": 390 + }, + { + "epoch": 7.91, + "learning_rate": 0.00015012755102040816, + "loss": 1.4776, + "step": 391 + }, + { + "epoch": 7.93, + "learning_rate": 0.00015000000000000001, + "loss": 1.5368, + "step": 392 + }, + { + "epoch": 7.95, + "learning_rate": 
0.00014987244897959184, + "loss": 1.5636, + "step": 393 + }, + { + "epoch": 7.97, + "learning_rate": 0.00014974489795918368, + "loss": 1.6004, + "step": 394 + }, + { + "epoch": 7.99, + "learning_rate": 0.0001496173469387755, + "loss": 1.5524, + "step": 395 + }, + { + "epoch": 8.01, + "learning_rate": 0.00014948979591836736, + "loss": 1.5307, + "step": 396 + }, + { + "epoch": 8.03, + "learning_rate": 0.00014936224489795917, + "loss": 1.5123, + "step": 397 + }, + { + "epoch": 8.05, + "learning_rate": 0.00014923469387755103, + "loss": 1.5132, + "step": 398 + }, + { + "epoch": 8.07, + "learning_rate": 0.00014910714285714286, + "loss": 1.5109, + "step": 399 + }, + { + "epoch": 8.09, + "learning_rate": 0.00014897959183673472, + "loss": 1.5302, + "step": 400 + }, + { + "epoch": 8.11, + "learning_rate": 0.00014885204081632652, + "loss": 1.5238, + "step": 401 + }, + { + "epoch": 8.13, + "learning_rate": 0.00014872448979591838, + "loss": 1.4781, + "step": 402 + }, + { + "epoch": 8.15, + "learning_rate": 0.0001485969387755102, + "loss": 1.5446, + "step": 403 + }, + { + "epoch": 8.17, + "learning_rate": 0.00014846938775510204, + "loss": 1.5, + "step": 404 + }, + { + "epoch": 8.19, + "learning_rate": 0.0001483418367346939, + "loss": 1.5458, + "step": 405 + }, + { + "epoch": 8.21, + "learning_rate": 0.00014821428571428573, + "loss": 1.5257, + "step": 406 + }, + { + "epoch": 8.24, + "learning_rate": 0.00014808673469387756, + "loss": 1.4607, + "step": 407 + }, + { + "epoch": 8.26, + "learning_rate": 0.0001479591836734694, + "loss": 1.4282, + "step": 408 + }, + { + "epoch": 8.28, + "learning_rate": 0.00014783163265306125, + "loss": 1.4519, + "step": 409 + }, + { + "epoch": 8.3, + "learning_rate": 0.00014770408163265305, + "loss": 1.475, + "step": 410 + }, + { + "epoch": 8.32, + "learning_rate": 0.0001475765306122449, + "loss": 1.5425, + "step": 411 + }, + { + "epoch": 8.34, + "learning_rate": 0.00014744897959183674, + "loss": 1.5407, + "step": 412 + }, + { + "epoch": 8.36, + "learning_rate": 0.0001473214285714286, + "loss": 1.5698, + "step": 413 + }, + { + "epoch": 8.38, + "learning_rate": 0.0001471938775510204, + "loss": 1.4282, + "step": 414 + }, + { + "epoch": 8.4, + "learning_rate": 0.00014706632653061226, + "loss": 1.5301, + "step": 415 + }, + { + "epoch": 8.42, + "learning_rate": 0.0001469387755102041, + "loss": 1.5083, + "step": 416 + }, + { + "epoch": 8.44, + "learning_rate": 0.00014681122448979592, + "loss": 1.5712, + "step": 417 + }, + { + "epoch": 8.46, + "learning_rate": 0.00014668367346938775, + "loss": 1.4363, + "step": 418 + }, + { + "epoch": 8.48, + "learning_rate": 0.0001465561224489796, + "loss": 1.4463, + "step": 419 + }, + { + "epoch": 8.5, + "learning_rate": 0.00014642857142857141, + "loss": 1.4738, + "step": 420 + }, + { + "epoch": 8.52, + "learning_rate": 0.00014630102040816327, + "loss": 1.5396, + "step": 421 + }, + { + "epoch": 8.54, + "learning_rate": 0.0001461734693877551, + "loss": 1.4384, + "step": 422 + }, + { + "epoch": 8.56, + "learning_rate": 0.00014604591836734696, + "loss": 1.5345, + "step": 423 + }, + { + "epoch": 8.58, + "learning_rate": 0.0001459183673469388, + "loss": 1.5355, + "step": 424 + }, + { + "epoch": 8.6, + "learning_rate": 0.00014579081632653062, + "loss": 1.5188, + "step": 425 + }, + { + "epoch": 8.62, + "learning_rate": 0.00014566326530612245, + "loss": 1.5575, + "step": 426 + }, + { + "epoch": 8.64, + "learning_rate": 0.00014553571428571428, + "loss": 1.5279, + "step": 427 + }, + { + "epoch": 8.66, + "learning_rate": 0.00014540816326530614, + "loss": 
1.5484, + "step": 428 + }, + { + "epoch": 8.68, + "learning_rate": 0.00014528061224489797, + "loss": 1.4878, + "step": 429 + }, + { + "epoch": 8.7, + "learning_rate": 0.0001451530612244898, + "loss": 1.503, + "step": 430 + }, + { + "epoch": 8.72, + "learning_rate": 0.00014502551020408163, + "loss": 1.4723, + "step": 431 + }, + { + "epoch": 8.74, + "learning_rate": 0.0001448979591836735, + "loss": 1.5579, + "step": 432 + }, + { + "epoch": 8.76, + "learning_rate": 0.0001447704081632653, + "loss": 1.4789, + "step": 433 + }, + { + "epoch": 8.78, + "learning_rate": 0.00014464285714285715, + "loss": 1.5501, + "step": 434 + }, + { + "epoch": 8.8, + "learning_rate": 0.00014451530612244899, + "loss": 1.5204, + "step": 435 + }, + { + "epoch": 8.82, + "learning_rate": 0.00014438775510204084, + "loss": 1.5489, + "step": 436 + }, + { + "epoch": 8.84, + "learning_rate": 0.00014426020408163265, + "loss": 1.5464, + "step": 437 + }, + { + "epoch": 8.86, + "learning_rate": 0.0001441326530612245, + "loss": 1.5896, + "step": 438 + }, + { + "epoch": 8.88, + "learning_rate": 0.00014400510204081634, + "loss": 1.5465, + "step": 439 + }, + { + "epoch": 8.9, + "learning_rate": 0.00014387755102040817, + "loss": 1.5094, + "step": 440 + }, + { + "epoch": 8.92, + "learning_rate": 0.00014375, + "loss": 1.5144, + "step": 441 + }, + { + "epoch": 8.94, + "learning_rate": 0.00014362244897959186, + "loss": 1.4919, + "step": 442 + }, + { + "epoch": 8.96, + "learning_rate": 0.00014349489795918366, + "loss": 1.4702, + "step": 443 + }, + { + "epoch": 8.98, + "learning_rate": 0.00014336734693877552, + "loss": 1.4996, + "step": 444 + }, + { + "epoch": 9.0, + "learning_rate": 0.00014323979591836735, + "loss": 1.5503, + "step": 445 + }, + { + "epoch": 9.02, + "learning_rate": 0.00014311224489795918, + "loss": 1.4125, + "step": 446 + }, + { + "epoch": 9.04, + "learning_rate": 0.00014298469387755104, + "loss": 1.4722, + "step": 447 + }, + { + "epoch": 9.06, + "learning_rate": 0.00014285714285714287, + "loss": 1.5199, + "step": 448 + }, + { + "epoch": 9.09, + "learning_rate": 0.0001427295918367347, + "loss": 1.4571, + "step": 449 + }, + { + "epoch": 9.11, + "learning_rate": 0.00014260204081632653, + "loss": 1.4996, + "step": 450 + }, + { + "epoch": 9.13, + "learning_rate": 0.0001424744897959184, + "loss": 1.4092, + "step": 451 + }, + { + "epoch": 9.15, + "learning_rate": 0.00014234693877551022, + "loss": 1.4198, + "step": 452 + }, + { + "epoch": 9.17, + "learning_rate": 0.00014221938775510205, + "loss": 1.4916, + "step": 453 + }, + { + "epoch": 9.19, + "learning_rate": 0.00014209183673469388, + "loss": 1.5051, + "step": 454 + }, + { + "epoch": 9.21, + "learning_rate": 0.00014196428571428574, + "loss": 1.4321, + "step": 455 + }, + { + "epoch": 9.23, + "learning_rate": 0.00014183673469387754, + "loss": 1.4097, + "step": 456 + }, + { + "epoch": 9.25, + "learning_rate": 0.0001417091836734694, + "loss": 1.4853, + "step": 457 + }, + { + "epoch": 9.27, + "learning_rate": 0.00014158163265306123, + "loss": 1.4593, + "step": 458 + }, + { + "epoch": 9.29, + "learning_rate": 0.00014145408163265306, + "loss": 1.3729, + "step": 459 + }, + { + "epoch": 9.31, + "learning_rate": 0.0001413265306122449, + "loss": 1.4467, + "step": 460 + }, + { + "epoch": 9.33, + "learning_rate": 0.00014119897959183675, + "loss": 1.4467, + "step": 461 + }, + { + "epoch": 9.35, + "learning_rate": 0.00014107142857142858, + "loss": 1.4785, + "step": 462 + }, + { + "epoch": 9.37, + "learning_rate": 0.0001409438775510204, + "loss": 1.4089, + "step": 463 + }, + { + "epoch": 
9.39, + "learning_rate": 0.00014081632653061224, + "loss": 1.5026, + "step": 464 + }, + { + "epoch": 9.41, + "learning_rate": 0.0001406887755102041, + "loss": 1.4857, + "step": 465 + }, + { + "epoch": 9.43, + "learning_rate": 0.0001405612244897959, + "loss": 1.3745, + "step": 466 + }, + { + "epoch": 9.45, + "learning_rate": 0.00014043367346938776, + "loss": 1.4733, + "step": 467 + }, + { + "epoch": 9.47, + "learning_rate": 0.0001403061224489796, + "loss": 1.5212, + "step": 468 + }, + { + "epoch": 9.49, + "learning_rate": 0.00014017857142857142, + "loss": 1.5398, + "step": 469 + }, + { + "epoch": 9.51, + "learning_rate": 0.00014005102040816328, + "loss": 1.478, + "step": 470 + }, + { + "epoch": 9.53, + "learning_rate": 0.0001399234693877551, + "loss": 1.496, + "step": 471 + }, + { + "epoch": 9.55, + "learning_rate": 0.00013979591836734694, + "loss": 1.4837, + "step": 472 + }, + { + "epoch": 9.57, + "learning_rate": 0.00013966836734693878, + "loss": 1.4724, + "step": 473 + }, + { + "epoch": 9.59, + "learning_rate": 0.00013954081632653063, + "loss": 1.4828, + "step": 474 + }, + { + "epoch": 9.61, + "learning_rate": 0.00013941326530612246, + "loss": 1.5012, + "step": 475 + }, + { + "epoch": 9.63, + "learning_rate": 0.0001392857142857143, + "loss": 1.4879, + "step": 476 + }, + { + "epoch": 9.65, + "learning_rate": 0.00013915816326530613, + "loss": 1.4196, + "step": 477 + }, + { + "epoch": 9.67, + "learning_rate": 0.00013903061224489798, + "loss": 1.4915, + "step": 478 + }, + { + "epoch": 9.69, + "learning_rate": 0.0001389030612244898, + "loss": 1.3878, + "step": 479 + }, + { + "epoch": 9.71, + "learning_rate": 0.00013877551020408165, + "loss": 1.466, + "step": 480 + }, + { + "epoch": 9.73, + "learning_rate": 0.00013864795918367348, + "loss": 1.4582, + "step": 481 + }, + { + "epoch": 9.75, + "learning_rate": 0.0001385204081632653, + "loss": 1.533, + "step": 482 + }, + { + "epoch": 9.77, + "learning_rate": 0.00013839285714285714, + "loss": 1.4697, + "step": 483 + }, + { + "epoch": 9.79, + "learning_rate": 0.000138265306122449, + "loss": 1.3989, + "step": 484 + }, + { + "epoch": 9.81, + "learning_rate": 0.00013813775510204083, + "loss": 1.4361, + "step": 485 + }, + { + "epoch": 9.83, + "learning_rate": 0.00013801020408163266, + "loss": 1.5271, + "step": 486 + }, + { + "epoch": 9.85, + "learning_rate": 0.0001378826530612245, + "loss": 1.4905, + "step": 487 + }, + { + "epoch": 9.87, + "learning_rate": 0.00013775510204081635, + "loss": 1.4757, + "step": 488 + }, + { + "epoch": 9.89, + "learning_rate": 0.00013762755102040815, + "loss": 1.5485, + "step": 489 + }, + { + "epoch": 9.91, + "learning_rate": 0.0001375, + "loss": 1.4783, + "step": 490 + }, + { + "epoch": 9.93, + "learning_rate": 0.00013737244897959184, + "loss": 1.4849, + "step": 491 + }, + { + "epoch": 9.96, + "learning_rate": 0.00013724489795918367, + "loss": 1.5382, + "step": 492 + }, + { + "epoch": 9.98, + "learning_rate": 0.00013711734693877553, + "loss": 1.4902, + "step": 493 + }, + { + "epoch": 10.0, + "learning_rate": 0.00013698979591836736, + "loss": 1.4865, + "step": 494 + }, + { + "epoch": 10.02, + "learning_rate": 0.0001368622448979592, + "loss": 1.4436, + "step": 495 + }, + { + "epoch": 10.04, + "learning_rate": 0.00013673469387755102, + "loss": 1.408, + "step": 496 + }, + { + "epoch": 10.06, + "learning_rate": 0.00013660714285714288, + "loss": 1.4764, + "step": 497 + }, + { + "epoch": 10.08, + "learning_rate": 0.0001364795918367347, + "loss": 1.4646, + "step": 498 + }, + { + "epoch": 10.1, + "learning_rate": 
0.00013635204081632654, + "loss": 1.406, + "step": 499 + }, + { + "epoch": 10.12, + "learning_rate": 0.00013622448979591837, + "loss": 1.4785, + "step": 500 + }, + { + "epoch": 10.14, + "learning_rate": 0.00013609693877551023, + "loss": 1.4117, + "step": 501 + }, + { + "epoch": 10.16, + "learning_rate": 0.00013596938775510203, + "loss": 1.4108, + "step": 502 + }, + { + "epoch": 10.18, + "learning_rate": 0.0001358418367346939, + "loss": 1.4155, + "step": 503 + }, + { + "epoch": 10.2, + "learning_rate": 0.00013571428571428572, + "loss": 1.4021, + "step": 504 + }, + { + "epoch": 10.22, + "learning_rate": 0.00013558673469387755, + "loss": 1.411, + "step": 505 + }, + { + "epoch": 10.24, + "learning_rate": 0.00013545918367346938, + "loss": 1.3851, + "step": 506 + }, + { + "epoch": 10.26, + "learning_rate": 0.00013533163265306124, + "loss": 1.387, + "step": 507 + }, + { + "epoch": 10.28, + "learning_rate": 0.00013520408163265305, + "loss": 1.4163, + "step": 508 + }, + { + "epoch": 10.3, + "learning_rate": 0.0001350765306122449, + "loss": 1.3343, + "step": 509 + }, + { + "epoch": 10.32, + "learning_rate": 0.00013494897959183673, + "loss": 1.4811, + "step": 510 + }, + { + "epoch": 10.34, + "learning_rate": 0.0001348214285714286, + "loss": 1.4086, + "step": 511 + }, + { + "epoch": 10.36, + "learning_rate": 0.0001346938775510204, + "loss": 1.3879, + "step": 512 + }, + { + "epoch": 10.38, + "learning_rate": 0.00013456632653061225, + "loss": 1.4204, + "step": 513 + }, + { + "epoch": 10.4, + "learning_rate": 0.00013443877551020408, + "loss": 1.4158, + "step": 514 + }, + { + "epoch": 10.42, + "learning_rate": 0.00013431122448979592, + "loss": 1.4521, + "step": 515 + }, + { + "epoch": 10.44, + "learning_rate": 0.00013418367346938777, + "loss": 1.4196, + "step": 516 + }, + { + "epoch": 10.46, + "learning_rate": 0.0001340561224489796, + "loss": 1.4361, + "step": 517 + }, + { + "epoch": 10.48, + "learning_rate": 0.00013392857142857144, + "loss": 1.4482, + "step": 518 + }, + { + "epoch": 10.5, + "learning_rate": 0.00013380102040816327, + "loss": 1.4801, + "step": 519 + }, + { + "epoch": 10.52, + "learning_rate": 0.00013367346938775512, + "loss": 1.4556, + "step": 520 + }, + { + "epoch": 10.54, + "learning_rate": 0.00013354591836734695, + "loss": 1.3902, + "step": 521 + }, + { + "epoch": 10.56, + "learning_rate": 0.00013341836734693879, + "loss": 1.4269, + "step": 522 + }, + { + "epoch": 10.58, + "learning_rate": 0.00013329081632653062, + "loss": 1.4899, + "step": 523 + }, + { + "epoch": 10.6, + "learning_rate": 0.00013316326530612247, + "loss": 1.3952, + "step": 524 + }, + { + "epoch": 10.62, + "learning_rate": 0.00013303571428571428, + "loss": 1.4116, + "step": 525 + }, + { + "epoch": 10.64, + "learning_rate": 0.00013290816326530614, + "loss": 1.4583, + "step": 526 + }, + { + "epoch": 10.66, + "learning_rate": 0.00013278061224489797, + "loss": 1.4466, + "step": 527 + }, + { + "epoch": 10.68, + "learning_rate": 0.0001326530612244898, + "loss": 1.4242, + "step": 528 + }, + { + "epoch": 10.7, + "learning_rate": 0.00013252551020408163, + "loss": 1.3717, + "step": 529 + }, + { + "epoch": 10.72, + "learning_rate": 0.0001323979591836735, + "loss": 1.4583, + "step": 530 + }, + { + "epoch": 10.74, + "learning_rate": 0.0001322704081632653, + "loss": 1.4185, + "step": 531 + }, + { + "epoch": 10.76, + "learning_rate": 0.00013214285714285715, + "loss": 1.4287, + "step": 532 + }, + { + "epoch": 10.78, + "learning_rate": 0.00013201530612244898, + "loss": 1.4385, + "step": 533 + }, + { + "epoch": 10.8, + "learning_rate": 
0.00013188775510204084, + "loss": 1.453, + "step": 534 + }, + { + "epoch": 10.83, + "learning_rate": 0.00013176020408163264, + "loss": 1.4161, + "step": 535 + }, + { + "epoch": 10.85, + "learning_rate": 0.0001316326530612245, + "loss": 1.457, + "step": 536 + }, + { + "epoch": 10.87, + "learning_rate": 0.00013150510204081633, + "loss": 1.4367, + "step": 537 + }, + { + "epoch": 10.89, + "learning_rate": 0.00013137755102040816, + "loss": 1.4256, + "step": 538 + }, + { + "epoch": 10.91, + "learning_rate": 0.00013125000000000002, + "loss": 1.424, + "step": 539 + }, + { + "epoch": 10.93, + "learning_rate": 0.00013112244897959185, + "loss": 1.3923, + "step": 540 + }, + { + "epoch": 10.95, + "learning_rate": 0.00013099489795918368, + "loss": 1.4225, + "step": 541 + }, + { + "epoch": 10.97, + "learning_rate": 0.0001308673469387755, + "loss": 1.3969, + "step": 542 + }, + { + "epoch": 10.99, + "learning_rate": 0.00013073979591836737, + "loss": 1.4446, + "step": 543 + }, + { + "epoch": 11.01, + "learning_rate": 0.00013061224489795917, + "loss": 1.4375, + "step": 544 + }, + { + "epoch": 11.03, + "learning_rate": 0.00013048469387755103, + "loss": 1.4064, + "step": 545 + }, + { + "epoch": 11.05, + "learning_rate": 0.00013035714285714286, + "loss": 1.3454, + "step": 546 + }, + { + "epoch": 11.07, + "learning_rate": 0.00013022959183673472, + "loss": 1.3234, + "step": 547 + }, + { + "epoch": 11.09, + "learning_rate": 0.00013010204081632652, + "loss": 1.3759, + "step": 548 + }, + { + "epoch": 11.11, + "learning_rate": 0.00012997448979591838, + "loss": 1.4221, + "step": 549 + }, + { + "epoch": 11.13, + "learning_rate": 0.0001298469387755102, + "loss": 1.4261, + "step": 550 + }, + { + "epoch": 11.15, + "learning_rate": 0.00012971938775510204, + "loss": 1.3341, + "step": 551 + }, + { + "epoch": 11.17, + "learning_rate": 0.00012959183673469387, + "loss": 1.3994, + "step": 552 + }, + { + "epoch": 11.19, + "learning_rate": 0.00012946428571428573, + "loss": 1.3894, + "step": 553 + }, + { + "epoch": 11.21, + "learning_rate": 0.00012933673469387754, + "loss": 1.3585, + "step": 554 + }, + { + "epoch": 11.23, + "learning_rate": 0.0001292091836734694, + "loss": 1.3763, + "step": 555 + }, + { + "epoch": 11.25, + "learning_rate": 0.00012908163265306123, + "loss": 1.3623, + "step": 556 + }, + { + "epoch": 11.27, + "learning_rate": 0.00012895408163265306, + "loss": 1.3907, + "step": 557 + }, + { + "epoch": 11.29, + "learning_rate": 0.0001288265306122449, + "loss": 1.3807, + "step": 558 + }, + { + "epoch": 11.31, + "learning_rate": 0.00012869897959183674, + "loss": 1.4045, + "step": 559 + }, + { + "epoch": 11.33, + "learning_rate": 0.00012857142857142858, + "loss": 1.4038, + "step": 560 + }, + { + "epoch": 11.35, + "learning_rate": 0.0001284438775510204, + "loss": 1.3466, + "step": 561 + }, + { + "epoch": 11.37, + "learning_rate": 0.00012831632653061226, + "loss": 1.3449, + "step": 562 + }, + { + "epoch": 11.39, + "learning_rate": 0.0001281887755102041, + "loss": 1.3866, + "step": 563 + }, + { + "epoch": 11.41, + "learning_rate": 0.00012806122448979593, + "loss": 1.3106, + "step": 564 + }, + { + "epoch": 11.43, + "learning_rate": 0.00012793367346938776, + "loss": 1.4414, + "step": 565 + }, + { + "epoch": 11.45, + "learning_rate": 0.00012780612244897962, + "loss": 1.3737, + "step": 566 + }, + { + "epoch": 11.47, + "learning_rate": 0.00012767857142857142, + "loss": 1.4053, + "step": 567 + }, + { + "epoch": 11.49, + "learning_rate": 0.00012755102040816328, + "loss": 1.4561, + "step": 568 + }, + { + "epoch": 11.51, + 
"learning_rate": 0.0001274234693877551, + "loss": 1.3684, + "step": 569 + }, + { + "epoch": 11.53, + "learning_rate": 0.00012729591836734697, + "loss": 1.3117, + "step": 570 + }, + { + "epoch": 11.55, + "learning_rate": 0.00012716836734693877, + "loss": 1.3474, + "step": 571 + }, + { + "epoch": 11.57, + "learning_rate": 0.00012704081632653063, + "loss": 1.3804, + "step": 572 + }, + { + "epoch": 11.59, + "learning_rate": 0.00012691326530612246, + "loss": 1.3656, + "step": 573 + }, + { + "epoch": 11.61, + "learning_rate": 0.0001267857142857143, + "loss": 1.3133, + "step": 574 + }, + { + "epoch": 11.63, + "learning_rate": 0.00012665816326530612, + "loss": 1.4077, + "step": 575 + }, + { + "epoch": 11.65, + "learning_rate": 0.00012653061224489798, + "loss": 1.4087, + "step": 576 + }, + { + "epoch": 11.67, + "learning_rate": 0.00012640306122448978, + "loss": 1.3524, + "step": 577 + }, + { + "epoch": 11.7, + "learning_rate": 0.00012627551020408164, + "loss": 1.3481, + "step": 578 + }, + { + "epoch": 11.72, + "learning_rate": 0.00012614795918367347, + "loss": 1.4497, + "step": 579 + }, + { + "epoch": 11.74, + "learning_rate": 0.0001260204081632653, + "loss": 1.3866, + "step": 580 + }, + { + "epoch": 11.76, + "learning_rate": 0.00012589285714285713, + "loss": 1.42, + "step": 581 + }, + { + "epoch": 11.78, + "learning_rate": 0.000125765306122449, + "loss": 1.3562, + "step": 582 + }, + { + "epoch": 11.8, + "learning_rate": 0.00012563775510204082, + "loss": 1.3249, + "step": 583 + }, + { + "epoch": 11.82, + "learning_rate": 0.00012551020408163265, + "loss": 1.4277, + "step": 584 + }, + { + "epoch": 11.84, + "learning_rate": 0.0001253826530612245, + "loss": 1.3734, + "step": 585 + }, + { + "epoch": 11.86, + "learning_rate": 0.00012525510204081634, + "loss": 1.3765, + "step": 586 + }, + { + "epoch": 11.88, + "learning_rate": 0.00012512755102040817, + "loss": 1.4153, + "step": 587 + }, + { + "epoch": 11.9, + "learning_rate": 0.000125, + "loss": 1.3847, + "step": 588 + }, + { + "epoch": 11.92, + "learning_rate": 0.00012487244897959186, + "loss": 1.3824, + "step": 589 + }, + { + "epoch": 11.94, + "learning_rate": 0.00012474489795918366, + "loss": 1.3938, + "step": 590 + }, + { + "epoch": 11.96, + "learning_rate": 0.00012461734693877552, + "loss": 1.4143, + "step": 591 + }, + { + "epoch": 11.98, + "learning_rate": 0.00012448979591836735, + "loss": 1.3794, + "step": 592 + }, + { + "epoch": 12.0, + "learning_rate": 0.00012436224489795918, + "loss": 1.3755, + "step": 593 + }, + { + "epoch": 12.02, + "learning_rate": 0.00012423469387755101, + "loss": 1.3736, + "step": 594 + }, + { + "epoch": 12.04, + "learning_rate": 0.00012410714285714287, + "loss": 1.2957, + "step": 595 + }, + { + "epoch": 12.06, + "learning_rate": 0.0001239795918367347, + "loss": 1.2996, + "step": 596 + }, + { + "epoch": 12.08, + "learning_rate": 0.00012385204081632653, + "loss": 1.3648, + "step": 597 + }, + { + "epoch": 12.1, + "learning_rate": 0.00012372448979591837, + "loss": 1.3031, + "step": 598 + }, + { + "epoch": 12.12, + "learning_rate": 0.00012359693877551022, + "loss": 1.2933, + "step": 599 + }, + { + "epoch": 12.14, + "learning_rate": 0.00012346938775510203, + "loss": 1.322, + "step": 600 + } + ], + "logging_steps": 1, + "max_steps": 1568, + "num_train_epochs": 32, + "save_steps": 100, + "total_flos": 8.877519690112819e+17, + "trial_name": null, + "trial_params": null +} diff --git a/checkpoint-600/training_args.bin b/checkpoint-600/training_args.bin new file mode 100644 index 
0000000000000000000000000000000000000000..db23e07d097c18532e52f58a70eb72d22e39c8c1 --- /dev/null +++ b/checkpoint-600/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b610cbc4242bb50b4985b00e205994ae514fec6d9e2273f2b545a583a07b154b +size 4155 diff --git a/checkpoint-700/README.md b/checkpoint-700/README.md new file mode 100644 index 0000000000000000000000000000000000000000..08371015f02382e6fcba318f4aaea54ae52cd3c4 --- /dev/null +++ b/checkpoint-700/README.md @@ -0,0 +1,34 @@ +--- +library_name: peft +--- +## Training procedure + + +The following `bitsandbytes` quantization config was used during training: +- quant_method: bitsandbytes +- load_in_8bit: True +- load_in_4bit: False +- llm_int8_threshold: 6.0 +- llm_int8_skip_modules: None +- llm_int8_enable_fp32_cpu_offload: False +- llm_int8_has_fp16_weight: False +- bnb_4bit_quant_type: fp4 +- bnb_4bit_use_double_quant: False +- bnb_4bit_compute_dtype: float32 + +The following `bitsandbytes` quantization config was used during training: +- quant_method: bitsandbytes +- load_in_8bit: True +- load_in_4bit: False +- llm_int8_threshold: 6.0 +- llm_int8_skip_modules: None +- llm_int8_enable_fp32_cpu_offload: False +- llm_int8_has_fp16_weight: False +- bnb_4bit_quant_type: fp4 +- bnb_4bit_use_double_quant: False +- bnb_4bit_compute_dtype: float32 +### Framework versions + +- PEFT 0.6.0.dev0 + +- PEFT 0.6.0.dev0 diff --git a/checkpoint-700/adapter_config.json b/checkpoint-700/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..751d838ac0c1ae5ca71ca448b25d7a8a0173f01b --- /dev/null +++ b/checkpoint-700/adapter_config.json @@ -0,0 +1,23 @@ +{ + "auto_mapping": null, + "base_model_name_or_path": "bigscience/bloomz-3b", + "bias": "none", + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": true, + "layers_pattern": null, + "layers_to_transform": null, + "lora_alpha": 16, + "lora_dropout": 0.0, + "modules_to_save": null, + "peft_type": "LORA", + "r": 8, + "revision": null, + "target_modules": [ + "dense_4h_to_h", + "dense", + "dense_h_to_4h", + "query_key_value" + ], + "task_type": "CAUSAL_LM" +} \ No newline at end of file diff --git a/checkpoint-700/adapter_model.bin b/checkpoint-700/adapter_model.bin new file mode 100644 index 0000000000000000000000000000000000000000..555b872534b3769fecafaf9320509b206eaa9ba2 --- /dev/null +++ b/checkpoint-700/adapter_model.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6fe79826e26fa903fa64560c79bd0221d35aa416cc9a8d5ec9e09e149435dfbb +size 39409357 diff --git a/checkpoint-700/optimizer.pt b/checkpoint-700/optimizer.pt new file mode 100644 index 0000000000000000000000000000000000000000..6422dd33c8fe76b4f9fbf53da14138518675e40c --- /dev/null +++ b/checkpoint-700/optimizer.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:829988f0b71481d26c8b54a86768543c7c14a48d695552f6933481d5709420c1 +size 78844421 diff --git a/checkpoint-700/rng_state.pth b/checkpoint-700/rng_state.pth new file mode 100644 index 0000000000000000000000000000000000000000..c1c4c79fcb2148e5de373db02c3ee2987200a3a9 --- /dev/null +++ b/checkpoint-700/rng_state.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3e46ce4eb16240da9f3a8b3066acb6f59a234249ee2a3052f3323786da479838 +size 14575 diff --git a/checkpoint-700/scheduler.pt b/checkpoint-700/scheduler.pt new file mode 100644 index 0000000000000000000000000000000000000000..4543a318cb958b4b426e7561985db7ab82a9d34b --- /dev/null 
+++ b/checkpoint-700/scheduler.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:091c20f4c5e244fe8f3c4fd8851e0e20a9e6123652dcfb2277f8793845f1eb70 +size 627 diff --git a/checkpoint-700/special_tokens_map.json b/checkpoint-700/special_tokens_map.json new file mode 100644 index 0000000000000000000000000000000000000000..fdafe480f024ff444c7492147536765ce5d55a2d --- /dev/null +++ b/checkpoint-700/special_tokens_map.json @@ -0,0 +1,6 @@ +{ + "bos_token": "", + "eos_token": "", + "pad_token": "", + "unk_token": "" +} diff --git a/checkpoint-700/tokenizer.json b/checkpoint-700/tokenizer.json new file mode 100644 index 0000000000000000000000000000000000000000..dbf002cafbd4818dcff2abc9156c088d681b4533 --- /dev/null +++ b/checkpoint-700/tokenizer.json @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:17a208233d2ee8d8c83b23bc214df737c44806a1919f444e89b31e586cd956ba +size 14500471 diff --git a/checkpoint-700/tokenizer_config.json b/checkpoint-700/tokenizer_config.json new file mode 100644 index 0000000000000000000000000000000000000000..4b56cc9c2965c07132c35df3e2972e93d98c82c3 --- /dev/null +++ b/checkpoint-700/tokenizer_config.json @@ -0,0 +1,10 @@ +{ + "add_prefix_space": false, + "bos_token": "", + "clean_up_tokenization_spaces": false, + "eos_token": "", + "model_max_length": 1000000000000000019884624838656, + "pad_token": "", + "tokenizer_class": "BloomTokenizer", + "unk_token": "" +} diff --git a/checkpoint-700/trainer_state.json b/checkpoint-700/trainer_state.json new file mode 100644 index 0000000000000000000000000000000000000000..2aeb564b0e254822ffc5698a6594bddc49348229 --- /dev/null +++ b/checkpoint-700/trainer_state.json @@ -0,0 +1,4219 @@ +{ + "best_metric": null, + "best_model_checkpoint": null, + "epoch": 14.163768574138476, + "eval_steps": 500, + "global_step": 700, + "is_hyper_param_search": false, + "is_local_process_zero": true, + "is_world_process_zero": true, + "log_history": [ + { + "epoch": 0.02, + "learning_rate": 0.00019987244897959184, + "loss": 3.2215, + "step": 1 + }, + { + "epoch": 0.04, + "learning_rate": 0.00019974489795918367, + "loss": 2.8365, + "step": 2 + }, + { + "epoch": 0.06, + "learning_rate": 0.00019961734693877553, + "loss": 2.602, + "step": 3 + }, + { + "epoch": 0.08, + "learning_rate": 0.00019948979591836736, + "loss": 2.4196, + "step": 4 + }, + { + "epoch": 0.1, + "learning_rate": 0.0001993622448979592, + "loss": 2.2574, + "step": 5 + }, + { + "epoch": 0.12, + "learning_rate": 0.00019923469387755102, + "loss": 2.2239, + "step": 6 + }, + { + "epoch": 0.14, + "learning_rate": 0.00019910714285714288, + "loss": 2.1661, + "step": 7 + }, + { + "epoch": 0.16, + "learning_rate": 0.0001989795918367347, + "loss": 2.0987, + "step": 8 + }, + { + "epoch": 0.18, + "learning_rate": 0.00019885204081632654, + "loss": 2.015, + "step": 9 + }, + { + "epoch": 0.2, + "learning_rate": 0.00019872448979591837, + "loss": 1.9771, + "step": 10 + }, + { + "epoch": 0.22, + "learning_rate": 0.00019859693877551023, + "loss": 2.0271, + "step": 11 + }, + { + "epoch": 0.24, + "learning_rate": 0.00019846938775510203, + "loss": 1.9812, + "step": 12 + }, + { + "epoch": 0.26, + "learning_rate": 0.0001983418367346939, + "loss": 2.0834, + "step": 13 + }, + { + "epoch": 0.28, + "learning_rate": 0.00019821428571428572, + "loss": 1.9174, + "step": 14 + }, + { + "epoch": 0.3, + "learning_rate": 0.00019808673469387755, + "loss": 1.8409, + "step": 15 + }, + { + "epoch": 0.32, + "learning_rate": 0.00019795918367346938, + "loss": 1.929, + "step": 16 + }, + { + 
"epoch": 0.34, + "learning_rate": 0.00019783163265306124, + "loss": 2.0041, + "step": 17 + }, + { + "epoch": 0.36, + "learning_rate": 0.00019770408163265305, + "loss": 1.9385, + "step": 18 + }, + { + "epoch": 0.38, + "learning_rate": 0.0001975765306122449, + "loss": 1.9592, + "step": 19 + }, + { + "epoch": 0.4, + "learning_rate": 0.00019744897959183674, + "loss": 1.9701, + "step": 20 + }, + { + "epoch": 0.42, + "learning_rate": 0.0001973214285714286, + "loss": 1.9277, + "step": 21 + }, + { + "epoch": 0.45, + "learning_rate": 0.00019719387755102042, + "loss": 1.8394, + "step": 22 + }, + { + "epoch": 0.47, + "learning_rate": 0.00019706632653061226, + "loss": 1.8666, + "step": 23 + }, + { + "epoch": 0.49, + "learning_rate": 0.00019693877551020409, + "loss": 1.8997, + "step": 24 + }, + { + "epoch": 0.51, + "learning_rate": 0.00019681122448979592, + "loss": 1.9432, + "step": 25 + }, + { + "epoch": 0.53, + "learning_rate": 0.00019668367346938777, + "loss": 1.9137, + "step": 26 + }, + { + "epoch": 0.55, + "learning_rate": 0.0001965561224489796, + "loss": 1.905, + "step": 27 + }, + { + "epoch": 0.57, + "learning_rate": 0.00019642857142857144, + "loss": 1.8708, + "step": 28 + }, + { + "epoch": 0.59, + "learning_rate": 0.00019630102040816327, + "loss": 1.9097, + "step": 29 + }, + { + "epoch": 0.61, + "learning_rate": 0.00019617346938775513, + "loss": 1.896, + "step": 30 + }, + { + "epoch": 0.63, + "learning_rate": 0.00019604591836734696, + "loss": 1.8834, + "step": 31 + }, + { + "epoch": 0.65, + "learning_rate": 0.0001959183673469388, + "loss": 1.8323, + "step": 32 + }, + { + "epoch": 0.67, + "learning_rate": 0.00019579081632653062, + "loss": 1.804, + "step": 33 + }, + { + "epoch": 0.69, + "learning_rate": 0.00019566326530612248, + "loss": 1.8906, + "step": 34 + }, + { + "epoch": 0.71, + "learning_rate": 0.00019553571428571428, + "loss": 1.8693, + "step": 35 + }, + { + "epoch": 0.73, + "learning_rate": 0.00019540816326530614, + "loss": 1.9308, + "step": 36 + }, + { + "epoch": 0.75, + "learning_rate": 0.00019528061224489797, + "loss": 1.8082, + "step": 37 + }, + { + "epoch": 0.77, + "learning_rate": 0.0001951530612244898, + "loss": 1.848, + "step": 38 + }, + { + "epoch": 0.79, + "learning_rate": 0.00019502551020408163, + "loss": 1.8866, + "step": 39 + }, + { + "epoch": 0.81, + "learning_rate": 0.0001948979591836735, + "loss": 1.7844, + "step": 40 + }, + { + "epoch": 0.83, + "learning_rate": 0.0001947704081632653, + "loss": 1.8485, + "step": 41 + }, + { + "epoch": 0.85, + "learning_rate": 0.00019464285714285715, + "loss": 1.7917, + "step": 42 + }, + { + "epoch": 0.87, + "learning_rate": 0.00019451530612244898, + "loss": 1.7342, + "step": 43 + }, + { + "epoch": 0.89, + "learning_rate": 0.00019438775510204084, + "loss": 1.8479, + "step": 44 + }, + { + "epoch": 0.91, + "learning_rate": 0.00019426020408163267, + "loss": 1.8639, + "step": 45 + }, + { + "epoch": 0.93, + "learning_rate": 0.0001941326530612245, + "loss": 1.8166, + "step": 46 + }, + { + "epoch": 0.95, + "learning_rate": 0.00019400510204081633, + "loss": 1.7566, + "step": 47 + }, + { + "epoch": 0.97, + "learning_rate": 0.00019387755102040816, + "loss": 1.8071, + "step": 48 + }, + { + "epoch": 0.99, + "learning_rate": 0.00019375000000000002, + "loss": 1.8612, + "step": 49 + }, + { + "epoch": 1.01, + "learning_rate": 0.00019362244897959185, + "loss": 1.7819, + "step": 50 + }, + { + "epoch": 1.03, + "learning_rate": 0.00019349489795918368, + "loss": 1.8647, + "step": 51 + }, + { + "epoch": 1.05, + "learning_rate": 0.0001933673469387755, + "loss": 
1.8196, + "step": 52 + }, + { + "epoch": 1.07, + "learning_rate": 0.00019323979591836737, + "loss": 1.8027, + "step": 53 + }, + { + "epoch": 1.09, + "learning_rate": 0.00019311224489795917, + "loss": 1.8927, + "step": 54 + }, + { + "epoch": 1.11, + "learning_rate": 0.00019298469387755103, + "loss": 1.8481, + "step": 55 + }, + { + "epoch": 1.13, + "learning_rate": 0.00019285714285714286, + "loss": 1.7781, + "step": 56 + }, + { + "epoch": 1.15, + "learning_rate": 0.00019272959183673472, + "loss": 1.8101, + "step": 57 + }, + { + "epoch": 1.17, + "learning_rate": 0.00019260204081632653, + "loss": 1.7257, + "step": 58 + }, + { + "epoch": 1.19, + "learning_rate": 0.00019247448979591838, + "loss": 1.8185, + "step": 59 + }, + { + "epoch": 1.21, + "learning_rate": 0.00019234693877551021, + "loss": 1.8557, + "step": 60 + }, + { + "epoch": 1.23, + "learning_rate": 0.00019221938775510204, + "loss": 1.7418, + "step": 61 + }, + { + "epoch": 1.25, + "learning_rate": 0.00019209183673469388, + "loss": 1.6879, + "step": 62 + }, + { + "epoch": 1.27, + "learning_rate": 0.00019196428571428573, + "loss": 1.7651, + "step": 63 + }, + { + "epoch": 1.29, + "learning_rate": 0.00019183673469387756, + "loss": 1.7759, + "step": 64 + }, + { + "epoch": 1.32, + "learning_rate": 0.0001917091836734694, + "loss": 1.7691, + "step": 65 + }, + { + "epoch": 1.34, + "learning_rate": 0.00019158163265306123, + "loss": 1.7794, + "step": 66 + }, + { + "epoch": 1.36, + "learning_rate": 0.00019145408163265306, + "loss": 1.8152, + "step": 67 + }, + { + "epoch": 1.38, + "learning_rate": 0.00019132653061224492, + "loss": 1.8052, + "step": 68 + }, + { + "epoch": 1.4, + "learning_rate": 0.00019119897959183675, + "loss": 1.8054, + "step": 69 + }, + { + "epoch": 1.42, + "learning_rate": 0.00019107142857142858, + "loss": 1.8114, + "step": 70 + }, + { + "epoch": 1.44, + "learning_rate": 0.0001909438775510204, + "loss": 1.7749, + "step": 71 + }, + { + "epoch": 1.46, + "learning_rate": 0.00019081632653061227, + "loss": 1.777, + "step": 72 + }, + { + "epoch": 1.48, + "learning_rate": 0.0001906887755102041, + "loss": 1.7896, + "step": 73 + }, + { + "epoch": 1.5, + "learning_rate": 0.00019056122448979593, + "loss": 1.8335, + "step": 74 + }, + { + "epoch": 1.52, + "learning_rate": 0.00019043367346938776, + "loss": 1.8155, + "step": 75 + }, + { + "epoch": 1.54, + "learning_rate": 0.00019030612244897962, + "loss": 1.8224, + "step": 76 + }, + { + "epoch": 1.56, + "learning_rate": 0.00019017857142857142, + "loss": 1.7889, + "step": 77 + }, + { + "epoch": 1.58, + "learning_rate": 0.00019005102040816328, + "loss": 1.8866, + "step": 78 + }, + { + "epoch": 1.6, + "learning_rate": 0.0001899234693877551, + "loss": 1.8439, + "step": 79 + }, + { + "epoch": 1.62, + "learning_rate": 0.00018979591836734697, + "loss": 1.7906, + "step": 80 + }, + { + "epoch": 1.64, + "learning_rate": 0.00018966836734693877, + "loss": 1.8627, + "step": 81 + }, + { + "epoch": 1.66, + "learning_rate": 0.00018954081632653063, + "loss": 1.7497, + "step": 82 + }, + { + "epoch": 1.68, + "learning_rate": 0.00018941326530612246, + "loss": 1.7936, + "step": 83 + }, + { + "epoch": 1.7, + "learning_rate": 0.0001892857142857143, + "loss": 1.8341, + "step": 84 + }, + { + "epoch": 1.72, + "learning_rate": 0.00018915816326530612, + "loss": 1.7868, + "step": 85 + }, + { + "epoch": 1.74, + "learning_rate": 0.00018903061224489798, + "loss": 1.7493, + "step": 86 + }, + { + "epoch": 1.76, + "learning_rate": 0.0001889030612244898, + "loss": 1.7926, + "step": 87 + }, + { + "epoch": 1.78, + "learning_rate": 
0.00018877551020408164, + "loss": 1.8278, + "step": 88 + }, + { + "epoch": 1.8, + "learning_rate": 0.00018864795918367347, + "loss": 1.7387, + "step": 89 + }, + { + "epoch": 1.82, + "learning_rate": 0.0001885204081632653, + "loss": 1.7669, + "step": 90 + }, + { + "epoch": 1.84, + "learning_rate": 0.00018839285714285716, + "loss": 1.7686, + "step": 91 + }, + { + "epoch": 1.86, + "learning_rate": 0.000188265306122449, + "loss": 1.7759, + "step": 92 + }, + { + "epoch": 1.88, + "learning_rate": 0.00018813775510204082, + "loss": 1.7016, + "step": 93 + }, + { + "epoch": 1.9, + "learning_rate": 0.00018801020408163265, + "loss": 1.8123, + "step": 94 + }, + { + "epoch": 1.92, + "learning_rate": 0.0001878826530612245, + "loss": 1.8315, + "step": 95 + }, + { + "epoch": 1.94, + "learning_rate": 0.00018775510204081634, + "loss": 1.7679, + "step": 96 + }, + { + "epoch": 1.96, + "learning_rate": 0.00018762755102040817, + "loss": 1.7874, + "step": 97 + }, + { + "epoch": 1.98, + "learning_rate": 0.0001875, + "loss": 1.8008, + "step": 98 + }, + { + "epoch": 2.0, + "learning_rate": 0.00018737244897959186, + "loss": 1.7177, + "step": 99 + }, + { + "epoch": 2.02, + "learning_rate": 0.00018724489795918367, + "loss": 1.7272, + "step": 100 + }, + { + "epoch": 2.04, + "learning_rate": 0.00018711734693877552, + "loss": 1.7848, + "step": 101 + }, + { + "epoch": 2.06, + "learning_rate": 0.00018698979591836735, + "loss": 1.744, + "step": 102 + }, + { + "epoch": 2.08, + "learning_rate": 0.00018686224489795919, + "loss": 1.7005, + "step": 103 + }, + { + "epoch": 2.1, + "learning_rate": 0.00018673469387755102, + "loss": 1.8247, + "step": 104 + }, + { + "epoch": 2.12, + "learning_rate": 0.00018660714285714287, + "loss": 1.6855, + "step": 105 + }, + { + "epoch": 2.14, + "learning_rate": 0.0001864795918367347, + "loss": 1.7627, + "step": 106 + }, + { + "epoch": 2.17, + "learning_rate": 0.00018635204081632654, + "loss": 1.7564, + "step": 107 + }, + { + "epoch": 2.19, + "learning_rate": 0.00018622448979591837, + "loss": 1.8237, + "step": 108 + }, + { + "epoch": 2.21, + "learning_rate": 0.00018609693877551022, + "loss": 1.7421, + "step": 109 + }, + { + "epoch": 2.23, + "learning_rate": 0.00018596938775510206, + "loss": 1.7517, + "step": 110 + }, + { + "epoch": 2.25, + "learning_rate": 0.0001858418367346939, + "loss": 1.7515, + "step": 111 + }, + { + "epoch": 2.27, + "learning_rate": 0.00018571428571428572, + "loss": 1.7842, + "step": 112 + }, + { + "epoch": 2.29, + "learning_rate": 0.00018558673469387755, + "loss": 1.8001, + "step": 113 + }, + { + "epoch": 2.31, + "learning_rate": 0.0001854591836734694, + "loss": 1.7653, + "step": 114 + }, + { + "epoch": 2.33, + "learning_rate": 0.00018533163265306124, + "loss": 1.694, + "step": 115 + }, + { + "epoch": 2.35, + "learning_rate": 0.00018520408163265307, + "loss": 1.7457, + "step": 116 + }, + { + "epoch": 2.37, + "learning_rate": 0.0001850765306122449, + "loss": 1.7899, + "step": 117 + }, + { + "epoch": 2.39, + "learning_rate": 0.00018494897959183676, + "loss": 1.7473, + "step": 118 + }, + { + "epoch": 2.41, + "learning_rate": 0.0001848214285714286, + "loss": 1.6639, + "step": 119 + }, + { + "epoch": 2.43, + "learning_rate": 0.00018469387755102042, + "loss": 1.762, + "step": 120 + }, + { + "epoch": 2.45, + "learning_rate": 0.00018456632653061225, + "loss": 1.7378, + "step": 121 + }, + { + "epoch": 2.47, + "learning_rate": 0.0001844387755102041, + "loss": 1.672, + "step": 122 + }, + { + "epoch": 2.49, + "learning_rate": 0.0001843112244897959, + "loss": 1.7267, + "step": 123 + }, + 
{ + "epoch": 2.51, + "learning_rate": 0.00018418367346938777, + "loss": 1.7825, + "step": 124 + }, + { + "epoch": 2.53, + "learning_rate": 0.0001840561224489796, + "loss": 1.7566, + "step": 125 + }, + { + "epoch": 2.55, + "learning_rate": 0.00018392857142857143, + "loss": 1.8169, + "step": 126 + }, + { + "epoch": 2.57, + "learning_rate": 0.00018380102040816326, + "loss": 1.6801, + "step": 127 + }, + { + "epoch": 2.59, + "learning_rate": 0.00018367346938775512, + "loss": 1.7292, + "step": 128 + }, + { + "epoch": 2.61, + "learning_rate": 0.00018354591836734695, + "loss": 1.737, + "step": 129 + }, + { + "epoch": 2.63, + "learning_rate": 0.00018341836734693878, + "loss": 1.7696, + "step": 130 + }, + { + "epoch": 2.65, + "learning_rate": 0.0001832908163265306, + "loss": 1.7239, + "step": 131 + }, + { + "epoch": 2.67, + "learning_rate": 0.00018316326530612247, + "loss": 1.7441, + "step": 132 + }, + { + "epoch": 2.69, + "learning_rate": 0.0001830357142857143, + "loss": 1.7825, + "step": 133 + }, + { + "epoch": 2.71, + "learning_rate": 0.00018290816326530613, + "loss": 1.7411, + "step": 134 + }, + { + "epoch": 2.73, + "learning_rate": 0.00018278061224489796, + "loss": 1.7119, + "step": 135 + }, + { + "epoch": 2.75, + "learning_rate": 0.0001826530612244898, + "loss": 1.7443, + "step": 136 + }, + { + "epoch": 2.77, + "learning_rate": 0.00018252551020408165, + "loss": 1.7197, + "step": 137 + }, + { + "epoch": 2.79, + "learning_rate": 0.00018239795918367348, + "loss": 1.7273, + "step": 138 + }, + { + "epoch": 2.81, + "learning_rate": 0.0001822704081632653, + "loss": 1.7681, + "step": 139 + }, + { + "epoch": 2.83, + "learning_rate": 0.00018214285714285714, + "loss": 1.8088, + "step": 140 + }, + { + "epoch": 2.85, + "learning_rate": 0.000182015306122449, + "loss": 1.7301, + "step": 141 + }, + { + "epoch": 2.87, + "learning_rate": 0.00018188775510204083, + "loss": 1.6853, + "step": 142 + }, + { + "epoch": 2.89, + "learning_rate": 0.00018176020408163266, + "loss": 1.6966, + "step": 143 + }, + { + "epoch": 2.91, + "learning_rate": 0.0001816326530612245, + "loss": 1.7938, + "step": 144 + }, + { + "epoch": 2.93, + "learning_rate": 0.00018150510204081635, + "loss": 1.7639, + "step": 145 + }, + { + "epoch": 2.95, + "learning_rate": 0.00018137755102040816, + "loss": 1.7527, + "step": 146 + }, + { + "epoch": 2.97, + "learning_rate": 0.00018125000000000001, + "loss": 1.7386, + "step": 147 + }, + { + "epoch": 2.99, + "learning_rate": 0.00018112244897959185, + "loss": 1.7223, + "step": 148 + }, + { + "epoch": 3.01, + "learning_rate": 0.00018099489795918368, + "loss": 1.7571, + "step": 149 + }, + { + "epoch": 3.04, + "learning_rate": 0.0001808673469387755, + "loss": 1.7054, + "step": 150 + }, + { + "epoch": 3.06, + "learning_rate": 0.00018073979591836737, + "loss": 1.6581, + "step": 151 + }, + { + "epoch": 3.08, + "learning_rate": 0.00018061224489795917, + "loss": 1.681, + "step": 152 + }, + { + "epoch": 3.1, + "learning_rate": 0.00018048469387755103, + "loss": 1.7425, + "step": 153 + }, + { + "epoch": 3.12, + "learning_rate": 0.00018035714285714286, + "loss": 1.7108, + "step": 154 + }, + { + "epoch": 3.14, + "learning_rate": 0.00018022959183673472, + "loss": 1.7194, + "step": 155 + }, + { + "epoch": 3.16, + "learning_rate": 0.00018010204081632655, + "loss": 1.6953, + "step": 156 + }, + { + "epoch": 3.18, + "learning_rate": 0.00017997448979591838, + "loss": 1.669, + "step": 157 + }, + { + "epoch": 3.2, + "learning_rate": 0.0001798469387755102, + "loss": 1.744, + "step": 158 + }, + { + "epoch": 3.22, + 
"learning_rate": 0.00017971938775510204, + "loss": 1.6467, + "step": 159 + }, + { + "epoch": 3.24, + "learning_rate": 0.0001795918367346939, + "loss": 1.7103, + "step": 160 + }, + { + "epoch": 3.26, + "learning_rate": 0.00017946428571428573, + "loss": 1.6662, + "step": 161 + }, + { + "epoch": 3.28, + "learning_rate": 0.00017933673469387756, + "loss": 1.6657, + "step": 162 + }, + { + "epoch": 3.3, + "learning_rate": 0.0001792091836734694, + "loss": 1.791, + "step": 163 + }, + { + "epoch": 3.32, + "learning_rate": 0.00017908163265306125, + "loss": 1.7704, + "step": 164 + }, + { + "epoch": 3.34, + "learning_rate": 0.00017895408163265305, + "loss": 1.7229, + "step": 165 + }, + { + "epoch": 3.36, + "learning_rate": 0.0001788265306122449, + "loss": 1.76, + "step": 166 + }, + { + "epoch": 3.38, + "learning_rate": 0.00017869897959183674, + "loss": 1.6482, + "step": 167 + }, + { + "epoch": 3.4, + "learning_rate": 0.0001785714285714286, + "loss": 1.8076, + "step": 168 + }, + { + "epoch": 3.42, + "learning_rate": 0.0001784438775510204, + "loss": 1.7368, + "step": 169 + }, + { + "epoch": 3.44, + "learning_rate": 0.00017831632653061226, + "loss": 1.6264, + "step": 170 + }, + { + "epoch": 3.46, + "learning_rate": 0.0001781887755102041, + "loss": 1.6289, + "step": 171 + }, + { + "epoch": 3.48, + "learning_rate": 0.00017806122448979592, + "loss": 1.7913, + "step": 172 + }, + { + "epoch": 3.5, + "learning_rate": 0.00017793367346938775, + "loss": 1.6985, + "step": 173 + }, + { + "epoch": 3.52, + "learning_rate": 0.0001778061224489796, + "loss": 1.6936, + "step": 174 + }, + { + "epoch": 3.54, + "learning_rate": 0.00017767857142857141, + "loss": 1.8068, + "step": 175 + }, + { + "epoch": 3.56, + "learning_rate": 0.00017755102040816327, + "loss": 1.7243, + "step": 176 + }, + { + "epoch": 3.58, + "learning_rate": 0.0001774234693877551, + "loss": 1.6893, + "step": 177 + }, + { + "epoch": 3.6, + "learning_rate": 0.00017729591836734696, + "loss": 1.8122, + "step": 178 + }, + { + "epoch": 3.62, + "learning_rate": 0.0001771683673469388, + "loss": 1.6562, + "step": 179 + }, + { + "epoch": 3.64, + "learning_rate": 0.00017704081632653062, + "loss": 1.6999, + "step": 180 + }, + { + "epoch": 3.66, + "learning_rate": 0.00017691326530612245, + "loss": 1.7229, + "step": 181 + }, + { + "epoch": 3.68, + "learning_rate": 0.00017678571428571428, + "loss": 1.6764, + "step": 182 + }, + { + "epoch": 3.7, + "learning_rate": 0.00017665816326530614, + "loss": 1.6982, + "step": 183 + }, + { + "epoch": 3.72, + "learning_rate": 0.00017653061224489797, + "loss": 1.696, + "step": 184 + }, + { + "epoch": 3.74, + "learning_rate": 0.0001764030612244898, + "loss": 1.6797, + "step": 185 + }, + { + "epoch": 3.76, + "learning_rate": 0.00017627551020408164, + "loss": 1.637, + "step": 186 + }, + { + "epoch": 3.78, + "learning_rate": 0.0001761479591836735, + "loss": 1.7074, + "step": 187 + }, + { + "epoch": 3.8, + "learning_rate": 0.0001760204081632653, + "loss": 1.705, + "step": 188 + }, + { + "epoch": 3.82, + "learning_rate": 0.00017589285714285716, + "loss": 1.6153, + "step": 189 + }, + { + "epoch": 3.84, + "learning_rate": 0.00017576530612244899, + "loss": 1.7354, + "step": 190 + }, + { + "epoch": 3.86, + "learning_rate": 0.00017563775510204084, + "loss": 1.6941, + "step": 191 + }, + { + "epoch": 3.88, + "learning_rate": 0.00017551020408163265, + "loss": 1.7231, + "step": 192 + }, + { + "epoch": 3.91, + "learning_rate": 0.0001753826530612245, + "loss": 1.7663, + "step": 193 + }, + { + "epoch": 3.93, + "learning_rate": 0.00017525510204081634, + 
"loss": 1.6532, + "step": 194 + }, + { + "epoch": 3.95, + "learning_rate": 0.00017512755102040817, + "loss": 1.7115, + "step": 195 + }, + { + "epoch": 3.97, + "learning_rate": 0.000175, + "loss": 1.6955, + "step": 196 + }, + { + "epoch": 3.99, + "learning_rate": 0.00017487244897959186, + "loss": 1.6863, + "step": 197 + }, + { + "epoch": 4.01, + "learning_rate": 0.00017474489795918366, + "loss": 1.7012, + "step": 198 + }, + { + "epoch": 4.03, + "learning_rate": 0.00017461734693877552, + "loss": 1.5927, + "step": 199 + }, + { + "epoch": 4.05, + "learning_rate": 0.00017448979591836735, + "loss": 1.6272, + "step": 200 + }, + { + "epoch": 4.07, + "learning_rate": 0.00017436224489795918, + "loss": 1.5994, + "step": 201 + }, + { + "epoch": 4.09, + "learning_rate": 0.00017423469387755104, + "loss": 1.7141, + "step": 202 + }, + { + "epoch": 4.11, + "learning_rate": 0.00017410714285714287, + "loss": 1.7547, + "step": 203 + }, + { + "epoch": 4.13, + "learning_rate": 0.0001739795918367347, + "loss": 1.6254, + "step": 204 + }, + { + "epoch": 4.15, + "learning_rate": 0.00017385204081632653, + "loss": 1.6686, + "step": 205 + }, + { + "epoch": 4.17, + "learning_rate": 0.0001737244897959184, + "loss": 1.6684, + "step": 206 + }, + { + "epoch": 4.19, + "learning_rate": 0.00017359693877551022, + "loss": 1.6724, + "step": 207 + }, + { + "epoch": 4.21, + "learning_rate": 0.00017346938775510205, + "loss": 1.7361, + "step": 208 + }, + { + "epoch": 4.23, + "learning_rate": 0.00017334183673469388, + "loss": 1.7167, + "step": 209 + }, + { + "epoch": 4.25, + "learning_rate": 0.00017321428571428574, + "loss": 1.7226, + "step": 210 + }, + { + "epoch": 4.27, + "learning_rate": 0.00017308673469387754, + "loss": 1.7133, + "step": 211 + }, + { + "epoch": 4.29, + "learning_rate": 0.0001729591836734694, + "loss": 1.649, + "step": 212 + }, + { + "epoch": 4.31, + "learning_rate": 0.00017283163265306123, + "loss": 1.7104, + "step": 213 + }, + { + "epoch": 4.33, + "learning_rate": 0.00017270408163265306, + "loss": 1.6861, + "step": 214 + }, + { + "epoch": 4.35, + "learning_rate": 0.0001725765306122449, + "loss": 1.648, + "step": 215 + }, + { + "epoch": 4.37, + "learning_rate": 0.00017244897959183675, + "loss": 1.6215, + "step": 216 + }, + { + "epoch": 4.39, + "learning_rate": 0.00017232142857142858, + "loss": 1.6334, + "step": 217 + }, + { + "epoch": 4.41, + "learning_rate": 0.0001721938775510204, + "loss": 1.6283, + "step": 218 + }, + { + "epoch": 4.43, + "learning_rate": 0.00017206632653061224, + "loss": 1.6462, + "step": 219 + }, + { + "epoch": 4.45, + "learning_rate": 0.0001719387755102041, + "loss": 1.7233, + "step": 220 + }, + { + "epoch": 4.47, + "learning_rate": 0.0001718112244897959, + "loss": 1.7839, + "step": 221 + }, + { + "epoch": 4.49, + "learning_rate": 0.00017168367346938776, + "loss": 1.7204, + "step": 222 + }, + { + "epoch": 4.51, + "learning_rate": 0.0001715561224489796, + "loss": 1.7671, + "step": 223 + }, + { + "epoch": 4.53, + "learning_rate": 0.00017142857142857143, + "loss": 1.6824, + "step": 224 + }, + { + "epoch": 4.55, + "learning_rate": 0.00017130102040816328, + "loss": 1.7068, + "step": 225 + }, + { + "epoch": 4.57, + "learning_rate": 0.00017117346938775511, + "loss": 1.6515, + "step": 226 + }, + { + "epoch": 4.59, + "learning_rate": 0.00017104591836734694, + "loss": 1.6586, + "step": 227 + }, + { + "epoch": 4.61, + "learning_rate": 0.00017091836734693878, + "loss": 1.6355, + "step": 228 + }, + { + "epoch": 4.63, + "learning_rate": 0.00017079081632653063, + "loss": 1.7173, + "step": 229 + }, + { + 
"epoch": 4.65, + "learning_rate": 0.00017066326530612246, + "loss": 1.6585, + "step": 230 + }, + { + "epoch": 4.67, + "learning_rate": 0.0001705357142857143, + "loss": 1.5856, + "step": 231 + }, + { + "epoch": 4.69, + "learning_rate": 0.00017040816326530613, + "loss": 1.5923, + "step": 232 + }, + { + "epoch": 4.71, + "learning_rate": 0.00017028061224489798, + "loss": 1.7128, + "step": 233 + }, + { + "epoch": 4.73, + "learning_rate": 0.0001701530612244898, + "loss": 1.6971, + "step": 234 + }, + { + "epoch": 4.75, + "learning_rate": 0.00017002551020408165, + "loss": 1.6416, + "step": 235 + }, + { + "epoch": 4.78, + "learning_rate": 0.00016989795918367348, + "loss": 1.645, + "step": 236 + }, + { + "epoch": 4.8, + "learning_rate": 0.0001697704081632653, + "loss": 1.6792, + "step": 237 + }, + { + "epoch": 4.82, + "learning_rate": 0.00016964285714285714, + "loss": 1.6522, + "step": 238 + }, + { + "epoch": 4.84, + "learning_rate": 0.000169515306122449, + "loss": 1.6315, + "step": 239 + }, + { + "epoch": 4.86, + "learning_rate": 0.00016938775510204083, + "loss": 1.6622, + "step": 240 + }, + { + "epoch": 4.88, + "learning_rate": 0.00016926020408163266, + "loss": 1.6566, + "step": 241 + }, + { + "epoch": 4.9, + "learning_rate": 0.0001691326530612245, + "loss": 1.7141, + "step": 242 + }, + { + "epoch": 4.92, + "learning_rate": 0.00016900510204081635, + "loss": 1.5873, + "step": 243 + }, + { + "epoch": 4.94, + "learning_rate": 0.00016887755102040818, + "loss": 1.6571, + "step": 244 + }, + { + "epoch": 4.96, + "learning_rate": 0.00016875, + "loss": 1.6829, + "step": 245 + }, + { + "epoch": 4.98, + "learning_rate": 0.00016862244897959184, + "loss": 1.6935, + "step": 246 + }, + { + "epoch": 5.0, + "learning_rate": 0.00016849489795918367, + "loss": 1.6782, + "step": 247 + }, + { + "epoch": 5.02, + "learning_rate": 0.00016836734693877553, + "loss": 1.622, + "step": 248 + }, + { + "epoch": 5.04, + "learning_rate": 0.00016823979591836736, + "loss": 1.6596, + "step": 249 + }, + { + "epoch": 5.06, + "learning_rate": 0.0001681122448979592, + "loss": 1.5821, + "step": 250 + }, + { + "epoch": 5.08, + "learning_rate": 0.00016798469387755102, + "loss": 1.7292, + "step": 251 + }, + { + "epoch": 5.1, + "learning_rate": 0.00016785714285714288, + "loss": 1.646, + "step": 252 + }, + { + "epoch": 5.12, + "learning_rate": 0.0001677295918367347, + "loss": 1.6969, + "step": 253 + }, + { + "epoch": 5.14, + "learning_rate": 0.00016760204081632654, + "loss": 1.6082, + "step": 254 + }, + { + "epoch": 5.16, + "learning_rate": 0.00016747448979591837, + "loss": 1.5843, + "step": 255 + }, + { + "epoch": 5.18, + "learning_rate": 0.00016734693877551023, + "loss": 1.6827, + "step": 256 + }, + { + "epoch": 5.2, + "learning_rate": 0.00016721938775510203, + "loss": 1.5824, + "step": 257 + }, + { + "epoch": 5.22, + "learning_rate": 0.0001670918367346939, + "loss": 1.6795, + "step": 258 + }, + { + "epoch": 5.24, + "learning_rate": 0.00016696428571428572, + "loss": 1.5639, + "step": 259 + }, + { + "epoch": 5.26, + "learning_rate": 0.00016683673469387755, + "loss": 1.592, + "step": 260 + }, + { + "epoch": 5.28, + "learning_rate": 0.00016670918367346938, + "loss": 1.65, + "step": 261 + }, + { + "epoch": 5.3, + "learning_rate": 0.00016658163265306124, + "loss": 1.5592, + "step": 262 + }, + { + "epoch": 5.32, + "learning_rate": 0.00016645408163265305, + "loss": 1.5091, + "step": 263 + }, + { + "epoch": 5.34, + "learning_rate": 0.0001663265306122449, + "loss": 1.6138, + "step": 264 + }, + { + "epoch": 5.36, + "learning_rate": 
0.00016619897959183673, + "loss": 1.625, + "step": 265 + }, + { + "epoch": 5.38, + "learning_rate": 0.0001660714285714286, + "loss": 1.5757, + "step": 266 + }, + { + "epoch": 5.4, + "learning_rate": 0.00016594387755102042, + "loss": 1.6372, + "step": 267 + }, + { + "epoch": 5.42, + "learning_rate": 0.00016581632653061225, + "loss": 1.5891, + "step": 268 + }, + { + "epoch": 5.44, + "learning_rate": 0.00016568877551020409, + "loss": 1.6893, + "step": 269 + }, + { + "epoch": 5.46, + "learning_rate": 0.00016556122448979592, + "loss": 1.6662, + "step": 270 + }, + { + "epoch": 5.48, + "learning_rate": 0.00016543367346938777, + "loss": 1.7132, + "step": 271 + }, + { + "epoch": 5.5, + "learning_rate": 0.0001653061224489796, + "loss": 1.5835, + "step": 272 + }, + { + "epoch": 5.52, + "learning_rate": 0.00016517857142857144, + "loss": 1.6342, + "step": 273 + }, + { + "epoch": 5.54, + "learning_rate": 0.00016505102040816327, + "loss": 1.6717, + "step": 274 + }, + { + "epoch": 5.56, + "learning_rate": 0.00016492346938775512, + "loss": 1.6248, + "step": 275 + }, + { + "epoch": 5.58, + "learning_rate": 0.00016479591836734696, + "loss": 1.6117, + "step": 276 + }, + { + "epoch": 5.6, + "learning_rate": 0.0001646683673469388, + "loss": 1.6798, + "step": 277 + }, + { + "epoch": 5.63, + "learning_rate": 0.00016454081632653062, + "loss": 1.6406, + "step": 278 + }, + { + "epoch": 5.65, + "learning_rate": 0.00016441326530612248, + "loss": 1.6512, + "step": 279 + }, + { + "epoch": 5.67, + "learning_rate": 0.00016428571428571428, + "loss": 1.6102, + "step": 280 + }, + { + "epoch": 5.69, + "learning_rate": 0.00016415816326530614, + "loss": 1.6113, + "step": 281 + }, + { + "epoch": 5.71, + "learning_rate": 0.00016403061224489797, + "loss": 1.7116, + "step": 282 + }, + { + "epoch": 5.73, + "learning_rate": 0.0001639030612244898, + "loss": 1.6846, + "step": 283 + }, + { + "epoch": 5.75, + "learning_rate": 0.00016377551020408163, + "loss": 1.6911, + "step": 284 + }, + { + "epoch": 5.77, + "learning_rate": 0.0001636479591836735, + "loss": 1.6202, + "step": 285 + }, + { + "epoch": 5.79, + "learning_rate": 0.0001635204081632653, + "loss": 1.5715, + "step": 286 + }, + { + "epoch": 5.81, + "learning_rate": 0.00016339285714285715, + "loss": 1.6461, + "step": 287 + }, + { + "epoch": 5.83, + "learning_rate": 0.00016326530612244898, + "loss": 1.6624, + "step": 288 + }, + { + "epoch": 5.85, + "learning_rate": 0.00016313775510204084, + "loss": 1.6535, + "step": 289 + }, + { + "epoch": 5.87, + "learning_rate": 0.00016301020408163267, + "loss": 1.6275, + "step": 290 + }, + { + "epoch": 5.89, + "learning_rate": 0.0001628826530612245, + "loss": 1.6636, + "step": 291 + }, + { + "epoch": 5.91, + "learning_rate": 0.00016275510204081633, + "loss": 1.6546, + "step": 292 + }, + { + "epoch": 5.93, + "learning_rate": 0.00016262755102040816, + "loss": 1.7274, + "step": 293 + }, + { + "epoch": 5.95, + "learning_rate": 0.00016250000000000002, + "loss": 1.5901, + "step": 294 + }, + { + "epoch": 5.97, + "learning_rate": 0.00016237244897959185, + "loss": 1.6046, + "step": 295 + }, + { + "epoch": 5.99, + "learning_rate": 0.00016224489795918368, + "loss": 1.5828, + "step": 296 + }, + { + "epoch": 6.01, + "learning_rate": 0.0001621173469387755, + "loss": 1.6435, + "step": 297 + }, + { + "epoch": 6.03, + "learning_rate": 0.00016198979591836737, + "loss": 1.6263, + "step": 298 + }, + { + "epoch": 6.05, + "learning_rate": 0.00016186224489795917, + "loss": 1.4944, + "step": 299 + }, + { + "epoch": 6.07, + "learning_rate": 0.00016173469387755103, + 
"loss": 1.6286, + "step": 300 + }, + { + "epoch": 6.09, + "learning_rate": 0.00016160714285714286, + "loss": 1.694, + "step": 301 + }, + { + "epoch": 6.11, + "learning_rate": 0.00016147959183673472, + "loss": 1.6197, + "step": 302 + }, + { + "epoch": 6.13, + "learning_rate": 0.00016135204081632652, + "loss": 1.5597, + "step": 303 + }, + { + "epoch": 6.15, + "learning_rate": 0.00016122448979591838, + "loss": 1.5487, + "step": 304 + }, + { + "epoch": 6.17, + "learning_rate": 0.0001610969387755102, + "loss": 1.5769, + "step": 305 + }, + { + "epoch": 6.19, + "learning_rate": 0.00016096938775510204, + "loss": 1.6367, + "step": 306 + }, + { + "epoch": 6.21, + "learning_rate": 0.00016084183673469388, + "loss": 1.583, + "step": 307 + }, + { + "epoch": 6.23, + "learning_rate": 0.00016071428571428573, + "loss": 1.6201, + "step": 308 + }, + { + "epoch": 6.25, + "learning_rate": 0.00016058673469387754, + "loss": 1.6586, + "step": 309 + }, + { + "epoch": 6.27, + "learning_rate": 0.0001604591836734694, + "loss": 1.6711, + "step": 310 + }, + { + "epoch": 6.29, + "learning_rate": 0.00016033163265306123, + "loss": 1.6402, + "step": 311 + }, + { + "epoch": 6.31, + "learning_rate": 0.00016020408163265306, + "loss": 1.5247, + "step": 312 + }, + { + "epoch": 6.33, + "learning_rate": 0.00016007653061224491, + "loss": 1.5356, + "step": 313 + }, + { + "epoch": 6.35, + "learning_rate": 0.00015994897959183675, + "loss": 1.564, + "step": 314 + }, + { + "epoch": 6.37, + "learning_rate": 0.00015982142857142858, + "loss": 1.563, + "step": 315 + }, + { + "epoch": 6.39, + "learning_rate": 0.0001596938775510204, + "loss": 1.5198, + "step": 316 + }, + { + "epoch": 6.41, + "learning_rate": 0.00015956632653061227, + "loss": 1.6558, + "step": 317 + }, + { + "epoch": 6.43, + "learning_rate": 0.0001594387755102041, + "loss": 1.5534, + "step": 318 + }, + { + "epoch": 6.45, + "learning_rate": 0.00015931122448979593, + "loss": 1.6239, + "step": 319 + }, + { + "epoch": 6.47, + "learning_rate": 0.00015918367346938776, + "loss": 1.5645, + "step": 320 + }, + { + "epoch": 6.5, + "learning_rate": 0.00015905612244897962, + "loss": 1.5713, + "step": 321 + }, + { + "epoch": 6.52, + "learning_rate": 0.00015892857142857142, + "loss": 1.6176, + "step": 322 + }, + { + "epoch": 6.54, + "learning_rate": 0.00015880102040816328, + "loss": 1.502, + "step": 323 + }, + { + "epoch": 6.56, + "learning_rate": 0.0001586734693877551, + "loss": 1.645, + "step": 324 + }, + { + "epoch": 6.58, + "learning_rate": 0.00015854591836734697, + "loss": 1.5904, + "step": 325 + }, + { + "epoch": 6.6, + "learning_rate": 0.00015841836734693877, + "loss": 1.6149, + "step": 326 + }, + { + "epoch": 6.62, + "learning_rate": 0.00015829081632653063, + "loss": 1.6757, + "step": 327 + }, + { + "epoch": 6.64, + "learning_rate": 0.00015816326530612246, + "loss": 1.541, + "step": 328 + }, + { + "epoch": 6.66, + "learning_rate": 0.0001580357142857143, + "loss": 1.5898, + "step": 329 + }, + { + "epoch": 6.68, + "learning_rate": 0.00015790816326530612, + "loss": 1.5441, + "step": 330 + }, + { + "epoch": 6.7, + "learning_rate": 0.00015778061224489798, + "loss": 1.61, + "step": 331 + }, + { + "epoch": 6.72, + "learning_rate": 0.00015765306122448978, + "loss": 1.615, + "step": 332 + }, + { + "epoch": 6.74, + "learning_rate": 0.00015752551020408164, + "loss": 1.6575, + "step": 333 + }, + { + "epoch": 6.76, + "learning_rate": 0.00015739795918367347, + "loss": 1.6702, + "step": 334 + }, + { + "epoch": 6.78, + "learning_rate": 0.0001572704081632653, + "loss": 1.6009, + "step": 335 + }, + { 
+ "epoch": 6.8, + "learning_rate": 0.00015714285714285716, + "loss": 1.5568, + "step": 336 + }, + { + "epoch": 6.82, + "learning_rate": 0.000157015306122449, + "loss": 1.619, + "step": 337 + }, + { + "epoch": 6.84, + "learning_rate": 0.00015688775510204082, + "loss": 1.5563, + "step": 338 + }, + { + "epoch": 6.86, + "learning_rate": 0.00015676020408163265, + "loss": 1.6328, + "step": 339 + }, + { + "epoch": 6.88, + "learning_rate": 0.0001566326530612245, + "loss": 1.5726, + "step": 340 + }, + { + "epoch": 6.9, + "learning_rate": 0.00015650510204081634, + "loss": 1.6199, + "step": 341 + }, + { + "epoch": 6.92, + "learning_rate": 0.00015637755102040817, + "loss": 1.5722, + "step": 342 + }, + { + "epoch": 6.94, + "learning_rate": 0.00015625, + "loss": 1.5685, + "step": 343 + }, + { + "epoch": 6.96, + "learning_rate": 0.00015612244897959186, + "loss": 1.5615, + "step": 344 + }, + { + "epoch": 6.98, + "learning_rate": 0.00015599489795918366, + "loss": 1.5994, + "step": 345 + }, + { + "epoch": 7.0, + "learning_rate": 0.00015586734693877552, + "loss": 1.5579, + "step": 346 + }, + { + "epoch": 7.02, + "learning_rate": 0.00015573979591836735, + "loss": 1.547, + "step": 347 + }, + { + "epoch": 7.04, + "learning_rate": 0.00015561224489795918, + "loss": 1.5292, + "step": 348 + }, + { + "epoch": 7.06, + "learning_rate": 0.00015548469387755102, + "loss": 1.6032, + "step": 349 + }, + { + "epoch": 7.08, + "learning_rate": 0.00015535714285714287, + "loss": 1.5149, + "step": 350 + }, + { + "epoch": 7.1, + "learning_rate": 0.0001552295918367347, + "loss": 1.6093, + "step": 351 + }, + { + "epoch": 7.12, + "learning_rate": 0.00015510204081632654, + "loss": 1.5421, + "step": 352 + }, + { + "epoch": 7.14, + "learning_rate": 0.00015497448979591837, + "loss": 1.5733, + "step": 353 + }, + { + "epoch": 7.16, + "learning_rate": 0.00015484693877551022, + "loss": 1.5703, + "step": 354 + }, + { + "epoch": 7.18, + "learning_rate": 0.00015471938775510203, + "loss": 1.6141, + "step": 355 + }, + { + "epoch": 7.2, + "learning_rate": 0.00015459183673469389, + "loss": 1.5526, + "step": 356 + }, + { + "epoch": 7.22, + "learning_rate": 0.00015446428571428572, + "loss": 1.5347, + "step": 357 + }, + { + "epoch": 7.24, + "learning_rate": 0.00015433673469387755, + "loss": 1.5682, + "step": 358 + }, + { + "epoch": 7.26, + "learning_rate": 0.0001542091836734694, + "loss": 1.5292, + "step": 359 + }, + { + "epoch": 7.28, + "learning_rate": 0.00015408163265306124, + "loss": 1.499, + "step": 360 + }, + { + "epoch": 7.3, + "learning_rate": 0.00015395408163265307, + "loss": 1.5624, + "step": 361 + }, + { + "epoch": 7.32, + "learning_rate": 0.0001538265306122449, + "loss": 1.627, + "step": 362 + }, + { + "epoch": 7.34, + "learning_rate": 0.00015369897959183676, + "loss": 1.5327, + "step": 363 + }, + { + "epoch": 7.37, + "learning_rate": 0.0001535714285714286, + "loss": 1.5622, + "step": 364 + }, + { + "epoch": 7.39, + "learning_rate": 0.00015344387755102042, + "loss": 1.5659, + "step": 365 + }, + { + "epoch": 7.41, + "learning_rate": 0.00015331632653061225, + "loss": 1.5019, + "step": 366 + }, + { + "epoch": 7.43, + "learning_rate": 0.0001531887755102041, + "loss": 1.5921, + "step": 367 + }, + { + "epoch": 7.45, + "learning_rate": 0.0001530612244897959, + "loss": 1.5914, + "step": 368 + }, + { + "epoch": 7.47, + "learning_rate": 0.00015293367346938777, + "loss": 1.5045, + "step": 369 + }, + { + "epoch": 7.49, + "learning_rate": 0.0001528061224489796, + "loss": 1.6209, + "step": 370 + }, + { + "epoch": 7.51, + "learning_rate": 
0.00015267857142857143, + "loss": 1.5198, + "step": 371 + }, + { + "epoch": 7.53, + "learning_rate": 0.00015255102040816326, + "loss": 1.5363, + "step": 372 + }, + { + "epoch": 7.55, + "learning_rate": 0.00015242346938775512, + "loss": 1.5391, + "step": 373 + }, + { + "epoch": 7.57, + "learning_rate": 0.00015229591836734695, + "loss": 1.4546, + "step": 374 + }, + { + "epoch": 7.59, + "learning_rate": 0.00015216836734693878, + "loss": 1.5546, + "step": 375 + }, + { + "epoch": 7.61, + "learning_rate": 0.0001520408163265306, + "loss": 1.5629, + "step": 376 + }, + { + "epoch": 7.63, + "learning_rate": 0.00015191326530612247, + "loss": 1.6002, + "step": 377 + }, + { + "epoch": 7.65, + "learning_rate": 0.00015178571428571427, + "loss": 1.5543, + "step": 378 + }, + { + "epoch": 7.67, + "learning_rate": 0.00015165816326530613, + "loss": 1.5925, + "step": 379 + }, + { + "epoch": 7.69, + "learning_rate": 0.00015153061224489796, + "loss": 1.5631, + "step": 380 + }, + { + "epoch": 7.71, + "learning_rate": 0.0001514030612244898, + "loss": 1.5677, + "step": 381 + }, + { + "epoch": 7.73, + "learning_rate": 0.00015127551020408165, + "loss": 1.5828, + "step": 382 + }, + { + "epoch": 7.75, + "learning_rate": 0.00015114795918367348, + "loss": 1.6494, + "step": 383 + }, + { + "epoch": 7.77, + "learning_rate": 0.0001510204081632653, + "loss": 1.553, + "step": 384 + }, + { + "epoch": 7.79, + "learning_rate": 0.00015089285714285714, + "loss": 1.6156, + "step": 385 + }, + { + "epoch": 7.81, + "learning_rate": 0.000150765306122449, + "loss": 1.5001, + "step": 386 + }, + { + "epoch": 7.83, + "learning_rate": 0.00015063775510204083, + "loss": 1.5321, + "step": 387 + }, + { + "epoch": 7.85, + "learning_rate": 0.00015051020408163266, + "loss": 1.5307, + "step": 388 + }, + { + "epoch": 7.87, + "learning_rate": 0.0001503826530612245, + "loss": 1.5639, + "step": 389 + }, + { + "epoch": 7.89, + "learning_rate": 0.00015025510204081635, + "loss": 1.517, + "step": 390 + }, + { + "epoch": 7.91, + "learning_rate": 0.00015012755102040816, + "loss": 1.4776, + "step": 391 + }, + { + "epoch": 7.93, + "learning_rate": 0.00015000000000000001, + "loss": 1.5368, + "step": 392 + }, + { + "epoch": 7.95, + "learning_rate": 0.00014987244897959184, + "loss": 1.5636, + "step": 393 + }, + { + "epoch": 7.97, + "learning_rate": 0.00014974489795918368, + "loss": 1.6004, + "step": 394 + }, + { + "epoch": 7.99, + "learning_rate": 0.0001496173469387755, + "loss": 1.5524, + "step": 395 + }, + { + "epoch": 8.01, + "learning_rate": 0.00014948979591836736, + "loss": 1.5307, + "step": 396 + }, + { + "epoch": 8.03, + "learning_rate": 0.00014936224489795917, + "loss": 1.5123, + "step": 397 + }, + { + "epoch": 8.05, + "learning_rate": 0.00014923469387755103, + "loss": 1.5132, + "step": 398 + }, + { + "epoch": 8.07, + "learning_rate": 0.00014910714285714286, + "loss": 1.5109, + "step": 399 + }, + { + "epoch": 8.09, + "learning_rate": 0.00014897959183673472, + "loss": 1.5302, + "step": 400 + }, + { + "epoch": 8.11, + "learning_rate": 0.00014885204081632652, + "loss": 1.5238, + "step": 401 + }, + { + "epoch": 8.13, + "learning_rate": 0.00014872448979591838, + "loss": 1.4781, + "step": 402 + }, + { + "epoch": 8.15, + "learning_rate": 0.0001485969387755102, + "loss": 1.5446, + "step": 403 + }, + { + "epoch": 8.17, + "learning_rate": 0.00014846938775510204, + "loss": 1.5, + "step": 404 + }, + { + "epoch": 8.19, + "learning_rate": 0.0001483418367346939, + "loss": 1.5458, + "step": 405 + }, + { + "epoch": 8.21, + "learning_rate": 0.00014821428571428573, + "loss": 
1.5257, + "step": 406 + }, + { + "epoch": 8.24, + "learning_rate": 0.00014808673469387756, + "loss": 1.4607, + "step": 407 + }, + { + "epoch": 8.26, + "learning_rate": 0.0001479591836734694, + "loss": 1.4282, + "step": 408 + }, + { + "epoch": 8.28, + "learning_rate": 0.00014783163265306125, + "loss": 1.4519, + "step": 409 + }, + { + "epoch": 8.3, + "learning_rate": 0.00014770408163265305, + "loss": 1.475, + "step": 410 + }, + { + "epoch": 8.32, + "learning_rate": 0.0001475765306122449, + "loss": 1.5425, + "step": 411 + }, + { + "epoch": 8.34, + "learning_rate": 0.00014744897959183674, + "loss": 1.5407, + "step": 412 + }, + { + "epoch": 8.36, + "learning_rate": 0.0001473214285714286, + "loss": 1.5698, + "step": 413 + }, + { + "epoch": 8.38, + "learning_rate": 0.0001471938775510204, + "loss": 1.4282, + "step": 414 + }, + { + "epoch": 8.4, + "learning_rate": 0.00014706632653061226, + "loss": 1.5301, + "step": 415 + }, + { + "epoch": 8.42, + "learning_rate": 0.0001469387755102041, + "loss": 1.5083, + "step": 416 + }, + { + "epoch": 8.44, + "learning_rate": 0.00014681122448979592, + "loss": 1.5712, + "step": 417 + }, + { + "epoch": 8.46, + "learning_rate": 0.00014668367346938775, + "loss": 1.4363, + "step": 418 + }, + { + "epoch": 8.48, + "learning_rate": 0.0001465561224489796, + "loss": 1.4463, + "step": 419 + }, + { + "epoch": 8.5, + "learning_rate": 0.00014642857142857141, + "loss": 1.4738, + "step": 420 + }, + { + "epoch": 8.52, + "learning_rate": 0.00014630102040816327, + "loss": 1.5396, + "step": 421 + }, + { + "epoch": 8.54, + "learning_rate": 0.0001461734693877551, + "loss": 1.4384, + "step": 422 + }, + { + "epoch": 8.56, + "learning_rate": 0.00014604591836734696, + "loss": 1.5345, + "step": 423 + }, + { + "epoch": 8.58, + "learning_rate": 0.0001459183673469388, + "loss": 1.5355, + "step": 424 + }, + { + "epoch": 8.6, + "learning_rate": 0.00014579081632653062, + "loss": 1.5188, + "step": 425 + }, + { + "epoch": 8.62, + "learning_rate": 0.00014566326530612245, + "loss": 1.5575, + "step": 426 + }, + { + "epoch": 8.64, + "learning_rate": 0.00014553571428571428, + "loss": 1.5279, + "step": 427 + }, + { + "epoch": 8.66, + "learning_rate": 0.00014540816326530614, + "loss": 1.5484, + "step": 428 + }, + { + "epoch": 8.68, + "learning_rate": 0.00014528061224489797, + "loss": 1.4878, + "step": 429 + }, + { + "epoch": 8.7, + "learning_rate": 0.0001451530612244898, + "loss": 1.503, + "step": 430 + }, + { + "epoch": 8.72, + "learning_rate": 0.00014502551020408163, + "loss": 1.4723, + "step": 431 + }, + { + "epoch": 8.74, + "learning_rate": 0.0001448979591836735, + "loss": 1.5579, + "step": 432 + }, + { + "epoch": 8.76, + "learning_rate": 0.0001447704081632653, + "loss": 1.4789, + "step": 433 + }, + { + "epoch": 8.78, + "learning_rate": 0.00014464285714285715, + "loss": 1.5501, + "step": 434 + }, + { + "epoch": 8.8, + "learning_rate": 0.00014451530612244899, + "loss": 1.5204, + "step": 435 + }, + { + "epoch": 8.82, + "learning_rate": 0.00014438775510204084, + "loss": 1.5489, + "step": 436 + }, + { + "epoch": 8.84, + "learning_rate": 0.00014426020408163265, + "loss": 1.5464, + "step": 437 + }, + { + "epoch": 8.86, + "learning_rate": 0.0001441326530612245, + "loss": 1.5896, + "step": 438 + }, + { + "epoch": 8.88, + "learning_rate": 0.00014400510204081634, + "loss": 1.5465, + "step": 439 + }, + { + "epoch": 8.9, + "learning_rate": 0.00014387755102040817, + "loss": 1.5094, + "step": 440 + }, + { + "epoch": 8.92, + "learning_rate": 0.00014375, + "loss": 1.5144, + "step": 441 + }, + { + "epoch": 8.94, + 
"learning_rate": 0.00014362244897959186, + "loss": 1.4919, + "step": 442 + }, + { + "epoch": 8.96, + "learning_rate": 0.00014349489795918366, + "loss": 1.4702, + "step": 443 + }, + { + "epoch": 8.98, + "learning_rate": 0.00014336734693877552, + "loss": 1.4996, + "step": 444 + }, + { + "epoch": 9.0, + "learning_rate": 0.00014323979591836735, + "loss": 1.5503, + "step": 445 + }, + { + "epoch": 9.02, + "learning_rate": 0.00014311224489795918, + "loss": 1.4125, + "step": 446 + }, + { + "epoch": 9.04, + "learning_rate": 0.00014298469387755104, + "loss": 1.4722, + "step": 447 + }, + { + "epoch": 9.06, + "learning_rate": 0.00014285714285714287, + "loss": 1.5199, + "step": 448 + }, + { + "epoch": 9.09, + "learning_rate": 0.0001427295918367347, + "loss": 1.4571, + "step": 449 + }, + { + "epoch": 9.11, + "learning_rate": 0.00014260204081632653, + "loss": 1.4996, + "step": 450 + }, + { + "epoch": 9.13, + "learning_rate": 0.0001424744897959184, + "loss": 1.4092, + "step": 451 + }, + { + "epoch": 9.15, + "learning_rate": 0.00014234693877551022, + "loss": 1.4198, + "step": 452 + }, + { + "epoch": 9.17, + "learning_rate": 0.00014221938775510205, + "loss": 1.4916, + "step": 453 + }, + { + "epoch": 9.19, + "learning_rate": 0.00014209183673469388, + "loss": 1.5051, + "step": 454 + }, + { + "epoch": 9.21, + "learning_rate": 0.00014196428571428574, + "loss": 1.4321, + "step": 455 + }, + { + "epoch": 9.23, + "learning_rate": 0.00014183673469387754, + "loss": 1.4097, + "step": 456 + }, + { + "epoch": 9.25, + "learning_rate": 0.0001417091836734694, + "loss": 1.4853, + "step": 457 + }, + { + "epoch": 9.27, + "learning_rate": 0.00014158163265306123, + "loss": 1.4593, + "step": 458 + }, + { + "epoch": 9.29, + "learning_rate": 0.00014145408163265306, + "loss": 1.3729, + "step": 459 + }, + { + "epoch": 9.31, + "learning_rate": 0.0001413265306122449, + "loss": 1.4467, + "step": 460 + }, + { + "epoch": 9.33, + "learning_rate": 0.00014119897959183675, + "loss": 1.4467, + "step": 461 + }, + { + "epoch": 9.35, + "learning_rate": 0.00014107142857142858, + "loss": 1.4785, + "step": 462 + }, + { + "epoch": 9.37, + "learning_rate": 0.0001409438775510204, + "loss": 1.4089, + "step": 463 + }, + { + "epoch": 9.39, + "learning_rate": 0.00014081632653061224, + "loss": 1.5026, + "step": 464 + }, + { + "epoch": 9.41, + "learning_rate": 0.0001406887755102041, + "loss": 1.4857, + "step": 465 + }, + { + "epoch": 9.43, + "learning_rate": 0.0001405612244897959, + "loss": 1.3745, + "step": 466 + }, + { + "epoch": 9.45, + "learning_rate": 0.00014043367346938776, + "loss": 1.4733, + "step": 467 + }, + { + "epoch": 9.47, + "learning_rate": 0.0001403061224489796, + "loss": 1.5212, + "step": 468 + }, + { + "epoch": 9.49, + "learning_rate": 0.00014017857142857142, + "loss": 1.5398, + "step": 469 + }, + { + "epoch": 9.51, + "learning_rate": 0.00014005102040816328, + "loss": 1.478, + "step": 470 + }, + { + "epoch": 9.53, + "learning_rate": 0.0001399234693877551, + "loss": 1.496, + "step": 471 + }, + { + "epoch": 9.55, + "learning_rate": 0.00013979591836734694, + "loss": 1.4837, + "step": 472 + }, + { + "epoch": 9.57, + "learning_rate": 0.00013966836734693878, + "loss": 1.4724, + "step": 473 + }, + { + "epoch": 9.59, + "learning_rate": 0.00013954081632653063, + "loss": 1.4828, + "step": 474 + }, + { + "epoch": 9.61, + "learning_rate": 0.00013941326530612246, + "loss": 1.5012, + "step": 475 + }, + { + "epoch": 9.63, + "learning_rate": 0.0001392857142857143, + "loss": 1.4879, + "step": 476 + }, + { + "epoch": 9.65, + "learning_rate": 
0.00013915816326530613, + "loss": 1.4196, + "step": 477 + }, + { + "epoch": 9.67, + "learning_rate": 0.00013903061224489798, + "loss": 1.4915, + "step": 478 + }, + { + "epoch": 9.69, + "learning_rate": 0.0001389030612244898, + "loss": 1.3878, + "step": 479 + }, + { + "epoch": 9.71, + "learning_rate": 0.00013877551020408165, + "loss": 1.466, + "step": 480 + }, + { + "epoch": 9.73, + "learning_rate": 0.00013864795918367348, + "loss": 1.4582, + "step": 481 + }, + { + "epoch": 9.75, + "learning_rate": 0.0001385204081632653, + "loss": 1.533, + "step": 482 + }, + { + "epoch": 9.77, + "learning_rate": 0.00013839285714285714, + "loss": 1.4697, + "step": 483 + }, + { + "epoch": 9.79, + "learning_rate": 0.000138265306122449, + "loss": 1.3989, + "step": 484 + }, + { + "epoch": 9.81, + "learning_rate": 0.00013813775510204083, + "loss": 1.4361, + "step": 485 + }, + { + "epoch": 9.83, + "learning_rate": 0.00013801020408163266, + "loss": 1.5271, + "step": 486 + }, + { + "epoch": 9.85, + "learning_rate": 0.0001378826530612245, + "loss": 1.4905, + "step": 487 + }, + { + "epoch": 9.87, + "learning_rate": 0.00013775510204081635, + "loss": 1.4757, + "step": 488 + }, + { + "epoch": 9.89, + "learning_rate": 0.00013762755102040815, + "loss": 1.5485, + "step": 489 + }, + { + "epoch": 9.91, + "learning_rate": 0.0001375, + "loss": 1.4783, + "step": 490 + }, + { + "epoch": 9.93, + "learning_rate": 0.00013737244897959184, + "loss": 1.4849, + "step": 491 + }, + { + "epoch": 9.96, + "learning_rate": 0.00013724489795918367, + "loss": 1.5382, + "step": 492 + }, + { + "epoch": 9.98, + "learning_rate": 0.00013711734693877553, + "loss": 1.4902, + "step": 493 + }, + { + "epoch": 10.0, + "learning_rate": 0.00013698979591836736, + "loss": 1.4865, + "step": 494 + }, + { + "epoch": 10.02, + "learning_rate": 0.0001368622448979592, + "loss": 1.4436, + "step": 495 + }, + { + "epoch": 10.04, + "learning_rate": 0.00013673469387755102, + "loss": 1.408, + "step": 496 + }, + { + "epoch": 10.06, + "learning_rate": 0.00013660714285714288, + "loss": 1.4764, + "step": 497 + }, + { + "epoch": 10.08, + "learning_rate": 0.0001364795918367347, + "loss": 1.4646, + "step": 498 + }, + { + "epoch": 10.1, + "learning_rate": 0.00013635204081632654, + "loss": 1.406, + "step": 499 + }, + { + "epoch": 10.12, + "learning_rate": 0.00013622448979591837, + "loss": 1.4785, + "step": 500 + }, + { + "epoch": 10.14, + "learning_rate": 0.00013609693877551023, + "loss": 1.4117, + "step": 501 + }, + { + "epoch": 10.16, + "learning_rate": 0.00013596938775510203, + "loss": 1.4108, + "step": 502 + }, + { + "epoch": 10.18, + "learning_rate": 0.0001358418367346939, + "loss": 1.4155, + "step": 503 + }, + { + "epoch": 10.2, + "learning_rate": 0.00013571428571428572, + "loss": 1.4021, + "step": 504 + }, + { + "epoch": 10.22, + "learning_rate": 0.00013558673469387755, + "loss": 1.411, + "step": 505 + }, + { + "epoch": 10.24, + "learning_rate": 0.00013545918367346938, + "loss": 1.3851, + "step": 506 + }, + { + "epoch": 10.26, + "learning_rate": 0.00013533163265306124, + "loss": 1.387, + "step": 507 + }, + { + "epoch": 10.28, + "learning_rate": 0.00013520408163265305, + "loss": 1.4163, + "step": 508 + }, + { + "epoch": 10.3, + "learning_rate": 0.0001350765306122449, + "loss": 1.3343, + "step": 509 + }, + { + "epoch": 10.32, + "learning_rate": 0.00013494897959183673, + "loss": 1.4811, + "step": 510 + }, + { + "epoch": 10.34, + "learning_rate": 0.0001348214285714286, + "loss": 1.4086, + "step": 511 + }, + { + "epoch": 10.36, + "learning_rate": 0.0001346938775510204, + "loss": 
1.3879, + "step": 512 + }, + { + "epoch": 10.38, + "learning_rate": 0.00013456632653061225, + "loss": 1.4204, + "step": 513 + }, + { + "epoch": 10.4, + "learning_rate": 0.00013443877551020408, + "loss": 1.4158, + "step": 514 + }, + { + "epoch": 10.42, + "learning_rate": 0.00013431122448979592, + "loss": 1.4521, + "step": 515 + }, + { + "epoch": 10.44, + "learning_rate": 0.00013418367346938777, + "loss": 1.4196, + "step": 516 + }, + { + "epoch": 10.46, + "learning_rate": 0.0001340561224489796, + "loss": 1.4361, + "step": 517 + }, + { + "epoch": 10.48, + "learning_rate": 0.00013392857142857144, + "loss": 1.4482, + "step": 518 + }, + { + "epoch": 10.5, + "learning_rate": 0.00013380102040816327, + "loss": 1.4801, + "step": 519 + }, + { + "epoch": 10.52, + "learning_rate": 0.00013367346938775512, + "loss": 1.4556, + "step": 520 + }, + { + "epoch": 10.54, + "learning_rate": 0.00013354591836734695, + "loss": 1.3902, + "step": 521 + }, + { + "epoch": 10.56, + "learning_rate": 0.00013341836734693879, + "loss": 1.4269, + "step": 522 + }, + { + "epoch": 10.58, + "learning_rate": 0.00013329081632653062, + "loss": 1.4899, + "step": 523 + }, + { + "epoch": 10.6, + "learning_rate": 0.00013316326530612247, + "loss": 1.3952, + "step": 524 + }, + { + "epoch": 10.62, + "learning_rate": 0.00013303571428571428, + "loss": 1.4116, + "step": 525 + }, + { + "epoch": 10.64, + "learning_rate": 0.00013290816326530614, + "loss": 1.4583, + "step": 526 + }, + { + "epoch": 10.66, + "learning_rate": 0.00013278061224489797, + "loss": 1.4466, + "step": 527 + }, + { + "epoch": 10.68, + "learning_rate": 0.0001326530612244898, + "loss": 1.4242, + "step": 528 + }, + { + "epoch": 10.7, + "learning_rate": 0.00013252551020408163, + "loss": 1.3717, + "step": 529 + }, + { + "epoch": 10.72, + "learning_rate": 0.0001323979591836735, + "loss": 1.4583, + "step": 530 + }, + { + "epoch": 10.74, + "learning_rate": 0.0001322704081632653, + "loss": 1.4185, + "step": 531 + }, + { + "epoch": 10.76, + "learning_rate": 0.00013214285714285715, + "loss": 1.4287, + "step": 532 + }, + { + "epoch": 10.78, + "learning_rate": 0.00013201530612244898, + "loss": 1.4385, + "step": 533 + }, + { + "epoch": 10.8, + "learning_rate": 0.00013188775510204084, + "loss": 1.453, + "step": 534 + }, + { + "epoch": 10.83, + "learning_rate": 0.00013176020408163264, + "loss": 1.4161, + "step": 535 + }, + { + "epoch": 10.85, + "learning_rate": 0.0001316326530612245, + "loss": 1.457, + "step": 536 + }, + { + "epoch": 10.87, + "learning_rate": 0.00013150510204081633, + "loss": 1.4367, + "step": 537 + }, + { + "epoch": 10.89, + "learning_rate": 0.00013137755102040816, + "loss": 1.4256, + "step": 538 + }, + { + "epoch": 10.91, + "learning_rate": 0.00013125000000000002, + "loss": 1.424, + "step": 539 + }, + { + "epoch": 10.93, + "learning_rate": 0.00013112244897959185, + "loss": 1.3923, + "step": 540 + }, + { + "epoch": 10.95, + "learning_rate": 0.00013099489795918368, + "loss": 1.4225, + "step": 541 + }, + { + "epoch": 10.97, + "learning_rate": 0.0001308673469387755, + "loss": 1.3969, + "step": 542 + }, + { + "epoch": 10.99, + "learning_rate": 0.00013073979591836737, + "loss": 1.4446, + "step": 543 + }, + { + "epoch": 11.01, + "learning_rate": 0.00013061224489795917, + "loss": 1.4375, + "step": 544 + }, + { + "epoch": 11.03, + "learning_rate": 0.00013048469387755103, + "loss": 1.4064, + "step": 545 + }, + { + "epoch": 11.05, + "learning_rate": 0.00013035714285714286, + "loss": 1.3454, + "step": 546 + }, + { + "epoch": 11.07, + "learning_rate": 0.00013022959183673472, + 
"loss": 1.3234, + "step": 547 + }, + { + "epoch": 11.09, + "learning_rate": 0.00013010204081632652, + "loss": 1.3759, + "step": 548 + }, + { + "epoch": 11.11, + "learning_rate": 0.00012997448979591838, + "loss": 1.4221, + "step": 549 + }, + { + "epoch": 11.13, + "learning_rate": 0.0001298469387755102, + "loss": 1.4261, + "step": 550 + }, + { + "epoch": 11.15, + "learning_rate": 0.00012971938775510204, + "loss": 1.3341, + "step": 551 + }, + { + "epoch": 11.17, + "learning_rate": 0.00012959183673469387, + "loss": 1.3994, + "step": 552 + }, + { + "epoch": 11.19, + "learning_rate": 0.00012946428571428573, + "loss": 1.3894, + "step": 553 + }, + { + "epoch": 11.21, + "learning_rate": 0.00012933673469387754, + "loss": 1.3585, + "step": 554 + }, + { + "epoch": 11.23, + "learning_rate": 0.0001292091836734694, + "loss": 1.3763, + "step": 555 + }, + { + "epoch": 11.25, + "learning_rate": 0.00012908163265306123, + "loss": 1.3623, + "step": 556 + }, + { + "epoch": 11.27, + "learning_rate": 0.00012895408163265306, + "loss": 1.3907, + "step": 557 + }, + { + "epoch": 11.29, + "learning_rate": 0.0001288265306122449, + "loss": 1.3807, + "step": 558 + }, + { + "epoch": 11.31, + "learning_rate": 0.00012869897959183674, + "loss": 1.4045, + "step": 559 + }, + { + "epoch": 11.33, + "learning_rate": 0.00012857142857142858, + "loss": 1.4038, + "step": 560 + }, + { + "epoch": 11.35, + "learning_rate": 0.0001284438775510204, + "loss": 1.3466, + "step": 561 + }, + { + "epoch": 11.37, + "learning_rate": 0.00012831632653061226, + "loss": 1.3449, + "step": 562 + }, + { + "epoch": 11.39, + "learning_rate": 0.0001281887755102041, + "loss": 1.3866, + "step": 563 + }, + { + "epoch": 11.41, + "learning_rate": 0.00012806122448979593, + "loss": 1.3106, + "step": 564 + }, + { + "epoch": 11.43, + "learning_rate": 0.00012793367346938776, + "loss": 1.4414, + "step": 565 + }, + { + "epoch": 11.45, + "learning_rate": 0.00012780612244897962, + "loss": 1.3737, + "step": 566 + }, + { + "epoch": 11.47, + "learning_rate": 0.00012767857142857142, + "loss": 1.4053, + "step": 567 + }, + { + "epoch": 11.49, + "learning_rate": 0.00012755102040816328, + "loss": 1.4561, + "step": 568 + }, + { + "epoch": 11.51, + "learning_rate": 0.0001274234693877551, + "loss": 1.3684, + "step": 569 + }, + { + "epoch": 11.53, + "learning_rate": 0.00012729591836734697, + "loss": 1.3117, + "step": 570 + }, + { + "epoch": 11.55, + "learning_rate": 0.00012716836734693877, + "loss": 1.3474, + "step": 571 + }, + { + "epoch": 11.57, + "learning_rate": 0.00012704081632653063, + "loss": 1.3804, + "step": 572 + }, + { + "epoch": 11.59, + "learning_rate": 0.00012691326530612246, + "loss": 1.3656, + "step": 573 + }, + { + "epoch": 11.61, + "learning_rate": 0.0001267857142857143, + "loss": 1.3133, + "step": 574 + }, + { + "epoch": 11.63, + "learning_rate": 0.00012665816326530612, + "loss": 1.4077, + "step": 575 + }, + { + "epoch": 11.65, + "learning_rate": 0.00012653061224489798, + "loss": 1.4087, + "step": 576 + }, + { + "epoch": 11.67, + "learning_rate": 0.00012640306122448978, + "loss": 1.3524, + "step": 577 + }, + { + "epoch": 11.7, + "learning_rate": 0.00012627551020408164, + "loss": 1.3481, + "step": 578 + }, + { + "epoch": 11.72, + "learning_rate": 0.00012614795918367347, + "loss": 1.4497, + "step": 579 + }, + { + "epoch": 11.74, + "learning_rate": 0.0001260204081632653, + "loss": 1.3866, + "step": 580 + }, + { + "epoch": 11.76, + "learning_rate": 0.00012589285714285713, + "loss": 1.42, + "step": 581 + }, + { + "epoch": 11.78, + "learning_rate": 0.000125765306122449, 
+ "loss": 1.3562, + "step": 582 + }, + { + "epoch": 11.8, + "learning_rate": 0.00012563775510204082, + "loss": 1.3249, + "step": 583 + }, + { + "epoch": 11.82, + "learning_rate": 0.00012551020408163265, + "loss": 1.4277, + "step": 584 + }, + { + "epoch": 11.84, + "learning_rate": 0.0001253826530612245, + "loss": 1.3734, + "step": 585 + }, + { + "epoch": 11.86, + "learning_rate": 0.00012525510204081634, + "loss": 1.3765, + "step": 586 + }, + { + "epoch": 11.88, + "learning_rate": 0.00012512755102040817, + "loss": 1.4153, + "step": 587 + }, + { + "epoch": 11.9, + "learning_rate": 0.000125, + "loss": 1.3847, + "step": 588 + }, + { + "epoch": 11.92, + "learning_rate": 0.00012487244897959186, + "loss": 1.3824, + "step": 589 + }, + { + "epoch": 11.94, + "learning_rate": 0.00012474489795918366, + "loss": 1.3938, + "step": 590 + }, + { + "epoch": 11.96, + "learning_rate": 0.00012461734693877552, + "loss": 1.4143, + "step": 591 + }, + { + "epoch": 11.98, + "learning_rate": 0.00012448979591836735, + "loss": 1.3794, + "step": 592 + }, + { + "epoch": 12.0, + "learning_rate": 0.00012436224489795918, + "loss": 1.3755, + "step": 593 + }, + { + "epoch": 12.02, + "learning_rate": 0.00012423469387755101, + "loss": 1.3736, + "step": 594 + }, + { + "epoch": 12.04, + "learning_rate": 0.00012410714285714287, + "loss": 1.2957, + "step": 595 + }, + { + "epoch": 12.06, + "learning_rate": 0.0001239795918367347, + "loss": 1.2996, + "step": 596 + }, + { + "epoch": 12.08, + "learning_rate": 0.00012385204081632653, + "loss": 1.3648, + "step": 597 + }, + { + "epoch": 12.1, + "learning_rate": 0.00012372448979591837, + "loss": 1.3031, + "step": 598 + }, + { + "epoch": 12.12, + "learning_rate": 0.00012359693877551022, + "loss": 1.2933, + "step": 599 + }, + { + "epoch": 12.14, + "learning_rate": 0.00012346938775510203, + "loss": 1.322, + "step": 600 + }, + { + "epoch": 12.16, + "learning_rate": 0.00012334183673469389, + "loss": 1.3123, + "step": 601 + }, + { + "epoch": 12.18, + "learning_rate": 0.00012321428571428572, + "loss": 1.3187, + "step": 602 + }, + { + "epoch": 12.2, + "learning_rate": 0.00012308673469387755, + "loss": 1.3353, + "step": 603 + }, + { + "epoch": 12.22, + "learning_rate": 0.0001229591836734694, + "loss": 1.3221, + "step": 604 + }, + { + "epoch": 12.24, + "learning_rate": 0.00012283163265306124, + "loss": 1.3458, + "step": 605 + }, + { + "epoch": 12.26, + "learning_rate": 0.00012270408163265307, + "loss": 1.275, + "step": 606 + }, + { + "epoch": 12.28, + "learning_rate": 0.0001225765306122449, + "loss": 1.3455, + "step": 607 + }, + { + "epoch": 12.3, + "learning_rate": 0.00012244897959183676, + "loss": 1.2769, + "step": 608 + }, + { + "epoch": 12.32, + "learning_rate": 0.00012232142857142859, + "loss": 1.3201, + "step": 609 + }, + { + "epoch": 12.34, + "learning_rate": 0.00012219387755102042, + "loss": 1.3073, + "step": 610 + }, + { + "epoch": 12.36, + "learning_rate": 0.00012206632653061225, + "loss": 1.3103, + "step": 611 + }, + { + "epoch": 12.38, + "learning_rate": 0.00012193877551020409, + "loss": 1.4437, + "step": 612 + }, + { + "epoch": 12.4, + "learning_rate": 0.00012181122448979591, + "loss": 1.3086, + "step": 613 + }, + { + "epoch": 12.42, + "learning_rate": 0.00012168367346938775, + "loss": 1.3867, + "step": 614 + }, + { + "epoch": 12.44, + "learning_rate": 0.0001215561224489796, + "loss": 1.2565, + "step": 615 + }, + { + "epoch": 12.46, + "learning_rate": 0.00012142857142857143, + "loss": 1.335, + "step": 616 + }, + { + "epoch": 12.48, + "learning_rate": 0.00012130102040816327, + "loss": 
1.3423, + "step": 617 + }, + { + "epoch": 12.5, + "learning_rate": 0.00012117346938775512, + "loss": 1.3433, + "step": 618 + }, + { + "epoch": 12.52, + "learning_rate": 0.00012104591836734695, + "loss": 1.3387, + "step": 619 + }, + { + "epoch": 12.55, + "learning_rate": 0.00012091836734693878, + "loss": 1.3923, + "step": 620 + }, + { + "epoch": 12.57, + "learning_rate": 0.00012079081632653062, + "loss": 1.3774, + "step": 621 + }, + { + "epoch": 12.59, + "learning_rate": 0.00012066326530612247, + "loss": 1.3203, + "step": 622 + }, + { + "epoch": 12.61, + "learning_rate": 0.00012053571428571429, + "loss": 1.2924, + "step": 623 + }, + { + "epoch": 12.63, + "learning_rate": 0.00012040816326530613, + "loss": 1.3292, + "step": 624 + }, + { + "epoch": 12.65, + "learning_rate": 0.00012028061224489798, + "loss": 1.3161, + "step": 625 + }, + { + "epoch": 12.67, + "learning_rate": 0.00012015306122448979, + "loss": 1.352, + "step": 626 + }, + { + "epoch": 12.69, + "learning_rate": 0.00012002551020408164, + "loss": 1.3577, + "step": 627 + }, + { + "epoch": 12.71, + "learning_rate": 0.00011989795918367348, + "loss": 1.3575, + "step": 628 + }, + { + "epoch": 12.73, + "learning_rate": 0.0001197704081632653, + "loss": 1.3727, + "step": 629 + }, + { + "epoch": 12.75, + "learning_rate": 0.00011964285714285714, + "loss": 1.3312, + "step": 630 + }, + { + "epoch": 12.77, + "learning_rate": 0.00011951530612244899, + "loss": 1.3378, + "step": 631 + }, + { + "epoch": 12.79, + "learning_rate": 0.00011938775510204083, + "loss": 1.295, + "step": 632 + }, + { + "epoch": 12.81, + "learning_rate": 0.00011926020408163265, + "loss": 1.3447, + "step": 633 + }, + { + "epoch": 12.83, + "learning_rate": 0.0001191326530612245, + "loss": 1.3835, + "step": 634 + }, + { + "epoch": 12.85, + "learning_rate": 0.00011900510204081634, + "loss": 1.3222, + "step": 635 + }, + { + "epoch": 12.87, + "learning_rate": 0.00011887755102040817, + "loss": 1.2851, + "step": 636 + }, + { + "epoch": 12.89, + "learning_rate": 0.00011875, + "loss": 1.2723, + "step": 637 + }, + { + "epoch": 12.91, + "learning_rate": 0.00011862244897959184, + "loss": 1.3924, + "step": 638 + }, + { + "epoch": 12.93, + "learning_rate": 0.00011849489795918368, + "loss": 1.4625, + "step": 639 + }, + { + "epoch": 12.95, + "learning_rate": 0.00011836734693877552, + "loss": 1.3245, + "step": 640 + }, + { + "epoch": 12.97, + "learning_rate": 0.00011823979591836736, + "loss": 1.4042, + "step": 641 + }, + { + "epoch": 12.99, + "learning_rate": 0.00011811224489795918, + "loss": 1.3761, + "step": 642 + }, + { + "epoch": 13.01, + "learning_rate": 0.00011798469387755103, + "loss": 1.3376, + "step": 643 + }, + { + "epoch": 13.03, + "learning_rate": 0.00011785714285714287, + "loss": 1.2174, + "step": 644 + }, + { + "epoch": 13.05, + "learning_rate": 0.00011772959183673471, + "loss": 1.3602, + "step": 645 + }, + { + "epoch": 13.07, + "learning_rate": 0.00011760204081632653, + "loss": 1.3002, + "step": 646 + }, + { + "epoch": 13.09, + "learning_rate": 0.00011747448979591838, + "loss": 1.2262, + "step": 647 + }, + { + "epoch": 13.11, + "learning_rate": 0.00011734693877551022, + "loss": 1.3048, + "step": 648 + }, + { + "epoch": 13.13, + "learning_rate": 0.00011721938775510204, + "loss": 1.2231, + "step": 649 + }, + { + "epoch": 13.15, + "learning_rate": 0.00011709183673469388, + "loss": 1.2996, + "step": 650 + }, + { + "epoch": 13.17, + "learning_rate": 0.00011696428571428573, + "loss": 1.2708, + "step": 651 + }, + { + "epoch": 13.19, + "learning_rate": 0.00011683673469387754, + "loss": 
1.2776, + "step": 652 + }, + { + "epoch": 13.21, + "learning_rate": 0.00011670918367346939, + "loss": 1.248, + "step": 653 + }, + { + "epoch": 13.23, + "learning_rate": 0.00011658163265306123, + "loss": 1.2582, + "step": 654 + }, + { + "epoch": 13.25, + "learning_rate": 0.00011645408163265305, + "loss": 1.3011, + "step": 655 + }, + { + "epoch": 13.27, + "learning_rate": 0.0001163265306122449, + "loss": 1.2969, + "step": 656 + }, + { + "epoch": 13.29, + "learning_rate": 0.00011619897959183674, + "loss": 1.2454, + "step": 657 + }, + { + "epoch": 13.31, + "learning_rate": 0.00011607142857142858, + "loss": 1.1914, + "step": 658 + }, + { + "epoch": 13.33, + "learning_rate": 0.00011594387755102041, + "loss": 1.34, + "step": 659 + }, + { + "epoch": 13.35, + "learning_rate": 0.00011581632653061225, + "loss": 1.2828, + "step": 660 + }, + { + "epoch": 13.37, + "learning_rate": 0.00011568877551020409, + "loss": 1.2962, + "step": 661 + }, + { + "epoch": 13.39, + "learning_rate": 0.00011556122448979592, + "loss": 1.3334, + "step": 662 + }, + { + "epoch": 13.42, + "learning_rate": 0.00011543367346938776, + "loss": 1.2832, + "step": 663 + }, + { + "epoch": 13.44, + "learning_rate": 0.00011530612244897961, + "loss": 1.3012, + "step": 664 + }, + { + "epoch": 13.46, + "learning_rate": 0.00011517857142857143, + "loss": 1.2857, + "step": 665 + }, + { + "epoch": 13.48, + "learning_rate": 0.00011505102040816327, + "loss": 1.2855, + "step": 666 + }, + { + "epoch": 13.5, + "learning_rate": 0.00011492346938775512, + "loss": 1.3077, + "step": 667 + }, + { + "epoch": 13.52, + "learning_rate": 0.00011479591836734696, + "loss": 1.3139, + "step": 668 + }, + { + "epoch": 13.54, + "learning_rate": 0.00011466836734693878, + "loss": 1.3138, + "step": 669 + }, + { + "epoch": 13.56, + "learning_rate": 0.00011454081632653062, + "loss": 1.2808, + "step": 670 + }, + { + "epoch": 13.58, + "learning_rate": 0.00011441326530612247, + "loss": 1.2492, + "step": 671 + }, + { + "epoch": 13.6, + "learning_rate": 0.00011428571428571428, + "loss": 1.2027, + "step": 672 + }, + { + "epoch": 13.62, + "learning_rate": 0.00011415816326530613, + "loss": 1.33, + "step": 673 + }, + { + "epoch": 13.64, + "learning_rate": 0.00011403061224489797, + "loss": 1.3112, + "step": 674 + }, + { + "epoch": 13.66, + "learning_rate": 0.00011390306122448979, + "loss": 1.2772, + "step": 675 + }, + { + "epoch": 13.68, + "learning_rate": 0.00011377551020408163, + "loss": 1.2701, + "step": 676 + }, + { + "epoch": 13.7, + "learning_rate": 0.00011364795918367348, + "loss": 1.1973, + "step": 677 + }, + { + "epoch": 13.72, + "learning_rate": 0.0001135204081632653, + "loss": 1.3124, + "step": 678 + }, + { + "epoch": 13.74, + "learning_rate": 0.00011339285714285714, + "loss": 1.3085, + "step": 679 + }, + { + "epoch": 13.76, + "learning_rate": 0.00011326530612244898, + "loss": 1.3457, + "step": 680 + }, + { + "epoch": 13.78, + "learning_rate": 0.00011313775510204083, + "loss": 1.3338, + "step": 681 + }, + { + "epoch": 13.8, + "learning_rate": 0.00011301020408163266, + "loss": 1.2753, + "step": 682 + }, + { + "epoch": 13.82, + "learning_rate": 0.00011288265306122449, + "loss": 1.2786, + "step": 683 + }, + { + "epoch": 13.84, + "learning_rate": 0.00011275510204081634, + "loss": 1.2584, + "step": 684 + }, + { + "epoch": 13.86, + "learning_rate": 0.00011262755102040817, + "loss": 1.2779, + "step": 685 + }, + { + "epoch": 13.88, + "learning_rate": 0.00011250000000000001, + "loss": 1.3502, + "step": 686 + }, + { + "epoch": 13.9, + "learning_rate": 0.00011237244897959185, + 
"loss": 1.3251, + "step": 687 + }, + { + "epoch": 13.92, + "learning_rate": 0.00011224489795918367, + "loss": 1.273, + "step": 688 + }, + { + "epoch": 13.94, + "learning_rate": 0.00011211734693877552, + "loss": 1.3341, + "step": 689 + }, + { + "epoch": 13.96, + "learning_rate": 0.00011198979591836736, + "loss": 1.2654, + "step": 690 + }, + { + "epoch": 13.98, + "learning_rate": 0.00011186224489795918, + "loss": 1.3333, + "step": 691 + }, + { + "epoch": 14.0, + "learning_rate": 0.00011173469387755102, + "loss": 1.3246, + "step": 692 + }, + { + "epoch": 14.02, + "learning_rate": 0.00011160714285714287, + "loss": 1.2547, + "step": 693 + }, + { + "epoch": 14.04, + "learning_rate": 0.00011147959183673471, + "loss": 1.208, + "step": 694 + }, + { + "epoch": 14.06, + "learning_rate": 0.00011135204081632653, + "loss": 1.223, + "step": 695 + }, + { + "epoch": 14.08, + "learning_rate": 0.00011122448979591837, + "loss": 1.2483, + "step": 696 + }, + { + "epoch": 14.1, + "learning_rate": 0.00011109693877551022, + "loss": 1.2823, + "step": 697 + }, + { + "epoch": 14.12, + "learning_rate": 0.00011096938775510204, + "loss": 1.2013, + "step": 698 + }, + { + "epoch": 14.14, + "learning_rate": 0.00011084183673469388, + "loss": 1.1883, + "step": 699 + }, + { + "epoch": 14.16, + "learning_rate": 0.00011071428571428572, + "loss": 1.2364, + "step": 700 + } + ], + "logging_steps": 1, + "max_steps": 1568, + "num_train_epochs": 32, + "save_steps": 100, + "total_flos": 1.0356683330832384e+18, + "trial_name": null, + "trial_params": null +} diff --git a/checkpoint-700/training_args.bin b/checkpoint-700/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..db23e07d097c18532e52f58a70eb72d22e39c8c1 --- /dev/null +++ b/checkpoint-700/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b610cbc4242bb50b4985b00e205994ae514fec6d9e2273f2b545a583a07b154b +size 4155 diff --git a/checkpoint-800/README.md b/checkpoint-800/README.md new file mode 100644 index 0000000000000000000000000000000000000000..08371015f02382e6fcba318f4aaea54ae52cd3c4 --- /dev/null +++ b/checkpoint-800/README.md @@ -0,0 +1,34 @@ +--- +library_name: peft +--- +## Training procedure + + +The following `bitsandbytes` quantization config was used during training: +- quant_method: bitsandbytes +- load_in_8bit: True +- load_in_4bit: False +- llm_int8_threshold: 6.0 +- llm_int8_skip_modules: None +- llm_int8_enable_fp32_cpu_offload: False +- llm_int8_has_fp16_weight: False +- bnb_4bit_quant_type: fp4 +- bnb_4bit_use_double_quant: False +- bnb_4bit_compute_dtype: float32 + +The following `bitsandbytes` quantization config was used during training: +- quant_method: bitsandbytes +- load_in_8bit: True +- load_in_4bit: False +- llm_int8_threshold: 6.0 +- llm_int8_skip_modules: None +- llm_int8_enable_fp32_cpu_offload: False +- llm_int8_has_fp16_weight: False +- bnb_4bit_quant_type: fp4 +- bnb_4bit_use_double_quant: False +- bnb_4bit_compute_dtype: float32 +### Framework versions + +- PEFT 0.6.0.dev0 + +- PEFT 0.6.0.dev0 diff --git a/checkpoint-800/adapter_config.json b/checkpoint-800/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..751d838ac0c1ae5ca71ca448b25d7a8a0173f01b --- /dev/null +++ b/checkpoint-800/adapter_config.json @@ -0,0 +1,23 @@ +{ + "auto_mapping": null, + "base_model_name_or_path": "bigscience/bloomz-3b", + "bias": "none", + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": true, + "layers_pattern": null, + 
"layers_to_transform": null, + "lora_alpha": 16, + "lora_dropout": 0.0, + "modules_to_save": null, + "peft_type": "LORA", + "r": 8, + "revision": null, + "target_modules": [ + "dense_4h_to_h", + "dense", + "dense_h_to_4h", + "query_key_value" + ], + "task_type": "CAUSAL_LM" +} \ No newline at end of file diff --git a/checkpoint-800/adapter_model.bin b/checkpoint-800/adapter_model.bin new file mode 100644 index 0000000000000000000000000000000000000000..cee32efffd1b20413bbd86ddf1c6b5e172aff0ad --- /dev/null +++ b/checkpoint-800/adapter_model.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f1d0c9cf78a0287e75a067e9f589df56d88beac420ee2db9171f4dc9525fa2ca +size 39409357 diff --git a/checkpoint-800/optimizer.pt b/checkpoint-800/optimizer.pt new file mode 100644 index 0000000000000000000000000000000000000000..b0d866f2df62fc478638a4259af93b239d1fe68d --- /dev/null +++ b/checkpoint-800/optimizer.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:07dbe2f482aa556941cc3c2c9287c9031960f1c986eb88f6af216069cf2f4eca +size 78844421 diff --git a/checkpoint-800/rng_state.pth b/checkpoint-800/rng_state.pth new file mode 100644 index 0000000000000000000000000000000000000000..b879e8b3e27b533241ce1d1d17eb9d40184e6556 --- /dev/null +++ b/checkpoint-800/rng_state.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ef5771dabc82868bc27d459c4a2a8532048b4914b817944e763d47579f3a8c81 +size 14575 diff --git a/checkpoint-800/scheduler.pt b/checkpoint-800/scheduler.pt new file mode 100644 index 0000000000000000000000000000000000000000..472e441060b50c337f61e375e95c2d2202b3a99c --- /dev/null +++ b/checkpoint-800/scheduler.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:eb093730edb340d1b1812e0604570255eef4ca2d6126dada512bcd3e775c93af +size 627 diff --git a/checkpoint-800/special_tokens_map.json b/checkpoint-800/special_tokens_map.json new file mode 100644 index 0000000000000000000000000000000000000000..fdafe480f024ff444c7492147536765ce5d55a2d --- /dev/null +++ b/checkpoint-800/special_tokens_map.json @@ -0,0 +1,6 @@ +{ + "bos_token": "", + "eos_token": "", + "pad_token": "", + "unk_token": "" +} diff --git a/checkpoint-800/tokenizer.json b/checkpoint-800/tokenizer.json new file mode 100644 index 0000000000000000000000000000000000000000..dbf002cafbd4818dcff2abc9156c088d681b4533 --- /dev/null +++ b/checkpoint-800/tokenizer.json @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:17a208233d2ee8d8c83b23bc214df737c44806a1919f444e89b31e586cd956ba +size 14500471 diff --git a/checkpoint-800/tokenizer_config.json b/checkpoint-800/tokenizer_config.json new file mode 100644 index 0000000000000000000000000000000000000000..4b56cc9c2965c07132c35df3e2972e93d98c82c3 --- /dev/null +++ b/checkpoint-800/tokenizer_config.json @@ -0,0 +1,10 @@ +{ + "add_prefix_space": false, + "bos_token": "", + "clean_up_tokenization_spaces": false, + "eos_token": "", + "model_max_length": 1000000000000000019884624838656, + "pad_token": "", + "tokenizer_class": "BloomTokenizer", + "unk_token": "" +} diff --git a/checkpoint-800/trainer_state.json b/checkpoint-800/trainer_state.json new file mode 100644 index 0000000000000000000000000000000000000000..23776a4314c0fade06a35f724e76e246d7aa5dc8 --- /dev/null +++ b/checkpoint-800/trainer_state.json @@ -0,0 +1,4819 @@ +{ + "best_metric": null, + "best_model_checkpoint": null, + "epoch": 16.187164084729687, + "eval_steps": 500, + "global_step": 800, + "is_hyper_param_search": false, + 
"is_local_process_zero": true, + "is_world_process_zero": true, + "log_history": [ + { + "epoch": 0.02, + "learning_rate": 0.00019987244897959184, + "loss": 3.2215, + "step": 1 + }, + { + "epoch": 0.04, + "learning_rate": 0.00019974489795918367, + "loss": 2.8365, + "step": 2 + }, + { + "epoch": 0.06, + "learning_rate": 0.00019961734693877553, + "loss": 2.602, + "step": 3 + }, + { + "epoch": 0.08, + "learning_rate": 0.00019948979591836736, + "loss": 2.4196, + "step": 4 + }, + { + "epoch": 0.1, + "learning_rate": 0.0001993622448979592, + "loss": 2.2574, + "step": 5 + }, + { + "epoch": 0.12, + "learning_rate": 0.00019923469387755102, + "loss": 2.2239, + "step": 6 + }, + { + "epoch": 0.14, + "learning_rate": 0.00019910714285714288, + "loss": 2.1661, + "step": 7 + }, + { + "epoch": 0.16, + "learning_rate": 0.0001989795918367347, + "loss": 2.0987, + "step": 8 + }, + { + "epoch": 0.18, + "learning_rate": 0.00019885204081632654, + "loss": 2.015, + "step": 9 + }, + { + "epoch": 0.2, + "learning_rate": 0.00019872448979591837, + "loss": 1.9771, + "step": 10 + }, + { + "epoch": 0.22, + "learning_rate": 0.00019859693877551023, + "loss": 2.0271, + "step": 11 + }, + { + "epoch": 0.24, + "learning_rate": 0.00019846938775510203, + "loss": 1.9812, + "step": 12 + }, + { + "epoch": 0.26, + "learning_rate": 0.0001983418367346939, + "loss": 2.0834, + "step": 13 + }, + { + "epoch": 0.28, + "learning_rate": 0.00019821428571428572, + "loss": 1.9174, + "step": 14 + }, + { + "epoch": 0.3, + "learning_rate": 0.00019808673469387755, + "loss": 1.8409, + "step": 15 + }, + { + "epoch": 0.32, + "learning_rate": 0.00019795918367346938, + "loss": 1.929, + "step": 16 + }, + { + "epoch": 0.34, + "learning_rate": 0.00019783163265306124, + "loss": 2.0041, + "step": 17 + }, + { + "epoch": 0.36, + "learning_rate": 0.00019770408163265305, + "loss": 1.9385, + "step": 18 + }, + { + "epoch": 0.38, + "learning_rate": 0.0001975765306122449, + "loss": 1.9592, + "step": 19 + }, + { + "epoch": 0.4, + "learning_rate": 0.00019744897959183674, + "loss": 1.9701, + "step": 20 + }, + { + "epoch": 0.42, + "learning_rate": 0.0001973214285714286, + "loss": 1.9277, + "step": 21 + }, + { + "epoch": 0.45, + "learning_rate": 0.00019719387755102042, + "loss": 1.8394, + "step": 22 + }, + { + "epoch": 0.47, + "learning_rate": 0.00019706632653061226, + "loss": 1.8666, + "step": 23 + }, + { + "epoch": 0.49, + "learning_rate": 0.00019693877551020409, + "loss": 1.8997, + "step": 24 + }, + { + "epoch": 0.51, + "learning_rate": 0.00019681122448979592, + "loss": 1.9432, + "step": 25 + }, + { + "epoch": 0.53, + "learning_rate": 0.00019668367346938777, + "loss": 1.9137, + "step": 26 + }, + { + "epoch": 0.55, + "learning_rate": 0.0001965561224489796, + "loss": 1.905, + "step": 27 + }, + { + "epoch": 0.57, + "learning_rate": 0.00019642857142857144, + "loss": 1.8708, + "step": 28 + }, + { + "epoch": 0.59, + "learning_rate": 0.00019630102040816327, + "loss": 1.9097, + "step": 29 + }, + { + "epoch": 0.61, + "learning_rate": 0.00019617346938775513, + "loss": 1.896, + "step": 30 + }, + { + "epoch": 0.63, + "learning_rate": 0.00019604591836734696, + "loss": 1.8834, + "step": 31 + }, + { + "epoch": 0.65, + "learning_rate": 0.0001959183673469388, + "loss": 1.8323, + "step": 32 + }, + { + "epoch": 0.67, + "learning_rate": 0.00019579081632653062, + "loss": 1.804, + "step": 33 + }, + { + "epoch": 0.69, + "learning_rate": 0.00019566326530612248, + "loss": 1.8906, + "step": 34 + }, + { + "epoch": 0.71, + "learning_rate": 0.00019553571428571428, + "loss": 1.8693, + "step": 35 + 
}, + { + "epoch": 0.73, + "learning_rate": 0.00019540816326530614, + "loss": 1.9308, + "step": 36 + }, + { + "epoch": 0.75, + "learning_rate": 0.00019528061224489797, + "loss": 1.8082, + "step": 37 + }, + { + "epoch": 0.77, + "learning_rate": 0.0001951530612244898, + "loss": 1.848, + "step": 38 + }, + { + "epoch": 0.79, + "learning_rate": 0.00019502551020408163, + "loss": 1.8866, + "step": 39 + }, + { + "epoch": 0.81, + "learning_rate": 0.0001948979591836735, + "loss": 1.7844, + "step": 40 + }, + { + "epoch": 0.83, + "learning_rate": 0.0001947704081632653, + "loss": 1.8485, + "step": 41 + }, + { + "epoch": 0.85, + "learning_rate": 0.00019464285714285715, + "loss": 1.7917, + "step": 42 + }, + { + "epoch": 0.87, + "learning_rate": 0.00019451530612244898, + "loss": 1.7342, + "step": 43 + }, + { + "epoch": 0.89, + "learning_rate": 0.00019438775510204084, + "loss": 1.8479, + "step": 44 + }, + { + "epoch": 0.91, + "learning_rate": 0.00019426020408163267, + "loss": 1.8639, + "step": 45 + }, + { + "epoch": 0.93, + "learning_rate": 0.0001941326530612245, + "loss": 1.8166, + "step": 46 + }, + { + "epoch": 0.95, + "learning_rate": 0.00019400510204081633, + "loss": 1.7566, + "step": 47 + }, + { + "epoch": 0.97, + "learning_rate": 0.00019387755102040816, + "loss": 1.8071, + "step": 48 + }, + { + "epoch": 0.99, + "learning_rate": 0.00019375000000000002, + "loss": 1.8612, + "step": 49 + }, + { + "epoch": 1.01, + "learning_rate": 0.00019362244897959185, + "loss": 1.7819, + "step": 50 + }, + { + "epoch": 1.03, + "learning_rate": 0.00019349489795918368, + "loss": 1.8647, + "step": 51 + }, + { + "epoch": 1.05, + "learning_rate": 0.0001933673469387755, + "loss": 1.8196, + "step": 52 + }, + { + "epoch": 1.07, + "learning_rate": 0.00019323979591836737, + "loss": 1.8027, + "step": 53 + }, + { + "epoch": 1.09, + "learning_rate": 0.00019311224489795917, + "loss": 1.8927, + "step": 54 + }, + { + "epoch": 1.11, + "learning_rate": 0.00019298469387755103, + "loss": 1.8481, + "step": 55 + }, + { + "epoch": 1.13, + "learning_rate": 0.00019285714285714286, + "loss": 1.7781, + "step": 56 + }, + { + "epoch": 1.15, + "learning_rate": 0.00019272959183673472, + "loss": 1.8101, + "step": 57 + }, + { + "epoch": 1.17, + "learning_rate": 0.00019260204081632653, + "loss": 1.7257, + "step": 58 + }, + { + "epoch": 1.19, + "learning_rate": 0.00019247448979591838, + "loss": 1.8185, + "step": 59 + }, + { + "epoch": 1.21, + "learning_rate": 0.00019234693877551021, + "loss": 1.8557, + "step": 60 + }, + { + "epoch": 1.23, + "learning_rate": 0.00019221938775510204, + "loss": 1.7418, + "step": 61 + }, + { + "epoch": 1.25, + "learning_rate": 0.00019209183673469388, + "loss": 1.6879, + "step": 62 + }, + { + "epoch": 1.27, + "learning_rate": 0.00019196428571428573, + "loss": 1.7651, + "step": 63 + }, + { + "epoch": 1.29, + "learning_rate": 0.00019183673469387756, + "loss": 1.7759, + "step": 64 + }, + { + "epoch": 1.32, + "learning_rate": 0.0001917091836734694, + "loss": 1.7691, + "step": 65 + }, + { + "epoch": 1.34, + "learning_rate": 0.00019158163265306123, + "loss": 1.7794, + "step": 66 + }, + { + "epoch": 1.36, + "learning_rate": 0.00019145408163265306, + "loss": 1.8152, + "step": 67 + }, + { + "epoch": 1.38, + "learning_rate": 0.00019132653061224492, + "loss": 1.8052, + "step": 68 + }, + { + "epoch": 1.4, + "learning_rate": 0.00019119897959183675, + "loss": 1.8054, + "step": 69 + }, + { + "epoch": 1.42, + "learning_rate": 0.00019107142857142858, + "loss": 1.8114, + "step": 70 + }, + { + "epoch": 1.44, + "learning_rate": 
0.0001909438775510204, + "loss": 1.7749, + "step": 71 + }, + { + "epoch": 1.46, + "learning_rate": 0.00019081632653061227, + "loss": 1.777, + "step": 72 + }, + { + "epoch": 1.48, + "learning_rate": 0.0001906887755102041, + "loss": 1.7896, + "step": 73 + }, + { + "epoch": 1.5, + "learning_rate": 0.00019056122448979593, + "loss": 1.8335, + "step": 74 + }, + { + "epoch": 1.52, + "learning_rate": 0.00019043367346938776, + "loss": 1.8155, + "step": 75 + }, + { + "epoch": 1.54, + "learning_rate": 0.00019030612244897962, + "loss": 1.8224, + "step": 76 + }, + { + "epoch": 1.56, + "learning_rate": 0.00019017857142857142, + "loss": 1.7889, + "step": 77 + }, + { + "epoch": 1.58, + "learning_rate": 0.00019005102040816328, + "loss": 1.8866, + "step": 78 + }, + { + "epoch": 1.6, + "learning_rate": 0.0001899234693877551, + "loss": 1.8439, + "step": 79 + }, + { + "epoch": 1.62, + "learning_rate": 0.00018979591836734697, + "loss": 1.7906, + "step": 80 + }, + { + "epoch": 1.64, + "learning_rate": 0.00018966836734693877, + "loss": 1.8627, + "step": 81 + }, + { + "epoch": 1.66, + "learning_rate": 0.00018954081632653063, + "loss": 1.7497, + "step": 82 + }, + { + "epoch": 1.68, + "learning_rate": 0.00018941326530612246, + "loss": 1.7936, + "step": 83 + }, + { + "epoch": 1.7, + "learning_rate": 0.0001892857142857143, + "loss": 1.8341, + "step": 84 + }, + { + "epoch": 1.72, + "learning_rate": 0.00018915816326530612, + "loss": 1.7868, + "step": 85 + }, + { + "epoch": 1.74, + "learning_rate": 0.00018903061224489798, + "loss": 1.7493, + "step": 86 + }, + { + "epoch": 1.76, + "learning_rate": 0.0001889030612244898, + "loss": 1.7926, + "step": 87 + }, + { + "epoch": 1.78, + "learning_rate": 0.00018877551020408164, + "loss": 1.8278, + "step": 88 + }, + { + "epoch": 1.8, + "learning_rate": 0.00018864795918367347, + "loss": 1.7387, + "step": 89 + }, + { + "epoch": 1.82, + "learning_rate": 0.0001885204081632653, + "loss": 1.7669, + "step": 90 + }, + { + "epoch": 1.84, + "learning_rate": 0.00018839285714285716, + "loss": 1.7686, + "step": 91 + }, + { + "epoch": 1.86, + "learning_rate": 0.000188265306122449, + "loss": 1.7759, + "step": 92 + }, + { + "epoch": 1.88, + "learning_rate": 0.00018813775510204082, + "loss": 1.7016, + "step": 93 + }, + { + "epoch": 1.9, + "learning_rate": 0.00018801020408163265, + "loss": 1.8123, + "step": 94 + }, + { + "epoch": 1.92, + "learning_rate": 0.0001878826530612245, + "loss": 1.8315, + "step": 95 + }, + { + "epoch": 1.94, + "learning_rate": 0.00018775510204081634, + "loss": 1.7679, + "step": 96 + }, + { + "epoch": 1.96, + "learning_rate": 0.00018762755102040817, + "loss": 1.7874, + "step": 97 + }, + { + "epoch": 1.98, + "learning_rate": 0.0001875, + "loss": 1.8008, + "step": 98 + }, + { + "epoch": 2.0, + "learning_rate": 0.00018737244897959186, + "loss": 1.7177, + "step": 99 + }, + { + "epoch": 2.02, + "learning_rate": 0.00018724489795918367, + "loss": 1.7272, + "step": 100 + }, + { + "epoch": 2.04, + "learning_rate": 0.00018711734693877552, + "loss": 1.7848, + "step": 101 + }, + { + "epoch": 2.06, + "learning_rate": 0.00018698979591836735, + "loss": 1.744, + "step": 102 + }, + { + "epoch": 2.08, + "learning_rate": 0.00018686224489795919, + "loss": 1.7005, + "step": 103 + }, + { + "epoch": 2.1, + "learning_rate": 0.00018673469387755102, + "loss": 1.8247, + "step": 104 + }, + { + "epoch": 2.12, + "learning_rate": 0.00018660714285714287, + "loss": 1.6855, + "step": 105 + }, + { + "epoch": 2.14, + "learning_rate": 0.0001864795918367347, + "loss": 1.7627, + "step": 106 + }, + { + "epoch": 
2.17, + "learning_rate": 0.00018635204081632654, + "loss": 1.7564, + "step": 107 + }, + { + "epoch": 2.19, + "learning_rate": 0.00018622448979591837, + "loss": 1.8237, + "step": 108 + }, + { + "epoch": 2.21, + "learning_rate": 0.00018609693877551022, + "loss": 1.7421, + "step": 109 + }, + { + "epoch": 2.23, + "learning_rate": 0.00018596938775510206, + "loss": 1.7517, + "step": 110 + }, + { + "epoch": 2.25, + "learning_rate": 0.0001858418367346939, + "loss": 1.7515, + "step": 111 + }, + { + "epoch": 2.27, + "learning_rate": 0.00018571428571428572, + "loss": 1.7842, + "step": 112 + }, + { + "epoch": 2.29, + "learning_rate": 0.00018558673469387755, + "loss": 1.8001, + "step": 113 + }, + { + "epoch": 2.31, + "learning_rate": 0.0001854591836734694, + "loss": 1.7653, + "step": 114 + }, + { + "epoch": 2.33, + "learning_rate": 0.00018533163265306124, + "loss": 1.694, + "step": 115 + }, + { + "epoch": 2.35, + "learning_rate": 0.00018520408163265307, + "loss": 1.7457, + "step": 116 + }, + { + "epoch": 2.37, + "learning_rate": 0.0001850765306122449, + "loss": 1.7899, + "step": 117 + }, + { + "epoch": 2.39, + "learning_rate": 0.00018494897959183676, + "loss": 1.7473, + "step": 118 + }, + { + "epoch": 2.41, + "learning_rate": 0.0001848214285714286, + "loss": 1.6639, + "step": 119 + }, + { + "epoch": 2.43, + "learning_rate": 0.00018469387755102042, + "loss": 1.762, + "step": 120 + }, + { + "epoch": 2.45, + "learning_rate": 0.00018456632653061225, + "loss": 1.7378, + "step": 121 + }, + { + "epoch": 2.47, + "learning_rate": 0.0001844387755102041, + "loss": 1.672, + "step": 122 + }, + { + "epoch": 2.49, + "learning_rate": 0.0001843112244897959, + "loss": 1.7267, + "step": 123 + }, + { + "epoch": 2.51, + "learning_rate": 0.00018418367346938777, + "loss": 1.7825, + "step": 124 + }, + { + "epoch": 2.53, + "learning_rate": 0.0001840561224489796, + "loss": 1.7566, + "step": 125 + }, + { + "epoch": 2.55, + "learning_rate": 0.00018392857142857143, + "loss": 1.8169, + "step": 126 + }, + { + "epoch": 2.57, + "learning_rate": 0.00018380102040816326, + "loss": 1.6801, + "step": 127 + }, + { + "epoch": 2.59, + "learning_rate": 0.00018367346938775512, + "loss": 1.7292, + "step": 128 + }, + { + "epoch": 2.61, + "learning_rate": 0.00018354591836734695, + "loss": 1.737, + "step": 129 + }, + { + "epoch": 2.63, + "learning_rate": 0.00018341836734693878, + "loss": 1.7696, + "step": 130 + }, + { + "epoch": 2.65, + "learning_rate": 0.0001832908163265306, + "loss": 1.7239, + "step": 131 + }, + { + "epoch": 2.67, + "learning_rate": 0.00018316326530612247, + "loss": 1.7441, + "step": 132 + }, + { + "epoch": 2.69, + "learning_rate": 0.0001830357142857143, + "loss": 1.7825, + "step": 133 + }, + { + "epoch": 2.71, + "learning_rate": 0.00018290816326530613, + "loss": 1.7411, + "step": 134 + }, + { + "epoch": 2.73, + "learning_rate": 0.00018278061224489796, + "loss": 1.7119, + "step": 135 + }, + { + "epoch": 2.75, + "learning_rate": 0.0001826530612244898, + "loss": 1.7443, + "step": 136 + }, + { + "epoch": 2.77, + "learning_rate": 0.00018252551020408165, + "loss": 1.7197, + "step": 137 + }, + { + "epoch": 2.79, + "learning_rate": 0.00018239795918367348, + "loss": 1.7273, + "step": 138 + }, + { + "epoch": 2.81, + "learning_rate": 0.0001822704081632653, + "loss": 1.7681, + "step": 139 + }, + { + "epoch": 2.83, + "learning_rate": 0.00018214285714285714, + "loss": 1.8088, + "step": 140 + }, + { + "epoch": 2.85, + "learning_rate": 0.000182015306122449, + "loss": 1.7301, + "step": 141 + }, + { + "epoch": 2.87, + "learning_rate": 
0.00018188775510204083, + "loss": 1.6853, + "step": 142 + }, + { + "epoch": 2.89, + "learning_rate": 0.00018176020408163266, + "loss": 1.6966, + "step": 143 + }, + { + "epoch": 2.91, + "learning_rate": 0.0001816326530612245, + "loss": 1.7938, + "step": 144 + }, + { + "epoch": 2.93, + "learning_rate": 0.00018150510204081635, + "loss": 1.7639, + "step": 145 + }, + { + "epoch": 2.95, + "learning_rate": 0.00018137755102040816, + "loss": 1.7527, + "step": 146 + }, + { + "epoch": 2.97, + "learning_rate": 0.00018125000000000001, + "loss": 1.7386, + "step": 147 + }, + { + "epoch": 2.99, + "learning_rate": 0.00018112244897959185, + "loss": 1.7223, + "step": 148 + }, + { + "epoch": 3.01, + "learning_rate": 0.00018099489795918368, + "loss": 1.7571, + "step": 149 + }, + { + "epoch": 3.04, + "learning_rate": 0.0001808673469387755, + "loss": 1.7054, + "step": 150 + }, + { + "epoch": 3.06, + "learning_rate": 0.00018073979591836737, + "loss": 1.6581, + "step": 151 + }, + { + "epoch": 3.08, + "learning_rate": 0.00018061224489795917, + "loss": 1.681, + "step": 152 + }, + { + "epoch": 3.1, + "learning_rate": 0.00018048469387755103, + "loss": 1.7425, + "step": 153 + }, + { + "epoch": 3.12, + "learning_rate": 0.00018035714285714286, + "loss": 1.7108, + "step": 154 + }, + { + "epoch": 3.14, + "learning_rate": 0.00018022959183673472, + "loss": 1.7194, + "step": 155 + }, + { + "epoch": 3.16, + "learning_rate": 0.00018010204081632655, + "loss": 1.6953, + "step": 156 + }, + { + "epoch": 3.18, + "learning_rate": 0.00017997448979591838, + "loss": 1.669, + "step": 157 + }, + { + "epoch": 3.2, + "learning_rate": 0.0001798469387755102, + "loss": 1.744, + "step": 158 + }, + { + "epoch": 3.22, + "learning_rate": 0.00017971938775510204, + "loss": 1.6467, + "step": 159 + }, + { + "epoch": 3.24, + "learning_rate": 0.0001795918367346939, + "loss": 1.7103, + "step": 160 + }, + { + "epoch": 3.26, + "learning_rate": 0.00017946428571428573, + "loss": 1.6662, + "step": 161 + }, + { + "epoch": 3.28, + "learning_rate": 0.00017933673469387756, + "loss": 1.6657, + "step": 162 + }, + { + "epoch": 3.3, + "learning_rate": 0.0001792091836734694, + "loss": 1.791, + "step": 163 + }, + { + "epoch": 3.32, + "learning_rate": 0.00017908163265306125, + "loss": 1.7704, + "step": 164 + }, + { + "epoch": 3.34, + "learning_rate": 0.00017895408163265305, + "loss": 1.7229, + "step": 165 + }, + { + "epoch": 3.36, + "learning_rate": 0.0001788265306122449, + "loss": 1.76, + "step": 166 + }, + { + "epoch": 3.38, + "learning_rate": 0.00017869897959183674, + "loss": 1.6482, + "step": 167 + }, + { + "epoch": 3.4, + "learning_rate": 0.0001785714285714286, + "loss": 1.8076, + "step": 168 + }, + { + "epoch": 3.42, + "learning_rate": 0.0001784438775510204, + "loss": 1.7368, + "step": 169 + }, + { + "epoch": 3.44, + "learning_rate": 0.00017831632653061226, + "loss": 1.6264, + "step": 170 + }, + { + "epoch": 3.46, + "learning_rate": 0.0001781887755102041, + "loss": 1.6289, + "step": 171 + }, + { + "epoch": 3.48, + "learning_rate": 0.00017806122448979592, + "loss": 1.7913, + "step": 172 + }, + { + "epoch": 3.5, + "learning_rate": 0.00017793367346938775, + "loss": 1.6985, + "step": 173 + }, + { + "epoch": 3.52, + "learning_rate": 0.0001778061224489796, + "loss": 1.6936, + "step": 174 + }, + { + "epoch": 3.54, + "learning_rate": 0.00017767857142857141, + "loss": 1.8068, + "step": 175 + }, + { + "epoch": 3.56, + "learning_rate": 0.00017755102040816327, + "loss": 1.7243, + "step": 176 + }, + { + "epoch": 3.58, + "learning_rate": 0.0001774234693877551, + "loss": 1.6893, 
+ "step": 177 + }, + { + "epoch": 3.6, + "learning_rate": 0.00017729591836734696, + "loss": 1.8122, + "step": 178 + }, + { + "epoch": 3.62, + "learning_rate": 0.0001771683673469388, + "loss": 1.6562, + "step": 179 + }, + { + "epoch": 3.64, + "learning_rate": 0.00017704081632653062, + "loss": 1.6999, + "step": 180 + }, + { + "epoch": 3.66, + "learning_rate": 0.00017691326530612245, + "loss": 1.7229, + "step": 181 + }, + { + "epoch": 3.68, + "learning_rate": 0.00017678571428571428, + "loss": 1.6764, + "step": 182 + }, + { + "epoch": 3.7, + "learning_rate": 0.00017665816326530614, + "loss": 1.6982, + "step": 183 + }, + { + "epoch": 3.72, + "learning_rate": 0.00017653061224489797, + "loss": 1.696, + "step": 184 + }, + { + "epoch": 3.74, + "learning_rate": 0.0001764030612244898, + "loss": 1.6797, + "step": 185 + }, + { + "epoch": 3.76, + "learning_rate": 0.00017627551020408164, + "loss": 1.637, + "step": 186 + }, + { + "epoch": 3.78, + "learning_rate": 0.0001761479591836735, + "loss": 1.7074, + "step": 187 + }, + { + "epoch": 3.8, + "learning_rate": 0.0001760204081632653, + "loss": 1.705, + "step": 188 + }, + { + "epoch": 3.82, + "learning_rate": 0.00017589285714285716, + "loss": 1.6153, + "step": 189 + }, + { + "epoch": 3.84, + "learning_rate": 0.00017576530612244899, + "loss": 1.7354, + "step": 190 + }, + { + "epoch": 3.86, + "learning_rate": 0.00017563775510204084, + "loss": 1.6941, + "step": 191 + }, + { + "epoch": 3.88, + "learning_rate": 0.00017551020408163265, + "loss": 1.7231, + "step": 192 + }, + { + "epoch": 3.91, + "learning_rate": 0.0001753826530612245, + "loss": 1.7663, + "step": 193 + }, + { + "epoch": 3.93, + "learning_rate": 0.00017525510204081634, + "loss": 1.6532, + "step": 194 + }, + { + "epoch": 3.95, + "learning_rate": 0.00017512755102040817, + "loss": 1.7115, + "step": 195 + }, + { + "epoch": 3.97, + "learning_rate": 0.000175, + "loss": 1.6955, + "step": 196 + }, + { + "epoch": 3.99, + "learning_rate": 0.00017487244897959186, + "loss": 1.6863, + "step": 197 + }, + { + "epoch": 4.01, + "learning_rate": 0.00017474489795918366, + "loss": 1.7012, + "step": 198 + }, + { + "epoch": 4.03, + "learning_rate": 0.00017461734693877552, + "loss": 1.5927, + "step": 199 + }, + { + "epoch": 4.05, + "learning_rate": 0.00017448979591836735, + "loss": 1.6272, + "step": 200 + }, + { + "epoch": 4.07, + "learning_rate": 0.00017436224489795918, + "loss": 1.5994, + "step": 201 + }, + { + "epoch": 4.09, + "learning_rate": 0.00017423469387755104, + "loss": 1.7141, + "step": 202 + }, + { + "epoch": 4.11, + "learning_rate": 0.00017410714285714287, + "loss": 1.7547, + "step": 203 + }, + { + "epoch": 4.13, + "learning_rate": 0.0001739795918367347, + "loss": 1.6254, + "step": 204 + }, + { + "epoch": 4.15, + "learning_rate": 0.00017385204081632653, + "loss": 1.6686, + "step": 205 + }, + { + "epoch": 4.17, + "learning_rate": 0.0001737244897959184, + "loss": 1.6684, + "step": 206 + }, + { + "epoch": 4.19, + "learning_rate": 0.00017359693877551022, + "loss": 1.6724, + "step": 207 + }, + { + "epoch": 4.21, + "learning_rate": 0.00017346938775510205, + "loss": 1.7361, + "step": 208 + }, + { + "epoch": 4.23, + "learning_rate": 0.00017334183673469388, + "loss": 1.7167, + "step": 209 + }, + { + "epoch": 4.25, + "learning_rate": 0.00017321428571428574, + "loss": 1.7226, + "step": 210 + }, + { + "epoch": 4.27, + "learning_rate": 0.00017308673469387754, + "loss": 1.7133, + "step": 211 + }, + { + "epoch": 4.29, + "learning_rate": 0.0001729591836734694, + "loss": 1.649, + "step": 212 + }, + { + "epoch": 4.31, + 
"learning_rate": 0.00017283163265306123, + "loss": 1.7104, + "step": 213 + }, + { + "epoch": 4.33, + "learning_rate": 0.00017270408163265306, + "loss": 1.6861, + "step": 214 + }, + { + "epoch": 4.35, + "learning_rate": 0.0001725765306122449, + "loss": 1.648, + "step": 215 + }, + { + "epoch": 4.37, + "learning_rate": 0.00017244897959183675, + "loss": 1.6215, + "step": 216 + }, + { + "epoch": 4.39, + "learning_rate": 0.00017232142857142858, + "loss": 1.6334, + "step": 217 + }, + { + "epoch": 4.41, + "learning_rate": 0.0001721938775510204, + "loss": 1.6283, + "step": 218 + }, + { + "epoch": 4.43, + "learning_rate": 0.00017206632653061224, + "loss": 1.6462, + "step": 219 + }, + { + "epoch": 4.45, + "learning_rate": 0.0001719387755102041, + "loss": 1.7233, + "step": 220 + }, + { + "epoch": 4.47, + "learning_rate": 0.0001718112244897959, + "loss": 1.7839, + "step": 221 + }, + { + "epoch": 4.49, + "learning_rate": 0.00017168367346938776, + "loss": 1.7204, + "step": 222 + }, + { + "epoch": 4.51, + "learning_rate": 0.0001715561224489796, + "loss": 1.7671, + "step": 223 + }, + { + "epoch": 4.53, + "learning_rate": 0.00017142857142857143, + "loss": 1.6824, + "step": 224 + }, + { + "epoch": 4.55, + "learning_rate": 0.00017130102040816328, + "loss": 1.7068, + "step": 225 + }, + { + "epoch": 4.57, + "learning_rate": 0.00017117346938775511, + "loss": 1.6515, + "step": 226 + }, + { + "epoch": 4.59, + "learning_rate": 0.00017104591836734694, + "loss": 1.6586, + "step": 227 + }, + { + "epoch": 4.61, + "learning_rate": 0.00017091836734693878, + "loss": 1.6355, + "step": 228 + }, + { + "epoch": 4.63, + "learning_rate": 0.00017079081632653063, + "loss": 1.7173, + "step": 229 + }, + { + "epoch": 4.65, + "learning_rate": 0.00017066326530612246, + "loss": 1.6585, + "step": 230 + }, + { + "epoch": 4.67, + "learning_rate": 0.0001705357142857143, + "loss": 1.5856, + "step": 231 + }, + { + "epoch": 4.69, + "learning_rate": 0.00017040816326530613, + "loss": 1.5923, + "step": 232 + }, + { + "epoch": 4.71, + "learning_rate": 0.00017028061224489798, + "loss": 1.7128, + "step": 233 + }, + { + "epoch": 4.73, + "learning_rate": 0.0001701530612244898, + "loss": 1.6971, + "step": 234 + }, + { + "epoch": 4.75, + "learning_rate": 0.00017002551020408165, + "loss": 1.6416, + "step": 235 + }, + { + "epoch": 4.78, + "learning_rate": 0.00016989795918367348, + "loss": 1.645, + "step": 236 + }, + { + "epoch": 4.8, + "learning_rate": 0.0001697704081632653, + "loss": 1.6792, + "step": 237 + }, + { + "epoch": 4.82, + "learning_rate": 0.00016964285714285714, + "loss": 1.6522, + "step": 238 + }, + { + "epoch": 4.84, + "learning_rate": 0.000169515306122449, + "loss": 1.6315, + "step": 239 + }, + { + "epoch": 4.86, + "learning_rate": 0.00016938775510204083, + "loss": 1.6622, + "step": 240 + }, + { + "epoch": 4.88, + "learning_rate": 0.00016926020408163266, + "loss": 1.6566, + "step": 241 + }, + { + "epoch": 4.9, + "learning_rate": 0.0001691326530612245, + "loss": 1.7141, + "step": 242 + }, + { + "epoch": 4.92, + "learning_rate": 0.00016900510204081635, + "loss": 1.5873, + "step": 243 + }, + { + "epoch": 4.94, + "learning_rate": 0.00016887755102040818, + "loss": 1.6571, + "step": 244 + }, + { + "epoch": 4.96, + "learning_rate": 0.00016875, + "loss": 1.6829, + "step": 245 + }, + { + "epoch": 4.98, + "learning_rate": 0.00016862244897959184, + "loss": 1.6935, + "step": 246 + }, + { + "epoch": 5.0, + "learning_rate": 0.00016849489795918367, + "loss": 1.6782, + "step": 247 + }, + { + "epoch": 5.02, + "learning_rate": 0.00016836734693877553, + 
"loss": 1.622, + "step": 248 + }, + { + "epoch": 5.04, + "learning_rate": 0.00016823979591836736, + "loss": 1.6596, + "step": 249 + }, + { + "epoch": 5.06, + "learning_rate": 0.0001681122448979592, + "loss": 1.5821, + "step": 250 + }, + { + "epoch": 5.08, + "learning_rate": 0.00016798469387755102, + "loss": 1.7292, + "step": 251 + }, + { + "epoch": 5.1, + "learning_rate": 0.00016785714285714288, + "loss": 1.646, + "step": 252 + }, + { + "epoch": 5.12, + "learning_rate": 0.0001677295918367347, + "loss": 1.6969, + "step": 253 + }, + { + "epoch": 5.14, + "learning_rate": 0.00016760204081632654, + "loss": 1.6082, + "step": 254 + }, + { + "epoch": 5.16, + "learning_rate": 0.00016747448979591837, + "loss": 1.5843, + "step": 255 + }, + { + "epoch": 5.18, + "learning_rate": 0.00016734693877551023, + "loss": 1.6827, + "step": 256 + }, + { + "epoch": 5.2, + "learning_rate": 0.00016721938775510203, + "loss": 1.5824, + "step": 257 + }, + { + "epoch": 5.22, + "learning_rate": 0.0001670918367346939, + "loss": 1.6795, + "step": 258 + }, + { + "epoch": 5.24, + "learning_rate": 0.00016696428571428572, + "loss": 1.5639, + "step": 259 + }, + { + "epoch": 5.26, + "learning_rate": 0.00016683673469387755, + "loss": 1.592, + "step": 260 + }, + { + "epoch": 5.28, + "learning_rate": 0.00016670918367346938, + "loss": 1.65, + "step": 261 + }, + { + "epoch": 5.3, + "learning_rate": 0.00016658163265306124, + "loss": 1.5592, + "step": 262 + }, + { + "epoch": 5.32, + "learning_rate": 0.00016645408163265305, + "loss": 1.5091, + "step": 263 + }, + { + "epoch": 5.34, + "learning_rate": 0.0001663265306122449, + "loss": 1.6138, + "step": 264 + }, + { + "epoch": 5.36, + "learning_rate": 0.00016619897959183673, + "loss": 1.625, + "step": 265 + }, + { + "epoch": 5.38, + "learning_rate": 0.0001660714285714286, + "loss": 1.5757, + "step": 266 + }, + { + "epoch": 5.4, + "learning_rate": 0.00016594387755102042, + "loss": 1.6372, + "step": 267 + }, + { + "epoch": 5.42, + "learning_rate": 0.00016581632653061225, + "loss": 1.5891, + "step": 268 + }, + { + "epoch": 5.44, + "learning_rate": 0.00016568877551020409, + "loss": 1.6893, + "step": 269 + }, + { + "epoch": 5.46, + "learning_rate": 0.00016556122448979592, + "loss": 1.6662, + "step": 270 + }, + { + "epoch": 5.48, + "learning_rate": 0.00016543367346938777, + "loss": 1.7132, + "step": 271 + }, + { + "epoch": 5.5, + "learning_rate": 0.0001653061224489796, + "loss": 1.5835, + "step": 272 + }, + { + "epoch": 5.52, + "learning_rate": 0.00016517857142857144, + "loss": 1.6342, + "step": 273 + }, + { + "epoch": 5.54, + "learning_rate": 0.00016505102040816327, + "loss": 1.6717, + "step": 274 + }, + { + "epoch": 5.56, + "learning_rate": 0.00016492346938775512, + "loss": 1.6248, + "step": 275 + }, + { + "epoch": 5.58, + "learning_rate": 0.00016479591836734696, + "loss": 1.6117, + "step": 276 + }, + { + "epoch": 5.6, + "learning_rate": 0.0001646683673469388, + "loss": 1.6798, + "step": 277 + }, + { + "epoch": 5.63, + "learning_rate": 0.00016454081632653062, + "loss": 1.6406, + "step": 278 + }, + { + "epoch": 5.65, + "learning_rate": 0.00016441326530612248, + "loss": 1.6512, + "step": 279 + }, + { + "epoch": 5.67, + "learning_rate": 0.00016428571428571428, + "loss": 1.6102, + "step": 280 + }, + { + "epoch": 5.69, + "learning_rate": 0.00016415816326530614, + "loss": 1.6113, + "step": 281 + }, + { + "epoch": 5.71, + "learning_rate": 0.00016403061224489797, + "loss": 1.7116, + "step": 282 + }, + { + "epoch": 5.73, + "learning_rate": 0.0001639030612244898, + "loss": 1.6846, + "step": 283 + }, + { 
+ "epoch": 5.75, + "learning_rate": 0.00016377551020408163, + "loss": 1.6911, + "step": 284 + }, + { + "epoch": 5.77, + "learning_rate": 0.0001636479591836735, + "loss": 1.6202, + "step": 285 + }, + { + "epoch": 5.79, + "learning_rate": 0.0001635204081632653, + "loss": 1.5715, + "step": 286 + }, + { + "epoch": 5.81, + "learning_rate": 0.00016339285714285715, + "loss": 1.6461, + "step": 287 + }, + { + "epoch": 5.83, + "learning_rate": 0.00016326530612244898, + "loss": 1.6624, + "step": 288 + }, + { + "epoch": 5.85, + "learning_rate": 0.00016313775510204084, + "loss": 1.6535, + "step": 289 + }, + { + "epoch": 5.87, + "learning_rate": 0.00016301020408163267, + "loss": 1.6275, + "step": 290 + }, + { + "epoch": 5.89, + "learning_rate": 0.0001628826530612245, + "loss": 1.6636, + "step": 291 + }, + { + "epoch": 5.91, + "learning_rate": 0.00016275510204081633, + "loss": 1.6546, + "step": 292 + }, + { + "epoch": 5.93, + "learning_rate": 0.00016262755102040816, + "loss": 1.7274, + "step": 293 + }, + { + "epoch": 5.95, + "learning_rate": 0.00016250000000000002, + "loss": 1.5901, + "step": 294 + }, + { + "epoch": 5.97, + "learning_rate": 0.00016237244897959185, + "loss": 1.6046, + "step": 295 + }, + { + "epoch": 5.99, + "learning_rate": 0.00016224489795918368, + "loss": 1.5828, + "step": 296 + }, + { + "epoch": 6.01, + "learning_rate": 0.0001621173469387755, + "loss": 1.6435, + "step": 297 + }, + { + "epoch": 6.03, + "learning_rate": 0.00016198979591836737, + "loss": 1.6263, + "step": 298 + }, + { + "epoch": 6.05, + "learning_rate": 0.00016186224489795917, + "loss": 1.4944, + "step": 299 + }, + { + "epoch": 6.07, + "learning_rate": 0.00016173469387755103, + "loss": 1.6286, + "step": 300 + }, + { + "epoch": 6.09, + "learning_rate": 0.00016160714285714286, + "loss": 1.694, + "step": 301 + }, + { + "epoch": 6.11, + "learning_rate": 0.00016147959183673472, + "loss": 1.6197, + "step": 302 + }, + { + "epoch": 6.13, + "learning_rate": 0.00016135204081632652, + "loss": 1.5597, + "step": 303 + }, + { + "epoch": 6.15, + "learning_rate": 0.00016122448979591838, + "loss": 1.5487, + "step": 304 + }, + { + "epoch": 6.17, + "learning_rate": 0.0001610969387755102, + "loss": 1.5769, + "step": 305 + }, + { + "epoch": 6.19, + "learning_rate": 0.00016096938775510204, + "loss": 1.6367, + "step": 306 + }, + { + "epoch": 6.21, + "learning_rate": 0.00016084183673469388, + "loss": 1.583, + "step": 307 + }, + { + "epoch": 6.23, + "learning_rate": 0.00016071428571428573, + "loss": 1.6201, + "step": 308 + }, + { + "epoch": 6.25, + "learning_rate": 0.00016058673469387754, + "loss": 1.6586, + "step": 309 + }, + { + "epoch": 6.27, + "learning_rate": 0.0001604591836734694, + "loss": 1.6711, + "step": 310 + }, + { + "epoch": 6.29, + "learning_rate": 0.00016033163265306123, + "loss": 1.6402, + "step": 311 + }, + { + "epoch": 6.31, + "learning_rate": 0.00016020408163265306, + "loss": 1.5247, + "step": 312 + }, + { + "epoch": 6.33, + "learning_rate": 0.00016007653061224491, + "loss": 1.5356, + "step": 313 + }, + { + "epoch": 6.35, + "learning_rate": 0.00015994897959183675, + "loss": 1.564, + "step": 314 + }, + { + "epoch": 6.37, + "learning_rate": 0.00015982142857142858, + "loss": 1.563, + "step": 315 + }, + { + "epoch": 6.39, + "learning_rate": 0.0001596938775510204, + "loss": 1.5198, + "step": 316 + }, + { + "epoch": 6.41, + "learning_rate": 0.00015956632653061227, + "loss": 1.6558, + "step": 317 + }, + { + "epoch": 6.43, + "learning_rate": 0.0001594387755102041, + "loss": 1.5534, + "step": 318 + }, + { + "epoch": 6.45, + 
"learning_rate": 0.00015931122448979593, + "loss": 1.6239, + "step": 319 + }, + { + "epoch": 6.47, + "learning_rate": 0.00015918367346938776, + "loss": 1.5645, + "step": 320 + }, + { + "epoch": 6.5, + "learning_rate": 0.00015905612244897962, + "loss": 1.5713, + "step": 321 + }, + { + "epoch": 6.52, + "learning_rate": 0.00015892857142857142, + "loss": 1.6176, + "step": 322 + }, + { + "epoch": 6.54, + "learning_rate": 0.00015880102040816328, + "loss": 1.502, + "step": 323 + }, + { + "epoch": 6.56, + "learning_rate": 0.0001586734693877551, + "loss": 1.645, + "step": 324 + }, + { + "epoch": 6.58, + "learning_rate": 0.00015854591836734697, + "loss": 1.5904, + "step": 325 + }, + { + "epoch": 6.6, + "learning_rate": 0.00015841836734693877, + "loss": 1.6149, + "step": 326 + }, + { + "epoch": 6.62, + "learning_rate": 0.00015829081632653063, + "loss": 1.6757, + "step": 327 + }, + { + "epoch": 6.64, + "learning_rate": 0.00015816326530612246, + "loss": 1.541, + "step": 328 + }, + { + "epoch": 6.66, + "learning_rate": 0.0001580357142857143, + "loss": 1.5898, + "step": 329 + }, + { + "epoch": 6.68, + "learning_rate": 0.00015790816326530612, + "loss": 1.5441, + "step": 330 + }, + { + "epoch": 6.7, + "learning_rate": 0.00015778061224489798, + "loss": 1.61, + "step": 331 + }, + { + "epoch": 6.72, + "learning_rate": 0.00015765306122448978, + "loss": 1.615, + "step": 332 + }, + { + "epoch": 6.74, + "learning_rate": 0.00015752551020408164, + "loss": 1.6575, + "step": 333 + }, + { + "epoch": 6.76, + "learning_rate": 0.00015739795918367347, + "loss": 1.6702, + "step": 334 + }, + { + "epoch": 6.78, + "learning_rate": 0.0001572704081632653, + "loss": 1.6009, + "step": 335 + }, + { + "epoch": 6.8, + "learning_rate": 0.00015714285714285716, + "loss": 1.5568, + "step": 336 + }, + { + "epoch": 6.82, + "learning_rate": 0.000157015306122449, + "loss": 1.619, + "step": 337 + }, + { + "epoch": 6.84, + "learning_rate": 0.00015688775510204082, + "loss": 1.5563, + "step": 338 + }, + { + "epoch": 6.86, + "learning_rate": 0.00015676020408163265, + "loss": 1.6328, + "step": 339 + }, + { + "epoch": 6.88, + "learning_rate": 0.0001566326530612245, + "loss": 1.5726, + "step": 340 + }, + { + "epoch": 6.9, + "learning_rate": 0.00015650510204081634, + "loss": 1.6199, + "step": 341 + }, + { + "epoch": 6.92, + "learning_rate": 0.00015637755102040817, + "loss": 1.5722, + "step": 342 + }, + { + "epoch": 6.94, + "learning_rate": 0.00015625, + "loss": 1.5685, + "step": 343 + }, + { + "epoch": 6.96, + "learning_rate": 0.00015612244897959186, + "loss": 1.5615, + "step": 344 + }, + { + "epoch": 6.98, + "learning_rate": 0.00015599489795918366, + "loss": 1.5994, + "step": 345 + }, + { + "epoch": 7.0, + "learning_rate": 0.00015586734693877552, + "loss": 1.5579, + "step": 346 + }, + { + "epoch": 7.02, + "learning_rate": 0.00015573979591836735, + "loss": 1.547, + "step": 347 + }, + { + "epoch": 7.04, + "learning_rate": 0.00015561224489795918, + "loss": 1.5292, + "step": 348 + }, + { + "epoch": 7.06, + "learning_rate": 0.00015548469387755102, + "loss": 1.6032, + "step": 349 + }, + { + "epoch": 7.08, + "learning_rate": 0.00015535714285714287, + "loss": 1.5149, + "step": 350 + }, + { + "epoch": 7.1, + "learning_rate": 0.0001552295918367347, + "loss": 1.6093, + "step": 351 + }, + { + "epoch": 7.12, + "learning_rate": 0.00015510204081632654, + "loss": 1.5421, + "step": 352 + }, + { + "epoch": 7.14, + "learning_rate": 0.00015497448979591837, + "loss": 1.5733, + "step": 353 + }, + { + "epoch": 7.16, + "learning_rate": 0.00015484693877551022, + "loss": 
1.5703, + "step": 354 + }, + { + "epoch": 7.18, + "learning_rate": 0.00015471938775510203, + "loss": 1.6141, + "step": 355 + }, + { + "epoch": 7.2, + "learning_rate": 0.00015459183673469389, + "loss": 1.5526, + "step": 356 + }, + { + "epoch": 7.22, + "learning_rate": 0.00015446428571428572, + "loss": 1.5347, + "step": 357 + }, + { + "epoch": 7.24, + "learning_rate": 0.00015433673469387755, + "loss": 1.5682, + "step": 358 + }, + { + "epoch": 7.26, + "learning_rate": 0.0001542091836734694, + "loss": 1.5292, + "step": 359 + }, + { + "epoch": 7.28, + "learning_rate": 0.00015408163265306124, + "loss": 1.499, + "step": 360 + }, + { + "epoch": 7.3, + "learning_rate": 0.00015395408163265307, + "loss": 1.5624, + "step": 361 + }, + { + "epoch": 7.32, + "learning_rate": 0.0001538265306122449, + "loss": 1.627, + "step": 362 + }, + { + "epoch": 7.34, + "learning_rate": 0.00015369897959183676, + "loss": 1.5327, + "step": 363 + }, + { + "epoch": 7.37, + "learning_rate": 0.0001535714285714286, + "loss": 1.5622, + "step": 364 + }, + { + "epoch": 7.39, + "learning_rate": 0.00015344387755102042, + "loss": 1.5659, + "step": 365 + }, + { + "epoch": 7.41, + "learning_rate": 0.00015331632653061225, + "loss": 1.5019, + "step": 366 + }, + { + "epoch": 7.43, + "learning_rate": 0.0001531887755102041, + "loss": 1.5921, + "step": 367 + }, + { + "epoch": 7.45, + "learning_rate": 0.0001530612244897959, + "loss": 1.5914, + "step": 368 + }, + { + "epoch": 7.47, + "learning_rate": 0.00015293367346938777, + "loss": 1.5045, + "step": 369 + }, + { + "epoch": 7.49, + "learning_rate": 0.0001528061224489796, + "loss": 1.6209, + "step": 370 + }, + { + "epoch": 7.51, + "learning_rate": 0.00015267857142857143, + "loss": 1.5198, + "step": 371 + }, + { + "epoch": 7.53, + "learning_rate": 0.00015255102040816326, + "loss": 1.5363, + "step": 372 + }, + { + "epoch": 7.55, + "learning_rate": 0.00015242346938775512, + "loss": 1.5391, + "step": 373 + }, + { + "epoch": 7.57, + "learning_rate": 0.00015229591836734695, + "loss": 1.4546, + "step": 374 + }, + { + "epoch": 7.59, + "learning_rate": 0.00015216836734693878, + "loss": 1.5546, + "step": 375 + }, + { + "epoch": 7.61, + "learning_rate": 0.0001520408163265306, + "loss": 1.5629, + "step": 376 + }, + { + "epoch": 7.63, + "learning_rate": 0.00015191326530612247, + "loss": 1.6002, + "step": 377 + }, + { + "epoch": 7.65, + "learning_rate": 0.00015178571428571427, + "loss": 1.5543, + "step": 378 + }, + { + "epoch": 7.67, + "learning_rate": 0.00015165816326530613, + "loss": 1.5925, + "step": 379 + }, + { + "epoch": 7.69, + "learning_rate": 0.00015153061224489796, + "loss": 1.5631, + "step": 380 + }, + { + "epoch": 7.71, + "learning_rate": 0.0001514030612244898, + "loss": 1.5677, + "step": 381 + }, + { + "epoch": 7.73, + "learning_rate": 0.00015127551020408165, + "loss": 1.5828, + "step": 382 + }, + { + "epoch": 7.75, + "learning_rate": 0.00015114795918367348, + "loss": 1.6494, + "step": 383 + }, + { + "epoch": 7.77, + "learning_rate": 0.0001510204081632653, + "loss": 1.553, + "step": 384 + }, + { + "epoch": 7.79, + "learning_rate": 0.00015089285714285714, + "loss": 1.6156, + "step": 385 + }, + { + "epoch": 7.81, + "learning_rate": 0.000150765306122449, + "loss": 1.5001, + "step": 386 + }, + { + "epoch": 7.83, + "learning_rate": 0.00015063775510204083, + "loss": 1.5321, + "step": 387 + }, + { + "epoch": 7.85, + "learning_rate": 0.00015051020408163266, + "loss": 1.5307, + "step": 388 + }, + { + "epoch": 7.87, + "learning_rate": 0.0001503826530612245, + "loss": 1.5639, + "step": 389 + }, + { + 
"epoch": 7.89, + "learning_rate": 0.00015025510204081635, + "loss": 1.517, + "step": 390 + }, + { + "epoch": 7.91, + "learning_rate": 0.00015012755102040816, + "loss": 1.4776, + "step": 391 + }, + { + "epoch": 7.93, + "learning_rate": 0.00015000000000000001, + "loss": 1.5368, + "step": 392 + }, + { + "epoch": 7.95, + "learning_rate": 0.00014987244897959184, + "loss": 1.5636, + "step": 393 + }, + { + "epoch": 7.97, + "learning_rate": 0.00014974489795918368, + "loss": 1.6004, + "step": 394 + }, + { + "epoch": 7.99, + "learning_rate": 0.0001496173469387755, + "loss": 1.5524, + "step": 395 + }, + { + "epoch": 8.01, + "learning_rate": 0.00014948979591836736, + "loss": 1.5307, + "step": 396 + }, + { + "epoch": 8.03, + "learning_rate": 0.00014936224489795917, + "loss": 1.5123, + "step": 397 + }, + { + "epoch": 8.05, + "learning_rate": 0.00014923469387755103, + "loss": 1.5132, + "step": 398 + }, + { + "epoch": 8.07, + "learning_rate": 0.00014910714285714286, + "loss": 1.5109, + "step": 399 + }, + { + "epoch": 8.09, + "learning_rate": 0.00014897959183673472, + "loss": 1.5302, + "step": 400 + }, + { + "epoch": 8.11, + "learning_rate": 0.00014885204081632652, + "loss": 1.5238, + "step": 401 + }, + { + "epoch": 8.13, + "learning_rate": 0.00014872448979591838, + "loss": 1.4781, + "step": 402 + }, + { + "epoch": 8.15, + "learning_rate": 0.0001485969387755102, + "loss": 1.5446, + "step": 403 + }, + { + "epoch": 8.17, + "learning_rate": 0.00014846938775510204, + "loss": 1.5, + "step": 404 + }, + { + "epoch": 8.19, + "learning_rate": 0.0001483418367346939, + "loss": 1.5458, + "step": 405 + }, + { + "epoch": 8.21, + "learning_rate": 0.00014821428571428573, + "loss": 1.5257, + "step": 406 + }, + { + "epoch": 8.24, + "learning_rate": 0.00014808673469387756, + "loss": 1.4607, + "step": 407 + }, + { + "epoch": 8.26, + "learning_rate": 0.0001479591836734694, + "loss": 1.4282, + "step": 408 + }, + { + "epoch": 8.28, + "learning_rate": 0.00014783163265306125, + "loss": 1.4519, + "step": 409 + }, + { + "epoch": 8.3, + "learning_rate": 0.00014770408163265305, + "loss": 1.475, + "step": 410 + }, + { + "epoch": 8.32, + "learning_rate": 0.0001475765306122449, + "loss": 1.5425, + "step": 411 + }, + { + "epoch": 8.34, + "learning_rate": 0.00014744897959183674, + "loss": 1.5407, + "step": 412 + }, + { + "epoch": 8.36, + "learning_rate": 0.0001473214285714286, + "loss": 1.5698, + "step": 413 + }, + { + "epoch": 8.38, + "learning_rate": 0.0001471938775510204, + "loss": 1.4282, + "step": 414 + }, + { + "epoch": 8.4, + "learning_rate": 0.00014706632653061226, + "loss": 1.5301, + "step": 415 + }, + { + "epoch": 8.42, + "learning_rate": 0.0001469387755102041, + "loss": 1.5083, + "step": 416 + }, + { + "epoch": 8.44, + "learning_rate": 0.00014681122448979592, + "loss": 1.5712, + "step": 417 + }, + { + "epoch": 8.46, + "learning_rate": 0.00014668367346938775, + "loss": 1.4363, + "step": 418 + }, + { + "epoch": 8.48, + "learning_rate": 0.0001465561224489796, + "loss": 1.4463, + "step": 419 + }, + { + "epoch": 8.5, + "learning_rate": 0.00014642857142857141, + "loss": 1.4738, + "step": 420 + }, + { + "epoch": 8.52, + "learning_rate": 0.00014630102040816327, + "loss": 1.5396, + "step": 421 + }, + { + "epoch": 8.54, + "learning_rate": 0.0001461734693877551, + "loss": 1.4384, + "step": 422 + }, + { + "epoch": 8.56, + "learning_rate": 0.00014604591836734696, + "loss": 1.5345, + "step": 423 + }, + { + "epoch": 8.58, + "learning_rate": 0.0001459183673469388, + "loss": 1.5355, + "step": 424 + }, + { + "epoch": 8.6, + "learning_rate": 
0.00014579081632653062, + "loss": 1.5188, + "step": 425 + }, + { + "epoch": 8.62, + "learning_rate": 0.00014566326530612245, + "loss": 1.5575, + "step": 426 + }, + { + "epoch": 8.64, + "learning_rate": 0.00014553571428571428, + "loss": 1.5279, + "step": 427 + }, + { + "epoch": 8.66, + "learning_rate": 0.00014540816326530614, + "loss": 1.5484, + "step": 428 + }, + { + "epoch": 8.68, + "learning_rate": 0.00014528061224489797, + "loss": 1.4878, + "step": 429 + }, + { + "epoch": 8.7, + "learning_rate": 0.0001451530612244898, + "loss": 1.503, + "step": 430 + }, + { + "epoch": 8.72, + "learning_rate": 0.00014502551020408163, + "loss": 1.4723, + "step": 431 + }, + { + "epoch": 8.74, + "learning_rate": 0.0001448979591836735, + "loss": 1.5579, + "step": 432 + }, + { + "epoch": 8.76, + "learning_rate": 0.0001447704081632653, + "loss": 1.4789, + "step": 433 + }, + { + "epoch": 8.78, + "learning_rate": 0.00014464285714285715, + "loss": 1.5501, + "step": 434 + }, + { + "epoch": 8.8, + "learning_rate": 0.00014451530612244899, + "loss": 1.5204, + "step": 435 + }, + { + "epoch": 8.82, + "learning_rate": 0.00014438775510204084, + "loss": 1.5489, + "step": 436 + }, + { + "epoch": 8.84, + "learning_rate": 0.00014426020408163265, + "loss": 1.5464, + "step": 437 + }, + { + "epoch": 8.86, + "learning_rate": 0.0001441326530612245, + "loss": 1.5896, + "step": 438 + }, + { + "epoch": 8.88, + "learning_rate": 0.00014400510204081634, + "loss": 1.5465, + "step": 439 + }, + { + "epoch": 8.9, + "learning_rate": 0.00014387755102040817, + "loss": 1.5094, + "step": 440 + }, + { + "epoch": 8.92, + "learning_rate": 0.00014375, + "loss": 1.5144, + "step": 441 + }, + { + "epoch": 8.94, + "learning_rate": 0.00014362244897959186, + "loss": 1.4919, + "step": 442 + }, + { + "epoch": 8.96, + "learning_rate": 0.00014349489795918366, + "loss": 1.4702, + "step": 443 + }, + { + "epoch": 8.98, + "learning_rate": 0.00014336734693877552, + "loss": 1.4996, + "step": 444 + }, + { + "epoch": 9.0, + "learning_rate": 0.00014323979591836735, + "loss": 1.5503, + "step": 445 + }, + { + "epoch": 9.02, + "learning_rate": 0.00014311224489795918, + "loss": 1.4125, + "step": 446 + }, + { + "epoch": 9.04, + "learning_rate": 0.00014298469387755104, + "loss": 1.4722, + "step": 447 + }, + { + "epoch": 9.06, + "learning_rate": 0.00014285714285714287, + "loss": 1.5199, + "step": 448 + }, + { + "epoch": 9.09, + "learning_rate": 0.0001427295918367347, + "loss": 1.4571, + "step": 449 + }, + { + "epoch": 9.11, + "learning_rate": 0.00014260204081632653, + "loss": 1.4996, + "step": 450 + }, + { + "epoch": 9.13, + "learning_rate": 0.0001424744897959184, + "loss": 1.4092, + "step": 451 + }, + { + "epoch": 9.15, + "learning_rate": 0.00014234693877551022, + "loss": 1.4198, + "step": 452 + }, + { + "epoch": 9.17, + "learning_rate": 0.00014221938775510205, + "loss": 1.4916, + "step": 453 + }, + { + "epoch": 9.19, + "learning_rate": 0.00014209183673469388, + "loss": 1.5051, + "step": 454 + }, + { + "epoch": 9.21, + "learning_rate": 0.00014196428571428574, + "loss": 1.4321, + "step": 455 + }, + { + "epoch": 9.23, + "learning_rate": 0.00014183673469387754, + "loss": 1.4097, + "step": 456 + }, + { + "epoch": 9.25, + "learning_rate": 0.0001417091836734694, + "loss": 1.4853, + "step": 457 + }, + { + "epoch": 9.27, + "learning_rate": 0.00014158163265306123, + "loss": 1.4593, + "step": 458 + }, + { + "epoch": 9.29, + "learning_rate": 0.00014145408163265306, + "loss": 1.3729, + "step": 459 + }, + { + "epoch": 9.31, + "learning_rate": 0.0001413265306122449, + "loss": 1.4467, + 
"step": 460 + }, + { + "epoch": 9.33, + "learning_rate": 0.00014119897959183675, + "loss": 1.4467, + "step": 461 + }, + { + "epoch": 9.35, + "learning_rate": 0.00014107142857142858, + "loss": 1.4785, + "step": 462 + }, + { + "epoch": 9.37, + "learning_rate": 0.0001409438775510204, + "loss": 1.4089, + "step": 463 + }, + { + "epoch": 9.39, + "learning_rate": 0.00014081632653061224, + "loss": 1.5026, + "step": 464 + }, + { + "epoch": 9.41, + "learning_rate": 0.0001406887755102041, + "loss": 1.4857, + "step": 465 + }, + { + "epoch": 9.43, + "learning_rate": 0.0001405612244897959, + "loss": 1.3745, + "step": 466 + }, + { + "epoch": 9.45, + "learning_rate": 0.00014043367346938776, + "loss": 1.4733, + "step": 467 + }, + { + "epoch": 9.47, + "learning_rate": 0.0001403061224489796, + "loss": 1.5212, + "step": 468 + }, + { + "epoch": 9.49, + "learning_rate": 0.00014017857142857142, + "loss": 1.5398, + "step": 469 + }, + { + "epoch": 9.51, + "learning_rate": 0.00014005102040816328, + "loss": 1.478, + "step": 470 + }, + { + "epoch": 9.53, + "learning_rate": 0.0001399234693877551, + "loss": 1.496, + "step": 471 + }, + { + "epoch": 9.55, + "learning_rate": 0.00013979591836734694, + "loss": 1.4837, + "step": 472 + }, + { + "epoch": 9.57, + "learning_rate": 0.00013966836734693878, + "loss": 1.4724, + "step": 473 + }, + { + "epoch": 9.59, + "learning_rate": 0.00013954081632653063, + "loss": 1.4828, + "step": 474 + }, + { + "epoch": 9.61, + "learning_rate": 0.00013941326530612246, + "loss": 1.5012, + "step": 475 + }, + { + "epoch": 9.63, + "learning_rate": 0.0001392857142857143, + "loss": 1.4879, + "step": 476 + }, + { + "epoch": 9.65, + "learning_rate": 0.00013915816326530613, + "loss": 1.4196, + "step": 477 + }, + { + "epoch": 9.67, + "learning_rate": 0.00013903061224489798, + "loss": 1.4915, + "step": 478 + }, + { + "epoch": 9.69, + "learning_rate": 0.0001389030612244898, + "loss": 1.3878, + "step": 479 + }, + { + "epoch": 9.71, + "learning_rate": 0.00013877551020408165, + "loss": 1.466, + "step": 480 + }, + { + "epoch": 9.73, + "learning_rate": 0.00013864795918367348, + "loss": 1.4582, + "step": 481 + }, + { + "epoch": 9.75, + "learning_rate": 0.0001385204081632653, + "loss": 1.533, + "step": 482 + }, + { + "epoch": 9.77, + "learning_rate": 0.00013839285714285714, + "loss": 1.4697, + "step": 483 + }, + { + "epoch": 9.79, + "learning_rate": 0.000138265306122449, + "loss": 1.3989, + "step": 484 + }, + { + "epoch": 9.81, + "learning_rate": 0.00013813775510204083, + "loss": 1.4361, + "step": 485 + }, + { + "epoch": 9.83, + "learning_rate": 0.00013801020408163266, + "loss": 1.5271, + "step": 486 + }, + { + "epoch": 9.85, + "learning_rate": 0.0001378826530612245, + "loss": 1.4905, + "step": 487 + }, + { + "epoch": 9.87, + "learning_rate": 0.00013775510204081635, + "loss": 1.4757, + "step": 488 + }, + { + "epoch": 9.89, + "learning_rate": 0.00013762755102040815, + "loss": 1.5485, + "step": 489 + }, + { + "epoch": 9.91, + "learning_rate": 0.0001375, + "loss": 1.4783, + "step": 490 + }, + { + "epoch": 9.93, + "learning_rate": 0.00013737244897959184, + "loss": 1.4849, + "step": 491 + }, + { + "epoch": 9.96, + "learning_rate": 0.00013724489795918367, + "loss": 1.5382, + "step": 492 + }, + { + "epoch": 9.98, + "learning_rate": 0.00013711734693877553, + "loss": 1.4902, + "step": 493 + }, + { + "epoch": 10.0, + "learning_rate": 0.00013698979591836736, + "loss": 1.4865, + "step": 494 + }, + { + "epoch": 10.02, + "learning_rate": 0.0001368622448979592, + "loss": 1.4436, + "step": 495 + }, + { + "epoch": 10.04, + 
"learning_rate": 0.00013673469387755102, + "loss": 1.408, + "step": 496 + }, + { + "epoch": 10.06, + "learning_rate": 0.00013660714285714288, + "loss": 1.4764, + "step": 497 + }, + { + "epoch": 10.08, + "learning_rate": 0.0001364795918367347, + "loss": 1.4646, + "step": 498 + }, + { + "epoch": 10.1, + "learning_rate": 0.00013635204081632654, + "loss": 1.406, + "step": 499 + }, + { + "epoch": 10.12, + "learning_rate": 0.00013622448979591837, + "loss": 1.4785, + "step": 500 + }, + { + "epoch": 10.14, + "learning_rate": 0.00013609693877551023, + "loss": 1.4117, + "step": 501 + }, + { + "epoch": 10.16, + "learning_rate": 0.00013596938775510203, + "loss": 1.4108, + "step": 502 + }, + { + "epoch": 10.18, + "learning_rate": 0.0001358418367346939, + "loss": 1.4155, + "step": 503 + }, + { + "epoch": 10.2, + "learning_rate": 0.00013571428571428572, + "loss": 1.4021, + "step": 504 + }, + { + "epoch": 10.22, + "learning_rate": 0.00013558673469387755, + "loss": 1.411, + "step": 505 + }, + { + "epoch": 10.24, + "learning_rate": 0.00013545918367346938, + "loss": 1.3851, + "step": 506 + }, + { + "epoch": 10.26, + "learning_rate": 0.00013533163265306124, + "loss": 1.387, + "step": 507 + }, + { + "epoch": 10.28, + "learning_rate": 0.00013520408163265305, + "loss": 1.4163, + "step": 508 + }, + { + "epoch": 10.3, + "learning_rate": 0.0001350765306122449, + "loss": 1.3343, + "step": 509 + }, + { + "epoch": 10.32, + "learning_rate": 0.00013494897959183673, + "loss": 1.4811, + "step": 510 + }, + { + "epoch": 10.34, + "learning_rate": 0.0001348214285714286, + "loss": 1.4086, + "step": 511 + }, + { + "epoch": 10.36, + "learning_rate": 0.0001346938775510204, + "loss": 1.3879, + "step": 512 + }, + { + "epoch": 10.38, + "learning_rate": 0.00013456632653061225, + "loss": 1.4204, + "step": 513 + }, + { + "epoch": 10.4, + "learning_rate": 0.00013443877551020408, + "loss": 1.4158, + "step": 514 + }, + { + "epoch": 10.42, + "learning_rate": 0.00013431122448979592, + "loss": 1.4521, + "step": 515 + }, + { + "epoch": 10.44, + "learning_rate": 0.00013418367346938777, + "loss": 1.4196, + "step": 516 + }, + { + "epoch": 10.46, + "learning_rate": 0.0001340561224489796, + "loss": 1.4361, + "step": 517 + }, + { + "epoch": 10.48, + "learning_rate": 0.00013392857142857144, + "loss": 1.4482, + "step": 518 + }, + { + "epoch": 10.5, + "learning_rate": 0.00013380102040816327, + "loss": 1.4801, + "step": 519 + }, + { + "epoch": 10.52, + "learning_rate": 0.00013367346938775512, + "loss": 1.4556, + "step": 520 + }, + { + "epoch": 10.54, + "learning_rate": 0.00013354591836734695, + "loss": 1.3902, + "step": 521 + }, + { + "epoch": 10.56, + "learning_rate": 0.00013341836734693879, + "loss": 1.4269, + "step": 522 + }, + { + "epoch": 10.58, + "learning_rate": 0.00013329081632653062, + "loss": 1.4899, + "step": 523 + }, + { + "epoch": 10.6, + "learning_rate": 0.00013316326530612247, + "loss": 1.3952, + "step": 524 + }, + { + "epoch": 10.62, + "learning_rate": 0.00013303571428571428, + "loss": 1.4116, + "step": 525 + }, + { + "epoch": 10.64, + "learning_rate": 0.00013290816326530614, + "loss": 1.4583, + "step": 526 + }, + { + "epoch": 10.66, + "learning_rate": 0.00013278061224489797, + "loss": 1.4466, + "step": 527 + }, + { + "epoch": 10.68, + "learning_rate": 0.0001326530612244898, + "loss": 1.4242, + "step": 528 + }, + { + "epoch": 10.7, + "learning_rate": 0.00013252551020408163, + "loss": 1.3717, + "step": 529 + }, + { + "epoch": 10.72, + "learning_rate": 0.0001323979591836735, + "loss": 1.4583, + "step": 530 + }, + { + "epoch": 10.74, + 
"learning_rate": 0.0001322704081632653, + "loss": 1.4185, + "step": 531 + }, + { + "epoch": 10.76, + "learning_rate": 0.00013214285714285715, + "loss": 1.4287, + "step": 532 + }, + { + "epoch": 10.78, + "learning_rate": 0.00013201530612244898, + "loss": 1.4385, + "step": 533 + }, + { + "epoch": 10.8, + "learning_rate": 0.00013188775510204084, + "loss": 1.453, + "step": 534 + }, + { + "epoch": 10.83, + "learning_rate": 0.00013176020408163264, + "loss": 1.4161, + "step": 535 + }, + { + "epoch": 10.85, + "learning_rate": 0.0001316326530612245, + "loss": 1.457, + "step": 536 + }, + { + "epoch": 10.87, + "learning_rate": 0.00013150510204081633, + "loss": 1.4367, + "step": 537 + }, + { + "epoch": 10.89, + "learning_rate": 0.00013137755102040816, + "loss": 1.4256, + "step": 538 + }, + { + "epoch": 10.91, + "learning_rate": 0.00013125000000000002, + "loss": 1.424, + "step": 539 + }, + { + "epoch": 10.93, + "learning_rate": 0.00013112244897959185, + "loss": 1.3923, + "step": 540 + }, + { + "epoch": 10.95, + "learning_rate": 0.00013099489795918368, + "loss": 1.4225, + "step": 541 + }, + { + "epoch": 10.97, + "learning_rate": 0.0001308673469387755, + "loss": 1.3969, + "step": 542 + }, + { + "epoch": 10.99, + "learning_rate": 0.00013073979591836737, + "loss": 1.4446, + "step": 543 + }, + { + "epoch": 11.01, + "learning_rate": 0.00013061224489795917, + "loss": 1.4375, + "step": 544 + }, + { + "epoch": 11.03, + "learning_rate": 0.00013048469387755103, + "loss": 1.4064, + "step": 545 + }, + { + "epoch": 11.05, + "learning_rate": 0.00013035714285714286, + "loss": 1.3454, + "step": 546 + }, + { + "epoch": 11.07, + "learning_rate": 0.00013022959183673472, + "loss": 1.3234, + "step": 547 + }, + { + "epoch": 11.09, + "learning_rate": 0.00013010204081632652, + "loss": 1.3759, + "step": 548 + }, + { + "epoch": 11.11, + "learning_rate": 0.00012997448979591838, + "loss": 1.4221, + "step": 549 + }, + { + "epoch": 11.13, + "learning_rate": 0.0001298469387755102, + "loss": 1.4261, + "step": 550 + }, + { + "epoch": 11.15, + "learning_rate": 0.00012971938775510204, + "loss": 1.3341, + "step": 551 + }, + { + "epoch": 11.17, + "learning_rate": 0.00012959183673469387, + "loss": 1.3994, + "step": 552 + }, + { + "epoch": 11.19, + "learning_rate": 0.00012946428571428573, + "loss": 1.3894, + "step": 553 + }, + { + "epoch": 11.21, + "learning_rate": 0.00012933673469387754, + "loss": 1.3585, + "step": 554 + }, + { + "epoch": 11.23, + "learning_rate": 0.0001292091836734694, + "loss": 1.3763, + "step": 555 + }, + { + "epoch": 11.25, + "learning_rate": 0.00012908163265306123, + "loss": 1.3623, + "step": 556 + }, + { + "epoch": 11.27, + "learning_rate": 0.00012895408163265306, + "loss": 1.3907, + "step": 557 + }, + { + "epoch": 11.29, + "learning_rate": 0.0001288265306122449, + "loss": 1.3807, + "step": 558 + }, + { + "epoch": 11.31, + "learning_rate": 0.00012869897959183674, + "loss": 1.4045, + "step": 559 + }, + { + "epoch": 11.33, + "learning_rate": 0.00012857142857142858, + "loss": 1.4038, + "step": 560 + }, + { + "epoch": 11.35, + "learning_rate": 0.0001284438775510204, + "loss": 1.3466, + "step": 561 + }, + { + "epoch": 11.37, + "learning_rate": 0.00012831632653061226, + "loss": 1.3449, + "step": 562 + }, + { + "epoch": 11.39, + "learning_rate": 0.0001281887755102041, + "loss": 1.3866, + "step": 563 + }, + { + "epoch": 11.41, + "learning_rate": 0.00012806122448979593, + "loss": 1.3106, + "step": 564 + }, + { + "epoch": 11.43, + "learning_rate": 0.00012793367346938776, + "loss": 1.4414, + "step": 565 + }, + { + "epoch": 
11.45, + "learning_rate": 0.00012780612244897962, + "loss": 1.3737, + "step": 566 + }, + { + "epoch": 11.47, + "learning_rate": 0.00012767857142857142, + "loss": 1.4053, + "step": 567 + }, + { + "epoch": 11.49, + "learning_rate": 0.00012755102040816328, + "loss": 1.4561, + "step": 568 + }, + { + "epoch": 11.51, + "learning_rate": 0.0001274234693877551, + "loss": 1.3684, + "step": 569 + }, + { + "epoch": 11.53, + "learning_rate": 0.00012729591836734697, + "loss": 1.3117, + "step": 570 + }, + { + "epoch": 11.55, + "learning_rate": 0.00012716836734693877, + "loss": 1.3474, + "step": 571 + }, + { + "epoch": 11.57, + "learning_rate": 0.00012704081632653063, + "loss": 1.3804, + "step": 572 + }, + { + "epoch": 11.59, + "learning_rate": 0.00012691326530612246, + "loss": 1.3656, + "step": 573 + }, + { + "epoch": 11.61, + "learning_rate": 0.0001267857142857143, + "loss": 1.3133, + "step": 574 + }, + { + "epoch": 11.63, + "learning_rate": 0.00012665816326530612, + "loss": 1.4077, + "step": 575 + }, + { + "epoch": 11.65, + "learning_rate": 0.00012653061224489798, + "loss": 1.4087, + "step": 576 + }, + { + "epoch": 11.67, + "learning_rate": 0.00012640306122448978, + "loss": 1.3524, + "step": 577 + }, + { + "epoch": 11.7, + "learning_rate": 0.00012627551020408164, + "loss": 1.3481, + "step": 578 + }, + { + "epoch": 11.72, + "learning_rate": 0.00012614795918367347, + "loss": 1.4497, + "step": 579 + }, + { + "epoch": 11.74, + "learning_rate": 0.0001260204081632653, + "loss": 1.3866, + "step": 580 + }, + { + "epoch": 11.76, + "learning_rate": 0.00012589285714285713, + "loss": 1.42, + "step": 581 + }, + { + "epoch": 11.78, + "learning_rate": 0.000125765306122449, + "loss": 1.3562, + "step": 582 + }, + { + "epoch": 11.8, + "learning_rate": 0.00012563775510204082, + "loss": 1.3249, + "step": 583 + }, + { + "epoch": 11.82, + "learning_rate": 0.00012551020408163265, + "loss": 1.4277, + "step": 584 + }, + { + "epoch": 11.84, + "learning_rate": 0.0001253826530612245, + "loss": 1.3734, + "step": 585 + }, + { + "epoch": 11.86, + "learning_rate": 0.00012525510204081634, + "loss": 1.3765, + "step": 586 + }, + { + "epoch": 11.88, + "learning_rate": 0.00012512755102040817, + "loss": 1.4153, + "step": 587 + }, + { + "epoch": 11.9, + "learning_rate": 0.000125, + "loss": 1.3847, + "step": 588 + }, + { + "epoch": 11.92, + "learning_rate": 0.00012487244897959186, + "loss": 1.3824, + "step": 589 + }, + { + "epoch": 11.94, + "learning_rate": 0.00012474489795918366, + "loss": 1.3938, + "step": 590 + }, + { + "epoch": 11.96, + "learning_rate": 0.00012461734693877552, + "loss": 1.4143, + "step": 591 + }, + { + "epoch": 11.98, + "learning_rate": 0.00012448979591836735, + "loss": 1.3794, + "step": 592 + }, + { + "epoch": 12.0, + "learning_rate": 0.00012436224489795918, + "loss": 1.3755, + "step": 593 + }, + { + "epoch": 12.02, + "learning_rate": 0.00012423469387755101, + "loss": 1.3736, + "step": 594 + }, + { + "epoch": 12.04, + "learning_rate": 0.00012410714285714287, + "loss": 1.2957, + "step": 595 + }, + { + "epoch": 12.06, + "learning_rate": 0.0001239795918367347, + "loss": 1.2996, + "step": 596 + }, + { + "epoch": 12.08, + "learning_rate": 0.00012385204081632653, + "loss": 1.3648, + "step": 597 + }, + { + "epoch": 12.1, + "learning_rate": 0.00012372448979591837, + "loss": 1.3031, + "step": 598 + }, + { + "epoch": 12.12, + "learning_rate": 0.00012359693877551022, + "loss": 1.2933, + "step": 599 + }, + { + "epoch": 12.14, + "learning_rate": 0.00012346938775510203, + "loss": 1.322, + "step": 600 + }, + { + "epoch": 12.16, + 
"learning_rate": 0.00012334183673469389, + "loss": 1.3123, + "step": 601 + }, + { + "epoch": 12.18, + "learning_rate": 0.00012321428571428572, + "loss": 1.3187, + "step": 602 + }, + { + "epoch": 12.2, + "learning_rate": 0.00012308673469387755, + "loss": 1.3353, + "step": 603 + }, + { + "epoch": 12.22, + "learning_rate": 0.0001229591836734694, + "loss": 1.3221, + "step": 604 + }, + { + "epoch": 12.24, + "learning_rate": 0.00012283163265306124, + "loss": 1.3458, + "step": 605 + }, + { + "epoch": 12.26, + "learning_rate": 0.00012270408163265307, + "loss": 1.275, + "step": 606 + }, + { + "epoch": 12.28, + "learning_rate": 0.0001225765306122449, + "loss": 1.3455, + "step": 607 + }, + { + "epoch": 12.3, + "learning_rate": 0.00012244897959183676, + "loss": 1.2769, + "step": 608 + }, + { + "epoch": 12.32, + "learning_rate": 0.00012232142857142859, + "loss": 1.3201, + "step": 609 + }, + { + "epoch": 12.34, + "learning_rate": 0.00012219387755102042, + "loss": 1.3073, + "step": 610 + }, + { + "epoch": 12.36, + "learning_rate": 0.00012206632653061225, + "loss": 1.3103, + "step": 611 + }, + { + "epoch": 12.38, + "learning_rate": 0.00012193877551020409, + "loss": 1.4437, + "step": 612 + }, + { + "epoch": 12.4, + "learning_rate": 0.00012181122448979591, + "loss": 1.3086, + "step": 613 + }, + { + "epoch": 12.42, + "learning_rate": 0.00012168367346938775, + "loss": 1.3867, + "step": 614 + }, + { + "epoch": 12.44, + "learning_rate": 0.0001215561224489796, + "loss": 1.2565, + "step": 615 + }, + { + "epoch": 12.46, + "learning_rate": 0.00012142857142857143, + "loss": 1.335, + "step": 616 + }, + { + "epoch": 12.48, + "learning_rate": 0.00012130102040816327, + "loss": 1.3423, + "step": 617 + }, + { + "epoch": 12.5, + "learning_rate": 0.00012117346938775512, + "loss": 1.3433, + "step": 618 + }, + { + "epoch": 12.52, + "learning_rate": 0.00012104591836734695, + "loss": 1.3387, + "step": 619 + }, + { + "epoch": 12.55, + "learning_rate": 0.00012091836734693878, + "loss": 1.3923, + "step": 620 + }, + { + "epoch": 12.57, + "learning_rate": 0.00012079081632653062, + "loss": 1.3774, + "step": 621 + }, + { + "epoch": 12.59, + "learning_rate": 0.00012066326530612247, + "loss": 1.3203, + "step": 622 + }, + { + "epoch": 12.61, + "learning_rate": 0.00012053571428571429, + "loss": 1.2924, + "step": 623 + }, + { + "epoch": 12.63, + "learning_rate": 0.00012040816326530613, + "loss": 1.3292, + "step": 624 + }, + { + "epoch": 12.65, + "learning_rate": 0.00012028061224489798, + "loss": 1.3161, + "step": 625 + }, + { + "epoch": 12.67, + "learning_rate": 0.00012015306122448979, + "loss": 1.352, + "step": 626 + }, + { + "epoch": 12.69, + "learning_rate": 0.00012002551020408164, + "loss": 1.3577, + "step": 627 + }, + { + "epoch": 12.71, + "learning_rate": 0.00011989795918367348, + "loss": 1.3575, + "step": 628 + }, + { + "epoch": 12.73, + "learning_rate": 0.0001197704081632653, + "loss": 1.3727, + "step": 629 + }, + { + "epoch": 12.75, + "learning_rate": 0.00011964285714285714, + "loss": 1.3312, + "step": 630 + }, + { + "epoch": 12.77, + "learning_rate": 0.00011951530612244899, + "loss": 1.3378, + "step": 631 + }, + { + "epoch": 12.79, + "learning_rate": 0.00011938775510204083, + "loss": 1.295, + "step": 632 + }, + { + "epoch": 12.81, + "learning_rate": 0.00011926020408163265, + "loss": 1.3447, + "step": 633 + }, + { + "epoch": 12.83, + "learning_rate": 0.0001191326530612245, + "loss": 1.3835, + "step": 634 + }, + { + "epoch": 12.85, + "learning_rate": 0.00011900510204081634, + "loss": 1.3222, + "step": 635 + }, + { + "epoch": 12.87, 
+ "learning_rate": 0.00011887755102040817, + "loss": 1.2851, + "step": 636 + }, + { + "epoch": 12.89, + "learning_rate": 0.00011875, + "loss": 1.2723, + "step": 637 + }, + { + "epoch": 12.91, + "learning_rate": 0.00011862244897959184, + "loss": 1.3924, + "step": 638 + }, + { + "epoch": 12.93, + "learning_rate": 0.00011849489795918368, + "loss": 1.4625, + "step": 639 + }, + { + "epoch": 12.95, + "learning_rate": 0.00011836734693877552, + "loss": 1.3245, + "step": 640 + }, + { + "epoch": 12.97, + "learning_rate": 0.00011823979591836736, + "loss": 1.4042, + "step": 641 + }, + { + "epoch": 12.99, + "learning_rate": 0.00011811224489795918, + "loss": 1.3761, + "step": 642 + }, + { + "epoch": 13.01, + "learning_rate": 0.00011798469387755103, + "loss": 1.3376, + "step": 643 + }, + { + "epoch": 13.03, + "learning_rate": 0.00011785714285714287, + "loss": 1.2174, + "step": 644 + }, + { + "epoch": 13.05, + "learning_rate": 0.00011772959183673471, + "loss": 1.3602, + "step": 645 + }, + { + "epoch": 13.07, + "learning_rate": 0.00011760204081632653, + "loss": 1.3002, + "step": 646 + }, + { + "epoch": 13.09, + "learning_rate": 0.00011747448979591838, + "loss": 1.2262, + "step": 647 + }, + { + "epoch": 13.11, + "learning_rate": 0.00011734693877551022, + "loss": 1.3048, + "step": 648 + }, + { + "epoch": 13.13, + "learning_rate": 0.00011721938775510204, + "loss": 1.2231, + "step": 649 + }, + { + "epoch": 13.15, + "learning_rate": 0.00011709183673469388, + "loss": 1.2996, + "step": 650 + }, + { + "epoch": 13.17, + "learning_rate": 0.00011696428571428573, + "loss": 1.2708, + "step": 651 + }, + { + "epoch": 13.19, + "learning_rate": 0.00011683673469387754, + "loss": 1.2776, + "step": 652 + }, + { + "epoch": 13.21, + "learning_rate": 0.00011670918367346939, + "loss": 1.248, + "step": 653 + }, + { + "epoch": 13.23, + "learning_rate": 0.00011658163265306123, + "loss": 1.2582, + "step": 654 + }, + { + "epoch": 13.25, + "learning_rate": 0.00011645408163265305, + "loss": 1.3011, + "step": 655 + }, + { + "epoch": 13.27, + "learning_rate": 0.0001163265306122449, + "loss": 1.2969, + "step": 656 + }, + { + "epoch": 13.29, + "learning_rate": 0.00011619897959183674, + "loss": 1.2454, + "step": 657 + }, + { + "epoch": 13.31, + "learning_rate": 0.00011607142857142858, + "loss": 1.1914, + "step": 658 + }, + { + "epoch": 13.33, + "learning_rate": 0.00011594387755102041, + "loss": 1.34, + "step": 659 + }, + { + "epoch": 13.35, + "learning_rate": 0.00011581632653061225, + "loss": 1.2828, + "step": 660 + }, + { + "epoch": 13.37, + "learning_rate": 0.00011568877551020409, + "loss": 1.2962, + "step": 661 + }, + { + "epoch": 13.39, + "learning_rate": 0.00011556122448979592, + "loss": 1.3334, + "step": 662 + }, + { + "epoch": 13.42, + "learning_rate": 0.00011543367346938776, + "loss": 1.2832, + "step": 663 + }, + { + "epoch": 13.44, + "learning_rate": 0.00011530612244897961, + "loss": 1.3012, + "step": 664 + }, + { + "epoch": 13.46, + "learning_rate": 0.00011517857142857143, + "loss": 1.2857, + "step": 665 + }, + { + "epoch": 13.48, + "learning_rate": 0.00011505102040816327, + "loss": 1.2855, + "step": 666 + }, + { + "epoch": 13.5, + "learning_rate": 0.00011492346938775512, + "loss": 1.3077, + "step": 667 + }, + { + "epoch": 13.52, + "learning_rate": 0.00011479591836734696, + "loss": 1.3139, + "step": 668 + }, + { + "epoch": 13.54, + "learning_rate": 0.00011466836734693878, + "loss": 1.3138, + "step": 669 + }, + { + "epoch": 13.56, + "learning_rate": 0.00011454081632653062, + "loss": 1.2808, + "step": 670 + }, + { + "epoch": 13.58, + 
"learning_rate": 0.00011441326530612247, + "loss": 1.2492, + "step": 671 + }, + { + "epoch": 13.6, + "learning_rate": 0.00011428571428571428, + "loss": 1.2027, + "step": 672 + }, + { + "epoch": 13.62, + "learning_rate": 0.00011415816326530613, + "loss": 1.33, + "step": 673 + }, + { + "epoch": 13.64, + "learning_rate": 0.00011403061224489797, + "loss": 1.3112, + "step": 674 + }, + { + "epoch": 13.66, + "learning_rate": 0.00011390306122448979, + "loss": 1.2772, + "step": 675 + }, + { + "epoch": 13.68, + "learning_rate": 0.00011377551020408163, + "loss": 1.2701, + "step": 676 + }, + { + "epoch": 13.7, + "learning_rate": 0.00011364795918367348, + "loss": 1.1973, + "step": 677 + }, + { + "epoch": 13.72, + "learning_rate": 0.0001135204081632653, + "loss": 1.3124, + "step": 678 + }, + { + "epoch": 13.74, + "learning_rate": 0.00011339285714285714, + "loss": 1.3085, + "step": 679 + }, + { + "epoch": 13.76, + "learning_rate": 0.00011326530612244898, + "loss": 1.3457, + "step": 680 + }, + { + "epoch": 13.78, + "learning_rate": 0.00011313775510204083, + "loss": 1.3338, + "step": 681 + }, + { + "epoch": 13.8, + "learning_rate": 0.00011301020408163266, + "loss": 1.2753, + "step": 682 + }, + { + "epoch": 13.82, + "learning_rate": 0.00011288265306122449, + "loss": 1.2786, + "step": 683 + }, + { + "epoch": 13.84, + "learning_rate": 0.00011275510204081634, + "loss": 1.2584, + "step": 684 + }, + { + "epoch": 13.86, + "learning_rate": 0.00011262755102040817, + "loss": 1.2779, + "step": 685 + }, + { + "epoch": 13.88, + "learning_rate": 0.00011250000000000001, + "loss": 1.3502, + "step": 686 + }, + { + "epoch": 13.9, + "learning_rate": 0.00011237244897959185, + "loss": 1.3251, + "step": 687 + }, + { + "epoch": 13.92, + "learning_rate": 0.00011224489795918367, + "loss": 1.273, + "step": 688 + }, + { + "epoch": 13.94, + "learning_rate": 0.00011211734693877552, + "loss": 1.3341, + "step": 689 + }, + { + "epoch": 13.96, + "learning_rate": 0.00011198979591836736, + "loss": 1.2654, + "step": 690 + }, + { + "epoch": 13.98, + "learning_rate": 0.00011186224489795918, + "loss": 1.3333, + "step": 691 + }, + { + "epoch": 14.0, + "learning_rate": 0.00011173469387755102, + "loss": 1.3246, + "step": 692 + }, + { + "epoch": 14.02, + "learning_rate": 0.00011160714285714287, + "loss": 1.2547, + "step": 693 + }, + { + "epoch": 14.04, + "learning_rate": 0.00011147959183673471, + "loss": 1.208, + "step": 694 + }, + { + "epoch": 14.06, + "learning_rate": 0.00011135204081632653, + "loss": 1.223, + "step": 695 + }, + { + "epoch": 14.08, + "learning_rate": 0.00011122448979591837, + "loss": 1.2483, + "step": 696 + }, + { + "epoch": 14.1, + "learning_rate": 0.00011109693877551022, + "loss": 1.2823, + "step": 697 + }, + { + "epoch": 14.12, + "learning_rate": 0.00011096938775510204, + "loss": 1.2013, + "step": 698 + }, + { + "epoch": 14.14, + "learning_rate": 0.00011084183673469388, + "loss": 1.1883, + "step": 699 + }, + { + "epoch": 14.16, + "learning_rate": 0.00011071428571428572, + "loss": 1.2364, + "step": 700 + }, + { + "epoch": 14.18, + "learning_rate": 0.00011058673469387754, + "loss": 1.2069, + "step": 701 + }, + { + "epoch": 14.2, + "learning_rate": 0.00011045918367346939, + "loss": 1.1968, + "step": 702 + }, + { + "epoch": 14.22, + "learning_rate": 0.00011033163265306123, + "loss": 1.2236, + "step": 703 + }, + { + "epoch": 14.24, + "learning_rate": 0.00011020408163265306, + "loss": 1.1942, + "step": 704 + }, + { + "epoch": 14.26, + "learning_rate": 0.0001100765306122449, + "loss": 1.2561, + "step": 705 + }, + { + "epoch": 14.29, 
+ "learning_rate": 0.00010994897959183674, + "loss": 1.1839, + "step": 706 + }, + { + "epoch": 14.31, + "learning_rate": 0.00010982142857142858, + "loss": 1.2128, + "step": 707 + }, + { + "epoch": 14.33, + "learning_rate": 0.00010969387755102041, + "loss": 1.3086, + "step": 708 + }, + { + "epoch": 14.35, + "learning_rate": 0.00010956632653061226, + "loss": 1.2379, + "step": 709 + }, + { + "epoch": 14.37, + "learning_rate": 0.0001094387755102041, + "loss": 1.176, + "step": 710 + }, + { + "epoch": 14.39, + "learning_rate": 0.00010931122448979592, + "loss": 1.2105, + "step": 711 + }, + { + "epoch": 14.41, + "learning_rate": 0.00010918367346938776, + "loss": 1.2149, + "step": 712 + }, + { + "epoch": 14.43, + "learning_rate": 0.0001090561224489796, + "loss": 1.2392, + "step": 713 + }, + { + "epoch": 14.45, + "learning_rate": 0.00010892857142857142, + "loss": 1.2471, + "step": 714 + }, + { + "epoch": 14.47, + "learning_rate": 0.00010880102040816327, + "loss": 1.2561, + "step": 715 + }, + { + "epoch": 14.49, + "learning_rate": 0.00010867346938775511, + "loss": 1.2179, + "step": 716 + }, + { + "epoch": 14.51, + "learning_rate": 0.00010854591836734696, + "loss": 1.2459, + "step": 717 + }, + { + "epoch": 14.53, + "learning_rate": 0.00010841836734693877, + "loss": 1.2933, + "step": 718 + }, + { + "epoch": 14.55, + "learning_rate": 0.00010829081632653062, + "loss": 1.2862, + "step": 719 + }, + { + "epoch": 14.57, + "learning_rate": 0.00010816326530612246, + "loss": 1.2976, + "step": 720 + }, + { + "epoch": 14.59, + "learning_rate": 0.00010803571428571428, + "loss": 1.231, + "step": 721 + }, + { + "epoch": 14.61, + "learning_rate": 0.00010790816326530613, + "loss": 1.2464, + "step": 722 + }, + { + "epoch": 14.63, + "learning_rate": 0.00010778061224489797, + "loss": 1.2181, + "step": 723 + }, + { + "epoch": 14.65, + "learning_rate": 0.00010765306122448979, + "loss": 1.3307, + "step": 724 + }, + { + "epoch": 14.67, + "learning_rate": 0.00010752551020408163, + "loss": 1.1723, + "step": 725 + }, + { + "epoch": 14.69, + "learning_rate": 0.00010739795918367348, + "loss": 1.1528, + "step": 726 + }, + { + "epoch": 14.71, + "learning_rate": 0.0001072704081632653, + "loss": 1.215, + "step": 727 + }, + { + "epoch": 14.73, + "learning_rate": 0.00010714285714285715, + "loss": 1.2624, + "step": 728 + }, + { + "epoch": 14.75, + "learning_rate": 0.00010701530612244898, + "loss": 1.3117, + "step": 729 + }, + { + "epoch": 14.77, + "learning_rate": 0.00010688775510204083, + "loss": 1.2572, + "step": 730 + }, + { + "epoch": 14.79, + "learning_rate": 0.00010676020408163266, + "loss": 1.222, + "step": 731 + }, + { + "epoch": 14.81, + "learning_rate": 0.0001066326530612245, + "loss": 1.2881, + "step": 732 + }, + { + "epoch": 14.83, + "learning_rate": 0.00010650510204081635, + "loss": 1.2676, + "step": 733 + }, + { + "epoch": 14.85, + "learning_rate": 0.00010637755102040816, + "loss": 1.2734, + "step": 734 + }, + { + "epoch": 14.87, + "learning_rate": 0.00010625000000000001, + "loss": 1.2885, + "step": 735 + }, + { + "epoch": 14.89, + "learning_rate": 0.00010612244897959185, + "loss": 1.2764, + "step": 736 + }, + { + "epoch": 14.91, + "learning_rate": 0.00010599489795918367, + "loss": 1.3267, + "step": 737 + }, + { + "epoch": 14.93, + "learning_rate": 0.00010586734693877551, + "loss": 1.2445, + "step": 738 + }, + { + "epoch": 14.95, + "learning_rate": 0.00010573979591836736, + "loss": 1.3359, + "step": 739 + }, + { + "epoch": 14.97, + "learning_rate": 0.00010561224489795918, + "loss": 1.2508, + "step": 740 + }, + { + "epoch": 
14.99, + "learning_rate": 0.00010548469387755102, + "loss": 1.2227, + "step": 741 + }, + { + "epoch": 15.01, + "learning_rate": 0.00010535714285714286, + "loss": 1.1889, + "step": 742 + }, + { + "epoch": 15.03, + "learning_rate": 0.00010522959183673471, + "loss": 1.1919, + "step": 743 + }, + { + "epoch": 15.05, + "learning_rate": 0.00010510204081632653, + "loss": 1.2383, + "step": 744 + }, + { + "epoch": 15.07, + "learning_rate": 0.00010497448979591837, + "loss": 1.2401, + "step": 745 + }, + { + "epoch": 15.09, + "learning_rate": 0.00010484693877551021, + "loss": 1.2015, + "step": 746 + }, + { + "epoch": 15.11, + "learning_rate": 0.00010471938775510203, + "loss": 1.1509, + "step": 747 + }, + { + "epoch": 15.13, + "learning_rate": 0.00010459183673469388, + "loss": 1.1878, + "step": 748 + }, + { + "epoch": 15.16, + "learning_rate": 0.00010446428571428572, + "loss": 1.1706, + "step": 749 + }, + { + "epoch": 15.18, + "learning_rate": 0.00010433673469387755, + "loss": 1.1285, + "step": 750 + }, + { + "epoch": 15.2, + "learning_rate": 0.0001042091836734694, + "loss": 1.1608, + "step": 751 + }, + { + "epoch": 15.22, + "learning_rate": 0.00010408163265306123, + "loss": 1.1178, + "step": 752 + }, + { + "epoch": 15.24, + "learning_rate": 0.00010395408163265306, + "loss": 1.1293, + "step": 753 + }, + { + "epoch": 15.26, + "learning_rate": 0.0001038265306122449, + "loss": 1.2306, + "step": 754 + }, + { + "epoch": 15.28, + "learning_rate": 0.00010369897959183675, + "loss": 1.1541, + "step": 755 + }, + { + "epoch": 15.3, + "learning_rate": 0.00010357142857142859, + "loss": 1.1702, + "step": 756 + }, + { + "epoch": 15.32, + "learning_rate": 0.00010344387755102041, + "loss": 1.2119, + "step": 757 + }, + { + "epoch": 15.34, + "learning_rate": 0.00010331632653061225, + "loss": 1.2239, + "step": 758 + }, + { + "epoch": 15.36, + "learning_rate": 0.0001031887755102041, + "loss": 1.2019, + "step": 759 + }, + { + "epoch": 15.38, + "learning_rate": 0.00010306122448979591, + "loss": 1.2197, + "step": 760 + }, + { + "epoch": 15.4, + "learning_rate": 0.00010293367346938776, + "loss": 1.1769, + "step": 761 + }, + { + "epoch": 15.42, + "learning_rate": 0.0001028061224489796, + "loss": 1.1907, + "step": 762 + }, + { + "epoch": 15.44, + "learning_rate": 0.00010267857142857142, + "loss": 1.2089, + "step": 763 + }, + { + "epoch": 15.46, + "learning_rate": 0.00010255102040816327, + "loss": 1.1335, + "step": 764 + }, + { + "epoch": 15.48, + "learning_rate": 0.00010242346938775511, + "loss": 1.1633, + "step": 765 + }, + { + "epoch": 15.5, + "learning_rate": 0.00010229591836734695, + "loss": 1.1578, + "step": 766 + }, + { + "epoch": 15.52, + "learning_rate": 0.00010216836734693877, + "loss": 1.2236, + "step": 767 + }, + { + "epoch": 15.54, + "learning_rate": 0.00010204081632653062, + "loss": 1.1941, + "step": 768 + }, + { + "epoch": 15.56, + "learning_rate": 0.00010191326530612246, + "loss": 1.2666, + "step": 769 + }, + { + "epoch": 15.58, + "learning_rate": 0.00010178571428571428, + "loss": 1.1232, + "step": 770 + }, + { + "epoch": 15.6, + "learning_rate": 0.00010165816326530612, + "loss": 1.2242, + "step": 771 + }, + { + "epoch": 15.62, + "learning_rate": 0.00010153061224489797, + "loss": 1.1852, + "step": 772 + }, + { + "epoch": 15.64, + "learning_rate": 0.0001014030612244898, + "loss": 1.2626, + "step": 773 + }, + { + "epoch": 15.66, + "learning_rate": 0.00010127551020408164, + "loss": 1.1873, + "step": 774 + }, + { + "epoch": 15.68, + "learning_rate": 0.00010114795918367349, + "loss": 1.3005, + "step": 775 + }, + { + 
"epoch": 15.7, + "learning_rate": 0.0001010204081632653, + "loss": 1.1904, + "step": 776 + }, + { + "epoch": 15.72, + "learning_rate": 0.00010089285714285715, + "loss": 1.2927, + "step": 777 + }, + { + "epoch": 15.74, + "learning_rate": 0.00010076530612244899, + "loss": 1.179, + "step": 778 + }, + { + "epoch": 15.76, + "learning_rate": 0.00010063775510204084, + "loss": 1.2027, + "step": 779 + }, + { + "epoch": 15.78, + "learning_rate": 0.00010051020408163265, + "loss": 1.2428, + "step": 780 + }, + { + "epoch": 15.8, + "learning_rate": 0.0001003826530612245, + "loss": 1.2324, + "step": 781 + }, + { + "epoch": 15.82, + "learning_rate": 0.00010025510204081634, + "loss": 1.1251, + "step": 782 + }, + { + "epoch": 15.84, + "learning_rate": 0.00010012755102040816, + "loss": 1.2405, + "step": 783 + }, + { + "epoch": 15.86, + "learning_rate": 0.0001, + "loss": 1.2005, + "step": 784 + }, + { + "epoch": 15.88, + "learning_rate": 9.987244897959184e-05, + "loss": 1.2259, + "step": 785 + }, + { + "epoch": 15.9, + "learning_rate": 9.974489795918368e-05, + "loss": 1.1576, + "step": 786 + }, + { + "epoch": 15.92, + "learning_rate": 9.961734693877551e-05, + "loss": 1.1834, + "step": 787 + }, + { + "epoch": 15.94, + "learning_rate": 9.948979591836736e-05, + "loss": 1.2396, + "step": 788 + }, + { + "epoch": 15.96, + "learning_rate": 9.936224489795919e-05, + "loss": 1.1865, + "step": 789 + }, + { + "epoch": 15.98, + "learning_rate": 9.923469387755102e-05, + "loss": 1.2356, + "step": 790 + }, + { + "epoch": 16.01, + "learning_rate": 9.910714285714286e-05, + "loss": 1.2639, + "step": 791 + }, + { + "epoch": 16.03, + "learning_rate": 9.897959183673469e-05, + "loss": 1.1216, + "step": 792 + }, + { + "epoch": 16.05, + "learning_rate": 9.885204081632652e-05, + "loss": 1.1051, + "step": 793 + }, + { + "epoch": 16.07, + "learning_rate": 9.872448979591837e-05, + "loss": 1.0864, + "step": 794 + }, + { + "epoch": 16.09, + "learning_rate": 9.859693877551021e-05, + "loss": 1.182, + "step": 795 + }, + { + "epoch": 16.11, + "learning_rate": 9.846938775510204e-05, + "loss": 1.1272, + "step": 796 + }, + { + "epoch": 16.13, + "learning_rate": 9.834183673469389e-05, + "loss": 1.1946, + "step": 797 + }, + { + "epoch": 16.15, + "learning_rate": 9.821428571428572e-05, + "loss": 1.0875, + "step": 798 + }, + { + "epoch": 16.17, + "learning_rate": 9.808673469387756e-05, + "loss": 1.1671, + "step": 799 + }, + { + "epoch": 16.19, + "learning_rate": 9.79591836734694e-05, + "loss": 1.1502, + "step": 800 + } + ], + "logging_steps": 1, + "max_steps": 1568, + "num_train_epochs": 32, + "save_steps": 100, + "total_flos": 1.1836061421370368e+18, + "trial_name": null, + "trial_params": null +} diff --git a/checkpoint-800/training_args.bin b/checkpoint-800/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..db23e07d097c18532e52f58a70eb72d22e39c8c1 --- /dev/null +++ b/checkpoint-800/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b610cbc4242bb50b4985b00e205994ae514fec6d9e2273f2b545a583a07b154b +size 4155 diff --git a/checkpoint-900/README.md b/checkpoint-900/README.md new file mode 100644 index 0000000000000000000000000000000000000000..08371015f02382e6fcba318f4aaea54ae52cd3c4 --- /dev/null +++ b/checkpoint-900/README.md @@ -0,0 +1,34 @@ +--- +library_name: peft +--- +## Training procedure + + +The following `bitsandbytes` quantization config was used during training: +- quant_method: bitsandbytes +- load_in_8bit: True +- load_in_4bit: False +- llm_int8_threshold: 6.0 +- 
llm_int8_skip_modules: None +- llm_int8_enable_fp32_cpu_offload: False +- llm_int8_has_fp16_weight: False +- bnb_4bit_quant_type: fp4 +- bnb_4bit_use_double_quant: False +- bnb_4bit_compute_dtype: float32 + +The following `bitsandbytes` quantization config was used during training: +- quant_method: bitsandbytes +- load_in_8bit: True +- load_in_4bit: False +- llm_int8_threshold: 6.0 +- llm_int8_skip_modules: None +- llm_int8_enable_fp32_cpu_offload: False +- llm_int8_has_fp16_weight: False +- bnb_4bit_quant_type: fp4 +- bnb_4bit_use_double_quant: False +- bnb_4bit_compute_dtype: float32 +### Framework versions + +- PEFT 0.6.0.dev0 + +- PEFT 0.6.0.dev0 diff --git a/checkpoint-900/adapter_config.json b/checkpoint-900/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..751d838ac0c1ae5ca71ca448b25d7a8a0173f01b --- /dev/null +++ b/checkpoint-900/adapter_config.json @@ -0,0 +1,23 @@ +{ + "auto_mapping": null, + "base_model_name_or_path": "bigscience/bloomz-3b", + "bias": "none", + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": true, + "layers_pattern": null, + "layers_to_transform": null, + "lora_alpha": 16, + "lora_dropout": 0.0, + "modules_to_save": null, + "peft_type": "LORA", + "r": 8, + "revision": null, + "target_modules": [ + "dense_4h_to_h", + "dense", + "dense_h_to_4h", + "query_key_value" + ], + "task_type": "CAUSAL_LM" +} \ No newline at end of file diff --git a/checkpoint-900/adapter_model.bin b/checkpoint-900/adapter_model.bin new file mode 100644 index 0000000000000000000000000000000000000000..db7ed4f6ae4a91e62162d01678a6701bba9864f2 --- /dev/null +++ b/checkpoint-900/adapter_model.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f51c1d84f0d247f48d41fba2b15457feed1d404eeff6da6d13cabd701e815176 +size 39409357 diff --git a/checkpoint-900/optimizer.pt b/checkpoint-900/optimizer.pt new file mode 100644 index 0000000000000000000000000000000000000000..24c7142a336fb1d38f0ad6b55a013afa82c328c9 --- /dev/null +++ b/checkpoint-900/optimizer.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:450feba545ce1622e78b86c9c73450f53cd2c861fd82ef80a705f3ff9afe3e5a +size 78844421 diff --git a/checkpoint-900/rng_state.pth b/checkpoint-900/rng_state.pth new file mode 100644 index 0000000000000000000000000000000000000000..318fa43e6207ee82707af116de91e1ec428b2697 --- /dev/null +++ b/checkpoint-900/rng_state.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:18451c8480ecb75a9e33d086345a13c29ae0a8eae26b215a820f4299cc32f2a0 +size 14575 diff --git a/checkpoint-900/scheduler.pt b/checkpoint-900/scheduler.pt new file mode 100644 index 0000000000000000000000000000000000000000..819e0686a4ea5e085317f0dd042805cc2503ad09 --- /dev/null +++ b/checkpoint-900/scheduler.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8f7ed7c2c4b503d29d3bdb091842bdfbacee353eb03798fc384008a89c404484 +size 627 diff --git a/checkpoint-900/special_tokens_map.json b/checkpoint-900/special_tokens_map.json new file mode 100644 index 0000000000000000000000000000000000000000..fdafe480f024ff444c7492147536765ce5d55a2d --- /dev/null +++ b/checkpoint-900/special_tokens_map.json @@ -0,0 +1,6 @@ +{ + "bos_token": "", + "eos_token": "", + "pad_token": "", + "unk_token": "" +} diff --git a/checkpoint-900/tokenizer.json b/checkpoint-900/tokenizer.json new file mode 100644 index 0000000000000000000000000000000000000000..dbf002cafbd4818dcff2abc9156c088d681b4533 --- /dev/null +++ 
b/checkpoint-900/tokenizer.json @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:17a208233d2ee8d8c83b23bc214df737c44806a1919f444e89b31e586cd956ba +size 14500471 diff --git a/checkpoint-900/tokenizer_config.json b/checkpoint-900/tokenizer_config.json new file mode 100644 index 0000000000000000000000000000000000000000..4b56cc9c2965c07132c35df3e2972e93d98c82c3 --- /dev/null +++ b/checkpoint-900/tokenizer_config.json @@ -0,0 +1,10 @@ +{ + "add_prefix_space": false, + "bos_token": "", + "clean_up_tokenization_spaces": false, + "eos_token": "", + "model_max_length": 1000000000000000019884624838656, + "pad_token": "", + "tokenizer_class": "BloomTokenizer", + "unk_token": "" +} diff --git a/checkpoint-900/trainer_state.json b/checkpoint-900/trainer_state.json new file mode 100644 index 0000000000000000000000000000000000000000..64e9788c0c92d579e2bac3e81f871d41896de5f2 --- /dev/null +++ b/checkpoint-900/trainer_state.json @@ -0,0 +1,5419 @@ +{ + "best_metric": null, + "best_model_checkpoint": null, + "epoch": 18.2105595953209, + "eval_steps": 500, + "global_step": 900, + "is_hyper_param_search": false, + "is_local_process_zero": true, + "is_world_process_zero": true, + "log_history": [ + { + "epoch": 0.02, + "learning_rate": 0.00019987244897959184, + "loss": 3.2215, + "step": 1 + }, + { + "epoch": 0.04, + "learning_rate": 0.00019974489795918367, + "loss": 2.8365, + "step": 2 + }, + { + "epoch": 0.06, + "learning_rate": 0.00019961734693877553, + "loss": 2.602, + "step": 3 + }, + { + "epoch": 0.08, + "learning_rate": 0.00019948979591836736, + "loss": 2.4196, + "step": 4 + }, + { + "epoch": 0.1, + "learning_rate": 0.0001993622448979592, + "loss": 2.2574, + "step": 5 + }, + { + "epoch": 0.12, + "learning_rate": 0.00019923469387755102, + "loss": 2.2239, + "step": 6 + }, + { + "epoch": 0.14, + "learning_rate": 0.00019910714285714288, + "loss": 2.1661, + "step": 7 + }, + { + "epoch": 0.16, + "learning_rate": 0.0001989795918367347, + "loss": 2.0987, + "step": 8 + }, + { + "epoch": 0.18, + "learning_rate": 0.00019885204081632654, + "loss": 2.015, + "step": 9 + }, + { + "epoch": 0.2, + "learning_rate": 0.00019872448979591837, + "loss": 1.9771, + "step": 10 + }, + { + "epoch": 0.22, + "learning_rate": 0.00019859693877551023, + "loss": 2.0271, + "step": 11 + }, + { + "epoch": 0.24, + "learning_rate": 0.00019846938775510203, + "loss": 1.9812, + "step": 12 + }, + { + "epoch": 0.26, + "learning_rate": 0.0001983418367346939, + "loss": 2.0834, + "step": 13 + }, + { + "epoch": 0.28, + "learning_rate": 0.00019821428571428572, + "loss": 1.9174, + "step": 14 + }, + { + "epoch": 0.3, + "learning_rate": 0.00019808673469387755, + "loss": 1.8409, + "step": 15 + }, + { + "epoch": 0.32, + "learning_rate": 0.00019795918367346938, + "loss": 1.929, + "step": 16 + }, + { + "epoch": 0.34, + "learning_rate": 0.00019783163265306124, + "loss": 2.0041, + "step": 17 + }, + { + "epoch": 0.36, + "learning_rate": 0.00019770408163265305, + "loss": 1.9385, + "step": 18 + }, + { + "epoch": 0.38, + "learning_rate": 0.0001975765306122449, + "loss": 1.9592, + "step": 19 + }, + { + "epoch": 0.4, + "learning_rate": 0.00019744897959183674, + "loss": 1.9701, + "step": 20 + }, + { + "epoch": 0.42, + "learning_rate": 0.0001973214285714286, + "loss": 1.9277, + "step": 21 + }, + { + "epoch": 0.45, + "learning_rate": 0.00019719387755102042, + "loss": 1.8394, + "step": 22 + }, + { + "epoch": 0.47, + "learning_rate": 0.00019706632653061226, + "loss": 1.8666, + "step": 23 + }, + { + "epoch": 0.49, + "learning_rate": 
0.00019693877551020409, + "loss": 1.8997, + "step": 24 + }, + { + "epoch": 0.51, + "learning_rate": 0.00019681122448979592, + "loss": 1.9432, + "step": 25 + }, + { + "epoch": 0.53, + "learning_rate": 0.00019668367346938777, + "loss": 1.9137, + "step": 26 + }, + { + "epoch": 0.55, + "learning_rate": 0.0001965561224489796, + "loss": 1.905, + "step": 27 + }, + { + "epoch": 0.57, + "learning_rate": 0.00019642857142857144, + "loss": 1.8708, + "step": 28 + }, + { + "epoch": 0.59, + "learning_rate": 0.00019630102040816327, + "loss": 1.9097, + "step": 29 + }, + { + "epoch": 0.61, + "learning_rate": 0.00019617346938775513, + "loss": 1.896, + "step": 30 + }, + { + "epoch": 0.63, + "learning_rate": 0.00019604591836734696, + "loss": 1.8834, + "step": 31 + }, + { + "epoch": 0.65, + "learning_rate": 0.0001959183673469388, + "loss": 1.8323, + "step": 32 + }, + { + "epoch": 0.67, + "learning_rate": 0.00019579081632653062, + "loss": 1.804, + "step": 33 + }, + { + "epoch": 0.69, + "learning_rate": 0.00019566326530612248, + "loss": 1.8906, + "step": 34 + }, + { + "epoch": 0.71, + "learning_rate": 0.00019553571428571428, + "loss": 1.8693, + "step": 35 + }, + { + "epoch": 0.73, + "learning_rate": 0.00019540816326530614, + "loss": 1.9308, + "step": 36 + }, + { + "epoch": 0.75, + "learning_rate": 0.00019528061224489797, + "loss": 1.8082, + "step": 37 + }, + { + "epoch": 0.77, + "learning_rate": 0.0001951530612244898, + "loss": 1.848, + "step": 38 + }, + { + "epoch": 0.79, + "learning_rate": 0.00019502551020408163, + "loss": 1.8866, + "step": 39 + }, + { + "epoch": 0.81, + "learning_rate": 0.0001948979591836735, + "loss": 1.7844, + "step": 40 + }, + { + "epoch": 0.83, + "learning_rate": 0.0001947704081632653, + "loss": 1.8485, + "step": 41 + }, + { + "epoch": 0.85, + "learning_rate": 0.00019464285714285715, + "loss": 1.7917, + "step": 42 + }, + { + "epoch": 0.87, + "learning_rate": 0.00019451530612244898, + "loss": 1.7342, + "step": 43 + }, + { + "epoch": 0.89, + "learning_rate": 0.00019438775510204084, + "loss": 1.8479, + "step": 44 + }, + { + "epoch": 0.91, + "learning_rate": 0.00019426020408163267, + "loss": 1.8639, + "step": 45 + }, + { + "epoch": 0.93, + "learning_rate": 0.0001941326530612245, + "loss": 1.8166, + "step": 46 + }, + { + "epoch": 0.95, + "learning_rate": 0.00019400510204081633, + "loss": 1.7566, + "step": 47 + }, + { + "epoch": 0.97, + "learning_rate": 0.00019387755102040816, + "loss": 1.8071, + "step": 48 + }, + { + "epoch": 0.99, + "learning_rate": 0.00019375000000000002, + "loss": 1.8612, + "step": 49 + }, + { + "epoch": 1.01, + "learning_rate": 0.00019362244897959185, + "loss": 1.7819, + "step": 50 + }, + { + "epoch": 1.03, + "learning_rate": 0.00019349489795918368, + "loss": 1.8647, + "step": 51 + }, + { + "epoch": 1.05, + "learning_rate": 0.0001933673469387755, + "loss": 1.8196, + "step": 52 + }, + { + "epoch": 1.07, + "learning_rate": 0.00019323979591836737, + "loss": 1.8027, + "step": 53 + }, + { + "epoch": 1.09, + "learning_rate": 0.00019311224489795917, + "loss": 1.8927, + "step": 54 + }, + { + "epoch": 1.11, + "learning_rate": 0.00019298469387755103, + "loss": 1.8481, + "step": 55 + }, + { + "epoch": 1.13, + "learning_rate": 0.00019285714285714286, + "loss": 1.7781, + "step": 56 + }, + { + "epoch": 1.15, + "learning_rate": 0.00019272959183673472, + "loss": 1.8101, + "step": 57 + }, + { + "epoch": 1.17, + "learning_rate": 0.00019260204081632653, + "loss": 1.7257, + "step": 58 + }, + { + "epoch": 1.19, + "learning_rate": 0.00019247448979591838, + "loss": 1.8185, + "step": 59 + }, + { + 
"epoch": 1.21, + "learning_rate": 0.00019234693877551021, + "loss": 1.8557, + "step": 60 + }, + { + "epoch": 1.23, + "learning_rate": 0.00019221938775510204, + "loss": 1.7418, + "step": 61 + }, + { + "epoch": 1.25, + "learning_rate": 0.00019209183673469388, + "loss": 1.6879, + "step": 62 + }, + { + "epoch": 1.27, + "learning_rate": 0.00019196428571428573, + "loss": 1.7651, + "step": 63 + }, + { + "epoch": 1.29, + "learning_rate": 0.00019183673469387756, + "loss": 1.7759, + "step": 64 + }, + { + "epoch": 1.32, + "learning_rate": 0.0001917091836734694, + "loss": 1.7691, + "step": 65 + }, + { + "epoch": 1.34, + "learning_rate": 0.00019158163265306123, + "loss": 1.7794, + "step": 66 + }, + { + "epoch": 1.36, + "learning_rate": 0.00019145408163265306, + "loss": 1.8152, + "step": 67 + }, + { + "epoch": 1.38, + "learning_rate": 0.00019132653061224492, + "loss": 1.8052, + "step": 68 + }, + { + "epoch": 1.4, + "learning_rate": 0.00019119897959183675, + "loss": 1.8054, + "step": 69 + }, + { + "epoch": 1.42, + "learning_rate": 0.00019107142857142858, + "loss": 1.8114, + "step": 70 + }, + { + "epoch": 1.44, + "learning_rate": 0.0001909438775510204, + "loss": 1.7749, + "step": 71 + }, + { + "epoch": 1.46, + "learning_rate": 0.00019081632653061227, + "loss": 1.777, + "step": 72 + }, + { + "epoch": 1.48, + "learning_rate": 0.0001906887755102041, + "loss": 1.7896, + "step": 73 + }, + { + "epoch": 1.5, + "learning_rate": 0.00019056122448979593, + "loss": 1.8335, + "step": 74 + }, + { + "epoch": 1.52, + "learning_rate": 0.00019043367346938776, + "loss": 1.8155, + "step": 75 + }, + { + "epoch": 1.54, + "learning_rate": 0.00019030612244897962, + "loss": 1.8224, + "step": 76 + }, + { + "epoch": 1.56, + "learning_rate": 0.00019017857142857142, + "loss": 1.7889, + "step": 77 + }, + { + "epoch": 1.58, + "learning_rate": 0.00019005102040816328, + "loss": 1.8866, + "step": 78 + }, + { + "epoch": 1.6, + "learning_rate": 0.0001899234693877551, + "loss": 1.8439, + "step": 79 + }, + { + "epoch": 1.62, + "learning_rate": 0.00018979591836734697, + "loss": 1.7906, + "step": 80 + }, + { + "epoch": 1.64, + "learning_rate": 0.00018966836734693877, + "loss": 1.8627, + "step": 81 + }, + { + "epoch": 1.66, + "learning_rate": 0.00018954081632653063, + "loss": 1.7497, + "step": 82 + }, + { + "epoch": 1.68, + "learning_rate": 0.00018941326530612246, + "loss": 1.7936, + "step": 83 + }, + { + "epoch": 1.7, + "learning_rate": 0.0001892857142857143, + "loss": 1.8341, + "step": 84 + }, + { + "epoch": 1.72, + "learning_rate": 0.00018915816326530612, + "loss": 1.7868, + "step": 85 + }, + { + "epoch": 1.74, + "learning_rate": 0.00018903061224489798, + "loss": 1.7493, + "step": 86 + }, + { + "epoch": 1.76, + "learning_rate": 0.0001889030612244898, + "loss": 1.7926, + "step": 87 + }, + { + "epoch": 1.78, + "learning_rate": 0.00018877551020408164, + "loss": 1.8278, + "step": 88 + }, + { + "epoch": 1.8, + "learning_rate": 0.00018864795918367347, + "loss": 1.7387, + "step": 89 + }, + { + "epoch": 1.82, + "learning_rate": 0.0001885204081632653, + "loss": 1.7669, + "step": 90 + }, + { + "epoch": 1.84, + "learning_rate": 0.00018839285714285716, + "loss": 1.7686, + "step": 91 + }, + { + "epoch": 1.86, + "learning_rate": 0.000188265306122449, + "loss": 1.7759, + "step": 92 + }, + { + "epoch": 1.88, + "learning_rate": 0.00018813775510204082, + "loss": 1.7016, + "step": 93 + }, + { + "epoch": 1.9, + "learning_rate": 0.00018801020408163265, + "loss": 1.8123, + "step": 94 + }, + { + "epoch": 1.92, + "learning_rate": 0.0001878826530612245, + "loss": 
1.8315, + "step": 95 + }, + { + "epoch": 1.94, + "learning_rate": 0.00018775510204081634, + "loss": 1.7679, + "step": 96 + }, + { + "epoch": 1.96, + "learning_rate": 0.00018762755102040817, + "loss": 1.7874, + "step": 97 + }, + { + "epoch": 1.98, + "learning_rate": 0.0001875, + "loss": 1.8008, + "step": 98 + }, + { + "epoch": 2.0, + "learning_rate": 0.00018737244897959186, + "loss": 1.7177, + "step": 99 + }, + { + "epoch": 2.02, + "learning_rate": 0.00018724489795918367, + "loss": 1.7272, + "step": 100 + }, + { + "epoch": 2.04, + "learning_rate": 0.00018711734693877552, + "loss": 1.7848, + "step": 101 + }, + { + "epoch": 2.06, + "learning_rate": 0.00018698979591836735, + "loss": 1.744, + "step": 102 + }, + { + "epoch": 2.08, + "learning_rate": 0.00018686224489795919, + "loss": 1.7005, + "step": 103 + }, + { + "epoch": 2.1, + "learning_rate": 0.00018673469387755102, + "loss": 1.8247, + "step": 104 + }, + { + "epoch": 2.12, + "learning_rate": 0.00018660714285714287, + "loss": 1.6855, + "step": 105 + }, + { + "epoch": 2.14, + "learning_rate": 0.0001864795918367347, + "loss": 1.7627, + "step": 106 + }, + { + "epoch": 2.17, + "learning_rate": 0.00018635204081632654, + "loss": 1.7564, + "step": 107 + }, + { + "epoch": 2.19, + "learning_rate": 0.00018622448979591837, + "loss": 1.8237, + "step": 108 + }, + { + "epoch": 2.21, + "learning_rate": 0.00018609693877551022, + "loss": 1.7421, + "step": 109 + }, + { + "epoch": 2.23, + "learning_rate": 0.00018596938775510206, + "loss": 1.7517, + "step": 110 + }, + { + "epoch": 2.25, + "learning_rate": 0.0001858418367346939, + "loss": 1.7515, + "step": 111 + }, + { + "epoch": 2.27, + "learning_rate": 0.00018571428571428572, + "loss": 1.7842, + "step": 112 + }, + { + "epoch": 2.29, + "learning_rate": 0.00018558673469387755, + "loss": 1.8001, + "step": 113 + }, + { + "epoch": 2.31, + "learning_rate": 0.0001854591836734694, + "loss": 1.7653, + "step": 114 + }, + { + "epoch": 2.33, + "learning_rate": 0.00018533163265306124, + "loss": 1.694, + "step": 115 + }, + { + "epoch": 2.35, + "learning_rate": 0.00018520408163265307, + "loss": 1.7457, + "step": 116 + }, + { + "epoch": 2.37, + "learning_rate": 0.0001850765306122449, + "loss": 1.7899, + "step": 117 + }, + { + "epoch": 2.39, + "learning_rate": 0.00018494897959183676, + "loss": 1.7473, + "step": 118 + }, + { + "epoch": 2.41, + "learning_rate": 0.0001848214285714286, + "loss": 1.6639, + "step": 119 + }, + { + "epoch": 2.43, + "learning_rate": 0.00018469387755102042, + "loss": 1.762, + "step": 120 + }, + { + "epoch": 2.45, + "learning_rate": 0.00018456632653061225, + "loss": 1.7378, + "step": 121 + }, + { + "epoch": 2.47, + "learning_rate": 0.0001844387755102041, + "loss": 1.672, + "step": 122 + }, + { + "epoch": 2.49, + "learning_rate": 0.0001843112244897959, + "loss": 1.7267, + "step": 123 + }, + { + "epoch": 2.51, + "learning_rate": 0.00018418367346938777, + "loss": 1.7825, + "step": 124 + }, + { + "epoch": 2.53, + "learning_rate": 0.0001840561224489796, + "loss": 1.7566, + "step": 125 + }, + { + "epoch": 2.55, + "learning_rate": 0.00018392857142857143, + "loss": 1.8169, + "step": 126 + }, + { + "epoch": 2.57, + "learning_rate": 0.00018380102040816326, + "loss": 1.6801, + "step": 127 + }, + { + "epoch": 2.59, + "learning_rate": 0.00018367346938775512, + "loss": 1.7292, + "step": 128 + }, + { + "epoch": 2.61, + "learning_rate": 0.00018354591836734695, + "loss": 1.737, + "step": 129 + }, + { + "epoch": 2.63, + "learning_rate": 0.00018341836734693878, + "loss": 1.7696, + "step": 130 + }, + { + "epoch": 2.65, + 
"learning_rate": 0.0001832908163265306, + "loss": 1.7239, + "step": 131 + }, + { + "epoch": 2.67, + "learning_rate": 0.00018316326530612247, + "loss": 1.7441, + "step": 132 + }, + { + "epoch": 2.69, + "learning_rate": 0.0001830357142857143, + "loss": 1.7825, + "step": 133 + }, + { + "epoch": 2.71, + "learning_rate": 0.00018290816326530613, + "loss": 1.7411, + "step": 134 + }, + { + "epoch": 2.73, + "learning_rate": 0.00018278061224489796, + "loss": 1.7119, + "step": 135 + }, + { + "epoch": 2.75, + "learning_rate": 0.0001826530612244898, + "loss": 1.7443, + "step": 136 + }, + { + "epoch": 2.77, + "learning_rate": 0.00018252551020408165, + "loss": 1.7197, + "step": 137 + }, + { + "epoch": 2.79, + "learning_rate": 0.00018239795918367348, + "loss": 1.7273, + "step": 138 + }, + { + "epoch": 2.81, + "learning_rate": 0.0001822704081632653, + "loss": 1.7681, + "step": 139 + }, + { + "epoch": 2.83, + "learning_rate": 0.00018214285714285714, + "loss": 1.8088, + "step": 140 + }, + { + "epoch": 2.85, + "learning_rate": 0.000182015306122449, + "loss": 1.7301, + "step": 141 + }, + { + "epoch": 2.87, + "learning_rate": 0.00018188775510204083, + "loss": 1.6853, + "step": 142 + }, + { + "epoch": 2.89, + "learning_rate": 0.00018176020408163266, + "loss": 1.6966, + "step": 143 + }, + { + "epoch": 2.91, + "learning_rate": 0.0001816326530612245, + "loss": 1.7938, + "step": 144 + }, + { + "epoch": 2.93, + "learning_rate": 0.00018150510204081635, + "loss": 1.7639, + "step": 145 + }, + { + "epoch": 2.95, + "learning_rate": 0.00018137755102040816, + "loss": 1.7527, + "step": 146 + }, + { + "epoch": 2.97, + "learning_rate": 0.00018125000000000001, + "loss": 1.7386, + "step": 147 + }, + { + "epoch": 2.99, + "learning_rate": 0.00018112244897959185, + "loss": 1.7223, + "step": 148 + }, + { + "epoch": 3.01, + "learning_rate": 0.00018099489795918368, + "loss": 1.7571, + "step": 149 + }, + { + "epoch": 3.04, + "learning_rate": 0.0001808673469387755, + "loss": 1.7054, + "step": 150 + }, + { + "epoch": 3.06, + "learning_rate": 0.00018073979591836737, + "loss": 1.6581, + "step": 151 + }, + { + "epoch": 3.08, + "learning_rate": 0.00018061224489795917, + "loss": 1.681, + "step": 152 + }, + { + "epoch": 3.1, + "learning_rate": 0.00018048469387755103, + "loss": 1.7425, + "step": 153 + }, + { + "epoch": 3.12, + "learning_rate": 0.00018035714285714286, + "loss": 1.7108, + "step": 154 + }, + { + "epoch": 3.14, + "learning_rate": 0.00018022959183673472, + "loss": 1.7194, + "step": 155 + }, + { + "epoch": 3.16, + "learning_rate": 0.00018010204081632655, + "loss": 1.6953, + "step": 156 + }, + { + "epoch": 3.18, + "learning_rate": 0.00017997448979591838, + "loss": 1.669, + "step": 157 + }, + { + "epoch": 3.2, + "learning_rate": 0.0001798469387755102, + "loss": 1.744, + "step": 158 + }, + { + "epoch": 3.22, + "learning_rate": 0.00017971938775510204, + "loss": 1.6467, + "step": 159 + }, + { + "epoch": 3.24, + "learning_rate": 0.0001795918367346939, + "loss": 1.7103, + "step": 160 + }, + { + "epoch": 3.26, + "learning_rate": 0.00017946428571428573, + "loss": 1.6662, + "step": 161 + }, + { + "epoch": 3.28, + "learning_rate": 0.00017933673469387756, + "loss": 1.6657, + "step": 162 + }, + { + "epoch": 3.3, + "learning_rate": 0.0001792091836734694, + "loss": 1.791, + "step": 163 + }, + { + "epoch": 3.32, + "learning_rate": 0.00017908163265306125, + "loss": 1.7704, + "step": 164 + }, + { + "epoch": 3.34, + "learning_rate": 0.00017895408163265305, + "loss": 1.7229, + "step": 165 + }, + { + "epoch": 3.36, + "learning_rate": 
0.0001788265306122449, + "loss": 1.76, + "step": 166 + }, + { + "epoch": 3.38, + "learning_rate": 0.00017869897959183674, + "loss": 1.6482, + "step": 167 + }, + { + "epoch": 3.4, + "learning_rate": 0.0001785714285714286, + "loss": 1.8076, + "step": 168 + }, + { + "epoch": 3.42, + "learning_rate": 0.0001784438775510204, + "loss": 1.7368, + "step": 169 + }, + { + "epoch": 3.44, + "learning_rate": 0.00017831632653061226, + "loss": 1.6264, + "step": 170 + }, + { + "epoch": 3.46, + "learning_rate": 0.0001781887755102041, + "loss": 1.6289, + "step": 171 + }, + { + "epoch": 3.48, + "learning_rate": 0.00017806122448979592, + "loss": 1.7913, + "step": 172 + }, + { + "epoch": 3.5, + "learning_rate": 0.00017793367346938775, + "loss": 1.6985, + "step": 173 + }, + { + "epoch": 3.52, + "learning_rate": 0.0001778061224489796, + "loss": 1.6936, + "step": 174 + }, + { + "epoch": 3.54, + "learning_rate": 0.00017767857142857141, + "loss": 1.8068, + "step": 175 + }, + { + "epoch": 3.56, + "learning_rate": 0.00017755102040816327, + "loss": 1.7243, + "step": 176 + }, + { + "epoch": 3.58, + "learning_rate": 0.0001774234693877551, + "loss": 1.6893, + "step": 177 + }, + { + "epoch": 3.6, + "learning_rate": 0.00017729591836734696, + "loss": 1.8122, + "step": 178 + }, + { + "epoch": 3.62, + "learning_rate": 0.0001771683673469388, + "loss": 1.6562, + "step": 179 + }, + { + "epoch": 3.64, + "learning_rate": 0.00017704081632653062, + "loss": 1.6999, + "step": 180 + }, + { + "epoch": 3.66, + "learning_rate": 0.00017691326530612245, + "loss": 1.7229, + "step": 181 + }, + { + "epoch": 3.68, + "learning_rate": 0.00017678571428571428, + "loss": 1.6764, + "step": 182 + }, + { + "epoch": 3.7, + "learning_rate": 0.00017665816326530614, + "loss": 1.6982, + "step": 183 + }, + { + "epoch": 3.72, + "learning_rate": 0.00017653061224489797, + "loss": 1.696, + "step": 184 + }, + { + "epoch": 3.74, + "learning_rate": 0.0001764030612244898, + "loss": 1.6797, + "step": 185 + }, + { + "epoch": 3.76, + "learning_rate": 0.00017627551020408164, + "loss": 1.637, + "step": 186 + }, + { + "epoch": 3.78, + "learning_rate": 0.0001761479591836735, + "loss": 1.7074, + "step": 187 + }, + { + "epoch": 3.8, + "learning_rate": 0.0001760204081632653, + "loss": 1.705, + "step": 188 + }, + { + "epoch": 3.82, + "learning_rate": 0.00017589285714285716, + "loss": 1.6153, + "step": 189 + }, + { + "epoch": 3.84, + "learning_rate": 0.00017576530612244899, + "loss": 1.7354, + "step": 190 + }, + { + "epoch": 3.86, + "learning_rate": 0.00017563775510204084, + "loss": 1.6941, + "step": 191 + }, + { + "epoch": 3.88, + "learning_rate": 0.00017551020408163265, + "loss": 1.7231, + "step": 192 + }, + { + "epoch": 3.91, + "learning_rate": 0.0001753826530612245, + "loss": 1.7663, + "step": 193 + }, + { + "epoch": 3.93, + "learning_rate": 0.00017525510204081634, + "loss": 1.6532, + "step": 194 + }, + { + "epoch": 3.95, + "learning_rate": 0.00017512755102040817, + "loss": 1.7115, + "step": 195 + }, + { + "epoch": 3.97, + "learning_rate": 0.000175, + "loss": 1.6955, + "step": 196 + }, + { + "epoch": 3.99, + "learning_rate": 0.00017487244897959186, + "loss": 1.6863, + "step": 197 + }, + { + "epoch": 4.01, + "learning_rate": 0.00017474489795918366, + "loss": 1.7012, + "step": 198 + }, + { + "epoch": 4.03, + "learning_rate": 0.00017461734693877552, + "loss": 1.5927, + "step": 199 + }, + { + "epoch": 4.05, + "learning_rate": 0.00017448979591836735, + "loss": 1.6272, + "step": 200 + }, + { + "epoch": 4.07, + "learning_rate": 0.00017436224489795918, + "loss": 1.5994, + "step": 
201 + }, + { + "epoch": 4.09, + "learning_rate": 0.00017423469387755104, + "loss": 1.7141, + "step": 202 + }, + { + "epoch": 4.11, + "learning_rate": 0.00017410714285714287, + "loss": 1.7547, + "step": 203 + }, + { + "epoch": 4.13, + "learning_rate": 0.0001739795918367347, + "loss": 1.6254, + "step": 204 + }, + { + "epoch": 4.15, + "learning_rate": 0.00017385204081632653, + "loss": 1.6686, + "step": 205 + }, + { + "epoch": 4.17, + "learning_rate": 0.0001737244897959184, + "loss": 1.6684, + "step": 206 + }, + { + "epoch": 4.19, + "learning_rate": 0.00017359693877551022, + "loss": 1.6724, + "step": 207 + }, + { + "epoch": 4.21, + "learning_rate": 0.00017346938775510205, + "loss": 1.7361, + "step": 208 + }, + { + "epoch": 4.23, + "learning_rate": 0.00017334183673469388, + "loss": 1.7167, + "step": 209 + }, + { + "epoch": 4.25, + "learning_rate": 0.00017321428571428574, + "loss": 1.7226, + "step": 210 + }, + { + "epoch": 4.27, + "learning_rate": 0.00017308673469387754, + "loss": 1.7133, + "step": 211 + }, + { + "epoch": 4.29, + "learning_rate": 0.0001729591836734694, + "loss": 1.649, + "step": 212 + }, + { + "epoch": 4.31, + "learning_rate": 0.00017283163265306123, + "loss": 1.7104, + "step": 213 + }, + { + "epoch": 4.33, + "learning_rate": 0.00017270408163265306, + "loss": 1.6861, + "step": 214 + }, + { + "epoch": 4.35, + "learning_rate": 0.0001725765306122449, + "loss": 1.648, + "step": 215 + }, + { + "epoch": 4.37, + "learning_rate": 0.00017244897959183675, + "loss": 1.6215, + "step": 216 + }, + { + "epoch": 4.39, + "learning_rate": 0.00017232142857142858, + "loss": 1.6334, + "step": 217 + }, + { + "epoch": 4.41, + "learning_rate": 0.0001721938775510204, + "loss": 1.6283, + "step": 218 + }, + { + "epoch": 4.43, + "learning_rate": 0.00017206632653061224, + "loss": 1.6462, + "step": 219 + }, + { + "epoch": 4.45, + "learning_rate": 0.0001719387755102041, + "loss": 1.7233, + "step": 220 + }, + { + "epoch": 4.47, + "learning_rate": 0.0001718112244897959, + "loss": 1.7839, + "step": 221 + }, + { + "epoch": 4.49, + "learning_rate": 0.00017168367346938776, + "loss": 1.7204, + "step": 222 + }, + { + "epoch": 4.51, + "learning_rate": 0.0001715561224489796, + "loss": 1.7671, + "step": 223 + }, + { + "epoch": 4.53, + "learning_rate": 0.00017142857142857143, + "loss": 1.6824, + "step": 224 + }, + { + "epoch": 4.55, + "learning_rate": 0.00017130102040816328, + "loss": 1.7068, + "step": 225 + }, + { + "epoch": 4.57, + "learning_rate": 0.00017117346938775511, + "loss": 1.6515, + "step": 226 + }, + { + "epoch": 4.59, + "learning_rate": 0.00017104591836734694, + "loss": 1.6586, + "step": 227 + }, + { + "epoch": 4.61, + "learning_rate": 0.00017091836734693878, + "loss": 1.6355, + "step": 228 + }, + { + "epoch": 4.63, + "learning_rate": 0.00017079081632653063, + "loss": 1.7173, + "step": 229 + }, + { + "epoch": 4.65, + "learning_rate": 0.00017066326530612246, + "loss": 1.6585, + "step": 230 + }, + { + "epoch": 4.67, + "learning_rate": 0.0001705357142857143, + "loss": 1.5856, + "step": 231 + }, + { + "epoch": 4.69, + "learning_rate": 0.00017040816326530613, + "loss": 1.5923, + "step": 232 + }, + { + "epoch": 4.71, + "learning_rate": 0.00017028061224489798, + "loss": 1.7128, + "step": 233 + }, + { + "epoch": 4.73, + "learning_rate": 0.0001701530612244898, + "loss": 1.6971, + "step": 234 + }, + { + "epoch": 4.75, + "learning_rate": 0.00017002551020408165, + "loss": 1.6416, + "step": 235 + }, + { + "epoch": 4.78, + "learning_rate": 0.00016989795918367348, + "loss": 1.645, + "step": 236 + }, + { + "epoch": 4.8, + 
"learning_rate": 0.0001697704081632653, + "loss": 1.6792, + "step": 237 + }, + { + "epoch": 4.82, + "learning_rate": 0.00016964285714285714, + "loss": 1.6522, + "step": 238 + }, + { + "epoch": 4.84, + "learning_rate": 0.000169515306122449, + "loss": 1.6315, + "step": 239 + }, + { + "epoch": 4.86, + "learning_rate": 0.00016938775510204083, + "loss": 1.6622, + "step": 240 + }, + { + "epoch": 4.88, + "learning_rate": 0.00016926020408163266, + "loss": 1.6566, + "step": 241 + }, + { + "epoch": 4.9, + "learning_rate": 0.0001691326530612245, + "loss": 1.7141, + "step": 242 + }, + { + "epoch": 4.92, + "learning_rate": 0.00016900510204081635, + "loss": 1.5873, + "step": 243 + }, + { + "epoch": 4.94, + "learning_rate": 0.00016887755102040818, + "loss": 1.6571, + "step": 244 + }, + { + "epoch": 4.96, + "learning_rate": 0.00016875, + "loss": 1.6829, + "step": 245 + }, + { + "epoch": 4.98, + "learning_rate": 0.00016862244897959184, + "loss": 1.6935, + "step": 246 + }, + { + "epoch": 5.0, + "learning_rate": 0.00016849489795918367, + "loss": 1.6782, + "step": 247 + }, + { + "epoch": 5.02, + "learning_rate": 0.00016836734693877553, + "loss": 1.622, + "step": 248 + }, + { + "epoch": 5.04, + "learning_rate": 0.00016823979591836736, + "loss": 1.6596, + "step": 249 + }, + { + "epoch": 5.06, + "learning_rate": 0.0001681122448979592, + "loss": 1.5821, + "step": 250 + }, + { + "epoch": 5.08, + "learning_rate": 0.00016798469387755102, + "loss": 1.7292, + "step": 251 + }, + { + "epoch": 5.1, + "learning_rate": 0.00016785714285714288, + "loss": 1.646, + "step": 252 + }, + { + "epoch": 5.12, + "learning_rate": 0.0001677295918367347, + "loss": 1.6969, + "step": 253 + }, + { + "epoch": 5.14, + "learning_rate": 0.00016760204081632654, + "loss": 1.6082, + "step": 254 + }, + { + "epoch": 5.16, + "learning_rate": 0.00016747448979591837, + "loss": 1.5843, + "step": 255 + }, + { + "epoch": 5.18, + "learning_rate": 0.00016734693877551023, + "loss": 1.6827, + "step": 256 + }, + { + "epoch": 5.2, + "learning_rate": 0.00016721938775510203, + "loss": 1.5824, + "step": 257 + }, + { + "epoch": 5.22, + "learning_rate": 0.0001670918367346939, + "loss": 1.6795, + "step": 258 + }, + { + "epoch": 5.24, + "learning_rate": 0.00016696428571428572, + "loss": 1.5639, + "step": 259 + }, + { + "epoch": 5.26, + "learning_rate": 0.00016683673469387755, + "loss": 1.592, + "step": 260 + }, + { + "epoch": 5.28, + "learning_rate": 0.00016670918367346938, + "loss": 1.65, + "step": 261 + }, + { + "epoch": 5.3, + "learning_rate": 0.00016658163265306124, + "loss": 1.5592, + "step": 262 + }, + { + "epoch": 5.32, + "learning_rate": 0.00016645408163265305, + "loss": 1.5091, + "step": 263 + }, + { + "epoch": 5.34, + "learning_rate": 0.0001663265306122449, + "loss": 1.6138, + "step": 264 + }, + { + "epoch": 5.36, + "learning_rate": 0.00016619897959183673, + "loss": 1.625, + "step": 265 + }, + { + "epoch": 5.38, + "learning_rate": 0.0001660714285714286, + "loss": 1.5757, + "step": 266 + }, + { + "epoch": 5.4, + "learning_rate": 0.00016594387755102042, + "loss": 1.6372, + "step": 267 + }, + { + "epoch": 5.42, + "learning_rate": 0.00016581632653061225, + "loss": 1.5891, + "step": 268 + }, + { + "epoch": 5.44, + "learning_rate": 0.00016568877551020409, + "loss": 1.6893, + "step": 269 + }, + { + "epoch": 5.46, + "learning_rate": 0.00016556122448979592, + "loss": 1.6662, + "step": 270 + }, + { + "epoch": 5.48, + "learning_rate": 0.00016543367346938777, + "loss": 1.7132, + "step": 271 + }, + { + "epoch": 5.5, + "learning_rate": 0.0001653061224489796, + "loss": 
1.5835, + "step": 272 + }, + { + "epoch": 5.52, + "learning_rate": 0.00016517857142857144, + "loss": 1.6342, + "step": 273 + }, + { + "epoch": 5.54, + "learning_rate": 0.00016505102040816327, + "loss": 1.6717, + "step": 274 + }, + { + "epoch": 5.56, + "learning_rate": 0.00016492346938775512, + "loss": 1.6248, + "step": 275 + }, + { + "epoch": 5.58, + "learning_rate": 0.00016479591836734696, + "loss": 1.6117, + "step": 276 + }, + { + "epoch": 5.6, + "learning_rate": 0.0001646683673469388, + "loss": 1.6798, + "step": 277 + }, + { + "epoch": 5.63, + "learning_rate": 0.00016454081632653062, + "loss": 1.6406, + "step": 278 + }, + { + "epoch": 5.65, + "learning_rate": 0.00016441326530612248, + "loss": 1.6512, + "step": 279 + }, + { + "epoch": 5.67, + "learning_rate": 0.00016428571428571428, + "loss": 1.6102, + "step": 280 + }, + { + "epoch": 5.69, + "learning_rate": 0.00016415816326530614, + "loss": 1.6113, + "step": 281 + }, + { + "epoch": 5.71, + "learning_rate": 0.00016403061224489797, + "loss": 1.7116, + "step": 282 + }, + { + "epoch": 5.73, + "learning_rate": 0.0001639030612244898, + "loss": 1.6846, + "step": 283 + }, + { + "epoch": 5.75, + "learning_rate": 0.00016377551020408163, + "loss": 1.6911, + "step": 284 + }, + { + "epoch": 5.77, + "learning_rate": 0.0001636479591836735, + "loss": 1.6202, + "step": 285 + }, + { + "epoch": 5.79, + "learning_rate": 0.0001635204081632653, + "loss": 1.5715, + "step": 286 + }, + { + "epoch": 5.81, + "learning_rate": 0.00016339285714285715, + "loss": 1.6461, + "step": 287 + }, + { + "epoch": 5.83, + "learning_rate": 0.00016326530612244898, + "loss": 1.6624, + "step": 288 + }, + { + "epoch": 5.85, + "learning_rate": 0.00016313775510204084, + "loss": 1.6535, + "step": 289 + }, + { + "epoch": 5.87, + "learning_rate": 0.00016301020408163267, + "loss": 1.6275, + "step": 290 + }, + { + "epoch": 5.89, + "learning_rate": 0.0001628826530612245, + "loss": 1.6636, + "step": 291 + }, + { + "epoch": 5.91, + "learning_rate": 0.00016275510204081633, + "loss": 1.6546, + "step": 292 + }, + { + "epoch": 5.93, + "learning_rate": 0.00016262755102040816, + "loss": 1.7274, + "step": 293 + }, + { + "epoch": 5.95, + "learning_rate": 0.00016250000000000002, + "loss": 1.5901, + "step": 294 + }, + { + "epoch": 5.97, + "learning_rate": 0.00016237244897959185, + "loss": 1.6046, + "step": 295 + }, + { + "epoch": 5.99, + "learning_rate": 0.00016224489795918368, + "loss": 1.5828, + "step": 296 + }, + { + "epoch": 6.01, + "learning_rate": 0.0001621173469387755, + "loss": 1.6435, + "step": 297 + }, + { + "epoch": 6.03, + "learning_rate": 0.00016198979591836737, + "loss": 1.6263, + "step": 298 + }, + { + "epoch": 6.05, + "learning_rate": 0.00016186224489795917, + "loss": 1.4944, + "step": 299 + }, + { + "epoch": 6.07, + "learning_rate": 0.00016173469387755103, + "loss": 1.6286, + "step": 300 + }, + { + "epoch": 6.09, + "learning_rate": 0.00016160714285714286, + "loss": 1.694, + "step": 301 + }, + { + "epoch": 6.11, + "learning_rate": 0.00016147959183673472, + "loss": 1.6197, + "step": 302 + }, + { + "epoch": 6.13, + "learning_rate": 0.00016135204081632652, + "loss": 1.5597, + "step": 303 + }, + { + "epoch": 6.15, + "learning_rate": 0.00016122448979591838, + "loss": 1.5487, + "step": 304 + }, + { + "epoch": 6.17, + "learning_rate": 0.0001610969387755102, + "loss": 1.5769, + "step": 305 + }, + { + "epoch": 6.19, + "learning_rate": 0.00016096938775510204, + "loss": 1.6367, + "step": 306 + }, + { + "epoch": 6.21, + "learning_rate": 0.00016084183673469388, + "loss": 1.583, + "step": 307 + }, + 
{ + "epoch": 6.23, + "learning_rate": 0.00016071428571428573, + "loss": 1.6201, + "step": 308 + }, + { + "epoch": 6.25, + "learning_rate": 0.00016058673469387754, + "loss": 1.6586, + "step": 309 + }, + { + "epoch": 6.27, + "learning_rate": 0.0001604591836734694, + "loss": 1.6711, + "step": 310 + }, + { + "epoch": 6.29, + "learning_rate": 0.00016033163265306123, + "loss": 1.6402, + "step": 311 + }, + { + "epoch": 6.31, + "learning_rate": 0.00016020408163265306, + "loss": 1.5247, + "step": 312 + }, + { + "epoch": 6.33, + "learning_rate": 0.00016007653061224491, + "loss": 1.5356, + "step": 313 + }, + { + "epoch": 6.35, + "learning_rate": 0.00015994897959183675, + "loss": 1.564, + "step": 314 + }, + { + "epoch": 6.37, + "learning_rate": 0.00015982142857142858, + "loss": 1.563, + "step": 315 + }, + { + "epoch": 6.39, + "learning_rate": 0.0001596938775510204, + "loss": 1.5198, + "step": 316 + }, + { + "epoch": 6.41, + "learning_rate": 0.00015956632653061227, + "loss": 1.6558, + "step": 317 + }, + { + "epoch": 6.43, + "learning_rate": 0.0001594387755102041, + "loss": 1.5534, + "step": 318 + }, + { + "epoch": 6.45, + "learning_rate": 0.00015931122448979593, + "loss": 1.6239, + "step": 319 + }, + { + "epoch": 6.47, + "learning_rate": 0.00015918367346938776, + "loss": 1.5645, + "step": 320 + }, + { + "epoch": 6.5, + "learning_rate": 0.00015905612244897962, + "loss": 1.5713, + "step": 321 + }, + { + "epoch": 6.52, + "learning_rate": 0.00015892857142857142, + "loss": 1.6176, + "step": 322 + }, + { + "epoch": 6.54, + "learning_rate": 0.00015880102040816328, + "loss": 1.502, + "step": 323 + }, + { + "epoch": 6.56, + "learning_rate": 0.0001586734693877551, + "loss": 1.645, + "step": 324 + }, + { + "epoch": 6.58, + "learning_rate": 0.00015854591836734697, + "loss": 1.5904, + "step": 325 + }, + { + "epoch": 6.6, + "learning_rate": 0.00015841836734693877, + "loss": 1.6149, + "step": 326 + }, + { + "epoch": 6.62, + "learning_rate": 0.00015829081632653063, + "loss": 1.6757, + "step": 327 + }, + { + "epoch": 6.64, + "learning_rate": 0.00015816326530612246, + "loss": 1.541, + "step": 328 + }, + { + "epoch": 6.66, + "learning_rate": 0.0001580357142857143, + "loss": 1.5898, + "step": 329 + }, + { + "epoch": 6.68, + "learning_rate": 0.00015790816326530612, + "loss": 1.5441, + "step": 330 + }, + { + "epoch": 6.7, + "learning_rate": 0.00015778061224489798, + "loss": 1.61, + "step": 331 + }, + { + "epoch": 6.72, + "learning_rate": 0.00015765306122448978, + "loss": 1.615, + "step": 332 + }, + { + "epoch": 6.74, + "learning_rate": 0.00015752551020408164, + "loss": 1.6575, + "step": 333 + }, + { + "epoch": 6.76, + "learning_rate": 0.00015739795918367347, + "loss": 1.6702, + "step": 334 + }, + { + "epoch": 6.78, + "learning_rate": 0.0001572704081632653, + "loss": 1.6009, + "step": 335 + }, + { + "epoch": 6.8, + "learning_rate": 0.00015714285714285716, + "loss": 1.5568, + "step": 336 + }, + { + "epoch": 6.82, + "learning_rate": 0.000157015306122449, + "loss": 1.619, + "step": 337 + }, + { + "epoch": 6.84, + "learning_rate": 0.00015688775510204082, + "loss": 1.5563, + "step": 338 + }, + { + "epoch": 6.86, + "learning_rate": 0.00015676020408163265, + "loss": 1.6328, + "step": 339 + }, + { + "epoch": 6.88, + "learning_rate": 0.0001566326530612245, + "loss": 1.5726, + "step": 340 + }, + { + "epoch": 6.9, + "learning_rate": 0.00015650510204081634, + "loss": 1.6199, + "step": 341 + }, + { + "epoch": 6.92, + "learning_rate": 0.00015637755102040817, + "loss": 1.5722, + "step": 342 + }, + { + "epoch": 6.94, + "learning_rate": 
0.00015625, + "loss": 1.5685, + "step": 343 + }, + { + "epoch": 6.96, + "learning_rate": 0.00015612244897959186, + "loss": 1.5615, + "step": 344 + }, + { + "epoch": 6.98, + "learning_rate": 0.00015599489795918366, + "loss": 1.5994, + "step": 345 + }, + { + "epoch": 7.0, + "learning_rate": 0.00015586734693877552, + "loss": 1.5579, + "step": 346 + }, + { + "epoch": 7.02, + "learning_rate": 0.00015573979591836735, + "loss": 1.547, + "step": 347 + }, + { + "epoch": 7.04, + "learning_rate": 0.00015561224489795918, + "loss": 1.5292, + "step": 348 + }, + { + "epoch": 7.06, + "learning_rate": 0.00015548469387755102, + "loss": 1.6032, + "step": 349 + }, + { + "epoch": 7.08, + "learning_rate": 0.00015535714285714287, + "loss": 1.5149, + "step": 350 + }, + { + "epoch": 7.1, + "learning_rate": 0.0001552295918367347, + "loss": 1.6093, + "step": 351 + }, + { + "epoch": 7.12, + "learning_rate": 0.00015510204081632654, + "loss": 1.5421, + "step": 352 + }, + { + "epoch": 7.14, + "learning_rate": 0.00015497448979591837, + "loss": 1.5733, + "step": 353 + }, + { + "epoch": 7.16, + "learning_rate": 0.00015484693877551022, + "loss": 1.5703, + "step": 354 + }, + { + "epoch": 7.18, + "learning_rate": 0.00015471938775510203, + "loss": 1.6141, + "step": 355 + }, + { + "epoch": 7.2, + "learning_rate": 0.00015459183673469389, + "loss": 1.5526, + "step": 356 + }, + { + "epoch": 7.22, + "learning_rate": 0.00015446428571428572, + "loss": 1.5347, + "step": 357 + }, + { + "epoch": 7.24, + "learning_rate": 0.00015433673469387755, + "loss": 1.5682, + "step": 358 + }, + { + "epoch": 7.26, + "learning_rate": 0.0001542091836734694, + "loss": 1.5292, + "step": 359 + }, + { + "epoch": 7.28, + "learning_rate": 0.00015408163265306124, + "loss": 1.499, + "step": 360 + }, + { + "epoch": 7.3, + "learning_rate": 0.00015395408163265307, + "loss": 1.5624, + "step": 361 + }, + { + "epoch": 7.32, + "learning_rate": 0.0001538265306122449, + "loss": 1.627, + "step": 362 + }, + { + "epoch": 7.34, + "learning_rate": 0.00015369897959183676, + "loss": 1.5327, + "step": 363 + }, + { + "epoch": 7.37, + "learning_rate": 0.0001535714285714286, + "loss": 1.5622, + "step": 364 + }, + { + "epoch": 7.39, + "learning_rate": 0.00015344387755102042, + "loss": 1.5659, + "step": 365 + }, + { + "epoch": 7.41, + "learning_rate": 0.00015331632653061225, + "loss": 1.5019, + "step": 366 + }, + { + "epoch": 7.43, + "learning_rate": 0.0001531887755102041, + "loss": 1.5921, + "step": 367 + }, + { + "epoch": 7.45, + "learning_rate": 0.0001530612244897959, + "loss": 1.5914, + "step": 368 + }, + { + "epoch": 7.47, + "learning_rate": 0.00015293367346938777, + "loss": 1.5045, + "step": 369 + }, + { + "epoch": 7.49, + "learning_rate": 0.0001528061224489796, + "loss": 1.6209, + "step": 370 + }, + { + "epoch": 7.51, + "learning_rate": 0.00015267857142857143, + "loss": 1.5198, + "step": 371 + }, + { + "epoch": 7.53, + "learning_rate": 0.00015255102040816326, + "loss": 1.5363, + "step": 372 + }, + { + "epoch": 7.55, + "learning_rate": 0.00015242346938775512, + "loss": 1.5391, + "step": 373 + }, + { + "epoch": 7.57, + "learning_rate": 0.00015229591836734695, + "loss": 1.4546, + "step": 374 + }, + { + "epoch": 7.59, + "learning_rate": 0.00015216836734693878, + "loss": 1.5546, + "step": 375 + }, + { + "epoch": 7.61, + "learning_rate": 0.0001520408163265306, + "loss": 1.5629, + "step": 376 + }, + { + "epoch": 7.63, + "learning_rate": 0.00015191326530612247, + "loss": 1.6002, + "step": 377 + }, + { + "epoch": 7.65, + "learning_rate": 0.00015178571428571427, + "loss": 1.5543, + 
"step": 378 + }, + { + "epoch": 7.67, + "learning_rate": 0.00015165816326530613, + "loss": 1.5925, + "step": 379 + }, + { + "epoch": 7.69, + "learning_rate": 0.00015153061224489796, + "loss": 1.5631, + "step": 380 + }, + { + "epoch": 7.71, + "learning_rate": 0.0001514030612244898, + "loss": 1.5677, + "step": 381 + }, + { + "epoch": 7.73, + "learning_rate": 0.00015127551020408165, + "loss": 1.5828, + "step": 382 + }, + { + "epoch": 7.75, + "learning_rate": 0.00015114795918367348, + "loss": 1.6494, + "step": 383 + }, + { + "epoch": 7.77, + "learning_rate": 0.0001510204081632653, + "loss": 1.553, + "step": 384 + }, + { + "epoch": 7.79, + "learning_rate": 0.00015089285714285714, + "loss": 1.6156, + "step": 385 + }, + { + "epoch": 7.81, + "learning_rate": 0.000150765306122449, + "loss": 1.5001, + "step": 386 + }, + { + "epoch": 7.83, + "learning_rate": 0.00015063775510204083, + "loss": 1.5321, + "step": 387 + }, + { + "epoch": 7.85, + "learning_rate": 0.00015051020408163266, + "loss": 1.5307, + "step": 388 + }, + { + "epoch": 7.87, + "learning_rate": 0.0001503826530612245, + "loss": 1.5639, + "step": 389 + }, + { + "epoch": 7.89, + "learning_rate": 0.00015025510204081635, + "loss": 1.517, + "step": 390 + }, + { + "epoch": 7.91, + "learning_rate": 0.00015012755102040816, + "loss": 1.4776, + "step": 391 + }, + { + "epoch": 7.93, + "learning_rate": 0.00015000000000000001, + "loss": 1.5368, + "step": 392 + }, + { + "epoch": 7.95, + "learning_rate": 0.00014987244897959184, + "loss": 1.5636, + "step": 393 + }, + { + "epoch": 7.97, + "learning_rate": 0.00014974489795918368, + "loss": 1.6004, + "step": 394 + }, + { + "epoch": 7.99, + "learning_rate": 0.0001496173469387755, + "loss": 1.5524, + "step": 395 + }, + { + "epoch": 8.01, + "learning_rate": 0.00014948979591836736, + "loss": 1.5307, + "step": 396 + }, + { + "epoch": 8.03, + "learning_rate": 0.00014936224489795917, + "loss": 1.5123, + "step": 397 + }, + { + "epoch": 8.05, + "learning_rate": 0.00014923469387755103, + "loss": 1.5132, + "step": 398 + }, + { + "epoch": 8.07, + "learning_rate": 0.00014910714285714286, + "loss": 1.5109, + "step": 399 + }, + { + "epoch": 8.09, + "learning_rate": 0.00014897959183673472, + "loss": 1.5302, + "step": 400 + }, + { + "epoch": 8.11, + "learning_rate": 0.00014885204081632652, + "loss": 1.5238, + "step": 401 + }, + { + "epoch": 8.13, + "learning_rate": 0.00014872448979591838, + "loss": 1.4781, + "step": 402 + }, + { + "epoch": 8.15, + "learning_rate": 0.0001485969387755102, + "loss": 1.5446, + "step": 403 + }, + { + "epoch": 8.17, + "learning_rate": 0.00014846938775510204, + "loss": 1.5, + "step": 404 + }, + { + "epoch": 8.19, + "learning_rate": 0.0001483418367346939, + "loss": 1.5458, + "step": 405 + }, + { + "epoch": 8.21, + "learning_rate": 0.00014821428571428573, + "loss": 1.5257, + "step": 406 + }, + { + "epoch": 8.24, + "learning_rate": 0.00014808673469387756, + "loss": 1.4607, + "step": 407 + }, + { + "epoch": 8.26, + "learning_rate": 0.0001479591836734694, + "loss": 1.4282, + "step": 408 + }, + { + "epoch": 8.28, + "learning_rate": 0.00014783163265306125, + "loss": 1.4519, + "step": 409 + }, + { + "epoch": 8.3, + "learning_rate": 0.00014770408163265305, + "loss": 1.475, + "step": 410 + }, + { + "epoch": 8.32, + "learning_rate": 0.0001475765306122449, + "loss": 1.5425, + "step": 411 + }, + { + "epoch": 8.34, + "learning_rate": 0.00014744897959183674, + "loss": 1.5407, + "step": 412 + }, + { + "epoch": 8.36, + "learning_rate": 0.0001473214285714286, + "loss": 1.5698, + "step": 413 + }, + { + "epoch": 8.38, 
+ "learning_rate": 0.0001471938775510204, + "loss": 1.4282, + "step": 414 + }, + { + "epoch": 8.4, + "learning_rate": 0.00014706632653061226, + "loss": 1.5301, + "step": 415 + }, + { + "epoch": 8.42, + "learning_rate": 0.0001469387755102041, + "loss": 1.5083, + "step": 416 + }, + { + "epoch": 8.44, + "learning_rate": 0.00014681122448979592, + "loss": 1.5712, + "step": 417 + }, + { + "epoch": 8.46, + "learning_rate": 0.00014668367346938775, + "loss": 1.4363, + "step": 418 + }, + { + "epoch": 8.48, + "learning_rate": 0.0001465561224489796, + "loss": 1.4463, + "step": 419 + }, + { + "epoch": 8.5, + "learning_rate": 0.00014642857142857141, + "loss": 1.4738, + "step": 420 + }, + { + "epoch": 8.52, + "learning_rate": 0.00014630102040816327, + "loss": 1.5396, + "step": 421 + }, + { + "epoch": 8.54, + "learning_rate": 0.0001461734693877551, + "loss": 1.4384, + "step": 422 + }, + { + "epoch": 8.56, + "learning_rate": 0.00014604591836734696, + "loss": 1.5345, + "step": 423 + }, + { + "epoch": 8.58, + "learning_rate": 0.0001459183673469388, + "loss": 1.5355, + "step": 424 + }, + { + "epoch": 8.6, + "learning_rate": 0.00014579081632653062, + "loss": 1.5188, + "step": 425 + }, + { + "epoch": 8.62, + "learning_rate": 0.00014566326530612245, + "loss": 1.5575, + "step": 426 + }, + { + "epoch": 8.64, + "learning_rate": 0.00014553571428571428, + "loss": 1.5279, + "step": 427 + }, + { + "epoch": 8.66, + "learning_rate": 0.00014540816326530614, + "loss": 1.5484, + "step": 428 + }, + { + "epoch": 8.68, + "learning_rate": 0.00014528061224489797, + "loss": 1.4878, + "step": 429 + }, + { + "epoch": 8.7, + "learning_rate": 0.0001451530612244898, + "loss": 1.503, + "step": 430 + }, + { + "epoch": 8.72, + "learning_rate": 0.00014502551020408163, + "loss": 1.4723, + "step": 431 + }, + { + "epoch": 8.74, + "learning_rate": 0.0001448979591836735, + "loss": 1.5579, + "step": 432 + }, + { + "epoch": 8.76, + "learning_rate": 0.0001447704081632653, + "loss": 1.4789, + "step": 433 + }, + { + "epoch": 8.78, + "learning_rate": 0.00014464285714285715, + "loss": 1.5501, + "step": 434 + }, + { + "epoch": 8.8, + "learning_rate": 0.00014451530612244899, + "loss": 1.5204, + "step": 435 + }, + { + "epoch": 8.82, + "learning_rate": 0.00014438775510204084, + "loss": 1.5489, + "step": 436 + }, + { + "epoch": 8.84, + "learning_rate": 0.00014426020408163265, + "loss": 1.5464, + "step": 437 + }, + { + "epoch": 8.86, + "learning_rate": 0.0001441326530612245, + "loss": 1.5896, + "step": 438 + }, + { + "epoch": 8.88, + "learning_rate": 0.00014400510204081634, + "loss": 1.5465, + "step": 439 + }, + { + "epoch": 8.9, + "learning_rate": 0.00014387755102040817, + "loss": 1.5094, + "step": 440 + }, + { + "epoch": 8.92, + "learning_rate": 0.00014375, + "loss": 1.5144, + "step": 441 + }, + { + "epoch": 8.94, + "learning_rate": 0.00014362244897959186, + "loss": 1.4919, + "step": 442 + }, + { + "epoch": 8.96, + "learning_rate": 0.00014349489795918366, + "loss": 1.4702, + "step": 443 + }, + { + "epoch": 8.98, + "learning_rate": 0.00014336734693877552, + "loss": 1.4996, + "step": 444 + }, + { + "epoch": 9.0, + "learning_rate": 0.00014323979591836735, + "loss": 1.5503, + "step": 445 + }, + { + "epoch": 9.02, + "learning_rate": 0.00014311224489795918, + "loss": 1.4125, + "step": 446 + }, + { + "epoch": 9.04, + "learning_rate": 0.00014298469387755104, + "loss": 1.4722, + "step": 447 + }, + { + "epoch": 9.06, + "learning_rate": 0.00014285714285714287, + "loss": 1.5199, + "step": 448 + }, + { + "epoch": 9.09, + "learning_rate": 0.0001427295918367347, + 
"loss": 1.4571, + "step": 449 + }, + { + "epoch": 9.11, + "learning_rate": 0.00014260204081632653, + "loss": 1.4996, + "step": 450 + }, + { + "epoch": 9.13, + "learning_rate": 0.0001424744897959184, + "loss": 1.4092, + "step": 451 + }, + { + "epoch": 9.15, + "learning_rate": 0.00014234693877551022, + "loss": 1.4198, + "step": 452 + }, + { + "epoch": 9.17, + "learning_rate": 0.00014221938775510205, + "loss": 1.4916, + "step": 453 + }, + { + "epoch": 9.19, + "learning_rate": 0.00014209183673469388, + "loss": 1.5051, + "step": 454 + }, + { + "epoch": 9.21, + "learning_rate": 0.00014196428571428574, + "loss": 1.4321, + "step": 455 + }, + { + "epoch": 9.23, + "learning_rate": 0.00014183673469387754, + "loss": 1.4097, + "step": 456 + }, + { + "epoch": 9.25, + "learning_rate": 0.0001417091836734694, + "loss": 1.4853, + "step": 457 + }, + { + "epoch": 9.27, + "learning_rate": 0.00014158163265306123, + "loss": 1.4593, + "step": 458 + }, + { + "epoch": 9.29, + "learning_rate": 0.00014145408163265306, + "loss": 1.3729, + "step": 459 + }, + { + "epoch": 9.31, + "learning_rate": 0.0001413265306122449, + "loss": 1.4467, + "step": 460 + }, + { + "epoch": 9.33, + "learning_rate": 0.00014119897959183675, + "loss": 1.4467, + "step": 461 + }, + { + "epoch": 9.35, + "learning_rate": 0.00014107142857142858, + "loss": 1.4785, + "step": 462 + }, + { + "epoch": 9.37, + "learning_rate": 0.0001409438775510204, + "loss": 1.4089, + "step": 463 + }, + { + "epoch": 9.39, + "learning_rate": 0.00014081632653061224, + "loss": 1.5026, + "step": 464 + }, + { + "epoch": 9.41, + "learning_rate": 0.0001406887755102041, + "loss": 1.4857, + "step": 465 + }, + { + "epoch": 9.43, + "learning_rate": 0.0001405612244897959, + "loss": 1.3745, + "step": 466 + }, + { + "epoch": 9.45, + "learning_rate": 0.00014043367346938776, + "loss": 1.4733, + "step": 467 + }, + { + "epoch": 9.47, + "learning_rate": 0.0001403061224489796, + "loss": 1.5212, + "step": 468 + }, + { + "epoch": 9.49, + "learning_rate": 0.00014017857142857142, + "loss": 1.5398, + "step": 469 + }, + { + "epoch": 9.51, + "learning_rate": 0.00014005102040816328, + "loss": 1.478, + "step": 470 + }, + { + "epoch": 9.53, + "learning_rate": 0.0001399234693877551, + "loss": 1.496, + "step": 471 + }, + { + "epoch": 9.55, + "learning_rate": 0.00013979591836734694, + "loss": 1.4837, + "step": 472 + }, + { + "epoch": 9.57, + "learning_rate": 0.00013966836734693878, + "loss": 1.4724, + "step": 473 + }, + { + "epoch": 9.59, + "learning_rate": 0.00013954081632653063, + "loss": 1.4828, + "step": 474 + }, + { + "epoch": 9.61, + "learning_rate": 0.00013941326530612246, + "loss": 1.5012, + "step": 475 + }, + { + "epoch": 9.63, + "learning_rate": 0.0001392857142857143, + "loss": 1.4879, + "step": 476 + }, + { + "epoch": 9.65, + "learning_rate": 0.00013915816326530613, + "loss": 1.4196, + "step": 477 + }, + { + "epoch": 9.67, + "learning_rate": 0.00013903061224489798, + "loss": 1.4915, + "step": 478 + }, + { + "epoch": 9.69, + "learning_rate": 0.0001389030612244898, + "loss": 1.3878, + "step": 479 + }, + { + "epoch": 9.71, + "learning_rate": 0.00013877551020408165, + "loss": 1.466, + "step": 480 + }, + { + "epoch": 9.73, + "learning_rate": 0.00013864795918367348, + "loss": 1.4582, + "step": 481 + }, + { + "epoch": 9.75, + "learning_rate": 0.0001385204081632653, + "loss": 1.533, + "step": 482 + }, + { + "epoch": 9.77, + "learning_rate": 0.00013839285714285714, + "loss": 1.4697, + "step": 483 + }, + { + "epoch": 9.79, + "learning_rate": 0.000138265306122449, + "loss": 1.3989, + "step": 484 + }, + 
{ + "epoch": 9.81, + "learning_rate": 0.00013813775510204083, + "loss": 1.4361, + "step": 485 + }, + { + "epoch": 9.83, + "learning_rate": 0.00013801020408163266, + "loss": 1.5271, + "step": 486 + }, + { + "epoch": 9.85, + "learning_rate": 0.0001378826530612245, + "loss": 1.4905, + "step": 487 + }, + { + "epoch": 9.87, + "learning_rate": 0.00013775510204081635, + "loss": 1.4757, + "step": 488 + }, + { + "epoch": 9.89, + "learning_rate": 0.00013762755102040815, + "loss": 1.5485, + "step": 489 + }, + { + "epoch": 9.91, + "learning_rate": 0.0001375, + "loss": 1.4783, + "step": 490 + }, + { + "epoch": 9.93, + "learning_rate": 0.00013737244897959184, + "loss": 1.4849, + "step": 491 + }, + { + "epoch": 9.96, + "learning_rate": 0.00013724489795918367, + "loss": 1.5382, + "step": 492 + }, + { + "epoch": 9.98, + "learning_rate": 0.00013711734693877553, + "loss": 1.4902, + "step": 493 + }, + { + "epoch": 10.0, + "learning_rate": 0.00013698979591836736, + "loss": 1.4865, + "step": 494 + }, + { + "epoch": 10.02, + "learning_rate": 0.0001368622448979592, + "loss": 1.4436, + "step": 495 + }, + { + "epoch": 10.04, + "learning_rate": 0.00013673469387755102, + "loss": 1.408, + "step": 496 + }, + { + "epoch": 10.06, + "learning_rate": 0.00013660714285714288, + "loss": 1.4764, + "step": 497 + }, + { + "epoch": 10.08, + "learning_rate": 0.0001364795918367347, + "loss": 1.4646, + "step": 498 + }, + { + "epoch": 10.1, + "learning_rate": 0.00013635204081632654, + "loss": 1.406, + "step": 499 + }, + { + "epoch": 10.12, + "learning_rate": 0.00013622448979591837, + "loss": 1.4785, + "step": 500 + }, + { + "epoch": 10.14, + "learning_rate": 0.00013609693877551023, + "loss": 1.4117, + "step": 501 + }, + { + "epoch": 10.16, + "learning_rate": 0.00013596938775510203, + "loss": 1.4108, + "step": 502 + }, + { + "epoch": 10.18, + "learning_rate": 0.0001358418367346939, + "loss": 1.4155, + "step": 503 + }, + { + "epoch": 10.2, + "learning_rate": 0.00013571428571428572, + "loss": 1.4021, + "step": 504 + }, + { + "epoch": 10.22, + "learning_rate": 0.00013558673469387755, + "loss": 1.411, + "step": 505 + }, + { + "epoch": 10.24, + "learning_rate": 0.00013545918367346938, + "loss": 1.3851, + "step": 506 + }, + { + "epoch": 10.26, + "learning_rate": 0.00013533163265306124, + "loss": 1.387, + "step": 507 + }, + { + "epoch": 10.28, + "learning_rate": 0.00013520408163265305, + "loss": 1.4163, + "step": 508 + }, + { + "epoch": 10.3, + "learning_rate": 0.0001350765306122449, + "loss": 1.3343, + "step": 509 + }, + { + "epoch": 10.32, + "learning_rate": 0.00013494897959183673, + "loss": 1.4811, + "step": 510 + }, + { + "epoch": 10.34, + "learning_rate": 0.0001348214285714286, + "loss": 1.4086, + "step": 511 + }, + { + "epoch": 10.36, + "learning_rate": 0.0001346938775510204, + "loss": 1.3879, + "step": 512 + }, + { + "epoch": 10.38, + "learning_rate": 0.00013456632653061225, + "loss": 1.4204, + "step": 513 + }, + { + "epoch": 10.4, + "learning_rate": 0.00013443877551020408, + "loss": 1.4158, + "step": 514 + }, + { + "epoch": 10.42, + "learning_rate": 0.00013431122448979592, + "loss": 1.4521, + "step": 515 + }, + { + "epoch": 10.44, + "learning_rate": 0.00013418367346938777, + "loss": 1.4196, + "step": 516 + }, + { + "epoch": 10.46, + "learning_rate": 0.0001340561224489796, + "loss": 1.4361, + "step": 517 + }, + { + "epoch": 10.48, + "learning_rate": 0.00013392857142857144, + "loss": 1.4482, + "step": 518 + }, + { + "epoch": 10.5, + "learning_rate": 0.00013380102040816327, + "loss": 1.4801, + "step": 519 + }, + { + "epoch": 10.52, + 
"learning_rate": 0.00013367346938775512, + "loss": 1.4556, + "step": 520 + }, + { + "epoch": 10.54, + "learning_rate": 0.00013354591836734695, + "loss": 1.3902, + "step": 521 + }, + { + "epoch": 10.56, + "learning_rate": 0.00013341836734693879, + "loss": 1.4269, + "step": 522 + }, + { + "epoch": 10.58, + "learning_rate": 0.00013329081632653062, + "loss": 1.4899, + "step": 523 + }, + { + "epoch": 10.6, + "learning_rate": 0.00013316326530612247, + "loss": 1.3952, + "step": 524 + }, + { + "epoch": 10.62, + "learning_rate": 0.00013303571428571428, + "loss": 1.4116, + "step": 525 + }, + { + "epoch": 10.64, + "learning_rate": 0.00013290816326530614, + "loss": 1.4583, + "step": 526 + }, + { + "epoch": 10.66, + "learning_rate": 0.00013278061224489797, + "loss": 1.4466, + "step": 527 + }, + { + "epoch": 10.68, + "learning_rate": 0.0001326530612244898, + "loss": 1.4242, + "step": 528 + }, + { + "epoch": 10.7, + "learning_rate": 0.00013252551020408163, + "loss": 1.3717, + "step": 529 + }, + { + "epoch": 10.72, + "learning_rate": 0.0001323979591836735, + "loss": 1.4583, + "step": 530 + }, + { + "epoch": 10.74, + "learning_rate": 0.0001322704081632653, + "loss": 1.4185, + "step": 531 + }, + { + "epoch": 10.76, + "learning_rate": 0.00013214285714285715, + "loss": 1.4287, + "step": 532 + }, + { + "epoch": 10.78, + "learning_rate": 0.00013201530612244898, + "loss": 1.4385, + "step": 533 + }, + { + "epoch": 10.8, + "learning_rate": 0.00013188775510204084, + "loss": 1.453, + "step": 534 + }, + { + "epoch": 10.83, + "learning_rate": 0.00013176020408163264, + "loss": 1.4161, + "step": 535 + }, + { + "epoch": 10.85, + "learning_rate": 0.0001316326530612245, + "loss": 1.457, + "step": 536 + }, + { + "epoch": 10.87, + "learning_rate": 0.00013150510204081633, + "loss": 1.4367, + "step": 537 + }, + { + "epoch": 10.89, + "learning_rate": 0.00013137755102040816, + "loss": 1.4256, + "step": 538 + }, + { + "epoch": 10.91, + "learning_rate": 0.00013125000000000002, + "loss": 1.424, + "step": 539 + }, + { + "epoch": 10.93, + "learning_rate": 0.00013112244897959185, + "loss": 1.3923, + "step": 540 + }, + { + "epoch": 10.95, + "learning_rate": 0.00013099489795918368, + "loss": 1.4225, + "step": 541 + }, + { + "epoch": 10.97, + "learning_rate": 0.0001308673469387755, + "loss": 1.3969, + "step": 542 + }, + { + "epoch": 10.99, + "learning_rate": 0.00013073979591836737, + "loss": 1.4446, + "step": 543 + }, + { + "epoch": 11.01, + "learning_rate": 0.00013061224489795917, + "loss": 1.4375, + "step": 544 + }, + { + "epoch": 11.03, + "learning_rate": 0.00013048469387755103, + "loss": 1.4064, + "step": 545 + }, + { + "epoch": 11.05, + "learning_rate": 0.00013035714285714286, + "loss": 1.3454, + "step": 546 + }, + { + "epoch": 11.07, + "learning_rate": 0.00013022959183673472, + "loss": 1.3234, + "step": 547 + }, + { + "epoch": 11.09, + "learning_rate": 0.00013010204081632652, + "loss": 1.3759, + "step": 548 + }, + { + "epoch": 11.11, + "learning_rate": 0.00012997448979591838, + "loss": 1.4221, + "step": 549 + }, + { + "epoch": 11.13, + "learning_rate": 0.0001298469387755102, + "loss": 1.4261, + "step": 550 + }, + { + "epoch": 11.15, + "learning_rate": 0.00012971938775510204, + "loss": 1.3341, + "step": 551 + }, + { + "epoch": 11.17, + "learning_rate": 0.00012959183673469387, + "loss": 1.3994, + "step": 552 + }, + { + "epoch": 11.19, + "learning_rate": 0.00012946428571428573, + "loss": 1.3894, + "step": 553 + }, + { + "epoch": 11.21, + "learning_rate": 0.00012933673469387754, + "loss": 1.3585, + "step": 554 + }, + { + "epoch": 
11.23, + "learning_rate": 0.0001292091836734694, + "loss": 1.3763, + "step": 555 + }, + { + "epoch": 11.25, + "learning_rate": 0.00012908163265306123, + "loss": 1.3623, + "step": 556 + }, + { + "epoch": 11.27, + "learning_rate": 0.00012895408163265306, + "loss": 1.3907, + "step": 557 + }, + { + "epoch": 11.29, + "learning_rate": 0.0001288265306122449, + "loss": 1.3807, + "step": 558 + }, + { + "epoch": 11.31, + "learning_rate": 0.00012869897959183674, + "loss": 1.4045, + "step": 559 + }, + { + "epoch": 11.33, + "learning_rate": 0.00012857142857142858, + "loss": 1.4038, + "step": 560 + }, + { + "epoch": 11.35, + "learning_rate": 0.0001284438775510204, + "loss": 1.3466, + "step": 561 + }, + { + "epoch": 11.37, + "learning_rate": 0.00012831632653061226, + "loss": 1.3449, + "step": 562 + }, + { + "epoch": 11.39, + "learning_rate": 0.0001281887755102041, + "loss": 1.3866, + "step": 563 + }, + { + "epoch": 11.41, + "learning_rate": 0.00012806122448979593, + "loss": 1.3106, + "step": 564 + }, + { + "epoch": 11.43, + "learning_rate": 0.00012793367346938776, + "loss": 1.4414, + "step": 565 + }, + { + "epoch": 11.45, + "learning_rate": 0.00012780612244897962, + "loss": 1.3737, + "step": 566 + }, + { + "epoch": 11.47, + "learning_rate": 0.00012767857142857142, + "loss": 1.4053, + "step": 567 + }, + { + "epoch": 11.49, + "learning_rate": 0.00012755102040816328, + "loss": 1.4561, + "step": 568 + }, + { + "epoch": 11.51, + "learning_rate": 0.0001274234693877551, + "loss": 1.3684, + "step": 569 + }, + { + "epoch": 11.53, + "learning_rate": 0.00012729591836734697, + "loss": 1.3117, + "step": 570 + }, + { + "epoch": 11.55, + "learning_rate": 0.00012716836734693877, + "loss": 1.3474, + "step": 571 + }, + { + "epoch": 11.57, + "learning_rate": 0.00012704081632653063, + "loss": 1.3804, + "step": 572 + }, + { + "epoch": 11.59, + "learning_rate": 0.00012691326530612246, + "loss": 1.3656, + "step": 573 + }, + { + "epoch": 11.61, + "learning_rate": 0.0001267857142857143, + "loss": 1.3133, + "step": 574 + }, + { + "epoch": 11.63, + "learning_rate": 0.00012665816326530612, + "loss": 1.4077, + "step": 575 + }, + { + "epoch": 11.65, + "learning_rate": 0.00012653061224489798, + "loss": 1.4087, + "step": 576 + }, + { + "epoch": 11.67, + "learning_rate": 0.00012640306122448978, + "loss": 1.3524, + "step": 577 + }, + { + "epoch": 11.7, + "learning_rate": 0.00012627551020408164, + "loss": 1.3481, + "step": 578 + }, + { + "epoch": 11.72, + "learning_rate": 0.00012614795918367347, + "loss": 1.4497, + "step": 579 + }, + { + "epoch": 11.74, + "learning_rate": 0.0001260204081632653, + "loss": 1.3866, + "step": 580 + }, + { + "epoch": 11.76, + "learning_rate": 0.00012589285714285713, + "loss": 1.42, + "step": 581 + }, + { + "epoch": 11.78, + "learning_rate": 0.000125765306122449, + "loss": 1.3562, + "step": 582 + }, + { + "epoch": 11.8, + "learning_rate": 0.00012563775510204082, + "loss": 1.3249, + "step": 583 + }, + { + "epoch": 11.82, + "learning_rate": 0.00012551020408163265, + "loss": 1.4277, + "step": 584 + }, + { + "epoch": 11.84, + "learning_rate": 0.0001253826530612245, + "loss": 1.3734, + "step": 585 + }, + { + "epoch": 11.86, + "learning_rate": 0.00012525510204081634, + "loss": 1.3765, + "step": 586 + }, + { + "epoch": 11.88, + "learning_rate": 0.00012512755102040817, + "loss": 1.4153, + "step": 587 + }, + { + "epoch": 11.9, + "learning_rate": 0.000125, + "loss": 1.3847, + "step": 588 + }, + { + "epoch": 11.92, + "learning_rate": 0.00012487244897959186, + "loss": 1.3824, + "step": 589 + }, + { + "epoch": 11.94, + 
"learning_rate": 0.00012474489795918366, + "loss": 1.3938, + "step": 590 + }, + { + "epoch": 11.96, + "learning_rate": 0.00012461734693877552, + "loss": 1.4143, + "step": 591 + }, + { + "epoch": 11.98, + "learning_rate": 0.00012448979591836735, + "loss": 1.3794, + "step": 592 + }, + { + "epoch": 12.0, + "learning_rate": 0.00012436224489795918, + "loss": 1.3755, + "step": 593 + }, + { + "epoch": 12.02, + "learning_rate": 0.00012423469387755101, + "loss": 1.3736, + "step": 594 + }, + { + "epoch": 12.04, + "learning_rate": 0.00012410714285714287, + "loss": 1.2957, + "step": 595 + }, + { + "epoch": 12.06, + "learning_rate": 0.0001239795918367347, + "loss": 1.2996, + "step": 596 + }, + { + "epoch": 12.08, + "learning_rate": 0.00012385204081632653, + "loss": 1.3648, + "step": 597 + }, + { + "epoch": 12.1, + "learning_rate": 0.00012372448979591837, + "loss": 1.3031, + "step": 598 + }, + { + "epoch": 12.12, + "learning_rate": 0.00012359693877551022, + "loss": 1.2933, + "step": 599 + }, + { + "epoch": 12.14, + "learning_rate": 0.00012346938775510203, + "loss": 1.322, + "step": 600 + }, + { + "epoch": 12.16, + "learning_rate": 0.00012334183673469389, + "loss": 1.3123, + "step": 601 + }, + { + "epoch": 12.18, + "learning_rate": 0.00012321428571428572, + "loss": 1.3187, + "step": 602 + }, + { + "epoch": 12.2, + "learning_rate": 0.00012308673469387755, + "loss": 1.3353, + "step": 603 + }, + { + "epoch": 12.22, + "learning_rate": 0.0001229591836734694, + "loss": 1.3221, + "step": 604 + }, + { + "epoch": 12.24, + "learning_rate": 0.00012283163265306124, + "loss": 1.3458, + "step": 605 + }, + { + "epoch": 12.26, + "learning_rate": 0.00012270408163265307, + "loss": 1.275, + "step": 606 + }, + { + "epoch": 12.28, + "learning_rate": 0.0001225765306122449, + "loss": 1.3455, + "step": 607 + }, + { + "epoch": 12.3, + "learning_rate": 0.00012244897959183676, + "loss": 1.2769, + "step": 608 + }, + { + "epoch": 12.32, + "learning_rate": 0.00012232142857142859, + "loss": 1.3201, + "step": 609 + }, + { + "epoch": 12.34, + "learning_rate": 0.00012219387755102042, + "loss": 1.3073, + "step": 610 + }, + { + "epoch": 12.36, + "learning_rate": 0.00012206632653061225, + "loss": 1.3103, + "step": 611 + }, + { + "epoch": 12.38, + "learning_rate": 0.00012193877551020409, + "loss": 1.4437, + "step": 612 + }, + { + "epoch": 12.4, + "learning_rate": 0.00012181122448979591, + "loss": 1.3086, + "step": 613 + }, + { + "epoch": 12.42, + "learning_rate": 0.00012168367346938775, + "loss": 1.3867, + "step": 614 + }, + { + "epoch": 12.44, + "learning_rate": 0.0001215561224489796, + "loss": 1.2565, + "step": 615 + }, + { + "epoch": 12.46, + "learning_rate": 0.00012142857142857143, + "loss": 1.335, + "step": 616 + }, + { + "epoch": 12.48, + "learning_rate": 0.00012130102040816327, + "loss": 1.3423, + "step": 617 + }, + { + "epoch": 12.5, + "learning_rate": 0.00012117346938775512, + "loss": 1.3433, + "step": 618 + }, + { + "epoch": 12.52, + "learning_rate": 0.00012104591836734695, + "loss": 1.3387, + "step": 619 + }, + { + "epoch": 12.55, + "learning_rate": 0.00012091836734693878, + "loss": 1.3923, + "step": 620 + }, + { + "epoch": 12.57, + "learning_rate": 0.00012079081632653062, + "loss": 1.3774, + "step": 621 + }, + { + "epoch": 12.59, + "learning_rate": 0.00012066326530612247, + "loss": 1.3203, + "step": 622 + }, + { + "epoch": 12.61, + "learning_rate": 0.00012053571428571429, + "loss": 1.2924, + "step": 623 + }, + { + "epoch": 12.63, + "learning_rate": 0.00012040816326530613, + "loss": 1.3292, + "step": 624 + }, + { + "epoch": 12.65, 
+ "learning_rate": 0.00012028061224489798, + "loss": 1.3161, + "step": 625 + }, + { + "epoch": 12.67, + "learning_rate": 0.00012015306122448979, + "loss": 1.352, + "step": 626 + }, + { + "epoch": 12.69, + "learning_rate": 0.00012002551020408164, + "loss": 1.3577, + "step": 627 + }, + { + "epoch": 12.71, + "learning_rate": 0.00011989795918367348, + "loss": 1.3575, + "step": 628 + }, + { + "epoch": 12.73, + "learning_rate": 0.0001197704081632653, + "loss": 1.3727, + "step": 629 + }, + { + "epoch": 12.75, + "learning_rate": 0.00011964285714285714, + "loss": 1.3312, + "step": 630 + }, + { + "epoch": 12.77, + "learning_rate": 0.00011951530612244899, + "loss": 1.3378, + "step": 631 + }, + { + "epoch": 12.79, + "learning_rate": 0.00011938775510204083, + "loss": 1.295, + "step": 632 + }, + { + "epoch": 12.81, + "learning_rate": 0.00011926020408163265, + "loss": 1.3447, + "step": 633 + }, + { + "epoch": 12.83, + "learning_rate": 0.0001191326530612245, + "loss": 1.3835, + "step": 634 + }, + { + "epoch": 12.85, + "learning_rate": 0.00011900510204081634, + "loss": 1.3222, + "step": 635 + }, + { + "epoch": 12.87, + "learning_rate": 0.00011887755102040817, + "loss": 1.2851, + "step": 636 + }, + { + "epoch": 12.89, + "learning_rate": 0.00011875, + "loss": 1.2723, + "step": 637 + }, + { + "epoch": 12.91, + "learning_rate": 0.00011862244897959184, + "loss": 1.3924, + "step": 638 + }, + { + "epoch": 12.93, + "learning_rate": 0.00011849489795918368, + "loss": 1.4625, + "step": 639 + }, + { + "epoch": 12.95, + "learning_rate": 0.00011836734693877552, + "loss": 1.3245, + "step": 640 + }, + { + "epoch": 12.97, + "learning_rate": 0.00011823979591836736, + "loss": 1.4042, + "step": 641 + }, + { + "epoch": 12.99, + "learning_rate": 0.00011811224489795918, + "loss": 1.3761, + "step": 642 + }, + { + "epoch": 13.01, + "learning_rate": 0.00011798469387755103, + "loss": 1.3376, + "step": 643 + }, + { + "epoch": 13.03, + "learning_rate": 0.00011785714285714287, + "loss": 1.2174, + "step": 644 + }, + { + "epoch": 13.05, + "learning_rate": 0.00011772959183673471, + "loss": 1.3602, + "step": 645 + }, + { + "epoch": 13.07, + "learning_rate": 0.00011760204081632653, + "loss": 1.3002, + "step": 646 + }, + { + "epoch": 13.09, + "learning_rate": 0.00011747448979591838, + "loss": 1.2262, + "step": 647 + }, + { + "epoch": 13.11, + "learning_rate": 0.00011734693877551022, + "loss": 1.3048, + "step": 648 + }, + { + "epoch": 13.13, + "learning_rate": 0.00011721938775510204, + "loss": 1.2231, + "step": 649 + }, + { + "epoch": 13.15, + "learning_rate": 0.00011709183673469388, + "loss": 1.2996, + "step": 650 + }, + { + "epoch": 13.17, + "learning_rate": 0.00011696428571428573, + "loss": 1.2708, + "step": 651 + }, + { + "epoch": 13.19, + "learning_rate": 0.00011683673469387754, + "loss": 1.2776, + "step": 652 + }, + { + "epoch": 13.21, + "learning_rate": 0.00011670918367346939, + "loss": 1.248, + "step": 653 + }, + { + "epoch": 13.23, + "learning_rate": 0.00011658163265306123, + "loss": 1.2582, + "step": 654 + }, + { + "epoch": 13.25, + "learning_rate": 0.00011645408163265305, + "loss": 1.3011, + "step": 655 + }, + { + "epoch": 13.27, + "learning_rate": 0.0001163265306122449, + "loss": 1.2969, + "step": 656 + }, + { + "epoch": 13.29, + "learning_rate": 0.00011619897959183674, + "loss": 1.2454, + "step": 657 + }, + { + "epoch": 13.31, + "learning_rate": 0.00011607142857142858, + "loss": 1.1914, + "step": 658 + }, + { + "epoch": 13.33, + "learning_rate": 0.00011594387755102041, + "loss": 1.34, + "step": 659 + }, + { + "epoch": 13.35, + 
"learning_rate": 0.00011581632653061225, + "loss": 1.2828, + "step": 660 + }, + { + "epoch": 13.37, + "learning_rate": 0.00011568877551020409, + "loss": 1.2962, + "step": 661 + }, + { + "epoch": 13.39, + "learning_rate": 0.00011556122448979592, + "loss": 1.3334, + "step": 662 + }, + { + "epoch": 13.42, + "learning_rate": 0.00011543367346938776, + "loss": 1.2832, + "step": 663 + }, + { + "epoch": 13.44, + "learning_rate": 0.00011530612244897961, + "loss": 1.3012, + "step": 664 + }, + { + "epoch": 13.46, + "learning_rate": 0.00011517857142857143, + "loss": 1.2857, + "step": 665 + }, + { + "epoch": 13.48, + "learning_rate": 0.00011505102040816327, + "loss": 1.2855, + "step": 666 + }, + { + "epoch": 13.5, + "learning_rate": 0.00011492346938775512, + "loss": 1.3077, + "step": 667 + }, + { + "epoch": 13.52, + "learning_rate": 0.00011479591836734696, + "loss": 1.3139, + "step": 668 + }, + { + "epoch": 13.54, + "learning_rate": 0.00011466836734693878, + "loss": 1.3138, + "step": 669 + }, + { + "epoch": 13.56, + "learning_rate": 0.00011454081632653062, + "loss": 1.2808, + "step": 670 + }, + { + "epoch": 13.58, + "learning_rate": 0.00011441326530612247, + "loss": 1.2492, + "step": 671 + }, + { + "epoch": 13.6, + "learning_rate": 0.00011428571428571428, + "loss": 1.2027, + "step": 672 + }, + { + "epoch": 13.62, + "learning_rate": 0.00011415816326530613, + "loss": 1.33, + "step": 673 + }, + { + "epoch": 13.64, + "learning_rate": 0.00011403061224489797, + "loss": 1.3112, + "step": 674 + }, + { + "epoch": 13.66, + "learning_rate": 0.00011390306122448979, + "loss": 1.2772, + "step": 675 + }, + { + "epoch": 13.68, + "learning_rate": 0.00011377551020408163, + "loss": 1.2701, + "step": 676 + }, + { + "epoch": 13.7, + "learning_rate": 0.00011364795918367348, + "loss": 1.1973, + "step": 677 + }, + { + "epoch": 13.72, + "learning_rate": 0.0001135204081632653, + "loss": 1.3124, + "step": 678 + }, + { + "epoch": 13.74, + "learning_rate": 0.00011339285714285714, + "loss": 1.3085, + "step": 679 + }, + { + "epoch": 13.76, + "learning_rate": 0.00011326530612244898, + "loss": 1.3457, + "step": 680 + }, + { + "epoch": 13.78, + "learning_rate": 0.00011313775510204083, + "loss": 1.3338, + "step": 681 + }, + { + "epoch": 13.8, + "learning_rate": 0.00011301020408163266, + "loss": 1.2753, + "step": 682 + }, + { + "epoch": 13.82, + "learning_rate": 0.00011288265306122449, + "loss": 1.2786, + "step": 683 + }, + { + "epoch": 13.84, + "learning_rate": 0.00011275510204081634, + "loss": 1.2584, + "step": 684 + }, + { + "epoch": 13.86, + "learning_rate": 0.00011262755102040817, + "loss": 1.2779, + "step": 685 + }, + { + "epoch": 13.88, + "learning_rate": 0.00011250000000000001, + "loss": 1.3502, + "step": 686 + }, + { + "epoch": 13.9, + "learning_rate": 0.00011237244897959185, + "loss": 1.3251, + "step": 687 + }, + { + "epoch": 13.92, + "learning_rate": 0.00011224489795918367, + "loss": 1.273, + "step": 688 + }, + { + "epoch": 13.94, + "learning_rate": 0.00011211734693877552, + "loss": 1.3341, + "step": 689 + }, + { + "epoch": 13.96, + "learning_rate": 0.00011198979591836736, + "loss": 1.2654, + "step": 690 + }, + { + "epoch": 13.98, + "learning_rate": 0.00011186224489795918, + "loss": 1.3333, + "step": 691 + }, + { + "epoch": 14.0, + "learning_rate": 0.00011173469387755102, + "loss": 1.3246, + "step": 692 + }, + { + "epoch": 14.02, + "learning_rate": 0.00011160714285714287, + "loss": 1.2547, + "step": 693 + }, + { + "epoch": 14.04, + "learning_rate": 0.00011147959183673471, + "loss": 1.208, + "step": 694 + }, + { + "epoch": 
14.06, + "learning_rate": 0.00011135204081632653, + "loss": 1.223, + "step": 695 + }, + { + "epoch": 14.08, + "learning_rate": 0.00011122448979591837, + "loss": 1.2483, + "step": 696 + }, + { + "epoch": 14.1, + "learning_rate": 0.00011109693877551022, + "loss": 1.2823, + "step": 697 + }, + { + "epoch": 14.12, + "learning_rate": 0.00011096938775510204, + "loss": 1.2013, + "step": 698 + }, + { + "epoch": 14.14, + "learning_rate": 0.00011084183673469388, + "loss": 1.1883, + "step": 699 + }, + { + "epoch": 14.16, + "learning_rate": 0.00011071428571428572, + "loss": 1.2364, + "step": 700 + }, + { + "epoch": 14.18, + "learning_rate": 0.00011058673469387754, + "loss": 1.2069, + "step": 701 + }, + { + "epoch": 14.2, + "learning_rate": 0.00011045918367346939, + "loss": 1.1968, + "step": 702 + }, + { + "epoch": 14.22, + "learning_rate": 0.00011033163265306123, + "loss": 1.2236, + "step": 703 + }, + { + "epoch": 14.24, + "learning_rate": 0.00011020408163265306, + "loss": 1.1942, + "step": 704 + }, + { + "epoch": 14.26, + "learning_rate": 0.0001100765306122449, + "loss": 1.2561, + "step": 705 + }, + { + "epoch": 14.29, + "learning_rate": 0.00010994897959183674, + "loss": 1.1839, + "step": 706 + }, + { + "epoch": 14.31, + "learning_rate": 0.00010982142857142858, + "loss": 1.2128, + "step": 707 + }, + { + "epoch": 14.33, + "learning_rate": 0.00010969387755102041, + "loss": 1.3086, + "step": 708 + }, + { + "epoch": 14.35, + "learning_rate": 0.00010956632653061226, + "loss": 1.2379, + "step": 709 + }, + { + "epoch": 14.37, + "learning_rate": 0.0001094387755102041, + "loss": 1.176, + "step": 710 + }, + { + "epoch": 14.39, + "learning_rate": 0.00010931122448979592, + "loss": 1.2105, + "step": 711 + }, + { + "epoch": 14.41, + "learning_rate": 0.00010918367346938776, + "loss": 1.2149, + "step": 712 + }, + { + "epoch": 14.43, + "learning_rate": 0.0001090561224489796, + "loss": 1.2392, + "step": 713 + }, + { + "epoch": 14.45, + "learning_rate": 0.00010892857142857142, + "loss": 1.2471, + "step": 714 + }, + { + "epoch": 14.47, + "learning_rate": 0.00010880102040816327, + "loss": 1.2561, + "step": 715 + }, + { + "epoch": 14.49, + "learning_rate": 0.00010867346938775511, + "loss": 1.2179, + "step": 716 + }, + { + "epoch": 14.51, + "learning_rate": 0.00010854591836734696, + "loss": 1.2459, + "step": 717 + }, + { + "epoch": 14.53, + "learning_rate": 0.00010841836734693877, + "loss": 1.2933, + "step": 718 + }, + { + "epoch": 14.55, + "learning_rate": 0.00010829081632653062, + "loss": 1.2862, + "step": 719 + }, + { + "epoch": 14.57, + "learning_rate": 0.00010816326530612246, + "loss": 1.2976, + "step": 720 + }, + { + "epoch": 14.59, + "learning_rate": 0.00010803571428571428, + "loss": 1.231, + "step": 721 + }, + { + "epoch": 14.61, + "learning_rate": 0.00010790816326530613, + "loss": 1.2464, + "step": 722 + }, + { + "epoch": 14.63, + "learning_rate": 0.00010778061224489797, + "loss": 1.2181, + "step": 723 + }, + { + "epoch": 14.65, + "learning_rate": 0.00010765306122448979, + "loss": 1.3307, + "step": 724 + }, + { + "epoch": 14.67, + "learning_rate": 0.00010752551020408163, + "loss": 1.1723, + "step": 725 + }, + { + "epoch": 14.69, + "learning_rate": 0.00010739795918367348, + "loss": 1.1528, + "step": 726 + }, + { + "epoch": 14.71, + "learning_rate": 0.0001072704081632653, + "loss": 1.215, + "step": 727 + }, + { + "epoch": 14.73, + "learning_rate": 0.00010714285714285715, + "loss": 1.2624, + "step": 728 + }, + { + "epoch": 14.75, + "learning_rate": 0.00010701530612244898, + "loss": 1.3117, + "step": 729 + }, + { + 
"epoch": 14.77, + "learning_rate": 0.00010688775510204083, + "loss": 1.2572, + "step": 730 + }, + { + "epoch": 14.79, + "learning_rate": 0.00010676020408163266, + "loss": 1.222, + "step": 731 + }, + { + "epoch": 14.81, + "learning_rate": 0.0001066326530612245, + "loss": 1.2881, + "step": 732 + }, + { + "epoch": 14.83, + "learning_rate": 0.00010650510204081635, + "loss": 1.2676, + "step": 733 + }, + { + "epoch": 14.85, + "learning_rate": 0.00010637755102040816, + "loss": 1.2734, + "step": 734 + }, + { + "epoch": 14.87, + "learning_rate": 0.00010625000000000001, + "loss": 1.2885, + "step": 735 + }, + { + "epoch": 14.89, + "learning_rate": 0.00010612244897959185, + "loss": 1.2764, + "step": 736 + }, + { + "epoch": 14.91, + "learning_rate": 0.00010599489795918367, + "loss": 1.3267, + "step": 737 + }, + { + "epoch": 14.93, + "learning_rate": 0.00010586734693877551, + "loss": 1.2445, + "step": 738 + }, + { + "epoch": 14.95, + "learning_rate": 0.00010573979591836736, + "loss": 1.3359, + "step": 739 + }, + { + "epoch": 14.97, + "learning_rate": 0.00010561224489795918, + "loss": 1.2508, + "step": 740 + }, + { + "epoch": 14.99, + "learning_rate": 0.00010548469387755102, + "loss": 1.2227, + "step": 741 + }, + { + "epoch": 15.01, + "learning_rate": 0.00010535714285714286, + "loss": 1.1889, + "step": 742 + }, + { + "epoch": 15.03, + "learning_rate": 0.00010522959183673471, + "loss": 1.1919, + "step": 743 + }, + { + "epoch": 15.05, + "learning_rate": 0.00010510204081632653, + "loss": 1.2383, + "step": 744 + }, + { + "epoch": 15.07, + "learning_rate": 0.00010497448979591837, + "loss": 1.2401, + "step": 745 + }, + { + "epoch": 15.09, + "learning_rate": 0.00010484693877551021, + "loss": 1.2015, + "step": 746 + }, + { + "epoch": 15.11, + "learning_rate": 0.00010471938775510203, + "loss": 1.1509, + "step": 747 + }, + { + "epoch": 15.13, + "learning_rate": 0.00010459183673469388, + "loss": 1.1878, + "step": 748 + }, + { + "epoch": 15.16, + "learning_rate": 0.00010446428571428572, + "loss": 1.1706, + "step": 749 + }, + { + "epoch": 15.18, + "learning_rate": 0.00010433673469387755, + "loss": 1.1285, + "step": 750 + }, + { + "epoch": 15.2, + "learning_rate": 0.0001042091836734694, + "loss": 1.1608, + "step": 751 + }, + { + "epoch": 15.22, + "learning_rate": 0.00010408163265306123, + "loss": 1.1178, + "step": 752 + }, + { + "epoch": 15.24, + "learning_rate": 0.00010395408163265306, + "loss": 1.1293, + "step": 753 + }, + { + "epoch": 15.26, + "learning_rate": 0.0001038265306122449, + "loss": 1.2306, + "step": 754 + }, + { + "epoch": 15.28, + "learning_rate": 0.00010369897959183675, + "loss": 1.1541, + "step": 755 + }, + { + "epoch": 15.3, + "learning_rate": 0.00010357142857142859, + "loss": 1.1702, + "step": 756 + }, + { + "epoch": 15.32, + "learning_rate": 0.00010344387755102041, + "loss": 1.2119, + "step": 757 + }, + { + "epoch": 15.34, + "learning_rate": 0.00010331632653061225, + "loss": 1.2239, + "step": 758 + }, + { + "epoch": 15.36, + "learning_rate": 0.0001031887755102041, + "loss": 1.2019, + "step": 759 + }, + { + "epoch": 15.38, + "learning_rate": 0.00010306122448979591, + "loss": 1.2197, + "step": 760 + }, + { + "epoch": 15.4, + "learning_rate": 0.00010293367346938776, + "loss": 1.1769, + "step": 761 + }, + { + "epoch": 15.42, + "learning_rate": 0.0001028061224489796, + "loss": 1.1907, + "step": 762 + }, + { + "epoch": 15.44, + "learning_rate": 0.00010267857142857142, + "loss": 1.2089, + "step": 763 + }, + { + "epoch": 15.46, + "learning_rate": 0.00010255102040816327, + "loss": 1.1335, + "step": 764 + }, 
+ { + "epoch": 15.48, + "learning_rate": 0.00010242346938775511, + "loss": 1.1633, + "step": 765 + }, + { + "epoch": 15.5, + "learning_rate": 0.00010229591836734695, + "loss": 1.1578, + "step": 766 + }, + { + "epoch": 15.52, + "learning_rate": 0.00010216836734693877, + "loss": 1.2236, + "step": 767 + }, + { + "epoch": 15.54, + "learning_rate": 0.00010204081632653062, + "loss": 1.1941, + "step": 768 + }, + { + "epoch": 15.56, + "learning_rate": 0.00010191326530612246, + "loss": 1.2666, + "step": 769 + }, + { + "epoch": 15.58, + "learning_rate": 0.00010178571428571428, + "loss": 1.1232, + "step": 770 + }, + { + "epoch": 15.6, + "learning_rate": 0.00010165816326530612, + "loss": 1.2242, + "step": 771 + }, + { + "epoch": 15.62, + "learning_rate": 0.00010153061224489797, + "loss": 1.1852, + "step": 772 + }, + { + "epoch": 15.64, + "learning_rate": 0.0001014030612244898, + "loss": 1.2626, + "step": 773 + }, + { + "epoch": 15.66, + "learning_rate": 0.00010127551020408164, + "loss": 1.1873, + "step": 774 + }, + { + "epoch": 15.68, + "learning_rate": 0.00010114795918367349, + "loss": 1.3005, + "step": 775 + }, + { + "epoch": 15.7, + "learning_rate": 0.0001010204081632653, + "loss": 1.1904, + "step": 776 + }, + { + "epoch": 15.72, + "learning_rate": 0.00010089285714285715, + "loss": 1.2927, + "step": 777 + }, + { + "epoch": 15.74, + "learning_rate": 0.00010076530612244899, + "loss": 1.179, + "step": 778 + }, + { + "epoch": 15.76, + "learning_rate": 0.00010063775510204084, + "loss": 1.2027, + "step": 779 + }, + { + "epoch": 15.78, + "learning_rate": 0.00010051020408163265, + "loss": 1.2428, + "step": 780 + }, + { + "epoch": 15.8, + "learning_rate": 0.0001003826530612245, + "loss": 1.2324, + "step": 781 + }, + { + "epoch": 15.82, + "learning_rate": 0.00010025510204081634, + "loss": 1.1251, + "step": 782 + }, + { + "epoch": 15.84, + "learning_rate": 0.00010012755102040816, + "loss": 1.2405, + "step": 783 + }, + { + "epoch": 15.86, + "learning_rate": 0.0001, + "loss": 1.2005, + "step": 784 + }, + { + "epoch": 15.88, + "learning_rate": 9.987244897959184e-05, + "loss": 1.2259, + "step": 785 + }, + { + "epoch": 15.9, + "learning_rate": 9.974489795918368e-05, + "loss": 1.1576, + "step": 786 + }, + { + "epoch": 15.92, + "learning_rate": 9.961734693877551e-05, + "loss": 1.1834, + "step": 787 + }, + { + "epoch": 15.94, + "learning_rate": 9.948979591836736e-05, + "loss": 1.2396, + "step": 788 + }, + { + "epoch": 15.96, + "learning_rate": 9.936224489795919e-05, + "loss": 1.1865, + "step": 789 + }, + { + "epoch": 15.98, + "learning_rate": 9.923469387755102e-05, + "loss": 1.2356, + "step": 790 + }, + { + "epoch": 16.01, + "learning_rate": 9.910714285714286e-05, + "loss": 1.2639, + "step": 791 + }, + { + "epoch": 16.03, + "learning_rate": 9.897959183673469e-05, + "loss": 1.1216, + "step": 792 + }, + { + "epoch": 16.05, + "learning_rate": 9.885204081632652e-05, + "loss": 1.1051, + "step": 793 + }, + { + "epoch": 16.07, + "learning_rate": 9.872448979591837e-05, + "loss": 1.0864, + "step": 794 + }, + { + "epoch": 16.09, + "learning_rate": 9.859693877551021e-05, + "loss": 1.182, + "step": 795 + }, + { + "epoch": 16.11, + "learning_rate": 9.846938775510204e-05, + "loss": 1.1272, + "step": 796 + }, + { + "epoch": 16.13, + "learning_rate": 9.834183673469389e-05, + "loss": 1.1946, + "step": 797 + }, + { + "epoch": 16.15, + "learning_rate": 9.821428571428572e-05, + "loss": 1.0875, + "step": 798 + }, + { + "epoch": 16.17, + "learning_rate": 9.808673469387756e-05, + "loss": 1.1671, + "step": 799 + }, + { + "epoch": 16.19, + 
"learning_rate": 9.79591836734694e-05, + "loss": 1.1502, + "step": 800 + }, + { + "epoch": 16.21, + "learning_rate": 9.783163265306124e-05, + "loss": 1.19, + "step": 801 + }, + { + "epoch": 16.23, + "learning_rate": 9.770408163265307e-05, + "loss": 1.1258, + "step": 802 + }, + { + "epoch": 16.25, + "learning_rate": 9.75765306122449e-05, + "loss": 1.1765, + "step": 803 + }, + { + "epoch": 16.27, + "learning_rate": 9.744897959183674e-05, + "loss": 1.1217, + "step": 804 + }, + { + "epoch": 16.29, + "learning_rate": 9.732142857142858e-05, + "loss": 1.1293, + "step": 805 + }, + { + "epoch": 16.31, + "learning_rate": 9.719387755102042e-05, + "loss": 1.17, + "step": 806 + }, + { + "epoch": 16.33, + "learning_rate": 9.706632653061225e-05, + "loss": 1.17, + "step": 807 + }, + { + "epoch": 16.35, + "learning_rate": 9.693877551020408e-05, + "loss": 1.2004, + "step": 808 + }, + { + "epoch": 16.37, + "learning_rate": 9.681122448979593e-05, + "loss": 1.1648, + "step": 809 + }, + { + "epoch": 16.39, + "learning_rate": 9.668367346938776e-05, + "loss": 1.0688, + "step": 810 + }, + { + "epoch": 16.41, + "learning_rate": 9.655612244897959e-05, + "loss": 1.1607, + "step": 811 + }, + { + "epoch": 16.43, + "learning_rate": 9.642857142857143e-05, + "loss": 1.1298, + "step": 812 + }, + { + "epoch": 16.45, + "learning_rate": 9.630102040816326e-05, + "loss": 1.1064, + "step": 813 + }, + { + "epoch": 16.47, + "learning_rate": 9.617346938775511e-05, + "loss": 1.1472, + "step": 814 + }, + { + "epoch": 16.49, + "learning_rate": 9.604591836734694e-05, + "loss": 1.1577, + "step": 815 + }, + { + "epoch": 16.51, + "learning_rate": 9.591836734693878e-05, + "loss": 1.1436, + "step": 816 + }, + { + "epoch": 16.53, + "learning_rate": 9.579081632653061e-05, + "loss": 1.1657, + "step": 817 + }, + { + "epoch": 16.55, + "learning_rate": 9.566326530612246e-05, + "loss": 1.1147, + "step": 818 + }, + { + "epoch": 16.57, + "learning_rate": 9.553571428571429e-05, + "loss": 1.1839, + "step": 819 + }, + { + "epoch": 16.59, + "learning_rate": 9.540816326530613e-05, + "loss": 1.1298, + "step": 820 + }, + { + "epoch": 16.61, + "learning_rate": 9.528061224489796e-05, + "loss": 1.2141, + "step": 821 + }, + { + "epoch": 16.63, + "learning_rate": 9.515306122448981e-05, + "loss": 1.2045, + "step": 822 + }, + { + "epoch": 16.65, + "learning_rate": 9.502551020408164e-05, + "loss": 1.1791, + "step": 823 + }, + { + "epoch": 16.67, + "learning_rate": 9.489795918367348e-05, + "loss": 1.1137, + "step": 824 + }, + { + "epoch": 16.69, + "learning_rate": 9.477040816326531e-05, + "loss": 1.1312, + "step": 825 + }, + { + "epoch": 16.71, + "learning_rate": 9.464285714285715e-05, + "loss": 1.1102, + "step": 826 + }, + { + "epoch": 16.73, + "learning_rate": 9.451530612244899e-05, + "loss": 1.1865, + "step": 827 + }, + { + "epoch": 16.75, + "learning_rate": 9.438775510204082e-05, + "loss": 1.1232, + "step": 828 + }, + { + "epoch": 16.77, + "learning_rate": 9.426020408163265e-05, + "loss": 1.2068, + "step": 829 + }, + { + "epoch": 16.79, + "learning_rate": 9.41326530612245e-05, + "loss": 1.1864, + "step": 830 + }, + { + "epoch": 16.81, + "learning_rate": 9.400510204081633e-05, + "loss": 1.2195, + "step": 831 + }, + { + "epoch": 16.83, + "learning_rate": 9.387755102040817e-05, + "loss": 1.2063, + "step": 832 + }, + { + "epoch": 16.85, + "learning_rate": 9.375e-05, + "loss": 1.1455, + "step": 833 + }, + { + "epoch": 16.88, + "learning_rate": 9.362244897959183e-05, + "loss": 1.1819, + "step": 834 + }, + { + "epoch": 16.9, + "learning_rate": 9.349489795918368e-05, + 
"loss": 1.1887, + "step": 835 + }, + { + "epoch": 16.92, + "learning_rate": 9.336734693877551e-05, + "loss": 1.1557, + "step": 836 + }, + { + "epoch": 16.94, + "learning_rate": 9.323979591836735e-05, + "loss": 1.2094, + "step": 837 + }, + { + "epoch": 16.96, + "learning_rate": 9.311224489795918e-05, + "loss": 1.1512, + "step": 838 + }, + { + "epoch": 16.98, + "learning_rate": 9.298469387755103e-05, + "loss": 1.1463, + "step": 839 + }, + { + "epoch": 17.0, + "learning_rate": 9.285714285714286e-05, + "loss": 1.155, + "step": 840 + }, + { + "epoch": 17.02, + "learning_rate": 9.27295918367347e-05, + "loss": 1.1292, + "step": 841 + }, + { + "epoch": 17.04, + "learning_rate": 9.260204081632653e-05, + "loss": 1.0996, + "step": 842 + }, + { + "epoch": 17.06, + "learning_rate": 9.247448979591838e-05, + "loss": 1.0662, + "step": 843 + }, + { + "epoch": 17.08, + "learning_rate": 9.234693877551021e-05, + "loss": 1.0931, + "step": 844 + }, + { + "epoch": 17.1, + "learning_rate": 9.221938775510205e-05, + "loss": 1.0727, + "step": 845 + }, + { + "epoch": 17.12, + "learning_rate": 9.209183673469388e-05, + "loss": 1.1043, + "step": 846 + }, + { + "epoch": 17.14, + "learning_rate": 9.196428571428572e-05, + "loss": 1.0594, + "step": 847 + }, + { + "epoch": 17.16, + "learning_rate": 9.183673469387756e-05, + "loss": 1.0952, + "step": 848 + }, + { + "epoch": 17.18, + "learning_rate": 9.170918367346939e-05, + "loss": 1.0639, + "step": 849 + }, + { + "epoch": 17.2, + "learning_rate": 9.158163265306124e-05, + "loss": 1.132, + "step": 850 + }, + { + "epoch": 17.22, + "learning_rate": 9.145408163265307e-05, + "loss": 1.1083, + "step": 851 + }, + { + "epoch": 17.24, + "learning_rate": 9.13265306122449e-05, + "loss": 1.1282, + "step": 852 + }, + { + "epoch": 17.26, + "learning_rate": 9.119897959183674e-05, + "loss": 1.0474, + "step": 853 + }, + { + "epoch": 17.28, + "learning_rate": 9.107142857142857e-05, + "loss": 1.1138, + "step": 854 + }, + { + "epoch": 17.3, + "learning_rate": 9.094387755102042e-05, + "loss": 1.1025, + "step": 855 + }, + { + "epoch": 17.32, + "learning_rate": 9.081632653061225e-05, + "loss": 1.0968, + "step": 856 + }, + { + "epoch": 17.34, + "learning_rate": 9.068877551020408e-05, + "loss": 1.1683, + "step": 857 + }, + { + "epoch": 17.36, + "learning_rate": 9.056122448979592e-05, + "loss": 1.0975, + "step": 858 + }, + { + "epoch": 17.38, + "learning_rate": 9.043367346938775e-05, + "loss": 1.1274, + "step": 859 + }, + { + "epoch": 17.4, + "learning_rate": 9.030612244897958e-05, + "loss": 1.0916, + "step": 860 + }, + { + "epoch": 17.42, + "learning_rate": 9.017857142857143e-05, + "loss": 1.0912, + "step": 861 + }, + { + "epoch": 17.44, + "learning_rate": 9.005102040816327e-05, + "loss": 1.0875, + "step": 862 + }, + { + "epoch": 17.46, + "learning_rate": 8.99234693877551e-05, + "loss": 1.05, + "step": 863 + }, + { + "epoch": 17.48, + "learning_rate": 8.979591836734695e-05, + "loss": 1.1418, + "step": 864 + }, + { + "epoch": 17.5, + "learning_rate": 8.966836734693878e-05, + "loss": 1.0609, + "step": 865 + }, + { + "epoch": 17.52, + "learning_rate": 8.954081632653062e-05, + "loss": 1.1611, + "step": 866 + }, + { + "epoch": 17.54, + "learning_rate": 8.941326530612245e-05, + "loss": 1.1065, + "step": 867 + }, + { + "epoch": 17.56, + "learning_rate": 8.92857142857143e-05, + "loss": 1.1611, + "step": 868 + }, + { + "epoch": 17.58, + "learning_rate": 8.915816326530613e-05, + "loss": 1.1398, + "step": 869 + }, + { + "epoch": 17.6, + "learning_rate": 8.903061224489796e-05, + "loss": 1.1055, + "step": 870 + }, 
+ { + "epoch": 17.62, + "learning_rate": 8.89030612244898e-05, + "loss": 1.1314, + "step": 871 + }, + { + "epoch": 17.64, + "learning_rate": 8.877551020408164e-05, + "loss": 1.1084, + "step": 872 + }, + { + "epoch": 17.66, + "learning_rate": 8.864795918367348e-05, + "loss": 1.1254, + "step": 873 + }, + { + "epoch": 17.68, + "learning_rate": 8.852040816326531e-05, + "loss": 1.142, + "step": 874 + }, + { + "epoch": 17.7, + "learning_rate": 8.839285714285714e-05, + "loss": 1.1371, + "step": 875 + }, + { + "epoch": 17.72, + "learning_rate": 8.826530612244899e-05, + "loss": 1.1092, + "step": 876 + }, + { + "epoch": 17.75, + "learning_rate": 8.813775510204082e-05, + "loss": 1.161, + "step": 877 + }, + { + "epoch": 17.77, + "learning_rate": 8.801020408163265e-05, + "loss": 1.1044, + "step": 878 + }, + { + "epoch": 17.79, + "learning_rate": 8.788265306122449e-05, + "loss": 1.117, + "step": 879 + }, + { + "epoch": 17.81, + "learning_rate": 8.775510204081632e-05, + "loss": 1.1262, + "step": 880 + }, + { + "epoch": 17.83, + "learning_rate": 8.762755102040817e-05, + "loss": 1.0829, + "step": 881 + }, + { + "epoch": 17.85, + "learning_rate": 8.75e-05, + "loss": 1.1393, + "step": 882 + }, + { + "epoch": 17.87, + "learning_rate": 8.737244897959183e-05, + "loss": 1.1781, + "step": 883 + }, + { + "epoch": 17.89, + "learning_rate": 8.724489795918367e-05, + "loss": 1.1582, + "step": 884 + }, + { + "epoch": 17.91, + "learning_rate": 8.711734693877552e-05, + "loss": 1.1469, + "step": 885 + }, + { + "epoch": 17.93, + "learning_rate": 8.698979591836735e-05, + "loss": 1.1494, + "step": 886 + }, + { + "epoch": 17.95, + "learning_rate": 8.68622448979592e-05, + "loss": 1.1251, + "step": 887 + }, + { + "epoch": 17.97, + "learning_rate": 8.673469387755102e-05, + "loss": 1.1624, + "step": 888 + }, + { + "epoch": 17.99, + "learning_rate": 8.660714285714287e-05, + "loss": 1.0842, + "step": 889 + }, + { + "epoch": 18.01, + "learning_rate": 8.64795918367347e-05, + "loss": 1.1944, + "step": 890 + }, + { + "epoch": 18.03, + "learning_rate": 8.635204081632653e-05, + "loss": 1.0642, + "step": 891 + }, + { + "epoch": 18.05, + "learning_rate": 8.622448979591838e-05, + "loss": 1.0459, + "step": 892 + }, + { + "epoch": 18.07, + "learning_rate": 8.60969387755102e-05, + "loss": 1.0941, + "step": 893 + }, + { + "epoch": 18.09, + "learning_rate": 8.596938775510205e-05, + "loss": 1.0457, + "step": 894 + }, + { + "epoch": 18.11, + "learning_rate": 8.584183673469388e-05, + "loss": 1.1033, + "step": 895 + }, + { + "epoch": 18.13, + "learning_rate": 8.571428571428571e-05, + "loss": 1.0756, + "step": 896 + }, + { + "epoch": 18.15, + "learning_rate": 8.558673469387756e-05, + "loss": 1.0615, + "step": 897 + }, + { + "epoch": 18.17, + "learning_rate": 8.545918367346939e-05, + "loss": 1.0828, + "step": 898 + }, + { + "epoch": 18.19, + "learning_rate": 8.533163265306123e-05, + "loss": 1.1158, + "step": 899 + }, + { + "epoch": 18.21, + "learning_rate": 8.520408163265306e-05, + "loss": 1.0133, + "step": 900 + } + ], + "logging_steps": 1, + "max_steps": 1568, + "num_train_epochs": 32, + "save_steps": 100, + "total_flos": 1.3323843726343987e+18, + "trial_name": null, + "trial_params": null +} diff --git a/checkpoint-900/training_args.bin b/checkpoint-900/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..db23e07d097c18532e52f58a70eb72d22e39c8c1 --- /dev/null +++ b/checkpoint-900/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:b610cbc4242bb50b4985b00e205994ae514fec6d9e2273f2b545a583a07b154b +size 4155
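
The log above is the tail of an auto-generated `trainer_state.json`: each `log_history` entry records the epoch, learning rate, loss, and global step, and the closing fields give `logging_steps` = 1, `max_steps` = 1568, `num_train_epochs` = 32, and `save_steps` = 100. The logged learning rates are consistent with a linear decay toward 0 over `max_steps` (for example, step 784, exactly halfway through 1568 steps, logs 0.0001, half of 2e-4). The sketch below is not part of the repository; the local path and the linear-schedule check are assumptions for illustration only. It loads the checkpoint's state file and compares each logged `learning_rate` against `lr0 * (1 - step / max_steps)`.

```python
import json

# Minimal sketch (assumed local path, not shipped with this repo):
# read a checkpoint's trainer_state.json and compare the logged learning
# rates against a linear decay lr0 * (1 - step / max_steps).
with open("checkpoint-900/trainer_state.json") as f:
    state = json.load(f)

max_steps = state["max_steps"]  # 1568 in this run
entries = [e for e in state["log_history"] if "loss" in e]

# Infer the initial learning rate by inverting the linear schedule
# at the first logged step.
first = entries[0]
lr0 = first["learning_rate"] / (1 - first["step"] / max_steps)

for e in entries[-5:]:
    expected = lr0 * (1 - e["step"] / max_steps)
    print(f"step {e['step']:4d}  loss {e['loss']:.4f}  "
          f"lr {e['learning_rate']:.3e}  vs linear estimate {expected:.3e}")
```

Since `save_steps` is 100, a checkpoint directory is written every 100 optimizer steps, which is consistent with this state file ending at step 900 and the accompanying `checkpoint-900/training_args.bin` pointer above.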